def test_is_resource_version_id():
    """
    Test whether the is_resource_version_id() method of the Id class works correctly.
    """
    assert Id.is_resource_version_id("test::Resource[agent,key=id],v=3")
    assert Id.is_resource_version_id("test::mod::Resource[agent,key=id],v=3")
    assert not Id.is_resource_version_id("test::Resource[agent,key=id]")
    assert not Id.is_resource_version_id("test::mod::Resource[agent,key=id]")
    assert not Id.is_resource_version_id("test::Resource")
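# Hedged usage sketch (not part of the original test suite): it illustrates how a resource
# version id of the form "<entity_type>[<agent>,<attribute>=<value>],v=<version>" is taken
# apart by Id.parse_id(). All field names used below (entity_type, attribute, attribute_value,
# version, get_agent_name(), resource_str()) also appear in the snippets that follow; the
# exact return types are an assumption.
def example_parse_resource_version_id():
    parsed = Id.parse_id("test::Resource[agent,key=id],v=3")
    assert parsed.entity_type == "test::Resource"
    assert parsed.get_agent_name() == "agent"
    assert parsed.attribute == "key"
    assert parsed.attribute_value == "id"
    assert int(parsed.version) == 3
    # resource_str() drops the version suffix again
    assert parsed.resource_str() == "test::Resource[agent,key=id]"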
def test_resource(self):
    cache = AgentCache()
    value = "test too"
    resource = Id("test::Resource", "test", "key", "test", 100).get_instance()
    cache.cache_value("test", value, resource=resource)
    assert value == cache.find("test", resource=resource)
def test_context_changes():
    """Test registering changes in the handler context"""
    resource = PurgeableResource(Id.parse_id("std::File[agent,path=/test],v=1"))
    ctx = HandlerContext(resource)

    # use an AttributeStateChange instance
    ctx.update_changes({"value": AttributeStateChange(current="a", desired="b")})
    assert len(ctx.changes) == 1

    # use a dict
    ctx.update_changes({"value": dict(current="a", desired="b")})
    assert len(ctx.changes) == 1
    assert isinstance(ctx.changes["value"], AttributeStateChange)

    # use a dict with an empty string
    ctx.update_changes({"value": dict(current="", desired="value")})
    assert len(ctx.changes) == 1
    assert ctx.changes["value"].current == ""
    assert ctx.changes["value"].desired == "value"

    # use a tuple
    ctx.update_changes({"value": ("a", "b")})
    assert len(ctx.changes) == 1
    assert isinstance(ctx.changes["value"], AttributeStateChange)

    # wrong argument types raise InvalidOperation
    with pytest.raises(InvalidOperation):
        ctx.update_changes({"value": ("a", "b", 3)})

    with pytest.raises(InvalidOperation):
        ctx.update_changes({"value": ["a", "b"]})

    with pytest.raises(InvalidOperation):
        ctx.update_changes({"value": "test"})
def test_resource_fail(my_resource):
    cache = AgentCache()
    value = "test too"
    resource = Id("test::Resource", "test", "key", "test", 100).get_instance()
    cache.cache_value("test", value, resource=resource)

    with pytest.raises(KeyError):
        assert value == cache.find("test")
async def dryrun_update(
    self, env: data.Environment, dryrun_id: uuid.UUID, resource: ResourceVersionIdStr, changes: JsonType
) -> Apireturn:
    async with self.dryrun_lock:
        payload = {"changes": changes, "id_fields": Id.parse_id(resource).to_dict(), "id": resource}
        await data.DryRun.update_resource(dryrun_id, resource, payload)
    return 200
def test_resource_and_version():
    cache = AgentCache()
    value = "test too"
    resource = Id("test::Resource", "test", "key", "test", 100).get_instance()
    version = 200
    cache.open_version(version)
    cache.cache_value("test", value, resource=resource, version=version)
    assert value == cache.find("test", resource=resource, version=version)
def test_CRUD_handler_purged_response(purged_desired, purged_actual, excn, create, delete, updated, caplog):
    """
    purged_actual and excn are conceptually equivalent; this test case serves to prove that they are,
    in fact, equivalent.
    """
    caplog.set_level(logging.DEBUG)

    class DummyCrud(CRUDHandler):
        def __init__(self):
            self.updated = False
            self.created = False
            self.deleted = False

        def read_resource(self, ctx: HandlerContext, resource: resources.PurgeableResource) -> None:
            resource.purged = purged_actual
            if updated:
                resource.value = "b"
            if excn:
                raise ResourcePurged()

        def update_resource(self, ctx: HandlerContext, changes: dict, resource: resources.PurgeableResource) -> None:
            self.updated = True

        def create_resource(self, ctx: HandlerContext, resource: resources.PurgeableResource) -> None:
            self.created = True

        def delete_resource(self, ctx: HandlerContext, resource: resources.PurgeableResource) -> None:
            self.deleted = True

    @resource("aa::Aa", "aa", "aa")
    class TestResource(PurgeableResource):
        fields = ("value",)

    res = TestResource(Id("aa::Aa", "aa", "aa", "aa", 1))
    res.purged = purged_desired
    res.value = "a"

    ctx = HandlerContext(res, False)
    handler = DummyCrud()
    handler.execute(ctx, res, False)

    assert handler.updated == ((not (create or delete)) and updated and not purged_desired)
    assert handler.created == create
    assert handler.deleted == delete

    no_error_in_logs(caplog)
    log_contains(
        caplog, "inmanta.agent.handler", logging.DEBUG, "resource aa::Aa[aa,aa=aa],v=1: Calling read_resource"
    )
def test_get_or_else(self):
    called = []

    def creator(param, resource, version):
        called.append("x")
        return param

    cache = AgentCache()
    value = "test too"
    value2 = "test too x"
    resource = Id("test::Resource", "test", "key", "test", 100).get_instance()
    resourcev2 = Id("test::Resource", "test", "key", "test", 200).get_instance()
    assert 200 == resourcev2.id.version
    version = 200
    cache.open_version(version)
    assert value == cache.get_or_else("test", creator, resource=resource, version=version, param=value)
    assert value == cache.get_or_else("test", creator, resource=resource, version=version, param=value)
    assert len(called) == 1
    assert value == cache.get_or_else("test", creator, resource=resourcev2, version=version, param=value)
    assert len(called) == 1
    assert value2 == cache.get_or_else("test", creator, resource=resource, version=version, param=value2)
async def dryrun_diff(self, env: data.Environment, version: int, report_id: uuid.UUID) -> DryRunReport:
    dryrun = await data.DryRun.get_one(environment=env.id, model=version, id=report_id)
    if dryrun is None:
        raise NotFound("The given dryrun does not exist!")

    resources = dryrun.to_dict()["resources"]
    from_resources = {}
    to_resources = {}
    resources_with_already_known_status = {
        resource_version_id: resource
        for resource_version_id, resource in resources.items()
        if resource.get("diff_status")
    }
    resources_to_diff = {
        resource_version_id: resource
        for resource_version_id, resource in resources.items()
        if resource_version_id not in resources_with_already_known_status.keys()
    }
    for resource_version_id, resource in resources_to_diff.items():
        resource_id = Id.parse_id(resource_version_id).resource_str()
        from_attributes = self.get_attributes_from_changes(resource["changes"], "current")
        to_attributes = self.get_attributes_from_changes(resource["changes"], "desired")
        from_resources[resource_id] = diff.Resource(resource_id, from_attributes)
        to_resources[resource_id] = diff.Resource(resource_id, to_attributes)
        if "purged" in resource["changes"]:
            if self.resource_will_be_unpurged(from_attributes, to_attributes):
                from_resources.pop(resource_id)
            if self.resource_will_be_purged(from_attributes, to_attributes):
                to_resources.pop(resource_id)

    version_diff = diff.generate_diff(from_resources, to_resources, include_unmodified=True)
    version_diff += [
        ResourceDiff(
            resource_id=Id.parse_resource_version_id(rvid).resource_str(),
            attributes={},
            status=resource.get("diff_status"),
        )
        for rvid, resource in resources_with_already_known_status.items()
    ]
    version_diff.sort(key=lambda r: r.resource_id)

    dto = DryRunReport(summary=dryrun.to_dto(), diff=version_diff)
    return dto
def test_context_manager():
    cache = AgentCache()
    value = "test too"
    version = 200
    with cache.manager(version):
        cache.cache_value("test", value, version=version)
        cache.cache_value("test0", value, version=version)
        cache.cache_value("test4", value, version=version)

        resource = Id("test::Resource", "test", "key", "test", 100).get_instance()
        cache.cache_value("testx", value, resource=resource)

        assert value == cache.find("test", version=version)
        assert value == cache.find("testx", resource=resource)

    # the resource-scoped entry survives closing the version, the version-scoped entries do not
    assert value == cache.find("testx", resource=resource)
    with pytest.raises(KeyError):
        assert value == cache.find("test", version=version)
async def test_db_migration(migrate_v3_to_v4, postgresql_client: Connection):
    for table_name in ["form", "formrecord", "resourceversionid"]:
        assert not await does_table_exist(postgresql_client, table_name)

    result = await postgresql_client.fetch(
        "SELECT environment, version, action_id, resource_version_ids FROM public.resourceaction"
    )
    for r in result:
        assert r["environment"] == uuid.UUID("6c66ca44-da58-4924-ad17-151abc2f3726")
        rvids_old_table = migrate_v3_to_v4[r["action_id"]]
        rvids_new_table = r["resource_version_ids"]
        assert sorted(rvids_old_table) == sorted(rvids_new_table)
        assert r["version"] == int(Id.parse_id(rvids_old_table[0]).version)

    # Verify that the number of action_ids matches
    assert len(result) == len(migrate_v3_to_v4)
def test_version_close():
    cache = AgentCache()
    value = "test too"
    version = 200
    cache.open_version(version)
    cache.cache_value("test", value, version=version)
    cache.cache_value("test0", value, version=version)
    cache.cache_value("test4", value, version=version)

    resource = Id("test::Resource", "test", "key", "test", 100).get_instance()
    cache.cache_value("testx", value, resource=resource)

    assert value == cache.find("test", version=version)
    assert value == cache.find("testx", resource=resource)

    cache.close_version(version)

    # the resource-scoped entry survives closing the version, the version-scoped entries do not
    assert value == cache.find("testx", resource=resource)
    with pytest.raises(KeyError):
        assert value == cache.find("test", version=version)
        raise AssertionError("Should get exception")
async def test_add_value_to_resource_table(
    migrate_v202106080_to_v202106210: Callable[[], Awaitable[None]],
    postgresql_client: Connection,
    get_columns_in_db_table: Callable[[str], Awaitable[List[str]]],
) -> None:
    """
    Test whether the value column was added to the resource table.
    """
    # Migrate DB schema
    await migrate_v202106080_to_v202106210()

    results = await postgresql_client.fetch("SELECT resource_id, resource_id_value FROM public.Resource")
    for r in results:
        assert r["resource_id_value"] is not None
        parsed_id = Id.parse_id(r["resource_id"])
        assert r["resource_id_value"] == parsed_id.attribute_value
async def _save_resources_without_changes_to_dryrun(
    self, dryrun_id: uuid.UUID, resources: List[data.Resource], diff_status: Optional[ResourceDiffStatus] = None
):
    for res in resources:
        parsed_id = Id.parse_id(res.resource_version_id)
        payload = {
            "changes": {},
            "id_fields": {
                "entity_type": res.resource_type,
                "agent_name": res.agent,
                "attribute": parsed_id.attribute,
                "attribute_value": parsed_id.attribute_value,
                "version": res.model,
            },
            "id": res.resource_version_id,
        }
        payload = {**payload, "diff_status": diff_status} if diff_status else payload
        await data.DryRun.update_resource(dryrun_id, res.resource_version_id, payload)
def test_get_or_else_none():
    called = []

    def creator(param, resource, version):
        called.append("x")
        return param

    class Sequencer(object):
        def __init__(self, sequence):
            self.seq = sequence
            self.count = 0

        def __call__(self, **kwargs):
            out = self.seq[self.count]
            self.count += 1
            return out

    cache = AgentCache()
    value = "test too"
    resource = Id("test::Resource", "test", "key", "test", 100).get_instance()
    version = 100
    cache.open_version(version)
    assert None is cache.get_or_else("test", creator, resource=resource, version=version, cache_none=False, param=None)
    assert len(called) == 1
    assert None is cache.get_or_else("test", creator, resource=resource, version=version, cache_none=False, param=None)
    assert len(called) == 2
    assert value == cache.get_or_else("test", creator, resource=resource, version=version, cache_none=False, param=value)
    assert value == cache.get_or_else("test", creator, resource=resource, version=version, cache_none=False, param=value)
    assert len(called) == 3

    seq = Sequencer([None, None, "A"])
    assert None is cache.get_or_else("testx", seq, resource=resource, version=version, cache_none=False)
    assert seq.count == 1
    assert None is cache.get_or_else("testx", seq, resource=resource, version=version, cache_none=False)
    assert seq.count == 2
    assert "A" == cache.get_or_else("testx", seq, resource=resource, version=version, cache_none=False)
    assert seq.count == 3
    assert "A" == cache.get_or_else("testx", seq, resource=resource, version=version, cache_none=False)
    assert seq.count == 3
    assert "A" == cache.get_or_else("testx", seq, resource=resource, version=version, cache_none=False)
    assert seq.count == 3
def cleanup(requires: Union[ResourceVersionIdStr, Resource, Id]) -> Id:
    """
    Normalize a requirement to an Id.

    :param requires: a requirement; can be a string, a Resource or an Id
    :return: the same requirement, but as an Id
    :raises Exception: the requirement can not be converted
    """
    if isinstance(requires, str):
        myid = Id.parse_id(requires)
        if myid.version == 0:
            raise Exception(
                f"A dependency manager inserted a resource id without a version. This is not allowed: {requires}"
            )
        return myid
    if isinstance(requires, Resource):
        return requires.id
    if isinstance(requires, Id):
        return requires
    raise Exception(
        f"A dependency manager inserted the object {repr(requires)} of type {type(requires)} "
        "into a requires relation. However, only string, Resource or Id are allowable types."
    )
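# Hedged usage sketch for cleanup() above, covering the three accepted input types. The
# Resource instance is built the same way as in the cache tests earlier in this section;
# that Id(...).get_instance() yields a Resource whose .id is set is an assumption based on
# those tests.
def example_cleanup_usage():
    # a resource version id string with an explicit version is parsed into an Id
    assert isinstance(cleanup("test::Resource[agent,key=id],v=3"), Id)

    # an Id instance is passed through unchanged
    myid = Id.parse_id("test::Resource[agent,key=id],v=3")
    assert cleanup(myid) is myid

    # a Resource is reduced to its id
    res = Id("test::Resource", "agent", "key", "id", 3).get_instance()
    assert cleanup(res) is res.id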
async def test_addition_resource_type_column(migrate_v2_to_v3, postgresql_client: Connection):
    results = await postgresql_client.fetch("SELECT resource_version_id, resource_type FROM public.Resource")
    for r in results:
        assert r["resource_type"] is not None
        parsed_id = Id.parse_id(r["resource_version_id"])
        assert r["resource_type"] == parsed_id.entity_type
def validate_resource_version_id(
    ctx: click.Context, option: Union[click.Option, click.Parameter], value: str
) -> ResourceVersionIdStr:
    if not Id.is_resource_version_id(value):
        raise click.BadParameter(value)
    return ResourceVersionIdStr(value)
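# A sketch of how the callback above is typically wired into a click option. The command
# and option names here are hypothetical; only callback= (which click invokes with
# ctx, param, value) is the mechanism being illustrated.
@click.command()
@click.option("--resource-version-id", callback=validate_resource_version_id, required=True)
def example_show_resource(resource_version_id: ResourceVersionIdStr) -> None:
    click.echo(resource_version_id)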
def version_report(client: Client, environment: str, version: str, show_detailed_report: bool) -> None:
    tid = client.to_environment_id(environment)
    result = client.do_request("get_version", arguments=dict(tid=tid, id=version, include_logs=True))
    if not result:
        return

    agents: Dict[str, Dict[str, List[str]]] = defaultdict(lambda: defaultdict(lambda: []))
    for res in result["resources"]:
        if len(res["actions"]) > 0 or show_detailed_report:
            agents[res["agent"]][res["resource_type"]].append(res)

    for agent in sorted(agents.keys()):
        click.echo(click.style("Agent: %s" % agent, bold=True))
        click.echo("=" * 72)

        for t in sorted(agents[agent].keys()):
            parsed_resource_version_id = Id.parse_id(ResourceVersionIdStr(agents[agent][t][0]["resource_version_id"]))
            click.echo(
                click.style("Resource type:", bold=True)
                + " {type} ({attr})".format(type=t, attr=parsed_resource_version_id.attribute)
            )
            click.echo("-" * 72)

            for res in agents[agent][t]:
                parsed_id = Id.parse_id(res["resource_version_id"])
                click.echo((click.style(parsed_id.attribute_value, bold=True) + " (#actions=%d)") % len(res["actions"]))

                # for a dryrun show only the latest action, for a deploy show all
                if not result["model"]["released"]:
                    if len(res["actions"]) > 0:
                        action = res["actions"][0]
                        click.echo("* last check: %s" % action["timestamp"])
                        click.echo("* result: %s" % ("error" if action["level"] != "INFO" else "success"))
                        if len(action["data"]) == 0:
                            click.echo("* no changes")
                        else:
                            click.echo("* changes:")
                            for field in sorted(action["data"].keys()):
                                values = action["data"][field]
                                if field == "hash":
                                    click.echo(" - content:")
                                    diff_value = client.do_request("diff", arguments=dict(a=values[0], b=values[1]))
                                    click.echo(" " + " ".join(diff_value["diff"]))
                                else:
                                    click.echo(" - %s:" % field)
                                    click.echo(" " + click.style("from:", bold=True) + " %s" % values[0])
                                    click.echo(" " + click.style("to:", bold=True) + " %s" % values[1])
                            click.echo("")
                    click.echo("")
                else:
                    pass

            click.echo("")
async def dryrun_request(self, env: data.Environment, version_id: int) -> Apireturn:
    model = await data.ConfigurationModel.get_version(environment=env.id, version=version_id)
    if model is None:
        return 404, {"message": "The request version does not exist."}

    # fetch all resources in this configuration model and create a list of distinct agents
    rvs = await data.Resource.get_list(model=version_id, environment=env.id)

    # Create a dryrun document
    dryrun = await data.DryRun.create(environment=env.id, model=version_id, todo=len(rvs), total=len(rvs))

    agents = await data.ConfigurationModel.get_agents(env.id, version_id)
    await self.autostarted_agent_manager._ensure_agents(env, agents)

    for agent in agents:
        client = self.agent_manager.get_agent_client(env.id, agent)
        if client is not None:
            self.add_background_task(client.do_dryrun(env.id, dryrun.id, agent, version_id))
        else:
            LOGGER.warning(
                "Agent %s from model %s in env %s is not available for a dryrun", agent, version_id, env.id
            )

    # Mark the resources in an undeployable state as done
    async with self.dryrun_lock:
        undeployable_ids = await model.get_undeployable()
        undeployable_version_ids = [ResourceVersionIdStr(rid + ",v=%s" % version_id) for rid in undeployable_ids]
        undeployable = await data.Resource.get_resources(
            environment=env.id, resource_version_ids=undeployable_version_ids
        )
        for res in undeployable:
            parsed_id = Id.parse_id(res.resource_version_id)
            payload = {
                "changes": {},
                "id_fields": {
                    "entity_type": res.resource_type,
                    "agent_name": res.agent,
                    "attribute": parsed_id.attribute,
                    "attribute_value": parsed_id.attribute_value,
                    "version": res.model,
                },
                "id": res.resource_version_id,
            }
            await data.DryRun.update_resource(dryrun.id, res.resource_version_id, payload)

        skip_undeployable_ids = await model.get_skipped_for_undeployable()
        skip_undeployable_version_ids = [
            ResourceVersionIdStr(rid + ",v=%s" % version_id) for rid in skip_undeployable_ids
        ]
        skipundeployable = await data.Resource.get_resources(
            environment=env.id, resource_version_ids=skip_undeployable_version_ids
        )
        for res in skipundeployable:
            parsed_id = Id.parse_id(res.resource_version_id)
            payload = {
                "changes": {},
                "id_fields": {
                    "entity_type": res.resource_type,
                    "agent_name": res.agent,
                    "attribute": parsed_id.attribute,
                    "attribute_value": parsed_id.attribute_value,
                    "version": res.model,
                },
                "id": res.resource_version_id,
            }
            await data.DryRun.update_resource(dryrun.id, res.resource_version_id, payload)

    return 200, {"dryrun": dryrun}
async def put_version(
    self,
    env: data.Environment,
    version: int,
    resources: List[JsonType],
    resource_state: Dict[ResourceIdStr, const.ResourceState],
    unknowns: List[Dict[str, PrimitiveTypes]],
    version_info: JsonType,
    compiler_version: Optional[str] = None,
) -> Apireturn:
    """
    :param resources: a list of serialized resources
    :param unknowns: a list of dicts with the following structure
        {
            "resource": ResourceIdStr,
            "parameter": str,
            "source": str
        }
    :param version_info:
    :param compiler_version:
    :return:
    """
    if not compiler_version:
        raise BadRequest("Older compiler versions are no longer supported, please update your compiler")

    if version > env.last_version:
        raise BadRequest(
            f"The version number used is {version} "
            f"which is higher than the last outstanding reservation {env.last_version}"
        )
    if version <= 0:
        raise BadRequest(f"The version number used ({version}) is not positive")

    started = datetime.datetime.now().astimezone()

    agents = set()
    # lookup for all RV's, lookup by resource id
    rv_dict: Dict[ResourceVersionIdStr, data.Resource] = {}
    # reverse dependency tree, Resource.provides [:] -- Resource.requires as resource_id
    provides_tree: Dict[str, List[str]] = defaultdict(lambda: [])
    # list of all resources which have a cross agent dependency, as a tuple (dependant, requires)
    cross_agent_dep = []
    # list of all resources which are undeployable
    undeployable: List[data.Resource] = []

    resource_objects = []
    resource_version_ids = []
    for res_dict in resources:
        res_obj = data.Resource.new(env.id, res_dict["id"])
        if res_obj.resource_id in resource_state:
            res_obj.status = const.ResourceState[resource_state[res_obj.resource_id]]
            if res_obj.status in const.UNDEPLOYABLE_STATES:
                undeployable.append(res_obj)

        # collect all agents
        agents.add(res_obj.agent)

        attributes = {}
        for field, value in res_dict.items():
            if field != "id":
                attributes[field] = value

        res_obj.attributes = attributes
        resource_objects.append(res_obj)
        resource_version_ids.append(res_obj.resource_version_id)

        rv_dict[res_obj.resource_id] = res_obj

        # find cross agent dependencies
        agent = res_obj.agent
        resc_id = res_obj.resource_id
        if "requires" not in attributes:
            LOGGER.warning("Received resource without requires attribute (%s)" % res_obj.resource_id)
        else:
            for req in attributes["requires"]:
                rid = Id.parse_id(req)
                provides_tree[rid.resource_str()].append(resc_id)
                if rid.get_agent_name() != agent:
                    # it is a CAD
                    cross_agent_dep.append((res_obj, rid))

    # hook up all CADs
    for f, t in cross_agent_dep:
        res_obj = rv_dict[t.resource_str()]
        res_obj.provides.append(f.resource_version_id)

    # detect failed compiles
    def safe_get(input: JsonType, key: str, default: object) -> object:
        if not isinstance(input, dict):
            return default
        if key not in input:
            return default
        return input[key]

    metadata: JsonType = safe_get(version_info, const.EXPORT_META_DATA, {})
    compile_state = safe_get(metadata, const.META_DATA_COMPILE_STATE, "")
    failed = compile_state == const.Compilestate.failed

    resources_to_purge: List[data.Resource] = []
    if not failed and (await env.get(PURGE_ON_DELETE)):
        # search for deleted resources (purge_on_delete)
        resources_to_purge = await data.Resource.get_deleted_resources(env.id, version, set(rv_dict.keys()))

        previous_requires = {}
        for res in resources_to_purge:
            LOGGER.warning("Purging %s, purged resource based on %s" % (res.resource_id, res.resource_version_id))

            attributes = res.attributes.copy()
            attributes["purged"] = True
            attributes["requires"] = []
            res_obj = data.Resource.new(
                env.id,
                resource_version_id=ResourceVersionIdStr("%s,v=%s" % (res.resource_id, version)),
                attributes=attributes,
            )
            resource_objects.append(res_obj)

            previous_requires[res_obj.resource_id] = res.attributes["requires"]
            resource_version_ids.append(res_obj.resource_version_id)
            agents.add(res_obj.agent)
            rv_dict[res_obj.resource_id] = res_obj

        # invert dependencies on purges
        for res_id, requires in previous_requires.items():
            res_obj = rv_dict[res_id]
            for require in requires:
                req_id = Id.parse_id(require)

                if req_id.resource_str() in rv_dict:
                    req_res = rv_dict[req_id.resource_str()]

                    req_res.attributes["requires"].append(res_obj.resource_version_id)
                    res_obj.provides.append(req_res.resource_version_id)

    undeployable_ids: List[str] = [res.resource_id for res in undeployable]
    # get skipped for undeployable
    work = list(undeployable_ids)
    skippeable: Set[str] = set()
    while len(work) > 0:
        current = work.pop()
        if current in skippeable:
            continue
        skippeable.add(current)
        work.extend(provides_tree[current])
    skip_list = sorted(list(skippeable - set(undeployable_ids)))

    try:
        cm = data.ConfigurationModel(
            environment=env.id,
            version=version,
            date=datetime.datetime.now().astimezone(),
            total=len(resources),
            version_info=version_info,
            undeployable=undeployable_ids,
            skipped_for_undeployable=skip_list,
        )
        await cm.insert()
    except asyncpg.exceptions.UniqueViolationError:
        raise ServerError("The given version is already defined. Versions should be unique.")

    await data.Resource.insert_many(resource_objects)
    await cm.update_fields(total=cm.total + len(resources_to_purge))

    for uk in unknowns:
        if "resource" not in uk:
            uk["resource"] = ""

        if "metadata" not in uk:
            uk["metadata"] = {}

        up = data.UnknownParameter(
            resource_id=uk["resource"],
            name=uk["parameter"],
            source=uk["source"],
            environment=env.id,
            version=version,
            metadata=uk["metadata"],
        )
        await up.insert()

    for agent in agents:
        await self.agentmanager_service.ensure_agent_registered(env, agent)

    # Don't log ResourceActions without resource_version_ids, because
    # no API call exists to retrieve them.
    if resource_version_ids:
        now = datetime.datetime.now().astimezone()
        log_line = data.LogLine.log(logging.INFO, "Successfully stored version %(version)d", version=version)
        self.resource_service.log_resource_action(env.id, resource_version_ids, logging.INFO, now, log_line.msg)
        ra = data.ResourceAction(
            environment=env.id,
            version=version,
            resource_version_ids=resource_version_ids,
            action_id=uuid.uuid4(),
            action=const.ResourceAction.store,
            started=started,
            finished=now,
            messages=[log_line],
        )
        await ra.insert()

    LOGGER.debug("Successfully stored version %d", version)

    self.resource_service.clear_env_cache(env)

    auto_deploy = await env.get(data.AUTO_DEPLOY)
    if auto_deploy:
        LOGGER.debug("Auto deploying version %d", version)
        push_on_auto_deploy = cast(bool, await env.get(data.PUSH_ON_AUTO_DEPLOY))
        agent_trigger_method_on_autodeploy = cast(str, await env.get(data.AGENT_TRIGGER_METHOD_ON_AUTO_DEPLOY))
        agent_trigger_method_on_autodeploy = const.AgentTriggerMethod[agent_trigger_method_on_autodeploy]
        await self.release_version(env, version, push_on_auto_deploy, agent_trigger_method_on_autodeploy)

    return 200