async def test_log_without_kwargs(server, client, environment):
    """A raw dict log entry that carries no kwargs is stored and served back with HTTP 200."""
    env_uuid = uuid.UUID(environment)

    # A released model version is required before resource actions can be queried.
    cm = data.ConfigurationModel(
        environment=env_uuid,
        version=1,
        date=datetime.datetime.now(),
        total=1,
        released=True,
        version_info={},
    )
    await cm.insert()

    action = data.ResourceAction(
        environment=env_uuid,
        version=1,
        resource_version_ids=[
            f"{resource_id_a},v=1",
            "std::Directory[agent1,path=/tmp/dir2],v=1",
        ],
        action_id=uuid.uuid4(),
        action=const.ResourceAction.deploy,
        started=datetime.datetime.now(),
    )
    await action.insert()

    # Plain dict log line: deliberately no "kwargs" key.
    raw_line = {
        "level": "INFO",
        "msg": "Setting deployed due to known good status",
        "timestamp": datetime.datetime.now(),
        "args": [],
    }
    action.add_logs([raw_line])
    await action.save()

    result = await client.resource_logs(environment, resource_id_a)
    assert result.code == 200
async def test_log_nested_kwargs(server, client, environment):
    """Nested dict kwargs on a LogLine survive the round trip through resource_logs."""
    env_uuid = uuid.UUID(environment)

    cm = data.ConfigurationModel(
        environment=env_uuid,
        version=1,
        date=datetime.datetime.now(),
        total=1,
        released=True,
        version_info={},
    )
    await cm.insert()

    action = data.ResourceAction(
        environment=env_uuid,
        version=1,
        resource_version_ids=[
            f"{resource_id_a},v=1",
            "std::Directory[agent1,path=/tmp/dir2],v=1",
        ],
        action_id=uuid.uuid4(),
        action=const.ResourceAction.deploy,
        started=datetime.datetime.now(),
    )
    await action.insert()

    # Two-level nested structure passed as a kwarg on the log line.
    characteristics = {
        "current": {
            "Status": "Planned"
        },
        "desired": {
            "Status": "In Service"
        },
    }
    log_line = data.LogLine.log(
        logging.INFO,
        "Calling update_resource ",
        timestamp=datetime.datetime.now(),
        changes={"characteristics": characteristics},
    )
    action.add_logs([log_line])
    await action.save()

    result = await client.resource_logs(environment, resource_id_a)
    assert result.code == 200
    returned = result.result["data"][0]["kwargs"]["changes"]["characteristics"]
    assert returned == {
        "current": {
            "Status": "Planned"
        },
        "desired": {
            "Status": "In Service"
        },
    }
def test_data_document_recursion(data_module):
    """Insert a ResourceAction whose messages field embeds a LogLine document."""
    environment = uuid.uuid4()
    timestamp = datetime.datetime.now()

    # Embedded document inside the action's messages list.
    message = data.LogLine.log(
        logging.INFO, "Successfully stored version %(version)d", version=2
    )
    action = data.ResourceAction(
        environment=environment,
        resource_version_ids=["id"],
        action_id=uuid.uuid4(),
        action=const.ResourceAction.store,
        started=timestamp,
        finished=timestamp,
        messages=[message],
    )
    yield action.insert()
def test_resource_action(data_module):
    """Incremental add_changes/add_logs calls accumulate correctly across saves."""
    environment = uuid.uuid4()
    action_id = uuid.uuid4()
    action = data.ResourceAction(
        environment=environment,
        resource_version_ids=[],
        action_id=action_id,
        action=const.ResourceAction.deploy,
        started=datetime.datetime.now(),
    )
    yield action.insert()

    # First change set: field1 gets old/new values, field2 is empty.
    action.add_changes({"rid": {
        "field1": {
            "old": "a",
            "new": "b"
        },
        "field2": {}
    }})
    yield action.save()

    # Second change set: field2 now gets values, field3 is introduced empty.
    action.add_changes({"rid": {
        "field2": {
            "old": "c",
            "new": "d"
        },
        "field3": {}
    }})
    yield action.save()

    # Two batches of two empty log entries each → four messages total.
    action.add_logs([{}, {}])
    yield action.save()
    action.add_logs([{}, {}])
    yield action.save()

    stored = yield data.ResourceAction.get_by_id(action.id)
    assert len(stored.changes["rid"]) == 3
    assert len(stored.messages) == 4
    assert stored.changes["rid"]["field1"]["old"] == "a"
    assert stored.changes["rid"]["field1"]["new"] == "b"
    assert stored.changes["rid"]["field2"]["old"] == "c"
    assert stored.changes["rid"]["field2"]["new"] == "d"
    assert stored.changes["rid"]["field3"] == {}
async def put_version(
    self,
    env: data.Environment,
    version: int,
    resources: List[JsonType],
    resource_state: Dict[ResourceIdStr, const.ResourceState],
    unknowns: List[Dict[str, PrimitiveTypes]],
    version_info: JsonType,
    compiler_version: Optional[str] = None,
) -> Apireturn:
    """
    Store a new configuration model version with its resources and unknowns.

    :param env: the environment the version belongs to
    :param version: the (pre-reserved) version number; must be positive and
        not exceed ``env.last_version``
    :param resources: a list of serialized resources
    :param resource_state: maps resource ids to their initial state
    :param unknowns: dict with the following structure
        {
         "resource": ResourceIdStr,
         "parameter": str,
         "source": str
        }
    :param version_info: opaque metadata for the version; compile state is read from it
    :param compiler_version: required; requests without it are rejected
    :return: 200 on success
    :raises BadRequest: missing compiler version, or version out of range
    :raises ServerError: the version already exists
    """
    # Reject exports from compilers too old to send their version.
    if not compiler_version:
        raise BadRequest(
            "Older compiler versions are no longer supported, please update your compiler"
        )
    if version > env.last_version:
        raise BadRequest(
            f"The version number used is {version} "
            f"which is higher than the last outstanding reservation {env.last_version}"
        )
    if version <= 0:
        raise BadRequest(
            f"The version number used ({version}) is not positive")

    # Timestamp for the stored ResourceAction below (timezone-aware).
    started = datetime.datetime.now().astimezone()

    agents = set()
    # lookup for all RV's, lookup by resource id
    rv_dict: Dict[ResourceVersionIdStr, data.Resource] = {}
    # reverse dependency tree, Resource.provides [:] -- Resource.requires as resource_id
    provides_tree: Dict[str, List[str]] = defaultdict(lambda: [])
    # list of all resources which have a cross agent dependency, as a tuple, (dependant,requires)
    cross_agent_dep = []
    # list of all resources which are undeployable
    undeployable: List[data.Resource] = []

    resource_objects = []
    resource_version_ids = []
    for res_dict in resources:
        res_obj = data.Resource.new(env.id, res_dict["id"])
        if res_obj.resource_id in resource_state:
            res_obj.status = const.ResourceState[resource_state[
                res_obj.resource_id]]
            if res_obj.status in const.UNDEPLOYABLE_STATES:
                undeployable.append(res_obj)

        # collect all agents
        agents.add(res_obj.agent)

        # Everything except "id" becomes a resource attribute.
        attributes = {}
        for field, value in res_dict.items():
            if field != "id":
                attributes[field] = value

        res_obj.attributes = attributes
        resource_objects.append(res_obj)
        resource_version_ids.append(res_obj.resource_version_id)
        rv_dict[res_obj.resource_id] = res_obj

        # find cross agent dependencies
        agent = res_obj.agent
        resc_id = res_obj.resource_id
        if "requires" not in attributes:
            LOGGER.warning(
                "Received resource without requires attribute (%s)" %
                res_obj.resource_id)
        else:
            for req in attributes["requires"]:
                rid = Id.parse_id(req)
                # Record the reverse edge: req provides resc_id.
                provides_tree[rid.resource_str()].append(resc_id)
                if rid.get_agent_name() != agent:
                    # it is a CAD
                    cross_agent_dep.append((res_obj, rid))

    # hook up all CADs: the required resource learns which cross-agent
    # resource-version depends on it.
    for f, t in cross_agent_dep:
        res_obj = rv_dict[t.resource_str()]
        res_obj.provides.append(f.resource_version_id)

    # detect failed compiles
    def safe_get(input: JsonType, key: str, default: object) -> object:
        # Defensive lookup: version_info may not be a dict at all.
        if not isinstance(input, dict):
            return default
        if key not in input:
            return default
        return input[key]

    metadata: JsonType = safe_get(version_info, const.EXPORT_META_DATA, {})
    compile_state = safe_get(metadata, const.META_DATA_COMPILE_STATE, "")
    # NOTE(review): compile_state comes from JSON metadata and may be a plain
    # string, while const.Compilestate.failed looks like an enum member —
    # confirm this comparison is not always False.
    failed = compile_state == const.Compilestate.failed

    resources_to_purge: List[data.Resource] = []
    if not failed and (await env.get(PURGE_ON_DELETE)):
        # search for deleted resources (purge_on_delete): resources present in
        # earlier versions but absent from this export get a purge stand-in.
        resources_to_purge = await data.Resource.get_deleted_resources(
            env.id, version, set(rv_dict.keys()))
        previous_requires = {}
        for res in resources_to_purge:
            LOGGER.warning("Purging %s, purged resource based on %s" %
                           (res.resource_id, res.resource_version_id))

            attributes = res.attributes.copy()
            attributes["purged"] = True
            attributes["requires"] = []
            res_obj = data.Resource.new(
                env.id,
                resource_version_id=ResourceVersionIdStr(
                    "%s,v=%s" % (res.resource_id, version)),
                attributes=attributes,
            )
            resource_objects.append(res_obj)

            # Remember the old requires so dependencies can be inverted below.
            previous_requires[
                res_obj.resource_id] = res.attributes["requires"]
            resource_version_ids.append(res_obj.resource_version_id)
            agents.add(res_obj.agent)
            rv_dict[res_obj.resource_id] = res_obj

        # invert dependencies on purges: what the purged resource used to
        # require must now require the purge (delete children before parents).
        for res_id, requires in previous_requires.items():
            res_obj = rv_dict[res_id]
            for require in requires:
                req_id = Id.parse_id(require)
                if req_id.resource_str() in rv_dict:
                    req_res = rv_dict[req_id.resource_str()]
                    req_res.attributes["requires"].append(
                        res_obj.resource_version_id)
                    res_obj.provides.append(req_res.resource_version_id)

    undeployable_ids: List[str] = [res.resource_id for res in undeployable]
    # get skipped for undeployable: walk the provides tree transitively from
    # every undeployable resource; anything reachable must be skipped.
    work = list(undeployable_ids)
    skippeable: Set[str] = set()
    while len(work) > 0:
        current = work.pop()
        if current in skippeable:
            continue
        skippeable.add(current)
        work.extend(provides_tree[current])
    # The undeployable resources themselves are not "skipped", only dependents.
    skip_list = sorted(list(skippeable - set(undeployable_ids)))

    try:
        cm = data.ConfigurationModel(
            environment=env.id,
            version=version,
            date=datetime.datetime.now().astimezone(),
            total=len(resources),
            version_info=version_info,
            undeployable=undeployable_ids,
            skipped_for_undeployable=skip_list,
        )
        await cm.insert()
    except asyncpg.exceptions.UniqueViolationError:
        raise ServerError(
            "The given version is already defined. Versions should be unique."
        )

    await data.Resource.insert_many(resource_objects)
    # Purge stand-ins were added after the initial count; correct the total.
    await cm.update_fields(total=cm.total + len(resources_to_purge))

    for uk in unknowns:
        # Fill in optional fields before persisting.
        if "resource" not in uk:
            uk["resource"] = ""

        if "metadata" not in uk:
            uk["metadata"] = {}

        up = data.UnknownParameter(
            resource_id=uk["resource"],
            name=uk["parameter"],
            source=uk["source"],
            environment=env.id,
            version=version,
            metadata=uk["metadata"],
        )
        await up.insert()

    for agent in agents:
        await self.agentmanager_service.ensure_agent_registered(env, agent)

    # Don't log ResourceActions without resource_version_ids, because
    # no API call exists to retrieve them.
    if resource_version_ids:
        now = datetime.datetime.now().astimezone()
        log_line = data.LogLine.log(
            logging.INFO,
            "Successfully stored version %(version)d",
            version=version)
        self.resource_service.log_resource_action(env.id,
                                                  resource_version_ids,
                                                  logging.INFO, now,
                                                  log_line.msg)
        ra = data.ResourceAction(
            environment=env.id,
            version=version,
            resource_version_ids=resource_version_ids,
            action_id=uuid.uuid4(),
            action=const.ResourceAction.store,
            started=started,
            finished=now,
            messages=[log_line],
        )
        await ra.insert()

    LOGGER.debug("Successfully stored version %d", version)
    self.resource_service.clear_env_cache(env)

    auto_deploy = await env.get(data.AUTO_DEPLOY)
    if auto_deploy:
        LOGGER.debug("Auto deploying version %d", version)
        push_on_auto_deploy = cast(bool, await
                                   env.get(data.PUSH_ON_AUTO_DEPLOY))
        agent_trigger_method_on_autodeploy = cast(
            str, await env.get(data.AGENT_TRIGGER_METHOD_ON_AUTO_DEPLOY))
        # Convert the stored setting (a name) to the enum member.
        agent_trigger_method_on_autodeploy = const.AgentTriggerMethod[
            agent_trigger_method_on_autodeploy]
        await self.release_version(env, version, push_on_auto_deploy,
                                   agent_trigger_method_on_autodeploy)

    return 200
async def env_with_logs(client, server, environment):
    """Fixture: nine model versions with alternating deploy/pull actions and
    timestamped log lines; yields (environment, msg_timings)."""
    env_uuid = uuid.UUID(environment)

    # One naive timestamp per model version: 10:11 .. 10:19.
    cm_times = [
        datetime.datetime.strptime(f"2021-07-07T10:1{i}:00.0",
                                   "%Y-%m-%dT%H:%M:%S.%f")
        for i in range(1, 10)
    ]
    for version, stamp in enumerate(cm_times, start=1):
        cm = data.ConfigurationModel(
            environment=env_uuid,
            version=version,
            date=stamp,
            total=1,
            released=version != 1 and version != 9,
            version_info={},
        )
        await cm.insert()

    # UTC-aware log timestamps at minutes 1..29 of the same hour.
    base = datetime.datetime.strptime("2021-07-07T10:10:00.0",
                                      "%Y-%m-%dT%H:%M:%S.%f")
    msg_timings = [
        base.replace(minute=i).astimezone(datetime.timezone.utc)
        for i in range(1, 30)
    ]

    cursor = 0
    for version in range(1, 10):
        # Odd versions deploy (one log line), even versions pull (two lines).
        action = data.ResourceAction(
            environment=env_uuid,
            version=version,
            resource_version_ids=[
                f"{resource_id_a},v={version}",
                f"std::Directory[agent1,path=/tmp/dir2],v={version}",
            ],
            action_id=uuid.uuid4(),
            action=const.ResourceAction.deploy
            if version % 2 else const.ResourceAction.pull,
            started=cm_times[version - 1],
        )
        await action.insert()
        if version % 2:
            action.add_logs([
                data.LogLine.log(
                    logging.INFO,
                    "Successfully stored version %(version)d",
                    version=version,
                    timestamp=msg_timings[cursor],
                ),
            ])
            cursor += 1
        else:
            action.add_logs([
                data.LogLine.log(
                    logging.INFO,
                    "Resource version pulled by client for agent %(agent)s",
                    agent="admin",
                    timestamp=msg_timings[cursor],
                ),
                data.LogLine.log(
                    logging.DEBUG,
                    "Setting deployed due to known good status",
                    timestamp=msg_timings[cursor + 1]),
            ])
            cursor += 2
        await action.save()

    yield environment, msg_timings
async def test_resource_action_pagination(postgresql_client, client,
                                          clienthelper, server, agent):
    """ Test querying resource actions via the API, including the pagination links."""
    project = data.Project(name="test")
    await project.insert()
    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    await env.insert()

    # Add multiple versions of model
    for i in range(0, 11):
        cm = data.ConfigurationModel(
            environment=env.id,
            version=i,
            date=datetime.now(),
            total=1,
            version_info={},
        )
        await cm.insert()

    # Add resource actions for motd
    motd_first_start_time = datetime.now()
    # One action strictly before the shared timestamp ...
    earliest_action_id = uuid.uuid4()
    resource_action = data.ResourceAction(
        environment=env.id,
        version=0,
        resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={0}"],
        action_id=earliest_action_id,
        action=const.ResourceAction.deploy,
        started=motd_first_start_time - timedelta(minutes=1),
    )
    await resource_action.insert()
    resource_action.add_logs([
        data.LogLine.log(logging.INFO,
                         "Successfully stored version %(version)d",
                         version=0)
    ])
    await resource_action.save()

    # ... five actions sharing one timestamp, so paging must fall back to
    # a secondary sort key (the action id) to be stable ...
    action_ids_with_the_same_timestamp = []
    for i in range(1, 6):
        action_id = uuid.uuid4()
        action_ids_with_the_same_timestamp.append(action_id)
        resource_action = data.ResourceAction(
            environment=env.id,
            version=i,
            resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={i}"],
            action_id=action_id,
            action=const.ResourceAction.deploy,
            started=motd_first_start_time,
        )
        await resource_action.insert()
        resource_action.add_logs([
            data.LogLine.log(logging.INFO,
                             "Successfully stored version %(version)d",
                             version=i)
        ])
        await resource_action.save()
    # Expected API order for equal timestamps: descending action id.
    action_ids_with_the_same_timestamp = sorted(
        action_ids_with_the_same_timestamp, reverse=True)
    # ... and one action strictly after the shared timestamp.
    later_action_id = uuid.uuid4()
    resource_action = data.ResourceAction(
        environment=env.id,
        version=6,
        resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={6}"],
        action_id=later_action_id,
        action=const.ResourceAction.deploy,
        started=motd_first_start_time + timedelta(minutes=6),
    )
    await resource_action.insert()
    resource_action.add_logs([
        data.LogLine.log(logging.INFO,
                         "Successfully stored version %(version)d",
                         version=6)
    ])
    await resource_action.save()

    # One matching resource per version so the attribute filter below hits.
    for i in range(0, 11):
        res1 = data.Resource.new(
            environment=env.id,
            resource_version_id="std::File[agent1,path=/etc/motd],v=%s" %
            str(i),
            status=const.ResourceState.deployed,
            last_deploy=datetime.now() + timedelta(minutes=i),
            attributes={
                "attr": [{
                    "a": 1,
                    "b": "c"
                }],
                "path": "/etc/motd"
            },
        )
        await res1.insert()

    # Page 1 via the typed client: newest first, two per page.
    result = await client.get_resource_actions(
        tid=env.id,
        resource_type="std::File",
        attribute="path",
        attribute_value="/etc/motd",
        last_timestamp=motd_first_start_time + timedelta(minutes=7),
        limit=2,
    )
    assert result.code == 200
    resource_actions = result.result["data"]
    expected_action_ids = [later_action_id
                           ] + action_ids_with_the_same_timestamp[:1]
    assert [
        uuid.UUID(resource_action["action_id"])
        for resource_action in resource_actions
    ] == expected_action_ids

    # Use the next link for pagination
    next_page = result.result["links"]["next"]
    port = opt.get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    url = f"{base_url}{next_page}"
    # Raw HTTP from here on: the links are relative URLs, not client calls.
    client = AsyncHTTPClient()
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    second_page_action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert second_page_action_ids == action_ids_with_the_same_timestamp[1:3]
    # Follow "next" again: page 3.
    next_page = response["links"]["next"]
    url = f"{base_url}{next_page}"
    request.url = url
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    third_page_action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert third_page_action_ids == action_ids_with_the_same_timestamp[3:5]
    # Go back to the previous page
    prev_page = response["links"]["prev"]
    url = f"{base_url}{prev_page}"
    request.url = url
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert action_ids == second_page_action_ids
    # And back to the third
    # NOTE(review): the variable is named prev_page but deliberately follows
    # the "next" link here — forward from page 2 back to page 3.
    prev_page = response["links"]["next"]
    url = f"{base_url}{prev_page}"
    request.url = url
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert action_ids == third_page_action_ids