def test_model_list(data_module):
    """Verify ConfigurationModel.get_versions ordering, limit and offset handling."""
    environment = uuid.uuid4()

    # Store model versions 1..19 for a single environment.
    for model_version in range(1, 20):
        record = data.ConfigurationModel(
            environment=environment,
            version=model_version,
            date=datetime.datetime.now(),
            total=0,
            version_info={},
        )
        yield record.insert()

    # Limit 1 with no offset returns only the newest version.
    page = yield data.ConfigurationModel.get_versions(environment, 0, 1)
    assert len(page) == 1
    assert page[0].version == 19

    # Offset 1 skips the newest version.
    page = yield data.ConfigurationModel.get_versions(environment, 1, 1)
    assert len(page) == 1
    assert page[0].version == 18

    # No limit: every version comes back, newest first.
    page = yield data.ConfigurationModel.get_versions(environment)
    assert len(page) == 19
    assert page[0].version == 19
    assert page[-1].version == 1

    # Offset 10 drops the ten newest versions.
    page = yield data.ConfigurationModel.get_versions(environment, 10)
    assert len(page) == 9
    assert page[0].version == 9
    assert page[-1].version == 1
def test_config_model(data_module):
    """A configuration model's agent list is derived from its stored resources."""
    project = data.Project(name="test")
    yield project.insert()

    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    yield env.insert()

    # Use the current epoch second as a unique version number.
    version = int(time.time())
    configuration_model = data.ConfigurationModel(
        environment=env.id,
        version=version,
        date=datetime.datetime.now(),
        total=1,
        version_info={},
    )
    yield configuration_model.insert()

    # create resources: a single file resource hosted on agent1
    key = "std::File[agent1,path=/etc/motd]"
    motd_resource = data.Resource.new(
        environment=env.id,
        resource_version_id=key + ",v=%d" % version,
        attributes={"path": "/etc/motd"},
    )
    yield motd_resource.insert()

    agents = yield data.ConfigurationModel.get_agents(env.id, version)
    assert len(agents) == 1
    assert "agent1" in agents
async def test_resource_list_no_released_version(server, client):
    """Test that if there are no released versions of a resource, the result set is empty"""
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await env.insert()

    # A single configuration model version that was never released.
    version = 1
    model = data.ConfigurationModel(
        environment=env.id,
        version=version,
        date=datetime.now(),
        total=1,
        released=False,
        version_info={},
    )
    await model.insert()

    file_path = f"/etc/file{1}"
    rid = f"std::File[agent1,path={file_path}]"
    resource = data.Resource.new(
        environment=env.id,
        resource_version_id=ResourceVersionIdStr(f"{rid},v={version}"),
        attributes={"path": file_path},
    )
    await resource.insert()

    # The resource exists only in an unreleased version, so nothing is listed.
    result = await client.resource_list(env.id)
    assert result.code == 200
    assert len(result.result["data"]) == 0
async def env_with_versions(environment):
    """Insert three configuration model versions; every version except 1 is released."""
    env_id = uuid.UUID(environment)
    for version in range(1, 4):
        model = data.ConfigurationModel(
            environment=env_id,
            version=version,
            date=datetime.datetime.now(),
            total=1,
            released=version != 1,
            version_info={},
        )
        await model.insert()
async def put_version(
    self,
    env: data.Environment,
    version: int,
    resources: List[JsonType],
    resource_state: Dict[ResourceIdStr, const.ResourceState],
    unknowns: List[Dict[str, PrimitiveTypes]],
    version_info: JsonType,
    compiler_version: Optional[str] = None,
) -> Apireturn:
    """
    Store a new configuration model version with its resources, unknown parameters
    and undeployable/skip bookkeeping, then optionally auto-release it.

    :param resources: a list of serialized resources
    :param unknowns: dict with the following structure
                {
                 "resource": ResourceIdStr,
                 "parameter": str,
                 "source": str
                }
    :param version_info: metadata exported by the compiler for this version
    :param compiler_version: required; requests without it are rejected
    :return: 200 on success
    :raises BadRequest: missing compiler version, version above the last
        reservation, or a non-positive version number
    :raises ServerError: when the version number already exists
    """
    if not compiler_version:
        raise BadRequest(
            "Older compiler versions are no longer supported, please update your compiler"
        )
    if version > env.last_version:
        raise BadRequest(
            f"The version number used is {version} "
            f"which is higher than the last outstanding reservation {env.last_version}"
        )
    if version <= 0:
        raise BadRequest(f"The version number used ({version}) is not positive")

    started = datetime.datetime.now().astimezone()

    agents = set()
    # lookup for all RV's, lookup by resource id
    rv_dict: Dict[ResourceVersionIdStr, data.Resource] = {}
    # reverse dependency tree, Resource.provides [:] -- Resource.requires as resource_id
    provides_tree: Dict[str, List[str]] = defaultdict(lambda: [])
    # list of all resources which have a cross agent dependency, as a tuple, (dependant,requires)
    cross_agent_dep = []
    # list of all resources which are undeployable
    undeployable: List[data.Resource] = []

    resource_objects = []
    resource_version_ids = []
    for res_dict in resources:
        res_obj = data.Resource.new(env.id, res_dict["id"])
        if res_obj.resource_id in resource_state:
            res_obj.status = const.ResourceState[resource_state[res_obj.resource_id]]
            if res_obj.status in const.UNDEPLOYABLE_STATES:
                undeployable.append(res_obj)

        # collect all agents
        agents.add(res_obj.agent)

        # every serialized field except "id" becomes a resource attribute
        attributes = {}
        for field, value in res_dict.items():
            if field != "id":
                attributes[field] = value

        res_obj.attributes = attributes
        resource_objects.append(res_obj)
        resource_version_ids.append(res_obj.resource_version_id)

        rv_dict[res_obj.resource_id] = res_obj

        # find cross agent dependencies
        agent = res_obj.agent
        resc_id = res_obj.resource_id
        if "requires" not in attributes:
            LOGGER.warning("Received resource without requires attribute (%s)" % res_obj.resource_id)
        else:
            for req in attributes["requires"]:
                rid = Id.parse_id(req)
                provides_tree[rid.resource_str()].append(resc_id)
                if rid.get_agent_name() != agent:
                    # it is a CAD
                    cross_agent_dep.append((res_obj, rid))

    # hook up all CADs: the required resource learns about its dependant
    for f, t in cross_agent_dep:
        res_obj = rv_dict[t.resource_str()]
        res_obj.provides.append(f.resource_version_id)

    # detect failed compiles
    def safe_get(input: JsonType, key: str, default: object) -> object:
        # dict.get with an extra guard against non-dict metadata payloads
        if not isinstance(input, dict):
            return default
        if key not in input:
            return default
        return input[key]

    metadata: JsonType = safe_get(version_info, const.EXPORT_META_DATA, {})
    compile_state = safe_get(metadata, const.META_DATA_COMPILE_STATE, "")
    # NOTE(review): safe_get may return a plain string while const.Compilestate.failed
    # is an enum member — confirm the equality semantics are as intended.
    failed = compile_state == const.Compilestate.failed

    resources_to_purge: List[data.Resource] = []
    if not failed and (await env.get(PURGE_ON_DELETE)):
        # search for deleted resources (purge_on_delete)
        resources_to_purge = await data.Resource.get_deleted_resources(env.id, version, set(rv_dict.keys()))

        previous_requires = {}
        for res in resources_to_purge:
            LOGGER.warning("Purging %s, purged resource based on %s" % (res.resource_id, res.resource_version_id))

            # re-emit the deleted resource in this version, marked as purged
            attributes = res.attributes.copy()
            attributes["purged"] = True
            attributes["requires"] = []
            res_obj = data.Resource.new(
                env.id,
                resource_version_id=ResourceVersionIdStr("%s,v=%s" % (res.resource_id, version)),
                attributes=attributes,
            )
            resource_objects.append(res_obj)

            previous_requires[res_obj.resource_id] = res.attributes["requires"]
            resource_version_ids.append(res_obj.resource_version_id)
            agents.add(res_obj.agent)
            rv_dict[res_obj.resource_id] = res_obj

        # invert dependencies on purges: purge the dependant before its requirements
        for res_id, requires in previous_requires.items():
            res_obj = rv_dict[res_id]
            for require in requires:
                req_id = Id.parse_id(require)

                if req_id.resource_str() in rv_dict:
                    req_res = rv_dict[req_id.resource_str()]

                    req_res.attributes["requires"].append(res_obj.resource_version_id)
                    res_obj.provides.append(req_res.resource_version_id)

    undeployable_ids: List[str] = [res.resource_id for res in undeployable]
    # get skipped for undeployable: everything transitively provided by an
    # undeployable resource is skipped
    work = list(undeployable_ids)
    skippeable: Set[str] = set()
    while len(work) > 0:
        current = work.pop()
        if current in skippeable:
            continue
        skippeable.add(current)
        work.extend(provides_tree[current])

    skip_list = sorted(list(skippeable - set(undeployable_ids)))

    try:
        cm = data.ConfigurationModel(
            environment=env.id,
            version=version,
            date=datetime.datetime.now().astimezone(),
            total=len(resources),
            version_info=version_info,
            undeployable=undeployable_ids,
            skipped_for_undeployable=skip_list,
        )
        await cm.insert()
    except asyncpg.exceptions.UniqueViolationError:
        raise ServerError("The given version is already defined. Versions should be unique.")

    await data.Resource.insert_many(resource_objects)
    # account for the purge resources that were appended after `total` was set
    await cm.update_fields(total=cm.total + len(resources_to_purge))

    for uk in unknowns:
        # fill in optional fields before persisting
        if "resource" not in uk:
            uk["resource"] = ""

        if "metadata" not in uk:
            uk["metadata"] = {}

        up = data.UnknownParameter(
            resource_id=uk["resource"],
            name=uk["parameter"],
            source=uk["source"],
            environment=env.id,
            version=version,
            metadata=uk["metadata"],
        )
        await up.insert()

    for agent in agents:
        await self.agentmanager_service.ensure_agent_registered(env, agent)

    # Don't log ResourceActions without resource_version_ids, because
    # no API call exists to retrieve them.
    if resource_version_ids:
        now = datetime.datetime.now().astimezone()
        log_line = data.LogLine.log(logging.INFO, "Successfully stored version %(version)d", version=version)
        self.resource_service.log_resource_action(env.id, resource_version_ids, logging.INFO, now, log_line.msg)
        ra = data.ResourceAction(
            environment=env.id,
            version=version,
            resource_version_ids=resource_version_ids,
            action_id=uuid.uuid4(),
            action=const.ResourceAction.store,
            started=started,
            finished=now,
            messages=[log_line],
        )
        await ra.insert()

    LOGGER.debug("Successfully stored version %d", version)

    self.resource_service.clear_env_cache(env)

    auto_deploy = await env.get(data.AUTO_DEPLOY)
    if auto_deploy:
        LOGGER.debug("Auto deploying version %d", version)
        push_on_auto_deploy = cast(bool, await env.get(data.PUSH_ON_AUTO_DEPLOY))
        agent_trigger_method_on_autodeploy = cast(str, await env.get(data.AGENT_TRIGGER_METHOD_ON_AUTO_DEPLOY))
        agent_trigger_method_on_autodeploy = const.AgentTriggerMethod[agent_trigger_method_on_autodeploy]
        await self.release_version(env, version, push_on_auto_deploy, agent_trigger_method_on_autodeploy)

    return 200
async def env_with_resources(server, client):
    """
    Fixture-style setup: three environments populated with resources across
    multiple model versions (released and unreleased), yielding the main
    environment, the model timestamps, a name->resource-id lookup and all
    created resources grouped per environment and resource id.

    NOTE: `last_deploy` for each resource is taken from `resource_deploy_times`
    indexed by the number of resources created so far, so insertion order
    determines the deploy timestamps — do not reorder the create calls.
    """
    project = data.Project(name="test")
    await project.insert()
    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await env.insert()
    env2 = data.Environment(name="dev2", project=project.id, repo_url="", repo_branch="")
    await env2.insert()
    env3 = data.Environment(name="dev3", project=project.id, repo_url="", repo_branch="")
    await env3.insert()

    # One fixed timestamp per configuration model version (11:00 .. 19:00).
    cm_times = []
    for i in range(1, 10):
        cm_times.append(datetime.datetime.strptime(f"2021-07-07T1{i}:00:00.0", "%Y-%m-%dT%H:%M:%S.%f"))
    cm_time_idx = 0

    # One fixed deploy timestamp per resource, consumed in creation order.
    resource_deploy_times = []
    for i in range(30):
        resource_deploy_times.append(datetime.datetime.strptime(f"2021-07-07T11:{i}:00.0", "%Y-%m-%dT%H:%M:%S.%f"))

    # Add multiple versions of model, with 2 of them released
    for i in range(1, 6):
        cm = data.ConfigurationModel(
            environment=env.id,
            version=i,
            date=cm_times[cm_time_idx],
            total=1,
            released=i != 1 and i != 5,
            version_info={},
        )
        cm_time_idx += 1
        await cm.insert()
    cm = data.ConfigurationModel(
        environment=env2.id,
        version=4,
        date=datetime.datetime.now(tz=datetime.timezone.utc),
        total=1,
        released=True,
        version_info={},
    )
    cm_time_idx += 1
    await cm.insert()
    cm = data.ConfigurationModel(
        environment=env3.id,
        version=6,
        date=datetime.datetime.now(tz=datetime.timezone.utc),
        total=1,
        released=True,
        version_info={},
    )
    cm_time_idx += 1
    await cm.insert()

    # resources[environment_id][resource_id] -> list of created Resource records
    resources = {
        env.id: defaultdict(list),
        env2.id: defaultdict(list),
        env3.id: defaultdict(list)
    }

    def total_number_of_resources():
        # Count every resource created so far, across all environments; used
        # as the index into resource_deploy_times for the next resource.
        return sum([
            len(resource_list_by_env) for resource_list_by_env in [[
                specific_resource for specific_resource_list in envdict.values()
                for specific_resource in specific_resource_list
            ] for envdict in resources.values()]
        ])

    async def create_resource(
        path: str,
        status: ResourceState,
        version: int,
        attributes: Dict[str, object],
        agent: str = "internal",
        resource_type: str = "std::File",
        environment: UUID = env.id,
    ):
        # Build and insert one resource record; "path" always wins over any
        # same-named key in `attributes`.
        key = f"{resource_type}[{agent},path={path}]"
        res = data.Resource.new(
            environment=environment,
            resource_version_id=ResourceVersionIdStr(f"{key},v={version}"),
            attributes={
                **attributes,
                **{
                    "path": path
                }
            },
            status=status,
            last_deploy=resource_deploy_times[total_number_of_resources()],
        )
        await res.insert()
        return res

    # A resource with multiple resources in its requires list, and multiple versions where it was released,
    # and is also present in versions that were not released
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.undefined,
            1,
            {
                "key1": "val1",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=1"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.skipped,
            2,
            {
                "key1": "modified_value",
                "another_key": "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=2",
                    "std::File[internal,path=/tmp/dir1/file2],v=2"
                ],
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.deploying,
            3,
            {
                "key1": "modified_value",
                "another_key": "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=3",
                    "std::File[internal,path=/tmp/dir1/file2],v=3"
                ],
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.deployed,
            4,
            {
                "key1": "modified_value",
                "another_key": "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=4",
                    "std::File[internal,path=/tmp/dir1/file2],v=4"
                ],
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.undefined,
            5,
            {
                "key1": "modified_value",
                "another_key": "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=5",
                    "std::File[internal,path=/tmp/dir1/file2],v=5"
                ],
            },
        ))
    # A resource that didn't change its attributes, but was only released with the second version and has no requirements
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource(
            "/tmp/dir1",
            ResourceState.undefined,
            1,
            {
                "key2": "val2",
                "requires": []
            },
            resource_type="std::Directory",
        ))
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource("/tmp/dir1", ResourceState.deploying, 2, {
            "key2": "val2",
            "requires": []
        }, resource_type="std::Directory"))
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource("/tmp/dir1", ResourceState.deployed, 3, {
            "key2": "val2",
            "requires": []
        }, resource_type="std::Directory"))
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource("/tmp/dir1", ResourceState.deployed, 4, {
            "key2": "val2",
            "requires": []
        }, resource_type="std::Directory"))
    # A resource that changed the attributes in the last released version,
    # so the last and the first time the attributes are the same, is the same as well;
    # And it also has a single requirement
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource("/tmp/dir1/file2", ResourceState.undefined, 1, {
            "key3": "val3",
            "requires": []
        }))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.deployed,
            2,
            {
                "key3": "val3",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=2"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.deployed,
            3,
            {
                "key3": "val3",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=3"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.deploying,
            4,
            {
                "key3": "val3updated",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=4"]
            },
        ))
    # Add an unreleased resource
    resources[env.id]["std::File[internal,path=/etc/filexyz]"].append(
        await create_resource(
            "/etc/filexyz",
            ResourceState.undefined,
            5,
            {
                "key4": "val4",
                "requires": []
            },
        ))
    # A resource that only exists in states that never reached "deployed".
    resources[env.id]["std::File[internal,path=/etc/never_deployed]"].append(
        await create_resource(
            "/etc/never_deployed",
            ResourceState.undefined,
            3,
            {
                "key5": "val5",
                "requires": []
            },
        ))
    resources[env.id]["std::File[internal,path=/etc/never_deployed]"].append(
        await create_resource(
            "/etc/never_deployed",
            ResourceState.unavailable,
            4,
            {
                "key5": "val5",
                "requires": []
            },
        ))
    resources[env.id][
        "std::File[internal,path=/etc/deployed_only_with_different_hash]"].append(
            await create_resource(
                "/etc/deployed_only_with_different_hash",
                ResourceState.deployed,
                3,
                {
                    "key6": "val6",
                    "requires": []
                },
            ))
    resources[env.id][
        "std::File[internal,path=/etc/deployed_only_with_different_hash]"].append(
            await create_resource(
                "/etc/deployed_only_with_different_hash",
                ResourceState.undefined,
                4,
                {
                    "key6": "val6different",
                    "requires": []
                },
            ))
    resources[env.id][
        "std::File[internal,path=/etc/deployed_only_in_earlier_version]"].append(
            await create_resource(
                "/etc/deployed_only_in_earlier_version",
                ResourceState.deployed,
                3,
                {
                    "key7": "val7",
                    "requires": [
                        "std::File[internal,path=/etc/requirement_in_later_version],v=3"
                    ]
                },
            ))
    resources[env.id][
        "std::File[internal,path=/etc/requirement_in_later_version]"].append(
            await create_resource(
                "/etc/requirement_in_later_version",
                ResourceState.deploying,
                3,
                {
                    "key8": "val8",
                    "requires": []
                },
            ))
    resources[env.id][
        "std::File[internal,path=/etc/requirement_in_later_version]"].append(
            await create_resource(
                "/etc/requirement_in_later_version",
                ResourceState.deployed,
                4,
                {
                    "key8": "val8",
                    "requires": []
                },
            ))
    resources[env.id][
        "std::File[internal,path=/etc/requirement_in_later_version]"].append(
            await create_resource(
                "/etc/requirement_in_later_version",
                ResourceState.skipped,
                5,
                {
                    "key8": "val8",
                    "requires": []
                },
            ))
    # Orphaned pair: only present in version 3, which is superseded.
    resources[env.id]["std::File[internal,path=/tmp/orphaned]"].append(
        await create_resource(
            "/tmp/orphaned",
            ResourceState.deployed,
            3,
            {
                "key9": "val9",
                "requires": ["std::File[internal,path=/tmp/orphaned_req],v=3"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/orphaned_req]"].append(
        await create_resource(
            "/tmp/orphaned_req",
            ResourceState.deployed,
            3,
            {
                "key9": "val9",
                "requires": []
            },
        ))
    # Add the same resources the first one requires in another environment
    # NOTE(review): this entry is stored under a std::File key but is created with
    # resource_type="std::Directory" — confirm this mismatch is intentional.
    resources[env2.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.unavailable,
            4,
            {
                "key3": "val3",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=4"]
            },
            resource_type="std::Directory",
            environment=env2.id,
        ))
    resources[env2.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource(
            "/tmp/dir1",
            ResourceState.available,
            4,
            {
                "key2": "val2",
                "requires": []
            },
            resource_type="std::Directory",
            environment=env2.id,
        ))
    # Add the same main resource to another environment with higher version
    resources[env3.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.deploying,
            6,
            {
                "key1": "modified_value",
                "another_key": "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=6",
                    "std::File[internal,path=/tmp/dir1/file2],v=6"
                ],
            },
            environment=env3.id,
        ))

    # Friendly names for the resource ids used by the tests.
    ids = {
        "multiple_requires": "std::File[internal,path=/tmp/dir1/file1]",
        "no_requires": "std::Directory[internal,path=/tmp/dir1]",
        "single_requires": "std::File[internal,path=/tmp/dir1/file2]",
        "unreleased": "std::File[internal,path=/etc/filexyz]",
        "never_deployed": "std::File[internal,path=/etc/never_deployed]",
        "deployed_only_with_different_hash": "std::File[internal,path=/etc/deployed_only_with_different_hash]",
        "deployed_only_in_earlier_version": "std::File[internal,path=/etc/deployed_only_in_earlier_version]",
        "orphaned_and_requires_orphaned": "std::File[internal,path=/tmp/orphaned]",
    }

    yield env, cm_times, ids, resources
async def environments_with_versions(
        server, client) -> Tuple[Dict[str, uuid.UUID], List[datetime.datetime]]:
    """Fixture data: four environments covering every released/unreleased version combination."""
    project = data.Project(name="test")
    await project.insert()

    main_env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await main_env.insert()

    # Fixed timestamps, one per configuration model version.
    cm_timestamps = [
        datetime.datetime.strptime(f"2021-12-06T11:{minute}:00.0", "%Y-%m-%dT%H:%M:%S.%f")
        for minute in range(0, 10)
    ]

    # Add multiple versions of model
    # 1: skipped_candidate, 2,3: retired, 4,5,6: skipped_candidate, 7: active, 8,9: candidate
    for version in range(1, 10):
        if version % 2:
            info = {
                "export_metadata": {
                    "message": "Recompile model because state transition",
                    "type": "lsm_export"
                }
            }
        else:
            info = {
                "export_metadata": {
                    "message": "Recompile model because one or more parameters were updated",
                    "type": "param"
                }
            }
        model = data.ConfigurationModel(
            environment=main_env.id,
            version=version,
            date=cm_timestamps[version - 1],
            total=1,
            released=version in {2, 3, 7},
            version_info=info,
        )
        await model.insert()

    # Environment with exactly one released version.
    second_env = data.Environment(name="dev-test2", project=project.id, repo_url="", repo_branch="")
    await second_env.insert()
    await data.ConfigurationModel(
        environment=second_env.id,
        version=11,
        date=datetime.datetime.now(),
        total=1,
        released=True,
        version_info={},
    ).insert()

    # Environment whose only version was never released.
    third_env = data.Environment(name="dev-test3", project=project.id, repo_url="", repo_branch="")
    await third_env.insert()
    await data.ConfigurationModel(
        environment=third_env.id,
        version=7,
        date=datetime.datetime.now(),
        total=1,
        released=False,
        version_info={},
    ).insert()

    # Environment with no versions at all.
    fourth_env = data.Environment(name="dev-test4", project=project.id, repo_url="", repo_branch="")
    await fourth_env.insert()

    environments = {
        "multiple_versions": main_env.id,
        "single_released_version": second_env.id,
        "no_released_version": third_env.id,
        "no_versions": fourth_env.id,
    }
    yield environments, cm_timestamps
async def env_with_logs(client, server, environment):
    """Insert nine model versions with alternating deploy/pull resource actions and log lines."""
    env_uuid = uuid.UUID(environment)

    # One timestamp per configuration model version (10:11 .. 10:19).
    cm_times = [
        datetime.datetime.strptime(f"2021-07-07T10:1{i}:00.0", "%Y-%m-%dT%H:%M:%S.%f")
        for i in range(1, 10)
    ]
    for version, stamp in enumerate(cm_times, start=1):
        model = data.ConfigurationModel(
            environment=env_uuid,
            version=version,
            date=stamp,
            total=1,
            released=version != 1 and version != 9,
            version_info={},
        )
        await model.insert()

    # UTC-aware message timestamps, one per minute, consumed in order below.
    msg_timings = [
        datetime.datetime.strptime("2021-07-07T10:10:00.0",
                                   "%Y-%m-%dT%H:%M:%S.%f").replace(minute=minute).astimezone(
                                       datetime.timezone.utc)
        for minute in range(1, 30)
    ]

    msg_timings_idx = 0
    for version in range(1, 10):
        action = data.ResourceAction(
            environment=env_uuid,
            version=version,
            resource_version_ids=[
                f"{resource_id_a},v={version}",
                f"std::Directory[agent1,path=/tmp/dir2],v={version}",
            ],
            action_id=uuid.uuid4(),
            action=const.ResourceAction.deploy if version % 2 else const.ResourceAction.pull,
            started=cm_times[version - 1],
        )
        await action.insert()
        if version % 2:
            # Odd versions are deploys with a single stored-version log line.
            action.add_logs([
                data.LogLine.log(
                    logging.INFO,
                    "Successfully stored version %(version)d",
                    version=version,
                    timestamp=msg_timings[msg_timings_idx],
                ),
            ])
            msg_timings_idx += 1
        else:
            # Even versions are pulls with two log lines at consecutive timestamps.
            action.add_logs([
                data.LogLine.log(
                    logging.INFO,
                    "Resource version pulled by client for agent %(agent)s",
                    agent="admin",
                    timestamp=msg_timings[msg_timings_idx],
                ),
                data.LogLine.log(logging.DEBUG,
                                 "Setting deployed due to known good status",
                                 timestamp=msg_timings[msg_timings_idx + 1]),
            ])
            msg_timings_idx += 2
        await action.save()

    yield environment, msg_timings
async def test_resource_action_pagination(postgresql_client, client, clienthelper, server, agent):
    """ Test querying resource actions via the API, including the pagination links."""
    project = data.Project(name="test")
    await project.insert()
    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await env.insert()

    # Add multiple versions of model
    for i in range(0, 11):
        cm = data.ConfigurationModel(
            environment=env.id,
            version=i,
            date=datetime.now(),
            total=1,
            version_info={},
        )
        await cm.insert()

    # Add resource actions for motd
    motd_first_start_time = datetime.now()
    # One action strictly before the shared timestamp below.
    earliest_action_id = uuid.uuid4()
    resource_action = data.ResourceAction(
        environment=env.id,
        version=0,
        resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={0}"],
        action_id=earliest_action_id,
        action=const.ResourceAction.deploy,
        started=motd_first_start_time - timedelta(minutes=1),
    )
    await resource_action.insert()
    resource_action.add_logs([
        data.LogLine.log(logging.INFO,
                         "Successfully stored version %(version)d",
                         version=0)
    ])
    await resource_action.save()

    # Five actions sharing the exact same start timestamp, so pagination must
    # fall back to the action id as a tie-breaker.
    action_ids_with_the_same_timestamp = []
    for i in range(1, 6):
        action_id = uuid.uuid4()
        action_ids_with_the_same_timestamp.append(action_id)
        resource_action = data.ResourceAction(
            environment=env.id,
            version=i,
            resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={i}"],
            action_id=action_id,
            action=const.ResourceAction.deploy,
            started=motd_first_start_time,
        )
        await resource_action.insert()
        resource_action.add_logs([
            data.LogLine.log(logging.INFO,
                             "Successfully stored version %(version)d",
                             version=i)
        ])
        await resource_action.save()
    # Expected API order among equal timestamps: descending action id.
    action_ids_with_the_same_timestamp = sorted(
        action_ids_with_the_same_timestamp, reverse=True)

    # One action strictly after the shared timestamp.
    later_action_id = uuid.uuid4()
    resource_action = data.ResourceAction(
        environment=env.id,
        version=6,
        resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={6}"],
        action_id=later_action_id,
        action=const.ResourceAction.deploy,
        started=motd_first_start_time + timedelta(minutes=6),
    )
    await resource_action.insert()
    resource_action.add_logs([
        data.LogLine.log(logging.INFO,
                         "Successfully stored version %(version)d",
                         version=6)
    ])
    await resource_action.save()

    # One motd resource record per model version.
    for i in range(0, 11):
        res1 = data.Resource.new(
            environment=env.id,
            resource_version_id="std::File[agent1,path=/etc/motd],v=%s" % str(i),
            status=const.ResourceState.deployed,
            last_deploy=datetime.now() + timedelta(minutes=i),
            attributes={
                "attr": [{
                    "a": 1,
                    "b": "c"
                }],
                "path": "/etc/motd"
            },
        )
        await res1.insert()

    # First page: limit 2, newest first.
    result = await client.get_resource_actions(
        tid=env.id,
        resource_type="std::File",
        attribute="path",
        attribute_value="/etc/motd",
        last_timestamp=motd_first_start_time + timedelta(minutes=7),
        limit=2,
    )
    assert result.code == 200
    resource_actions = result.result["data"]
    expected_action_ids = [later_action_id
                           ] + action_ids_with_the_same_timestamp[:1]
    assert [
        uuid.UUID(resource_action["action_id"])
        for resource_action in resource_actions
    ] == expected_action_ids

    # Use the next link for pagination
    next_page = result.result["links"]["next"]
    port = opt.get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    url = f"{base_url}{next_page}"
    # NOTE(review): this rebinds the `client` fixture to a raw HTTP client for
    # the rest of the test — intentional here, but easy to trip over.
    client = AsyncHTTPClient()
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    second_page_action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert second_page_action_ids == action_ids_with_the_same_timestamp[1:3]

    # Follow the next link once more: third page.
    next_page = response["links"]["next"]
    url = f"{base_url}{next_page}"
    request.url = url
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    third_page_action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert third_page_action_ids == action_ids_with_the_same_timestamp[3:5]

    # Go back to the previous page
    prev_page = response["links"]["prev"]
    url = f"{base_url}{prev_page}"
    request.url = url
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert action_ids == second_page_action_ids

    # And back to the third
    prev_page = response["links"]["next"]
    url = f"{base_url}{prev_page}"
    request.url = url
    response = await client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert action_ids == third_page_action_ids
def test_issue_422(data_module):
    """Regression test for issue 422: a resource from an older deployed model is
    detected as deleted even when newer, never-deployed versions also contain it."""
    env_id = uuid.uuid4()

    # model 1: released and deployed, contains the motd file
    cm1 = data.ConfigurationModel(environment=env_id,
                                  version=1,
                                  date=datetime.datetime.now(),
                                  total=1,
                                  version_info={},
                                  released=True,
                                  deployed=True)
    yield cm1.insert()
    deployed_resource = data.Resource.new(
        environment=env_id,
        resource_version_id="std::File[agent1,path=/etc/motd],v=%s" % 1,
        status=const.ResourceState.deployed,
        attributes={
            "path": "/etc/motd",
            "purge_on_delete": True,
            "purged": False
        })
    yield deployed_resource.insert()

    # model 2 (multiple undeployed versions)
    cm2 = data.ConfigurationModel(environment=env_id,
                                  version=2,
                                  date=datetime.datetime.now(),
                                  total=1,
                                  version_info={},
                                  released=False,
                                  deployed=False)
    yield cm2.insert()
    pending_resource = data.Resource.new(
        environment=env_id,
        resource_version_id="std::File[agent1,path=/etc/motd],v=%s" % 2,
        status=const.ResourceState.available,
        attributes={
            "path": "/etc/motd",
            "purge_on_delete": True,
            "purged": False
        })
    yield pending_resource.insert()

    # model 3: empty model — the motd file is gone from the desired state
    cm3 = data.ConfigurationModel(environment=env_id,
                                  version=3,
                                  date=datetime.datetime.now(),
                                  total=0,
                                  version_info={})
    yield cm3.insert()

    to_purge = yield data.Resource.get_deleted_resources(env_id, 3, set())
    assert len(to_purge) == 1
    assert to_purge[0].model == 1
    assert to_purge[0].resource_id == "std::File[agent1,path=/etc/motd]"
async def env_with_resources(server, client):
    """Fixture: two environments populated with deployed resources over several versions."""
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await env.insert()

    # Add multiple versions of model, with 2 of them released
    for model_version in range(1, 4):
        model = data.ConfigurationModel(
            environment=env.id,
            version=model_version,
            date=datetime.now(),
            total=1,
            released=model_version != 1,
            version_info={},
        )
        await model.insert()

    async def create_resource(agent: str,
                              path: str,
                              resource_type: str,
                              versions: List[int],
                              environment: UUID = env.id):
        # Insert one deployed resource record per requested version.
        for version in versions:
            rvid = f"{resource_type}[{agent},path={path}],v={version}"
            resource = data.Resource.new(
                environment=environment,
                resource_version_id=ResourceVersionIdStr(rvid),
                attributes={
                    "path": path,
                    "v": version
                },
                status=ResourceState.deployed,
            )
            await resource.insert()

    await create_resource("agent1", "/etc/file1", "std::File", [1, 2, 3])
    await create_resource("agent1", "/etc/file2", "std::File", [1, 2])
    await create_resource("agent2", "/etc/file3", "std::File", [2])
    await create_resource("agent2", "/tmp/file4", "std::File", [3])
    await create_resource("agent2", "/tmp/dir5", "std::Directory", [3])
    await create_resource("agent2", "/tmp/dir6", "std::Directory", [3])
    await create_resource("agent2", "/tmp/dir7", "std::Directory", [3])
    await create_resource("agent3", "/tmp/dir8", "std::Directory", [3])

    # Second environment with a single released version and two files.
    env2 = data.Environment(name="dev-test2", project=project.id, repo_url="", repo_branch="")
    await env2.insert()
    model = data.ConfigurationModel(
        environment=env2.id,
        version=3,
        date=datetime.now(),
        total=1,
        released=True,
        version_info={},
    )
    await model.insert()
    await create_resource("agent1", "/tmp/file7", "std::File", [3], environment=env2.id)
    await create_resource("agent1", "/tmp/file2", "std::File", [3], environment=env2.id)

    yield env
async def test_has_only_one_version_from_resource(server, client):
    """Test querying resources, when there are multiple released versions of a resource. The query
    should return only the latest one from those """
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await env.insert()

    # Add multiple versions of model, with 2 of them released (versions 2 and 3).
    for model_version in range(1, 5):
        model = data.ConfigurationModel(
            environment=env.id,
            version=model_version,
            date=datetime.now(),
            total=1,
            released=model_version != 1 and model_version != 4,
            version_info={},
        )
        await model.insert()

    async def add_resource(key, version, attributes, status=None):
        # Insert one resource version; omit `status` to use the model default.
        extra = {} if status is None else {"status": status}
        resource = data.Resource.new(
            environment=env.id,
            resource_version_id=key + ",v=%d" % version,
            attributes=attributes,
            **extra,
        )
        await resource.insert()

    # First resource: present in all four versions, with extra attributes in v4.
    first_path = "/etc/file" + str(1)
    first_key = "std::File[agent1,path=" + first_path + "]"
    await add_resource(first_key, 1, {"path": first_path})
    await add_resource(first_key, 2, {"path": first_path}, ResourceState.deploying)
    await add_resource(first_key, 3, {"path": first_path}, ResourceState.deployed)
    await add_resource(
        first_key,
        4,
        {"path": first_path, "new_attr": 123, "requires": ["abc"]},
        ResourceState.deployed,
    )

    # Second resource: only in versions 1 and 2, so version 3 orphans it.
    second_path = "/etc/file" + str(2)
    second_key = "std::File[agent1,path=" + second_path + "]"
    await add_resource(second_key, 1, {"path": second_path})
    await add_resource(second_key, 2, {"path": second_path}, ResourceState.deploying)

    result = await client.resource_list(env.id, sort="status.asc")
    assert result.code == 200
    assert len(result.result["data"]) == 2
    # The latest released version (3) of the first resource wins: deployed, no requires.
    assert result.result["data"][0]["status"] == "deployed"
    assert result.result["data"][0]["requires"] == []
    # Orphaned, since there is already a version 3 released
    assert result.result["data"][1]["status"] == "orphaned"