Example #1
0
def test_project_unique(data_module):
    """Inserting a second project with an already-used name must raise DuplicateKeyError."""
    original = data.Project(name="test")
    yield original.insert()

    clone = data.Project(name="test")
    with pytest.raises(pymongo.errors.DuplicateKeyError):
        yield clone.insert()
Example #2
0
def test_config_model(data_module):
    """A configuration model with one resource must report that resource's agent."""
    project = data.Project(name="test")
    yield project.insert()

    environment = data.Environment(
        name="dev", project=project.id, repo_url="", repo_branch="")
    yield environment.insert()

    model_version = int(time.time())
    config_model = data.ConfigurationModel(
        environment=environment.id,
        version=model_version,
        date=datetime.datetime.now(),
        total=1,
        version_info={},
    )
    yield config_model.insert()

    # create a single resource hosted on agent1
    resource_key = "std::File[agent1,path=/etc/motd]"
    resource = data.Resource.new(
        environment=environment.id,
        resource_version_id=f"{resource_key},v={model_version}",
        attributes={"path": "/etc/motd"},
    )
    yield resource.insert()

    agents = yield data.ConfigurationModel.get_agents(environment.id, model_version)
    assert len(agents) == 1
    assert "agent1" in agents
Example #3
0
async def test_resource_list_no_released_version(server, client):
    """The resource list endpoint must return an empty result set when no version was ever released."""
    project = data.Project(name="test")
    await project.insert()
    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await env.insert()

    # A single configuration model that is explicitly not released
    version = 1
    await data.ConfigurationModel(
        environment=env.id,
        version=version,
        date=datetime.now(),
        total=1,
        released=False,
        version_info={},
    ).insert()

    path = "/etc/file1"
    rvid = ResourceVersionIdStr(f"std::File[agent1,path={path}],v={version}")
    resource = data.Resource.new(
        environment=env.id, resource_version_id=rvid, attributes={"path": path}
    )
    await resource.insert()

    result = await client.resource_list(env.id)
    assert result.code == 200
    assert len(result.result["data"]) == 0
async def test_e2e_recompile_failure(compilerservice: CompilerService):
    """
    End-to-end: request two recompiles for an environment without an inmanta
    project on disk; both must fail, produce reports, and run strictly in
    sequence (the second starts only after the first completes).
    """
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    await env.insert()

    # Unique env_vars prevent the two requests from being merged into one compile
    u1 = uuid.uuid4()
    await compilerservice.request_recompile(
        env, False, False, u1, env_vars={"my_unique_var": str(u1)})
    u2 = uuid.uuid4()
    await compilerservice.request_recompile(
        env, False, False, u2, env_vars={"my_unique_var": str(u2)})

    # 200 means a compile is currently running for this environment
    assert await compilerservice.is_compiling(env.id) == 200

    async def compile_done():
        # 204 means no compile is in progress anymore
        res = await compilerservice.is_compiling(env.id)
        print(res)
        return res == 204

    await retry_limited(compile_done, 10)

    # Collect the report for every compile, keyed by the remote id we requested
    _, all_compiles = await compilerservice.get_reports(env)
    all_reports = {
        i["remote_id"]: await compilerservice.get_report(i["id"])
        for i in all_compiles["reports"]
    }

    def assert_report(uid):
        """Verify the failed compile report for *uid*; return its (requested, started, completed) timestamps."""
        code, report = all_reports[uid]
        report = report["report"]
        assert report["remote_id"] == uid
        assert not report["success"]
        reports = report["reports"]
        reports = {r["name"]: r for r in reports}

        # stages
        init = reports["Init"]
        assert not init["errstream"]
        assert "project found in" in init[
            "outstream"] and "and no repository set" in init["outstream"]

        # compile
        comp = reports["Recompiling configuration model"]
        assert "Unable to find an inmanta project (project.yml expected)" in comp[
            "errstream"]
        assert comp["returncode"] == 1
        return report["requested"], report["started"], report["completed"]

    r1, s1, f1 = assert_report(u1)
    r2, s2, f2 = assert_report(u2)

    # Serialization invariants: requests ordered, each compile's lifecycle
    # ordered, and the second compile started only after the first finished.
    assert r2 > r1
    assert r1 < s1 < f1
    assert r2 < s2 < f2
    assert f1 < s2
Example #5
0
def test_project(data_module):
    """A stored project can be listed by name and fetched by id."""
    project = data.Project(name="test")
    yield project.insert()

    by_name = yield data.Project.get_list(name="test")
    assert len(by_name) == 1
    assert by_name[0].id == project.id

    by_id = yield data.Project.get_by_id(project.id)
    # a fresh fetch yields a distinct object carrying the same id
    assert project != by_id
    assert project.id == by_id.id
    async def project_create(self, name: str,
                             project_id: Optional[uuid.UUID]) -> model.Project:
        """
        Create a new project with the given name.

        :param name: the (unique) project name
        :param project_id: id to assign; a random uuid4 is generated when None
        :raises ServerError: when a project with this name already exists
        """
        if project_id is None:
            project_id = uuid.uuid4()

        project = data.Project(id=project_id, name=name)
        try:
            await project.insert()
        except asyncpg.exceptions.UniqueViolationError:
            raise ServerError(f"A project with name {name} already exists.")

        return project.to_dto()
Example #7
0
def test_environment(data_module):
    """Deleting a project must cascade-delete its environments."""
    project = data.Project(name="test")
    yield project.insert()

    environment = data.Environment(
        name="dev", project=project.id, repo_url="", repo_branch="")
    yield environment.insert()
    assert environment.project == project.id

    yield project.delete_cascade()

    # neither the project nor the environment may survive the cascade
    remaining_projects = yield data.Project.get_list()
    remaining_envs = yield data.Environment.get_list()
    assert len(remaining_projects) == 0
    assert len(remaining_envs) == 0
Example #8
0
def test_agent_process(data_module):
    """
    Store an agent process with two instances, point an agent's primary at one
    of them, and verify the primary instance resolves back to the originating
    process.
    """
    project = data.Project(name="test")
    yield project.insert()

    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    yield env.insert()

    agent_proc = data.AgentProcess(hostname="testhost",
                                   environment=env.id,
                                   first_seen=datetime.datetime.now(),
                                   last_seen=datetime.datetime.now(),
                                   sid=uuid.uuid4())
    yield agent_proc.insert()

    agi1 = data.AgentInstance(process=agent_proc.id, name="agi1", tid=env.id)
    yield agi1.insert()
    agi2 = data.AgentInstance(process=agent_proc.id, name="agi2", tid=env.id)
    yield agi2.insert()

    agent = data.Agent(environment=env.id,
                       name="agi1",
                       last_failover=datetime.datetime.now(),
                       paused=False,
                       primary=agi1.id)
    # Fix: do not rebind ``agent`` to the result of insert() — that discarded
    # the constructed record object for no reason (it was shadowed again below).
    yield agent.insert()

    agents = yield data.Agent.get_list()
    assert len(agents) == 1
    agent = agents[0]

    # primary instance -> owning process must be the process we created above
    primary_instance = yield data.AgentInstance.get_by_id(agent.primary)
    primary_process = yield data.AgentProcess.get_by_id(
        primary_instance.process)
    assert primary_process.id == agent_proc.id
Example #9
0
async def test_pause_agent(server, cli):
    """
    Exercise the CLI 'agent pause' / 'agent unpause' commands: per-agent and
    --all variants, scoped to one environment, plus the invalid-option cases.
    """
    project = data.Project(name="test")
    await project.insert()
    env1 = data.Environment(name="env1", project=project.id)
    await env1.insert()
    env2 = data.Environment(name="env2", project=project.id)
    await env2.insert()

    # env1 has two agents, env2 has one — env2 must never be affected below
    await data.Agent(environment=env1.id, name="agent1", paused=False).insert()
    await data.Agent(environment=env1.id, name="agent2", paused=False).insert()
    await data.Agent(environment=env2.id, name="agent3", paused=False).insert()

    async def assert_agent_paused(env_id: uuid.UUID,
                                  expected_records: Dict[str, bool]) -> None:
        """Run 'agent list' for env_id and check each agent's Paused column."""
        result = await cli.run("agent", "list", "-e", str(env_id))
        assert result.exit_code == 0
        # strip spaces so table alignment doesn't matter for the matching below
        output = result.stdout.replace(" ", "")
        assert "Agent|Environment|Paused" in output
        for (agent_name, paused) in expected_records.items():
            assert f"{agent_name}|{env_id}|{paused}" in output

    # baseline: everything unpaused
    await assert_agent_paused(env_id=env1.id,
                              expected_records=dict(agent1=False,
                                                    agent2=False))
    await assert_agent_paused(env_id=env2.id,
                              expected_records=dict(agent3=False))

    # Pause
    result = await cli.run("agent", "pause", "-e", str(env1.id), "--agent",
                           "agent1")
    assert result.exit_code == 0
    await assert_agent_paused(env_id=env1.id,
                              expected_records=dict(agent1=True, agent2=False))
    await assert_agent_paused(env_id=env2.id,
                              expected_records=dict(agent3=False))

    # Unpause
    result = await cli.run("agent", "unpause", "-e", str(env1.id), "--agent",
                           "agent1")
    assert result.exit_code == 0
    await assert_agent_paused(env_id=env1.id,
                              expected_records=dict(agent1=False,
                                                    agent2=False))
    await assert_agent_paused(env_id=env2.id,
                              expected_records=dict(agent3=False))

    # Pause all agents in env1
    result = await cli.run("agent", "pause", "-e", str(env1.id), "--all")
    assert result.exit_code == 0
    await assert_agent_paused(env_id=env1.id,
                              expected_records=dict(agent1=True, agent2=True))
    await assert_agent_paused(env_id=env2.id,
                              expected_records=dict(agent3=False))

    # Unpause all agents in env1
    result = await cli.run("agent", "unpause", "-e", str(env1.id), "--all")
    assert result.exit_code == 0
    await assert_agent_paused(env_id=env1.id,
                              expected_records=dict(agent1=False,
                                                    agent2=False))
    await assert_agent_paused(env_id=env2.id,
                              expected_records=dict(agent3=False))

    # Mandatory option -e not specified
    for action in ["pause", "unpause"]:
        result = await cli.run("agent", action, "--agent", "agent1")
        assert result.exit_code != 0

    # --agent and --all are both set
    for action in ["pause", "unpause"]:
        result = await cli.run("agent", action, "-e", str(env1.id), "--agent",
                               "agent1", "--all")
        assert result.exit_code != 0

    # --agent and --all are both not set
    for action in ["pause", "unpause"]:
        result = await cli.run("agent", action, "-e", str(env1.id))
        assert result.exit_code != 0
 def __init__(self, dir: str) -> None:
     """Initialize the test-project fixture rooted at *dir*."""
     # NOTE(review): ``dir`` shadows the builtin; kept as-is for interface compatibility.
     # location of the project sources under the given base directory
     self.src_dir: str = os.path.join(dir, "src")
     # project record object (not inserted into the database here)
     self.project: data.Project = data.Project(name="test")
     # presumably flipped to True once async setup elsewhere completes — confirm
     self._ready: bool = False
async def test_scheduler(server_config, init_dataclasses_and_load_schema,
                         caplog):
    """Test the scheduler part in isolation, mock out compile runner and listen to state updates"""
    class Collector(CompileStateListener):
        """
        Collect all state updates, optionally hang the processing of listeners
        """
        def __init__(self):
            # compiles whose listener processing fully completed
            self.seen = []
            # compiles observed before the (possibly held) lock was taken
            self.preseen = []
            # acquired by hang() to stall listener processing
            self.lock = Semaphore(1)

        def reset(self):
            self.seen = []
            self.preseen = []

        async def compile_done(self, compile: data.Compile):
            self.preseen.append(compile)
            print("Got compile done for ", compile.remote_id)
            # blocks here while hang() holds the lock
            async with self.lock:
                self.seen.append(compile)

        async def hang(self):
            """Hold the lock so subsequent compile_done calls stall in preseen."""
            await self.lock.acquire()

        def release(self):
            self.lock.release()

        def verify(self, envs: List[uuid.UUID]):
            """Assert exactly these remote ids were fully processed, then reset."""
            assert sorted([x.remote_id for x in self.seen]) == sorted(envs)
            self.reset()

    class HangRunner(object):
        """
        compile runner mock, hang until released
        """
        def __init__(self):
            # starts at 0: run() blocks until release() is called
            self.lock = Semaphore(0)
            self.started = False
            self.done = False
            self.version = None

        async def run(self, force_update: Optional[bool] = False):
            self.started = True
            await self.lock.acquire()
            self.done = True
            # mimic a successful compile with no version
            return True, None

        def release(self):
            self.lock.release()

    class HookedCompilerService(CompilerService):
        """
        hook in the hangrunner
        """
        def __init__(self):
            super(HookedCompilerService, self).__init__()
            # remote_id -> HangRunner created for that compile
            self.locks = {}

        def _get_compile_runner(self, compile: data.Compile, project_dir: str):
            print("Get Run: ", compile.remote_id, compile.id)
            runner = HangRunner()
            self.locks[compile.remote_id] = runner
            return runner

        def get_runner(self, remote_id: uuid.UUID) -> HangRunner:
            return self.locks.get(remote_id)

    # manual setup of server
    server = Server()
    cs = HookedCompilerService()
    await cs.prestart(server)
    await cs.start()
    server.add_slice(cs)
    notification_service = NotificationService()
    await notification_service.prestart(server)
    await notification_service.start()
    collector = Collector()
    cs.add_listener(collector)

    async def request_compile(env: data.Environment) -> uuid.UUID:
        """Request compile for given env, return remote_id"""
        u1 = uuid.uuid4()
        # add unique environment variables to prevent merging in request_recompile
        await cs.request_recompile(env,
                                   False,
                                   False,
                                   u1,
                                   env_vars={"uuid": str(u1)})
        results = await data.Compile.get_by_remote_id(env.id, u1)
        assert len(results) == 1
        assert results[0].remote_id == u1
        print("request: ", u1, results[0].id)
        return u1

    # setup projects in the database
    project = data.Project(name="test")
    await project.insert()
    env1 = data.Environment(name="dev",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env1.insert()
    env2 = data.Environment(name="dev2",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env2.insert()

    # setup series of compiles for two envs
    # e1 is for a plain run
    # e2 is for server restart
    e1 = [await request_compile(env1) for i in range(3)]
    e2 = [await request_compile(env2) for i in range(4)]
    print("env 1:", e1)

    async def check_compile_in_sequence(env: data.Environment,
                                        remote_ids: List[uuid.UUID], idx: int):
        """
        Check integrity of a compile sequence and progress the hangrunner.
        """
        # all compiles before idx must have completed
        before = remote_ids[:idx]

        for rid in before:
            prevrunner = cs.get_runner(rid)
            assert prevrunner.done

        if idx < len(remote_ids):
            current = remote_ids[idx]
            after = remote_ids[idx + 1:]

            # 200: a compile is in progress for this environment
            assert await cs.is_compiling(env.id) == 200

            await retry_limited(lambda: cs.get_runner(current) is not None, 1)
            await retry_limited(lambda: cs.get_runner(current).started, 1)

            # later compiles must not even have a runner yet
            for rid in after:
                nextrunner = cs.get_runner(rid)
                assert nextrunner is None

            # let the current compile finish and wait for it
            cs.get_runner(current).release()
            await asyncio.sleep(0)
            await retry_limited(lambda: cs.get_runner(current).done, 1)

        else:

            async def isdone():
                # 204: nothing compiling anymore
                return await cs.is_compiling(env.id) == 204

            await retry_limited(isdone, 1)

    # run through env1, entire sequence
    for i in range(4):
        await check_compile_in_sequence(env1, e1, i)
    collector.verify(e1)
    print("env1 done")

    print("env2 ", e2)
    # make event collector hang
    await collector.hang()
    # progress two steps into env2
    for i in range(2):
        await check_compile_in_sequence(env2, e2, i)

    # listener is hung: nothing fully processed, but both were pre-seen
    assert not collector.seen
    print(collector.preseen)
    await retry_limited(lambda: len(collector.preseen) == 2, 1)

    # test server restart
    await notification_service.prestop()
    await notification_service.stop()
    await cs.prestop()
    await cs.stop()

    # in the log, find cancel of compile(hangs) and handler(hangs)
    LogSequence(caplog, allow_errors=False).contains(
        "inmanta.util", logging.WARNING,
        "was cancelled").contains("inmanta.util", logging.WARNING,
                                  "was cancelled").no_more_errors()

    print("restarting")

    # restart new server
    cs = HookedCompilerService()
    await cs.prestart(server)
    await cs.start()
    collector = Collector()
    cs.add_listener(collector)

    # complete the sequence, expect re-run of third compile
    for i in range(3):
        print(i)
        await check_compile_in_sequence(env2, e2[2:], i)

    # all are re-run, entire sequence present
    collector.verify(e2)

    await report_db_index_usage()
async def env_with_resources(server, client):
    """
    Fixture: build three environments populated with configuration models and
    resources covering many lifecycle corner cases (unreleased, never deployed,
    orphaned, attribute changes across versions, cross-environment duplicates).

    Yields (env, cm_times, ids, resources) where:
      - env: the primary environment,
      - cm_times: the model timestamps used for env's versions,
      - ids: logical-name -> resource id for the interesting test cases,
      - resources: env id -> resource id -> list of inserted Resource records.

    NOTE: resource last_deploy timestamps are taken from resource_deploy_times
    indexed by total_number_of_resources(), so the *insertion order* below is
    load-bearing — do not reorder the create_resource calls.
    """
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    await env.insert()

    env2 = data.Environment(name="dev2",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env2.insert()

    env3 = data.Environment(name="dev3",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env3.insert()

    # hourly timestamps 11:00..19:00 for configuration model dates
    cm_times = []
    for i in range(1, 10):
        cm_times.append(
            datetime.datetime.strptime(f"2021-07-07T1{i}:00:00.0",
                                       "%Y-%m-%dT%H:%M:%S.%f"))
    cm_time_idx = 0
    # per-minute timestamps used as successive resource last_deploy values
    resource_deploy_times = []
    for i in range(30):
        resource_deploy_times.append(
            datetime.datetime.strptime(f"2021-07-07T11:{i}:00.0",
                                       "%Y-%m-%dT%H:%M:%S.%f"))

    # Add multiple versions of model, with 2 of them released
    for i in range(1, 6):
        cm = data.ConfigurationModel(
            environment=env.id,
            version=i,
            date=cm_times[cm_time_idx],
            total=1,
            # versions 2, 3 and 4 are released; 1 and 5 are not
            released=i != 1 and i != 5,
            version_info={},
        )
        cm_time_idx += 1
        await cm.insert()

    cm = data.ConfigurationModel(
        environment=env2.id,
        version=4,
        date=datetime.datetime.now(tz=datetime.timezone.utc),
        total=1,
        released=True,
        version_info={},
    )
    cm_time_idx += 1
    await cm.insert()

    cm = data.ConfigurationModel(
        environment=env3.id,
        version=6,
        date=datetime.datetime.now(tz=datetime.timezone.utc),
        total=1,
        released=True,
        version_info={},
    )
    cm_time_idx += 1
    await cm.insert()
    # env id -> resource id -> list of inserted Resource records
    resources = {
        env.id: defaultdict(list),
        env2.id: defaultdict(list),
        env3.id: defaultdict(list)
    }

    def total_number_of_resources():
        """Count every resource inserted so far (across all environments)."""
        return sum([
            len(resource_list_by_env) for resource_list_by_env in [[
                specific_resource
                for specific_resource_list in envdict.values()
                for specific_resource in specific_resource_list
            ] for envdict in resources.values()]
        ])

    async def create_resource(
        path: str,
        status: ResourceState,
        version: int,
        attributes: Dict[str, object],
        agent: str = "internal",
        resource_type: str = "std::File",
        environment: UUID = env.id,
    ):
        """Insert a resource; last_deploy advances with each resource created."""
        key = f"{resource_type}[{agent},path={path}]"
        res = data.Resource.new(
            environment=environment,
            resource_version_id=ResourceVersionIdStr(f"{key},v={version}"),
            attributes={
                **attributes,
                **{
                    "path": path
                }
            },
            status=status,
            last_deploy=resource_deploy_times[total_number_of_resources()],
        )
        await res.insert()
        return res

    # A resource with multiple resources in its requires list, and multiple versions where it was released,
    # and is also present in versions that were not released
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.undefined,
            1,
            {
                "key1": "val1",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=1"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.skipped,
            2,
            {
                "key1":
                "modified_value",
                "another_key":
                "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=2",
                    "std::File[internal,path=/tmp/dir1/file2],v=2"
                ],
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.deploying,
            3,
            {
                "key1":
                "modified_value",
                "another_key":
                "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=3",
                    "std::File[internal,path=/tmp/dir1/file2],v=3"
                ],
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.deployed,
            4,
            {
                "key1":
                "modified_value",
                "another_key":
                "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=4",
                    "std::File[internal,path=/tmp/dir1/file2],v=4"
                ],
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.undefined,
            5,
            {
                "key1":
                "modified_value",
                "another_key":
                "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=5",
                    "std::File[internal,path=/tmp/dir1/file2],v=5"
                ],
            },
        ))

    # A resource that didn't change its attributes, but was only released with the second version and has no requirements
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource(
            "/tmp/dir1",
            ResourceState.undefined,
            1,
            {
                "key2": "val2",
                "requires": []
            },
            resource_type="std::Directory",
        ))
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource("/tmp/dir1",
                              ResourceState.deploying,
                              2, {
                                  "key2": "val2",
                                  "requires": []
                              },
                              resource_type="std::Directory"))
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource("/tmp/dir1",
                              ResourceState.deployed,
                              3, {
                                  "key2": "val2",
                                  "requires": []
                              },
                              resource_type="std::Directory"))
    resources[env.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource("/tmp/dir1",
                              ResourceState.deployed,
                              4, {
                                  "key2": "val2",
                                  "requires": []
                              },
                              resource_type="std::Directory"))

    # A resource that changed the attributes in the last released version,
    # so the last and the first time the attributes are the same, is the same as well;
    # And it also has a single requirement
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource("/tmp/dir1/file2", ResourceState.undefined, 1, {
            "key3": "val3",
            "requires": []
        }))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.deployed,
            2,
            {
                "key3": "val3",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=2"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.deployed,
            3,
            {
                "key3": "val3",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=3"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.deploying,
            4,
            {
                "key3": "val3updated",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=4"]
            },
        ))

    # Add an unreleased resource
    resources[env.id]["std::File[internal,path=/etc/filexyz]"].append(
        await create_resource(
            "/etc/filexyz",
            ResourceState.undefined,
            5,
            {
                "key4": "val4",
                "requires": []
            },
        ))
    resources[env.id]["std::File[internal,path=/etc/never_deployed]"].append(
        await create_resource(
            "/etc/never_deployed",
            ResourceState.undefined,
            3,
            {
                "key5": "val5",
                "requires": []
            },
        ))
    resources[env.id]["std::File[internal,path=/etc/never_deployed]"].append(
        await create_resource(
            "/etc/never_deployed",
            ResourceState.unavailable,
            4,
            {
                "key5": "val5",
                "requires": []
            },
        ))

    # Deployed once, then reappears with different attributes (different hash)
    resources[env.id][
        "std::File[internal,path=/etc/deployed_only_with_different_hash]"].append(
            await create_resource(
                "/etc/deployed_only_with_different_hash",
                ResourceState.deployed,
                3,
                {
                    "key6": "val6",
                    "requires": []
                },
            ))

    resources[env.id][
        "std::File[internal,path=/etc/deployed_only_with_different_hash]"].append(
            await create_resource(
                "/etc/deployed_only_with_different_hash",
                ResourceState.undefined,
                4,
                {
                    "key6": "val6different",
                    "requires": []
                },
            ))

    # Deployed only in an earlier version; its requirement shows up later
    resources[env.id][
        "std::File[internal,path=/etc/deployed_only_in_earlier_version]"].append(
            await create_resource(
                "/etc/deployed_only_in_earlier_version",
                ResourceState.deployed,
                3,
                {
                    "key7":
                    "val7",
                    "requires": [
                        "std::File[internal,path=/etc/requirement_in_later_version],v=3"
                    ]
                },
            ))

    resources[env.id][
        "std::File[internal,path=/etc/requirement_in_later_version]"].append(
            await create_resource(
                "/etc/requirement_in_later_version",
                ResourceState.deploying,
                3,
                {
                    "key8": "val8",
                    "requires": []
                },
            ))
    resources[env.id][
        "std::File[internal,path=/etc/requirement_in_later_version]"].append(
            await create_resource(
                "/etc/requirement_in_later_version",
                ResourceState.deployed,
                4,
                {
                    "key8": "val8",
                    "requires": []
                },
            ))
    resources[env.id][
        "std::File[internal,path=/etc/requirement_in_later_version]"].append(
            await create_resource(
                "/etc/requirement_in_later_version",
                ResourceState.skipped,
                5,
                {
                    "key8": "val8",
                    "requires": []
                },
            ))

    # Orphaned resource (only in version 3) requiring another orphaned resource
    resources[env.id]["std::File[internal,path=/tmp/orphaned]"].append(
        await create_resource(
            "/tmp/orphaned",
            ResourceState.deployed,
            3,
            {
                "key9": "val9",
                "requires": ["std::File[internal,path=/tmp/orphaned_req],v=3"]
            },
        ))
    resources[env.id]["std::File[internal,path=/tmp/orphaned_req]"].append(
        await create_resource(
            "/tmp/orphaned_req",
            ResourceState.deployed,
            3,
            {
                "key9": "val9",
                "requires": []
            },
        ))

    # Add the same resources the first one requires in another environment
    # NOTE(review): resource_type="std::Directory" here produces a
    # std::Directory[...] resource id while it is registered under a
    # std::File[...] key in ``resources`` — confirm this mismatch is intentional.
    resources[env2.id]["std::File[internal,path=/tmp/dir1/file2]"].append(
        await create_resource(
            "/tmp/dir1/file2",
            ResourceState.unavailable,
            4,
            {
                "key3": "val3",
                "requires": ["std::Directory[internal,path=/tmp/dir1],v=4"]
            },
            resource_type="std::Directory",
            environment=env2.id,
        ))

    resources[env2.id]["std::Directory[internal,path=/tmp/dir1]"].append(
        await create_resource(
            "/tmp/dir1",
            ResourceState.available,
            4,
            {
                "key2": "val2",
                "requires": []
            },
            resource_type="std::Directory",
            environment=env2.id,
        ))

    # Add the same main resource to another environment with higher version
    resources[env3.id]["std::File[internal,path=/tmp/dir1/file1]"].append(
        await create_resource(
            "/tmp/dir1/file1",
            ResourceState.deploying,
            6,
            {
                "key1":
                "modified_value",
                "another_key":
                "val",
                "requires": [
                    "std::Directory[internal,path=/tmp/dir1],v=6",
                    "std::File[internal,path=/tmp/dir1/file2],v=6"
                ],
            },
            environment=env3.id,
        ))
    # logical-name -> resource id mapping for the test cases built above
    ids = {
        "multiple_requires": "std::File[internal,path=/tmp/dir1/file1]",
        "no_requires": "std::Directory[internal,path=/tmp/dir1]",
        "single_requires": "std::File[internal,path=/tmp/dir1/file2]",
        "unreleased": "std::File[internal,path=/etc/filexyz]",
        "never_deployed": "std::File[internal,path=/etc/never_deployed]",
        "deployed_only_with_different_hash":
        "std::File[internal,path=/etc/deployed_only_with_different_hash]",
        "deployed_only_in_earlier_version":
        "std::File[internal,path=/etc/deployed_only_in_earlier_version]",
        "orphaned_and_requires_orphaned":
        "std::File[internal,path=/tmp/orphaned]",
    }

    yield env, cm_times, ids, resources
async def environments_with_versions(
        server,
        client) -> Tuple[Dict[str, uuid.UUID], List[datetime.datetime]]:
    """Set up four environments with different configuration-model histories.

    Yields a tuple of:
      * a mapping from a descriptive scenario name to the environment id
      * the list of timestamps used for the versions of the first environment
    """

    def version_info_for(version: int) -> dict:
        # Odd versions mimic an LSM-export recompile, even ones a parameter update.
        if version % 2:
            return {
                "export_metadata": {
                    "message": "Recompile model because state transition",
                    "type": "lsm_export"
                }
            }
        return {
            "export_metadata": {
                "message":
                "Recompile model because one or more parameters were updated",
                "type": "param"
            }
        }

    project = data.Project(name="test")
    await project.insert()

    # Environment with nine versions:
    # 1: skipped_candidate, 2,3: retired, 4,5,6: skipped_candidate, 7: active, 8,9: candidate
    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    await env.insert()

    # One timestamp per minute: 11:00:00 through 11:09:00 on a fixed date.
    cm_timestamps = [
        datetime.datetime.strptime(f"2021-12-06T11:{minute}:00.0",
                                   "%Y-%m-%dT%H:%M:%S.%f")
        for minute in range(10)
    ]

    for version in range(1, 10):
        await data.ConfigurationModel(
            environment=env.id,
            version=version,
            date=cm_timestamps[version - 1],
            total=1,
            released=version in {2, 3, 7},
            version_info=version_info_for(version),
        ).insert()

    # Environment with exactly one released version.
    env2 = data.Environment(name="dev-test2",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env2.insert()
    await data.ConfigurationModel(
        environment=env2.id,
        version=11,
        date=datetime.datetime.now(),
        total=1,
        released=True,
        version_info={},
    ).insert()

    # Environment whose single version was never released.
    env3 = data.Environment(name="dev-test3",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env3.insert()
    await data.ConfigurationModel(
        environment=env3.id,
        version=7,
        date=datetime.datetime.now(),
        total=1,
        released=False,
        version_info={},
    ).insert()

    # Environment without any versions at all.
    env4 = data.Environment(name="dev-test4",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env4.insert()

    yield {
        "multiple_versions": env.id,
        "single_released_version": env2.id,
        "no_released_version": env3.id,
        "no_versions": env4.id,
    }, cm_timestamps
async def test_resource_action_pagination(postgresql_client, client,
                                          clienthelper, server, agent):
    """ Test querying resource actions via the API, including the pagination links.

    Seven deploy actions are inserted for a single resource: one a minute
    before the shared start time, five sharing the exact same timestamp
    (so pagination must break the tie on action id, descending), and one
    six minutes later. The test then walks forward and backward through
    the pagination links and verifies the page contents.
    """
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    await env.insert()

    # Add multiple versions of model
    for i in range(0, 11):
        cm = data.ConfigurationModel(
            environment=env.id,
            version=i,
            date=datetime.now(),
            total=1,
            version_info={},
        )
        await cm.insert()

    async def add_deploy_action(version: int, action_id: uuid.UUID,
                                started: datetime) -> None:
        # Insert one deploy ResourceAction for /etc/motd with a single log line.
        resource_action = data.ResourceAction(
            environment=env.id,
            version=version,
            resource_version_ids=[
                f"std::File[agent1,path=/etc/motd],v={version}"
            ],
            action_id=action_id,
            action=const.ResourceAction.deploy,
            started=started,
        )
        await resource_action.insert()
        resource_action.add_logs([
            data.LogLine.log(logging.INFO,
                             "Successfully stored version %(version)d",
                             version=version)
        ])
        await resource_action.save()

    # Add resource actions for motd
    motd_first_start_time = datetime.now()
    earliest_action_id = uuid.uuid4()
    await add_deploy_action(0, earliest_action_id,
                            motd_first_start_time - timedelta(minutes=1))

    action_ids_with_the_same_timestamp = []
    for i in range(1, 6):
        action_id = uuid.uuid4()
        action_ids_with_the_same_timestamp.append(action_id)
        await add_deploy_action(i, action_id, motd_first_start_time)
    # Ties on timestamp are ordered by action id, descending.
    action_ids_with_the_same_timestamp = sorted(
        action_ids_with_the_same_timestamp, reverse=True)

    later_action_id = uuid.uuid4()
    await add_deploy_action(6, later_action_id,
                            motd_first_start_time + timedelta(minutes=6))

    for i in range(0, 11):
        res1 = data.Resource.new(
            environment=env.id,
            resource_version_id=f"std::File[agent1,path=/etc/motd],v={i}",
            status=const.ResourceState.deployed,
            last_deploy=datetime.now() + timedelta(minutes=i),
            attributes={
                "attr": [{
                    "a": 1,
                    "b": "c"
                }],
                "path": "/etc/motd"
            },
        )
        await res1.insert()

    port = opt.get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    # Do not shadow the `client` fixture: use a separate HTTP client
    # to follow the pagination links directly.
    http_client = AsyncHTTPClient()

    async def fetch_page(relative_url: str) -> dict:
        # Follow a pagination link over raw HTTP and return the parsed body.
        request = HTTPRequest(
            url=f"{base_url}{relative_url}",
            headers={"X-Inmanta-tid": str(env.id)},
        )
        response = await http_client.fetch(request, raise_error=False)
        assert response.code == 200
        return json.loads(response.body.decode("utf-8"))

    def action_ids(result_body: dict) -> List[uuid.UUID]:
        # Extract the action ids of a result page, in page order.
        return [
            uuid.UUID(resource_action["action_id"])
            for resource_action in result_body["data"]
        ]

    result = await client.get_resource_actions(
        tid=env.id,
        resource_type="std::File",
        attribute="path",
        attribute_value="/etc/motd",
        last_timestamp=motd_first_start_time + timedelta(minutes=7),
        limit=2,
    )
    assert result.code == 200
    expected_action_ids = [later_action_id
                           ] + action_ids_with_the_same_timestamp[:1]
    assert action_ids(result.result) == expected_action_ids

    # Use the next link for pagination
    response = await fetch_page(result.result["links"]["next"])
    second_page_action_ids = action_ids(response)
    assert second_page_action_ids == action_ids_with_the_same_timestamp[1:3]

    response = await fetch_page(response["links"]["next"])
    third_page_action_ids = action_ids(response)
    assert third_page_action_ids == action_ids_with_the_same_timestamp[3:5]

    # Go back to the previous page
    response = await fetch_page(response["links"]["prev"])
    assert action_ids(response) == second_page_action_ids

    # And forward again to the third page
    response = await fetch_page(response["links"]["next"])
    assert action_ids(response) == third_page_action_ids
# Example #15
async def test_has_only_one_version_from_resource(server, client):
    """Test querying resources, when there are multiple released versions of a resource.
    The query should return only the latest one from those
    """
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev", project=project.id, repo_url="", repo_branch="")
    await env.insert()

    # Add multiple versions of model, with 2 of them released (versions 2 and 3)
    for i in range(1, 5):
        cm = data.ConfigurationModel(
            environment=env.id,
            version=i,
            date=datetime.now(),
            total=1,
            released=i != 1 and i != 4,
            version_info={},
        )
        await cm.insert()

    async def add_resource_version(path: str, version: int, attributes: dict,
                                   **kwargs) -> None:
        # Insert one version of the std::File resource identified by `path`.
        # Extra keyword arguments (e.g. status) are forwarded to Resource.new.
        key = f"std::File[agent1,path={path}]"
        resource = data.Resource.new(
            environment=env.id,
            resource_version_id=f"{key},v={version}",
            attributes=attributes,
            **kwargs,
        )
        await resource.insert()

    # file1: four versions; v3 is the latest released one (v4 is unreleased).
    path1 = "/etc/file1"
    await add_resource_version(path1, 1, {"path": path1})
    await add_resource_version(path1, 2, {"path": path1},
                               status=ResourceState.deploying)
    await add_resource_version(path1, 3, {"path": path1},
                               status=ResourceState.deployed)
    await add_resource_version(
        path1, 4,
        {"path": path1, "new_attr": 123, "requires": ["abc"]},
        status=ResourceState.deployed,
    )

    # file2: only exists up to version 2, so it disappears from version 3 on.
    path2 = "/etc/file2"
    await add_resource_version(path2, 1, {"path": path2})
    await add_resource_version(path2, 2, {"path": path2},
                               status=ResourceState.deploying)

    result = await client.resource_list(env.id, sort="status.asc")
    assert result.code == 200
    assert len(result.result["data"]) == 2
    # file1 is reported from its latest released version (v3), so the
    # unreleased v4 attributes (new_attr, requires) must not leak through.
    assert result.result["data"][0]["status"] == "deployed"
    assert result.result["data"][0]["requires"] == []
    # Orphaned, since there is already a version 3 released
    assert result.result["data"][1]["status"] == "orphaned"
# Example #16
async def test_notification_cleanup_on_start(init_dataclasses_and_load_schema,
                                             async_finalizer,
                                             server_config) -> None:
    """Verify that starting the server purges notifications older than each
    environment's retention: the default retention keeps the 35-day-old and
    fresh notifications, while a 30-day retention keeps only the fresh one."""
    project = data.Project(name="test")
    await project.insert()

    # Environment relying on the default notification retention.
    env_with_default_retention = data.Environment(name="testenv",
                                                  project=project.id)
    await env_with_default_retention.insert()
    # Environment with its retention shortened to 30 days.
    env_with_short_retention = data.Environment(name="testenv2",
                                                project=project.id)
    await env_with_short_retention.insert()
    await env_with_short_retention.set(data.NOTIFICATION_RETENTION, 30)

    # One notification older than a year, one 35 days old, one fresh.
    timestamps = [
        datetime.datetime.now().astimezone() - datetime.timedelta(days=366),
        datetime.datetime.now().astimezone() - datetime.timedelta(days=35),
        datetime.datetime.now().astimezone(),
    ]

    async def insert_notifications_with_timestamps(
            timestamps: Sequence[datetime.datetime],
            environment_ids: Sequence[uuid.UUID]) -> None:
        # Insert one unread, uncleared notification per (environment, timestamp).
        for env_id in environment_ids:
            for created in timestamps:
                await data.Notification(
                    title="Notification",
                    message="Something happened",
                    environment=env_id,
                    severity=const.NotificationSeverity.message,
                    uri="/api/v2/notification",
                    created=created,
                    read=False,
                    cleared=False,
                ).insert()

    await insert_notifications_with_timestamps(
        timestamps,
        [env_with_default_retention.id, env_with_short_retention.id])

    # Start a minimal server with the notification slice: the cleanup job is
    # expected to run on start. NOTE(review): the compiler slice appears to be
    # required for the notification service to start — confirm.
    server = Server()
    notification_service = NotificationService()
    compiler_service = CompilerService()
    server.add_slice(compiler_service)
    server.add_slice(notification_service)
    await server.start()
    async_finalizer.add(server.stop)

    async def notification_cleaned_up(
            env_id: uuid.UUID, expected_length_after_cleanup: int) -> bool:
        # Poll helper: true once the environment holds exactly the expected count.
        default_env_notifications = await data.Notification.get_list(
            environment=env_id)
        return len(default_env_notifications) == expected_length_after_cleanup

    # Wait until the cleanup has removed the year-old notification.
    await retry_limited(partial(notification_cleaned_up,
                                env_with_default_retention.id, 2),
                        timeout=10)

    default_env_notifications = await data.Notification.get_list(
        environment=env_with_default_retention.id,
        order="DESC",
        order_by_column="created")
    # Only the oldest one is deleted
    assert len(default_env_notifications) == 2
    assert default_env_notifications[0].created == timestamps[2]
    assert default_env_notifications[1].created == timestamps[1]

    # With the 30-day retention, both older notifications are removed.
    await retry_limited(partial(notification_cleaned_up,
                                env_with_short_retention.id, 1),
                        timeout=10)
    short_retention_notifications = await data.Notification.get_list(
        environment=env_with_short_retention.id,
        order="DESC",
        order_by_column="created")
    # Only the latest one is kept
    assert len(short_retention_notifications) == 1
    assert short_retention_notifications[0].created == timestamps[2]
async def env_with_resources(server, client):
    """Set up an environment holding deployed resources spread over three
    model versions (versions 2 and 3 released), plus a second environment
    with two resources of its own. Yields the first environment."""
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    await env.insert()

    # Three configuration models; only version 1 stays unreleased.
    for model_version in range(1, 4):
        await data.ConfigurationModel(
            environment=env.id,
            version=model_version,
            date=datetime.now(),
            total=1,
            released=model_version != 1,
            version_info={},
        ).insert()

    async def add_resource(agent: str,
                           path: str,
                           resource_type: str,
                           versions: List[int],
                           environment: UUID = env.id):
        # Insert one deployed resource record per requested model version.
        resource_id = f"{resource_type}[{agent},path={path}]"
        for resource_version in versions:
            await data.Resource.new(
                environment=environment,
                resource_version_id=ResourceVersionIdStr(
                    f"{resource_id},v={resource_version}"),
                attributes={
                    "path": path,
                    "v": resource_version
                },
                status=ResourceState.deployed,
            ).insert()

    await add_resource("agent1", "/etc/file1", "std::File", [1, 2, 3])
    await add_resource("agent1", "/etc/file2", "std::File", [1, 2])
    await add_resource("agent2", "/etc/file3", "std::File", [2])
    await add_resource("agent2", "/tmp/file4", "std::File", [3])
    await add_resource("agent2", "/tmp/dir5", "std::Directory", [3])
    await add_resource("agent2", "/tmp/dir6", "std::Directory", [3])
    await add_resource("agent2", "/tmp/dir7", "std::Directory", [3])
    await add_resource("agent3", "/tmp/dir8", "std::Directory", [3])

    # A second environment with one released version and two resources,
    # to verify queries do not leak across environments.
    env2 = data.Environment(name="dev-test2",
                            project=project.id,
                            repo_url="",
                            repo_branch="")
    await env2.insert()
    await data.ConfigurationModel(
        environment=env2.id,
        version=3,
        date=datetime.now(),
        total=1,
        released=True,
        version_info={},
    ).insert()
    await add_resource("agent1", "/tmp/file7", "std::File", [3],
                       environment=env2.id)
    await add_resource("agent1", "/tmp/file2", "std::File", [3],
                       environment=env2.id)

    yield env