Example #1
    async def start(self,
                    targets: Sequence[inmanta.protocol.endpoints.CallTarget],
                    additional_rules: Optional[List[routing.Rule]] = None) -> None:
        """
        Start the server on the current ioloop
        """
        global_url_map: Dict[str, Dict[str, common.UrlMethod]] = self.get_global_url_map(targets)

        rules: List[routing.Rule] = []
        if additional_rules:
            rules.extend(additional_rules)

        for url, handler_config in global_url_map.items():
            rules.append(
                routing.Rule(routing.PathMatches(url), RESTHandler, {
                    "transport": self,
                    "config": handler_config
                }))
            LOGGER.debug("Registering handler(s) for url %s and methods %s",
                         url, ", ".join(handler_config.keys()))

        application = web.Application(rules, compress_response=True)

        crt = inmanta_config.Config.get("server", "ssl_cert_file", None)
        key = inmanta_config.Config.get("server", "ssl_key_file", None)

        if crt is not None and key is not None:
            ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
            ssl_ctx.load_cert_chain(crt, key)

            self._http_server = httpserver.HTTPServer(application,
                                                      decompress_request=True,
                                                      ssl_options=ssl_ctx)
            LOGGER.debug("Created REST transport with SSL")
        else:
            self._http_server = httpserver.HTTPServer(application,
                                                      decompress_request=True)

        bind_port = server_config.get_bind_port()
        bind_addresses = server_config.server_bind_address.get()

        for bind_addr in bind_addresses:
            self._http_server.listen(bind_port, bind_addr)
            LOGGER.info(f"Server listening on {bind_addr}:{bind_port}")
        self.running = True

        LOGGER.debug("Start REST transport")
Example #2
async def test_agents_paging(server, client, env_with_agents: None, environment: str, order_by_column: str, order: str) -> None:
    result = await client.get_agents(
        environment,
        filter={"status": ["paused", "up"]},
    )
    assert result.code == 200
    assert len(result.result["data"]) == 7
    all_agents = result.result["data"]
    for agent in all_agents:
        if not agent["process_name"]:
            agent["process_name"] = ""
        if not agent["last_failover"]:
            agent["last_failover"] = datetime.datetime.min.replace(tzinfo=datetime.timezone.utc)
        else:
            agent["last_failover"] = datetime.datetime.strptime(agent["last_failover"], "%Y-%m-%dT%H:%M:%S.%f").replace(
                tzinfo=datetime.timezone.utc
            )
    all_agents_in_expected_order = sorted(all_agents, key=itemgetter(order_by_column, "name"), reverse=order == "DESC")
    all_agent_names_in_expected_order = agent_names(all_agents_in_expected_order)

    result = await client.get_agents(
        environment,
        limit=2,
        sort=f"{order_by_column}.{order}",
        filter={"status": ["paused", "up"]},
    )
    assert result.code == 200
    assert len(result.result["data"]) == 2

    assert agent_names(result.result["data"]) == all_agent_names_in_expected_order[:2]

    assert result.result["metadata"] == {"total": 7, "before": 0, "after": 5, "page_size": 2}
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port,)
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.status=" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert agent_names(response["data"]) == all_agent_names_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {"total": 7, "before": 2, "after": 3, "page_size": 2}

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_agent_names = agent_names(response["data"])
    assert next_page_agent_names == all_agent_names_in_expected_order[4:6]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {"total": 7, "before": 4, "after": 1, "page_size": 2}

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_agent_names = agent_names(response["data"])
    assert prev_page_agent_names == all_agent_names_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {"total": 7, "before": 2, "after": 3, "page_size": 2}

    result = await client.get_agents(
        environment,
        limit=100,
        sort=f"{order_by_column}.{order}",
        filter={"status": ["paused", "up"]},
    )
    assert result.code == 200
    assert len(result.result["data"]) == 7
    assert agent_names(result.result["data"]) == all_agent_names_in_expected_order

    assert result.result["metadata"] == {"total": 7, "before": 0, "after": 0, "page_size": 100}
async def test_compile_reports_paging(server, client, env_with_compile_reports,
                                      order_by_column, order):
    environment, compile_requested_timestamps = env_with_compile_reports

    result = await client.get_compile_reports(
        environment,
        filter={
            "success": True,
            "requested": [f"lt:{compile_requested_timestamps[-1].astimezone(datetime.timezone.utc)}"],
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 6

    all_compiles_in_expected_order = sorted(result.result["data"],
                                            key=itemgetter(
                                                order_by_column, "id"),
                                            reverse=order == "DESC")
    all_compile_ids_in_expected_order = compile_ids(
        all_compiles_in_expected_order)

    result = await client.get_compile_reports(
        environment,
        limit=2,
        sort=f"{order_by_column}.{order}",
        filter={
            "success": True,
            "requested": [f"lt:{compile_requested_timestamps[-1].astimezone(datetime.timezone.utc)}"],
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 2

    assert compile_ids(
        result.result["data"]) == all_compile_ids_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 6,
        "before": 0,
        "after": 4,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.success=" in url
    assert "filter.requested=" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert compile_ids(
        response["data"]) == all_compile_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_compile_ids = compile_ids(response["data"])
    assert next_page_compile_ids == all_compile_ids_in_expected_order[4:]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is None
    assert response["metadata"] == {
        "total": 6,
        "before": 4,
        "after": 0,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_compile_ids = compile_ids(response["data"])
    assert prev_page_compile_ids == all_compile_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }

    result = await client.get_compile_reports(
        environment,
        limit=6,
        sort=f"{order_by_column}.{order}",
        filter={
            "success": True,
            "requested": [f"lt:{compile_requested_timestamps[-1].astimezone(datetime.timezone.utc)}"],
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 6
    assert compile_ids(
        result.result["data"]) == all_compile_ids_in_expected_order

    assert result.result["metadata"] == {
        "total": 6,
        "before": 0,
        "after": 0,
        "page_size": 6
    }
Example #4
    def _collect_server_information(self) -> List[Server]:
        bind_port = config.get_bind_port()
        server_address = config.server_address.get()
        return [
            Server(url=AnyUrl(url=f"http://{server_address}:{bind_port}/", scheme="http", host=server_address, port=bind_port))
        ]
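For context: in pydantic v1, AnyUrl takes the assembled string plus its components as explicit keyword arguments, which is why both the formatted URL and scheme/host/port are passed above. A standalone sketch, assuming pydantic v1 (the address is made up):

from pydantic import AnyUrl

# AnyUrl subclasses str in pydantic v1; the components are stored as given.
url = AnyUrl(url="http://127.0.0.1:8888/", scheme="http", host="127.0.0.1", port="8888")
print(url)  # http://127.0.0.1:8888/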
Example #5
    async def run(
        self,
        force_update: Optional[bool] = False
    ) -> Tuple[bool, Optional[model.CompileData]]:
        """
        Runs this compile run.

        :return: Tuple of a boolean representing success and the compile data, if any.
        """
        success = False
        now = datetime.datetime.now().astimezone()
        await self.request.update_fields(started=now)

        compile_data_json_file = NamedTemporaryFile()
        try:
            await self._start_stage("Init", "")

            environment_id = self.request.environment
            project_dir = self._project_dir

            env = await data.Environment.get_by_id(environment_id)

            env_string = ", ".join([
                f"{k}='{v}'"
                for k, v in self.request.environment_variables.items()
            ])
            assert self.stage
            await self.stage.update_streams(
                out=f"Using extra environment variables during compile {env_string}\n"
            )

            if env is None:
                await self._error("Environment %s does not exist." %
                                  environment_id)
                await self._end_stage(-1)
                return False, None

            if not os.path.exists(project_dir):
                await self._info(
                    "Creating project directory for environment %s at %s" %
                    (environment_id, project_dir))
                os.mkdir(project_dir)

            # Use a separate venv to compile the project to prevent that packages are installed in the
            # venv of the Inmanta server.
            venv_dir = os.path.join(project_dir, ".env")

            async def ensure_venv() -> Optional[data.Report]:
                """
                Ensure a venv is present at `venv_dir`.
                """
                virtual_env = VirtualEnv(venv_dir)
                if virtual_env.exists():
                    return None

                await self._start_stage("Creating venv", command="")
                try:
                    virtual_env.init_env()
                except VenvCreationFailedError as e:
                    await self._error(message=e.msg)
                    return await self._end_stage(returncode=1)
                else:
                    return await self._end_stage(returncode=0)

            async def update_modules() -> data.Report:
                return await run_compile_stage_in_venv(
                    "Updating modules", ["-vvv", "-X", "modules", "update"],
                    cwd=project_dir)

            async def install_modules() -> data.Report:
                return await run_compile_stage_in_venv(
                    "Installing modules", ["-vvv", "-X", "project", "install"],
                    cwd=project_dir)

            async def run_compile_stage_in_venv(
                    stage_name: str,
                    inmanta_args: List[str],
                    cwd: str,
                    env: Optional[Dict[str, str]] = None) -> data.Report:
                """
                Run a compile stage by executing the given command in the venv `venv_dir`.

                :param stage_name: Name of the compile stage.
                :param inmanta_args: The command to be executed in the venv. This command should not include the part
                                      ["<python-interpreter>", "-m", "inmanta.app"]
                :param cwd: The current working directory to be used for the command invocation.
                :param env: Execute the command with these environment variables.
                """
                LOGGER.info(stage_name)
                python_path = PythonEnvironment.get_python_path_for_env_path(
                    venv_dir)
                assert os.path.exists(python_path)
                full_cmd = [python_path, "-m", "inmanta.app"] + inmanta_args
                return await self._run_compile_stage(stage_name, full_cmd, cwd, env or {})

            async def setup() -> AsyncIterator[Awaitable[Optional[data.Report]]]:
                """
                Returns an iterator over all setup stages. Inspecting stage success state is the responsibility of the caller.
                """
                repo_url: str = env.repo_url
                repo_branch: str = env.repo_branch
                if os.path.exists(os.path.join(project_dir, "project.yml")):
                    yield self._end_stage(0)

                    yield ensure_venv()

                    should_update: bool = force_update or self.request.force_update

                    # switch branches
                    if repo_branch:
                        branch = await self.get_branch()
                        if branch is not None and repo_branch != branch:
                            if should_update:
                                yield self._run_compile_stage(
                                    "Fetching new branch heads",
                                    ["git", "fetch"], project_dir)
                            yield self._run_compile_stage(
                                f"Switching branch from {branch} to {repo_branch}",
                                ["git", "checkout", repo_branch],
                                project_dir,
                            )
                            if not should_update:
                                # if we update, update procedure will install modules
                                yield install_modules()

                    # update project
                    if should_update:
                        # only pull changes if there is an upstream branch
                        if await self.get_upstream_branch():
                            yield self._run_compile_stage(
                                "Pulling updates", ["git", "pull"],
                                project_dir)
                        yield update_modules()
                else:
                    if not repo_url:
                        await self._warning(
                            f"Failed to compile: no project found in {project_dir} and no repository set."
                        )
                        yield self._end_stage(1)
                        return

                    if len(os.listdir(project_dir)) > 0:
                        await self._warning(
                            f"Failed to compile: no project found in {project_dir} but directory is not empty."
                        )
                        yield self._end_stage(1)
                        return

                    yield self._end_stage(0)

                    # clone repo and install project
                    cmd = ["git", "clone", repo_url, "."]
                    if repo_branch:
                        cmd.extend(["-b", repo_branch])
                    yield self._run_compile_stage("Cloning repository", cmd,
                                                  project_dir)
                    yield ensure_venv()
                    yield install_modules()

            async for stage in setup():
                stage_result: Optional[data.Report] = await stage
                if stage_result and (stage_result.returncode is None
                                     or stage_result.returncode > 0):
                    return False, None

            server_address = opt.server_address.get()
            server_port = opt.get_bind_port()
            cmd = [
                "-vvv",
                "export",
                "-X",
                "-e",
                str(environment_id),
                "--server_address",
                server_address,
                "--server_port",
                str(server_port),
                "--metadata",
                json.dumps(self.request.metadata),
                "--export-compile-data",
                "--export-compile-data-file",
                compile_data_json_file.name,
            ]

            if not self.request.do_export:
                f = NamedTemporaryFile()
                cmd.append("-j")
                cmd.append(f.name)

            if config.Config.get("server", "auth", False):
                token = encode_token(["compiler", "api"], str(environment_id))
                cmd.append("--token")
                cmd.append(token)

            if opt.server_ssl_cert.get() is not None:
                cmd.append("--ssl")

            if opt.server_ssl_ca_cert.get() is not None:
                cmd.append("--ssl-ca-cert")
                cmd.append(opt.server_ssl_ca_cert.get())

            self.tail_stdout = ""

            env_vars_compile: Dict[str, str] = os.environ.copy()
            env_vars_compile.update(self.request.environment_variables)

            result: data.Report = await run_compile_stage_in_venv(
                "Recompiling configuration model",
                cmd,
                cwd=project_dir,
                env=env_vars_compile)
            success = result.returncode == 0
            if not success:
                if self.request.do_export:
                    LOGGER.warning("Compile %s failed", self.request.id)
                else:
                    LOGGER.debug("Compile %s failed", self.request.id)

            print("---", self.tail_stdout, result.errstream)
            match = re.search(r"Committed resources with version (\d+)",
                              self.tail_stdout)
            if match:
                self.version = int(match.group(1))
        except CancelledError:
            # This compile was cancelled. Catch it here otherwise a warning will be printed in the logs because of an
            # unhandled exception in a backgrounded coroutine.
            pass

        except Exception:
            LOGGER.exception("An error occurred while recompiling")

        finally:

            async def warn(message: str) -> None:
                if self.stage is not None:
                    await self._warning(message)
                else:
                    LOGGER.warning(message)

            with compile_data_json_file as file:
                compile_data_json: str = file.read().decode()
                if compile_data_json:
                    try:
                        return success, model.CompileData.parse_raw(
                            compile_data_json)
                    except json.JSONDecodeError:
                        await warn(
                            "Failed to load compile data json for compile %s. Invalid json: '%s'"
                            % (self.request.id, compile_data_json))
                    except pydantic.ValidationError:
                        await warn(
                            "Failed to parse compile data for compile %s. Json does not match CompileData model: '%s'"
                            % (self.request.id, compile_data_json))
            return success, None
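A detail worth noting above: setup() is an async generator that yields awaitables instead of awaiting them itself, so the async for loop that consumes it decides when each stage runs and can abort on the first failing return code. The control flow in isolation (a minimal sketch with made-up stage names and integer return codes standing in for data.Report):

import asyncio
from typing import AsyncIterator, Awaitable, Optional


async def stage(name: str, returncode: int) -> Optional[int]:
    print(f"running stage {name}")
    return returncode


async def setup() -> AsyncIterator[Awaitable[Optional[int]]]:
    # Yield work lazily; the caller awaits each item and may stop early.
    yield stage("clone", 0)
    yield stage("install", 1)  # fails
    yield stage("compile", 0)  # never created if the caller aborts


async def main() -> None:
    async for pending in setup():
        returncode = await pending
        if returncode is not None and returncode > 0:
            print("aborting after failed stage")
            return


asyncio.run(main())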
Example #6
async def test_resource_logs_paging(server, client, order_by_column, order,
                                    env_with_logs):
    """Test querying resource logs with paging, using different sorting parameters."""
    environment, msg_timings = env_with_logs

    result = await client.resource_logs(
        environment,
        resource_id_a,
        filter={
            "minimal_log_level": "INFO",
            "timestamp": [f"ge:{msg_timings[2]}"]
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 7
    all_logs_in_expected_order = sorted(result.result["data"],
                                        key=itemgetter(order_by_column),
                                        reverse=order == "DESC")
    all_log_messages_in_expected_order = log_messages(
        all_logs_in_expected_order)

    result = await client.resource_logs(
        environment,
        resource_id_a,
        limit=2,
        sort=f"{order_by_column}.{order}",
        filter={
            "minimal_log_level": "INFO",
            "timestamp": [f"ge:{msg_timings[2]}"]
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 2
    assert log_messages(
        result.result["data"]) == all_log_messages_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 7,
        "before": 0,
        "after": 5,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.minimal_log_level=INFO" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": environment},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert log_messages(
        response["data"]) == all_log_messages_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 7,
        "before": 2,
        "after": 3,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    # The filters should be present for the links as well
    assert "limit=2" in url
    assert "filter.minimal_log_level=INFO" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": environment},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_log_messages = log_messages(response["data"])
    assert next_page_log_messages == all_log_messages_in_expected_order[4:6]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 7,
        "before": 4,
        "after": 1,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    assert "filter.minimal_log_level=INFO" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": environment},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_log_messages = log_messages(response["data"])
    assert prev_page_log_messages == all_log_messages_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 7,
        "before": 2,
        "after": 3,
        "page_size": 2
    }
async def test_desired_state_versions_paging(
    server, client, order: str,
    environments_with_versions: Tuple[Dict[str, uuid.UUID], List[datetime.datetime]]
):
    """Test querying desired state versions with paging, using different sorting parameters."""
    environments, timestamps = environments_with_versions
    env = environments["multiple_versions"]
    order_by_column = "version"

    result = await client.list_desired_state_versions(
        env,
        filter={
            "date": [f"gt:{timestamps[1].astimezone(datetime.timezone.utc)}"]
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 7
    all_versions_in_expected_order = sorted(result.result["data"],
                                            key=itemgetter(order_by_column),
                                            reverse=order == "DESC")
    all_version_numbers_in_expected_order = version_numbers(all_versions_in_expected_order)

    result = await client.list_desired_state_versions(
        env,
        limit=2,
        sort=f"{order_by_column}.{order}",
        filter={
            "date": [f"gt:{timestamps[1].astimezone(datetime.timezone.utc)}"]
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 2
    assert version_numbers(result.result["data"]) == all_version_numbers_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 7,
        "before": 0,
        "after": 5,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.date=" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert version_numbers(response["data"]) == all_version_numbers_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 7,
        "before": 2,
        "after": 3,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    # The filters should be present for the links as well
    assert "limit=2" in url
    assert "filter.date=" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_versions = version_numbers(response["data"])
    assert next_page_versions == all_version_numbers_in_expected_order[4:6]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 7,
        "before": 4,
        "after": 1,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    assert "filter.date=" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_versions = version_numbers(response["data"])
    assert prev_page_versions == all_version_numbers_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 7,
        "before": 2,
        "after": 3,
        "page_size": 2
    }

    result = await client.list_desired_state_versions(
        env,
        limit=100,
        sort=f"{order_by_column}.{order}",
        filter={
            "date": [f"gt:{timestamps[1].astimezone(datetime.timezone.utc)}"]
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 7
    assert version_numbers(result.result["data"]) == all_version_numbers_in_expected_order

    assert result.result["metadata"] == {
        "total": 7,
        "before": 0,
        "after": 0,
        "page_size": 100
    }
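Each of these link-following blocks is an unrolled iteration of the same loop: fetch a page, assert on it, move to links["next"]. Generalized, page walking reduces to a small helper; a hedged sketch (fetch_all_pages is hypothetical, but the header and link layout match the tests above):

import json
from typing import List, Optional

from tornado.httpclient import AsyncHTTPClient, HTTPRequest


async def fetch_all_pages(base_url: str, first_link: str, tid: str) -> List[dict]:
    # Follow "next" links until exhausted, concatenating every page's data.
    http_client = AsyncHTTPClient()
    items: List[dict] = []
    link: Optional[str] = first_link
    while link is not None:
        request = HTTPRequest(url=f"{base_url}{link}", headers={"X-Inmanta-tid": tid})
        response = await http_client.fetch(request, raise_error=False)
        assert response.code == 200
        body = json.loads(response.body.decode("utf-8"))
        items.extend(body["data"])
        link = body["links"].get("next")
    return items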
Example #8
async def test_resource_history_paging(server, client, order_by_column, order,
                                       env_with_resources):
    """Test querying resource history with paging, using different sorting parameters."""
    env, cm_times, ids, resources = env_with_resources
    resource_with_long_history = ids["long_history"]

    result = await client.resource_history(env.id, resource_with_long_history)
    assert result.code == 200
    assert len(result.result["data"]) == 5
    all_resources_in_expected_order = sorted(result.result["data"],
                                             key=itemgetter(
                                                 order_by_column,
                                                 "attribute_hash"),
                                             reverse=order == "DESC")
    all_resource_ids_in_expected_order = attribute_hashes(
        all_resources_in_expected_order)

    result = await client.resource_history(env.id,
                                           resource_with_long_history,
                                           limit=2,
                                           sort=f"{order_by_column}.{order}")
    assert result.code == 200
    assert len(result.result["data"]) == 2
    assert attribute_hashes(
        result.result["data"]) == all_resource_ids_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 5,
        "before": 0,
        "after": 3,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert attribute_hashes(
        response["data"]) == all_resource_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 5,
        "before": 2,
        "after": 1,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_instance_ids = attribute_hashes(response["data"])
    assert next_page_instance_ids == all_resource_ids_in_expected_order[4:]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is None
    assert response["metadata"] == {
        "total": 5,
        "before": 4,
        "after": 0,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_instance_ids = attribute_hashes(response["data"])
    assert prev_page_instance_ids == all_resource_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 5,
        "before": 2,
        "after": 1,
        "page_size": 2
    }

    result = await client.resource_history(env.id,
                                           resource_with_long_history,
                                           limit=5,
                                           sort=f"{order_by_column}.{order}")
    assert result.code == 200
    assert len(result.result["data"]) == 5
    assert attribute_hashes(
        result.result["data"]) == all_resource_ids_in_expected_order

    assert result.result["metadata"] == {
        "total": 5,
        "before": 0,
        "after": 0,
        "page_size": 5
    }
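The expected order here is computed client-side with itemgetter over the sort column plus attribute_hash as tie-breaker. One subtlety: reverse=True flips both keys, so this only matches the server if ties are also returned in descending secondary order. In isolation:

from operator import itemgetter

rows = [
    {"date": "2024-01-01", "attribute_hash": "b"},
    {"date": "2024-01-01", "attribute_hash": "a"},
    {"date": "2024-01-02", "attribute_hash": "c"},
]
# Descending by date; the tie on 2024-01-01 is broken by attribute_hash, also descending.
ordered = sorted(rows, key=itemgetter("date", "attribute_hash"), reverse=True)
assert [r["attribute_hash"] for r in ordered] == ["c", "b", "a"]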
Example #9
async def test_resource_action_pagination(postgresql_client, client,
                                          clienthelper, server, agent):
    """ Test querying resource actions via the API, including the pagination links."""
    project = data.Project(name="test")
    await project.insert()

    env = data.Environment(name="dev",
                           project=project.id,
                           repo_url="",
                           repo_branch="")
    await env.insert()

    # Add multiple versions of model
    for i in range(0, 11):
        cm = data.ConfigurationModel(
            environment=env.id,
            version=i,
            date=datetime.now(),
            total=1,
            version_info={},
        )
        await cm.insert()

    # Add resource actions for motd
    motd_first_start_time = datetime.now()
    earliest_action_id = uuid.uuid4()
    resource_action = data.ResourceAction(
        environment=env.id,
        version=0,
        resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={0}"],
        action_id=earliest_action_id,
        action=const.ResourceAction.deploy,
        started=motd_first_start_time - timedelta(minutes=1),
    )
    await resource_action.insert()
    resource_action.add_logs([
        data.LogLine.log(logging.INFO,
                         "Successfully stored version %(version)d",
                         version=0)
    ])
    await resource_action.save()

    action_ids_with_the_same_timestamp = []
    for i in range(1, 6):
        action_id = uuid.uuid4()
        action_ids_with_the_same_timestamp.append(action_id)
        resource_action = data.ResourceAction(
            environment=env.id,
            version=i,
            resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={i}"],
            action_id=action_id,
            action=const.ResourceAction.deploy,
            started=motd_first_start_time,
        )
        await resource_action.insert()
        resource_action.add_logs([
            data.LogLine.log(logging.INFO,
                             "Successfully stored version %(version)d",
                             version=i)
        ])
        await resource_action.save()
    action_ids_with_the_same_timestamp = sorted(
        action_ids_with_the_same_timestamp, reverse=True)
    later_action_id = uuid.uuid4()
    resource_action = data.ResourceAction(
        environment=env.id,
        version=6,
        resource_version_ids=[f"std::File[agent1,path=/etc/motd],v={6}"],
        action_id=later_action_id,
        action=const.ResourceAction.deploy,
        started=motd_first_start_time + timedelta(minutes=6),
    )
    await resource_action.insert()
    resource_action.add_logs([
        data.LogLine.log(logging.INFO,
                         "Successfully stored version %(version)d",
                         version=6)
    ])
    await resource_action.save()
    for i in range(0, 11):
        res1 = data.Resource.new(
            environment=env.id,
            resource_version_id="std::File[agent1,path=/etc/motd],v=%s" %
            str(i),
            status=const.ResourceState.deployed,
            last_deploy=datetime.now() + timedelta(minutes=i),
            attributes={
                "attr": [{
                    "a": 1,
                    "b": "c"
                }],
                "path": "/etc/motd"
            },
        )
        await res1.insert()

    result = await client.get_resource_actions(
        tid=env.id,
        resource_type="std::File",
        attribute="path",
        attribute_value="/etc/motd",
        last_timestamp=motd_first_start_time + timedelta(minutes=7),
        limit=2,
    )
    assert result.code == 200
    resource_actions = result.result["data"]
    expected_action_ids = [later_action_id] + action_ids_with_the_same_timestamp[:1]
    assert [
        uuid.UUID(resource_action["action_id"])
        for resource_action in resource_actions
    ] == expected_action_ids

    # Use the next link for pagination
    next_page = result.result["links"]["next"]
    port = opt.get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    url = f"{base_url}{next_page}"
    http_client = AsyncHTTPClient()
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    second_page_action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert second_page_action_ids == action_ids_with_the_same_timestamp[1:3]
    next_page = response["links"]["next"]
    url = f"{base_url}{next_page}"
    request.url = url
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    third_page_action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert third_page_action_ids == action_ids_with_the_same_timestamp[3:5]
    # Go back to the previous page
    prev_page = response["links"]["prev"]
    url = f"{base_url}{prev_page}"
    request.url = url
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert action_ids == second_page_action_ids
    # And forward to the third page again
    next_page = response["links"]["next"]
    url = f"{base_url}{next_page}"
    request.url = url
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    action_ids = [
        uuid.UUID(resource_action["action_id"])
        for resource_action in response["data"]
    ]
    assert action_ids == third_page_action_ids
async def test_resources_paging(server, client, order_by_column, order,
                                env_with_resources):
    """Test querying resources with paging, using different sorting parameters."""
    env = env_with_resources
    version = 3
    result = await client.get_resources_in_version(
        env.id,
        version,
        filter={"agent": ["1", "2"]},
    )
    assert result.code == 200
    assert len(result.result["data"]) == 5
    flattened_resources = [
        VersionedResource(**res).all_fields for res in result.result["data"]
    ]
    all_resources_in_expected_order = sorted(flattened_resources,
                                             key=itemgetter(
                                                 order_by_column,
                                                 "resource_version_id"),
                                             reverse=order == "DESC")
    all_resource_ids_in_expected_order = resource_ids(
        all_resources_in_expected_order)

    result = await client.get_resources_in_version(
        env.id,
        version,
        limit=2,
        sort=f"{order_by_column}.{order}",
        filter={"agent": ["1", "2"]})
    assert result.code == 200
    assert len(result.result["data"]) == 2
    assert resource_ids(
        result.result["data"]) == all_resource_ids_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 5,
        "before": 0,
        "after": 3,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.agent=1" in url
    assert "filter.agent=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert resource_ids(
        response["data"]) == all_resource_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 5,
        "before": 2,
        "after": 1,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    # The filters should be present for the links as well
    assert "limit=2" in url
    assert "filter.agent=1" in url
    assert "filter.agent=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_instance_ids = resource_ids(response["data"])
    assert next_page_instance_ids == all_resource_ids_in_expected_order[4:]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is None
    assert response["metadata"] == {
        "total": 5,
        "before": 4,
        "after": 0,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    assert "filter.agent=1" in url
    assert "filter.agent=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(env.id)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_instance_ids = resource_ids(response["data"])
    assert prev_page_instance_ids == all_resource_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 5,
        "before": 2,
        "after": 1,
        "page_size": 2
    }
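The list-valued agent filter surfaces in the links as a repeated query parameter (filter.agent=1&filter.agent=2). As a hedged aside, this is the standard repeated-parameter encoding, and it round-trips with the standard library:

from urllib.parse import parse_qs, urlencode

# A sequence of pairs keeps repeated keys, unlike a plain dict.
query = urlencode([("limit", 2), ("filter.agent", "1"), ("filter.agent", "2")])
assert query == "limit=2&filter.agent=1&filter.agent=2"
assert parse_qs(query) == {"limit": ["2"], "filter.agent": ["1", "2"]}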
Example #11
async def test_facts_paging(server, client, order_by_column, order,
                            env_with_facts):
    """Test querying facts with paging, using different sorting parameters."""
    env, _, _ = env_with_facts
    result = await client.get_all_facts(
        env,
        filter={"name": "res"},
    )
    assert result.code == 200
    assert len(result.result["data"]) == 6
    all_facts_in_expected_order = sorted(result.result["data"],
                                         key=itemgetter(order_by_column, "id"),
                                         reverse=order == "DESC")
    all_fact_ids_in_expected_order = fact_ids(all_facts_in_expected_order)

    result = await client.get_all_facts(env,
                                        limit=2,
                                        sort=f"{order_by_column}.{order}",
                                        filter={"name": "res"})
    assert result.code == 200
    assert len(result.result["data"]) == 2
    assert fact_ids(
        result.result["data"]) == all_fact_ids_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 6,
        "before": 0,
        "after": 4,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.name=res" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert fact_ids(response["data"]) == all_fact_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    # The filters should be present for the links as well
    assert "limit=2" in url
    assert "filter.name=res" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_ids = fact_ids(response["data"])
    assert next_page_ids == all_fact_ids_in_expected_order[4:]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is None
    assert response["metadata"] == {
        "total": 6,
        "before": 4,
        "after": 0,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    assert "filter.name=res" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_ids = fact_ids(response["data"])
    assert prev_page_ids == all_fact_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }
Example #12
async def test_parameters_paging(server, client, order_by_column, order,
                                 env_with_parameters):
    """Test querying parameters with paging, using different sorting parameters."""
    env, timestamps = env_with_parameters
    result = await client.get_parameters(
        env,
        filter={"source": "plugin"},
    )
    assert result.code == 200
    assert len(result.result["data"]) == 6
    all_parameters = result.result["data"]
    for parameter in all_parameters:
        if not parameter["updated"]:
            parameter["updated"] = datetime.datetime.min.replace(
                tzinfo=datetime.timezone.utc)
        else:
            parameter["updated"] = datetime.datetime.strptime(
                parameter["updated"],
                "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=datetime.timezone.utc)
    all_parameters_in_expected_order = sorted(all_parameters,
                                              key=itemgetter(
                                                  order_by_column, "id"),
                                              reverse=order == "DESC")
    all_parameter_ids_in_expected_order = parameter_ids(
        all_parameters_in_expected_order)

    result = await client.get_parameters(env,
                                         limit=2,
                                         sort=f"{order_by_column}.{order}",
                                         filter={"source": "plugin"})
    assert result.code == 200
    assert len(result.result["data"]) == 2
    assert parameter_ids(
        result.result["data"]) == all_parameter_ids_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 6,
        "before": 0,
        "after": 4,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.source=plugin" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert parameter_ids(
        response["data"]) == all_parameter_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    # The filters should be present for the links as well
    assert "limit=2" in url
    assert "filter.source=plugin" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_ids = parameter_ids(response["data"])
    assert next_page_ids == all_parameter_ids_in_expected_order[4:]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is None
    assert response["metadata"] == {
        "total": 6,
        "before": 4,
        "after": 0,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    assert "filter.source=plugin" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_ids = parameter_ids(response["data"])
    assert prev_page_ids == all_parameter_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }
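The normalization loop at the top of this test maps a missing updated value to datetime.min so unset timestamps sort before any real one. Extracted as a standalone helper (a sketch; the format string is the one the test uses):

import datetime
from typing import Optional

UTC = datetime.timezone.utc


def as_sortable_timestamp(value: Optional[str]) -> datetime.datetime:
    # Missing timestamps become the minimum so they sort before real values.
    if not value:
        return datetime.datetime.min.replace(tzinfo=UTC)
    return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=UTC)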
Example #13
async def test_notifications_paging(server, client,
                                    environment_with_notifications, order):
    environment = environment_with_notifications
    order_by_column = "created"

    result = await client.list_notifications(
        environment,
        filter={
            "read": False,
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 6

    all_notifications_in_expected_order = sorted(result.result["data"],
                                                 key=itemgetter(
                                                     order_by_column, "id"),
                                                 reverse=order == "DESC")
    all_notification_ids_in_expected_order = notification_ids(
        all_notifications_in_expected_order)

    result = await client.list_notifications(
        environment,
        limit=2,
        sort=f"{order_by_column}.{order}",
        filter={
            "read": False,
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 2

    assert notification_ids(
        result.result["data"]) == all_notification_ids_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 6,
        "before": 0,
        "after": 4,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.read=" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert notification_ids(
        response["data"]) == all_notification_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert notification_ids(
        response["data"]) == all_notification_ids_in_expected_order[4:]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is None
    assert response["metadata"] == {
        "total": 6,
        "before": 4,
        "after": 0,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": str(environment)},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert notification_ids(
        response["data"]) == all_notification_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }

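    # A limit equal to the total should return every notification in a single page.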
    result = await client.list_notifications(
        environment,
        limit=6,
        sort=f"{order_by_column}.{order}",
        filter={
            "read": False,
        },
    )
    assert result.code == 200
    assert len(result.result["data"]) == 6
    assert notification_ids(
        result.result["data"]) == all_notification_ids_in_expected_order

    assert result.result["metadata"] == {
        "total": 6,
        "before": 0,
        "after": 0,
        "page_size": 6
    }
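The test depends on a notification_ids helper that this example does not include. A minimal sketch, assuming each notification record is a dict carrying an "id" field (the exact record shape is an assumption):

from typing import Any, Dict, List, Sequence


def notification_ids(notifications: Sequence[Dict[str, Any]]) -> List[Any]:
    # Hypothetical helper: project each notification record onto its "id" field,
    # preserving the order of the input sequence.
    return [notification["id"] for notification in notifications]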
Example No. 14
    async def run(
        self,
        force_update: Optional[bool] = False
    ) -> Tuple[bool, Optional[model.CompileData]]:
        success = False
        now = datetime.datetime.now()
        await self.request.update_fields(started=now)

        compile_data_json_file = NamedTemporaryFile()
        try:
            await self._start_stage("Init", "")

            environment_id = self.request.environment
            project_dir = self._project_dir

            env = await data.Environment.get_by_id(environment_id)

            env_string = ", ".join([
                f"{k}='{v}'"
                for k, v in self.request.environment_variables.items()
            ])
            await self.stage.update_streams(
                out=f"Using extra environment variables during compile {env_string}\n"
            )

            if env is None:
                await self._error("Environment %s does not exist." %
                                  environment_id)
                await self._end_stage(-1)
                return False, None

            inmanta_path = [sys.executable, "-m", "inmanta.app"]

            if not os.path.exists(project_dir):
                await self._info(
                    "Creating project directory for environment %s at %s" %
                    (environment_id, project_dir))
                os.mkdir(project_dir)

            repo_url: str = env.repo_url
            repo_branch: str = env.repo_branch
            if not repo_url:
                if not os.path.exists(os.path.join(project_dir,
                                                   "project.yml")):
                    await self._warning(
                        f"Failed to compile: no project found in {project_dir} and no repository set"
                    )
                await self._end_stage(0)
            else:
                await self._end_stage(0)
                # Check out the repository: clone it if missing, otherwise
                # optionally fetch updates and switch to the configured branch

                if not os.path.exists(os.path.join(project_dir, ".git")):
                    cmd = ["git", "clone", repo_url, "."]
                    if repo_branch:
                        cmd.extend(["-b", repo_branch])
                    result = await self._run_compile_stage(
                        "Cloning repository", cmd, project_dir)
                    if result.returncode is None or result.returncode > 0:
                        return False, None

                elif force_update or self.request.force_update:
                    result = await self._run_compile_stage(
                        "Fetching changes", ["git", "fetch", repo_url],
                        project_dir)
                if repo_branch:
                    branch = await self.get_branch()
                    if branch is not None and repo_branch != branch:
                        result = await self._run_compile_stage(
                            f"switching branch from {branch} to {repo_branch}",
                            ["git", "checkout", repo_branch], project_dir)

                if force_update or self.request.force_update:
                    await self._run_compile_stage("Pulling updates",
                                                  ["git", "pull"], project_dir)
                    LOGGER.info("Installing and updating modules")
                    await self._run_compile_stage(
                        "Updating modules",
                        inmanta_path + ["modules", "update"], project_dir)

            server_address = opt.server_address.get()
            server_port = opt.get_bind_port()
            cmd = inmanta_path + [
                "-vvv",
                "export",
                "-X",
                "-e",
                str(environment_id),
                "--server_address",
                server_address,
                "--server_port",
                str(server_port),
                "--metadata",
                json.dumps(self.request.metadata),
                "--export-compile-data",
                "--export-compile-data-file",
                compile_data_json_file.name,
            ]

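            # When no export is requested, the output is redirected to a throw-away
            # temporary file via -j (assumed here to be the export command's
            # "write JSON to file" flag) instead of being pushed to the server.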
            if not self.request.do_export:
                f = NamedTemporaryFile()
                cmd.append("-j")
                cmd.append(f.name)

            if config.Config.get("server", "auth", False):
                token = encode_token(["compiler", "api"], str(environment_id))
                cmd.append("--token")
                cmd.append(token)

            if opt.server_ssl_cert.get() is not None:
                cmd.append("--ssl")

            if opt.server_ssl_ca_cert.get() is not None:
                cmd.append("--ssl-ca-cert")
                cmd.append(opt.server_ssl_ca_cert.get())

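            # Reset the captured stdout tail; the committed version number is
            # parsed from it once the compile stage below has finished.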
            self.tail_stdout = ""

            env_vars_compile: Dict[str, str] = os.environ.copy()
            env_vars_compile.update(self.request.environment_variables)

            result = await self._run_compile_stage(
                "Recompiling configuration model",
                cmd,
                project_dir,
                env=env_vars_compile)
            success = result.returncode == 0
            if not success:
                if self.request.do_export:
                    LOGGER.warning("Compile %s failed", self.request.id)
                else:
                    LOGGER.debug("Compile %s failed", self.request.id)

            print("---", self.tail_stdout, result.errstream)
            match = re.search(r"Committed resources with version (\d+)",
                              self.tail_stdout)
            if match:
                self.version = int(match.group(1))
        except CancelledError:
            # This compile was cancelled. Catch it here otherwise a warning will be printed in the logs because of an
            # unhandled exception in a backgrounded coroutine.
            pass

        except Exception:
            LOGGER.exception("An error occured while recompiling")

        finally:
            with compile_data_json_file as file:
                compile_data_json: str = file.read().decode()
                if compile_data_json:
                    try:
                        return success, model.CompileData.parse_raw(
                            compile_data_json)
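                    # Note: pydantic v1's parse_raw reports malformed JSON as a
                    # ValidationError, so the JSONDecodeError branch below mainly
                    # serves as a safety net.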
                    except json.JSONDecodeError:
                        LOGGER.warning(
                            "Failed to load compile data json for compile %s. Invalid json: '%s'",
                            self.request.id,
                            compile_data_json,
                        )
                    except pydantic.ValidationError:
                        LOGGER.warning(
                            "Failed to parse compile data for compile %s. Json does not match CompileData model: '%s'",
                            self.request.id,
                            compile_data_json,
                        )
            return success, None
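Note the return contract of run(): it always yields a (success, compile_data) pair. Compile data is only attached when the compiler wrote a parseable JSON file; a failed parse logs a warning and degrades to (success, None) rather than failing the compile.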