Example 1: constructing a SyncClient("cmdline"), with host and port falling back to the cmdline_rest_transport configuration
    def __init__(self, host: Optional[str], port: Optional[int]) -> None:
        if host is None:
            self.host = cmdline_rest_transport.host.get()
        else:
            self.host = host
            Config.set("cmdline_rest_transport", "host", host)

        if port is None:
            self.port = cmdline_rest_transport.port.get()
        else:
            self.port = port
            Config.set("cmdline_rest_transport", "port", str(port))

        self._client = protocol.SyncClient("cmdline")
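For illustration, a short, hedged usage sketch of this constructor; the wrapper class name CmdLineClient is an assumption, not a name taken from the snippet above.

# Hypothetical usage (CmdLineClient is an assumed name for the enclosing class):
# an explicit endpoint is used as-is and is also written back to the
# cmdline_rest_transport config section
client = CmdLineClient(host="127.0.0.1", port=8888)
# passing None for both falls back to the configured cmdline_rest_transport values
client = CmdLineClient(host=None, port=None)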
Example 2: reserving a version number from the server through a SyncClient("compiler")
    def get_version(self, no_commit=False):
        if no_commit:
            return 0
        tid = cfg_env.get()
        if tid is None:
            LOGGER.warning(
                "The environment for this model should be set for export to server!"
            )
            return 0
        else:
            conn = protocol.SyncClient("compiler")
            result = conn.reserve_version(tid)
            if result.code != 200:
                raise Exception(
                    f"Unable to reserve version number from server (msg: {result.result})"
                )
            return result.result["data"]
Example 3: a pytest-style fixture that yields a SyncClient("client")
def sync_client_multi(server_multi):
    client = protocol.SyncClient("client")
    yield client
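A hedged sketch of how such a fixture is typically consumed; it assumes sync_client_multi is registered as a pytest fixture (for example with @pytest.fixture) and that server_multi starts a test server. The test name and assertion below are illustrative only.

# Hypothetical test consuming the fixture above:
def test_server_reachable(sync_client_multi):
    # the fixture injects a ready-to-use SyncClient pointed at the test server
    result = sync_client_multi.list_projects()
    assert result.code == 200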
Example 4: committing resources and uploading files to the server through a SyncClient("compiler")
    def commit_resources(self, version: int, resources: List[Dict[str, str]],
                         metadata: Dict[str, str], model: Dict) -> None:
        """
        Commit the entire list of resources to the configuration server.
        """
        tid = cfg_env.get()
        if tid is None:
            LOGGER.error("The environment for this model should be set!")
            raise Exception("The environment for this model should be set!")

        conn = protocol.SyncClient("compiler")
        self.deploy_code(conn, tid, version)

        LOGGER.info("Uploading %d files" % len(self._file_store))

        # collect all hashes and send them at once to the server to check
        # if they are already uploaded
        hashes = list(self._file_store.keys())

        res = conn.stat_files(files=hashes)

        if res.code != 200:
            raise Exception("Unable to check status of files at server")

        to_upload = res.result["files"]

        LOGGER.info("Only %d files are new and need to be uploaded" %
                    len(to_upload))
        for hash_id in to_upload:
            content = self._file_store[hash_id]

            res = conn.upload_file(
                id=hash_id, content=base64.b64encode(content).decode("ascii"))

            if res.code != 200:
                LOGGER.error("Unable to upload file with hash %s" % hash_id)
            else:
                LOGGER.debug("Uploaded file with hash %s" % hash_id)

        # Collecting version information
        version_info = {const.EXPORT_META_DATA: metadata, "model": model}

        # TODO: start transaction
        LOGGER.info("Sending resource updates to server")
        for res in resources:
            LOGGER.debug("  %s", res["id"])

        res = conn.put_version(
            tid=tid,
            version=version,
            resources=resources,
            unknowns=unknown_parameters,
            resource_state=self._resource_state,
            version_info=version_info,
            compiler_version=get_compiler_version(),
        )

        if res.code != 200:
            LOGGER.error("Failed to commit resource updates (%s)",
                         res.result["message"])
            raise Exception("Failed to commit resource updates (%s)" %
                            res.result["message"])
Example 5: the CLI export entry point, which applies the CLI options to the config, compiles and exports the model, and optionally triggers a deploy through a SyncClient("compiler")
def export(options: argparse.Namespace) -> None:
    if options.environment is not None:
        Config.set("config", "environment", options.environment)

    if options.server is not None:
        Config.set("compiler_rest_transport", "host", options.server)

    if options.port is not None:
        Config.set("compiler_rest_transport", "port", options.port)

    if options.token is not None:
        Config.set("compiler_rest_transport", "token", options.token)

    if options.ssl:
        Config.set("compiler_rest_transport", "ssl", "true")

    if options.ca_cert is not None:
        Config.set("compiler_rest_transport", "ssl-ca-cert-file", options.ca_cert)

    if options.export_compile_data is True:
        Config.set("compiler", "export_compile_data", "true")

    if options.export_compile_data_file is not None:
        Config.set("compiler", "export_compile_data_file", options.export_compile_data_file)

    compiler_features.read_options_to_config(options)

    # try to parse the metadata as json. If a normal string, create json for it.
    if options.metadata is not None and len(options.metadata) > 0:
        try:
            metadata = json.loads(options.metadata)
        except json.decoder.JSONDecodeError:
            metadata = {"message": options.metadata}
    else:
        metadata = {"message": "Manual compile on the CLI by user"}

    if "cli-user" not in metadata and "USERNAME" in os.environ:
        metadata["cli-user"] = os.environ["USERNAME"]

    if "hostname" not in metadata:
        metadata["hostname"] = socket.gethostname()

    if "type" not in metadata:
        metadata["type"] = "manual"

    module.Project.get(options.main_file)

    from inmanta.export import Exporter  # noqa: H307

    exp = None
    types: Optional[Dict[str, inmanta_type.Type]]
    scopes: Optional[Namespace]
    try:
        (types, scopes) = do_compile()
    except Exception as e:
        exp = e
        types, scopes = (None, None)

    # Even if the compile failed we might have collected additional data such as unknowns. So
    # continue the export

    export = Exporter(options)
    results = export.run(
        types, scopes, metadata=metadata, model_export=options.model_export, export_plugin=options.export_plugin
    )
    version = results[0]

    if exp is not None:
        raise exp

    if options.model:
        modelexporter = ModelExporter(types)
        with open("testdump.json", "w", encoding="utf-8") as fh:
            print(yaml.dump(modelexporter.export_all()))
            json.dump(modelexporter.export_all(), fh)

    if options.deploy:
        conn = protocol.SyncClient("compiler")
        LOGGER.info("Triggering deploy for version %d" % version)
        tid = cfg_env.get()
        agent_trigger_method = const.AgentTriggerMethod.get_agent_trigger_method(options.full_deploy)
        conn.release_version(tid, version, True, agent_trigger_method)
Example 6: lazily creating and caching a single SyncClient("compiler") at class level
 def get_sync_client(self) -> "protocol.SyncClient":
     if self.__class__.__sync_client is None:
         self.__class__.__sync_client = protocol.SyncClient("compiler")
     return self.__class__.__sync_client
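For context, a minimal, self-contained sketch of the class-level attribute this getter relies on; the class name CompilerClientMixin and the attribute declaration are assumptions for illustration, not part of the snippet above.

from typing import Optional

from inmanta import protocol  # the protocol module used throughout these examples


class CompilerClientMixin:
    # hypothetical enclosing class: the name-mangled class attribute caches
    # one SyncClient that is shared by all instances
    __sync_client: Optional["protocol.SyncClient"] = None

    def get_sync_client(self) -> "protocol.SyncClient":
        # construct the client once, then reuse it on every subsequent call
        if self.__class__.__sync_client is None:
            self.__class__.__sync_client = protocol.SyncClient("compiler")
        return self.__class__.__sync_client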
Example 7: setting up the configured project and environment on the embedded server with a SyncClient("client")
    def setup_project(self) -> bool:
        """
        Set up the configured project and environment on the embedded server
        """
        self._client = protocol.SyncClient("client")

        # get config
        project_name = cfg_prj.get()
        if project_name is None:
            LOGGER.error(
                "The name of the project should be configured for an all-in-one deploy"
            )
            return False

        environment_name = cfg_env.get()
        if environment_name is None:
            LOGGER.error(
                "The name of the environment in the project should be configured for an all-in-one deploy"
            )
            return False

        # wait and check to see if the server is up
        tries = 0
        while tries < MAX_TRIES:
            try:
                self._client.list_projects()
                break
            except Exception:
                tries += 1

        # get project id
        projects = self._client.list_projects()
        if projects.code != 200:
            LOGGER.error("Unable to retrieve project listing from the server")
            return False

        project_id = None
        for project in projects.result["projects"]:
            if project_name == project["name"]:
                project_id = project["id"]
                break

        if project_id is None:
            project_id = self._create_project(project_name)
            if not project_id:
                return False

        # get or create the environment
        environments = self._client.list_environments()
        if environments.code != 200:
            LOGGER.error("Unable to retrieve environments from server")
            return False

        env_id = None
        for env in environments.result["environments"]:
            if project_id == env["project"] and environment_name == env["name"]:
                env_id = env["id"]
                break

        if env_id is None:
            env_id = self._create_environment(project_id, environment_name)
            if not env_id:
                return False

        self._environment_id = env_id

        # link the project into the server environment
        server_env = os.path.join(self._data_path, "state", "server",
                                  "environments", self._environment_id)
        full_path = os.path.abspath(self._project_path)
        if not os.path.islink(
                server_env) or os.readlink(server_env) != full_path:
            if os.path.exists(server_env):
                os.unlink(server_env)
            os.symlink(full_path, server_env)

        if not os.path.exists(os.path.join(
                full_path, ".git")) and self._options.dashboard:
            LOGGER.error(
                "Make sure the project is a git repository, otherwise the embedded server cannot recompile the model."
            )

        return True