Example 1
    def deploy(
        self,
        api_spec: Dict[str, Any],
        force: bool = True,
        wait: bool = False,
    ):
        """
        Deploy or update an API.

        Args:
            api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/ for schema.
            force: Override any in-progress API updates.
            wait: Block until the API is ready.

        Returns:
            Deployment status, API specification, and endpoint for each API.
        """

        temp_deploy_dir = util.cli_config_dir() / "deployments" / api_spec["name"]
        if temp_deploy_dir.exists():
            shutil.rmtree(str(temp_deploy_dir))
        temp_deploy_dir.mkdir(parents=True)

        cortex_yaml_path = os.path.join(temp_deploy_dir, "cortex.yaml")

        with util.open_temporarily(cortex_yaml_path, "w", delete_parent_if_empty=True) as f:
            yaml.dump([api_spec], f)  # write a list
            return self.deploy_from_file(cortex_yaml_path, force=force, wait=wait)
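A minimal usage sketch for `deploy`, assuming `client` is an instance of the surrounding class; the spec values are illustrative and only use keys that appear elsewhere in these examples (`name`, `kind`, `predictor`).

# Hedged usage sketch: `client` and the api_spec values are assumptions.
api_spec = {
    "name": "text-classifier",
    "kind": "RealtimeAPI",
    "predictor": {
        "type": "python",
        "path": "predictor.py",
    },
}

result = client.deploy(api_spec, force=True, wait=False)
print(result)  # deployment status, API specification, and endpoint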
Example 2
    def patch(self, api_spec: dict, force: bool = False) -> dict:
        """
        Update the API specification for an API that has already been deployed.

        Args:
            api_spec: The new API specification to apply.
            force: Override an already in-progress API update.

        Returns:
            The parsed JSON output from the CLI describing the patched API.
        """

        cortex_yaml_file = (util.cli_config_dir() / "deployments" /
                            f"cortex-{str(uuid.uuid4())}.yaml")
        with util.open_temporarily(cortex_yaml_file, "w") as f:
            yaml.dump([api_spec], f)
            args = [
                "patch", cortex_yaml_file, "--env", self.env_name, "-o", "json"
            ]

            if force:
                args.append("--force")

            output = run_cli(args, hide_output=True)
            return json.loads(output.strip())
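A hedged sketch of calling `patch` to update an already-deployed API; `client` again stands in for an instance of the class above, and the spec contents are illustrative.

# Hedged usage sketch: `client` and the spec contents are assumptions.
updated_spec = {
    "name": "text-classifier",
    "kind": "RealtimeAPI",
    "predictor": {"type": "python", "path": "predictor.py"},
}

response = client.patch(updated_spec, force=True)
print(response)  # parsed JSON output from the underlying CLI call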
Example 3
def _create_default_scope(optional_tags: dict = {}) -> sentry_sdk.Scope:
    """
    Creates default scope. Adds user ID as tag to the reported event.
    Can add optional tags.
    """

    scope = sentry_sdk.Scope()

    user_id = None
    client_id_file_path = util.cli_config_dir() / "client-id.txt"
    if not client_id_file_path.is_file():
        client_id_file_path.parent.mkdir(parents=True, exist_ok=True)
        client_id_file_path.write_text(str(uuid4()))
    user_id = client_id_file_path.read_text()

    if user_id:
        scope.set_user({"id": user_id})

    for k, v in optional_tags.items():
        scope.set_tag(k, v)

    return scope
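A short, hedged sketch of consuming the helper above: the DSN and the "command" tag are placeholders, and the Client/Hub objects are standard sentry_sdk constructs shown only to illustrate where the scope would be attached.

import sentry_sdk

# Hedged usage sketch: placeholder DSN and tag name; _create_default_scope is
# the helper defined above.
scope = _create_default_scope({"command": "deploy"})
hub = sentry_sdk.Hub(sentry_sdk.Client(dsn="https://key@sentry.invalid/1"), scope)
hub.capture_message("cli invoked")  # event carries the client-id user and tags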
Example 4
    def create_api(
        self,
        api_spec: dict,
        predictor=None,
        task=None,
        requirements=[],
        conda_packages=[],
        project_dir: Optional[str] = None,
        force: bool = True,
        wait: bool = False,
    ) -> list:
        """
        Deploy an API.

        Args:
            api_spec: A dictionary defining a single Cortex API. See https://docs.cortex.dev/v/master/ for schema.
            predictor: A Cortex Predictor class implementation. Not required for TaskAPI/TrafficSplitter kinds.
            task: A callable class/function implementation. Not required for RealtimeAPI/BatchAPI/TrafficSplitter kinds.
            requirements: A list of PyPI dependencies that will be installed before the predictor class implementation is invoked.
            conda_packages: A list of Conda dependencies that will be installed before the predictor class implementation is invoked.
            project_dir: Path to a python project.
            force: Override any in-progress API updates.
            wait: Streams logs until the APIs are ready.

        Returns:
            Deployment status, API specification, and endpoint for each API.
        """

        if project_dir is not None:
            if predictor is not None:
                raise ValueError(
                    "`predictor` and `project_dir` parameters cannot be specified at the same time, please choose one"
                )
            if task is not None:
                raise ValueError(
                    "`task` and `project_dir` parameters cannot be specified at the same time, please choose one"
                )

        if project_dir is not None:
            cortex_yaml_path = os.path.join(project_dir,
                                            f".cortex-{uuid.uuid4()}.yaml")

            with util.open_temporarily(cortex_yaml_path, "w") as f:
                yaml.dump([api_spec], f)  # write a list
                return self._deploy(cortex_yaml_path, force, wait)

        api_kind = api_spec.get("kind")
        if api_kind == "TrafficSplitter":
            if predictor:
                raise ValueError(
                    f"`predictor` parameter cannot be specified for {api_kind} kind"
                )
            if task:
                raise ValueError(
                    f"`task` parameter cannot be specified for {api_kind} kind"
                )
        elif api_kind == "TaskAPI":
            if predictor:
                raise ValueError(
                    f"`predictor` parameter cannot be specified for {api_kind} kind"
                )
            if task is None:
                raise ValueError(
                    f"`task` parameter must be specified for {api_kind} kind")
        elif api_kind in ["BatchAPI", "RealtimeAPI"]:
            if not predictor:
                raise ValueError(
                    f"`predictor` parameter must be specified for {api_kind}")
            if task:
                raise ValueError(
                    f"`task` parameter cannot be specified for {api_kind}")
        else:
            raise ValueError(
                f"invalid kind {api_kind}; `api_spec` must have the `kind` field set to one of the following kinds: {['TrafficSplitter', 'TaskAPI', 'BatchAPI', 'RealtimeAPI']}"
            )

        if api_spec.get("name") is None:
            raise ValueError("`api_spec` must have the `name` key set")

        project_dir = util.cli_config_dir() / "deployments" / api_spec["name"]

        if project_dir.exists():
            shutil.rmtree(str(project_dir))

        project_dir.mkdir(parents=True)

        cortex_yaml_path = os.path.join(project_dir, "cortex.yaml")

        if api_kind == "TrafficSplitter":
            # for deploying a traffic splitter
            with open(cortex_yaml_path, "w") as f:
                yaml.dump([api_spec], f)  # write a list
                return self._deploy(cortex_yaml_path, force=force, wait=wait)

        actual_version = (
            f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
        )

        if actual_version != EXPECTED_PYTHON_VERSION:
            is_python_set = any(
                conda_dep.startswith("python=") or "::python=" in conda_dep
                for conda_dep in conda_packages)

            if not is_python_set:
                conda_packages = [f"python={actual_version}", "pip=19.*"] + conda_packages

        if len(requirements) > 0:
            with open(project_dir / "requirements.txt",
                      "w") as requirements_file:
                requirements_file.write("\n".join(requirements))

        if len(conda_packages) > 0:
            with open(project_dir / "conda-packages.txt", "w") as conda_file:
                conda_file.write("\n".join(conda_packages))

        if api_kind in ["BatchAPI", "RealtimeAPI"]:
            if not inspect.isclass(predictor):
                raise ValueError(
                    "`predictor` parameter must be a class definition")

            impl_rel_path = self._save_impl(predictor, project_dir,
                                            "predictor")
            if api_spec.get("predictor") is None:
                api_spec["predictor"] = {}

            if predictor.__name__ == "PythonPredictor":
                predictor_type = "python"
            elif predictor.__name__ == "TensorFlowPredictor":
                predictor_type = "tensorflow"
            else:
                # fail clearly instead of hitting an unbound predictor_type below
                raise ValueError(
                    "`predictor` class must be named `PythonPredictor` or `TensorFlowPredictor`")

            api_spec["predictor"]["path"] = impl_rel_path
            api_spec["predictor"]["type"] = predictor_type

        if api_kind == "TaskAPI":
            if not callable(task):
                raise ValueError(
                    "`task` parameter must be a callable (e.g. a function definition or a class definition called `Task` with a `__call__` method implemented)"
                )

            impl_rel_path = self._save_impl(task, project_dir, "task")
            if api_spec.get("definition") is None:
                api_spec["definition"] = {}
            api_spec["definition"]["path"] = impl_rel_path

        with open(cortex_yaml_path, "w") as f:
            yaml.dump([api_spec], f)  # write a list
            return self._deploy(cortex_yaml_path, force=force, wait=wait)
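Finally, a hedged end-to-end sketch of `create_api` deploying a RealtimeAPI from an in-memory predictor class (no `project_dir`); `client` is an assumed instance of the surrounding class, and the class must be named `PythonPredictor` so the type detection above resolves it to "python".

# Hedged usage sketch: `client`, the spec values, and the requirements list are assumptions.
class PythonPredictor:
    def __init__(self, config):
        self.config = config  # populated from the API spec at deploy time

    def predict(self, payload):
        return payload  # echo the request payload back


api_spec = {"name": "echo-api", "kind": "RealtimeAPI"}

result = client.create_api(
    api_spec,
    predictor=PythonPredictor,
    requirements=["requests"],  # written to requirements.txt before deploying
    force=True,
    wait=False,
)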