Example #1
def resolve_workflow_name(obj, name):
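    """Resolve the name under which the flow is deployed to Argo Workflows.

    Returns a (workflow_name, token_prefix, is_project) tuple. @project
    deployments derive the name from current.project_flow_name; otherwise the
    supplied --name (or the flow name) is used. In both cases the name is
    checked against Argo Workflows' 253-character, RFC 1123 naming rules and
    sanitized where possible.
    """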
    project = current.get("project_name")
    obj._is_workflow_name_modified = False
    if project:
        if name:
            raise MetaflowException(
                "--name is not supported for @projects. Use --branch instead."
            )
        workflow_name = current.project_flow_name
        project_branch = to_bytes(".".join((project, current.branch_name)))
        token_prefix = (
            "mfprj-%s"
            % to_unicode(base64.b32encode(sha1(project_branch).digest()))[:16]
        )
        is_project = True
        # Argo Workflow names can't be longer than 253 characters, so we truncate
        # by default. Also, while project and branch allow for underscores, Argo
        # Workflows doesn't (DNS Subdomain names as defined in RFC 1123) - so we will
        # remove any underscores as well as convert the name to lower case.
        if len(workflow_name) > 253:
            name_hash = to_unicode(
                base64.b32encode(sha1(to_bytes(workflow_name)).digest())
            )[:8].lower()
            workflow_name = "%s-%s" % (workflow_name[:242], name_hash)
            obj._is_workflow_name_modified = True
        if not VALID_NAME.search(workflow_name):
            workflow_name = (
                re.compile(r"^[^A-Za-z0-9]+")
                .sub("", workflow_name)
                .replace("_", "")
                .lower()
            )
            obj._is_workflow_name_modified = True
    else:
        if name and not VALID_NAME.search(name):
            raise MetaflowException(
                "Name '%s' contains invalid characters. The name must consist "
                "of lower case alphanumeric characters, '-' or '.', and must "
                "start and end with an alphanumeric character." % name
            )

        workflow_name = name if name else current.flow_name
        token_prefix = workflow_name
        is_project = False

        if len(workflow_name) > 253:
            msg = ("The full name of the workflow:\n*%s*\nis longer than 253 "
                   "characters.\n\n"
                   "To deploy this workflow to Argo Workflows, please "
                   "assign a shorter name\nusing the option\n"
                   "*argo-workflows --name <name> create*." % workflow_name)
            raise ArgoWorkflowsNameTooLong(msg)

        if not VALID_NAME.search(workflow_name):
            workflow_name = (
                re.compile(r"^[^A-Za-z0-9]+")
                .sub("", workflow_name)
                .replace("_", "")
                .lower()
            )
            obj._is_workflow_name_modified = True

    return workflow_name, token_prefix.lower(), is_project
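The sanitization above (truncate-and-hash past 253 characters, strip leading non-alphanumerics, drop underscores, lower-case) can be exercised on its own. The sketch below uses a hypothetical helper, sanitize_argo_name, that is not part of the original code and applies the rules unconditionally, purely for illustration:

import base64
import re
from hashlib import sha1


def sanitize_argo_name(workflow_name):
    # Truncate-and-hash names longer than Argo Workflows' 253-character limit.
    if len(workflow_name) > 253:
        name_hash = (
            base64.b32encode(sha1(workflow_name.encode("utf-8")).digest())
            .decode("utf-8")[:8]
            .lower()
        )
        workflow_name = "%s-%s" % (workflow_name[:242], name_hash)
    # Strip leading non-alphanumerics, drop underscores, and lower-case so the
    # result is a valid RFC 1123 DNS subdomain name.
    cleaned = re.compile(r"^[^A-Za-z0-9]+").sub("", workflow_name)
    return cleaned.replace("_", "").lower()


print(sanitize_argo_name("_My_Project.user.jane_doe.MyFlow"))
# Prints "myproject.user.janedoe.myflow"
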
Example #2
def resolve_state_machine_name(obj, name):
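    """Resolve the name under which the flow is deployed to AWS Step Functions.

    Returns a (state_machine_name, token_prefix, is_project) tuple. The optional
    SFN_STATE_MACHINE_PREFIX is prepended, and the name is kept within Step
    Functions' 80-character limit: @project names are truncated and hashed,
    while over-long explicit names are rejected.
    """
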
    def attach_prefix(name):
        if SFN_STATE_MACHINE_PREFIX is not None:
            return SFN_STATE_MACHINE_PREFIX + "_" + name
        return name

    project = current.get("project_name")
    obj._is_state_machine_name_hashed = False
    if project:
        if name:
            raise MetaflowException(
                "--name is not supported for @projects. " "Use --branch instead."
            )
        state_machine_name = attach_prefix(current.project_flow_name)
        project_branch = to_bytes(".".join((project, current.branch_name)))
        token_prefix = (
            "mfprj-%s"
            % to_unicode(base64.b32encode(sha1(project_branch).digest()))[:16]
        )
        is_project = True
        # AWS Step Functions has a limit of 80 chars for state machine names.
        # We truncate the state machine name if the computed name is greater
        # than 60 chars and append a hashed suffix to ensure uniqueness.
        if len(state_machine_name) > 60:
            name_hash = to_unicode(
                base64.b32encode(sha1(to_bytes(state_machine_name)).digest())
            )[:16].lower()
            state_machine_name = "%s-%s" % (state_machine_name[:60], name_hash)
            obj._is_state_machine_name_hashed = True
    else:
        # Note: VALID_NAME in this module is assumed to match characters that are
        # NOT allowed (the inverse of the convention in Example #1), so a match
        # means the supplied name must be rejected.
        if name and VALID_NAME.search(name):
            raise MetaflowException("Name '%s' contains invalid characters." % name)

        state_machine_name = attach_prefix(name if name else current.flow_name)
        token_prefix = state_machine_name
        is_project = False

        if len(state_machine_name) > 80:
            msg = (
                "The full name of the workflow:\n*%s*\nis longer than 80 "
                "characters.\n\n"
                "To deploy this workflow to AWS Step Functions, please "
                "assign a shorter name\nusing the option\n"
                "*step-functions --name <name> create*." % state_machine_name
            )
            raise StepFunctionsStateMachineNameTooLong(msg)

    return state_machine_name, token_prefix.lower(), is_project
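The 60-character truncation described in the comment above can also be seen in isolation. The snippet below applies it unconditionally to a made-up name (both the prefix and flow name are invented), just to show the shape of the result:

import base64
from hashlib import sha1

state_machine_name = "myprefix_my_project.user.jane." + "a_very_long_flow_name" * 3
name_hash = (
    base64.b32encode(sha1(state_machine_name.encode("utf-8")).digest())
    .decode("utf-8")[:16]
    .lower()
)
truncated = "%s-%s" % (state_machine_name[:60], name_hash)
print(len(truncated))  # 77: 60 kept characters, a dash, and a 16-character hash
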
Example #3
    def create_job(
        self,
        flow_name,
        run_id,
        step_name,
        task_id,
        attempt,
        user,
        code_package_sha,
        code_package_url,
        code_package_ds,
        step_cli,
        docker_image,
        service_account=None,
        secrets=None,
        node_selector=None,
        namespace=None,
        cpu=None,
        gpu=None,
        gpu_vendor=None,
        disk=None,
        memory=None,
        run_time_limit=None,
        env=None,
    ):
        """Create and submit a Kubernetes job for a single Metaflow task attempt."""
        # Avoid a shared mutable default argument.
        env = env or {}

        job = (
            KubernetesClient()
            .job(
                generate_name="t-",
                namespace=namespace,
                service_account=service_account,
                secrets=secrets,
                node_selector=node_selector,
                command=self._command(
                    flow_name=flow_name,
                    run_id=run_id,
                    step_name=step_name,
                    task_id=task_id,
                    attempt=attempt,
                    code_package_url=code_package_url,
                    step_cmds=[step_cli],
                ),
                image=docker_image,
                cpu=cpu,
                memory=memory,
                disk=disk,
                gpu=gpu,
                gpu_vendor=gpu_vendor,
                timeout_in_seconds=run_time_limit,
                # Retries are handled by Metaflow runtime
                retries=0,
                step_name=step_name,
            )
            .environment_variable("METAFLOW_CODE_SHA", code_package_sha)
            .environment_variable("METAFLOW_CODE_URL", code_package_url)
            .environment_variable("METAFLOW_CODE_DS", code_package_ds)
            .environment_variable("METAFLOW_USER", user)
            .environment_variable("METAFLOW_SERVICE_URL", BATCH_METADATA_SERVICE_URL)
            .environment_variable(
                "METAFLOW_SERVICE_HEADERS", json.dumps(BATCH_METADATA_SERVICE_HEADERS)
            )
            .environment_variable(
                "METAFLOW_DATASTORE_SYSROOT_S3", DATASTORE_SYSROOT_S3
            )
            .environment_variable("METAFLOW_DATATOOLS_S3ROOT", DATATOOLS_S3ROOT)
            .environment_variable("METAFLOW_DEFAULT_DATASTORE", "s3")
            .environment_variable("METAFLOW_DEFAULT_METADATA", DEFAULT_METADATA)
            .environment_variable("METAFLOW_KUBERNETES_WORKLOAD", 1)
            .environment_variable("METAFLOW_RUNTIME_ENVIRONMENT", "kubernetes")
            .environment_variable("METAFLOW_CARD_S3ROOT", DATASTORE_CARD_S3ROOT)
            .environment_variable(
                "METAFLOW_DEFAULT_AWS_CLIENT_PROVIDER", DEFAULT_AWS_CLIENT_PROVIDER
            )
            # Skip setting METAFLOW_DATASTORE_SYSROOT_LOCAL because metadata sync
            # between the local user instance and the remote Kubernetes pod assumes
            # metadata is stored in DATASTORE_LOCAL_DIR on the Kubernetes pod; this
            # happens when METAFLOW_DATASTORE_SYSROOT_LOCAL is NOT set (see
            # get_datastore_root_from_config in datastore/local.py).
        )

        # Add METAFLOW_S3_ENDPOINT_URL
        if S3_ENDPOINT_URL is not None:
            job.environment_variable("METAFLOW_S3_ENDPOINT_URL", S3_ENDPOINT_URL)

        # Support Metaflow sandboxes
        if KUBERNETES_SANDBOX_INIT_SCRIPT is not None:
            job.environment_variable(
                "METAFLOW_INIT_SCRIPT", KUBERNETES_SANDBOX_INIT_SCRIPT
            )

        for name, value in env.items():
            job.environment_variable(name, value)

        annotations = {
            "metaflow/user": user,
            "metaflow/flow_name": flow_name,
        }
        if current.get("project_name"):
            annotations.update({
                "metaflow/project_name":
                current.project_name,
                "metaflow/branch_name":
                current.branch_name,
                "metaflow/project_flow_name":
                current.project_flow_name,
            })

        for name, value in annotations.items():
            job.annotation(name, value)

        (job.annotation("metaflow/run_id", run_id).annotation(
            "metaflow/step_name", step_name).annotation(
                "metaflow/task_id",
                task_id).annotation("metaflow/attempt", attempt).label(
                    "app.kubernetes.io/name",
                    "metaflow-task").label("app.kubernetes.io/part-of",
                                           "metaflow"))

        return job.create()
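The long chains of .environment_variable(), .annotation(), and .label() calls above work because the job builder returns itself from each of these methods. A minimal stand-in (the _StubJob class is hypothetical, not Metaflow's actual Kubernetes job object) sketches the pattern:

class _StubJob:
    def __init__(self):
        self._env = {}
        self._annotations = {}
        self._labels = {}

    def environment_variable(self, name, value):
        self._env[name] = value
        return self  # returning self is what makes chaining possible

    def annotation(self, name, value):
        self._annotations[name] = value
        return self

    def label(self, name, value):
        self._labels[name] = value
        return self


job = (
    _StubJob()
    .environment_variable("METAFLOW_USER", "jane")
    .annotation("metaflow/run_id", "42")
    .label("app.kubernetes.io/part-of", "metaflow")
)
print(job._env, job._annotations, job._labels)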