Example 1
def make_flow(obj, token, name, tags, namespace, max_workers, workflow_timeout,
              is_project):
    if obj.flow_datastore.TYPE != "s3":
        raise MetaflowException("AWS Step Functions requires --datastore=s3.")

    # Attach AWS Batch decorator to the flow
    decorators._attach_decorators(obj.flow, [BatchDecorator.name])
    decorators._init_step_decorators(obj.flow, obj.graph, obj.environment,
                                     obj.flow_datastore, obj.logger)

    obj.package = MetaflowPackage(obj.flow, obj.environment, obj.echo,
                                  obj.package_suffixes)
    package_url, package_sha = obj.flow_datastore.save_data([obj.package.blob],
                                                            len_hint=1)[0]

    return StepFunctions(
        name,
        obj.graph,
        obj.flow,
        package_sha,
        package_url,
        token,
        obj.metadata,
        obj.flow_datastore,
        obj.environment,
        obj.event_logger,
        obj.monitor,
        tags=tags,
        namespace=namespace,
        max_workers=max_workers,
        username=get_username(),
        workflow_timeout=workflow_timeout,
        is_project=is_project,
    )
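
For context, a helper like this is normally invoked from the "step-functions create" click command, which either prints the generated state machine or deploys it. The sketch below is an assumption modeled on Metaflow's upstream step_functions_cli module, not part of the excerpt; the exact option set and the deploy()/schedule() signatures differ between Metaflow versions.

import click

@click.command(help="Sketch of a 'step-functions create' command.")
@click.option("--only-json", is_flag=True, default=False,
              help="Only print the JSON sent to AWS Step Functions.")
@click.pass_obj
def create(obj, only_json):
    # token, tags, namespace, etc. normally come from other CLI options;
    # placeholder values keep the sketch short.
    flow = make_flow(obj, token=None, name=obj.flow.name, tags=None,
                     namespace=None, max_workers=16, workflow_timeout=None,
                     is_project=False)
    if only_json:
        obj.echo_always(flow.to_json(), err=False, no_bold=True)
    else:
        flow.deploy()    # create or update the state machine on AWS
        flow.schedule()  # attach the EventBridge schedule, if the flow has one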
Example 2
def create(obj,
           image,
           image_pull_secrets,
           env,
           env_from,
           labels,
           annotations,
           k8s_namespace,
           embedded,
           max_workers,
           volumes,
           workflow_timeout=None,
           only_json=False):
    obj.echo("Deploying *%s* to Argo Workflow Templates..." %
             obj.workflow_template_name,
             bold=True)

    if obj.flow_datastore.TYPE != 's3':
        raise MetaflowException("Argo Workflows require --datastore=s3.")

    # When using conda, attach the AWS Batch decorator to the flow.
    # This results in 'linux-64' libraries being packaged.
    decorators._attach_decorators(obj.flow, [BatchDecorator.name])
    decorators._init_step_decorators(obj.flow, obj.graph, obj.environment,
                                     obj.flow_datastore, obj.logger)

    obj.package = MetaflowPackage(obj.flow, obj.environment, obj.echo,
                                  obj.package_suffixes)
    package_url, package_sha = obj.flow_datastore.save_data([obj.package.blob],
                                                            len_hint=1)[0]

    warn_use_argo_image(obj)
    workflow = ArgoWorkflow(obj.workflow_template_name, obj.flow, obj.graph,
                            obj.package, package_url if not embedded else None,
                            obj.metadata, obj.flow_datastore, obj.environment,
                            obj.event_logger, obj.monitor, image,
                            image_pull_secrets, env, env_from, labels,
                            annotations, max_workers, volumes,
                            workflow_timeout)

    if only_json:
        obj.echo_always(workflow.to_json(), err=False, no_bold=True, nl=False)
    else:
        workflow.deploy(k8s_namespace)
        obj.echo(
            "WorkflowTemplate *{name}* is pushed to Argo Workflows successfully.\n"
            .format(name=obj.workflow_template_name),
            bold=True)
        workflow.schedule(k8s_namespace)
        obj.echo("What will trigger execution of the workflow:", bold=True)
        obj.echo(workflow.trigger_explanation(), indent=True)
Example 3
def make_flow(obj,
              token,
              name,
              tags,
              namespace,
              max_workers,
              workflow_timeout,
              is_project):
    datastore = obj.datastore(obj.flow.name,
                              mode='w',
                              metadata=obj.metadata,
                              event_logger=obj.event_logger,
                              monitor=obj.monitor)
    if datastore.TYPE != 's3':
        raise MetaflowException("AWS Step Functions requires --datastore=s3.")

    # Attach AWS Batch decorator to the flow
    decorators._attach_decorators(obj.flow, [BatchDecorator.name])
    decorators._init_step_decorators(
            obj.flow, obj.graph, obj.environment, obj.datastore, obj.logger)

    obj.package = MetaflowPackage(
        obj.flow, obj.environment, obj.echo, obj.package_suffixes)
    package_url = datastore.save_data(
        obj.package.sha, TransformableObject(obj.package.blob))

    return StepFunctions(name,
                         obj.graph,
                         obj.flow,
                         obj.package,
                         package_url,
                         token,
                         obj.metadata,
                         obj.datastore,
                         obj.environment,
                         obj.event_logger,
                         obj.monitor,
                         tags=tags,
                         namespace=namespace,
                         max_workers=max_workers,
                         username=get_username(),
                         workflow_timeout=workflow_timeout,
                         is_project=is_project)
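
Note that this variant predates Metaflow's unified flow datastore: the datastore is instantiated per flow, save_data takes the package sha plus a TransformableObject, and the whole obj.package object (rather than its sha) is handed to StepFunctions. With the newer API the same step collapses into a single call, as Examples 1 and 4 show:

# Newer datastore API (see Examples 1 and 4): save_data stores the package
# blob and returns a (url, sha) pair per stored object.
package_url, package_sha = obj.flow_datastore.save_data([obj.package.blob],
                                                        len_hint=1)[0]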
Example 4
def make_flow(obj, token, name, tags, namespace, max_workers, workflow_timeout,
              workflow_priority):
    # TODO: Make this check less specific to Amazon S3 as we introduce
    #       support for more cloud object stores.
    if obj.flow_datastore.TYPE != "s3":
        raise MetaflowException("Argo Workflows requires --datastore=s3.")

    # Attach @kubernetes and @environment decorator to the flow to
    # ensure that the related decorator hooks are invoked.
    decorators._attach_decorators(
        obj.flow, [KubernetesDecorator.name, EnvironmentDecorator.name])

    decorators._init_step_decorators(obj.flow, obj.graph, obj.environment,
                                     obj.flow_datastore, obj.logger)

    # Save the code package in the flow datastore so that both user code and
    # metaflow package can be retrieved during workflow execution.
    obj.package = MetaflowPackage(obj.flow, obj.environment, obj.echo,
                                  obj.package_suffixes)
    package_url, package_sha = obj.flow_datastore.save_data([obj.package.blob],
                                                            len_hint=1)[0]

    return ArgoWorkflows(
        name,
        obj.graph,
        obj.flow,
        package_sha,
        package_url,
        token,
        obj.metadata,
        obj.flow_datastore,
        obj.environment,
        obj.event_logger,
        obj.monitor,
        tags=tags,
        namespace=namespace,
        max_workers=max_workers,
        username=get_username(),
        workflow_timeout=workflow_timeout,
        workflow_priority=workflow_priority,
    )
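
Attaching KubernetesDecorator and EnvironmentDecorator programmatically has the same effect as decorating every step by hand or launching the flow with "--with kubernetes". The hypothetical flow below (class name and parameters invented for illustration) shows what the attached decorators correspond to at the step level:

from metaflow import FlowSpec, step, kubernetes, environment

class HelloArgoFlow(FlowSpec):

    # The deploy path attaches bare versions of these decorators to every
    # step; the parameters here are only illustrative.
    @environment(vars={"GREETING": "hello"})
    @kubernetes(cpu=1, memory=4096)
    @step
    def start(self):
        print("start step running on Kubernetes")
        self.next(self.end)

    @kubernetes(cpu=1, memory=4096)
    @step
    def end(self):
        print("done")

if __name__ == "__main__":
    HelloArgoFlow()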
Example 5
def package(obj):
    # Prepare the package before any of the sub-commands are invoked.
    obj.package = MetaflowPackage(obj.flow,
                                  obj.environment,
                                  obj.echo,
                                  obj.package_suffixes)
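
This callback simply pre-builds the code package so that sub-commands can reuse it via obj.package. A hypothetical sub-command in the style of Metaflow's package_cli, assuming package is a click group and using the same MetaflowPackage.blob attribute seen in the examples above:

import click

@package.command(help="Save the current code package to a file.")
@click.argument("path")
@click.pass_obj
def save(obj, path):
    # Write the already-prepared package blob to disk.
    with open(path, "wb") as f:
        f.write(obj.package.blob)
    obj.echo("Code package saved in *%s*." % path)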