def copy_workspace_workflows(destination_workspace_namespace, destination_workspace_name, source_workspace_namespace, source_workspace_name):
    """Copy workflows from a source workspace to a destination workspace.

    Args:
        destination_workspace_namespace: namespace of the workspace receiving workflows
        destination_workspace_name: name of the workspace receiving workflows
        source_workspace_namespace: namespace of the workspace to copy from
        source_workspace_name: name of the workspace to copy from

    Returns:
        (True, copied_workflow_names) when every workflow copied, or
        (False, errors) when any copy failed or an exception occurred.
    """
    try:
        # get the list of all the workflows in source workspace - allRepos = agora, dockstore
        source_workflows = fapi.list_workspace_configs(source_workspace_namespace, source_workspace_name, allRepos=True)

        source_workflow_names = []
        destination_workflow_names = []
        workflow_copy_errors = []

        for workflow in source_workflows.json():
            # get workflow name and add to source workflow names list
            source_workflow_name = workflow['name']
            source_workflow_namespace = workflow['namespace']
            source_workflow_names.append(source_workflow_name)

            # get full source workflow configuration (detailed config with inputs, outputs, etc.) for a single workflow
            source_workflow_config = fapi.get_workspace_config(source_workspace_namespace, source_workspace_name, source_workflow_namespace, source_workflow_name)

            # create a workflow in the destination based on the source workflow config
            response = fapi.create_workspace_config(destination_workspace_namespace, destination_workspace_name, source_workflow_config.json())

            # 201 = created; 409 = already exists (does not count as failure)
            if response.status_code in (201, 409):
                # BUGFIX: only record the workflow as copied when the copy
                # actually succeeded (previously appended unconditionally).
                destination_workflow_names.append(source_workflow_name)
            else:
                workflow_copy_errors.append(response.text)

        # BUGFIX: list.sort() returns None, so the old
        # `a.sort() != b.sort()` comparison was always False and the
        # mismatch warning could never fire; compare sorted copies instead.
        if sorted(source_workflow_names) != sorted(destination_workflow_names):
            missing_workflows = list(set(source_workflow_names) - set(destination_workflow_names))
            print(f"WARNING: Failed to copy the following workflows to {destination_workspace_namespace}/{destination_workspace_name}: {missing_workflows}. Check output file for details.")
            return False, workflow_copy_errors

        # BUGFIX: the success message previously interpolated each namespace
        # twice instead of the namespace/name pair.
        print(f"Successfully copied all workflows from {source_workspace_namespace}/{source_workspace_name} to {destination_workspace_namespace}/{destination_workspace_name}")
        return True, destination_workflow_names

    except Exception as error:
        print(f"WARNING: Workflow copying failed due to: {error}")
        return False, error
def update_workflow_config_in_workspace(config_namespace: str,
                                        config_name: str, method_body: dict,
                                        workspace_namespace: str,
                                        workspace_name: str):
    """Create or update a workflow configuration in the given workspace.

    The existing config is updated in place when present; otherwise a new
    config is created from ``method_body``.

    Raises:
        ValueError: if the FireCloud API rejects the update or create call.
    """
    probe = fapi.get_workspace_config(workspace_namespace, workspace_name,
                                      config_namespace, config_name)

    if probe.status_code == 200:
        # Config already exists -> update it; a 200 signals success.
        resp = fapi.update_workspace_config(workspace_namespace,
                                            workspace_name, config_namespace,
                                            config_name, method_body)
        verb, ok_status = "update", 200
    else:
        # Config missing -> create it; a 201 signals success.
        resp = fapi.create_workspace_config(workspace_namespace,
                                            workspace_name, method_body)
        verb, ok_status = "create", 201

    if resp.status_code != ok_status:
        raise ValueError(
            f"Unable to {verb} workflow config {config_namespace}/{config_name} in the workspace {workspace_namespace}/{workspace_name}. Response: {resp.status_code} - {resp.json()}!"
        )
# ---- Example 3 ----
# Reference (disabled): creating a config straight from the unmodified template.
# print('ORIG_TEMPLATE is', orig_template)
# del orig_template['rootEntityType']
# z = fapi.create_workspace_config(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, body=orig_template)
# print('CREATED CONFIG WITH ORIG TEMPLATE:', z, z.json())

# Point the config at the desired method snapshot.
print('methodConfigVersion was', config_json['methodConfigVersion'])
config_json['methodConfigVersion'] = snapshot_id
print('methodConfigVersion now is', config_json['methodConfigVersion'])
config_json['namespace'] = SEL_NAMESPACE   # configuration namespace
config_json['name'] = TERRA_CONFIG_NAME
# Drop the root entity key if present — presumably so the config does not
# require a data-model entity type; confirm against the workflow's needs.
if 'rootEntityType' in config_json:
    del config_json['rootEntityType']
# Merge caller-supplied input bindings into the config's input map.
config_json['inputs'].update(inputs)

print('AFTER UPDATING METHODCONFIGVERSION config_json is', config_json)

# Create the workspace config with the customized inputs.
z = fapi.create_workspace_config(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, body=config_json)
print('CREATED CONFIG WITH OUR INPUTS:', z, z.json())

# Ask the server to validate the freshly created config.
z = fapi.validate_config(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, cnamespace=SEL_NAMESPACE, config=TERRA_CONFIG_NAME)
print('VALIDATE_CONFIG:', z, z.json())

# Inspect the ACL of the config in the method repository (snapshot 1).
z = fapi.get_repository_config_acl(namespace=SEL_NAMESPACE, config=TERRA_CONFIG_NAME, snapshot_id=1)
print('REPO CONFIG ACL:', z, z.json())

# Inspect the ACL of the workspace itself.
z = fapi.get_workspace_acl(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE)
print('WORKSPACE ACL:', z, z.json())


# Reference (disabled): overwriting the config instead of creating it.
# z = fapi.overwrite_workspace_config(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE,
#                                     cnamespace=SEL_NAMESPACE, configname=TERRA_CONFIG_NAME, body=config_json)
# print('OVERWROTE', z, z.json())
# ---- Example 4 ----
def do_fc_run(method: str, workspace: str, wdl_inputs: Union[str, dict],
              out_json: str, bucket_folder: str) -> str:
    """Run a FireCloud method.

    Args:
        method: method namespace/name/version. Version is optional; when
            omitted, the latest snapshot in the method namespace is used.
        workspace: workspace namespace/name.
        wdl_inputs: WDL input JSON (path or dict).
        out_json: path to write the inputs JSON after upload; ``None``
            skips the upload step entirely.
        bucket_folder: The folder under google bucket for uploading files.

    Returns:
        URL to check submission status.

    Raises:
        ValueError: if the method cannot be found, the workspace config
            cannot be created/updated, or the submission fails to launch.
    """
    inputs = kco.get_wdl_inputs(wdl_inputs)
    method_namespace, method_name, method_version = kco.fs_split(method)
    if method_version is None:
        # No version supplied: pick the highest snapshot in the namespace.
        version = -1
        list_methods = fapi.list_repository_methods(method_name)
        if list_methods.status_code != 200:
            # BUGFIX: `.json` lacked call parentheses, so the error message
            # contained a bound-method repr instead of the response body.
            raise ValueError('Unable to list methods ' + ' - ' +
                             str(list_methods.json()))
        methods = list_methods.json()
        # BUGFIX: loop variable renamed so it no longer shadows the
        # `method` parameter.
        for method_entry in methods:
            if method_entry['namespace'] == method_namespace:
                version = max(version, method_entry['snapshotId'])
        if version == -1:
            raise ValueError(method_name + ' not found')
        method_version = version

    # No data model is used, so the run has no root/launch entity.
    root_entity = None
    launch_entity = None
    workspace_namespace, workspace_name, workspace_version = kco.fs_split(
        workspace)
    kco.get_or_create_workspace(workspace_namespace, workspace_name)

    if out_json is not None:
        # Upload local inputs to the workspace bucket, then persist the
        # rewritten inputs JSON for the caller.
        kco.do_fc_upload(inputs, workspace, False, bucket_folder)
        with open(out_json, 'w') as fout:
            json.dump(inputs, fout)
    config_namespace = method_namespace
    config_name = method_name

    method_body = {
        'name': config_name,
        'namespace': config_namespace,
        'methodRepoMethod': {
            'methodNamespace': method_namespace,
            'methodName': method_name,
            'methodVersion': method_version,
            'sourceRepo': 'agora',
            'methodUri': 'agora://{0}/{1}/{2}'.format(
                method_namespace, method_name, method_version)
        },
        'rootEntityType': root_entity,
        'prerequisites': {},
        'inputs': convert_inputs(inputs),
        'outputs': {},
        'methodConfigVersion': 1,
        'deleted': False
    }

    # Update the workspace config in place if it exists, otherwise create it.
    config_exists = fapi.get_workspace_config(workspace_namespace,
                                              workspace_name, config_namespace,
                                              config_name)
    if config_exists.status_code == 200:
        config_submission = fapi.update_workspace_config(
            workspace_namespace, workspace_name, config_namespace, config_name,
            method_body)
        if config_submission.status_code != 200:
            raise ValueError('Unable to update workspace config. Response: ' +
                             str(config_submission.status_code))
    else:
        config_submission = fapi.create_workspace_config(
            workspace_namespace, workspace_name, method_body)
        if config_submission.status_code != 201:
            raise ValueError('Unable to create workspace config - ' +
                             str(config_submission.json()))

    launch_submission = fapi.create_submission(workspace_namespace,
                                               workspace_name,
                                               config_namespace, config_name,
                                               launch_entity, root_entity, "")

    if launch_submission.status_code == 201:
        submission_id = launch_submission.json()['submissionId']
        url = 'https://portal.firecloud.org/#workspaces/{}/{}/monitor/{}'.format(
            workspace_namespace, workspace_name, submission_id)

        return url
    else:
        raise ValueError('Unable to launch submission - ' +
                         str(launch_submission.json()))
# ---- Example 5 ----
def submit_job_to_terra(method: str, workspace: str,
                        wdl_inputs: Union[str, dict], out_json: str,
                        bucket_folder: str, cache: bool) -> str:
    """Submit a FireCloud/Terra method as a job and return its monitoring URL.

    Args:
        method: method namespace/name/version. Version is optional; the
            latest snapshot is used when it is omitted.
        workspace: workspace namespace/name.
        wdl_inputs: WDL input JSON (path or dict).
        out_json: path to write the modified inputs JSON after upload;
            ``None`` skips the upload step.
        bucket_folder: folder under the google bucket for uploaded files.
        cache: use call cache if applicable.

    Returns:
        URL to check submission status.

    Raises:
        ValueError: if the workspace config cannot be created/updated or
            the submission fails to launch.
    """
    # Parse the inputs and resolve which method snapshot will run.
    inputs = alto.get_wdl_inputs(wdl_inputs)
    method_namespace, method_name, method_version = alto.fs_split(method)
    method_def = alto.get_method(method_namespace, method_name, method_version)
    if method_version is None:
        method_version = method_def['snapshotId']

    # Make sure the target workspace exists (creating it when needed).
    workspace_namespace, workspace_name, workspace_version = alto.fs_split(
        workspace)
    alto.get_or_create_workspace(workspace_namespace, workspace_name)

    # Push input data to the bucket and record the rewritten inputs JSON.
    if out_json is not None:
        alto.upload_to_google_bucket(inputs, workspace, False, bucket_folder,
                                     out_json)

    # The job runs without the data model, so no entities are involved.
    root_entity = launch_entity = None

    # Build the method configuration body for the workspace.
    config_namespace, config_name = method_namespace, method_name
    method_uri = 'agora://{0}/{1}/{2}'.format(method_namespace, method_name,
                                              method_version)
    method_body = {
        'name': config_name,
        'namespace': config_namespace,
        'methodRepoMethod': {
            'methodNamespace': method_namespace,
            'methodName': method_name,
            'methodVersion': method_version,
            'sourceRepo': 'agora',
            'methodUri': method_uri,
        },
        'rootEntityType': root_entity,
        'prerequisites': {},
        'inputs': convert_inputs(inputs),
        'outputs': {},
        'methodConfigVersion': 1,
        'deleted': False,
    }

    # Update the workspace config when present, otherwise create it fresh.
    existing = fapi.get_workspace_config(workspace_namespace, workspace_name,
                                         config_namespace, config_name)
    if existing.status_code == 200:
        submission = fapi.update_workspace_config(
            workspace_namespace, workspace_name, config_namespace, config_name,
            method_body)
        if submission.status_code != 200:
            raise ValueError('Unable to update workspace config. Response: ' +
                             str(submission.status_code) + '-' +
                             str(submission.json()))
    else:
        submission = fapi.create_workspace_config(
            workspace_namespace, workspace_name, method_body)
        if submission.status_code != 201:
            raise ValueError('Unable to create workspace config - ' +
                             str(submission.json()))

    # Launch the job and hand back a URL for tracking its progress.
    launched = alto.create_submission(workspace_namespace, workspace_name,
                                      config_namespace, config_name,
                                      launch_entity, root_entity,
                                      use_callcache=cache)
    if launched.status_code != 201:
        raise ValueError('Unable to launch submission - ' +
                         str(launched.json()))

    submission_id = launched.json()['submissionId']
    return 'https://app.terra.bio/#workspaces/{0}/{1}/job_history/{2}'.format(
        workspace_namespace, workspace_name, submission_id)