Example No. 1
from firecloud import api as fapi


def main(args):
    # List all workflows in the submission.
    s = fapi.get_submission("broadtagteam", args.wkspc,
                            args.sid).json()['workflows']
    for wf in s:
        # For each failed workflow, fetch its metadata and print the root-cause message.
        if wf['status'] == "Failed":
            getwf = fapi.get_workflow_metadata("broadtagteam", args.wkspc,
                                               args.sid,
                                               wf['workflowId']).json()
            print(getwf['failures'][0]['causedBy'][0]['message'])
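A minimal way to drive this function, assuming `wkspc` and `sid` are supplied on the command line; the argparse wiring below is an assumption added for illustration, not part of the original example:

import argparse

if __name__ == "__main__":
    # Hypothetical CLI wiring; the original example only defines main(args).
    parser = argparse.ArgumentParser(description="Print failure messages for a submission")
    parser.add_argument("--wkspc", required=True, help="workspace name")
    parser.add_argument("--sid", required=True, help="submission id")
    main(parser.parse_args())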
Example No. 2
    def __init__(self,
                 namespace,
                 workspace,
                 submission_id,
                 api_url=fapi.PROD_API_ROOT):
        # Fetch the submission up front so an invalid id fails fast.
        r = fapi.get_submission(namespace, workspace, submission_id, api_url)
        fapi._check_response_code(r, 200)

        self.namespace = namespace
        self.workspace = workspace
        self.submission_id = submission_id
        self.api_url = api_url
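A usage sketch; the class name `SubmissionMonitor` and the workspace coordinates below are hypothetical, since the example shows only the `__init__` method:

# Hypothetical class name and values, for illustration only.
monitor = SubmissionMonitor(namespace="my-billing-project",
                            workspace="my-workspace",
                            submission_id="00000000-0000-0000-0000-000000000000")
print(monitor.namespace, monitor.workspace, monitor.submission_id)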
Example No. 3
workspace = os.environ['WORKSPACE_NAME']
bucket = os.environ['WORKSPACE_BUCKET']


# ## Using the submission id, link the notebook to your output
# 
# A submission id can be found in the job history of the workspace.

# In[9]:


# id of submission
submission = '8be4b1e9-8baf-480a-a989-edfe13fa658b'

# Given a submission id, get all inputs, outputs and metadata.
temp = fc.get_submission(project, workspace, submission).json()

# Information on all the workflows run in that single submission
workflows = temp['workflows']

# For each workflow, extract the sample id and workflow id and store them in a dictionary
wf = {}
for workflow in workflows:
    wf[workflow['workflowEntity']['entityName']] = [workflow['workflowId']]


# ### For each sample/workflow, extract the summary file output URL containing specificity and sensitivity metrics.
# 

# In[10]:
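# The notebook does not show this cell's body; the sketch below assumes the
# summary file is exposed as a workflow output. The output key
# 'Workflow.summary_file' is a placeholder, not the real output name.

summary_urls = {}
for sample_id, ids in wf.items():
    workflow_id = ids[0]
    meta = fc.get_workflow_metadata(project, workspace, submission, workflow_id).json()
    # Replace 'Workflow.summary_file' with the actual output name of the workflow.
    summary_urls[sample_id] = meta['outputs'].get('Workflow.summary_file')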
Example No. 4
import json
import operator

from firecloud import api as fapi


def _pretty_print_json(json_dict, sort_keys=True):
    return json.dumps(json_dict, indent=4, separators=(',', ': '), sort_keys=sort_keys)

def _write_json(fname, **json_dict):
    dump_file(fname=fname, value=_pretty_print_json(json_dict))



#print('ENTITIES ARE', fapi.list_entity_types(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE).json())
z = fapi.list_submissions(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE)
#print('SUBMISSIONS ARE', z, z.json())
# Look only at the most recent submission (sorted by submission date, newest first).
for s in sorted(z.json(), key=operator.itemgetter('submissionDate'), reverse=True)[:1]:
    #if not s['submissionDate'].startswith('2020-06-29'): continue

    print('====================================================')
    print(s)
    y = fapi.get_submission(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, submission_id=s['submissionId']).json()
    if 'workflowId' not in y['workflows'][0]: continue
    zz = fapi.get_workflow_metadata(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, submission_id=s['submissionId'],
                                    workflow_id=y['workflows'][0]['workflowId']).json()
    _write_json('tmp/j2.json', **zz)
    dump_file(fname='tmp/w.wdl', value=zz['submittedFiles']['workflow'])
    # Tally how many replicas of the run_sims_cosi2 workflow succeeded.
    succ = [v["succeeded"] for v in zz['outputs']["run_sims_cosi2.replicaInfos"]]
    print(f'Succeeded: {sum(succ)} of {len(succ)}')

    # zzz = fapi.get_workflow_metadata(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, submission_id=s['submissionId'],
    #                                 workflow_id='ad1e8271-fe66-4e05-9005-af570e9e5884').json()
    # _write_json('tmp/jz.json', **zzz)



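The snippet relies on a `dump_file` helper that is not shown above; a minimal sketch, assuming it simply writes a string value to a text file:

def dump_file(fname, value):
    # Assumed behavior of the helper used above: write a string value to a text file.
    with open(fname, 'w') as out:
        out.write(str(value))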
Example No. 5
def main():
    global billing_project
    global template_workspace_name

    parser = argparse.ArgumentParser(prog="python " + sys.argv[0],
                                     add_help=False)
    subparser = parser.add_subparsers(dest="cmd")

    delete_workspace = subparser.add_parser('delete_workspace',
                                            help='delete workspace')
    delete_workspace.add_argument('--workspace-name',
                                  dest="workspace_name",
                                  help="name of the workspace")

    clone_workspace = subparser.add_parser(
        'clone_workspace', help='clone from existing workspace')
    clone_workspace.add_argument('--source-work-space',
                                 dest='src_work_space',
                                 help="name of source workspace")
    clone_workspace.add_argument('--destination-work-space',
                                 dest='dest_work_space',
                                 help="name of destination workspace")

    get_data_info = subparser.add_parser('get_participant_table',
                                         help='get participant.tsv')
    get_data_info.add_argument('--workspace-name',
                               dest="workspace_name",
                               help="name of the workspace")
    get_data_info.add_argument('--participant-table-name',
                               dest="participant_table_name",
                               help="name of the participant table")
    get_data_info.add_argument('--output-name',
                               dest="output_table_name",
                               required=False,
                               default="participant.tsv",
                               help="name of output tsv")

    create_participant_lane = subparser.add_parser(
        'create_participant_lane',
        help='create participant_lane/lane_set_id tables')
    create_participant_lane.add_argument('--input-name',
                                         dest="input_participant_table_name",
                                         required=False,
                                         default="participant.tsv",
                                         help="input participant table  name")

    create_participant_lane.add_argument(
        '--output-prefix',
        dest="output_prefix",
        required=False,
        help="name of output prefix for the lanes")

    upload_participant_lane = subparser.add_parser(
        'upload_participant',
        help=
        'uploads the participant_lane_set, _lane_membership and _lane_entity files'
    )
    upload_participant_lane.add_argument('--workspace-name',
                                         dest="workspace_name",
                                         help="name of the workspace")
    upload_participant_lane.add_argument('--input-prefix',
                                         dest="input_prefix",
                                         help="name of the input prefix")

    upload_workflow = subparser.add_parser(
        'upload_workflow', help='uploads wdl to --workspace-name')
    upload_workflow.add_argument('--workspace-name',
                                 dest="workspace_name",
                                 help="name of the workspace")
    upload_workflow.add_argument('--method',
                                 dest="method",
                                 help="name of the method")
    upload_workflow.add_argument('--wdl',
                                 dest="wdl",
                                 help="path to the WDL file")

    upload_config = subparser.add_parser('upload_config',
                                         help='upload config information')
    upload_config.add_argument('--workspace-name',
                               dest="workspace_name",
                               help="name of the workspace")
    upload_config.add_argument('--chemistry',
                               dest="chemistry",
                               choices=["V2", "V3"],
                               help="chemistry")
    upload_config.add_argument(
        '--counting-mode',
        dest="counting_mode",
        choices=["sc_rna", "sn_rna"],
        help="counting mode: whether to count intronic alignments")
    upload_config.add_argument('--species',
                               dest="species",
                               choices=["human", "mouse"],
                               help="species")

    submit_workflow = subparser.add_parser('submit_workflow',
                                           help='submit a workflow run')
    submit_workflow.add_argument('--workspace-name',
                                 dest="workspace_name",
                                 help="name of the workspace")
    submit_workflow.add_argument('--workflow-repo',
                                 dest="workflow_repo",
                                 help="workflow repo name")
    submit_workflow.add_argument('--workflow-name',
                                 dest="workflow_name",
                                 help="workflow name")
    submit_workflow.add_argument('--entity-id',
                                 dest="entity_id",
                                 help="entity id")

    get_status = subparser.add_parser('get_status',
                                      help='get status of a submission')
    get_status.add_argument('--workspace-name',
                            dest="workspace_name",
                            help="name of the workspace")
    get_status.add_argument('--submission-id',
                            dest="submission_id",
                            help="submission_id")

    # show help when no arguments supplied
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    args = parser.parse_args()

    # new_workspace_name = "DCP2_Optimus_template_KMK_v1"
    if args.cmd == 'delete_workspace':
        print("Delete existing workspace ", args.workspace_name)
        delete_status = fapi.delete_workspace(billing_project,
                                              args.workspace_name)

    elif args.cmd == 'clone_workspace':
        print("Cloning a new workspace from template", args.src_work_space)
        status = create_newworkspace(billing_project, args.src_work_space,
                                     args.dest_work_space)

    elif args.cmd == 'get_participant_table':
        print("Get information from workspace", args.workspace_name)
        r = fapi.get_entities_tsv(billing_project, args.workspace_name,
                                  args.participant_table_name)
        with open(args.output_table_name, 'w') as fout:
            fout.write(r.content.decode())

    elif args.cmd == 'create_participant_lane':
        parse_terra.create_output_files(args.input_participant_table_name,
                                        args.output_prefix)

    elif args.cmd == 'upload_participant':
        upload_tables(args.input_prefix + ".tsv", billing_project,
                      args.workspace_name)
        upload_tables(args.input_prefix + "_membership.tsv", billing_project,
                      args.workspace_name)
        upload_tables(args.input_prefix + "_entity.tsv", billing_project,
                      args.workspace_name)
    elif args.cmd == 'upload_workflow':
        r = fapi.update_repository_method(args.workspace_name, args.method,
                                          "args.synopsis", args.wdl,
                                          "comment.txt", "args.comment")
        with open("response.txt", 'w') as fout:
            fout.write(r.content.decode())

    elif args.cmd == 'upload_config':

        work_space_config = fapi.get_workspace_config(billing_project,
                                                      args.workspace_name,
                                                      args.workspace_name,
                                                      "Optimus")
        work_space_json = work_space_config.json()
        work_space_json['inputs'] = json_templates.optimus_inputs
        work_space_json['outputs'] = json_templates.optimus_outputs

        if args.chemistry == "V2":
            work_space_json['inputs']['Optimus.chemistry'] = '\"tenX_v2\"'
            work_space_json['inputs'][
                'Optimus.whitelist'] = 'workspace.whitelist_v2'
        if args.chemistry == "V3":
            work_space_json['inputs']['Optimus.chemistry'] = '\"tenX_v3\"'
            work_space_json['inputs'][
                'Optimus.whitelist'] = 'workspace.whitelist_v3'

        if args.chemistry == "sn_rna":
            work_space_json['inputs']['Optimus.counting_mode'] = "\"sn_rna\""
        if args.chemistry == "sc_rna":
            work_space_json['inputs']['Optimus.counting_mode'] = "\"sc_rna\""

        if args.species == "human":
            work_space_json['inputs'][
                'Optimus.annotations_gtf'] = 'workspace.human_annotations_gtf'
            work_space_json['inputs'][
                'Optimus.ref_genome_fasta'] = 'workspace.human_ref_genome_fasta'
            work_space_json['inputs'][
                'Optimus.tar_star_reference'] = 'workspace.human_tar_star_reference'
        if args.species == "mouse":
            work_space_json['inputs'][
                'Optimus.annotations_gtf'] = 'workspace.mouse_annotations_gtf'
            work_space_json['inputs'][
                'Optimus.ref_genome_fasta'] = 'workspace.mouse_ref_genome_fasta'
            work_space_json['inputs'][
                'Optimus.tar_star_reference'] = 'workspace.mouse_tar_star_reference'

        updated_workflow = fapi.update_workspace_config(
            billing_project, args.workspace_name, args.workspace_name,
            "Optimus", work_space_json)

        if updated_workflow.status_code != 200:
            print("ERROR: " + updated_workflow.content.decode())
            sys.exit(1)
        else:
            print("updated successfully")

    elif args.cmd == 'submit_workflow':
        # Launching the Updated Monitor Submission Workflow
        response = fapi.get_entities_with_type(billing_project,
                                               args.workspace_name)
        entities = response.json()

        for ent in entities:
            ent_name = ent['name']
            ent_type = ent['entityType']
            ent_attrs = ent['attributes']

        submission_response = fapi.create_submission(
            billing_project,
            args.workspace_name,
            args.workflow_repo,
            args.workflow_name,
            entity=args.entity_id,
            etype="participant_lane_set",
            expression=None,
            use_callcache=True)

        if submission_response.status_code != 201:
            print(submission_response.content.decode())
            sys.exit(1)
        else:
            print("Successfully created submission")
            with open('response.txt', 'w') as fout:
                # json.dump(submission_response.json(), fout)
                fout.write(submission_response.json()['submissionId'] + '\n')
        # r = create_workspace_config("broadgdac", args.workspace_name, body):
        # print(r.content.decode())
    elif args.cmd == 'get_status':
        res = fapi.get_submission(billing_project, args.workspace_name,
                                  args.submission_id)
        print(res.content.decode())
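For reference, the subcommands defined above would typically be invoked like this; the script name `pipeline.py` and the argument values are placeholders:

# python pipeline.py clone_workspace --source-work-space TEMPLATE_WS --destination-work-space NEW_WS
# python pipeline.py get_participant_table --workspace-name NEW_WS --participant-table-name participant
# python pipeline.py upload_config --workspace-name NEW_WS --chemistry V2 --counting-mode sc_rna --species human
# python pipeline.py submit_workflow --workspace-name NEW_WS --workflow-repo REPO --workflow-name Optimus --entity-id ENTITY_ID
# python pipeline.py get_status --workspace-name NEW_WS --submission-id SUBMISSION_ID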