def main(argsv):
    """Add one or more WDL methods to the Broad Methods Repository.

    Args:
        argsv: list of command-line argument strings (e.g. ``sys.argv[1:]``).
            Recognized flags: -n/--namespace (required), -p/--public, and one
            or more positional WDL file paths.

    Side effects: pushes a new method snapshot per WDL via the FireCloud API,
    optionally grants public READER access, and prints an import URL (or an
    error payload) for each method.
    """
    parser = argparse.ArgumentParser(description='Add one or more methods to Broad Methods Repository')
    parser.add_argument('-n', '--namespace', dest='namespace', action='store', required=True, help='Methods namespace')
    parser.add_argument('-p', '--public', dest='public', action='store_true', help='Make methods publicly readable')
    parser.add_argument(dest='wdl', help='Path to WDL file.', nargs='+')
    args = parser.parse_args(argsv)
    namespace = args.namespace
    public = args.public
    for wdl in args.wdl:
        method_name = os.path.basename(wdl)
        # Strip a trailing '.wdl' extension, case-insensitively. Using
        # endswith (not rfind) so a name like 'foo.wdl.bak' is left intact
        # instead of being truncated to 'foo'.
        if method_name.lower().endswith('.wdl'):
            method_name = method_name[:-len('.wdl')]
        method_acl = []
        try:
            # Carry the ACL of the latest existing snapshot over to the new one.
            existing_method = alto.get_method(namespace, method_name)
            method_acl = fapi.get_repository_method_acl(
                namespace=existing_method['namespace'],
                method=existing_method['name'],
                snapshot_id=existing_method['snapshotId']).json()
        except ValueError:
            # No existing method with this name: start from an empty ACL.
            pass
        if public:
            # Ensure the ACL contains exactly one 'public' READER entry.
            for i, entry in enumerate(method_acl):
                if entry['user'] == 'public':
                    method_acl[i] = dict(user="******", role="READER")
                    break
            else:
                method_acl.append(dict(user="******", role="READER"))
        result = fapi.update_repository_method(namespace=namespace, method=method_name, wdl=wdl, synopsis='')
        if result.status_code == 201:
            # 201 Created: the new snapshot exists; apply the carried-over ACL.
            result = result.json()
            if method_acl:
                fapi.update_repository_method_acl(
                    namespace=result['namespace'],
                    method=result['name'],
                    snapshot_id=result['snapshotId'],
                    acl_updates=method_acl)
            print(
                'import "https://api.firecloud.org/ga4gh/v1/tools/{}:{}/versions/{}/plain-WDL/descriptor"'
                .format(result['namespace'], result['name'], result['snapshotId']))
        else:
            print('Unable to add {}'.format(method_name))
            print(result.json())
def main(argv):
    """Add one or more WDL methods to the Broad Methods Repository.

    Args:
        argv: list of command-line argument strings (e.g. ``sys.argv[1:]``).
            Recognized flags: -n/--namespace (required), -p/--public, and one
            or more positional WDL file paths.

    Side effects: pushes a new method snapshot per WDL via the FireCloud API,
    optionally grants public READER access, prints a descriptor URL per
    imported workflow and a final success count.
    """
    parser = argparse.ArgumentParser(description='Add one or more methods to Broad Methods Repository.')
    parser.add_argument('-n', '--namespace', dest='namespace', action='store', required=True, help='Methods namespace')
    parser.add_argument('-p', '--public', dest='public', action='store_true', help='Make methods publicly readable')
    parser.add_argument(dest='wdl', help='Path to WDL file.', nargs='+')
    args = parser.parse_args(argv)
    namespace = args.namespace
    public = args.public
    n_success = 0
    for wdl in args.wdl:
        method_name = os.path.basename(wdl)
        # Strip a trailing '.wdl' extension, case-insensitively. Using
        # endswith (not rfind) so a name like 'foo.wdl.bak' is left intact
        # instead of being truncated to 'foo'.
        if method_name.lower().endswith('.wdl'):
            method_name = method_name[:-len('.wdl')]
        method_acl = []
        try:
            # Carry the ACL of the latest existing snapshot over to the new one.
            existing_method = get_firecloud_workflow(namespace, method_name)
            method_acl = fapi.get_repository_method_acl(
                namespace=existing_method['namespace'],
                method=existing_method['name'],
                snapshot_id=existing_method['snapshotId']).json()
        except ValueError:
            # No existing workflow with this name: start from an empty ACL.
            pass
        if public:
            # Ensure the ACL contains exactly one 'public' READER entry.
            for i, entry in enumerate(method_acl):
                if entry['user'] == 'public':
                    method_acl[i] = dict(user='******', role='READER')
                    break
            else:
                method_acl.append(dict(user='******', role='READER'))
        result = fapi.update_repository_method(namespace=namespace, method=method_name, wdl=wdl, synopsis='')
        if result.status_code == 201:
            # 201 Created: the new snapshot exists; apply the carried-over ACL.
            result = result.json()
            if method_acl:
                fapi.update_repository_method_acl(
                    namespace=result['namespace'],
                    method=result['name'],
                    snapshot_id=result['snapshotId'],
                    acl_updates=method_acl)
            print(f'Workflow {method_name} is imported! See https://api.firecloud.org/ga4gh/v1/tools/{result["namespace"]}:{result["name"]}/versions/{result["snapshotId"]}/plain-WDL/descriptor')
            n_success += 1
        else:
            print(f'Unable to add workflow {method_name} - {result.json()}')
    print(f'Successfully added {n_success} workflows.')
# Register the staging repository as an authenticated git remote and push the
# staging branch to it. GH_TOKEN, STAGING_TRAVIS_REPO_SLUG, STAGING_BRANCH and
# execute() are defined elsewhere in this file/module.
execute(f'git remote add origin-staging "https://{GH_TOKEN}@github.com/{STAGING_TRAVIS_REPO_SLUG}.git"')
execute(f'git push --set-upstream origin-staging {STAGING_BRANCH}')

#dir(fapi)
#help(fapi)
#z = fapi.list_workspace_configs(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, allRepos=True).json()
#print('LIST_WORKSPACE_CONFIGS result is', z)
#z = fapi.get_workspace_config(workspace=SEL_WORKSPACE, namespace=SEL_NAMESPACE,
# config=TERRA_CONFIG_NAME, cnamespace=SEL_NAMESPACE)
#print('CONFIG_IS', z, z.json())
#z = fapi.list_repository_methods(namespace=SEL_NAMESPACE, name=TERRA_METHOD_NAME).json()
#print('METHODS LIST BEF', z)

# Push a new snapshot of the local Dockstore.wdl to the Methods Repository
# under SEL_NAMESPACE / TERRA_METHOD_NAME.
z = fapi.update_repository_method(namespace=SEL_NAMESPACE, method=TERRA_METHOD_NAME,
                                  synopsis='run sims and compute component stats',
                                  wdl=os.path.abspath(f'./Dockstore.wdl'))
#print('UPDATE IS', z, z.json())
new_method = z.json()
print('NEW_METHOD IS', new_method)
#z = fapi.list_repository_methods(namespace=SEL_NAMESPACE, name=TERRA_METHOD_NAME).json()
#print('METHODS LIST AFT', z)

# Inspect, then update, the ACL of the snapshot just created: grant OWNER to
# the two (redacted) users.
snapshot_id = new_method['snapshotId']
z = fapi.get_repository_method_acl(namespace=SEL_NAMESPACE, method=TERRA_METHOD_NAME, snapshot_id=snapshot_id)
print('ACL:', z, z.json())
z = fapi.update_repository_method_acl(namespace=SEL_NAMESPACE, method=TERRA_METHOD_NAME, snapshot_id=snapshot_id,
                                      acl_updates=[{'role': 'OWNER', 'user': '******'}, {'role': 'OWNER', 'user': '******'}])
def _build_parser():
    """Build the argparse parser with one sub-parser per supported command."""
    parser = argparse.ArgumentParser(prog="python " + sys.argv[0], add_help=False)
    subparser = parser.add_subparsers(dest="cmd")

    delete_workspace = subparser.add_parser('delete_workspace', help='delete workspace')
    delete_workspace.add_argument('--workspace-name', dest="workspace_name", help="name of the workspace")

    clone_workspace = subparser.add_parser('clone_workspace', help='clone from existing workspace')
    clone_workspace.add_argument('--source-work-space', dest='src_work_space', help="name of source workspace")
    clone_workspace.add_argument('--destination-work-space', dest='dest_work_space', help="name of destination workspace")

    get_data_info = subparser.add_parser('get_participant_table', help='get participant.tsv')
    get_data_info.add_argument('--workspace-name', dest="workspace_name", help="name of the workspace")
    get_data_info.add_argument('--participant-table-name', dest="participant_table_name", help="name of sample table")
    get_data_info.add_argument('--output-name', dest="output_table_name", required=False,
                               default="participant.tsv", help="name of output tsv")

    create_participant_lane = subparser.add_parser('create_participant_lane',
                                                   help='create participant_lane/lane_set_id tables')
    create_participant_lane.add_argument('--input-name', dest="input_participant_table_name", required=False,
                                         default="participant.tsv", help="input participant table name")
    create_participant_lane.add_argument('--output-prefix', dest="output_prefix", required=False,
                                         help="name of output prefix for the lanes")

    upload_participant_lane = subparser.add_parser(
        'upload_participant',
        help='uploads the participant_lane_set, _lane_membership and _lane_entity files')
    upload_participant_lane.add_argument('--workspace-name', dest="workspace_name", help="name of the workspace")
    upload_participant_lane.add_argument('--input-prefix', dest="input_prefix", help="name of the input prefix")

    upload_workflow = subparser.add_parser('upload_workflow', help='uploads wdl to --workspace-name')
    upload_workflow.add_argument('--workspace-name', dest="workspace_name", help="name of the workspace")
    # Fixed copy-pasted help texts: these are not "input prefix" options.
    upload_workflow.add_argument('--method', dest="method", help="name of the method")
    upload_workflow.add_argument('--wdl', dest="wdl", help="path to the WDL file")

    upload_config = subparser.add_parser('upload_config', help='upload config information')
    upload_config.add_argument('--workspace-name', dest="workspace_name", help="name of the workspace")
    upload_config.add_argument('--chemistry', dest="chemistry", choices=["V2", "V3"], help="chemistry")
    upload_config.add_argument('--counting-mode', dest="counting_mode", choices=["sc_rna", "sn_rna"],
                               help="counting mode: whether to count intronic alignments")
    upload_config.add_argument('--species', dest="species", choices=["human", "mouse"], help="species")

    submit_workflow = subparser.add_parser('submit_workflow', help='submit a workflow run')
    submit_workflow.add_argument('--workspace-name', dest="workspace_name", help="name of the workspace")
    submit_workflow.add_argument('--workflow-repo', dest="workflow_repo", help="workflow repo name")
    submit_workflow.add_argument('--workflow-name', dest="workflow_name", help="workflow name")
    submit_workflow.add_argument('--entity-id', dest="entity_id", help="entity id")

    get_status = subparser.add_parser('get_status', help='get status of a submission')
    get_status.add_argument('--workspace-name', dest="workspace_name", help="name of the workspace")
    get_status.add_argument('--submission-id', dest="submission_id", help="submission_id")
    return parser


def _upload_config(args):
    """Fetch the Optimus workspace config, fill in inputs for the chosen
    chemistry / counting mode / species, and push the updated config.

    Exits the process with status 1 if the update is rejected.
    """
    work_space_config = fapi.get_workspace_config(billing_project, args.workspace_name,
                                                  args.workspace_name, "Optimus")
    work_space_json = work_space_config.json()
    work_space_json['inputs'] = json_templates.optimus_inputs
    work_space_json['outputs'] = json_templates.optimus_outputs
    if args.chemistry == "V2":
        work_space_json['inputs']['Optimus.chemistry'] = '\"tenX_v2\"'
        work_space_json['inputs']['Optimus.whitelist'] = 'workspace.whitelist_v2'
    if args.chemistry == "V3":
        work_space_json['inputs']['Optimus.chemistry'] = '\"tenX_v3\"'
        work_space_json['inputs']['Optimus.whitelist'] = 'workspace.whitelist_v3'
    # BUG FIX: these branches previously tested args.chemistry (whose choices
    # are V2/V3), so the counting mode could never take effect.
    if args.counting_mode == "sn_rna":
        work_space_json['inputs']['Optimus.counting_mode'] = "\"sn_rna\""
    if args.counting_mode == "sc_rna":
        work_space_json['inputs']['Optimus.counting_mode'] = "\"sc_rna\""
    if args.species == "human":
        work_space_json['inputs']['Optimus.annotations_gtf'] = 'workspace.human_annotations_gtf'
        work_space_json['inputs']['Optimus.ref_genome_fasta'] = 'workspace.human_ref_genome_fasta'
        work_space_json['inputs']['Optimus.tar_star_reference'] = 'workspace.human_tar_star_reference'
    if args.species == "mouse":
        work_space_json['inputs']['Optimus.annotations_gtf'] = 'workspace.mouse_annotations_gtf'
        work_space_json['inputs']['Optimus.ref_genome_fasta'] = 'workspace.mouse_ref_genome_fasta'
        work_space_json['inputs']['Optimus.tar_star_reference'] = 'workspace.mouse_tar_star_reference'
    updated_workflow = fapi.update_workspace_config(billing_project, args.workspace_name,
                                                    args.workspace_name, "Optimus", work_space_json)
    if updated_workflow.status_code != 200:
        # BUG FIX: .content is bytes; decode before concatenating with a str
        # (str + bytes raises TypeError).
        print("ERROR :" + updated_workflow.content.decode())
        sys.exit(1)
    else:
        print("updated successfully")


def _submit_workflow(args):
    """Create a submission for the workflow on the given participant_lane_set
    entity and record its submissionId in response.txt.

    Exits the process with status 1 if the submission is rejected.
    """
    # NOTE(review): this response was fetched but never used in the original
    # code; the call is kept for parity and is a candidate for removal.
    fapi.get_entities_with_type(billing_project, args.workspace_name)
    submission_response = fapi.create_submission(
        billing_project, args.workspace_name, args.workflow_repo, args.workflow_name,
        entity=args.entity_id, etype="participant_lane_set", expression=None, use_callcache=True)
    if submission_response.status_code != 201:
        print(submission_response.content)
        sys.exit(1)
    else:
        print("Successfully Created Submisson")
        with open('response.txt', 'w') as fout:
            fout.write(submission_response.json()['submissionId'] + '\n')


def main():
    """Command-line dispatcher for Terra/FireCloud workspace operations.

    Parses sys.argv and runs exactly one subcommand; all API calls use the
    module-level `billing_project` as the Terra billing project/namespace.
    """
    parser = _build_parser()

    # Show help when no arguments are supplied. (BUG FIX: a prior
    # `if len(sys.argv) < 2: return` guard made this path unreachable.)
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    args = parser.parse_args()

    if args.cmd == 'delete_workspace':
        print("Delete existing workspace ", args.workspace_name)
        fapi.delete_workspace(billing_project, args.workspace_name)
    elif args.cmd == 'clone_workspace':
        print("Cloning a new workspace from template", args.src_work_space)
        create_newworkspace(billing_project, args.src_work_space, args.dest_work_space)
    elif args.cmd == 'get_participant_table':
        print("Get information from workspace", args.workspace_name)
        r = fapi.get_entities_tsv(billing_project, args.workspace_name, args.participant_table_name)
        with open(args.output_table_name, 'w') as fout:
            fout.write(r.content.decode())
    elif args.cmd == 'create_participant_lane':
        parse_terra.create_output_files(args.input_participant_table_name, args.output_prefix)
    elif args.cmd == 'upload_participant':
        # Upload the set, membership and entity tables produced by
        # create_participant_lane (they share a common prefix).
        for suffix in (".tsv", "_membership.tsv", "_entity.tsv"):
            upload_tables(args.input_prefix + suffix, billing_project, args.workspace_name)
    elif args.cmd == 'upload_workflow':
        # TODO(review): "args.synopsis" and "args.comment" are string
        # LITERALS, not attribute lookups — the repository receives the text
        # 'args.synopsis' as the synopsis. args has no such attributes, so
        # this cannot simply be unquoted; confirm the intended values.
        r = fapi.update_repository_method(args.workspace_name, args.method, "args.synopsis",
                                          args.wdl, "comment.txt", "args.comment")
        with open("response.txt", 'w') as fout:
            fout.write(r.content.decode())
    elif args.cmd == 'upload_config':
        _upload_config(args)
    elif args.cmd == 'submit_workflow':
        _submit_workflow(args)
    elif args.cmd == 'get_status':
        res = fapi.get_submission(billing_project, args.workspace_name, args.submission_id)
        print(res.content.decode())
# NOTE(review): this chunk begins mid-expression — the keyword argument below
# closes a fapi call (presumably fapi.list_workspace_configs(...)) whose
# opening lies above the visible region.
allRepos=True).json()
print(z)

# Fetch and show the current 'dockstore-tool-cosi2' workspace config.
z = fapi.get_workspace_config(workspace=SEL_WORKSPACE, namespace=SEL_NAMESPACE,
                              config='dockstore-tool-cosi2', cnamespace=SEL_NAMESPACE)
print('CONFIG_IS', z, z.json())

# List existing snapshots of the method before pushing a new one.
z = fapi.list_repository_methods(namespace=SEL_NAMESPACE, name='test-cosi2-method-01').json()
print('METHODS LIST BEF', z)

# Push a new snapshot of the cosi2 WDL to the Methods Repository.
z = fapi.update_repository_method(
    namespace=SEL_NAMESPACE, method='test-cosi2-method-01', synopsis='run cosi2',
    wdl='/data/ilya-work/proj/dockstore-tool-cosi2/Dockstore.wdl')
print('UPDATE IS', z, z.json())
new_method = z.json()

# List again to confirm the new snapshot appears.
z = fapi.list_repository_methods(namespace=SEL_NAMESPACE, name='test-cosi2-method-01').json()
print('METHODS LIST AFT', z)

# Fetch the auto-generated config template for the snapshot just created.
z = fapi.get_config_template(namespace=SEL_NAMESPACE, method='test-cosi2-method-01',
                             version=new_method['snapshotId'])
print('CONFIG TEMPLATE AFT IS', z, z.json())
config_template = z.json()