def upload_workflow_json(billing_project, workspace_name, namespace,
                         workflow_name, json_templates):
    work_space_config = fapi.get_workspace_config(billing_project,
                                                  workspace_name, namespace,
                                                  workflow_name)
    work_space_json = work_space_config.json()
    work_space_json['inputs'] = json_templates.optimus_inputs
    work_space_json['outputs'] = json_templates.optimus_outputs
    updated_workflow = fapi.update_workspace_config(billing_project,
                                                    workspace_name, namespace,
                                                    workflow_name,
                                                    work_space_json)
    if updated_workflow.status_code != 200:
        # response content is bytes; decode before concatenating
        print("ERROR: " + updated_workflow.content.decode())
        sys.exit(1)
    else:
        print("updated successfully")
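# A usage sketch for upload_workflow_json; the project/workspace names are
# hypothetical, and example_templates stands in for the json_templates module
# that provides the optimus_inputs/optimus_outputs dicts expected above.
from types import SimpleNamespace

example_templates = SimpleNamespace(
    optimus_inputs={'Optimus.chemistry': '"tenX_v2"'},  # illustrative input
    optimus_outputs={},
)
upload_workflow_json("my-billing-project", "my-workspace", "my-namespace",
                     "Optimus", example_templates)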
def update_workflow_config_in_workspace(config_namespace: str,
                                        config_name: str, method_body: dict,
                                        workspace_namespace: str,
                                        workspace_name: str):
    """Update workflow configuration in the given workspace.

    If the config does not exist, create one.
    """
    config_exists = fapi.get_workspace_config(workspace_namespace,
                                              workspace_name,
                                              config_namespace, config_name)
    if config_exists.status_code == 200:
        config_submission = fapi.update_workspace_config(
            workspace_namespace, workspace_name, config_namespace,
            config_name, method_body)
        if config_submission.status_code != 200:
            raise ValueError(
                f"Unable to update workflow config {config_namespace}/{config_name} "
                f"in the workspace {workspace_namespace}/{workspace_name}. "
                f"Response: {config_submission.status_code} - {config_submission.json()}!"
            )
    else:
        config_submission = fapi.create_workspace_config(
            workspace_namespace, workspace_name, method_body)
        if config_submission.status_code != 201:
            raise ValueError(
                f"Unable to create workflow config {config_namespace}/{config_name} "
                f"in the workspace {workspace_namespace}/{workspace_name}. "
                f"Response: {config_submission.status_code} - {config_submission.json()}!"
            )
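# A usage sketch for update_workflow_config_in_workspace with hypothetical
# names; the method_body layout mirrors the one built in do_fc_run and
# submit_job_to_terra below.
example_body = {
    'name': 'my-method',
    'namespace': 'my-namespace',
    'methodRepoMethod': {
        'methodNamespace': 'my-namespace',
        'methodName': 'my-method',
        'methodVersion': 1,
        'sourceRepo': 'agora',
        'methodUri': 'agora://my-namespace/my-method/1',
    },
    'rootEntityType': None,
    'prerequisites': {},
    'inputs': {},
    'outputs': {},
    'methodConfigVersion': 1,
    'deleted': False,
}
update_workflow_config_in_workspace('my-namespace', 'my-method', example_body,
                                    'my-billing-project', 'my-workspace')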
def main():
    global billing_project
    global template_workspace_name

    parser = argparse.ArgumentParser(prog="python " + sys.argv[0],
                                     add_help=False)
    subparser = parser.add_subparsers(dest="cmd")

    delete_workspace = subparser.add_parser('delete_workspace',
                                            help='delete workspace')
    delete_workspace.add_argument('--workspace-name', dest="workspace_name",
                                  help="name of the workspace")

    clone_workspace = subparser.add_parser(
        'clone_workspace', help='clone from existing workspace')
    clone_workspace.add_argument('--source-work-space',
                                 dest='src_work_space',
                                 help="name of the source workspace")
    clone_workspace.add_argument('--destination-work-space',
                                 dest='dest_work_space',
                                 help="name of the destination workspace")

    get_data_info = subparser.add_parser('get_participant_table',
                                         help='get participant.tsv')
    get_data_info.add_argument('--workspace-name', dest="workspace_name",
                               help="name of the workspace")
    get_data_info.add_argument('--participant-table-name',
                               dest="participant_table_name",
                               help="name of the participant table")
    get_data_info.add_argument('--output-name', dest="output_table_name",
                               required=False, default="participant.tsv",
                               help="name of the output tsv")

    create_participant_lane = subparser.add_parser(
        'create_participant_lane',
        help='create participant_lane/lane_set_id tables')
    create_participant_lane.add_argument('--input-name',
                                         dest="input_participant_table_name",
                                         required=False,
                                         default="participant.tsv",
                                         help="input participant table name")
    create_participant_lane.add_argument('--output-prefix',
                                         dest="output_prefix",
                                         required=False,
                                         help="output prefix for the lanes")

    upload_participant_lane = subparser.add_parser(
        'upload_participant',
        help='uploads the participant_lane_set, _lane_membership and '
             '_lane_entity files')
    upload_participant_lane.add_argument('--workspace-name',
                                         dest="workspace_name",
                                         help="name of the workspace")
    upload_participant_lane.add_argument('--input-prefix',
                                         dest="input_prefix",
                                         help="name of the input prefix")

    upload_workflow = subparser.add_parser(
        'upload_workflow', help='uploads wdl to --workspace-name')
    upload_workflow.add_argument('--workspace-name', dest="workspace_name",
                                 help="name of the workspace")
    upload_workflow.add_argument('--method', dest="method",
                                 help="name of the method")
    upload_workflow.add_argument('--wdl', dest="wdl",
                                 help="path to the WDL file")

    upload_config = subparser.add_parser('upload_config',
                                         help='upload config information')
    upload_config.add_argument('--workspace-name', dest="workspace_name",
                               help="name of the workspace")
    upload_config.add_argument('--chemistry', dest="chemistry",
                               choices=["V2", "V3"], help="chemistry")
    upload_config.add_argument(
        '--counting-mode', dest="counting_mode",
        choices=["sc_rna", "sn_rna"],
        help="counting mode: whether to count intronic alignments")
    upload_config.add_argument('--species', dest="species",
                               choices=["human", "mouse"], help="species")

    submit_workflow = subparser.add_parser('submit_workflow',
                                           help='submit a workflow run')
    submit_workflow.add_argument('--workspace-name', dest="workspace_name",
                                 help="name of the workspace")
    submit_workflow.add_argument('--workflow-repo', dest="workflow_repo",
                                 help="workflow repo name")
    submit_workflow.add_argument('--workflow-name', dest="workflow_name",
                                 help="workflow name")
    submit_workflow.add_argument('--entity-id', dest="entity_id",
                                 help="entity id")

    get_status = subparser.add_parser('get_status',
                                      help='get status of a submission')
    get_status.add_argument('--workspace-name', dest="workspace_name",
                            help="name of the workspace")
    get_status.add_argument('--submission-id', dest="submission_id",
                            help="submission_id")

    # show help when no arguments supplied
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)
    args = parser.parse_args()

    # new_workspace_name = "DCP2_Optimus_template_KMK_v1"
    if args.cmd == 'delete_workspace':
        print("Delete existing workspace ", args.workspace_name)
        delete_status = fapi.delete_workspace(billing_project,
                                              args.workspace_name)
    elif args.cmd == 'clone_workspace':
        print("Cloning a new workspace from template", args.src_work_space)
        status = create_newworkspace(billing_project, args.src_work_space,
                                     args.dest_work_space)
    elif args.cmd == 'get_participant_table':
        print("Get information from workspace", args.workspace_name)
        r = fapi.get_entities_tsv(billing_project, args.workspace_name,
                                  args.participant_table_name)
        with open(args.output_table_name, 'w') as fout:
            fout.write(r.content.decode())
    elif args.cmd == 'create_participant_lane':
        parse_terra.create_output_files(args.input_participant_table_name,
                                        args.output_prefix)
    elif args.cmd == 'upload_participant':
        upload_tables(args.input_prefix + ".tsv", billing_project,
                      args.workspace_name)
        upload_tables(args.input_prefix + "_membership.tsv", billing_project,
                      args.workspace_name)
        upload_tables(args.input_prefix + "_entity.tsv", billing_project,
                      args.workspace_name)
    elif args.cmd == 'upload_workflow':
        r = fapi.update_repository_method(args.workspace_name, args.method,
                                          "args.synopsis", args.wdl,
                                          "comment.txt", "args.comment")
        with open("response.txt", 'w') as fout:
            fout.write(r.content.decode())
    elif args.cmd == 'upload_config':
        work_space_config = fapi.get_workspace_config(billing_project,
                                                      args.workspace_name,
                                                      args.workspace_name,
                                                      "Optimus")
        work_space_json = work_space_config.json()
        work_space_json['inputs'] = json_templates.optimus_inputs
        work_space_json['outputs'] = json_templates.optimus_outputs
        if args.chemistry == "V2":
            work_space_json['inputs']['Optimus.chemistry'] = '"tenX_v2"'
            work_space_json['inputs'][
                'Optimus.whitelist'] = 'workspace.whitelist_v2'
        if args.chemistry == "V3":
            work_space_json['inputs']['Optimus.chemistry'] = '"tenX_v3"'
            work_space_json['inputs'][
                'Optimus.whitelist'] = 'workspace.whitelist_v3'
        if args.counting_mode == "sn_rna":
            work_space_json['inputs']['Optimus.counting_mode'] = '"sn_rna"'
        if args.counting_mode == "sc_rna":
            work_space_json['inputs']['Optimus.counting_mode'] = '"sc_rna"'
        if args.species == "human":
            work_space_json['inputs'][
                'Optimus.annotations_gtf'] = 'workspace.human_annotations_gtf'
            work_space_json['inputs'][
                'Optimus.ref_genome_fasta'] = 'workspace.human_ref_genome_fasta'
            work_space_json['inputs'][
                'Optimus.tar_star_reference'] = 'workspace.human_tar_star_reference'
        if args.species == "mouse":
            work_space_json['inputs'][
                'Optimus.annotations_gtf'] = 'workspace.mouse_annotations_gtf'
            work_space_json['inputs'][
                'Optimus.ref_genome_fasta'] = 'workspace.mouse_ref_genome_fasta'
            work_space_json['inputs'][
                'Optimus.tar_star_reference'] = 'workspace.mouse_tar_star_reference'
        updated_workflow = fapi.update_workspace_config(
            billing_project, args.workspace_name, args.workspace_name,
            "Optimus", work_space_json)
        if updated_workflow.status_code != 200:
            print("ERROR: " + updated_workflow.content.decode())
            sys.exit(1)
        else:
            print("updated successfully")
    elif args.cmd == 'submit_workflow':
        # Launching the Updated Monitor Submission Workflow
        response = fapi.get_entities_with_type(billing_project,
                                               args.workspace_name)
        entities = response.json()
        for ent in entities:
            ent_name = ent['name']
            ent_type = ent['entityType']
            ent_attrs = ent['attributes']
        submisson_response = fapi.create_submission(
            billing_project, args.workspace_name, args.workflow_repo,
            args.workflow_name, entity=args.entity_id,
            etype="participant_lane_set", expression=None, use_callcache=True)
        if submisson_response.status_code != 201:
            print(submisson_response.content.decode())
            sys.exit(1)
        else:
            print("Successfully created submission")
            with open('response.txt', 'w') as fout:
                # json.dump(submisson_response.json(), fout)
                fout.write(submisson_response.json()['submissionId'] + '\n')
        # r = create_workspace_config("broadgdac", args.workspace_name, body)
        # print(r.content.decode())
    elif args.cmd == 'get_status':
        res = fapi.get_submission(billing_project, args.workspace_name,
                                  args.submission_id)
        print(res.content.decode())
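# Example invocations of the CLI defined in main() above. The workspace
# names, prefix, and submission id are hypothetical placeholders:
#
#   python script.py clone_workspace --source-work-space template_ws \
#       --destination-work-space analysis_ws
#   python script.py upload_config --workspace-name analysis_ws \
#       --chemistry V2 --counting-mode sc_rna --species human
#   python script.py get_status --workspace-name analysis_ws \
#       --submission-id 01234567-89ab-cdef-0123-456789abcdef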
def do_fc_run(method: str, workspace: str, wdl_inputs: Union[str, dict],
              out_json: str, bucket_folder: str) -> str:
    """Run a FireCloud method.

    Args:
        method: Method namespace/name/version. Version is optional.
        workspace: Workspace namespace/name.
        wdl_inputs: WDL input JSON.
        out_json: Path to write the updated input JSON; when provided, inputs
            are uploaded and local file paths converted to gs:// URLs.
        bucket_folder: The folder under the google bucket for uploading files.

    Returns:
        URL to check submission status.
    """
    inputs = kco.get_wdl_inputs(wdl_inputs)
    method_namespace, method_name, method_version = kco.fs_split(method)
    if method_version is None:
        # resolve the latest snapshot of the method in Agora
        version = -1
        list_methods = fapi.list_repository_methods(method_name)
        if list_methods.status_code != 200:
            raise ValueError('Unable to list methods - ' +
                             str(list_methods.json()))
        methods = list_methods.json()
        for repo_method in methods:
            if repo_method['namespace'] == method_namespace:
                version = max(version, repo_method['snapshotId'])
        if version == -1:
            raise ValueError(method_name + ' not found')
        method_version = version

    root_entity = None
    launch_entity = None
    workspace_namespace, workspace_name, workspace_version = kco.fs_split(
        workspace)
    kco.get_or_create_workspace(workspace_namespace, workspace_name)

    if out_json is not None:
        kco.do_fc_upload(inputs, workspace, False, bucket_folder)
        with open(out_json, 'w') as fout:
            json.dump(inputs, fout)

    config_namespace = method_namespace
    config_name = method_name
    method_body = {
        'name': config_name,
        'namespace': config_namespace,
        'methodRepoMethod': {
            'methodNamespace': method_namespace,
            'methodName': method_name,
            'methodVersion': method_version,
            'sourceRepo': 'agora',
            'methodUri': 'agora://{0}/{1}/{2}'.format(method_namespace,
                                                      method_name,
                                                      method_version)
        },
        'rootEntityType': root_entity,
        'prerequisites': {},
        'inputs': convert_inputs(inputs),
        'outputs': {},
        'methodConfigVersion': 1,
        'deleted': False
    }
    config_exists = fapi.get_workspace_config(workspace_namespace,
                                              workspace_name,
                                              config_namespace, config_name)
    if config_exists.status_code == 200:
        config_submission = fapi.update_workspace_config(
            workspace_namespace, workspace_name, config_namespace,
            config_name, method_body)
        if config_submission.status_code != 200:
            raise ValueError('Unable to update workspace config. Response: ' +
                             str(config_submission.status_code))
    else:
        config_submission = fapi.create_workspace_config(
            workspace_namespace, workspace_name, method_body)
        if config_submission.status_code != 201:
            raise ValueError('Unable to create workspace config - ' +
                             str(config_submission.json()))

    launch_submission = fapi.create_submission(workspace_namespace,
                                               workspace_name,
                                               config_namespace, config_name,
                                               launch_entity, root_entity, "")
    if launch_submission.status_code == 201:
        submission_id = launch_submission.json()['submissionId']
        url = 'https://portal.firecloud.org/#workspaces/{}/{}/monitor/{}'.format(
            workspace_namespace, workspace_name, submission_id)
        return url
    else:
        raise ValueError('Unable to launch submission - ' +
                         str(launch_submission.json()))
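# A usage sketch for do_fc_run; all names below are hypothetical placeholders.
# Omitting the version in `method` makes the function resolve the latest
# Agora snapshot.
status_url = do_fc_run(method='my-namespace/my-method',
                       workspace='my-billing-project/my-workspace',
                       wdl_inputs='inputs.json',
                       out_json='uploaded_inputs.json',
                       bucket_folder='inputs')
print(status_url)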
def create_method_configs(billing_project, ws_name, attr_list, auth_domain):
    config_namespace = "broadinstitute_cga"
    file_downloader_name = "gdc_file_downloader__default_cfg"
    bam_downloader_name = "gdc_bam_downloader__default_cfg"
    file_downloader_cfg_snapshot_id = 3
    bam_downloader_cfg_snapshot_id = 2
    for attr in attr_list:
        attr_name = attr[0]
        # strip the trailing "uuid_and_filename" (17 characters) to get the
        # prefix used for the output attribute names
        attr_name_base = attr_name[:-17]
        attr_entity = attr[1]
        if "aligned_reads" in attr_name:
            new_config_name = "gdc_bam_downloader__" + attr_name_base + "cfg"
            print("Uploading and configuring method config {0}, based on {1}".
                  format(new_config_name, bam_downloader_name))
            api.copy_config_from_repo(billing_project, ws_name,
                                      config_namespace, bam_downloader_name,
                                      bam_downloader_cfg_snapshot_id,
                                      config_namespace, new_config_name)
            current_config = api.get_workspace_config(billing_project,
                                                      ws_name,
                                                      config_namespace,
                                                      new_config_name)
            current_config = current_config.json()
            inputs = current_config['inputs']
            outputs = current_config['outputs']
            inputs[
                'gdc_bam_downloader_workflow.uuid_and_filename'] = "this.{0}".format(
                    attr_name)
            outputs[
                'gdc_bam_downloader_workflow.gdc_bam_downloader.bam_file'] = "this.{0}bam_url".format(
                    attr_name_base)
            outputs[
                'gdc_bam_downloader_workflow.gdc_bam_downloader.bai_file'] = "this.{0}bai_url".format(
                    attr_name_base)
            current_config['inputs'] = inputs
            current_config['outputs'] = outputs
            current_config['rootEntityType'] = attr_entity
            api.update_workspace_config(billing_project, ws_name,
                                        config_namespace, new_config_name,
                                        current_config)
        else:
            new_config_name = "gdc_file_downloader__" + attr_name_base + "cfg"
            print("Uploading and configuring method config {0}, based on {1}".
                  format(new_config_name, file_downloader_name))
            api.copy_config_from_repo(billing_project, ws_name,
                                      config_namespace, file_downloader_name,
                                      file_downloader_cfg_snapshot_id,
                                      config_namespace, new_config_name)
            current_config = api.get_workspace_config(billing_project,
                                                      ws_name,
                                                      config_namespace,
                                                      new_config_name)
            current_config = current_config.json()
            inputs = current_config['inputs']
            outputs = current_config['outputs']
            if not auth_domain:
                # without an auth domain, drop the GDC user-token input
                inputs.pop(
                    'gdc_file_downloader_workflow.gdc_file_downloader.gdc_user_token',
                    None)
            inputs[
                'gdc_file_downloader_workflow.uuid_and_filename'] = "this.{0}".format(
                    attr_name)
            outputs[
                'gdc_file_downloader_workflow.gdc_file_downloader.file'] = "this.{0}url".format(
                    attr_name_base)
            current_config['inputs'] = inputs
            current_config['outputs'] = outputs
            current_config['rootEntityType'] = attr_entity
            api.update_workspace_config(billing_project, ws_name,
                                        config_namespace, new_config_name,
                                        current_config)
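# A hypothetical input for create_method_configs: each entry pairs an
# attribute name ending in "uuid_and_filename" with the entity type it lives
# on; the attribute, project, and workspace names here are illustrative only.
example_attrs = [
    ("aligned_reads__uuid_and_filename", "sample"),
    ("clinical_xml__uuid_and_filename", "participant"),
]
create_method_configs("my-billing-project", "my-workspace", example_attrs,
                      auth_domain=False)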
def submit_job_to_terra(method: str, workspace: str,
                        wdl_inputs: Union[str, dict], out_json: str,
                        bucket_folder: str, cache: bool) -> str:
    """Run a FireCloud method.

    Args:
        method: Method namespace/name/version. Version is optional.
        workspace: Workspace namespace/name.
        wdl_inputs: WDL input JSON.
        out_json: Path to write the updated input JSON; when provided, inputs
            are uploaded and local file paths converted to gs:// URLs.
        bucket_folder: The folder under the google bucket for uploading files.
        cache: Use call cache if applicable.

    Returns:
        URL to check submission status.
    """
    inputs = alto.get_wdl_inputs(wdl_inputs)  # parse input

    # check the method exists; get the latest snapshot if no version is given
    method_namespace, method_name, method_version = alto.fs_split(method)
    method_def = alto.get_method(method_namespace, method_name,
                                 method_version)
    method_version = method_def[
        'snapshotId'] if method_version is None else method_version

    # check the workspace exists
    workspace_namespace, workspace_name, workspace_version = alto.fs_split(
        workspace)
    alto.get_or_create_workspace(workspace_namespace, workspace_name)

    # upload input data to the google bucket and generate a modified JSON
    # input file
    if out_json is not None:
        alto.upload_to_google_bucket(inputs, workspace, False, bucket_folder,
                                     out_json)

    # do not use the data model
    root_entity = None
    launch_entity = None

    # update the method configuration
    config_namespace = method_namespace
    config_name = method_name
    method_body = {
        'name': config_name,
        'namespace': config_namespace,
        'methodRepoMethod': {
            'methodNamespace': method_namespace,
            'methodName': method_name,
            'methodVersion': method_version,
            'sourceRepo': 'agora',
            'methodUri': 'agora://{0}/{1}/{2}'.format(method_namespace,
                                                      method_name,
                                                      method_version)
        },
        'rootEntityType': root_entity,
        'prerequisites': {},
        'inputs': convert_inputs(inputs),
        'outputs': {},
        'methodConfigVersion': 1,
        'deleted': False
    }
    config_exists = fapi.get_workspace_config(workspace_namespace,
                                              workspace_name,
                                              config_namespace, config_name)
    if config_exists.status_code == 200:
        config_submission = fapi.update_workspace_config(
            workspace_namespace, workspace_name, config_namespace,
            config_name, method_body)
        if config_submission.status_code != 200:
            raise ValueError('Unable to update workspace config. Response: ' +
                             str(config_submission.status_code) + ' - ' +
                             str(config_submission.json()))
    else:
        config_submission = fapi.create_workspace_config(
            workspace_namespace, workspace_name, method_body)
        if config_submission.status_code != 201:
            raise ValueError('Unable to create workspace config - ' +
                             str(config_submission.json()))

    # submit the job to terra
    launch_submission = alto.create_submission(workspace_namespace,
                                               workspace_name,
                                               config_namespace, config_name,
                                               launch_entity, root_entity,
                                               use_callcache=cache)
    if launch_submission.status_code == 201:
        submission_id = launch_submission.json()['submissionId']
        url = 'https://app.terra.bio/#workspaces/{0}/{1}/job_history/{2}'.format(
            workspace_namespace, workspace_name, submission_id)
        return url
    else:
        raise ValueError('Unable to launch submission - ' +
                         str(launch_submission.json()))
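# A usage sketch for submit_job_to_terra; names are hypothetical placeholders.
# The returned URL points at the workspace's Terra job history page.
job_url = submit_job_to_terra(method='my-namespace/my-method/3',
                              workspace='my-billing-project/my-workspace',
                              wdl_inputs='inputs.json',
                              out_json='uploaded_inputs.json',
                              bucket_folder='inputs',
                              cache=True)
print(job_url)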
# Getting the json for the Monitor Submission Workflow; check the status code
# before parsing, since error responses may not carry a JSON body
workflow = fapi.get_workspace_config(workspace_project, workspace_name,
                                     workflow_repo, workflow_name)
if workflow.status_code != 200:
    print(workflow.content)
    raise ferrors.FireCloudServerError(workflow.status_code, workflow.content)
workflow_json = workflow.json()

# Updating the inputs in the JSON
workflow_json['inputs'] = {
    "monitor_submission.submission_id": f'"{submission_id}"',
    "monitor_submission.terra_project": f'"{workspace_project}"',
    "monitor_submission.terra_workspace": f'"{workspace_name}"'
}
updated_workflow = fapi.update_workspace_config(workspace_project,
                                                workspace_name,
                                                workflow_repo, workflow_name,
                                                workflow_json)
if updated_workflow.status_code != 200:
    print(updated_workflow.content)
    raise ferrors.FireCloudServerError(updated_workflow.status_code,
                                       updated_workflow.content)

# Launching the Updated Monitor Submission Workflow
create_submisson_response = fapi.create_submission(workspace_project,
                                                   workspace_name,
                                                   workflow_repo,
                                                   workflow_name,
                                                   entity=None, etype=None,
                                                   expression=None,
                                                   use_callcache=True)
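# A follow-up sketch: poll the submission launched above with
# fapi.get_submission (used elsewhere in this file). 'Done' and 'Aborted' are
# assumed here to be the terminal Terra submission states.
import time

monitor_submission_id = create_submisson_response.json()['submissionId']
while True:
    status = fapi.get_submission(workspace_project, workspace_name,
                                 monitor_submission_id).json()['status']
    if status in ('Done', 'Aborted'):
        break
    time.sleep(60)  # poll once a minute until the submission finishes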