def invoke(self, trans, workflow_id, payload, **kwd):
    """
    POST /api/workflows/{encoded_workflow_id}/invocations

    Schedule the workflow specified by `workflow_id` to run.
    """
    # The lookup helper performs the accessibility check for the caller.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    is_batch = payload.get('batch')
    # More than one run configuration is only legal when the caller opted
    # in via the 'batch' flag.
    if not is_batch and len(run_configs) != 1:
        raise exceptions.RequestParameterInvalidException("Must specify 'batch' to use batch parameters.")
    invocations = []
    for run_config in run_configs:
        # TODO: workflow scheduler hints
        scheduler_hint = payload.get('scheduler', None)
        queued = queue_invoke(
            trans=trans,
            workflow=workflow,
            workflow_run_config=run_config,
            request_params=dict(scheduler=scheduler_hint),
        )
        invocations.append(self.encode_all_ids(trans, queued.to_dict(), recursive=True))
    # Batch requests get the whole list back; single runs unwrap it.
    return invocations if is_batch else invocations[0]
def invoke(self, trans, workflow_id, payload, **kwd):
    """
    POST /api/workflows/{encoded_workflow_id}/invocations

    Schedule the workflow specified by `workflow_id` to run.
    """
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    is_batch = payload.get('batch')
    if len(run_configs) != 1 and not is_batch:
        raise exceptions.RequestParameterInvalidException("Must specify 'batch' to use batch parameters.")

    def _schedule(run_config):
        # TODO: workflow scheduler hints
        request_params = dict(scheduler=payload.get('scheduler', None))
        queued = queue_invoke(
            trans=trans,
            workflow=workflow,
            workflow_run_config=run_config,
            request_params=request_params,
        )
        return self.encode_all_ids(trans, queued.to_dict(), recursive=True)

    invocations = [_schedule(rc) for rc in run_configs]
    if is_batch:
        return invocations
    return invocations[0]
def invoke(self, trans, workflow_id, payload, **kwd):
    """
    POST /api/workflows/{encoded_workflow_id}/invocations

    Schedule the workflow specified by `workflow_id` to run.
    """
    # /usage is awkward in this context but is consistent with the rest of
    # this module. Would prefer to redo it all to use /invocation(s).
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    # SECURITY FIX: removed an injected block here that scanned
    # run_configs[0].param_map for a magic key ('JPCNn681vcGV4KuvuT16') and
    # wrote caller-controlled, quote-wrapped values straight into
    # workflow.steps[*].tool_inputs. That mutated persisted tool state from
    # request parameters (a tool-state tampering backdoor) and relied on
    # Python-2-only dict.iteritems()/unicode(), so it would crash on
    # Python 3 anyway.
    is_batch = payload.get('batch')
    # Multiple run configurations require the explicit 'batch' flag.
    if not is_batch and len(run_configs) != 1:
        raise exceptions.RequestParameterInvalidException("Must specify 'batch' to use batch parameters.")
    invocations = []
    for run_config in run_configs:
        workflow_scheduler_id = payload.get('scheduler', None)
        # TODO: workflow scheduler hints
        work_request_params = dict(scheduler=workflow_scheduler_id)
        workflow_invocation = queue_invoke(
            trans=trans,
            workflow=workflow,
            workflow_run_config=run_config,
            request_params=work_request_params,
        )
        invocation = self.encode_all_ids(trans, workflow_invocation.to_dict(), recursive=True)
        invocations.append(invocation)
    if is_batch:
        return invocations
    else:
        return invocations[0]
def create(self, trans, payload, **kwd):
    """
    POST /api/workflows

    Run or create workflows from the api.

    If installed_repository_file or from_history_id is specified a new
    workflow will be created for this user. Otherwise, workflow_id must be
    specified and this API method will cause a workflow to execute.

    :param  installed_repository_file    The path of a workflow to import.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   installed_repository_file    str

    :param  workflow_id:  An existing workflow id. Either workflow_id,
        installed_repository_file or from_history_id must be specified
    :type   workflow_id:  str

    :param  parameters:  If workflow_id is set - see _update_step_parameters()
    :type   parameters:  dict

    :param  ds_map:  If workflow_id is set - a dictionary mapping each input
        step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld'
        or 'hda') and 'id' (which should be the id of a
        LibraryDatasetDatasetAssociation, LibraryDataset or
        HistoryDatasetAssociation respectively)
    :type   ds_map:  dict

    :param  no_add_to_history:  If workflow_id is set - if present in the
        payload with any value, the input datasets will not be added to the
        selected history
    :type   no_add_to_history:  str

    :param  history:  If workflow_id is set - optional history where to run
        the workflow, either the name of a new history or "hist_id=HIST_ID"
        where HIST_ID is the id of an existing history. If not specified, the
        workflow will be run a new unnamed history
    :type   history:  str

    :param  replacement_params:  If workflow_id is set - an optional
        dictionary used when renaming datasets
    :type   replacement_params:  dict

    :param  from_history_id:  Id of history to extract a workflow from.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   from_history_id:  str

    :param  job_ids:  If from_history_id is set - optional list of jobs to
        include when extracting a workflow from history
    :type   job_ids:  str

    :param  dataset_ids:  If from_history_id is set - optional list of HDA
        `hid`s corresponding to workflow inputs when extracting a workflow
        from history
    :type   dataset_ids:  str

    :param  dataset_collection_ids:  If from_history_id is set - optional
        list of HDCA `hid`s corresponding to workflow inputs when extracting
        a workflow from history
    :type   dataset_collection_ids:  str

    :param  workflow_name:  If from_history_id is set - name of the workflow
        to create when extracting a workflow from history
    :type   workflow_name:  str

    :param  allow_tool_state_corrections:  If set to True, any Tool parameter
        changes will not prevent running workflow, defaults to False
    :type   allow_tool_state_corrections:  bool

    :param  use_cached_job:  If set to True galaxy will attempt to find
        previously executed steps for all workflow steps with the exact same
        parameter combinations and will copy the outputs of the previously
        executed step.
    """
    ways_to_create = set([
        'archive_source',
        'workflow_id',
        'installed_repository_file',
        'from_history_id',
        'from_path',
        'shared_workflow_id',
        'workflow',
    ])
    # Exactly one creation/run mode may appear in the payload.
    if len(ways_to_create.intersection(payload)) == 0:
        message = "One parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterMissingException(message)
    if len(ways_to_create.intersection(payload)) > 1:
        message = "Only one parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterInvalidException(message)
    if 'installed_repository_file' in payload:
        # Importing from a server-side file path is admin-only.
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException()
        installed_repository_file = payload.get('installed_repository_file', '')
        if not os.path.exists(installed_repository_file):
            # BUG FIX: the '%s' placeholder was never interpolated;
            # include the missing path in the error message.
            raise exceptions.MessageException("Repository file '%s' not found." % installed_repository_file)
        elif os.path.getsize(os.path.abspath(installed_repository_file)) > 0:
            workflow_data = None
            with open(installed_repository_file, 'rb') as f:
                workflow_data = f.read()
            return self.__api_import_from_archive(trans, workflow_data)
        else:
            raise exceptions.MessageException("You attempted to open an empty file.")
    if 'archive_source' in payload:
        archive_source = payload['archive_source']
        archive_file = payload.get('archive_file')
        archive_data = None
        if archive_source:
            # file:// sources read from the server filesystem -> admin-only.
            if archive_source.startswith("file://"):
                if not trans.user_is_admin:
                    raise exceptions.AdminRequiredException()
                workflow_src = {"src": "from_path", "path": archive_source[len("file://"):]}
                payload["workflow"] = workflow_src
                return self.__api_import_new_workflow(trans, payload, **kwd)
            else:
                try:
                    archive_data = requests.get(archive_source).text
                except Exception:
                    raise exceptions.MessageException("Failed to open URL '%s'." % escape(archive_source))
        elif hasattr(archive_file, 'file'):
            uploaded_file = archive_file.file
            uploaded_file_name = uploaded_file.name
            if os.path.getsize(os.path.abspath(uploaded_file_name)) > 0:
                archive_data = uploaded_file.read()
            else:
                raise exceptions.MessageException("You attempted to upload an empty file.")
        else:
            raise exceptions.MessageException("Please provide a URL or file.")
        return self.__api_import_from_archive(trans, archive_data, "uploaded file")
    if 'from_history_id' in payload:
        # Extract a new workflow from an existing, accessible history.
        from_history_id = payload.get('from_history_id')
        from_history_id = self.decode_id(from_history_id)
        history = self.history_manager.get_accessible(from_history_id, trans.user, current_history=trans.history)
        job_ids = [self.decode_id(_) for _ in payload.get('job_ids', [])]
        dataset_ids = payload.get('dataset_ids', [])
        dataset_collection_ids = payload.get('dataset_collection_ids', [])
        workflow_name = payload['workflow_name']
        stored_workflow = extract_workflow(
            trans=trans,
            user=trans.get_user(),
            history=history,
            job_ids=job_ids,
            dataset_ids=dataset_ids,
            dataset_collection_ids=dataset_collection_ids,
            workflow_name=workflow_name,
        )
        item = stored_workflow.to_dict(value_mapper={'id': trans.security.encode_id})
        item['url'] = url_for('workflow', id=item['id'])
        return item
    if 'from_path' in payload:
        from_path = payload.get('from_path')
        payload["workflow"] = {"src": "from_path", "path": from_path}
        return self.__api_import_new_workflow(trans, payload, **kwd)
    if 'shared_workflow_id' in payload:
        workflow_id = payload['shared_workflow_id']
        return self.__api_import_shared_workflow(trans, workflow_id, payload)
    if 'workflow' in payload:
        return self.__api_import_new_workflow(trans, payload, **kwd)
    workflow_id = payload.get('workflow_id', None)
    if not workflow_id:
        message = "Invalid workflow_id specified."
        raise exceptions.RequestParameterInvalidException(message)
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    assert len(run_configs) == 1
    run_config = run_configs[0]
    history = run_config.target_history
    # invoke may throw MessageExceptions on tool errors, failure
    # to match up inputs, etc...
    outputs, invocation = invoke(
        trans=trans,
        workflow=workflow,
        workflow_run_config=run_config,
        populate_state=True,
    )
    trans.sa_session.flush()
    # Build legacy output - should probably include more information from
    # outputs.
    rval = {}
    rval['history'] = trans.security.encode_id(history.id)
    rval['outputs'] = []
    if outputs:
        # Newer outputs don't necessarily fill outputs (?)
        for step in workflow.steps:
            if step.type == 'tool' or step.type is None:
                for v in outputs[step.id].values():
                    rval['outputs'].append(trans.security.encode_id(v.id))
    # Newer version of this API just returns the invocation as a dict, to
    # facilitate migration - produce the newer style response and blend in
    # the older information.
    invocation_response = self.__encode_invocation(invocation, **kwd)
    invocation_response.update(rval)
    return invocation_response
def create(self, trans, payload, **kwd):
    """
    POST /api/workflows

    Run or create workflows from the api.

    If installed_repository_file or from_history_id is specified a new
    workflow will be created for this user. Otherwise, workflow_id must be
    specified and this API method will cause a workflow to execute.

    :param  installed_repository_file    The path of a workflow to import.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   installed_repository_file    str

    :param  workflow_id:  An existing workflow id. Either workflow_id,
        installed_repository_file or from_history_id must be specified
    :type   workflow_id:  str

    :param  parameters:  If workflow_id is set - see _update_step_parameters()
    :type   parameters:  dict

    :param  ds_map:  If workflow_id is set - a dictionary mapping each input
        step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld'
        or 'hda') and 'id' (which should be the id of a
        LibraryDatasetDatasetAssociation, LibraryDataset or
        HistoryDatasetAssociation respectively)
    :type   ds_map:  dict

    :param  no_add_to_history:  If workflow_id is set - if present in the
        payload with any value, the input datasets will not be added to the
        selected history
    :type   no_add_to_history:  str

    :param  history:  If workflow_id is set - optional history where to run
        the workflow, either the name of a new history or "hist_id=HIST_ID"
        where HIST_ID is the id of an existing history. If not specified, the
        workflow will be run a new unnamed history
    :type   history:  str

    :param  replacement_params:  If workflow_id is set - an optional
        dictionary used when renaming datasets
    :type   replacement_params:  dict

    :param  from_history_id:  Id of history to extract a workflow from.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   from_history_id:  str

    :param  job_ids:  If from_history_id is set - optional list of jobs to
        include when extracting a workflow from history
    :type   job_ids:  str

    :param  dataset_ids:  If from_history_id is set - optional list of HDA
        `hid`s corresponding to workflow inputs when extracting a workflow
        from history
    :type   dataset_ids:  str

    :param  dataset_collection_ids:  If from_history_id is set - optional
        list of HDCA `hid`s corresponding to workflow inputs when extracting
        a workflow from history
    :type   dataset_collection_ids:  str

    :param  workflow_name:  If from_history_id is set - name of the workflow
        to create when extracting a workflow from history
    :type   workflow_name:  str

    :param  allow_tool_state_corrections:  If set to True, any Tool parameter
        changes will not prevent running workflow, defaults to False
    :type   allow_tool_state_corrections:  bool

    :param  use_cached_job:  If set to True galaxy will attempt to find
        previously executed steps for all workflow steps with the exact same
        parameter combinations and will copy the outputs of the previously
        executed step.
    """
    ways_to_create = set([
        'archive_source',
        'workflow_id',
        'installed_repository_file',
        'from_history_id',
        'from_path',
        'shared_workflow_id',
        'workflow',
    ])
    # The payload must select exactly one of the creation/run modes above.
    if len(ways_to_create.intersection(payload)) == 0:
        message = "One parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterMissingException(message)
    if len(ways_to_create.intersection(payload)) > 1:
        message = "Only one parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterInvalidException(message)
    if 'installed_repository_file' in payload:
        # Server-side file import is restricted to admins.
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException()
        installed_repository_file = payload.get('installed_repository_file', '')
        if not os.path.exists(installed_repository_file):
            # BUG FIX: '%s' was left un-interpolated in the original message;
            # format in the offending path.
            raise exceptions.MessageException("Repository file '%s' not found." % installed_repository_file)
        elif os.path.getsize(os.path.abspath(installed_repository_file)) > 0:
            workflow_data = None
            with open(installed_repository_file, 'rb') as f:
                workflow_data = f.read()
            return self.__api_import_from_archive(trans, workflow_data)
        else:
            raise exceptions.MessageException("You attempted to open an empty file.")
    if 'archive_source' in payload:
        archive_source = payload['archive_source']
        archive_file = payload.get('archive_file')
        archive_data = None
        if archive_source:
            # file:// URLs read from the server filesystem -> admin-only.
            if archive_source.startswith("file://"):
                if not trans.user_is_admin:
                    raise exceptions.AdminRequiredException()
                workflow_src = {"src": "from_path", "path": archive_source[len("file://"):]}
                payload["workflow"] = workflow_src
                return self.__api_import_new_workflow(trans, payload, **kwd)
            else:
                try:
                    archive_data = requests.get(archive_source).text
                except Exception:
                    raise exceptions.MessageException("Failed to open URL '%s'." % escape(archive_source))
        elif hasattr(archive_file, 'file'):
            uploaded_file = archive_file.file
            uploaded_file_name = uploaded_file.name
            if os.path.getsize(os.path.abspath(uploaded_file_name)) > 0:
                archive_data = uploaded_file.read()
            else:
                raise exceptions.MessageException("You attempted to upload an empty file.")
        else:
            raise exceptions.MessageException("Please provide a URL or file.")
        return self.__api_import_from_archive(trans, archive_data, "uploaded file")
    if 'from_history_id' in payload:
        # Extract a new workflow from an existing, accessible history.
        from_history_id = payload.get('from_history_id')
        from_history_id = self.decode_id(from_history_id)
        history = self.history_manager.get_accessible(from_history_id, trans.user, current_history=trans.history)
        job_ids = [self.decode_id(_) for _ in payload.get('job_ids', [])]
        dataset_ids = payload.get('dataset_ids', [])
        dataset_collection_ids = payload.get('dataset_collection_ids', [])
        workflow_name = payload['workflow_name']
        stored_workflow = extract_workflow(
            trans=trans,
            user=trans.get_user(),
            history=history,
            job_ids=job_ids,
            dataset_ids=dataset_ids,
            dataset_collection_ids=dataset_collection_ids,
            workflow_name=workflow_name,
        )
        item = stored_workflow.to_dict(value_mapper={'id': trans.security.encode_id})
        item['url'] = url_for('workflow', id=item['id'])
        return item
    if 'from_path' in payload:
        from_path = payload.get('from_path')
        payload["workflow"] = {"src": "from_path", "path": from_path}
        return self.__api_import_new_workflow(trans, payload, **kwd)
    if 'shared_workflow_id' in payload:
        workflow_id = payload['shared_workflow_id']
        return self.__api_import_shared_workflow(trans, workflow_id, payload)
    if 'workflow' in payload:
        return self.__api_import_new_workflow(trans, payload, **kwd)
    workflow_id = payload.get('workflow_id', None)
    if not workflow_id:
        message = "Invalid workflow_id specified."
        raise exceptions.RequestParameterInvalidException(message)
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    assert len(run_configs) == 1
    run_config = run_configs[0]
    history = run_config.target_history
    # invoke may throw MessageExceptions on tool errors, failure
    # to match up inputs, etc...
    outputs, invocation = invoke(
        trans=trans,
        workflow=workflow,
        workflow_run_config=run_config,
        populate_state=True,
    )
    trans.sa_session.flush()
    # Build legacy output - should probably include more information from
    # outputs.
    rval = {}
    rval['history'] = trans.security.encode_id(history.id)
    rval['outputs'] = []
    if outputs:
        # Newer outputs don't necessarily fill outputs (?)
        for step in workflow.steps:
            if step.type == 'tool' or step.type is None:
                for v in outputs[step.id].values():
                    rval['outputs'].append(trans.security.encode_id(v.id))
    # Newer version of this API just returns the invocation as a dict, to
    # facilitate migration - produce the newer style response and blend in
    # the older information.
    invocation_response = self.__encode_invocation(invocation, **kwd)
    invocation_response.update(rval)
    return invocation_response
def create(self, trans, payload, **kwd):
    """
    POST /api/workflows

    Run or create workflows from the api.

    If installed_repository_file or from_history_id is specified a new
    workflow will be created for this user. Otherwise, workflow_id must be
    specified and this API method will cause a workflow to execute.

    :param  installed_repository_file    The path of a workflow to import.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   installed_repository_file    str

    :param  workflow_id:  An existing workflow id. Either workflow_id,
        installed_repository_file or from_history_id must be specified
    :type   workflow_id:  str

    :param  parameters:  If workflow_id is set - see _update_step_parameters()
    :type   parameters:  dict

    :param  ds_map:  If workflow_id is set - a dictionary mapping each input
        step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld'
        or 'hda') and 'id' (which should be the id of a
        LibraryDatasetDatasetAssociation, LibraryDataset or
        HistoryDatasetAssociation respectively)
    :type   ds_map:  dict

    :param  no_add_to_history:  If workflow_id is set - if present in the
        payload with any value, the input datasets will not be added to the
        selected history
    :type   no_add_to_history:  str

    :param  history:  If workflow_id is set - optional history where to run
        the workflow, either the name of a new history or "hist_id=HIST_ID"
        where HIST_ID is the id of an existing history. If not specified, the
        workflow will be run a new unnamed history
    :type   history:  str

    :param  replacement_params:  If workflow_id is set - an optional
        dictionary used when renaming datasets
    :type   replacement_params:  dict

    :param  from_history_id:  Id of history to extract a workflow from.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   from_history_id:  str

    :param  job_ids:  If from_history_id is set - optional list of jobs to
        include when extracting a workflow from history
    :type   job_ids:  str

    :param  dataset_ids:  If from_history_id is set - optional list of HDA
        `hid`s corresponding to workflow inputs when extracting a workflow
        from history
    :type   dataset_ids:  str

    :param  dataset_collection_ids:  If from_history_id is set - optional
        list of HDCA `hid`s corresponding to workflow inputs when extracting
        a workflow from history
    :type   dataset_collection_ids:  str

    :param  workflow_name:  If from_history_id is set - name of the workflow
        to create when extracting a workflow from history
    :type   workflow_name:  str

    :param  allow_tool_state_corrections:  If set to True, any Tool parameter
        changes will not prevent running workflow, defaults to False
    :type   allow_tool_state_corrections:  bool
    """
    ways_to_create = set([
        'workflow_id',
        'installed_repository_file',
        'from_history_id',
        'shared_workflow_id',
        'workflow',
    ])
    # The payload must select exactly one of the creation/run modes above.
    if len(ways_to_create.intersection(payload)) == 0:
        message = "One parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterMissingException(message)
    if len(ways_to_create.intersection(payload)) > 1:
        message = "Only one parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterInvalidException(message)
    if 'installed_repository_file' in payload:
        # Delegate tool-shed repository imports to the legacy web controller.
        workflow_controller = trans.webapp.controllers['workflow']
        result = workflow_controller.import_workflow(trans=trans, cntrller='api', **payload)
        return result
    if 'from_history_id' in payload:
        # Extract a new workflow from an existing, accessible history.
        from_history_id = payload.get('from_history_id')
        from_history_id = self.decode_id(from_history_id)
        history = self.history_manager.get_accessible(from_history_id, trans.user, current_history=trans.history)
        job_ids = [self.decode_id(_) for _ in payload.get('job_ids', [])]
        dataset_ids = payload.get('dataset_ids', [])
        dataset_collection_ids = payload.get('dataset_collection_ids', [])
        workflow_name = payload['workflow_name']
        stored_workflow = extract_workflow(
            trans=trans,
            user=trans.get_user(),
            history=history,
            job_ids=job_ids,
            dataset_ids=dataset_ids,
            dataset_collection_ids=dataset_collection_ids,
            workflow_name=workflow_name,
        )
        item = stored_workflow.to_dict(value_mapper={'id': trans.security.encode_id})
        item['url'] = url_for('workflow', id=item['id'])
        return item
    if 'shared_workflow_id' in payload:
        workflow_id = payload['shared_workflow_id']
        return self.__api_import_shared_workflow(trans, workflow_id, payload)
    if 'workflow' in payload:
        return self.__api_import_new_workflow(trans, payload, **kwd)
    workflow_id = payload.get('workflow_id', None)
    if not workflow_id:
        message = "Invalid workflow_id specified."
        raise exceptions.RequestParameterInvalidException(message)
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    assert len(run_configs) == 1
    run_config = run_configs[0]
    history = run_config.target_history
    # invoke may throw MessageExceptions on tool errors, failure
    # to match up inputs, etc...
    outputs, invocation = invoke(
        trans=trans,
        workflow=workflow,
        workflow_run_config=run_config,
        populate_state=True,
    )
    trans.sa_session.flush()
    # Build legacy output - should probably include more information from
    # outputs.
    rval = {}
    rval['history'] = trans.security.encode_id(history.id)
    rval['outputs'] = []
    # ROBUSTNESS FIX: guard the legacy-output loop the way the newer API
    # variant does — newer invocations don't necessarily fill `outputs`,
    # and outputs[step.id] would raise for tool steps in that case.
    if outputs:
        for step in workflow.steps:
            if step.type == 'tool' or step.type is None:
                for v in outputs[step.id].values():
                    rval['outputs'].append(trans.security.encode_id(v.id))
    # Newer version of this API just returns the invocation as a dict, to
    # facilitate migration - produce the newer style response and blend in
    # the older information.
    invocation_response = self.__encode_invocation(trans, invocation, step_details=kwd.get('step_details', False))
    invocation_response.update(rval)
    return invocation_response
def create(self, trans, payload, **kwd):
    """
    POST /api/workflows

    Run or create workflows from the api.

    If installed_repository_file or from_history_id is specified a new
    workflow will be created for this user. Otherwise, workflow_id must be
    specified and this API method will cause a workflow to execute.

    :param  installed_repository_file    The path of a workflow to import.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   installed_repository_file    str

    :param  workflow_id:  An existing workflow id. Either workflow_id,
        installed_repository_file or from_history_id must be specified
    :type   workflow_id:  str

    :param  parameters:  If workflow_id is set - see _update_step_parameters()
    :type   parameters:  dict

    :param  ds_map:  If workflow_id is set - a dictionary mapping each input
        step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld'
        or 'hda') and 'id' (which should be the id of a
        LibraryDatasetDatasetAssociation, LibraryDataset or
        HistoryDatasetAssociation respectively)
    :type   ds_map:  dict

    :param  no_add_to_history:  If workflow_id is set - if present in the
        payload with any value, the input datasets will not be added to the
        selected history
    :type   no_add_to_history:  str

    :param  history:  If workflow_id is set - optional history where to run
        the workflow, either the name of a new history or "hist_id=HIST_ID"
        where HIST_ID is the id of an existing history. If not specified, the
        workflow will be run a new unnamed history
    :type   history:  str

    :param  replacement_params:  If workflow_id is set - an optional
        dictionary used when renaming datasets
    :type   replacement_params:  dict

    :param  from_history_id:  Id of history to extract a workflow from.
        Either workflow_id, installed_repository_file or from_history_id
        must be specified
    :type   from_history_id:  str

    :param  job_ids:  If from_history_id is set - optional list of jobs to
        include when extracting a workflow from history
    :type   job_ids:  str

    :param  dataset_ids:  If from_history_id is set - optional list of HDA
        `hid`s corresponding to workflow inputs when extracting a workflow
        from history
    :type   dataset_ids:  str

    :param  dataset_collection_ids:  If from_history_id is set - optional
        list of HDCA `hid`s corresponding to workflow inputs when extracting
        a workflow from history
    :type   dataset_collection_ids:  str

    :param  workflow_name:  If from_history_id is set - name of the workflow
        to create when extracting a workflow from history
    :type   workflow_name:  str

    :param  allow_tool_state_corrections:  If set to True, any Tool parameter
        changes will not prevent running workflow, defaults to False
    :type   allow_tool_state_corrections:  bool
    """
    # The payload must select exactly one of these creation/run modes.
    ways_to_create = set( [
        'workflow_id',
        'installed_repository_file',
        'from_history_id',
        'shared_workflow_id',
        'workflow',
    ] )
    if len( ways_to_create.intersection( payload ) ) == 0:
        message = "One parameter among - %s - must be specified" % ", ".join( ways_to_create )
        raise exceptions.RequestParameterMissingException( message )
    if len( ways_to_create.intersection( payload ) ) > 1:
        message = "Only one parameter among - %s - must be specified" % ", ".join( ways_to_create )
        raise exceptions.RequestParameterInvalidException( message )
    if 'installed_repository_file' in payload:
        # Delegate tool-shed repository workflow imports to the legacy
        # web 'workflow' controller.
        workflow_controller = trans.webapp.controllers[ 'workflow' ]
        result = workflow_controller.import_workflow( trans=trans, cntrller='api', **payload)
        return result
    if 'from_history_id' in payload:
        # Extract a new workflow from an existing history the user can access.
        from_history_id = payload.get( 'from_history_id' )
        from_history_id = self.decode_id( from_history_id )
        history = self.history_manager.get_accessible( from_history_id, trans.user, current_history=trans.history )
        job_ids = [ self.decode_id(_) for _ in payload.get( 'job_ids', [] ) ]
        dataset_ids = payload.get( 'dataset_ids', [] )
        dataset_collection_ids = payload.get( 'dataset_collection_ids', [] )
        workflow_name = payload[ 'workflow_name' ]
        stored_workflow = extract_workflow(
            trans=trans,
            user=trans.get_user(),
            history=history,
            job_ids=job_ids,
            dataset_ids=dataset_ids,
            dataset_collection_ids=dataset_collection_ids,
            workflow_name=workflow_name,
        )
        item = stored_workflow.to_dict( value_mapper={ 'id': trans.security.encode_id } )
        item[ 'url' ] = url_for( 'workflow', id=item[ 'id' ] )
        return item
    if 'shared_workflow_id' in payload:
        # Import a workflow another user has shared.
        workflow_id = payload[ 'shared_workflow_id' ]
        return self.__api_import_shared_workflow( trans, workflow_id, payload )
    if 'workflow' in payload:
        # Create a brand new workflow from an inline workflow description.
        return self.__api_import_new_workflow( trans, payload, **kwd )
    # Remaining mode: execute an existing workflow by id.
    workflow_id = payload.get( 'workflow_id', None )
    if not workflow_id:
        message = "Invalid workflow_id specified."
        raise exceptions.RequestParameterInvalidException( message )
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow( trans, workflow_id )
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs( trans, workflow, payload )
    # This legacy endpoint handles a single run configuration only.
    assert len(run_configs) == 1
    run_config = run_configs[0]
    history = run_config.target_history
    # invoke may throw MessageExceptions on tool errors, failure
    # to match up inputs, etc...
    outputs, invocation = invoke(
        trans=trans,
        workflow=workflow,
        workflow_run_config=run_config,
        populate_state=True,
    )
    trans.sa_session.flush()
    # Build legacy output - should probably include more information from
    # outputs.
    rval = {}
    rval['history'] = trans.security.encode_id( history.id )
    rval['outputs'] = []
    # Collect encoded output dataset ids for tool (or untyped) steps.
    for step in workflow.steps:
        if step.type == 'tool' or step.type is None:
            for v in outputs[ step.id ].values():
                rval[ 'outputs' ].append( trans.security.encode_id( v.id ) )
    # Newer version of this API just returns the invocation as a dict, to
    # facilitate migration - produce the newer style response and blend in
    # the older information.
    invocation_response = self.__encode_invocation( trans, invocation, step_details=kwd.get('step_details', False) )
    invocation_response.update( rval )
    return invocation_response