Example #1
    def invoke(self, trans, workflow_id, payload, **kwd):
        """
        POST /api/workflows/{encoded_workflow_id}/invocations

        Schedule the workflow specified by `workflow_id` to run.
        """
        # /usage is awkward in this context but is consistent with the rest of
        # this module. Would prefer to redo it all to use /invocation(s).
        # Get workflow + accessibility check.
        stored_workflow = self.__get_stored_accessible_workflow(
            trans, workflow_id)
        workflow = stored_workflow.latest_workflow

        run_config = build_workflow_run_config(trans, workflow, payload)
        workflow_scheduler_id = payload.get("scheduler", None)
        # TODO: workflow scheduler hints
        work_request_params = dict(scheduler=workflow_scheduler_id)

        workflow_invocation = queue_invoke(trans=trans,
                                           workflow=workflow,
                                           workflow_run_config=run_config,
                                           request_params=work_request_params)
        return self.encode_all_ids(trans,
                                   workflow_invocation.to_dict(),
                                   recursive=True)
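
For context, below is a minimal client-side sketch of calling this endpoint. The base URL, API key, encoded ids, and the history_id/inputs payload keys are illustrative assumptions about what build_workflow_run_config accepts, not a documented contract; only the scheduler key is read directly by the method above.

import requests

GALAXY_URL = "https://galaxy.example.org"   # placeholder Galaxy instance
API_KEY = "your-api-key"                    # placeholder API key, passed as a query parameter here
workflow_id = "ebfb8f50c6abde6d"            # placeholder encoded StoredWorkflow id

payload = {
    "history_id": "1cd8e2f6b131e891",  # assumed key: run in an existing history
    "inputs": {"0": {"src": "hda", "id": "df7a1f0c02a5b08e"}},  # assumed key: map input step 0 to an HDA
    "scheduler": None,  # optional scheduler hint, read by invoke() above
}

response = requests.post(
    "%s/api/workflows/%s/invocations" % (GALAXY_URL, workflow_id),
    params={"key": API_KEY},
    json=payload,
)
response.raise_for_status()
print(response.json())  # the encoded invocation dictionary returned by invoke()
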
Example #2
    def invoke(self, trans, workflow_id, payload, **kwd):
        """
        POST /api/workflows/{encoded_workflow_id}/invocations

        Schedule the workflow specified by `workflow_id` to run.
        """
        # /usage is awkward in this context but is consistent with the rest of
        # this module. Would prefer to redo it all to use /invocation(s).
        # Get workflow + accessibility check.
        stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
        workflow = stored_workflow.latest_workflow

        run_config = build_workflow_run_config(trans, workflow, payload)
        workflow_scheduler_id = payload.get("scheduler", None)
        # TODO: workflow scheduler hints
        work_request_params = dict(scheduler=workflow_scheduler_id)

        workflow_invocation = queue_invoke(
            trans=trans, workflow=workflow, workflow_run_config=run_config, request_params=work_request_params
        )
        return self.encode_all_ids(trans, workflow_invocation.to_dict(), recursive=True)
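
The encode_all_ids(..., recursive=True) call in both versions above replaces internal database ids with encoded ids before the dictionary is returned to API clients. The standalone sketch below only illustrates that idea and is not Galaxy's actual implementation; encode_id stands in for trans.security.encode_id.

def encode_all_ids_sketch(value, encode_id, recursive=True):
    """Recursively encode id-like fields in a response structure (illustration only)."""
    if isinstance(value, dict):
        encoded = {}
        for key, item in value.items():
            if key == "id" or key.endswith("_id"):
                # encode scalar ids; leave missing values untouched
                encoded[key] = encode_id(item) if item is not None else None
            elif recursive:
                encoded[key] = encode_all_ids_sketch(item, encode_id, recursive)
            else:
                encoded[key] = item
        return encoded
    if isinstance(value, list) and recursive:
        return [encode_all_ids_sketch(item, encode_id, recursive) for item in value]
    return value

# Usage with a toy encoder:
# encode_all_ids_sketch({"id": 42, "history_id": 7, "steps": [{"id": 1}]},
#                       encode_id=lambda i: "encoded-%s" % i)
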
Example #3
    def create(self, trans, payload, **kwd):
        """
        POST /api/workflows

        Run or create workflows from the API.

        If installed_repository_file or from_history_id is specified, a new
        workflow will be created for this user. Otherwise, workflow_id must be
        specified and this API method will cause a workflow to execute.

        :param  installed_repository_file:   The path of a workflow to import. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   installed_repository_file:   str

        :param  workflow_id:                 An existing workflow id. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   workflow_id:                 str

        :param  parameters:                  If workflow_id is set - see _update_step_parameters()
        :type   parameters:                  dict

        :param  ds_map:                      If workflow_id is set - a dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
        :type   ds_map:                      dict

        :param  no_add_to_history:           If workflow_id is set - if present in the payload with any value, the input datasets will not be added to the selected history
        :type   no_add_to_history:           str

        :param  history:                     If workflow_id is set - optional history where to run the workflow, either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history. If not specified, the workflow will be run in a new unnamed history
        :type   history:                     str

        :param  replacement_params:          If workflow_id is set - an optional dictionary used when renaming datasets
        :type   replacement_params:          dict

        :param  from_history_id:             Id of history to extract a workflow from. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   from_history_id:             str

        :param  job_ids:                     If from_history_id is set - optional list of jobs to include when extracting a workflow from history
        :type   job_ids:                     str

        :param  dataset_ids:                 If from_history_id is set - optional list of HDA `hid`s corresponding to workflow inputs when extracting a workflow from history
        :type   dataset_ids:                 str

        :param  dataset_collection_ids:      If from_history_id is set - optional list of HDCA `hid`s corresponding to workflow inputs when extracting a workflow from history
        :type   dataset_collection_ids:      str

        :param  workflow_name:               If from_history_id is set - name of the workflow to create when extracting a workflow from history
        :type   workflow_name:               str

        :param  allow_tool_state_corrections:  If set to True, any Tool parameter changes will not prevent running the workflow; defaults to False
        :type   allow_tool_state_corrections:  bool
        """
        creation_keys = (
            'workflow_id',
            'installed_repository_file',
            'from_history_id',
            'shared_workflow_id',
            'workflow',
        )
        ways_to_create = set(creation_keys).intersection(payload)
        if len(ways_to_create) == 0:
            # Report the full list of accepted keys here; joining the (empty)
            # intersection would leave a blank list in the error message.
            message = "One parameter among - %s - must be specified" % ", ".join(
                creation_keys)
            raise exceptions.RequestParameterMissingException(message)

        if len(ways_to_create) > 1:
            message = "Only one parameter among - %s - must be specified" % ", ".join(
                ways_to_create)
            raise exceptions.RequestParameterInvalidException(message)

        if 'installed_repository_file' in payload:
            workflow_controller = trans.webapp.controllers['workflow']
            result = workflow_controller.import_workflow(trans=trans,
                                                         cntrller='api',
                                                         **payload)
            return result

        if 'from_history_id' in payload:
            from_history_id = payload.get('from_history_id')
            from_history_id = self.decode_id(from_history_id)
            history = self.history_manager.get_accessible(
                from_history_id, trans.user, current_history=trans.history)

            job_ids = map(self.decode_id, payload.get('job_ids', []))
            dataset_ids = payload.get('dataset_ids', [])
            dataset_collection_ids = payload.get('dataset_collection_ids', [])
            workflow_name = payload['workflow_name']
            stored_workflow = extract_workflow(
                trans=trans,
                user=trans.get_user(),
                history=history,
                job_ids=job_ids,
                dataset_ids=dataset_ids,
                dataset_collection_ids=dataset_collection_ids,
                workflow_name=workflow_name,
            )
            item = stored_workflow.to_dict(
                value_mapper={'id': trans.security.encode_id})
            item['url'] = url_for('workflow', id=item['id'])
            return item

        if 'shared_workflow_id' in payload:
            workflow_id = payload['shared_workflow_id']
            return self.__api_import_shared_workflow(trans, workflow_id,
                                                     payload)

        if 'workflow' in payload:
            return self.__api_import_new_workflow(trans, payload, **kwd)

        workflow_id = payload.get('workflow_id', None)
        if not workflow_id:
            message = "Invalid workflow_id specified."
            raise exceptions.RequestParameterInvalidException(message)

        # Get workflow + accessibility check.
        stored_workflow = self.__get_stored_accessible_workflow(
            trans, workflow_id)
        workflow = stored_workflow.latest_workflow

        run_config = build_workflow_run_config(trans, workflow, payload)
        history = run_config.target_history

        # invoke may throw MessageExceptions on tool errors, failure
        # to match up inputs, etc...
        outputs, invocation = invoke(
            trans=trans,
            workflow=workflow,
            workflow_run_config=run_config,
            populate_state=True,
        )
        trans.sa_session.flush()

        # Build legacy output - should probably include more information from
        # outputs.
        rval = {}
        rval['history'] = trans.security.encode_id(history.id)
        rval['outputs'] = []
        for step in workflow.steps:
            if step.type == 'tool' or step.type is None:
                for v in outputs[step.id].values():
                    rval['outputs'].append(trans.security.encode_id(v.id))

        # Newer version of this API just returns the invocation as a dict, to
        # facilitate migration - produce the newer style response and blend in
        # the older information.
        invocation_response = self.__encode_invocation(
            trans, invocation, step_details=kwd.get('step_details', False))
        invocation_response.update(rval)
        return invocation_response
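
To make the docstring above concrete, here is a hedged sketch of a payload for the "run an existing workflow" branch of this method. All ids are placeholder encoded ids, and the shape of the parameters entry is illustrative only (the docstring defers to _update_step_parameters() for its exact semantics).

run_existing_workflow_payload = {
    "workflow_id": "ebfb8f50c6abde6d",                        # placeholder encoded workflow id
    "ds_map": {
        "1": {"src": "hda", "id": "df7a1f0c02a5b08e"},        # input step 1 <- HistoryDatasetAssociation
        "2": {"src": "ld", "id": "33b43b4e7093c91f"},         # input step 2 <- LibraryDataset
    },
    "history": "hist_id=1cd8e2f6b131e891",                    # or just a name to create a new history
    "replacement_params": {"output_label": "Renamed output"},  # optional dataset renaming
    "parameters": {"3": {"some_tool_param": "some_value"}},    # illustrative shape only
}
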
Example #4
    def create(self, trans, payload, **kwd):
        """
        POST /api/workflows

        Run or create workflows from the API.

        If installed_repository_file or from_history_id is specified, a new
        workflow will be created for this user. Otherwise, workflow_id must be
        specified and this API method will cause a workflow to execute.

        :param  installed_repository_file:   The path of a workflow to import. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   installed_repository_file:   str

        :param  workflow_id:                 An existing workflow id. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   workflow_id:                 str

        :param  parameters:                  If workflow_id is set - see _update_step_parameters()
        :type   parameters:                  dict

        :param  ds_map:                      If workflow_id is set - a dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
        :type   ds_map:                      dict

        :param  no_add_to_history:           If workflow_id is set - if present in the payload with any value, the input datasets will not be added to the selected history
        :type   no_add_to_history:           str

        :param  history:                     If workflow_id is set - optional history where to run the workflow, either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history. If not specified, the workflow will be run in a new unnamed history
        :type   history:                     str

        :param  replacement_params:          If workflow_id is set - an optional dictionary used when renaming datasets
        :type   replacement_params:          dict

        :param  from_history_id:             Id of history to extract a workflow from. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   from_history_id:             str

        :param  job_ids:                     If from_history_id is set - optional list of jobs to include when extracting a workflow from history
        :type   job_ids:                     str

        :param  dataset_ids:                 If from_history_id is set - optional list of HDA `hid`s corresponding to workflow inputs when extracting a workflow from history
        :type   dataset_ids:                 str

        :param  dataset_collection_ids:      If from_history_id is set - optional list of HDCA `hid`s corresponding to workflow inputs when extracting a workflow from history
        :type   dataset_collection_ids:      str

        :param  workflow_name:               If from_history_id is set - name of the workflow to create when extracting a workflow from history
        :type   workflow_name:               str
        """
        creation_keys = ("workflow_id", "installed_repository_file", "from_history_id", "shared_workflow_id", "workflow")
        ways_to_create = set(creation_keys).intersection(payload)
        if len(ways_to_create) == 0:
            # Report the full list of accepted keys here; joining the (empty)
            # intersection would leave a blank list in the error message.
            message = "One parameter among - %s - must be specified" % ", ".join(creation_keys)
            raise exceptions.RequestParameterMissingException(message)

        if len(ways_to_create) > 1:
            message = "Only one parameter among - %s - must be specified" % ", ".join(ways_to_create)
            raise exceptions.RequestParameterInvalidException(message)

        if "installed_repository_file" in payload:
            workflow_controller = trans.webapp.controllers["workflow"]
            result = workflow_controller.import_workflow(trans=trans, cntrller="api", **payload)
            return result

        if "from_history_id" in payload:
            from_history_id = payload.get("from_history_id")
            from_history_id = self.decode_id(from_history_id)
            history = self.history_manager.get_accessible(from_history_id, trans.user, current_history=trans.history)

            job_ids = map(self.decode_id, payload.get("job_ids", []))
            dataset_ids = payload.get("dataset_ids", [])
            dataset_collection_ids = payload.get("dataset_collection_ids", [])
            workflow_name = payload["workflow_name"]
            stored_workflow = extract_workflow(
                trans=trans,
                user=trans.get_user(),
                history=history,
                job_ids=job_ids,
                dataset_ids=dataset_ids,
                dataset_collection_ids=dataset_collection_ids,
                workflow_name=workflow_name,
            )
            item = stored_workflow.to_dict(value_mapper={"id": trans.security.encode_id})
            item["url"] = url_for("workflow", id=item["id"])
            return item

        if "shared_workflow_id" in payload:
            workflow_id = payload["shared_workflow_id"]
            return self.__api_import_shared_workflow(trans, workflow_id, payload)

        if "workflow" in payload:
            return self.__api_import_new_workflow(trans, payload, **kwd)

        workflow_id = payload.get("workflow_id", None)
        if not workflow_id:
            message = "Invalid workflow_id specified."
            raise exceptions.RequestParameterInvalidException(message)

        # Get workflow + accessibility check.
        stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
        workflow = stored_workflow.latest_workflow

        run_config = build_workflow_run_config(trans, workflow, payload)
        history = run_config.target_history

        # invoke may throw MessageExceptions on tool errors, failure
        # to match up inputs, etc...
        outputs, invocation = invoke(
            trans=trans, workflow=workflow, workflow_run_config=run_config, populate_state=True
        )
        trans.sa_session.flush()

        # Build legacy output - should probably include more information from
        # outputs.
        rval = {}
        rval["history"] = trans.security.encode_id(history.id)
        rval["outputs"] = []
        for step in workflow.steps:
            if step.type == "tool" or step.type is None:
                for v in outputs[step.id].values():
                    rval["outputs"].append(trans.security.encode_id(v.id))

        # Newer version of this API just returns the invocation as a dict, to
        # facilitate migration - produce the newer style response and blend in
        # the older information.
        invocation_response = self.__encode_invocation(trans, invocation)
        invocation_response.update(rval)
        return invocation_response
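
As a companion sketch, a payload for the "extract a workflow from a history" branch could look like the following. The ids are placeholders; per the code above, workflow_name is required while the job/dataset id lists are optional, and dataset_ids/dataset_collection_ids are given as history item hids.

extract_workflow_payload = {
    "from_history_id": "1cd8e2f6b131e891",   # placeholder encoded history id
    "job_ids": ["52e496b945151ee8"],         # optional encoded job ids to turn into workflow steps
    "dataset_ids": [3, 4],                   # optional HDA hids to expose as workflow inputs
    "dataset_collection_ids": [7],           # optional HDCA hids to expose as collection inputs
    "workflow_name": "Workflow constructed from history",  # required by this code path
}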