Example #1
 def __to_pja( self, key, value, step ):
     if 'output_name' in value:
         output_name = value['output_name']
     else:
         output_name = None
     if 'action_arguments' in value:
         action_arguments = value['action_arguments']
     else:
         action_arguments = None
     return PostJobAction(value['action_type'], step, output_name, action_arguments)
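This helper turns one entry of a serialized post-job-actions mapping into a PostJobAction bound to a workflow step; action_type is required, while output_name and action_arguments are optional. Below is a minimal, self-contained sketch of the same dict-to-object pattern, using a hypothetical stand-in class instead of galaxy.model.PostJobAction (all names in the sketch are illustrative, not part of the project):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class StubPostJobAction:
        # Hypothetical stand-in for Galaxy's PostJobAction model, for illustration only.
        action_type: str
        step: object
        output_name: Optional[str] = None
        action_arguments: Optional[dict] = None

    def to_pja(value: dict, step) -> StubPostJobAction:
        # Same pattern as __to_pja above: required action_type, optional keys read with dict.get().
        return StubPostJobAction(
            action_type=value['action_type'],
            step=step,
            output_name=value.get('output_name'),
            action_arguments=value.get('action_arguments'),
        )

    print(to_pja({'action_type': 'EmailAction'}, step='some_step'))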
Example #2
 def save_to_step( self, step ):
     step.type = self.type
     step.tool_id = self.tool_id
     if self.tool:
         step.tool_version = self.get_tool_version()
         step.tool_inputs = self.tool.params_to_strings( self.state.inputs, self.trans.app )
     else:
         step.tool_version = None
         step.tool_inputs = None
     step.tool_errors = self.errors
     for k, v in self.post_job_actions.items():
         # action_type and step are required; output_name and action_arguments are optional.
         if 'output_name' in v:
             output_name = v['output_name']
         else:
             output_name = None
         if 'action_arguments' in v:
             action_arguments = v['action_arguments']
         else:
             action_arguments = None
         n_p = PostJobAction(v['action_type'], step, output_name, action_arguments)
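Note that n_p is never referenced after construction: the loop relies on the PostJobAction constructor itself to associate the new action with the given step (in Galaxy's model layer the constructor appears to set the action's workflow step, so building the object is enough to attach it). The required/optional key handling is the same pattern as __to_pja in Example #1.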
Example #3
File: tools.py  Project: msauria/galaxy
    def _create(self, trans: GalaxyWebTransaction, payload, **kwd):
        action = payload.get('action')
        if action == 'rerun':
            raise Exception("'rerun' action has been deprecated")

        # Get tool.
        tool_version = payload.get('tool_version')
        tool_id = payload.get('tool_id')
        tool_uuid = payload.get('tool_uuid')
        get_kwds = dict(
            tool_id=tool_id,
            tool_uuid=tool_uuid,
            tool_version=tool_version,
        )
        if tool_id is None and tool_uuid is None:
            raise exceptions.RequestParameterMissingException("Must specify either a tool_id or a tool_uuid.")

        tool = trans.app.toolbox.get_tool(**get_kwds)
        if not tool:
            log.debug(f"Not found tool with kwds [{get_kwds}]")
            raise exceptions.ToolMissingException('Tool not found.')
        if not tool.allow_user_access(trans.user):
            raise exceptions.ItemAccessibilityException('Tool not accessible.')
        if trans.app.config.user_activation_on:
            if not trans.user:
                log.warning("Anonymous user attempts to execute tool, but account activation is turned on.")
            elif not trans.user.active:
                log.warning(f"User \"{trans.user.email}\" attempts to execute tool, but account activation is turned on and user account is not active.")

        # Set running history from payload parameters.
        # History not set correctly as part of this API call for
        # dataset upload.
        history_id = payload.get('history_id')
        if history_id:
            decoded_id = self.decode_id(history_id)
            target_history = self.history_manager.get_owned(decoded_id, trans.user, current_history=trans.history)
        else:
            target_history = None

        # Set up inputs.
        inputs = payload.get('inputs', {})
        if not isinstance(inputs, dict):
            raise exceptions.RequestParameterInvalidException(f"inputs invalid {inputs}")

        # Find files coming in as multipart file data and add to inputs.
        for k, v in payload.items():
            if k.startswith('files_') or k.startswith('__files_'):
                inputs[k] = v

        # for inputs that are coming from the Library, copy them into the history
        self._patch_library_inputs(trans, inputs, target_history)

        # TODO: encode data ids and decode ids.
        # TODO: handle dbkeys
        params = util.Params(inputs, sanitize=False)
        incoming = params.__dict__

        # use_cached_job can be passed in via the top-level payload or among the tool inputs.
        # I think it should be a top-level parameter, but because the selector is implemented
        # as a regular tool parameter we accept both.
        use_cached_job = payload.get('use_cached_job', False) or util.string_as_bool(inputs.get('use_cached_job', 'false'))

        input_format = str(payload.get('input_format', 'legacy'))

        vars = tool.handle_input(trans, incoming, history=target_history, use_cached_job=use_cached_job, input_format=input_format)

        # TODO: check for errors and ensure that output dataset(s) are available.
        output_datasets = vars.get('out_data', [])
        rval: Dict[str, Any] = {'outputs': [], 'output_collections': [], 'jobs': [], 'implicit_collections': []}
        rval['produces_entry_points'] = tool.produces_entry_points
        job_errors = vars.get('job_errors', [])
        if job_errors:
            # If we are here - some jobs were successfully executed but some failed.
            rval['errors'] = job_errors

        outputs = rval['outputs']
        # TODO:?? poss. only return ids?
        for output_name, output in output_datasets:
            output_dict = output.to_dict()
            # add the output name back into the output data structure
            # so it's possible to figure out which newly created elements
            # correspond with which tool file outputs
            output_dict['output_name'] = output_name
            outputs.append(trans.security.encode_dict_ids(output_dict, skip_startswith="metadata_"))

        new_pja_flush = False
        for job in vars.get('jobs', []):
            rval['jobs'].append(self.encode_all_ids(trans, job.to_dict(view='collection'), recursive=True))
            if inputs.get('send_email_notification', False):
                # Unless an anonymous user is invoking this via the API it
                # should never be an option, but check and enforce that here
                if trans.user is None:
                    raise exceptions.ToolExecutionError("Anonymously run jobs cannot send an email notification.")
                else:
                    job_email_action = PostJobAction('EmailAction')
                    job.add_post_job_action(job_email_action)
                    new_pja_flush = True

        if new_pja_flush:
            trans.sa_session.flush()

        for output_name, collection_instance in vars.get('output_collections', []):
            history = target_history or trans.history
            output_dict = dictify_dataset_collection_instance(collection_instance, security=trans.security, parent=history)
            output_dict['output_name'] = output_name
            rval['output_collections'].append(output_dict)

        for output_name, collection_instance in vars.get('implicit_collections', {}).items():
            history = target_history or trans.history
            output_dict = dictify_dataset_collection_instance(collection_instance, security=trans.security, parent=history)
            output_dict['output_name'] = output_name
            rval['implicit_collections'].append(output_dict)

        return rval
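This _create handler backs Galaxy's tool-execution endpoint (POST /api/tools): it resolves the tool by tool_id or tool_uuid, checks access and account activation, resolves the target history, collects inputs (including multipart file fields), runs tool.handle_input(), and returns the resulting outputs, collections, and jobs. A hedged client-side sketch of driving it over HTTP follows; the URL, API key, tool id, history id, and input names are placeholders and assume a reachable Galaxy instance:

    import requests

    GALAXY_URL = "https://galaxy.example.org"   # placeholder Galaxy instance
    API_KEY = "YOUR_API_KEY"                    # placeholder API key

    payload = {
        "tool_id": "cat1",                                      # placeholder tool id
        "history_id": "abc123",                                 # placeholder encoded history id
        "inputs": {"input1": {"src": "hda", "id": "def456"}},   # placeholder tool inputs
        "use_cached_job": False,
        "input_format": "legacy",
    }

    # POST to the tools endpoint; server-side, the handler above resolves the tool,
    # validates access, builds `incoming` from the inputs, and calls tool.handle_input().
    resp = requests.post(
        f"{GALAXY_URL}/api/tools",
        json=payload,
        headers={"x-api-key": API_KEY},
        timeout=60,
    )
    resp.raise_for_status()
    result = resp.json()
    # Mirrors the rval dict built above: 'outputs', 'output_collections', 'jobs', 'implicit_collections'.
    print(result["jobs"], result["outputs"])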
Example #4
    def _create(self, trans: GalaxyWebTransaction, payload, **kwd):
        if trans.user_is_bootstrap_admin:
            raise exceptions.RealUserRequiredException(
                "Only real users can execute tools or run jobs.")
        action = payload.get('action')
        if action == 'rerun':
            raise Exception("'rerun' action has been deprecated")

        # Get tool.
        tool_version = payload.get('tool_version')
        tool_id = payload.get('tool_id')
        tool_uuid = payload.get('tool_uuid')
        get_kwds = dict(
            tool_id=tool_id,
            tool_uuid=tool_uuid,
            tool_version=tool_version,
        )
        if tool_id is None and tool_uuid is None:
            raise exceptions.RequestParameterMissingException(
                "Must specify either a tool_id or a tool_uuid.")

        tool = trans.app.toolbox.get_tool(**get_kwds)
        if not tool:
            log.debug(f"Not found tool with kwds [{get_kwds}]")
            raise exceptions.ToolMissingException('Tool not found.')
        if not tool.allow_user_access(trans.user):
            raise exceptions.ItemAccessibilityException('Tool not accessible.')
        if trans.app.config.user_activation_on:
            if not trans.user:
                log.warning(
                    "Anonymous user attempts to execute tool, but account activation is turned on."
                )
            elif not trans.user.active:
                log.warning(
                    f"User \"{trans.user.email}\" attempts to execute tool, but account activation is turned on and user account is not active."
                )

        # Set running history from payload parameters.
        # History not set correctly as part of this API call for
        # dataset upload.
        history_id = payload.get('history_id')
        if history_id:
            decoded_id = self.decode_id(history_id)
            target_history = self.history_manager.get_owned(
                decoded_id, trans.user, current_history=trans.history)
        else:
            target_history = None

        # Set up inputs.
        inputs = payload.get('inputs', {})
        if not isinstance(inputs, dict):
            raise exceptions.RequestParameterInvalidException(
                f"inputs invalid {inputs}")

        # Find files coming in as multipart file data and add to inputs.
        for k, v in payload.items():
            if k.startswith('files_') or k.startswith('__files_'):
                inputs[k] = v

        # for inputs that are coming from the Library, copy them into the history
        self._patch_library_inputs(trans, inputs, target_history)

        # TODO: encode data ids and decode ids.
        # TODO: handle dbkeys
        params = util.Params(inputs, sanitize=False)
        incoming = params.__dict__

        # use_cached_job can be passed in via the top-level payload or among the tool inputs.
        # I think it should be a top-level parameter, but because the selector is implemented
        # as a regular tool parameter we accept both.
        use_cached_job = payload.get('use_cached_job',
                                     False) or util.string_as_bool(
                                         inputs.get('use_cached_job', 'false'))

        input_format = str(payload.get('input_format', 'legacy'))

        vars = tool.handle_input(trans,
                                 incoming,
                                 history=target_history,
                                 use_cached_job=use_cached_job,
                                 input_format=input_format)

        new_pja_flush = False
        for job in vars.get('jobs', []):
            if inputs.get('send_email_notification', False):
                # Unless an anonymous user is invoking this via the API it
                # should never be an option, but check and enforce that here
                if trans.user is None:
                    raise exceptions.ToolExecutionError(
                        "Anonymously run jobs cannot send an email notification."
                    )
                else:
                    job_email_action = PostJobAction('EmailAction')
                    job.add_post_job_action(job_email_action)
                    new_pja_flush = True

        if new_pja_flush:
            trans.sa_session.flush()

        return self._handle_inputs_output_to_api_response(
            trans, tool, target_history, vars)
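Example #4 is another variant of the same _create handler: it additionally rejects bootstrap-admin users up front (RealUserRequiredException) and, instead of assembling the rval dict inline as in Example #3, delegates serialization of outputs, collections, and job errors to _handle_inputs_output_to_api_response. The tool lookup, access checks, history resolution, input handling, and the EmailAction post-job-action logic are otherwise the same.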