Example No. 1
    def set_compute_environment(self, compute_environment, get_special=None):
        """
        Set up the compute environment and establish the outline of the param_dict
        for evaluating command and config Cheetah templates.
        """
        self.compute_environment = compute_environment

        job = self.job
        incoming = {p.name: p.value for p in job.parameters}
        incoming = self.tool.params_from_strings(incoming, self.app)

        # Full parameter validation
        request_context = WorkRequestContext(app=self.app, user=self._user, history=self._history)

        def validate_inputs(input, value, context, **kwargs):
            value = input.from_json(value, request_context, context)
            input.validate(value, request_context)
        visit_input_values(self.tool.inputs, incoming, validate_inputs)

        # Restore input / output data lists
        inp_data, out_data, out_collections = job.io_dicts()

        if get_special:

            # Set up output dataset association for export history jobs. Because job
            # uses a Dataset rather than an HDA or LDA, it's necessary to set up a
            # fake dataset association that provides the needed attributes for
            # preparing a job.
            class FakeDatasetAssociation(object):
                fake_dataset_association = True

                def __init__(self, dataset=None):
                    self.dataset = dataset
                    self.file_name = dataset.file_name
                    self.metadata = dict()

            special = get_special()
            if special:
                out_data["output_file"] = FakeDatasetAssociation(dataset=special.dataset)

        # These can be passed on the command line if wanted as $__user_*__
        incoming.update(model.User.user_template_environment(job.history and job.history.user))

        # Build params, done before hook so hook can use
        param_dict = self.build_param_dict(
            incoming,
            inp_data,
            out_data,
            output_collections=out_collections,
        )

        # Certain tools require tasks to be completed prior to job execution
        # ( this used to be performed in the "exec_before_job" hook, but hooks are deprecated ).
        self.tool.exec_before_job(self.app, inp_data, out_data, param_dict)
        # Run the before queue ("exec_before_job") hook
        self.tool.call_hook('exec_before_job', self.app, inp_data=inp_data,
                            out_data=out_data, tool=self.tool, param_dict=incoming)

        self.param_dict = param_dict
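
A hedged sketch of how this evaluator might be driven; the ToolEvaluator and SharedComputeEnvironment names and constructor arguments below are assumptions for illustration, not a confirmed Galaxy API:

    # Hypothetical driver; names and arguments are illustrative assumptions.
    evaluator = ToolEvaluator(app, tool, job, local_working_directory)
    evaluator.set_compute_environment(SharedComputeEnvironment(job_wrapper=job_wrapper, job=job))
    # After the call, evaluator.param_dict holds the template namespace used
    # to render the tool's command and config Cheetah templates.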
Example No. 2
    def search(self, trans, payload, **kwd):
        """
        search( trans, payload )
        * POST /api/jobs/search:
            return jobs for current user

        :type   payload: dict
        :param  payload: Dictionary containing a description of the requested job. This is in the same format
            a request to POST /api/tools would take to initiate a job.

        :rtype:     list
        :returns:   list of dictionaries containing summary job information of the jobs that match the requested job run

        This method is designed to scan the list of previously run jobs and find records of jobs that had
        the exact same input parameters and datasets. This can be used to minimize repeated work by simply
        recycling the old results.
        """
        tool_id = payload.get('tool_id')
        if tool_id is None:
            raise exceptions.ObjectAttributeMissingException("No tool id")
        tool = trans.app.toolbox.get_tool(tool_id)
        if tool is None:
            raise exceptions.ObjectNotFound("Requested tool not found")
        if 'inputs' not in payload:
            raise exceptions.ObjectAttributeMissingException(
                "No inputs defined")
        inputs = payload.get('inputs', {})
        # Find files coming in as multipart file data and add to inputs.
        for k, v in payload.items():
            if k.startswith('files_') or k.startswith('__files_'):
                inputs[k] = v
        request_context = WorkRequestContext(app=trans.app,
                                             user=trans.user,
                                             history=trans.history)
        all_params, all_errors, _, _ = tool.expand_incoming(
            trans=trans, incoming=inputs, request_context=request_context)
        if any(all_errors):
            return []
        params_dump = [
            tool.params_to_strings(param, self.app, nested=True)
            for param in all_params
        ]
        jobs = []
        for param_dump, param in zip(params_dump, all_params):
            job = self.job_search.by_tool_input(trans=trans,
                                                tool_id=tool_id,
                                                tool_version=tool.version,
                                                param=param,
                                                param_dump=param_dump,
                                                job_state=payload.get('state'))
            if job:
                jobs.append(job)
        return [
            self.encode_all_ids(trans, single_job.to_dict('element'), True)
            for single_job in jobs
        ]
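
A hedged client-side sketch of calling this endpoint; the Galaxy URL, API key, tool id, and input names below are placeholder assumptions:

    import requests

    GALAXY_URL = "https://galaxy.example.org"    # placeholder
    payload = {
        "tool_id": "cat1",                        # tool whose past runs to match
        "inputs": {"input1": {"src": "hda", "id": "f2db41e1fa331b3e"}},
        "state": "ok",                            # optionally restrict by job state
    }
    response = requests.post(
        f"{GALAXY_URL}/api/jobs/search",
        json=payload,
        headers={"x-api-key": "YOUR_API_KEY"},    # assumes API-key auth
    )
    response.raise_for_status()
    for job in response.json():                   # an empty list means no match
        print(job["id"], job["state"])

When a match is found, the returned job records can be reused instead of re-running the tool.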
Example No. 3
    def set_compute_environment(self, compute_environment, get_special=None):
        """
        Set up the compute environment and establish the outline of the param_dict
        for evaluating command and config Cheetah templates.
        """
        self.compute_environment = compute_environment

        job = self.job
        incoming = {p.name: p.value for p in job.parameters}
        incoming = self.tool.params_from_strings(incoming, self.app)

        # Full parameter validation
        request_context = WorkRequestContext(app=self.app,
                                             user=self._user,
                                             history=self._history)
        self.request_context = request_context

        def validate_inputs(input, value, context, **kwargs):
            value = input.from_json(value, request_context, context)
            input.validate(value, request_context)

        visit_input_values(self.tool.inputs, incoming, validate_inputs)

        # Restore input / output data lists
        inp_data, out_data, out_collections = job.io_dicts()

        if get_special:
            special = get_special()
            if special:
                out_data["output_file"] = special.fda

        # These can be passed on the command line if wanted as $__user_*__
        incoming.update(
            model.User.user_template_environment(job.history
                                                 and job.history.user))

        # Build params, done before hook so hook can use
        param_dict = self.build_param_dict(
            incoming,
            inp_data,
            out_data,
            output_collections=out_collections,
        )

        # Certain tools require tasks to be completed prior to job execution
        # ( this used to be performed in the "exec_before_job" hook, but hooks are deprecated ).
        self.tool.exec_before_job(self.app, inp_data, out_data, param_dict)
        # Run the before queue ("exec_before_job") hook
        self.tool.call_hook('exec_before_job',
                            self.app,
                            inp_data=inp_data,
                            out_data=out_data,
                            tool=self.tool,
                            param_dict=incoming)

        self.param_dict = param_dict
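
The validate_inputs callback above follows the visitor idiom: visit_input_values walks the tool's (possibly nested) input tree and invokes the callback once per parameter. A minimal sketch with a logging callback, mirroring the method context above (illustrative only):

    # Illustrative callback; any callable with this signature can be passed.
    def log_inputs(input, value, context, **kwargs):
        print(f"param {input.name!r} = {value!r}")

    visit_input_values(self.tool.inputs, incoming, log_inputs)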
Example No. 4
    def set_compute_environment(self, compute_environment: ComputeEnvironment, get_special: Optional[Callable] = None):
        """
        Set up the compute environment and establish the outline of the param_dict
        for evaluating command and config Cheetah templates.
        """
        self.compute_environment = compute_environment

        job = self.job
        incoming = {p.name: p.value for p in job.parameters}
        incoming = self.tool.params_from_strings(incoming, self.app)

        # Full parameter validation
        request_context = WorkRequestContext(app=self.app, user=self._user, history=self._history)
        self.file_sources_dict = compute_environment.get_file_sources_dict()

        def validate_inputs(input, value, context, **kwargs):
            value = input.from_json(value, request_context, context)
            input.validate(value, request_context)
        visit_input_values(self.tool.inputs, incoming, validate_inputs)

        # Restore input / output data lists
        inp_data, out_data, out_collections = job.io_dicts()

        if get_special:
            special = get_special()
            if special:
                out_data["output_file"] = special

        # These can be passed on the command line if wanted as $__user_*__
        incoming.update(model.User.user_template_environment(self._user))

        # Build params, done before hook so hook can use
        self.param_dict = self.build_param_dict(
            incoming,
            inp_data,
            out_data,
            output_collections=out_collections,
        )
        self.execute_tool_hooks(inp_data=inp_data, out_data=out_data, incoming=incoming)
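
A hedged note on the $__user_*__ variables mentioned above: user_template_environment returns a plain dict that is merged into the template namespace, so a tool's Cheetah command template can reference the values directly. The exact keys are an assumption based on that naming convention:

    # Assumed keys, following the $__user_*__ convention noted in the comment.
    env = model.User.user_template_environment(self._user)
    # e.g. a command template might use: --email "$__user_email__"
    print(sorted(env))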
Example No. 5
    def work_context(self):
        from galaxy.work.context import WorkRequestContext
        return WorkRequestContext(self.app, user=self.user)
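
For comparison with the earlier examples, a sketch of building the same context with a history attached; trans is a placeholder standing in for any object that exposes app, user, and history:

    from galaxy.work.context import WorkRequestContext

    # `trans` is a placeholder, mirroring the construction in Example No. 2.
    request_context = WorkRequestContext(app=trans.app, user=trans.user, history=trans.history)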