Example #1
 def __to_incoming(self, state, **kwds):
     new_incoming = {}
     params_to_incoming(new_incoming, self.tool.inputs, state.inputs,
                        self.app)
     new_incoming["tool_state"] = self.__state_to_string(state)
     new_incoming.update(kwds)
     return new_incoming
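
All of these examples follow the same pattern: start from an empty dictionary and let params_to_incoming flatten a tool's nested state inputs into flat, prefix-qualified parameter names. The snippet below is a minimal, self-contained sketch of that flattening behaviour, not Galaxy's actual implementation; the helper name is hypothetical, and the "|" separator is inferred from the key patterns visible in the examples (e.g. "input|__multirun__").

# Hypothetical sketch of the flattening that params_to_incoming performs
# (tool input definitions and HTML conversion omitted).
def flatten_state_sketch(incoming, input_values, prefix=""):
    for name, value in input_values.items():
        key = prefix + name
        if isinstance(value, dict):
            # Grouping parameters recurse with a "|"-joined prefix.
            flatten_state_sketch(incoming, value, key + "|")
        else:
            incoming[key] = value

incoming = {}
flatten_state_sketch(incoming, {"query": "chr1.fa", "opts": {"gap": 5}})
assert incoming == {"query": "chr1.fa", "opts|gap": 5}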
Example #2
 def __to_incoming( self, state, **kwds ):
     new_incoming = {}
     params_to_incoming( new_incoming, self.tool.inputs, state.inputs, self.app )
     # Copy meta parameters over lost by params_to_incoming...
     for key, value in state.inputs.iteritems():
         if key.endswith( "|__multirun__" ):
             new_incoming[ key ] = value
     new_incoming[ "tool_state" ] = self.__state_to_string( state )
     new_incoming.update( kwds )
     return new_incoming
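
Example #2 differs from Example #1 only in the loop that restores "|__multirun__" meta keys: those keys are not declared in tool.inputs, so params_to_incoming drops them, and the raw state has to be scanned to carry them over. A small illustration of that repair step, using hypothetical values:

# Hypothetical state: a raw state dict containing a multirun meta key
# that a declared-inputs-only flattening would have dropped.
state_inputs = {"query": "a.fa", "input|__multirun__": [3, 7]}
new_incoming = {"query": "a.fa"}  # result after params_to_incoming

for key, value in state_inputs.items():  # .iteritems() in the Python 2 original
    if key.endswith("|__multirun__"):
        new_incoming[key] = value

assert new_incoming["input|__multirun__"] == [3, 7]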
Example #3
 def __to_incoming(self, state, **kwds):
     new_incoming = {}
     params_to_incoming(new_incoming, self.tool.inputs, state.inputs,
                        self.app)
     # Copy meta parameters over lost by params_to_incoming...
     for key, value in state.inputs.iteritems():
         if key.endswith("|__multirun__"):
             new_incoming[key] = value
     new_incoming["tool_state"] = self.__state_to_string(state)
     new_incoming.update(kwds)
     return new_incoming
Example #4
def render_body(context, **pageargs):
    context.caller_stack._push_frame()
    try:
        __M_locals = __M_dict_builtin(pageargs=pageargs)
        h = context.get("h", UNDEFINED)
        tool = context.get("tool", UNDEFINED)
        module = context.get("module", UNDEFINED)
        job = context.get("job", UNDEFINED)
        trans = context.get("trans", UNDEFINED)
        self = context.get("self", UNDEFINED)
        __M_writer = context.writer()
        # SOURCE LINE 1

        ## TEMPORARY: create tool dictionary in mako while both tool forms are in use.
        ## This avoids making two separate requests since the classic form requires the mako anyway.
        from galaxy.tools.parameters import params_to_incoming

        incoming = {}
        params_to_incoming(incoming, tool.inputs, module.state.inputs, trans.app, to_html=False)
        self.form_config = tool.to_json(trans, incoming, is_workflow=True)
        self.form_config.update(
            {
                "id": tool.id,
                "job_id": trans.security.encode_id(job.id) if job else None,
                "history_id": trans.security.encode_id(trans.history.id),
                "container": "#right-content",
            }
        )

        __M_locals_builtin_stored = __M_locals_builtin()
        __M_locals.update(
            __M_dict_builtin(
                [
                    (__M_key, __M_locals_builtin_stored[__M_key])
                    for __M_key in ["params_to_incoming", "incoming"]
                    if __M_key in __M_locals_builtin_stored
                ]
            )
        )
        # SOURCE LINE 14
        __M_writer(u"\n")
        # SOURCE LINE 15
        __M_writer(unicode(h.dumps(self.form_config)))
        return ""
    finally:
        context.caller_stack._pop_frame()
Example #5
 def __to_incoming(self, state, **kwds):
     new_incoming = {}
     params_to_incoming(new_incoming, self.tool.inputs, state.inputs, self.app)
     new_incoming["tool_state"] = self.__state_to_string(state)
     new_incoming.update(kwds)
     return new_incoming
Example #6
 def _workflow_to_dict_run(self, trans, stored):
     """
     Builds workflow dictionary used by run workflow form
     """
     workflow = stored.latest_workflow
     if len(workflow.steps) == 0:
         raise exceptions.MessageException(
             'Workflow cannot be run because it does not have any steps.')
     if attach_ordered_steps(workflow, workflow.steps):
         raise exceptions.MessageException(
             'Workflow cannot be run because it contains cycles.')
     trans.workflow_building_mode = workflow_building_modes.USE_HISTORY
     module_injector = WorkflowModuleInjector(trans)
     has_upgrade_messages = False
     step_version_changes = []
     missing_tools = []
     errors = {}
     for step in workflow.steps:
         try:
             module_injector.inject(step,
                                    steps=workflow.steps,
                                    exact_tools=False)
         except exceptions.ToolMissingException:
             if step.tool_id not in missing_tools:
                 missing_tools.append(step.tool_id)
             continue
         if step.upgrade_messages:
             has_upgrade_messages = True
         if step.type == 'tool' or step.type is None:
             if step.module.version_changes:
                 step_version_changes.extend(step.module.version_changes)
             step_errors = step.module.get_errors()
             if step_errors:
                 errors[step.id] = step_errors
     if missing_tools:
         workflow.annotation = self.get_item_annotation_str(
             trans.sa_session, trans.user, workflow)
         raise exceptions.MessageException('Following tools missing: %s' %
                                           ', '.join(missing_tools))
     workflow.annotation = self.get_item_annotation_str(
         trans.sa_session, trans.user, workflow)
     step_order_indices = {}
     for step in workflow.steps:
         step_order_indices[step.id] = step.order_index
     step_models = []
     for i, step in enumerate(workflow.steps):
         step_model = None
         if step.type == 'tool':
             incoming = {}
             tool = trans.app.toolbox.get_tool(
                 step.tool_id, tool_version=step.tool_version)
             params_to_incoming(incoming, tool.inputs, step.state.inputs,
                                trans.app)
             step_model = tool.to_json(
                 trans,
                 incoming,
                 workflow_building_mode=workflow_building_modes.USE_HISTORY)
             step_model['post_job_actions'] = [{
                 'short_str': ActionBox.get_short_str(pja),
                 'action_type': pja.action_type,
                 'output_name': pja.output_name,
                 'action_arguments': pja.action_arguments
             } for pja in step.post_job_actions]
         else:
             inputs = step.module.get_runtime_inputs(
                 connections=step.output_connections)
             step_model = {
                 'inputs': [input.to_dict(trans) for input in inputs.values()]
             }
         step_model['step_type'] = step.type
         step_model['step_label'] = step.label
         step_model['step_name'] = step.module.get_name()
         step_model['step_version'] = step.module.get_version()
         step_model['step_index'] = step.order_index
         step_model['output_connections'] = [{
             'input_step_index': step_order_indices.get(oc.input_step_id),
             'output_step_index': step_order_indices.get(oc.output_step_id),
             'input_name': oc.input_name,
             'output_name': oc.output_name
         } for oc in step.output_connections]
         if step.annotations:
             step_model['annotation'] = step.annotations[0].annotation
         if step.upgrade_messages:
             step_model['messages'] = step.upgrade_messages
         step_models.append(step_model)
     return {
         'id': trans.app.security.encode_id(stored.id),
         'history_id': trans.app.security.encode_id(trans.history.id) if trans.history else None,
         'name': stored.name,
         'steps': step_models,
         'step_version_changes': step_version_changes,
         'has_upgrade_messages': has_upgrade_messages,
         'workflow_resource_parameters': self._workflow_resource_parameters(trans, stored, workflow),
     }
Example #7
    def rerun(self, trans, id=None, from_noframe=None, **kwd):
        """
        Given a HistoryDatasetAssociation id, find the job that created
        the dataset, extract the parameters, and display the appropriate tool
        form with parameters already filled in.
        """
        if not id:
            error("'id' parameter is required")
        try:
            id = int(id)

        except:
            # it's not an un-encoded id, try to parse as encoded
            try:
                id = trans.security.decode_id(id)
            except:
                error("Invalid value for 'id' parameter")

        # Get the dataset object
        data = trans.sa_session.query(
            trans.app.model.HistoryDatasetAssociation).get(id)
        #only allow rerunning if user is allowed access to the dataset.
        if not (trans.user_is_admin()
                or trans.app.security_agent.can_access_dataset(
                    trans.get_current_user_roles(), data.dataset)):
            error("You are not allowed to access this dataset")
        # Get the associated job, if any.
        job = data.creating_job
        if not job:
            raise Exception(
                "Failed to get job information for dataset hid %d" % data.hid)
        # Get the tool object
        tool_id = job.tool_id
        tool_version = job.tool_version
        try:
            tool_version_select_field, tools, tool = self.__get_tool_components(
                tool_id,
                tool_version=tool_version,
                get_loaded_tools_by_lineage=False,
                set_selected=True)
            if (tool.id == job.tool_id or tool.old_id == job.tool_id) and tool.version == job.tool_version:
                tool_id_version_message = ''
            elif tool.id == job.tool_id:
                if job.tool_version is None:
                    # For some reason jobs don't always keep track of the tool version.
                    tool_id_version_message = ''
                else:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    if len(tools) > 1:
                        tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                    else:
                        tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
            else:
                if len(tools) > 1:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                else:
                    tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % (
                        job.tool_id, job.tool_version)
                    tool_id_version_message += 'currently available.  You can rerun the job with this tool, which is a derivation of the original tool.'
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is expected so not an exception.
            tool_id_version_message = ''
            error(
                "This dataset was created by an obsolete tool (%s). Can't re-run."
                % tool_id)
        # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
        if not tool.is_workflow_compatible:
            error("The '%s' tool does not currently support rerunning." %
                  tool.name)
        # Get the job's parameters
        try:
            params_objects = job.get_param_values(trans.app,
                                                  ignore_errors=True)
        except:
            raise Exception("Failed to get parameters for dataset id %d " %
                            data.id)
        upgrade_messages = tool.check_and_update_param_values(
            params_objects, trans, update_values=False)
        # Need to remap dataset parameters. Job parameters point to original
        # dataset used; parameter should be the analogous dataset in the
        # current history.
        history = trans.get_history()
        hda_source_dict = {}  # Mapping from HDA in history to source HDAs.
        for hda in history.datasets:
            source_hda = hda.copied_from_history_dataset_association
            while source_hda:  #should this check library datasets as well?
                #FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories,
                #but is still less than perfect when eg individual datasets are copied between histories
                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                    hda_source_dict[source_hda] = hda
                source_hda = source_hda.copied_from_history_dataset_association
        # Unpack unvalidated values to strings, they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow)
        #This needs to be done recursively through grouping parameters
        def rerun_callback(input, value, prefixed_name, prefixed_label):
            if isinstance(value, UnvalidatedValue):
                return str(value)
            if isinstance(input, DataToolParameter):
                if isinstance(value, list):
                    values = []
                    for val in value:
                        if is_hashable(val):
                            if val in history.datasets:
                                values.append(val)
                            elif val in hda_source_dict:
                                values.append(hda_source_dict[val])
                    return values
                if is_hashable(value) and value not in history.datasets and value in hda_source_dict:
                    return hda_source_dict[value]

        visit_input_values(tool.inputs, params_objects, rerun_callback)
        # Create a fake tool_state for the tool, with the parameters values
        state = tool.new_state(trans)
        state.inputs = params_objects
        # If the job failed and has dependencies, allow dependency remap
        if job.state == job.states.ERROR:
            try:
                if [hda.dependent_jobs for hda in [jtod.dataset for jtod in job.output_datasets] if hda.dependent_jobs]:
                    state.rerun_remap_job_id = trans.app.security.encode_id(
                        job.id)
            except:
                # Job has no outputs?
                pass
        #create an incoming object from the original job's dataset-modified param objects
        incoming = {}
        params_to_incoming(incoming, tool.inputs, params_objects, trans.app)
        incoming["tool_state"] = galaxy.util.object_to_string(
            state.encode(tool, trans.app))
        template, vars = tool.handle_input(
            trans, incoming,
            old_errors=upgrade_messages)  #update new state with old parameters
        # Is the "add frame" stuff neccesary here?
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template(
            template,
            history=history,
            toolbox=self.get_toolbox(),
            tool_version_select_field=tool_version_select_field,
            tool=tool,
            util=galaxy.util,
            add_frame=add_frame,
            tool_id_version_message=tool_id_version_message,
            **vars)
Example #8
    def rerun( self, trans, id=None, from_noframe=None, **kwd ):
        """
        Given a HistoryDatasetAssociation id, find the job that created
        the dataset, extract the parameters, and display the appropriate tool
        form with parameters already filled in.
        """
        if not id:
            error( "'id' parameter is required" );
        try:
            id = int( id )

        except:
            # it's not an un-encoded id, try to parse as encoded
            try:
                id = trans.security.decode_id( id )
            except:
                error( "Invalid value for 'id' parameter" )

        # Get the dataset object
        data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
        #only allow rerunning if user is allowed access to the dataset.
        if not ( trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ) ):
            error( "You are not allowed to access this dataset" )
        # Get the associated job, if any. 
        job = data.creating_job
        if not job:
            raise Exception("Failed to get job information for dataset hid %d" % data.hid)
        # Get the tool object
        tool_id = job.tool_id
        tool_version = job.tool_version
        try:
            tool_version_select_field, tools, tool = self.__get_tool_components( tool_id,
                                                                                 tool_version=tool_version,
                                                                                 get_loaded_tools_by_lineage=False,
                                                                                 set_selected=True )
            if ( tool.id == job.tool_id or tool.old_id == job.tool_id ) and tool.version == job.tool_version:
                tool_id_version_message = ''
            elif tool.id == job.tool_id:
                if job.tool_version is None:
                    # For some reason jobs don't always keep track of the tool version.
                    tool_id_version_message = ''
                else:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    if len( tools ) > 1:
                        tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                    else:
                        tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
            else:
                if len( tools ) > 1:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                else:
                    tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % ( job.tool_id, job.tool_version )
                    tool_id_version_message += 'currently available.  You can rerun the job with this tool, which is a derivation of the original tool.'
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is expected so not an exception.
            tool_id_version_message = ''
            error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
        # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
        if not tool.is_workflow_compatible:
            error( "The '%s' tool does not currently support rerunning." % tool.name )
        # Get the job's parameters
        try:
            params_objects = job.get_param_values( trans.app, ignore_errors = True )
        except:
            raise Exception( "Failed to get parameters for dataset id %d " % data.id )
        upgrade_messages = tool.check_and_update_param_values( params_objects, trans, update_values=False )
        # Need to remap dataset parameters. Job parameters point to original 
        # dataset used; parameter should be the analogous dataset in the
        # current history.
        history = trans.get_history()
        hda_source_dict = {} # Mapping from HDA in history to source HDAs.
        for hda in history.datasets:
            source_hda = hda.copied_from_history_dataset_association
            while source_hda:  # should this check library datasets as well?
                #FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories, 
                #but is still less than perfect when eg individual datasets are copied between histories
                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                    hda_source_dict[ source_hda ] = hda
                source_hda = source_hda.copied_from_history_dataset_association
        # Unpack unvalidated values to strings, they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow)
        #This needs to be done recursively through grouping parameters
        def rerun_callback( input, value, prefixed_name, prefixed_label ):
            if isinstance( value, UnvalidatedValue ):
                return str( value )
            if isinstance( input, DataToolParameter ):
                if isinstance(value, list):
                    values = []
                    for val in value:
                        if is_hashable( val ):
                            if val in history.datasets:
                                values.append( val )
                            elif val in hda_source_dict:
                                values.append( hda_source_dict[ val ])
                    return values
                if is_hashable( value ) and value not in history.datasets and value in hda_source_dict:
                    return hda_source_dict[ value ]
        visit_input_values( tool.inputs, params_objects, rerun_callback )
        # Create a fake tool_state for the tool, with the parameters values
        state = tool.new_state( trans )
        state.inputs = params_objects
        # If the job failed and has dependencies, allow dependency remap
        if job.state == job.states.ERROR:
            try:
                if [ hda.dependent_jobs for hda in [ jtod.dataset for jtod in job.output_datasets ] if hda.dependent_jobs ]:
                    state.rerun_remap_job_id = trans.app.security.encode_id(job.id)
            except:
                # Job has no outputs?
                pass
        #create an incoming object from the original job's dataset-modified param objects
        incoming = {}
        params_to_incoming( incoming, tool.inputs, params_objects, trans.app )
        incoming[ "tool_state" ] = galaxy.util.object_to_string( state.encode( tool, trans.app ) )
        template, vars = tool.handle_input( trans, incoming, old_errors=upgrade_messages ) #update new state with old parameters
        # Is the "add frame" stuff neccesary here?
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template( template,
                                    history=history,
                                    toolbox=self.get_toolbox(),
                                    tool_version_select_field=tool_version_select_field,
                                    tool=tool,
                                    util=galaxy.util,
                                    add_frame=add_frame,
                                    tool_id_version_message=tool_id_version_message,
                                    **vars )
Example #9
 def get_config_form(self):
     if self.tool:
         self.add_dummy_datasets()
         incoming = {}
         params_to_incoming(incoming, self.tool.inputs, self.state.inputs, self.trans.app)
         return self.tool.to_json(self.trans, incoming, workflow_building_mode=True)
Example #10
class ToolRunner(BaseUIController):

    # Hack to get biomart to work, ideally, we could pass tool_id to biomart and receive it back
    @web.expose
    def biomart(self, trans, tool_id='biomart', **kwd):
        """Catches the tool id and redirects as needed"""
        return self.index(trans, tool_id=tool_id, **kwd)

    # test to get hapmap to work, ideally, we could pass tool_id to hapmap biomart and receive it back
    @web.expose
    def hapmapmart(self, trans, tool_id='hapmapmart', **kwd):
        """Catches the tool id and redirects as needed"""
        return self.index(trans, tool_id=tool_id, **kwd)

    @web.expose
    def default(self, trans, tool_id=None, **kwd):
        """Catches the tool id and redirects as needed"""
        return self.index(trans, tool_id=tool_id, **kwd)

    def __get_tool_components(self,
                              tool_id,
                              tool_version=None,
                              get_loaded_tools_by_lineage=False,
                              set_selected=False):
        return self.get_toolbox().get_tool_components(
            tool_id, tool_version, get_loaded_tools_by_lineage, set_selected)

    @web.expose
    def index(self, trans, tool_id=None, from_noframe=None, **kwd):
        # No tool id passed, redirect to main page
        if tool_id is None:
            return trans.response.send_redirect(
                url_for(controller="root", action="welcome"))
        # When the tool form is initially loaded, the received kwd will not include a 'refresh'
        # entry (which only is included when another option is selected in the tool_version_select_field),
        # so the default selected option should be the most recent version of the tool.  The following
        # check will make sure this occurs.
        refreshed_on_change = kwd.get('refresh', False)
        tool_version_select_field, tools, tool = self.__get_tool_components(
            tool_id,
            tool_version=None,
            get_loaded_tools_by_lineage=False,
            set_selected=refreshed_on_change)
        # No tool matching the tool id, display an error (shouldn't happen)
        if not tool or not tool.allow_user_access(trans.user):
            log.error("index called with tool id '%s' but no such tool exists",
                      tool_id)
            trans.log_event("Tool id '%s' does not exist" % tool_id)
            trans.response.status = 404
            return trans.show_error_message("Tool '%s' does not exist." %
                                            (escape(tool_id)))
        if tool.require_login and not trans.user:
            message = "You must be logged in to use this tool."
            status = "info"
            redirect = url_for(controller='tool_runner',
                               action='index',
                               tool_id=tool_id,
                               **kwd)
            return trans.response.send_redirect(
                url_for(controller='user',
                        action='login',
                        cntrller='user',
                        message=message,
                        status=status,
                        redirect=redirect))

        def _validated_params_for(kwd):
            params = galaxy.util.Params(
                kwd, sanitize=False
            )  # Sanitize parameters when substituting into command line via input wrappers
            # do param translation here, used by datasource tools
            if tool.input_translator:
                tool.input_translator.translate(params)
            return params

        params = _validated_params_for(kwd)
        # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
        # so make sure to create a new history if we've never had one before.
        history = tool.get_default_history_by_trans(trans, create=True)
        try:
            template, vars = tool.handle_input(trans, params.__dict__)
        except KeyError:
            # This error indicates (or at least can indicate) there was a
            # problem with the stored tool_state - it is incompatible with
            # this variant of the tool - possibly because the tool changed
            # or because the tool version changed.
            del kwd["tool_state"]
            params = _validated_params_for(kwd)
            template, vars = tool.handle_input(trans, params.__dict__)
        if len(params) > 0:
            trans.log_event("Tool params: %s" % (str(params)), tool_id=tool_id)
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template(
            template,
            history=history,
            toolbox=self.get_toolbox(),
            tool_version_select_field=tool_version_select_field,
            tool=tool,
            util=galaxy.util,
            add_frame=add_frame,
            form_input_auto_focus=True,
            **vars)

    @web.expose
    def rerun(self, trans, id=None, from_noframe=None, job_id=None, **kwd):
        """
        Given a HistoryDatasetAssociation id, find the job that created
        the dataset, extract the parameters, and display the appropriate tool
        form with parameters already filled in.
        """
        if job_id:
            try:
                job_id = trans.security.decode_id(job_id)
                job = trans.sa_session.query(trans.app.model.Job).get(job_id)
            except:
                error("Invalid value for 'job_id' parameter")
            if not trans.user_is_admin():
                for data_assoc in job.output_datasets:
                    # only allow rerunning if user is allowed access to the dataset.
                    if not trans.app.security_agent.can_access_dataset(
                            trans.get_current_user_roles(),
                            data_assoc.dataset.dataset):
                        error("You are not allowed to rerun this job")
            param_error_text = "Failed to get parameters for job id %d " % job_id
        else:
            if not id:
                error("'id' parameter is required")
            try:
                id = int(id)
            except:
                # it's not an un-encoded id, try to parse as encoded
                try:
                    id = trans.security.decode_id(id)
                except:
                    error("Invalid value for 'id' parameter")
            # Get the dataset object
            data = trans.sa_session.query(
                trans.app.model.HistoryDatasetAssociation).get(id)
            # only allow rerunning if user is allowed access to the dataset.
            if not (trans.user_is_admin()
                    or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), data.dataset)):
                error("You are not allowed to access this dataset")
            # Get the associated job, if any.
            job = data.creating_job
            if not job:
                raise Exception(
                    "Failed to get job information for dataset hid %d" %
                    data.hid)
            param_error_text = "Failed to get parameters for dataset id %d " % data.id
        # Get the tool object
        tool_id = job.tool_id
        tool_version = job.tool_version
        try:
            tool_version_select_field, tools, tool = self.__get_tool_components(
                tool_id,
                tool_version=tool_version,
                get_loaded_tools_by_lineage=False,
                set_selected=True)
            if (tool.id == job.tool_id or tool.old_id == job.tool_id) and tool.version == job.tool_version:
                tool_id_version_message = ''
            elif tool.id == job.tool_id:
                if job.tool_version is None:
                    # For some reason jobs don't always keep track of the tool version.
                    tool_id_version_message = ''
                else:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    if len(tools) > 1:
                        tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                    else:
                        tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
            else:
                if len(tools) > 1:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                else:
                    tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % (
                        job.tool_id, job.tool_version)
                    tool_id_version_message += 'currently available.  You can rerun the job with this tool, which is a derivation of the original tool.'
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is expected so not an exception.
            tool_id_version_message = ''
            error(
                "This dataset was created by an obsolete tool (%s). Can't re-run."
                % tool_id)
        if not tool.allow_user_access(trans.user):
            error("The requested tool is unknown.")
        # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
        if not tool.is_workflow_compatible:
            error("The '%s' tool does not currently support rerunning." %
                  tool.name)
        # Get the job's parameters
        try:
            params_objects = job.get_param_values(trans.app,
                                                  ignore_errors=True)
        except:
            raise Exception(param_error_text)
        upgrade_messages = tool.check_and_update_param_values(
            params_objects, trans, update_values=False)
        # Need to remap dataset parameters. Job parameters point to original
        # dataset used; parameter should be the analogous dataset in the
        # current history.
        history = trans.get_history()
        hda_source_dict = {}  # Mapping from HDA in history to source HDAs.
        for hda in history.datasets:
            source_hda = hda.copied_from_history_dataset_association
            while source_hda:  # should this check library datasets as well?
                # FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories,
                # but is still less than perfect when eg individual datasets are copied between histories
                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                    hda_source_dict[source_hda] = hda
                source_hda = source_hda.copied_from_history_dataset_association
        # Ditto for dataset collections.
        hdca_source_dict = {}
        for hdca in history.dataset_collections:
            source_hdca = hdca.copied_from_history_dataset_collection_association
            while source_hdca:
                if source_hdca not in hdca_source_dict or source_hdca.hid == hdca.hid:
                    hdca_source_dict[source_hdca] = hdca
                source_hdca = source_hdca.copied_from_history_dataset_collection_association

        # Unpack unvalidated values to strings, they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow)
        # This needs to be done recursively through grouping parameters
        def rerun_callback(input, value, prefixed_name, prefixed_label):
            if isinstance(value, UnvalidatedValue):
                try:
                    return input.to_html_value(value.value, trans.app)
                except Exception, e:
                    # Need to determine when (if ever) the to_html_value call could fail.
                    log.debug(
                        "Failed to use input.to_html_value to determine value of unvalidated parameter, defaulting to string: %s"
                        % (e))
                    return str(value)
            if isinstance(input, DataToolParameter):
                if isinstance(value, list):
                    values = []
                    for val in value:
                        if is_hashable(val):
                            if val in history.datasets:
                                values.append(val)
                            elif val in hda_source_dict:
                                values.append(hda_source_dict[val])
                    return values
                if is_hashable(value) and value not in history.datasets and value in hda_source_dict:
                    return hda_source_dict[value]
            elif isinstance(input, DataCollectionToolParameter):
                if is_hashable(value) and value not in history.dataset_collections and value in hdca_source_dict:
                    return hdca_source_dict[value]

        visit_input_values(tool.inputs, params_objects, rerun_callback)
        # Create a fake tool_state for the tool, with the parameters values
        state = tool.new_state(trans)
        state.inputs = params_objects
        # If the job failed and has dependencies, allow dependency remap
        if job.state == job.states.ERROR:
            try:
                if [hda.dependent_jobs for hda in [jtod.dataset for jtod in job.output_datasets] if hda.dependent_jobs]:
                    state.rerun_remap_job_id = trans.app.security.encode_id(
                        job.id)
            except:
                # Job has no outputs?
                pass
        # create an incoming object from the original job's dataset-modified param objects
        incoming = {}
        params_to_incoming(incoming, tool.inputs, params_objects, trans.app)
        incoming["tool_state"] = galaxy.util.object_to_string(
            state.encode(tool, trans.app))
        template, vars = tool.handle_input(
            trans, incoming, old_errors=upgrade_messages
        )  # update new state with old parameters
        # Is the "add frame" stuff neccesary here?
        add_frame = AddFrameData()
        add_frame.debug = trans.debug
        if from_noframe is not None:
            add_frame.wiki_url = trans.app.config.wiki_url
            add_frame.from_noframe = True
        return trans.fill_template(
            template,
            history=history,
            toolbox=self.get_toolbox(),
            tool_version_select_field=tool_version_select_field,
            tool=tool,
            job=job,
            util=galaxy.util,
            add_frame=add_frame,
            tool_id_version_message=tool_id_version_message,
            **vars)
Example #11
 def _workflow_to_dict_run(self, trans, stored):
     """
     Builds workflow dictionary used by run workflow form
     """
     workflow = stored.latest_workflow
     if len(workflow.steps) == 0:
         raise exceptions.MessageException('Workflow cannot be run because it does not have any steps.')
     if attach_ordered_steps(workflow, workflow.steps):
         raise exceptions.MessageException('Workflow cannot be run because it contains cycles.')
     trans.workflow_building_mode = workflow_building_modes.USE_HISTORY
     module_injector = WorkflowModuleInjector(trans)
     has_upgrade_messages = False
     step_version_changes = []
     missing_tools = []
     errors = {}
     for step in workflow.steps:
         try:
             module_injector.inject(step, steps=workflow.steps, exact_tools=False)
         except exceptions.ToolMissingException:
             if step.tool_id not in missing_tools:
                 missing_tools.append(step.tool_id)
             continue
         if step.upgrade_messages:
             has_upgrade_messages = True
         if step.type == 'tool' or step.type is None:
             if step.module.version_changes:
                 step_version_changes.extend(step.module.version_changes)
             step_errors = step.module.get_errors()
             if step_errors:
                 errors[step.id] = step_errors
     if missing_tools:
         workflow.annotation = self.get_item_annotation_str(trans.sa_session, trans.user, workflow)
         raise exceptions.MessageException('Following tools missing: %s' % missing_tools)
     workflow.annotation = self.get_item_annotation_str(trans.sa_session, trans.user, workflow)
     step_order_indices = {}
     for step in workflow.steps:
         step_order_indices[step.id] = step.order_index
     step_models = []
     for i, step in enumerate(workflow.steps):
         step_model = None
         if step.type == 'tool':
             incoming = {}
             tool = trans.app.toolbox.get_tool(step.tool_id, tool_version=step.tool_version)
             params_to_incoming(incoming, tool.inputs, step.state.inputs, trans.app)
             step_model = tool.to_json(trans, incoming, workflow_building_mode=workflow_building_modes.USE_HISTORY)
             step_model['post_job_actions'] = [{
                 'short_str'         : ActionBox.get_short_str(pja),
                 'action_type'       : pja.action_type,
                 'output_name'       : pja.output_name,
                 'action_arguments'  : pja.action_arguments
             } for pja in step.post_job_actions]
         else:
             inputs = step.module.get_runtime_inputs(connections=step.output_connections)
             step_model = {
                 'inputs' : [input.to_dict(trans) for input in inputs.values()]
             }
         step_model['step_type'] = step.type
         step_model['step_label'] = step.label
         step_model['step_name'] = step.module.get_name()
         step_model['step_version'] = step.module.get_version()
         step_model['step_index'] = step.order_index
         step_model['output_connections'] = [{
             'input_step_index'  : step_order_indices.get(oc.input_step_id),
             'output_step_index' : step_order_indices.get(oc.output_step_id),
             'input_name'        : oc.input_name,
             'output_name'       : oc.output_name
         } for oc in step.output_connections]
         if step.annotations:
             step_model['annotation'] = step.annotations[0].annotation
         if step.upgrade_messages:
             step_model['messages'] = step.upgrade_messages
         step_models.append(step_model)
     return {
         'id': trans.app.security.encode_id(stored.id),
         'history_id': trans.app.security.encode_id(trans.history.id) if trans.history else None,
         'name': stored.name,
         'steps': step_models,
         'step_version_changes': step_version_changes,
         'has_upgrade_messages': has_upgrade_messages,
         'workflow_resource_parameters': self._workflow_resource_parameters(trans, stored, workflow),
     }
Example #12
         job = data.creating_job
         if not job:
             trans.response.status = 500
             return { 'error': 'Creating job not found.' }
     except Exception, exception:
         trans.response.status = 500
         return { 'error': 'Failed to get job information.' }
 
 # load job parameters into incoming
 tool_message = ''
 if job:
     try:
         job_params = job.get_param_values( trans.app, ignore_errors = True )
         job_messages = tool.check_and_update_param_values( job_params, trans, update_values=False )
         tool_message = self._compare_tool_version(trans, tool, job)
         params_to_incoming( kwd, tool.inputs, job_params, trans.app )
     except Exception, exception:
         trans.response.status = 500
         return { 'error': str( exception ) }
         
 # create parameter object
 params = galaxy.util.Params( kwd, sanitize = False )
 
 # convert value to jsonifiable value
 def convert(v):
     # check if value is numeric
     isnumber = False
     try:
         float(v)
         isnumber = True
      except Exception:
          pass  # value is not numeric
Example #13
 def get_config_form( self ):
     if self.tool:
         self.add_dummy_datasets()
         incoming = {}
         params_to_incoming( incoming, self.tool.inputs, self.state.inputs, self.trans.app )
         return self.tool.to_json( self.trans, incoming, workflow_building_mode=True )
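
Taken together, the examples show a stable call signature: params_to_incoming(incoming_dict, tool.inputs, state_inputs, app). The mako template in Example #4 additionally passes to_html=False; most callers prime the target as an empty dict before the call, while Example #12 instead merges the job's parameters directly into the request's kwd dictionary.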