Code example #1
0
File: tool_runner.py  Project: knowingchaos/galaxy
 def rerun_callback( input, value, prefixed_name, prefixed_label ):
     # Visitor for tool parameters: map each value from the original job's
     # history onto the current history.
     if isinstance( value, UnvalidatedValue ):
         # Unvalidated values get re-validated on form submit; show as text.
         return str( value )
     if not isinstance( input, DataToolParameter ):
         return None
     if isinstance( value, list ):
         # Keep HDAs already in this history; translate copies through the
         # source map; drop anything unmatched or unhashable.
         remapped = []
         for item in value:
             if not is_hashable( item ):
                 continue
             if item in history.datasets:
                 remapped.append( item )
             elif item in hda_source_dict:
                 remapped.append( hda_source_dict[ item ] )
         return remapped
     if is_hashable( value ) and value not in history.datasets and value in hda_source_dict:
         return hda_source_dict[ value ]
Code example #2
0
 def rerun_callback( input, value, prefixed_name, prefixed_label ):
     # Remap a single tool-parameter value onto the current history.
     if isinstance( value, UnvalidatedValue ):
         return str( value )
     if isinstance( input, DataToolParameter ):
         if isinstance( value, list ):
             # Translate each copied HDA via hda_source_dict; HDAs already in
             # the history pass through; unmatched/unhashable entries drop out.
             return [ val if val in history.datasets else hda_source_dict[ val ]
                      for val in value
                      if is_hashable( val )
                      and ( val in history.datasets or val in hda_source_dict ) ]
         if is_hashable( value ) and value not in history.datasets and value in hda_source_dict:
             return hda_source_dict[ value ]
Code example #3
0
File: tool_runner.py  Project: mvollger/galaxy
    def rerun( self, trans, id=None, from_noframe=None, job_id=None, **kwd ):
        """
        Re-display a tool form pre-populated with a previous job's parameters.

        Given a HistoryDatasetAssociation id (``id``, plain or encoded) or an
        encoded ``job_id``, find the job that created the dataset, extract its
        parameters, and display the appropriate tool form with those
        parameters already filled in.
        """
        # Locate the job either directly (job_id) or via the dataset it created (id).
        if job_id:
            try:
                job_id = trans.security.decode_id( job_id )
                job = trans.sa_session.query( trans.app.model.Job ).get( job_id )
            except:
                # NOTE(review): error() appears to abort the request here
                # (job is used below) -- confirm it raises.
                error( "Invalid value for 'job_id' parameter" )
            if not trans.user_is_admin():
                for data_assoc in job.output_datasets:
                    #only allow rerunning if user is allowed access to the dataset.
                    if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data_assoc.dataset.dataset ):
                        error( "You are not allowed to rerun this job" )
            param_error_text = "Failed to get parameters for job id %d " % job_id
        else:
            if not id:
                error( "'id' parameter is required" );
            # Accept either a plain integer id or an encoded one.
            try:
                id = int( id )
            except:
                # it's not an un-encoded id, try to parse as encoded
                try:
                    id = trans.security.decode_id( id )
                except:
                    error( "Invalid value for 'id' parameter" )
            # Get the dataset object
            data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
            #only allow rerunning if user is allowed access to the dataset.
            if not ( trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ) ):
                error( "You are not allowed to access this dataset" )
            # Get the associated job, if any.
            job = data.creating_job
            if not job:
                raise Exception("Failed to get job information for dataset hid %d" % data.hid)
            param_error_text = "Failed to get parameters for dataset id %d " % data.id
        # Get the tool object
        tool_id = job.tool_id
        tool_version = job.tool_version
        try:
            # Resolve the tool (and any other loaded versions of it) that ran
            # the job, then build a user-facing message when the exact tool
            # id/version is no longer available.
            tool_version_select_field, tools, tool = self.__get_tool_components( tool_id,
                                                                                 tool_version=tool_version,
                                                                                 get_loaded_tools_by_lineage=False,
                                                                                 set_selected=True )
            if ( tool.id == job.tool_id or tool.old_id == job.tool_id ) and tool.version == job.tool_version:
                tool_id_version_message = ''
            elif tool.id == job.tool_id:
                if job.tool_version == None:
                    # For some reason jobs don't always keep track of the tool version.
                    tool_id_version_message = ''
                else:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    if len( tools ) > 1:
                        tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                    else:
                        tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
            else:
                if len( tools ) > 1:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                else:
                    tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % ( job.tool_id, job.tool_version )
                    tool_id_version_message += 'currently available.  You can rerun the job with this tool, which is a derivation of the original tool.'
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is expected so not an exception.
            tool_id_version_message = ''
            error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
        # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
        if not tool.is_workflow_compatible:
            error( "The '%s' tool does not currently support rerunning." % tool.name )
        # Get the job's parameters
        try:
            params_objects = job.get_param_values( trans.app, ignore_errors = True )
        except:
            raise Exception( param_error_text )
        upgrade_messages = tool.check_and_update_param_values( params_objects, trans, update_values=False )
        # Need to remap dataset parameters. Job parameters point to original
        # dataset used; parameter should be the analygous dataset in the
        # current history.
        history = trans.get_history()
        hda_source_dict = {} # Mapping from HDA in history to source HDAs.
        for hda in history.datasets:
            # Walk each HDA's copy chain so datasets copied from another
            # history can be matched back to the job's original inputs.
            source_hda = hda.copied_from_history_dataset_association
            while source_hda:#should this check library datasets as well?
                #FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories,
                #but is still less than perfect when eg individual datasets are copied between histories
                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                    hda_source_dict[ source_hda ] = hda
                source_hda = source_hda.copied_from_history_dataset_association
        # Ditto for dataset collections.
        hdca_source_dict = {}
        for hdca in history.dataset_collections:
            source_hdca = hdca.copied_from_history_dataset_collection_association
            while source_hdca:
                if source_hdca not in hdca_source_dict or source_hdca.hid == hdca.hid:
                    hdca_source_dict[ source_hdca ] = hdca
                source_hdca = source_hdca.copied_from_history_dataset_collection_association

        # Unpack unvalidated values to strings, they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow)
        #This needs to be done recursively through grouping parameters
        def rerun_callback( input, value, prefixed_name, prefixed_label ):
            # Visitor applied to each tool parameter: remap dataset (and
            # collection) values onto the current history via the maps above.
            if isinstance( value, UnvalidatedValue ):
                try:
                    return input.to_html_value( value.value, trans.app )
                except Exception, e:
                    # Need to determine when (if ever) the to_html_value call could fail.
                    log.debug( "Failed to use input.to_html_value to determine value of unvalidated parameter, defaulting to string: %s" % ( e ) )
                    return str( value )
            if isinstance( input, DataToolParameter ):
                if isinstance(value,list):
                    values = []
                    for val in value:
                        if is_hashable( val ):
                            if val in history.datasets:
                                values.append( val )
                            elif val in hda_source_dict:
                                values.append( hda_source_dict[ val ])
                    return values
                if is_hashable( value ) and value not in history.datasets and value in hda_source_dict:
                    return hda_source_dict[ value ]
            elif isinstance( input, DataCollectionToolParameter ):
                if is_hashable( value ) and value not in history.dataset_collections and value in hdca_source_dict:
                    return hdca_source_dict[ value ]
Code example #4
0
File: tool_runner.py  Project: mb12985/Galaxy
    def rerun(self, trans, id=None, from_noframe=None, job_id=None, **kwd):
        """
        Re-display a tool form pre-populated with a previous job's parameters.

        Given a HistoryDatasetAssociation id (``id``, plain or encoded) or an
        encoded ``job_id``, find the job that created the dataset, extract its
        parameters, and display the appropriate tool form with those
        parameters already filled in.
        """
        # Locate the job either directly (job_id) or via the dataset it created (id).
        if job_id:
            try:
                job_id = trans.security.decode_id(job_id)
                job = trans.sa_session.query(trans.app.model.Job).get(job_id)
            except:
                # NOTE(review): error() appears to abort the request here
                # (job is used below) -- confirm it raises.
                error("Invalid value for 'job_id' parameter")
            if not trans.user_is_admin():
                for data_assoc in job.output_datasets:
                    # only allow rerunning if user is allowed access to the dataset.
                    if not trans.app.security_agent.can_access_dataset(
                            trans.get_current_user_roles(),
                            data_assoc.dataset.dataset):
                        error("You are not allowed to rerun this job")
            param_error_text = "Failed to get parameters for job id %d " % job_id
        else:
            if not id:
                error("'id' parameter is required")
            # Accept either a plain integer id or an encoded one.
            try:
                id = int(id)
            except:
                # it's not an un-encoded id, try to parse as encoded
                try:
                    id = trans.security.decode_id(id)
                except:
                    error("Invalid value for 'id' parameter")
            # Get the dataset object
            data = trans.sa_session.query(
                trans.app.model.HistoryDatasetAssociation).get(id)
            # only allow rerunning if user is allowed access to the dataset.
            if not (trans.user_is_admin()
                    or trans.app.security_agent.can_access_dataset(
                        trans.get_current_user_roles(), data.dataset)):
                error("You are not allowed to access this dataset")
            # Get the associated job, if any.
            job = data.creating_job
            if not job:
                raise Exception(
                    "Failed to get job information for dataset hid %d" %
                    data.hid)
            param_error_text = "Failed to get parameters for dataset id %d " % data.id
        # Get the tool object
        tool_id = job.tool_id
        tool_version = job.tool_version
        try:
            # Resolve the tool (and any other loaded versions of it) that ran
            # the job, then build a user-facing message when the exact tool
            # id/version is no longer available.
            tool_version_select_field, tools, tool = self.__get_tool_components(
                tool_id,
                tool_version=tool_version,
                get_loaded_tools_by_lineage=False,
                set_selected=True)
            if (tool.id == job.tool_id or tool.old_id
                    == job.tool_id) and tool.version == job.tool_version:
                tool_id_version_message = ''
            elif tool.id == job.tool_id:
                if job.tool_version is None:
                    # For some reason jobs don't always keep track of the tool version.
                    tool_id_version_message = ''
                else:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    if len(tools) > 1:
                        tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                    else:
                        tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
            else:
                if len(tools) > 1:
                    tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available.  ' % job.tool_version
                    tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                else:
                    tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % (
                        job.tool_id, job.tool_version)
                    tool_id_version_message += 'currently available.  You can rerun the job with this tool, which is a derivation of the original tool.'
            assert tool is not None, 'Requested tool has not been loaded.'
        except:
            # This is expected so not an exception.
            tool_id_version_message = ''
            error(
                "This dataset was created by an obsolete tool (%s). Can't re-run."
                % tool_id)
        if not tool.allow_user_access(trans.user):
            error("The requested tool is unknown.")
        # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
        if not tool.is_workflow_compatible:
            error("The '%s' tool does not currently support rerunning." %
                  tool.name)
        # Get the job's parameters
        try:
            params_objects = job.get_param_values(trans.app,
                                                  ignore_errors=True)
        except:
            raise Exception(param_error_text)
        upgrade_messages = tool.check_and_update_param_values(
            params_objects, trans, update_values=False)
        # Need to remap dataset parameters. Job parameters point to original
        # dataset used; parameter should be the analygous dataset in the
        # current history.
        history = trans.get_history()
        hda_source_dict = {}  # Mapping from HDA in history to source HDAs.
        for hda in history.datasets:
            # Walk each HDA's copy chain so datasets copied from another
            # history can be matched back to the job's original inputs.
            source_hda = hda.copied_from_history_dataset_association
            while source_hda:  # should this check library datasets as well?
                # FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories,
                # but is still less than perfect when eg individual datasets are copied between histories
                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                    hda_source_dict[source_hda] = hda
                source_hda = source_hda.copied_from_history_dataset_association
        # Ditto for dataset collections.
        hdca_source_dict = {}
        for hdca in history.dataset_collections:
            source_hdca = hdca.copied_from_history_dataset_collection_association
            while source_hdca:
                if source_hdca not in hdca_source_dict or source_hdca.hid == hdca.hid:
                    hdca_source_dict[source_hdca] = hdca
                source_hdca = source_hdca.copied_from_history_dataset_collection_association

        # Unpack unvalidated values to strings, they'll be validated when the
        # form is submitted (this happens when re-running a job that was
        # initially run by a workflow)
        # This needs to be done recursively through grouping parameters
        def rerun_callback(input, value, prefixed_name, prefixed_label):
            # Visitor applied to each tool parameter: remap dataset (and
            # collection) values onto the current history via the maps above.
            if isinstance(value, UnvalidatedValue):
                try:
                    return input.to_html_value(value.value, trans.app)
                except Exception, e:
                    # Need to determine when (if ever) the to_html_value call could fail.
                    log.debug(
                        "Failed to use input.to_html_value to determine value of unvalidated parameter, defaulting to string: %s"
                        % (e))
                    return str(value)
            if isinstance(input, DataToolParameter):
                if isinstance(value, list):
                    values = []
                    for val in value:
                        if is_hashable(val):
                            if val in history.datasets:
                                values.append(val)
                            elif val in hda_source_dict:
                                values.append(hda_source_dict[val])
                    return values
                if is_hashable(
                        value
                ) and value not in history.datasets and value in hda_source_dict:
                    return hda_source_dict[value]
            elif isinstance(input, DataCollectionToolParameter):
                if is_hashable(
                        value
                ) and value not in history.dataset_collections and value in hdca_source_dict:
                    return hdca_source_dict[value]