Example #1
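A classmethod that rebuilds a tool module from a persisted workflow step. If the stored tool_id is no longer in the toolbox, it looks for an alternate installed version of the tool, and it collects the step's post-job actions into a dict keyed by action_type.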
 def from_workflow_step( Class, trans, step ):
     tool_id = step.tool_id
     if trans.app.toolbox and tool_id not in trans.app.toolbox.tools_by_id:
         # See if we have access to a different version of the tool.
         # TODO: If workflows are ever enhanced to use tool version
         # in addition to tool id, enhance the selection process here
         # to retrieve the correct version of the tool.
         tool_version = Class.__get_tool_version( trans, tool_id )
         if tool_version:
             tool_version_ids = tool_version.get_version_ids( trans.app )
             for tool_version_id in tool_version_ids:
                 if tool_version_id in trans.app.toolbox.tools_by_id:
                     tool_id = tool_version_id
                     break
     if ( trans.app.toolbox and tool_id in trans.app.toolbox.tools_by_id ):
         module = Class( trans, tool_id )
         module.state = DefaultToolState()
         module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
         module.errors = step.tool_errors
         # module.post_job_actions = step.post_job_actions
         module.workflow_outputs = step.workflow_outputs
         pjadict = {}
         for pja in step.post_job_actions:
             pjadict[pja.action_type] = pja
         module.post_job_actions = pjadict
         return module
     return None
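The closing loop keys each post-job action by its action_type, with later entries overwriting earlier ones. A toy stand-in (PJA here is a hypothetical placeholder for Galaxy's PostJobAction, and the action names are only illustrative) shows the same mapping built as a single expression:

 from collections import namedtuple

 # PJA stands in for Galaxy's PostJobAction model; only action_type matters here.
 PJA = namedtuple('PJA', ['action_type', 'output_name'])

 actions = [PJA('RenameDatasetAction', 'out1'), PJA('HideDatasetAction', 'out1')]
 pjadict = dict((pja.action_type, pja) for pja in actions)
 assert set(pjadict) == set(['RenameDatasetAction', 'HideDatasetAction'])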
Example #2
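A minimal from_dict factory: it constructs the module and decodes the serialized tool_state in place.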
 def from_dict(Class, trans, d):
     tool_id = d['tool_id']
     module = Class(trans, tool_id)
     module.state = DefaultToolState()
     module.state.decode(d["tool_state"], module.tool, module.trans.app)
     module.errors = d.get("tool_errors", None)
     return module
Example #3
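A tool-shed-aware from_dict that additionally takes repository_id, changeset_revision, and tools_metadata; it only decodes the state when the tool could actually be loaded, and forwards the secure flag to decode.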
 def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata, secure=True ):
     tool_id = step_dict[ 'tool_id' ]
     module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
     module.state = DefaultToolState()
     if module.tool is not None:
         module.state.decode( step_dict[ "tool_state" ], module.tool, module.trans.app, secure=secure )
     module.errors = step_dict.get( "tool_errors", None )
     return module
Example #4
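The simplest from_workflow_step variant: no version fallback and no post-job-action handling, so it assumes the tool resolves.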
 def from_workflow_step(Class, trans, step):
     tool_id = step.tool_id
     module = Class(trans, tool_id)
     module.state = DefaultToolState()
     module.state.inputs = module.tool.params_from_strings(
         step.tool_inputs, trans.app, ignore_errors=True)
     module.errors = step.tool_errors
     return module
Example #5
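A repository-scoped from_workflow_step that guards against a missing tool by falling back to empty inputs.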
 def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
     module = Class( trans, repository_id, changeset_revision, tools_metadata, step.tool_id )
     module.state = DefaultToolState()
     if module.tool:
         module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
     else:
         module.state.inputs = {}
     module.errors = step.tool_errors
     return module
Example #6
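Like Example #2, but with a secure flag for decoding and defaults for post_job_actions and workflow_outputs.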
 def from_dict( Class, trans, d, secure=True ):
     tool_id = d[ 'tool_id' ]
     module = Class( trans, tool_id )
     module.state = DefaultToolState()
     if module.tool is not None:
         module.state.decode( d[ "tool_state" ], module.tool, module.trans.app, secure=secure )
     module.errors = d.get( "tool_errors", None )
     module.post_job_actions = d.get( "post_job_actions", {} )
     module.workflow_outputs = d.get( "workflow_outputs", [] )
     return module
Example #7
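recover_state rebuilds the module's state from a plain dictionary, decoding against the declared inputs when there are any and otherwise loading the raw payload with safe_loads.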
    def recover_state( self, state, **kwds ):
        """ Recover state `dict` from simple dictionary describing configuration
        state (potentially from persisted step state).

        Sub-classes should supply a `default_state` method which contains the
        initial state `dict` with key, value pairs for all available attributes.
        """
        self.state = DefaultToolState()
        inputs = self.get_inputs()
        if inputs:
            self.state.decode( state, Bunch( inputs=inputs ), self.trans.app )
        else:
            self.state.inputs = safe_loads( state ) or {}
Example #8
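Decodes a serialized runtime state by wrapping the runtime inputs in a Bunch so they can stand in for a full tool object.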
 def decode_runtime_state( self, trans, string ):
     fake_tool = Bunch( inputs = self.get_runtime_inputs() )
     state = DefaultToolState()
     state.decode( string, fake_tool, trans.app )
     return state
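Examples #7, #8, and #16 lean on Bunch to fake a tool: decode only needs an object exposing an inputs attribute. Galaxy's real Bunch lives in galaxy.util.bunch, but a minimal sketch of the idea is just:

 class Bunch(object):
     # Any keyword arguments become attributes, so Bunch(inputs=...) can
     # stand in for a full tool wherever only `.inputs` is read.
     def __init__(self, **kwargs):
         self.__dict__.update(kwargs)

 fake_tool = Bunch(inputs={'threshold': '0.5'})
 assert fake_tool.inputs['threshold'] == '0.5'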
Example #9
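Builds a fresh runtime state with a single empty input slot.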
 def get_runtime_state( self ):
     state = DefaultToolState()
     state.inputs = dict( input=None )
     return state
Example #10
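Wraps an existing inputs dict in a new DefaultToolState without any decoding.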
 def __inputs_to_state(self, inputs):
     tool_state = DefaultToolState()
     tool_state.inputs = inputs
     return tool_state
Example #11
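Converts a serialized state string back into a DefaultToolState via string_to_object followed by decode.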
 def __string_to_state(self, state_string):
     encoded_state = string_to_object(state_string)
     state = DefaultToolState()
     state.decode(encoded_state, self.tool, self.app)
     return state
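Examples #10 and #11 are the two halves of a round trip: inputs go into a state object, get serialized, and later come back through decode. A self-contained toy (json standing in for Galaxy's actual encoding machinery; ToyToolState is not Galaxy's API) makes the shape of that round trip explicit:

 import json

 class ToyToolState(object):
     def __init__(self):
         self.inputs = {}

     def encode(self):
         # Galaxy's real encoder also signs/secures the payload; json is a stand-in.
         return json.dumps(self.inputs)

     def decode(self, state_string):
         self.inputs = json.loads(state_string)

 state = ToyToolState()
 state.inputs = {'input': 'dataset_1', 'threshold': 0.5}
 serialized = state.encode()

 restored = ToyToolState()
 restored.decode(serialized)
 assert restored.inputs == state.inputs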
Example #12
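A controller method that pre-creates datasets for asynchronous uploads: it validates any supplied library access roles, restores (or freshly initializes) the tool state, then creates one dataset per uploaded file, URL, or pasted entry.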
    def upload_async_create(self, trans, tool_id=None, **kwd):
        """
        Precreate datasets for asynchronous uploading.
        """
        cntrller = kwd.get('cntrller', '')
        roles = kwd.get('roles', False)
        if roles:
            # The user associated the DATASET_ACCESS permission on the uploaded datasets with 1 or more roles.
            # We need to ensure that the roles are legitimately derived from the roles associated with the
            # LIBRARY_ACCESS permission if the library is not public (this should always be the case, since any
            # illegitimate roles were filtered out of the roles displayed on the upload form).  In addition, we
            # need to ensure that the user did not associate roles that would make the dataset inaccessible to everyone.
            library_id = trans.app.security.decode_id(kwd.get(
                'library_id', ''))
            vars = dict(DATASET_ACCESS_in=roles)
            permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access(
                trans, library_id, cntrller, library=True, **vars)
            if error:
                return ['error', msg]

        def create_dataset(name):
            ud = Bunch(name=name, file_type=None, dbkey=None)
            if nonfile_params.get('folder_id', False):
                replace_id = nonfile_params.get('replace_id', None)
                if replace_id not in [None, 'None']:
                    replace_dataset = trans.sa_session.query(
                        trans.app.model.LibraryDataset).get(
                            trans.security.decode_id(replace_id))
                else:
                    replace_dataset = None
                # FIXME: instead of passing params here (which have been processed by util.Params()), the original
                # kwd should be passed so that complex objects that may have been included in the initial request remain.
                library_bunch = upload_common.handle_library_params(
                    trans, nonfile_params, nonfile_params.folder_id,
                    replace_dataset)
            else:
                library_bunch = None
            return upload_common.new_upload(
                trans,
                cntrller,
                ud,
                library_bunch=library_bunch,
                state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD)

        tool = self.get_toolbox().get_tool(tool_id)
        if not tool:
            return False  # bad tool_id
        nonfile_params = galaxy.util.Params(kwd, sanitize=False)
        if kwd.get('tool_state', None) not in (None, 'None'):
            encoded_state = galaxy.util.string_to_object(kwd["tool_state"])
            tool_state = DefaultToolState()
            tool_state.decode(encoded_state, tool, trans.app)
        else:
            tool_state = tool.new_state(trans)
        tool.update_state(trans,
                          tool.inputs,
                          tool_state.inputs,
                          kwd,
                          update_only=True)
        datasets = []
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append(input)
        assert dataset_upload_inputs, Exception(
            "No dataset upload groups were found.")
        for dataset_upload_input in dataset_upload_inputs:
            d_type = dataset_upload_input.get_datatype(trans, kwd)
            if d_type.composite_type is not None:
                datasets.append(
                    create_dataset(
                        dataset_upload_input.get_composite_dataset_name(kwd)))
            else:
                params = Bunch(
                    **tool_state.inputs[dataset_upload_input.name][0])
                if params.file_data not in [None, ""]:
                    name = params.file_data
                    if name.count('/'):
                        name = name.rsplit('/', 1)[1]
                    if name.count('\\'):
                        name = name.rsplit('\\', 1)[1]
                    datasets.append(create_dataset(name))
                if params.url_paste not in [None, ""]:
                    url_paste = params.url_paste.replace('\r', '').split('\n')
                    url = False
                    for line in url_paste:
                        line = line.rstrip('\r\n').strip()
                        if not line:
                            continue
                        elif line.lower().startswith(('http://', 'ftp://', 'https://')):
                            url = True
                            datasets.append(create_dataset(line))
                        else:
                            if url:
                                continue  # non-url when we've already processed some urls
                            else:
                                # pasted data
                                datasets.append(create_dataset('Pasted Entry'))
                                break
        return [d.id for d in datasets]
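The rsplit pair above strips any client-supplied directory from the uploaded filename, handling both separator styles. A compact helper with equivalent behavior (strip_client_path is a hypothetical name, not Galaxy's) would be:

 def strip_client_path(name):
     # Browsers may send a full client-side path; keep only the final component.
     return name.replace('\\', '/').rsplit('/', 1)[-1]

 assert strip_client_path('C:\\docs\\reads.fastq') == 'reads.fastq'
 assert strip_client_path('/home/user/reads.fastq') == 'reads.fastq'
 assert strip_client_path('reads.fastq') == 'reads.fastq'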
Example #13
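A variant of Example #12 without the role-derivation step; it looks up the history's default permissions instead and resolves the tool directly from tools_by_id.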
    def upload_async_create(self, trans, tool_id=None, **kwd):
        """
        Precreate datasets for asynchronous uploading.
        """
        permissions = trans.app.security_agent.history_get_default_permissions(
            trans.history)

        def create_dataset(name):
            ud = Bunch(name=name, file_type=None, dbkey=None)
            if nonfile_params.get('folder_id', False):
                replace_id = nonfile_params.get('replace_id', None)
                if replace_id not in [None, 'None']:
                    replace_dataset = trans.sa_session.query(
                        l.LibraryDataset).get(int(replace_id))
                else:
                    replace_dataset = None
                library_bunch = upload_common.handle_library_params(
                    trans, nonfile_params, nonfile_params.folder_id,
                    replace_dataset)
            else:
                library_bunch = None
            return upload_common.new_upload(
                trans,
                ud,
                library_bunch=library_bunch,
                state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD)

        tool = self.get_toolbox().tools_by_id.get(tool_id, None)
        if not tool:
            return False  # bad tool_id
        nonfile_params = util.Params(kwd, sanitize=False)
        if kwd.get('tool_state', None) not in (None, 'None'):
            encoded_state = util.string_to_object(kwd["tool_state"])
            tool_state = DefaultToolState()
            tool_state.decode(encoded_state, tool, trans.app)
        else:
            tool_state = tool.new_state(trans)
        errors = tool.update_state(trans,
                                   tool.inputs,
                                   tool_state.inputs,
                                   kwd,
                                   update_only=True)
        datasets = []
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append(input)
        assert dataset_upload_inputs, Exception(
            "No dataset upload groups were found.")
        for dataset_upload_input in dataset_upload_inputs:
            d_type = dataset_upload_input.get_datatype(trans, kwd)

            if d_type.composite_type is not None:
                datasets.append(
                    create_dataset(
                        'Uploaded Composite Dataset (%s)' %
                        dataset_upload_input.get_datatype_ext(trans, kwd)))
            else:
                params = Bunch(
                    **tool_state.inputs[dataset_upload_input.name][0])
                if params.file_data not in [None, ""]:
                    name = params.file_data
                    if name.count('/'):
                        name = name.rsplit('/', 1)[1]
                    if name.count('\\'):
                        name = name.rsplit('\\', 1)[1]
                    datasets.append(create_dataset(name))
                if params.url_paste not in [None, ""]:
                    url_paste = params.url_paste.replace('\r', '').split('\n')
                    url = False
                    for line in url_paste:
                        line = line.rstrip('\r\n').strip()
                        if not line:
                            continue
                        elif line.lower().startswith(('http://', 'ftp://')):
                            url = True
                            datasets.append(create_dataset(line))
                        else:
                            if url:
                                continue  # non-url when we've already processed some urls
                            else:
                                # pasted data
                                datasets.append(create_dataset('Pasted Entry'))
                                break
        return [d.id for d in datasets]
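The url_paste loop shared by Examples #12 and #13 treats pasted text as either a list of URLs or a single free-form entry, never a mix. A distilled, self-contained version of that decision (classify_paste is an illustrative name; the scheme list is a parameter since the two examples differ on https://):

 def classify_paste(url_paste, schemes=('http://', 'ftp://', 'https://')):
     # Return the dataset names to create: one per URL line, or a single
     # 'Pasted Entry' for free-form text, mirroring the loops above.
     names = []
     seen_url = False
     for line in url_paste.replace('\r', '').split('\n'):
         line = line.strip()
         if not line:
             continue
         if line.lower().startswith(schemes):
             seen_url = True
             names.append(line)
         elif seen_url:
             continue  # non-URL line after URLs: skipped, as in the originals
         else:
             names.append('Pasted Entry')
             break
     return names

 assert classify_paste('http://example.org/a\nhttp://example.org/b') == [
     'http://example.org/a', 'http://example.org/b']
 assert classify_paste('just some pasted text') == ['Pasted Entry']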
Example #14
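Returns a runtime state that reuses the module's current inputs.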
 def get_runtime_state( self ):
     state = DefaultToolState()
     state.inputs = self.state.inputs
     return state
Example #15
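A module constructor that simply starts with an empty DefaultToolState.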
 def __init__( self, trans, content_id=None, **kwds ):
     self.trans = trans
     self.content_id = content_id
     self.state = DefaultToolState()
Example #16
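Decodes serialized runtime state at workflow-run time, again using the Bunch(inputs=...) fake-tool pattern.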
 def decode_runtime_state( self, runtime_state ):
     """ Takes the serialized runtime state and decodes it when running the workflow. """
     state = DefaultToolState()
     state.decode( runtime_state, Bunch( inputs=self.get_runtime_inputs() ), self.trans.app )
     return state