def recover_state( self, state, **kwds ):
    """ Recover state `dict` from simple dictionary describing configuration
    state (potentially from persisted step state).

    Sub-classes should supply a `default_state` method which contains the
    initial state `dict` with key, value pairs for all available attributes.
    """
    self.state = DefaultToolState()
    inputs = self.get_inputs()
    if inputs:
        self.state.decode( state, Bunch( inputs=inputs ), self.trans.app )
    else:
        self.state.inputs = safe_loads( state ) or {}
def from_workflow_step( Class, trans, step ):
    tool_id = step.tool_id
    if trans.app.toolbox and tool_id not in trans.app.toolbox.tools_by_id:
        # See if we have access to a different version of the tool.
        # TODO: If workflows are ever enhanced to use tool version
        # in addition to tool id, enhance the selection process here
        # to retrieve the correct version of the tool.
        tool_version = Class.__get_tool_version( trans, tool_id )
        if tool_version:
            tool_version_ids = tool_version.get_version_ids( trans.app )
            for tool_version_id in tool_version_ids:
                if tool_version_id in trans.app.toolbox.tools_by_id:
                    tool_id = tool_version_id
                    break
    if trans.app.toolbox and tool_id in trans.app.toolbox.tools_by_id:
        module = Class( trans, tool_id )
        module.state = DefaultToolState()
        module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
        module.errors = step.tool_errors
        # module.post_job_actions = step.post_job_actions
        module.workflow_outputs = step.workflow_outputs
        pjadict = {}
        for pja in step.post_job_actions:
            pjadict[ pja.action_type ] = pja
        module.post_job_actions = pjadict
        return module
    return None
def from_dict(Class, trans, d):
    tool_id = d['tool_id']
    module = Class(trans, tool_id)
    module.state = DefaultToolState()
    module.state.decode(d["tool_state"], module.tool, module.trans.app)
    module.errors = d.get("tool_errors", None)
    return module
def from_dict( Class, trans, repository_id, changeset_revision, step_dict, tools_metadata, secure=True ):
    tool_id = step_dict[ 'tool_id' ]
    module = Class( trans, repository_id, changeset_revision, tools_metadata, tool_id )
    module.state = DefaultToolState()
    if module.tool is not None:
        module.state.decode( step_dict[ "tool_state" ], module.tool, module.trans.app, secure=secure )
    module.errors = step_dict.get( "tool_errors", None )
    return module
def from_workflow_step(Class, trans, step):
    tool_id = step.tool_id
    module = Class(trans, tool_id)
    module.state = DefaultToolState()
    module.state.inputs = module.tool.params_from_strings(
        step.tool_inputs, trans.app, ignore_errors=True)
    module.errors = step.tool_errors
    return module
def from_workflow_step( Class, trans, repository_id, changeset_revision, tools_metadata, step ):
    module = Class( trans, repository_id, changeset_revision, tools_metadata, step.tool_id )
    module.state = DefaultToolState()
    if module.tool:
        module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
    else:
        module.state.inputs = {}
    module.errors = step.tool_errors
    return module
def from_dict( Class, trans, d, secure=True ):
    tool_id = d[ 'tool_id' ]
    module = Class( trans, tool_id )
    module.state = DefaultToolState()
    if module.tool is not None:
        module.state.decode( d[ "tool_state" ], module.tool, module.trans.app, secure=secure )
    module.errors = d.get( "tool_errors", None )
    module.post_job_actions = d.get( "post_job_actions", {} )
    module.workflow_outputs = d.get( "workflow_outputs", [] )
    return module
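# Usage sketch, not from the source: rebuilding a tool module from an exported
# step dictionary via the from_dict() above. The module class name, the literal
# tool id, and `encoded_state` are assumptions for illustration; `trans` is a
# Galaxy transaction object as in the surrounding examples. The `secure` flag
# appears to control whether the serialized state is treated as signed; that
# reading is hedged here, not asserted.
d = { 'tool_id': 'cat1', 'tool_state': encoded_state, 'tool_errors': None }
module = SomeToolModule.from_dict( trans, d, secure=False )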
def recover_state(self, state, **kwds):
    """ Recover state `dict` from simple dictionary describing configuration
    state (potentially from persisted step state).

    Sub-classes should supply a `default_state` method which contains the
    initial state `dict` with key, value pairs for all available attributes.
    """
    self.state = DefaultToolState()
    inputs = self.get_inputs()
    if inputs:
        self.state.decode(state, Bunch(inputs=inputs), self.trans.app)
    else:
        self.state.inputs = safe_loads(state) or {}
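# Round-trip sketch (assumes `module` is a concrete WorkflowModule subclass
# instance, as in the full class later in this listing): get_state() encodes
# the configuration state that recover_state() rebuilds, so persisted step
# state survives a save/load cycle.
persisted = module.get_state()
module.recover_state(persisted)
assert isinstance(module.state.inputs, dict)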
def upload_async_create( self, trans, tool_id=None, **kwd ):
    """
    Precreate datasets for asynchronous uploading.
    """
    cntrller = kwd.get( 'cntrller', '' )
    roles = kwd.get( 'roles', False )
    if roles:
        # The user associated the DATASET_ACCESS permission on the uploaded datasets with 1 or more roles.
        # We need to ensure that the roles are legitimately derived from the roles associated with the LIBRARY_ACCESS
        # permission if the library is not public ( this should always be the case since any illegitimate roles
        # were filtered out of the roles displayed on the upload form ).  In addition, we need to ensure that the user
        # did not associate roles that would make the dataset inaccessible to everyone.
        library_id = trans.app.security.decode_id( kwd.get( 'library_id', '' ) )
        vars = dict( DATASET_ACCESS_in=roles )
        permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access( trans, library_id, cntrller, library=True, **vars )
        if error:
            return [ 'error', msg ]

    def create_dataset( name ):
        ud = Bunch( name=name, file_type=None, dbkey=None )
        if nonfile_params.get( 'folder_id', False ):
            replace_id = nonfile_params.get( 'replace_id', None )
            if replace_id not in [ None, 'None' ]:
                replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( trans.security.decode_id( replace_id ) )
            else:
                replace_dataset = None
            # FIXME: instead of passing params here ( which have been processed by util.Params() ), the original kwd
            # should be passed so that complex objects that may have been included in the initial request remain.
            library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
        else:
            library_bunch = None
        return upload_common.new_upload( trans, cntrller, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )

    tool = self.get_toolbox().get_tool( tool_id )
    if not tool:
        return False  # bad tool_id
    nonfile_params = galaxy.util.Params( kwd, sanitize=False )
    if kwd.get( 'tool_state', None ) not in ( None, 'None' ):
        encoded_state = galaxy.util.string_to_object( kwd["tool_state"] )
        tool_state = DefaultToolState()
        tool_state.decode( encoded_state, tool, trans.app )
    else:
        tool_state = tool.new_state( trans )
    tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only=True )
    datasets = []
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append( input )
    assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
    for dataset_upload_input in dataset_upload_inputs:
        d_type = dataset_upload_input.get_datatype( trans, kwd )
        if d_type.composite_type is not None:
            datasets.append( create_dataset( dataset_upload_input.get_composite_dataset_name( kwd ) ) )
        else:
            params = Bunch( **tool_state.inputs[ dataset_upload_input.name ][0] )
            if params.file_data not in [ None, "" ]:
                name = params.file_data
                if name.count('/'):
                    name = name.rsplit('/', 1)[1]
                if name.count('\\'):
                    name = name.rsplit('\\', 1)[1]
                datasets.append( create_dataset( name ) )
            if params.url_paste not in [ None, "" ]:
                url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
                url = False
                for line in url_paste:
                    line = line.rstrip( '\r\n' ).strip()
                    if not line:
                        continue
                    elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ) or line.lower().startswith( 'https://' ):
                        url = True
                        datasets.append( create_dataset( line ) )
                    else:
                        if url:
                            continue  # non-url when we've already processed some urls
                        else:
                            # pasted data
                            datasets.append( create_dataset( 'Pasted Entry' ) )
                            break
    return [ d.id for d in datasets ]
def __inputs_to_state(self, inputs):
    tool_state = DefaultToolState()
    tool_state.inputs = inputs
    return tool_state
def __string_to_state(self, state_string):
    encoded_state = string_to_object(state_string)
    state = DefaultToolState()
    state.decode(encoded_state, self.tool, self.app)
    return state
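# Sketch of how the two private helpers above pair up (illustrative only, and
# written as if called from inside the same class so name mangling resolves):
# one wraps an already-decoded inputs dict, the other decodes a string
# previously produced by DefaultToolState.encode(). `persisted_string` is
# assumed in scope.
state_a = self.__inputs_to_state({'input': None})
state_b = self.__string_to_state(persisted_string)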
def upload_async_create(self, trans, tool_id=None, **kwd):
    """
    Precreate datasets for asynchronous uploading.
    """
    cntrller = kwd.get('cntrller', '')
    roles = kwd.get('roles', False)
    if roles:
        # The user associated the DATASET_ACCESS permission on the uploaded datasets with 1 or more roles.
        # We need to ensure that the roles are legitimately derived from the roles associated with the LIBRARY_ACCESS
        # permission if the library is not public (this should always be the case since any illegitimate roles
        # were filtered out of the roles displayed on the upload form).  In addition, we need to ensure that the user
        # did not associate roles that would make the dataset inaccessible to everyone.
        library_id = trans.app.security.decode_id(kwd.get('library_id', ''))
        vars = dict(DATASET_ACCESS_in=roles)
        permissions, in_roles, error, msg = trans.app.security_agent.derive_roles_from_access(
            trans, library_id, cntrller, library=True, **vars)
        if error:
            return ['error', msg]

    def create_dataset(name):
        ud = Bunch(name=name, file_type=None, dbkey=None)
        if nonfile_params.get('folder_id', False):
            replace_id = nonfile_params.get('replace_id', None)
            if replace_id not in [None, 'None']:
                replace_dataset = trans.sa_session.query(
                    trans.app.model.LibraryDataset).get(
                        trans.security.decode_id(replace_id))
            else:
                replace_dataset = None
            # FIXME: instead of passing params here (which have been processed by util.Params()), the original kwd
            # should be passed so that complex objects that may have been included in the initial request remain.
            library_bunch = upload_common.handle_library_params(
                trans, nonfile_params, nonfile_params.folder_id, replace_dataset)
        else:
            library_bunch = None
        return upload_common.new_upload(
            trans, cntrller, ud, library_bunch=library_bunch,
            state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD)

    tool = self.get_toolbox().get_tool(tool_id)
    if not tool:
        return False  # bad tool_id
    nonfile_params = galaxy.util.Params(kwd, sanitize=False)
    if kwd.get('tool_state', None) not in (None, 'None'):
        encoded_state = galaxy.util.string_to_object(kwd["tool_state"])
        tool_state = DefaultToolState()
        tool_state.decode(encoded_state, tool, trans.app)
    else:
        tool_state = tool.new_state(trans)
    tool.update_state(trans, tool.inputs, tool_state.inputs, kwd, update_only=True)
    datasets = []
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    assert dataset_upload_inputs, Exception("No dataset upload groups were found.")
    for dataset_upload_input in dataset_upload_inputs:
        d_type = dataset_upload_input.get_datatype(trans, kwd)
        if d_type.composite_type is not None:
            datasets.append(create_dataset(
                dataset_upload_input.get_composite_dataset_name(kwd)))
        else:
            params = Bunch(**tool_state.inputs[dataset_upload_input.name][0])
            if params.file_data not in [None, ""]:
                name = params.file_data
                if name.count('/'):
                    name = name.rsplit('/', 1)[1]
                if name.count('\\'):
                    name = name.rsplit('\\', 1)[1]
                datasets.append(create_dataset(name))
            if params.url_paste not in [None, ""]:
                url_paste = params.url_paste.replace('\r', '').split('\n')
                url = False
                for line in url_paste:
                    line = line.rstrip('\r\n').strip()
                    if not line:
                        continue
                    elif line.lower().startswith('http://') or line.lower().startswith('ftp://') or line.lower().startswith('https://'):
                        url = True
                        datasets.append(create_dataset(line))
                    else:
                        if url:
                            continue  # non-url when we've already processed some urls
                        else:
                            # pasted data
                            datasets.append(create_dataset('Pasted Entry'))
                            break
    return [d.id for d in datasets]
def upload_async_create(self, trans, tool_id=None, **kwd):
    """
    Precreate datasets for asynchronous uploading.
    """
    permissions = trans.app.security_agent.history_get_default_permissions(trans.history)

    def create_dataset(name):
        ud = Bunch(name=name, file_type=None, dbkey=None)
        if nonfile_params.get('folder_id', False):
            replace_id = nonfile_params.get('replace_id', None)
            if replace_id not in [None, 'None']:
                replace_dataset = trans.sa_session.query(
                    l.LibraryDataset).get(int(replace_id))
            else:
                replace_dataset = None
            library_bunch = upload_common.handle_library_params(
                trans, nonfile_params, nonfile_params.folder_id, replace_dataset)
        else:
            library_bunch = None
        return upload_common.new_upload(
            trans, ud, library_bunch=library_bunch,
            state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD)

    tool = self.get_toolbox().tools_by_id.get(tool_id, None)
    if not tool:
        return False  # bad tool_id
    nonfile_params = util.Params(kwd, sanitize=False)
    if kwd.get('tool_state', None) not in (None, 'None'):
        encoded_state = util.string_to_object(kwd["tool_state"])
        tool_state = DefaultToolState()
        tool_state.decode(encoded_state, tool, trans.app)
    else:
        tool_state = tool.new_state(trans)
    errors = tool.update_state(trans, tool.inputs, tool_state.inputs, kwd, update_only=True)
    datasets = []
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append(input)
    assert dataset_upload_inputs, Exception("No dataset upload groups were found.")
    for dataset_upload_input in dataset_upload_inputs:
        d_type = dataset_upload_input.get_datatype(trans, kwd)
        if d_type.composite_type is not None:
            datasets.append(create_dataset(
                'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext(trans, kwd)))
        else:
            params = Bunch(**tool_state.inputs[dataset_upload_input.name][0])
            if params.file_data not in [None, ""]:
                name = params.file_data
                if name.count('/'):
                    name = name.rsplit('/', 1)[1]
                if name.count('\\'):
                    name = name.rsplit('\\', 1)[1]
                datasets.append(create_dataset(name))
            if params.url_paste not in [None, ""]:
                url_paste = params.url_paste.replace('\r', '').split('\n')
                url = False
                for line in url_paste:
                    line = line.rstrip('\r\n').strip()
                    if not line:
                        continue
                    elif line.lower().startswith('http://') or line.lower().startswith('ftp://'):
                        url = True
                        datasets.append(create_dataset(line))
                    else:
                        if url:
                            continue  # non-url when we've already processed some urls
                        else:
                            # pasted data
                            datasets.append(create_dataset('Pasted Entry'))
                            break
    return [d.id for d in datasets]
def decode_runtime_state( self, runtime_state ):
    """ Takes the serialized runtime state and decodes it
    when running the workflow. """
    state = DefaultToolState()
    state.decode( runtime_state, Bunch( inputs=self.get_runtime_inputs() ), self.trans.app )
    return state
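# Round-trip sketch: encode_runtime_state() and decode_runtime_state() are a
# symmetric pair (both appear in the WorkflowModule class later in this
# listing). `module` is assumed to be a concrete module instance.
serialized = module.encode_runtime_state(module.get_runtime_state())
restored = module.decode_runtime_state(serialized)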
def __init__( self, trans, content_id=None, **kwds ):
    self.trans = trans
    self.content_id = content_id
    self.state = DefaultToolState()
def __inputs_to_state( self, inputs ):
    tool_state = DefaultToolState()
    tool_state.inputs = inputs
    return tool_state
def __string_to_state( self, state_string ):
    encoded_state = string_to_object( state_string )
    state = DefaultToolState()
    state.decode( encoded_state, self.tool, self.app )
    return state
def upload_async_create( self, trans, tool_id=None, **kwd ):
    """
    Precreate datasets for asynchronous uploading.
    """
    def create_dataset( name, history ):
        data = trans.app.model.HistoryDatasetAssociation( create_dataset=True )
        data.name = name
        data.state = data.states.UPLOAD
        data.history = history
        data.flush()
        history.add_dataset( data )
        return data

    tool = self.get_toolbox().tools_by_id.get( tool_id, None )
    if not tool:
        return False  # bad tool_id
    # params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
    if "tool_state" in kwd:
        encoded_state = util.string_to_object( kwd["tool_state"] )
        tool_state = DefaultToolState()
        tool_state.decode( encoded_state, tool, trans.app )
    else:
        tool_state = tool.new_state( trans )
    errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only=True )
    datasets = []
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append( input )
    assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
    for dataset_upload_input in dataset_upload_inputs:
        d_type = dataset_upload_input.get_datatype( trans, kwd )
        if d_type.composite_type is not None:
            datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ), trans.history ) )
        else:
            params = Bunch( **tool_state.inputs[ dataset_upload_input.name ][0] )
            if params.file_data not in [ None, "" ]:
                name = params.file_data
                if name.count('/'):
                    name = name.rsplit('/', 1)[1]
                if name.count('\\'):
                    name = name.rsplit('\\', 1)[1]
                datasets.append( create_dataset( name, trans.history ) )
            if params.url_paste not in [ None, "" ]:
                url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
                url = False
                for line in url_paste:
                    line = line.rstrip( '\r\n' ).strip()
                    if not line:
                        continue
                    elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
                        url = True
                        datasets.append( create_dataset( line, trans.history ) )
                    else:
                        if url:
                            continue  # non-url when we've already processed some urls
                        else:
                            # pasted data
                            datasets.append( create_dataset( 'Pasted Entry', trans.history ) )
                            break
    if datasets:
        trans.model.flush()
    return [ d.id for d in datasets ]
def upload_async_create( self, trans, tool_id=None, **kwd ):
    """
    Precreate datasets for asynchronous uploading.
    """
    permissions = trans.app.security_agent.history_get_default_permissions( trans.history )

    def create_dataset( name ):
        ud = Bunch( name=name, file_type=None, dbkey=None )
        if nonfile_params.get( 'folder_id', False ):
            replace_id = nonfile_params.get( 'replace_id', None )
            if replace_id not in [ None, 'None' ]:
                replace_dataset = trans.sa_session.query( l.LibraryDataset ).get( int( replace_id ) )
            else:
                replace_dataset = None
            library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
        else:
            library_bunch = None
        return upload_common.new_upload( trans, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )

    tool = self.get_toolbox().tools_by_id.get( tool_id, None )
    if not tool:
        return False  # bad tool_id
    nonfile_params = util.Params( kwd, sanitize=False )
    if kwd.get( 'tool_state', None ) not in ( None, 'None' ):
        encoded_state = util.string_to_object( kwd["tool_state"] )
        tool_state = DefaultToolState()
        tool_state.decode( encoded_state, tool, trans.app )
    else:
        tool_state = tool.new_state( trans )
    errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only=True )
    datasets = []
    dataset_upload_inputs = []
    for input_name, input in tool.inputs.iteritems():
        if input.type == "upload_dataset":
            dataset_upload_inputs.append( input )
    assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
    for dataset_upload_input in dataset_upload_inputs:
        d_type = dataset_upload_input.get_datatype( trans, kwd )
        if d_type.composite_type is not None:
            datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ) ) )
        else:
            params = Bunch( **tool_state.inputs[ dataset_upload_input.name ][0] )
            if params.file_data not in [ None, "" ]:
                name = params.file_data
                if name.count('/'):
                    name = name.rsplit('/', 1)[1]
                if name.count('\\'):
                    name = name.rsplit('\\', 1)[1]
                datasets.append( create_dataset( name ) )
            if params.url_paste not in [ None, "" ]:
                url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
                url = False
                for line in url_paste:
                    line = line.rstrip( '\r\n' ).strip()
                    if not line:
                        continue
                    elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
                        url = True
                        datasets.append( create_dataset( line ) )
                    else:
                        if url:
                            continue  # non-url when we've already processed some urls
                        else:
                            # pasted data
                            datasets.append( create_dataset( 'Pasted Entry' ) )
                            break
    return [ d.id for d in datasets ]
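# Note on the filename handling inside upload_async_create() above: the paired
# rsplit() calls reduce a client-supplied path to its final component whatever
# the separator style. Standalone sketch with an assumed value (using [-1]
# here, which makes the count() guards of the original unnecessary):
name = 'C:\\data\\reads.fastq'
name = name.rsplit('/', 1)[-1]
name = name.rsplit('\\', 1)[-1]   # -> 'reads.fastq'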
def get_runtime_state( self ):
    state = DefaultToolState()
    state.inputs = dict( input=None )
    return state
class WorkflowModule(object):

    def __init__(self, trans, content_id=None, **kwds):
        self.trans = trans
        self.content_id = content_id
        self.state = DefaultToolState()

    # ---- Creating modules from various representations ---------------------

    @classmethod
    def from_dict(Class, trans, d, **kwds):
        module = Class(trans, **kwds)
        module.recover_state(d.get("tool_state"))
        module.label = d.get("label")
        return module

    @classmethod
    def from_workflow_step(Class, trans, step, **kwds):
        module = Class(trans, **kwds)
        module.recover_state(step.tool_inputs)
        module.label = step.label
        return module

    # ---- Saving in various forms --------------------------------------------

    def save_to_step(self, step):
        step.type = self.type
        step.tool_inputs = self.get_state()

    # ---- General attributes --------------------------------------------------

    def get_type(self):
        return self.type

    def get_name(self):
        return self.name

    def get_version(self):
        return None

    def get_content_id(self):
        """ If this component has an identifier external to the step (such
        as a tool or another workflow) return the identifier for that content.
        """
        return None

    def get_tooltip(self, static_path=''):
        return None

    # ---- Configuration time --------------------------------------------------

    def get_state(self, nested=True):
        """ Return a serializable representation of the persistable state of
        the step.
        """
        inputs = self.get_inputs()
        if inputs:
            return self.state.encode(Bunch(inputs=inputs), self.trans.app, nested=nested)
        else:
            return self.state.inputs

    def recover_state(self, state, **kwds):
        """ Recover state `dict` from simple dictionary describing configuration
        state (potentially from persisted step state).

        Sub-classes should supply a `default_state` method which contains the
        initial state `dict` with key, value pairs for all available attributes.
        """
        self.state = DefaultToolState()
        inputs = self.get_inputs()
        if inputs:
            self.state.decode(state, Bunch(inputs=inputs), self.trans.app)
        else:
            self.state.inputs = safe_loads(state) or {}

    def get_errors(self):
        """ This returns a step related error message as string or None """
        return None

    def get_inputs(self):
        """ This returns inputs displayed in the workflow editor """
        return {}

    def get_data_inputs(self):
        """ Get configure time data input descriptions. """
        return []

    def get_data_outputs(self):
        return []

    def get_post_job_actions(self, incoming):
        return []

    def check_and_update_state(self):
        """
        If the state is not in sync with the current implementation of the
        module, try to update. Returns a list of messages to be displayed.
        """
        pass

    def add_dummy_datasets(self, connections=None, steps=None):
        """ Replace connected inputs with placeholder/dummy values. """
        pass

    def get_config_form(self):
        """ Serializes input parameters of a module into input dictionaries. """
        return {
            'title': self.name,
            'inputs': [param.to_dict(self.trans) for param in self.get_inputs().values()]
        }

    # ---- Run time --------------------------------------------------------

    def get_runtime_state(self):
        raise TypeError("Abstract method")

    def get_runtime_inputs(self, **kwds):
        """ Used internally by modules and when displaying inputs in workflow
        editor and run workflow templates.
        """
        return {}

    def compute_runtime_state(self, trans, step_updates=None):
        """ Determine the runtime state (potentially different from self.state
        which describes configuration state). This (again unlike self.state) is
        currently always a `DefaultToolState` object.

        If `step_updates` is `None`, this is likely for rendering the run form,
        for instance; no runtime properties are available, so state must be
        determined solely by the default runtime state described by the step.
        If `step_updates` are available, they describe the runtime properties
        supplied by the workflow runner.
        """
        state = self.get_runtime_state()
        step_errors = {}

        if step_updates:

            def update_value(input, context, prefixed_name, **kwargs):
                if prefixed_name in step_updates:
                    value, error = check_param(trans, input, step_updates.get(prefixed_name), context)
                    if error is not None:
                        step_errors[prefixed_name] = error
                    return value
                return NO_REPLACEMENT

            visit_input_values(self.get_runtime_inputs(), state.inputs, update_value, no_replacement_value=NO_REPLACEMENT)

        return state, step_errors

    def encode_runtime_state(self, runtime_state):
        """ Takes the computed runtime state and serializes it during run
        request creation. """
        return runtime_state.encode(Bunch(inputs=self.get_runtime_inputs()), self.trans.app)

    def decode_runtime_state(self, runtime_state):
        """ Takes the serialized runtime state and decodes it when running
        the workflow. """
        state = DefaultToolState()
        state.decode(runtime_state, Bunch(inputs=self.get_runtime_inputs()), self.trans.app)
        return state

    def execute(self, trans, progress, invocation, step):
        """ Execute the given workflow step in the given workflow invocation.
        Use the supplied workflow progress object to track outputs, find
        inputs, etc...
        """
        raise TypeError("Abstract method")

    def do_invocation_step_action(self, step, action):
        """ Update or set the workflow invocation state action - generic
        extension point meant to allow users to interact with interactive
        workflow modules. The action object returned from this method will be
        attached to the WorkflowInvocationStep and be available the next time
        the workflow scheduler visits the workflow.
        """
        raise exceptions.RequestParameterInvalidException("Attempting to perform invocation step action on module that does not support actions.")

    def recover_mapping(self, step, step_invocations, progress):
        """ Re-populate progress object with information about connections
        from previously executed steps recorded via step_invocations.
        """
        raise TypeError("Abstract method")
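# Sketch of compute_runtime_state() in use: it merges runner-supplied values
# into the default runtime state and collects per-parameter errors. `module`,
# `trans`, and the 'input' key are assumptions for illustration.
state, step_errors = module.compute_runtime_state(trans, step_updates={'input': 7})
if step_errors:
    raise ValueError("invalid runtime parameters: %s" % step_errors)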
def decode_runtime_state( self, trans, string ):
    fake_tool = Bunch( inputs=self.get_runtime_inputs() )
    state = DefaultToolState()
    state.decode( string, fake_tool, trans.app )
    return state
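# Why the Bunch suffices above: decode() only consults the tool's `inputs`
# mapping (the other examples in this listing pass Bunch(inputs=...) the same
# way), so any object exposing an `.inputs` attribute can stand in for a full
# Tool. Illustration with assumed `encoded` and `trans` values:
fake_tool = Bunch(inputs={})
state = DefaultToolState()
state.decode(encoded, fake_tool, trans.app)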
def __init__(self, trans, content_id=None, **kwds):
    self.trans = trans
    self.content_id = content_id
    self.state = DefaultToolState()
class WorkflowModule( object ):

    def __init__( self, trans, content_id=None, **kwds ):
        self.trans = trans
        self.content_id = content_id
        self.state = DefaultToolState()

    # ---- Creating modules from various representations ---------------------

    @classmethod
    def from_dict( Class, trans, d, **kwds ):
        module = Class( trans, **kwds )
        module.recover_state( d.get( "tool_state" ) )
        module.label = d.get( "label" )
        return module

    @classmethod
    def from_workflow_step( Class, trans, step, **kwds ):
        module = Class( trans, **kwds )
        module.recover_state( step.tool_inputs )
        module.label = step.label
        return module

    # ---- Saving in various forms --------------------------------------------

    def save_to_step( self, step ):
        step.type = self.type
        step.tool_inputs = self.get_state()

    # ---- General attributes --------------------------------------------------

    def get_type( self ):
        return self.type

    def get_name( self ):
        return self.name

    def get_version( self ):
        return None

    def get_content_id( self ):
        """ If this component has an identifier external to the step (such
        as a tool or another workflow) return the identifier for that content.
        """
        return None

    def get_tooltip( self, static_path='' ):
        return None

    # ---- Configuration time --------------------------------------------------

    def get_state( self, nested=True ):
        """ Return a serializable representation of the persistable state of
        the step.
        """
        inputs = self.get_inputs()
        if inputs:
            return self.state.encode( Bunch( inputs=inputs ), self.trans.app, nested=nested )
        else:
            return self.state.inputs

    def recover_state( self, state, **kwds ):
        """ Recover state `dict` from simple dictionary describing configuration
        state (potentially from persisted step state).

        Sub-classes should supply a `default_state` method which contains the
        initial state `dict` with key, value pairs for all available attributes.
        """
        self.state = DefaultToolState()
        inputs = self.get_inputs()
        if inputs:
            self.state.decode( state, Bunch( inputs=inputs ), self.trans.app )
        else:
            self.state.inputs = safe_loads( state ) or {}

    def get_errors( self ):
        """ This returns a step related error message as string or None """
        return None

    def get_inputs( self ):
        """ This returns inputs displayed in the workflow editor """
        return {}

    def get_data_inputs( self ):
        """ Get configure time data input descriptions. """
        return []

    def get_data_outputs( self ):
        return []

    def get_post_job_actions( self, incoming ):
        return []

    def check_and_update_state( self ):
        """
        If the state is not in sync with the current implementation of the
        module, try to update. Returns a list of messages to be displayed.
        """
        pass

    def add_dummy_datasets( self, connections=None, steps=None ):
        """ Replace connected inputs with placeholder/dummy values. """
        pass

    def get_config_form( self ):
        """ Serializes input parameters of a module into input dictionaries. """
        return {
            'title' : self.name,
            'inputs': [ param.to_dict( self.trans ) for param in self.get_inputs().values() ]
        }

    # ---- Run time --------------------------------------------------------

    def get_runtime_state( self ):
        raise TypeError( "Abstract method" )

    def get_runtime_inputs( self, **kwds ):
        """ Used internally by modules and when displaying inputs in workflow
        editor and run workflow templates.
        """
        return {}

    def compute_runtime_state( self, trans, step_updates=None ):
        """ Determine the runtime state (potentially different from self.state
        which describes configuration state). This (again unlike self.state) is
        currently always a `DefaultToolState` object.

        If `step_updates` is `None`, this is likely for rendering the run form,
        for instance; no runtime properties are available, so state must be
        determined solely by the default runtime state described by the step.
        If `step_updates` are available, they describe the runtime properties
        supplied by the workflow runner.
        """
        state = self.get_runtime_state()
        step_errors = {}

        if step_updates:

            def update_value( input, context, prefixed_name, **kwargs ):
                if prefixed_name in step_updates:
                    value, error = check_param( trans, input, step_updates.get( prefixed_name ), context )
                    if error is not None:
                        step_errors[ prefixed_name ] = error
                    return value
                return NO_REPLACEMENT

            visit_input_values( self.get_runtime_inputs(), state.inputs, update_value, no_replacement_value=NO_REPLACEMENT )

        return state, step_errors

    def encode_runtime_state( self, runtime_state ):
        """ Takes the computed runtime state and serializes it during run
        request creation. """
        return runtime_state.encode( Bunch( inputs=self.get_runtime_inputs() ), self.trans.app )

    def decode_runtime_state( self, runtime_state ):
        """ Takes the serialized runtime state and decodes it when running
        the workflow. """
        state = DefaultToolState()
        state.decode( runtime_state, Bunch( inputs=self.get_runtime_inputs() ), self.trans.app )
        return state

    def execute( self, trans, progress, invocation, step ):
        """ Execute the given workflow step in the given workflow invocation.
        Use the supplied workflow progress object to track outputs, find
        inputs, etc...
        """
        raise TypeError( "Abstract method" )

    def do_invocation_step_action( self, step, action ):
        """ Update or set the workflow invocation state action - generic
        extension point meant to allow users to interact with interactive
        workflow modules. The action object returned from this method will be
        attached to the WorkflowInvocationStep and be available the next time
        the workflow scheduler visits the workflow.
        """
        raise exceptions.RequestParameterInvalidException( "Attempting to perform invocation step action on module that does not support actions." )

    def recover_mapping( self, step, step_invocations, progress ):
        """ Re-populate progress object with information about connections
        from previously executed steps recorded via step_invocations.
        """
        raise TypeError( "Abstract method" )
def get_runtime_state(self):
    state = DefaultToolState()
    state.inputs = self.state.inputs
    return state
def decode_runtime_state(self, runtime_state):
    """ Takes the serialized runtime state and decodes it when running
    the workflow. """
    state = DefaultToolState()
    state.decode(runtime_state, Bunch(inputs=self.get_runtime_inputs()), self.trans.app)
    return state
def get_runtime_state( self ):
    state = DefaultToolState()
    state.inputs = self.state.inputs
    return state