class Car(Controller):
    """Example Controller holding plain and nested traits.

    Demonstrates three kinds of controller attributes: simple string
    traits, a fixed sub-controller (``driver``), and an open-key
    sub-controller (``problems``) whose string keys are created on the fly.
    """
    # free-form string attributes
    wheels = traits.Str()
    engine = traits.Str()
    # nested fixed-structure sub-controller with its own traits
    driver = ControllerTrait(Driver(), desc='the guy who would better take a '
                             'bus')
    # open-key mapping: arbitrary string keys, each value a Str trait
    problems = ControllerTrait(OpenKeyController(traits.Str()))
def __init__(self, study_config, configuration):
    """Register the soma-workflow options on *study_config*.

    Parameters
    ----------
    study_config: StudyConfig
        configuration object the soma-workflow traits are added to
    configuration: dict
        initial configuration values, forwarded to the parent class
    """
    super(SomaWorkflowConfig, self).__init__(study_config, configuration)

    # master on/off switch for soma-workflow based execution
    use_sw = Bool(False, output=False,
                  desc='Use soma workflow for the execution')
    study_config.add_trait('use_soma_workflow', use_sw)

    # name of the computing resource jobs will be submitted to
    resource = Str(
        Undefined, output=False,
        desc='Soma-workflow computing resource to be used to run processing')
    study_config.add_trait('somaworkflow_computing_resource', resource)

    # per-resource configuration: an open-key mapping from resource name
    # to a ResourceController instance
    one_resource = ControllerTrait(
        SomaWorkflowConfig.ResourceController(), output=False,
        allow_none=False, desc='Computing resource config')
    resources_map = ControllerTrait(
        OpenKeyController(value_trait=one_resource),
        output=False, allow_none=False, desc='Computing resource config')
    study_config.add_trait(
        'somaworkflow_computing_resources_config', resources_map)
def __init__(self, study_config, configuration):
    """Register the soma-workflow options on *study_config*.

    All traits are tagged with the ``'soma-workflow'`` group so GUIs can
    display them together. Also initializes the module's runtime data
    store on ``study_config.modules_data``.

    Parameters
    ----------
    study_config: StudyConfig
        configuration object the soma-workflow traits are added to
    configuration: dict
        initial configuration values, forwarded to the parent class
    """
    super(SomaWorkflowConfig, self).__init__(study_config, configuration)
    add_trait = study_config.add_trait

    # master on/off switch for soma-workflow based execution
    add_trait('use_soma_workflow',
              Bool(False, output=False,
                   desc='Use soma workflow for the execution',
                   groups=['soma-workflow']))
    # name of the computing resource jobs will be submitted to
    add_trait('somaworkflow_computing_resource',
              Str(Undefined, output=False,
                  desc='Soma-workflow computing resource to be used to run '
                       'processing',
                  groups=['soma-workflow']))
    # optional path to an alternate soma-workflow configuration file
    add_trait('somaworkflow_config_file',
              File(Undefined, output=False, optional=True,
                   desc='Soma-Workflow configuration file. '
                        'Default: $HOME/.soma_workflow.cfg',
                   groups=['soma-workflow']))
    # workflow retention policies after execution
    add_trait('somaworkflow_keep_failed_workflows',
              Bool(True,
                   desc='Keep failed workflows after pipeline execution '
                        'through StudyConfig',
                   groups=['soma-workflow']))
    add_trait('somaworkflow_keep_succeeded_workflows',
              Bool(False,
                   desc='Keep succeeded workflows after pipeline execution '
                        'through StudyConfig',
                   groups=['soma-workflow']))
    # per-resource configuration: an open-key mapping from resource name
    # to a ResourceController instance
    one_resource = ControllerTrait(
        ResourceController(), output=False, allow_none=False,
        desc='Computing resource config')
    add_trait('somaworkflow_computing_resources_config',
              ControllerTrait(OpenKeyController(value_trait=one_resource),
                              output=False, allow_none=False,
                              desc='Computing resource config',
                              groups=['soma-workflow']))

    # runtime data store for this module
    self.study_config.modules_data.somaworkflow = {}
def get_attribute_values(self):
    '''
    Get attributes Controller associated to a process

    Builds the attributes controller lazily on first call: iterated
    attributes of the wrapped process are promoted to List traits, other
    attributes are copied as-is.

    Returns
    -------
    attributes: Controller
    '''
    t = self.trait('capsul_attributes')
    if t is None:
        # not built yet: derive attributes from the inner (iterated) process
        try:
            pattributes = ProcessCompletionEngine.get_completion_engine(
                self.process.process).get_attribute_values()
        except AttributeError:
            # ProcessCompletionEngine not implemented for this process:
            # no completion
            return
        schemas = self._get_schemas()
        attributes = ProcessAttributes(self.process, schemas)
        self.add_trait('capsul_attributes', ControllerTrait(Controller()))
        self.capsul_attributes = attributes
        iter_attrib = self.get_iterated_attributes()
        # non-iterated attributes: copy the trait unchanged
        for attrib, trait in six.iteritems(pattributes.user_traits()):
            if attrib not in iter_attrib:
                attributes.add_trait(attrib, trait)
        # iterated attributes: wrap each one in a List trait, and seed it
        # with the current (single) value when one is set
        for attrib in iter_attrib:
            trait = pattributes.trait(attrib)
            if trait is not None:
                attributes.add_trait(
                    attrib, traits.List(trait, output=trait.output))
            value = getattr(pattributes, attrib, None)
            if value is not None and value is not traits.Undefined:
                setattr(attributes, attrib, [value])
    return self.capsul_attributes
def __init__(self):
    """Declare the per-computing-resource configuration traits.

    Defines the file transfer paths list and the path translations
    mapping used by soma-workflow.
    """
    super(SomaWorkflowConfig.ResourceController, self).__init__()

    # paths whose files soma-workflow must transfer to the resource
    transfer = List(
        [], output=False,
        desc='list of paths where files have to be transferred '
             'by soma-workflow')
    self.add_trait('transfer_paths', transfer)

    # each translation value is a fixed-size (identifier, uuid) pair
    pair_trait = List(trait=Str(), value=('', ''), minlen=2, maxlen=2)
    translations = ControllerTrait(
        OpenKeyController(value_trait=pair_trait),
        output=False,
        desc='Soma-workflow paths translations mapping: '
             '{local_path: (identifier, uuid)}')
    self.add_trait('path_translations', translations)
def get_attribute_values(self):
    '''
    Get attributes Controller associated to a process

    Built lazily: the first call creates the ``capsul_attributes`` trait
    and fills it from the FOM definitions.

    Returns
    -------
    attributes: Controller
    '''
    already_built = (self.trait('capsul_attributes') is not None
                     and hasattr(self, 'capsul_attributes'))
    if not already_built:
        self.add_trait('capsul_attributes', ControllerTrait(Controller()))
        self.capsul_attributes = ProcessAttributes(
            self.process, self._get_schemas())
        self.create_attributes_with_fom()
    return self.capsul_attributes
def __init__(self):
    """Declare the per-computing-resource configuration traits.

    Defines the submission queue name, the file transfer paths list and
    the path translations mapping used by soma-workflow.
    """
    super(ResourceController, self).__init__()

    # jobs queue used when submitting workflows on this resource
    queue_trait = Str(
        Undefined, output=False,
        desc='Jobs queue to be used on the computing resource for '
             'workflow submissions')
    self.add_trait('queue', queue_trait)

    # paths whose files soma-workflow must transfer to the resource
    transfer = List(
        [], output=False,
        desc='list of paths where files have to be transferred '
             'by soma-workflow')
    self.add_trait('transfer_paths', transfer)

    # each translation value is a fixed-size (identifier, uuid) pair
    pair_trait = List(trait=Str(), value=('', ''), minlen=2, maxlen=2)
    translations = ControllerTrait(
        OpenKeyController(value_trait=pair_trait),
        output=False,
        desc='Soma-workflow paths translations mapping: '
             '{local_path: (identifier, uuid)}')
    self.add_trait('path_translations', translations)
def get_attribute_values(self):
    '''
    Get attributes Controller associated to a process

    Returns
    -------
    attributes: ProcessAttributes instance

    The default implementation does nothing for a single Process
    instance, and merges attributes from its children if the process is
    a pipeline.
    '''
    # cached result unless a rebuild was explicitly requested
    if not self._rebuild_attributes \
            and self.trait('capsul_attributes') is not None \
            and hasattr(self, 'capsul_attributes'):
        return self.capsul_attributes
    schemas = self._get_schemas()
    study_config = self.process.get_study_config()
    # pick a specialized ProcessAttributes subclass from the attributes
    # factory when the AttributesConfig module is active; fall back to
    # the generic ProcessAttributes otherwise
    proc_attr_cls = ProcessAttributes
    if 'AttributesConfig' in study_config.modules:
        factory = study_config.modules_data.attributes_factory
        # try the contextual (pipeline-qualified) name first, then the
        # plain process name
        names = [self.process.name]
        if hasattr(self.process, 'context_name'):
            names.insert(0, self.process.context_name)
        for name in names:
            try:
                proc_attr_cls = factory.get('process_attributes', name)
                # NOTE(review): 'found' is never read afterwards —
                # looks like leftover code; confirm before removing
                found = True
                break
            except ValueError:
                pass
    if not hasattr(self, 'capsul_attributes'):
        self.add_trait('capsul_attributes', ControllerTrait(Controller()))
    self.capsul_attributes = proc_attr_cls(self.process, schemas)
    self._rebuild_attributes = False
    # if no specialized attributes set and process is a pipeline,
    # try building from children nodes
    if proc_attr_cls is ProcessAttributes \
            and isinstance(self.process, Pipeline):
        attributes = self.capsul_attributes
        name = getattr(self.process, 'context_name', self.process.name)
        for node_name, node in six.iteritems(self.process.nodes):
            if node_name == '':
                # '' is the pipeline's own node: skip it
                continue
            subprocess = None
            if hasattr(node, 'process'):
                subprocess = node.process
            elif isinstance(node, Switch):
                subprocess = node
            if subprocess is not None:
                pname = '.'.join([name, node_name])
                subprocess_compl = \
                    ProcessCompletionEngine.get_completion_engine(
                        subprocess, pname)
                try:
                    sub_attributes \
                        = subprocess_compl.get_attribute_values()
                except:
                    # fall back to an engine of our own class; give up on
                    # this child if that fails too (deliberate best-effort)
                    try:
                        subprocess_compl = self.__class__(subprocess)
                        sub_attributes \
                            = subprocess_compl.get_attribute_values()
                    except:
                        continue
                # merge child attributes, first definition wins
                for attribute, trait \
                        in six.iteritems(sub_attributes.user_traits()):
                    if attributes.trait(attribute) is None:
                        attributes.add_trait(attribute, trait)
                        setattr(attributes, attribute,
                                getattr(sub_attributes, attribute))
        self._get_linked_attributes()
    return self.capsul_attributes
def get_attribute_values(self):
    """Get the attributes Controller associated to the switch.

    Built lazily on first call. For each switch output, the attributes of
    the first connected (non-switch, non-parent) process parameter found
    on either side of the switch are collected, then propagated to the
    corresponding plug on the other side.

    Returns
    -------
    attributes: ProcessAttributes instance
    """
    if self.trait('capsul_attributes') is not None \
            and hasattr(self, 'capsul_attributes'):
        return self.capsul_attributes
    self.add_trait('capsul_attributes', ControllerTrait(Controller()))
    capsul_attributes = ProcessAttributes(self.process, {})
    self.capsul_attributes = capsul_attributes
    outputs = self.process._outputs
    schema = 'switch'  # FIXME
    name = getattr(self.process, 'context_name', self.name)
    pipeline_name = '.'.join(name.split('.')[:-1])
    if pipeline_name == '':
        pipeline_name = []
    else:
        pipeline_name = [pipeline_name]
    # bookkeeping attributes that must not be copied between parameters
    forbidden_attributes = set(
        ['generated_by_parameter', 'generated_by_process'])
    # map python value types to trait types; six.text_type covers the
    # py2 'unicode' case (the bare name 'unicode' would raise NameError
    # on python 3)
    traits_types = {
        str: traits.Str,
        six.text_type: traits.Str,
        int: traits.Int,
        float: traits.Float,
        list: traits.List,
    }

    def _editable_attributes(param_attributes, skip_forbidden):
        # build an EditableAttributes controller from a parameter's
        # attributes dict, inferring trait types from the values
        ea = EditableAttributes()
        for attribute, value in six.iteritems(param_attributes):
            if skip_forbidden and attribute in forbidden_attributes:
                continue
            ttype = traits_types.get(type(value))
            if ttype is not None:
                trait = ttype()
            else:
                trait = value
            # bug fix: pass the trait instance (was 'ttype', i.e. the
            # class, or even None for unmapped value types)
            ea.add_trait(attribute, trait)
            setattr(ea, attribute, value)
        return ea

    for out_name in outputs:
        in_name = '_switch_'.join((self.process.switch, out_name))
        found = False
        # look at the input plug first, then at the output plug
        for output, name in ((False, in_name), (True, out_name)):
            plug = self.process.plugs.get(name)
            if plug is None:
                continue
            if output:
                links = plug.links_to
            else:
                links = plug.links_from
            for link in links:
                node = link[2]
                if isinstance(node, Switch):
                    # FIXME: just for now
                    continue
                if link[0] == '':
                    # link to the parent pipeline: don't call it to avoid
                    # an infinite loop.
                    # Either it will provide attributes by its own, either
                    # we must not take them into account, so skip it.
                    continue
                if hasattr(node, 'process'):
                    process = node.process
                else:
                    process = node
                proc_name = '.'.join(pipeline_name + [link[0]])
                completion_engine \
                    = ProcessCompletionEngine.get_completion_engine(
                        process, name=proc_name)
                attributes = completion_engine.get_attribute_values()
                try:
                    param_attributes \
                        = attributes.get_parameters_attributes()[link[1]]
                except Exception:
                    # linked parameter has no attributes: try next link
                    continue
                if len(param_attributes) != 0 \
                        and len([x for x in param_attributes.keys()
                                 if x not in forbidden_attributes]) != 0:
                    ea = _editable_attributes(param_attributes, True)
                    capsul_attributes.set_parameter_attributes(
                        name, schema, ea, {})
                    found = True
                    break
            if found:
                break
        if found:
            # propagate from input/output to other side
            ea = _editable_attributes(param_attributes, False)
            if output:
                capsul_attributes.set_parameter_attributes(
                    in_name, schema, ea, {})
            else:
                capsul_attributes.set_parameter_attributes(
                    out_name, schema, ea, {})
    self.install_switch_observer()
    return capsul_attributes