def test_process_adhoc_completion(self):
     # Ad-hoc (non-FOM) completion on two example processes from
     # bv_capsul_ex: ThresholdProcess, then Mask.
     study_config = self.study_config
     threshold = get_process_instance(
         'bv_capsul_ex.ex_processes.ThresholdProcess', study_config)
     athreshold = ProcessCompletionEngine.get_completion_engine(
         threshold, 'threshold')
     self.assertTrue(athreshold is not None)
     # attributes driving the completion of file parameters
     attrib = {
         'center': 'alpha_centauri',
         'subject': 'r2d2',
         'analysis': 'M0',
     }
     pinputs = {
         'capsul_attributes': attrib,
         'threshold': 0.43,
     }
     athreshold.complete_parameters(process_inputs=pinputs)
     # the ad-hoc completion engine class should have been selected
     self.assertEqual(athreshold.__class__.__name__,
                      'ThresholdProcessAdhocCompletion')
     # input and output paths are built from the attributes
     self.assertEqual(threshold.array_file,
                      os.path.join(study_config.input_directory,
                                   'alpha_centauri_r2d2.npy'))
     self.assertEqual(threshold.threshold, 0.43)
     self.assertEqual(
         threshold.mask_inf,
         os.path.join(study_config.output_directory,
                      'alpha_centauri_r2d2_M0_thresholded_inf.npy'))
     self.assertEqual(
         threshold.mask_sup,
         os.path.join(study_config.output_directory,
                      'alpha_centauri_r2d2_M0_thresholded_sup.npy'))
     # same check on the Mask process, with an explicitly supplied input
     mask =  get_process_instance('bv_capsul_ex.ex_processes.Mask',
                                  study_config)
     amask = ProcessCompletionEngine.get_completion_engine(mask, 'mask')
     self.assertTrue(amask is not None)
     attrib = {
         'center': 'alpha_centauri',
         'subject': 'r2d2',
         'analysis': 'M0',
     }
     pinputs = {
         'capsul_attributes': attrib,
         'input': os.path.join(study_config.output_directory,
                               'input_data_thresholded_inf.npy'),
     }
     amask.complete_parameters(process_inputs=pinputs)
     # the explicit input is kept; mask and output are completed
     self.assertEqual(mask.input, pinputs['input'])
     self.assertEqual(mask.mask,
                      os.path.join(study_config.shared_directory,
                                   'template_masks/mask.npy'))
     self.assertEqual(mask.output,
                      os.path.join(study_config.output_directory,
                                   'input_data_thresholded_inf_masked.npy'))
# Example #2
# 0
def get_process_with_params(process_name,
                            study_config,
                            iterated_params=None,
                            attributes=None,
                            *args,
                            **kwargs):
    ''' Instantiate a process, or an iteration over processes, and fill in its
    parameters.

    Parameters
    ----------
    process_name: string
        name (module and class) of the process to instantiate
    study_config: StudyConfig instance
    iterated_params: list (optional)
        parameters names which should be iterated on. If this list is not
        empty, an iteration process is built. All parameters values
        corresponding to the selected names should be lists with the same size.
    attributes: dict (optional)
        dictionary of attributes for completion system.
    *args:
        sequential parameters for the process. In iteration, "normal"
        parameters are set with the same value for all iterations, and iterated
        parameters dispatch their values to each iteration.
    **kwargs:
        named parameters for the process. Same as above for iterations.

    Returns
    -------
    process: Process instance
    '''
    # Avoid mutable default arguments: the previous defaults ([] and {})
    # were shared across calls, and 'attributes' is mutated in place in the
    # iteration branch below, which would have corrupted the shared default.
    if iterated_params is None:
        iterated_params = []
    if attributes is None:
        attributes = {}

    process = study_config.get_process_instance(process_name)
    signature = process.user_traits()
    params = list(signature.keys())

    # check for iterations
    if iterated_params:

        # wrap the process in an iterative pipeline node
        pipeline = study_config.get_process_instance(Pipeline)
        pipeline.add_iterative_process('iteration', process, iterated_params)
        pipeline.autoexport_nodes_parameters(include_optional=True)
        process = pipeline

        # transform iterated attributes into lists if needed
        for param, value in attributes.items():
            if not isinstance(value, (list, tuple)):
                attributes[param] = [value]

    else:
        # not iterated: assign positional, then named, parameters
        # NOTE(review): in the iterated branch above, *args/**kwargs are not
        # applied to the pipeline -- confirm that is intended.
        for i, arg in enumerate(args):
            set_process_param_from_str(process, params[i], arg)
        for k, arg in six.iteritems(kwargs):
            set_process_param_from_str(process, k, arg)

    # run attribute-based completion on the (possibly iterated) process
    completion_engine = ProcessCompletionEngine.get_completion_engine(process)
    completion_engine.get_attribute_values().import_from_dict(attributes)
    completion_engine.complete_parameters()

    return process
# Example #3
# 0
 def test_list_completion(self):
     # Completion of a process whose parameters are lists: list-valued
     # attributes produce one completed path per element.
     study_config = self.study_config
     process = study_config.get_process_instance(
         'capsul.attributes.test.test_attributed_process.DummyListProcess')
     from capsul.attributes.test.test_attributed_process \
         import DummyListProcessAttributes, MyPathCompletion
     patt = ProcessCompletionEngine.get_completion_engine(process)
     atts = patt.get_attribute_values()
     # the expected engine / attributes / path-completion classes are used
     self.assertTrue(isinstance(patt, ProcessCompletionEngine))
     self.assertTrue(isinstance(atts, DummyListProcessAttributes))
     self.assertTrue(isinstance(
         patt.get_path_completion_engine(),
         MyPathCompletion))
     atts.center = ['jojo', 'koko']
     atts.subject = ['barbapapa', 'barbatruc']
     atts.group = 'cartoon'
     patt.complete_parameters()
     # list parameters: one path per (center, subject) pair
     self.assertEqual([os.path.normpath(p) for p in process.truc],
                      [os.path.normpath(p) for p in
                         ['/tmp/in/DummyListProcess_truc_jojo_barbapapa',
                          '/tmp/in/DummyListProcess_truc_koko_barbatruc',]])
     self.assertEqual([os.path.normpath(p) for p in process.bidule],
                      [os.path.normpath(p) for p in
                         ['/tmp/in/DummyListProcess_bidule_jojo_barbapapa',
                          '/tmp/in/DummyListProcess_bidule_koko_barbatruc']]
     )
     # single (non-list) output built from the 'group' attribute
     self.assertEqual(os.path.normpath(process.result),
                      os.path.normpath(
                         '/tmp/out/DummyListProcess_result_cartoon'))
    def test_group_pipeline_adhoc_completion(self):
        # Completion of a whole group pipeline driven by list attributes,
        # then a sequential run checking the output files exist.
        self.setup_pipeline()
        study_config = self.study_config
        agpipeline = ProcessCompletionEngine.get_completion_engine(
            self.pipeline2, 'group_average_pipeline')
        self.assertTrue(agpipeline is not None)
        attrib = {
            'center': self.groups,
            'subject': self.subjects,
            'mask_type': 'amyelencephalic',
            'analysis': ['M0'],
        }
        pinputs = {
            'capsul_attributes': attrib,
            'threshold': 0.55,
            'input_files': self.input_files
        }
        # the attributes controller exposes exactly these four attributes
        self.assertEqual(
            sorted(agpipeline.get_attribute_values().user_traits().keys()),
            ['analysis', 'center', 'mask_type', 'subject'])
        agpipeline.complete_parameters(process_inputs=pinputs)
        # input files should already exist (presumably created by
        # setup_pipeline -- not visible from this block)
        for ifname in self.pipeline2.input_files:
            self.assertTrue(os.path.exists(ifname))
        self.assertEqual(len(self.pipeline2.averages_sup), len(self.subjects))

        # run sequentially
        study_config.use_soma_workflow = False
        res = study_config.run(self.pipeline2)
        self.assertEqual(res, None)
        # every completed output must have been produced by the run
        for ofname in self.pipeline2.averages_sup:
            self.assertTrue(os.path.exists(ofname))
        for ofname in self.pipeline2.averages_inf:
            self.assertTrue(os.path.exists(ofname))
        self.assertTrue(os.path.exists(self.pipeline2.group_average_sup))
        self.assertTrue(os.path.exists(self.pipeline2.group_average_inf))
 def complete_iteration_step(self, step):
     ''' Complete the parameters on the iterated process for a given
     iteration step.
     '''
     try:
         attrs = self.get_attribute_values()
         engine = ProcessCompletionEngine.get_completion_engine(
             self.process.process, self.name)
         step_attrs = engine.get_attribute_values()
     except AttributeError:
         # ProcessCompletionEngine not implemented for this process:
         # no completion
         return
     iter_attrs = self.get_iterated_attributes()
     self.capsul_iteration_step = step
     # pick this step's value for every iterated attribute, reusing the
     # last available value when the list is shorter than the step index
     for name in iter_attrs:
         values = getattr(attrs, name)
         idx = min(len(values) - 1, self.capsul_iteration_step)
         setattr(step_attrs, name, values[idx])
     # non-iterated attributes keep a single shared value
     for name in attrs.user_traits():
         if name not in iter_attrs:
             setattr(step_attrs, name, getattr(attrs, name))
     # regular parameters are passed through unchanged; iterative ones are
     # narrowed down to the value for this step, when available
     params = {name: getattr(self.process, name)
               for name in self.process.regular_parameters}
     for name in self.process.iterative_parameters:
         values = getattr(self.process, name)
         if len(values) > self.capsul_iteration_step:
             params[name] = values[self.capsul_iteration_step]
     engine.complete_parameters(params)
    def get_induced_iterative_parameters(self):
        '''Iterating over some parameters, and triggering completion through
        attributes, imply that some other parameters will also vary with the
        iteration.
        Ex: process A has 2 parameters, "input" and "output", which are linked
        by the completion system. If we iterate on A.input, then A.output will
        also change with the iteration: parameter "output" should thus be
        included in iterative parameters: it is induced by the iteration over
        "input".

        This method gives the induced iterative parameters.
        '''
        # attributes which vary across iteration steps
        iterated = self.get_iterated_attributes()
        # attributes attached to each parameter of the iterated process
        engine = ProcessCompletionEngine.get_completion_engine(
            self.process.process)
        param_attributes = \
            engine.get_attribute_values().get_parameters_attributes()
        induced = []
        for parameter in self.process.process.user_traits():
            if parameter in self.process.iterative_parameters:
                continue
            attrs = param_attributes.get(parameter)
            # a parameter driven by at least one iterated attribute will
            # itself vary with the iteration
            if attrs and any(a in attrs for a in iterated):
                induced.append(parameter)

        return induced
# Example #7
# 0
 def test_iteration(self):
     # Build a pipeline iterating DummyProcess over 'truc' and 'bidule'
     # and check completion for 4 subjects.
     study_config = self.study_config
     pipeline = Pipeline()
     pipeline.set_study_config(study_config)
     pipeline.add_iterative_process(
         'dummy',
         'capsul.attributes.test.test_attributed_process.DummyProcess',
         ['truc', 'bidule'])
     pipeline.autoexport_nodes_parameters()
     cm = ProcessCompletionEngine.get_completion_engine(pipeline)
     atts = cm.get_attribute_values()
     atts.center = ['muppets']
     atts.subject = ['kermit', 'piggy', 'stalter', 'waldorf']
     cm.complete_parameters()
     # one path per subject; the single 'center' value is reused for all
     self.assertEqual([os.path.normpath(p) for p in pipeline.truc], [
         os.path.normpath(p) for p in [
             '/tmp/in/DummyProcess_truc_muppets_kermit',
             '/tmp/in/DummyProcess_truc_muppets_piggy',
             '/tmp/in/DummyProcess_truc_muppets_stalter',
             '/tmp/in/DummyProcess_truc_muppets_waldorf'
         ]
     ])
     self.assertEqual([os.path.normpath(p) for p in pipeline.bidule], [
         os.path.normpath(p) for p in [
             '/tmp/out/DummyProcess_bidule_muppets_kermit',
             '/tmp/out/DummyProcess_bidule_muppets_piggy',
             '/tmp/out/DummyProcess_bidule_muppets_stalter',
             '/tmp/out/DummyProcess_bidule_muppets_waldorf'
         ]
     ])
    def get_attribute_values(self):
        ''' Get attributes Controller associated to a process

        The controller is built lazily on first call: traits are copied from
        the attributes of the underlying (iterated) process, and traits for
        iterated attributes are wrapped into List traits.

        Returns
        -------
        attributes: Controller
        '''
        t = self.trait('capsul_attributes')
        if t is None:
            # not built yet: create the capsul_attributes controller
            try:
                pattributes = ProcessCompletionEngine.get_completion_engine(
                    self.process.process).get_attribute_values()
            except AttributeError:
                # ProcessCompletionEngine not implemented for this process:
                # no completion
                return

            schemas = self._get_schemas()
            attributes = ProcessAttributes(self.process, schemas)

            self.add_trait('capsul_attributes', ControllerTrait(Controller()))
            self.capsul_attributes = attributes
            iter_attrib = self.get_iterated_attributes()
            # non-iterated attributes are shared as-is with the underlying
            # process attributes
            for attrib, trait in six.iteritems(pattributes.user_traits()):
                if attrib not in iter_attrib:
                    attributes.add_trait(attrib, trait)
            # iterated attributes become lists of the underlying trait
            for attrib in iter_attrib:
                trait = pattributes.trait(attrib)
                if trait is not None:
                    attributes.add_trait(
                        attrib, traits.List(trait, output=trait.output))
                # seed the list with the current single value, when defined
                value = getattr(pattributes, attrib, None)
                if value is not None and value is not traits.Undefined:
                    setattr(attributes, attrib, [value])
        return self.capsul_attributes
# Example #9
# 0
    def complete_iteration(self, iteration):
        # local import: importing at module level would create a cyclic
        # dependency with the completion_engine module
        from capsul.attributes.completion_engine import ProcessCompletionEngine

        engine = ProcessCompletionEngine.get_completion_engine(self)
        # only iteration-aware completion engines provide this method;
        # otherwise the call is silently skipped
        step_completion = getattr(engine, 'complete_iteration_step', None)
        if step_completion is not None:
            step_completion(iteration)
# Example #10
# 0
 def __init__(self, study):
     super(SharedPipelineAnalysis, self).__init__(study)
     # build the template pipeline once per study
     if study.template_pipeline is None:
         study.template_pipeline = self.build_pipeline()
     # instantiate the completion engine on the template pipeline; the
     # returned engine itself is not used here (the original bound it to an
     # unused local), only the call is needed
     ProcessCompletionEngine.get_completion_engine(study.template_pipeline)
     # share the same instance of the pipeline to save memory and, most of
     # all, instantiation time
     self.pipeline = study.template_pipeline
# Example #11
# 0
    def __init__(self,
                 process,
                 iterative_parameters,
                 study_config=None,
                 context_name=None):
        ''' Build an iteration over *process*.

        Parameters
        ----------
        process: Process instance or str
            process (or identifier) to be iterated
        iterative_parameters: iterable of str
            names of parameters to iterate on; each becomes a List trait on
            this object. Completion-induced parameters may be added.
        study_config: StudyConfig (optional)
        context_name: str (optional)
            context name assigned to the iterated process
        '''
        super(ProcessIteration, self).__init__()

        # NOTE(review): this tests self.study_config but overrides the
        # *local* study_config argument -- confirm that a caller-supplied
        # study_config is really meant to be replaced by Process._study_config.
        if self.study_config is None and hasattr(Process, '_study_config'):
            study_config = Process._study_config
        if study_config is not None:
            self.study_config = study_config

        self.process = get_process_instance(process, study_config=study_config)

        if context_name is not None:
            self.process.context_name = context_name
        self.regular_parameters = set()
        self.iterative_parameters = set(iterative_parameters)

        # Check that all iterative parameters are valid process parameters
        user_traits = self.process.user_traits()
        for parameter in self.iterative_parameters:
            if parameter not in user_traits:
                raise ValueError('Cannot iterate on parameter %s '
                                 'that is not a parameter of process %s' %
                                 (parameter, self.process.id))

        # use the completion system (if any) to get induced (additional)
        # iterated parameters
        if study_config is not None:
            completion_engine \
                = ProcessCompletionEngine.get_completion_engine(self)
            if hasattr(completion_engine, 'get_induced_iterative_parameters'):
                induced_iterative_parameters \
                    = completion_engine.get_induced_iterative_parameters()
                self.iterative_parameters.update(induced_iterative_parameters)
                iterative_parameters = self.iterative_parameters

        # Create iterative process parameters by copying process parameter
        # and changing iterative parameters to list
        for name, trait in six.iteritems(user_traits):
            if name in iterative_parameters:
                self.add_trait(
                    name,
                    List(trait, output=trait.output, optional=trait.optional))
                if trait.groups:
                    self.trait(name).groups = trait.groups
            else:
                self.regular_parameters.add(name)
                self.add_trait(name, trait)
                # copy initial value of the underlying process to self
                # Note: should be this be done via a links system ?
                setattr(self, name, getattr(self.process, name))
    def complete_parameters(self, process_inputs={}):
        ''' Complete the iterated process parameters over every iteration
        step, filling each iterative (list) parameter with one value per step.

        Parameters
        ----------
        process_inputs: dict (optional)
            parameters to be set on the process before completion
        '''
        # completion_progress / completion_progress_total are updated along
        # the way so a caller can monitor progress
        self.completion_progress = 0.
        try:
            self.set_parameters(process_inputs)
            attributes_set = self.get_attribute_values()
            completion_engine = ProcessCompletionEngine.get_completion_engine(
                self.process.process, self.name)
            step_attributes = completion_engine.get_attribute_values()
        except AttributeError:
            # ProcessCompletionEngine not implemented for this process:
            # no completion
            return
        iterated_attributes = self.get_iterated_attributes()
        # non-iterated attributes keep a single shared value
        for attribute in attributes_set.user_traits():
            if attribute not in iterated_attributes:
                setattr(step_attributes, attribute,
                        getattr(attributes_set, attribute))
        parameters = {}
        for parameter in self.process.regular_parameters:
            parameters[parameter] = getattr(self.process, parameter)

        # iteration count = length of the longest iterated attribute list.
        # NOTE(review): max() raises ValueError when iterated_attributes is
        # empty -- confirm callers guarantee at least one iterated attribute.
        size = max([
            len(getattr(attributes_set, attribute))
            for attribute in iterated_attributes
        ])

        # complete each step to get iterated parameters.
        # This is generally "too much" but it's difficult to perform a partial
        # completion only on iterated parameters

        iterative_parameters = dict([
            (key, []) for key in self.process.iterative_parameters
        ])

        self.completion_progress_total = size
        # xrange: python 2 idiom; presumably provided via six.moves here
        for it_step in xrange(size):
            self.capsul_iteration_step = it_step
            for attribute in iterated_attributes:
                iterated_values = getattr(attributes_set, attribute)
                # shorter attribute lists reuse their last value
                step = min(len(iterated_values) - 1, it_step)
                value = iterated_values[step]
                setattr(step_attributes, attribute, value)
            for parameter in self.process.iterative_parameters:
                values = getattr(self.process, parameter)
                if isinstance(values, list) and len(values) > it_step:
                    parameters[parameter] = values[it_step]
            completion_engine.complete_parameters(parameters)
            # collect the completed value of each iterative parameter
            for parameter in self.process.iterative_parameters:
                value = getattr(self.process.process, parameter)
                iterative_parameters[parameter].append(value)
            self.completion_progress = it_step + 1
        # assign the accumulated per-step values as list parameters
        for parameter, values in six.iteritems(iterative_parameters):
            setattr(self.process, parameter, values)
# Example #13
# 0
    def get_completion_engine(self, process, name=None):
        '''
        Factory for ProcessCompletionEngine: get an ProcessCompletionEngine
        instance for a process in the context of a given StudyConfig.

        The study_config should specify which completion system(s) is (are)
        used (FOM, ...)
        If nothing is configured, a ProcessCompletionEngine base instance will
        be returned. It will not be able to perform completion at all, but will
        conform to the API.
        '''
        # reuse a cached engine when the process already carries one
        if hasattr(process, 'completion_engine'):
            return process.completion_engine

        study_config = process.get_study_config()

        # FOM completion, when the FOM module is configured and enabled
        if 'FomConfig' in study_config.modules and study_config.use_fom:
            try:
                # the constructor never returns None, so the former
                # 'if pfom is not None' guard was dead code and is removed
                pfom = FomProcessCompletionEngine(process, name)
                pfom.create_attributes_with_fom()
                return pfom
            except KeyError:
                # process not in FOM
                pass

        # iteration: match the iteration engine to the engine used by the
        # iterated (inner) process
        if isinstance(process, ProcessIteration):
            if isinstance(
                    ProcessCompletionEngine.get_completion_engine(
                        process.process),
                    FomProcessCompletionEngine):
                return FomProcessCompletionEngineIteration(process, name)
            else:
                return ProcessCompletionEngineIteration(process, name)

        # standard ProcessCompletionEngine
        return ProcessCompletionEngine(process, name)
# Example #14
# 0
    def complete_parameters(self, process_inputs={}):
        ''' Complete the iterated process parameters over every iteration
        step, filling each iterative (list) parameter with one value per step.

        Parameters
        ----------
        process_inputs: dict (optional)
            parameters to be set on the process before completion
        '''
        # progress counters let a caller monitor the completion
        self.completion_progress = 0.
        try:
            self.set_parameters(process_inputs)
            attributes_set = self.get_attribute_values()
            completion_engine = ProcessCompletionEngine.get_completion_engine(
                self.process.process, self.name)
            step_attributes = completion_engine.get_attribute_values()
        except AttributeError:
            # ProcessCompletionEngine not implemented for this process:
            # no completion
            return
        iterated_attributes = self.get_iterated_attributes()
        # non-iterated attributes keep a single shared value
        for attribute in attributes_set.user_traits():
            if attribute not in iterated_attributes:
                setattr(step_attributes, attribute,
                        getattr(attributes_set, attribute))
        parameters = {}
        for parameter in self.process.regular_parameters:
            parameters[parameter] = getattr(self.process, parameter)

        # iteration count = longest iterated attribute list.
        # NOTE(review): max() raises ValueError if iterated_attributes is
        # empty -- confirm callers guarantee at least one iterated attribute.
        size = max([len(getattr(attributes_set, attribute))
                    for attribute in iterated_attributes])

        # complete each step to get iterated parameters.
        # This is generally "too much" but it's difficult to perform a partial
        # completion only on iterated parameters

        iterative_parameters = dict(
            [(key, []) for key in self.process.iterative_parameters])

        self.completion_progress_total = size
        # xrange: python 2 idiom; presumably provided via six.moves here
        for it_step in xrange(size):
            self.capsul_iteration_step = it_step
            for attribute in iterated_attributes:
                iterated_values = getattr(attributes_set, attribute)
                # shorter attribute lists reuse their last value
                step = min(len(iterated_values) - 1, it_step)
                value = iterated_values[step]
                setattr(step_attributes, attribute, value)
            for parameter in self.process.iterative_parameters:
                values = getattr(self.process, parameter)
                if isinstance(values, list) and len(values) > it_step:
                    parameters[parameter] = values[it_step]
            completion_engine.complete_parameters(parameters)
            # collect the completed value of each iterative parameter
            for parameter in self.process.iterative_parameters:
                value = getattr(self.process.process, parameter)
                iterative_parameters[parameter].append(value)
            self.completion_progress = it_step + 1
        # assign the accumulated per-step values as list parameters
        for parameter, values in six.iteritems(iterative_parameters):
            setattr(self.process, parameter, values)
# Example #15
# 0
 def setup_fom(process):
     ''' Synchronize the FOM settings of the process's study_config with the
     FOMs used by its completion engine (creating the FOM attributes first
     when needed). Does nothing for non-FOM completion engines.
     '''
     engine = ProcessCompletionEngine.get_completion_engine(process)
     if not isinstance(engine, FomProcessCompletionEngine):
         # no FOM completion on this process: nothing to synchronize
         return
     if getattr(engine, 'input_fom', None) is None:
         # FOM attributes not created yet
         engine.create_attributes_with_fom()
     # copy each FOM name onto the study_config, assigning only when the
     # value actually differs (keeps the original compare-before-set
     # behavior)
     for fom_attr in ('input_fom', 'output_fom', 'shared_fom'):
         fom_value = getattr(engine, fom_attr)
         if getattr(process.study_config, fom_attr) != fom_value:
             setattr(process.study_config, fom_attr, fom_value)
# Example #16
# 0
    def __init__(self, process, iterative_parameters, study_config=None,
                 context_name=None):
        ''' Build an iteration over *process*.

        Parameters
        ----------
        process: Process instance or str
            process (or identifier) to be iterated
        iterative_parameters: iterable of str
            names of parameters to iterate on; each becomes a List trait on
            this object. Completion-induced parameters may be added.
        study_config: StudyConfig (optional)
        context_name: str (optional)
            context name assigned to the iterated process
        '''
        super(ProcessIteration, self).__init__()

        # NOTE(review): this tests self.study_config but overrides the
        # *local* study_config argument -- confirm that a caller-supplied
        # study_config is really meant to be replaced by Process._study_config.
        if self.study_config is None and hasattr(Process, '_study_config'):
            study_config = Process._study_config
        if study_config is not None:
            self.study_config = study_config

        self.process = get_process_instance(process,
                                            study_config=study_config)

        if context_name is not None:
            self.process.context_name = context_name
        self.regular_parameters = set()
        self.iterative_parameters = set(iterative_parameters)

        # Check that all iterative parameters are valid process parameters
        user_traits = self.process.user_traits()
        for parameter in self.iterative_parameters:
            if parameter not in user_traits:
                raise ValueError('Cannot iterate on parameter %s '
                  'that is not a parameter of process %s'
                  % (parameter, self.process.id))

        # use the completion system (if any) to get induced (additional)
        # iterated parameters
        if study_config is not None:
            completion_engine \
                = ProcessCompletionEngine.get_completion_engine(self)
            if hasattr(completion_engine, 'get_induced_iterative_parameters'):
                induced_iterative_parameters \
                    = completion_engine.get_induced_iterative_parameters()
                self.iterative_parameters.update(induced_iterative_parameters)
                iterative_parameters = self.iterative_parameters

        # Create iterative process parameters by copying process parameter
        # and changing iterative parameters to list
        for name, trait in six.iteritems(user_traits):
            if name in iterative_parameters:
                self.add_trait(name, List(trait, output=trait.output,
                                          optional=trait.optional))
                if trait.groups:
                    self.trait(name).groups = trait.groups
            else:
                self.regular_parameters.add(name)
                self.add_trait(name, trait)
                # copy initial value of the underlying process to self
                # Note: should be this be done via a links system ?
                setattr(self, name, getattr(self.process, name))
# Example #17
# 0
 def get_iterated_attributes(self):
     ''' Attributes of the underlying process which are attached to at least
     one iterative parameter, in the declaration order of the attributes
     controller. Returns an empty list when the process has no completion
     engine.
     '''
     try:
         engine = ProcessCompletionEngine.get_completion_engine(
             self.process.process)
         proc_attrs = engine.get_attribute_values()
     except AttributeError:
         # ProcessCompletionEngine not implemented for this process:
         # no completion
         return []
     per_param = proc_attrs.get_parameters_attributes()
     iterated = set()
     for param in self.process.iterative_parameters:
         iterated.update(per_param.get(param, {}))
     # preserve the controller's trait declaration order
     return [name for name in proc_attrs.user_traits().keys()
             if name in iterated]
# Example #18
# 0
    def test_run_iteraton_swf(self):
        # Run an iterated DummyProcess through soma-workflow and check that
        # each subject's output file is produced with the expected content.
        # ('iteraton' typo kept: renaming would change the test identifier.)
        study_config = self.study_config
        tmp_dir = tempfile.mkdtemp(prefix='capsul_')
        self.temps.append(tmp_dir)

        study_config.input_directory = os.path.join(tmp_dir, 'in')
        study_config.output_directory = os.path.join(tmp_dir, 'out')
        os.mkdir(study_config.input_directory)
        os.mkdir(study_config.output_directory)

        pipeline = study_config.get_iteration_pipeline(
            'iter',
            'dummy',
            'capsul.attributes.test.test_attributed_process.DummyProcess',
            ['truc', 'bidule'])
        cm = ProcessCompletionEngine.get_completion_engine(pipeline)
        atts = cm.get_attribute_values()
        atts.center = ['muppets']
        atts.subject = ['kermit', 'piggy', 'stalter', 'waldorf']
        cm.complete_parameters()

        # create input files
        for s in atts.subject:
            with open(os.path.join(
                    study_config.input_directory,
                    'DummyProcess_truc_muppets_%s' % s), 'w') as f:
                f.write('%s\n' %s)

        #from capsul.pipeline import pipeline_workflow
        #wf = pipeline_workflow.workflow_from_pipeline(pipeline)
        #from soma_workflow import client as swc
        #swc.Helper.serialize('/tmp/workflow.workflow', wf)

        # run
        study_config.use_soma_workflow = True
        study_config.run(pipeline)

        # check outputs: one file per subject, containing its subject name
        out_files = [
            os.path.join(
                study_config.output_directory,
                'DummyProcess_bidule_muppets_%s' % s) for s in atts.subject]
        for s, out_file in zip(atts.subject, out_files):
            self.assertTrue(os.path.isfile(out_file))
            with open(out_file) as f:
                self.assertTrue(f.read() == '%s\n' % s)
# Example #19
# 0
    def complete_iteration_step(self, step):
        ''' Complete the parameters on the iterated process for a given
        iteration step.
        '''
        process = self.process
        # unwrap a pipeline node to reach the actual process
        if isinstance(process, ProcessNode):
            process = process.process

        # propagate forbid_completion
        # parameters marked forbid_completion on the iteration are forced
        # onto the underlying process so the completion leaves them alone
        for param, trait in six.iteritems(process.user_traits()):
            if trait.forbid_completion:
                if hasattr(process.process, 'propagate_metadata'):
                    process.process.propagate_metadata(
                        '', param, {'forbid_completion': True})
                else:
                    process.process.trait(param).forbid_completion = True

        try:
            attributes_set = self.get_attribute_values()
            completion_engine = ProcessCompletionEngine.get_completion_engine(
                process.process, self.name)
            step_attributes = completion_engine.get_attribute_values()
        except AttributeError:
            # ProcessCompletionEngine not implemented for this process:
            # no completion
            return
        iterated_attributes = self.get_iterated_attributes()
        self.capsul_iteration_step = step
        for attribute in iterated_attributes:
            iterated_values = getattr(attributes_set, attribute)
            # note: 'step' (the parameter) is shadowed here; its original
            # value was saved in self.capsul_iteration_step above.
            # Shorter attribute lists reuse their last value.
            step = min(len(iterated_values) - 1, self.capsul_iteration_step)
            value = iterated_values[step]
            setattr(step_attributes, attribute, value)
        # non-iterated attributes keep a single shared value
        for attribute in attributes_set.user_traits():
            if attribute not in iterated_attributes:
                setattr(step_attributes, attribute,
                        getattr(attributes_set, attribute))
        parameters = {}
        for parameter in process.regular_parameters:
            parameters[parameter] = getattr(process, parameter)
        # iterative parameters are narrowed to this step's value, if present
        for parameter in process.iterative_parameters:
            values = getattr(process, parameter)
            if len(values) > self.capsul_iteration_step:
                parameters[parameter] = values[self.capsul_iteration_step]
        completion_engine.complete_parameters(parameters)
# Example #20
# 0
    def test_run_iteraton_swf(self):
        # Run an iterated DummyProcess through soma-workflow and check that
        # each subject's output file is produced with the expected content.
        # ('iteraton' typo kept: renaming would change the test identifier.)
        study_config = self.study_config
        tmp_dir = tempfile.mkdtemp(prefix='capsul_')
        self.temps.append(tmp_dir)

        study_config.input_directory = os.path.join(tmp_dir, 'in')
        study_config.output_directory = os.path.join(tmp_dir, 'out')
        os.mkdir(study_config.input_directory)
        os.mkdir(study_config.output_directory)

        pipeline = Pipeline()
        pipeline.set_study_config(study_config)
        pipeline.add_iterative_process(
            'dummy',
            'capsul.attributes.test.test_attributed_process.DummyProcess',
            ['truc', 'bidule'])
        pipeline.autoexport_nodes_parameters()
        cm = ProcessCompletionEngine.get_completion_engine(pipeline)
        atts = cm.get_attribute_values()
        atts.center = ['muppets']
        atts.subject = ['kermit', 'piggy', 'stalter', 'waldorf']
        cm.complete_parameters()

        # create input files
        # use context managers so the files are flushed and closed before
        # the workflow reads them (the previous open(...).write(...) leaked
        # the file handles)
        for s in atts.subject:
            with open(os.path.join(study_config.input_directory,
                                   'DummyProcess_truc_muppets_%s' % s),
                      'w') as f:
                f.write('%s\n' % s)

        # run
        study_config.use_soma_workflow = True
        study_config.run(pipeline)

        # check outputs: one file per subject, containing its subject name
        out_files = [
            os.path.join(study_config.output_directory,
                         'DummyProcess_bidule_muppets_%s' % s)
            for s in atts.subject
        ]
        for s, out_file in zip(atts.subject, out_files):
            self.assertTrue(os.path.isfile(out_file))
            with open(out_file) as f:
                self.assertTrue(f.read() == '%s\n' % s)
Пример #21
0
 def test_completion(self):
     """DummyProcess attribute completion: setting the center and subject
     attributes must produce the expected input/output file paths."""
     study_config = self.study_config
     process = study_config.get_process_instance(
         'capsul.attributes.test.test_attributed_process.DummyProcess')
     from capsul.attributes.test.test_attributed_process \
         import DummyProcessAttributes, MyPathCompletion
     engine = ProcessCompletionEngine.get_completion_engine(process)
     attributes = engine.get_attribute_values()
     self.assertIsInstance(engine, ProcessCompletionEngine)
     self.assertIsInstance(attributes, DummyProcessAttributes)
     self.assertIsInstance(engine.get_path_completion_engine(),
                           MyPathCompletion)
     attributes.center = 'jojo'
     attributes.subject = 'barbapapa'
     engine.complete_parameters()
     # compare normalized paths so the check is platform-independent
     expected = {
         'truc': '/tmp/in/DummyProcess_truc_jojo_barbapapa',
         'bidule': '/tmp/out/DummyProcess_bidule_jojo_barbapapa',
     }
     for param, path in expected.items():
         self.assertEqual(os.path.normpath(getattr(process, param)),
                          os.path.normpath(path))
Пример #22
0
 def test_completion(self):
     """DummyProcess attribute completion: setting the center and subject
     attributes must produce the expected input/output file paths.

     Paths are normalized with os.path.normpath before comparison, for
     consistency with the sibling test and portability across platforms.
     """
     study_config = self.study_config
     process = study_config.get_process_instance(
         'capsul.attributes.test.test_attributed_process.DummyProcess')
     from capsul.attributes.test.test_attributed_process \
         import DummyProcessAttributes, MyPathCompletion
     patt = ProcessCompletionEngine.get_completion_engine(process)
     atts = patt.get_attribute_values()
     self.assertTrue(isinstance(patt, ProcessCompletionEngine))
     self.assertTrue(isinstance(atts, DummyProcessAttributes))
     self.assertTrue(isinstance(
         patt.get_path_completion_engine(),
         MyPathCompletion))
     atts.center = 'jojo'
     atts.subject = 'barbapapa'
     patt.complete_parameters()
     # normalize both sides: completion may emit OS-specific separators
     self.assertEqual(os.path.normpath(process.truc),
                      os.path.normpath('/tmp/in/DummyProcess_truc_jojo_barbapapa'))
     self.assertEqual(os.path.normpath(process.bidule),
                      os.path.normpath('/tmp/out/DummyProcess_bidule_jojo_barbapapa'))
Пример #23
0
    def test_run_iteraton_swf(self):
        """Run an iterated DummyProcess pipeline through Soma-Workflow and
        check that one output file per iterated subject was produced.
        """
        study_config = self.study_config
        tmp_dir = tempfile.mkdtemp(prefix='capsul_')
        # register for cleanup by the test fixture
        self.temps.append(tmp_dir)

        study_config.input_directory = os.path.join(tmp_dir, 'in')
        study_config.output_directory = os.path.join(tmp_dir, 'out')
        os.mkdir(study_config.input_directory)
        os.mkdir(study_config.output_directory)

        pipeline = Pipeline()
        pipeline.set_study_config(study_config)
        pipeline.add_iterative_process(
            'dummy',
            'capsul.attributes.test.test_attributed_process.DummyProcess',
            ['truc', 'bidule'])
        pipeline.autoexport_nodes_parameters()
        cm = ProcessCompletionEngine.get_completion_engine(pipeline)
        atts = cm.get_attribute_values()
        atts.center = ['muppets']
        atts.subject = ['kermit', 'piggy', 'stalter', 'waldorf']
        cm.complete_parameters()

        # create input files; context managers close the handles
        # deterministically (the original leaked open file objects)
        for s in atts.subject:
            in_file = os.path.join(
                study_config.input_directory,
                'DummyProcess_truc_muppets_%s' % s)
            with open(in_file, 'w') as f:
                f.write('%s\n' % s)

        # run through soma-workflow
        study_config.use_soma_workflow = True
        study_config.run(pipeline)

        # check outputs: one file per iterated subject
        out_files = [
            os.path.join(
                study_config.output_directory,
                'DummyProcess_bidule_muppets_%s' % s) for s in atts.subject]
        for s, out_file in zip(atts.subject, out_files):
            self.assertTrue(os.path.isfile(out_file))
            with open(out_file) as f:
                self.assertEqual(f.read(), '%s\n' % s)
Пример #24
0
    def get_completion_engine(self, process, name=None):
        '''
        Factory for ProcessCompletionEngine: get a ProcessCompletionEngine
        instance for a process in the context of a given StudyConfig.

        The study_config should specify which completion system(s) is (are)
        used (FOM, ...)
        If nothing is configured, a ProcessCompletionEngine base instance
        will be returned. It will not be able to perform completion at all,
        but will conform to the API.

        Parameters
        ----------
        process: Process instance
            process to get a completion engine for
        name: str (optional)
            name used to look the process up in completion rules (FOM)
        '''
        # a process may carry its own pre-built engine: reuse it
        if hasattr(process, 'completion_engine'):
            return process.completion_engine

        study_config = process.get_study_config()

        # FOM-based completion, when the FOM module is active.
        # (the original also tested ``pfom is not None`` after construction,
        # which is always true: the constructor either returns an instance
        # or raises -- the dead check has been removed)
        if 'FomConfig' in study_config.modules and study_config.use_fom:
            try:
                pfom = FomProcessCompletionEngine(process, name)
                pfom.create_attributes_with_fom()
                return pfom
            except KeyError:
                # process not in FOM: fall through to other engines
                pass

        # iteration: wrap with an iteration-aware engine matching the
        # engine type of the iterated (inner) process
        if isinstance(process, ProcessIteration):
            inner_engine = ProcessCompletionEngine.get_completion_engine(
                process.process)
            if isinstance(inner_engine, FomProcessCompletionEngine):
                return FomProcessCompletionEngineIteration(process, name)
            return ProcessCompletionEngineIteration(process, name)

        # standard ProcessCompletionEngine
        return ProcessCompletionEngine(process, name)
Пример #25
0
 def get_iterated_attributes(self):
     '''
     Attributes which are linked to the iterative parameters of the
     iterated process, and thus vary across iteration steps.
     '''
     process = self.process
     # unwrap a pipeline node to reach its underlying process
     if isinstance(process, ProcessNode):
         process = process.process
     try:
         inner_engine = ProcessCompletionEngine.get_completion_engine(
             process.process)
         pattributes = inner_engine.get_attribute_values()
     except AttributeError:
         # ProcessCompletionEngine not implemented for this process:
         # no completion
         return []
     param_attributes = pattributes.get_parameters_attributes()
     iterated = set()
     for parameter in process.iterative_parameters:
         iterated.update(param_attributes.get(parameter, {}).keys())
     # if no iterative parameter has been declared, use all attributes
     # (NOTE: a set is returned here, a list below -- preserved from the
     # original behaviour)
     if not process.iterative_parameters:
         return set(pattributes.user_traits().keys())
     return [attrib for attrib in pattributes.user_traits().keys()
             if attrib in iterated]
Пример #26
0
 def test_iteration(self):
     """Iterated completion over several subjects must fill each iterated
     pipeline parameter with one path per subject, in order."""
     study_config = self.study_config
     pipeline = Pipeline()
     pipeline.set_study_config(study_config)
     pipeline.add_iterative_process(
         'dummy',
         'capsul.attributes.test.test_attributed_process.DummyProcess',
         ['truc', 'bidule'])
     pipeline.autoexport_nodes_parameters()
     engine = ProcessCompletionEngine.get_completion_engine(pipeline)
     attributes = engine.get_attribute_values()
     subjects = ['kermit', 'piggy', 'stalter', 'waldorf']
     attributes.center = ['muppets']
     attributes.subject = subjects
     engine.complete_parameters()
     # expected paths are built from the same subject list, in order
     self.assertEqual(
         pipeline.truc,
         ['/tmp/in/DummyProcess_truc_muppets_%s' % s for s in subjects])
     self.assertEqual(
         pipeline.bidule,
         ['/tmp/out/DummyProcess_bidule_muppets_%s' % s for s in subjects])
Пример #27
0
    def __init__(self, process, iterative_parameters, study_config=None,
                 context_name=None):
        """Build an iteration wrapper over *process*.

        Each parameter listed in *iterative_parameters* becomes a List
        trait on this object; the remaining process parameters are copied
        over unchanged ("regular" parameters).

        Parameters
        ----------
        process: Process instance, class, or process identifier string
            the process to be iterated
        iterative_parameters: iterable of str
            names of process parameters to iterate over; each must be an
            existing parameter of the process
        study_config: StudyConfig (optional)
        context_name: str (optional)
            context name assigned to the underlying process

        Raises
        ------
        ValueError
            if an iterative parameter is not a parameter of the process
        """
        super(ProcessIteration, self).__init__()

        # NOTE(review): this tests self.study_config but assigns the local
        # variable -- looks suspicious (possibly meant
        # ``if study_config is None``), but kept as-is since the intended
        # fallback semantics cannot be confirmed from here.
        if self.study_config is None and hasattr(Process, '_study_config'):
            study_config = study_cmod.default_study_config()
        if study_config is not None:
            self.study_config = study_config

        self.process = get_process_instance(process,
                                            study_config=study_config)

        if context_name is not None:
            self.process.context_name = context_name
        self.regular_parameters = set()
        self.iterative_parameters = set(iterative_parameters)

        # use the completion system (if any) to get induced (additional)
        # iterated parameters
        if study_config is not None:
            # don't import this at module level to avoid cyclic imports
            from capsul.attributes.completion_engine \
                import ProcessCompletionEngine

            completion_engine \
                = ProcessCompletionEngine.get_completion_engine(self)
            if hasattr(completion_engine, 'get_induced_iterative_parameters'):
                induced_iterative_parameters \
                    = completion_engine.get_induced_iterative_parameters()
                self.iterative_parameters.update(induced_iterative_parameters)
                iterative_parameters = self.iterative_parameters

        # Check that all iterative parameters are valid process parameters
        user_traits = self.process.user_traits()
        has_output = False
        inputs = []
        for parameter in self.iterative_parameters:
            if parameter not in user_traits:
                raise ValueError('Cannot iterate on parameter %s '
                  'that is not a parameter of process %s'
                  % (parameter, self.process.id))
            if user_traits[parameter].output:
                has_output = True
            else:
                inputs.append(parameter)

        # Create iterative process parameters by copying process parameter
        # and changing iterative parameters to list
        for name, trait in six.iteritems(user_traits):
            if name in iterative_parameters:
                kw = {}
                if trait.input_filename is False:
                    kw['input_filename'] = False
                self.add_trait(name, List(trait, output=trait.output,
                                          optional=trait.optional, **kw))
                if trait.groups:
                    self.trait(name).groups = trait.groups
                if trait.forbid_completion is not None:
                    # we don't have access to the pipeline or even the
                    # node in self, we cannot propagate the forbid_completion
                    # value outside of self.
                    # However this will be done in Pipeline.add_process() when
                    # inserting self in a pipeline, so this is OK.
                    # bugfix: the original wrote ``trait(name)``, calling the
                    # source trait object instead of looking up the newly
                    # created list trait on self (as done for .groups above).
                    self.trait(name).forbid_completion \
                        = trait.forbid_completion
            else:
                self.regular_parameters.add(name)
                self.add_trait(name, trait)
                # copy initial value of the underlying process to self
                # Note: should be this be done via a links system ?
                setattr(self, name, getattr(self.process, name))

        # if the process has iterative outputs, the output lists have to be
        # resized according to inputs
        if has_output:
            self.on_trait_change(self._resize_outputs, inputs)
 def test_pipeline_adhoc_completion(self):
     """Ad-hoc completion on AveragePipeline: the supplied attributes must
     drive both exported pipeline parameters and inner node parameters."""
     study_config = self.study_config
     pipeline = get_process_instance(
         'bv_capsul_ex.ex_processes.AveragePipeline', study_config)
     apipeline = ProcessCompletionEngine.get_completion_engine(
         pipeline, 'average_pipeline')
     self.assertTrue(apipeline is not None)
     attrib = {
         'center': 'alpha_centauri',
         'subject': 'r2d2',
         'analysis': 'M0',
         'mask_type': 'amyelencephalic',
     }
     pinputs = {
         'capsul_attributes': attrib,
         'threshold': 0.75,
     }
     self.assertEqual(
         sorted(apipeline.get_attribute_values().user_traits().keys()),
         ['analysis', 'center', 'mask_type', 'subject'])
     apipeline.complete_parameters(process_inputs=pinputs)
     # non-attribute inputs pass straight through
     self.assertEqual(pipeline.threshold, 0.75)
     # the template mask comes from the shared directory, named after
     # the mask_type attribute
     self.assertEqual(
         pipeline.template_mask,
         os.path.join(study_config.shared_directory,
                      'template_masks/amyelencephalic.npy'))
     # (checks on intermediate node outputs -- the threshold and
     # template_mask_* nodes -- are currently disabled)
     # average outputs: both the inner node parameters and the exported
     # pipeline parameters must point at the same completed paths
     for side in ('inf', 'sup'):
         expected = os.path.join(
             study_config.output_directory,
             'alpha_centauri_r2d2_M0_average_%s.npy' % side)
         node_process = pipeline.nodes['average_%s' % side].process
         self.assertEqual(node_process.average, expected)
         self.assertEqual(getattr(pipeline, 'average_%s' % side), expected)
Пример #29
0
    def create_attributes_with_fom(self):
        """Populate the attributes set from FOM definitions.

        Scans the study config FOMs for patterns matching this process
        (by completion name, id or name) and registers, for every matching
        FOM parameter, the discriminant attributes with their FOM default
        values. For pipelines, switch nodes are also visited so that their
        own attributes get merged in.

        Raises
        ------
        KeyError
            if the process is not found in any FOM
        """
        process = self.process

        # hoisted out of the schema loop: the original re-defined this
        # closure on every iteration although it depends only on its
        # arguments
        def editable_attributes(attributes, fom):
            # build an EditableAttributes set from FOM attribute names,
            # using the FOM-declared default values
            ea = EditableAttributes()
            for attribute in attributes:
                if attribute.startswith('fom_'):
                    continue  # skip FOM internals
                default_value = fom.attribute_definitions[attribute].get(
                    'default_value', '')
                ea.add_trait(attribute, Str(default_value))
            return ea

        # Get attributes in input fom
        names_search_list = (self.name, process.id, process.name)
        capsul_attributes = self.get_attribute_values()
        matching_fom = False

        for schema, fom \
                in six.iteritems(process.study_config.modules_data.foms):
            atp = process.study_config.modules_data.fom_atp.get(schema)
            if atp is None:
                continue
            # take the first name under which this process is known to the
            # FOM; skip the FOM entirely when none matches
            for name in names_search_list:
                fom_patterns = fom.patterns.get(name)
                if fom_patterns is not None:
                    break
            else:
                continue

            matching_fom = True

            for parameter in fom_patterns:
                param_attributes = atp.find_discriminant_attributes(
                        fom_parameter=parameter, fom_process=name)
                if param_attributes:
                    ea = editable_attributes(param_attributes, fom)
                    try:
                        capsul_attributes.set_parameter_attributes(
                            parameter, schema, ea, {})
                    except KeyError:
                        # param already registered
                        pass

        if not matching_fom:
            raise KeyError('Process not found in FOMs')

        # in a pipeline, we still must iterate over nodes to find switches,
        # which have their own behaviour.
        if isinstance(self.process, Pipeline):
            attributes = self.capsul_attributes
            name = self.process.name

            for node_name, node in six.iteritems(self.process.nodes):
                if isinstance(node, Switch):
                    # (dead ``if node is None`` check removed: node just
                    # passed the isinstance test)
                    subprocess = node
                    pname = '.'.join([name, node_name])
                    subprocess_compl = \
                        ProcessCompletionEngine.get_completion_engine(
                            subprocess, pname)
                    # best effort: fall back to building our own engine,
                    # and skip the switch when neither works.
                    # Bare ``except:`` narrowed to ``except Exception:`` so
                    # that KeyboardInterrupt/SystemExit are not swallowed.
                    try:
                        sub_attributes \
                            = subprocess_compl.get_attribute_values()
                    except Exception:
                        try:
                            subprocess_compl = self.__class__(subprocess)
                            sub_attributes \
                                = subprocess_compl.get_attribute_values()
                        except Exception:
                            continue
                    subprocess_compl.install_switch_observer(self)
                    for attribute, trait \
                            in six.iteritems(sub_attributes.user_traits()):
                        if attributes.trait(attribute) is None:
                            attributes.add_trait(attribute, trait)
                            setattr(attributes, attribute,
                                    getattr(sub_attributes, attribute))

            self._get_linked_attributes()
Пример #30
0
 def complete_iteration(self, iteration):
     """Complete parameters for one iteration step, when the attached
     completion engine supports per-step completion."""
     engine = ProcessCompletionEngine.get_completion_engine(self)
     # only iterative completion engines expose complete_iteration_step
     step_completion = getattr(engine, 'complete_iteration_step', None)
     if step_completion is not None:
         step_completion(iteration)
Пример #31
0
    def create_attributes_with_fom(self):
        """Populate the attributes set from FOM definitions.

        Scans the study config FOMs for patterns matching this process
        (by completion name, id, name or context name) and registers, for
        every matching FOM parameter, the discriminant attributes with
        their FOM default values. For pipelines, switch nodes are also
        visited so that their own attributes get merged in.

        Raises
        ------
        KeyError
            if the process is not found in any FOM
        """
        process = self.process

        # hoisted out of the schema loop: the original re-defined this
        # closure on every iteration although it depends only on its
        # arguments
        def editable_attributes(attributes, fom):
            # build an EditableAttributes set from FOM attribute names,
            # using the FOM-declared default values
            ea = EditableAttributes()
            for attribute in attributes:
                if attribute.startswith('fom_'):
                    continue  # skip FOM internals
                default_value = fom.attribute_definitions[attribute].get(
                    'default_value', '')
                ea.add_trait(attribute, Str(default_value))
            return ea

        # Get attributes in input fom
        names_search_list = (self.name, process.id, process.name,
                             getattr(process, 'context_name', ''))
        capsul_attributes = self.get_attribute_values()
        matching_fom = False

        for schema, fom \
                in six.iteritems(process.study_config.modules_data.foms):
            atp = process.study_config.modules_data.fom_atp.get(schema)
            if atp is None:
                continue
            # take the first name under which this process is known to the
            # FOM; skip the FOM entirely when none matches
            for name in names_search_list:
                fom_patterns = fom.patterns.get(name)
                if fom_patterns is not None:
                    break
            else:
                continue

            matching_fom = True

            for parameter in fom_patterns:
                param_attributes = atp.find_discriminant_attributes(
                    fom_parameter=parameter, fom_process=name)
                if param_attributes:
                    ea = editable_attributes(param_attributes, fom)
                    try:
                        capsul_attributes.set_parameter_attributes(
                            parameter, schema, ea, {})
                    except KeyError:
                        # param already registered
                        pass

        if not matching_fom:
            raise KeyError('Process not found in FOMs')

        # in a pipeline, we still must iterate over nodes to find switches,
        # which have their own behaviour.
        if isinstance(self.process, Pipeline):
            attributes = self.capsul_attributes
            name = self.process.name

            for node_name, node in six.iteritems(self.process.nodes):
                if isinstance(node, Switch):
                    # (dead ``if node is None`` check removed: node just
                    # passed the isinstance test)
                    subprocess = node
                    pname = '.'.join([name, node_name])
                    subprocess_compl = \
                        ProcessCompletionEngine.get_completion_engine(
                            subprocess, pname)
                    # best effort: fall back to building our own engine,
                    # and skip the switch when neither works.
                    # Bare ``except:`` narrowed to ``except Exception:`` so
                    # that KeyboardInterrupt/SystemExit are not swallowed.
                    try:
                        sub_attributes \
                            = subprocess_compl.get_attribute_values()
                    except Exception:
                        try:
                            subprocess_compl = self.__class__(subprocess)
                            sub_attributes \
                                = subprocess_compl.get_attribute_values()
                        except Exception:
                            continue
                    for attribute, trait \
                            in six.iteritems(sub_attributes.user_traits()):
                        if attributes.trait(attribute) is None:
                            attributes.add_trait(attribute, trait)
                            setattr(attributes, attribute,
                                    getattr(sub_attributes, attribute))

            self._get_linked_attributes()
Пример #32
0
 def complete_iteration(self, iteration):
     """Run per-step completion for *iteration* if this process's
     completion engine is iteration-aware."""
     ce = ProcessCompletionEngine.get_completion_engine(self)
     # non-iterative engines have no complete_iteration_step: do nothing
     if hasattr(ce, 'complete_iteration_step'):
         ce.complete_iteration_step(iteration)
Пример #33
0
def main():
    ''' Run the :mod:`capsul.process.runprocess` module as a commandline
    '''

    usage = '''Usage: python -m capsul [options] processname [arg1] [arg2] ...
    [argx=valuex] [argy=valuey] ...

    Example:
    python -m capsul threshold ~/data/irm.ima /tmp/th.nii threshold1=80

    Named arguments (in the shape argx=valuex) may address sub-processes of a
    pipeline, using the dot separator:

    PrepareSubject.t1mri=/home/myself/mymri.nii

    For a more precise description, please look at the web documentation:
    http://brainvisa.info/capsul/user_doc/user_guide_tree/index.html
    '''

    # Set up logging on stderr. This must be called before any logging takes
    # place, to avoid "No handlers could be found for logger" errors.
    logging.basicConfig()

    parser = OptionParser(description='Run a single CAPSUL process',
                          usage=usage)
    group1 = OptionGroup(
        parser,
        'Config',
        description='Processing configuration, database options')
    group1.add_option(
        '--studyconfig',
        dest='studyconfig',
        help='load StudyConfig configuration from the given file (JSON)')
    group1.add_option('-i',
                      '--input',
                      dest='input_directory',
                      help='input data directory (if not specified in '
                      'studyconfig file). If not specified neither on the '
                      'commandline nor study configfile, taken as the same as '
                      'output.')
    group1.add_option('-o',
                      '--output',
                      dest='output_directory',
                      help='output data directory (if not specified in '
                      'studyconfig file). If not specified neither on the '
                      'commandline nor study configfile, taken as the same as '
                      'input.')
    parser.add_option_group(group1)

    group2 = OptionGroup(
        parser,
        'Processing',
        description='Processing options, distributed execution')
    group2.add_option('--swf',
                      '--soma_workflow',
                      dest='soma_workflow',
                      default=False,
                      action='store_true',
                      help='use soma_workflow. Soma-Workflow '
                      'configuration has to be setup and valid for non-local '
                      'execution, and additional file transfer options '
                      'may be used. The default is *not* to use SWF and '
                      'process mono-processor, sequential execution.')
    group2.add_option('-r',
                      '--resource_id',
                      dest='resource_id',
                      default=None,
                      help='soma-workflow resource ID, defaults to localhost')
    group2.add_option('-p',
                      '--password',
                      dest='password',
                      default=None,
                      help='password to access the remote computing resource. '
                      'Do not specify it if using a ssh key')
    group2.add_option('--rsa-pass',
                      dest='rsa_key_pass',
                      default=None,
                      help='RSA key password, for ssh key access')
    group2.add_option('--queue',
                      dest='queue',
                      default=None,
                      help='Queue to use on the computing resource. If not '
                      'specified, use the default queue.')
    #group2.add_option('--input-processing', dest='input_file_processing',
    #default=None, help='Input files processing: local_path, '
    #'transfer, translate, or translate_shared. The default is '
    #'local_path if the computing resource is the localhost, or '
    #'translate_shared otherwise.')
    #group2.add_option('--output-processing', dest='output_file_processing',
    #default=None, help='Output files processing: local_path, '
    #'transfer, or translate. The default is local_path.')
    group2.add_option('--keep-succeeded-workflow',
                      dest='keep_succeded_workflow',
                      action='store_true',
                      default=False,
                      help='keep the workflow in the computing resource '
                      'database after execution. By default it is removed.')
    group2.add_option('--delete-failed-workflow',
                      dest='delete_failed_workflow',
                      action='store_true',
                      default=False,
                      help='delete the workflow in the computing resource '
                      'database after execution, if it has failed. By default '
                      'it is kept.')
    parser.add_option_group(group2)

    group3 = OptionGroup(parser, 'Iteration', description='Iteration')
    group3.add_option('-I',
                      '--iterate',
                      dest='iterate_on',
                      action='append',
                      help='Iterate the given process, iterating over the '
                      'given parameter(s). Multiple parameters may be '
                      'iterated jointly using several -I options. In the '
                      'process parameters, values are replaced by lists, all '
                      'iterated lists should have the same size.\n'
                      'Ex:\n'
                      'python -m capsul -I par_a -I par_c a_process '
                      'par_a="[1, 2]" par_b="something" '
                      'par_c="[\\"one\\", \\"two\\"]"')
    parser.add_option_group(group3)

    group4 = OptionGroup(parser, 'Attributes completion')
    group4.add_option('-a',
                      '--attribute',
                      dest='attributes',
                      action='append',
                      default=[],
                      help='set completion (including FOM) attribute. '
                      'Syntax: attribute=value, value the same syntax as '
                      'process parameters (python syntax for lists, for '
                      'instance), with proper quotes if needed for shell '
                      'escaping.\n'
                      'Ex: -a acquisition="default" '
                      '-a subject=\'["s1", "s2"]\'')
    parser.add_option_group(group4)

    group5 = OptionGroup(parser,
                         'Help',
                         description='Help and documentation options')
    group5.add_option('--process-help',
                      dest='process_help',
                      action='store_true',
                      default=False,
                      help='display specified process help')
    parser.add_option_group(group5)

    parser.disable_interspersed_args()
    (options, args) = parser.parse_args()

    if options.studyconfig:
        study_config = StudyConfig(modules=StudyConfig.default_modules +
                                   ['FomConfig', 'BrainVISAConfig'])
        # close the config file deterministically (the original leaked the
        # open file object)
        with open(options.studyconfig) as conf_file:
            if yaml:
                # SECURITY NOTE: yaml.load can construct arbitrary Python
                # objects; prefer yaml.safe_load if the config file may come
                # from an untrusted source.
                scdict = yaml.load(conf_file)
            else:
                scdict = json.load(conf_file)
        study_config.set_study_configuration(scdict)
    else:
        study_config = StudyConfig()
        study_config.read_configuration()

    # default each of input/output directory to the other when only one
    # is provided
    if options.input_directory:
        study_config.input_directory = options.input_directory
    if options.output_directory:
        study_config.output_directory = options.output_directory
    if study_config.output_directory in (None, Undefined) \
            and study_config.input_directory not in (None, Undefined):
        study_config.output_directory = study_config.input_directory
    if study_config.input_directory in (None, Undefined) \
            and study_config.output_directory not in (None, Undefined):
        study_config.input_directory = study_config.output_directory
    study_config.somaworkflow_keep_succeeded_workflows \
        = options.keep_succeded_workflow
    study_config.somaworkflow_keep_failed_workflows \
        = not options.delete_failed_workflow

    # "name=value" argument syntax; raw string avoids invalid escape
    # sequence warnings on Python >= 3.6
    kwre = re.compile(r'([a-zA-Z_](\.?[a-zA-Z0-9_])*)\s*=\s*(.*)$')

    attributes = {}
    for att in options.attributes:
        m = kwre.match(att)
        if m is None:
            raise SyntaxError('syntax error in attribute definition: %s' % att)
        attributes[m.group(1)] = convert_commandline_parameter(m.group(3))

    # split positional args from keyword (name=value) args
    args = tuple((convert_commandline_parameter(i) for i in args))
    kwargs = {}
    todel = []
    for arg in args:
        if isinstance(arg, six.string_types):
            m = kwre.match(arg)
            if m is not None:
                kwargs[m.group(1)] = convert_commandline_parameter(m.group(3))
                todel.append(arg)
    args = [arg for arg in args if arg not in todel]

    if not args:
        parser.print_usage()
        sys.exit(2)

    # get the main process
    process_name = args[0]
    args = args[1:]

    iterated = options.iterate_on
    try:
        process = get_process_with_params(process_name, study_config, iterated,
                                          attributes, *args, **kwargs)
    except ProcessParamError as e:
        print("error: {0}".format(e), file=sys.stderr)
        sys.exit(1)

    if options.process_help:
        process.help()

        print()

        completion_engine \
            = ProcessCompletionEngine.get_completion_engine(process)
        attribs = completion_engine.get_attribute_values()
        aval = attribs.export_to_dict()
        print('Completion attributes:')
        print('----------------------')
        print()
        print('(note: may differ depending on study config file contents, '
              'completion rules (FOM)...)')
        print()

        skipped = set(['generated_by_parameter', 'generated_by_process'])
        for name, value in six.iteritems(aval):
            if name in skipped:
                continue
            ttype = attribs.trait(name).trait_type.__class__.__name__
            if isinstance(attribs.trait(name).trait_type, List):
                ttype += '(%s)' \
                    % attribs.trait(name).inner_traits[
                        0].trait_type.__class__.__name__
            print('%s:' % name, ttype)
            if value not in (None, Undefined):
                print('   ', value)

        print()
        del aval, attribs, completion_engine, process
        sys.exit(0)

    resource_id = options.resource_id
    password = options.password
    rsa_key_pass = options.rsa_key_pass
    queue = options.queue

    study_config.use_soma_workflow = options.soma_workflow

    # file processing modes are currently not configurable: both branches
    # of the original if/else assigned the identical value
    file_processing = [None, None]

    res = run_process_with_distribution(
        study_config,
        process,
        options.soma_workflow,
        resource_id=resource_id,
        password=password,
        rsa_key_pass=rsa_key_pass,
        queue=queue,
        input_file_processing=file_processing[0],
        output_file_processing=file_processing[1])

    sys.exit(0)
# Example #34
    def create_attributes_with_fom(self):
        """Build the completion attributes of the process from FOMs.

        Searches the study config FOMs (and, in auto-fom mode, every other
        known FOM) for patterns matching this process, and registers on the
        attributes controller the discriminant attributes of each matched
        parameter under its schema ('input', 'output', 'shared', ...).
        When no FOM explicitly matched the 'input' or 'output' schema, the
        first other matching FOM is reused as a fallback for it and recorded
        in the study config / modules data.

        For pipelines, switch nodes are visited afterwards since they have
        their own completion behaviour, and their attributes are merged in.

        Raises
        ------
        KeyError
            If the process is not found in any FOM.
        """
        process = self.process
        study_config = process.study_config
        modules_data = study_config.modules_data

        # candidate FOM pattern names for this process, most specific first
        names_search_list = (self.name, process.id, process.name,
                             getattr(process, 'context_name', ''))
        capsul_attributes = self.get_attribute_values()
        matching_fom = False
        input_found = False
        output_found = False

        foms = SortedDictionary()
        foms.update(modules_data.foms)
        if study_config.auto_fom:
            # in auto-fom mode, also search in additional and non-loaded FOMs
            for schema, fom in six.iteritems(modules_data.all_foms):
                if schema not in (study_config.input_fom,
                                  study_config.output_fom,
                                  study_config.shared_fom):
                    foms[schema] = fom

        def editable_attributes(attributes, fom):
            # build a controller with one Str trait per FOM attribute,
            # initialized with the FOM-declared default value
            ea = EditableAttributes()
            for attribute in attributes:
                if attribute.startswith('fom_'):
                    continue  # skip FOM internals
                default_value = fom.attribute_definitions[attribute].get(
                    'default_value', '')
                ea.add_trait(attribute, Str(default_value))
            return ea

        def register_fom_parameters(fom, atp, fom_patterns, fom_process,
                                    schema):
            # register the discriminant attributes of every parameter in
            # fom_patterns on the capsul_attributes controller
            for parameter in fom_patterns:
                param_attributes = atp.find_discriminant_attributes(
                    fom_parameter=parameter, fom_process=fom_process)
                if param_attributes:
                    ea = editable_attributes(param_attributes, fom)
                    try:
                        capsul_attributes.set_parameter_attributes(
                            parameter, schema, ea, {})
                    except KeyError:
                        # param already registered
                        pass

        for schema, fom in six.iteritems(foms):
            if fom is None:
                # FOM known but not loaded yet: load it now
                fom, atp, pta \
                    = study_config.modules['FomConfig'].load_fom(schema)
            else:
                atp = modules_data.fom_atp.get(schema) \
                    or modules_data.fom_atp['all'].get(schema)

            if atp is None:
                continue
            # find the first candidate name with patterns in this FOM
            for name in names_search_list:
                fom_patterns = fom.patterns.get(name)
                if fom_patterns is not None:
                    break
            else:
                continue

            if not matching_fom:
                matching_fom = True
            if schema == 'input':
                input_found = True
            elif schema == 'output':
                output_found = True
            elif matching_fom in (False, True, None):
                # remember the first non-input/output matching FOM as a
                # fallback for a missing input/output schema
                matching_fom = schema, fom, atp, fom_patterns

            register_fom_parameters(fom, atp, fom_patterns, name, schema)

        if not matching_fom:
            raise KeyError('Process not found in FOMs')

        if not input_found and matching_fom is not True:
            # no FOM matched the 'input' schema: reuse the fallback FOM
            # NOTE(review): `name` here keeps its value from the last loop
            # iteration above, as in the historical behaviour — confirm
            fom_type, fom, atp, fom_patterns = matching_fom
            schema = 'input'
            register_fom_parameters(fom, atp, fom_patterns, name, schema)
            modules_data.foms[schema] = fom
            modules_data.fom_atp[schema] = atp
            study_config.input_fom = fom_type

        if not output_found and matching_fom is not True:
            # same fallback for the 'output' schema
            fom_type, fom, atp, fom_patterns = matching_fom
            schema = 'output'
            register_fom_parameters(fom, atp, fom_patterns, name, schema)
            modules_data.foms[schema] = fom
            modules_data.fom_atp[schema] = atp
            study_config.output_fom = fom_type

        # in a pipeline, we still must iterate over nodes to find switches,
        # which have their own behaviour.
        if isinstance(self.process, Pipeline):
            attributes = self.capsul_attributes
            name = self.process.name

            for node_name, node in six.iteritems(self.process.nodes):
                if isinstance(node, Switch):
                    subprocess = node
                    if subprocess is None:
                        continue
                    pname = '.'.join([name, node_name])
                    subprocess_compl = \
                        ProcessCompletionEngine.get_completion_engine(
                            subprocess, pname)
                    try:
                        sub_attributes \
                            = subprocess_compl.get_attribute_values()
                    except Exception:
                        # fall back on this engine's own class for switches
                        try:
                            subprocess_compl = self.__class__(subprocess)
                            sub_attributes \
                                = subprocess_compl.get_attribute_values()
                        except Exception:
                            continue
                    # merge switch attributes not already present
                    for attribute, trait \
                            in six.iteritems(sub_attributes.user_traits()):
                        if attributes.trait(attribute) is None:
                            attributes.add_trait(attribute, trait)
                            setattr(attributes, attribute,
                                    getattr(sub_attributes, attribute))

            self._get_linked_attributes()
# Example #35
    def complete_parameters(self, process_inputs=None,
                            complete_iterations=True):
        """Complete the parameters of an iterated process.

        Runs the completion engine of the underlying process once per
        iteration step, collecting each iterative parameter value into a
        list which is finally assigned on the iteration process.
        Completion progress is tracked via ``self.completion_progress`` /
        ``self.completion_progress_total``.

        Parameters
        ----------
        process_inputs: dict (optional)
            parameter values to set on the process before completion (may
            include a 'capsul_attributes' entry). Defaults to no inputs.
        complete_iterations: bool (optional)
            if False, nothing is done at all.
        """
        if process_inputs is None:
            # avoid the shared-mutable-default-argument pitfall
            process_inputs = {}

        if not complete_iterations:
            # then do nothing...
            return

        self.completion_progress = 0.

        process = self.process
        if isinstance(process, ProcessNode):
            # unwrap the pipeline node to the actual process
            process = process.process

        try:
            self.set_parameters(process_inputs)
            attributes_set = self.get_attribute_values()
            # completion engine of the iterated (inner) process
            completion_engine = ProcessCompletionEngine.get_completion_engine(
                process.process, self.name)
            step_attributes = completion_engine.get_attribute_values()
        except AttributeError:
            # ProcessCompletionEngine not implemented for this process:
            # no completion
            return

        size = self.iteration_size()

        iterated_attributes = self.get_iterated_attributes()
        # propagate non-iterated attribute values to the inner engine
        for attribute in attributes_set.user_traits():
            if attribute not in iterated_attributes:
                setattr(step_attributes, attribute,
                        getattr(attributes_set, attribute))
        parameters = {}
        for parameter in process.regular_parameters:
            parameters[parameter] = getattr(process, parameter)

        # complete each step to get iterated parameters.
        # This is generally "too much" but it's difficult to perform a partial
        # completion only on iterated parameters

        iterative_parameters = {key: []
                                for key in process.iterative_parameters}

        # propagate forbid_completion
        for param, trait in six.iteritems(process.user_traits()):
            if trait.forbid_completion:
                if hasattr(process.process, 'propagate_metadata'):
                    process.process.propagate_metadata(
                        '', param, {'forbid_completion': True})
                else:
                    process.process.trait(param).forbid_completion = True

        self.completion_progress_total = size
        for it_step in range(size):
            self.capsul_iteration_step = it_step
            for attribute in iterated_attributes:
                iterated_values = getattr(attributes_set, attribute)
                # clamp: reuse the last value if fewer values than steps
                step = min(len(iterated_values) - 1, it_step)
                value = iterated_values[step]
                setattr(step_attributes, attribute, value)
            for parameter in process.iterative_parameters:
                values = getattr(process, parameter)
                if isinstance(values, list) and len(values) > it_step:
                    parameters[parameter] = values[it_step]
            completion_engine.complete_parameters(
                parameters, complete_iterations=complete_iterations)
            # collect the completed values of this step
            for parameter in process.iterative_parameters:
                value = getattr(process.process, parameter)
                iterative_parameters[parameter].append(value)
            self.completion_progress = it_step + 1
        # assign the collected per-step values as lists on the process
        for parameter, values in six.iteritems(iterative_parameters):
            try:
                setattr(process, parameter, values)
            except Exception as e:
                # best-effort: report but do not abort the other parameters
                print('assign iteration parameter',
                      parameter,
                      ':\n',
                      e,
                      file=sys.stderr)
# Example #36
if __name__ == '__main__':
    # run the test suite first and report its outcome
    print("RETURNCODE: ", test())

    # with -v/--verbose, also open the interactive Qt views
    verbose = '-v' in sys.argv[1:] or '--verbose' in sys.argv[1:]
    if verbose:

        from capsul.qt_gui.widgets.pipeline_developper_view \
            import PipelineDeveloperView
        from capsul.qt_gui.widgets.attributed_process_widget \
            import AttributedProcessWidget
        from soma.qt_gui.qt_backend import QtGui, QtCore

        study_config = init_study_config()

        process = study_config.get_process_instance(
            'capsul.attributes.test.test_attributed_process.DummyProcess')
        # instantiating the completion engine attaches attributes to it
        patt = ProcessCompletionEngine.get_completion_engine(process)
        atts = patt.get_attribute_values()

        # create a QApplication only if none is running yet
        qapp = None
        if QtGui.QApplication.instance() is None:
            qapp = QtGui.QApplication(['test_app'])

        pv = PipelineDeveloperView(
            process, allow_open_controller=True, enable_edition=True,
            show_sub_pipelines=True)
        pc = AttributedProcessWidget(
            process, enable_attr_from_filename=True, enable_load_buttons=True)

        pv.show()
        pc.show()
        # enter the event loop only if we own the application
        if qapp:
            qapp.exec_()
# Example #37
if __name__ == '__main__':
    # run the test suite first and report its outcome
    print("RETURNCODE: ", test())

    # with -v/--verbose, also open the interactive Qt views
    verbose = '-v' in sys.argv[1:] or '--verbose' in sys.argv[1:]
    if verbose:

        from capsul.qt_gui.widgets.pipeline_developper_view \
            import PipelineDevelopperView
        from capsul.qt_gui.widgets.attributed_process_widget \
            import AttributedProcessWidget
        from soma.qt_gui.qt_backend import QtGui, QtCore

        study_config = init_study_config()

        process = study_config.get_process_instance(
            'capsul.attributes.test.test_attributed_process.DummyProcess')
        # instantiating the completion engine attaches attributes to it
        patt = ProcessCompletionEngine.get_completion_engine(process)
        atts = patt.get_attribute_values()

        # create a QApplication only if none is running yet
        qapp = None
        if QtGui.QApplication.instance() is None:
            qapp = QtGui.QApplication(['test_app'])

        pv = PipelineDevelopperView(
            process, allow_open_controller=True, enable_edition=True,
            show_sub_pipelines=True)
        pc = AttributedProcessWidget(
            process, enable_attr_from_filename=True, enable_load_buttons=True)

        pv.show()
        pc.show()
        # enter the event loop only if we own the application
        if qapp:
            qapp.exec_()