Example #1
 def create(self):
     # FIXME moved here because otherwise we hit the registry too early
     from core.modules.module_utils import FilePool
     self._file_pool = FilePool()
     self._persistent_pipeline = core.vistrail.pipeline.Pipeline()
     self._objects = {}
     self._executed = {}
     self.filePool = self._file_pool
Example #2
 def create(self):
     self._file_pool = FilePool()
     self._persistent_pipeline = core.vistrail.pipeline.Pipeline()
     self._objects = {}
     self._executed = {}
     self.filePool = self._file_pool
Example #3
class CachedInterpreter(core.interpreter.base.BaseInterpreter):
    def __init__(self):
        core.interpreter.base.BaseInterpreter.__init__(self)
        self.create()

    def create(self):
        self._file_pool = FilePool()
        self._persistent_pipeline = core.vistrail.pipeline.Pipeline()
        self._objects = {}
        self._executed = {}
        self.filePool = self._file_pool

    def clear(self):
        self._file_pool.cleanup()
        self._persistent_pipeline.clear()
        for obj in self._objects.itervalues():
            obj.clear()
        self._objects = {}
        self._executed = {}

    def __del__(self):
        self.clear()

    def clean_modules(self, modules_to_clean):
        """clean_modules(modules_to_clean: list of persistent module ids)

        Removes modules from the persistent pipeline, and the modules that
        depend on them."""
        if modules_to_clean == []:
            return
        g = self._persistent_pipeline.graph
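        # vertices_topological_sort yields the given modules together with
        # every module downstream of them, so dependents are deleted too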
        dependencies = g.vertices_topological_sort(modules_to_clean)
        for v in dependencies:
            self._persistent_pipeline.delete_module(v)
            del self._objects[v]

    def clean_non_cacheable_modules(self):
        """clean_non_cacheable_modules() -> None

        Removes all modules that are not cacheable from the persistent
        pipeline, and the modules that depend on them."""
        non_cacheable_modules = [i for (i, mod) in self._objects.iteritems() if not mod.is_cacheable()]
        self.clean_modules(non_cacheable_modules)

    def unlocked_execute(self, controller, pipeline, locator, currentVersion, view, aliases=None, **kwargs):
        """unlocked_execute(controller, pipeline, locator,
        currentVersion, view): Executes a pipeline using
        caching. Caching works by reusing pipelines directly.  This
        means that there exists one global pipeline whose parts get
        executed over and over again. This allows nested execution."""
        if view is None:
            raise VistrailsInternalError("This shouldn't have happened")
        logger = kwargs["logger"]
        self.resolve_aliases(pipeline, aliases)

        def get_remapped_id(id):
            return get_remapped_info(id)[0]

        def get_remapped_info(id, is_persistent=True):
            """get_remapped_info(id : long) -> (pipeline_local_id : long,
                                                abstraction_local_id : long,
                                                abstraction_id : long,
                                                abstraction_version : long)

            """
            if is_persistent:
                new_id = tmp_to_persistent_module_map.inverse[id]
            else:
                new_id = id
            info = (new_id, new_id, None, None)
            if kwargs.has_key("module_remap"):
                while kwargs["module_remap"].has_key(new_id):
                    # want to only set the immediate info, but need to go back
                    # all the way to get the id for the displayed abstraction
                    new_info = kwargs["module_remap"][new_id]
                    new_id = new_info[0]
                    abstractions[new_info[2]] = new_info[0]
                    if info[2] is None:
                        # only want to return the immediate info
                        info = new_info
            return (new_id, info[1], info[2], info[3])

        parameter_changes = []

        def change_parameter(obj, name, value):
            parameter_changes.append((get_remapped_id(obj.id), name, value))

        # the executed dict works on persistent ids
        def add_to_executed(obj):
            executed[obj.id] = True
            if kwargs.has_key("moduleExecutedHook"):
                for callable_ in kwargs["moduleExecutedHook"]:
                    callable_(obj.id)

        # views work on local ids
        def begin_compute(obj):
            (i, old_id, a_id, version) = get_remapped_info(obj.id)
            view.set_module_computing(i)

            reg = modules.module_registry.registry
            module_name = reg.get_descriptor(obj.__class__).name

            logger.start_module_execution(obj, old_id, module_name, a_id, version)

        # views and loggers work on local ids
        def begin_update(obj):
            i = get_remapped_id(obj.id)
            view.set_module_active(i)

        def update_cached(obj):
            (i, old_id, a_id, version) = get_remapped_info(obj.id)

            reg = modules.module_registry.registry
            module_name = reg.get_descriptor(obj.__class__).name

            logger.start_module_execution(obj, old_id, module_name, a_id, version, 1)
            logger.finish_module_execution(obj)

        # views and loggers work on local ids
        def end_update(obj, error=""):
            i = get_remapped_id(obj.id)
            if not error:
                view.set_module_success(i)
            else:
                view.set_module_error(i, error)

            logger.finish_module_execution(obj, error)

        # views and loggers work on local ids
        def annotate(obj, d):
            i = get_remapped_id(obj.id)
            logger.insert_module_annotations(obj, d)

        logging_obj = InstanceObject(
            signalSuccess=add_to_executed,
            begin_update=begin_update,
            begin_compute=begin_compute,
            end_update=end_update,
            update_cached=update_cached,
            annotate=annotate,
        )

        (tmp_to_persistent_module_map, conn_map, module_added_set, conn_added_set) = self.add_to_persistent_pipeline(
            pipeline, logging_obj
        )

        def create_null():
            """Creates a Null value"""
            getter = modules.module_registry.registry.get_descriptor_by_name
            descriptor = getter("edu.utah.sci.vistrails.basic", "Null")
            return descriptor.module()

        def create_constant(param, module):
            """Creates a Constant from a parameter spec"""
            getter = modules.module_registry.registry.get_descriptor_by_name
            desc = getter(param.identifier, param.type, param.namespace)
            constant = desc.module()
            constant.id = module.id
            if param.evaluatedStrValue:
                constant.setValue(param.evaluatedStrValue)
            else:
                constant.setValue(constant.default_value)
            return constant

        ## Checking 'sinks' from kwargs to resolve only requested sinks
        if kwargs.has_key("sinks"):
            requestedSinks = kwargs["sinks"]
            persistent_sinks = [
                tmp_to_persistent_module_map[sink] for sink in pipeline.graph.sinks() if sink in requestedSinks
            ]
        else:
            persistent_sinks = [tmp_to_persistent_module_map[sink] for sink in pipeline.graph.sinks()]

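        # Per-run state: errors and executed are keyed by persistent module
        # ids (remapped to local ids at the end of this method); abstractions
        # tracks modules produced by abstraction expansion.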
        errors = {}
        executed = {}
        abstractions = {}

        def make_change_parameter(obj):
            return lambda *args: change_parameter(obj, *args)

        # Create the new objects
        for i in module_added_set:
            persistent_id = tmp_to_persistent_module_map[i]
            module = self._persistent_pipeline.modules[persistent_id]
            self._objects[persistent_id] = module.summon()
            obj = self._objects[persistent_id]
            obj.interpreter = self
            obj.id = persistent_id
            obj.logging = logging_obj
            obj.change_parameter = make_change_parameter(obj)

            # Update object pipeline information
            obj.moduleInfo["locator"] = locator
            obj.moduleInfo["version"] = currentVersion
            obj.moduleInfo["moduleId"] = i
            obj.moduleInfo["pipeline"] = pipeline
            if kwargs.has_key("reason"):
                obj.moduleInfo["reason"] = kwargs["reason"]
            if kwargs.has_key("actions"):
                obj.moduleInfo["actions"] = kwargs["actions"]

            reg = modules.module_registry.registry
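            # Map each function's parameters to an input connector: zero
            # parameters become a Null, one parameter becomes a Constant, and
            # several parameters are bundled into an InternalTuple.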
            for f in module.functions:
                if len(f.params) == 0:
                    connector = ModuleConnector(create_null(), "value")
                elif len(f.params) == 1:
                    p = f.params[0]
                    connector = ModuleConnector(create_constant(p, module), "value")
                else:
                    tupleModule = core.interpreter.base.InternalTuple()
                    tupleModule.length = len(f.params)
                    # use j, not i: i is the outer loop's module id and must
                    # not be clobbered here
                    for (j, p) in enumerate(f.params):
                        constant = create_constant(p, module)
                        constant.update()
                        connector = ModuleConnector(constant, "value")
                        tupleModule.set_input_port(j, connector)
                    connector = ModuleConnector(tupleModule, "value")
                obj.set_input_port(f.name, connector, is_method=True)

        # Create the new connections
        for i in conn_added_set:
            persistent_id = conn_map[i]
            conn = self._persistent_pipeline.connections[persistent_id]
            src = self._objects[conn.sourceId]
            dst = self._objects[conn.destinationId]
            conn.makeConnection(src, dst)

        if self.done_summon_hook:
            self.done_summon_hook(self._persistent_pipeline, self._objects)
        if kwargs.has_key("done_summon_hook"):
            for callable_ in kwargs["done_summon_hook"]:
                callable_(self._persistent_pipeline, self._objects)

        # Update new sinks
        for v in persistent_sinks:
            try:
                self._objects[v].update()
            except ModuleError, me:
                me.module.logging.end_update(me.module, me.msg)
                errors[me.module.id] = me

        if self.done_update_hook:
            self.done_update_hook(self._persistent_pipeline, self._objects)

        # objs, errs, and execs are mappings that use the local ids as keys,
        # as opposed to the persistent ids.
        # They are thus ideal to external consumption.
        objs = {}
        # dict([(i, self._objects[tmp_to_persistent_module_map[i]])
        #              for i in tmp_to_persistent_module_map.keys()])
        errs = {}
        execs = {}

        to_delete = []
        for (tmp_id, pst_id) in tmp_to_persistent_module_map.iteritems():
            objs[tmp_id] = self._objects[pst_id]
            if errors.has_key(pst_id):
                errs[tmp_id] = errors[pst_id]
                to_delete.append(pst_id)
            if executed.has_key(pst_id):
                execs[tmp_id] = executed[pst_id]
            else:
                # these modules didn't execute
                execs[tmp_id] = False

        # FIXME all abstractions will register as "executed" due to expansion
        # can probably fix by keeping track of all ids and determine status
        # from the pieces recursively, but too much work for now
        for m_id in abstractions.itervalues():
            info = get_remapped_info(m_id, False)
            obj = InstanceObject()
            logger.start_module_execution(obj, info[1], "Abstraction", info[2], info[3])
            logger.finish_module_execution(obj)

        # Clean up modules that failed to execute
        self.clean_modules(to_delete)
        #         print "objs:", objs
        #         print "errs:", errs
        #         print "execs:", execs

        for i, obj in objs.iteritems():
            if errs.has_key(i):
                view.set_module_error(i, errs[i].msg)
            elif execs.has_key(i) and execs[i]:
                view.set_module_success(i)
            else:
                view.set_module_not_executed(i)

        return InstanceObject(
            objects=objs,
            errors=errs,
            executed=execs,
            modules_added=module_added_set,
            connections_added=conn_added_set,
            parameter_changes=parameter_changes,
        )
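
For context, here is a minimal usage sketch for unlocked_execute (not from the original source): controller, pipeline, locator, current_version, view, and logger are hypothetical stand-ins for objects supplied by the calling application.

interpreter = CachedInterpreter()
# logger is required: unlocked_execute reads kwargs["logger"]
result = interpreter.unlocked_execute(controller, pipeline, locator,
                                      current_version, view,
                                      logger=logger)
# the returned InstanceObject maps local module ids to outcomes
for module_id in result.executed:
    if result.executed[module_id]:
        print "module %s executed" % module_id
for module_id, err in result.errors.iteritems():
    print "module %s failed: %s" % (module_id, err.msg)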
Example #4
class CachedInterpreter(core.interpreter.base.BaseInterpreter):

    def __init__(self):
        core.interpreter.base.BaseInterpreter.__init__(self)
        self.debugger = None
        self.create()

    def create(self):
        # FIXME moved here because otherwise we hit the registry too early
        from core.modules.module_utils import FilePool
        self._file_pool = FilePool()
        self._persistent_pipeline = core.vistrail.pipeline.Pipeline()
        self._objects = {}
        self._executed = {}
        self.filePool = self._file_pool
        
    def clear(self):
        self._file_pool.cleanup()
        self._persistent_pipeline.clear()
        for obj in self._objects.itervalues():
            obj.clear()
        self._objects = {}
        self._executed = {}

    def __del__(self):
        self.clear()

    def clean_modules(self, modules_to_clean):
        """clean_modules(modules_to_clean: list of persistent module ids)

        Removes modules from the persistent pipeline, and the modules that
        depend on them."""
        if modules_to_clean == []:
            return
        g = self._persistent_pipeline.graph
        dependencies = g.vertices_topological_sort(modules_to_clean)
        for v in dependencies:
            self._persistent_pipeline.delete_module(v)
            del self._objects[v]

    def clean_non_cacheable_modules(self):
        """clean_non_cacheable_modules() -> None

        Removes all modules that are not cacheable, as well as previously
        suspended modules, from the persistent pipeline, together with the
        modules that depend on them."""
        non_cacheable_modules = [i for (i, mod) in self._objects.iteritems()
                                 if not mod.is_cacheable() or mod.suspended]
        self.clean_modules(non_cacheable_modules)
        

    def setup_pipeline(self, pipeline, **kwargs):
        """setup_pipeline(controller, pipeline, locator, currentVersion,
                          view, aliases, **kwargs)
        Matches a pipeline with the persistent pipeline and creates
        instances of modules that aren't in the cache.
        """
        def fetch(name, default):
            r = kwargs.get(name, default)
            try:
                del kwargs[name]
            except KeyError:
                pass
            return r
        controller = fetch('controller', None)
        locator = fetch('locator', None)
        current_version = fetch('current_version', None)
        view = fetch('view', DummyView())
        aliases = fetch('aliases', None)
        params = fetch('params', None)
        extra_info = fetch('extra_info', None)
        logger = fetch('logger', DummyLogController())
        sinks = fetch('sinks', None)
        reason = fetch('reason', None)
        actions = fetch('actions', None)
        done_summon_hooks = fetch('done_summon_hooks', [])
        module_executed_hook = fetch('module_executed_hook', [])

        if len(kwargs) > 0:
            raise VistrailsInternalError('Wrong parameters passed '
                                         'to setup_pipeline: %s' % kwargs)

        def create_null():
            """Creates a Null value"""
            getter = modules.module_registry.registry.get_descriptor_by_name
            descriptor = getter('edu.utah.sci.vistrails.basic', 'Null')
            return descriptor.module()
        
        def create_constant(param, module):
            """Creates a Constant from a parameter spec"""
            reg = modules.module_registry.get_module_registry()
            getter = reg.get_descriptor_by_name
            desc = getter(param.identifier, param.type, param.namespace)
            constant = desc.module()
            constant.id = module.id
#             if param.evaluatedStrValue:
#                 constant.setValue(param.evaluatedStrValue)
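            # strValue is used directly here, where the commented-out code
            # above used evaluatedStrValue; presumably aliases are now
            # resolved up front via resolve_aliases/resolve_variables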
            if param.strValue != '':
                constant.setValue(param.strValue)
            else:
                constant.setValue(
                    constant.translate_to_string(constant.default_value))
            return constant

        ### BEGIN METHOD ###

#         if self.debugger:
#             self.debugger.update()
        to_delete = []
        errors = {}

        if controller is not None:
            # controller is None for sub_modules
            controller.validate(pipeline)
        else:
            pipeline.validate()

        self.resolve_aliases(pipeline, aliases)
        if controller is not None:
            # controller is None for sub_modules, so we can't resolve variables
            self.resolve_variables(controller, pipeline)

        self.update_params(pipeline, params)
        
        (tmp_to_persistent_module_map,
         conn_map,
         module_added_set,
         conn_added_set) = self.add_to_persistent_pipeline(pipeline)

        # Create the new objects
        for i in module_added_set:
            persistent_id = tmp_to_persistent_module_map[i]
            module = self._persistent_pipeline.modules[persistent_id]
            self._objects[persistent_id] = module.summon()
            obj = self._objects[persistent_id]
            obj.interpreter = self
            obj.id = persistent_id
            obj.is_breakpoint = module.is_breakpoint
            obj.signature = module._signature
                
            reg = modules.module_registry.get_module_registry()
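            # Same parameter-to-connector mapping as in unlocked_execute, but
            # here conversion failures are caught: the error is recorded in
            # errors and the module is queued for deletion via to_delete.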
            for f in module.functions:
                connector = None
                if len(f.params) == 0:
                    connector = ModuleConnector(create_null(), 'value')
                elif len(f.params) == 1:
                    p = f.params[0]
                    try:
                        constant = create_constant(p, module)
                        connector = ModuleConnector(constant, 'value')
                    except ValueError, e:
                        err = ModuleError(self, 'Cannot convert parameter '
                                          'value "%s"\n' % p.strValue + str(e))
                        errors[i] = err
                        to_delete.append(obj.id)
                    except Exception, e:
                        import traceback
                        traceback.print_exc()
                        err = ModuleError(self, 'Uncaught exception: "%s"' % str(e))
                        errors[i] = err
                        to_delete.append(obj.id)
                else:
                    tupleModule = core.interpreter.base.InternalTuple()
                    tupleModule.length = len(f.params)
                    for (j,p) in enumerate(f.params):
                        try:
                            constant = create_constant(p, module)
                            constant.update()
                            connector = ModuleConnector(constant, 'value')
                            tupleModule.set_input_port(j, connector)
                        except ValueError, e:
                            err = ModuleError(self, "Cannot convert parameter "
                                              "value '%s'\n" % p.strValue + \
                                                  str(e))
                            errors[i] = err
                            to_delete.append(obj.id)
                        except Exception, e:
                            import traceback
                            traceback.print_exc()
                            err = ModuleError(self, 'Uncaught exception: "%s"' % str(e))
                            errors[i] = err
                            to_delete.append(obj.id)
                    connector = ModuleConnector(tupleModule, 'value')