Example #1
    @classmethod
    def setUpClass(cls):
        class FakeVariable(object):
            def __init__(self, name):
                self.name = name

            def __eq__(self, other):
                return self.name == other.name

        cls.plot = FakeObj(name='My Plot',
                           package_identifier='tests.dat.vistrail_data')
        cls.var1 = FakeVariable('var1')
        cls.var2 = FakeVariable('var2')
        cls.var3 = FakeVariable('var3')
        all_vars = dict(var1=cls.var1, var2=cls.var2, var3=cls.var3)

        def get_variable(name):
            return all_vars.get(name)

        cls.vistraildata = FakeObj(get_variable=get_variable)

        cls.recipe = DATRecipe(
            cls.plot,
            {
                'param1': (
                    RecipeParameterValue(
                        variable=cls.var1),
                    RecipeParameterValue(
                        variable=cls.var2),
                ),
                'param2': (
                    RecipeParameterValue(
                        constant='test\'";b=c,r\xC3\xA9mi'),
                ),
                'param3': (
                    RecipeParameterValue(
                        variable=cls.var3),
                ),
            })
        cls.conn_map = {
            'param1': (
                (1, 2),
                (5,),
            ),
            'param2': (
                (4,),
            ),
            'param3': (
                (3,),
            ),
        }
        cls.port_map = {
            'param1': (
                (1, 'port1'), (2, 'port2'),
            ),
            'param2': (
            ),
            'param3': (
                (3, 'port3'),
            ),
        }
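
A hypothetical consistency check, not part of the original test class, illustrating that the fixture's conn_map and port_map are keyed by the same parameter names as the recipe (recipe.parameters is the attribute used in Examples #3 and #7):

    def test_fixture_keys(self):
        # Each recipe parameter has matching entries in both maps
        self.assertEqual(set(self.recipe.parameters), set(self.conn_map))
        self.assertEqual(set(self.recipe.parameters), set(self.port_map))
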
Example #2
File: cellcontainer.py Project: rbax/DAT
    def update_pipeline(self, force_reexec=False):
        """Updates the recipe and execute the workflow if enough ports are set.
        """
        # Look this recipe up in the VistrailData
        vistraildata = VistrailManager(self._controller)
        recipe = DATRecipe(self._plot, self._parameters)

        # Try to get an existing pipeline for this cell
        pipeline = self.get_pipeline()

        try:
            # No pipeline: build one
            if pipeline is None:
                pipeline = vistrails_interface.create_pipeline(
                    self._controller,
                    recipe,
                    self.cellInfo.row,
                    self.cellInfo.column,
                    vistraildata.sheetname_var(self.cellInfo.tab),
                    typecast=self._typecast)
                recipe = pipeline.recipe
                new_params_it = recipe.parameters.iteritems()
                self._parameters = {
                    param: list(values)
                    for param, values in new_params_it
                }
                vistraildata.created_pipeline(self.cellInfo, pipeline)

            # Pipeline with different content: update it
            elif pipeline.recipe != recipe:
                try:
                    pipeline = vistrails_interface.update_pipeline(
                        self._controller,
                        pipeline,
                        recipe,
                        typecast=self._typecast)
                except vistrails_interface.UpdateError as e:
                    warnings.warn("Could not update pipeline, creating new "
                                  "one:\n"
                                  "%s" % e)
                    pipeline = vistrails_interface.create_pipeline(
                        self._controller,
                        recipe,
                        self.cellInfo.row,
                        self.cellInfo.column,
                        vistraildata.sheetname_var(self.cellInfo.tab),
                        typecast=self._typecast)
                recipe = pipeline.recipe
                new_params_it = recipe.parameters.iteritems()
                self._parameters = {
                    param: list(values)
                    for param, values in new_params_it
                }
                vistraildata.created_pipeline(self.cellInfo, pipeline)

            # Nothing changed
            elif not force_reexec:
                return True
Example #3
    def _infer_pipelineinfo(self, version, cellInfo):
        """Try to make up a pipelineInfo for a version and store it.

        Returns the new pipelineInfo, or None if we failed.
        """
        # This ensures that we don't try to infer a DAT recipe from the same
        # pipeline over and over again
        if version in self._failed_infer_calls:
            return None

        def fail():
            self._failed_infer_calls.add(version)
            return None

        # Recursively obtains the parent version's pipelineInfo
        try:
            parentId = self._controller.vistrail.actionMap[version].prevId
        except KeyError:
            return fail()
        parentInfo = self.get_pipeline(parentId, infer_for_cell=cellInfo)
        if parentInfo is None:
            return fail()

        # Loop on modules and connections to check that everything the old
        # pipeline's recipe relied on is still present

        pipeline = get_upgraded_pipeline(self._controller.vistrail, version)

        new_parameters = dict()
        new_conn_map = dict()

        # Check that the plot is still there by finding the plot ports
        for name, port_list in parentInfo.port_map.iteritems():
            for mod_id, portname in port_list:
                if mod_id not in pipeline.modules:
                    return fail()

        # Loop on parameters to check they are still there
        for name, parameter_list in parentInfo.recipe.parameters.iteritems():
            conn_list = parentInfo.conn_map[name]
            new_parameter_list = []
            new_conn_list = []
            for parameter, conns in itertools.izip(parameter_list, conn_list):
                if all(conn_id in pipeline.connections for conn_id in conns):
                    new_parameter_list.append(parameter)
                    new_conn_list.append(conns)
            new_parameters[name] = new_parameter_list
            new_conn_map[name] = new_conn_list

        new_recipe = DATRecipe(parentInfo.recipe.plot, new_parameters)
        pipelineInfo = PipelineInformation(version, new_recipe, new_conn_map,
                                           parentInfo.port_map)
        self.created_pipeline(cellInfo, pipelineInfo)
        return pipelineInfo
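
The _failed_infer_calls set above is a small negative-result cache: versions for which inference already failed are never retried. A minimal standalone sketch of the same pattern, with hypothetical names that are not part of DAT:

class NegativeCache(object):
    """Remembers inputs for which an expensive computation already failed."""

    def __init__(self, compute):
        self._compute = compute   # callable returning a result or None
        self._failed = set()

    def get(self, key):
        if key in self._failed:
            return None
        result = self._compute(key)
        if result is None:
            self._failed.add(key)
        return result
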
Example #4
    @staticmethod
    def _read_recipe_annotation(vistraildata, value):
        """Reads (recipe, conn_map) from an annotation value.
        """
        def read_connlist(connlist):
            return tuple(int(conn_id) for conn_id in connlist.split(','))

        value = iter(value.split(';'))
        try:
            plot = next(value)
            plot = plot.split(',')
            if len(plot) != 2:
                raise ValueError
            plot = GlobalManager.get_plot(*plot)  # Might raise KeyError
            parameters = dict()
            conn_map = dict()
            for param in value:
                # Might raise ValueError or TypeError
                param, t, pvals = param.split('=')
                pvals = pvals.split('|')
                plist = []
                cplist = []
                if t not in ('c', 'v'):
                    raise ValueError
                for val in pvals:
                    val = val.split(':')
                    if len(val) != 2:
                        raise ValueError
                    if t == 'c':
                        plist.append(
                            RecipeParameterValue(
                                constant=urllib2.unquote(val[0])))
                    else:  # t == 'v':
                        v = val[0].split(',')
                        if len(v) not in (1, 2):
                            raise ValueError
                        variable = vistraildata.get_variable(v[0])
                        if len(v) == 2:
                            plist.append(
                                RecipeParameterValue(variable=variable,
                                                     typecast=v[1]))
                        else:
                            plist.append(
                                RecipeParameterValue(variable=variable))
                    cplist.append(read_connlist(val[1]))
                parameters[param] = tuple(plist)
                conn_map[param] = tuple(cplist)
            return DATRecipe(plot, parameters), conn_map
        except (KeyError, ValueError, TypeError):
            return None, None
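
A hedged illustration of calling this parser. The annotation string below is made up; its layout is inferred from the parsing code above: "<package>,<plot name>" followed by ';'-separated "<param>=<c|v>=<value>:<conn ids>" groups, with '|' between values, ',' between connection ids, and URL-quoted constants. It assumes the method is reachable as a plain function or staticmethod and that vistraildata resolves 'var1' and 'var2' through get_variable(), as in the fixture of Example #1:

recipe, conn_map = _read_recipe_annotation(
    vistraildata,
    'tests.dat,My Plot;'          # "<package identifier>,<plot name>"
    'param1=v=var1:1,2|var2:5;'   # variable values with their connection ids
    'param2=c=%21:4')             # URL-quoted constant value ('!')
# If the plot is registered with GlobalManager, this yields
# conn_map == {'param1': ((1, 2), (5,)), 'param2': ((4,),)}
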
Example #5
File: test_dat.py Project: rbax/DAT
    def test_eq(self):
        myvar = FakeObj(name='myvar')
        othervar = FakeObj(name='othervar')
        plot = FakeObj(package_identifier='tests.dat', name='My Plot')
        plot2 = FakeObj(package_identifier='tests.dat', name='Not My Plot')
        rec1 = DATRecipe(
            plot, dict(param1=[],
                       param2=[RecipeParameterValue(variable=myvar)]))
        rec2 = DATRecipe(
            plot2,
            dict(param1=[], param2=[RecipeParameterValue(variable=myvar)]))
        rec3 = DATRecipe(
            plot,
            dict(param1=(), param2=(RecipeParameterValue(variable=myvar), )))
        rec4 = DATRecipe(
            plot,
            dict(param1=[RecipeParameterValue(variable=othervar)],
                 param2=[RecipeParameterValue(variable=myvar)]))
        rec5 = DATRecipe(
            plot,
            dict(param1=[], param2=[RecipeParameterValue(constant='myvar')]))
        rec6 = DATRecipe(
            plot,
            dict(param1=[],
                 param2=[RecipeParameterValue(constant='othervar')]))
        rec7 = DATRecipe(
            plot,
            dict(param1=[],
                 param2=[RecipeParameterValue(variable=myvar,
                                              typecast='op1')]))
        rec8 = DATRecipe(
            plot,
            dict(param1=[],
                 param2=[RecipeParameterValue(variable=myvar, typecast='*')]))

        self.assertTrue(rec1 == rec1)
        self.assertTrue(rec3 == rec3)
        self.assertTrue(rec5 == rec5)
        self.assertFalse(rec1 == rec2)
        self.assertTrue(rec1 == rec3)
        self.assertFalse(rec1 == rec4)
        self.assertFalse(rec1 == rec5)
        self.assertFalse(rec1 == rec6)
        self.assertTrue(rec1 == rec7)
        self.assertTrue(rec7 == rec7)
        self.assertTrue(rec7 == rec8)
Example #6
    def test_pipeline_creation(self):
        import dat.tests.pkg_test_plots.init as pkg_test_plots

        controller = self.vt_controller()
        vistraildata = VistrailManager(controller)
        loader = Test_generation._loaders.get('StrMaker')

        loader.v = 'Hello'
        vistraildata.new_variable('var1', loader.load())

        loader.v = 'world'
        vistraildata.new_variable('var2', loader.load())

        cellInfo = FakeObj(
            row=0,
            column=0,
            tab=FakeObj(tabWidget=FakeObj(tabText=lambda w: 'Sheet 1')))

        recipe = DATRecipe(
            pkg_test_plots.concat_plot, {
                'param1': (RecipeParameterValue(
                    variable=vistraildata.get_variable('var1')), ),
                'param2': (RecipeParameterValue(
                    variable=vistraildata.get_variable('var2')), ),
                'param3': (RecipeParameterValue(constant="!"), ),
            })

        pipelineInfo = vistrails_interface.create_pipeline(
            controller, recipe, cellInfo.row, cellInfo.column,
            None)  # This plot has no cell module so this is fine

        controller.change_selected_version(pipelineInfo.version)

        result = CallRecorder()
        pkg_test_plots.Recorder.callback = result

        interpreter = get_default_interpreter()
        interpreter.execute(controller.current_pipeline,
                            view=DummyView(),
                            locator=controller.locator,
                            current_version=pipelineInfo.version)

        call = (['Hello, world!'], dict())
        self.assertEqual(result.calls, [call])
Example #7
File: __init__.py Project: rbax/DAT
def update_pipeline(controller, pipelineInfo, new_recipe, typecast=None):
    """Update a pipeline to a new recipe.

    This takes a similar pipeline and turns it into the new recipe by adding/
    removing/replacing the variable subworkflows.

    It will raise UpdateError if it can't be done; in this case
    create_pipeline() should be considered.
    """
    # Retrieve the pipeline
    controller.change_selected_version(pipelineInfo.version)
    pipeline = controller.current_pipeline
    old_recipe = pipelineInfo.recipe

    # The plots have to be the same
    if old_recipe.plot != new_recipe.plot:
        raise UpdateError("update_pipeline cannot change plot type!")

    generator = PipelineGenerator(controller)

    conn_map = dict()

    # Used to build the description
    added_params = []
    removed_params = []

    name_to_port = {port.name: port for port in new_recipe.plot.ports}
    actual_parameters = {}
    for port_name in (set(old_recipe.parameters.iterkeys())
                      | set(new_recipe.parameters.iterkeys())):
        # param -> [[conn_id]]
        old_params = dict()
        for i, param in enumerate(old_recipe.parameters.get(port_name, [])):
            conns = old_params.setdefault(param, [])
            conns.append(list(pipelineInfo.conn_map[port_name][i]))
        new_params = list(new_recipe.parameters.get(port_name, []))
        conn_lists = conn_map.setdefault(port_name, [])

        # Loop on new parameters
        actual_values = []
        for param in new_params:
            # Remove one from old_params
            old = old_params.get(param)
            if old:
                old_conns = old.pop(0)
                if not old:
                    del old_params[param]

                conn_lists.append(old_conns)
                actual_values.append(param)
                continue

            # Nothing left to reuse: this parameter appears more times than in
            # the old recipe, so add it to this port
            plot_ports = [
                (pipeline.modules[mod_id], port)
                for mod_id, port in (pipelineInfo.port_map[port_name])
            ]
            if param.type == RecipeParameterValue.VARIABLE:
                conns, actual_param = add_variable_subworkflow_typecast(
                    generator,
                    param.variable,
                    plot_ports,
                    name_to_port[port_name].type,
                    typecast=typecast)
                conn_lists.append(conns)
                actual_values.append(actual_param)
            else:  # param.type == RecipeParameterValue.CONSTANT:
                desc = name_to_port[port_name].type
                conn_lists.append(
                    add_constant_module(generator, desc, param.constant,
                                        plot_ports))
                actual_values.append(param)

            added_params.append(port_name)

        # Now loop on the remaining old parameters: anything the previous loop
        # did not consume appeared more times in the old recipe than in the
        # new one, so its subworkflow has to be removed
        for old_conn_lists in old_params.itervalues():
            for connections in old_conn_lists:
                # Remove the variable subworkflow
                modules = set(
                    pipeline.modules[pipeline.connections[c].source.moduleId]
                    for c in connections)
                generator.delete_linked(
                    modules,
                    connection_filter=lambda c: c.id not in connections)

                removed_params.append(port_name)

        actual_parameters[port_name] = actual_values

    # We didn't find anything to change
    if not (added_params or removed_params):
        return pipelineInfo

    pipeline_version = generator.perform_action()

    controller.vistrail.change_description(
        describe_dat_update(added_params, removed_params), pipeline_version)

    controller.change_selected_version(pipeline_version, from_root=True)

    return PipelineInformation(pipeline_version,
                               DATRecipe(new_recipe.plot, actual_parameters),
                               conn_map, pipelineInfo.port_map)
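
As the docstring says, callers are expected to fall back to create_pipeline() when the update cannot be done. A condensed sketch of that fallback, following the same pattern as Example #2 (all variables assumed to be in scope):

try:
    info = update_pipeline(controller, pipelineInfo, new_recipe,
                           typecast=typecast)
except UpdateError:
    # The pipelines differ too much: rebuild from scratch instead
    info = create_pipeline(controller, new_recipe, row, column,
                           var_sheetname, typecast=typecast)
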
Example #8
File: __init__.py Project: rbax/DAT
def create_pipeline(controller,
                    recipe,
                    row,
                    column,
                    var_sheetname,
                    typecast=None):
    """Create a pipeline from a recipe and return its information.
    """
    # Build from the root version
    controller.change_selected_version(0)

    reg = get_module_registry()

    generator = PipelineGenerator(controller)

    inputport_desc = reg.get_descriptor_by_name(
        'org.vistrails.vistrails.basic', 'InputPort')

    # Add the plot subworkflow
    plot_pipeline = recipe.plot.get_pipeline()

    connected_to_inputport = set(
        c.source.moduleId for c in plot_pipeline.connection_list
        if (plot_pipeline.modules[c.destination.moduleId].module_descriptor is
            inputport_desc))

    # Copy every module except the InputPorts and the modules feeding them
    plot_modules_map = dict()  # old module id -> new module
    for module in plot_pipeline.modules.itervalues():
        if (module.module_descriptor is not inputport_desc
                and module.id not in connected_to_inputport):
            plot_modules_map[module.id] = generator.copy_module(module)

    del connected_to_inputport

    def _get_or_create_module(moduleType):
        """Returns or creates a new module of the given type.

        Warns if multiple modules of that type were found.
        """
        modules = find_modules_by_type(plot_pipeline, [moduleType])
        if not modules:
            desc = reg.get_descriptor_from_module(moduleType)
            module = controller.create_module_from_descriptor(desc)
            generator.add_module(module)
            return module, True
        else:
            # Currently we do not support multiple cell locations in one
            # pipeline but this may be a feature in the future, to have
            # linked visualizations in multiple cells
            if len(modules) > 1:
                warnings.warn("Found multiple %s modules in plot "
                              "subworkflow, only using one." % moduleType)
            return plot_modules_map[modules[0].id], False

    # Find the SpreadsheetCell module and attach a CellLocation to it
    cell_modules = find_modules_by_type(plot_pipeline, [SpreadsheetCell])
    if cell_modules:
        cell_module = plot_modules_map[cell_modules[0].id]

        # Add a CellLocation module if the plot subworkflow didn't contain one
        location_module, new_location = _get_or_create_module(CellLocation)

        if new_location:
            # Connect the CellLocation to the SpreadsheetCell
            generator.connect_modules(location_module, 'value', cell_module,
                                      'Location')

        generator.update_function(location_module, 'Row', [str(row + 1)])
        generator.update_function(location_module, 'Column', [str(column + 1)])

        if len(cell_modules) > 1:
            warnings.warn("Plot subworkflow '%s' contains more than "
                          "one spreadsheet cell module. Only one "
                          "was connected to a location module." %
                          recipe.plot.name)

        # Add a SheetReference module
        sheetref_module, new_sheetref = _get_or_create_module(SheetReference)

        if new_sheetref or new_location:
            # Connect the SheetReference to the CellLocation
            generator.connect_modules(sheetref_module, 'value',
                                      location_module, 'SheetReference')

        generator.connect_var(var_sheetname, sheetref_module, 'SheetName')
    else:
        warnings.warn("Plot subworkflow '%s' does not contain a "
                      "spreadsheet cell module" % recipe.plot.name)

    # TODO : use walk_modules() to find all modules above an InputPort's
    # 'Default' port and ignore them in the following loop

    # Copy the connections and locate the input ports
    plot_params = dict()  # param name -> [(module, input port name)]
    for connection in plot_pipeline.connection_list:
        src = plot_pipeline.modules[connection.source.moduleId]
        dest = plot_pipeline.modules[connection.destination.moduleId]
        if dest.module_descriptor is inputport_desc:
            continue
        elif src.module_descriptor is inputport_desc:
            param = get_function(src, 'name')
            ports = plot_params.setdefault(param, [])
            ports.append((plot_modules_map[connection.destination.moduleId],
                          connection.destination.name))
        else:
            generator.connect_modules(
                plot_modules_map[connection.source.moduleId],
                connection.source.name,
                plot_modules_map[connection.destination.moduleId],
                connection.destination.name)

    # Find the constant ports declared with aliases
    aliases = {port.name: port for port in recipe.plot.ports if port.is_alias}
    for module in plot_pipeline.module_list:
        for function in module.functions:
            remove = False
            for param in function.parameters:
                if param.alias in aliases:
                    plot_params[param.alias] = [(plot_modules_map[module.id],
                                                 function.name)]
                    remove = True

            if remove:
                # Remove the function from the generated pipeline
                generator.update_function(plot_modules_map[module.id],
                                          function.name, None)
    del aliases

    # Add default values for constants that were not set explicitly
    parameters_incl_defaults = dict(recipe.parameters)
    for port in recipe.plot.ports:
        if (isinstance(port, ConstantPort) and port.default_value is not None
                and port.name not in recipe.parameters):
            parameters_incl_defaults[port.name] = [
                RecipeParameterValue(constant=port.default_value)
            ]

    # Maps a port name to the list of parameters
    # for each parameter, we have a list of connections tying it to modules of
    # the plot
    conn_map = dict()  # param: str -> [[conn_id: int]]

    name_to_port = {port.name: port for port in recipe.plot.ports}
    actual_parameters = {}
    for port_name, parameters in parameters_incl_defaults.iteritems():
        plot_ports = plot_params.get(port_name, [])
        p_conns = conn_map[port_name] = []
        actual_values = []
        for parameter in parameters:
            if parameter.type == RecipeParameterValue.VARIABLE:
                conns, actual_param = add_variable_subworkflow_typecast(
                    generator,
                    parameter.variable,
                    plot_ports,
                    name_to_port[port_name].type,
                    typecast=typecast)
                p_conns.append(conns)
                actual_values.append(actual_param)
            else:  # parameter.type == RecipeParameterValue.CONSTANT
                desc = name_to_port[port_name].type
                p_conns.append(
                    add_constant_module(generator, desc, parameter.constant,
                                        plot_ports))
                actual_values.append(parameter)
        actual_parameters[port_name] = actual_values
    del name_to_port

    pipeline_version = generator.perform_action()
    controller.vistrail.change_description(
        "Created DAT plot %s" % recipe.plot.name, pipeline_version)
    # FIXME : from_root seems to be necessary here, I don't know why
    controller.change_selected_version(pipeline_version, from_root=True)

    # Convert the modules to module ids in the port_map
    port_map = dict()
    for param, portlist in plot_params.iteritems():
        port_map[param] = [(module.id, port) for module, port in portlist]

    return PipelineInformation(pipeline_version,
                               DATRecipe(recipe.plot, actual_parameters),
                               conn_map, port_map)
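
The returned PipelineInformation carries the four fields the other examples rely on: version, recipe, conn_map and port_map. A minimal sketch of reading them back after a call like the one in Example #6 (attribute names as used in Examples #3 and #7):

info = create_pipeline(controller, recipe, 0, 0, None)
controller.change_selected_version(info.version)
for name in info.recipe.parameters:
    # conn_map: param -> [[connection id]]
    # port_map: param -> [(module id, port name)]
    print name, info.conn_map[name], info.port_map.get(name, [])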