def add_variable_subworkflow_typecast(generator, variable, plot_ports,
                                      expected_type, typecast):
    if issubclass(variable.type.module, expected_type.module):
        return (add_variable_subworkflow(generator, variable.name, plot_ports),
                RecipeParameterValue(variable=variable))
    else:
        # Load the variable from the workflow
        var_pipeline = Variable.from_workflow(variable)

        # Apply the operation
        var_pipeline, typecast_operation = typecast(
            generator.controller, var_pipeline,
            variable.type, expected_type)

        generator.append_operations(var_pipeline._generator.operations)
        if plot_ports:
            connection_ids = []
            for var_output_mod, var_output_port in plot_ports:
                connection_ids.append(generator.connect_modules(
                    var_pipeline._output_module,
                    var_pipeline._outputport_name,
                    var_output_mod,
                    var_output_port))
            return connection_ids, RecipeParameterValue(
                variable=variable,
                typecast=typecast_operation.name)
        else:
            return (var_pipeline._output_module,
                    var_pipeline._outputport_name)
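

# A minimal sketch of a callable usable as the 'typecast' argument above.
# The names 'passthrough_typecast' and '_FakeOperation' are hypothetical and
# only illustrate the shape inferred from the call site: the callable receives
# the controller, the loaded variable pipeline and the source/expected types,
# and returns the (possibly rewritten) pipeline together with an operation
# object whose 'name' attribute is recorded in the RecipeParameterValue.
class _FakeOperation(object):
    name = 'passthrough'


def passthrough_typecast(controller, var_pipeline, source_type, expected_type):
    # A real implementation would append conversion operations to
    # var_pipeline; this sketch returns it unchanged.
    return var_pipeline, _FakeOperation()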


def setUpClass(cls):
    class FakeVariable(object):
        def __init__(self, name):
            self.name = name

        def __eq__(self, other):
            return self.name == other.name

    cls.plot = FakeObj(name='My Plot',
                       package_identifier='tests.dat.vistrail_data')
    cls.var1 = FakeVariable('var1')
    cls.var2 = FakeVariable('var2')
    cls.var3 = FakeVariable('var3')
    all_vars = dict(var1=cls.var1, var2=cls.var2, var3=cls.var3)

    def get_variable(name):
        return all_vars.get(name)
    cls.vistraildata = FakeObj(get_variable=get_variable)

    cls.recipe = DATRecipe(
        cls.plot,
        {
            'param1': (
                RecipeParameterValue(variable=cls.var1),
                RecipeParameterValue(variable=cls.var2),
            ),
            'param2': (
                RecipeParameterValue(constant='test\'";b=c,r\xC3\xA9mi'),
            ),
            'param3': (
                RecipeParameterValue(variable=cls.var3),
            ),
        })
    cls.conn_map = {
        'param1': (
            (1, 2),
            (5,),
        ),
        'param2': (
            (4,),
        ),
        'param3': (
            (3,),
        ),
    }
    cls.port_map = {
        'param1': (
            (1, 'port1'),
            (2, 'port2'),
        ),
        'param2': (),
        'param3': (
            (3, 'port3'),
        ),
    }


def _read_recipe_annotation(vistraildata, value):
    """Reads (recipe, conn_map) from an annotation value.
    """
    def read_connlist(connlist):
        return tuple(int(conn_id) for conn_id in connlist.split(','))

    value = iter(value.split(';'))
    try:
        plot = next(value)
        plot = plot.split(',')
        if len(plot) != 2:
            raise ValueError
        plot = GlobalManager.get_plot(*plot)  # Might raise KeyError
        parameters = dict()
        conn_map = dict()
        for param in value:
            # Might raise ValueError or TypeError
            param, t, pvals = param.split('=')
            pvals = pvals.split('|')
            plist = []
            cplist = []
            if t not in ('c', 'v'):
                raise ValueError
            for val in pvals:
                val = val.split(':')
                if len(val) != 2:
                    raise ValueError
                if t == 'c':
                    plist.append(RecipeParameterValue(
                        constant=urllib2.unquote(val[0])))
                else:  # t == 'v':
                    v = val[0].split(',')
                    if len(v) not in (1, 2):
                        raise ValueError
                    variable = vistraildata.get_variable(v[0])
                    if len(v) == 2:
                        plist.append(RecipeParameterValue(
                            variable=variable,
                            typecast=v[1]))
                    else:
                        plist.append(RecipeParameterValue(
                            variable=variable))
                cplist.append(read_connlist(val[1]))
            parameters[param] = tuple(plist)
            conn_map[param] = tuple(cplist)

        return DATRecipe(plot, parameters), conn_map
    except (KeyError, ValueError, TypeError):
        return None, None
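

# Illustrative reconstruction of the annotation grammar consumed above (the
# writer is not shown here, so the exact URL-quoting is an assumption):
#   <plot_package>,<plot_name>;<param>=<c|v>=<value>:<conn,ids>|<value>:<conn,ids>;...
# where 'c' values are URL-quoted constants and 'v' values are
# '<variable_name>' or '<variable_name>,<typecast_operation>'.
# Parsed with the setUpClass() fixture above, this example yields the same
# recipe and conn_map as cls.recipe and cls.conn_map.
_example_annotation = (
    'tests.dat.vistrail_data,My Plot;'
    'param1=v=var1:1,2|var2:5;'
    'param2=c=test%27%22%3Bb%3Dc%2Cr%C3%A9mi:4;'
    'param3=v=var3:3')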


def test_pipeline_creation(self):
    import dat.tests.pkg_test_plots.init as pkg_test_plots

    controller = self.vt_controller()
    vistraildata = VistrailManager(controller)
    loader = Test_generation._loaders.get('StrMaker')

    loader.v = 'Hello'
    vistraildata.new_variable('var1', loader.load())

    loader.v = 'world'
    vistraildata.new_variable('var2', loader.load())

    cellInfo = FakeObj(
        row=0,
        column=0,
        tab=FakeObj(
            tabWidget=FakeObj(
                tabText=lambda w: 'Sheet 1')))

    recipe = DATRecipe(
        pkg_test_plots.concat_plot,
        {
            'param1': (
                RecipeParameterValue(
                    variable=vistraildata.get_variable('var1')),
            ),
            'param2': (
                RecipeParameterValue(
                    variable=vistraildata.get_variable('var2')),
            ),
            'param3': (
                RecipeParameterValue(constant="!"),
            ),
        })

    pipelineInfo = vistrails_interface.create_pipeline(
        controller,
        recipe,
        cellInfo.row,
        cellInfo.column,
        None)  # This plot has no cell module so this is fine

    controller.change_selected_version(pipelineInfo.version)

    result = CallRecorder()
    pkg_test_plots.Recorder.callback = result

    interpreter = get_default_interpreter()
    interpreter.execute(controller.current_pipeline,
                        view=DummyView(),
                        locator=controller.locator,
                        current_version=pipelineInfo.version)

    call = (['Hello, world!'], dict())
    self.assertEqual(result.calls, [call])


def test_eq(self):
    myvar = FakeObj(name='myvar')
    othervar = FakeObj(name='othervar')
    plot = FakeObj(package_identifier='tests.dat', name='My Plot')
    plot2 = FakeObj(package_identifier='tests.dat', name='Not My Plot')
    rec1 = DATRecipe(
        plot,
        dict(param1=[],
             param2=[RecipeParameterValue(variable=myvar)]))
    rec2 = DATRecipe(
        plot2,
        dict(param1=[],
             param2=[RecipeParameterValue(variable=myvar)]))
    rec3 = DATRecipe(
        plot,
        dict(param1=(),
             param2=(RecipeParameterValue(variable=myvar), )))
    rec4 = DATRecipe(
        plot,
        dict(param1=[RecipeParameterValue(variable=othervar)],
             param2=[RecipeParameterValue(variable=myvar)]))
    rec5 = DATRecipe(
        plot,
        dict(param1=[],
             param2=[RecipeParameterValue(constant='myvar')]))
    rec6 = DATRecipe(
        plot,
        dict(param1=[],
             param2=[RecipeParameterValue(constant='othervar')]))
    rec7 = DATRecipe(
        plot,
        dict(param1=[],
             param2=[RecipeParameterValue(variable=myvar, typecast='op1')]))
    rec8 = DATRecipe(
        plot,
        dict(param1=[],
             param2=[RecipeParameterValue(variable=myvar, typecast='*')]))

    self.assertTrue(rec1 == rec1)
    self.assertTrue(rec3 == rec3)
    self.assertTrue(rec5 == rec5)
    self.assertFalse(rec1 == rec2)
    self.assertTrue(rec1 == rec3)
    self.assertFalse(rec1 == rec4)
    self.assertFalse(rec1 == rec5)
    self.assertFalse(rec1 == rec6)
    self.assertTrue(rec1 == rec7)
    self.assertTrue(rec7 == rec7)
    self.assertTrue(rec7 == rec8)


def dropEvent(self, event):
    mimeData = event.mimeData()

    if mimeData.hasFormat(MIMETYPE_DAT_VARIABLE):
        if self._plot is not None and self._parameter_hovered is not None:
            event.accept()
            port_name = self._plot.ports[self._parameter_hovered].name
            varname = str(mimeData.data(MIMETYPE_DAT_VARIABLE))

            # Here we keep the old values around, and we revert if
            # update_pipeline() returns False
            old_values = self._parameters.get(port_name)
            if old_values is not None:
                old_values = list(old_values)

            # Try to update
            values = self._parameters.setdefault(port_name, [])
            if values and values[0].type == RecipeParameterValue.CONSTANT:
                # The overlay shouldn't allow this
                warnings.warn("a variable was dropped on a port where a "
                              "constant is set")
                event.ignore()
                return
            variable = (VistrailManager(self._controller)
                        .get_variable(varname))
            param = RecipeParameterValue(variable=variable)
            if self._insert_pos < len(values):
                values[self._insert_pos] = param
            else:
                values.append(param)

            if not self.update_pipeline():
                # This is wrong somehow (ex: typecasting failed)
                # Revert to previous values
                if old_values is None:
                    del self._parameters[port_name]
                else:
                    self._parameters[port_name] = old_values
        else:
            event.ignore()
    elif mimeData.hasFormat(MIMETYPE_DAT_PLOT):
        event.accept()
        plotname = str(mimeData.data(MIMETYPE_DAT_PLOT))
        plotname = plotname.split(',')
        if len(plotname) == 2:
            self._plot = GlobalManager.get_plot(*plotname)
            self._parameters = dict()
            self._parameter_hovered = None
            self.update_pipeline()
    else:
        event.ignore()

    self._set_overlay(None)
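

# Note: as the branches above show, MIMETYPE_DAT_VARIABLE carries the variable
# name and MIMETYPE_DAT_PLOT carries '<package_identifier>,<plot name>', which
# is split and handed to GlobalManager.get_plot().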


def change_constant(self, port_name, value):
    constant = self._parameters.get(port_name)
    if constant and constant[0].type != RecipeParameterValue.CONSTANT:
        # The overlay shouldn't do this
        warnings.warn("change_constant() on port where variables are set")
        return False
    elif constant is not None:
        constant = constant[0]
        if value is None:
            del self._parameters[port_name]
            return True
        elif constant.constant == value:
            return False
    self._parameters[port_name] = [RecipeParameterValue(constant=value)]
    if self.widget() is not None:
        self._execute_pending = True
    else:
        self.update_pipeline(False, defer=True)
    return True
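

# Sketch of the contract implied by change_constant() above; the receiver
# 'cell' and the port name 'opacity' are hypothetical, for illustration only:
#   cell.change_constant('opacity', '0.5')   # -> True, constant (re)set
#   cell.change_constant('opacity', '0.5')   # -> False, value unchanged
#   cell.change_constant('opacity', None)    # -> True, constant removed
# Calling it on a port currently bound to variables only warns and returns
# False.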


def create_pipeline(controller, recipe, row, column, var_sheetname,
                    typecast=None):
    """Create a pipeline from a recipe and return its information.
    """
    # Build from the root version
    controller.change_selected_version(0)

    reg = get_module_registry()

    generator = PipelineGenerator(controller)

    inputport_desc = reg.get_descriptor_by_name(
        'org.vistrails.vistrails.basic', 'InputPort')

    # Add the plot subworkflow
    plot_pipeline = recipe.plot.get_pipeline()

    connected_to_inputport = set(
        c.source.moduleId
        for c in plot_pipeline.connection_list
        if (plot_pipeline.modules[c.destination.moduleId].module_descriptor is
            inputport_desc))

    # Copy every module but the InputPorts and up
    plot_modules_map = dict()  # old module id -> new module
    for module in plot_pipeline.modules.itervalues():
        if (module.module_descriptor is not inputport_desc and
                module.id not in connected_to_inputport):
            plot_modules_map[module.id] = generator.copy_module(module)

    del connected_to_inputport

    def _get_or_create_module(moduleType):
        """Returns or creates a new module of the given type.

        Warns if multiple modules of that type were found.
        """
        modules = find_modules_by_type(plot_pipeline, [moduleType])
        if not modules:
            desc = reg.get_descriptor_from_module(moduleType)
            module = controller.create_module_from_descriptor(desc)
            generator.add_module(module)
            return module, True
        else:
            # Currently we do not support multiple cell locations in one
            # pipeline but this may be a feature in the future, to have
            # linked visualizations in multiple cells
            if len(modules) > 1:
                warnings.warn("Found multiple %s modules in plot "
                              "subworkflow, only using one." % moduleType)
            return plot_modules_map[modules[0].id], False

    # Connect the CellLocation to the SpreadsheetCell
    cell_modules = find_modules_by_type(plot_pipeline, [SpreadsheetCell])
    if cell_modules:
        cell_module = plot_modules_map[cell_modules[0].id]

        # Add a CellLocation module if the plot subworkflow didn't contain one
        location_module, new_location = _get_or_create_module(CellLocation)

        if new_location:
            # Connect the CellLocation to the SpreadsheetCell
            generator.connect_modules(
                location_module, 'value',
                cell_module, 'Location')

        generator.update_function(location_module, 'Row', [str(row + 1)])
        generator.update_function(location_module, 'Column',
                                  [str(column + 1)])

        if len(cell_modules) > 1:
            warnings.warn("Plot subworkflow '%s' contains more than "
                          "one spreadsheet cell module. Only one "
                          "was connected to a location module." %
                          recipe.plot.name)

        # Add a SheetReference module
        sheetref_module, new_sheetref = _get_or_create_module(SheetReference)

        if new_sheetref or new_location:
            # Connect the SheetReference to the CellLocation
            generator.connect_modules(
                sheetref_module, 'value',
                location_module, 'SheetReference')

        generator.connect_var(var_sheetname, sheetref_module, 'SheetName')
    else:
        warnings.warn("Plot subworkflow '%s' does not contain a "
                      "spreadsheet cell module" % recipe.plot.name)

    # TODO : use walk_modules() to find all modules above an InputPort's
    # 'Default' port and ignore them in the following loop

    # Copy the connections and locate the input ports
    plot_params = dict()  # param name -> [(module, input port name)]
    for connection in plot_pipeline.connection_list:
        src = plot_pipeline.modules[connection.source.moduleId]
        dest = plot_pipeline.modules[connection.destination.moduleId]
        if dest.module_descriptor is inputport_desc:
            continue
        elif src.module_descriptor is inputport_desc:
            param = get_function(src, 'name')
            ports = plot_params.setdefault(param, [])
            ports.append((
                plot_modules_map[connection.destination.moduleId],
                connection.destination.name))
        else:
            generator.connect_modules(
                plot_modules_map[connection.source.moduleId],
                connection.source.name,
                plot_modules_map[connection.destination.moduleId],
                connection.destination.name)

    # Find the constant ports declared with aliases
    aliases = {port.name: port for port in recipe.plot.ports if port.is_alias}
    for module in plot_pipeline.module_list:
        for function in module.functions:
            remove = False
            for param in function.parameters:
                if param.alias in aliases:
                    plot_params[param.alias] = [(
                        plot_modules_map[module.id],
                        function.name)]
                    remove = True

            if remove:
                # Remove the function from the generated pipeline
                generator.update_function(
                    plot_modules_map[module.id],
                    function.name,
                    None)
    del aliases

    # Add default values for unset constants
    parameters_incl_defaults = dict(recipe.parameters)
    for port in recipe.plot.ports:
        if (isinstance(port, ConstantPort) and
                port.default_value is not None and
                port.name not in recipe.parameters):
            parameters_incl_defaults[port.name] = [
                RecipeParameterValue(constant=port.default_value)]

    # Maps a port name to the list of parameters
    # for each parameter, we have a list of connections tying it to modules of
    # the plot
    conn_map = dict()  # param: str -> [[conn_id: int]]

    name_to_port = {port.name: port for port in recipe.plot.ports}

    actual_parameters = {}
    for port_name, parameters in parameters_incl_defaults.iteritems():
        plot_ports = plot_params.get(port_name, [])
        p_conns = conn_map[port_name] = []
        actual_values = []
        for parameter in parameters:
            if parameter.type == RecipeParameterValue.VARIABLE:
                conns, actual_param = add_variable_subworkflow_typecast(
                    generator,
                    parameter.variable,
                    plot_ports,
                    name_to_port[port_name].type,
                    typecast=typecast)
                p_conns.append(conns)
                actual_values.append(actual_param)
            else:  # parameter.type == RecipeParameterValue.CONSTANT
                desc = name_to_port[port_name].type
                p_conns.append(add_constant_module(
                    generator,
                    desc,
                    parameter.constant,
                    plot_ports))
                actual_values.append(parameter)
        actual_parameters[port_name] = actual_values
    del name_to_port

    pipeline_version = generator.perform_action()

    controller.vistrail.change_description(
        "Created DAT plot %s" % recipe.plot.name,
        pipeline_version)
    # FIXME : from_root seems to be necessary here, I don't know why
    controller.change_selected_version(pipeline_version, from_root=True)

    # Convert the modules to module ids in the port_map
    port_map = dict()
    for param, portlist in plot_params.iteritems():
        port_map[param] = [(module.id, port) for module, port in portlist]

    return PipelineInformation(pipeline_version,
                               DATRecipe(recipe.plot, actual_parameters),
                               conn_map, port_map)
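

# A hedged usage sketch of create_pipeline(), mirroring test_pipeline_creation
# above; 'controller', 'recipe' and the (0, 0) cell coordinates are assumed to
# exist in the caller's context:
#
#     info = create_pipeline(controller, recipe, 0, 0, None)
#     controller.change_selected_version(info.version)
#
# The returned PipelineInformation bundles the new pipeline version, the
# recipe actually applied (typecasts and default constants included), the
# conn_map (parameter name -> connection ids for each value) and the port_map
# (parameter name -> [(module id, port name)]).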