def update_output_modules(self, *args, **kwargs):
    """Synchronize the mode-configuration widgets with the output modes
    currently available in the module registry.

    Creates a configuration widget for every newly discovered mode and
    removes widgets whose mode is no longer registered.  Extra
    args/kwargs are accepted (and ignored) so this can be connected as
    a signal handler.
    """
    # need to find all currently loaded output modes (need to
    # check after modules are loaded and spin through registery)
    # and display them here
    reg = get_module_registry()
    output_d = reg.get_descriptor_by_name(get_vistrails_basic_pkg_id(),
                                          "OutputModule")
    sublist = reg.get_descriptor_subclasses(output_d)
    # mode_type -> mode class, collected from every OutputModule subclass
    modes = {}
    for d in sublist:
        if hasattr(d.module, '_output_modes_dict'):
            for mode_type, (mode, _) in (
                    d.module._output_modes_dict.iteritems()):
                modes[mode_type] = mode
    found_modes = set()
    for mode_type, mode in modes.iteritems():
        found_modes.add(mode_type)
        if mode_type not in self.mode_widgets:
            # new mode: build its widget, seeded with any persisted
            # default settings for this mode type
            mode_config = None
            output_settings = self.persistent_config.outputDefaultSettings
            if output_settings.has(mode_type):
                mode_config = getattr(output_settings, mode_type)
            widget = OutputModeConfigurationWidget(mode, mode_config)
            widget.fieldChanged.connect(self.field_was_changed)
            self.inner_layout.addWidget(widget)
            self.mode_widgets[mode_type] = widget
    # drop widgets for modes that disappeared; items() returns a list
    # in Python 2, so deleting entries inside the loop is safe
    for mode_type, widget in self.mode_widgets.items():
        if mode_type not in found_modes:
            self.inner_layout.removeWidget(self.mode_widgets[mode_type])
            del self.mode_widgets[mode_type]
def test_registry_ports_can_connect(self):
    """Test registry isPortSubType"""
    desc = self.registry.get_descriptor_by_name(
        get_vistrails_basic_pkg_id(), 'String')
    out_port = self.registry.source_ports_from_descriptor(desc)[0]
    in_port = self.registry.destination_ports_from_descriptor(desc)[0]
    assert self.registry.ports_can_connect(out_port, in_port)
def add_group_portSpecs_index(workflow):
    """Rebuild the (name, type) -> DBPortSpec index for every Group
    module in the workflow, recursing into nested groups.

    InputPort/OutputPort modules of the basic package inside a group's
    subworkflow define the group's external ports; the port name comes
    from the module's 'name' function parameter.
    """
    basic_pkg = get_vistrails_basic_pkg_id()

    def process_group(group):
        # Extract the value of the 'name' function on a port module.
        def get_port_name(module):
            port_name = None
            for function in module.db_functions:
                if function.db_name == 'name':
                    port_name = function.db_parameters[0].db_val
            return port_name

        g_workflow = group.db_workflow
        group.db_portSpecs_name_index = {}
        for module in g_workflow.db_modules:
            if module.db_name == 'InputPort' and \
                    module.db_package == basic_pkg:
                port_name = get_port_name(module)
                # FIXME add sigstring to DBPortSpec
                group.db_portSpecs_name_index[(port_name, 'input')] = \
                    DBPortSpec(id=-1, name=port_name, type='input')
            elif module.db_name == 'OutputPort' and \
                    module.db_package == basic_pkg:
                # bug fix: this branch referenced the undefined name
                # 'basic_package', raising NameError on any OutputPort
                port_name = get_port_name(module)
                # FIXME add sigstring to DBPortSpec
                group.db_portSpecs_name_index[(port_name, 'output')] = \
                    DBPortSpec(id=-1, name=port_name, type='output')
            elif module.db_name == 'Group' and \
                    module.db_package == basic_pkg:
                process_group(module)

    for module in workflow.db_modules:
        if module.vtType == DBGroup.vtType:
            process_group(module)
def testValue(self):
    """ Test values returned by value() function """
    basic_pkg = get_vistrails_basic_pkg_id()
    p = ModuleParam()
    unset = object()  # sentinel: leave strValue at its current value
    # (type, strValue to assign (or unset), expected value())
    cases = [("Float", unset, 0.0),
             ("Float", "1.5", 1.5),
             ("Integer", "", 0),
             ("Integer", "2", 2),
             ("String", "", ""),
             ("String", "test", "test"),
             ("Boolean", "", False),
             ("Boolean", "False", False),
             ("Boolean", "True", True)]
    for param_type, str_value, expected in cases:
        p.type = param_type
        p.identifier = basic_pkg
        if str_value is not unset:
            p.strValue = str_value
        assert p.value() == expected
def test_parse(self):
    """A param built with only type/val gets the basic-package id."""
    param = ModuleParam(type='Integer', val='1.5')
    self.assertEqual(param.identifier, get_vistrails_basic_pkg_id())
    self.assertEqual(param.type, 'Integer')
    self.assertFalse(param.namespace)
def check_port_spec(module, port_name, port_type, descriptor=None,
                    sigstring=None):
    """Verify that a port exists on a module with a matching signature.

    Raises UpgradeWorkflowError when the port exists on the descriptor
    with a different signature, or when it exists neither on the
    descriptor nor as a local port spec on the module itself.
    """
    basic_pkg = get_vistrails_basic_pkg_id()
    reg = get_module_registry()
    found = False
    try:
        if descriptor is not None:
            s = reg.get_port_spec_from_descriptor(descriptor, port_name,
                                                  port_type)
            found = True
            # Normalize the expected sigstring: rewrite any outdated
            # package identifiers to their current ones before comparing
            spec_tuples = parse_port_spec_string(sigstring, basic_pkg)
            for i in xrange(len(spec_tuples)):
                spec_tuple = spec_tuples[i]
                port_pkg = reg.get_package_by_name(spec_tuple[0])
                if port_pkg.identifier != spec_tuple[0]:
                    # we have an old identifier
                    spec_tuples[i] = (port_pkg.identifier,) + spec_tuple[1:]
            sigstring = create_port_spec_string(spec_tuples)
            # sigstring = expand_port_spec_string(sigstring, basic_pkg)
            if s.sigstring != sigstring:
                msg = ('%s port "%s" of module "%s" exists, but '
                       'signatures differ "%s" != "%s"') % \
                       (port_type.capitalize(), port_name, module.name,
                        s.sigstring, sigstring)
                raise UpgradeWorkflowError(msg, module, port_name, port_type)
    except MissingPort:
        # not on the descriptor; fall through to the local-spec check
        pass
    if not found and \
            not module.has_portSpec_with_name((port_name, port_type)):
        msg = '%s port "%s" of module "%s" does not exist.' % \
            (port_type.capitalize(), port_name, module.name)
        raise UpgradeWorkflowError(msg, module, port_name, port_type)
def create_workflow(self, c):
    """Build a two-module test workflow on controller c.

    Adds TestUpgradeA and TestUpgradeB at package version 0.8, a
    function 'a' (Float, value 12) on the first module, and an Integer
    connection from m1.z to m2.b.  Returns the resulting version.
    """
    upgrade_test_pkg = "org.vistrails.vistrails.tests.upgrade"
    d1 = ModuleDescriptor(package=upgrade_test_pkg,
                          name="TestUpgradeA",
                          namespace="",
                          package_version="0.8")
    m1 = c.create_module_from_descriptor(d1, use_desc_pkg_version=True)
    # marked invalid, presumably so the upgrade machinery processes it
    # — TODO confirm
    m1.is_valid = False
    c.add_module_action(m1)
    d2 = ModuleDescriptor(package=upgrade_test_pkg,
                          name="TestUpgradeB",
                          namespace="",
                          package_version="0.8")
    m2 = c.create_module_from_descriptor(d2, use_desc_pkg_version=True)
    m2.is_valid = False
    c.add_module_action(m2)
    basic_pkg = get_vistrails_basic_pkg_id()
    # function "a" on m1 with a single Float parameter set to 12
    psi = PortSpecItem(module="Float", package=basic_pkg,
                       namespace="", pos=0)
    function_port_spec = PortSpec(name="a", type="input", items=[psi])
    f = c.create_function(m1, function_port_spec, [12])
    c.add_function_action(m1, f)
    # Integer connection m1.z -> m2.b
    conn_out_psi = PortSpecItem(module="Integer", package=basic_pkg,
                                namespace="", pos=0)
    conn_out_spec = PortSpec(name="z", type="output",
                             items=[conn_out_psi])
    conn_in_psi = PortSpecItem(module="Integer", package=basic_pkg,
                               namespace="", pos=0)
    conn_in_spec = PortSpec(name="b", type="input",
                            items=[conn_in_psi])
    conn = c.create_connection(m1, conn_out_spec, m2, conn_in_spec)
    c.add_connection_action(conn)
    return c.current_version
def createEditor(self, parent, option, index):
    """Return the editor widget for a port-table cell.

    Column 2 (depth) gets a spin box; column 1 (port type) gets a
    completing combo box listing every registered descriptor grouped
    under disabled package-name separator rows, with the Variant
    descriptor preselected.  Other columns fall back to the default
    delegate editor.
    """
    registry = get_module_registry()
    if index.column()==2: #Depth
        spinbox = QtGui.QSpinBox(parent)
        spinbox.setValue(0)
        return spinbox
    elif index.column()==1: #Port type
        combo = CompletingComboBox(parent)
        # FIXME just use descriptors here!!
        variant_desc = registry.get_descriptor_by_name(
            get_vistrails_basic_pkg_id(), 'Variant')
        for _, pkg in sorted(registry.packages.iteritems()):
            # bold, non-selectable separator row showing the package name
            pkg_item = QtGui.QStandardItem("----- %s -----" % pkg.name)
            pkg_item.setData('', QtCore.Qt.UserRole)
            pkg_item.setFlags(pkg_item.flags() & ~(
                QtCore.Qt.ItemIsEnabled |
                QtCore.Qt.ItemIsSelectable))
            font = pkg_item.font()
            font.setBold(True)
            pkg_item.setFont(font)
            combo.model().appendRow(pkg_item)
            for _, descriptor in sorted(pkg.descriptors.iteritems()):
                if descriptor is variant_desc:
                    variant_index = combo.count()
                combo.addItem("%s (%s)" % (descriptor.name,
                                           descriptor.identifier),
                              descriptor.sigstring)
        # NOTE(review): variant_index is only bound if variant_desc was
        # encountered above; assumed always present via the basic
        # package — confirm
        combo.select_default_item(variant_index)
        return combo
    else:
        return QtGui.QItemDelegate.createEditor(self, parent, option,
                                                index)
def update_output_modules(self, *args, **kwargs):
    """Refresh the set of output-mode configuration widgets from the
    registry: add widgets for newly available modes, remove widgets
    for modes that are gone.  *args/**kwargs are ignored so this can
    serve as a signal handler.
    """
    # need to find all currently loaded output modes (need to
    # check after modules are loaded and spin through registery)
    # and display them here
    reg = get_module_registry()
    output_d = reg.get_descriptor_by_name(get_vistrails_basic_pkg_id(),
                                          "OutputModule")
    sublist = reg.get_descriptor_subclasses(output_d)
    # gather mode_type -> mode class over all OutputModule subclasses
    modes = {}
    for d in sublist:
        if hasattr(d.module, '_output_modes_dict'):
            for mode_type, (mode, _) in (d.module._output_modes_dict
                                         .iteritems()):
                modes[mode_type] = mode
    found_modes = set()
    for mode_type, mode in modes.iteritems():
        found_modes.add(mode_type)
        if mode_type not in self.mode_widgets:
            # unseen mode: create its widget, seeded with persisted
            # defaults when available
            mode_config = None
            output_settings = self.persistent_config.outputDefaultSettings
            if output_settings.has(mode_type):
                mode_config = getattr(output_settings, mode_type)
            widget = OutputModeConfigurationWidget(mode, mode_config)
            widget.fieldChanged.connect(self.field_was_changed)
            self.inner_layout.addWidget(widget)
            self.mode_widgets[mode_type] = widget
    # items() gives a list copy in Python 2, so in-loop deletion is safe
    for mode_type, widget in self.mode_widgets.items():
        if mode_type not in found_modes:
            self.inner_layout.removeWidget(self.mode_widgets[mode_type])
            del self.mode_widgets[mode_type]
def createEditor(self, parent, option, index):
    """Create the cell editor for the port table.

    Depth column (2): plain spin box.  Port-type column (1): combo box
    of all registered descriptors, grouped by package behind disabled
    bold separator rows, defaulting to Variant.  Anything else uses
    the base delegate.
    """
    registry = get_module_registry()
    if index.column() == 2: #Depth
        spinbox = QtGui.QSpinBox(parent)
        spinbox.setValue(0)
        return spinbox
    elif index.column() == 1: #Port type
        combo = CompletingComboBox(parent)
        # FIXME just use descriptors here!!
        variant_desc = registry.get_descriptor_by_name(
            get_vistrails_basic_pkg_id(), 'Variant')
        for _, pkg in sorted(registry.packages.iteritems()):
            # separator row: package name, bold, not selectable
            pkg_item = QtGui.QStandardItem("----- %s -----" % pkg.name)
            pkg_item.setData('', QtCore.Qt.UserRole)
            pkg_item.setFlags(
                pkg_item.flags() & ~(QtCore.Qt.ItemIsEnabled |
                                     QtCore.Qt.ItemIsSelectable))
            font = pkg_item.font()
            font.setBold(True)
            pkg_item.setFont(font)
            combo.model().appendRow(pkg_item)
            for _, descriptor in sorted(pkg.descriptors.iteritems()):
                if descriptor is variant_desc:
                    variant_index = combo.count()
                combo.addItem(
                    "%s (%s)" % (descriptor.name,
                                 descriptor.identifier),
                    descriptor.sigstring)
        # NOTE(review): variant_index is bound only when Variant was
        # seen in the loop; assumed guaranteed by the basic package —
        # confirm
        combo.select_default_item(variant_index)
        return combo
    else:
        return QtGui.QItemDelegate.createEditor(self, parent, option,
                                                index)
def can_convert(cls, sub_descs, super_descs):
    """Return True when this converter's in_value port accepts
    sub_descs and its out_value port is acceptable to super_descs
    (Variant matches anything on either side)."""
    from vistrails.core.modules.module_registry import get_module_registry
    from vistrails.core.system import get_vistrails_basic_pkg_id
    reg = get_module_registry()
    variant = reg.get_descriptor_by_name(get_vistrails_basic_pkg_id(),
                                         'Variant')
    desc = reg.get_descriptor(cls)

    def compatible(subs, supers):
        # Variant on either side is a wildcard; otherwise require a
        # descriptor-subclass relationship
        return all(a == variant or b == variant or
                   reg.is_descriptor_subclass(a, b)
                   for a, b in izip(subs, supers))

    in_descs = reg.get_port_spec_from_descriptor(
        desc, 'in_value', 'input').descriptors()
    if len(sub_descs) != len(in_descs):
        return False
    if not compatible(sub_descs, in_descs):
        return False
    out_descs = reg.get_port_spec_from_descriptor(
        desc, 'out_value', 'output').descriptors()
    if len(out_descs) != len(super_descs):
        return False
    if not compatible(out_descs, super_descs):
        return False
    return True
def create_port_spec_item(self):
    """Return a sample enum-typed String PortSpecItem."""
    item_kwargs = dict(id=0,
                       pos=0,
                       module="String",
                       package=get_vistrails_basic_pkg_id(),
                       label="testLabel",
                       default="abc",
                       values=["abc", "def", "ghi"],
                       entry_type="enum")
    return PortSpecItem(**item_kwargs)
def create_port(self, id_scope=IdScope()): port = Port(id=id_scope.getNewId(Port.vtType), type='source', moduleId=12L, moduleName='String', name='value', signature='(%s:String)' % get_vistrails_basic_pkg_id()) return port
def __init__(self, param, parent=None):
    """Spin-box editor for a basic-package Integer parameter."""
    assert param.type == 'Integer'
    assert param.identifier == get_vistrails_basic_pkg_id()
    QtGui.QSpinBox.__init__(self, parent)
    ConstantWidgetMixin.__init__(self, param.strValue)
    self.connect(self, QtCore.SIGNAL('valueChanged(int)'),
                 self.change_val)
    self.setContents(param.strValue)
def __init__(self, param, parent=None):
    """Double-spin-box editor for a basic-package Float parameter."""
    assert param.type == "Float"
    assert param.identifier == get_vistrails_basic_pkg_id()
    QtGui.QDoubleSpinBox.__init__(self, parent)
    ConstantWidgetMixin.__init__(self, param.strValue)
    self.connect(self, QtCore.SIGNAL("valueChanged(double)"),
                 self.change_val)
    self.setContents(param.strValue)
def replace_group(controller, pipeline, module_id, new_subpipeline):
    """Swap the group at module_id for a fresh Group module holding
    new_subpipeline, keeping the old group's canvas location."""
    old_group = pipeline.modules[module_id]
    new_group = controller.create_module(
        get_vistrails_basic_pkg_id(), 'Group', '',
        old_group.location.x, old_group.location.y)
    new_group.pipeline = new_subpipeline
    return UpgradeWorkflowHandler.replace_generic(controller, pipeline,
                                                  old_group, new_group)
def create_artifact_from_filename(filename, account, id_scope):
    """Wrap a filename as an OPM artifact: a 'file' function holding a
    single File-typed parameter, tagged with the given account."""
    file_type = '%s:File' % get_vistrails_basic_pkg_id()
    param = DBParameter(id=-1, pos=0, type=file_type, val=filename)
    func = DBFunction(id=-1, name="file", pos=0, parameters=[param])
    artifact_id = 'a' + str(id_scope.getNewId(DBOpmArtifact.vtType))
    return DBOpmArtifact(id=artifact_id,
                         value=DBOpmArtifactValue(func),
                         accounts=[DBOpmAccountId(id=account.db_id)])
def create_port_spec(self, id_scope=IdScope()):
    """Return a sample input PortSpec named 'SetValue'."""
    # FIXME add a valid port spec
    sig = '(%s:String)' % get_vistrails_basic_pkg_id()
    return PortSpec(id=id_scope.getNewId(PortSpec.vtType),
                    name='SetValue',
                    type='input',
                    sigstring=sig)
def parse_db_type(self):
    """Split db_type into cached (_identifier, _type, _namespace);
    all three become None when db_type is empty."""
    if not self.db_type:
        self._identifier = None
        self._type = None
        self._namespace = None
    else:
        (self._identifier, self._type, self._namespace) = \
            parse_port_spec_item_string(self.db_type,
                                        get_vistrails_basic_pkg_id())
def createAliasWidget(self, val=None, parent=None):
    """Build the constant-editor widget for this alias's parameter,
    optionally overriding its string value with val."""
    # an empty identifier means the basic package
    idn = (get_vistrails_basic_pkg_id() if self.vtparam.identifier == ""
           else self.vtparam.identifier)
    descriptor = get_module_registry().get_descriptor_by_name(
        idn, self.vtparam.type, self.vtparam.namespace)
    widget_class = get_widget_class(descriptor)
    if val:
        self.vtparam.strValue = val
    return widget_class(self.vtparam, parent)
def createAliasWidget(alias, controller, parent=None):
    """Create an editor widget for the alias's underlying parameter,
    seeded with the alias component's value."""
    vistrail = controller.vtController.vistrail
    p = vistrail.db_get_object(alias.component.vttype,
                               alias.component.vtid)
    # an empty identifier denotes the basic package
    idn = (get_vistrails_basic_pkg_id() if p.identifier == ""
           else p.identifier)
    p_module = get_module_registry().get_module_by_name(idn, p.type,
                                                        p.namespace)
    widget_class = get_widget_class(p_module)
    p.strValue = alias.component.val
    return widget_class(p, parent)
def createAliasWidget(self, val=None, parent=None):
    """Create the editor widget used to edit this alias's value."""
    idn = self.vtparam.identifier
    if idn == '':
        # empty identifier -> basic package
        idn = get_vistrails_basic_pkg_id()
    registry = get_module_registry()
    desc = registry.get_descriptor_by_name(idn, self.vtparam.type,
                                           self.vtparam.namespace)
    widget_class = get_widget_class(desc)
    if val:
        self.vtparam.strValue = val
    return widget_class(self.vtparam, parent)
def createAliasWidget(alias, controller, parent=None):
    """Create an editor widget for the alias's underlying parameter."""
    vt = controller.vtController.vistrail
    param = vt.db_get_object(alias.component.vttype,
                             alias.component.vtid)
    # an empty identifier denotes the basic package
    pkg_id = (get_vistrails_basic_pkg_id() if param.identifier == ''
              else param.identifier)
    registry = get_module_registry()
    descriptor = registry.get_descriptor_by_name(pkg_id, param.type,
                                                 param.namespace)
    widget_class = get_widget_class(descriptor)
    param.strValue = alias.component.val
    return widget_class(param, parent)
def create_artifact_from_db_tuple(db_tuple, account, id_scope):
    """Wrap a database row as an OPM artifact: a 'dbEntry' function
    with one String-typed parameter per column."""
    str_type = '%s:String' % get_vistrails_basic_pkg_id()
    parameters = [DBParameter(id=-1, pos=0, type=str_type, val=entry)
                  for entry in db_tuple]
    function = DBFunction(id=-1, name="dbEntry", pos=0,
                          parameters=parameters)
    artifact_id = 'a' + str(id_scope.getNewId(DBOpmArtifact.vtType))
    return DBOpmArtifact(id=artifact_id,
                         value=DBOpmArtifactValue(function),
                         accounts=[DBOpmAccountId(id=account.db_id)])
def canonicalize(sig_item):
    """Normalize a signature item to a (descriptor, label) pair.

    Tuples are (module_class, label); lists map to the basic List
    descriptor; anything else is treated as a bare module class.
    """
    if isinstance(sig_item, tuple):
        # (module_class, label) pair
        return (registry.get_descriptor(sig_item[0]), sig_item[1])
    if isinstance(sig_item, list):
        list_desc = registry.get_descriptor_by_name(
            get_vistrails_basic_pkg_id(), 'List')
        return (list_desc, None)
    # bare module class, no label
    return (registry.get_descriptor(sig_item), None)
def __init__(self, param, parent=None):
    """Horizontal slider editor for a basic Integer or Float parameter."""
    assert param.type in ['Integer', 'Float']
    assert param.identifier == get_vistrails_basic_pkg_id()
    QtGui.QSlider.__init__(self, QtCore.Qt.Horizontal, parent)
    ConstantWidgetMixin.__init__(self, param.strValue)
    # the same widget serves both numeric types; remember which to emit
    self.sliderType = int if param.type == 'Integer' else float
    self.connect(self, QtCore.SIGNAL('valueChanged(int)'),
                 self.change_val)
    QtGui.QSlider.setSingleStep(self, 1)
    QtGui.QSlider.setPageStep(self, 5)
    # defaults for the float-range mapping — presumably reconfigured
    # later; TODO confirm
    self.floatMinVal = 0.0
    self.floatMaxVal = 1.0
    self.floatStepSize = 1
    self.numSteps = 1
    self.setContents(param.strValue)
    self.setTickPosition(QtGui.QSlider.TicksAbove)
def test_create_from_items(self):
    """Constructing a PortSpec from two PortSpecItems succeeds."""
    basic_pkg = get_vistrails_basic_pkg_id()
    items = [PortSpecItem(pos=0, package=basic_pkg, module="Integer",
                          label="a", default="123"),
             PortSpecItem(pos=1, package=basic_pkg, module="String",
                          label="b", default="abc")]
    port_spec = PortSpec(id=-1, name="SetValue", type='input',
                         portSpecItems=items)
def __init__(self, param, parent=None):
    """Slider editor shared by basic Integer and Float parameters."""
    assert param.type in ['Integer', 'Float']
    assert param.identifier == get_vistrails_basic_pkg_id()
    QtGui.QSlider.__init__(self, QtCore.Qt.Horizontal, parent)
    ConstantWidgetMixin.__init__(self, param.strValue)
    # emit ints for Integer params, floats for Float params
    self.sliderType = int if param.type == 'Integer' else float
    self.connect(self, QtCore.SIGNAL('valueChanged(int)'),
                 self.change_val)
    QtGui.QSlider.setSingleStep(self, 1)
    QtGui.QSlider.setPageStep(self, 5)
    # float-range mapping defaults — presumably adjusted elsewhere;
    # TODO confirm
    self.floatMinVal = 0.0
    self.floatMaxVal = 1.0
    self.floatStepSize = 1
    self.numSteps = 1
    self.setContents(param.strValue)
    self.setTickPosition(QtGui.QSlider.TicksAbove)
def parse_descriptor_string(d_string, cur_package=None):
    """Expand a descriptor string into (package, name, namespace).

    Bundled package shortcuts are expanded ("basic:String" ->
    ("org.vistrails.vistrails.basic", "String", None-or-"")), and
    namespaces precede the module name, '|'-separated
    ("NsA|NsB|Module", "org.example.my" ->
    ("org.example.my", "Module", "NsA|NsB")).  When no package prefix
    is present, cur_package is used; when that is None, the registry's
    current package or, failing that, the basic package applies.
    """
    namespace = None
    head, sep, tail = d_string.strip().partition(':')
    if sep:
        # explicit package prefix; expand shortcuts lacking a dot
        qual_name = tail
        package = head if '.' in head else \
            '%s.%s' % (_defaultPkgPrefix, head)
    else:
        qual_name = d_string
        if cur_package is not None:
            package = cur_package
        else:
            reg = get_module_registry()
            if reg._current_package is not None:
                package = reg._current_package.identifier
            else:
                package = get_vistrails_basic_pkg_id()
    pieces = qual_name.rsplit('|', 1)
    if len(pieces) == 2:
        namespace, name = pieces
    else:
        name = qual_name
    return (package, name, namespace)
def parse_descriptor_string(d_string, cur_package=None):
    """Expand a descriptor string into (package, name, namespace).

    Shortcuts for bundled packages are expanded ("basic:String" ->
    ("org.vistrails.vistrails.basic", "String", None-or-"")); the
    namespace comes before the module name, '|'-separated
    ("NsA|NsB|Module", "org.example.my" ->
    ("org.example.my", "Module", "NsA|NsB")).  Without a package
    prefix, cur_package wins; when it is None, the registry's current
    package or the basic package is used.
    """
    namespace = None
    head, sep, tail = d_string.strip().partition(':')
    if sep:
        # explicit package prefix; expand shortcuts lacking a dot
        qual_name = tail
        package = head if '.' in head else \
            '%s.%s' % (get_vistrails_default_pkg_prefix(), head)
    else:
        qual_name = d_string
        if cur_package is not None:
            package = cur_package
        else:
            reg = get_module_registry()
            if reg._current_package is not None:
                package = reg._current_package.identifier
            else:
                package = get_vistrails_basic_pkg_id()
    pieces = qual_name.rsplit('|', 1)
    if len(pieces) == 2:
        namespace, name = pieces
    else:
        name = qual_name
    return (package, name, namespace)
def create_workflow(self, c):
    """Build a two-module test workflow on controller c.

    Adds TestUpgradeA and TestUpgradeB at package version 0.8, a
    function 'a' (Float, value 12) on the first module, and an Integer
    connection from m1.z to m2.b.  Returns the resulting version.
    """
    upgrade_test_pkg = 'org.vistrails.vistrails.tests.upgrade'
    d1 = ModuleDescriptor(package=upgrade_test_pkg,
                          name='TestUpgradeA',
                          namespace='',
                          package_version='0.8')
    m1 = c.create_module_from_descriptor(d1, use_desc_pkg_version=True)
    # marked invalid, presumably so the upgrade machinery processes it
    # — TODO confirm
    m1.is_valid = False
    c.add_module_action(m1)
    d2 = ModuleDescriptor(package=upgrade_test_pkg,
                          name='TestUpgradeB',
                          namespace='',
                          package_version='0.8')
    m2 = c.create_module_from_descriptor(d2, use_desc_pkg_version=True)
    m2.is_valid = False
    c.add_module_action(m2)
    basic_pkg = get_vistrails_basic_pkg_id()
    # function "a" on m1 with a single Float parameter set to 12
    psi = PortSpecItem(module="Float", package=basic_pkg,
                       namespace="", pos=0)
    function_port_spec = PortSpec(name="a", type="input", items=[psi])
    f = c.create_function(m1, function_port_spec, [12])
    c.add_function_action(m1, f)
    # Integer connection m1.z -> m2.b
    conn_out_psi = PortSpecItem(module="Integer", package=basic_pkg,
                                namespace="", pos=0)
    conn_out_spec = PortSpec(name="z", type="output",
                             items=[conn_out_psi])
    conn_in_psi = PortSpecItem(module="Integer", package=basic_pkg,
                               namespace="", pos=0)
    conn_in_spec = PortSpec(name="b", type="input",
                            items=[conn_in_psi])
    conn = c.create_connection(m1, conn_out_spec, m2, conn_in_spec)
    c.add_connection_action(conn)
    return c.current_version
def updateVistrail(self):
    """ updateVistrail() -> None
    Update Vistrail to contain changes in the port table

    """
    wanted = self.number.value()
    have = self.countAdditionalPorts()
    if wanted > have:
        sig = '(%s:Variant)' % get_vistrails_basic_pkg_id()
        new_ports = [('input', 'item%d' % i, sig, -1)
                     for i in xrange(have, wanted)]
        self.controller.update_ports(self.module.id, [], new_ports)
    elif wanted < have:
        doomed = [('input', spec.name)
                  for spec in self.module.input_port_specs[wanted - have:]]
        self.controller.update_ports(self.module.id, doomed, [])
    else:
        # Nothing changed
        return
    return True
def updateVistrail(self):
    """ updateVistrail() -> None
    Update Vistrail to contain changes in the port table

    """
    wanted = self.number.value()
    have = self.countAdditionalPorts()
    if wanted > have:
        sig = '(%s:Module)' % get_vistrails_basic_pkg_id()
        new_ports = [('input', 'item%d' % i, sig, -1)
                     for i in xrange(have, wanted)]
        self.controller.update_ports(self.module.id, [], new_ports)
    elif wanted < have:
        doomed = [('input', spec.name)
                  for spec in self.module.input_port_specs[wanted - have:]]
        self.controller.update_ports(self.module.id, doomed, [])
    else:
        # Nothing changed
        return
    return True
def updateVistrail(self):
    """ updateVistrail() -> None
    Update Vistrail to contain changes in the port table

    Walks the editor's entry widgets, rejecting duplicate names, and
    computes the delete/add port lists: unchanged ports are kept,
    ports whose Table/List kind flipped are recreated, and ports no
    longer present in the editor are deleted.
    """
    table_sig = '(%s:Table)' % identifier
    list_sig = '(%s:List)' % get_vistrails_basic_pkg_id()
    seen_new_ports = set()
    # name -> is_table for the ports currently on the module
    current_ports = dict(self.getCurrentPorts())
    add_ports = []
    delete_ports = []
    for i in xrange(self._list_layout.count()):
        widget = self._list_layout.itemAt(i).widget()
        is_table = isinstance(widget, TableEntry)
        name = widget.name
        if name in seen_new_ports:
            QtGui.QMessageBox.critical(
                self,
                "Duplicated port name",
                "There is several input ports with name %r" % name)
            return
        seen_new_ports.add(name)
        if name in current_ports:
            old_is_table = current_ports.pop(name)
            if is_table == old_is_table:
                # unchanged port: keep as-is
                continue
            # kind flipped between Table and List: recreate the port
            delete_ports.append(('input', name))
        sigstring = table_sig if is_table else list_sig
        add_ports.append(('input', name, sigstring, -1))
    # anything left in current_ports no longer appears in the editor
    delete_ports.extend(('input', unseen_port)
                        for unseen_port in current_ports.iterkeys())
    self.controller.update_ports(self.module.id, delete_ports, add_ports)
    return True
def create_vistrail(self):
    """Build a small test Vistrail: one action adding a Float module,
    then a second action with a change op (function 'value') and a
    delete op (a parameter); both actions are tagged.
    """
    vistrail = Vistrail()
    m = Module(id=vistrail.idScope.getNewId(Module.vtType),
               name='Float',
               package=get_vistrails_basic_pkg_id())
    add_op = AddOp(id=vistrail.idScope.getNewId(AddOp.vtType),
                   what=Module.vtType,
                   objectId=m.id,
                   data=m)
    function_id = vistrail.idScope.getNewId(ModuleFunction.vtType)
    function = ModuleFunction(id=function_id,
                              name='value')
    # change op replaces object 2 with the new function on module m
    change_op = ChangeOp(id=vistrail.idScope.getNewId(ChangeOp.vtType),
                         what=ModuleFunction.vtType,
                         oldObjId=2,
                         newObjId=function.real_id,
                         parentObjId=m.id,
                         parentObjType=Module.vtType,
                         data=function)
    param = ModuleParam(id=vistrail.idScope.getNewId(ModuleParam.vtType),
                        type='Integer',
                        val='1')
    # delete op removes that parameter from the function
    delete_op = DeleteOp(id=vistrail.idScope.getNewId(DeleteOp.vtType),
                         what=ModuleParam.vtType,
                         objectId=param.real_id,
                         parentObjId=function.real_id,
                         parentObjType=ModuleFunction.vtType)
    action1 = Action(id=vistrail.idScope.getNewId(Action.vtType),
                     operations=[add_op])
    action2 = Action(id=vistrail.idScope.getNewId(Action.vtType),
                     operations=[change_op, delete_op])
    # action1 is a child of the root (version 0); action2 of action1
    vistrail.add_action(action1, 0)
    vistrail.add_action(action2, action1.id)
    vistrail.addTag('first action', action1.id)
    vistrail.addTag('second action', action2.id)
    return vistrail
def updateVistrail(self):
    """ updateVistrail() -> None
    Update Vistrail to contain changes in the port table

    Validates the editor entries (no duplicate names), then computes
    the delete/add lists: unchanged ports are left alone, ports whose
    Table/List kind flipped are deleted and re-added, and ports absent
    from the editor are deleted.
    """
    table_sig = '(%s:Table)' % identifier
    list_sig = '(%s:List)' % get_vistrails_basic_pkg_id()
    seen_new_ports = set()
    # name -> is_table for the module's current ports
    current_ports = dict(self.getCurrentPorts())
    add_ports = []
    delete_ports = []
    for i in xrange(self._list_layout.count()):
        widget = self._list_layout.itemAt(i).widget()
        is_table = isinstance(widget, TableEntry)
        name = widget.name
        if name in seen_new_ports:
            QtGui.QMessageBox.critical(
                self,
                "Duplicated port name",
                "There is several input ports with name %r" % name)
            return
        seen_new_ports.add(name)
        if name in current_ports:
            old_is_table = current_ports.pop(name)
            if is_table == old_is_table:
                # unchanged: nothing to do for this port
                continue
            # kind changed: drop the old port before re-adding
            delete_ports.append(('input', name))
        sigstring = table_sig if is_table else list_sig
        add_ports.append(('input', name, sigstring, -1))
    # ports remaining in current_ports were removed from the editor
    delete_ports.extend(
        ('input', unseen_port)
        for unseen_port in current_ports.iterkeys())
    self.controller.update_ports(self.module.id, delete_ports, add_ports)
    return True
def process_exec(item_exec, workflow, account, upstream_lookup,
                 downstream_lookup, depth, conn_artifacts=None,
                 function_artifacts=None, module_processes=None,
                 in_upstream_artifacts={}, in_downstream_artifacts={},
                 add_extras=False):
    """Translate one module execution into provenance entities.

    Maps a single item_exec (one module run inside ``workflow``) onto the
    provenance model: artifacts for connections and functions, "used" and
    "wasGeneratedBy" dependencies, plus special handling for control-flow
    and grouping modules (Map, Group, InputPort, OutputPort, If) and for
    modules that looped internally.

    NOTE(review): this function references several names that are not
    parameters (reg, id_scope, artifacts, processes, dependencies,
    accounts, depth_accounts, db_artifacts, file_artifacts,
    process_workflow, do_create_process, create_* helpers) -- it appears
    to be nested inside an enclosing workflow-processing routine; confirm
    before moving it.

    NOTE(review): in_upstream_artifacts / in_downstream_artifacts are
    mutable default arguments; they are only read or copied here, but
    verify no caller mutates the shared defaults.
    """
    print 'in_upstream:', [(n, x.db_id)
                           for n, x_list in in_upstream_artifacts.iteritems()
                           for x in x_list]
    print 'in_downstream:', [(n, x.db_id)
                             for n, x_list in
                             in_downstream_artifacts.iteritems()
                             for x in x_list]
    # FIXME merge conn_artifacts and function_artifacts
    # problem is that a conn_artifact is OUTPUT while function_artifact
    # is INPUT
    if conn_artifacts is None:
        conn_artifacts = {}
    if function_artifacts is None:
        function_artifacts = {}
    if module_processes is None:
        module_processes = {}
    # while item_exec.vtType == DBLoopExec.vtType:
    #     item_exec = item_exec.db_item_execs[0]
    (module, process) = module_processes[item_exec.db_module_id]

    def process_connection(conn):
        # Resolve the artifact for a connection's source port, caching by
        # (source module id, source port name).  Returns (artifact,
        # in_cache) so callers can skip re-adding cached artifacts.
        source = conn.db_ports_type_index['source']
        source_t = (source.db_moduleId, source.db_name)
        in_cache = False
        print '!!! processing', source_t
        if source_t in conn_artifacts:
            artifact = conn_artifacts[source_t]
            in_cache = True
        else:
            # key off source module and port name
            # get descriptor from registry and then port_spec
            # store port_spec as artifact
            if source.db_moduleId < 0:
                dest = conn.db_ports_type_index['destination']
                module = source.db_module
            else:
                module = workflow.db_modules_id_index[source.db_moduleId]
            print module.db_name, module.db_id
            pkg = get_package(reg, module.db_package, module.db_version)
            if not module.db_namespace:
                module_namespace = ''
            else:
                module_namespace = module.db_namespace
            module_desc = \
                pkg.db_module_descriptors_name_index[(module.db_name,
                                                      module_namespace,
                                                      '')]
            # FIXME make work for module port_specs, too
            # for example, a PythonSource with a given port in
            # module.db_portSpecs
            port_spec = None
            spec_t = (source.db_name, 'output')
            if spec_t in module.db_portSpecs_name_index:
                port_spec = module.db_portSpecs_name_index[spec_t]
            # Walk up the descriptor hierarchy until the port spec is
            # found or the root descriptor is reached.
            while port_spec is None and \
                    module_desc.db_id != reg.db_root_descriptor_id:
                if spec_t in module_desc.db_portSpecs_name_index:
                    port_spec = module_desc.db_portSpecs_name_index[spec_t]
                base_id = module_desc.db_base_descriptor_id
                # inefficient spin through db_packages but we do
                # not have the descriptors_by_id index that exists
                # on core.module_registry.ModuleRegistry here
                module_desc = None
                for pkg in reg.db_packages:
                    if base_id in pkg.db_module_descriptors_id_index:
                        module_desc = \
                            pkg.db_module_descriptors_id_index[base_id]
                        break
                if module_desc is None:
                    raise KeyError("Cannot find base descriptor id %d" %
                                   base_id)
                # pkg = get_package(reg, module_desc.db_package,
                #                   module_desc.db_package_version)
                # module_desc = pkg.db_module_descriptors_id_index[base_id]
            if port_spec is None:
                port_spec = module_desc.db_portSpecs_name_index[spec_t]
            print module_desc.db_name
            artifact = \
                create_artifact_from_port_spec(port_spec, account, id_scope)
            artifacts.append(artifact)
            print 'adding conn_artifact', artifact.db_id, source_t, \
                source.db_moduleName
            conn_artifacts[source_t] = artifact
        return (artifact, in_cache)

    def process_map(module, found_input_ports, found_output_ports):
        # Expand a control-flow Map: a manual Split process feeds each
        # iteration's inputs; a Join process collects iteration results.
        print "*** Processing Map"
        if depth + 1 in depth_accounts:
            account = depth_accounts[depth + 1]
        else:
            account = create_account(depth + 1, id_scope)
            accounts.append(account)
            depth_accounts[depth + 1] = account

        # need to have process that extracts artifacts for each iteration
        input_list_artifact = found_input_ports['InputList']
        result_artifact = found_output_ports.get('Result', None)
        # if InputPort or OutputPort is a Connection we cannot do anything
        if (found_input_ports['InputPort'].vtType == DBConnection.vtType or
                found_input_ports['OutputPort'].vtType ==
                DBConnection.vtType):
            return
        input_port_list = \
            literal_eval(
                found_input_ports['InputPort'].db_parameters[0].db_val)
        output_port = \
            found_input_ports['OutputPort'].db_parameters[0].db_val
        s_process = create_process_manual('Split', account, id_scope)
        processes.append(s_process)
        dependencies.append(
            create_used(s_process, input_list_artifact, account, id_scope))

        # need to have process that condenses artifacts from each iteration
        if result_artifact is not None:
            j_process = create_process_manual('Join', account, id_scope)
            processes.append(j_process)
        for loop_exec in item_exec.db_loop_execs:
            for loop_iteration in loop_exec.db_loop_iterations:
                loop_up_artifacts = {}
                loop_down_artifacts = {}
                # One Split-generated artifact per mapped input port.
                for input_name in input_port_list:
                    port_spec = DBPortSpec(id=-1, name=input_name,
                                           type='output')
                    s_artifact = \
                        create_artifact_from_port_spec(port_spec, account,
                                                       id_scope)
                    artifacts.append(s_artifact)
                    dependencies.append(
                        create_was_generated_by(s_artifact, s_process,
                                                account, id_scope))
                    if input_name not in loop_up_artifacts:
                        loop_up_artifacts[input_name] = []
                    loop_up_artifacts[input_name].append(s_artifact)

                # process output_port
                if loop_iteration.db_completed == 1:
                    port_spec = DBPortSpec(id=-1, name=output_port,
                                           type='output')
                    o_artifact = \
                        create_artifact_from_port_spec(port_spec, account,
                                                       id_scope)
                    artifacts.append(o_artifact)
                    if output_port not in loop_down_artifacts:
                        loop_down_artifacts[output_port] = []
                    loop_down_artifacts[output_port].append(o_artifact)
                    if result_artifact is not None:
                        dependencies.append(
                            create_used(j_process, o_artifact, account,
                                        id_scope))

                # now process a loop_exec
                for child_exec in loop_iteration.db_item_execs:
                    do_create_process(workflow, child_exec, account,
                                      module_processes)
                for child_exec in loop_iteration.db_item_execs:
                    process_exec(child_exec, workflow, account,
                                 upstream_lookup, downstream_lookup,
                                 depth + 1, conn_artifacts,
                                 function_artifacts, module_processes,
                                 loop_up_artifacts, loop_down_artifacts,
                                 True)

        # need to set Return artifact and connect j_process to it
        if result_artifact is not None:
            dependencies.append(
                create_was_generated_by(result_artifact, j_process,
                                        account, id_scope))

    def process_module_loop(module, found_input_ports, found_output_ports):
        # Same Split/Join scheme as process_map, but for a regular module
        # that looped internally (e.g. automatic list handling).
        print "*** Processing Module with loops"
        if depth + 1 in depth_accounts:
            account = depth_accounts[depth + 1]
        else:
            account = create_account(depth + 1, id_scope)
            accounts.append(account)
            depth_accounts[depth + 1] = account

        # need to have process that extracts artifacts for each iteration
        result_artifacts = [a for r in found_output_ports
                            if found_output_ports[r] is not None
                            for a in found_output_ports[r]]
        s_process = create_process_manual('Split', account, id_scope)
        processes.append(s_process)
        # NOTE(review): "for input_name in input_port" iterates the
        # characters of each port-name string, then indexes
        # found_input_ports with single characters -- looks like a bug
        # (probably meant to iterate found_input_ports directly); confirm
        # against callers before relying on this path.
        for input_port in found_input_ports:
            for input_name in input_port:
                dependencies.append(
                    create_used(s_process, found_input_ports[input_name],
                                account, id_scope))
        # need to have process that condenses artifacts from each iteration
        if result_artifacts:
            j_process = create_process_manual('Join', account, id_scope)
            processes.append(j_process)
        for loop_exec in item_exec.db_loop_execs:
            for loop_iteration in loop_exec.db_loop_iterations:
                loop_up_artifacts = {}
                loop_down_artifacts = {}
                # NOTE(review): same char-iteration pattern as above.
                for input_port in found_input_ports:
                    for input_name in input_port:
                        port_spec = DBPortSpec(id=-1, name=input_name,
                                               type='output')
                        s_artifact = \
                            create_artifact_from_port_spec(port_spec,
                                                           account,
                                                           id_scope)
                        artifacts.append(s_artifact)
                        dependencies.append(
                            create_was_generated_by(
                                s_artifact, s_process, account, id_scope))
                        if input_name not in loop_up_artifacts:
                            loop_up_artifacts[input_name] = []
                        loop_up_artifacts[input_name].append(s_artifact)

                # process output_port
                if loop_iteration.db_completed == 1:
                    for output_name in found_output_ports:
                        port_spec = DBPortSpec(id=-1, name=output_name,
                                               type='output')
                        o_artifact = \
                            create_artifact_from_port_spec(port_spec,
                                                           account,
                                                           id_scope)
                        artifacts.append(o_artifact)
                        if output_name not in loop_down_artifacts:
                            loop_down_artifacts[output_name] = []
                        loop_down_artifacts[output_name].append(o_artifact)
                        if result_artifacts:
                            dependencies.append(
                                create_used(j_process, o_artifact, account,
                                            id_scope))

                # now process a loop_exec
                for child_exec in loop_iteration.db_item_execs:
                    do_create_process(workflow, child_exec, account,
                                      module_processes)
                for child_exec in loop_iteration.db_item_execs:
                    process_exec(child_exec, workflow, account,
                                 upstream_lookup, downstream_lookup,
                                 depth + 1, conn_artifacts,
                                 function_artifacts, module_processes,
                                 loop_up_artifacts, loop_down_artifacts,
                                 True)

        # need to set Return artifacts and connect j_process to it
        for result_artifact in result_artifacts:
            dependencies.append(
                create_was_generated_by(result_artifact, j_process,
                                        account, id_scope))

    def process_group(module, found_input_ports, found_output_ports):
        # identify depth and create new account if necessary
        # recurse with new account
        # need to link to upstream and downstream correctly
        workflow = module.db_workflow
        # run the whole upstream construction, etc, using this exec
        # and the group's workflow
        if depth + 1 in depth_accounts:
            account = depth_accounts[depth + 1]
        else:
            account = create_account(depth + 1, id_scope)
            accounts.append(account)
            depth_accounts[depth + 1] = account
        process_workflow(workflow, item_exec, account,
                         out_upstream_artifacts,
                         out_downstream_artifacts, depth + 1)

    def process_port_module(module, found_input_ports, found_output_ports):
        # Link a Group's InputPort/OutputPort module to the artifacts the
        # enclosing scope passed in for that named port.
        port_name = found_input_ports['name'].db_parameters[0].db_val
        if module.db_name == 'InputPort':
            if port_name in in_upstream_artifacts:
                for artifact in in_upstream_artifacts[port_name]:
                    dependencies.append(
                        create_used(process, artifact, account, id_scope))
        elif module.db_name == 'OutputPort':
            if port_name in in_downstream_artifacts:
                for artifact in in_downstream_artifacts[port_name]:
                    dependencies.append(
                        create_was_generated_by(artifact, process, account,
                                                id_scope))

    def process_if_module(module, found_input_ports, found_output_ports):
        # Record which branch of an If module triggered this execution by
        # checking which branch's source module actually ran.
        print 'processing IFFFF'
        # need to decide which path was taken?
        # check which module was executed, then know which branch was
        # taken?
        true_conn = found_input_ports['TruePort']
        false_conn = found_input_ports['FalsePort']
        true_id = true_conn.db_ports_type_index['source'].db_moduleId
        false_id = false_conn.db_ports_type_index['source'].db_moduleId
        print '$$ TRUE ID:', true_id
        print '$$ FALSE ID:', false_id
        for x, y in module_processes.iteritems():
            print x, ':', y
        if true_id in module_processes:
            cond_process = module_processes[true_id][1]
        elif false_id in module_processes:
            cond_process = module_processes[false_id][1]
        else:
            raise RuntimeError("cannot process if")
        # FIXME: assume true for now
        # eventually need to check which module_id was execed for this
        # current item exec
        dependencies.append(
            create_was_triggered_by(cond_process, process, account,
                                    id_scope))

    # When recursing (loops/groups), carry the incoming artifacts forward
    # and record used / wasGeneratedBy edges against this process.
    if add_extras:
        print '***adding extras'
        out_upstream_artifacts = copy.copy(in_upstream_artifacts)
        out_downstream_artifacts = copy.copy(in_downstream_artifacts)
        for port_name, artifact_list in in_upstream_artifacts.iteritems():
            for artifact in artifact_list:
                dependencies.append(
                    create_used(process, artifact, account, id_scope))
        for port_name, artifact_list in in_downstream_artifacts.iteritems():
            for artifact in artifact_list:
                # conn_artifacts[(port_name, 'output')] = artifact
                dependencies.append(
                    create_was_generated_by(artifact, process, account,
                                            id_scope))
    else:
        out_upstream_artifacts = {}
        out_downstream_artifacts = {}

    # Table of specially-handled modules: for each "pkg:Module" key, a
    # triple of (special input ports -> treat-as-artifact flag,
    # special output ports -> flag, handler callable or None).
    ctrl_flow_pkg = 'org.vistrails.vistrails.control_flow'
    basic_pkg = get_vistrails_basic_pkg_id()
    all_special_ports = {
        '%s:Map' % ctrl_flow_pkg: [{
            'InputPort': False,
            'OutputPort': False,
            'InputList': True,
            'FunctionPort': False
        }, {
            'Result': True
        }, process_map],
        '%s:Group' % basic_pkg: [{}, {}, process_group],
        '%s:InputPort' % basic_pkg: [{
            'name': False,
            'spec': False,
            'old_name': False
        }, {}, process_port_module],
        '%s:OutputPort' % basic_pkg: [{
            'name': False,
            'spec': False,
            'old_name': False
        }, {}, process_port_module],
        '%s:If' % ctrl_flow_pkg: [{
            'TruePort': False,
            'FalsePort': False
        }, {}, process_if_module],
    }
    module_desc_str = module.db_package + ':' + module.db_name
    special_ports = all_special_ports.get(module_desc_str, [{}, {}, None])
    found_input_ports = {}
    found_output_ports = {}

    # process used_files annotations
    # process generated_tables annotations:
    for annotation in item_exec.db_annotations:
        def process_db_tuple(db_tuple):
            # Normalize the first element to str, then intern the tuple as
            # a database artifact, promoting it to the shallowest (lowest
            # numbered) account seen so far.
            db_tuple = (str(db_tuple[0]),) + db_tuple[1:]
            if db_tuple not in db_artifacts:
                artifact = create_artifact_from_db_tuple(
                    db_tuple, account, id_scope)
                artifacts.append(artifact)
                db_artifacts[db_tuple] = artifact
            else:
                artifact = db_artifacts[db_tuple]
                if int(artifact.db_accounts[0].db_id[4:]) > \
                        int(account.db_id[4:]):
                    artifact.db_accounts[0] = account
            return artifact

        if annotation.db_key == 'used_files':
            used_files = literal_eval(annotation.db_value)
            for fname in used_files:
                if fname not in file_artifacts:
                    artifact = create_artifact_from_filename(
                        fname, account, id_scope)
                    artifacts.append(artifact)
                    file_artifacts[fname] = artifact
                else:
                    artifact = file_artifacts[fname]
                    if int(artifact.db_accounts[0].db_id[4:]) > \
                            int(account.db_id[4:]):
                        artifact.db_accounts[0] = account
                dependencies.append(
                    create_used(process, artifact, account, id_scope))
        elif annotation.db_key == 'generated_tables':
            generated_tables = literal_eval(annotation.db_value)
            for db_tuple in generated_tables:
                artifact = process_db_tuple(db_tuple)
                dependencies.append(
                    create_was_generated_by(artifact, process, account,
                                            id_scope))
        elif annotation.db_key == 'used_tables':
            used_tables = literal_eval(annotation.db_value)
            for db_tuple in used_tables:
                artifact = process_db_tuple(db_tuple)
                dependencies.append(
                    create_used(process, artifact, account, id_scope))

    # process functions
    for function in module.db_functions:
        # FIXME let found_input_ports, found_output_ports store lists?
        if function.db_name in special_ports[0]:
            if not special_ports[0][function.db_name]:
                # Special port handled by the module handler, not as an
                # artifact: stash the raw function and move on.
                found_input_ports[function.db_name] = function
                continue
        function_t = (module.db_id, function.db_name)
        if function_t in function_artifacts:
            artifact = function_artifacts[function_t]
            if int(artifact.db_accounts[0].db_id[4:]) > \
                    int(account.db_id[4:]):
                artifact.db_accounts[0] = account
        else:
            artifact = create_artifact_from_function(
                function, account, id_scope)
            print 'adding artifact', artifact.db_id
            artifacts.append(artifact)
            function_artifacts[function_t] = artifact
        if function.db_name in special_ports[0]:
            found_input_ports[function.db_name] = artifact
        if function.db_name not in out_upstream_artifacts:
            out_upstream_artifacts[function.db_name] = []
        out_upstream_artifacts[function.db_name].append(artifact)
        dependencies.append(
            create_used(process, artifact, account, id_scope))

    # process connections
    if module.db_id in upstream_lookup:
        for conns in upstream_lookup[module.db_id].itervalues():
            for conn in conns:
                dest = conn.db_ports_type_index['destination']
                if dest.db_name in special_ports[0]:
                    if not special_ports[0][dest.db_name]:
                        found_input_ports[dest.db_name] = conn
                        continue
                (artifact, in_cache) = process_connection(conn)
                if dest.db_name in special_ports[0]:
                    found_input_ports[dest.db_name] = artifact
                if dest.db_name not in out_upstream_artifacts:
                    out_upstream_artifacts[dest.db_name] = []
                out_upstream_artifacts[dest.db_name].append(artifact)
                print 'adding dependency (pa)', process.db_id, \
                    artifact.db_id
                dependencies.append(
                    create_used(process, artifact, account, id_scope))

    if item_exec.db_completed == 1:
        if module.db_id in downstream_lookup:
            # check if everything completed successfully for this?
            for conns in downstream_lookup[module.db_id].itervalues():
                for conn in conns:
                    source = conn.db_ports_type_index['source']
                    if source.db_name in special_ports[1]:
                        if not special_ports[1][source.db_name]:
                            found_output_ports[source.db_name] = conn
                            continue
                    dest = conn.db_ports_type_index['destination']
                    dest_module = \
                        workflow.db_modules_id_index[dest.db_moduleId]
                    dest_desc_str = dest_module.db_package + ':' + \
                        dest_module.db_name
                    dest_special_ports = all_special_ports.get(
                        dest_desc_str, [{}, {}, None])
                    # Skip edges into a special destination port that the
                    # destination handler consumes directly.
                    if dest.db_name in dest_special_ports[0] and \
                            not dest_special_ports[0][dest.db_name]:
                        print 'skipping', dest.db_name
                        continue
                    (artifact, in_cache) = process_connection(conn)
                    if not in_cache:
                        if source.db_name in special_ports[1]:
                            found_output_ports[source.db_name] = artifact
                        if source.db_name not in out_downstream_artifacts:
                            out_downstream_artifacts[source.db_name] = []
                        out_downstream_artifacts[source.db_name].append(
                            artifact)
                        print 'adding dependency (ap)', artifact.db_id, \
                            process.db_id
                        dependencies.append(
                            create_was_generated_by(
                                artifact, process, account, id_scope))

    # Dispatch to the special handler if this module has one; otherwise,
    # a plain module with loop executions gets the internal-loop scheme.
    if special_ports[2] is not None:
        special_ports[2](module, found_input_ports, found_output_ports)
    elif item_exec.db_loop_execs:
        # A normal module that is looping internally
        # Probably an automatic list loop
        process_module_loop(module, in_upstream_artifacts,
                            out_upstream_artifacts)
def test_registry_port_subtype(self):
    """Test registry isPortSubType"""
    reg = self.registry
    string_desc = reg.get_descriptor_by_name(get_vistrails_basic_pkg_id(),
                                             'String')
    # A port must always be a subtype of itself.
    first_source_port = reg.source_ports_from_descriptor(string_desc)[0]
    assert reg.is_port_sub_type(first_source_port, first_source_port)
def process_exec(item_exec, workflow, account, upstream_lookup,
                 downstream_lookup, depth, conn_artifacts=None,
                 function_artifacts=None, module_processes=None,
                 in_upstream_artifacts={}, in_downstream_artifacts={},
                 add_extras=False):
    """Translate one module execution into provenance entities.

    Earlier variant of the process_exec routine elsewhere in this file:
    it checks completion on loop_exec (not per-iteration), has no
    process_module_loop handler, and parses annotation values with
    ``eval``.

    NOTE(review): relies on names from an enclosing scope (reg, id_scope,
    artifacts, processes, dependencies, accounts, depth_accounts,
    db_artifacts, file_artifacts, process_workflow, do_create_process,
    create_* helpers) -- expected to be nested inside the workflow
    processing routine.

    NOTE(review): ``eval`` is called on annotation and parameter values
    below; if those values can come from untrusted vistrail files this is
    arbitrary code execution -- ast.literal_eval (used by the newer
    variant in this file) would be the safe replacement.

    NOTE(review): in_upstream_artifacts / in_downstream_artifacts are
    mutable default arguments; only read or copied here, but verify no
    caller mutates the shared defaults.
    """
    print 'in_upstream:', [(n, x.db_id)
                           for n, x_list in in_upstream_artifacts.iteritems()
                           for x in x_list]
    print 'in_downstream:', [(n, x.db_id)
                             for n, x_list in
                             in_downstream_artifacts.iteritems()
                             for x in x_list]
    # FIXME merge conn_artifacts and function_artifacts
    # problem is that a conn_artifact is OUTPUT while function_artifact
    # is INPUT
    if conn_artifacts is None:
        conn_artifacts = {}
    if function_artifacts is None:
        function_artifacts = {}
    if module_processes is None:
        module_processes = {}
    # while item_exec.vtType == DBLoopExec.vtType:
    #     item_exec = item_exec.db_item_execs[0]
    (module, process) = module_processes[item_exec.db_module_id]

    def process_connection(conn):
        # Resolve the artifact for a connection's source port, caching by
        # (source module id, source port name).  Returns (artifact,
        # in_cache) so callers can skip re-adding cached artifacts.
        source = conn.db_ports_type_index['source']
        source_t = (source.db_moduleId, source.db_name)
        in_cache = False
        print '!!! processing', source_t
        if source_t in conn_artifacts:
            artifact = conn_artifacts[source_t]
            in_cache = True
        else:
            # key off source module and port name
            # get descriptor from registry and then port_spec
            # store port_spec as artifact
            if source.db_moduleId < 0:
                dest = conn.db_ports_type_index['destination']
                module = source.db_module
            else:
                module = workflow.db_modules_id_index[source.db_moduleId]
            print module.db_name, module.db_id
            pkg = get_package(reg, module.db_package, module.db_version)
            if not module.db_namespace:
                module_namespace = ''
            else:
                module_namespace = module.db_namespace
            module_desc = \
                pkg.db_module_descriptors_name_index[(module.db_name,
                                                      module_namespace,
                                                      '')]
            # FIXME make work for module port_specs, too
            # for example, a PythonSource with a given port in
            # module.db_portSpecs
            port_spec = None
            spec_t = (source.db_name, 'output')
            if spec_t in module.db_portSpecs_name_index:
                port_spec = module.db_portSpecs_name_index[spec_t]
            # Walk up the descriptor hierarchy until the port spec is
            # found or the root descriptor is reached.
            while port_spec is None and \
                    module_desc.db_id != reg.db_root_descriptor_id:
                if spec_t in module_desc.db_portSpecs_name_index:
                    port_spec = module_desc.db_portSpecs_name_index[spec_t]
                base_id = module_desc.db_base_descriptor_id
                # inefficient spin through db_packages but we do
                # not have the descriptors_by_id index that exists
                # on core.module_registry.ModuleRegistry here
                module_desc = None
                for pkg in reg.db_packages:
                    if base_id in pkg.db_module_descriptors_id_index:
                        module_desc = \
                            pkg.db_module_descriptors_id_index[base_id]
                        break
                if module_desc is None:
                    raise KeyError("Cannot find base descriptor id %d" %
                                   base_id)
                # pkg = get_package(reg, module_desc.db_package,
                #                   module_desc.db_package_version)
                # module_desc = pkg.db_module_descriptors_id_index[base_id]
            if port_spec is None:
                port_spec = module_desc.db_portSpecs_name_index[spec_t]
            print module_desc.db_name
            artifact = \
                create_artifact_from_port_spec(port_spec, account, id_scope)
            artifacts.append(artifact)
            print 'adding conn_artifact', artifact.db_id, source_t, \
                source.db_moduleName
            conn_artifacts[source_t] = artifact
        return (artifact, in_cache)

    def process_map(module, found_input_ports, found_output_ports):
        # Expand a control-flow Map: a manual Split process feeds each
        # iteration's inputs; a Join process collects iteration results.
        print "*** Processing Map"
        if depth+1 in depth_accounts:
            account = depth_accounts[depth+1]
        else:
            account = create_account(depth+1, id_scope)
            accounts.append(account)
            depth_accounts[depth+1] = account

        # need to have process that extracts artifacts for each iteration
        input_list_artifact = found_input_ports['InputList']
        result_artifact = found_output_ports.get('Result', None)
        # NOTE(review): eval on a stored parameter value -- see docstring.
        input_port_list = \
            eval(found_input_ports['InputPort'].db_parameters[0].db_val)
        output_port = \
            found_input_ports['OutputPort'].db_parameters[0].db_val
        s_process = create_process_manual('Split', account, id_scope)
        processes.append(s_process)
        dependencies.append(create_used(s_process, input_list_artifact,
                                        account, id_scope))

        # need to have process that condenses artifacts from each iteration
        if result_artifact is not None:
            j_process = create_process_manual('Join', account, id_scope)
            processes.append(j_process)
        for loop_exec in item_exec.db_loop_execs:
            loop_up_artifacts = {}
            loop_down_artifacts = {}
            # One Split-generated artifact per mapped input port.
            for input_name in input_port_list:
                port_spec = DBPortSpec(id=-1, name=input_name,
                                       type='output')
                s_artifact = \
                    create_artifact_from_port_spec(port_spec, account,
                                                   id_scope)
                artifacts.append(s_artifact)
                dependencies.append(create_was_generated_by(s_artifact,
                                                            s_process,
                                                            account,
                                                            id_scope))
                if input_name not in loop_up_artifacts:
                    loop_up_artifacts[input_name] = []
                loop_up_artifacts[input_name].append(s_artifact)

            # process output_port
            if loop_exec.db_completed == 1:
                port_spec = DBPortSpec(id=-1, name=output_port,
                                       type='output')
                o_artifact = \
                    create_artifact_from_port_spec(port_spec, account,
                                                   id_scope)
                artifacts.append(o_artifact)
                if output_port not in loop_down_artifacts:
                    loop_down_artifacts[output_port] = []
                loop_down_artifacts[output_port].append(o_artifact)
                if result_artifact is not None:
                    dependencies.append(create_used(j_process, o_artifact,
                                                    account, id_scope))

            # now process a loop_exec
            for child_exec in loop_exec.db_item_execs:
                do_create_process(workflow, child_exec, account,
                                  module_processes)
            for child_exec in loop_exec.db_item_execs:
                process_exec(child_exec, workflow, account,
                             upstream_lookup, downstream_lookup,
                             depth+1, conn_artifacts,
                             function_artifacts, module_processes,
                             loop_up_artifacts, loop_down_artifacts,
                             True)

        # need to set Return artifact and connect j_process to it
        if result_artifact is not None:
            dependencies.append(create_was_generated_by(result_artifact,
                                                        j_process,
                                                        account,
                                                        id_scope))

    def process_group(module, found_input_ports, found_output_ports):
        # identify depth and create new account if necessary
        # recurse with new account
        # need to link to upstream and downstream correctly
        workflow = module.db_workflow
        # run the whole upstream construction, etc, using this exec
        # and the group's workflow
        if depth+1 in depth_accounts:
            account = depth_accounts[depth+1]
        else:
            account = create_account(depth+1, id_scope)
            accounts.append(account)
            depth_accounts[depth+1] = account
        process_workflow(workflow, item_exec, account,
                         out_upstream_artifacts,
                         out_downstream_artifacts, depth+1)

    def process_port_module(module, found_input_ports, found_output_ports):
        # Link a Group's InputPort/OutputPort module to the artifacts the
        # enclosing scope passed in for that named port.
        port_name = found_input_ports['name'].db_parameters[0].db_val
        if module.db_name == 'InputPort':
            if port_name in in_upstream_artifacts:
                for artifact in in_upstream_artifacts[port_name]:
                    dependencies.append(create_used(process, artifact,
                                                    account, id_scope))
        elif module.db_name == 'OutputPort':
            if port_name in in_downstream_artifacts:
                for artifact in in_downstream_artifacts[port_name]:
                    dependencies.append(create_was_generated_by(artifact,
                                                                process,
                                                                account,
                                                                id_scope))

    def process_if_module(module, found_input_ports, found_output_ports):
        # Record which branch of an If module triggered this execution by
        # checking which branch's source module actually ran.
        print 'processing IFFFF'
        # need to decide which path was taken?
        # check which module was executed, then know which branch was
        # taken?
        true_conn = found_input_ports['TruePort']
        false_conn = found_input_ports['FalsePort']
        true_id = true_conn.db_ports_type_index['source'].db_moduleId
        false_id = false_conn.db_ports_type_index['source'].db_moduleId
        print '$$ TRUE ID:', true_id
        print '$$ FALSE ID:', false_id
        for x,y in module_processes.iteritems():
            print x, ':', y
        if true_id in module_processes:
            cond_process = module_processes[true_id][1]
        elif false_id in module_processes:
            cond_process = module_processes[false_id][1]
        else:
            raise RuntimeError("cannot process if")
        # FIXME: assume true for now
        # eventually need to check which module_id was execed for this
        # current item exec
        dependencies.append(create_was_triggered_by(cond_process, process,
                                                    account, id_scope))

    # When recursing (loops/groups), carry the incoming artifacts forward
    # and record used / wasGeneratedBy edges against this process.
    if add_extras:
        print '***adding extras'
        out_upstream_artifacts = copy.copy(in_upstream_artifacts)
        out_downstream_artifacts = copy.copy(in_downstream_artifacts)
        for port_name, artifact_list in in_upstream_artifacts.iteritems():
            for artifact in artifact_list:
                dependencies.append(create_used(process, artifact,
                                                account, id_scope))
        for port_name, artifact_list in in_downstream_artifacts.iteritems():
            for artifact in artifact_list:
                # conn_artifacts[(port_name, 'output')] = artifact
                dependencies.append(create_was_generated_by(artifact,
                                                            process,
                                                            account,
                                                            id_scope))
    else:
        out_upstream_artifacts = {}
        out_downstream_artifacts = {}

    # Table of specially-handled modules: for each "pkg:Module" key, a
    # triple of (special input ports -> treat-as-artifact flag,
    # special output ports -> flag, handler callable or None).
    ctrl_flow_pkg = '%s.control_flow' % get_vistrails_default_pkg_prefix()
    basic_pkg = get_vistrails_basic_pkg_id()
    all_special_ports = {'%s:Map' % ctrl_flow_pkg:
                         [{'InputPort': False,
                           'OutputPort': False,
                           'InputList': True,
                           'FunctionPort': False},
                          {'Result': True},
                          process_map],
                         '%s:Group' % basic_pkg:
                         [{}, {}, process_group],
                         '%s:InputPort' % basic_pkg:
                         [{'name': False,
                           'spec': False,
                           'old_name': False},
                          {},
                          process_port_module],
                         '%s:OutputPort' % basic_pkg:
                         [{'name': False,
                           'spec': False,
                           'old_name': False},
                          {},
                          process_port_module],
                         '%s:If' % ctrl_flow_pkg:
                         [{'TruePort': False,
                           'FalsePort': False},
                          {},
                          process_if_module],
                         }
    module_desc_str = module.db_package + ':' + module.db_name
    special_ports = all_special_ports.get(module_desc_str, [{}, {}, None])
    found_input_ports = {}
    found_output_ports = {}

    # process used_files annotations
    # process generated_tables annotations:
    for annotation in item_exec.db_annotations:
        def process_db_tuple(db_tuple):
            # Normalize the first element to str, then intern the tuple as
            # a database artifact, promoting it to the shallowest (lowest
            # numbered) account seen so far.
            db_tuple = (str(db_tuple[0]),) + db_tuple[1:]
            if db_tuple not in db_artifacts:
                artifact = create_artifact_from_db_tuple(db_tuple,
                                                         account,
                                                         id_scope)
                artifacts.append(artifact)
                db_artifacts[db_tuple] = artifact
            else:
                artifact = db_artifacts[db_tuple]
                if int(artifact.db_accounts[0].db_id[4:]) > \
                        int(account.db_id[4:]):
                    artifact.db_accounts[0] = account
            return artifact

        # NOTE(review): eval on stored annotation values -- see docstring.
        if annotation.db_key == 'used_files':
            used_files = eval(annotation.db_value)
            for fname in used_files:
                if fname not in file_artifacts:
                    artifact = create_artifact_from_filename(fname,
                                                             account,
                                                             id_scope)
                    artifacts.append(artifact)
                    file_artifacts[fname] = artifact
                else:
                    artifact = file_artifacts[fname]
                    if int(artifact.db_accounts[0].db_id[4:]) > \
                            int(account.db_id[4:]):
                        artifact.db_accounts[0] = account
                dependencies.append(create_used(process, artifact,
                                                account, id_scope))
        elif annotation.db_key == 'generated_tables':
            generated_tables = eval(annotation.db_value)
            for db_tuple in generated_tables:
                artifact = process_db_tuple(db_tuple)
                dependencies.append(create_was_generated_by(artifact,
                                                            process,
                                                            account,
                                                            id_scope))
        elif annotation.db_key == 'used_tables':
            used_tables = eval(annotation.db_value)
            for db_tuple in used_tables:
                artifact = process_db_tuple(db_tuple)
                dependencies.append(create_used(process, artifact,
                                                account, id_scope))

    # process functions
    for function in module.db_functions:
        # FIXME let found_input_ports, found_output_ports store lists?
        if function.db_name in special_ports[0]:
            if not special_ports[0][function.db_name]:
                # Special port handled by the module handler, not as an
                # artifact: stash the raw function and move on.
                found_input_ports[function.db_name] = function
                continue
        function_t = (module.db_id, function.db_name)
        if function_t in function_artifacts:
            artifact = function_artifacts[function_t]
            if int(artifact.db_accounts[0].db_id[4:]) > \
                    int(account.db_id[4:]):
                artifact.db_accounts[0] = account
        else:
            artifact = create_artifact_from_function(function,
                                                     account,
                                                     id_scope)
            print 'adding artifact', artifact.db_id
            artifacts.append(artifact)
            function_artifacts[function_t] = artifact
        if function.db_name in special_ports[0]:
            found_input_ports[function.db_name] = artifact
        if function.db_name not in out_upstream_artifacts:
            out_upstream_artifacts[function.db_name] = []
        out_upstream_artifacts[function.db_name].append(artifact)
        dependencies.append(create_used(process, artifact, account,
                                        id_scope))

    # process connections
    if module.db_id in upstream_lookup:
        for conns in upstream_lookup[module.db_id].itervalues():
            for conn in conns:
                dest = conn.db_ports_type_index['destination']
                if dest.db_name in special_ports[0]:
                    if not special_ports[0][dest.db_name]:
                        found_input_ports[dest.db_name] = conn
                        continue
                (artifact, in_cache) = process_connection(conn)
                if dest.db_name in special_ports[0]:
                    found_input_ports[dest.db_name] = artifact
                if dest.db_name not in out_upstream_artifacts:
                    out_upstream_artifacts[dest.db_name] = []
                out_upstream_artifacts[dest.db_name].append(artifact)
                print 'adding dependency (pa)', process.db_id, \
                    artifact.db_id
                dependencies.append(create_used(process, artifact,
                                                account, id_scope))

    if item_exec.db_completed == 1:
        if module.db_id in downstream_lookup:
            # check if everything completed successfully for this?
            for conns in downstream_lookup[module.db_id].itervalues():
                for conn in conns:
                    source = conn.db_ports_type_index['source']
                    if source.db_name in special_ports[1]:
                        if not special_ports[1][source.db_name]:
                            found_output_ports[source.db_name] = conn
                            continue
                    dest = conn.db_ports_type_index['destination']
                    dest_module = \
                        workflow.db_modules_id_index[dest.db_moduleId]
                    dest_desc_str = dest_module.db_package + ':' + \
                        dest_module.db_name
                    dest_special_ports = all_special_ports.get(dest_desc_str,
                                                               [{}, {},
                                                                None])
                    # Skip edges into a special destination port that the
                    # destination handler consumes directly.
                    if dest.db_name in dest_special_ports[0] and \
                            not dest_special_ports[0][dest.db_name]:
                        print 'skipping', dest.db_name
                        continue
                    (artifact, in_cache) = process_connection(conn)
                    if not in_cache:
                        if source.db_name in special_ports[1]:
                            found_output_ports[source.db_name] = artifact
                        if source.db_name not in out_downstream_artifacts:
                            out_downstream_artifacts[source.db_name] = []
                        out_downstream_artifacts[source.db_name].append(artifact)
                        print 'adding dependency (ap)', artifact.db_id, \
                            process.db_id
                        dependencies.append(create_was_generated_by(artifact,
                                                                    process,
                                                                    account,
                                                                    id_scope))

    # Dispatch to the special handler if this module has one.
    if special_ports[2] is not None:
        special_ports[2](module, found_input_ports, found_output_ports)
def replace_generic(controller, pipeline, old_module, new_module,
                    function_remap=None, src_port_remap=None,
                    dst_port_remap=None, annotation_remap=None,
                    control_param_remap=None, use_registry=True):
    """Build the action list that replaces old_module with new_module.

    old_module is deleted from pipeline (together with its connections),
    and its annotations, control parameters, port specs, functions and
    connections are re-created on new_module, filtered through the
    optional remap tables.

    Each *_remap dict maps an old name to one of:
      * None            -- drop the item entirely;
      * a string        -- re-create the item under the new name;
      * a callable      -- called with the old item (plus new_module for
                          function and connection remaps), must return a
                          list of extra operations to append.

    When use_registry is False, port specs for functions and connection
    endpoints are synthesized by hand (basic Module / Variant types)
    instead of being resolved through the module registry.

    Returns a one-element list holding the created db action.
    """
    if function_remap is None:
        function_remap = {}
    if src_port_remap is None:
        src_port_remap = {}
    if dst_port_remap is None:
        dst_port_remap = {}
    if annotation_remap is None:
        annotation_remap = {}
    if control_param_remap is None:
        control_param_remap = {}
    basic_pkg = get_vistrails_basic_pkg_id()

    # Start by deleting the old module; the 'add' ops built below are
    # appended after, so op order matters.
    ops = []
    ops.extend(controller.delete_module_list_ops(pipeline, [old_module.id]))

    # Carry annotations over, applying annotation_remap.
    for annotation in old_module.annotations:
        if annotation.key not in annotation_remap:
            annotation_key = annotation.key
        else:
            remap = annotation_remap[annotation.key]
            if remap is None:
                # don't add the annotation back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(annotation))
                continue
            else:
                annotation_key = remap

        new_annotation = \
            Annotation(id=controller.id_scope.getNewId(Annotation.vtType),
                       key=annotation_key,
                       value=annotation.value)
        new_module.add_annotation(new_annotation)

    # Carry control parameters over, applying control_param_remap.
    for control_param in old_module.control_parameters:
        if control_param.name not in control_param_remap:
            control_param_name = control_param.name
        else:
            remap = control_param_remap[control_param.name]
            if remap is None:
                # don't add the control param back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(control_param))
                continue
            else:
                control_param_name = remap

        new_control_param = \
            ModuleControlParam(id=controller.id_scope.getNewId(
                                   ModuleControlParam.vtType),
                               name=control_param_name,
                               value=control_param.value)
        new_module.add_control_parameter(new_control_param)

    # Copy local port specs (groups/abstractions manage their own).
    if not old_module.is_group() and not old_module.is_abstraction():
        for port_spec in old_module.port_spec_list:
            if port_spec.type == 'input':
                if port_spec.name not in dst_port_remap:
                    spec_name = port_spec.name
                else:
                    remap = dst_port_remap[port_spec.name]
                    if remap is None:
                        continue
                    elif not isinstance(remap, basestring):
                        ops.extend(remap(port_spec))
                        continue
                    else:
                        spec_name = remap
            elif port_spec.type == 'output':
                if port_spec.name not in src_port_remap:
                    spec_name = port_spec.name
                else:
                    remap = src_port_remap[port_spec.name]
                    if remap is None:
                        continue
                    elif not isinstance(remap, basestring):
                        ops.extend(remap(port_spec))
                        continue
                    else:
                        spec_name = remap
            # NOTE(review): if port_spec.type is neither 'input' nor
            # 'output', spec_name is stale/unbound here -- confirm that
            # only those two types can occur.
            new_spec = port_spec.do_copy(True, controller.id_scope, {})
            new_spec.name = spec_name
            new_module.add_port_spec(new_spec)

    # Re-create functions, applying function_remap.
    function_ops = []
    for function in old_module.functions:
        if function.name not in function_remap:
            function_name = function.name
        else:
            remap = function_remap[function.name]
            if remap is None:
                # don't add the function back in
                continue
            elif not isinstance(remap, basestring):
                function_ops.extend(remap(function, new_module))
                continue
            else:
                function_name = remap

        if len(function.parameters) > 0:
            new_param_vals, aliases = zip(*[(p.strValue, p.alias)
                                            for p in function.parameters])
        else:
            new_param_vals = []
            aliases = []
        if use_registry:
            # The controller resolves the port spec by name.
            function_port_spec = function_name
        else:
            # Build a synthetic port spec with one generic item per value.
            def mk_psi(pos):
                psi = PortSpecItem(module="Module", package=basic_pkg,
                                   namespace="", pos=pos)
                return psi
            n_items = len(new_param_vals)
            function_port_spec = PortSpec(
                    name=function_name,
                    items=[mk_psi(i) for i in xrange(n_items)])
        new_function = controller.create_function(new_module,
                                                  function_port_spec,
                                                  new_param_vals,
                                                  aliases)
        new_module.add_function(new_function)

    if None in function_remap:
        # used to add new functions
        remap = function_remap[None]
        function_ops.extend(remap(None, new_module))

    # add the new module
    ops.append(('add', new_module))
    ops.extend(function_ops)

    create_new_connection = UpgradeWorkflowHandler.create_new_connection

    # Re-create outgoing connections, applying src_port_remap.
    for _, conn_id in pipeline.graph.edges_from(old_module.id):
        old_conn = pipeline.connections[conn_id]
        if old_conn.source.name not in src_port_remap:
            source_name = old_conn.source.name
        else:
            remap = src_port_remap[old_conn.source.name]
            if remap is None:
                # don't add this connection back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(old_conn, new_module))
                continue
            else:
                source_name = remap

        old_dst_module = pipeline.modules[old_conn.destination.moduleId]
        if use_registry:
            source_port = source_name
        else:
            source_port = Port(name=source_name,
                               type='source',
                               signature=create_port_spec_string(
                                   [(basic_pkg, 'Variant', '')]))

        new_conn = create_new_connection(controller,
                                         new_module,
                                         source_port,
                                         old_dst_module,
                                         old_conn.destination)
        ops.append(('add', new_conn))

    # Re-create incoming connections, applying dst_port_remap.
    for _, conn_id in pipeline.graph.edges_to(old_module.id):
        old_conn = pipeline.connections[conn_id]
        if old_conn.destination.name not in dst_port_remap:
            destination_name = old_conn.destination.name
        else:
            remap = dst_port_remap[old_conn.destination.name]
            if remap is None:
                # don't add this connection back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(old_conn, new_module))
                continue
            else:
                destination_name = remap

        old_src_module = pipeline.modules[old_conn.source.moduleId]
        if use_registry:
            destination_port = destination_name
        else:
            destination_port = Port(name=destination_name,
                                    type='destination',
                                    signature=create_port_spec_string(
                                        [(basic_pkg, 'Variant', '')]))

        new_conn = create_new_connection(controller,
                                         old_src_module,
                                         old_conn.source,
                                         new_module,
                                         destination_port)
        ops.append(('add', new_conn))

    return [vistrails.core.db.action.create_action(ops)]
def execute(self, *args, **kwargs):
    """Execute the pipeline.

    Positional arguments are either input values (created from
    ``module == value``, where `module` is a Module from the pipeline and
    `value` is some value or Function instance) for the pipeline's
    InputPorts, or Module instances (to select sink modules).

    Keyword arguments are also used to set InputPort by looking up
    inputs by name.

    Example::

        input_bound = pipeline.get_input('higher_bound')
        input_url = pipeline.get_input('url')
        sinkmodule = pipeline.get_module(32)
        pipeline.execute(sinkmodule,
                         input_bound == vt.Function(Integer, 10),
                         input_url == 'http://www.vistrails.org/',
                         resolution=15)  # kwarg: only one equal sign
    """
    sinks = set()
    inputs = {}

    reg = get_module_registry()
    InputPort_desc = reg.get_descriptor_by_name(
            get_vistrails_basic_pkg_id(),
            'InputPort')

    # Read args: ModuleValuePair sets an InputPort value, a bare Module
    # selects a sink.
    for arg in args:
        if isinstance(arg, ModuleValuePair):
            if arg.module.id in inputs:
                raise ValueError(
                        "Multiple values set for InputPort %r" %
                        get_inputoutput_name(arg.module))
            if not reg.is_descriptor_subclass(arg.module.module_descriptor,
                                              InputPort_desc):
                raise ValueError("Module %d is not an InputPort" %
                                 arg.module.id)
            inputs[arg.module.id] = arg.value
        elif isinstance(arg, Module):
            sinks.add(arg.module_id)

    # Read kwargs: each keyword names an InputPort.
    for key, value in kwargs.iteritems():
        key = self.get_input(key)  # Might raise KeyError
        if key.module_id in inputs:
            raise ValueError("Multiple values set for InputPort %r" %
                             get_inputoutput_name(key.module))
        inputs[key.module_id] = value

    reason = "API pipeline execution"
    sinks = sinks or None

    # Use controller only if no inputs were passed in
    if (not inputs and self.vistrail is not None and
            self.vistrail.current_version == self.version):
        controller = self.vistrail.controller
        results, changed = controller.execute_workflow_list([[
                controller.locator,  # locator
                self.version,  # version
                self.pipeline,  # pipeline
                DummyView(),  # view
                None,  # custom_aliases
                None,  # custom_params
                reason,  # reason
                sinks,  # sinks
                None,  # extra_info
                ]])
        result, = results
    else:
        pipeline = self.pipeline
        if inputs:
            # Work on a copy so the stored pipeline is untouched.
            id_scope = IdScope(1)
            pipeline = pipeline.do_copy(False, id_scope)

            # A hack to get ids from id_scope that we know won't collide:
            # make them negative
            id_scope.getNewId = lambda t, g=id_scope.getNewId: -g(t)

            create_module = \
                    VistrailController.create_module_from_descriptor_static
            create_function = VistrailController.create_function_static
            create_connection = VistrailController.create_connection_static

            # Fills in the ExternalPipe ports
            for module_id, values in inputs.iteritems():
                module = pipeline.modules[module_id]
                if not isinstance(values, (list, tuple)):
                    values = [values]

                # Guess the type of the InputPort
                _, sigstrings, _, _, _ = get_port_spec_info(pipeline,
                                                            module)
                sigstrings = parse_port_spec_string(sigstrings)

                # Convert whatever we got to a list of strings, for the
                # pipeline
                values = [reg.convert_port_val(val, sigstring, None)
                          for val, sigstring in izip(values, sigstrings)]

                if len(values) == 1:
                    # Create the constant module
                    constant_desc = reg.get_descriptor_by_name(
                            *sigstrings[0])
                    constant_mod = create_module(id_scope, constant_desc)
                    func = create_function(id_scope, constant_mod,
                                           'value', values)
                    constant_mod.add_function(func)
                    pipeline.add_module(constant_mod)

                    # Connect it to the ExternalPipe port
                    conn = create_connection(id_scope,
                                             constant_mod, 'value',
                                             module, 'ExternalPipe')
                    pipeline.db_add_connection(conn)
                else:
                    raise RuntimeError("TODO : create tuple")

        interpreter = get_default_interpreter()
        result = interpreter.execute(pipeline,
                                     reason=reason,
                                     sinks=sinks)

    if result.errors:
        raise ExecutionErrors(self, result)
    else:
        return ExecutionResults(self, result)
def execute(self, *args, **kwargs): """Execute the pipeline. Positional arguments are either input values (created from ``module == value``, where `module` is a Module from the pipeline and `value` is some value or Function instance) for the pipeline's InputPorts, or Module instances (to select sink modules). Keyword arguments are also used to set InputPort by looking up inputs by name. Example:: input_bound = pipeline.get_input('higher_bound') input_url = pipeline.get_input('url') sinkmodule = pipeline.get_module(32) pipeline.execute(sinkmodule, input_bound == vt.Function(Integer, 10), input_url == 'http://www.vistrails.org/', resolution=15) # kwarg: only one equal sign """ pipeline = self.pipeline sinks = set() inputs = {} reg = get_module_registry() InputPort_desc = reg.get_descriptor_by_name( get_vistrails_basic_pkg_id(), 'InputPort') # Read args for arg in args: if isinstance(arg, ModuleValuePair): if arg.module.id in inputs: raise ValueError("Multiple values set for InputPort %r" % get_inputoutput_name(arg.module)) if not reg.is_descriptor_subclass(arg.module.module_descriptor, InputPort_desc): raise ValueError("Module %d is not an InputPort" % arg.module.id) inputs[arg.module.id] = arg.value elif isinstance(arg, Module): sinks.add(arg.module_id) # Read kwargs for key, value in kwargs.iteritems(): name = key key = self.get_python_parameter(key) # Might raise KeyError if name in inputs: raise ValueError("Multiple values set for input %r" % name) inputs[name] = [key.module_id, value] reason = "API pipeline execution" sinks = sinks or None # Use controller only if no inputs were passed in if (not inputs and self.vistrail is not None and self.vistrail.current_version == self.version): controller = self.vistrail.controller results, changed = controller.execute_workflow_list([[ controller.locator, # locator self.version, # version self.pipeline, # pipeline DummyView(), # view None, # custom_aliases None, # custom_params reason, # reason sinks, # sinks None, # 
extra_info ]]) result, = results else: # pipeline = self.pipeline if inputs: # id_scope = IdScope(1) # pipeline = pipeline.do_copy(False, id_scope) # A hach to get ids from id_scope that we know won't collide: # make them negative # id_scope.getNewId = lambda t, g=id_scope.getNewId: -g(t) # create_module = \ # VistrailController.create_module_from_descriptor_static # create_function = VistrailController.create_function_static # create_connection = VistrailController.create_connection_static # Fills in the ExternalPipe ports for name, input_list in inputs.iteritems(): module_id, values = input_list module = pipeline.modules[module_id] if not isinstance(values, (list, tuple)): values = [values] ''' # Guess the type of the InputPort _, sigstrings, _, _, _ = get_port_spec_info(pipeline, module) sigstrings = parse_port_spec_string(sigstrings) # Convert whatever we got to a list of strings, for the # pipeline values = [reg.convert_port_val(val, sigstring, None) for val, sigstring in izip(values, sigstrings)] if len(values) == 1: # Create the constant module constant_desc = reg.get_descriptor_by_name( *sigstrings[0]) #print('Setting desription: ',str(constant_desc),str(sigstrings[0])) constant_mod = create_module(id_scope, constant_desc) func = create_function(id_scope, constant_mod, 'value', values) constant_mod.add_function(func) pipeline.add_module(constant_mod) # Connect it to the ExternalPipe port conn = create_connection(id_scope, constant_mod, 'value', module, 'ExternalPipe') pipeline.db_add_connection(conn) else: raise RuntimeError("TODO : create tuple") ''' port_spec = reg.get_input_port_spec(module, name) added_functions = {} tmp_f_id = -1L tmp_p_id = -1L function = [ f for f in module.functions if f.name == port_spec.name ] if function: function = function[0] else: try: function = added_functions[(module.id, port_spec.name)] except KeyError: # add to function list params = [] for psi in port_spec.port_spec_items: parameter = ModuleParam( id=tmp_p_id, 
pos=psi.pos, name='<no description>', val=psi.default, type=psi.descriptor.sigstring) params.append(parameter) tmp_p_id -= 1 function = ModuleFunction( id=tmp_f_id, pos=module.getNumFunctions(), name=port_spec.name, parameters=params) tmp_f_id -= 1 added_functions[(module.id, port_spec.name)] = function action = vistrails.core.db.action.create_action([ ('add', function, module.vtType, module.id) ]) # function_actions.append(action) parameter = function.params[0] # find old parameter old_param = parameter actions = [] for v in values: desc = reg.get_descriptor_by_name( 'org.vistrails.vistrails.basic', 'String', None) if not isinstance(v, str): str_value = desc.module.translate_to_string(v) else: str_value = v new_param = ModuleParam(id=tmp_p_id, pos=old_param.pos, name=old_param.name, alias=old_param.alias, val=str_value, type=old_param.type) tmp_p_id -= 1 action_spec = ('change', old_param, new_param, function.vtType, function.real_id) action = vistrails.core.db.action.create_action( [action_spec]) actions.append(action) # controller = self.vistrail.controller self.controller.perform_action(action) ######################################################################### interpreter = get_default_interpreter() result = interpreter.execute(pipeline, locator=self.locator, reason=reason, sinks=sinks, actions=actions) if result.errors: raise ExecutionErrors(self, result) else: return ExecutionResults(self, result)
def replace_generic(controller, pipeline, old_module, new_module,
                    function_remap=None, src_port_remap=None,
                    dst_port_remap=None, annotation_remap=None,
                    control_param_remap=None, use_registry=True):
    """Build the action list that replaces old_module with new_module.

    NOTE(review): this appears to be a second, token-identical copy of a
    ``replace_generic`` defined earlier in this file -- confirm whether
    one of the two can be removed.

    old_module is deleted from pipeline (together with its connections),
    and its annotations, control parameters, port specs, functions and
    connections are re-created on new_module, filtered through the
    optional remap tables.  Each *_remap dict maps an old name to None
    (drop the item), a string (re-create under the new name), or a
    callable returning extra operations to append (callables receive the
    item, plus new_module for function and connection remaps).

    Returns a one-element list holding the created db action.
    """
    if function_remap is None:
        function_remap = {}
    if src_port_remap is None:
        src_port_remap = {}
    if dst_port_remap is None:
        dst_port_remap = {}
    if annotation_remap is None:
        annotation_remap = {}
    if control_param_remap is None:
        control_param_remap = {}
    basic_pkg = get_vistrails_basic_pkg_id()

    # Delete the old module first; 'add' ops are appended afterwards.
    ops = []
    ops.extend(controller.delete_module_list_ops(pipeline, [old_module.id]))

    # Carry annotations over, applying annotation_remap.
    for annotation in old_module.annotations:
        if annotation.key not in annotation_remap:
            annotation_key = annotation.key
        else:
            remap = annotation_remap[annotation.key]
            if remap is None:
                # don't add the annotation back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(annotation))
                continue
            else:
                annotation_key = remap

        new_annotation = \
            Annotation(id=controller.id_scope.getNewId(Annotation.vtType),
                       key=annotation_key,
                       value=annotation.value)
        new_module.add_annotation(new_annotation)

    # Carry control parameters over, applying control_param_remap.
    for control_param in old_module.control_parameters:
        if control_param.name not in control_param_remap:
            control_param_name = control_param.name
        else:
            remap = control_param_remap[control_param.name]
            if remap is None:
                # don't add the control param back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(control_param))
                continue
            else:
                control_param_name = remap

        new_control_param = \
            ModuleControlParam(id=controller.id_scope.getNewId(
                                   ModuleControlParam.vtType),
                               name=control_param_name,
                               value=control_param.value)
        new_module.add_control_parameter(new_control_param)

    # Copy local port specs (groups/abstractions manage their own).
    if not old_module.is_group() and not old_module.is_abstraction():
        for port_spec in old_module.port_spec_list:
            if port_spec.type == 'input':
                if port_spec.name not in dst_port_remap:
                    spec_name = port_spec.name
                else:
                    remap = dst_port_remap[port_spec.name]
                    if remap is None:
                        continue
                    elif not isinstance(remap, basestring):
                        ops.extend(remap(port_spec))
                        continue
                    else:
                        spec_name = remap
            elif port_spec.type == 'output':
                if port_spec.name not in src_port_remap:
                    spec_name = port_spec.name
                else:
                    remap = src_port_remap[port_spec.name]
                    if remap is None:
                        continue
                    elif not isinstance(remap, basestring):
                        ops.extend(remap(port_spec))
                        continue
                    else:
                        spec_name = remap
            # NOTE(review): if port_spec.type is neither 'input' nor
            # 'output', spec_name is stale/unbound here -- confirm that
            # only those two types can occur.
            new_spec = port_spec.do_copy(True, controller.id_scope, {})
            new_spec.name = spec_name
            new_module.add_port_spec(new_spec)

    # Re-create functions, applying function_remap.
    function_ops = []
    for function in old_module.functions:
        if function.name not in function_remap:
            function_name = function.name
        else:
            remap = function_remap[function.name]
            if remap is None:
                # don't add the function back in
                continue
            elif not isinstance(remap, basestring):
                function_ops.extend(remap(function, new_module))
                continue
            else:
                function_name = remap

        if len(function.parameters) > 0:
            new_param_vals, aliases = zip(*[(p.strValue, p.alias)
                                            for p in function.parameters])
        else:
            new_param_vals = []
            aliases = []
        if use_registry:
            # The controller resolves the port spec by name.
            function_port_spec = function_name
        else:
            # Build a synthetic port spec with one generic item per value.
            def mk_psi(pos):
                psi = PortSpecItem(module="Module", package=basic_pkg,
                                   namespace="", pos=pos)
                return psi
            n_items = len(new_param_vals)
            function_port_spec = PortSpec(name=function_name,
                                          items=[mk_psi(i)
                                                 for i in xrange(n_items)])
        new_function = controller.create_function(new_module,
                                                  function_port_spec,
                                                  new_param_vals,
                                                  aliases)
        new_module.add_function(new_function)

    if None in function_remap:
        # used to add new functions
        remap = function_remap[None]
        function_ops.extend(remap(None, new_module))

    # add the new module
    ops.append(('add', new_module))
    ops.extend(function_ops)

    create_new_connection = UpgradeWorkflowHandler.create_new_connection

    # Re-create outgoing connections, applying src_port_remap.
    for _, conn_id in pipeline.graph.edges_from(old_module.id):
        old_conn = pipeline.connections[conn_id]
        if old_conn.source.name not in src_port_remap:
            source_name = old_conn.source.name
        else:
            remap = src_port_remap[old_conn.source.name]
            if remap is None:
                # don't add this connection back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(old_conn, new_module))
                continue
            else:
                source_name = remap

        old_dst_module = pipeline.modules[old_conn.destination.moduleId]
        if use_registry:
            source_port = source_name
        else:
            source_port = Port(name=source_name,
                               type='source',
                               signature=create_port_spec_string(
                                   [(basic_pkg, 'Variant', '')]))

        new_conn = create_new_connection(controller,
                                         new_module,
                                         source_port,
                                         old_dst_module,
                                         old_conn.destination)
        ops.append(('add', new_conn))

    # Re-create incoming connections, applying dst_port_remap.
    for _, conn_id in pipeline.graph.edges_to(old_module.id):
        old_conn = pipeline.connections[conn_id]
        if old_conn.destination.name not in dst_port_remap:
            destination_name = old_conn.destination.name
        else:
            remap = dst_port_remap[old_conn.destination.name]
            if remap is None:
                # don't add this connection back in
                continue
            elif not isinstance(remap, basestring):
                ops.extend(remap(old_conn, new_module))
                continue
            else:
                destination_name = remap

        old_src_module = pipeline.modules[old_conn.source.moduleId]
        if use_registry:
            destination_port = destination_name
        else:
            destination_port = Port(name=destination_name,
                                    type='destination',
                                    signature=create_port_spec_string(
                                        [(basic_pkg, 'Variant', '')]))

        new_conn = create_new_connection(controller,
                                         old_src_module,
                                         old_conn.source,
                                         new_module,
                                         destination_port)
        ops.append(('add', new_conn))

    return [vistrails.core.db.action.create_action(ops)]