Example 1
    def create_annotation(self, id_scope=None):
        from vistrails.db.domain import IdScope

        if id_scope is None:
            id_scope = IdScope()
        annotation = Annotation(id=id_scope.getNewId(Annotation.vtType), key="akey %s", value="some value %s")
        return annotation
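Note: these helpers rely on IdScope.getNewId handing out a fresh id per object type. A minimal sketch of that contract, assuming a VisTrails checkout is importable (the assertion is illustrative and not part of the original tests):

    from vistrails.db.domain import IdScope
    from vistrails.core.vistrail.annotation import Annotation

    id_scope = IdScope()
    first = id_scope.getNewId(Annotation.vtType)   # first id in this scope
    second = id_scope.getNewId(Annotation.vtType)  # next id in this scope
    assert first != second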
Example 2
    def create_control_parameter(self, id_scope=None):
        from vistrails.db.domain import IdScope

        if id_scope is None:
            id_scope = IdScope()
        control_parameter = ModuleControlParam(id=id_scope.getNewId(ModuleControlParam.vtType),
                                name='name %s',
                                value='some value %s')
        return control_parameter
Example 3
    def create_annotation(self, id_scope=None):
        from vistrails.db.domain import IdScope

        if id_scope is None:
            id_scope = IdScope()
        annotation = Annotation(id=id_scope.getNewId(Annotation.vtType),
                                key='akey %s',
                                value='some value %s')
        return annotation
Example 4
    def create_annotation(self, id_scope=None):
        from vistrails.db.domain import IdScope

        if id_scope is None:
            id_scope = IdScope()
        annotation = ActionAnnotation(
            id=id_scope.getNewId(ActionAnnotation.vtType), key="akey", action_id=1L, value="some value", user="******"
        )
        return annotation
Example 5
    def create_annotation(self, id_scope=None):
        from vistrails.db.domain import IdScope

        if id_scope is None:
            id_scope = IdScope()
        annotation = \
            ActionAnnotation(id=id_scope.getNewId(ActionAnnotation.vtType),
                             key='akey', action_id=1L,
                             value='some value', user='******')
        return annotation
Example 6
    def create_annotation(self, id_scope=None):
        from vistrails.db.domain import IdScope

        if id_scope is None:
            id_scope = IdScope()
        annotation = \
            ActionAnnotation(id=id_scope.getNewId(ActionAnnotation.vtType),
                             key='akey', action_id=1L,
                             value='some value', user='******')
        return annotation
Example 7
    def test_port_spec_copy(self):
        id_scope = IdScope()
        psi = DBPortSpecItem(id=id_scope.getNewId(DBPortSpecItem.vtType),
                             pos=0,
                             module="File",
                             package="org.vistrails.vistrails.basic")

        ps1 = DBPortSpec(id=id_scope.getNewId(DBPortSpec.vtType),
                         portSpecItems=[psi,])
        ops = create_copy_op_chain(ps1, id_scope=id_scope)
        self.assertNotEqual(ps1.db_portSpecItems[0].db_id,
                            ops[0].db_data.db_portSpecItems[0].db_id)
Example 8
    def __init__(self,
                 id,
                 name,
                 vtid=None,
                 version=None,
                 alias_list=None,
                 t='vistrail',
                 has_seq=None,
                 layout='',
                 geometry='',
                 id_scope=IdScope()):
        # normalize only the value handed to DBMashup; keep has_seq itself
        # as None so the derivation from alias_list below can still run
        db_has_seq = 0 if has_seq is None else has_seq

        DBMashup.__init__(self, id, name, version, alias_list, t, vtid, layout,
                          geometry, db_has_seq)
        self.id_scope = id_scope
        if has_seq is None:
            self.has_seq = False
            if isinstance(self.alias_list, list):
                for v in self.alias_list:
                    if v.component.seq == True:
                        self.has_seq = True
        else:
            self.has_seq = has_seq
Example 9
 def create_component(self, id_scope=IdScope()):
     c = Component(id=id_scope.getNewId('mashup_component'),
                       vttype='parameter', param_id=15L, 
                       parent_vttype='function', parent_id=3L, mid=4L,
                       type='String', value='test', p_pos=0, pos=1, 
                       strvaluelist='test1,test2', widget="text")
     return c
Example 10
    def create_action(self, id_scope=IdScope()):
        from vistrails.core.mashup.component import Component
        from vistrails.core.mashup.alias import Alias
        from vistrails.core.mashup.mashup import Mashup
        c1 = Component(id=id_scope.getNewId('mashup_component'),
                       vttype='parameter',
                       param_id=15L,
                       parent_vttype='function',
                       parent_id=3L,
                       mid=4L,
                       type='String',
                       value='test',
                       p_pos=0,
                       pos=1,
                       strvaluelist='test1,test2',
                       widget="text")
        a1 = Alias(id=id_scope.getNewId('mashup_alias'),
                   name='alias1',
                   component=c1)

        m = Mashup(id=id_scope.getNewId('mashup'),
                   name='mashup1',
                   vtid='empty.vt',
                   version=15L,
                   alias_list=[a1])
        action = Action(id=id_scope.getNewId('mashup_action'),
                        prevId=0L,
                        date=datetime(2007, 11, 18),
                        mashup=m)
        return action
Example 11
    def create_annotation(self, id_scope=IdScope()):

        annotation = \
            ActionAnnotation(id=id_scope.getNewId('mashup_actionAnnotation'),
                             key='akey', action_id=1L,
                             value='some value', user='******')
        return annotation
Example 12
 def create_param(self, id_scope=IdScope()):
     param = PEParam(id=id_scope.getNewId(PEParam.vtType),
                         pos=1,
                         interpolator='normal-int',
                         value='[1, 2]',
                         dimension=1)
     return param
Example 13
 def create_module(self, id_scope=None):
     from vistrails.core.modules.basic_modules import identifier as basic_pkg
     from vistrails.db.domain import IdScope
     if id_scope is None:
         id_scope = IdScope()
     
     params = [ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                               type='Int',
                               val='1')]
     functions = [ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                                 name='value',
                                 parameters=params)]
     module = Module(id=id_scope.getNewId(Module.vtType),
                     name='Float',
                     package=basic_pkg,
                     functions=functions)
     return module
Example 14
 def create_port(self, id_scope=IdScope()):
     port = Port(id=id_scope.getNewId(Port.vtType),
                 type='source',
                 moduleId=12L,
                 moduleName='String',
                 name='value',
                 signature='(%s:String)' % get_vistrails_basic_pkg_id())
     return port
Example 15
    def create_action(self, id_scope=None):
        from vistrails.core.modules.basic_modules import identifier as basic_pkg
        from vistrails.core.vistrail.action import Action
        from vistrails.core.vistrail.module import Module
        from vistrails.core.vistrail.module_function import ModuleFunction
        from vistrails.core.vistrail.module_param import ModuleParam
        from vistrails.db.domain import IdScope

        if id_scope is None:
            id_scope = IdScope()
        param = ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                            type='Integer',
                            val='1')
        function = ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                                  name='value',
                                  parameters=[param])
        m = Module(id=id_scope.getNewId(Module.vtType),
                   name='Float',
                   package=basic_pkg,
                   functions=[function])

        add_op = AddOp(id=id_scope.getNewId('operation'),
                       what='module',
                       objectId=m.id,
                       data=m)
        action = Action(id=id_scope.getNewId(Action.vtType),
                        prevId=0,
                        date=datetime(2007, 11, 18),
                        operations=[add_op])
        return action
Example 16
 def test_copy(self):        
     id_scope = IdScope()
     pe1 = self.create_pe(id_scope)
     pe2 = copy.copy(pe1)
     self.assertEquals(pe1, pe2)
     self.assertEquals(pe1.id, pe2.id)
     pe3 = pe1.do_copy(True, id_scope, {})
     self.assertEquals(pe1, pe3)
     self.assertNotEquals(pe1.id, pe3.id)
Example 17
 def test_copy(self):        
     id_scope = IdScope()
     p1 = self.create_param(id_scope)
     p2 = copy.copy(p1)
     self.assertEquals(p1, p2)
     self.assertEquals(p1.id, p2.id)
     p3 = p1.do_copy(True, id_scope, {})
     self.assertEquals(p1, p3)
     self.assertNotEquals(p1.real_id, p3.real_id)
Example 18
 def test_copy(self):        
     id_scope = IdScope()
     f1 = self.create_function(id_scope)
     f2 = copy.copy(f1)
     self.assertEquals(f1, f2)
     self.assertEquals(f1.id, f2.id)
     f3 = f1.do_copy(True, id_scope, {})
     self.assertEquals(f1, f3)
     self.assertNotEquals(f1.real_id, f3.real_id)
Example 19
 def test_copy(self):
     id_scope = IdScope()
     a1 = self.create_action(id_scope)
     a2 = copy.copy(a1)
     self.assertEquals(a1, a2)
     self.assertEquals(a1.id, a2.id)
     a3 = a1.do_copy(True, id_scope, {})
     self.assertEquals(a1, a3)
     self.assertNotEquals(a1.id, a3.id)
Example 20
 def test_copy(self):
     id_scope = IdScope()
     m1 = self.create_mashup(id_scope)
     m2 = copy.copy(m1)
     self.assertEqual(m1, m2)
     self.assertEqual(m1.id, m2.id)
     m3 = m2.do_copy(True, id_scope, {})
     self.assertEqual(m1, m3)
     self.assertNotEqual(m1.id, m3.id)
Example 21
 def test_copy(self):
     id_scope = IdScope()
     c1 = self.create_component(id_scope)
     c2 = copy.copy(c1)
     self.assertEqual(c1, c2)
     self.assertEqual(c1.id, c2.id)
     c3 = c2.do_copy(True, id_scope, {})
     self.assertEqual(c1, c3)
     self.assertNotEqual(c1.id, c3.id)
Example 22
 def create_port_spec(self, id_scope=IdScope()):
     # FIXME add a valid port spec
     port_spec = PortSpec(id=id_scope.getNewId(PortSpec.vtType),
                          name='SetValue',
                          type='input',
                          sigstring='(%s:String)' % \
                              get_vistrails_basic_pkg_id(),
                          )
     return port_spec
Example 23
    def test_copy(self):
        id_scope = IdScope()

        t1 = self.create_tag(id_scope)
        t2 = copy.copy(t1)
        self.assertEquals(t1, t2)
        self.assertEquals(t1.id, t2.id)
        t3 = t1.do_copy(True, id_scope, {})
        self.assertEquals(t1, t3)
        self.assertNotEquals(t1.id, t3.id)
Example 24
 def create_function(self, id_scope=IdScope()):
     param = ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                         pos=2,
                         type='Int',
                         val='1')
     function = ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                               pos=0,
                               name='value',
                               parameters=[param])
     return function
Example 25
    def test_copy(self):
        id_scope = IdScope()

        s1 = self.create_port_spec(id_scope)
        s2 = copy.copy(s1)
        self.assertEquals(s1, s2)
        self.assertEquals(s1.id, s2.id)
        s3 = s1.do_copy(True, id_scope, {})
        self.assertEquals(s1, s3)
        self.assertNotEquals(s1.id, s3.id)
Example 26
    def test_copy(self):
        id_scope = IdScope()

        loc1 = self.create_location(id_scope)
        loc2 = copy.copy(loc1)
        self.assertEquals(loc1, loc2)
        self.assertEquals(loc1.id, loc2.id)
        loc3 = loc1.do_copy(True, id_scope, {})
        self.assertEquals(loc1, loc3)
        self.assertNotEquals(loc1.id, loc3.id)
Example 27
 def __init__(self, id, vt_version, id_scope=None):
     DBMashuptrail.__init__(self, None, id, version="", vtVersion=vt_version)
     self.db_actions = []
     self.currentVersion = -1
     self.db_annotations = []
     self.db_actionAnnotations = []
     if not id_scope:
         self.id_scope = IdScope(1L)
     else:
         self.id_scope = id_scope
Example 28
    def create_ops(self, id_scope=None):
        from vistrails.core.modules.basic_modules import identifier as basic_pkg
        from vistrails.core.vistrail.module import Module
        from vistrails.core.vistrail.module_function import ModuleFunction
        from vistrails.core.vistrail.module_param import ModuleParam
        from vistrails.core.vistrail.annotation import Annotation
        
        if id_scope is None:
            id_scope = IdScope(remap={AddOp.vtType: 'operation',
                                      ChangeOp.vtType: 'operation',
                                      DeleteOp.vtType: 'operation'})

        m = Module(id=id_scope.getNewId(Module.vtType),
                   name='Float',
                   package=basic_pkg)
        add_op = AddOp(id=id_scope.getNewId(AddOp.vtType),
                       what=Module.vtType,
                       objectId=m.id,
                       data=m)
        function = ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                                  name='value')
        change_op = ChangeOp(id=id_scope.getNewId(ChangeOp.vtType),
                             what=ModuleFunction.vtType,
                             oldObjId=2,
                             newObjId=function.real_id,
                             parentObjId=m.id,
                             parentObjType=Module.vtType,
                             data=function)
        param = ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                            type='Float',
                            val='1.0')
        
        delete_op = DeleteOp(id=id_scope.getNewId(DeleteOp.vtType),
                             what=ModuleParam.vtType,
                             objectId=param.real_id,
                             parentObjId=function.real_id,
                             parentObjType=ModuleFunction.vtType)

        annotation = Annotation(id=id_scope.getNewId(Annotation.vtType),
                                key='foo',
                                value='bar')
        add_annotation = AddOp(id=id_scope.getNewId(AddOp.vtType),
                               what=Annotation.vtType,
                               objectId=m.id,
                               data=annotation)
        
        return [add_op, change_op, delete_op, add_annotation]
Example 29
 def create_mashup(self, id_scope=IdScope()):
     c1 = Component(id=id_scope.getNewId('mashup_component'),
                       vttype='parameter', param_id=15L, 
                       parent_vttype='function', parent_id=3L, mid=4L,
                       type='String', value='test', p_pos=0, pos=1, 
                       strvaluelist='test1,test2', widget="text")
     a1 = Alias(id=id_scope.getNewId('mashup_alias'), name='alias1', component=c1)
     
     m = Mashup(id=id_scope.getNewId('mashup'), name='mashup1', vtid='empty.vt', 
                version=15L, alias_list=[a1])
     return m
Example 30
    def test_copy(self):
        from vistrails.db.domain import IdScope
        id_scope = IdScope()

        a1 = self.create_control_parameter(id_scope)
        a2 = copy.copy(a1)
        self.assertEquals(a1, a2)
        self.assertEquals(a1.id, a2.id)
        a3 = a1.do_copy(True, id_scope, {})
        self.assertEquals(a1, a3)
        self.assertNotEquals(a1.id, a3.id)
Example 31
    def test_copy(self):
        """Check that copy works correctly"""

        id_scope = IdScope()
        m1 = self.create_group(id_scope)
        m2 = copy.copy(m1)
        self.assertEquals(m1, m2)
        self.assertEquals(m1.id, m2.id)
        m3 = m1.do_copy(True, id_scope, {})
        self.assertEquals(m1, m3)
        self.assertNotEquals(m1.id, m3.id)
Example 32
 def create_pe(self, id_scope=IdScope()):
     pe = ParameterExploration(
                         id=id_scope.getNewId(ParameterExploration.vtType),
                         action_id=6,
                         user='******',
                         date='2007-11-23 12:48',
                         dims='[1,2]',
                         layout='{1:"normal"}',
                         name='test-pe',
                         functions=[])
     return pe
Example 33
    def test_copy(self):
        """Check that copy works correctly"""
        from vistrails.db.domain import IdScope

        id_scope = IdScope()
        m1 = self.create_module(id_scope)
        m2 = copy.copy(m1)
        self.assertEquals(m1, m2)
        self.assertEquals(m1.id, m2.id)
        m3 = m1.do_copy(True, id_scope, {})
        self.assertEquals(m1, m3)
        self.assertNotEquals(m1.id, m3.id)
Example 34
 def create_function(self, id_scope=IdScope()):
     param = PEParam(id=id_scope.getNewId(PEParam.vtType),
                     pos=2,
                     interpolator='Stepper',
                     dimension=4,
                     value='[1, 2]')
     function = PEFunction(id=id_scope.getNewId(PEFunction.vtType),
                           module_id=7,
                           port_name='value',
                           is_alias=0,
                           parameters=[param])
     return function
Example 35
    def create_action(self, id_scope=None):
        from vistrails.core.modules.basic_modules import identifier as basic_pkg
        from vistrails.core.vistrail.action import Action
        from vistrails.core.vistrail.module import Module
        from vistrails.core.vistrail.module_function import ModuleFunction
        from vistrails.core.vistrail.module_param import ModuleParam
        from vistrails.core.vistrail.operation import AddOp
        from vistrails.db.domain import IdScope
        from datetime import datetime
        
        if id_scope is None:
            id_scope = IdScope()
        param = ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                            type='Integer',
                            val='1')
        function = ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                                  name='value',
                                  parameters=[param])
        m = Module(id=id_scope.getNewId(Module.vtType),
                   name='Float',
                   package=basic_pkg,
                   functions=[function])

        add_op = AddOp(id=id_scope.getNewId('operation'),
                       what='module',
                       objectId=m.id,
                       data=m)
        action = Action(id=id_scope.getNewId(Action.vtType),
                        prevId=0,
                        date=datetime(2007,11,18),
                        operations=[add_op])
        return action
Example 36
    def create_module(self, id_scope=None):
        from vistrails.core.modules.basic_modules import identifier as basic_pkg
        from vistrails.db.domain import IdScope
        if id_scope is None:
            id_scope = IdScope()

        params = [
            ModuleParam(id=id_scope.getNewId(ModuleParam.vtType),
                        type='Int',
                        val='1')
        ]
        functions = [
            ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                           name='value',
                           parameters=params)
        ]
        control_parameters = [
            ModuleControlParam(id=id_scope.getNewId(ModuleControlParam.vtType),
                               name='combiner',
                               value='pairwise')
        ]
        module = Module(id=id_scope.getNewId(Module.vtType),
                        name='Float',
                        package=basic_pkg,
                        functions=functions,
                        controlParameters=control_parameters)
        return module
Example 37
 def test_copy(self):
     id_scope = IdScope(
         remap={
             AddOp.vtType: 'operation',
             ChangeOp.vtType: 'operation',
             DeleteOp.vtType: 'operation'
         })
     for op1 in self.create_ops(id_scope):
         op2 = copy.copy(op1)
         self.assertEquals(op1, op2)
         self.assertEquals(op1.id, op2.id)
         op3 = op1.do_copy(True, id_scope, {})
         self.assertEquals(op1, op3)
         self.assertNotEquals(op1.id, op3.id)
         if hasattr(op1, 'data'):
             self.assertNotEquals(op1.data.db_id, op3.data.db_id)
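The remap argument used above (and in the create_ops helper of Example 28) makes AddOp, ChangeOp and DeleteOp draw ids from a single shared 'operation' counter. A hedged sketch of that behaviour, assuming all three operation classes live in vistrails.core.vistrail.operation as AddOp does in Example 35:

    from vistrails.db.domain import IdScope
    from vistrails.core.vistrail.operation import AddOp, ChangeOp, DeleteOp

    id_scope = IdScope(remap={AddOp.vtType: 'operation',
                              ChangeOp.vtType: 'operation',
                              DeleteOp.vtType: 'operation'})
    # All three operation types map onto the shared 'operation' counter,
    # so ids never collide across operation kinds.
    ids = [id_scope.getNewId(AddOp.vtType),
           id_scope.getNewId(ChangeOp.vtType),
           id_scope.getNewId(DeleteOp.vtType)]
    assert len(set(ids)) == 3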
Example 38
class Mashuptrail(DBMashuptrail):
    """ MashupTrail is a class that stores versions of Mashups.
    For now it keeps a linear history."""
    def __init__(self, id, vt_version, id_scope=None):
        DBMashuptrail.__init__(self, None, id, version="", vtVersion=vt_version)
        self.db_actions = []
        self.currentVersion = -1
        self.db_annotations = []
        self.db_actionAnnotations = []
        if not id_scope:
            self.id_scope = IdScope(1L)
        else:
            self.id_scope = id_scope
        
    id = DBMashuptrail.db_name
    vtVersion = DBMashuptrail.db_vtVersion
    actions = DBMashuptrail.db_actions
    annotations = DBMashuptrail.db_annotations
    actionAnnotations = DBMashuptrail.db_actionAnnotations
    
    def _get_actionMap(self):
        return self.db_actions_id_index
    actionMap = property(_get_actionMap)
    
    @staticmethod
    def convert(_mtrail):
        _mtrail.__class__ = Mashuptrail

        for action in _mtrail.actions:
            Action.convert(action)

        for annotation in _mtrail.annotations:
            Annotation.convert(annotation)

        for aannotation in _mtrail.actionAnnotations:
            ActionAnnotation.convert(aannotation)
        _mtrail.id_scope = IdScope(1L)
        _mtrail.updateIdScope()
            
    def addVersion(self, parent_id, mashup, user, date):
        id = self.getLatestVersion() + 1
        mashup.id_scope = self.id_scope
        mashup.id = id
        mashup.version = self.vtVersion
        action = Action(id=id, prevId=parent_id, mashup=mashup,
                        user=user, date=date)
        self.db_add_action(action)
        
        return action.id
    
    def __copy__(self):
        return Mashuptrail.do_copy(self)
    
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """do_copy() -> Mashuptrail 
        returns a clone of itself"""
        cp = DBMashuptrail.do_copy(self, new_ids, id_scope, id_remap)
        Mashuptrail.convert(cp)        
        cp.currentVersion = self.currentVersion        
        return cp
    
    def getLatestVersion(self):
        if not self.actions:
            return 0
        max_ver = max(a.id for a in self.actions)
        return max_ver

    def getMashup(self, version):
        if version in self.actionMap.keys():
            return self.actionMap[version].mashup
        else:
            return None
        
    def validateMashupsForPipeline(self, version, pipeline):
        """validateMashupsForPipeline(version:long, pipeline:Pipeline)->None
        This will make sure that the aliases present in all mashups are 
        consistent with the current pipeline. 
        
        """
        for action in self.actions:
            action.mashup.id_scope = self.id_scope
            action.mashup.validateForPipeline(pipeline)
            action.mashup.version = version
    
    ####################################################################
    ## Tag manipulation
    ##    
    def hasTagWithName(self, name):
        for a in self.actionAnnotations:
            if a.key == "__tag__":
                if a.value == name:
                    return True
        return False
        
    def hasTagForActionId(self, action_id):
        for a in self.actionAnnotations:
            if a.key == "__tag__" and a.action_id == action_id:
                return True
        return False
    
    def getTagForActionId(self, action_id):
        for a in self.actionAnnotations:
            if a.key == "__tag__" and a.action_id == action_id:
                return a.value
        return ""
    
    def changeTag(self, action_id, name, user, date):
        if self.hasTagWithName(name):
            return False
        if self.hasTagForActionId(action_id):
            self.removeTagByActionId(action_id)
        return self.addTag(action_id, name, user, date)
            
    def addTag(self, action_id, name, user, date):
        if not self.hasTagWithName(name):
            self.addActionAnnotation(action_id=action_id, key="__tag__", 
                                     value=name, user=user, date=date)
            return True
        return False
    
    def removeTagByActionId(self, action_id):
        found = None
        for a in self.actionAnnotations:
            if a.key == "__tag__" and a.action_id == action_id:
                found = a
                break
        if found:
            self.actionAnnotations.remove(found)
                   
    def getTagMap(self):
        """getTagMap() -> dict of tag:action_id"""
        tagMap = {}
        for a in self.actionAnnotations:
            if a.key == "__tag__":
                tagMap[a.value] = a.action_id
        return tagMap
    
    def addActionAnnotation(self, action_id, key, value, user, date):
        id = self.id_scope.getNewId("mashup_actionAnnotation")
        annot = ActionAnnotation(id=id, action_id=action_id, key=key,
                                 value=value, user=user, date=date)
        self.actionAnnotations.append(annot)


    ##########################################################################
    # Operators

    def __str__(self):
        """ __str__() -> str - Returns a string representation of itself """
        
        return ("(Mashuptrail id='%s' vtVersion='%s' actions='%s')@%X" %
                    (self.id,
                     self.vtVersion,
                     self.actions,
                     id(self)))
    
    ######################################################################
    ## Serialization and Unserialization
    ##                
#    def toXml(self, node=None):
#        """toXml(node: ElementTree.Element) -> ElementTree.Element
#           writes itself to xml
#        """
#
#        if node is None:
#            node = ElementTree.Element('mashuptrail')
#        
#        #set attributes
#        node.set('id', self.convert_to_str(self.id, 'uuid'))
#        node.set('vtVersion', self.convert_to_str(self.vtVersion,'long'))
#        node.set('version', self.convert_to_str(self.version, 'str'))
#        for action in self.actions:
#            child_ = ElementTree.SubElement(node, 'action')
#            action.toXml(child_)
#        for annot in self.annotations:
#            child_ = ElementTree.SubElement(node, 'actionAnnotation')
#            annot.toXml(child_)
#        return node
#    
#    @staticmethod
#    def fromXml(node):
#        if node.tag != 'mashuptrail':
#            debug.debug("node.tag != 'mashuptrail'")
#            return None
#        #read attributes
#        data = node.get('id', None)
#        id = Mashuptrail.convert_from_str(data, 'uuid')
#        data = node.get('vtVersion', None)
#        vtVersion = Mashuptrail.convert_from_str(data, 'long')
#        data = node.get('version', None)
#        version = Mashuptrail.convert_from_str(data, 'str')
#        actions = []
#        action_map = {}
#        annotations = []
#        for child in node.getchildren():
#            if child.tag == 'action':
#                action = Action.fromXml(child)
#                actions.append(action)
#                action_map[action.id] = action
#            elif child.tag == 'actionAnnotation':
#                annot = ActionAnnotation.fromXml(child)
#                annotations.append(annot)
#                
#        mtrail = Mashuptrail(id,vtVersion)
#        mtrail.version = version
#        mtrail.actions = actions
#        mtrail.actionMap = action_map
#        mtrail.annotations = annotations
#        mtrail.currentVersion = mtrail.getLatestVersion()
#        mtrail.updateIdScope()
#        return mtrail
    
    ######################################################################
    ## IdScope
    ##      
    def updateIdScope(self):
        for action in self.actions:
            self.id_scope.updateBeginId('mashup_action', action.id+1)
            for alias in action.mashup.alias_list:
                self.id_scope.updateBeginId('mashup_alias', alias.id+1)
                self.id_scope.updateBeginId('mashup_component', alias.component.id+1)
        for annotation in self.annotations:
            self.id_scope.updateBeginId('annotation', annotation.id+1)
        for aannotation in self.actionAnnotations:
            self.id_scope.updateBeginId('mashup_actionAnnotation', aannotation.id+1)
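A brief hedged usage sketch of the tag API above; the Mashuptrail module path is an assumption, and the Mashup import mirrors Example 10:

    from datetime import datetime
    from vistrails.core.mashup.mashup import Mashup
    # assumed location of the Mashuptrail class defined above
    from vistrails.core.mashup.mashuptrail import Mashuptrail

    trail = Mashuptrail(id=1L, vt_version=15L)
    mashup = Mashup(id=-1, name='mashup1', vtid='empty.vt', version=15L)
    # addVersion assigns the mashup a new version id and records an Action
    version = trail.addVersion(parent_id=0, mashup=mashup, user='test',
                               date=datetime(2012, 1, 1))
    trail.addTag(version, 'first mashup', user='test', date=datetime(2012, 1, 1))
    assert trail.getTagForActionId(version) == 'first mashup'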
Example 39
def create_opm(workflow, version, log, reg):
    id_scope = IdScope()
    processes = []
    # conn_artifacts = {}
    artifacts = []
    dependencies = []
    accounts = []
    depth_accounts = {}
    file_artifacts = {}
    db_artifacts = {}

    def do_create_process(workflow, item_exec, account, module_processes):
        process = create_process(item_exec, account, id_scope)
        print 'adding process', process.db_id,
        if hasattr(item_exec, 'db_module_name'):
            print item_exec.db_module_name
        elif hasattr(item_exec, 'db_group_name'):
            print item_exec.db_group_name
        processes.append(process)
        module = workflow.db_modules_id_index[item_exec.db_module_id]
        module_processes[module.db_id] = (module, process)

    def get_package(reg, pkg_identifier, pkg_version=''):
        if not pkg_version:
            # spin and get current package
            for pkg in reg.db_packages:
                if pkg.db_identifier == pkg_identifier:
                    break
                pkg = None
        else:
            pkg = reg.db_packages_identifier_index[(pkg_identifier,
                                                    pkg_version)]
        return pkg

    def process_exec(item_exec, workflow, account, upstream_lookup,
                     downstream_lookup, depth, conn_artifacts=None,
                     function_artifacts=None, module_processes=None,
                     in_upstream_artifacts={}, in_downstream_artifacts={},
                     add_extras=False):

        print 'in_upstream:', [(n, x.db_id) 
                               for n, x_list in in_upstream_artifacts.iteritems() for x in x_list]
        print 'in_downstream:', [(n, x.db_id)  
                                 for n, x_list in in_downstream_artifacts.iteritems() for x in x_list]
        # FIXME merge conn_artifacts and function_artifacts
        # problem is that a conn_artifact is OUTPUT while function_artifact
        # is INPUT
        if conn_artifacts is None:
            conn_artifacts = {}
        if function_artifacts is None:
            function_artifacts = {}
        if module_processes is None:
            module_processes = {}
#         while item_exec.vtType == DBLoopExec.vtType:
#             item_exec = item_exec.db_item_execs[0]
        (module, process) = module_processes[item_exec.db_module_id]

        def process_connection(conn):
            source = conn.db_ports_type_index['source']
            source_t = (source.db_moduleId, source.db_name)
            in_cache = False
            print '!!! processing', source_t
            if source_t in conn_artifacts:
                artifact = conn_artifacts[source_t]
                in_cache = True
            else:
                # key off source module and port name
                # get descriptor from registry and then port_spec
                # store port_spec as artifact

                if source.db_moduleId < 0:
                    dest = conn.db_ports_type_index['destination']
                    module = source.db_module
                else:
                    module = workflow.db_modules_id_index[source.db_moduleId]
                print module.db_name, module.db_id

                pkg = get_package(reg, module.db_package, module.db_version)

                if not module.db_namespace:
                    module_namespace = ''
                else:
                    module_namespace = module.db_namespace
                module_desc = \
                    pkg.db_module_descriptors_name_index[(module.db_name,
                                                          module_namespace,
                                                          '')]
                # FIXME make work for module port_specs, too
                # for example, a PythonSource with a given port in 
                # module.db_portSpecs
                port_spec = None
                spec_t = (source.db_name, 'output')
                if spec_t in module.db_portSpecs_name_index:
                    port_spec = module.db_portSpecs_name_index[spec_t]
                while port_spec is None and \
                        module_desc.db_id != reg.db_root_descriptor_id:
                    if spec_t in module_desc.db_portSpecs_name_index:
                        port_spec = module_desc.db_portSpecs_name_index[spec_t]
                    base_id = module_desc.db_base_descriptor_id

                    # inefficient spin through db_packages but we do
                    # not have the descriptors_by_id index that exists
                    # on core.module_registry.ModuleRegistry here
                    module_desc = None
                    for pkg in reg.db_packages:
                        if base_id in pkg.db_module_descriptors_id_index:
                            module_desc = \
                                pkg.db_module_descriptors_id_index[base_id]
                            break
                    if module_desc is None:
                        raise KeyError("Cannot find base descriptor id %d" %
                                       base_id)
                    # pkg = get_package(reg, module_desc.db_package,
                    #                   module_desc.db_package_version)
                    # module_desc = pkg.db_module_descriptors_id_index[base_id]
                if port_spec is None:
                    port_spec = module_desc.db_portSpecs_name_index[spec_t]
                print module_desc.db_name
                
                artifact = \
                    create_artifact_from_port_spec(port_spec, account, id_scope)
                artifacts.append(artifact)
                print 'adding conn_artifact', artifact.db_id, source_t, \
                    source.db_moduleName
                conn_artifacts[source_t] = artifact
            return (artifact, in_cache)

        def process_map(module, found_input_ports, found_output_ports):
            print "*** Processing Map"
            if depth+1 in depth_accounts:
                account = depth_accounts[depth+1]
            else:
                account = create_account(depth+1, id_scope)
                accounts.append(account)
                depth_accounts[depth+1] = account

            # need to have process that extracts artifacts for each iteration
            input_list_artifact = found_input_ports['InputList']
            result_artifact = found_output_ports.get('Result', None)
            input_port_list = \
                eval(found_input_ports['InputPort'].db_parameters[0].db_val)
            output_port = \
                found_input_ports['OutputPort'].db_parameters[0].db_val

            s_process = create_process_manual('Split', account, id_scope)
            processes.append(s_process)
            dependencies.append(create_used(s_process,
                                            input_list_artifact,
                                            account,
                                            id_scope))
            # need to have process that condenses artifacts from each iteration
            if result_artifact is not None:
                j_process = create_process_manual('Join', account, id_scope)
                processes.append(j_process)
            for loop_exec in item_exec.db_loop_execs:
                loop_up_artifacts = {}
                loop_down_artifacts = {}
                for input_name in input_port_list:
                    port_spec = DBPortSpec(id=-1,
                                           name=input_name,
                                           type='output')
                    s_artifact = \
                        create_artifact_from_port_spec(port_spec, account, 
                                                       id_scope)
                    artifacts.append(s_artifact)
                    dependencies.append(create_was_generated_by(s_artifact,
                                                                s_process,
                                                                account,
                                                                id_scope))
                    if input_name not in loop_up_artifacts:
                        loop_up_artifacts[input_name] = []
                    loop_up_artifacts[input_name].append(s_artifact)

                # process output_port
                if loop_exec.db_completed == 1:
                    port_spec = DBPortSpec(id=-1,
                                           name=output_port,
                                           type='output')
                    o_artifact = \
                            create_artifact_from_port_spec(port_spec, account, 
                                                           id_scope)
                    artifacts.append(o_artifact)
                    if output_port not in loop_down_artifacts:
                        loop_down_artifacts[output_port] = []
                    loop_down_artifacts[output_port].append(o_artifact)

                if result_artifact is not None:
                    dependencies.append(create_used(j_process, o_artifact, 
                                                    account, id_scope))
                                                
                # now process a loop_exec
                for child_exec in loop_exec.db_item_execs:
                    do_create_process(workflow, child_exec, account, 
                                      module_processes)
                for child_exec in loop_exec.db_item_execs:
                    process_exec(child_exec, workflow, account, upstream_lookup,
                                 downstream_lookup, depth+1, conn_artifacts,
                                 function_artifacts, module_processes,
                                 loop_up_artifacts, loop_down_artifacts, True)

            # need to set Return artifact and connect j_process to it
            if result_artifact is not None:
                dependencies.append(create_was_generated_by(result_artifact,
                                                            j_process,
                                                            account,
                                                            id_scope))

        def process_group(module, found_input_ports, found_output_ports):
            # identify depth and create new account if necessary
            # recurse with new account
            # need to link to upstream and downstream correctly
            workflow = module.db_workflow
            # run the whole upstream construction, etc, using this exec
            # and the group's workflow
            if depth+1 in depth_accounts:
                account = depth_accounts[depth+1]
            else:
                account = create_account(depth+1, id_scope)
                accounts.append(account)
                depth_accounts[depth+1] = account
            process_workflow(workflow, item_exec, account, 
                             out_upstream_artifacts,
                             out_downstream_artifacts, depth+1)            

        def process_port_module(module, found_input_ports, found_output_ports):
            port_name = found_input_ports['name'].db_parameters[0].db_val
            if module.db_name == 'InputPort':
                if port_name in in_upstream_artifacts:
                    for artifact in in_upstream_artifacts[port_name]:
                        dependencies.append(create_used(process, artifact,
                                                        account, id_scope))
            elif module.db_name == 'OutputPort':
                if port_name in in_downstream_artifacts:
                    for artifact in in_downstream_artifacts[port_name]:
                        dependencies.append(create_was_generated_by(artifact,
                                                                    process, 
                                                                    account, 
                                                                    id_scope))

        def process_if_module(module, found_input_ports, found_output_ports):
            print 'processing IFFFF'
            # need to decide which path was taken?
            # check which module was executed, then know which branch was
            # taken?
            true_conn = found_input_ports['TruePort']
            false_conn = found_input_ports['FalsePort']
            true_id = true_conn.db_ports_type_index['source'].db_moduleId
            false_id = false_conn.db_ports_type_index['source'].db_moduleId
            print '$$ TRUE ID:', true_id
            print '$$ FALSE ID:', false_id
            for x,y in module_processes.iteritems():
                print x, ':', y
            if true_id in module_processes:
                cond_process = module_processes[true_id][1]
            elif false_id in module_processes:
                cond_process = module_processes[false_id][1]
            else:
                raise RuntimeError("cannot process if")
            # FIXME: assume true for now
            # eventually need to check which module_id was execed for this
            # current item exec
            dependencies.append(create_was_triggered_by(cond_process,
                                                        process,
                                                        account,
                                                        id_scope))

        if add_extras:
            print '***adding extras'
            out_upstream_artifacts = copy.copy(in_upstream_artifacts)
            out_downstream_artifacts = copy.copy(in_downstream_artifacts)
            for port_name, artifact_list in in_upstream_artifacts.iteritems():
                for artifact in artifact_list:
                    dependencies.append(create_used(process, artifact,
                                                    account, id_scope))
            for port_name, artifact_list in in_downstream_artifacts.iteritems():
                for artifact in artifact_list:
                    # conn_artifacts[(port_name, 'output')] = artifact
                    dependencies.append(create_was_generated_by(artifact,
                                                                process,
                                                                account,
                                                                id_scope))
        else:
            out_upstream_artifacts = {}
            out_downstream_artifacts = {}


        ctrl_flow_pkg = '%s.control_flow' % get_vistrails_default_pkg_prefix()
        basic_pkg = get_vistrails_basic_pkg_id()
        all_special_ports = {'%s:Map' % ctrl_flow_pkg:
                                 [{'InputPort': False, 
                                   'OutputPort': False, 
                                   'InputList': True,
                                   'FunctionPort': False},
                                  {'Result': True},
                                  process_map],
                             '%s:Group' % basic_pkg:
                                 [{},
                                  {},
                                  process_group],
                             '%s:InputPort' % basic_pkg:
                                 [{'name': False,
                                   'spec': False,
                                   'old_name': False},
                                  {},
                                  process_port_module],
                             '%s:OutputPort' % basic_pkg:
                                 [{'name': False,
                                   'spec': False,
                                   'old_name': False},
                                  {},
                                  process_port_module],
                             '%s:If' % ctrl_flow_pkg:
                                 [{'TruePort': False,
                                   'FalsePort': False},
                                  {},
                                  process_if_module],
                             }
        
        module_desc_str = module.db_package + ':' + module.db_name
        special_ports = all_special_ports.get(module_desc_str, [{}, {}, None])
        found_input_ports = {}
        found_output_ports = {}
        
        # process used_files annotations
        # process generated_tables annotations:
        for annotation in item_exec.db_annotations:
            def process_db_tuple(db_tuple):
                db_tuple = (str(db_tuple[0]),) + db_tuple[1:]
                if db_tuple not in db_artifacts:
                    artifact = create_artifact_from_db_tuple(db_tuple,
                                                             account,
                                                             id_scope)
                    artifacts.append(artifact)
                    db_artifacts[db_tuple] = artifact
                else:
                    artifact = db_artifacts[db_tuple]
                    if int(artifact.db_accounts[0].db_id[4:]) > \
                            int(account.db_id[4:]):
                        artifact.db_accounts[0] = account
                return artifact

            if annotation.db_key == 'used_files':
                used_files = eval(annotation.db_value)
                for fname in used_files:
                    if fname not in file_artifacts:
                        artifact = create_artifact_from_filename(fname,
                                                                 account,
                                                                 id_scope)
                        artifacts.append(artifact)
                        file_artifacts[fname] = artifact
                    else:
                        artifact = file_artifacts[fname]
                        if int(artifact.db_accounts[0].db_id[4:]) > \
                                int(account.db_id[4:]):
                            artifact.db_accounts[0] = account
                    dependencies.append(create_used(process, artifact,
                                                    account, id_scope))
            elif annotation.db_key == 'generated_tables':
                generated_tables = eval(annotation.db_value)
                for db_tuple in generated_tables:
                    artifact = process_db_tuple(db_tuple)
                    dependencies.append(create_was_generated_by(artifact,
                                                                process,
                                                                account,
                                                                id_scope))
            elif annotation.db_key == 'used_tables':
                used_tables = eval(annotation.db_value)
                for db_tuple in used_tables:
                    artifact = process_db_tuple(db_tuple)
                    dependencies.append(create_used(process, artifact,
                                                    account, id_scope))

        # process functions
        for function in module.db_functions:
            # FIXME let found_input_ports, found_output_ports store lists?
            if function.db_name in special_ports[0]:
                if not special_ports[0][function.db_name]:
                    found_input_ports[function.db_name] = function
                    continue
            function_t = (module.db_id, function.db_name)
            if function_t in function_artifacts:
                artifact = function_artifacts[function_t]
                if int(artifact.db_accounts[0].db_id[4:]) > \
                        int(account.db_id[4:]):
                    artifact.db_accounts[0] = account
            else:
                artifact = create_artifact_from_function(function, 
                                                         account,
                                                         id_scope)
                print 'adding artifact', artifact.db_id
                artifacts.append(artifact)
                function_artifacts[function_t] = artifact
            if function.db_name in special_ports[0]:
                found_input_ports[function.db_name] = artifact
            if function.db_name not in out_upstream_artifacts:
                out_upstream_artifacts[function.db_name] = []
            out_upstream_artifacts[function.db_name].append(artifact)
            dependencies.append(create_used(process, artifact, account,
                                            id_scope))

        # process connections
        if module.db_id in upstream_lookup:
            for conns in upstream_lookup[module.db_id].itervalues():
                for conn in conns:
                    dest = conn.db_ports_type_index['destination']
                    if dest.db_name in special_ports[0]:
                        if not special_ports[0][dest.db_name]:
                            found_input_ports[dest.db_name] = conn
                            continue
                    (artifact, in_cache) = process_connection(conn)
                    if dest.db_name in special_ports[0]:
                        found_input_ports[dest.db_name] = artifact
                    if dest.db_name not in out_upstream_artifacts:
                        out_upstream_artifacts[dest.db_name] = []
                    out_upstream_artifacts[dest.db_name].append(artifact)
                    print 'adding dependency (pa)', process.db_id, \
                        artifact.db_id
                    dependencies.append(create_used(process, artifact, 
                                                    account, id_scope))

        if item_exec.db_completed == 1:
            if module.db_id in downstream_lookup:
                # check if everything completed successfully for this?
                for conns in downstream_lookup[module.db_id].itervalues():
                    for conn in conns:
                        source = conn.db_ports_type_index['source']
                        if source.db_name in special_ports[1]:
                            if not special_ports[1][source.db_name]:
                                found_output_ports[source.db_name] = conn
                                continue
                        dest = conn.db_ports_type_index['destination']
                        dest_module = \
                            workflow.db_modules_id_index[dest.db_moduleId]
                        dest_desc_str = dest_module.db_package + ':' + \
                            dest_module.db_name
                        dest_special_ports = all_special_ports.get(dest_desc_str,
                                                                   [{}, {}, None])
                        if dest.db_name in dest_special_ports[0] and \
                                not dest_special_ports[0][dest.db_name]:
                            print 'skipping', dest.db_name
                            continue
                        (artifact, in_cache) = process_connection(conn)
                        if not in_cache:
                            if source.db_name in special_ports[1]:
                                found_output_ports[source.db_name] = artifact
                            if source.db_name not in out_downstream_artifacts:
                                out_downstream_artifacts[source.db_name] = []
                            out_downstream_artifacts[source.db_name].append(artifact)
                            print 'adding dependency (ap)', artifact.db_id, \
                                process.db_id
                            dependencies.append(create_was_generated_by(artifact, 
                                                                        process, 
                                                                        account,
                                                                        id_scope))

        if special_ports[2] is not None:
            special_ports[2](module, found_input_ports, found_output_ports)

    def process_workflow(workflow, parent_exec, account, upstream_artifacts={},
                         downstream_artifacts={}, depth=0, top_version=False):
        # create process for each module_exec
        # for each module, find parameters and upstream connections
        # tie them in
        # each connection's source port is 
        # associated with a transient data item
        # use wasDerivedBy and used relationships to tie things together
        # check run-time annotations?
        print 'processing workflow', parent_exec

        upstream_lookup = {}
        downstream_lookup = {}
        for connection in workflow.db_connections:
            source = connection.db_ports_type_index['source']
            if source.db_moduleId not in downstream_lookup:
                downstream_lookup[source.db_moduleId] = {}
            if source.db_name not in downstream_lookup[source.db_moduleId]:
                downstream_lookup[source.db_moduleId][source.db_name] = []
            downstream_lookup[source.db_moduleId][source.db_name].append(connection)

            dest = connection.db_ports_type_index['destination']
            if dest.db_moduleId not in upstream_lookup:
                upstream_lookup[dest.db_moduleId] = {}
            if dest.db_name not in upstream_lookup[dest.db_moduleId]:
                upstream_lookup[dest.db_moduleId][dest.db_name] = []
            upstream_lookup[dest.db_moduleId][dest.db_name].append(connection)

        conn_artifacts = {}
        function_artifacts = {}
        module_processes = {}
        print '  upstream_lookup:'
        lookup = upstream_lookup
        for id, name_list in lookup.iteritems():
            print '    ', id, ':', name_list.keys()

        print '  downstream_lookup:'
        lookup = downstream_lookup
        for id, name_list in lookup.iteritems():
            print '    ', id, ':', name_list.keys()
            
        # print '  upstream_lookup:', upstream_lookup
        # print '  downstream_lookup:', downstream_lookup
        if top_version:
            for workflow_exec in parent_exec.db_workflow_execs:
                if workflow_exec.db_parent_version != version:
                    continue
                conn_artifacts = {}
                function_artifacts = {}
                module_processes = {}
                upstream_artifacts = {}
                downstream_artifacts = {}
                for item_exec in workflow_exec.db_item_execs:
                    do_create_process(workflow, item_exec, account, 
                                      module_processes)
                for item_exec in workflow_exec.db_item_execs:
                    process_exec(item_exec, workflow, account,
                                 upstream_lookup, downstream_lookup,
                                 depth, conn_artifacts, function_artifacts,
                                 module_processes,
                                 upstream_artifacts, downstream_artifacts)
        else:
            for item_exec in parent_exec.db_item_execs:
                do_create_process(workflow, item_exec, account, 
                                  module_processes)
            for item_exec in parent_exec.db_item_execs:
                process_exec(item_exec, workflow, account, upstream_lookup,
                             downstream_lookup, depth, conn_artifacts,
                             function_artifacts, module_processes,
                             upstream_artifacts, downstream_artifacts)
                
    account_id = id_scope.getNewId(DBOpmAccount.vtType)
    account = DBOpmAccount(id='acct' + str(account_id),
                           value=str(0))
    accounts.append(account)
    depth_accounts[0] = account
    process_workflow(workflow, log, account, {}, {}, 0, True) 

    #print processes
    #print dependencies
    max_depth = max(depth_accounts)
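    # depth_accounts maps depth level -> account object; add_finer_depths reads
    # the depth N out of each object's first account id ('acct<N>') and also adds
    # the object to every finer account up to max_depth, unless the object is
    # excluded as a group/loop exec or as a dependency of one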
    def add_finer_depths(objs, exclude_groups=False, exclude_deps=False, 
                         p_ids=set()):
        new_p_ids = []
        for obj in objs:
            can_update = True
            if exclude_groups:
                if obj.db_value.db_value.vtType == DBGroupExec.vtType:
                    new_p_ids.append(obj.db_id)
                    can_update = False
                elif obj.db_value.db_value.vtType == DBModuleExec.vtType and \
                        len(obj.db_value.db_value.db_loop_execs) > 0:
                    new_p_ids.append(obj.db_id)
                    can_update = False
                
            if exclude_deps:
                if ((obj.vtType == DBOpmWasGeneratedBy.vtType and
                     obj.db_cause.db_id in p_ids) or 
                    (obj.vtType == DBOpmUsed.vtType and
                     obj.db_effect.db_id in p_ids)):
                    can_update = False
            if can_update:
                min_depth = int(obj.db_accounts[0].db_id[4:])
                for i in xrange(min_depth+1, max_depth+1):
                    obj.db_add_account(DBOpmAccountId(id='acct' + str(i)))
        return new_p_ids

    # FIXME: also exclude group dependencies (used, wasGeneratedBy)...
    p_ids = add_finer_depths(processes, True)
    print p_ids
    add_finer_depths(artifacts)
    add_finer_depths(dependencies, False, True, set(p_ids))

    overlaps = []
    for i in xrange(max_depth+1):
        for j in xrange(i+1, max_depth+1):
            ids = [DBOpmAccountId(id='acct' + str(i)),
                   DBOpmAccountId(id='acct' + str(j))]
            overlaps.append(DBOpmOverlaps(opm_account_ids=ids))
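    # every pair of depth accounts is declared as overlapping, since objects
    # from a coarser account were also added to all finer accounts above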

    opm_graph = DBOpmGraph(accounts=DBOpmAccounts(accounts=accounts,
                                                  opm_overlapss=overlaps),
                           processes=DBOpmProcesses(processs=processes),
                           artifacts=\
                               DBOpmArtifacts(artifacts=artifacts),
                           dependencies=\
                               DBOpmDependencies(dependencys=dependencies),
                           )
    return opm_graph
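For reference, a minimal standalone sketch of the pairwise account-overlap pattern used just above, assuming three depth levels (acct0..acct2) and that DBOpmAccountId and DBOpmOverlaps are imported as in the original module:

max_depth = 2
overlaps = []
for i in xrange(max_depth + 1):
    for j in xrange(i + 1, max_depth + 1):
        # one DBOpmOverlaps record per unordered pair of depth accounts
        overlaps.append(DBOpmOverlaps(opm_account_ids=[
            DBOpmAccountId(id='acct' + str(i)),
            DBOpmAccountId(id='acct' + str(j))]))
# yields the pairs (acct0, acct1), (acct0, acct2), (acct1, acct2)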
    def createMashupController(self, vt_controller, version, view=DummyView()):
        #print "Manager creating mashup controller ", vt_controller, version
        newvt_controller = MashupsManager.copyVistrailController(vt_controller,
                                                                view)
        mashuptrail = \
         MashupsManager.getMashuptrailforVersionInVistrailController(vt_controller,
                                                                     version)
        if mashuptrail is None:
            (p_mashuptrail, p_version) = \
                     MashupsManager.findClosestParentMashuptrail(vt_controller, 
                                                                 version)
            id_scope = IdScope(1L)
            if p_mashuptrail is not None:
                version_name = vt_controller.get_pipeline_name(p_version)
                (res, mshpv) = MashupsManager.showFoundMashupsDialog(p_mashuptrail, 
                                                            version_name)
                if res in ['Copy', 'Move']:
                    pipeline = newvt_controller.vistrail.getPipeline(version)
                    if res == 'Copy':
                        # we will copy the mashup from the parent trail and
                        # validate it against the current pipeline before
                        # adding it to the current mashup trail
                        mashuptrail = Mashuptrail(self.getNewMashuptrailId(), 
                                                  version, id_scope)
                        p_mashup = p_mashuptrail.getMashup(mshpv)
                        mashup = p_mashup.do_copy()
                        mashup.id_scope = id_scope
                        mashup.version = version
                        mashup.validateForPipeline(pipeline)
                        currVersion = mashuptrail.addVersion(
                                      parent_id=mashuptrail.getLatestVersion(),
                                      mashup=mashup, 
                                      user=vistrails.core.system.current_user(),
                                      date=vistrails.core.system.current_time())
                        mashuptrail.currentVersion = currVersion
                        mashuptrail.updateIdScope()
                        p_tag = p_mashuptrail.getTagForActionId(mshpv)
                        if p_tag == '':
                            p_tag = "<latest>"
                        tag = "Copy from %s" % p_tag
                        MashupsManager.addMashuptrailtoVistrailController(vt_controller,
                                                                          mashuptrail)    
                        
                    elif res == 'Move':
                        # we will move the parent trail and validate all mashups
                        # for the current pipeline to make sure they will be 
                        # executable for the current version

                        mashuptrail = p_mashuptrail
                        currVersion = mashuptrail.getLatestVersion()
                        mashuptrail.currentVersion = currVersion
                        mashuptrail.validateMashupsForPipeline(version, pipeline)
                        tag = None
                        
                    mashuptrail.vtVersion = version
                    mshpController = MashupController(vt_controller, 
                                                      newvt_controller, 
                                                      version, mashuptrail)
                    mshpController.setCurrentVersion(mashuptrail.currentVersion)
                    # this is to make sure the pipeline displayed in the mashup
                    # view is consistent with the list of aliases in the central
                    # panel
                    mshpController.updatePipelineAliasesFromCurrentMashup()
                    if tag is not None:
                        mshpController.updateCurrentTag(tag)
                    return mshpController
                
            mashuptrail = Mashuptrail(self.getNewMashuptrailId(), version, 
                                      id_scope)
            pipeline = newvt_controller.vistrail.getPipeline(version)
            id = id_scope.getNewId('mashup')
            mashup = Mashup(id=id, name="mashup%s"%id, vtid=vt_controller.locator, 
                        version=version)
            mashup.loadAliasesFromPipeline(pipeline, id_scope)
            currVersion = mashuptrail.addVersion(parent_id=mashuptrail.getLatestVersion(),
                                             mashup=mashup, 
                                             user=vistrails.core.system.current_user(),
                                             date=vistrails.core.system.current_time())
    
            mashuptrail.currentVersion = currVersion
            
            MashupsManager.addMashuptrailtoVistrailController(vt_controller,
                                                              mashuptrail)
            mshpController = MashupController(vt_controller,
                                              newvt_controller, 
                                              version, mashuptrail)
            mshpController.setCurrentVersion(mashuptrail.currentVersion)
            if mshpController.currentVersion == 1L:
                mshpController.updateCurrentTag("ROOT")
        else:
            #print "----> found mashuptrail ", mashuptrail.currentVersion
            mshpController = MashupController(vt_controller, 
                                              newvt_controller, 
                                              version, mashuptrail)
            mshpController.setCurrentVersion(mashuptrail.currentVersion)
            mshpController.updatePipelineAliasesFromCurrentMashup()
        
        return mshpController
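A hypothetical call sketch (the surrounding names are assumptions, not taken from the snippet): given a MashupsManager instance and an open vistrail controller, the method above returns a MashupController positioned at the mashuptrail's current version.

# hypothetical usage; 'manager' (a MashupsManager instance), 'vt_controller'
# and 'version' are assumed to be provided by the calling code
mshp_controller = manager.createMashupController(vt_controller, version)
print mshp_controller.currentVersion           # 1L for a freshly created trail
mshp_controller.updateCurrentTag('my mashup')  # re-tag, as the snippet does for "ROOT"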
Esempio n. 41
0
    def updateFunctionPort(self):
        """
        Function to be used inside the updateUpstream method of the Map module. It
        updates the module connected to the FunctionPort port, executing it in
        parallel.
        """
        nameInput = self.getInputFromPort('InputPort')
        nameOutput = self.getInputFromPort('OutputPort')
        rawInputList = self.getInputFromPort('InputList')

        # Create inputList to always have iterable elements
        # to simplify code
        if len(nameInput) == 1:
            element_is_iter = False
            inputList = [[element] for element in rawInputList]
        else:
            element_is_iter = True
            inputList = rawInputList
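        # e.g. nameInput == ['value'] and rawInputList == [1, 2, 3]
        #   -> inputList == [[1], [2], [3]] (each raw element wrapped in a list)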

        workflows = []
        module = None
        vtType = None

        # iterating through the connectors
        for connector in self.inputPorts.get('FunctionPort'):
            module = connector.obj

            # pipeline
            original_pipeline = connector.obj.moduleInfo['pipeline']

            # module
            module_id = connector.obj.moduleInfo['moduleId']
            vtType = original_pipeline.modules[module_id].vtType

            # serialize the module for each value in the list
            for i, element in enumerate(inputList):
                if element_is_iter:
                    self.element = element
                else:
                    self.element = element[0]

                # checking type and setting input in the module
                self.typeChecking(connector.obj, nameInput, inputList)
                self.setInputValues(connector.obj, nameInput, element)

                pipeline_db_module = original_pipeline.modules[module_id].do_copy()

                # transforming a subworkflow in a group
                # TODO: should we also transform inner subworkflows?
                if pipeline_db_module.is_abstraction():
                    group = Group(id=pipeline_db_module.id,
                                  cache=pipeline_db_module.cache,
                                  location=pipeline_db_module.location,
                                  functions=pipeline_db_module.functions,
                                  annotations=pipeline_db_module.annotations)

                    source_port_specs = pipeline_db_module.sourcePorts()
                    dest_port_specs = pipeline_db_module.destinationPorts()
                    for source_port_spec in source_port_specs:
                        group.add_port_spec(source_port_spec)
                    for dest_port_spec in dest_port_specs:
                        group.add_port_spec(dest_port_spec)

                    group.pipeline = pipeline_db_module.pipeline
                    pipeline_db_module = group

                # getting the highest id among existing functions to guarantee unique ids
                # TODO: can get current IdScope here?
                if pipeline_db_module.functions:
                    high_id = max(function.db_id
                                  for function in pipeline_db_module.functions)
                else:
                    high_id = 0

                # adding function and parameter to module in pipeline
                # TODO: 'pos' should not always be 0 here
                id_scope = IdScope(beginId=long(high_id+1))
                for elementValue, inputPort in izip(element, nameInput):

                    p_spec = pipeline_db_module.get_port_spec(inputPort, 'input')
                    descrs = p_spec.descriptors()
                    if len(descrs) != 1:
                        raise ModuleError(
                                self,
                                "Tuple input ports are not supported")
                    if not issubclass(descrs[0].module, Constant):
                        raise ModuleError(
                                self,
                                "Module inputs should be Constant types")
                    type = p_spec.sigstring[1:-1]

                    mod_function = ModuleFunction(id=id_scope.getNewId(ModuleFunction.vtType),
                                                  pos=0,
                                                  name=inputPort)
                    mod_param = ModuleParam(id=0L,
                                            pos=0,
                                            type=type,
                                            val=elementValue)

                    mod_function.add_parameter(mod_param)
                    pipeline_db_module.add_function(mod_function)

                # serializing module
                wf = self.serialize_module(pipeline_db_module)
                workflows.append(wf)

            # getting first connector, ignoring the rest
            break

        # IPython stuff
        try:
            rc = get_client()
        except Exception, error:
            raise ModuleError(self, "Exception while loading IPython: "
                              "%s" % error)
Esempio n. 42
0
    def execute(self, *args, **kwargs):
        """Execute the pipeline.

        Positional arguments are either input values (created from
        ``module == value``, where `module` is a Module from the pipeline and
        `value` is some value or Function instance) for the pipeline's
        InputPorts, or Module instances (to select sink modules).

        Keyword arguments are also used to set InputPort values by looking up
        inputs by name.

        Example::

           input_bound = pipeline.get_input('higher_bound')
           input_url = pipeline.get_input('url')
           sinkmodule = pipeline.get_module(32)
           pipeline.execute(sinkmodule,
                            input_bound == vt.Function(Integer, 10),
                            input_url == 'http://www.vistrails.org/',
                            resolution=15)  # kwarg: only one equal sign
        """
        sinks = set()
        inputs = {}

        reg = get_module_registry()
        InputPort_desc = reg.get_descriptor_by_name(
                get_vistrails_basic_pkg_id(),
                'InputPort')

        # Read args
        for arg in args:
            if isinstance(arg, ModuleValuePair):
                if arg.module.id in inputs:
                    raise ValueError(
                            "Multiple values set for InputPort %r" %
                            get_inputoutput_name(arg.module))
                if not reg.is_descriptor_subclass(arg.module.module_descriptor,
                                                  InputPort_desc):
                    raise ValueError("Module %d is not an InputPort" %
                                     arg.module.id)
                inputs[arg.module.id] = arg.value
            elif isinstance(arg, Module):
                sinks.add(arg.module_id)

        # Read kwargs
        for key, value in kwargs.iteritems():
            key = self.get_input(key)  # Might raise KeyError
            if key.module_id in inputs:
                raise ValueError("Multiple values set for InputPort %r" %
                                 get_inputoutput_name(key.module))
            inputs[key.module_id] = value

        reason = "API pipeline execution"
        sinks = sinks or None

        # Use controller only if no inputs were passed in
        if (not inputs and self.vistrail is not None and
                self.vistrail.current_version == self.version):
            controller = self.vistrail.controller
            results, changed = controller.execute_workflow_list([[
                    controller.locator,  # locator
                    self.version,  # version
                    self.pipeline,  # pipeline
                    DummyView(),  # view
                    None,  # custom_aliases
                    None,  # custom_params
                    reason,  # reason
                    sinks,  # sinks
                    None,  # extra_info
                    ]])
            result, = results
        else:
            pipeline = self.pipeline
            if inputs:
                id_scope = IdScope(1)
                pipeline = pipeline.do_copy(False, id_scope)

                # A hack to get ids from id_scope that we know won't collide:
                # make them negative
                id_scope.getNewId = lambda t, g=id_scope.getNewId: -g(t)
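                # getNewId now returns negated ids, which cannot collide with
                # the (positive) ids already present in the copied pipeline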

                create_module = \
                        VistrailController.create_module_from_descriptor_static
                create_function = VistrailController.create_function_static
                create_connection = VistrailController.create_connection_static
                # Fills in the ExternalPipe ports
                for module_id, values in inputs.iteritems():
                    module = pipeline.modules[module_id]
                    if not isinstance(values, (list, tuple)):
                        values = [values]

                    # Guess the type of the InputPort
                    _, sigstrings, _, _, _ = get_port_spec_info(pipeline, module)
                    sigstrings = parse_port_spec_string(sigstrings)

                    # Convert whatever we got to a list of strings, for the
                    # pipeline
                    values = [reg.convert_port_val(val, sigstring, None)
                              for val, sigstring in izip(values, sigstrings)]

                    if len(values) == 1:
                        # Create the constant module
                        constant_desc = reg.get_descriptor_by_name(
                                *sigstrings[0])
                        constant_mod = create_module(id_scope, constant_desc)
                        func = create_function(id_scope, constant_mod,
                                               'value', values)
                        constant_mod.add_function(func)
                        pipeline.add_module(constant_mod)

                        # Connect it to the ExternalPipe port
                        conn = create_connection(id_scope,
                                                 constant_mod, 'value',
                                                 module, 'ExternalPipe')
                        pipeline.db_add_connection(conn)
                    else:
                        raise RuntimeError("TODO : create tuple")

            interpreter = get_default_interpreter()
            result = interpreter.execute(pipeline,
                                         reason=reason,
                                         sinks=sinks)

        if result.errors:
            raise ExecutionErrors(self, result)
        else:
            return ExecutionResults(self, result)