Example #1
import os
import pickle

def getJob():
    # Load a previously pickled job if one exists; otherwise create a
    # fresh Job from the workflow definition.
    global data_file, workflow_file
    reader   = XmlReader()
    workflow = reader.parse_file(workflow_file).pop()
    if os.access(data_file, os.R_OK):
        input = open(data_file, 'rb')
        job   = pickle.load(input)
        input.close()
        os.remove(data_file)  # consume the saved state so it is not reloaded twice
    else:
        job = Job(workflow)
    return job
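getJob() above deletes the pickle file as soon as it has been loaded, so something must write that file for a job to survive between runs. Below is a minimal counterpart sketch, assuming the same module-level data_file and the pickling shown in Example #13; the saveJob name itself is hypothetical.

import pickle

def saveJob(job):
    # Hypothetical counterpart to getJob(): serialize the running job so
    # that the next getJob() call resumes it instead of starting over.
    global data_file
    output = open(data_file, 'wb')
    pickle.dump(job, output, -1)  # -1 selects the highest pickle protocol
    output.close()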
Example #2
class TaskTest(unittest.TestCase):
    def setUp(self):
        self.connectDB()
        self.reader = XmlReader()
        self.input = os.path.dirname(__file__) + "/xml/anlage_benutzer.xml"
        self.workflow = self.reader.parse_file(self.input).pop()

    def testFileExists(self):
        assert(os.path.isfile(self.input))
        assert(os.access(self.input, os.R_OK))

    def testReader(self):
        self.reader.parse_file(self.input)
        
    def testTask(self):
        self.reader.parse_file(self.input).pop()

    def testJob(self):
        self.job = Job(self.workflow)
        for name in self.workflow.tasks:
            self.workflow.tasks[name].signal_connect('reached',   self.on_reached_cb)
            self.workflow.tasks[name].signal_connect('completed', self.on_complete_cb)
        while not self.job.is_completed():
            self.job.complete_next()


    def connectDB(self):
        global engine, db

        host = "build-lenny-32.intranet.gonicus.de"
        db_name = "spiff"
        username = "******"
        password = "******"

        # Connect to MySQL.
        auth        = username + ':' + password
        dbn         = 'mysql://' + auth + '@' + host + '/' + db_name
        engine = create_engine(dbn)
        clear_mappers()

    def on_reached_cb(self, job, task):
        # print "Reached Task: %s" % task.get_name()
        pass

    def on_complete_cb(self, job, task):
        # print "Completed Task: %s" % task.get_name()
        pass
Example #3
def setUp(self):
    Task.id_pool = 0
    Task.thread_id_pool = 0
    self.xml_path = ['xml/spiff/control-flow/',
                     'xml/spiff/data/',
                     'xml/spiff/resource/']
    self.reader   = XmlReader()
    self.wf       = None
Example #4
    def _on_ready_before_hook(self, my_task):
        file          = valueof(my_task, self.file)
        xml_reader    = XmlReader()
        workflow_list = xml_reader.parse_file(file)
        workflow      = workflow_list[0]
        outer_job     = my_task.job.outer_job
        subjob        = SpiffWorkflow.Job(workflow, parent = outer_job)
        subjob.signal_connect('completed', self._on_subjob_completed, my_task)

        # Integrate the tree of the subjob into the tree of this job:
        # mark this task's own outputs FUTURE, pass attributes down to
        # them, and splice the subjob's task tree in ahead of them.
        my_task._update_children(self.outputs, Task.FUTURE)
        for child in my_task.children:
            child._inherit_attributes()
        for child in subjob.task_tree.children:
            my_task.children.insert(0, child)
            child.parent = my_task

        my_task._set_internal_attribute(subjob = subjob)
        return True
Example #5
def __init__(self, db):
    """
    Instantiates a new Driver.

    @type  db: object
    @param db: An sqlalchemy database connection.
    @rtype:  Driver
    @return: The new instance.
    """
    self.db        = DB(db)
    self.xmlreader = XmlReader()
Example #6
class XmlReaderTest(WorkflowTest):
    def setUp(self):
        WorkflowTest.setUp(self)
        self.reader = XmlReader()


    def testParseString(self):
        self.assertRaises(ExpatError,
                          self.reader.parse_string,
                          '')
        self.reader.parse_string('<xml></xml>')


    def testParseFile(self):
        # File not found.
        self.assertRaises(IOError,
                          self.reader.parse_file,
                          'foo')

        # 0 byte sized file.
        file = os.path.join(os.path.dirname(__file__), 'xml', 'empty1.xml')
        self.assertRaises(ExpatError, self.reader.parse_file, file)

        # File containing only "<xml></xml>".
        file = os.path.join(os.path.dirname(__file__), 'xml', 'empty2.xml')
        self.reader.parse_file(file)

        # Read a complete workflow.
        file = os.path.join(os.path.dirname(__file__), 'xml', 'spiff', 'workflow1.xml')
        self.reader.parse_file(file)


    def testRunWorkflow(self):
        file = os.path.join(os.path.dirname(__file__), 'xml', 'spiff', 'workflow1.xml')
        workflow_list = self.reader.parse_file(file)
        for wf in workflow_list:
            self.runWorkflow(wf)
Example #7
def setUp(self):
    WorkflowTest.setUp(self)
    self.reader = XmlReader()
Example #8
class PatternTest(unittest.TestCase):
    def setUp(self):
        Task.id_pool = 0
        Task.thread_id_pool = 0
        self.xml_path = ['xml/spiff/control-flow/',
                         'xml/spiff/data/',
                         'xml/spiff/resource/']
        self.reader   = XmlReader()
        self.wf       = None


    def testPattern(self):
        for dirname in self.xml_path:
            dirname = os.path.join(os.path.dirname(__file__), dirname)
            for filename in os.listdir(dirname):
                if not filename.endswith('.xml'):
                    continue
                self.runFile(os.path.join(dirname, filename))

    def runFile(self, xml_filename):
        try:
            #print '\n%s: ok' % xml_filename,
            workflow_list = self.reader.parse_file(xml_filename)
            self.runWorkflow(workflow_list[0], xml_filename)
        except:
            print '%s:' % xml_filename
            raise

    def runWorkflow(self, wf, xml_filename):
        taken_path = []
        for name in wf.tasks:
            wf.tasks[name].signal_connect('reached',   on_reached_cb,  taken_path)
            wf.tasks[name].signal_connect('completed', on_complete_cb, taken_path)

        # Execute all tasks within the Job.
        job = Job(wf)
        self.assert_(not job.is_completed(), 'Job is complete before start')
        try:
            job.complete_all(False)
        except:
            job.task_tree.dump()
            raise

        #job.task_tree.dump()
        self.assert_(job.is_completed(),
                     'complete_all() returned, but job is not complete\n'
                   + job.task_tree.get_dump())

        # Make sure that no tasks were left in the READY state.
        for node in Task.Iterator(job.task_tree, Task.READY):
            job.task_tree.dump()
            raise Exception('Node with state READY: %s' % node.name)

        # Check whether the correct route was taken.
        filename = xml_filename + '.path'
        if os.path.exists(filename):
            file     = open(filename, 'r')
            expected = file.read()
            file.close()
            taken_path = '\n'.join(taken_path) + '\n'
            error      = '%s:\n'       % xml_filename
            error     += 'Expected:\n'
            error     += '%s\n'        % expected
            error     += 'but got:\n'
            error     += '%s\n'        % taken_path
            self.assert_(taken_path == expected, error)

        # Check attribute availability.
        filename = xml_filename + '.data'
        if os.path.exists(filename):
            file     = open(filename, 'r')
            expected = file.read()
            file.close()
            result   = job.get_attribute('data', '')
            error   = '%s:\n'       % xml_filename
            error   += 'Expected:\n'
            error   += '%s\n'        % expected
            error   += 'but got:\n'
            error   += '%s\n'        % result
            self.assert_(result == expected, error)
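The '.path' companion files compared above are plain text: the names collected by the signal callbacks, joined by newlines, with a trailing newline. A hedged helper for producing such a file from a recorded run, assuming the callbacks append one task name per event to the list they are passed; the record_path name is hypothetical.

def record_path(xml_filename, taken_path):
    # Write the route actually taken in exactly the format that
    # runWorkflow() compares against xml_filename + '.path'.
    path_file = open(xml_filename + '.path', 'w')
    path_file.write('\n'.join(taken_path) + '\n')
    path_file.close()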
Example #9
def setUp(self):
    self.xml_path = ['xml/spiff/control-flow/',
                     'xml/spiff/data/',
                     'xml/spiff/resource/']
    self.reader   = XmlReader()
    self.wf       = None
Example #10
class Driver(object):
    """
    A driver provides an API for storing and loading workflows, receiving
    information regarding running Jobs, and for driving the workflow
    execution.
    """
    
    def __init__(self, db):
        """
        Instantiates a new Driver.
        
        @type  db: object
        @param db: An sqlalchemy database connection.
        @rtype:  Driver
        @return: The new instance.
        """
        self.db        = DB(db)
        self.xmlreader = XmlReader()


    def install(self):
        """
        Installs (or upgrades) the workflow server.

        @rtype:  Boolean
        @return: True on success, False otherwise.
        """
        return self.db.install()


    def uninstall(self):
        """
        Uninstall the workflow engine. This also permanently removes all data,
        history, and running jobs. Use with care.

        @rtype:  Boolean
        @return: True on success, False otherwise.
        """
        return self.db.uninstall()


    def get_workflow_info(self, **filter):
        """
        Returns the WorkflowInfo objects that match the given criteria.

        @rtype:  [WorkflowInfo]
        @return: A list of WorkflowInfo objects from the database.
        """
        return self.db.get_workflow_info(**filter)


    def save_workflow_info(self, object):
        """
        Store the WorkflowInfo in the database.

        @rtype:  Boolean
        @return: True on success, False otherwise.
        """
        return self.db.save(object)


    def delete_workflow_info(self, object):
        """
        Delete the WorkflowInfo from the database.

        @rtype:  Boolean
        @return: True on success, False otherwise.
        """
        return self.db.delete(object)


    def create_job(self, workflow_info):
        """
        Creates an instance of the given workflow.

        @rtype:  JobInfo
        @return: The JobInfo for the newly created workflow instance.
        """
        if workflow_info is None:
            raise WorkflowServerException('workflow_info argument is None')
        if workflow_info.id is None:
            raise WorkflowServerException('workflow_info must be saved first')
        workflow = self.xmlreader.parse_string(workflow_info.xml)
        job      = Job(workflow[0])
        job_info = JobInfo(workflow_info.id, job)
        self.__save_job_info(job_info)
        return job_info


    def get_job_info(self, **filter):
        """
        Returns the workflow instances that match the given criteria.

        @rtype:  [JobInfo]
        @return: A list of JobInfo objects from the database.
        """
        return self.db.get_job_info(**filter)


    def __save_job_info(self, job_info):
        self.db.save(job_info)
        for node in job_info.instance.task_tree:
            task_info = self.get_task_info(job_id  = job_info.id,
                                           node_id = node.id)
            if len(task_info) == 1:
                task_info = task_info[0]
            elif len(task_info) == 0:
                task_info = TaskInfo(job_info.id, node)
            else:
                raise WorkflowServerException('More than one task found')
            task_info.status = node.state
            self.db.save(task_info)


    def delete_job_info(self, object):
        """
        Delete the workflow instance from the database.

        @rtype:  Boolean
        @return: True on success, False otherwise.
        """
        return self.db.delete(object)


    def get_task_info(self, **filter):
        """
        Returns the tasks that match the given criteria.

        @rtype:  [TaskInfo]
        @return: A list of TaskInfo objects from the database.
        """
        return self.db.get_task_info(**filter)


    def execute_task(self, task_info):
        if task_info is None:
            raise WorkflowServerException('task_info argument is None')
        if task_info.id is None:
            raise WorkflowServerException('task_info must be saved first')
        if (task_info.status & task_info.WAITING) == 0:
            raise WorkflowServerException('task is not in WAITING state')
        if task_info.job_id is None:
            raise WorkflowServerException('task_info must be associated with a job')
        job_info_list = self.get_job_info(id = task_info.job_id)
        if len(job_info_list) == 0:
            raise WorkflowServerException('Job not found')
        elif len(job_info_list) > 1:
            raise WorkflowServerException('Fatal error: More than one Job found')

        job_info = job_info_list[0]
        if job_info.status == job_info.COMPLETED:
            raise WorkflowServerException('Job is already completed')
        result = job_info.instance.execute_task_from_id(task_info.node_id)
        self.__save_job_info(job_info)
        return result
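Taken together, the methods above suggest the following call sequence. This is a usage sketch, not code from the project: the sqlite connection URL and the WorkflowInfo(name, xml) constructor signature are assumptions, while the Driver calls and the WAITING status check mirror the class above.

from sqlalchemy import create_engine

db     = create_engine('sqlite:///:memory:')    # assumed connection URL
driver = Driver(db)
driver.install()                                # create or upgrade the schema

xml_string = open('workflow1.xml').read()       # a workflow definition
info       = WorkflowInfo('my_workflow', xml_string)  # assumed constructor
driver.save_workflow_info(info)                 # create_job() needs a saved id

job_info = driver.create_job(info)
for task_info in driver.get_task_info(job_id = job_info.id):
    if task_info.status & task_info.WAITING:
        driver.execute_task(task_info)          # advances the job, re-saves it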
Example #11
def setUp(self):
    self.connectDB()
    self.reader = XmlReader()
    self.input = os.path.dirname(__file__) + "/xml/anlage_benutzer.xml"
    self.workflow = self.reader.parse_file(self.input).pop()
Example #12
def setUp(self):
    WorkflowTest.setUp(self)
    self.reader     = XmlReader()
    self.data_file  = 'data.pkl'
    self.taken_path = None
Example #13
class PersistenceTest(WorkflowTest):
    def setUp(self):
        WorkflowTest.setUp(self)
        self.reader     = XmlReader()
        self.data_file  = 'data.pkl'
        self.taken_path = None


    def doPickleSingle(self, workflow, job):
        # Helper driven by testPickle(); not named test* so that the
        # unittest runner does not invoke it without arguments.
        self.taken_path = {'reached':   [],
                           'completed': []}
        for name, task in workflow.tasks.iteritems():
            task.signal_connect('reached',
                                on_reached_cb,
                                self.taken_path['reached'])
            task.signal_connect('completed',
                                on_complete_cb,
                                self.taken_path['completed'])

        # Execute a random number of steps.
        for i in xrange(randint(0, len(workflow.tasks))):
            job.complete_next()
    
        # Store the workflow instance in a file.
        output = open(self.data_file, 'wb')
        pickle.dump(job, output, -1)
        output.close()
        before = job.get_dump()

        # Load the workflow instance from a file and delete the file.
        input = open(self.data_file, 'rb')
        job   = pickle.load(input)
        input.close()
        os.remove(self.data_file)
        after = job.get_dump()

        # Make sure that the state of the job did not change.
        self.assert_(before == after, 'Before:\n' + before + '\n' \
                                    + 'After:\n'  + after  + '\n')

        # Re-connect signals, because the pickle dump now only contains a 
        # copy of self.taken_path.
        for name, task in job.workflow.tasks.iteritems():
            task.signal_disconnect('reached',   on_reached_cb)
            task.signal_disconnect('completed', on_complete_cb)
            task.signal_connect('reached',
                                on_reached_cb,
                                self.taken_path['reached'])
            task.signal_connect('completed',
                                on_complete_cb,
                                self.taken_path['completed'])

        # Run the rest of the workflow.
        job.complete_all()
        after = job.get_dump()
        self.assert_(job.is_completed(), 'Job done, but not complete:' + after)
        assert_same_path(self,
                         self.expected_path,
                         self.taken_path['completed'])


    def testPickle(self):
        # Read a complete workflow.
        file = os.path.join(os.path.dirname(__file__), 'xml/spiff/workflow1.xml')

        for i in xrange(5):
            workflow_list = self.reader.parse_file(file)
            wf            = workflow_list[0]
            job           = Job(wf)
            self.doPickleSingle(wf, job)