def testRunWorkflow(self):
        """Parse workflow1.xml, execute it to completion, and verify that
        the tasks were reached in the expected order."""
        xml_file = os.path.join(os.path.dirname(__file__),
                                'xml/openwfe/workflow1.xml')
        spec = self.reader.parse_file(xml_file)[0]

        # Instrument every task spec so the taken path gets recorded.
        for task_spec in spec.task_specs.values():
            task_spec.reached_event.connect(self.on_reached_cb)
            task_spec.completed_event.connect(on_complete_cb, self.taken_path)

        workflow = Workflow(spec)
        try:
            workflow.complete_all()
        except:
            # Dump the workflow state for debugging, then propagate.
            workflow.dump()
            raise

        expected = [(1, 'Start'), (2, 'concurrence_1'), (3, 'task_a1'),
                    (4, 'task_a2'), (5, 'if_condition_1'), (6, 'task_a3'),
                    (7, 'if_condition_1_end'), (8, 'if_condition_2'),
                    (9, 'task_a5'), (10, 'if_condition_2_end'), (3, 'task_b1'),
                    (4, 'task_b2'), (5, 'concurrence_1_end'), (6, 'task_c1'),
                    (7, 'task_c2'), (8, 'End')]

        assert_same_path(self, expected, self.taken_path)
# Example #2 (score: 0)
    def testRunWorkflow(self):
        """Run workflow1.xml through the legacy Job API and verify the
        order in which the tasks were taken."""
        xml_path = os.path.join(os.path.dirname(__file__),
                                'xml/openwfe/workflow1.xml')
        workflows = self.reader.parse_file(xml_path)
        spec = workflows[0]

        # Hook every task so reached/completed events record the path.
        for task in spec.tasks.values():
            task.signal_connect('reached',   self.on_reached_cb)
            task.signal_connect('completed', on_complete_cb, self.taken_path)

        job = Job(spec)
        try:
            job.complete_all()
        except:
            job.dump()  # show the job state before re-raising
            raise

        expected = [( 1, 'Start'),
                    ( 2, 'concurrence_1'),
                    ( 3, 'task_a1'),
                    ( 4, 'task_a2'),
                    ( 5, 'if_condition_1'),
                    ( 6, 'task_a3'),
                    ( 7, 'if_condition_1_end'),
                    ( 8, 'if_condition_2'),
                    ( 9, 'task_a5'),
                    (10, 'if_condition_2_end'),
                    ( 3, 'task_b1'),
                    ( 4, 'task_b2'),
                    ( 5, 'concurrence_1_end'),
                    ( 6, 'task_c1'),
                    ( 7, 'task_c2'),
                    ( 8, 'End')]

        assert_same_path(self, expected, self.taken_path)
    def testRunWorkflow(self):
        """Load workflow1.xml, run the workflow to completion, and check
        that the recorded path matches the expected one."""
        xml_file = os.path.join(os.path.dirname(__file__),
                                'xml/openwfe/workflow1.xml')
        spec = self.reader.parse_file(xml_file)[0]

        # Connect callbacks that record which tasks were visited.
        for task_spec in spec.task_specs.values():
            task_spec.reached_event.connect(self.on_reached_cb)
            task_spec.completed_event.connect(on_complete_cb, self.taken_path)

        workflow = Workflow(spec)
        try:
            workflow.complete_all()
        except:
            workflow.dump()  # dump state for debugging, then propagate
            raise

        expected = [( 1, 'Start'),
                    ( 2, 'concurrence_1'),
                    ( 3, 'task_a1'),
                    ( 4, 'task_a2'),
                    ( 5, 'if_condition_1'),
                    ( 6, 'task_a3'),
                    ( 7, 'if_condition_1_end'),
                    ( 8, 'if_condition_2'),
                    ( 9, 'task_a5'),
                    (10, 'if_condition_2_end'),
                    ( 3, 'task_b1'),
                    ( 4, 'task_b2'),
                    ( 5, 'concurrence_1_end'),
                    ( 6, 'task_c1'),
                    ( 7, 'task_c2'),
                    ( 8, 'End')]

        assert_same_path(self, expected, self.taken_path)
# Example #4 (score: 0)
    def testPickleSingle(self, workflow, job):
        """Pickle a partially executed job, restore it, and verify that the
        state survives the round trip and the job still runs to completion.

        workflow -- the workflow whose tasks are instrumented
        job      -- the job instance that is stepped, pickled, and resumed
        """
        self.taken_path = {'reached':   [],
                           'completed': []}
        for name, task in workflow.tasks.iteritems():
            task.signal_connect('reached',
                                on_reached_cb,
                                self.taken_path['reached'])
            task.signal_connect('completed',
                                on_complete_cb,
                                self.taken_path['completed'])

        # Execute a random number of steps.
        for i in xrange(randint(0, len(workflow.tasks))):
            job.complete_next()

        # Store the job in a file. 'with' guarantees the handle is closed
        # even if pickling raises (the original leaked it in that case).
        with open(self.data_file, 'wb') as output:
            pickle.dump(job, output, -1)
        before = job.get_dump()

        # Load the job back from the file and delete the file.
        # (Renamed from 'input' to avoid shadowing the builtin.)
        with open(self.data_file, 'rb') as infile:
            job = pickle.load(infile)
        os.remove(self.data_file)
        after = job.get_dump()

        # Make sure that the state of the job did not change.
        self.assert_(before == after, 'Before:\n' + before + '\n' \
                                    + 'After:\n'  + after  + '\n')

        # Re-connect signals, because the pickle dump now only contains a
        # copy of self.taken_path.
        for name, task in job.workflow.tasks.iteritems():
            task.signal_disconnect('reached',   on_reached_cb)
            task.signal_disconnect('completed', on_complete_cb)
            task.signal_connect('reached',
                                on_reached_cb,
                                self.taken_path['reached'])
            task.signal_connect('completed',
                                on_complete_cb,
                                self.taken_path['completed'])

        # Run the rest of the workflow.
        job.complete_all()
        after = job.get_dump()
        self.assert_(job.is_completed(), 'Job done, but not complete:' + after)
        assert_same_path(self,
                         self.expected_path,
                         self.taken_path['completed'])
# Example #5 (score: 0)
    def doPickleSingle(self, workflow):
        """Pickle a partially executed workflow, restore it, and verify that
        its state is unchanged and it can still run to completion.

        workflow -- the Workflow instance to step, pickle, and resume
        """
        self.taken_path = {'reached': [], 'completed': []}
        for name, task in workflow.spec.task_specs.iteritems():
            task.reached_event.connect(on_reached_cb,
                                       self.taken_path['reached'])
            task.completed_event.connect(on_complete_cb,
                                         self.taken_path['completed'])

        # Execute a random number of steps.
        for i in xrange(randint(0, len(workflow.spec.task_specs))):
            workflow.complete_next()

        # Store the workflow instance in a file. 'with' guarantees the
        # handle is closed even if pickling raises (the original leaked it).
        with open(self.data_file, 'wb') as output:
            pickle.dump(workflow, output, -1)
        before = workflow.get_dump()

        # Load the workflow instance from a file and delete the file.
        # (Renamed from 'input' to avoid shadowing the builtin.)
        with open(self.data_file, 'rb') as infile:
            workflow = pickle.load(infile)
        os.remove(self.data_file)
        after = workflow.get_dump()

        # Make sure that the state of the workflow did not change.
        self.assert_(before == after, 'Before:\n' + before + '\n' \
                                    + 'After:\n'  + after  + '\n')

        # Re-connect signals, because the pickle dump now only contains a
        # copy of self.taken_path.
        for name, task in workflow.spec.task_specs.iteritems():
            task.reached_event.disconnect(on_reached_cb)
            task.completed_event.disconnect(on_complete_cb)
            task.reached_event.connect(on_reached_cb,
                                       self.taken_path['reached'])
            task.completed_event.connect(on_complete_cb,
                                         self.taken_path['completed'])

        # Run the rest of the workflow.
        workflow.complete_all()
        after = workflow.get_dump()
        self.assert_(workflow.is_completed(), 'Workflow not complete:' + after)
        assert_same_path(self, self.expected_path,
                         self.taken_path['completed'])