def setUp(self):
    """Reset the global task id counters and prepare an XmlReader.

    Resetting the class-level id pools makes task ids deterministic
    for every test run; self.xml_path lists the directories holding
    the workflow-pattern XML fixtures.
    """
    Task.id_pool = 0
    Task.thread_id_pool = 0
    self.xml_path = ['xml/spiff/control-flow/',
                     'xml/spiff/data/',
                     'xml/spiff/resource/']
    self.reader = XmlReader()
class XmlReaderTest(WorkflowTest):
    """Tests XmlReader.parse_string(), parse_file() and running a parsed spec."""

    def setUp(self):
        WorkflowTest.setUp(self)
        self.reader = XmlReader()

    def testParseString(self):
        # An empty document is not well-formed XML.
        self.assertRaises(ExpatError, self.reader.parse_string, '')
        self.reader.parse_string('<xml></xml>')

    def testParseFile(self):
        # File not found.
        self.assertRaises(IOError, self.reader.parse_file, 'foo')

        # 0 byte sized file.
        # ('path' rather than 'file' to avoid shadowing the builtin.)
        path = os.path.join(os.path.dirname(__file__), 'xml', 'empty1.xml')
        self.assertRaises(ExpatError, self.reader.parse_file, path)

        # File containing only "<xml></xml>".
        path = os.path.join(os.path.dirname(__file__), 'xml', 'empty2.xml')
        self.reader.parse_file(path)

        # Read a complete workflow.
        path = os.path.join(os.path.dirname(__file__),
                            'xml', 'spiff', 'workflow1.xml')
        self.reader.parse_file(path)

    def testRunWorkflow(self):
        path = os.path.join(os.path.dirname(__file__),
                            'xml', 'spiff', 'workflow1.xml')
        # parse_file() may yield several specs; run each of them.
        for wf_spec in self.reader.parse_file(path):
            self._runWorkflow(wf_spec)
def _on_ready_before_hook(self, my_task):
    """Instantiate the subworkflow and graft its task tree into my_task.

    Parses the XML file named by self.file (resolved against my_task's
    attributes), instantiates the first workflow spec found, and inserts
    the subworkflow's top-level tasks as children of my_task so that the
    outer workflow executes them inline. Always returns True.
    """
    file = valueof(my_task, self.file)
    xml_reader = XmlReader()
    workflow_list = xml_reader.parse_file(file)
    workflow = workflow_list[0]
    outer_workflow = my_task.workflow.outer_workflow
    subworkflow = SpiffWorkflow.Workflow(workflow, parent = outer_workflow)
    # Get notified once the subworkflow has finished.
    subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)

    # Integrate the tree of the subworkflow into the tree of this workflow.
    my_task._update_children(self.outputs, Task.FUTURE)
    for child in my_task.children:
        child._inherit_attributes()
    # Prepend the subworkflow's root tasks so they execute before this
    # task spec's regular outputs.
    for child in subworkflow.task_tree.children:
        my_task.children.insert(0, child)
        child.parent = my_task
    my_task._set_internal_attribute(subworkflow = subworkflow)
    return True
def _on_ready_before_hook(self, my_task):
    """Load the referenced subworkflow and splice its tasks into my_task.

    The first spec from the parsed XML file becomes a nested Workflow
    whose root tasks are prepended to my_task's children; completion of
    the subworkflow is reported back via _on_subworkflow_completed.
    """
    reader = XmlReader()
    spec = reader.parse_file(valueof(my_task, self.file))[0]
    outer = my_task.workflow.outer_workflow
    subworkflow = SpiffWorkflow.Workflow(spec, parent=outer)
    subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)

    # Splice the subworkflow's task tree into this workflow's tree.
    my_task._update_children(self.outputs, Task.FUTURE)
    for kid in my_task.children:
        kid._inherit_attributes()
    for kid in subworkflow.task_tree.children:
        my_task.children.insert(0, kid)
        kid.parent = my_task
    my_task._set_internal_attribute(subworkflow=subworkflow)
    return True
def setUp(self):
    """Run the base-class setup and create a fresh XmlReader."""
    WorkflowTest.setUp(self)
    self.reader = XmlReader()
def setUp(self):
    """Prepare the reader, the pickle target file name, and path recorder."""
    WorkflowTest.setUp(self)
    self.reader = XmlReader()
    # File used to pickle/unpickle workflow instances during the test.
    self.data_file = 'data.pkl'
    # Filled in by the test with the task path that was actually taken.
    self.taken_path = None
class PersistenceTest(WorkflowTest):
    """Checks that a running Workflow survives a pickle round trip unchanged."""

    def setUp(self):
        WorkflowTest.setUp(self)
        self.reader = XmlReader()
        # File used to pickle/unpickle workflow instances during the test.
        self.data_file = 'data.pkl'
        self.taken_path = None

    def doPickleSingle(self, workflow):
        """Run some random steps, pickle and restore the workflow, finish it.

        Asserts that the workflow dump is identical before and after the
        round trip and that the completed path equals self.expected_path.
        """
        self.taken_path = {'reached':   [],
                           'completed': []}
        for name, task in workflow.spec.task_specs.iteritems():
            task.reached_event.connect(on_reached_cb, self.taken_path['reached'])
            task.completed_event.connect(on_complete_cb, self.taken_path['completed'])

        # Execute a random number of steps.
        for i in xrange(randint(0, len(workflow.spec.task_specs))):
            workflow.complete_next()

        # Store the workflow instance in a file. 'with' guarantees the
        # handle is closed even if pickling raises (the original leaked
        # the handle in that case).
        with open(self.data_file, 'wb') as output:
            pickle.dump(workflow, output, -1)
        before = workflow.get_dump()

        # Load the workflow instance from the file and delete the file.
        # ('handle' rather than 'input' to avoid shadowing the builtin.)
        with open(self.data_file, 'rb') as handle:
            workflow = pickle.load(handle)
        os.remove(self.data_file)
        after = workflow.get_dump()

        # Make sure that the state of the workflow did not change.
        self.assert_(before == after, 'Before:\n' + before + '\n'
                                      + 'After:\n' + after + '\n')

        # Re-connect signals, because the pickle dump now only contains a
        # copy of self.taken_path.
        for name, task in workflow.spec.task_specs.iteritems():
            task.reached_event.disconnect(on_reached_cb)
            task.completed_event.disconnect(on_complete_cb)
            task.reached_event.connect(on_reached_cb, self.taken_path['reached'])
            task.completed_event.connect(on_complete_cb, self.taken_path['completed'])

        # Run the rest of the workflow.
        workflow.complete_all()
        after = workflow.get_dump()
        self.assert_(workflow.is_completed(), 'Workflow not complete:' + after)
        assert_same_path(self, self.expected_path, self.taken_path['completed'])

    def testPickle(self):
        # Read a complete workflow and pickle it at several random points.
        path = os.path.join(os.path.dirname(__file__),
                            'xml/spiff/workflow1.xml')
        for i in xrange(5):
            wf_spec_list = self.reader.parse_file(path)
            wf_spec = wf_spec_list[0]
            workflow = Workflow(wf_spec)
            self.doPickleSingle(workflow)
class PatternTest(unittest.TestCase):
    """Runs every workflow-pattern XML fixture and verifies the path taken."""

    def setUp(self):
        # Reset the class-level id pools so task ids are deterministic.
        Task.id_pool = 0
        Task.thread_id_pool = 0
        self.xml_path = ['xml/spiff/control-flow/',
                         'xml/spiff/data/',
                         'xml/spiff/resource/']
        self.reader = XmlReader()

    def testPattern(self):
        for dirname in self.xml_path:
            dirname = os.path.join(os.path.dirname(__file__), dirname)
            for filename in os.listdir(dirname):
                if not filename.endswith('.xml'):
                    continue
                self.runFile(os.path.join(dirname, filename))

    def runFile(self, xml_filename):
        """Parse one fixture file and run its first workflow spec."""
        try:
            workflow_list = self.reader.parse_file(xml_filename)
            self.runWorkflow(workflow_list[0], xml_filename)
        except:
            # Deliberately broad: label the failing fixture, then re-raise.
            print('%s:' % xml_filename)
            raise

    def runWorkflow(self, wf_spec, xml_filename):
        """Execute wf_spec; compare the taken path and workflow data
        against the optional <fixture>.path and <fixture>.data files."""
        taken_path = []
        for name in wf_spec.task_specs:
            wf_spec.task_specs[name].reached_event.connect(on_reached_cb, taken_path)
            wf_spec.task_specs[name].completed_event.connect(on_complete_cb, taken_path)

        # Execute all tasks within the Workflow.
        workflow = Workflow(wf_spec)
        self.assert_(not workflow.is_completed(),
                     'Workflow is complete before start')
        try:
            workflow.complete_all(False)
        except:
            workflow.task_tree.dump()
            raise

        self.assert_(workflow.is_completed(),
                     'complete_all() returned, but workflow is not complete\n'
                     + workflow.task_tree.get_dump())

        # Make sure that there are no READY tasks left in the tree.
        for thetask in Task.Iterator(workflow.task_tree, Task.READY):
            workflow.task_tree.dump()
            raise Exception('Task with state READY: %s' % thetask.name)

        # Check whether the correct route was taken. 'with' closes the
        # handle even on assertion failure; 'fp' avoids shadowing the
        # builtin 'file'.
        filename = xml_filename + '.path'
        if os.path.exists(filename):
            with open(filename, 'r') as fp:
                expected = fp.read()
            taken_path = '\n'.join(taken_path) + '\n'
            # Label the error with the fixture filename (the original used
            # the leftover loop variable 'name', i.e. an arbitrary task).
            error = '%s:\n' % xml_filename
            error += 'Expected:\n'
            error += '%s\n' % expected
            error += 'but got:\n'
            error += '%s\n' % taken_path
            self.assert_(taken_path == expected, error)

        # Check attribute availability.
        filename = xml_filename + '.data'
        if os.path.exists(filename):
            with open(filename, 'r') as fp:
                expected = fp.read()
            result = workflow.get_attribute('data', '')
            error = '%s:\n' % xml_filename
            error += 'Expected:\n'
            error += '%s\n' % expected
            error += 'but got:\n'
            error += '%s\n' % result
            self.assert_(result == expected, error)
def setUp(self):
    """Reset the global task id counters and prepare an XmlReader."""
    # Resetting the class-level pools keeps task ids deterministic.
    Task.id_pool = 0
    Task.thread_id_pool = 0
    # Directories containing the workflow-pattern XML fixtures.
    self.xml_path = ["xml/spiff/control-flow/",
                     "xml/spiff/data/",
                     "xml/spiff/resource/"]
    self.reader = XmlReader()
class PatternTest(unittest.TestCase):
    """Runs every workflow-pattern XML fixture and verifies the path taken."""

    def setUp(self):
        # Reset the class-level id pools so task ids are deterministic.
        Task.id_pool = 0
        Task.thread_id_pool = 0
        self.xml_path = ["xml/spiff/control-flow/",
                         "xml/spiff/data/",
                         "xml/spiff/resource/"]
        self.reader = XmlReader()

    def testPattern(self):
        for dirname in self.xml_path:
            dirname = os.path.join(os.path.dirname(__file__), dirname)
            for filename in os.listdir(dirname):
                if not filename.endswith(".xml"):
                    continue
                self.runFile(os.path.join(dirname, filename))

    def runFile(self, xml_filename):
        """Parse one fixture file and run its first workflow spec."""
        try:
            workflow_list = self.reader.parse_file(xml_filename)
            self.runWorkflow(workflow_list[0], xml_filename)
        except:
            # Deliberately broad: label the failing fixture, then re-raise.
            print("%s:" % xml_filename)
            raise

    def runWorkflow(self, wf_spec, xml_filename):
        """Execute wf_spec; compare the taken path and workflow data
        against the optional <fixture>.path and <fixture>.data files."""
        taken_path = []
        for name in wf_spec.task_specs:
            wf_spec.task_specs[name].reached_event.connect(on_reached_cb, taken_path)
            wf_spec.task_specs[name].completed_event.connect(on_complete_cb, taken_path)

        # Execute all tasks within the Workflow.
        workflow = Workflow(wf_spec)
        self.assert_(not workflow.is_completed(),
                     "Workflow is complete before start")
        try:
            workflow.complete_all(False)
        except:
            workflow.task_tree.dump()
            raise

        self.assert_(
            workflow.is_completed(),
            "complete_all() returned, but workflow is not complete\n"
            + workflow.task_tree.get_dump(),
        )

        # Make sure that there are no READY tasks left in the tree.
        for thetask in Task.Iterator(workflow.task_tree, Task.READY):
            workflow.task_tree.dump()
            raise Exception("Task with state READY: %s" % thetask.name)

        # Check whether the correct route was taken. 'with' closes the
        # handle even on assertion failure; 'fp' avoids shadowing the
        # builtin 'file'.
        filename = xml_filename + ".path"
        if os.path.exists(filename):
            with open(filename, "r") as fp:
                expected = fp.read()
            taken_path = "\n".join(taken_path) + "\n"
            # Label the error with the fixture filename (the original used
            # the leftover loop variable 'name', i.e. an arbitrary task).
            error = "%s:\n" % xml_filename
            error += "Expected:\n"
            error += "%s\n" % expected
            error += "but got:\n"
            error += "%s\n" % taken_path
            self.assert_(taken_path == expected, error)

        # Check attribute availability.
        filename = xml_filename + ".data"
        if os.path.exists(filename):
            with open(filename, "r") as fp:
                expected = fp.read()
            result = workflow.get_attribute("data", "")
            error = "%s:\n" % xml_filename
            error += "Expected:\n"
            error += "%s\n" % expected
            error += "but got:\n"
            error += "%s\n" % result
            self.assert_(result == expected, error)