class TaskTest(unittest.TestCase):
    """Parses xml/anlage_benutzer.xml and runs the resulting workflow.

    setUp() also creates a SQLAlchemy engine, though no connection is
    actually used by the tests below.
    """

    def setUp(self):
        self.connectDB()
        self.reader   = XmlReader()
        self.input    = os.path.dirname(__file__) + "/xml/anlage_benutzer.xml"
        self.workflow = self.reader.parse_file(self.input).pop()

    def testFileExists(self):
        # Use unittest assertions rather than the bare assert statement,
        # which is silently stripped when Python runs with -O.
        self.assert_(os.path.isfile(self.input))
        self.assert_(os.access(self.input, os.R_OK))

    def testReader(self):
        # Parsing must not raise.
        self.reader.parse_file(self.input)

    def testTask(self):
        # The parsed list must contain at least one workflow to pop().
        self.reader.parse_file(self.input).pop()

    def testJob(self):
        # Drive the workflow to completion, one task at a time.
        self.job = Job(self.workflow)
        for name in self.workflow.tasks:
            self.workflow.tasks[name].signal_connect('reached',   self.on_reached_cb)
            self.workflow.tasks[name].signal_connect('completed', self.on_complete_cb)
        while not self.job.is_completed():
            self.job.complete_next()

    def connectDB(self):
        """Create a MySQL engine and reset the ORM mappers.

        NOTE(review): the engine is only stored in a global; nothing here
        opens an actual connection.
        """
        global engine, db
        host     = "build-lenny-32.intranet.gonicus.de"
        db_name  = "spiff"
        username = "******"
        password = "******"

        # Connect to MySQL.
        auth   = username + ':' + password
        dbn    = 'mysql://' + auth + '@' + host + '/' + db_name
        engine = create_engine(dbn)
        clear_mappers()

    def on_reached_cb(self, job, task):
        # print "Reached Task: %s" % task.get_name()
        pass

    def on_complete_cb(self, job, task):
        # print "Completed Task: %s" % task.get_name()
        pass
def getJob():
    """Return a Job, resuming from a pickle file when one exists.

    If ``data_file`` is readable, the previously pickled Job is loaded and
    the file is removed so the next call starts fresh; otherwise a new Job
    is created from the workflow parsed out of ``workflow_file``.
    """
    global data_file, workflow_file
    reader   = XmlReader()
    workflow = reader.parse_file(workflow_file).pop()
    if os.access(data_file, os.R_OK):
        # Resume a previous run. Use 'fp' rather than shadowing the
        # builtin input(), and close the handle even if unpickling fails.
        fp = open(data_file, 'rb')
        try:
            job = pickle.load(fp)
        finally:
            fp.close()
        os.remove(data_file)
    else:
        job = Job(workflow)
    return job
class XmlReaderTest(WorkflowTest):
    """Tests for XmlReader.parse_string() and XmlReader.parse_file()."""

    def setUp(self):
        WorkflowTest.setUp(self)
        self.reader = XmlReader()

    def testParseString(self):
        # An empty document is not well-formed XML.
        self.assertRaises(ExpatError, self.reader.parse_string, '')
        self.reader.parse_string('<xml></xml>')

    def testParseFile(self):
        # File not found.
        self.assertRaises(IOError, self.reader.parse_file, 'foo')

        # 0 byte sized file. ('path' instead of shadowing the builtin file())
        path = os.path.join(os.path.dirname(__file__), 'xml', 'empty1.xml')
        self.assertRaises(ExpatError, self.reader.parse_file, path)

        # File containing only "<xml></xml>".
        path = os.path.join(os.path.dirname(__file__), 'xml', 'empty2.xml')
        self.reader.parse_file(path)

        # Read a complete workflow.
        path = os.path.join(os.path.dirname(__file__), 'xml', 'spiff', 'workflow1.xml')
        self.reader.parse_file(path)

    def testRunWorkflow(self):
        path = os.path.join(os.path.dirname(__file__), 'xml', 'spiff', 'workflow1.xml')
        workflow_list = self.reader.parse_file(path)
        for wf in workflow_list:
            self.runWorkflow(wf)
def _on_ready_before_hook(self, my_task):
    """Start the referenced sub-workflow and graft its tree onto this job.

    Parses the workflow file named by ``self.file`` (after attribute
    substitution via valueof()), creates a sub-Job parented to the outer
    job, and splices the subjob's task tree in as children of ``my_task``
    so the outer job's traversal walks straight into the subjob.
    """
    file = valueof(my_task, self.file)
    xml_reader    = XmlReader()
    workflow_list = xml_reader.parse_file(file)
    # NOTE(review): only the first workflow in the file is used.
    workflow  = workflow_list[0]
    outer_job = my_task.job.outer_job
    subjob    = SpiffWorkflow.Job(workflow, parent = outer_job)
    # Get notified when the subjob finishes, carrying my_task along.
    subjob.signal_connect('completed', self._on_subjob_completed, my_task)

    # Integrate the tree of the subjob into the tree of this job.
    my_task._update_children(self.outputs, Task.FUTURE)
    for child in my_task.children:
        child._inherit_attributes()
    for child in subjob.task_tree.children:
        # Prepend each subjob root so it is reached before this task's own
        # outputs, and reparent it into the outer tree.
        my_task.children.insert(0, child)
        child.parent = my_task
    # Keep a handle on the subjob (presumably read back by the completed
    # callback — confirm against _on_subjob_completed).
    my_task._set_internal_attribute(subjob = subjob)
    return True
class PatternTest(unittest.TestCase):
    """Runs every workflow-pattern XML under xml/spiff/ and compares the
    route taken (against a ``<file>.path`` sidecar) and the resulting
    'data' attribute (against a ``<file>.data`` sidecar), when present.
    """

    def setUp(self):
        # Reset the global id counters so task ids are deterministic
        # across runs — the expected .path files depend on this.
        Task.id_pool        = 0
        Task.thread_id_pool = 0
        self.xml_path = ['xml/spiff/control-flow/',
                         'xml/spiff/data/',
                         'xml/spiff/resource/']
        self.reader = XmlReader()
        self.wf     = None

    def testPattern(self):
        # Run every .xml file found in each pattern directory.
        for dirname in self.xml_path:
            dirname = os.path.join(os.path.dirname(__file__), dirname)
            for filename in os.listdir(dirname):
                if not filename.endswith('.xml'):
                    continue
                self.runFile(os.path.join(dirname, filename))

    def runFile(self, xml_filename):
        """Parse one pattern file and run its first workflow."""
        try:
            #print '\n%s: ok' % xml_filename,
            workflow_list = self.reader.parse_file(xml_filename)
            self.runWorkflow(workflow_list[0], xml_filename)
        except:
            # Prefix the traceback with the offending file name, then
            # re-raise so the test still fails.
            print '%s:' % xml_filename
            raise

    def runWorkflow(self, wf, xml_filename):
        """Execute a workflow and verify its path and data attributes."""
        # The callbacks append task names into taken_path as they fire.
        taken_path = []
        for name in wf.tasks:
            wf.tasks[name].signal_connect('reached', on_reached_cb, taken_path)
            wf.tasks[name].signal_connect('completed', on_complete_cb, taken_path)

        # Execute all tasks within the Job.
        job = Job(wf)
        self.assert_(not job.is_completed(), 'Job is complete before start')
        try:
            job.complete_all(False)
        except:
            # Dump the tree for diagnosis, then re-raise.
            job.task_tree.dump()
            raise
        #job.task_tree.dump()
        self.assert_(job.is_completed(),
                     'complete_all() returned, but job is not complete\n'
                     + job.task_tree.get_dump())

        # Make sure that there are no waiting tasks left in the tree.
        for node in Task.Iterator(job.task_tree, Task.READY):
            job.task_tree.dump()
            raise Exception('Node with state READY: %s' % node.name)

        # Check whether the correct route was taken.
        # NOTE(review): 'name' below is whatever was left over from the
        # signal-connect loop above — NameError if wf.tasks is empty.
        filename = xml_filename + '.path'
        if os.path.exists(filename):
            file     = open(filename, 'r')
            expected = file.read()
            file.close()
            taken_path = '\n'.join(taken_path) + '\n'
            error  = '%s:\n' % name
            error += 'Expected:\n'
            error += '%s\n' % expected
            error += 'but got:\n'
            error += '%s\n' % taken_path
            self.assert_(taken_path == expected, error)

        # Check attribute availability.
        filename = xml_filename + '.data'
        if os.path.exists(filename):
            file     = open(filename, 'r')
            expected = file.read()
            file.close()
            result = job.get_attribute('data', '')
            error  = '%s:\n' % name
            error += 'Expected:\n'
            error += '%s\n' % expected
            error += 'but got:\n'
            error += '%s\n' % result
            self.assert_(result == expected, error)
class PersistenceTest(WorkflowTest):
    """Checks that a running Job survives a pickle round trip unchanged."""

    def setUp(self):
        WorkflowTest.setUp(self)
        self.reader     = XmlReader()
        self.data_file  = 'data.pkl'
        self.taken_path = None

    def doPickleSingle(self, workflow, job):
        """Run part of the job, pickle/unpickle it, then run it to the end.

        Renamed from testPickleSingle: any method whose name starts with
        'test' is invoked by the unittest runner WITHOUT arguments, so the
        required (workflow, job) parameters made the suite fail with a
        TypeError before testPickle() ever ran it properly.
        """
        self.taken_path = {'reached':   [],
                           'completed': []}
        for name, task in workflow.tasks.iteritems():
            task.signal_connect('reached',   on_reached_cb,  self.taken_path['reached'])
            task.signal_connect('completed', on_complete_cb, self.taken_path['completed'])

        # Execute a random number of steps.
        for i in xrange(randint(0, len(workflow.tasks))):
            job.complete_next()

        # Store the workflow instance in a file. Use 'fp' instead of
        # shadowing builtins, and close the handle even on error.
        fp = open(self.data_file, 'wb')
        try:
            pickle.dump(job, fp, -1)
        finally:
            fp.close()
        before = job.get_dump()

        # Load the workflow instance from a file and delete the file.
        fp = open(self.data_file, 'rb')
        try:
            job = pickle.load(fp)
        finally:
            fp.close()
        os.remove(self.data_file)
        after = job.get_dump()

        # Make sure that the state of the job did not change.
        self.assert_(before == after,
                     'Before:\n' + before + '\n' + 'After:\n' + after + '\n')

        # Re-connect signals, because the pickle dump now only contains a
        # copy of self.taken_path.
        for name, task in job.workflow.tasks.iteritems():
            task.signal_disconnect('reached',   on_reached_cb)
            task.signal_disconnect('completed', on_complete_cb)
            task.signal_connect('reached',   on_reached_cb,  self.taken_path['reached'])
            task.signal_connect('completed', on_complete_cb, self.taken_path['completed'])

        # Run the rest of the workflow.
        job.complete_all()
        after = job.get_dump()
        self.assert_(job.is_completed(), 'Job done, but not complete:' + after)
        assert_same_path(self, self.expected_path, self.taken_path['completed'])

    def testPickle(self):
        # Read a complete workflow and round-trip it a few times, since
        # the number of steps executed before pickling is random.
        path = os.path.join(os.path.dirname(__file__), 'xml/spiff/workflow1.xml')
        for i in xrange(5):
            workflow_list = self.reader.parse_file(path)
            wf  = workflow_list[0]
            job = Job(wf)
            self.doPickleSingle(wf, job)