def load_workflow_spec(self, folder, f):
    """Load a Spiff XML workflow spec from the test data directory.

    Stores the deserialized spec on ``self.wf_spec`` and a fresh
    ``Workflow`` built from it on ``self.workflow``.
    """
    # Renamed local from ``file`` to avoid shadowing the builtin.
    spec_path = os.path.join(
        os.path.dirname(__file__), '..', 'data', 'spiff', folder, f)
    serializer = XmlSerializer()
    # Context manager closes the handle deterministically (the original
    # leaked it until garbage collection).
    with open(spec_path) as fp:
        xml = fp.read()
    self.wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=spec_path)
    self.workflow = Workflow(self.wf_spec)
def testSerializeWorkflowSpec(self, path_file=None, data=None):
    """Round-trip ``self.wf_spec`` through ``self.serializer``.

    Serializes the spec, deserializes it, serializes again, and checks
    that both serializations have the expected type and compare equal.
    Finally re-runs the restored workflow against the expected task path.
    """
    if self.serializer is None:
        return
    # Back-to-back serialization test.
    try:
        serialized1 = self.wf_spec.serialize(self.serializer)
        wf_spec = WorkflowSpec.deserialize(self.serializer, serialized1)
        serialized2 = wf_spec.serialize(self.serializer)
    except TaskSpecNotSupportedError:
        # BUG FIX: the original did ``pass`` here and then fell through
        # to run_workflow() with ``wf_spec`` unbound, raising NameError.
        # A serializer that does not support a task spec simply cannot
        # be round-trip tested, so bail out.
        return
    self.assert_(isinstance(serialized1, self.serial_type))
    self.assert_(isinstance(serialized2, self.serial_type))
    self.compareSerialization(serialized1, serialized2)

    # Test whether the restored workflow still works.
    if path_file is None:
        path_file = os.path.join(data_dir, 'spiff', 'workflow1.path')
        with open(path_file) as fp:
            path = fp.read()
    elif os.path.exists(path_file):
        with open(path_file) as fp:
            path = fp.read()
    else:
        path = None
    run_workflow(self, wf_spec, path, data)
def run_pattern(self, filename):
    """Run serializer round-trip tests for one workflow pattern file.

    ``filename`` may be an XML spec or a Python module defining
    ``TestWorkflowSpec``; the matching ``.path`` and ``.data`` files
    (same stem) provide the expected task path and workflow data.
    """
    # Expected task path and data files share the pattern's stem.
    path_file = os.path.splitext(filename)[0] + '.path'
    data_file = os.path.splitext(filename)[0] + '.data'
    if os.path.exists(data_file):
        with open(data_file) as fp:
            expected_data = fp.read()
    else:
        expected_data = None

    # Test patterns that are defined in XML format.
    if filename.endswith('.xml'):
        with open(filename) as fp:
            xml = fp.read()
        wf_spec = WorkflowSpec.deserialize(
            self.serializer, xml, filename=filename)
        self.serializerTestClass.wf_spec = wf_spec
        self.serializerTestClass.testSerializeWorkflowSpec(
            path_file=path_file, data=expected_data)
        self.serializerTestClass.testSerializeWorkflow(
            path_file=path_file, data=expected_data)

    # Test patterns that are defined in Python.
    if filename.endswith('.py') and not filename.endswith('__.py'):
        with open(filename) as fp:
            code = compile(fp.read(), filename, 'exec')
        thedict = {}
        # exec() is the idiomatic way to run an 'exec'-mode code object
        # (the original used eval() and discarded its None result).
        exec(code, thedict)
        wf_spec = thedict['TestWorkflowSpec']()
        self.serializerTestClass.wf_spec = wf_spec
        self.serializerTestClass.testSerializeWorkflowSpec(
            path_file=path_file, data=expected_data)
        self.serializerTestClass.testSerializeWorkflow(
            path_file=path_file, data=expected_data)
def run_pattern(self, filename):
    """Exercise one workflow pattern through serializer round-trip tests.

    Accepts either an XML spec file or a Python module that defines a
    ``TestWorkflowSpec`` class; companion ``.path``/``.data`` files
    supply the expected execution path and data.
    """
    # Load the expected task path / data file names from the stem.
    path_file = os.path.splitext(filename)[0] + '.path'
    data_file = os.path.splitext(filename)[0] + '.data'
    if os.path.exists(data_file):
        with open(data_file) as fp:
            expected_data = fp.read()
    else:
        expected_data = None

    # Test patterns that are defined in XML format.
    if filename.endswith('.xml'):
        with open(filename) as fp:
            xml = fp.read()
        wf_spec = WorkflowSpec.deserialize(
            self.serializer, xml, filename=filename)
        self.serializerTestClass.wf_spec = wf_spec
        self.serializerTestClass.testSerializeWorkflowSpec(
            path_file=path_file, data=expected_data)
        self.serializerTestClass.testSerializeWorkflow(
            path_file=path_file, data=expected_data)

    # Test patterns that are defined in Python.
    if filename.endswith('.py') and not filename.endswith('__.py'):
        with open(filename) as fp:
            code = compile(fp.read(), filename, 'exec')
        thedict = {}
        # exec(), not eval(): the code object was compiled in 'exec'
        # mode and the original's ``result`` was always unused None.
        exec(code, thedict)
        wf_spec = thedict['TestWorkflowSpec']()
        self.serializerTestClass.wf_spec = wf_spec
        self.serializerTestClass.testSerializeWorkflowSpec(
            path_file=path_file, data=expected_data)
        self.serializerTestClass.testSerializeWorkflow(
            path_file=path_file, data=expected_data)
def testDeserializeWorkflowSpec(self):
    """Deserialize the Spiff sample workflow and run it to completion.

    Reads ``workflow1.xml`` and its expected ``.path`` trace, then
    verifies execution via ``run_workflow``.
    """
    xml_file = os.path.join(data_dir, 'spiff', 'workflow1.xml')
    # Context managers close both data files promptly (the originals
    # relied on garbage collection).
    with open(xml_file) as fp:
        xml = fp.read()
    path_file = os.path.splitext(xml_file)[0] + '.path'
    with open(path_file) as fp:
        path = fp.read()
    wf_spec = WorkflowSpec.deserialize(self.serializer, xml)
    run_workflow(self, wf_spec, path, None)
def _create_subworkflow(self, my_task):
    """Build the sub-workflow instance for ``my_task``.

    Resolves ``self.file`` against the task, deserializes the spec from
    that file with ``self.serializer_cls``, and returns a new Workflow
    parented to the outer workflow.
    """
    from SpiffWorkflow.specs import WorkflowSpec
    # Renamed local from ``file`` to avoid shadowing the builtin;
    # ``self.file`` itself is untouched.
    spec_file = valueof(my_task, self.file)
    serializer = self.serializer_cls()
    # Close the handle deterministically instead of leaking it.
    with open(spec_file) as fp:
        s_state = fp.read()
    wf_spec = WorkflowSpec.deserialize(serializer, s_state, filename=spec_file)
    outer_workflow = my_task.workflow.outer_workflow
    return SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)
def _create_subworkflow(self, my_task):
    """Create the sub-workflow that this task spec launches.

    The spec file name comes from evaluating ``self.file`` against
    ``my_task``; the resulting Workflow is parented to the outer
    workflow so task trees can be integrated.
    """
    from SpiffWorkflow.specs import WorkflowSpec
    # Local renamed from ``file`` (builtin shadowing); PEP 8 keyword
    # spacing applied (no spaces around '=' in keyword arguments).
    spec_file = valueof(my_task, self.file)
    serializer = self.serializer_cls()
    # with-block ensures the file handle is closed.
    with open(spec_file) as fp:
        s_state = fp.read()
    wf_spec = WorkflowSpec.deserialize(serializer, s_state, filename=spec_file)
    outer_workflow = my_task.workflow.outer_workflow
    return SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)
def testDeserializeWorkflowSpec(self):
    """Deserialize the OpenWFE sample workflow and run it to completion.

    Reads ``workflow1.xml`` from the openwfe data directory plus its
    expected ``.path`` trace, then verifies execution via
    ``run_workflow``.
    """
    xml_file = os.path.join(data_dir, 'openwfe', 'workflow1.xml')
    # Close both data files deterministically via context managers.
    with open(xml_file) as fp:
        xml = fp.read()
    path_file = os.path.splitext(xml_file)[0] + '.path'
    with open(path_file) as fp:
        path = fp.read()
    wf_spec = WorkflowSpec.deserialize(self.serializer, xml)
    run_workflow(self, wf_spec, path, None)
def load_workflow_spec(self, folder, f):
    """Read a Spiff XML spec from the test data tree and build a workflow.

    Sets ``self.wf_spec`` to the deserialized spec and ``self.workflow``
    to a new Workflow created from it.
    """
    data_root = os.path.join(os.path.dirname(__file__), '..', 'data')
    spec_path = os.path.join(data_root, 'spiff', folder, f)
    serializer = XmlSerializer()
    with open(spec_path) as handle:
        xml = handle.read()
    self.wf_spec = WorkflowSpec.deserialize(
        serializer, xml, filename=spec_path)
    self.workflow = Workflow(self.wf_spec)
def _create_subworkflow(self, my_task):
    """Instantiate the sub-workflow for ``my_task`` from its XML file.

    The file name is evaluated from ``self.file`` against the task; the
    spec is deserialized with an XmlSerializer and the new Workflow is
    parented to the outer workflow.
    """
    from SpiffWorkflow.storage import XmlSerializer
    from SpiffWorkflow.specs import WorkflowSpec
    # Local renamed from ``file`` to stop shadowing the builtin.
    spec_file = valueof(my_task, self.file)
    serializer = XmlSerializer()
    # Context manager closes the handle (the original leaked it).
    with open(spec_file) as fp:
        xml = fp.read()
    wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=spec_file)
    outer_workflow = my_task.workflow.outer_workflow
    return SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)
def _create_subworkflow(self, my_task):
    """Create the sub-workflow launched by this task spec.

    Loads the XML spec named by ``self.file`` (evaluated against
    ``my_task``), deserializes it, and returns a Workflow parented to
    the outer workflow.
    """
    from SpiffWorkflow.storage import XmlSerializer
    from SpiffWorkflow.specs import WorkflowSpec
    # ``spec_file`` instead of ``file``: don't shadow the builtin.
    spec_file = valueof(my_task, self.file)
    serializer = XmlSerializer()
    # with-block guarantees the handle is closed.
    with open(spec_file) as fp:
        xml = fp.read()
    wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=spec_file)
    outer_workflow = my_task.workflow.outer_workflow
    return SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)
def testSerialize(self):
    """Pickle-round-trip a workflow several times and verify its path.

    Reads the Spiff sample spec and expected path, then runs
    ``doPickleSingle`` on five freshly created workflows.
    """
    # Read a complete workflow spec.
    xml_file = os.path.join(data_dir, 'spiff', 'workflow1.xml')
    # Context managers close both files deterministically.
    with open(xml_file) as fp:
        xml = fp.read()
    path_file = os.path.splitext(xml_file)[0] + '.path'
    with open(path_file) as fp:
        expected_path = fp.read().strip().split('\n')
    wf_spec = WorkflowSpec.deserialize(serializer, xml)
    # Repeat to catch nondeterministic pickling issues.
    for _ in range(5):
        workflow = Workflow(wf_spec)
        self.doPickleSingle(workflow, expected_path)
def testPickle(self):
    """Pickle-round-trip the sample workflow five times.

    Loads the Spiff sample spec and expected path file, then checks
    each fresh Workflow via ``doPickleSingle``.
    """
    # Read a complete workflow.
    xml_file = os.path.join(data_dir, 'spiff', 'workflow1.xml')
    # with-blocks close the data files promptly.
    with open(xml_file) as fp:
        xml = fp.read()
    path_file = os.path.splitext(xml_file)[0] + '.path'
    with open(path_file) as fp:
        expected_path = fp.read().strip().split('\n')
    wf_spec = WorkflowSpec.deserialize(self.serializer, xml)
    # range() instead of Python-2-only xrange(); identical here and
    # portable to Python 3.
    for _ in range(5):
        workflow = Workflow(wf_spec)
        self.doPickleSingle(workflow, expected_path)
def testSerializeWorkflowSpec(self):
    """Round-trip ``self.wf_spec`` through the serializer and re-run it.

    Serializes the spec, deserializes, serializes again, and checks the
    two serializations have the expected type and compare equal; the
    restored spec is then executed against the expected path.
    """
    if self.serializer is None:
        return
    # Back-to-back serialization test.
    serialized1 = self.wf_spec.serialize(self.serializer)
    wf_spec = WorkflowSpec.deserialize(self.serializer, serialized1)
    serialized2 = wf_spec.serialize(self.serializer)
    self.assert_(isinstance(serialized1, self.serial_type))
    self.assert_(isinstance(serialized2, self.serial_type))
    self.compareSerialization(serialized1, serialized2)

    # Test whether the restored workflow still works.
    path_file = os.path.join(data_dir, 'spiff', 'workflow1.path')
    # Close the path file deterministically.
    with open(path_file) as fp:
        path = fp.read()
    run_workflow(self, wf_spec, path, None)
def testSerializeWorkflowSpec(self):
    """Serialize, deserialize, and re-serialize ``self.wf_spec``.

    Verifies both serializations have the expected type and are equal
    (via ``compare_serialized`` and ``assertEqual``), then re-runs the
    restored workflow against the expected path.
    """
    if self.serializer is None:
        return
    # Back-to-back serialization test.
    serialized1 = self.wf_spec.serialize(self.serializer)
    wf_spec = WorkflowSpec.deserialize(self.serializer, serialized1)
    serialized2 = wf_spec.serialize(self.serializer)
    self.assert_(isinstance(serialized1, self.serial_type))
    self.assert_(isinstance(serialized2, self.serial_type))
    self.compare_serialized(serialized1, serialized2)
    self.assertEqual(serialized1, serialized2)

    # Test whether the restored workflow still works.
    path_file = os.path.join(data_dir, 'spiff', 'workflow1.path')
    # with-block closes the path file.
    with open(path_file) as fp:
        path = fp.read()
    run_workflow(self, wf_spec, path, None)
def _on_ready_before_hook(self, my_task):
    """Launch the sub-workflow and graft its task tree onto ``my_task``.

    Loads and deserializes the XML spec named by ``self.file``, creates
    the sub-workflow parented to the outer workflow, wires completion
    notification, then splices the sub-workflow's task tree in front of
    this task's regular children.
    """
    from SpiffWorkflow.storage import XmlSerializer
    from SpiffWorkflow.specs import WorkflowSpec
    # ``spec_file`` instead of ``file``: avoid shadowing the builtin.
    spec_file = valueof(my_task, self.file)
    serializer = XmlSerializer()
    # Context manager closes the handle (the original leaked it).
    with open(spec_file) as fp:
        xml = fp.read()
    wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=spec_file)
    outer_workflow = my_task.workflow.outer_workflow
    subworkflow = SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)
    # Notify this task when the sub-workflow finishes.
    subworkflow.completed_event.connect(
        self._on_subworkflow_completed, my_task)

    # Integrate the tree of the subworkflow into the tree of this workflow.
    my_task._sync_children(self.outputs, Task.FUTURE)
    for child in my_task.children:
        child.task_spec._update_state(child)
        child._inherit_attributes()
    # Prepend sub-workflow roots so they run before the regular outputs.
    for child in subworkflow.task_tree.children:
        my_task.children.insert(0, child)
        child.parent = my_task
    my_task._set_internal_attribute(subworkflow=subworkflow)
def _on_ready_before_hook(self, my_task):
    """Create this task's sub-workflow and splice in its task tree.

    Deserializes the XML spec named by ``self.file`` (evaluated against
    the task), builds the sub-workflow parented to the outer workflow,
    connects the completion callback, and inserts the sub-workflow's
    root tasks ahead of this task's normal children.
    """
    from SpiffWorkflow.storage import XmlSerializer
    from SpiffWorkflow.specs import WorkflowSpec
    # Renamed from ``file`` so the builtin is not shadowed.
    spec_file = valueof(my_task, self.file)
    serializer = XmlSerializer()
    # with-block guarantees the handle is closed.
    with open(spec_file) as fp:
        xml = fp.read()
    wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=spec_file)
    outer_workflow = my_task.workflow.outer_workflow
    subworkflow = SpiffWorkflow.Workflow(wf_spec, parent=outer_workflow)
    # Fire our completion handler when the sub-workflow is done.
    subworkflow.completed_event.connect(
        self._on_subworkflow_completed, my_task)

    # Integrate the tree of the subworkflow into the tree of this workflow.
    my_task._sync_children(self.outputs, Task.FUTURE)
    for child in my_task.children:
        child.task_spec._update_state(child)
        child._inherit_attributes()
    # Inserted at index 0 so sub-workflow tasks precede regular outputs.
    for child in subworkflow.task_tree.children:
        my_task.children.insert(0, child)
        child.parent = my_task
    my_task._set_internal_attribute(subworkflow=subworkflow)
from SpiffWorkflow.specs import WorkflowSpec
from SpiffWorkflow import Task, Workflow
#from SpiffWorkflow.serializer.json import JSONSerializer
from ansibleserializer import AnsibleSerializer
import time

# Load the serialized spec and rebuild the WorkflowSpec object.
with open('ansible-workflow-spec.json') as fp:
    workflow_json = fp.read()
serializer = AnsibleSerializer()
spec = WorkflowSpec.deserialize(serializer, workflow_json)

# Create the workflow.
workflow = Workflow(spec)

# Poll up to 20 times, completing every ready task on each pass, until
# the workflow finishes.  For the sake of this tutorial the "manual"
# flag on tasks is deliberately ignored; in practice you probably don't
# want to do that.
for attempt in range(20):
    print(attempt)
    workflow.complete_all(False)
    if workflow.is_completed():
        break
    time.sleep(0.5)

#workflow.complete_all(halt_on_manual=False)
#workflow.complete_next()
#tasks = workflow.get_tasks(Task.WAITING)
#for t in tasks:
#    print(t.get_name())
#    t.complete()
def load_workflow(self, test_file_path):
    """Deserialize a workflow spec from a JSON test file.

    Stores a fresh Workflow built from the spec on ``self.workflow``.
    """
    spec_json = load_json_string(test_file_path)
    spec = WorkflowSpec.deserialize(self.serializer, spec_json)
    self.workflow = Workflow(spec)
from SpiffWorkflow.specs import WorkflowSpec
from SpiffWorkflow.serializer.json import JSONSerializer

# Read the serialized spec from disk and rebuild the WorkflowSpec.
with open('workflow-spec.json') as fp:
    workflow_json = fp.read()
serializer = JSONSerializer()
spec = WorkflowSpec.deserialize(serializer, workflow_json)