def testDeserialization(self):
    """
    Tests that the deserialized workflow matches the original workflow.
    """
    old_workflow = self.workflow
    old_workflow.spec.start.set_property(marker=True)
    serializer = DictionarySerializer()
    serialized_workflow = old_workflow.serialize(serializer)

    serializer = DictionarySerializer()
    new_workflow = Workflow.deserialize(serializer, serialized_workflow)

    self.assertEqual(len(new_workflow.get_tasks()),
                     len(old_workflow.get_tasks()))
    self.assertEqual(new_workflow.spec.start.get_property('marker'),
                     old_workflow.spec.start.get_property('marker'))
    self.assertEqual(1, len([t for t in new_workflow.get_tasks()
                             if t.task_spec.name == 'Start']))
    self.assertEqual(1, len([t for t in new_workflow.get_tasks()
                             if t.task_spec.name == 'Root']))
def testDictionarySerializer(self):
    """
    Tests the DictionarySerializer for persisting Workflows and Tasks.
    """
    old_workflow = self.workflow
    serializer = DictionarySerializer()
    serialized_workflow = old_workflow.serialize(serializer)

    serializer = DictionarySerializer()
    new_workflow = Workflow.deserialize(serializer, serialized_workflow)

    before = old_workflow.get_dump()
    after = new_workflow.get_dump()
    self.assertTrue(before == after,
                    'Before:\n' + before + '\n' + 'After:\n' + after + '\n')
def testSerializationWithoutKwargs(self):
    new_wf_spec = WorkflowSpec()
    serializer = DictionarySerializer()
    nokw = Celery(self.wf_spec, 'testnokw', 'call.name',
                  call_args=[Attrib('the_attribute'), 1])
    data = nokw.serialize(serializer)
    nokw2 = Celery.deserialize(serializer, new_wf_spec, data)
    self.assertDictEqual(nokw.kwargs, nokw2.kwargs)

    kw = Celery(self.wf_spec, 'testkw', 'call.name',
                call_args=[Attrib('the_attribute'), 1],
                some_arg={"key": "value"})
    data = kw.serialize(serializer)
    kw2 = Celery.deserialize(serializer, new_wf_spec, data)
    self.assertDictEqual(kw.kwargs, kw2.kwargs)

    # Has kwargs, but they belong to the TaskSpec.
    kw_defined = Celery(self.wf_spec, 'testkwdef', 'call.name',
                        call_args=[Attrib('the_attribute'), 1],
                        some_ref=Attrib('value'),
                        defines={"key": "value"})
    data = kw_defined.serialize(serializer)
    kw_defined2 = Celery.deserialize(serializer, new_wf_spec, data)
    self.assertIsInstance(kw_defined2.kwargs['some_ref'], Attrib)

    # Serialized call arguments: each value is pickled and base64-encoded.
    args = [b64encode(pickle.dumps(v))
            for v in [Attrib('the_attribute'), u'ip',
                      u'dc455016e2e04a469c01a866f11c0854']]

    data = {u'R': b64encode(pickle.dumps(u'1'))}
    # Comes from live data. Bug not identified, but there we are...
    data = {u'inputs': [u'Wait:1'], u'lookahead': 2, u'description': u'',
            u'outputs': [], u'args': args, u'manual': False,
            u'data': data, u'locks': [], u'pre_assign': [],
            u'call': u'call.x', u'internal': False, u'post_assign': [],
            u'id': 8, u'result_key': None, u'defines': data,
            u'class': u'SpiffWorkflow.specs.Celery.Celery',
            u'name': u'RS1:1'}
    Celery.deserialize(serializer, new_wf_spec, data)
def testDeserialization(self):
    """
    Tests that the deserialized workflow can be completed.
    """
    old_workflow = self.workflow

    old_workflow.complete_next()
    self.assertEqual('task_a2', old_workflow.last_task.get_name())
    serializer = DictionarySerializer()
    serialized_workflow = old_workflow.serialize(serializer)

    serializer = DictionarySerializer()
    new_workflow = Workflow.deserialize(serializer, serialized_workflow)
    self.assertEqual('task_a2', old_workflow.last_task.get_name())
    new_workflow.complete_all()
    self.assertEqual('task_a2', old_workflow.last_task.get_name())
def testSerialize(self):
    serializer = DictionarySerializer()
    spec = self.create_instance()

    serialized = spec.serialize(serializer)
    self.assertTrue(isinstance(serialized, dict))

    new_spec = spec.__class__.deserialize(serializer, self.wf_spec, serialized)
    before = spec.serialize(serializer)
    after = new_spec.serialize(serializer)
    self.assertEqual(before, after,
                     'Before:\n%s\nAfter:\n%s\n' % (before, after))
def load_or_create_workflow(self):
    """
    Tries to load the workflow from the session; creates a new one if it
    cannot be found.
    :return:
    """
    try:
        workflow_path = self.get_worfklow_path()
        serialized_wf = self.request.session.workflows[workflow_path]
        self.workflow = BpmnWorkflow.deserialize(DictionarySerializer(),
                                                 serialized_wf)
    except Exception as e:
        print e
        wf_pkg_file = open(self.workflow_name)
        self.workflow_spec = BpmnSerializer().deserialize_workflow_spec(wf_pkg_file)
        self.workflow = BpmnWorkflow(self.workflow_spec)
def _get_full_workflow_state(self):
    # self.workflow.do_engine_steps()
    self.workflow.refresh_waiting_tasks()
    return self.workflow.serialize(serializer=DictionarySerializer())
def full_restore(self, state):
    return BpmnWorkflow.deserialize(DictionarySerializer(), state)
def save_workflow(self):
    if 'workflows' in self.request.session:
        serialized_wf = self.workflow.serialize(serializer=DictionarySerializer())
        self.request.session['workflows'][self.workflow_name] = serialized_wf
        self.request.session.save()  # TODO: check if this is really necessary
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import json
import uuid

from SpiffWorkflow.storage import DictionarySerializer
from SpiffWorkflow.storage.Serializer import Serializer
from SpiffWorkflow.operators import Attrib

_dictserializer = DictionarySerializer()


def object_hook(dct):
    # Restore the custom types that default() below encodes as tagged dicts.
    if '__uuid__' in dct:
        return uuid.UUID(dct['__uuid__'])
    if '__bytes__' in dct:
        return dct['__bytes__'].encode('ascii')
    if '__attrib__' in dct:
        return Attrib(dct['__attrib__'])
    return dct


def default(obj):
    # Encode the types the standard JSON encoder cannot handle; this is the
    # inverse of object_hook() above (branches for bytes and Attrib are
    # reconstructed here to mirror that hook).
    if isinstance(obj, uuid.UUID):
        return {'__uuid__': obj.hex}
    if isinstance(obj, bytes):
        return {'__bytes__': obj.decode('ascii')}
    if isinstance(obj, Attrib):
        return {'__attrib__': obj.name}
    raise TypeError('%r is not JSON serializable' % obj)
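# A minimal sketch of how these hooks are typically wired into the standard
# json module. The `workflow` instance below is a hypothetical, already-built
# SpiffWorkflow Workflow (and `Workflow` is assumed to be imported from
# SpiffWorkflow); neither is part of the original module above.
import json

# Turn the workflow into a plain dict first, then into JSON text.
# default() handles the UUID, bytes and Attrib values that json cannot
# encode on its own.
as_dict = workflow.serialize(_dictserializer)
as_json = json.dumps(as_dict, default=default)

# Round-trip: object_hook() restores those custom types on the way back.
restored_dict = json.loads(as_json, object_hook=object_hook)
new_workflow = Workflow.deserialize(_dictserializer, restored_dict)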
def testConstructor(self):
    DictionarySerializer()
def setUp(self):
    SerializerTest.setUp(self)
    self.serializer = DictionarySerializer()
    self.serial_type = dict