def test_dump(regress_flow_reg, fix_flow_serde):
    """Round-trip the fixture flow through dump/load in json and yaml.

    Verifies that serialising and re-loading the flow export does not
    modify or lose any information, and that bad dump arguments raise.
    """
    # Decode the raw export into a plain python object for in-memory work
    source_obj = json.loads(fix_flow_serde.raw.decode('utf-8'))
    # json mode: dump produces a string, and it loads back cleanly
    json_text = utils.dump(
        obj=source_obj,
        mode='json'
    )
    assert isinstance(json_text, six.string_types)
    from_json = utils.load(json_text)
    # An unsupported mode is rejected up front
    with pytest.raises(AssertionError):
        _ = utils.dump('', 'FakeNews')
    # A set is not json-serialisable
    with pytest.raises(TypeError):
        _ = utils.dump({None}, 'json')
    # yaml mode: same exercise
    yaml_text = utils.dump(
        obj=source_obj,
        mode='yaml'
    )
    assert isinstance(yaml_text, six.string_types)
    from_yaml = utils.load(yaml_text)
    # Both round trips must produce identical structures
    assert DeepDiff(
        from_json,
        from_yaml,
        verbose_level=2,
        ignore_order=False
    ) == {}
def test_import_flow_version(regress_flow_reg, fix_flow_serde):
    """Exercise flow-version import from memory and file, into existing
    and freshly created registry buckets, comparing each result against
    the fixture snapshot.
    """
    expected = fix_flow_serde.snapshot
    raw_bytes = fix_flow_serde.raw
    # The raw NiFi Java json must survive the layered reformatting:
    # load the export, dump via the python json library, then load
    # again via ruamel.yaml — with no change to the content.
    rehydrated = utils.load(
        utils.dump(utils.load(obj=raw_bytes), mode='json'),
        dto=fix_flow_serde.dto)
    assert DeepDiff(expected, rehydrated,
                    ignore_order=False, verbose_level=2) == {}
    # A simple create_flow_version with the loaded snapshot succeeds
    created = versioning.create_flow_version(
        flow=fix_flow_serde.flow,
        flow_snapshot=utils.load(obj=fix_flow_serde.json,
                                 dto=fix_flow_serde.dto))
    assert isinstance(created, registry.VersionedFlowSnapshot)
    assert DeepDiff(expected.flow_contents, created.flow_contents,
                    ignore_order=False, verbose_level=2) == {}
    # Import from an in-memory string as a new version of an existing
    # flow in an existing bucket
    from_string = versioning.import_flow_version(
        bucket_id=fix_flow_serde.bucket.identifier,
        encoded_flow=fix_flow_serde.json,
        flow_id=fix_flow_serde.flow.identifier)
    assert isinstance(from_string, registry.VersionedFlowSnapshot)
    assert DeepDiff(expected.flow_contents, from_string.flow_contents,
                    ignore_order=False, verbose_level=2) == {}
    # Import from a yaml file on disk
    from_file = versioning.import_flow_version(
        bucket_id=fix_flow_serde.bucket.identifier,
        file_path=fix_flow_serde.filepath + '.yaml',
        flow_id=fix_flow_serde.flow.identifier)
    assert isinstance(from_file, registry.VersionedFlowSnapshot)
    assert DeepDiff(expected.flow_contents, from_file.flow_contents,
                    ignore_order=False, verbose_level=2) == {}
    # Import into a brand-new bucket as that bucket's first version
    second_bucket = versioning.create_registry_bucket(
        conftest.test_bucket_name + '_02')
    into_new_bucket = versioning.import_flow_version(
        bucket_id=second_bucket.identifier,
        encoded_flow=fix_flow_serde.yaml,
        flow_name=conftest.test_cloned_ver_flow_name + '_01')
    assert isinstance(into_new_bucket, registry.VersionedFlowSnapshot)
    assert DeepDiff(expected.flow_contents, into_new_bucket.flow_contents,
                    ignore_order=False, verbose_level=2) == {}
def test_deser_flow():
    """A live ProcessGroupFlowEntity survives a dump/load round trip."""
    fetched = canvas.get_flow('root')
    assert isinstance(fetched, ProcessGroupFlowEntity)
    # Serialise to a json string, then rebuild the typed DTO from it
    serialised = utils.dump(fetched, 'json')
    rebuilt = utils.load(serialised, ('nifi', 'ProcessGroupFlowEntity'))
    assert isinstance(rebuilt, ProcessGroupFlowEntity)