def test_export_flow_version(regress_flow_reg, fix_flow_serde):
    """Exporting a flow version yields a serialised string (json or yaml),
    and optionally writes the identical content to a file."""
    # Test we can turn a flow snapshot into a json string
    r1 = versioning.export_flow_version(
        fix_flow_serde.bucket.identifier,
        fix_flow_serde.flow.identifier
    )
    assert isinstance(r1, str)
    # Test writing it to a file
    r2 = versioning.export_flow_version(
        fix_flow_serde.bucket.identifier,
        fix_flow_serde.flow.identifier,
        file_path=fix_flow_serde.filepath + '_test.json'
    )
    assert isinstance(r2, str)
    r2l = utils.load(r2)
    assert isinstance(r2l, dict)
    assert 'flowIdentifier' in r2l['snapshotMetadata']
    # read in the file and confirm it matches the returned string.
    # BUG FIX: the DeepDiff result was previously discarded, so this
    # comparison was never actually asserted.
    r2f = utils.fs_read(fix_flow_serde.filepath + '_test.json')
    assert DeepDiff(
        r2,
        r2f,
        ignore_order=False,
        verbose_level=2
    ) == {}
    # Test yaml dump
    r3 = versioning.export_flow_version(
        fix_flow_serde.bucket.identifier,
        fix_flow_serde.flow.identifier,
        mode='yaml'
    )
    assert isinstance(r3, str)
    r3l = utils.load(r3)
    assert isinstance(r3l, dict)
    assert 'flowIdentifier' in r3l['snapshotMetadata']
def test_dump(regress_flow_reg, fix_flow_serde):
    """Round-tripping through dump/load must not modify or lose information,
    and invalid modes or unserialisable objects must be rejected."""
    # Build the in-memory object from the raw fixture bytes
    source_obj = json.loads(fix_flow_serde.raw.decode('utf-8'))
    # json serialisation round trip
    as_json = utils.dump(obj=source_obj, mode='json')
    assert isinstance(as_json, six.string_types)
    json_round_trip = utils.load(as_json)
    # An unknown mode is rejected up front
    with pytest.raises(AssertionError):
        _ = utils.dump('', 'FakeNews')
    # An unserialisable object raises from the encoder
    with pytest.raises(TypeError):
        _ = utils.dump({None}, 'json')
    # yaml serialisation round trip
    as_yaml = utils.dump(obj=source_obj, mode='yaml')
    assert isinstance(as_yaml, six.string_types)
    yaml_round_trip = utils.load(as_yaml)
    # Both formats must decode to exactly the same structure
    assert DeepDiff(
        json_round_trip,
        yaml_round_trip,
        verbose_level=2,
        ignore_order=False
    ) == {}
def test_import_flow_version(regress_flow_reg, fix_flow_serde):
    """Importing a flow version works from strings in memory or from files,
    into an existing flow, an existing bucket, or a brand-new bucket."""
    reference = fix_flow_serde.snapshot
    raw_obj = fix_flow_serde.raw
    # Test that our raw object serialises and deserialises through the layers
    # of json reformatting. This is because we load the NiFi Java json object,
    # dump it using the Python json library, and load it again using
    # ruamel.yaml.
    round_tripped = utils.load(
        utils.dump(utils.load(obj=raw_obj), mode='json'),
        dto=fix_flow_serde.dto
    )
    assert DeepDiff(
        reference, round_tripped, ignore_order=False, verbose_level=2
    ) == {}
    # Test that we can issue a simple create_flow with this object
    r0 = versioning.create_flow_version(
        flow=fix_flow_serde.flow,
        flow_snapshot=utils.load(obj=fix_flow_serde.json, dto=fix_flow_serde.dto)
    )
    assert isinstance(r0, registry.VersionedFlowSnapshot)
    assert DeepDiff(
        reference.flow_contents, r0.flow_contents,
        ignore_order=False, verbose_level=2
    ) == {}
    # Test we can import from a String in memory
    # Test we can import as new version in existing bucket
    r1 = versioning.import_flow_version(
        bucket_id=fix_flow_serde.bucket.identifier,
        encoded_flow=fix_flow_serde.json,
        flow_id=fix_flow_serde.flow.identifier
    )
    assert isinstance(r1, registry.VersionedFlowSnapshot)
    assert DeepDiff(
        reference.flow_contents, r1.flow_contents,
        ignore_order=False, verbose_level=2
    ) == {}
    # Test we can also import from a file
    r2 = versioning.import_flow_version(
        bucket_id=fix_flow_serde.bucket.identifier,
        file_path=fix_flow_serde.filepath + '.yaml',
        flow_id=fix_flow_serde.flow.identifier
    )
    assert isinstance(r2, registry.VersionedFlowSnapshot)
    assert DeepDiff(
        reference.flow_contents, r2.flow_contents,
        ignore_order=False, verbose_level=2
    ) == {}
    # Test import into another bucket as first version
    second_bucket = versioning.create_registry_bucket(
        conftest.test_bucket_name + '_02'
    )
    r3 = versioning.import_flow_version(
        bucket_id=second_bucket.identifier,
        encoded_flow=fix_flow_serde.yaml,
        flow_name=conftest.test_cloned_ver_flow_name + '_01'
    )
    assert isinstance(r3, registry.VersionedFlowSnapshot)
    assert DeepDiff(
        reference.flow_contents, r3.flow_contents,
        ignore_order=False, verbose_level=2
    ) == {}
def test_load(regress_flow_reg, fix_flow_serde):
    """Loading a serialised snapshot reproduces the fixture contents;
    re-validated here in case the 'dump' test breaks."""
    loaded = utils.load(obj=fix_flow_serde.json, dto=fix_flow_serde.dto)
    # The deserialised flow contents must match the fixture snapshot
    assert DeepDiff(
        fix_flow_serde.snapshot.flow_contents,
        loaded.flow_contents,
        verbose_level=2,
        ignore_order=True
    ) == {}
    # A non-string object is rejected
    with pytest.raises(AssertionError):
        _ = utils.load({})
def test_get_flow_version(regress_flow_reg, fix_ver_flow):
    """Fetching flow versions returns the latest by default, a specific
    version on request, and raw bytes when export is requested."""
    bucket_id = fix_ver_flow.bucket.identifier
    initial = versioning.get_flow_version(
        bucket_id=bucket_id,
        flow_id=fix_ver_flow.flow.identifier,
        version=None
    )
    assert isinstance(initial, registry.VersionedFlowSnapshot)
    assert initial.snapshot_metadata.version == 1
    # Commit a second version so there is history to query
    second = versioning.create_flow_version(
        flow=initial.flow,
        flow_snapshot=initial
    )
    assert isinstance(second, registry.VersionedFlowSnapshot)
    assert second.snapshot_metadata.version == 2
    # version=None returns the newest snapshot
    latest = versioning.get_flow_version(
        bucket_id=second.flow.bucket_identifier,
        flow_id=second.flow.identifier,
        version=None
    )
    assert latest.flow.version_count == 2
    assert latest.snapshot_metadata.version == 2
    # A specific (string) version number retrieves that snapshot
    first_again = versioning.get_flow_version(
        bucket_id=second.flow.bucket_identifier,
        flow_id=second.flow.identifier,
        version='1',
    )
    assert first_again.snapshot_metadata.version == 1
    assert first_again.flow.version_count == 2
    # export=True yields raw bytes that decode to a dict
    exported = versioning.get_flow_version(
        bucket_id=second.flow.bucket_identifier,
        flow_id=second.flow.identifier,
        version=None,
        export=True
    )
    assert isinstance(exported, bytes)
    assert isinstance(utils.load(exported), dict)
def test_deser_flow():
    """A live ProcessGroupFlowEntity survives a dump/load round trip."""
    fetched = canvas.get_flow('root')
    assert isinstance(fetched, ProcessGroupFlowEntity)
    serialised = utils.dump(fetched, 'json')
    revived = utils.load(serialised, ('nifi', 'ProcessGroupFlowEntity'))
    assert isinstance(revived, ProcessGroupFlowEntity)