Example #1
def test_get_registry_bucket(fixture_registry_bucket):
    r1 = versioning.get_registry_bucket(config.test_bucket_name)
    assert r1.name == config.test_bucket_name
    r2 = versioning.get_registry_bucket(r1.identifier, 'id')
    assert r2.name == r1.name
    with pytest.raises(ValueError):
        _ = versioning.get_registry_bucket('Irrelevant', 'Invalid')
    r3 = versioning.get_registry_bucket('NonExistantProbably')
    assert r3 is None
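
The fixture_registry_bucket fixture referenced above is not shown on this page. A minimal sketch of what it might look like, assuming nipyapi's create/delete bucket helpers and a stand-in constant for config.test_bucket_name:

import pytest
from nipyapi import versioning

TEST_BUCKET_NAME = 'nipyapi_test_bucket'  # stand-in for config.test_bucket_name

@pytest.fixture
def fixture_registry_bucket():
    # Create a scratch bucket for the test and remove it afterwards.
    bucket = versioning.create_registry_bucket(TEST_BUCKET_NAME)
    yield bucket
    versioning.delete_registry_bucket(bucket)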
Example #2
def test_get_registry_bucket(regress_flow_reg, fix_bucket):
    f_bucket = fix_bucket()
    r1 = versioning.get_registry_bucket(f_bucket.name)
    assert r1.name == conftest.test_bucket_name
    r2 = versioning.get_registry_bucket(r1.identifier, 'id')
    assert r2.name == r1.name
    with pytest.raises(ValueError):
        _ = versioning.get_registry_bucket('Irrelevant', 'Invalid')
    r3 = versioning.get_registry_bucket('NonExistantProbably')
    assert r3 is None
Example #3
def step2_edge_flow(env):
    # Create input port and funnel in NiFi
    env.from_gw = canvas.create_port(env.root_pg.id, 'INPUT_PORT',
                                     'from Gateway', 'STOPPED', (0, 200))
    funnel_position = (96, 350)
    env.temp_funnel = create_funnel(env.root_pg.id, funnel_position)
    canvas.create_connection(env.from_gw, env.temp_funnel)

    # Create flow in EFM
    env.consume_mqtt = efm_create_processor(
        env.flow_id, env.efm_pg_id, 'ConsumeMQTT',
        'org.apache.nifi.processors.mqtt.ConsumeMQTT', (100, 100), {
            'Broker URI': 'tcp://edge2ai-1.dim.local:1883',
            'Client ID': 'minifi-iot',
            'Topic Filter': 'iot/#',
            'Max Queue Size': '60',
        })
    env.nifi_rpg = efm_create_remote_processor_group(env.flow_id,
                                                     env.efm_pg_id,
                                                     'Remote PG', _NIFI_URL,
                                                     'HTTP', (100, 400))
    env.consume_conn = efm_create_connection(env.flow_id,
                                             env.efm_pg_id,
                                             env.consume_mqtt,
                                             'PROCESSOR',
                                             env.nifi_rpg,
                                             'REMOTE_INPUT_PORT', ['Message'],
                                             destination_port=env.from_gw.id)

    # Create a bucket in NiFi Registry to save the edge flow versions
    if not versioning.get_registry_bucket('IoT'):
        versioning.create_registry_bucket('IoT')

    # Publish/version the flow
    efm_publish_flow(env.flow_id, 'First version - ' + env.run_id)
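
The get-or-create pattern around the 'IoT' bucket above recurs in several of the examples below. A small helper sketch, assuming only nipyapi's versioning module (the helper name is illustrative):

from nipyapi import versioning

def ensure_registry_bucket(name):
    """Return the named NiFi Registry bucket, creating it if it does not exist yet."""
    return versioning.get_registry_bucket(name) or versioning.create_registry_bucket(name)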
Example #4
def nifireg_delete_flows(identifier, identifier_type='name'):
    bucket = versioning.get_registry_bucket(identifier, identifier_type)
    if bucket:
        for flow in versioning.list_flows_in_bucket(bucket.identifier):
            endpoint = '/buckets/{bucketId}/flows/{flowId}'.format(
                bucketId=flow.bucket_identifier, flowId=flow.identifier)
            resp = nifireg_api_delete(
                endpoint, headers={'Content-Type': 'application/json'})
Example #5
def lab4_nifi_flow(env):
    LOG.info("Running step4_nifi_flow")
    # Create a bucket in NiFi Registry to save the edge flow versions
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')

    # Create NiFi Process Group
    env.reg_client = versioning.create_registry_client('NiFi Registry', _NIFIREG_URL, 'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME, (330, 350))
    #env.sensor_flow = versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_name='SensorProcessGroup', comment='Enabled version control - ' + env.run_id)
    env.sensor_flow = save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_name='SensorProcessGroup', comment='Enabled version control - ' + str(env.run_id))

    # Create controller services
    env.sr_svc = create_controller(env.sensor_pg, 'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry', {'url': _SCHREG_API_URL}, True)
    env.json_reader_svc = create_controller(env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {'schema-access-strategy': 'schema-name', 'schema-registry': env.sr_svc.id}, True)
    env.json_writer_svc = create_controller(env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter', {'schema-access-strategy': 'schema-name', 'schema-registry': env.sr_svc.id, 'Schema Write Strategy': 'hwx-schema-ref-attributes'}, True)

    # Create flow
    sensor_port = canvas.create_port(env.sensor_pg.id, 'INPUT_PORT', 'Sensor Data', 'RUNNING', (0, 0))

    upd_attr = create_processor(env.sensor_pg, 'Set Schema Name', 'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
        {
            'properties': {
                'schema.name': 'SensorReading',
            },
        }
    )
    canvas.create_connection(sensor_port, upd_attr)

    pub_kafka = create_processor(env.sensor_pg, 'Publish to Kafka topic: iot', 'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0', (0, 300),
        {
            'properties': {
                'bootstrap.servers': 'edge2ai-1.dim.local:9092',
                'topic': 'iot',
                'record-reader': env.json_reader_svc.id,
                'record-writer': env.json_writer_svc.id,
                'use-transactions': 'false',
                'attribute-name-regex': 'schema.*',
                'client.id': PRODUCER_CLIENT_ID,
            },
            'autoTerminatedRelationships': ['success'],
        }
    )
    canvas.create_connection(upd_attr, pub_kafka, ['success'])

    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

    # Commit changes
    #versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_id=env.sensor_flow.version_control_information.flow_id, comment='First version - ' + env.run_id)
    save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_id=env.sensor_flow.version_control_information.flow_id, comment='First version - ' + str(env.run_id))

    # Start flow
    canvas.schedule_process_group(env.root_pg.id, True)

    # Update "from Gateway" input port to connect to the process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
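
To confirm that the final commit registered as expected, the process group's version state can be queried from within the same function. A hedged sketch, assuming nipyapi's versioning.get_version_info:

from nipyapi import versioning

# Inspect the version-control state of the process group committed above.
vci = versioning.get_version_info(env.sensor_pg)
print(vci.version_control_information.version)  # latest committed version number
print(vci.version_control_information.flow_id)  # flow id inside the SensorFlows bucket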
Example #6
def test_nifi_registry_iot(run_id):
    bucket = versioning.get_registry_bucket('IoT')
    assert bucket
    flows = versioning.list_flows_in_bucket(bucket.identifier)
    assert len(flows) == 1
    flow = flows[0]
    versions = versioning.list_flow_versions(bucket.identifier, flow.identifier)
    assert len(versions) >= 2
    assert versions[0].comments == 'Second version - ' + run_id, 'Comments: ' + versions[0].comments
    assert versions[1].comments == 'First version - ' + run_id, 'Comments: ' + versions[1].comments
Example #7
def test_nifi_registry_sensorflows(run_id):
    bucket = versioning.get_registry_bucket('SensorFlows')
    assert bucket
    flows = versioning.list_flows_in_bucket(bucket.identifier)
    assert len(flows) == 1
    flow = flows[0]
    versions = versioning.list_flow_versions(bucket.identifier, flow.identifier)
    assert len(versions) == 3
    assert versions[0].comments == 'Second version - ' + run_id, 'Comments: ' + versions[0].comments
    assert versions[1].comments == 'First version - ' + run_id, 'Comments: ' + versions[1].comments
    assert versions[2].comments == 'Enabled version control - ' + run_id, 'Comments: ' + versions[2].comments
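
Both registry tests above rely on list_flow_versions returning snapshots newest-first. A hedged helper that makes that assumption explicit (the function name is illustrative):

from nipyapi import versioning

def latest_version_comment(bucket_name):
    """Return the commit comment of the newest snapshot of the first flow
    in the named bucket, assuming newest-first ordering as the tests above do."""
    bucket = versioning.get_registry_bucket(bucket_name)
    flow = versioning.list_flows_in_bucket(bucket.identifier)[0]
    versions = versioning.list_flow_versions(bucket.identifier, flow.identifier)
    return versions[0].comments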
Example #8
def test_save_flow_ver(regress_flow_reg, fix_bucket, fix_pg, fix_proc):
    f_reg_client = conftest.ensure_registry_client(
        config.registry_local_name
    )
    f_bucket = fix_bucket()
    f_pg = fix_pg.generate()
    test_bucket = versioning.get_registry_bucket(f_bucket.identifier, 'id')
    assert test_bucket.name == conftest.test_bucket_name
    r1 = versioning.save_flow_ver(
        process_group=f_pg,
        registry_client=f_reg_client,
        bucket=test_bucket,
        flow_name=conftest.test_versioned_flow_name,
        comment='a test comment',
        desc='a test description'
    )
    assert isinstance(r1, nifi.VersionControlInformationEntity)
    # Next we validate you can't duplicate a flow name in a bucket
    with pytest.raises(ValueError):
        _ = versioning.save_flow_ver(
            process_group=f_pg,
            registry_client=f_reg_client,
            bucket=f_bucket,
            flow_name=conftest.test_versioned_flow_name,
            comment='NiPyApi Test',
            desc='NiPyApi Test'
        )
    # Add a processor, refresh status, and save a new version
    fix_proc.generate(parent_pg=f_pg)
    f_pg = canvas.get_process_group(f_pg.id, 'id')
    r2 = versioning.save_flow_ver(
        process_group=f_pg,
        registry_client=f_reg_client,
        bucket=f_bucket,
        flow_id=r1.version_control_information.flow_id,
        comment='a test comment'
    )
    assert isinstance(r2, nifi.VersionControlInformationEntity)
    assert r2.version_control_information.version > \
        r1.version_control_information.version
    with pytest.raises(ValueError):
        _ = versioning.save_flow_ver(
            process_group=f_pg,
            registry_client=f_reg_client,
            bucket=f_bucket,
            flow_name=conftest.test_versioned_flow_name,
            comment='a test comment',
            desc='a test description',
            refresh=False
        )
    # shortcut to clean up the test objects when not using the fixture
    conftest.cleanup_reg()
Example #9
    def lab2_edge_flow(self):
        # Create input port and funnel in NiFi
        self.context.from_gw = canvas.create_port(self.context.root_pg.id,
                                                  'INPUT_PORT', 'from Gateway',
                                                  'STOPPED', (0, 200))
        self.context.temp_funnel = nf.create_funnel(self.context.root_pg.id,
                                                    (96, 350))
        canvas.create_connection(self.context.from_gw,
                                 self.context.temp_funnel)
        canvas.schedule_components(self.context.root_pg.id, True,
                                   [self.context.from_gw])

        # Create flow in EFM
        self.context.consume_mqtt = efm.create_processor(
            self.context.flow_id, self.context.efm_pg_id, 'ConsumeMQTT',
            'org.apache.nifi.processors.mqtt.ConsumeMQTT', (100, 100), {
                'Broker URI':
                'tcp://{hostname}:1883'.format(hostname=get_hostname()),
                'Client ID':
                'minifi-iot',
                'Topic Filter':
                'iot/#',
                'Max Queue Size':
                '60',
            })
        self.context.nifi_rpg = efm.create_remote_processor_group(
            self.context.flow_id, self.context.efm_pg_id, 'Remote PG',
            nf.get_url(), 'HTTP', (100, 400))
        self.context.consume_conn = efm.create_connection(
            self.context.flow_id,
            self.context.efm_pg_id,
            self.context.consume_mqtt,
            'PROCESSOR',
            self.context.nifi_rpg,
            'REMOTE_INPUT_PORT', ['Message'],
            destination_port=self.context.from_gw.id,
            name='Sensor data',
            flow_file_expiration='60 seconds')

        # Create a bucket in NiFi Registry to save the edge flow versions
        if not versioning.get_registry_bucket('IoT'):
            versioning.create_registry_bucket('IoT')

        # Publish/version the flow
        efm.publish_flow(self.context.flow_id,
                         'First version - {}'.format(self.run_id))
Example #10
def test_delete_registry_bucket(fixture_registry_bucket):
    b1, c1 = fixture_registry_bucket
    r = versioning.delete_registry_bucket(b1)
    assert r.identifier == b1.identifier
    with pytest.raises(ValueError):
        _ = versioning.get_registry_bucket(b1.identifier, 'id')
Example #11
def delete_flows(identifier, identifier_type='name'):
    bucket = versioning.get_registry_bucket(identifier, identifier_type)
    if bucket:
        for flow in versioning.list_flows_in_bucket(bucket.identifier):
            BucketFlowsApi().delete_flow(flow.bucket_identifier,
                                         flow.identifier)
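
Unlike Example #4, this variant calls the generated registry client directly rather than issuing a raw REST DELETE. A hypothetical usage sketch:

# Drop every flow stored in the 'SensorFlows' bucket, addressing it by name,
# or pass identifier_type='id' to address it by identifier instead.
delete_flows('SensorFlows')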
Example #12
def lab4_nifi_flow(env):
    LOG.info("Running step4_nifi_flow")
    # Create a bucket in NiFi Registry to save the edge flow versions
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')

    # Create NiFi Process Group
    env.reg_client = versioning.create_registry_client('NiFi Registry',
                                                       _get_nifireg_url(),
                                                       'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME,
                                                (330, 350))
    #env.sensor_flow = versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_name='SensorProcessGroup', comment='Enabled version control - ' + env.run_id)
    env.sensor_flow = save_flow_ver(env.sensor_pg,
                                    env.reg_client,
                                    env.sensor_bucket,
                                    flow_name='SensorProcessGroup',
                                    comment='Enabled version control - ' +
                                    str(env.run_id))

    # Update default SSL context controller service
    ssl_svc_name = 'Default NiFi SSL Context Service'
    if _IS_TLS_ENABLED:
        props = {
            'SSL Protocol': 'TLS',
            'Truststore Type': 'JKS',
            'Truststore Filename': '/opt/cloudera/security/jks/truststore.jks',
            'Truststore Password': _THE_PWD,
            'Keystore Type': 'JKS',
            'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
            'Keystore Password': _THE_PWD,
            'key-password': _THE_PWD,
        }
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        if env.ssl_svc:
            canvas.schedule_controller(env.ssl_svc, False)
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.update_controller(
                env.ssl_svc, nifi.ControllerServiceDTO(properties=props))
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.schedule_controller(env.ssl_svc, True)
        else:
            env.keytab_svc = create_controller(
                env.root_pg,
                'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                props,
                True,
                name=ssl_svc_name)

    # Create controller services
    if _IS_TLS_ENABLED:
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        props = {
            'Kerberos Keytab': '/keytabs/admin.keytab',
            'Kerberos Principal': 'admin',
        }
        env.keytab_svc = create_controller(
            env.sensor_pg, 'org.apache.nifi.kerberos.KeytabCredentialsService',
            props, True)
    else:
        env.ssl_svc = None
        env.keytab_svc = None

    props = {
        'url': _get_schreg_api_url(),
    }
    if _IS_TLS_ENABLED:
        props.update({
            'kerberos-credentials-service': env.keytab_svc.id,
            'ssl-context-service': env.ssl_svc.id,
        })
    env.sr_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
        props, True)
    env.json_reader_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id
        }, True)
    env.json_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id,
            'Schema Write Strategy': 'hwx-schema-ref-attributes'
        }, True)
    env.avro_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id,
            'Schema Write Strategy': 'hwx-content-encoded-schema'
        }, True)

    # Create flow
    sensor_port = canvas.create_port(env.sensor_pg.id, 'INPUT_PORT',
                                     'Sensor Data', 'RUNNING', (0, 0))

    upd_attr = create_processor(
        env.sensor_pg, 'Set Schema Name',
        'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100), {
            'properties': {
                'schema.name': 'SensorReading',
            },
        })
    canvas.create_connection(sensor_port, upd_attr)

    props = {
        'topic': 'iot',
        'record-reader': env.json_reader_svc.id,
        'record-writer': env.json_writer_svc.id,
    }
    props.update(_get_common_kafka_client_properties(env, 'producer'))
    pub_kafka = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (0, 300), {
            'properties': props,
            'autoTerminatedRelationships': ['success'],
        })
    canvas.create_connection(upd_attr, pub_kafka, ['success'])

    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

    # Commit changes
    #versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_id=env.sensor_flow.version_control_information.flow_id, comment='First version - ' + env.run_id)
    save_flow_ver(env.sensor_pg,
                  env.reg_client,
                  env.sensor_bucket,
                  flow_id=env.sensor_flow.version_control_information.flow_id,
                  comment='First version - ' + str(env.run_id))

    # Start flow
    canvas.schedule_process_group(env.root_pg.id, True)

    # Update "from Gateway" input port to connect to the process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
Example #13
    def lab2_nifi_flow(self):
        # Create a bucket in NiFi Registry to save the edge flow versions
        self.context.sensor_bucket = versioning.get_registry_bucket(
            'SensorFlows')
        if not self.context.sensor_bucket:
            self.context.sensor_bucket = versioning.create_registry_bucket(
                'SensorFlows')

        # Create NiFi Process Group
        self.context.reg_client = versioning.create_registry_client(
            'NiFi Registry', nifireg.get_url(), 'The registry...')
        self.context.sensor_pg = canvas.create_process_group(
            self.context.root_pg, PG_NAME, (330, 350))
        self.context.sensor_flow = nifireg.save_flow_ver(
            self.context.sensor_pg,
            self.context.reg_client,
            self.context.sensor_bucket,
            flow_name='SensorProcessGroup',
            comment='Enabled version control - {}'.format(self.run_id))

        # Update default SSL context controller service
        ssl_svc_name = 'Default NiFi SSL Context Service'
        if is_tls_enabled():
            props = {
                'SSL Protocol': 'TLS',
                'Truststore Type': 'JKS',
                'Truststore Filename':
                '/opt/cloudera/security/jks/truststore.jks',
                'Truststore Password': get_the_pwd(),
                'Keystore Type': 'JKS',
                'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
                'Keystore Password': get_the_pwd(),
                'key-password': get_the_pwd(),
            }
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            if self.context.ssl_svc:
                canvas.schedule_controller(self.context.ssl_svc, False)
                self.context.ssl_svc = canvas.get_controller(
                    ssl_svc_name, 'name')
                canvas.update_controller(
                    self.context.ssl_svc,
                    nifi.ControllerServiceDTO(properties=props))
                self.context.ssl_svc = canvas.get_controller(
                    ssl_svc_name, 'name')
                canvas.schedule_controller(self.context.ssl_svc, True)
            else:
                self.context.keytab_svc = nf.create_controller(
                    self.context.root_pg,
                    'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                    props,
                    True,
                    name=ssl_svc_name)

        # Create controller services
        if is_tls_enabled():
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            props = {
                'Kerberos Keytab': '/keytabs/admin.keytab',
                'Kerberos Principal': 'admin',
            }
            self.context.keytab_svc = nf.create_controller(
                self.context.sensor_pg,
                'org.apache.nifi.kerberos.KeytabCredentialsService', props,
                True)
        else:
            self.context.ssl_svc = None
            self.context.keytab_svc = None

        props = {
            'url': schreg.get_api_url(),
        }
        if is_tls_enabled():
            props.update({
                'kerberos-credentials-service':
                self.context.keytab_svc.id,
                'ssl-context-service':
                self.context.ssl_svc.id,
            })
        self.context.sr_svc = nf.create_controller(
            self.context.sensor_pg,
            'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
            props, True)
        self.context.json_reader_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id
            }, True)
        self.context.json_writer_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter',
            {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id,
                'Schema Write Strategy': 'hwx-schema-ref-attributes'
            }, True)
        self.context.avro_writer_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter',
            {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id,
                'Schema Write Strategy': 'hwx-content-encoded-schema'
            }, True)

        # Create flow
        sensor_port = canvas.create_port(self.context.sensor_pg.id,
                                         'INPUT_PORT', 'Sensor Data',
                                         'STOPPED', (0, 0))

        upd_attr = nf.create_processor(
            self.context.sensor_pg, 'Set Schema Name',
            'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
            {
                'properties': {
                    'schema.name': 'SensorReading',
                },
            })
        canvas.create_connection(sensor_port, upd_attr)

        props = {
            'topic': 'iot',
            'record-reader': self.context.json_reader_svc.id,
            'record-writer': self.context.json_writer_svc.id,
        }
        props.update(
            kafka.get_common_client_properties(self.context, 'producer',
                                               CONSUMER_GROUP_ID,
                                               PRODUCER_CLIENT_ID))
        pub_kafka = nf.create_processor(
            self.context.sensor_pg, 'Publish to Kafka topic: iot', [
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_6',
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0'
            ], (0, 300), {
                'properties': props,
                'autoTerminatedRelationships': ['success'],
            })
        canvas.create_connection(upd_attr, pub_kafka, ['success'])

        fail_funnel = nf.create_funnel(self.context.sensor_pg.id, (600, 343))
        canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

        # Commit changes
        nifireg.save_flow_ver(self.context.sensor_pg,
                              self.context.reg_client,
                              self.context.sensor_bucket,
                              flow_id=self.context.sensor_flow.
                              version_control_information.flow_id,
                              comment='First version - {}'.format(self.run_id))

        # Start flow
        canvas.schedule_process_group(self.context.root_pg.id, True)

        # Update "from Gateway" input port to connect to the process group
        nf.update_connection(self.context.from_gw, self.context.temp_funnel,
                             sensor_port)
        canvas.schedule_components(self.context.root_pg.id, True,
                                   [sensor_port])
Example #14
from nipyapi import config, versioning

dev_nifi_port = 8080
dev_reg_port = 18080
dev_nifi_url = 'http://edge2ai-1.dim.local:' + str(dev_nifi_port) + '/nifi'
dev_reg_url = 'http://edge2ai-1.dim.local:' + str(
    dev_reg_port) + '/nifi-registry'
dev_nifi_api_url = dev_nifi_url + '-api'
dev_reg_api_url = dev_reg_url + '-api'

# edge2ai-1.dim.local
config.nifi_config.host = dev_nifi_api_url
config.registry_config.host = dev_reg_api_url

# Get Registry Bucket
sensor_bucket = versioning.get_registry_bucket('SensorFlows')
print("=====")
print("bucket %s", sensor_bucket)
print("=====")

if not sensor_bucket:
    sensor_bucket = versioning.create_registry_bucket('SensorFlows')

# if you change it, back it up for migration
# Backup a Flow
#dlx = versioning.get_flow_version(sensor_bucket.identifier, 'e6d35be3-3073-405e-9c0d-46c8f980bf10', version=None, export=True)
# Export Flow Version
#xprt = versioning.export_flow_version(sensor_bucket.identifier, 'e6d35be3-3073-405e-9c0d-46c8f980bf10', version=None, file_path='/opt/demo/ApacheConAtHome2020/flows/ApacheConDemos.json', mode='json')
#print(dlx)
#print(xprt)
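
The commented-out calls above hard-code a flow identifier. A hedged sketch that looks the flow up by name instead, assuming it was versioned as 'SensorProcessGroup' (as in Examples #5 and #12) and using an illustrative output path:

# Look the flow up by name so its identifier does not need to be hard-coded.
sensor_flow = versioning.get_flow_in_bucket(
    sensor_bucket.identifier, 'SensorProcessGroup', 'name')
if sensor_flow:
    # version=None exports the latest snapshot; the file path is an assumption.
    versioning.export_flow_version(
        sensor_bucket.identifier, sensor_flow.identifier,
        version=None, file_path='/tmp/SensorProcessGroup.json', mode='json')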