    @classmethod
    def setUpClass(cls):
        super(CanvasNavigatorTest, cls).setUpClass()
        print("Start of tests: preparing nifi objects")
        config.nifi_config.host = 'http://192.168.56.5:8080/nifi-api'

        root = canvas.get_process_group(canvas.get_root_pg_id(), 'id')
        # Create new process group in root
        CanvasNavigatorTest.pg_parent = canvas.create_process_group(
            root, "parent", CANVAS_CENTER)
        # Create new child process groups under the parent group
        CanvasNavigatorTest.pg_child1 = canvas.create_process_group(
            CanvasNavigatorTest.pg_parent, "child1", CANVAS_CENTER)
        CanvasNavigatorTest.pg_child2 = canvas.create_process_group(
            CanvasNavigatorTest.pg_parent, "child2", CANVAS_CENTER)
        CanvasNavigatorTest.pg_child2_2 = canvas.create_process_group(
            CanvasNavigatorTest.pg_parent, "child2", CANVAS_CENTER)
        CanvasNavigatorTest.pg_grandchild1 = canvas.create_process_group(
            CanvasNavigatorTest.pg_child1, "grandchild1", CANVAS_CENTER)

        # Create other objects as well
        CanvasNavigatorTest.proc = canvas.create_processor(
            CanvasNavigatorTest.pg_parent,
            canvas.get_processor_type("GenerateFlowFile"), CANVAS_CENTER,
            "proc")
        CanvasNavigatorTest.input_port = canvas.create_port(
            CanvasNavigatorTest.pg_parent.component.id, "INPUT_PORT",
            "input_port", "STOPPED", CANVAS_CENTER)
        CanvasNavigatorTest.output_port = canvas.create_port(
            CanvasNavigatorTest.pg_parent.component.id, "OUTPUT_PORT",
            "output_port", "STOPPED", CANVAS_CENTER)
        CanvasNavigatorTest.controller = canvas.create_controller(
            CanvasNavigatorTest.pg_parent,
            canvas.list_all_controller_types()[0], "controller")
Example 2
def test_connect_output_ports(regress_nifi, fix_pg):
    f_pg_1 = fix_pg.generate()
    f_pg_2 = fix_pg.generate()
    f_pg_1_output = canvas.create_port(f_pg_1.id, 'OUTPUT_PORT',
                                       conftest.test_basename + 'output',
                                       'STOPPED')
    f_pg_2_input = canvas.create_port(f_pg_2.id, 'INPUT_PORT',
                                      conftest.test_basename + 'input',
                                      'STOPPED')
    r1 = canvas.create_connection(source=f_pg_1_output,
                                  target=f_pg_2_input,
                                  name=conftest.test_basename)
    assert isinstance(r1, nifi.ConnectionEntity)
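Cleanup here is presumably handled by the regress_nifi and fix_pg fixtures; when experimenting outside the test suite, the same objects can be removed by hand. A hedged sketch (the helper name is made up for illustration):

def cleanup_connected_ports(connection, output_port, input_port):
    # The connection must go first: ports with attached connections cannot
    # be deleted. purge=True drops any FlowFiles still queued on it.
    canvas.delete_connection(connection, purge=True)
    canvas.delete_port(output_port)
    canvas.delete_port(input_port)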
Example 3
def test_input_output_ports(regress_nifi, fix_pg):
    root_input_port = canvas.create_port(pg_id=canvas.get_root_pg_id(),
                                         port_type='INPUT_PORT',
                                         name=conftest.test_basename +
                                         'input_port',
                                         state='STOPPED')
    assert isinstance(root_input_port, nifi.PortEntity)
    root_output_port = canvas.create_port(pg_id=canvas.get_root_pg_id(),
                                          port_type='OUTPUT_PORT',
                                          name=conftest.test_basename +
                                          'output_port',
                                          state='STOPPED')
    assert isinstance(root_output_port, nifi.PortEntity)
    input_ports = [
        x for x in canvas.list_all_by_kind('input_ports')
        if conftest.test_basename in x.status.name
    ]
    assert len(input_ports) == 1
    output_ports = [
        x for x in canvas.list_all_by_kind('output_ports')
        if conftest.test_basename in x.status.name
    ]
    assert len(output_ports) == 1
    f_pg = fix_pg.generate()
    f_pg_input_port = canvas.create_port(pg_id=f_pg.id,
                                         port_type='INPUT_PORT',
                                         name=conftest.test_basename +
                                         'input_port',
                                         state='STOPPED')
    assert isinstance(f_pg_input_port, nifi.PortEntity)
    f_pg_output_port = canvas.create_port(pg_id=f_pg.id,
                                          port_type='OUTPUT_PORT',
                                          name=conftest.test_basename +
                                          'output_port',
                                          state='STOPPED')
    assert isinstance(f_pg_output_port, nifi.PortEntity)
    input_ports = [
        x for x in canvas.list_all_by_kind('input_ports')
        if conftest.test_basename in x.status.name
    ]
    assert len(input_ports) == 2
    output_ports = [
        x for x in canvas.list_all_by_kind('output_ports')
        if conftest.test_basename in x.status.name
    ]
    assert len(output_ports) == 2
    d1 = canvas.delete_port(root_input_port)
    assert isinstance(d1, nifi.PortEntity)
    assert d1.status is None
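list_all_by_kind traverses the canvas recursively from root, which is why the counts grow from 1 to 2 once ports are also created inside the fixture process group. The remaining ports could be removed the same way as root_input_port; a short continuation sketch, not part of the original test:

    # Same pattern for the other three ports created above
    for port in (root_output_port, f_pg_input_port, f_pg_output_port):
        deleted = canvas.delete_port(port)
        assert isinstance(deleted, nifi.PortEntity)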
Example 4
def step2_edge_flow(env):
    # Create input port and funnel in NiFi
    env.from_gw = canvas.create_port(env.root_pg.id, 'INPUT_PORT',
                                     'from Gateway', 'STOPPED', (0, 200))
    funnel_position = (96, 350)
    env.temp_funnel = create_funnel(env.root_pg.id, funnel_position)
    canvas.create_connection(env.from_gw, env.temp_funnel)

    # Create flow in EFM
    env.consume_mqtt = efm_create_processor(
        env.flow_id, env.efm_pg_id, 'ConsumeMQTT',
        'org.apache.nifi.processors.mqtt.ConsumeMQTT', (100, 100), {
            'Broker URI': 'tcp://edge2ai-1.dim.local:1883',
            'Client ID': 'minifi-iot',
            'Topic Filter': 'iot/#',
            'Max Queue Size': '60',
        })
    env.nifi_rpg = efm_create_remote_processor_group(env.flow_id,
                                                     env.efm_pg_id,
                                                     'Remote PG', _NIFI_URL,
                                                     'HTTP', (100, 400))
    env.consume_conn = efm_create_connection(env.flow_id,
                                             env.efm_pg_id,
                                             env.consume_mqtt,
                                             'PROCESSOR',
                                             env.nifi_rpg,
                                             'REMOTE_INPUT_PORT', ['Message'],
                                             destination_port=env.from_gw.id)

    # Create a bucket in NiFi Registry to save the edge flow versions
    if not versioning.get_registry_bucket('IoT'):
        versioning.create_registry_bucket('IoT')

    # Publish/version the flow
    efm_publish_flow(env.flow_id, 'First version - ' + env.run_id)
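create_funnel and the efm_* calls are lab helpers defined elsewhere; only create_funnel maps directly onto a documented nipyapi call. A thin wrapper could look like the sketch below (an assumption about how the helper is implemented, not the lab's actual code):

from nipyapi import canvas

def create_funnel(pg_id, position):
    # nipyapi.canvas.create_funnel places a funnel at the given (x, y)
    # position inside the process group identified by pg_id
    return canvas.create_funnel(pg_id, position=position)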
Example 5
def lab4_nifi_flow(env):
    LOG.info("Running step4_nifi_flow")
    # Create a bucket in NiFi Registry to save the edge flow versions
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')

    # Create NiFi Process Group
    env.reg_client = versioning.create_registry_client('NiFi Registry', _NIFIREG_URL, 'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME, (330, 350))
    #env.sensor_flow = versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_name='SensorProcessGroup', comment='Enabled version control - ' + env.run_id)
    env.sensor_flow = save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_name='SensorProcessGroup', comment='Enabled version control - ' + str(env.run_id))

    # Create controller services
    env.sr_svc = create_controller(env.sensor_pg, 'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry', {'url': _SCHREG_API_URL}, True)
    env.json_reader_svc = create_controller(env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {'schema-access-strategy': 'schema-name', 'schema-registry': env.sr_svc.id}, True)
    env.json_writer_svc = create_controller(env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter', {'schema-access-strategy': 'schema-name', 'schema-registry': env.sr_svc.id, 'Schema Write Strategy': 'hwx-schema-ref-attributes'}, True)

    # Create flow
    sensor_port = canvas.create_port(env.sensor_pg.id, 'INPUT_PORT', 'Sensor Data', 'RUNNING', (0, 0))

    upd_attr = create_processor(env.sensor_pg, 'Set Schema Name', 'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
        {
            'properties': {
                'schema.name': 'SensorReading',
            },
        }
    )
    canvas.create_connection(sensor_port, upd_attr)

    pub_kafka = create_processor(env.sensor_pg, 'Publish to Kafka topic: iot', 'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0', (0, 300),
        {
            'properties': {
                'bootstrap.servers': 'edge2ai-1.dim.local:9092',
                'topic': 'iot',
                'record-reader': env.json_reader_svc.id,
                'record-writer': env.json_writer_svc.id,
                'use-transactions': 'false',
                'attribute-name-regex': 'schema.*',
                'client.id': PRODUCER_CLIENT_ID,
            },
            'autoTerminatedRelationships': ['success'],
        }
    )
    canvas.create_connection(upd_attr, pub_kafka, ['success'])

    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

    # Commit changes
    #versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_id=env.sensor_flow.version_control_information.flow_id, comment='First version - ' + env.run_id)
    save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_id=env.sensor_flow.version_control_information.flow_id, comment='First version - ' + str(env.run_id))

    # Start flow
    canvas.schedule_process_group(env.root_pg.id, True)

    # Update "from Gateway" input port to connect to the process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
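The create_controller(parent_pg, controller_type, properties, start) helper used in these labs is not the raw nipyapi call, which takes a type descriptor and no properties. A hypothetical sketch of such a wrapper, built only from documented nipyapi.canvas functions:

from nipyapi import canvas, nifi

def create_controller(parent_pg, controller_type, properties, start, name=None):
    # Resolve the fully qualified class name to a controller-service type
    type_dto = next(t for t in canvas.list_all_controller_types()
                    if t.type == controller_type)
    controller = canvas.create_controller(parent_pg, type_dto, name=name)
    # Apply the requested properties, then optionally enable the service
    controller = canvas.update_controller(
        controller, nifi.ControllerServiceDTO(properties=properties))
    if start:
        canvas.schedule_controller(controller, True)
    return canvas.get_controller(controller.id, 'id')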
Example 6

    def lab2_edge_flow(self):
        # Create input port and funnel in NiFi
        self.context.from_gw = canvas.create_port(self.context.root_pg.id,
                                                  'INPUT_PORT', 'from Gateway',
                                                  'STOPPED', (0, 200))
        self.context.temp_funnel = nf.create_funnel(self.context.root_pg.id,
                                                    (96, 350))
        canvas.create_connection(self.context.from_gw,
                                 self.context.temp_funnel)
        canvas.schedule_components(self.context.root_pg.id, True,
                                   [self.context.from_gw])

        # Create flow in EFM
        self.context.consume_mqtt = efm.create_processor(
            self.context.flow_id, self.context.efm_pg_id, 'ConsumeMQTT',
            'org.apache.nifi.processors.mqtt.ConsumeMQTT', (100, 100), {
                'Broker URI':
                'tcp://{hostname}:1883'.format(hostname=get_hostname()),
                'Client ID':
                'minifi-iot',
                'Topic Filter':
                'iot/#',
                'Max Queue Size':
                '60',
            })
        self.context.nifi_rpg = efm.create_remote_processor_group(
            self.context.flow_id, self.context.efm_pg_id, 'Remote PG',
            nf.get_url(), 'HTTP', (100, 400))
        self.context.consume_conn = efm.create_connection(
            self.context.flow_id,
            self.context.efm_pg_id,
            self.context.consume_mqtt,
            'PROCESSOR',
            self.context.nifi_rpg,
            'REMOTE_INPUT_PORT', ['Message'],
            destination_port=self.context.from_gw.id,
            name='Sensor data',
            flow_file_expiration='60 seconds')

        # Create a bucket in NiFi Registry to save the edge flow versions
        if not versioning.get_registry_bucket('IoT'):
            versioning.create_registry_bucket('IoT')

        # Publish/version the flow
        efm.publish_flow(self.context.flow_id,
                         'First version - {}'.format(self.run_id))
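Only the NiFi-side objects created by this step can be torn down with nipyapi; the MiNiFi flow itself lives in EFM. A hedged sketch of undoing the NiFi part, reusing the context attributes set above (delete_funnel is assumed to be available in the installed nipyapi version):

        # Stop the gateway port, then remove its connection, the funnel and the port
        canvas.schedule_components(self.context.root_pg.id, False,
                                   [self.context.from_gw])
        for conn in canvas.get_component_connections(self.context.from_gw):
            canvas.delete_connection(conn, purge=True)
        canvas.delete_funnel(self.context.temp_funnel)
        canvas.delete_port(self.context.from_gw)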
Example 7
def lab4_nifi_flow(env):
    LOG.info("Running step4_nifi_flow")
    # Create a bucket in NiFi Registry to save the edge flow versions
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')

    # Create NiFi Process Group
    env.reg_client = versioning.create_registry_client('NiFi Registry',
                                                       _get_nifireg_url(),
                                                       'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME,
                                                (330, 350))
    #env.sensor_flow = versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_name='SensorProcessGroup', comment='Enabled version control - ' + env.run_id)
    env.sensor_flow = save_flow_ver(env.sensor_pg,
                                    env.reg_client,
                                    env.sensor_bucket,
                                    flow_name='SensorProcessGroup',
                                    comment='Enabled version control - ' +
                                    str(env.run_id))

    # Update default SSL context controller service
    ssl_svc_name = 'Default NiFi SSL Context Service'
    if _IS_TLS_ENABLED:
        props = {
            'SSL Protocol': 'TLS',
            'Truststore Type': 'JKS',
            'Truststore Filename': '/opt/cloudera/security/jks/truststore.jks',
            'Truststore Password': _THE_PWD,
            'Keystore Type': 'JKS',
            'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
            'Keystore Password': _THE_PWD,
            'key-password': _THE_PWD,
        }
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        if env.ssl_svc:
            canvas.schedule_controller(env.ssl_svc, False)
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.update_controller(
                env.ssl_svc, nifi.ControllerServiceDTO(properties=props))
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.schedule_controller(env.ssl_svc, True)
        else:
            env.keytab_svc = create_controller(
                env.root_pg,
                'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                props,
                True,
                name=ssl_svc_name)

    # Create controller services
    if _IS_TLS_ENABLED:
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        props = {
            'Kerberos Keytab': '/keytabs/admin.keytab',
            'Kerberos Principal': 'admin',
        }
        env.keytab_svc = create_controller(
            env.sensor_pg, 'org.apache.nifi.kerberos.KeytabCredentialsService',
            props, True)
    else:
        env.ssl_svc = None
        env.keytab_svc = None

    props = {
        'url': _get_schreg_api_url(),
    }
    if _IS_TLS_ENABLED:
        props.update({
            'kerberos-credentials-service': env.keytab_svc.id,
            'ssl-context-service': env.ssl_svc.id,
        })
    env.sr_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
        props, True)
    env.json_reader_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id
        }, True)
    env.json_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id,
            'Schema Write Strategy': 'hwx-schema-ref-attributes'
        }, True)
    env.avro_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id,
            'Schema Write Strategy': 'hwx-content-encoded-schema'
        }, True)

    # Create flow
    sensor_port = canvas.create_port(env.sensor_pg.id, 'INPUT_PORT',
                                     'Sensor Data', 'RUNNING', (0, 0))

    upd_attr = create_processor(
        env.sensor_pg, 'Set Schema Name',
        'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100), {
            'properties': {
                'schema.name': 'SensorReading',
            },
        })
    canvas.create_connection(sensor_port, upd_attr)

    props = {
        'topic': 'iot',
        'record-reader': env.json_reader_svc.id,
        'record-writer': env.json_writer_svc.id,
    }
    props.update(_get_common_kafka_client_properties(env, 'producer'))
    pub_kafka = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (0, 300), {
            'properties': props,
            'autoTerminatedRelationships': ['success'],
        })
    canvas.create_connection(upd_attr, pub_kafka, ['success'])

    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

    # Commit changes
    #versioning.save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket, flow_id=env.sensor_flow.version_control_information.flow_id, comment='First version - ' + env.run_id)
    save_flow_ver(env.sensor_pg,
                  env.reg_client,
                  env.sensor_bucket,
                  flow_id=env.sensor_flow.version_control_information.flow_id,
                  comment='First version - ' + str(env.run_id))

    # Start flow
    canvas.schedule_process_group(env.root_pg.id, True)

    # Update "from Gateway" input port to connect to the process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
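Both lab4 variants follow the same versioning pattern: the first save_flow_ver call passes flow_name to put the process group under version control, and later commits pass the flow_id taken from the object returned by that first call. Stripped to its essentials (a sketch reusing the names above):

    # First save: enables version control and returns the versioned flow
    flow = save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket,
                         flow_name='SensorProcessGroup',
                         comment='Enabled version control')
    # Later commits: reference the flow_id recorded by the first save
    save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket,
                  flow_id=flow.version_control_information.flow_id,
                  comment='Subsequent commit')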
Example 8

    def lab2_nifi_flow(self):
        # Create a bucket in NiFi Registry to save the edge flow versions
        self.context.sensor_bucket = versioning.get_registry_bucket(
            'SensorFlows')
        if not self.context.sensor_bucket:
            self.context.sensor_bucket = versioning.create_registry_bucket(
                'SensorFlows')

        # Create NiFi Process Group
        self.context.reg_client = versioning.create_registry_client(
            'NiFi Registry', nifireg.get_url(), 'The registry...')
        self.context.sensor_pg = canvas.create_process_group(
            self.context.root_pg, PG_NAME, (330, 350))
        self.context.sensor_flow = nifireg.save_flow_ver(
            self.context.sensor_pg,
            self.context.reg_client,
            self.context.sensor_bucket,
            flow_name='SensorProcessGroup',
            comment='Enabled version control - {}'.format(self.run_id))

        # Update default SSL context controller service
        ssl_svc_name = 'Default NiFi SSL Context Service'
        if is_tls_enabled():
            props = {
                'SSL Protocol': 'TLS',
                'Truststore Type': 'JKS',
                'Truststore Filename':
                '/opt/cloudera/security/jks/truststore.jks',
                'Truststore Password': get_the_pwd(),
                'Keystore Type': 'JKS',
                'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
                'Keystore Password': get_the_pwd(),
                'key-password': get_the_pwd(),
            }
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            if self.context.ssl_svc:
                canvas.schedule_controller(self.context.ssl_svc, False)
                self.context.ssl_svc = canvas.get_controller(
                    ssl_svc_name, 'name')
                canvas.update_controller(
                    self.context.ssl_svc,
                    nifi.ControllerServiceDTO(properties=props))
                self.context.ssl_svc = canvas.get_controller(
                    ssl_svc_name, 'name')
                canvas.schedule_controller(self.context.ssl_svc, True)
            else:
                self.context.keytab_svc = nf.create_controller(
                    self.context.root_pg,
                    'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                    props,
                    True,
                    name=ssl_svc_name)

        # Create controller services
        if is_tls_enabled():
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            props = {
                'Kerberos Keytab': '/keytabs/admin.keytab',
                'Kerberos Principal': 'admin',
            }
            self.context.keytab_svc = nf.create_controller(
                self.context.sensor_pg,
                'org.apache.nifi.kerberos.KeytabCredentialsService', props,
                True)
        else:
            self.context.ssl_svc = None
            self.context.keytab_svc = None

        props = {
            'url': schreg.get_api_url(),
        }
        if is_tls_enabled():
            props.update({
                'kerberos-credentials-service':
                self.context.keytab_svc.id,
                'ssl-context-service':
                self.context.ssl_svc.id,
            })
        self.context.sr_svc = nf.create_controller(
            self.context.sensor_pg,
            'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
            props, True)
        self.context.json_reader_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id
            }, True)
        self.context.json_writer_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter',
            {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id,
                'Schema Write Strategy': 'hwx-schema-ref-attributes'
            }, True)
        self.context.avro_writer_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter',
            {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id,
                'Schema Write Strategy': 'hwx-content-encoded-schema'
            }, True)

        # Create flow
        sensor_port = canvas.create_port(self.context.sensor_pg.id,
                                         'INPUT_PORT', 'Sensor Data',
                                         'STOPPED', (0, 0))

        upd_attr = nf.create_processor(
            self.context.sensor_pg, 'Set Schema Name',
            'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
            {
                'properties': {
                    'schema.name': 'SensorReading',
                },
            })
        canvas.create_connection(sensor_port, upd_attr)

        props = {
            'topic': 'iot',
            'record-reader': self.context.json_reader_svc.id,
            'record-writer': self.context.json_writer_svc.id,
        }
        props.update(
            kafka.get_common_client_properties(self.context, 'producer',
                                               CONSUMER_GROUP_ID,
                                               PRODUCER_CLIENT_ID))
        pub_kafka = nf.create_processor(
            self.context.sensor_pg, 'Publish to Kafka topic: iot', [
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_6',
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0'
            ], (0, 300), {
                'properties': props,
                'autoTerminatedRelationships': ['success'],
            })
        canvas.create_connection(upd_attr, pub_kafka, ['success'])

        fail_funnel = nf.create_funnel(self.context.sensor_pg.id, (600, 343))
        canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

        # Commit changes
        nifireg.save_flow_ver(self.context.sensor_pg,
                              self.context.reg_client,
                              self.context.sensor_bucket,
                              flow_id=self.context.sensor_flow.
                              version_control_information.flow_id,
                              comment='First version - {}'.format(self.run_id))

        # Start flow
        canvas.schedule_process_group(self.context.root_pg.id, True)

        # Update "from Gateway" input port to connect to the process group
        nf.update_connection(self.context.from_gw, self.context.temp_funnel,
                             sensor_port)
        canvas.schedule_components(self.context.root_pg.id, True,
                                   [sensor_port])
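update_connection(from_gw, temp_funnel, sensor_port) is another lab helper: it repoints the existing connection so that data arriving at the 'from Gateway' port now feeds the new process group's input port instead of the temporary funnel. nipyapi has no single call for that; one plain way to get the same effect, swapping the in-place update for delete-and-recreate (a sketch, with a made-up helper name):

def repoint_connection(source_port, old_target, new_target):
    # Drop the connection from source_port to old_target (purging any queued
    # FlowFiles), then recreate it against the new target
    for conn in canvas.get_component_connections(source_port):
        if conn.destination_id == old_target.id:
            canvas.delete_connection(conn, purge=True)
    return canvas.create_connection(source_port, new_target)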