Example #1
def test_list_all_connections(regress_nifi, fix_proc):
    f_p1 = fix_proc.generate()
    f_p2 = fix_proc.generate()
    r1 = [
        x for x in canvas.list_all_connections()
        if conftest.test_basename in x.component.name
    ]
    assert not r1
    # connect single relationship
    c1 = canvas.create_connection(f_p1, f_p2, ['success'],
                                  conftest.test_basename)
    r2 = [
        x for x in canvas.list_all_connections('root')
        if conftest.test_basename in x.component.name
    ]
    assert len(r2) == 1
    r3 = [
        x for x in canvas.list_all_connections(canvas.get_root_pg_id())
        if conftest.test_basename in x.component.name
    ]
    assert len(r3) == 1
    assert isinstance(r2[0], nifi.ConnectionEntity)
    c2 = canvas.create_connection(f_p1, f_p2, name=conftest.test_basename)
    r2 = [
        x for x in canvas.list_all_connections('root')
        if conftest.test_basename in x.component.name
    ]
    assert len(r2) == 2
    _ = canvas.delete_connection(c1)
    _ = canvas.delete_connection(c2)
    r3 = [
        x for x in canvas.list_all_connections('root')
        if conftest.test_basename in x.component.name
    ]
    assert not r3
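
The test above exercises nipyapi's high-level connection helpers end to end. A minimal, self-contained sketch of the same create/list/delete round trip, assuming a running NiFi instance (the host URL, processor types, and names below are placeholders):

import nipyapi
from nipyapi import canvas, nifi

nipyapi.config.nifi_config.host = 'http://localhost:8080/nifi-api'  # placeholder URL

root_pg = canvas.get_process_group(canvas.get_root_pg_id(), 'id')
gen = canvas.create_processor(
    root_pg, canvas.get_processor_type('GenerateFlowFile'),
    (400.0, 400.0), 'demo_src')
log = canvas.create_processor(
    root_pg, canvas.get_processor_type('LogAttribute'),
    (400.0, 600.0), 'demo_dst')

# Create a named connection, find it again, then clean up
conn = canvas.create_connection(gen, log, ['success'], 'demo_conn')
found = [c for c in canvas.list_all_connections('root')
         if 'demo_conn' in c.component.name]
assert len(found) == 1 and isinstance(found[0], nifi.ConnectionEntity)
canvas.delete_connection(conn)
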
Example #2
    def create(self, destination_type=None, destination_group=None):
        if destination_group is None:
            destination_group = self.group

        if destination_type is None:
            destination_type = 'PROCESSOR'

        canvas.create_connection(
            parent_pg=self.group,
            connection=nifi.ConnectionDTO(
                source=nifi.ConnectableDTO(
                    id=self.source.id,
                    type='PROCESSOR',
                    group_id=self.group.id
                ),
                destination=nifi.ConnectableDTO(
                    id=self.destination.id,
                    type=destination_type,
                    group_id=destination_group.id
                ),
                selected_relationships=self.selected_relationships,
            ),
            name=self.name
        )

        return self.destination
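
The parent_pg=/connection= keyword form above does not match nipyapi's stock canvas.create_connection(source, target, relationships=None, name=None), so this create() is presumably calling a project-local wrapper. For comparison, a hedged sketch of submitting the same hand-built ConnectionDTO through the raw generated client (the RevisionDTO plumbing is an assumption based on the swagger models; the high-level helper normally handles it):

from nipyapi import nifi

def connect_raw(parent_pg, source_proc, dest_proc, relationships):
    # Hand-build the entity, as the wrapper above does, then POST it
    return nifi.ProcessGroupsApi().create_connection(
        parent_pg.id,
        body=nifi.ConnectionEntity(
            revision=nifi.RevisionDTO(version=0),
            component=nifi.ConnectionDTO(
                source=nifi.ConnectableDTO(
                    id=source_proc.id, type='PROCESSOR',
                    group_id=parent_pg.id),
                destination=nifi.ConnectableDTO(
                    id=dest_proc.id, type='PROCESSOR',
                    group_id=parent_pg.id),
                selected_relationships=relationships)))
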
Example #3
def step2_edge_flow(env):
    # Create input port and funnel in NiFi
    env.from_gw = canvas.create_port(env.root_pg.id, 'INPUT_PORT',
                                     'from Gateway', 'STOPPED', (0, 200))
    funnel_position = (96, 350)
    env.temp_funnel = create_funnel(env.root_pg.id, funnel_position)
    canvas.create_connection(env.from_gw, env.temp_funnel)

    # Create flow in EFM
    env.consume_mqtt = efm_create_processor(
        env.flow_id, env.efm_pg_id, 'ConsumeMQTT',
        'org.apache.nifi.processors.mqtt.ConsumeMQTT', (100, 100), {
            'Broker URI': 'tcp://edge2ai-1.dim.local:1883',
            'Client ID': 'minifi-iot',
            'Topic Filter': 'iot/#',
            'Max Queue Size': '60',
        })
    env.nifi_rpg = efm_create_remote_processor_group(env.flow_id,
                                                     env.efm_pg_id,
                                                     'Remote PG', _NIFI_URL,
                                                     'HTTP', (100, 400))
    env.consume_conn = efm_create_connection(env.flow_id,
                                             env.efm_pg_id,
                                             env.consume_mqtt,
                                             'PROCESSOR',
                                             env.nifi_rpg,
                                             'REMOTE_INPUT_PORT', ['Message'],
                                             destination_port=env.from_gw.id)

    # Create a bucket in NiFi Registry to save the edge flow versions
    if not versioning.get_registry_bucket('IoT'):
        versioning.create_registry_bucket('IoT')

    # Publish/version the flow
    efm_publish_flow(env.flow_id, 'First version - ' + env.run_id)
Example #4
def test_create_connection_funnels(regress_nifi, fix_proc, fix_funnel):
    f_p1 = fix_proc.generate()
    f_f1 = fix_funnel.generate()
    r1 = canvas.create_connection(source=f_p1, target=f_f1)
    assert isinstance(r1, nifi.ConnectionEntity)
    f_p2 = fix_proc.generate()
    r2 = canvas.create_connection(source=f_f1, target=f_p2)
    assert isinstance(r2, nifi.ConnectionEntity)
Example #5
def test_create_connection_processors(regress_nifi, fix_proc):
    f_p1 = fix_proc.generate()
    f_p2 = fix_proc.generate()
    # connect single relationship
    r1 = canvas.create_connection(f_p1, f_p2, ['success'],
                                  conftest.test_basename)
    assert isinstance(r1, nifi.ConnectionEntity)
    # connect all relationships by default
    r2 = canvas.create_connection(f_p1, f_p2, name=conftest.test_basename)
    assert isinstance(r2, nifi.ConnectionEntity)
    with pytest.raises(AssertionError):
        _ = canvas.create_connection(f_p1, f_p2, ['not a connection'])
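
The AssertionError at the end comes from nipyapi validating the requested relationship names against those the source processor actually declares. A hedged sketch of checking the declared names up front (the component.relationships attribute path is assumed from NiFi's ProcessorDTO model):

from nipyapi import canvas

def declared_relationships(proc):
    # Names missing from this list make create_connection assert
    return [rel.name for rel in proc.component.relationships]

# e.g. canvas.create_connection(p1, p2, declared_relationships(p1))
# connects every relationship explicitly rather than by default.
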
Example #6
    @classmethod
    def setUpClass(cls):
        super(Test1ToNTest, cls).setUpClass()
        print("Start of tests: preparing nifi objects")
        config.nifi_config.host = 'http://192.168.56.5:8080/nifi-api'

        flow_name = "Test1ToNTest"

        nav = CanvasNavigator()
        # Delete all leftovers from previous (failed?) tests
        pgs_to_be_deleted = nav.groups(flow_name)
        for pg in pgs_to_be_deleted:
            canvas.delete_process_group(pg, force=True)
        # Create new process group in root
        Test1ToNTest.pg_test = canvas.create_process_group(nav.current, flow_name, (0, 0))

        # Create simple flow to test
        Test1ToNTest.proc_start = canvas.create_processor(
            Test1ToNTest.pg_test,
            canvas.get_processor_type("GenerateFlowFile"),
            CANVAS_CENTER,
            "Start")
        Test1ToNTest.proc_2 = canvas.create_processor(
            Test1ToNTest.pg_test,
            canvas.get_processor_type("DebugFlow"),
            CANVAS_CENTER,
            "Processor 2")
        Test1ToNTest.proc_3 = canvas.create_processor(
            Test1ToNTest.pg_test,
            canvas.get_processor_type("DebugFlow"),
            CANVAS_CENTER,
            "Processor 3")
        Test1ToNTest.proc_end_1 = canvas.create_processor(
            Test1ToNTest.pg_test,
            canvas.get_processor_type("DebugFlow"),
            CANVAS_CENTER,
            "End 1")
        Test1ToNTest.proc_end_2 = canvas.create_processor(
            Test1ToNTest.pg_test,
            canvas.get_processor_type("DebugFlow"),
            CANVAS_CENTER,
            "End 2")
        canvas.update_processor(Test1ToNTest.proc_end_1,
                                nifi.ProcessorConfigDTO(auto_terminated_relationships=["success", "failure"]))
        canvas.update_processor(Test1ToNTest.proc_end_2,
                                nifi.ProcessorConfigDTO(auto_terminated_relationships=["success", "failure"]))
        Test1ToNTest.conn_1 = canvas.create_connection(Test1ToNTest.proc_start, Test1ToNTest.proc_2, ["success"])
        Test1ToNTest.conn_2 = canvas.create_connection(Test1ToNTest.proc_2, Test1ToNTest.proc_3, ["success", "failure"])
        Test1ToNTest.conn_3 = canvas.create_connection(Test1ToNTest.proc_3, Test1ToNTest.proc_end_1, ["success", "failure"])
        Test1ToNTest.conn_4 = canvas.create_connection(Test1ToNTest.proc_3, Test1ToNTest.proc_end_2, ["success", "failure"])

        canvas.schedule_process_group(Test1ToNTest.pg_test.component.id, scheduled=True)
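
The two update_processor calls above are what make this flow schedulable: NiFi refuses to run a processor while any of its relationships is neither connected onward nor auto-terminated, so the terminal DebugFlow processors terminate both of theirs. The same idiom in isolation (proc_end stands for any ProcessorEntity whose relationships go nowhere else):

from nipyapi import canvas, nifi

canvas.update_processor(
    proc_end,  # a ProcessorEntity created earlier
    nifi.ProcessorConfigDTO(
        auto_terminated_relationships=['success', 'failure']))
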
Example #7
def test_list_all_connections(regress_nifi, fix_pg, fix_proc):
    f_p1 = fix_proc.generate()
    f_p2 = fix_proc.generate()
    r1 = [
        x for x in canvas.list_all_connections()
        if conftest.test_basename in x.component.name
    ]
    assert not r1
    # connect single relationship
    c1 = canvas.create_connection(f_p1, f_p2, ['success'],
                                  conftest.test_basename)
    r2 = [
        x for x in canvas.list_all_connections('root')
        if conftest.test_basename in x.component.name
    ]
    assert len(r2) == 1
    r3 = [
        x for x in canvas.list_all_connections(canvas.get_root_pg_id())
        if conftest.test_basename in x.component.name
    ]
    assert len(r3) == 1
    assert isinstance(r2[0], nifi.ConnectionEntity)
    c2 = canvas.create_connection(f_p1, f_p2, name=conftest.test_basename)
    r2 = [
        x for x in canvas.list_all_connections('root')
        if conftest.test_basename in x.component.name
    ]
    assert len(r2) == 2
    _ = canvas.delete_connection(c1)
    _ = canvas.delete_connection(c2)
    r4 = [
        x for x in canvas.list_all_connections('root')
        if conftest.test_basename in x.component.name
    ]
    assert not r4
    # Test Issue #129 - nested PGs with descendants missing nested content
    f_pg1 = fix_pg.generate()
    f_pg2 = fix_pg.generate(parent_pg=f_pg1)
    f_p3 = fix_proc.generate(parent_pg=f_pg2)
    f_p4 = fix_proc.generate(parent_pg=f_pg2)
    c2 = canvas.create_connection(f_p3, f_p4, ['success'],
                                  conftest.test_basename)
    r5 = [
        x for x in canvas.list_all_connections(f_pg2.id)
        if conftest.test_basename in x.component.name
    ]
    assert len(r5) == 1
    assert r5[0].id == c2.id
Example #8
def test_get_component_connections(regress_nifi, fix_proc):
    f_p1 = fix_proc.generate()
    f_p2 = fix_proc.generate()
    f_p3 = canvas.create_processor(
        parent_pg=canvas.get_process_group(canvas.get_root_pg_id(), 'id'),
        processor=canvas.get_processor_type('AttributesToJSON'),
        location=(400.0, 425.0),
        name=conftest.test_processor_name + '_inbound')
    canvas.create_connection(f_p1, f_p3, name=conftest.test_basename)
    canvas.create_connection(f_p2, f_p3, name=conftest.test_basename)
    r1 = canvas.get_component_connections(f_p1)
    assert len(r1) == 1
    assert r1[0].source_id == f_p1.id
    r2 = canvas.get_component_connections(f_p3)
    assert len(r2) == 2
    assert r2[0].destination_id == f_p3.id
    assert r2[1].source_id in [f_p1.id, f_p2.id]
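
get_component_connections returns every connection touching the component, in both directions, which is why the assertions above check both source_id and destination_id. A hedged sketch of splitting the result (same fields as asserted on above; proc stands for any ProcessorEntity):

from nipyapi import canvas

def split_connections(proc):
    conns = canvas.get_component_connections(proc)
    inbound = [c for c in conns if c.destination_id == proc.id]
    outbound = [c for c in conns if c.source_id == proc.id]
    return inbound, outbound
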
Example #9
    def lab2_edge_flow(self):
        # Create input port and funnel in NiFi
        self.context.from_gw = canvas.create_port(self.context.root_pg.id,
                                                  'INPUT_PORT', 'from Gateway',
                                                  'STOPPED', (0, 200))
        self.context.temp_funnel = nf.create_funnel(self.context.root_pg.id,
                                                    (96, 350))
        canvas.create_connection(self.context.from_gw,
                                 self.context.temp_funnel)
        canvas.schedule_components(self.context.root_pg.id, True,
                                   [self.context.from_gw])

        # Create flow in EFM
        self.context.consume_mqtt = efm.create_processor(
            self.context.flow_id, self.context.efm_pg_id, 'ConsumeMQTT',
            'org.apache.nifi.processors.mqtt.ConsumeMQTT', (100, 100), {
                'Broker URI':
                'tcp://{hostname}:1883'.format(hostname=get_hostname()),
                'Client ID':
                'minifi-iot',
                'Topic Filter':
                'iot/#',
                'Max Queue Size':
                '60',
            })
        self.context.nifi_rpg = efm.create_remote_processor_group(
            self.context.flow_id, self.context.efm_pg_id, 'Remote PG',
            nf.get_url(), 'HTTP', (100, 400))
        self.context.consume_conn = efm.create_connection(
            self.context.flow_id,
            self.context.efm_pg_id,
            self.context.consume_mqtt,
            'PROCESSOR',
            self.context.nifi_rpg,
            'REMOTE_INPUT_PORT', ['Message'],
            destination_port=self.context.from_gw.id,
            name='Sensor data',
            flow_file_expiration='60 seconds')

        # Create a bucket in NiFi Registry to save the edge flow versions
        if not versioning.get_registry_bucket('IoT'):
            versioning.create_registry_bucket('IoT')

        # Publish/version the flow
        efm.publish_flow(self.context.flow_id,
                         'First version - {}'.format(self.run_id))
Example #10
    def __build_inputs(self):
        # Create http context map for communication
        self.logger.debug(
            "Creating 'StandardHttpContextMap' for communication")
        self.http_context = canvas_ext.create_http_context_map(
            self.test_group, self.name)

        # Keep track of "cursor" location on canvas
        location = Location()

        # Create http request for starting a test
        self.logger.debug("Creating 'HandleHttpRequest' for starting a test")
        self.http_in = canvas_ext.create_request_handler(
            self.test_group, location, self.http_context, self.port)

        location.y += 200

        # Set start time
        self.logger.debug("Creating 'UpdateAttribute' to set test start time")
        in_attribute = canvas_ext.create_input_attribute(
            self.test_group, location, "Set test start time")

        location.y += 200

        self.logger.debug(
            "Connecting 'HandleHttpRequest' with 'UpdateAttribute'")
        canvas.create_connection(self.http_in, in_attribute, ["success"])

        # Route request to correct port
        self.logger.debug(
            "Creating 'RouteOnAttribute' to send each request to the correct input port"
        )
        in_route = canvas_ext.create_input_router(self.test_group, location,
                                                  self.inputs)

        location.y += 200

        self.logger.debug(
            "Connecting 'UpdateAttribute' with 'RouteOnAttribute'")
        canvas.create_connection(in_attribute, in_route, ["success"])

        location.y += 100  # Take some extra vertical space, since there can be many ports

        for test_input in self.inputs:
            input_name = test_input.component.name
            self.logger.debug("Creating port for input '%s'", input_name)
            output_port = canvas_ext.create_test_input(self.test_group,
                                                       location, input_name)
            location.x += 400
            self.logger.debug("Connecting 'RouteOnAttribute' to port '%s'",
                              input_name)
            canvas.create_connection(in_route, output_port, [input_name])
            self.logger.debug("Connecting port '%s' to processor '%s'",
                              input_name, input_name)
            canvas.create_connection(output_port, test_input)
Example #11
def test_delete_connection(regress_nifi, fix_proc):
    f_p1 = fix_proc.generate()
    f_p2 = fix_proc.generate()
    # connect single relationship
    c1 = canvas.create_connection(f_p1, f_p2, ['success'],
                                  conftest.test_basename)
    r1 = canvas.delete_connection(c1)
    assert isinstance(r1, nifi.ConnectionEntity)
    assert r1.status is None
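
delete_connection hands back the final ConnectionEntity with its status cleared, which the test asserts. Going by nipyapi's documented signature delete_connection(connection, purge=False), a connection still holding queued FlowFiles needs purging as part of the delete; a hedged sketch (conn stands for the entity returned by an earlier create_connection):

from nipyapi import canvas

dropped = canvas.delete_connection(conn, purge=True)  # purge drops queued FlowFiles too
assert dropped.status is None
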
Example #12
def test_connect_output_ports(regress_nifi, fix_pg):
    f_pg_1 = fix_pg.generate()
    f_pg_2 = fix_pg.generate()
    f_pg_1_output = canvas.create_port(f_pg_1.id, 'OUTPUT_PORT',
                                       conftest.test_basename + 'output',
                                       'STOPPED')
    f_pg_2_input = canvas.create_port(f_pg_2.id, 'INPUT_PORT',
                                      conftest.test_basename + 'input',
                                      'STOPPED')
    r1 = canvas.create_connection(source=f_pg_1_output,
                                  target=f_pg_2_input,
                                  name=conftest.test_basename)
    assert isinstance(r1, nifi.ConnectionEntity)
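
Port-to-port wiring like this is how data moves between sibling process groups; the connection itself lives in the ports' common parent group. A self-contained sketch of the same setup with stock nipyapi helpers (group names, port names, and coordinates are placeholders):

from nipyapi import canvas, nifi

root_pg = canvas.get_process_group(canvas.get_root_pg_id(), 'id')
pg_a = canvas.create_process_group(root_pg, 'pg_a', (0.0, 0.0))
pg_b = canvas.create_process_group(root_pg, 'pg_b', (0.0, 400.0))

out_port = canvas.create_port(pg_a.id, 'OUTPUT_PORT', 'a_out', 'STOPPED')
in_port = canvas.create_port(pg_b.id, 'INPUT_PORT', 'b_in', 'STOPPED')

conn = canvas.create_connection(source=out_port, target=in_port, name='a_to_b')
assert isinstance(conn, nifi.ConnectionEntity)
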
Example #13
def lab4_nifi_flow(env):
    LOG.info("Running step4_nifi_flow")
    # Create a bucket in NiFi Registry to save the edge flow versions
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')

    # Create NiFi Process Group
    env.reg_client = versioning.create_registry_client(
        'NiFi Registry', _NIFIREG_URL, 'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME, (330, 350))
    env.sensor_flow = save_flow_ver(env.sensor_pg, env.reg_client,
                                    env.sensor_bucket,
                                    flow_name='SensorProcessGroup',
                                    comment='Enabled version control - ' +
                                    str(env.run_id))

    # Create controller services
    env.sr_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
        {'url': _SCHREG_API_URL}, True)
    env.json_reader_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id
        }, True)
    env.json_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id,
            'Schema Write Strategy': 'hwx-schema-ref-attributes'
        }, True)

    # Create flow
    sensor_port = canvas.create_port(env.sensor_pg.id, 'INPUT_PORT',
                                     'Sensor Data', 'RUNNING', (0, 0))

    upd_attr = create_processor(
        env.sensor_pg, 'Set Schema Name',
        'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
        {
            'properties': {
                'schema.name': 'SensorReading',
            },
        }
    )
    canvas.create_connection(sensor_port, upd_attr)

    pub_kafka = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (0, 300),
        {
            'properties': {
                'bootstrap.servers': 'edge2ai-1.dim.local:9092',
                'topic': 'iot',
                'record-reader': env.json_reader_svc.id,
                'record-writer': env.json_writer_svc.id,
                'use-transactions': 'false',
                'attribute-name-regex': 'schema.*',
                'client.id': PRODUCER_CLIENT_ID,
            },
            'autoTerminatedRelationships': ['success'],
        }
    )
    canvas.create_connection(upd_attr, pub_kafka, ['success'])

    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

    # Commit changes
    save_flow_ver(env.sensor_pg, env.reg_client, env.sensor_bucket,
                  flow_id=env.sensor_flow.version_control_information.flow_id,
                  comment='First version - ' + str(env.run_id))

    # Start flow
    canvas.schedule_process_group(env.root_pg.id, True)

    # Update "from Gateway" input port to connect to the process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
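
The lab script above leans on nipyapi's versioning module for its commit points. A hedged sketch of the bucket/client/save round trip it repeats (the registry URL and names are placeholders; the save_flow_ver keywords mirror the calls above):

from nipyapi import canvas, versioning

bucket = (versioning.get_registry_bucket('SensorFlows')
          or versioning.create_registry_bucket('SensorFlows'))
client = versioning.create_registry_client(
    'NiFi Registry', 'http://localhost:18080', 'demo registry')  # placeholder URL
pg = canvas.get_process_group('SensorProcessGroup', 'name')      # placeholder name

# The first save enables version control; later saves commit new versions
flow = versioning.save_flow_ver(pg, client, bucket,
                                flow_name='SensorProcessGroup', comment='v1')
versioning.save_flow_ver(pg, client, bucket,
                         flow_id=flow.version_control_information.flow_id,
                         comment='v2')
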
Example #14
    def test_recreate_connection(self):
        nav = CanvasNavigator()
        loc = Location()
        name = "nipytest - unit test - test_recreate_connection"
        # Test incorrect inputs
        with self.assertRaises(AssertionError):
            canvas_ext.recreate_connection(1)
        # Create a connection, then delete it
        input_port = canvas_ext.create_test_output(nav.current, loc, name)
        output_port = canvas_ext.create_test_input(nav.current, loc, name)
        connection = canvas.create_connection(input_port, output_port)
        canvas.delete_connection(connection)
        # Run the function under test
        new_connection = canvas_ext.recreate_connection(connection)
        # If the output is the right type, the function was successful
        self.assertIsInstance(new_connection, nifi.ConnectionEntity)
        self.assertNotEqual(connection.component.id,
                            new_connection.component.id)
        # Remove temporarily created object(s)
        canvas.delete_connection(new_connection)
        canvas.delete_port(input_port)
        canvas.delete_port(output_port)
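
recreate_connection here is a project-local canvas_ext helper rather than part of nipyapi. A hedged sketch of what such a helper might do with the stock API, rebuilding from the deleted entity's remembered component (source_entity and dest_entity stand for the live endpoint entities):

from nipyapi import canvas

def recreate(old_conn, source_entity, dest_entity):
    return canvas.create_connection(
        source_entity, dest_entity,
        old_conn.component.selected_relationships,
        name=old_conn.component.name)
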
Example #15
    def __build_outputs(self):
        # Create the response handler first, so all outputs can be connected to it immediately in the loop
        location = Location(0, 1200)

        self.logger.debug(
            "Creating RouteOnAttribute for filtering only test results")
        out_route = canvas_ext.create_output_router(self.test_group, location,
                                                    self.name)

        location.y += 200

        self.logger.debug("Creating HandleHttpResponse for test results")
        self.http_out = canvas_ext.create_response_handler(
            self.test_group, location, self.http_context)
        canvas.create_connection(out_route, self.http_out, ["test"])

        location.x = 0
        for output in self.outputs:
            location.y = 800
            output_name = output.component.name
            self.logger.debug("Creating input_port for output '%s'",
                              output_name)
            input_port = canvas_ext.create_test_output(self.test_group,
                                                       location, output_name)
            location.y += 200
            self.logger.debug("Connecting processor '%s' to input_port '%s'",
                              output_name, output_name)
            canvas.create_connection(output, input_port)

            self.logger.debug("Creating UpdateAttribute for output '%s'",
                              output_name)
            update = canvas_ext.create_output_attribute(
                self.test_group, location, output_name)
            canvas.create_connection(input_port, update)
            canvas.create_connection(update, out_route)
            location.x += 400
Example #16
    def lab4_rest_and_kudu(self):
        # Prepare Impala/Kudu table
        kudu.create_table()
        kudu_table_name = kudu.get_kudu_table_name('default', 'sensors')

        # Set required variables
        if not self.context.skip_cdsw:
            # Set the variable with the CDSW access key
            canvas.update_variable_registry(
                self.context.sensor_pg,
                [('cdsw.access.key', cdsw.get_model_access_key())])
            # Set the variable with the CDSW model API key
            canvas.update_variable_registry(
                self.context.sensor_pg,
                [('cdsw.model.api.key', cdsw.create_model_api_key())])

        # Create controllers
        self.context.json_reader_with_schema_svc = nf.create_controller(
            self.context.sensor_pg,
            'org.apache.nifi.json.JsonTreeReader', {
                'schema-access-strategy': 'hwx-schema-ref-attributes',
                'schema-registry': self.context.sr_svc.id
            },
            True,
            name='JsonTreeReader - With schema identifier')
        props = {
            'rest-lookup-url': cdsw.get_model_endpoint_url(),
            'rest-lookup-record-reader': self.context.json_reader_svc.id,
            'rest-lookup-record-path': '/response',
            'Authorization': 'Bearer ${cdsw.model.api.key}',
        }
        if is_tls_enabled():
            props.update({
                'rest-lookup-ssl-context-service':
                self.context.ssl_svc.id,
            })
        rest_lookup_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.lookup.RestLookupService',
            props, True)

        # Build flow
        fail_funnel = nf.create_funnel(self.context.sensor_pg.id, (1400, 340))

        props = {
            'topic': 'iot',
            'topic_type': 'names',
            'record-reader': self.context.json_reader_with_schema_svc.id,
            'record-writer': self.context.json_writer_svc.id,
        }
        props.update(
            kafka.get_common_client_properties(self.context, 'consumer',
                                               CONSUMER_GROUP_ID,
                                               PRODUCER_CLIENT_ID))
        consume_kafka = nf.create_processor(
            self.context.sensor_pg, 'Consume Kafka iot messages', [
                'org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_2_6',
                'org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_2_0'
            ], (700, 0), {'properties': props})
        canvas.create_connection(consume_kafka, fail_funnel, ['parse.failure'])

        predict = nf.create_processor(
            self.context.sensor_pg, 'Predict machine health',
            'org.apache.nifi.processors.standard.LookupRecord', (700, 200), {
                'properties': {
                    'record-reader':
                    self.context.json_reader_with_schema_svc.id,
                    'record-writer':
                    self.context.json_writer_svc.id,
                    'lookup-service':
                    rest_lookup_svc.id,
                    'result-record-path':
                    '/response',
                    'routing-strategy':
                    'route-to-success',
                    'result-contents':
                    'insert-entire-record',
                    'mime.type':
                    "toString('application/json', 'UTF-8')",
                    'request.body':
                    "concat('{\"accessKey\":\"', '${cdsw.access.key}', "
                    "'\",\"request\":{\"feature\":\"', /sensor_0, ', ', "
                    "/sensor_1, ', ', /sensor_2, ', ', /sensor_3, ', ', "
                    "/sensor_4, ', ', /sensor_5, ', ', /sensor_6, ', ', "
                    "/sensor_7, ', ', /sensor_8, ', ', /sensor_9, ', ', "
                    "/sensor_10, ', ', /sensor_11, '\"}}')",
                    'request.method':
                    "toString('post', 'UTF-8')",
                },
            })
        canvas.create_connection(predict, fail_funnel, ['failure'])
        canvas.create_connection(consume_kafka, predict, ['success'])

        update_health = nf.create_processor(
            self.context.sensor_pg, 'Update health flag',
            'org.apache.nifi.processors.standard.UpdateRecord', (700, 400), {
                'properties': {
                    'record-reader':
                    self.context.json_reader_with_schema_svc.id,
                    'record-writer': self.context.json_writer_svc.id,
                    'replacement-value-strategy': 'record-path-value',
                    '/is_healthy': '/response/result',
                },
            })
        canvas.create_connection(update_health, fail_funnel, ['failure'])
        canvas.create_connection(predict, update_health, ['success'])

        write_kudu = nf.create_processor(
            self.context.sensor_pg, 'Write to Kudu',
            'org.apache.nifi.processors.kudu.PutKudu', (700, 600), {
                'properties': {
                    'Kudu Masters':
                    get_hostname() + ':7051',
                    'Table Name':
                    kudu_table_name,
                    'record-reader':
                    self.context.json_reader_with_schema_svc.id,
                    'kerberos-credentials-service':
                    self.context.keytab_svc.id if is_tls_enabled() else None,
                },
            })
        canvas.create_connection(write_kudu, fail_funnel, ['failure'])
        canvas.create_connection(update_health, write_kudu, ['success'])

        props = {
            'topic': 'iot_enriched',
            'record-reader': self.context.json_reader_with_schema_svc.id,
            'record-writer': self.context.json_writer_svc.id,
        }
        props.update(
            kafka.get_common_client_properties(self.context, 'producer',
                                               CONSUMER_GROUP_ID,
                                               PRODUCER_CLIENT_ID))
        pub_kafka_enriched = nf.create_processor(
            self.context.sensor_pg, 'Publish to Kafka topic: iot_enriched', [
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_6',
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0'
            ], (300, 600), {
                'properties': props,
                'autoTerminatedRelationships': ['success', 'failure'],
            })
        canvas.create_connection(update_health, pub_kafka_enriched,
                                 ['success'])

        props = {
            'topic': 'iot_enriched_avro',
            'record-reader': self.context.json_reader_with_schema_svc.id,
            'record-writer': self.context.avro_writer_svc.id,
        }
        props.update(
            kafka.get_common_client_properties(self.context, 'producer',
                                               CONSUMER_GROUP_ID,
                                               PRODUCER_CLIENT_ID))
        pub_kafka_enriched_avro = nf.create_processor(
            self.context.sensor_pg,
            'Publish to Kafka topic: iot_enriched_avro', [
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_6',
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0'
            ], (-100, 600), {
                'properties': props,
                'autoTerminatedRelationships': ['success', 'failure'],
            })
        canvas.create_connection(update_health, pub_kafka_enriched_avro,
                                 ['success'])

        monitor_activity = nf.create_processor(
            self.context.sensor_pg, 'Monitor Activity',
            'org.apache.nifi.processors.standard.MonitorActivity', (700, 800),
            {
                'properties': {
                    'Threshold Duration': '45 secs',
                    'Continually Send Messages': 'true',
                },
                'autoTerminatedRelationships':
                ['activity.restored', 'success'],
            })
        canvas.create_connection(monitor_activity, fail_funnel, ['inactive'])
        canvas.create_connection(write_kudu, monitor_activity, ['success'])

        # Version flow
        nifireg.save_flow_ver(self.context.sensor_pg,
                              self.context.reg_client,
                              self.context.sensor_bucket,
                              flow_id=self.context.sensor_flow.
                              version_control_information.flow_id,
                              comment='Second version - {}'.format(
                                  self.run_id))

        # Start everything
        canvas.schedule_process_group(self.context.root_pg.id, True)
Example #17
    def lab2_nifi_flow(self):
        # Create a bucket in NiFi Registry to save the edge flow versions
        self.context.sensor_bucket = versioning.get_registry_bucket(
            'SensorFlows')
        if not self.context.sensor_bucket:
            self.context.sensor_bucket = versioning.create_registry_bucket(
                'SensorFlows')

        # Create NiFi Process Group
        self.context.reg_client = versioning.create_registry_client(
            'NiFi Registry', nifireg.get_url(), 'The registry...')
        self.context.sensor_pg = canvas.create_process_group(
            self.context.root_pg, PG_NAME, (330, 350))
        self.context.sensor_flow = nifireg.save_flow_ver(
            self.context.sensor_pg,
            self.context.reg_client,
            self.context.sensor_bucket,
            flow_name='SensorProcessGroup',
            comment='Enabled version control - {}'.format(self.run_id))

        # Update default SSL context controller service
        ssl_svc_name = 'Default NiFi SSL Context Service'
        if is_tls_enabled():
            props = {
                'SSL Protocol': 'TLS',
                'Truststore Type': 'JKS',
                'Truststore Filename':
                '/opt/cloudera/security/jks/truststore.jks',
                'Truststore Password': get_the_pwd(),
                'Keystore Type': 'JKS',
                'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
                'Keystore Password': get_the_pwd(),
                'key-password': get_the_pwd(),
            }
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            if self.context.ssl_svc:
                canvas.schedule_controller(self.context.ssl_svc, False)
                self.context.ssl_svc = canvas.get_controller(
                    ssl_svc_name, 'name')
                canvas.update_controller(
                    self.context.ssl_svc,
                    nifi.ControllerServiceDTO(properties=props))
                self.context.ssl_svc = canvas.get_controller(
                    ssl_svc_name, 'name')
                canvas.schedule_controller(self.context.ssl_svc, True)
            else:
                self.context.keytab_svc = nf.create_controller(
                    self.context.root_pg,
                    'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                    props,
                    True,
                    name=ssl_svc_name)

        # Create controller services
        if is_tls_enabled():
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            props = {
                'Kerberos Keytab': '/keytabs/admin.keytab',
                'Kerberos Principal': 'admin',
            }
            self.context.keytab_svc = nf.create_controller(
                self.context.sensor_pg,
                'org.apache.nifi.kerberos.KeytabCredentialsService', props,
                True)
        else:
            self.context.ssl_svc = None
            self.context.keytab_svc = None

        props = {
            'url': schreg.get_api_url(),
        }
        if is_tls_enabled():
            props.update({
                'kerberos-credentials-service':
                self.context.keytab_svc.id,
                'ssl-context-service':
                self.context.ssl_svc.id,
            })
        self.context.sr_svc = nf.create_controller(
            self.context.sensor_pg,
            'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
            props, True)
        self.context.json_reader_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id
            }, True)
        self.context.json_writer_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter',
            {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id,
                'Schema Write Strategy': 'hwx-schema-ref-attributes'
            }, True)
        self.context.avro_writer_svc = nf.create_controller(
            self.context.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter',
            {
                'schema-access-strategy': 'schema-name',
                'schema-registry': self.context.sr_svc.id,
                'Schema Write Strategy': 'hwx-content-encoded-schema'
            }, True)

        # Create flow
        sensor_port = canvas.create_port(self.context.sensor_pg.id,
                                         'INPUT_PORT', 'Sensor Data',
                                         'STOPPED', (0, 0))

        upd_attr = nf.create_processor(
            self.context.sensor_pg, 'Set Schema Name',
            'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
            {
                'properties': {
                    'schema.name': 'SensorReading',
                },
            })
        canvas.create_connection(sensor_port, upd_attr)

        props = {
            'topic': 'iot',
            'record-reader': self.context.json_reader_svc.id,
            'record-writer': self.context.json_writer_svc.id,
        }
        props.update(
            kafka.get_common_client_properties(self.context, 'producer',
                                               CONSUMER_GROUP_ID,
                                               PRODUCER_CLIENT_ID))
        pub_kafka = nf.create_processor(
            self.context.sensor_pg, 'Publish to Kafka topic: iot', [
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_6',
                'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0'
            ], (0, 300), {
                'properties': props,
                'autoTerminatedRelationships': ['success'],
            })
        canvas.create_connection(upd_attr, pub_kafka, ['success'])

        fail_funnel = nf.create_funnel(self.context.sensor_pg.id, (600, 343))
        canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

        # Commit changes
        nifireg.save_flow_ver(self.context.sensor_pg,
                              self.context.reg_client,
                              self.context.sensor_bucket,
                              flow_id=self.context.sensor_flow.
                              version_control_information.flow_id,
                              comment='First version - {}'.format(self.run_id))

        # Start flow
        canvas.schedule_process_group(self.context.root_pg.id, True)

        # Update "from Gateway" input port to connect to the process group
        nf.update_connection(self.context.from_gw, self.context.temp_funnel,
                             sensor_port)
        canvas.schedule_components(self.context.root_pg.id, True,
                                   [sensor_port])
Example #18
def lab4_nifi_flow(env):
    LOG.info("Running step4_nifi_flow")
    # Create a bucket in NiFi Registry to save the edge flow versions
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')

    # Create NiFi Process Group
    env.reg_client = versioning.create_registry_client('NiFi Registry',
                                                       _get_nifireg_url(),
                                                       'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME,
                                                (330, 350))
    env.sensor_flow = save_flow_ver(env.sensor_pg,
                                    env.reg_client,
                                    env.sensor_bucket,
                                    flow_name='SensorProcessGroup',
                                    comment='Enabled version control - ' +
                                    str(env.run_id))

    # Update default SSL context controller service
    ssl_svc_name = 'Default NiFi SSL Context Service'
    if _IS_TLS_ENABLED:
        props = {
            'SSL Protocol': 'TLS',
            'Truststore Type': 'JKS',
            'Truststore Filename': '/opt/cloudera/security/jks/truststore.jks',
            'Truststore Password': _THE_PWD,
            'Keystore Type': 'JKS',
            'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
            'Keystore Password': _THE_PWD,
            'key-password': _THE_PWD,
        }
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        if env.ssl_svc:
            canvas.schedule_controller(env.ssl_svc, False)
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.update_controller(
                env.ssl_svc, nifi.ControllerServiceDTO(properties=props))
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.schedule_controller(env.ssl_svc, True)
        else:
            env.keytab_svc = create_controller(
                env.root_pg,
                'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                props,
                True,
                name=ssl_svc_name)

    # Create controller services
    if _IS_TLS_ENABLED:
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        props = {
            'Kerberos Keytab': '/keytabs/admin.keytab',
            'Kerberos Principal': 'admin',
        }
        env.keytab_svc = create_controller(
            env.sensor_pg, 'org.apache.nifi.kerberos.KeytabCredentialsService',
            props, True)
    else:
        env.ssl_svc = None
        env.keytab_svc = None

    props = {
        'url': _get_schreg_api_url(),
    }
    if _IS_TLS_ENABLED:
        props.update({
            'kerberos-credentials-service': env.keytab_svc.id,
            'ssl-context-service': env.ssl_svc.id,
        })
    env.sr_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
        props, True)
    env.json_reader_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id
        }, True)
    env.json_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id,
            'Schema Write Strategy': 'hwx-schema-ref-attributes'
        }, True)
    env.avro_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter', {
            'schema-access-strategy': 'schema-name',
            'schema-registry': env.sr_svc.id,
            'Schema Write Strategy': 'hwx-content-encoded-schema'
        }, True)

    # Create flow
    sensor_port = canvas.create_port(env.sensor_pg.id, 'INPUT_PORT',
                                     'Sensor Data', 'RUNNING', (0, 0))

    upd_attr = create_processor(
        env.sensor_pg, 'Set Schema Name',
        'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100), {
            'properties': {
                'schema.name': 'SensorReading',
            },
        })
    canvas.create_connection(sensor_port, upd_attr)

    props = {
        'topic': 'iot',
        'record-reader': env.json_reader_svc.id,
        'record-writer': env.json_writer_svc.id,
    }
    props.update(_get_common_kafka_client_properties(env, 'producer'))
    pub_kafka = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (0, 300), {
            'properties': props,
            'autoTerminatedRelationships': ['success'],
        })
    canvas.create_connection(upd_attr, pub_kafka, ['success'])

    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])

    # Commit changes
    save_flow_ver(env.sensor_pg,
                  env.reg_client,
                  env.sensor_bucket,
                  flow_id=env.sensor_flow.version_control_information.flow_id,
                  comment='First version - ' + str(env.run_id))

    # Start flow
    canvas.schedule_process_group(env.root_pg.id, True)

    # Update "from Gateway" input port to connect to the process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
Example #19
def lab7_rest_and_kudu(env):
    LOG.info("Running step7_rest_and_kudu")
    # Create controllers
    env.json_reader_with_schema_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.json.JsonTreeReader', {
            'schema-access-strategy': 'hwx-schema-ref-attributes',
            'schema-registry': env.sr_svc.id
        },
        True,
        name='JsonTreeReader - With schema identifier')
    props = {
        'rest-lookup-url': get_cdsw_altus_api() + '/models/call-model',
        'rest-lookup-record-reader': env.json_reader_svc.id,
        'rest-lookup-record-path': '/response'
    }
    if _IS_TLS_ENABLED:
        props.update({
            'rest-lookup-ssl-context-service': env.ssl_svc.id,
        })
    rest_lookup_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.lookup.RestLookupService', props, True)

    # Build flow
    fail_funnel = create_funnel(env.sensor_pg.id, (1400, 340))

    props = {
        'topic': 'iot',
        'topic_type': 'names',
        'record-reader': env.json_reader_with_schema_svc.id,
        'record-writer': env.json_writer_svc.id,
    }
    props.update(_get_common_kafka_client_properties(env, 'consumer'))
    consume_kafka = create_processor(
        env.sensor_pg, 'Consume Kafka iot messages',
        'org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_2_0',
        (700, 0), {'properties': props})
    canvas.create_connection(consume_kafka, fail_funnel, ['parse.failure'])

    predict = create_processor(
        env.sensor_pg, 'Predict machine health',
        'org.apache.nifi.processors.standard.LookupRecord', (700, 200), {
            'properties': {
                'record-reader': env.json_reader_with_schema_svc.id,
                'record-writer': env.json_writer_svc.id,
                'lookup-service': rest_lookup_svc.id,
                'result-record-path': '/response',
                'routing-strategy': 'route-to-success',
                'result-contents': 'insert-entire-record',
                'mime.type': "toString('application/json', 'UTF-8')",
                'request.body':
                "concat('{\"accessKey\":\"', '${cdsw.access.key}', '\",\"request\":{\"feature\":\"', /sensor_0, ', ', /sensor_1, ', ', /sensor_2, ', ', /sensor_3, ', ', /sensor_4, ', ', /sensor_5, ', ', /sensor_6, ', ', /sensor_7, ', ', /sensor_8, ', ', /sensor_9, ', ', /sensor_10, ', ', /sensor_11, '\"}}')",
                'request.method': "toString('post', 'UTF-8')",
            },
        })
    canvas.create_connection(predict, fail_funnel, ['failure'])
    canvas.create_connection(consume_kafka, predict, ['success'])

    update_health = create_processor(
        env.sensor_pg, 'Update health flag',
        'org.apache.nifi.processors.standard.UpdateRecord', (700, 400), {
            'properties': {
                'record-reader': env.json_reader_with_schema_svc.id,
                'record-writer': env.json_writer_svc.id,
                'replacement-value-strategy': 'record-path-value',
                '/is_healthy': '/response/result',
            },
        })
    canvas.create_connection(update_health, fail_funnel, ['failure'])
    canvas.create_connection(predict, update_health, ['success'])

    if get_kudu_version() >= (1, 14):
        kudu_table_name = 'default.sensors'
    else:
        kudu_table_name = 'impala::default.sensors'
    write_kudu = create_processor(
        env.sensor_pg, 'Write to Kudu',
        'org.apache.nifi.processors.kudu.PutKudu', (700, 600), {
            'properties': {
                'Kudu Masters':
                _HOSTNAME + ':7051',
                'Table Name':
                kudu_table_name,
                'record-reader':
                env.json_reader_with_schema_svc.id,
                'kerberos-credentials-service':
                env.keytab_svc.id if _IS_TLS_ENABLED else None,
            },
        })
    canvas.create_connection(write_kudu, fail_funnel, ['failure'])
    canvas.create_connection(update_health, write_kudu, ['success'])

    props = {
        'topic': 'iot_enriched',
        'record-reader': env.json_reader_with_schema_svc.id,
        'record-writer': env.json_writer_svc.id,
    }
    props.update(_get_common_kafka_client_properties(env, 'producer'))
    pub_kafka_enriched = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot_enriched',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (300, 600), {
            'properties': props,
            'autoTerminatedRelationships': ['success', 'failure'],
        })
    canvas.create_connection(update_health, pub_kafka_enriched, ['success'])

    props = {
        'topic': 'iot_enriched_avro',
        'record-reader': env.json_reader_with_schema_svc.id,
        'record-writer': env.avro_writer_svc.id,
    }
    props.update(_get_common_kafka_client_properties(env, 'producer'))
    pub_kafka_enriched_avro = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot_enriched_avro',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (-100, 600), {
            'properties': props,
            'autoTerminatedRelationships': ['success', 'failure'],
        })
    canvas.create_connection(update_health, pub_kafka_enriched_avro,
                             ['success'])

    monitor_activity = create_processor(
        env.sensor_pg, 'Monitor Activity',
        'org.apache.nifi.processors.standard.MonitorActivity', (700, 800), {
            'properties': {
                'Threshold Duration': '45 secs',
                'Continually Send Messages': 'true',
            },
            'autoTerminatedRelationships': ['activity.restored', 'success'],
        })
    canvas.create_connection(monitor_activity, fail_funnel, ['inactive'])
    canvas.create_connection(write_kudu, monitor_activity, ['success'])

    # Version flow
    save_flow_ver(env.sensor_pg,
                  env.reg_client,
                  env.sensor_bucket,
                  flow_id=env.sensor_flow.version_control_information.flow_id,
                  comment='Second version - ' + str(env.run_id))

    # Prepare Impala/Kudu table
    create_kudu_table()

    # Set the variable with the CDSW access key
    if env.cdsw_flag:
        canvas.update_variable_registry(
            env.sensor_pg, [('cdsw.access.key', get_cdsw_model_access_key())])

    # Start everything
    canvas.schedule_process_group(env.root_pg.id, True)
Example #20
def step7_rest_and_kudu(env):
    # Create controllers
    json_reader_with_schema_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.json.JsonTreeReader', {
            'schema-access-strategy': 'hwx-schema-ref-attributes',
            'schema-registry': env.sr_svc.id
        },
        True,
        name='JsonTreeReader - With schema identifier')
    rest_lookup_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.lookup.RestLookupService', {
            'rest-lookup-url': get_cdsw_altus_api() + '/models/call-model',
            'rest-lookup-record-reader': env.json_reader_svc.id,
            'rest-lookup-record-path': '/response'
        }, True)

    # Build flow
    fail_funnel = create_funnel(env.sensor_pg.id, (1400, 340))

    consume_kafka = create_processor(
        env.sensor_pg, 'Consume Kafka iot messages',
        'org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_2_0',
        (700, 0), {
            'properties': {
                'bootstrap.servers': 'edge2ai-1.dim.local:9092',
                'topic': 'iot',
                'topic_type': 'names',
                'record-reader': json_reader_with_schema_svc.id,
                'record-writer': env.json_writer_svc.id,
                'honor-transactions': 'false',
                'group.id': CONSUMER_GROUP_ID,
                'auto.offset.reset': 'latest',
                'header-name-regex': 'schema.*',
            },
        })
    canvas.create_connection(consume_kafka, fail_funnel, ['parse.failure'])

    predict = create_processor(
        env.sensor_pg, 'Predict machine health',
        'org.apache.nifi.processors.standard.LookupRecord', (700, 200), {
            'properties': {
                'record-reader': json_reader_with_schema_svc.id,
                'record-writer': env.json_writer_svc.id,
                'lookup-service': rest_lookup_svc.id,
                'result-record-path': '/response',
                'routing-strategy': 'route-to-success',
                'result-contents': 'insert-entire-record',
                'mime.type': "toString('application/json', 'UTF-8')",
                'request.body':
                "concat('{\"accessKey\":\"', '${cdsw.access.key}', '\",\"request\":{\"feature\":\"', /sensor_0, ', ', /sensor_1, ', ', /sensor_2, ', ', /sensor_3, ', ', /sensor_4, ', ', /sensor_5, ', ', /sensor_6, ', ', /sensor_7, ', ', /sensor_8, ', ', /sensor_9, ', ', /sensor_10, ', ', /sensor_11, '\"}}')",
                'request.method': "toString('post', 'UTF-8')",
            },
        })
    canvas.create_connection(predict, fail_funnel, ['failure'])
    canvas.create_connection(consume_kafka, predict, ['success'])

    update_health = create_processor(
        env.sensor_pg, 'Update health flag',
        'org.apache.nifi.processors.standard.UpdateRecord', (700, 400), {
            'properties': {
                'record-reader': json_reader_with_schema_svc.id,
                'record-writer': env.json_writer_svc.id,
                'replacement-value-strategy': 'record-path-value',
                '/is_healthy': '/response/result',
            },
        })
    canvas.create_connection(update_health, fail_funnel, ['failure'])
    canvas.create_connection(predict, update_health, ['success'])

    write_kudu = create_processor(
        env.sensor_pg, 'Write to Kudu',
        'org.apache.nifi.processors.kudu.PutKudu', (700, 600), {
            'properties': {
                'Kudu Masters': 'edge2ai-1.dim.local:7051',
                'Table Name': 'impala::default.sensors',
                'record-reader': json_reader_with_schema_svc.id,
            },
        })
    canvas.create_connection(write_kudu, fail_funnel, ['failure'])
    canvas.create_connection(update_health, write_kudu, ['success'])

    monitor_activity = create_processor(
        env.sensor_pg, 'Monitor Activity',
        'org.apache.nifi.processors.standard.MonitorActivity', (700, 800), {
            'properties': {
                'Threshold Duration': '45 secs',
                'Continually Send Messages': 'true',
            },
            'autoTerminatedRelationships': ['activity.restored', 'success'],
        })
    canvas.create_connection(monitor_activity, fail_funnel, ['inactive'])
    canvas.create_connection(write_kudu, monitor_activity, ['success'])

    # Version flow
    save_flow_ver(env.sensor_pg,
                  env.reg_client,
                  env.sensor_bucket,
                  flow_id=env.sensor_flow.version_control_information.flow_id,
                  comment='Second version - ' + env.run_id)

    # Prepare Impala/Kudu table
    create_kudu_table()

    # Set the variable with the CDSW access key
    canvas.update_variable_registry(
        env.sensor_pg, [('cdsw.access.key', get_cdsw_model_access_key())])

    # Start everything
    canvas.schedule_process_group(env.root_pg.id, True)