def setUpClass(cls):
    """Build the NiFi object hierarchy shared by every test in this class.

    Creates a "parent" process group under root, several nested child
    groups, one processor, one input/output port pair and one controller
    service, all stored as class attributes.
    """
    super(CanvasNavigatorTest, cls).setUpClass()
    print("Start of tests: preparing nifi objects")
    config.nifi_config.host = 'http://192.168.56.5:8080/nifi-api'
    root = canvas.get_process_group(canvas.get_root_pg_id(), 'id')
    # Assign on the class explicitly (not via cls) so a subclass running
    # this setUp still populates CanvasNavigatorTest itself.
    tc = CanvasNavigatorTest
    # Fresh process group directly under root
    tc.pg_parent = canvas.create_process_group(root, "parent", CANVAS_CENTER)
    # Child groups; two of them deliberately share the name "child2"
    # (duplicate-name navigation is exercised by the tests).
    tc.pg_child1 = canvas.create_process_group(tc.pg_parent, "child1", CANVAS_CENTER)
    tc.pg_child2 = canvas.create_process_group(tc.pg_parent, "child2", CANVAS_CENTER)
    tc.pg_child2_2 = canvas.create_process_group(tc.pg_parent, "child2", CANVAS_CENTER)
    tc.pg_grandchild1 = canvas.create_process_group(tc.pg_child1, "grandchild1", CANVAS_CENTER)
    # Non-group canvas objects used by the navigation tests
    tc.proc = canvas.create_processor(
        tc.pg_parent, canvas.get_processor_type("GenerateFlowFile"),
        CANVAS_CENTER, "proc")
    tc.input_port = canvas.create_port(
        tc.pg_parent.component.id, "INPUT_PORT", "input_port",
        "STOPPED", CANVAS_CENTER)
    tc.output_port = canvas.create_port(
        tc.pg_parent.component.id, "OUTPUT_PORT", "output_port",
        "STOPPED", CANVAS_CENTER)
    tc.controller = canvas.create_controller(
        tc.pg_parent, canvas.list_all_controller_types()[0], "controller")
def test_create_process_group(regress_nifi):
    """Process groups can be created under root and under a nested parent.

    Also verifies that an invalid parent id is rejected with ValueError.
    """
    root_pg = canvas.get_process_group(canvas.get_root_pg_id(), 'id')
    created = canvas.create_process_group(
        parent_pg=root_pg,
        new_pg_name=conftest.test_pg_name,
        location=(400.0, 400.0),
        comment='some comment')
    assert created.component.name == conftest.test_pg_name
    assert created.position.x == created.position.y == 400
    assert created.component.parent_group_id == canvas.get_root_pg_id()
    assert isinstance(created, nifi.ProcessGroupEntity)
    # Creation under a non-root parent process group
    nested = canvas.create_process_group(
        parent_pg=canvas.get_process_group(conftest.test_pg_name),
        location=(200.0, 200.0),
        new_pg_name=conftest.test_another_pg_name)
    assert nested.component.name == conftest.test_another_pg_name
    assert nested.position.x == nested.position.y == 200
    assert nested.component.parent_group_id == canvas.get_process_group(
        conftest.test_pg_name, "name").id
    assert isinstance(nested, nifi.ProcessGroupEntity)
    # A parent entity with a bogus id must be rejected client-side
    with pytest.raises(ValueError):
        bad_parent = canvas.get_process_group('NiFi Flow')
        bad_parent.id = 'invalid'
        _ = canvas.create_process_group(bad_parent, 'irrelevant', (0, 0))
def test_create_process_group(regress):
    """A process group is created under root with the requested name/position.

    An invalid parent id must be rejected by the server with ApiException.
    """
    new_pg = canvas.create_process_group(
        canvas.get_process_group(canvas.get_root_pg_id(), 'id'),
        config.test_pg_name,
        location=(400.0, 400.0))
    assert new_pg.component.name == config.test_pg_name
    assert new_pg.position.x == new_pg.position.y == 400
    assert new_pg.component.parent_group_id == canvas.get_root_pg_id()
    # Server-side rejection of a bogus parent id
    with pytest.raises(ApiException):
        bad_parent = canvas.get_process_group('NiFi Flow')
        bad_parent.id = 'invalid'
        _ = canvas.create_process_group(bad_parent, 'irrelevant', (0, 0))
def lab4_nifi_flow(env):
    """Create, version and start the NiFi sensor ingest flow.

    Builds: registry bucket/client, a version-controlled process group,
    the schema-registry + JSON reader/writer controller services, and the
    flow  input port -> UpdateAttribute -> PublishKafkaRecord  with
    failures routed to a funnel. Finally schedules the root group and
    rewires the gateway connection into the new input port.
    """
    LOG.info("Running step4_nifi_flow")
    # Registry bucket for the edge flow versions (reuse when it exists)
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')
    # Version-controlled process group
    env.reg_client = versioning.create_registry_client(
        'NiFi Registry', _NIFIREG_URL, 'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME, (330, 350))
    # run_id is stringified before concatenation into the version comment
    env.sensor_flow = save_flow_ver(
        env.sensor_pg, env.reg_client, env.sensor_bucket,
        flow_name='SensorProcessGroup',
        comment='Enabled version control - ' + str(env.run_id))
    # Controller services: schema registry plus record reader/writer
    env.sr_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
        {'url': _SCHREG_API_URL}, True)
    env.json_reader_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': env.sr_svc.id}, True)
    env.json_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': env.sr_svc.id,
         'Schema Write Strategy': 'hwx-schema-ref-attributes'}, True)
    # Flow wiring
    sensor_port = canvas.create_port(
        env.sensor_pg.id, 'INPUT_PORT', 'Sensor Data', 'RUNNING', (0, 0))
    upd_attr = create_processor(
        env.sensor_pg, 'Set Schema Name',
        'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
        {
            'properties': {
                'schema.name': 'SensorReading',
            },
        })
    canvas.create_connection(sensor_port, upd_attr)
    pub_kafka = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (0, 300),
        {
            'properties': {
                'bootstrap.servers': 'edge2ai-1.dim.local:9092',
                'topic': 'iot',
                'record-reader': env.json_reader_svc.id,
                'record-writer': env.json_writer_svc.id,
                'use-transactions': 'false',
                'attribute-name-regex': 'schema.*',
                'client.id': PRODUCER_CLIENT_ID,
            },
            'autoTerminatedRelationships': ['success'],
        })
    canvas.create_connection(upd_attr, pub_kafka, ['success'])
    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])
    # Commit the first version of the flow
    save_flow_ver(
        env.sensor_pg, env.reg_client, env.sensor_bucket,
        flow_id=env.sensor_flow.version_control_information.flow_id,
        comment='First version - ' + str(env.run_id))
    # Start everything under root
    canvas.schedule_process_group(env.root_pg.id, True)
    # Re-point the "from Gateway" connection into the new process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
def __create_test_group(self):
    """(Re)create the test process group and populate its ports.

    Any existing group with the same name is deleted first so the test
    always starts from a clean group.
    """
    self.__delete_test_group()
    # Fresh, empty group under the base group at the canvas origin
    self.test_group = canvas.create_process_group(self.base, self.name, (0, 0))
    # Populate the group's inputs and outputs
    self.__build_inputs()
    self.__build_outputs()
def create(self):
    """Create this process group on the canvas and remember its id.

    Returns the newly created process group entity.
    """
    pg_entity = canvas.create_process_group(
        self.parent,
        self.name,
        self.location,
    )
    # Force the revision counter to 0 on the returned entity
    pg_entity.revision.version = 0
    self.id = pg_entity.id
    return pg_entity
def setUpClass(cls):
    """Build the 1-to-N flow under test: Start -> P2 -> P3 -> (End 1, End 2).

    Deletes any leftover groups from previous runs, creates the flow,
    auto-terminates both relationships on the end processors and starts
    the whole group.
    """
    super(Test1ToNTest, cls).setUpClass()
    print("Start of tests: preparing nifi objects")
    config.nifi_config.host = 'http://192.168.56.5:8080/nifi-api'
    flow_name = "Test1ToNTest"
    nav = CanvasNavigator()
    # Clean up anything left behind by previous (possibly failed) runs
    for leftover in nav.groups(flow_name):
        canvas.delete_process_group(leftover, force=True)
    # Assign on the class explicitly so sibling tests can reach the objects
    t = Test1ToNTest
    t.pg_test = canvas.create_process_group(nav.current, flow_name, (0, 0))
    # Processors making up the flow
    t.proc_start = canvas.create_processor(
        t.pg_test, canvas.get_processor_type("GenerateFlowFile"),
        CANVAS_CENTER, "Start")
    t.proc_2 = canvas.create_processor(
        t.pg_test, canvas.get_processor_type("DebugFlow"),
        CANVAS_CENTER, "Processor 2")
    t.proc_3 = canvas.create_processor(
        t.pg_test, canvas.get_processor_type("DebugFlow"),
        CANVAS_CENTER, "Processor 3")
    t.proc_end_1 = canvas.create_processor(
        t.pg_test, canvas.get_processor_type("DebugFlow"),
        CANVAS_CENTER, "End 1")
    t.proc_end_2 = canvas.create_processor(
        t.pg_test, canvas.get_processor_type("DebugFlow"),
        CANVAS_CENTER, "End 2")
    # Terminal processors swallow both relationships so nothing queues up
    for end_proc in (t.proc_end_1, t.proc_end_2):
        canvas.update_processor(
            end_proc,
            nifi.ProcessorConfigDTO(
                auto_terminated_relationships=["success", "failure"]))
    # Connections: one 1-to-1 chain, then P3 fans out to both ends
    t.conn_1 = canvas.create_connection(t.proc_start, t.proc_2, ["success"])
    t.conn_2 = canvas.create_connection(t.proc_2, t.proc_3, ["success", "failure"])
    t.conn_3 = canvas.create_connection(t.proc_3, t.proc_end_1, ["success", "failure"])
    t.conn_4 = canvas.create_connection(t.proc_3, t.proc_end_2, ["success", "failure"])
    canvas.schedule_process_group(t.pg_test.component.id, scheduled=True)
def lab4_nifi_flow(env):
    """Create, version and start the NiFi sensor ingest flow (TLS-aware).

    Builds: registry bucket/client, a version-controlled process group,
    the SSL/Kerberos/schema-registry/record controller services, and the
    flow  input port -> UpdateAttribute -> PublishKafkaRecord  with
    failures routed to a funnel. Finally schedules the root group and
    rewires the gateway connection into the new input port.
    """
    LOG.info("Running step4_nifi_flow")
    # Registry bucket for the edge flow versions (reuse when it exists)
    env.sensor_bucket = versioning.get_registry_bucket('SensorFlows')
    if not env.sensor_bucket:
        env.sensor_bucket = versioning.create_registry_bucket('SensorFlows')
    # Version-controlled process group
    env.reg_client = versioning.create_registry_client(
        'NiFi Registry', _get_nifireg_url(), 'The registry...')
    env.sensor_pg = canvas.create_process_group(env.root_pg, PG_NAME, (330, 350))
    env.sensor_flow = save_flow_ver(
        env.sensor_pg, env.reg_client, env.sensor_bucket,
        flow_name='SensorProcessGroup',
        comment='Enabled version control - ' + str(env.run_id))
    # Default SSL context service: reconfigure it when it already exists,
    # otherwise create it. (props is only needed on the TLS path.)
    ssl_svc_name = 'Default NiFi SSL Context Service'
    if _IS_TLS_ENABLED:
        props = {
            'SSL Protocol': 'TLS',
            'Truststore Type': 'JKS',
            'Truststore Filename': '/opt/cloudera/security/jks/truststore.jks',
            'Truststore Password': _THE_PWD,
            'Keystore Type': 'JKS',
            'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
            'Keystore Password': _THE_PWD,
            'key-password': _THE_PWD,
        }
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        if env.ssl_svc:
            # Stop, reconfigure and restart the existing service,
            # re-fetching the entity before each mutation for a fresh revision
            canvas.schedule_controller(env.ssl_svc, False)
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.update_controller(
                env.ssl_svc, nifi.ControllerServiceDTO(properties=props))
            env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.schedule_controller(env.ssl_svc, True)
        else:
            # NOTE(review): the freshly created SSL context service is stored
            # in env.keytab_svc, not env.ssl_svc — looks like a slip in the
            # original; confirm against downstream readers before changing.
            env.keytab_svc = create_controller(
                env.root_pg,
                'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                props, True, name=ssl_svc_name)
    # Kerberos keytab credentials (TLS deployments only)
    if _IS_TLS_ENABLED:
        env.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        props = {
            'Kerberos Keytab': '/keytabs/admin.keytab',
            'Kerberos Principal': 'admin',
        }
        env.keytab_svc = create_controller(
            env.sensor_pg,
            'org.apache.nifi.kerberos.KeytabCredentialsService', props, True)
    else:
        env.ssl_svc = None
        env.keytab_svc = None
    # Schema registry service, secured when TLS is on
    props = {
        'url': _get_schreg_api_url(),
    }
    if _IS_TLS_ENABLED:
        props.update({
            'kerberos-credentials-service': env.keytab_svc.id,
            'ssl-context-service': env.ssl_svc.id,
        })
    env.sr_svc = create_controller(
        env.sensor_pg,
        'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
        props, True)
    # Record readers/writers bound to the schema registry
    env.json_reader_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonTreeReader',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': env.sr_svc.id}, True)
    env.json_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': env.sr_svc.id,
         'Schema Write Strategy': 'hwx-schema-ref-attributes'}, True)
    env.avro_writer_svc = create_controller(
        env.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': env.sr_svc.id,
         'Schema Write Strategy': 'hwx-content-encoded-schema'}, True)
    # Flow wiring
    sensor_port = canvas.create_port(
        env.sensor_pg.id, 'INPUT_PORT', 'Sensor Data', 'RUNNING', (0, 0))
    upd_attr = create_processor(
        env.sensor_pg, 'Set Schema Name',
        'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
        {
            'properties': {
                'schema.name': 'SensorReading',
            },
        })
    canvas.create_connection(sensor_port, upd_attr)
    props = {
        'topic': 'iot',
        'record-reader': env.json_reader_svc.id,
        'record-writer': env.json_writer_svc.id,
    }
    props.update(_get_common_kafka_client_properties(env, 'producer'))
    pub_kafka = create_processor(
        env.sensor_pg, 'Publish to Kafka topic: iot',
        'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0',
        (0, 300),
        {
            'properties': props,
            'autoTerminatedRelationships': ['success'],
        })
    canvas.create_connection(upd_attr, pub_kafka, ['success'])
    fail_funnel = create_funnel(env.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])
    # Commit the first version of the flow
    save_flow_ver(
        env.sensor_pg, env.reg_client, env.sensor_bucket,
        flow_id=env.sensor_flow.version_control_information.flow_id,
        comment='First version - ' + str(env.run_id))
    # Start everything under root
    canvas.schedule_process_group(env.root_pg.id, True)
    # Re-point the "from Gateway" connection into the new process group
    update_connection(env.from_gw, env.temp_funnel, sensor_port)
def lab2_nifi_flow(self):
    """Create, version and start the NiFi sensor ingest flow for lab 2.

    Builds: registry bucket/client, a version-controlled process group,
    the SSL/Kerberos/schema-registry/record controller services, and the
    flow  input port -> UpdateAttribute -> PublishKafkaRecord  with
    failures routed to a funnel. The input port is created STOPPED and
    scheduled explicitly at the end.
    """
    # Registry bucket for the edge flow versions (reuse when it exists)
    self.context.sensor_bucket = versioning.get_registry_bucket(
        'SensorFlows')
    if not self.context.sensor_bucket:
        self.context.sensor_bucket = versioning.create_registry_bucket(
            'SensorFlows')
    # Version-controlled process group
    self.context.reg_client = versioning.create_registry_client(
        'NiFi Registry', nifireg.get_url(), 'The registry...')
    self.context.sensor_pg = canvas.create_process_group(
        self.context.root_pg, PG_NAME, (330, 350))
    self.context.sensor_flow = nifireg.save_flow_ver(
        self.context.sensor_pg, self.context.reg_client,
        self.context.sensor_bucket,
        flow_name='SensorProcessGroup',
        comment='Enabled version control - {}'.format(self.run_id))
    # Default SSL context service: reconfigure it when it already exists,
    # otherwise create it. (props is only needed on the TLS path.)
    ssl_svc_name = 'Default NiFi SSL Context Service'
    if is_tls_enabled():
        props = {
            'SSL Protocol': 'TLS',
            'Truststore Type': 'JKS',
            'Truststore Filename': '/opt/cloudera/security/jks/truststore.jks',
            'Truststore Password': get_the_pwd(),
            'Keystore Type': 'JKS',
            'Keystore Filename': '/opt/cloudera/security/jks/keystore.jks',
            'Keystore Password': get_the_pwd(),
            'key-password': get_the_pwd(),
        }
        self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        if self.context.ssl_svc:
            # Stop, reconfigure and restart the existing service,
            # re-fetching the entity before each mutation for a fresh revision
            canvas.schedule_controller(self.context.ssl_svc, False)
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.update_controller(
                self.context.ssl_svc,
                nifi.ControllerServiceDTO(properties=props))
            self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
            canvas.schedule_controller(self.context.ssl_svc, True)
        else:
            # NOTE(review): the freshly created SSL context service is stored
            # in keytab_svc, not ssl_svc — looks like a slip in the original;
            # confirm against downstream readers before changing.
            self.context.keytab_svc = nf.create_controller(
                self.context.root_pg,
                'org.apache.nifi.ssl.StandardRestrictedSSLContextService',
                props, True, name=ssl_svc_name)
    # Kerberos keytab credentials (TLS deployments only)
    if is_tls_enabled():
        self.context.ssl_svc = canvas.get_controller(ssl_svc_name, 'name')
        props = {
            'Kerberos Keytab': '/keytabs/admin.keytab',
            'Kerberos Principal': 'admin',
        }
        self.context.keytab_svc = nf.create_controller(
            self.context.sensor_pg,
            'org.apache.nifi.kerberos.KeytabCredentialsService', props, True)
    else:
        self.context.ssl_svc = None
        self.context.keytab_svc = None
    # Schema registry service, secured when TLS is on
    props = {
        'url': schreg.get_api_url(),
    }
    if is_tls_enabled():
        props.update({
            'kerberos-credentials-service': self.context.keytab_svc.id,
            'ssl-context-service': self.context.ssl_svc.id,
        })
    self.context.sr_svc = nf.create_controller(
        self.context.sensor_pg,
        'org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry',
        props, True)
    # Record readers/writers bound to the schema registry
    self.context.json_reader_svc = nf.create_controller(
        self.context.sensor_pg, 'org.apache.nifi.json.JsonTreeReader',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': self.context.sr_svc.id}, True)
    self.context.json_writer_svc = nf.create_controller(
        self.context.sensor_pg, 'org.apache.nifi.json.JsonRecordSetWriter',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': self.context.sr_svc.id,
         'Schema Write Strategy': 'hwx-schema-ref-attributes'}, True)
    self.context.avro_writer_svc = nf.create_controller(
        self.context.sensor_pg, 'org.apache.nifi.avro.AvroRecordSetWriter',
        {'schema-access-strategy': 'schema-name',
         'schema-registry': self.context.sr_svc.id,
         'Schema Write Strategy': 'hwx-content-encoded-schema'}, True)
    # Flow wiring (port starts STOPPED; scheduled at the end)
    sensor_port = canvas.create_port(
        self.context.sensor_pg.id, 'INPUT_PORT', 'Sensor Data',
        'STOPPED', (0, 0))
    upd_attr = nf.create_processor(
        self.context.sensor_pg, 'Set Schema Name',
        'org.apache.nifi.processors.attributes.UpdateAttribute', (0, 100),
        {
            'properties': {
                'schema.name': 'SensorReading',
            },
        })
    canvas.create_connection(sensor_port, upd_attr)
    props = {
        'topic': 'iot',
        'record-reader': self.context.json_reader_svc.id,
        'record-writer': self.context.json_writer_svc.id,
    }
    props.update(
        kafka.get_common_client_properties(
            self.context, 'producer', CONSUMER_GROUP_ID, PRODUCER_CLIENT_ID))
    # Processor type given as a preference list: 2_6 first, 2_0 fallback
    pub_kafka = nf.create_processor(
        self.context.sensor_pg, 'Publish to Kafka topic: iot',
        [
            'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_6',
            'org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0'
        ],
        (0, 300),
        {
            'properties': props,
            'autoTerminatedRelationships': ['success'],
        })
    canvas.create_connection(upd_attr, pub_kafka, ['success'])
    fail_funnel = nf.create_funnel(self.context.sensor_pg.id, (600, 343))
    canvas.create_connection(pub_kafka, fail_funnel, ['failure'])
    # Commit the first version of the flow
    nifireg.save_flow_ver(
        self.context.sensor_pg, self.context.reg_client,
        self.context.sensor_bucket,
        flow_id=self.context.sensor_flow.
        version_control_information.flow_id,
        comment='First version - {}'.format(self.run_id))
    # Start everything under root
    canvas.schedule_process_group(self.context.root_pg.id, True)
    # Re-point the "from Gateway" connection into the new process group,
    # then start the (STOPPED) input port explicitly
    nf.update_connection(self.context.from_gw, self.context.temp_funnel,
                         sensor_port)
    canvas.schedule_components(self.context.root_pg.id, True, [sensor_port])