Example #1
0
    def test_kinesis_firehose_sink(self):
        """Build a Kinesis Firehose sink with explicit batching options,
        attach it to a small bounded stream, and verify that the configured
        options are reflected in the execution plan and in the underlying
        Java sink object.
        """
        import json  # local import: only this test parses the plan JSON

        _load_specific_flink_module_jars('/flink-connectors/'
                                         'flink-sql-connector-aws-kinesis-firehose')

        sink_properties = {
            'aws.region': 'eu-west-1',
            'aws.credentials.provider.basic.accesskeyid': 'aws_access_key_id',
            'aws.credentials.provider.basic.secretkey': 'aws_secret_access_key'
        }

        ds = self.env.from_collection([('ab', 1), ('bdc', 2), ('cfgs', 3), ('deeefg', 4)],
                                      type_info=Types.ROW([Types.STRING(), Types.INT()]))

        kinesis_firehose_sink = KinesisFirehoseSink.builder() \
            .set_firehose_client_properties(sink_properties) \
            .set_serialization_schema(SimpleStringSchema()) \
            .set_delivery_stream_name('stream-1') \
            .set_fail_on_error(False) \
            .set_max_batch_size(500) \
            .set_max_in_flight_requests(50) \
            .set_max_buffered_requests(10000) \
            .set_max_batch_size_in_bytes(5 * 1024 * 1024) \
            .set_max_time_in_buffer_ms(5000) \
            .set_max_record_size_in_bytes(1 * 1024 * 1024) \
            .build()

        ds.sink_to(kinesis_firehose_sink).name('kinesis firehose sink')
        # The execution plan is a JSON document; parse it with json.loads
        # instead of eval(), which executes arbitrary expressions and would
        # fail on JSON literals such as true/false/null.
        plan = json.loads(self.env.get_execution_plan())

        self.assertEqual('kinesis firehose sink: Writer', plan['nodes'][1]['type'])
        self.assertEqual(get_field_value(kinesis_firehose_sink.get_java_function(), 'failOnError'),
                         False)
        self.assertEqual(
            get_field_value(kinesis_firehose_sink.get_java_function(), 'deliveryStreamName'),
            'stream-1')
Example #2
0
 def setUp(self) -> None:
     """Create a fresh execution environment and test sink, then load the
     connector jars this suite depends on."""
     self.env = StreamExecutionEnvironment.get_execution_environment()
     self.test_sink = DataStreamTestSinkFunction()
     # Both connector modules are needed by these tests.
     for module_path in ('/flink-connectors/flink-connector-files',
                         '/flink-connectors/flink-connector-sink-common'):
         _load_specific_flink_module_jars(module_path)
Example #3
0
 def setUp(self) -> None:
     """Create the execution environment and isolate connector jars on a
     temporary ClassLoader."""
     self.env = StreamExecutionEnvironment.get_execution_environment()
     # Cache the current ContextClassLoader: the jar loader below replaces
     # it with a temporary URLClassLoader so connector jars from the given
     # module path are loaded in isolation; the cached loader is restored
     # after the test case finishes.
     jvm_thread = get_gateway().jvm.Thread.currentThread()
     self._cxt_clz_loader = jvm_thread.getContextClassLoader()
     _load_specific_flink_module_jars(self._get_jars_relative_path())
Example #4
0
    def test_use_modules(self):
        """Load two mock modules, then reorder the used modules and verify
        both the used-module list and the full-module list."""
        # Please do not change this order since ModuleMock depends on
        # FunctionDefinitionMock.
        _load_specific_flink_module_jars('/flink-table/flink-table-common')
        _load_specific_flink_module_jars('/flink-table/flink-table-api-java')

        jvm = get_gateway().jvm
        for module_name in ('x', 'y'):
            mock_module = jvm.org.apache.flink.table.utils.ModuleMock(module_name)
            self.t_env.load_module(module_name, Module(mock_module))

        self.check_list_modules('core', 'x', 'y')
        self.check_list_full_modules(3, 'core', 'x', 'y')

        # After use_modules, only 'y' and 'core' are in use; 'x' still
        # appears in the full list.
        self.t_env.use_modules('y', 'core')
        self.check_list_modules('y', 'core')
        self.check_list_full_modules(2, 'y', 'core', 'x')
Example #5
0
 def test_kafka_connector_universal(self):
     """Run the shared Kafka connector assertions against the universal
     consumer/producer pair."""
     module_path = '/flink-connectors/flink-sql-connector-kafka'
     _load_specific_flink_module_jars(module_path)
     self.kafka_connector_assertion(FlinkKafkaConsumer, FlinkKafkaProducer)
Example #6
0
 def setUp(self) -> None:
     """Create the execution environment, cache the ContextClassLoader, and
     load the JDBC connector jar."""
     self.env = StreamExecutionEnvironment.get_execution_environment()
     # Cached so the original loader can be restored after the test.
     jvm_thread = get_gateway().jvm.Thread.currentThread()
     self._cxt_clz_loader = jvm_thread.getContextClassLoader()
     _load_specific_flink_module_jars('/flink-connectors/flink-connector-jdbc')
Example #7
0
 def setUpClass(cls):
     """Cache the ContextClassLoader and load the Kafka connector jar for
     the whole test class."""
     super(KafkaDescriptorTests, cls).setUpClass()
     jvm_thread = get_gateway().jvm.Thread.currentThread()
     cls._cxt_clz_loader = jvm_thread.getContextClassLoader()
     _load_specific_flink_module_jars('/flink-connectors/flink-connector-kafka')
Example #8
0
 def setUpClass(cls):
     """Cache the ContextClassLoader and load the Elasticsearch base
     connector jar for the whole test class."""
     super(ElasticsearchDescriptorTest, cls).setUpClass()
     jvm_thread = get_gateway().jvm.Thread.currentThread()
     cls._cxt_clz_loader = jvm_thread.getContextClassLoader()
     _load_specific_flink_module_jars('/flink-connectors/flink-connector-elasticsearch-base')
Example #9
0
 def test_kafka_connector_011(self):
     """Run the shared Kafka connector assertions against the 0.11
     consumer/producer pair."""
     module_path = '/flink-connectors/flink-sql-connector-kafka-0.11'
     _load_specific_flink_module_jars(module_path)
     self.kafka_connector_assertion(
         FlinkKafkaConsumer011, FlinkKafkaProducer011)
Example #10
0
 def setUpClass(cls):
     """Cache the ContextClassLoader and load the kafka-base connector jar
     for the whole test class."""
     jvm_thread = get_gateway().jvm.Thread.currentThread()
     cls._cxt_clz_loader = jvm_thread.getContextClassLoader()
     _load_specific_flink_module_jars(
         '/flink-connectors/flink-connector-kafka-base')
Example #11
0
 def setUp(self):
     """Cache the ContextClassLoader and load the RabbitMQ connector jar."""
     jvm_thread = get_gateway().jvm.Thread.currentThread()
     self._cxt_clz_loader = jvm_thread.getContextClassLoader()
     _load_specific_flink_module_jars(
         '/flink-connectors/flink-sql-connector-rabbitmq')