    def execute(self, function_context: FlinkFunctionContext,
                input_table: Table) -> None:
        example_meta: ExampleMeta = function_context.node_spec.example_meta
        table_env: TableEnvironment = function_context.get_table_env()
        statement_set = function_context.get_statement_set()
        # Declare a Kafka sink table; the bootstrap servers are taken from
        # the example metadata's stream_uri.
        table_env.execute_sql("""
            create table write_predict_test_table (
                face_id varchar,
                label varchar
            ) with (
                'connector' = 'kafka',
                'topic' = 'tianchi_write_example',
                'properties.bootstrap.servers' = '{}',
                'properties.group.id' = 'write_example',
                'properties.request.timeout.ms' = '30000',
                'format' = 'csv',
                'scan.startup.mode' = 'earliest-offset',
                'csv.disable-quote-character' = 'true'
            )
            """.format(example_meta.stream_uri))
        # Alternatively, a blackhole sink can be declared for testing:
        # table_env.execute_sql("""
        #     create table write_predict_test_table (
        #         face_id varchar,
        #         label varchar
        #     ) with (
        #         'connector' = 'blackhole'
        #     )
        #     """)
        statement_set.add_insert('write_predict_test_table', input_table)
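This executor only registers the insert on the StatementSet; the job itself is presumably submitted by the surrounding framework after execute() returns. As a rough sketch, in plain PyFlink the registered inserts would be submitted like this (not part of the example above):

        # Hypothetical follow-up in plain PyFlink, outside the executor:
        # submit every add_insert() registered on the statement set as one job.
        result = statement_set.execute()
        # Block until the job finishes; for a streaming Kafka sink this runs
        # until the job is cancelled.
        result.wait()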
Example #2
    def execute(self, function_context: FlinkFunctionContext,
                input_table: Table) -> None:
        example_meta: ExampleMeta = function_context.get_example_meta()
        output_file = example_meta.batch_uri
        if os.path.exists(output_file):
            if os.path.isdir(output_file):
                shutil.rmtree(output_file)
            else:
                os.remove(output_file)
        t_env = function_context.get_table_env()
        statement_set = function_context.get_statement_set()
        sink = CsvTableSink(
            ['a', 'b'],
            [DataTypes.STRING(), DataTypes.STRING()], output_file, ';')

        t_env.register_table_sink('mySink', sink)
        statement_set.add_insert('mySink', input_table)
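CsvTableSink and register_table_sink belong to the legacy sink API, which is deprecated in newer Flink releases. A hedged alternative sketch, assuming a recent Flink filesystem connector and CSV format (the table name and options below simply mirror the example), would declare the same CSV sink with DDL and reuse the statement set:

        # Hypothetical DDL-based variant of the 'mySink' table above.
        t_env.execute_sql("""
            create table mySink (
                a varchar,
                b varchar
            ) with (
                'connector' = 'filesystem',
                'path' = '{}',
                'format' = 'csv',
                'csv.field-delimiter' = ';'
            )
            """.format(output_file))
        statement_set.add_insert('mySink', input_table)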
Example #3
    def execute(self, function_context: FlinkFunctionContext,
                input_table: Table) -> None:
        t_env = function_context.get_table_env()
        statement_set = function_context.get_statement_set()
        dummy_output_path = function_context.get_example_meta().batch_uri
        if os.path.exists(dummy_output_path):
            if os.path.isdir(dummy_output_path):
                shutil.rmtree(dummy_output_path)
            else:
                os.remove(dummy_output_path)
        sink = CsvTableSink(
            ['a', 'b', 'c'],
            [DataTypes.STRING(),
             DataTypes.STRING(),
             DataTypes.STRING()], dummy_output_path, ';')
        t_env.register_table_sink('mySink', sink)
        statement_set.add_insert('mySink', input_table)
Example #4
    def execute(self, function_context: FlinkFunctionContext,
                input_table: Table) -> None:
        table_env: TableEnvironment = function_context.get_table_env()
        statement_set = function_context.get_statement_set()
        table_env.execute_sql("""
            create table write_example (
                face_id varchar,
                device_id varchar,
                near_id int
            ) with (
                'connector' = 'kafka',
                'topic' = 'tianchi_write_example',
                'properties.bootstrap.servers' = 'localhost:9092',
                'properties.group.id' = 'write_example',
                'format' = 'csv',
                'scan.startup.mode' = 'earliest-offset',
                'csv.disable-quote-character' = 'true'
            )
            """)
        statement_set.add_insert('write_example', input_table)
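For completeness, one hypothetical way to check what this executor wrote to the topic is to declare a matching Kafka source table and print it; everything below, in particular the table name read_example_check, is illustrative and not part of the original example:

        # Hypothetical verification snippet, not part of the executor above.
        table_env.execute_sql("""
            create table read_example_check (
                face_id varchar,
                device_id varchar,
                near_id int
            ) with (
                'connector' = 'kafka',
                'topic' = 'tianchi_write_example',
                'properties.bootstrap.servers' = 'localhost:9092',
                'properties.group.id' = 'read_example_check',
                'format' = 'csv',
                'scan.startup.mode' = 'earliest-offset'
            )
            """)
        # Streams rows to stdout; in streaming mode this runs until cancelled.
        table_env.from_path('read_example_check').execute().print()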