def test_json_spec_only_columns(self):
    """
    Check that a JSON value spec restricted to a subset of columns publishes successfully.
    """
    t = table_helper()
    cleanup = pk.produce(
        t,
        {'bootstrap.servers': 'redpanda:29092'},
        'orders',
        key_spec=KeyValueSpec.IGNORE,
        value_spec=pk.json_spec(['Symbol', 'Price']),
        last_by_key_columns=False)
    self.assertIsNotNone(cleanup)
    cleanup()
def test_simple_spec(self):
    """
    Check that producing with a simple single-column value spec succeeds.
    """
    t = new_table(cols=[double_col('Price', [10.0, 10.5, 11.0, 11.5])])
    cleanup = pk.produce(
        t,
        {'bootstrap.servers': 'redpanda:29092'},
        'orders',
        key_spec=KeyValueSpec.IGNORE,
        value_spec=pk.simple_spec('Price'))
    self.assertIsNotNone(cleanup)
    cleanup()
def test_avro_spec(self):
    """
    Check that an Avro value spec publishes against a schema registered in the registry.
    """
    schema = \
"""
{ "type" : "record",
  "namespace" : "io.deephaven.examples",
  "name" : "share_price_timestamped",
  "fields" : [
    { "name" : "Symbol", "type" : "string" },
    { "name" : "Side",   "type" : "string" },
    { "name" : "Qty",    "type" : "int"    },
    { "name" : "Price",  "type" : "double" },
    { "name" : "Timestamp",
      "type" : { "type" : "long", "logicalType" : "timestamp-micros" }
    }
  ]
}
"""
    # Embed the schema in the registry's JSON wire format, escaping quotes.
    schema_str = '{ "schema" : "%s" }' % \
        schema.replace('\n', ' ').replace('"', '\\"')
    # Register the schema with the Redpanda schema registry before producing.
    sys_str = \
"""
curl -X POST \
    -H 'Content-type: application/vnd.schemaregistry.v1+json; artifactType=AVRO' \
    --data-binary '%s' \
    http://redpanda:8081/subjects/share_price_timestamped_record/versions
""" % schema_str
    r = os.system(sys_str)
    self.assertEqual(0, r)
    t = table_helper()
    cleanup = pk.produce(
        t,
        {
            'bootstrap.servers': 'redpanda:29092',
            'schema.registry.url': 'http://redpanda:8081'
        },
        'share_price_timestamped',
        key_spec=KeyValueSpec.IGNORE,
        value_spec=pk.avro_spec(
            'share_price_timestamped_record',
            timestamp_field='Timestamp'),
        last_by_key_columns=False)
    self.assertIsNotNone(cleanup)
    cleanup()
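# Optional registry sanity check (a sketch, not part of the original tests;
# the helper name is hypothetical): after the POST above, the registry should
# answer a GET for the latest version of the same subject.
def schema_is_registered(subject):
    return 0 == os.system(
        'curl -sf http://redpanda:8081/subjects/%s/versions/latest > /dev/null'
        % subject)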
def test_json_spec_all_arguments(self):
    """
    Check json_spec with all optional arguments: explicit columns,
    a column-to-field mapping, and a timestamp field.
    """
    t = table_helper()
    cleanup = pk.produce(
        t,
        {'bootstrap.servers': 'redpanda:29092'},
        'orders',
        key_spec=KeyValueSpec.IGNORE,
        value_spec=pk.json_spec(
            ['Symbol', 'Price'],
            mapping={'Symbol': 'jSymbol', 'Price': 'jPrice'},
            timestamp_field='jTs'),
        last_by_key_columns=False)
    self.assertIsNotNone(cleanup)
    cleanup()
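# A minimal sketch of the table_helper fixture the tests above rely on; the
# real helper lives elsewhere in this module. Only the column names (Symbol,
# Side, Qty, Price) are confirmed by the specs used above, so the values here
# are illustrative assumptions. string_col and int_col come from
# deephaven.column alongside the double_col already in use.
def table_helper():
    return new_table(cols=[
        string_col('Symbol', ['MSFT', 'GOOG', 'AAPL', 'AAPL']),
        string_col('Side', ['B', 'B', 'S', 'B']),
        int_col('Qty', [200, 100, 300, 50]),
        double_col('Price', [210.0, 310.5, 411.0, 411.5])
    ])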
) \
    .where(['lifetime_value > 10000']) \
    .natural_join(users, ['user_id = id'], ['email']) \
    .view(['id = user_id', 'email', 'lifetime_value', 'purchases'])  # column rename and reorder

schema_namespace = 'io.deephaven.examples'

# Publish the derived table to a sink topic as Avro, registering the key and
# value schemas with the registry as part of the produce call.
cancel_callback = pk.produce(
    high_value_users,
    kafka_base_properties,
    topic='high_value_users_sink',
    key_spec=pk.avro_spec('high_value_users_sink_key',
                          publish_schema=True,
                          schema_namespace=schema_namespace,
                          include_only_columns=['user_id']),
    value_spec=pk.avro_spec('high_value_users_sink_value',
                            publish_schema=True,
                            schema_namespace=schema_namespace,
                            column_properties={
                                "lifetime_value.precision": "12",
                                "lifetime_value.scale": "4"
                            }),
    last_by_key_columns=True)

# Read the sink topic back from the beginning to verify the round trip.
hvu_test = ck.consume(
    consume_properties,
    topic='high_value_users_sink',
    offsets=ck.ALL_PARTITIONS_SEEK_TO_BEGINNING,
    key_spec=KeyValueSpec.IGNORE,
    value_spec=ck.avro_spec('high_value_users_sink_value'),
    table_type=TableType.Append)
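# Teardown sketch: the callback returned by pk.produce stops publishing to the
# sink topic once the demo is done; hvu_test simply stops ticking afterwards.
cancel_callback()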