Example 1
    # Excerpt from a unittest test class. Module-level imports needed:
    # os, json, streamsx.eventstreams as evstr, and
    # streamsx.topology.topology.Topology.
    def test_creds(self):
        # Read the Event Streams service credentials from a JSON file.
        creds_file = os.environ['EVENTSTREAMS_CREDENTIALS']
        with open(creds_file) as data_file:
            credentials = json.load(data_file)
        topo = Topology()
        stream = topo.source(['Hello', 'World']).as_json()
        evstr.publish(stream, 'Topic', credentials=credentials)
        evstr.publish(stream, 'Topic', credentials='eventstreams')
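Note the two forms the credentials argument can take: the parsed service-credentials JSON as a dict (first call), or the name of an application configuration that stores those credentials ('eventstreams' in the second call).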
Example 2
    # Module-level imports needed: uuid, streamsx.eventstreams as evstr,
    # plus Topology, CommonSchema and Tester from the streamsx.topology
    # package. StringData, add_mh_toolkit and add_pip_toolkits are helpers
    # defined in the test module.
    def test_string(self):
        n = 107
        topo = Topology()
        add_mh_toolkit(topo)
        add_pip_toolkits(topo)
        # Publish n strings tagged with a unique id for this test run.
        uid = str(uuid.uuid4())
        s = topo.source(StringData(uid, n)).as_string()
        evstr.publish(s, 'MH_TEST')
        print('test_string')

        # Subscribe to the same topic and keep only this run's tuples.
        r = evstr.subscribe(topo, 'MH_TEST', CommonSchema.String)
        r = r.filter(lambda t: t.startswith(uid))
        expected = list(StringData(uid, n, False)())

        # Verify the received contents and tuple count at submission.
        tester = Tester(topo)
        tester.contents(r, expected)
        tester.tuple_count(r, n)
        tester.test(self.test_ctxtype, self.test_config)
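StringData itself is not shown in the excerpt. A hypothetical sketch of its shape, inferred only from how it is called above (the delay flag and the exact message format are assumptions):

import time

class StringData(object):
    # Hypothetical reconstruction of the test helper: a callable source
    # that yields 'count' strings prefixed with the run's unique id.
    def __init__(self, prefix, count, delay=True):
        self.prefix = prefix
        self.count = count
        self.delay = delay

    def __call__(self):
        if self.delay:
            time.sleep(10)  # assumed: let the subscriber connect first
        for i in range(self.count):
            yield '{}_{}'.format(self.prefix, i)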
Example 3
    # Same round trip as Example 2, but passing explicit service
    # credentials to both publish and subscribe.
    def test_string_creds(self):
        n = 107
        creds_file = os.environ['EVENTSTREAMS_CREDENTIALS']
        with open(creds_file) as data_file:
            credentials = json.load(data_file)
        topo = Topology()
        add_mh_toolkit(topo)
        add_pip_toolkits(topo)
        uid = str(uuid.uuid4())
        s = topo.source(StringData(uid, n)).as_string()
        print('test_string_creds')
        evstr.publish(s, 'MH_TEST', credentials=credentials)

        r = evstr.subscribe(topo,
                            'MH_TEST',
                            CommonSchema.String,
                            credentials=credentials)
        r = r.filter(lambda t: t.startswith(uid))
        expected = list(StringData(uid, n, False)())

        tester = Tester(topo)
        tester.contents(r, expected)
        tester.tuple_count(r, n)
        tester.test(self.test_ctxtype, self.test_config)
Example 4
    # Module-level imports needed: streamsx.eventstreams as evstr, Topology,
    # and 'from streamsx.eventstreams.schema import Schema as MsgSchema'.
    def test_schemas_ok(self):
        # Build one stream per schema that publish() accepts.
        topo = Topology()
        pyObjStream = topo.source(['Hello', 'World!'])
        jsonStream = pyObjStream.as_json()
        stringStream = pyObjStream.as_string()
        binMsgStream = pyObjStream.map(
            func=lambda s: {'message': bytes(s, 'utf-8'), 'key': s},
            schema=MsgSchema.BinaryMessage)
        strMsgStream = pyObjStream.map(
            func=lambda s: {'message': s, 'key': s},
            schema=MsgSchema.StringMessage)
        evstr.publish(binMsgStream, "Topic")
        evstr.publish(strMsgStream, "Topic")
        evstr.publish(stringStream, "Topic")
        evstr.publish(jsonStream, "Topic")
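Unlike the plain JSON and string streams, the message schemas (MsgSchema.BinaryMessage and MsgSchema.StringMessage) carry a 'message' payload together with a 'key' attribute; since Event Streams is Kafka-based, the key determines which topic partition a message lands in.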
Example 5
event_streams_topic = 'THREE_PARTITIONS_TOPIC'
#
# the producer part
#
# Create the data and map it to the 'message' and 'key' attributes of the
# 'Schema.StringMessage' schema, so that the messages carry Kafka keys.
# ('topology' and the SensorReadingsSource callable are created earlier in
# the sample; Schema is streamsx.eventstreams.schema.Schema and evst is
# the streamsx.eventstreams module.)
sensorStream = topology.source(SensorReadingsSource(), "RawDataSource").map(
    func=lambda reading: {
        'message': json.dumps(reading),
        'key': reading['sensor_id']
    },
    name="ToKeyedMessage",
    schema=Schema.StringMessage)
# Assumes an application configuration named 'messagehub' has been created.
eventStreamsSink = evst.publish(sensorStream,
                                topic=event_streams_topic,
                                credentials='messagehub',
                                name="SensorPublish")

#
# the consumer side
#
# Subscribe, forming a consumer group of three parallel consumers.
consumerSchema = Schema.StringMessageMeta
received = evst.subscribe(
    topology,
    topic=event_streams_topic,
    schema=consumerSchema,
    group='my_consumer_group',
    credentials='messagehub',
    name="SensorSubscribe").set_parallel(3).end_parallel()