    def test_writeTxn(self):
        scope = ''.join(
            secrets.choice(string.ascii_lowercase + string.digits)
            for i in range(10))
        print("Creating a Stream Manager, ensure Pravega is running")
        stream_manager = pravega_client.StreamManager("127.0.0.1:9090")

        print("Creating a scope")
        scope_result = stream_manager.create_scope(scope)
        self.assertEqual(True, scope_result, "Scope creation status")

        print("Creating a stream")
        stream_result = stream_manager.create_stream(scope, "testTxn", 1)
        self.assertEqual(True, stream_result, "Stream creation status")

        print("Creating a txn writer for Stream")
        w1 = stream_manager.create_transaction_writer(scope, "testTxn", 1)
        txn1 = w1.begin_txn()
        print("Write events")
        txn1.write_event("test event1")
        txn1.write_event("test event2")
        self.assertTrue(txn1.is_open(), "Transaction is open")
        print("commit transaction")
        txn1.commit()
        self.assertEqual(False, txn1.is_open(), "Transaction is closed")

        txn2 = w1.begin_txn()
        print("Write events")
        txn2.write_event("test event1")
        txn2.write_event("test event2")
        self.assertTrue(txn2.is_open(), "Transaction is open")
        print("commit transaction")
        txn2.abort()
        self.assertEqual(False, txn2.is_open(), "Transaction is closed")
def write_data_to_pravega(
        controller_uri: str, scope: str, stream: str,
        table_data: Generator[TripRecord, None, None]) -> None:
    """Write data directly to the Pravega, without the help of Flink.

    Args:
        controller_uri (str): The pravega uri
        scope (str): Scope name
        stream (str): Stream name
        table_data (Generator[TripRecord, None, None]): Data processed by Flink
    """
    manager = pravega_client.StreamManager(controller_uri)
    manager.create_scope(scope_name=scope)
    manager.create_stream(scope_name=scope,
                          stream_name=stream,
                          initial_segments=3)

    def uncapitalize(s: str) -> str:
        # lower-case the first character: "StartLocationId" -> "startLocationId"
        return f'{s[0].lower()}{s[1:]}' if s else ''

    writer = manager.create_writer(scope, stream)
    for row in table_data:
        event = {
            # convert dataclass to dict with key in camel case
            uncapitalize(''.join(w.title() for w in k.split('_'))): v
            for k, v in dataclasses.asdict(row).items()
        }
        print(event)
        writer.write_event(json.dumps(event),
                           routing_key=str(row.start_location_id))
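# A hedged usage sketch for write_data_to_pravega. The TripRecord dataclass, its
# fields other than start_location_id (which is implied by the routing-key code
# above), and the controller address are assumptions made for illustration; the
# original file is assumed to import json, dataclasses and pravega_client.
import dataclasses
import json
from typing import Generator

import pravega_client


@dataclasses.dataclass
class TripRecord:
    start_location_id: int
    end_location_id: int
    trip_distance: float


def sample_trips() -> Generator[TripRecord, None, None]:
    # yield a couple of illustrative records
    yield TripRecord(start_location_id=1, end_location_id=2, trip_distance=3.5)
    yield TripRecord(start_location_id=4, end_location_id=5, trip_distance=0.9)


# requires a Pravega controller listening at the given address
write_data_to_pravega("127.0.0.1:9090", "taxi", "trips", sample_trips())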
    def test_writeEventAndRead(self):
        suffix = str(random.randint(0, 100))
        scope = "testRead";
        stream = "testStream" + suffix
        print("Creating a Stream Manager, ensure Pravega is running")
        stream_manager = pravega_client.StreamManager("127.0.0.1:9090")

        print("Creating a scope")
        scope_result = stream_manager.create_scope(scope)
        print(scope_result)
        print("Creating a stream ", stream)
        stream_result = stream_manager.create_stream(scope, stream, 1)
        print(stream_result)

        print("Creating a writer for Stream")
        w1 = stream_manager.create_writer(scope, stream)

        print("Write events")
        w1.write_event("test event")
        w1.write_event("test event")
        reader_group = stream_manager.create_reader_group(
            "rg" + suffix, scope, stream)
        r1 = reader_group.create_reader("reader-1")
        segment_slice = _run(self.get_segment_slice(r1))
        print(segment_slice)
        # consume the segment slice for events.
        count = 0
        for event in segment_slice:
            count += 1
            print(event.data())
            self.assertEqual(b'test event', event.data(), "Invalid event data")
        self.assertEqual(count, 2, "Two events are expected")
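# The synchronous read tests in this listing call two helpers that are not shown
# here: `_run`, which drives a coroutine to completion, and `self.get_segment_slice`,
# which wraps the reader's async API. A minimal sketch of what they might look
# like, assuming plain asyncio (these are not the original helpers):
import asyncio


def _run(coro):
    # run an async coroutine from synchronous test code
    return asyncio.get_event_loop().run_until_complete(coro)


# and, on the test class:
#     async def get_segment_slice(self, reader):
#         return await reader.get_segment_slice_async()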
Example #4
    async def test_largeEvents(self):
        suffix = str(random.randint(0, 100))
        scope = "testRead"
        stream = "testLargeEvent" + suffix
        print("Creating a Stream Manager, ensure Pravega is running")
        stream_manager = pravega_client.StreamManager("127.0.0.1:9090")

        print("Creating a scope")
        scope_result = stream_manager.create_scope(scope)
        print(scope_result)
        print("Creating a stream ", stream)
        stream_result = stream_manager.create_stream(scope, stream, 1)
        print(stream_result)

        print("Creating a writer for Stream")
        w1 = stream_manager.create_writer(scope, stream)

        print("Write events")
        for x in range(0, 1000):
            payload = str(x) * 100000
            w1.write_event(payload)
        reader_group = stream_manager.create_reader_group(
            "rg" + suffix, scope, stream)
        r1 = reader_group.create_reader("reader-1")
        # consume the segment slice for events.
        count = 0
        while count != 1000:
            segment_slice = await r1.get_segment_slice_async()
            for event in segment_slice:
                count += 1
            r1.release_segment(segment_slice)
    def test_TxnError(self):
        scope = ''.join(
            secrets.choice(string.ascii_lowercase + string.digits)
            for i in range(10))

        print("Creating a Stream Manager, ensure Pravega is running")
        stream_manager = pravega_client.StreamManager("127.0.0.1:9090")

        print("Creating a scope")
        scope_result = stream_manager.create_scope(scope)
        self.assertEqual(True, scope_result, "Scope creation status")

        print("Creating a stream")
        stream_result = stream_manager.create_stream(scope, "testTxn", 1)
        self.assertEqual(True, stream_result, "Stream creation status")

        print("Creating a txn writer for Stream")
        w1 = stream_manager.create_transaction_writer(scope, "testTxn", 1)
        txn1 = w1.begin_txn()
        print("Write events")
        txn1.write_event("test event1")
        txn1.write_event("test event2")
        self.assertTrue(txn1.is_open(), "Transaction is open")
        print("commit transaction")
        txn1.commit()
        self.assertEqual(False, txn1.is_open(), "Transaction is closed")

        # Attempt writing to an already committed transaction.
        try:
            txn1.write_event("Error")
            self.fail(
                "Write on an already closed transaction should throw a TxnFailedException"
            )
        except TxnFailedException as e:
            print("Exception ", e)

        # Attempt committing an already closed transaction.
        try:
            txn1.commit()
            self.fail(
                "Commit of an already closed transaction should throw a TxnFailedException"
            )
        except TxnFailedException as e:
            print("Exception ", e)

        # Attempt aborting an already closed transaction.
        try:
            txn1.abort()
            self.fail(
                "Abort of an already closed transaction should throw a TxnFailedException"
            )
        except TxnFailedException as e:
            print("Exception ", e)
Example #6
    async def test_multipleReaderPartialRead(self):
        suffix = str(random.randint(0, 100))
        scope = "testRead"
        stream = "testPartial" + suffix
        print("Creating a Stream Manager, ensure Pravega is running")
        stream_manager = pravega_client.StreamManager("127.0.0.1:9090")

        print("Creating a scope")
        scope_result = stream_manager.create_scope(scope)
        print(scope_result)
        print("Creating a stream ", stream)
        stream_result = stream_manager.create_stream(scope, stream, 2)
        print(stream_result)

        print("Creating a writer for Stream ", stream)
        w1 = stream_manager.create_writer(scope, stream)

        print("Write events 100 events")
        for i in range(100):
            w1.write_event("data")

        reader_group = stream_manager.create_reader_group(
            "rg-partial" + suffix, scope, stream)
        r1 = reader_group.create_reader("r1")
        slice1 = await r1.get_segment_slice_async()
        print(slice1)
        # consume just one event from the first segment slice.
        count = 0
        event = next(slice1)
        self.assertEqual(b'data', event.data(), "Invalid event data")
        count += 1

        print("Number of events read after consuming slice1 ", count)
        # release the partially read segment slice.
        r1.release_segment(slice1)
        # mark the reader as offline.
        r1.reader_offline()

        r2 = reader_group.create_reader("r2")
        slice2 = await r2.get_segment_slice_async()
        for event in slice2:
            count += 1
            self.assertEqual(b'data', event.data(), "Invalid event data")
        print("Number of events read after consuming slice2 ", count)
        if count != 100:
            slice3 = await r2.get_segment_slice_async()
            for event in slice3:
                count += 1
                self.assertEqual(b'data', event.data(), "Invalid event data")
            print("Number of events read after consuming slice3 ", count)

        self.assertEqual(count, 100, "100 events are expected")
Example #7
def get_online_reader():
    manager = pravega_client.StreamManager("{}:{}".format(
        settings.pravega.host, settings.pravega.port))
    reader_group = manager.create_reader_group(settings.pravega.group,
                                               settings.pravega.scope,
                                               settings.pravega.stream)
    while True:
        try:
            reader = reader_group.create_reader(settings.pravega.reader_id +
                                                get_suffix())
            break
        except Exception:
            # creation failed (e.g. the reader id may already be in use);
            # retry with a fresh suffix
            pass
    return reader
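# A hedged usage sketch for get_online_reader: pull one segment slice from the
# returned reader and print its events. It assumes an asyncio context, since
# get_segment_slice_async() (used throughout this listing) is a coroutine, and
# that the `settings` values referenced above point at a running Pravega cluster.
import asyncio


async def consume_once():
    reader = get_online_reader()
    segment_slice = await reader.get_segment_slice_async()
    for event in segment_slice:
        print(event.data())
    # hand the slice back so other readers in the group can pick it up
    reader.release_segment(segment_slice)


asyncio.run(consume_once())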
    def test_multipleReader(self):
        suffix = str(random.randint(0, 100))
        scope = "testRead";
        stream = "testMulti" + suffix
        print("Creating a Stream Manager, ensure Pravega is running")
        stream_manager = pravega_client.StreamManager("127.0.0.1:9090")

        print("Creating a scope")
        scope_result = stream_manager.create_scope(scope)
        print(scope_result)
        print("Creating a stream ", stream)
        stream_result = stream_manager.create_stream(scope, stream, 2)
        print(stream_result)

        print("Creating a writer for Stream")
        w1 = stream_manager.create_writer(scope, stream)

        print("Write events 100 events")
        for i in range(100):
            w1.write_event("data")

        reader_group = stream_manager.create_reader_group(
            "rg-multi" + suffix, scope, stream)
        r1 = reader_group.create_reader("r1")
        slice1 = _run(self.get_segment_slice(r1))
        print(slice1)
        # consume the segment slice for events.
        count = 0
        for event in slice1:
            count += 1
            self.assertEqual(b'data', event.data(), "Invalid event data")
        print("Number of events read after consuming slice1 ", count)
        # release the segment.
        r1.release_segment(slice1)
        # mark the reader as offline.
        r1.reader_offline()

        r2 = reader_group.create_reader("r2")
        slice2 = _run(self.get_segment_slice(r2))
        for event in slice2:
            count += 1
            self.assertEqual(b'data', event.data(), "Invalid event data")

        print("Number of events read after consuming slice2 ", count)
        self.assertEqual(count, 100, "100 events are expected")
Example #9
def writeRawJson(directory, outdata, server_ip):
    #print(outdata)
    iceman_json_data = {}
    json_list_data = []
    try:
        report_id = outdata.get('Id', 'No ID')  # avoid shadowing the builtin `id`
        iceman_json_data["fields.IDRACIP"] = server_ip
        iceman_json_data["MetricReport"] = report_id
        iceman_json_data["source"] = "Telemetry-Redfish-Listener"
        iceman_json_data["@timestamp"] = DT.utcnow().strftime(
            "%Y-%m-%dT%H:%M:%S%z")
        for data in outdata["MetricValues"]:
            metric_value_data = {}
            metric_value_data["MetricId"] = data.get("MetricId")
            try:
                metric_value_data["MetricValue"] = float(
                    data.get("MetricValue"))
                metric_value_data["MetricType"] = "Number"
            except (TypeError, ValueError):
                # non-numeric value; stored under a different key, presumably
                # to avoid a type clash with the numeric MetricValue field
                metric_value_data["MetricValue1"] = data.get("MetricValue")
                metric_value_data["MetricType"] = "String"
            metric_value_data["ContextID"] = data.get("Oem").get("Dell").get(
                "ContextID")
            metric_value_data.update(iceman_json_data)
            json_list_data.append(metric_value_data)

        str_data = json.dumps(json_list_data)
        #print(str_data)

        manager = pravega_client.StreamManager("{}:{}".format(
            pravega_ip, pravega_port))
        # create the Pravega scope and stream if they do not already exist.
        manager.create_scope(pravega_scope)
        manager.create_stream(pravega_scope, pravega_stream, 1)
        writer = manager.create_writer(pravega_scope, pravega_stream)
        # write into Pravega stream without specifying the routing key.
        #jsol_data = """{"key":"value"}"""
        writer.write_event(str_data)

    except Exception as e:
        logging.exception("Failed to send data to ELK")
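# An illustrative shape for the `outdata` argument of writeRawJson, inferred from
# the fields the function reads above (Id, MetricValues, MetricId, MetricValue,
# Oem.Dell.ContextID). The concrete values are made up for illustration, and the
# call below assumes pravega_ip, pravega_port, pravega_scope and pravega_stream
# are configured as in the original module.
sample_outdata = {
    "Id": "PowerMetrics",
    "MetricValues": [
        {
            "MetricId": "SystemInputPower",
            "MetricValue": "126",
            "Oem": {"Dell": {"ContextID": "System.Embedded.1"}},
        },
    ],
}
# writeRawJson("/tmp/metrics", sample_outdata, "192.168.0.10")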
Example #10
    def test_writeEvent(self):
        scope = ''.join(
            secrets.choice(string.ascii_lowercase + string.digits)
            for i in range(10))
        print("Creating a Stream Manager, ensure Pravega is running")
        stream_manager = pravega_client.StreamManager("127.0.0.1:9090")

        print("Creating a scope")
        scope_result = stream_manager.create_scope(scope)
        self.assertEqual(True, scope_result, "Scope creation status")

        print("Creating a stream")
        stream_result = stream_manager.create_stream(scope, "testStream", 1)
        self.assertEqual(True, stream_result, "Stream creation status")

        print("Creating a writer for Stream")
        w1 = stream_manager.create_writer(scope, "testStream")

        print("Write events")
        w1.write_event("test event1")
        w1.write_event("test event2")
Example #11
import os
import time

mysql_password = os.environ.get('mysql_password', 'dbz')

########################################################################

print("Create scope and stream for pravega.")

import pravega_client

manager = pravega_client.StreamManager("tcp://pravega:9090")
manager.create_scope('stock')
manager.create_stream('stock', 'dbserver1', 1)
manager.create_stream('stock', 'dbserver1.stock.stock', 1)
manager.create_stream('stock', 'dbserver1.stock.metadata', 1)

# create a file so that the healthcheck will know pravega is ready
with open('log', 'w') as fp:
    fp.write('Scope and streams are created.')

########################################################################

print("Get stock data.")

import yfinance as yf

tickers_list = ['AAPL', 'IBM', 'MU', 'BA', 'TSLA', 'NKE', 'GE', 'MMM']
df = yf.download(tickers=tickers_list, period="5d", interval="1m")