def test_event_missing_duration():
    """An event lacking the 'duration' field is not a translation event."""
    event = {
        "timestamp": "2018-12-26 18:11:00.000000",
        "translation_id": "5aa5b2f39f7254a75aa5",
    }
    res = evs.is_translation_event(event)
    # Idiom fix: prefer `assert not res` over `assert res == False` (PEP 8).
    assert not res
def process_events(input_file, window_size, output_file):
    """Stream events from *input_file* and write one average delivery time
    per timeslot to *output_file*.

    Parameters:
        input_file: source passed to EventStreamReader.
        window_size: moving-average window passed to the calculator
            (presumably a number of timeslots -- TODO confirm against `tsa`).
        output_file: destination passed to ResultsWriter.
    """
    reader = EventStreamReader(input_file)
    calculator = None
    writer = ResultsWriter(output_file)
    for event in reader:
        event_ts = tsa.to_datetime(event["timestamp"])
        # First event: anchor the calculator at that event's timeslot.
        # Bug fix: compare against None with `is`, not `==` (PEP 8).
        if calculator is None:
            start_timeslot = tsa.beginning_period(event_ts)
            calculator = tsa(start_timeslot, window_size)
        # Emit an average for every timeslot that closes before this event,
        # advancing the window until it reaches the event's timeslot.
        while event_ts > calculator.current_timeslot:
            current_ts = tsa.datetime_to_str(calculator.current_timeslot)
            average = calculator.calculate_average()
            result = {"date": current_ts, "average_delivery_time": average}
            writer.write_result(result)
            calculator.next_timeslot()
        calculator.add_event(event)
    # Flush the timeslot containing the final event. `calculator` is only
    # falsy when the stream was empty; the short-circuit also guards the
    # otherwise-unbound `event_ts` in that case.
    if calculator and calculator.current_timeslot >= event_ts:
        average = calculator.calculate_average()
        current_ts = tsa.datetime_to_str(calculator.current_timeslot)
        result = {"date": current_ts, "average_delivery_time": average}
        writer.write_result(result)
    writer.close()
def test_event_ok():
    """A well-formed event (timestamp, duration, translation_id) is accepted."""
    valid_event = {
        "timestamp": "2018-12-26 18:11:00.000000",
        "duration": 10,
        "translation_id": "5aa5b2f39f7254a75aa5",
    }
    assert evs.is_translation_event(valid_event)
def test_event_malformed_timestamp():
    """An event whose timestamp is not parseable is rejected."""
    event = {
        "timestamp": "text",
        "duration": 10,
        "translation_id": "5aa5b2f39f7254a75aa5",
    }
    res = evs.is_translation_event(event)
    # Idiom fix: prefer `assert not res` over `assert res == False` (PEP 8).
    assert not res
from event_stream_reader import EventStreamReader
from config import SSE_SERVER_MAIN_URL
from message_structure import MessageData

# Watch the main SSE stream and flag blocks proposed by a specific validator.
esr = EventStreamReader(SSE_SERVER_MAIN_URL)
for msg in esr.messages():
    if not msg:
        continue
    data = MessageData(msg.data)
    if data.is_block_added:
        # Bug fix: `data.data['block', 'body', 'proposer']` indexes the dict
        # with a single tuple key and raises KeyError; chained lookups into
        # the nested payload are intended.
        proposer = data.data['block']['body']['proposer']
        # Match on the leading bytes of the target validator's public key.
        if "010a78ee" in proposer:
            print(
                f"{data.data['block_hash']} Proposed block by Make {proposer}")
        else:
            print("not")
from event_stream_reader import EventStreamReader
import time

# Watch a remote event stream and report any gap in consecutive message ids.
server_path = "http://18.220.220.20:9999/events"
esr = EventStreamReader(server_path, 80000)
esr.RECONNECT_DELAY_SEC = 0.1
esr.RECONNECT_COUNT = 15

last_id = 0
for msg in esr.messages():
    print(msg.id, msg)
    current_id = int(msg.id)
    # A well-behaved stream increments the id by exactly one each message.
    if current_id != last_id + 1:
        print(f"ID skip: {last_id} -> {msg.id}")
    last_id = current_id
from event_stream_reader import EventStreamSimulator, EventStreamReader
from pathlib import Path
import json
from collections import defaultdict

SCRIPT_DIR = Path(__file__).parent.absolute()
DATA_PATH = SCRIPT_DIR / "delta-11_event_stream_16280"

# Tally how many messages of each top-level event type arrive on the stream.
esr = EventStreamReader("http://18.220.220.20:9999/events")
message = defaultdict(int)
for msg in esr.messages():
    if msg.id is None:
        continue
    data = json.loads(msg.data)
    # Each payload is a one-key dict; the key names the event type.
    message[list(data.keys())[0]] += 1
    print(msg.data)

# Field reference for payloads observed on the stream:
# FinalitySignature - block_hash, era_id, signature, public_key
# DeployProcessed - deploy_hash, account, timestamp, ttl, dependencies,
#                   block_hash, execution_results
# BlockAdded - block_hash - block:header:era_id - block:header:height

# Example FinalitySignature payload kept as reference data.
# Bug fix: the literal was missing its closing brace, which made the
# module unparseable.
finality_signature = {
    "block_hash": "fcc5e8f8672f44f3531c2a0498aefd0397c2bcbc4660a7542e04431617ff749d",
    "era_id": 67,
    "signature": "0103290c2676c4c7650c455f04e8aba49ae8e1f21888096778a297095b224656ca982e41e94dd9e92bd3fbe74644fd673ad339c5b475b4203b54ef354f33c67e04",
    "public_key": "01b99d5f54a5147ee34f472d546d84037007025df5e1a13cfdca7aac05e9ac5858",
}
from event_stream_reader import EventStreamReader, file_message_streamer_with_disconnects

# Replay a recorded event stream while simulating disconnects.
# With changes to always pull from 0 and overwrite old data this may never
# finish: on server restarts the id resets to 0, so we cannot safely resume
# if the error was due to upgrade.
data_path = "events_dryrun_120"
replay = EventStreamReader(data_path, 70122, file_message_streamer_with_disconnects)
replay.RECONNECT_DELAY_SEC = 0.5
replay.RECONNECT_COUNT = 15

for message in replay.messages():
    print(message.id, message)
def test_event_missing_timestamp():
    """An event lacking the 'timestamp' field is not a translation event."""
    event = {"duration": 10, "translation_id": "5aa5b2f39f7254a75aa5"}
    res = evs.is_translation_event(event)
    # Idiom fix: prefer `assert not res` over `assert res == False` (PEP 8).
    assert not res
#!/usr/bin/env python3
import time
from typing import Union
import json
import threading
from pathlib import Path
from itertools import chain
import signal
from event_stream_reader import EventStreamReader
import config
from message_structure import MessageData
from generate_finality_signatures import generate_finality_signatures_for_block
from node_rpc import get_deploy, get_block

# One reader per SSE endpoint: main events, deploy events, finality signatures.
esr_main = EventStreamReader(config.SSE_SERVER_MAIN_URL)
esr_deploys = EventStreamReader(config.SSE_SERVER_DEPLOYS_URL)
esr_sigs = EventStreamReader(config.SSE_SERVER_SIGS_URL)

# Three message types and where each ends up on disk:
# DeployProcessed - has no era_id, so stage it in a block-hash folder and
#   move it into the era_id folder once the matching BlockAdded arrives.
#   Final location: era_<era_id>/<block_hash>/deploy-<deploy_hash>
# BlockAdded - has era_id, stored directly in the era_id folder.
#   Final location: era_<era_id>/block-<block_hash>
# FinalitySignature - has era_id, stored directly in the era_id folder.
#   Final location: era_<era_id>/<block_hash>/finsig-<block_hash>-<public_key>


def era_directory_name(era_id: Union[str, int]) -> str:
    """Return the on-disk directory name used for the given era id."""
    return "era_" + str(era_id)