def output(self, args, transport: EventTransport, message: EventMessage):
    """Print out the given message"""
    serialized = transport.serializer(message)

    if args.format in ("json", "pretty"):
        if args.format == "pretty":
            dumped = json.dumps(serialized, indent=4)
        else:
            dumped = json.dumps(serialized)
        sys.stdout.write(dumped)
        sys.stdout.write("\n")
        sys.stdout.flush()

    elif args.format == "human":
        print(Colors.BGreen, end="")
        print(f" {message.api_name}.{message.event_name} ".center(80, "="))
        if hasattr(message, "datetime"):
            print(f" {message.datetime.strftime('%c')} ".center(80, " "))
        print(Colors.Reset, end="")

        print(f"\n{Colors.BWhite}Metadata:{Colors.Reset}")
        for k, v in message.get_metadata().items():
            print(f"    {str(k).ljust(20)}: {v}")

        print(f"\n{Colors.BWhite}Data:{Colors.Reset}")
        for k, v in message.get_kwargs().items():
            if isinstance(v, (dict, list)):
                v = json.dumps(v)
            print(f"    {str(k).ljust(20)}: {v}")

        print("\n")

    else:
        sys.stderr.write(f"Unknown output format '{args.format}'\n")
        sys.exit(1)
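# The Colors helper used by the "human" output branch is defined elsewhere in
# the codebase. Below is a minimal sketch, assuming plain ANSI escape codes, so
# the snippet can be read and run in isolation; the attribute names are taken
# from the calls in this module, the escape sequences themselves are an
# assumption.
class Colors:
    Reset = "\033[0m"      # clear all attributes
    Red = "\033[0;31m"     # regular red
    Green = "\033[0;32m"   # regular green
    BGreen = "\033[1;32m"  # bold green
    BWhite = "\033[1;37m"  # bold white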
async def get_messages(
    self,
    args,
    api_name: str,
    event_name: Optional[str],
    transport: EventTransport,
    bus: BusPath,
):
    """Yields messages from various sources

    Messages are returned from sources in this order:

     - Any on-disk cache
     - Reading history from the event transport
    """
    CACHE_PATH.mkdir(parents=True, exist_ok=True)

    event_name = event_name or "*"

    # Construct the cache file name
    file_name_hash = sha1((api_name + "\0" + event_name).encode("utf8")).hexdigest()[:8]
    file_name_api = re.sub("[^a-zA-Z0-9_]", "_", api_name)
    file_name_event = event_name.replace("*", "all")
    cache_file_name = f"{file_name_hash}-{file_name_api}-{file_name_event}.json"
    cache_file = CACHE_PATH / cache_file_name
    logger.debug(f"Loading from cache file {cache_file}. Exists: {cache_file.exists()}")

    # Sanity check
    if not cache_file.exists() and args.cache_only:
        sys.stderr.write(
            f"No cache file exists for {api_name}.{event_name}, but --cache-only was specified\n"
        )
        sys.exit(1)

    def _progress(force=False):
        if force or (cache_yield_count + transport_yield_count) % 1000 == 0:
            logger.debug(
                f"Yielded {cache_yield_count} from cache and {transport_yield_count} from bus"
            )

    # Start by reading from cache
    cache_yield_count = 0
    transport_yield_count = 0
    start = None
    if cache_file.exists():
        with cache_file.open() as f:
            for line in f:
                event_message = transport.deserializer(json.loads(line))
                if not args.cache_only:
                    if not hasattr(event_message, "datetime"):
                        # Messages do not provide a datetime, stop loading from the cache as
                        # this is required
                        logger.warning(
                            "Event transport does not provide message datetimes. Will not load"
                            " from cache."
                        )
                        break
                    start = (
                        max(event_message.datetime, start) if start else event_message.datetime
                    )
                cache_yield_count += 1
                _progress()
                yield event_message

    if args.cache_only:
        return

    def _write_to_cache(f, event_message):
        f.write(json.dumps(transport.serializer(event_message)))
        f.write("\n")
        f.flush()

    # Now get messages from the transport, writing to the cache as we go
    allow_following = True
    if start:
        logger.debug(
            f"Finished reading from cache. Now reading from {start} on {api_name}.{event_name}"
        )

    while True:
        with cache_file.open("a") as f:
            async for event_message in transport.history(
                api_name=api_name, event_name=event_name, start=start, start_inclusive=False
            ):
                _write_to_cache(f, event_message)
                transport_yield_count += 1
                _progress()
                yield event_message
                if hasattr(event_message, "datetime"):
                    start = event_message.datetime
                else:
                    # Following requires the datetime property on event messages
                    allow_following = False

        if args.follow:
            if allow_following:
                # We want to keep waiting for new messages, so wait a second then do it again
                await sleep(1)
            else:
                logger.warning(
                    "Event transport does not provide message datetimes. Following not supported."
                )
                break
        else:
            # No waiting for new messages, so break out of the while loop
            break

    _progress(force=True)
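# A standalone sketch of the cache-file naming used in get_messages() above,
# extracted here so it can be exercised outside the class. The scheme is:
# "<8-char sha1 of api\0event>-<api name with non-alphanumerics replaced by
# '_'>-<event name with '*' rendered as 'all'>.json". The function name and
# the cache_path parameter are illustrative only.
import re
from hashlib import sha1
from pathlib import Path


def cache_file_for(cache_path: Path, api_name: str, event_name: str = "*") -> Path:
    file_name_hash = sha1((api_name + "\0" + event_name).encode("utf8")).hexdigest()[:8]
    file_name_api = re.sub("[^a-zA-Z0-9_]", "_", api_name)
    file_name_event = event_name.replace("*", "all")
    return cache_path / f"{file_name_hash}-{file_name_api}-{file_name_event}.json"


# For example, cache_file_for(Path("/tmp/cache"), "my.company", "*") yields a
# path of the form /tmp/cache/<hash>-my_company-all.json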
def output(self, args, transport: EventTransport, message: EventMessage, bus: BusPath):
    """Print out the given message"""
    serialized = transport.serializer(message)

    if args.format in ("json", "pretty"):
        if args.format == "pretty":
            dumped = json.dumps(serialized, indent=4)
        else:
            dumped = json.dumps(serialized)
        sys.stdout.write(dumped)
        sys.stdout.write("\n")
        sys.stdout.flush()

    elif args.format == "human":
        print(Colors.BGreen, end="")
        print(f" {message.api_name}.{message.event_name} ".center(80, "="))
        if hasattr(message, "datetime"):
            print(f" {message.datetime.strftime('%c')} ".center(80, " "))
        print(Colors.Reset, end="")

        print(f"\n{Colors.BWhite}Metadata:{Colors.Reset}")
        for k, v in message.get_metadata().items():
            print(f"    {str(k).ljust(20)}: {v}")

        print(f"\n{Colors.BWhite}Data:{Colors.Reset}")
        for k, v in message.get_kwargs().items():
            if isinstance(v, (dict, list)):
                v = json.dumps(v, indent=4)
                pad = " " * 24
                v = "".join(pad + v for v in v.splitlines(keepends=True)).lstrip()
            print(f"    {str(k).ljust(20)}: {v}")

        if args.validate or args.show_casting:
            print(f"\n{Colors.BWhite}Extra:{Colors.Reset}")

        if args.validate:
            try:
                bus.client.schema.validate_parameters(
                    message.api_name, message.event_name, message.kwargs
                )
            except ValidationError as e:
                validation_message = f"{Colors.Red}{e}{Colors.Reset}"
            else:
                validation_message = f"{Colors.Green}Passed{Colors.Reset}"
            print(f"    Validation: {validation_message}")

        if args.show_casting:
            for listener in bus.client.event_client._event_listeners:
                if (message.api_name, message.event_name) not in listener.events:
                    continue
                hints = get_type_hints(listener.callable)
                casted = cast_to_signature(parameters=message.kwargs, callable=listener.callable)
                print(
                    f"\n    {Colors.BWhite}Casting for listener: {listener.name}{Colors.Reset}"
                )
                for key, value in message.kwargs.items():
                    was = type(value)
                    via = hints[key]
                    now = type(casted[key])
                    color = Colors.Green if via == now else Colors.Red
                    print(
                        f"        "
                        f"{color}{str(key).ljust(20)}: "
                        f"Received a '{was.__name__}', "
                        f"casted to a '{via.__name__}', "
                        f"result was a '{now.__name__}'"
                        f"{Colors.Reset}"
                    )

        print("\n")

    else:
        sys.stderr.write(f"Unknown output format '{args.format}'\n")
        sys.exit(1)
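# A hedged sketch of the command-line options these methods assume on `args`.
# The real CLI wiring lives elsewhere; the option names and defaults below are
# inferred only from the attribute accesses above (args.format, args.cache_only,
# args.follow, args.validate, args.show_casting) and may differ from the actual
# command definition. The _build_arg_parser name is illustrative only.
import argparse


def _build_arg_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="Inspect events on the bus")
    parser.add_argument("--format", choices=("json", "pretty", "human"), default="json")
    parser.add_argument("--cache-only", dest="cache_only", action="store_true")
    parser.add_argument("--follow", action="store_true")
    parser.add_argument("--validate", action="store_true")
    parser.add_argument("--show-casting", dest="show_casting", action="store_true")
    return parser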