def validate_parser(parser, meta):
    # Validate upload
    if len(parser.games) != 1:
        raise ValidationError("Expected exactly 1 game, got %i" % (len(parser.games)))
    packet_tree = parser.games[0]
    with influx_timer("replay_exporter_duration"):
        exporter = RedshiftPublishingExporter(
            packet_tree,
            stream_prefix=fetch_active_stream_prefix()
        ).export()

    game = exporter.game

    if len(game.players) != 2:
        raise ValidationError("Expected 2 players, found %i" % (len(game.players)))

    for player in game.players:
        # Set the player's name
        player.name = parser.games[0].manager.get_player_by_id(player.id).name
        if player.name is None:
            # If it's None, this is an unsupported replay.
            log.error("Cannot find player %i name. Replay not supported.", player.player_id)
            raise GameTooShort("The game was too short to parse correctly")

        heroes = list(player.heroes)
        if not heroes:
            raise UnsupportedReplay("No hero found for player %r" % (player.name))
        player._hero = heroes[0]

        try:
            db_hero = Card.objects.get(id=player._hero.card_id)
        except Card.DoesNotExist:
            raise UnsupportedReplay("Hero %r not found." % (player._hero))
        if db_hero.type != CardType.HERO:
            raise ValidationError("%r is not a valid hero." % (player._hero))

    friendly_player_id = packet_tree.export(cls=FriendlyPlayerExporter)
    if friendly_player_id:
        meta["friendly_player"] = friendly_player_id
    elif "friendly_player" not in meta:
        raise ValidationError("Friendly player ID not present at upload and could not guess it.")

    # We ignore "reconnecting" from the API; we only trust the log.
    # if "reconnecting" not in meta:
    #     meta["reconnecting"] = False

    # There are two ways of identifying a reconnected game:
    # In reconnected games, the initial CREATE_GAME packet contains a STEP and STATE value.
    # In older versions of HS (pre-13xxx), STATE is RUNNING even in the CREATE_GAME packet.
    # Thankfully, looking at STEP is consistent across all versions, so we use that.
    # It will be Step.INVALID if it's NOT a reconnected game.
    meta["reconnecting"] = bool(game.initial_step)

    # Add the start/end time to the meta dict
    meta["start_time"] = packet_tree.start_time
    meta["end_time"] = packet_tree.end_time

    return game, exporter
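
# A hedged usage sketch for validate_parser: parse a Power.log with hslog's
# LogParser (a real class in the python-hearthstone package) and run the
# result through the validator. The function name, log path and empty meta
# dict here are illustrative assumptions, not part of the codebase.
def example_validate_upload(log_path="Power.log"):
    from hearthstone.hslog import LogParser

    parser = LogParser()
    with open(log_path) as f:
        parser.read(f)
    return validate_parser(parser, meta={})
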
def authentication_error(self, request, provider_id, error, exception, extra_context):
    # Triggered upon social network login failure.
    log.error("[%s] Authentication error: %r (exception=%r)", provider_id, error, exception)

    # Write the error details to Influx
    region = request.GET.get("region", "us")
    payload = {
        "measurement": "hsreplaynet_socialauth_error",
        "tags": {"provider_id": provider_id, "error": error, "region": region},
        "fields": {"exception": str(exception)},
    }
    influx_write_payload([payload])
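
# A minimal sketch of the influx_write_payload helper assumed above, using
# the influxdb-python client. The host/port/database values are placeholder
# assumptions; write_points() is the client's real entry point and accepts
# a list of {"measurement", "tags", "fields"} dicts like the payload above.
from influxdb import InfluxDBClient

_influx = InfluxDBClient(host="localhost", port=8086, database="hsreplaynet")

def influx_write_payload(payload):
    # Forward the pre-built point dicts straight to InfluxDB
    _influx.write_points(payload)
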
def validate_parser(parser, meta):
    # Validate upload
    if len(parser.games) != 1:
        raise ValidationError("Expected exactly 1 game, got %i" % (len(parser.games)))
    packet_tree = parser.games[0]
    exporter = InstrumentedExporter(packet_tree, meta).export()
    entity_tree = exporter.game

    if len(entity_tree.players) != 2:
        raise ValidationError("Expected 2 players, found %i" % (len(entity_tree.players)))

    for player in entity_tree.players:
        # Set the player's name
        player.name = parser.games[0].manager.get_player_by_id(player.id).name
        if player.name is None:
            # If it's None, this is an unsupported replay.
            log.error("Cannot find player %i name. Replay not supported.", player.player_id)
            raise GameTooShort("The game was too short to parse correctly")

        heroes = list(player.heroes)
        if not heroes:
            raise UnsupportedReplay("No hero found for player %r" % (player.name))
        player._hero = heroes[0]

        try:
            db_hero = Card.objects.get(id=player._hero.card_id)
        except Card.DoesNotExist:
            raise UnsupportedReplay("Hero %r not found." % (player._hero))
        if db_hero.type != CardType.HERO:
            raise ValidationError("%r is not a valid hero." % (player._hero))

    if not meta.get("friendly_player"):
        from hearthstone.hslog.export import FriendlyPlayerExporter
        friendly_player_id = packet_tree.export(cls=FriendlyPlayerExporter)
        if not friendly_player_id:
            raise ValidationError("Friendly player ID not present at upload and could not guess it.")
        meta["friendly_player"] = friendly_player_id

    if "reconnecting" not in meta:
        meta["reconnecting"] = False

    # Add the start/end time to the meta dict
    meta["start_time"] = packet_tree.start_time
    meta["end_time"] = packet_tree.end_time

    return entity_tree, exporter
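
# InstrumentedExporter is defined elsewhere in the codebase. A hedged sketch
# of its likely shape: hslog's EntityTreeExporter (a real class in
# hearthstone.hslog.export, the same module FriendlyPlayerExporter comes
# from) extended to keep a reference to the upload meta. Any instrumentation
# beyond that is an assumption.
from hearthstone.hslog.export import EntityTreeExporter

class InstrumentedExporter(EntityTreeExporter):
    def __init__(self, packet_tree, meta):
        super().__init__(packet_tree)
        self._meta = meta
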
def authentication_error(self, request, provider_id, error, exception, extra_context):
    # Triggered upon social network login failure.
    log.error("[%s] Authentication error: %r (exception=%r)", provider_id, error, exception)

    # Write the error details to Influx
    region = request.GET.get("region", "us")
    influx_metric(
        "hsreplaynet_socialauth_error",
        {"count": 1, "exception": str(exception)},
        provider_id=provider_id,
        error=error,
        region=region,
    )
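
# A hedged sketch of the influx_metric convenience wrapper used above: it
# folds keyword arguments into tags and delegates to a payload writer such
# as the influx_write_payload helper shown earlier. The real implementation
# (e.g. timestamp handling) may differ.
def influx_metric(measurement, fields, **tags):
    influx_write_payload([{
        "measurement": measurement,
        "tags": tags,
        "fields": fields,
    }])
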
def validate_parser(parser, meta):
    # Validate upload
    if len(parser.games) != 1:
        raise ValidationError("Expected exactly 1 game, got %i" % (len(parser.games)))
    packet_tree = parser.games[0]
    exporter = packet_tree.export()
    entity_tree = exporter.game

    if len(entity_tree.players) != 2:
        raise ValidationError("Expected 2 players, found %i" % (len(entity_tree.players)))

    for player in entity_tree.players:
        # Set the player's name
        player.name = parser.games[0].manager.get_player_by_id(player.id).name
        if player.name is None:
            # If it's None, this is an unsupported replay.
            log.error("Cannot find player %i name. Replay not supported.", player.player_id)
            raise GameTooShort("The game was too short to parse correctly")

        # player.heroes is lazily evaluated, so materialize it once;
        # truth-testing the raw generator would never detect emptiness.
        heroes = list(player.heroes)
        if not heroes:
            raise UnsupportedReplay("No hero found for player %r" % (player.name))
        player._hero = heroes[0]

        try:
            db_hero = Card.objects.get(id=player._hero.card_id)
        except Card.DoesNotExist:
            raise UnsupportedReplay("Hero %r not found." % (player._hero))
        if db_hero.type != CardType.HERO:
            raise ValidationError("%r is not a valid hero." % (player._hero))

    if not meta.get("friendly_player"):
        from hearthstone.hslog.export import FriendlyPlayerExporter
        friendly_player_id = packet_tree.export(cls=FriendlyPlayerExporter)
        if not friendly_player_id:
            raise ValidationError("Friendly player ID not present at upload and could not guess it.")
        meta["friendly_player"] = friendly_player_id

    if "reconnecting" not in meta:
        meta["reconnecting"] = False

    # Add the start/end time to the meta dict
    meta["start_time"] = packet_tree.start_time
    meta["end_time"] = packet_tree.end_time

    return entity_tree
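
# The custom exceptions raised by validate_parser are defined elsewhere in
# the codebase. A minimal hedged sketch, assuming plain Exception subclasses
# (the real base classes may differ):
class UnsupportedReplay(Exception):
    pass

class GameTooShort(Exception):
    pass
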
def write_messages_to_queue(queue_name, messages):
    queue_url = get_or_create_queue(queue_name)
    # Messages can be batched to SQS 10 at a time
    for batch in batches(messages, 10):
        entries = []
        for i, message in enumerate(batch):
            entries.append({
                "Id": str(i),
                "MessageBody": json.dumps(message, separators=(",", ":"))
            })

        response = SQS.send_message_batch(QueueUrl=queue_url, Entries=entries)
        if response.get("Failed"):
            log.error(json.dumps(response["Failed"]))
            raise RuntimeError(json.dumps(response["Failed"]))
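
# A minimal sketch of the batches() helper assumed above: yields successive
# chunks of at most n items from an iterable (SQS send_message_batch accepts
# at most 10 entries per call). The real helper may be implemented elsewhere.
def batches(iterable, n):
    batch = []
    for item in iterable:
        batch.append(item)
        if len(batch) == n:
            yield batch
            batch = []
    if batch:
        # Emit the final, possibly short, chunk
        yield batch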