Code Example #1
File: logic.py Project: GodOfOwls/Lightshield
    def __init__(self):
        """Initiate sync elements on creation."""
        self.logging = logging.getLogger("MatchHistory")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(
            logging.Formatter("%(asctime)s [MatchHistory] %(message)s"))
        self.logging.addHandler(handler)
        self.server = os.environ["SERVER"]

        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
        self.db.set_prepare(self.prepare)

        self.stopped = False
        self.retry_after = datetime.now()
        self.url = (
            f"http://{self.server.lower()}.api.riotgames.com/lol/" +
            "match/v4/matchlists/by-account/%s?beginIndex=%s&endIndex=%s")

        if "QUEUES" in os.environ:
            self.queues = [
                int(queue) for queue in os.environ["QUEUES"].split(",")
            ]
            self.url = self.url + "&queue=" + os.environ["QUEUES"]

        # Short-term buffer to keep track of currently ongoing requests.
        self.buffered_elements = {}

        self.active_tasks = []
        self.insert_query = None
Code Example #2
    def __init__(self):
        """Initiate sync elements on creation."""
        self.logging = logging.getLogger("MatchTimeline")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(
            logging.Formatter("%(asctime)s [MatchTimeline] %(message)s"))
        self.logging.addHandler(handler)

        self.proxy = os.environ["PROXY_URL"]
        self.server = os.environ["SERVER"]
        self.batch_size = int(os.environ["BATCH_SIZE"])

        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
        self.db.set_prepare(self.prepare)

        self.stopped = False
        self.retry_after = datetime.now()
        self.url = (f"http://{self.server.lower()}.api.riotgames.com/lol/" +
                    "match/v4/timelines/by-match/%s")

        # Short-term buffer to keep track of currently ongoing requests.
        self.buffered_elements = {}

        self.active_tasks = []
Code Example #3
    def __init__(self):
        self.logging = logging.getLogger("Main")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        self.logging.addHandler(handler)
        self.server = os.environ["SERVER"].lower()
        self.block_limit = int(os.environ["TASK_BLOCKING"])
        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
Code Example #4
    def __init__(self):
        self.logging = logging.getLogger("Main")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        self.min_matches = int(os.environ["MIN_MATCHES"])
        self.server = os.environ["SERVER"]
        self.logging.addHandler(handler)

        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
Code Example #5
File: run.py Project: geozukunft/Lightshield
class Manager:
    stopped = False

    def __init__(self):
        self.logging = logging.getLogger("Main")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        self.logging.addHandler(handler)
        self.limit = int(os.environ["LIMIT"])
        self.server = os.environ["SERVER"]
        self.block_limit = int(os.environ["TASK_BLOCKING"])
        self.details_cutoff = os.environ["DETAILS_CUTOFF"]
        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
        self.db.set_prepare(self.prepare)

    async def init(self):
        async with self.redis.get_connection() as buffer:
            await buffer.delete("%s_match_details_in_progress" % self.server)
            await buffer.delete("%s_match_details_tasks" % self.server)

    def shutdown(self):
        self.stopped = True

    async def prepare(self, conn):
        self.insert_data_entry = await conn.prepare("""
            INSERT INTO %s.match_data (match_id, queue, timestamp)
            VALUES ($1, $2, $3)
            ON CONFLICT DO NOTHING;
            """ % self.server.lower())

    async def get_tasks(self):
        """Return tasks and full_refresh flag.

        If there are non-initialized user found only those will be selected.
        If none are found a list of the user with the most new games are returned.
        """
        async with self.db.get_connection() as db:
            tasks = await db.fetch(
                """
                SELECT match_id, queue, timestamp
                FROM %s.match
                WHERE details_pulled IS NULL
                AND timestamp::date >= '%s'
                LIMIT $1;
                """ % (self.server.lower(), self.details_cutoff),
                self.limit * 2,
            )
            tasks_formatted = [[
                int(task["match_id"]),
                int(task["queue"]), task["timestamp"]
            ] for task in tasks]
            await self.insert_data_entry.executemany(tasks_formatted)
            return tasks

    async def run(self):
        await self.init()
        min_count = 100
        blocked = False
        try:
            while not self.stopped:
                # Drop timed out tasks
                limit = int((datetime.utcnow() -
                             timedelta(minutes=self.block_limit)).timestamp())
                async with self.redis.get_connection() as buffer:
                    await buffer.zremrangebyscore(
                        "%s_match_details_in_progress" % self.server,
                        max=limit)
                    # Check remaining buffer size
                    if (size := await buffer.scard("%s_match_details_tasks" %
                                                   self.server)) >= self.limit:
                        await asyncio.sleep(10)
                        continue
                    # Pull new tasks
                    result = await self.get_tasks()
                    if len(result) - size < min_count:
                        if not blocked:
                            self.logging.info("%s tasks remaining.", size)
                            self.logging.info("No tasks found.")
                            blocked = True
                        min_count -= 1
                        await asyncio.sleep(30)
                        continue
                    min_count = 100
                    self.logging.info("%s tasks remaining.", size)
                    self.logging.info("Found %s tasks.", len(result))
                    # Add new tasks
                    for entry in result:
                        # Each entry is referred to by its match_id
                        if await buffer.zscore(
                                "%s_match_details_in_progress" % self.server,
                                entry["match_id"],
                        ):
                            continue
                        # Insert task hook
                        await buffer.sadd(
                            "%s_match_details_tasks" % self.server,
                            entry["match_id"])

                    self.logging.info(
                        "Filled tasks to %s.",
                        await buffer.scard(
                            "%s_match_details_tasks" % self.server),
                    )
                    await asyncio.sleep(1)

                await asyncio.sleep(5)

        except Exception as err:
            traceback.print_tb(err.__traceback__)
            self.logging.info(err)
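
The Manager above runs until self.stopped is set, so in practice shutdown() is wired to process signals. A minimal sketch of that wiring, using only the standard library (the main() entry point is an assumption for illustration, not part of the original file):

# Sketch only: hook Manager.shutdown() to SIGINT/SIGTERM so run() exits cleanly.
import asyncio
import signal

async def main():
    manager = Manager()
    loop = asyncio.get_running_loop()
    for sig in (signal.SIGINT, signal.SIGTERM):
        # shutdown() only flips the stopped flag that run() checks each pass.
        loop.add_signal_handler(sig, manager.shutdown)
    await manager.run()

if __name__ == "__main__":
    asyncio.run(main())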
Code Example #6
class Service:
    """Core service worker object."""

    queues = None

    def __init__(self):
        """Initiate sync elements on creation."""
        self.logging = logging.getLogger("MatchTimeline")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(
            logging.Formatter("%(asctime)s [MatchTimeline] %(message)s"))
        self.logging.addHandler(handler)

        self.proxy = os.environ["PROXY_URL"]
        self.server = os.environ["SERVER"]
        self.batch_size = int(os.environ["BATCH_SIZE"])

        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
        self.db.set_prepare(self.prepare)

        self.stopped = False
        self.retry_after = datetime.now()
        self.url = (f"http://{self.server.lower()}.api.riotgames.com/lol/" +
                    "match/v4/timelines/by-match/%s")

        # Short-term buffer to keep track of currently ongoing requests.
        self.buffered_elements = {}

        self.active_tasks = []

    def shutdown(self):
        """Called on shutdown init."""
        self.stopped = True

    async def prepare(self, conn):
        self.match_update = await conn.prepare("""
        UPDATE %s.match
            SET timeline_pulled = TRUE
            WHERE match_id = $1
        """ % self.server.lower())
        self.match_data_update = await conn.prepare("""
            UPDATE %s.match_data
            SET timeline = $1
            WHERE match_id = $2
            """ % self.server.lower())

    async def flush_manager(self, match_timelines):
        """Update entries in postgres once enough tasks are done."""
        try:
            update_match_sets = []
            update_match_data_sets = []
            for match in match_timelines:
                if not match[1]:
                    continue
                timeline = match[1]
                # Timeline details
                update_match_sets.append((int(match[0]), ))
                update_match_data_sets.append((
                    json.dumps(timeline),
                    int(match[0]),
                ))
            if update_match_sets:
                async with self.db.get_connection() as db:
                    async with db.transaction():
                        await self.match_data_update.executemany(
                            update_match_data_sets)
                        await self.match_update.executemany(update_match_sets)
            self.logging.info("Inserted %s match_timelines.",
                              len(update_match_sets))

        except Exception as err:
            traceback.print_tb(err.__traceback__)
            self.logging.info(err)

    async def get_task(self):
        """Return tasks to the async worker."""
        async with self.redis.get_connection() as buffer:
            if not (tasks := await buffer.spop(
                    "%s_match_timeline_tasks" % self.server, self.batch_size)):
                return tasks
            if self.stopped:
                return
            start = int(datetime.utcnow().timestamp())
            for entry in tasks:
                await buffer.zadd(
                    "%s_match_timeline_in_progress" % self.server, start,
                    entry)
            return tasks
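
get_task above only reserves a batch of timeline tasks; the driver that pops them, fetches the timelines, and hands the results to flush_manager is not shown. A hedged sketch of such a loop, where fetch_timeline is a hypothetical coroutine returning the decoded timeline JSON (or None on failure):

    async def worker_loop(self):
        # Sketch only -- not part of the original class.
        while not self.stopped:
            tasks = await self.get_task()
            if not tasks:
                await asyncio.sleep(5)
                continue
            timelines = await asyncio.gather(
                *[self.fetch_timeline(match_id) for match_id in tasks])
            # flush_manager expects (match_id, timeline) pairs.
            await self.flush_manager(list(zip(tasks, timelines)))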
Code Example #7
File: logic.py Project: GodOfOwls/Lightshield
class Service:
    """Core service worker object."""

    queues = None

    def __init__(self):
        """Initiate sync elements on creation."""
        self.logging = logging.getLogger("MatchHistory")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(
            logging.Formatter("%(asctime)s [MatchHistory] %(message)s"))
        self.logging.addHandler(handler)
        self.server = os.environ["SERVER"]

        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
        self.db.set_prepare(self.prepare)

        self.stopped = False
        self.retry_after = datetime.now()
        self.url = (
            f"http://{self.server.lower()}.api.riotgames.com/lol/" +
            "match/v4/matchlists/by-account/%s?beginIndex=%s&endIndex=%s")

        if "QUEUES" in os.environ:
            self.queues = [
                int(queue) for queue in os.environ["QUEUES"].split(",")
            ]
            self.url = self.url + "&queue=" + os.environ["QUEUES"]

        # Short-term buffer to keep track of currently ongoing requests.
        self.buffered_elements = {}

        self.active_tasks = []
        self.insert_query = None

    def shutdown(self):
        """Called on shutdown init."""
        self.stopped = True

    async def flush_manager(self, matches, account_id, keys):
        """Update entries in postgres once enough tasks are done."""
        try:
            sets = []
            for entry in matches:
                if self.queues and int(entry["queue"]) not in self.queues:
                    continue
                sets.append((
                    entry["gameId"],
                    entry["queue"],
                    datetime.fromtimestamp(entry["timestamp"] // 1000),
                ))
            async with self.db.get_connection() as db:
                if sets:
                    await self.insert_query.executemany(sets)
                    self.logging.info("Inserted %s sets for %s.", len(sets),
                                      account_id)

                await db.execute(
                    """
                    UPDATE %s.summoner
                    SET wins_last_updated = $1,
                        losses_last_updated = $2
                    WHERE account_id = $3
                    """ % self.server.lower(),
                    int(keys["wins"]),
                    int(keys["losses"]),
                    account_id,
                )
        except Exception as err:
            traceback.print_tb(err.__traceback__)
            self.logging.info(err)

    async def prepare(self, connection):
        self.insert_query = await connection.prepare("""
                                INSERT INTO %s.match (match_id, queue, timestamp)
                                VALUES ($1, $2, $3)
                                ON CONFLICT DO NOTHING;
                                """ % self.server.lower())

    async def get_task(self):
        """Return tasks to the async worker."""
        async with self.redis.get_connection() as buffer:
            while (not (task := await buffer.zpopmax(
                    "%s_match_history_tasks" % self.server, 1))
                   and not self.stopped):
                await asyncio.sleep(5)
            if self.stopped:
                return
            keys = await buffer.hgetall("%s:%s:%s" %
                                        (self.server, task[0], task[1]))
            await buffer.delete("%s:%s:%s" % (self.server, task[0], task[1]))
            start = int(datetime.utcnow().timestamp())
            await buffer.zadd("%s_match_history_in_progress" % self.server,
                              start, task[0])
            return [task[0], int(task[1])], keys
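
The url assembled in __init__ keeps %s placeholders for the account id and the page bounds (plus the optional queue filter). A minimal sketch of filling it for a single page request; the fetch_page helper and the aiohttp session are assumptions, not part of the original class:

    async def fetch_page(self, session, account_id, begin_index, end_index):
        # Sketch only -- session is assumed to be an aiohttp.ClientSession.
        url = self.url % (account_id, begin_index, end_index)
        async with session.get(url) as response:
            return await response.json()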
Code Example #8
class Manager:
    stopped = False

    def __init__(self):
        self.logging = logging.getLogger("Main")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        self.logging.addHandler(handler)
        self.server = os.environ["SERVER"].lower()
        self.block_limit = int(os.environ["TASK_BLOCKING"])
        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())

    async def init(self):
        async with self.redis.get_connection() as connection:
            await connection.delete("%s_summoner_id_in_progress" % self.server)
            await connection.delete("%s_summoner_id_tasks" % self.server)

    def shutdown(self):
        self.stopped = True

    async def run(self):
        await self.init()
        minimum = 100  # Local minimum that gets reset every time tasks are inserted
        blocked = False
        while not self.stopped:
            # Drop timed out tasks
            limit = int(
                (datetime.utcnow() - timedelta(minutes=self.block_limit)).timestamp()
            )
            async with self.redis.get_connection() as buffer:
                await buffer.zremrangebyscore(
                    "%s_summoner_id_in_progress" % self.server, max=limit
                )
                # Check remaining buffer size
                if (
                    size := await buffer.scard("%s_summoner_id_tasks" % self.server)
                ) >= 1000:
                    await asyncio.sleep(10)
                    continue
            async with self.db.get_connection() as db:
                result = await db.fetch(
                    """
                    SELECT summoner_id
                    FROM %s.summoner
                    WHERE account_id IS NULL
                    LIMIT 2000;
                    """
                    % self.server.lower()
                )
            if len(result) - size < minimum:
                if not blocked:
                    self.logging.info("No tasks found.")
                blocked = True
                minimum -= 1
                await asyncio.sleep(30)
                continue
            minimum = 100
            self.logging.info("%s tasks remaining.", size)
            async with self.redis.get_connection() as buffer:
                for entry in result:
                    if await buffer.sismember(
                        "%s_summoner_id_tasks" % self.server, entry["summoner_id"]
                    ):
                        continue
                    await buffer.sadd(
                        "%s_summoner_id_tasks" % self.server, entry["summoner_id"]
                    )
                    if await buffer.scard("%s_summoner_id_tasks" % self.server) >= 2000:
                        break
                self.logging.info(
                    "Filled tasks to %s.",
                    await buffer.scard("%s_summoner_id_tasks" % self.server),
                )
            await asyncio.sleep(5)
        await self.redis.close()
        await self.db.close()
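
The zremrangebyscore sweep at the top of run() drops any in-progress marker older than TASK_BLOCKING minutes, which effectively requeues stalled work; a worker is therefore expected to clear its marker once a task finishes. A sketch of that acknowledgement (the method itself is an assumption; the key name matches the one swept above):

    async def complete_task(self, summoner_id):
        # Sketch only -- hypothetical worker-side acknowledgement.
        async with self.redis.get_connection() as buffer:
            # Clear the timeout marker so the finished task is never
            # treated as stalled by the manager's sweep.
            await buffer.zrem(
                "%s_summoner_id_in_progress" % self.server, summoner_id)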
Code Example #9
class Manager:
    stopped = False

    def __init__(self):
        self.logging = logging.getLogger("Main")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        self.min_matches = int(os.environ["MIN_MATCHES"])
        self.server = os.environ["SERVER"]
        self.logging.addHandler(handler)

        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())

    async def init(self):
        async with self.redis.get_connection() as buffer:
            await buffer.delete("%s_match_history_in_progress" % self.server)
            await buffer.delete("%s_match_history_tasks" % self.server)

    def shutdown(self):
        self.stopped = True

    async def get_tasks(self):
        """Return tasks and full_refresh flag.

        If non-initialized users are found, only those are selected.
        If none are found, a list of the users with the most new games is returned.
        """
        async with self.db.get_connection() as db:
            full_refresh = await db.fetch("""
                                    SELECT account_id, 
                                           wins, 
                                           losses
                                    FROM %s.summoner
                                    WHERE wins_last_updated IS NULL 
                                    AND account_id IS NOT NULL
                                    ORDER BY (wins + losses) DESC
                                    LIMIT 2000;
                                    """ % self.server.lower())
            if len(full_refresh) >= 100:
                self.logging.info("Found %s full refresh tasks." %
                                  len(full_refresh))
                return full_refresh, True
            partial_refresh = await db.fetch(
                """
            SELECT account_id, 
                   wins, 
                   losses, 
                   wins_last_updated, 
                   losses_last_updated
            FROM %s.summoner
            WHERE wins_last_updated IS NOT NULL
            AND account_id IS NOT NULL
            AND (wins + losses - wins_last_updated - losses_last_updated) >= $1
            ORDER BY (wins + losses - wins_last_updated - losses_last_updated) DESC
            LIMIT 2000;
            """ % self.server.lower(),
                self.min_matches,
            )
            self.logging.info("Found %s partial refresh tasks." %
                              len(partial_refresh))
            return partial_refresh, False

    async def run(self):
        await self.init()

        while not self.stopped:
            # Drop timed out tasks
            limit = int(
                (datetime.utcnow() - timedelta(minutes=10)).timestamp())
            async with self.redis.get_connection() as buffer:
                await buffer.zremrangebyscore("%s_match_history_in_progress" %
                                              self.server,
                                              max=limit)
                # Check remaining buffer size
                if (size := await buffer.zcard(
                        "%s_match_history_tasks" % self.server)) >= 1000:
                    await asyncio.sleep(10)
                    continue

                result, full_refreshes = await self.get_tasks()
                if not result:
                    self.logging.info("No tasks found.")
                    await asyncio.sleep(60)
                    continue
                # Add new tasks
                self.logging.info("%s tasks remaining.", size)
                for entry in result:
                    # Each entry is referred to by its account_id
                    if await buffer.zscore(
                            "%s_match_history_in_progress" % self.server,
                            entry["account_id"],
                    ):
                        continue
                    if full_refreshes:
                        z_index = 9999
                        package = {
                            key: entry[key]
                            for key in ["wins", "losses"]
                        }
                    else:
                        z_index = (entry["wins"] + entry["losses"] -
                                   entry["wins_last_updated"] -
                                   entry["losses_last_updated"])
                        package = {
                            key: entry[key]
                            for key in [
                                "wins",
                                "losses",
                                "wins_last_updated",
                                "losses_last_updated",
                            ]
                        }

                    # Insert task hook
                    await buffer.zadd(
                        "%s_match_history_tasks" % self.server,
                        z_index,
                        entry["account_id"],
                    )
                    # Insert task hash
                    await buffer.hmset_dict(
                        "%s:%s:%s" %
                        (self.server, entry["account_id"], z_index),
                        package,
                    )
                self.logging.info(
                    "Filled tasks to %s.",
                    await buffer.zcard("%s_match_history_tasks" % self.server),
                )

            await asyncio.sleep(5)
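
Example #7's get_task drains this queue with zpopmax, i.e. highest score first, so the fixed z_index of 9999 keeps full refreshes ahead of every partial refresh, whose score is just the count of unseen games. A toy, plain-Python sketch of that ordering:

# Toy sketch -- a dict stands in for the Redis sorted set; the scores
# mirror the z_index values computed above.
tasks = {"new-account": 9999, "busy-account": 42, "quiet-account": 11}
order = sorted(tasks, key=tasks.get, reverse=True)  # zpopmax order
print(order)  # ['new-account', 'busy-account', 'quiet-account']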
Code Example #10
class Service:
    """Core service worker object."""

    queues = None

    def __init__(self):
        """Initiate sync elements on creation."""
        self.logging = logging.getLogger("MatchDetails")
        level = logging.INFO
        self.logging.setLevel(level)
        handler = logging.StreamHandler()
        handler.setLevel(level)
        handler.setFormatter(
            logging.Formatter("%(asctime)s [MatchDetails] %(message)s"))
        self.logging.addHandler(handler)

        self.rune_ids = get_ids()
        self.rune_tree = get_trees()

        self.server = os.environ["SERVER"]
        self.batch_size = int(os.environ["BATCH_SIZE"])

        self.redis = RedisConnector()
        self.db = PostgresConnector(user=self.server.lower())
        self.db.set_prepare(self.prepare)

        self.stopped = False
        self.retry_after = datetime.now()
        self.url = (f"http://{self.server.lower()}.api.riotgames.com/lol/" +
                    "match/v4/matches/%s")

        # Short-term buffer to keep track of currently ongoing requests.
        self.buffered_elements = {}

        self.active_tasks = []

    def shutdown(self):
        """Called on shutdown init."""
        self.stopped = True

    async def prepare(self, conn):
        template = ("""
                        INSERT INTO %s.team
                            (match_id, timestamp, win, side, bans, tower_kills, inhibitor_kills,
                             first_tower, first_rift_herald, first_dragon, first_baron, 
                             rift_herald_kills, dragon_kills, baron_kills)
                        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
                        ON CONFLICT DO NOTHING;
                        """ % self.server.lower())
        self.team_insert = await conn.prepare(template)

        template = ("""
                        INSERT INTO %s.participant
                        (match_id, timestamp, win, participant_id, summoner_id, summoner_spell,
                         rune_main_tree, rune_sec_tree, rune_main_select,
                         rune_sec_select,  -- 10
                         rune_shards, item, trinket, champ_level, champ_id, kills, deaths, assists, gold_earned,
                         neutral_minions_killed, neutral_minions_killed_enemy, 
                         neutral_minions_killed_team, total_minions_killed, 
                         vision_score, vision_wards_bought, wards_placed,
                         wards_killed, physical_taken, magical_taken, true_taken, 
                         damage_mitigated, physical_dealt, magical_dealt, 
                         true_dealt, turret_dealt, objective_dealt, total_heal,
                         total_units_healed, time_cc_others, total_cc_dealt)
                        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,
                                $11, $12, $13, $14, $15, $16, $17, $18, $19, $20,
                                $21, $22, $23, $24, $25, $26, $27, $28, $29, $30,
                                $31, $32, $33, $34, $35, $36, $37, $38, $39, $40)
                        ON CONFLICT DO NOTHING
                        """ % self.server.lower())
        self.participant_insert = await conn.prepare(template)

        self.match_update = await conn.prepare("""
        UPDATE %s.match
            SET duration = $1,
                win = $2,
                details_pulled = TRUE
            WHERE match_id = $3
        """ % self.server.lower())

    async def flush_manager(self, match_details):
        """Update entries in postgres once enough tasks are done."""
        try:
            match_ids = []
            for match in match_details:
                match_ids.append(match[0])

            async with self.db.get_connection() as db:
                existing_ids = [
                    match["match_id"] for match in await db.fetch("""
                    SELECT DISTINCT match_id
                    FROM %s.team
                    WHERE match_id IN (%s);
                    """ % (self.server.lower(), ",".join(match_ids)))
                ]
            team_sets = []
            participant_sets = []
            update_sets = []
            for match in match_details:
                if not match[1]:
                    continue
                if match[0] in existing_ids:
                    continue
                details = match[1]
                # Match result and team details
                update_sets.append((
                    details["gameDuration"],
                    details["teams"][0]["win"] == "Win",
                    int(match[0]),
                ))
                for team in details["teams"]:
                    bans = [ban["championId"] for ban in team["bans"]]
                    team_sets.append((
                        int(match[0]),
                        datetime.fromtimestamp(details["gameCreation"] //
                                               1000),
                        team["win"] == "Win",
                        team["teamId"] == 200,
                        bans,
                        team["towerKills"],
                        team["inhibitorKills"],
                        team["firstTower"],
                        team["firstRiftHerald"],
                        team["firstDragon"],
                        team["firstBaron"],
                        team["riftHeraldKills"],
                        team["dragonKills"],
                        team["baronKills"],
                    ))
                participants = {}
                for entry in details["participants"]:
                    participants[entry["participantId"]] = entry
                for entry in details["participantIdentities"]:
                    participants[entry["participantId"]].update(entry)

                for participant in participants.values():
                    try:
                        participant_sets.append((
                            int(match[0]),
                            datetime.fromtimestamp(details["gameCreation"] //
                                                   1000),
                            details["teams"][participant["participantId"] //
                                             6]["win"] == "Win",
                            participant["participantId"],
                            participant["player"]["summonerId"],
                            [participant["spell1Id"], participant["spell2Id"]],
                            self.rune_tree[participant["stats"]["perk0"]],
                            self.rune_tree[participant["stats"]["perk4"]],
                            self.rune_ids[participant["stats"]["perk0"]] +
                            self.rune_ids[participant["stats"]["perk1"]] +
                            self.rune_ids[participant["stats"]["perk2"]] +
                            self.rune_ids[participant["stats"]["perk3"]],
                            self.rune_ids[participant["stats"]["perk4"]] +
                            self.rune_ids[participant["stats"]["perk5"]],
                            shard_id[participant["stats"]["statPerk0"]] * 100 +
                            shard_id[participant["stats"]["statPerk1"]] * 10 +
                            shard_id[participant["stats"]["statPerk2"]],
                            [
                                participant["stats"]["item0"],
                                participant["stats"]["item1"],
                                participant["stats"]["item2"],
                                participant["stats"]["item3"],
                                participant["stats"]["item4"],
                                participant["stats"]["item5"],
                            ],
                            participant["stats"]["item6"],
                            participant["stats"]["champLevel"],
                            participant["championId"],
                            participant["stats"]["kills"],
                            participant["stats"]["deaths"],
                            participant["stats"]["assists"],
                            participant["stats"]["goldEarned"],
                            participant["stats"]["neutralMinionsKilled"],
                            participant["stats"]
                            ["neutralMinionsKilledEnemyJungle"],
                            participant["stats"]
                            ["neutralMinionsKilledTeamJungle"],
                            participant["stats"]["totalMinionsKilled"],
                            participant["stats"]["visionScore"],
                            participant["stats"]["visionWardsBoughtInGame"],
                            participant["stats"]["wardsPlaced"],
                            participant["stats"]["wardsKilled"],
                            participant["stats"]["physicalDamageTaken"],
                            participant["stats"]["magicalDamageTaken"],
                            participant["stats"]["trueDamageTaken"],
                            participant["stats"]["damageSelfMitigated"],
                            participant["stats"]
                            ["physicalDamageDealtToChampions"],
                            participant["stats"]
                            ["magicDamageDealtToChampions"],
                            participant["stats"]["trueDamageDealtToChampions"],
                            participant["stats"]["damageDealtToTurrets"],
                            participant["stats"]["damageDealtToObjectives"],
                            participant["stats"]["totalHeal"],
                            participant["stats"]["totalUnitsHealed"],
                            participant["stats"]["timeCCingOthers"],
                            participant["stats"]["totalTimeCrowdControlDealt"],
                        ))
                    except Exception as err:
                        self.logging.info(int(match[0]))
                        raise err
            if team_sets:
                async with self.db.get_connection() as db:
                    await self.team_insert.executemany(team_sets)

            if participant_sets:
                async with self.db.get_connection() as db:
                    await self.participant_insert.executemany(participant_sets)

            if update_sets:
                async with self.db.get_connection() as db:
                    await self.match_update.executemany(update_sets)
            self.logging.info("Inserted %s match_details.", len(update_sets))

        except Exception as err:
            traceback.print_tb(err.__traceback__)
            self.logging.info(err)

    async def get_task(self):
        """Return tasks to the async worker."""
        async with self.redis.get_connection() as buffer:
            if not (tasks := await buffer.spop(
                    "%s_match_details_tasks" % self.server, self.batch_size)):
                return tasks
            if self.stopped:
                return
            start = int(datetime.utcnow().timestamp())
            for entry in tasks:
                await buffer.zadd("%s_match_details_in_progress" % self.server,
                                  start, entry)
            return tasks
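
The three statPerk shards above are packed into a single decimal number, one digit per slot (hundreds, tens, ones). A toy sketch of the packing; the digit values in shard_id are invented for illustration, the real mapping is project data:

shard_id = {5008: 1, 5005: 2, 5002: 3}  # hypothetical perk-id -> digit
packed = shard_id[5008] * 100 + shard_id[5005] * 10 + shard_id[5002]
print(packed)  # 123 -> one digit per shard slot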