Code example #1
import os
import uuid
from zipfile import ZipFile

# LibPool and app_config are provided by the surrounding application and are
# not shown here.


def save_database_csv(type: str, output_filepath: str) -> bool:
    tables = ['{}_game_analysis'.format(type)]
    if type == 'current':
        tables = ['current_game_watch'] + tables
    if type == 'past':
        tables = ['past_game_stats'] + tables

    joined_tables = [
        '{}_game_player_stats'.format(type), '{}_game_team_stats'.format(type)
    ]
    if type == 'current':
        joined_tables = [
            'current_game_stats', 'current_game_unchanged',
            'current_game_team_unchanged', 'current_game_player_unchanged'
        ] + joined_tables

    # Save database data in CSV files
    sql = LibPool().libsql
    sql.start_trans_mode()
    cur = sql.cursor()
    tmp_files = {}
    queries = []

    for table in tables:
        csv_tmp_file_path = os.path.normpath(
            os.path.join(app_config['path_storage'], 'tmp', str(uuid.uuid4())))
        tmp_files['{}.csv'.format(table)] = csv_tmp_file_path
        queries.append(
            '''COPY (SELECT * FROM "{}") TO '{}' WITH CSV HEADER;'''.format(
                table, csv_tmp_file_path))

    for i, table in enumerate(joined_tables, start=1):
        csv_tmp_file_path = os.path.normpath(
            os.path.join(app_config['path_storage'], 'tmp', str(uuid.uuid4())))
        tmp_files['{}.csv'.format(table)] = csv_tmp_file_path
        if i == 1 and type == 'current':
            queries.append('''
                COPY (
                    SELECT "S".*
                    FROM "current_game_watch" "W"
                    INNER JOIN "{}" "S"
                        ON "S"."watch_game_id" = "W"."id"
                ) TO '{}' WITH CSV HEADER;'''.format(table, csv_tmp_file_path))
        elif type == 'current':
            queries.append('''
                COPY (
                    SELECT "TABLE".*
                    FROM "current_game_watch" "W"
                    INNER JOIN "current_game_stats" "S"
                        ON "S"."watch_game_id" = "W"."id"
                    INNER JOIN "{}" "TABLE"
                        ON "TABLE"."stats_game_id" = "S"."id"
                ) TO '{}' WITH CSV HEADER;'''.format(table, csv_tmp_file_path))
        elif type == 'past':
            queries.append('''
                COPY (
                    SELECT "TABLE".*
                    FROM "past_game_stats" "S"
                    INNER JOIN "{}" "TABLE"
                        ON "TABLE"."stats_game_id" = "S"."id"
                ) TO '{}' WITH CSV HEADER;'''.format(table, csv_tmp_file_path))

    for query in queries:
        cur.q(query)
    sql.finish_trans_mode()

    # ZIP files into final package
    with ZipFile(output_filepath, 'w') as myzip:
        for tmp_name, tmp_file in tmp_files.items():
            myzip.write(tmp_file, tmp_name)
            os.unlink(tmp_file)

    return True
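
A minimal usage sketch of the export above; the output paths and the 'current'/'past' values are illustrative, and LibPool and app_config must already be configured by the application:

# Hypothetical call site, not part of the original code
if __name__ == '__main__':
    save_database_csv('current', '/tmp/current_export.zip')
    save_database_csv('past', '/tmp/past_export.zip')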
Code example #2
    def process_data(self,
                     type: str,
                     data_src: str,
                     game_name: StrNone = None,
                     data_src_tournament_id: IntNone = None
                     ) -> Union[OrderedDict, str]:
        # Early exits below return a plain message string, hence the Union
        # return type (Union comes from typing)
        if type == 'past':
            # Online analysis is already done while grabbing the data,
            # so there is nothing to process for past games
            raise NotImplementedError

        # Current data
        sql = LibPool().libsql
        sql.start_trans_mode()
        cur = sql.cursor()

        # Games
        # games_watch_count
        query_addon = ''
        if game_name is not None:
            query_addon = """\nAND "W"."game_name" = %(game_name)s"""
        if data_src_tournament_id is not None:
            query_addon += """\nAND "W"."data_src_tournament_id" = %(data_src_tournament_id)s"""
        games_watch_data = cur.qfo(
            '''
            SELECT COUNT("W"."id") AS "count"
            FROM "current_game_watch" "W"
            WHERE "W"."data_src" = %(data_src)s
                AND "W"."is_deleted" = false {}
        '''.format(query_addon), {
                'data_src': data_src,
                'game_name': game_name,
                'data_src_tournament_id': data_src_tournament_id
            })
        games_watch_count = games_watch_data['count']
        if games_watch_count == 0:
            return 'No games to watch'

        # games_watch_with_stats_count
        games_watch_with_stats_data = cur.qfa(
            '''
            SELECT "W"."id"
            FROM "current_game_watch" "W"
            INNER JOIN "current_game_stats" "S"
                ON "S"."watch_game_id" = "W"."id"
            WHERE "W"."data_src" = %(data_src)s
                AND "W"."is_deleted" = false {}
            GROUP BY "W"."id"
        '''.format(query_addon), {
                'data_src': data_src,
                'game_name': game_name,
                'data_src_tournament_id': data_src_tournament_id
            })
        games_watch_with_stats_count = len(games_watch_with_stats_data)
        if games_watch_with_stats_count == 0:
            return 'No games to watch with stats to analyze'

        # games_correction_percent
        games_watch_with_stats_percent = 0
        if games_watch_with_stats_count > 0:
            games_watch_with_stats_percent = round(
                games_watch_with_stats_count * 100 / games_watch_count, 2)

        # Get stats data for later analysis
        games_stats_data = cur.qfa(
            '''
            SELECT
                "W"."id" AS "watch_id",
                "W"."game_name",
                "W"."data_src_finish_datetime",
                COUNT("S"."id") AS "stats_count",
                ARRAY_AGG("S"."id"::int8) AS "stats"
            FROM "current_game_watch" "W"
            INNER JOIN "current_game_stats" "S"
                ON "S"."watch_game_id" = "W"."id"
            WHERE "W"."data_src" = %(data_src)s
                AND "W"."is_deleted" = false {}
            GROUP BY "W"."id"
            ORDER BY "W"."id"
        '''.format(query_addon), {
                'data_src': data_src,
                'game_name': game_name,
                'data_src_tournament_id': data_src_tournament_id
            })
        games_stats_watch_ids = [x['watch_id'] for x in games_stats_data]
        games_stats_counts = [x['stats_count'] for x in games_stats_data]
        games_stats_corrected_ids = [
            x['stats'] for x in games_stats_data if x['stats_count'] > 1
        ]
        games_stats_corrected_first_last_ids = [
            (min(x['stats']), max(x['stats']))
            for x in games_stats_data if x['stats_count'] > 1
        ]

        # games_watch_with_stats_corrected_count
        games_watch_with_stats_corrected_count = len(games_stats_corrected_ids)

        # games_watch_with_stats_corrected_percent
        games_watch_with_stats_corrected_percent = 0
        if games_watch_with_stats_corrected_count > 0:
            games_watch_with_stats_corrected_percent = round(
                games_watch_with_stats_corrected_count * 100 /
                games_watch_with_stats_count, 2)

        # games_correction_count
        correction_count_list = [x for x in games_stats_counts if x > 1]
        games_stats_correction_count = sum(correction_count_list) - len(
            correction_count_list)  # Minus first game

        # games_stats_correction_per_game_average_count
        games_stats_correction_per_game_average_count = 0
        if games_watch_with_stats_corrected_count > 0:
            games_stats_correction_per_game_average_count = round(
                games_stats_correction_count /
                games_watch_with_stats_corrected_count, 2)

        # games_stats_game_end_save_stats_average_seconds_diff
        games_stats_save_times = []
        if games_stats_watch_ids:
            for watch_game_id in games_stats_watch_ids:
                data = cur.qfo('''
                    SELECT "W"."data_src_finish_datetime", "S"."insert_datetime"
                    FROM "current_game_watch" "W"
                    INNER JOIN "current_game_stats" "S"
                        ON "S"."watch_game_id" = "W"."id"
                    WHERE "W"."id" = %s
                    ORDER BY "W"."id", "S"."id"
                    LIMIT 1
                ''',
                               params=[watch_game_id])
                games_stats_save_times.append(
                    (data['insert_datetime'] -
                     data['data_src_finish_datetime']).total_seconds())
        games_stats_game_end_save_stats_average_seconds_diff = 0
        if games_stats_save_times:
            games_stats_game_end_save_stats_average_seconds_diff = round(
                sum(games_stats_save_times) / len(games_stats_save_times), 2)

        # games_stats_save_stats_last_correction_average_seconds_diff
        games_stats_correction_times = []
        if games_stats_corrected_first_last_ids:
            for first_game_id, last_game_id in games_stats_corrected_first_last_ids:
                first_last = cur.qfa('''
                    SELECT "id", "insert_datetime" FROM "current_game_stats"
                    WHERE "id" IN %s
                ''',
                                     params=[(first_game_id, last_game_id)],
                                     key='id')
                games_stats_correction_times.append(
                    (first_last[last_game_id]['insert_datetime'] -
                     first_last[first_game_id]['insert_datetime']
                     ).total_seconds())
        games_stats_save_stats_last_correction_average_seconds_diff = 0
        if games_stats_correction_times:
            games_stats_save_stats_last_correction_average_seconds_diff = round(
                sum(games_stats_correction_times) /
                len(games_stats_correction_times), 2)

        # Datapoints
        # Each team in every game has 5 players
        # datapoints_total_count
        if not game_name:
            datapoints_stats_count = (sum([
                5 * api_config[x['game_name']]['datapoints'] * x['stats_count']
                for x in games_stats_data
            ]))
        else:
            # Total count of saved games is
            # `games_stats_correction_count` + `games_watch_with_stats_count`
            datapoints_stats_count = (
                5 * api_config[game_name]['datapoints'] *
                (games_stats_correction_count + games_watch_with_stats_count))

        # datapoints_correction_count
        datapoints_stats_correction_list = []
        if games_stats_corrected_ids:
            for game_stats_ids in games_stats_corrected_ids:
                # Default to empty so the lookups below never hit an unbound
                # name when a stats query returns no rows
                team_changes_count_per_game = {}
                player_changes_count_per_game = {}
                # Teams
                teams_data = cur.qfa(
                    '''
                    SELECT "TS".*, "S"."insert_datetime"
                    FROM "current_game_stats" "S"
                    INNER JOIN "current_game_team_stats" "TS"
                        ON "S"."id" = "TS"."stats_game_id"
                    WHERE "S"."id" IN %s
                    ORDER BY "TS"."data_src_team_id", "TS"."id"
                ''', [tuple(game_stats_ids)])
                if teams_data:
                    teams_data_final = self._transform_stats_data(
                        teams_data, 'data_src_team_id')
                    team_changes_count_per_game = self._check_stats_for_changes(
                        teams_data_final)

                # Players
                players_data = cur.qfa(
                    '''
                    SELECT "PS".*
                    FROM "current_game_stats" "S"
                    INNER JOIN "current_game_player_stats" "PS"
                        ON "S"."id" = "PS"."stats_game_id"
                    WHERE "S"."id" IN %s
                    ORDER BY "PS"."id"
                ''', [tuple(game_stats_ids)])
                if players_data:
                    players_data_final = self._transform_stats_data(
                        players_data, 'data_src_player_id')
                    player_changes_count_per_game = self._check_stats_for_changes(
                        players_data_final)

                for game_id in game_stats_ids:
                    # Changes have to be tracked per stats game because the
                    # max and median analysis below works per game
                    changes_count = 0
                    if game_id in team_changes_count_per_game:
                        changes_count += team_changes_count_per_game[game_id]
                    if game_id in player_changes_count_per_game:
                        changes_count += player_changes_count_per_game[game_id]
                    datapoints_stats_correction_list.append(changes_count)

        # assign datapoints_correction_count
        datapoints_stats_correction_count = len(
            datapoints_stats_correction_list)

        # datapoints_correction_percent
        datapoints_stats_correction_percent = 0
        if datapoints_stats_correction_count > 0:
            datapoints_stats_correction_percent = round(
                datapoints_stats_correction_count * 100 /
                datapoints_stats_count, 2)

        # datapoints_correction_max
        datapoints_stats_correction_per_game_max = 0
        if datapoints_stats_correction_list:
            datapoints_stats_correction_per_game_max = max(
                datapoints_stats_correction_list)

        # datapoints_correction_median
        datapoints_stats_correction_per_game_median = 0
        if datapoints_stats_correction_list:
            datapoints_stats_correction_per_game_median = statistics.median(
                datapoints_stats_correction_list)

        result = OrderedDict([
            ('games_watch_count', games_watch_count),
            ('games_watch_with_stats_count', games_watch_with_stats_count),
            ('games_watch_with_stats_percent', games_watch_with_stats_percent),
            ('games_watch_with_stats_corrected_count',
             games_watch_with_stats_corrected_count),
            ('games_watch_with_stats_corrected_percent',
             games_watch_with_stats_corrected_percent),
            ('games_stats_correction_count', games_stats_correction_count),
            ('games_stats_correction_per_game_average_count',
             games_stats_correction_per_game_average_count),
            ('games_stats_game_end_save_stats_average_minutes_diff',
             round(games_stats_game_end_save_stats_average_seconds_diff / 60,
                   2)),
            ('games_stats_save_stats_last_correction_average_minutes_diff',
             round(
                 games_stats_save_stats_last_correction_average_seconds_diff /
                 60, 2)),
            ('datapoints_stats_count', datapoints_stats_count),
            ('datapoints_stats_correction_count',
             datapoints_stats_correction_count),
            ('datapoints_stats_correction_percent',
             datapoints_stats_correction_percent),
            ('datapoints_stats_correction_per_game_max',
             datapoints_stats_correction_per_game_max),
            ('datapoints_stats_correction_per_game_median',
             datapoints_stats_correction_per_game_median)
        ])

        # Update analysis results in database
        update_data = dict(deepcopy(result))
        for key, value in [
            ('games_stats_game_end_save_stats_average_seconds_diff',
             games_stats_game_end_save_stats_average_seconds_diff),
            ('games_stats_save_stats_last_correction_average_seconds_diff',
             games_stats_save_stats_last_correction_average_seconds_diff)
        ]:
            # Store the seconds value in the database instead of the
            # minutes value kept in the returned result
            del update_data[key.replace('seconds', 'minutes')]
            update_data[key] = value
        cur.update('current_game_analysis',
                   update_data,
                   conditions={
                       'data_src': data_src,
                       'game_name': game_name
                   })

        sql.finish_trans_mode()

        # Return analysis information
        return result
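
For orientation, a minimal sketch of how this analysis might be invoked; the class name DataAnalyzer, the data_src value and the game_name are assumptions, not taken from the code above:

# Hypothetical call site, not part of the original code
analyzer = DataAnalyzer()
report = analyzer.process_data(type='current',
                               data_src='example_provider',
                               game_name='example_game')
if isinstance(report, str):
    print(report)  # e.g. 'No games to watch'
else:
    for metric, value in report.items():
        print('{}: {}'.format(metric, value))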