def _log_msg(self, type: str, msg: str, **kwargs) -> None:
     '''Log *msg* as a warning or error with provider connection details.

     :param type: severity selector, 'warning' or 'error'; any other
         value is silently ignored (nothing is logged).
     :param msg: message text handed to the logger.
     :param kwargs: extra key/value pairs merged into the log payload.
     '''
     # NOTE(review): this writes back into the shared api_config dict,
     # turning logging on for the provider globally — confirm intended.
     if self._log:
         api_config[self._name]['log'] = True
     if api_config[self._name]['log'] is False:
         return
     # Lazily fetch the application logger on first use
     if not hasattr(self, 'logger'):
         self.logger = LibPool().logger
     logdata = {'connection_details': api_config[self._name]}
     for name, value in kwargs.items():
         logdata[name] = value
     if type == 'warning':
         self.logger.warning(msg, logdata)
     elif type == 'error':
         self.logger.error(msg, logdata)
 def _logging(self, message: str, url: str = None, log_type: str = None, log_data: dict = {}) -> None:
     '''Log *message* as error (when log_type == 'error') or info.

     :param message: message text handed to the logger.
     :param url: optional request URL added to the log payload.
     :param log_type: 'error' selects error level; anything else info.
     :param log_data: extra payload dict.
         NOTE(review): mutable default argument, and the value is
         assigned below but immediately overwritten — log_data is
         effectively ignored; confirm whether it should be merged in.
     '''
     # NOTE(review): mutates the shared api_config dict — enabling
     # logging globally for the provider; confirm intended.
     if self._log:
         api_config[self._name]['log'] = True
     if api_config[self._name]['log'] is False:
         return
     # Lazily fetch the application logger on first use
     if not hasattr(self, 'logger'):
         self.logger = LibPool().logger
     logdata = log_data
     # The reassignment below discards log_data (see NOTE above)
     logdata = {
         'connection_details': api_config[self._name]
     }
     if url:
         logdata['url'] = url
     if log_type is not None and log_type == 'error':
         self.logger.error(message, logdata)
     else:
         self.logger.info(message, logdata)
def rollback_trans_mode():
    '''Roll back an open database transaction for the current request thread.

    Best-effort cleanup hook: does nothing when psycopg2 is not
    installed or the thread holds no connection.  Outside transaction
    mode it only returns the connection to the pool; inside it, the
    transaction is rolled back before disconnecting.
    '''
    try:
        # Is package installed
        pkg_resources.get_distribution('psycopg2')  # noqa
    except pkg_resources.DistributionNotFound:
        return

    conn = getattr(cherrypy.thread_data, 'dbconn', None)
    if conn is None:
        # Do not rollback transaction without connection
        return

    sql = LibPool().libsql
    if conn.autocommit is not False:
        # Not in transaction mode: just putconn back to the pool
        sql.disconnect()
        return

    # If exception is raised during handling a request, rollback transaction
    sql.finish_trans_mode(action='rollback')
    sql.disconnect()
def psql_execute_database_script(sql_filepath: str,
                                 database: str = None) -> bool:
    '''Execute a SQL script file against *database* via the ``psql`` CLI.

    :param sql_filepath: path of the SQL file to execute.
    :param database: key into ``db_config['database']``; defaults to the
        active database of the pooled SQL library.
    :return: True when psql exits with status 0, False otherwise.
    '''
    if not database:
        sql = LibPool().libsql
        database = sql.active_database
    # Copy the config so the injected 'log_file' key does not leak into
    # the shared db_config structure.
    kwargs = dict(db_config['database'][database])
    kwargs['log_file'] = '{}{}datanal_sql.log'.format(
        os.path.join(app_config['path_storage'], 'log'), os.sep)
    # BUG FIX: the original .format(sql_filepath) call did not supply the
    # log_file keyword, so '{log_file}' raised KeyError before psql ran.
    cmd = 'psql --file={} --quiet --log-file={log_file}'.format(
        sql_filepath, **kwargs)
    cmd += ' --dbname=postgresql://{user}:{password}@{host}:{port}/{dbname}'.format(
        **kwargs)
    return os.system(cmd) == 0
# Beispiel #5
# 0
def save_database_dump(type: str, output_filepath: str) -> bool:
    '''Dump all tables matching ``<type>_*`` into *output_filepath*.

    Runs pg_dump (data only, INSERT statements) against the currently
    active database, then rewrites the dump so restoring it does not
    emit client warnings.

    :param type: table-name prefix selecting which tables to dump.
    :param output_filepath: destination file for the SQL dump.
    :return: True when pg_dump exits with status 0, False otherwise.
    '''
    sql = LibPool().libsql
    parts = [
        "pg_dump --file='{}' --data-only --encoding=utf8".format(
            output_filepath),
        ' --quote-all-identifiers --no-tablespaces --no-owner --inserts',
        " --table='{}_*'".format(type),
        ' --dbname=postgresql://{user}:{password}@{host}:{port}/{dbname}'.format(
            **db_config['database'][sql.active_database]),
    ]
    cmd = ''.join(parts)

    if subprocess.call(cmd, shell=True) != 0:
        return False

    # replace SET client_min_messages = warning; with "fatal" to avoid warnings
    with open(output_filepath, 'r') as data_file:
        content = data_file.read()
    content = content.replace('client_min_messages = warning',
                              'client_min_messages = fatal')
    with open(output_filepath, 'w') as data_file:
        data_file.write(content)
    return True
def disconnect_database():
    '''Release the current thread's database connection back to the pool.

    No-op when psycopg2 is not installed or no connection is attached to
    the thread.  An open transaction is rolled back first, since the
    rollback must happen before disconnecting.
    '''
    try:
        # Is package installed
        pkg_resources.get_distribution('psycopg2')  # noqa
    except pkg_resources.DistributionNotFound:
        return

    conn = getattr(cherrypy.thread_data, 'dbconn', None)
    if conn is None:
        # There is already no connection
        return

    sql = LibPool().libsql
    if conn.autocommit is False:
        # Rollback transaction mode, need to be done before disconnect
        sql.finish_trans_mode(action='rollback')

    # On the end of request, putconn back to Pool
    sql.disconnect()
# Beispiel #7
# 0
class Validator(ValidatorInterface):
    '''Define concrete Adapter

    Validates raw provider1 payloads (match lists and single games)
    before they are transformed and stored.  Every invalid payload is
    persisted into the given ``*_invalid`` table and logged.
    '''
    _name = 'provider1'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        self.sql = self.lib_pool.libsql
        if 'game_name' in kwargs:
            self._game_name = kwargs['game_name']
        if 'log' in kwargs:
            self._log = kwargs['log']

    def _log_msg(self, type: str, msg: str, **kwargs) -> None:
        '''Log *msg* with severity *type* ('warning' or 'error').

        Extra keyword arguments are merged into the structured payload
        next to the provider connection details.
        '''
        if self._log:
            # Per-instance flag forces provider-level logging on
            api_config[self._name]['log'] = True
        if api_config[self._name]['log'] is False:
            return
        if not hasattr(self, 'logger'):
            self.logger = LibPool().logger
        logdata = {'connection_details': api_config[self._name]}
        for name, value in kwargs.items():
            logdata[name] = value
        if type == 'warning':
            self.logger.warning(msg, logdata)
        elif type == 'error':
            self.logger.error(msg, logdata)

    def validate_match(self, url: str, matches_data: dict,
                       tournament_data: dict, table: str) -> bool:
        '''Return True when *matches_data* has a non-empty 'matches' list.

        On failure the problem is saved into *table* and logged, and
        False is returned.
        '''
        msg = None
        if 'matches' not in matches_data or not matches_data['matches']:
            msg = '{} - "matches" missing in data or empty'.format(
                self._game_name.upper())
            # Save it as invalid and log it
            insert_data = {
                'data_src_url': url,
                'insert_datetime': datetime.datetime.utcnow(),
                'problem_msg': msg
            }
            self.sql.cursor().insert(table, insert_data)
            self._log_msg('error', msg, url=url)
            return False
        return True

    def validate_game(self, url: str, parent_game_id: int, data: dict,
                      match_data: dict, table: str) -> bool:
        '''Validate one game payload for the configured game name.

        Performs game-specific checks (csgo / dota2 / lol) for required
        keys in the match summary.  On failure the problem is stored in
        *table* (linked to *parent_game_id*) and logged, and False is
        returned.
        '''
        side1 = api_config[self._game_name]['provider1_sides'][0]
        side2 = api_config[self._game_name]['provider1_sides'][1]
        msg = None
        if 'rosters' not in match_data or not match_data['rosters']:
            msg = '{} - "rosters" missing in data or empty for match {}'.format(
                self._game_name.upper(), match_data['id'])
        else:

            if 'match_summary' not in data or not data['match_summary']:
                msg = '{} - "match_summary" missing in data or empty'.format(
                    self._game_name.upper())

            elif self._game_name == 'csgo':
                if side1 not in data['match_summary'] and side2 not in data[
                        'match_summary']:
                    msg = '{} - "{}" and "{}" missing in summary data'.format(
                        self._game_name.upper(), side1, side2)
                elif side1 not in data['match_summary']:
                    msg = '{} - "{}" missing in summary data'.format(
                        self._game_name.upper(), side1)
                elif side2 not in data['match_summary']:
                    msg = '{} - "{}" missing in summary data'.format(
                        self._game_name.upper(), side2)
                elif (side1 not in data['match_summary']['scoreboard']
                      and side2 not in data['match_summary']['scoreboard']):
                    msg = '{} - "{}" and "{}" missing in scoreboard data'.format(
                        self._game_name.upper(), side1, side2)
                elif side1 not in data['match_summary']['scoreboard']:
                    msg = '{} - "{}" missing in scoreboard data'.format(
                        self._game_name.upper(), side1)
                elif side2 not in data['match_summary']['scoreboard']:
                    # BUG FIX: the format string was malformed ('{"}')
                    # and raised KeyError instead of producing a message
                    msg = '{} - "{}" missing in scoreboard data'.format(
                        self._game_name.upper(), side2)

            elif self._game_name == 'dota2':
                if 'rosters' not in data or not data['rosters']:
                    msg = '{} - "rosters" missing in data or empty for match {}'.format(
                        self._game_name.upper(), match_data['id'])
                elif 'winner' not in data or data['winner'] is None:
                    msg = '{} - "winner" missing in summary data or empty'.format(
                        self._game_name.upper())
                elif 'players_stats' not in data['match_summary']:
                    msg = '{} - "players_stats" missing in data[match_summary]'.format(
                        self._game_name.upper())
                else:
                    if (data['match_summary'][side1] !=
                            data['rosters'][0]['id']
                            or data['match_summary'][side2] !=
                            data['rosters'][1]['id']):
                        msg = '{} - home or away team does not correspond with rosters'.format(
                            self._game_name.upper())

            elif self._game_name == 'lol':
                if 'rosters' not in data or not data['rosters']:
                    msg = '{} - missing "rosters" in data or empty for match {}'.format(
                        self._game_name.upper(), match_data['id'])
                elif side1 not in data['match_summary'] and side2 not in data[
                        'match_summary']:
                    msg = '{} - "{}" and "{}" missing in data[match_summary]'.format(
                        self._game_name.upper(), side1, side2)
                elif side1 not in data['match_summary']:
                    msg = '{} - "{}" missing in data[match_summary]'.format(
                        self._game_name.upper(), side1)
                elif side2 not in data['match_summary']:
                    msg = '{} - "{}" missing in data[match_summary]'.format(
                        self._game_name.upper(), side2)
                elif 'players' not in data['match_summary'][
                        side1] and 'players' not in data['match_summary'][
                            side2]:
                    msg = '{} - "players" missing in data[match_summary][{}] and data[match_summary][{}]'.format(
                        self._game_name.upper(), side1, side2)
                elif 'players' not in data['match_summary'][side1]:
                    msg = '{} - "players" missing in data[match_summary][{}]'.format(
                        self._game_name.upper(), side1)
                elif 'players' not in data['match_summary'][side2]:
                    msg = '{} - "players" missing in data[match_summary][{}]'.format(
                        self._game_name.upper(), side2)
                else:
                    if (data['match_summary'][side1]['id'] !=
                            data['rosters'][0]['id']
                            or data['match_summary'][side2]['id'] !=
                            data['rosters'][1]['id']):
                        msg = '{} - home or away team does not correspond with rosters'.format(
                            self._game_name.upper())

        if msg:
            # Save it as invalid and log it
            insert_data = {
                'data_src_url': url,
                'insert_datetime': datetime.datetime.utcnow(),
                'problem_msg': msg
            }
            # Link the invalid record to its parent via the FK column
            # that matches the target table
            if table == 'current_game_invalid':
                insert_data['watch_game_id'] = parent_game_id
            elif table == 'past_game_invalid':
                insert_data['stats_game_id'] = parent_game_id
            self.sql.cursor().insert(table, insert_data)
            self._log_msg('error', msg, url=url, match_id=match_data['id'])
            return False
        return True
import os
import threading
import pkg_resources
import re
import traceback
from io import StringIO
import time
import datetime

import cherrypy

from lib.libpool import LibPool
from config.settings import app_config
from lib.sql.config.settings import db_config

# Module-level application logger shared by the request hooks below
logger = LibPool().logger  # Get application logger


def on_start_resource_wrapper():
    '''CherryPy on_start_resource hook: run the per-request setup steps
    (license check, request-timeout start, request logging) in order.
    '''
    for hook in (check_license_expiration, set_request_timeout_start,
                 request_log):
        hook()


def check_license_expiration() -> None:
    '''Abort the request with HTTP 403 once the application license has
    expired.  No-op when no expiration date is configured.

    :raises cherrypy.HTTPError: 403 when today (UTC) is past the
        configured 'app_license_expiration' date (format '%Y-%m-%d').
    '''
    expiration = app_config['app_license_expiration']
    if not expiration:
        return
    expires_on = datetime.datetime.strptime(expiration, '%Y-%m-%d').date()
    if datetime.datetime.utcnow().date() > expires_on:
        cherrypy.request.show_tracebacks = False  # Disable traceback on Cherrypy html
        raise cherrypy.HTTPError(403, 'License expired')
class Grabber(GrabberInterface):
    '''Define concrete Adapter

    Grabs historical ("past") game data from provider1, validates each
    match/game and stores team and player statistics.
    '''
    _name = 'provider1'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        self.sql = self.lib_pool.libsql
        self.send_request = cherrypy.thread_data.client_obj.send_request
        if 'game_name' in kwargs:
            self._game_name = kwargs['game_name']
        if 'log' in kwargs:
            self._log = kwargs['log']
        self.validator = get_validator_for_api(self._name, self._game_name, self._log)
        self.transformer = get_transformer_for_api(self._name, self._game_name, self._log)
        super().__init__(*args, **kwargs)


    def _log_msg(self, type: str, msg: str, **kwargs) -> None:
        '''Log *msg* with severity *type* ('warning' or 'error'),
        attaching provider connection details and **kwargs to the
        structured payload.
        '''
        if self._log:
            api_config[self._name]['log'] = True
        if api_config[self._name]['log'] is False:
            return
        if not hasattr(self, 'logger'):
            self.logger = LibPool().logger
        logdata = {'connection_details': api_config[self._name]}
        for name, value in kwargs.items():
            logdata[name] = value
        if type == 'warning':
            self.logger.warning(msg, logdata)
        elif type == 'error':
            self.logger.error(msg, logdata)


    def _find_and_save_past_games(
            self, date_from: datetime.date = None, date_to: datetime.date = None,
            stats: dict = None) -> dict:
        '''Page through provider1 series, validate and persist all past
        games that belong to the configured tournaments.

        :param date_from: optional inclusive lower bound (series start).
        :param date_to: optional inclusive upper bound (series end).
        :param stats: pre-initialised counter dict (see
            GrabberInterface.grab_past_data).  BUG FIX: previously a
            shared mutable default ({}); now defaults to None.
        :return: transformed statistics, or {'return_msg': ...} when the
            API returned no data.
        '''
        stats = {} if stats is None else stats
        self.sql.start_trans_mode()
        cur = self.sql.cursor()
        # Find games mentioned in configuration, save them in database table `past_game_stats`
        valid_tournament_ids = list(toolz.itertoolz.unique([
            y[self._name]['tournament_id'] for x, y in api_config['{}_tournaments'.format(self._game_name)].items()
            if y[self._name] and y[self._name]['tournament_id']]))
        series = {'last_page': 1}  # Fake for loop start
        series_current_page = 0
        while series['last_page'] > series_current_page:  # Pagination
            series_current_page += 1
            # NOTE: `&tiers[]=1` is not necessary, when we filter tournament ids,
            # its open to lower levels of tournaments for testing
            # NOTE: &tournaments[]=123,345 does not work as expected, should be only one ID -> DO NOT USE IT!!!
            url_s = 'series?games[]={}&with[]=matches&page={}'.format(
                api_config[self._game_name]['provider1_id'],
                series_current_page)
            headers, series = self.send_request(api_endpoint=url_s)
            if not series or 'data' not in series:
                # NOTE(review): message says 'Provider 2' although _name
                # is provider1 — confirm the provider numbering.
                return {'return_msg': 'No data were found for Provider 2.'}

            limit_from = None
            if date_from:
                limit_from = datetime.datetime.strptime('{} 00:00:00'.format(date_from), '%Y-%m-%d %H:%M:%S')
            limit_to = None
            if date_to:
                limit_to = datetime.datetime.strptime('{} 23:59:59'.format(date_to), '%Y-%m-%d %H:%M:%S')
            for serie in series['data']:
                if serie['tournament_id'] not in valid_tournament_ids:
                    continue
                # Skip series outside the requested date window
                elif ((limit_from and limit_to
                        and serie['start'] and serie['end']
                        and (datetime.datetime.strptime(serie['start'], '%Y-%m-%d %H:%M:%S') < limit_from
                             or datetime.datetime.strptime(serie['end'], '%Y-%m-%d %H:%M:%S') > limit_to))
                        or (limit_from and serie['start']
                            and datetime.datetime.strptime(serie['start'], '%Y-%m-%d %H:%M:%S')
                            < limit_from)
                        or (limit_to and serie['end']
                            and datetime.datetime.strptime(serie['end'], '%Y-%m-%d %H:%M:%S')
                            > limit_to)):
                    continue

                stats['matches_total_count'] += 1
                url_m = 'series/{}?with[]=matches'.format(serie['id'])
                headers, matches = self.send_request(api_endpoint=url_m)
                if not self.validator.validate_match(url_m, matches, serie, 'past_game_invalid'):
                    stats['matches_invalid_count'] += 1
                    continue

                for match in matches['matches']:
                    stats['games_total_count'] += 1
                    existing = cur.qfo('''
                        SELECT * FROM "past_game_stats" WHERE "data_src_game_id" = %(src_game_id)s
                    ''', params={'src_game_id': match['id']})
                    # NOTE(review): start/end are read from the matches
                    # response, not from serie — confirm intended.
                    common_data = {
                        'data_src': self._name,
                        'game_name': self._game_name,
                        'data_src_url': url_s,
                        'data_src_game_id': match['id'],
                        'data_src_game_title': serie['title'],
                        'data_src_start_datetime': matches['start'],
                        'data_src_finish_datetime': matches['end'],
                        'data_src_tournament_id': serie['tournament_id'],
                        'data_src_tournament_title': None
                    }
                    if not existing:
                        common_data['insert_datetime'] = datetime.datetime.utcnow()
                        cur.insert('past_game_stats', common_data)
                        stats_game_id = cur.get_last_id('past_game_stats')
                    else:
                        stats_game_id = existing['id']
                        # Drop bookkeeping columns before diffing
                        del existing['id'], existing['insert_datetime'], existing['update_datetime']
                        diff_data = dict(toolz.itertoolz.diff(common_data, existing))
                        if diff_data:
                            common_data['update_datetime'] = datetime.datetime.utcnow()
                            cur.update('past_game_stats', diff_data, conditions={'data_src_game_id': match['id']})

                    url_g = 'matches/{}?with[]=summary'.format(match['id'])
                    headers, data = self.send_request(api_endpoint=url_g)

                    if not self.validator.validate_game(url_g, stats_game_id, data, match, 'past_game_invalid'):
                        # BUG FIX: stray debug print(url_g) removed; the
                        # validator already logs the invalid game.
                        stats['games_invalid_count'] += 1
                        continue

                    # Save team and player game stats
                    stats['datapoints_missing_count'] += self._save_team_stats(url_m, stats_game_id, data)
                    stats['datapoints_missing_count'] += self._save_player_stats(url_m, stats_game_id, data)

        # Save statistics into "past_game_analysis" table
        final_stats = self._transform_stats_and_save_analysis(self._name, stats)

        self.sql.finish_trans_mode()
        return final_stats


    def _save_team_stats(
            self, url: str, stats_game_id: int, data: dict) -> int:
        '''Transform and persist per-team stats for one game; return the
        number of missing datapoints counted by the common saver.
        '''
        prepared_data = self.transformer.prepare_teams_data(data)
        common_data = {
            'stats_game_id': stats_game_id,
            'data_src_url': url
        }
        teams_data = self.transformer.get_teams_data(prepared_data, data)
        return self._save_team_stats_common(common_data, teams_data)
class RestClient(RestClientInterface):
    '''Define concrete Adapter

    REST client for the provider1 API: handles OAuth-style token
    authentication, throttled GET requests and one automatic
    re-authentication when the token is rejected.
    '''
    _name = 'provider1'
    _abiospool = Provider1Pool()

    def __init__(self, *args, **kwargs):
        # Expose this client to the rest of the request thread
        cherrypy.thread_data.client_obj = self
        if 'log' in kwargs:
            self._log = kwargs['log']
        super().__init__(*args, **kwargs)


    def _logging(self, message: str, url: str = None, log_type: str = None,
                 log_data: dict = None) -> None:
        '''Log *message* as error (log_type == 'error') or info.

        :param url: optional request URL added to the payload.
        :param log_data: extra payload entries.  BUG FIX: previously a
            mutable default ({}) that was assigned and then immediately
            overwritten (i.e. silently ignored); it is now merged into
            the payload.
        '''
        if self._log:
            api_config[self._name]['log'] = True
        if api_config[self._name]['log'] is False:
            return
        if not hasattr(self, 'logger'):
            self.logger = LibPool().logger
        logdata = dict(log_data) if log_data else {}
        logdata['connection_details'] = api_config[self._name]
        if url:
            logdata['url'] = url
        if log_type == 'error':
            self.logger.error(message, logdata)
        else:
            self.logger.info(message, logdata)


    def send_request(self, api_endpoint: str, data: dict = None) -> dict:
        '''GET *api_endpoint* with the pool's access token appended.

        Authenticates first when no token exists, and re-authenticates
        once when the API reports the token as invalid.

        :return: tuple of (response headers, decoded JSON body).
        :raises cherrypy.HTTPError: 401 when authentication fails,
            503 when the request-throttling pool denies permission.
        '''
        if data is None:
            data = {}
        auth_token = self._abiospool.auth_token
        if auth_token is None:
            # Auth token does not exists, get new one
            self.authenticate()
            auth_token = self._abiospool.auth_token
            if auth_token is None:
                error_message = 'Cannot connect to Provider 2'
                self._logging(error_message, log_type='error')
                raise cherrypy.HTTPError(401, error_message)

        ac = api_config[self._name]
        url_start = '{}{}?'.format(ac['url'], api_endpoint)
        if '?' in api_endpoint:
            # Endpoint already has a query string: append with & instead
            url_start = '{}&'.format(url_start[0:-1])
        url = '{}access_token={}'.format(url_start, auth_token)
        # BUG FIX: `response` was left unbound (NameError) when the pool
        # denied permission; fail explicitly instead.
        if not self._abiospool.request_permission:
            error_message = 'Request permission denied by provider pool'
            self._logging(error_message, url, 'error')
            raise cherrypy.HTTPError(503, error_message)
        response = requests.get(url, data=to_json(data))
        if response.status_code == 401 and response.json()['error_description'] == 'Access token is not valid.':
            # Unauthorized -> authenticate & repeat request
            error_message = 'Provider 2 Unauthorized: {} {}'.format(response.json()['error_description'], auth_token)
            self._logging(error_message, url, 'error')
            info_message = 'Reconnect to Provider 2'
            self._logging(info_message)
            self.authenticate()
            response = requests.get(url, data=to_json(data))

        tools.check_request_timeout()  # Application hook to return 408 if time is over
        return response.headers, response.json()


    def authenticate(self) -> None:
        '''Obtain a new access token via the client-credentials grant
        and store it on the provider pool.

        :raises cherrypy.HTTPError: 401 when authentication fails.
        '''
        ac = api_config[self._name]
        auth_data = {
            'grant_type': 'client_credentials',
            'client_id': ac['client_id'],
            'client_secret': ac['client_secret']
        }
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        url = '{}{}'.format(ac['url'], 'oauth/access_token')
        try:  # Authenticate
            response = requests.post(url, data=auth_data, headers=headers)
            if 'error' in response.json():
                raise AuthenticationError(response.json()['error_description'])

            self._abiospool.auth_token = response.json()['access_token']
        except Exception as e:
            error_message = 'Authenticate to Provider 2 was not succesfull: {}'.format(e)
            self._logging(error_message, url, 'error')
            raise cherrypy.HTTPError(401, error_message)


    def monitor(self) -> float:
        '''Measure the authentication round-trip time in seconds.

        NOTE(review): authenticate() returns None or raises, so the
        `is False` branch can never trigger — kept for interface
        compatibility; confirm the intended contract.
        '''
        # Try to authenticate at REST endpoint
        start_time = time.perf_counter()
        if self.authenticate() is False:
            return False
        return time.perf_counter() - start_time
# Beispiel #11
# 0
class GrabberInterface(object):
    '''Define the interface that Adapter uses

    Base class for provider-specific grabbers of past game data.
    Subclasses must define ``_name`` and implement
    ``_find_and_save_past_games``; they are also expected to set
    ``self._log`` before calling this __init__ (it is read here).
    '''
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        self.sql = self.lib_pool.libsql
        # Request-scoped REST client installed by the RestClient adapter
        self.send_request = cherrypy.thread_data.client_obj.send_request
        if 'data_src' in kwargs:
            self._data_src = kwargs['data_src']
        if 'game_name' in kwargs:
            self._game_name = kwargs['game_name']
        # NOTE(review): relies on self._name/self._log being provided by
        # the subclass — confirm all subclasses set them before super().
        self.transformer = get_transformer_for_api(self._name, self._game_name,
                                                   self._log)

    def grab_past_data(self, date_from: datetime.date, date_to: datetime.date,
                       delete_old: bool) -> OrderedDict:
        '''Grab and store past data in [date_from, date_to], returning an
        ordered summary of the run (or a plain message string when the
        provider returned no data).

        :param delete_old: when True, wipe previously stored past data
            for this data source / game first.
        '''
        if delete_old:
            self._delete_past_db()
        # Counter dict threaded through the provider-specific grabber
        empty_stats = {
            'data_src': '',
            'game_name': '',
            'matches_total_count': 0,
            'matches_invalid_count': 0,
            'games_total_count': 0,
            'games_invalid_count': 0,
            'datapoints_wanted_count': 0,
            'datapoints_missing_count': 0,
            'datapoints_unavailable_count': 0
        }
        stats = self._find_and_save_past_games(date_from, date_to, empty_stats)
        if 'return_msg' in stats:
            return stats['return_msg']
        # Return analysis informations
        # NOTE(review): 'unavialable_datapoints' is a typo kept as-is —
        # consumers may depend on this exact key; confirm before fixing.
        return OrderedDict([
            ('api', stats['data_src']), ('game', stats['game_name']),
            ('processed_matches', stats['matches_total_count']),
            ('invalid_matches', stats['matches_invalid_count']),
            ('processed_games', stats['games_total_count']),
            ('invalid_games', stats['games_invalid_count']),
            ('processed_datapoints', stats['datapoints_wanted_count']),
            ('missing_datapoints', stats['datapoints_missing_count']),
            ('unavialable_datapoints', stats['datapoints_unavailable_count'])
        ])

    def _delete_past_db(self) -> None:
        '''Delete stored past stats and zero the analysis counters for
        this data source / game, in one transaction.
        '''
        # For past data everything is in `past_game_stats` table or its subordinates,
        # so its done only with one query
        self.sql.start_trans_mode()
        cur = self.sql.cursor()
        Q = '''DELETE FROM "past_game_stats" WHERE "data_src" = %(data_src)s AND "game_name" = %(game_name)s;'''
        cur.q(Q,
              params={
                  'data_src': self._data_src,
                  'game_name': self._game_name
              })
        Q = '''
            UPDATE "past_game_analysis" SET
                "matches_total_count" = 0,
                "matches_invalid_count" = 0,
                "games_total_count" = 0,
                "games_invalid_count" = 0,
                "datapoints_wanted_count" = 0,
                "datapoints_missing_count" = 0,
                "datapoints_unavailable_count" = 0,
                "analysis_update_datetime" = NULL
            WHERE "data_src" = %(data_src)s
                AND "game_name" = %(game_name)s;
        '''
        cur.q(Q,
              params={
                  'data_src': self._data_src,
                  'game_name': self._game_name
              })
        self.sql.finish_trans_mode()

    def _find_and_save_past_games(self, date_from: datetime.date,
                                  date_to: datetime.date) -> dict:
        # Abstract: implemented by provider-specific subclasses.
        # NOTE(review): subclasses accept an extra `stats` parameter not
        # declared here — confirm the intended abstract signature.
        raise NotImplementedError

    def _transform_stats_and_save_analysis(self, data_src: str,
                                           stats: dict) -> dict:
        '''Derive datapoint counters from config, stamp the stats dict
        and accumulate it into "past_game_analysis"; returns the dict.
        '''
        # Each team in all games has 5 players
        datapoints_wanted_count = api_config[self._game_name]['datapoints']
        stats['datapoints_wanted_count'] = 5 * datapoints_wanted_count * (
            stats['games_total_count'] - stats['games_invalid_count'])
        datapoints_missing_count = api_config[self._game_name][
            '{}_missing_datapoints'.format(self._name)]
        # NOTE(review): this overwrites the datapoints_missing_count
        # accumulated during grabbing — confirm that is intended.
        stats['datapoints_missing_count'] = 5 * datapoints_missing_count * (
            stats['games_total_count'] - stats['games_invalid_count'])
        stats['data_src'] = data_src
        stats['game_name'] = self._game_name
        stats['analysis_update_datetime'] = datetime.datetime.utcnow()
        Q = '''
            UPDATE "past_game_analysis" SET
                "matches_total_count" = "matches_total_count" + %(matches_total_count)s,
                "matches_invalid_count" = "matches_invalid_count" + %(matches_invalid_count)s,
                "games_total_count" = "games_total_count" + %(games_total_count)s,
                "games_invalid_count" = "games_invalid_count" + %(games_invalid_count)s,
                "datapoints_wanted_count" = "datapoints_wanted_count" + %(datapoints_wanted_count)s,
                "datapoints_missing_count" = "datapoints_missing_count" + %(datapoints_missing_count)s,
                "datapoints_unavailable_count" = "datapoints_unavailable_count" + %(datapoints_unavailable_count)s,
                "analysis_update_datetime" = %(analysis_update_datetime)s
            WHERE "data_src" = %(data_src)s
                AND "game_name" = %(game_name)s;
        '''
        cur = self.sql.cursor()
        cur.q(Q, stats)
        return stats

    def _save_team_stats_common(self, common_data: dict,
                                teams_data: dict) -> int:
        '''Insert one row per team and return how many of the tracked
        team datapoints were missing (None).
        '''
        cur = self.sql.cursor()
        count_missing = 0
        for team_id, team_data in teams_data.items():
            # Save team stats
            cur.insert('past_game_team_stats',
                       toolz.dicttoolz.merge(common_data, team_data))
            count_missing += self._count_stats_missing([
                'bomb_plant', 'bomb_defuse', 'round_win', 'round_lose',
                'team_win', 'team_lose', 'turret', 'dragon', 'baron'
            ], team_data)
        return count_missing

    def _save_player_stats(self, url: str, stats_game_id: int,
                           data: dict) -> int:
        '''Transform and insert one row per player for the game and
        return how many tracked player datapoints were missing (None).
        '''
        prepared_data = self.transformer.prepare_players_data(data)
        common_data = {'stats_game_id': stats_game_id, 'data_src_url': url}
        players_data = self.transformer.get_players_data(prepared_data, data)

        cur = self.sql.cursor()
        count_missing = 0
        for player_id, player_data in players_data.items():
            # Save player stats
            cur.insert('past_game_player_stats',
                       toolz.dicttoolz.merge(common_data, player_data))
            count_missing += self._count_stats_missing([
                'kill', 'assist', 'death', 'tower_kill', 'roshan_kill',
                'creep_score'
            ], player_data)

        return count_missing

    def _count_stats_missing(self, field_keys: list, data: dict) -> int:
        '''Count keys from *field_keys* that are present in *data* but
        set to None (i.e. datapoints the provider did not deliver).
        '''
        count_missing = 0
        for name in field_keys:
            if name in data and data[name] is None:
                count_missing += 1
        return count_missing
class Grabber(GrabberInterface):
    '''Concrete Adapter that grabs finished ("past") games from provider2.

    Pages through the provider's past-matches endpoint, validates every
    match/game payload and persists game, team and player statistics into
    the "past_game_*" tables within a single transaction.
    '''
    _name = 'provider2'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        '''Wire the shared DB handle, the thread-local HTTP client and the
        provider-specific validator/transformer.

        Optional kwargs: 'game_name', 'log'.
        '''
        self.sql = self.lib_pool.libsql
        self.send_request = cherrypy.thread_data.client_obj.send_request
        if 'game_name' in kwargs:
            self._game_name = kwargs['game_name']
        if 'log' in kwargs:
            self._log = kwargs['log']
        self.validator = get_validator_for_api(self._name, self._game_name,
                                               self._log)
        self.transformer = get_transformer_for_api(self._name, self._game_name,
                                                   self._log)
        super().__init__(*args, **kwargs)

    def _find_and_save_past_games(
            self, date_from: datetime.date = None,
            date_to: datetime.date = None, stats: dict = None) -> dict:
        '''Find finished games for the configured leagues and save them.

        :param date_from: optional lower bound; games beginning before
            `date_from 00:00:00` are skipped.
        :param date_to: optional upper bound; games ending after
            `date_to 23:59:59` are skipped.
        :param stats: counter dict pre-initialized by the caller (keys such
            as 'matches_total_count', 'games_total_count', ...).
        :return: transformed statistics as saved into "past_game_analysis",
            or a dict with 'return_msg' when the provider returned no data.
        '''
        # Avoid the shared-mutable-default pitfall; callers are expected to
        # pass a pre-initialized counter dict.
        if stats is None:
            stats = {}
        self.sql.start_trans_mode()
        cur = self.sql.cursor()
        # Find games mentioned in configuration, save them in database table `past_game_stats`
        valid_league_ids = list(toolz.itertoolz.unique([
            y[self._name]['league_id'] for x, y in api_config['{}_tournaments'.format(self._game_name)].items()
            if y[self._name] and y[self._name]['league_id']]))
        matches_last_page = 1  # Fake for loop start
        matches_current_page = 0
        while matches_last_page > matches_current_page:  # Pagination
            matches_current_page += 1
            url_m = '{}/matches/past?filter[status]=finished&league_id={}&page[size]=100&page[number]={}'.format(
                api_config[self._game_name]['provider2_slug'],
                str(valid_league_ids),
                matches_current_page)
            matches_headers, matches = self.send_request(api_endpoint=url_m)
            matches_last_page = self._round_up(int(matches_headers['X-Total']) / int(matches_headers['X-Per-Page']))
            if not matches:
                # NOTE(review): returns while the transaction opened above is
                # still active -- presumably cleaned up by the global
                # rollback handler; confirm.
                return {'return_msg': 'No data were found for Provider 1.'}

            limit_from = None
            if date_from:
                limit_from = datetime.datetime.strptime('{} 00:00:00'.format(date_from), '%Y-%m-%d %H:%M:%S')
            limit_to = None
            if date_to:
                limit_to = datetime.datetime.strptime('{} 23:59:59'.format(date_to), '%Y-%m-%d %H:%M:%S')
            for match in matches:
                # Skip matches outside the configured leagues or outside the
                # requested date window.
                if match['league_id'] not in valid_league_ids:
                    continue
                elif ((limit_from and limit_to
                      and match['begin_at'] and match['end_at']
                      and (datetime.datetime.strptime(match['begin_at'], '%Y-%m-%dT%H:%M:%SZ') < limit_from
                           or datetime.datetime.strptime(match['end_at'], '%Y-%m-%dT%H:%M:%SZ') > limit_to))
                        or (limit_from and match['begin_at']
                            and datetime.datetime.strptime(match['begin_at'], '%Y-%m-%dT%H:%M:%SZ')
                            < limit_from)
                        or (limit_to and match['end_at']
                            and datetime.datetime.strptime(match['end_at'], '%Y-%m-%dT%H:%M:%SZ')
                            > limit_to)):
                    continue
                stats['matches_total_count'] += 1
                if not self.validator.validate_match(url_m, match, 'past_game_invalid'):
                    stats['matches_invalid_count'] += 1
                    continue

                teams = [x['opponent']['id'] for x in match['opponents']]
                for game in match['games']:
                    stats['games_total_count'] += 1
                    existing = cur.qfo('''
                        SELECT * FROM "past_game_stats" WHERE "data_src_game_id" = %(src_game_id)s
                    ''', params={'src_game_id': game['id']})
                    common_data = {
                        'data_src': self._name,
                        'game_name': self._game_name,
                        'data_src_url': url_m,
                        'data_src_game_id': game['id'],
                        'data_src_game_title': match['name'],
                        'data_src_start_datetime': match['begin_at'],
                        'data_src_finish_datetime': match['end_at'],
                        'data_src_tournament_id': match['serie_id'],
                        'data_src_tournament_title': match['serie']['full_name']
                    }
                    if not existing:
                        common_data['insert_datetime'] = datetime.datetime.utcnow()
                        cur.insert('past_game_stats', common_data)
                        stats_game_id = cur.get_last_id('past_game_stats')
                    else:
                        stats_game_id = existing['id']
                        del(existing['id'], existing['insert_datetime'], existing['update_datetime'])
                        diff_data = dict(toolz.itertoolz.diff(common_data, existing))
                        if diff_data:
                            # BUGFIX: the modification timestamp must be part
                            # of the UPDATE payload (assigning it to
                            # common_data after the diff never reached the
                            # database), and the row has to be matched by the
                            # same game id used for the SELECT above -- not by
                            # the match id.
                            diff_data['update_datetime'] = datetime.datetime.utcnow()
                            cur.update('past_game_stats', diff_data,
                                       conditions={'data_src_game_id': game['id']})

                    url_g = '{}/games/{}'.format(api_config[self._game_name]['provider2_slug'], game['id'])
                    headers, data = self.send_request(api_endpoint=url_g)

                    if not self.validator.validate_game(url_m, stats_game_id, data, match, 'past_game_invalid'):
                        stats['games_invalid_count'] += 1
                        continue

                    # Save team and player game stats
                    stats['datapoints_unavailable_count'] += self._save_team_stats(
                        url_g, stats_game_id, game['id'], data, teams, match['games'])
                    stats['datapoints_unavailable_count'] += self._save_player_stats(url_g, stats_game_id, data)

        # Save statistics into "past_game_analysis" table
        final_stats = self._transform_stats_and_save_analysis(self._name, stats)

        self.sql.finish_trans_mode()
        return final_stats

    def _save_team_stats(
            self, url: str, stats_game_id: int, game_id: int, data: dict,
            teams: list, games_data: dict) -> int:
        '''Transform the provider payload into per-team rows and persist them.

        :return: number of unavailable (None) tracked datapoints.
        '''
        prepared_data = self.transformer.prepare_teams_data(teams, data, game_id, games_data)
        common_data = {
            'stats_game_id': stats_game_id,
            'data_src_url': url
        }
        teams_data = self.transformer.get_teams_data(prepared_data, data, teams)
        return self._save_team_stats_common(common_data, teams_data)

    def _round_up(self, n, decimals=0):
        '''Round *n* up to *decimals* decimal places (used for page counts).'''
        multiplier = 10 ** decimals
        return math.ceil(n * multiplier) / multiplier
class Watcher(WatcherInterface):
    '''Define concrete Adapter

    Registers recently finished provider1 games in "current_game_watch" and
    repeatedly collects their team/player stats until the configured time
    limit passes.
    '''
    # Provider key used to look up api_config entries.
    _name = 'provider1'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        '''Wire the shared DB handle, the thread-local HTTP client and the
        provider-specific validator/transformer.

        Optional kwargs: 'game_name', 'log'.
        '''
        self.sql = self.lib_pool.libsql
        self.send_request = cherrypy.thread_data.client_obj.send_request
        if 'game_name' in kwargs:
            self._game_name = kwargs['game_name']
        if 'log' in kwargs:
            self._log = kwargs['log']
        self.validator = get_validator_for_api(self._name, self._game_name,
                                               self._log)
        self.transformer = get_transformer_for_api(self._name, self._game_name,
                                                   self._log)
        # Window (in minutes; see the timedelta below) during which a
        # finished game is still considered for watching.
        self.time_limit = int(get_time_limit())
        super().__init__(*args, **kwargs)

    def _log_msg(self, type: str, msg: str, **kwargs) -> None:
        '''Log *msg* plus connection details when logging is enabled.

        :param type: severity, 'warning' or 'error' (anything else is
            silently dropped); shadows the builtin `type`.
        :param kwargs: extra key/value pairs merged into the log payload.
        '''
        if self._log:
            api_config[self._name]['log'] = True
        if api_config[self._name]['log'] is False:
            return
        if not hasattr(self, 'logger'):
            # Lazily acquire a logger on first use.
            self.logger = LibPool().logger
        logdata = {'connection_details': api_config[self._name]}
        for name, value in kwargs.items():
            logdata[name] = value
        if type == 'warning':
            self.logger.warning(msg, logdata)
        elif type == 'error':
            self.logger.error(msg, logdata)

    def watch_current_games(self) -> None:
        '''Find just-finished games in the configured tournaments and
        register them in "current_game_watch" for later stat collection.
        '''
        self.sql.start_trans_mode()
        cur = self.sql.cursor()
        # If there are some new just finished games mentioned in specification founded,
        # save them in database table `current_game_watch`
        valid_tournament_ids = list(
            toolz.itertoolz.unique([
                y[self._name]['tournament_id'] for x, y in api_config[
                    '{}_tournaments'.format(self._game_name)].items()
                if y[self._name] and y[self._name]['tournament_id']
            ]))
        if not valid_tournament_ids:
            msg = 'No valid tournaments were found for Provider 2'
            self._log_msg('error', msg)
            # NOTE(review): returns with the transaction started above still
            # open -- presumably the global rollback handler cleans it up;
            # confirm.
            return
        for tournament_id in valid_tournament_ids:
            series = {'last_page': 1}  # Fake for loop start
            series_current_page = 0
            while series['last_page'] > series_current_page:  # Pagination
                series_current_page += 1
                # NOTE: `&tiers[]=1` is not necessary, when we filter tournament ids,
                # its open to lower levels of tournaments for testing
                url_s = 'series?games[]={}&with[]=matches&is_over=true&tournaments[]={}&page={}'.format(
                    api_config[self._game_name]['provider1_id'], tournament_id,
                    series_current_page)
                headers, series = self.send_request(api_endpoint=url_s)
                if not series or 'data' not in series:
                    msg = 'No data were found for Provider 2'
                    self._log_msg('error', msg, url=url_s)
                    # NOTE(review): also returns with the transaction left
                    # open; confirm the framework rolls it back.
                    return

                # Only series that ended within the last `time_limit`
                # minutes are processed.
                limit_datetime = datetime.datetime.utcnow(
                ) - datetime.timedelta(minutes=self.time_limit)
                for serie in series['data']:
                    if (serie['end'] is None or datetime.datetime.strptime(
                            serie['end'], '%Y-%m-%d %H:%M:%S') <
                            limit_datetime):
                        continue

                    url_m = 'series/{}?with[]=matches'.format(serie['id'])
                    headers, matches = self.send_request(api_endpoint=url_m)
                    if not self.validator.validate_match(
                            url_m, matches, serie, 'current_game_invalid'):
                        continue

                    for match in matches['matches']:
                        # Skip games that are already being watched.
                        existing = cur.qfo('''
                            SELECT * FROM "current_game_watch"
                            WHERE "data_src_game_id" = %(src_game_id)s
                            AND "is_deleted" = false
                        ''',
                                           params={'src_game_id': match['id']})
                        if existing:
                            continue

                        common_data = {
                            'data_src': self._name,
                            'game_name': self._game_name,
                            'data_src_url': url_s,
                            'data_src_game_id': match['id'],
                            'data_src_game_title': matches['title'],
                            'data_src_start_datetime': matches['start'],
                            'data_src_finish_datetime': matches['end'],
                            'data_src_tournament_id': serie['tournament_id'],
                            'data_src_tournament_title': None,
                            'insert_datetime': datetime.datetime.utcnow(),
                            'is_watching': True
                        }
                        cur.insert('current_game_watch', common_data)
                        watch_game_id = cur.get_last_id('current_game_watch')

                        url_g = 'matches/{}?with[]=summary'.format(match['id'])
                        headers, data = self.send_request(api_endpoint=url_g)

                        # Soft-delete the freshly inserted row when the game
                        # payload fails validation.
                        if not self.validator.validate_game(
                                url_g, watch_game_id, data, match,
                                'current_game_invalid'):
                            cur.update('current_game_watch', {
                                'is_watching': False,
                                'is_deleted': True
                            }, {'id': watch_game_id})

        self.sql.finish_trans_mode()

    def collect_current_data(self) -> None:
        '''Fetch fresh stats for every game still flagged as watched and
        store team/player rows, tracking corrections between fetches.
        '''
        # Look for finished games (not longer than hour ago) mentioned in database table `current_game_watch`
        # and collect data for them into tables with team and player stats
        self.sql.start_trans_mode()
        cur = self.sql.cursor()
        matches = cur.qfa(
            '''
            SELECT * FROM "current_game_watch"
            WHERE "is_watching" = true
            AND "is_deleted" = false
            AND "data_src" = %s
            AND "game_name" = %s
        ''', [self._name, self._game_name])

        for match in matches:
            url_m = 'matches/{}?with[]=summary'.format(
                match['data_src_game_id'])
            headers, data = self.send_request(api_endpoint=url_m)

            # Save game stats connection object
            stats_game_id, unchanged_game_id = self._save_game_stats_connection_objects(
                match['id'])

            # Need to find last stats id
            prev_stats_game_id = self._get_previous_game_id(match['id'])

            # Save team and player game stats
            changes_count = {}
            changes_count['team'] = self._save_team_stats(
                url_m, stats_game_id, prev_stats_game_id, unchanged_game_id,
                data)
            changes_count['player'] = self._save_player_stats(
                url_m, stats_game_id, prev_stats_game_id, unchanged_game_id,
                data)
            self._solve_changes_count_aftermath(changes_count, stats_game_id,
                                                unchanged_game_id)

            # Set watching false if its over limit now
            self._check_watching_limit(match['id'], match['insert_datetime'])

        self.sql.finish_trans_mode()

    def _save_team_stats(self, url: str, stats_game_id: int,
                         prev_stats_game_id: int, unchanged_game_id: int,
                         data: dict) -> DictNone:
        '''Transform and persist team stats for one watched game, comparing
        against the previous fetch to detect corrections.
        '''
        # Load last stats to have a data for comparison
        last_data = self._get_last_data(prev_stats_game_id, 'team')
        prepared_data = self.transformer.prepare_teams_data(data)
        common_data = {'data_src_url': url}
        teams_data = self.transformer.get_teams_data(prepared_data, data)
        return self._save_team_stats_common(stats_game_id, unchanged_game_id,
                                            common_data, teams_data, last_data)
# Example #14
def save_database_csv(type: str, output_filepath: str) -> bool:
    '''Export the current/past statistic tables to CSV files and pack them
    into a ZIP archive at *output_filepath*.

    :param type: dataset selector, 'current' or 'past' (NOTE(review):
        shadows the builtin `type`; name kept for keyword-caller
        compatibility).
    :param output_filepath: path of the resulting ZIP archive.
    :return: True on success.
    '''
    # `-> True` was not a valid type annotation; the function returns bool.
    tables = ['{}_game_analysis'.format(type)]
    if type == 'current':
        tables = ['current_game_watch'] + tables
    if type == 'past':
        tables = ['past_game_stats'] + tables

    joined_tables = [
        '{}_game_player_stats'.format(type), '{}_game_team_stats'.format(type)
    ]
    if type == 'current':
        joined_tables = [
            'current_game_stats', 'current_game_unchanged',
            'current_game_team_unchanged', 'current_game_player_unchanged'
        ] + joined_tables

    # Save database data in CSV files (PostgreSQL server-side COPY into
    # temporary files, one per table).
    sql = LibPool().libsql
    sql.start_trans_mode()
    cur = sql.cursor()
    tmp_files = {}
    queries = []

    for table in tables:
        csv_tmp_file_path = os.path.normpath(
            os.path.join(app_config['path_storage'], 'tmp', str(uuid.uuid4())))
        tmp_files['{}.csv'.format(table)] = csv_tmp_file_path
        queries.append(
            '''COPY (SELECT * FROM "{}") TO '{}' WITH CSV HEADER;'''.format(
                table, csv_tmp_file_path))

    for position, table in enumerate(joined_tables, start=1):
        csv_tmp_file_path = os.path.normpath(
            os.path.join(app_config['path_storage'], 'tmp', str(uuid.uuid4())))
        tmp_files['{}.csv'.format(table)] = csv_tmp_file_path
        # The first joined table ('current_game_stats') joins directly onto
        # the watch table; the rest go through "current_game_stats".
        if position == 1 and type == 'current':
            queries.append('''
                COPY (
                    SELECT "S".*
                    FROM "current_game_watch" "W"
                    INNER JOIN "{}" "S"
                        ON "S"."watch_game_id" = "W"."id"
                ) TO '{}' WITH CSV HEADER;'''.format(table, csv_tmp_file_path))
        elif type == 'current':
            queries.append('''
                COPY (
                    SELECT "TABLE".*
                    FROM "current_game_watch" "W"
                    INNER JOIN "current_game_stats" "S"
                        ON "S"."watch_game_id" = "W"."id"
                    INNER JOIN "{}" "TABLE"
                        ON "TABLE"."stats_game_id" = "S"."id"
                ) TO '{}' WITH CSV HEADER;'''.format(table, csv_tmp_file_path))
        elif type == 'past':
            queries.append('''
                COPY (
                    SELECT "TABLE".*
                    FROM "past_game_stats" "S"
                    INNER JOIN "{}" "TABLE"
                        ON "TABLE"."stats_game_id" = "S"."id"
                ) TO '{}' WITH CSV HEADER;'''.format(table, csv_tmp_file_path))

    for query in queries:
        cur.q(query)
    sql.finish_trans_mode()

    # ZIP files into final package; temp files are removed as they are added.
    with ZipFile(output_filepath, 'w') as myzip:
        for tmp_name, tmp_file in tmp_files.items():
            myzip.write(tmp_file, tmp_name)
            os.unlink(tmp_file)

    return True
    def process_data(self,
                     type: str,
                     data_src: str,
                     game_name: StrNone = None,
                     data_src_tournament_id: IntNone = None) -> OrderedDict:
        '''Analyze collected "current" game stats and persist the analysis.

        :param type: only 'current' is supported; 'past' raises
            NotImplementedError (past data is analyzed online while
            grabbing). Shadows the builtin `type`.
        :param data_src: provider name used to filter watched games.
        :param game_name: optional game filter.
        :param data_src_tournament_id: optional tournament filter.
        :return: OrderedDict with analysis results, or a plain message
            string when there is nothing to analyze (NOTE(review): the str
            early returns leave the opened transaction unfinished --
            presumably the global rollback handler cleans it up; confirm).
        '''
        if type == 'past':
            # There is online analysis when grabbing data, so no need for this
            raise NotImplementedError

        # Current data
        sql = LibPool().libsql
        sql.start_trans_mode()
        cur = sql.cursor()

        # Games
        # games_watch_count
        query_addon = ''
        if game_name is not None:
            query_addon = """\nAND "W"."game_name" = %(game_name)s"""
        if data_src_tournament_id is not None:
            query_addon += """\nAND "W"."data_src_tournament_id" = %(data_src_tournament_id)s"""
        games_watch_data = cur.qfo(
            '''
            SELECT COUNT("W"."id") AS "count"
            FROM "current_game_watch" "W"
            WHERE "W"."data_src" = %(data_src)s
                AND "W"."is_deleted" = false {}
        '''.format(query_addon), {
                'data_src': data_src,
                'game_name': game_name,
                'data_src_tournament_id': data_src_tournament_id
            })
        games_watch_count = games_watch_data['count']
        if games_watch_count == 0:
            return 'No games to watch'

        # games_watch_with_stats_count
        games_watch_with_stats_data = cur.qfa(
            '''
            SELECT "W"."id"
            FROM "current_game_watch" "W"
            INNER JOIN "current_game_stats" "S"
                ON "S"."watch_game_id" = "W"."id"
            WHERE "W"."data_src" = %(data_src)s
                AND "W"."is_deleted" = false {}
            GROUP BY "W"."id"
        '''.format(query_addon), {
                'data_src': data_src,
                'game_name': game_name,
                'data_src_tournament_id': data_src_tournament_id
            })
        games_watch_with_stats_count = len(games_watch_with_stats_data)
        if games_watch_with_stats_count == 0:
            return 'No games to watch with stats to analyze'

        # games_correction_percent
        games_watch_with_stats_percent = 0
        if games_watch_with_stats_count > 0:
            games_watch_with_stats_percent = round(
                games_watch_with_stats_count * 100 / games_watch_count, 2)

        # Get stats data for later analysis
        games_stats_data = cur.qfa(
            '''
            SELECT
                "W"."id" AS "watch_id",
                "W"."game_name",
                "W"."data_src_finish_datetime",
                COUNT("S"."id") AS "stats_count",
                ARRAY_AGG ("S"."id"::int8) "stats"
            FROM "current_game_watch" "W"
            INNER JOIN "current_game_stats" "S"
                ON "S"."watch_game_id" = "W"."id"
            WHERE "W"."data_src" = %(data_src)s
                AND "W"."is_deleted" = false {}
            GROUP BY "W"."id"
            ORDER BY "W"."id"
        '''.format(query_addon), {
                'data_src': data_src,
                'game_name': game_name,
                'data_src_tournament_id': data_src_tournament_id
            })
        games_stats_watch_ids = [x['watch_id'] for x in games_stats_data]
        games_stats_counts = [x['stats_count'] for x in games_stats_data]
        # Games with more than one saved stats row were corrected at least once.
        games_stats_corrected_ids = [
            x['stats'] for x in games_stats_data if x['stats_count'] > 1
        ]
        games_stats_corrected_first_last_ids = [(min(x['stats']),
                                                 max(x['stats']))
                                                for x in games_stats_data
                                                if x['stats_count'] > 1]

        # games_watch_with_stats_corrected_count
        games_watch_with_stats_corrected_count = len(games_stats_corrected_ids)

        # games_watch_with_stats_corrected_percent
        games_watch_with_stats_corrected_percent = 0
        if games_watch_with_stats_corrected_count > 0:
            games_watch_with_stats_corrected_percent = round(
                games_watch_with_stats_corrected_count * 100 /
                games_watch_with_stats_count, 2)

        # games_correction_count
        correction_count_list = [x for x in games_stats_counts if x > 1]
        games_stats_correction_count = sum(correction_count_list) - len(
            correction_count_list)  # Minus first game

        # games_stats_correction_per_game_average_count
        games_stats_correction_per_game_average_count = 0
        if games_watch_with_stats_corrected_count > 0:
            games_stats_correction_per_game_average_count = round(
                games_stats_correction_count /
                games_watch_with_stats_corrected_count, 2)

        # games_stats_game_end_save_stats_average_seconds_diff
        games_stats_save_times = []
        if games_stats_watch_ids:
            for watch_game_id in games_stats_watch_ids:
                data = cur.qfo('''
                    SELECT "W"."data_src_finish_datetime", "S"."insert_datetime"
                    FROM "current_game_watch" "W"
                    INNER JOIN "current_game_stats" "S"
                        ON "S"."watch_game_id" = "W"."id"
                    WHERE "W"."id" = %s
                    ORDER BY "W"."id", "S"."id"
                    LIMIT 1
                ''',
                               params=[watch_game_id])
                games_stats_save_times.append(
                    (data['insert_datetime'] -
                     data['data_src_finish_datetime']).total_seconds())
        games_stats_game_end_save_stats_average_seconds_diff = 0
        if games_stats_save_times:
            games_stats_game_end_save_stats_average_seconds_diff = round(
                sum(games_stats_save_times) / len(games_stats_save_times), 2)

        # games_stats_save_stats_last_correction_average_seconds_diff
        games_stats_correction_times = []
        if games_stats_corrected_first_last_ids:
            for first_game_id, last_game_id in games_stats_corrected_first_last_ids:
                first_last = cur.qfa('''
                    SELECT "id", "insert_datetime" FROM "current_game_stats"
                    WHERE "id" IN %s
                ''',
                                     params=[(first_game_id, last_game_id)],
                                     key='id')
                games_stats_correction_times.append(
                    (first_last[last_game_id]['insert_datetime'] -
                     first_last[first_game_id]['insert_datetime']
                     ).total_seconds())
        games_stats_save_stats_last_correction_average_seconds_diff = 0
        if games_stats_correction_times:
            games_stats_save_stats_last_correction_average_seconds_diff = round(
                sum(games_stats_correction_times) /
                len(games_stats_correction_times), 2)

        # Datapoints
        # Each team in all games has 5 players
        # datapoints_total_count
        if not game_name:
            datapoints_stats_count = (sum([
                5 * api_config[x['game_name']]['datapoints'] * x['stats_count']
                for x in games_stats_data
            ]))
        else:
            # Total count of saved games is `games_stats_correction_count` + `games_watch_with_stats_count`
            datapoints_stats_count = (
                5 * api_config[game_name]['datapoints'] *
                (games_stats_correction_count + games_watch_with_stats_count))

        # datapoints_correction_count
        datapoints_stats_correction_list = []
        if games_stats_corrected_ids:
            for game_stats_ids in games_stats_corrected_ids:
                # BUGFIX: start every game with empty change maps; previously
                # these names were unbound on the first iteration (NameError)
                # or carried stale values from the previous game whenever a
                # query below returned no rows.
                team_changes_count_per_game = {}
                player_changes_count_per_game = {}

                # Teams
                teams_data = cur.qfa(
                    '''
                    SELECT "TS".*, "S"."insert_datetime"
                    FROM "current_game_stats" "S"
                    INNER JOIN "current_game_team_stats" "TS"
                        ON "S"."id" = "TS"."stats_game_id"
                    WHERE "S"."id" IN %s
                    ORDER BY "TS"."data_src_team_id", "TS"."id"
                ''', [(game_stats_ids)])
                if teams_data:
                    teams_data_final = self._transform_stats_data(
                        teams_data, 'data_src_team_id')
                    team_changes_count_per_game = self._check_stats_for_changes(
                        teams_data_final)

                # Players
                players_data = cur.qfa(
                    '''
                    SELECT "PS".*
                    FROM "current_game_stats" "S"
                    INNER JOIN "current_game_player_stats" "PS"
                        ON "S"."id" = "PS"."stats_game_id"
                    WHERE "S"."id" IN %s
                    ORDER BY "PS"."id"
                ''', [(game_stats_ids)])
                if players_data:
                    players_data_final = self._transform_stats_data(
                        players_data, 'data_src_player_id')
                    player_changes_count_per_game = self._check_stats_for_changes(
                        players_data_final)

                for game_id in game_stats_ids:
                    # Changes has to be saved per stats game due max and median analyze needs
                    changes_count = 0
                    if game_id in team_changes_count_per_game:
                        changes_count += team_changes_count_per_game[game_id]
                    if game_id in player_changes_count_per_game:
                        changes_count += player_changes_count_per_game[game_id]
                    datapoints_stats_correction_list.append(changes_count)

        # assign datapoints_correction_count
        datapoints_stats_correction_count = len(
            datapoints_stats_correction_list)

        # datapoints_correction_percent
        datapoints_stats_correction_percent = 0
        if datapoints_stats_correction_count > 0:
            datapoints_stats_correction_percent = round(
                datapoints_stats_correction_count * 100 /
                datapoints_stats_count, 2)

        # datapoints_correction_max
        datapoints_stats_correction_per_game_max = 0
        if datapoints_stats_correction_list:
            datapoints_stats_correction_per_game_max = max(
                datapoints_stats_correction_list)

        # datapoints_correction_median
        datapoints_stats_correction_per_game_median = 0
        if datapoints_stats_correction_list:
            datapoints_stats_correction_per_game_median = statistics.median(
                datapoints_stats_correction_list)

        result = OrderedDict([
            ('games_watch_count', games_watch_count),
            ('games_watch_with_stats_count', games_watch_with_stats_count),
            ('games_watch_with_stats_percent', games_watch_with_stats_percent),
            ('games_watch_with_stats_corrected_count',
             games_watch_with_stats_corrected_count),
            ('games_watch_with_stats_corrected_percent',
             games_watch_with_stats_corrected_percent),
            ('games_stats_correction_count', games_stats_correction_count),
            ('games_stats_correction_per_game_average_count',
             games_stats_correction_per_game_average_count),
            ('games_stats_game_end_save_stats_average_minutes_diff',
             round(games_stats_game_end_save_stats_average_seconds_diff / 60,
                   2)),
            ('games_stats_save_stats_last_correction_average_minutes_diff',
             round(
                 games_stats_save_stats_last_correction_average_seconds_diff /
                 60, 2)), ('datapoints_stats_count', datapoints_stats_count),
            ('datapoints_stats_correction_count',
             datapoints_stats_correction_count),
            ('datapoints_stats_correction_percent',
             datapoints_stats_correction_percent),
            ('datapoints_stats_correction_per_game_max',
             datapoints_stats_correction_per_game_max),
            ('datapoints_stats_correction_per_game_median',
             datapoints_stats_correction_per_game_median)
        ])

        # Update analysis results in database: the DB stores the raw seconds
        # values, while the returned result carries the minutes variants.
        update_data = dict(deepcopy(result))
        for var in [
            ('games_stats_game_end_save_stats_average_seconds_diff',
             games_stats_game_end_save_stats_average_seconds_diff),
            ('games_stats_save_stats_last_correction_average_seconds_diff',
             games_stats_save_stats_last_correction_average_seconds_diff)
        ]:
            del (update_data[var[0].replace('seconds', 'minutes')])
            update_data[var[0]] = var[1]
        cur.update('current_game_analysis',
                   update_data,
                   conditions={
                       'data_src': data_src,
                       'game_name': game_name
                   })

        sql.finish_trans_mode()

        # Return analysis informations
        return result
class RestClient(RestClientInterface):
    '''Define concrete Adapter

    REST client for the "provider2" API: sends token-authenticated GET
    requests and translates provider failures into CherryPy HTTP errors.
    '''
    _name = 'provider2'
    _pandapool = Provider2Pool()

    def __init__(self, *args, **kwargs):
        # Expose this client on the current CherryPy thread so collaborators
        # (e.g. Watcher) can reuse its `send_request` method.
        cherrypy.thread_data.client_obj = self
        if 'log' in kwargs:
            self._log = kwargs['log']
        # NOTE(review): when 'log' is absent, `_log` is presumably supplied
        # by RestClientInterface -- confirm, otherwise `_logging` raises.
        super().__init__(*args, **kwargs)

    def _logging(self,
                 message: str,
                 url: str = None,
                 log_type: str = None) -> None:
        '''Log *message*, honouring the per-provider log switch.

        `log_type == 'error'` logs at error level; anything else at info.
        '''
        if self._log:
            api_config[self._name]['log'] = True
        if api_config[self._name]['log'] is False:
            return
        if not hasattr(self, 'logger'):
            self.logger = LibPool().logger
        logdata = {'connection_details': api_config[self._name]}
        if url:
            logdata['url'] = url
        # `log_type is not None and log_type == 'error'` simplified: the
        # equality test alone already handles None.
        if log_type == 'error':
            self.logger.error(message, logdata)
        else:
            self.logger.info(message, logdata)

    def send_request(self, api_endpoint: str, data: dict = None) -> dict:
        '''GET *api_endpoint* with the provider auth token appended.

        Returns a (headers, parsed-JSON-body) tuple. Raises
        cherrypy.HTTPError(401) on authentication failure and
        cherrypy.HTTPError(500) on any other provider error.
        '''
        if data is None:  # avoid a shared mutable default argument
            data = {}
        ac = api_config[self._name]
        url_start = '{}{}?'.format(ac['url'], api_endpoint)
        if '?' in api_endpoint:
            url_start = '{}&'.format(url_start[0:-1])
        url = '{}token={}'.format(url_start, ac['auth_token'])
        response = None
        try:
            if self._pandapool.request_permission:
                response = requests.get(url, data=to_json(data))
            # BUG FIX: `response` used to be unbound when the pool denied
            # permission, surfacing as a confusing NameError; fail explicitly.
            if response is None:
                raise Exception('Request permission denied by pool')
            if response.status_code == 401:
                raise AuthenticationError(response.json()['error'])
            elif 'error' in response.json():
                raise Exception(response.json()['error'])
        except AuthenticationError as e:
            error_message = 'Authentication error on Provider 1: {}'.format(e)
            self._logging(error_message, url, 'error')
            raise cherrypy.HTTPError(401, error_message)
        except Exception as e:
            error_message = 'Request to Provider 1 was not succesfull: {}'.format(
                e)
            self._logging(error_message, url, 'error')
            raise cherrypy.HTTPError(500, error_message)

        tools.check_request_timeout(
        )  # Application hook to return 408 if time is over
        return response.headers, response.json()

    def authenticate(self) -> None:
        '''Return False when the provider rejects us, None otherwise.'''
        # Provider 1 is always authenticated, hit lives matches to check it
        url = '{}{}'.format(api_config[self._name]['url'], 'lives')
        response = requests.get(url)
        if response.status_code == 401:
            return False

    def monitor(self) -> float:
        '''Return the authentication round-trip time in seconds, or False.'''
        # Try to authenticate at REST endpoint
        start_time = time.perf_counter()
        if self.authenticate() is False:
            return False
        return time.perf_counter() - start_time
# Example #17 (aggregator artifact; original marker: "Beispiel #17" / vote count 0)
 def __init__(self):
     # Grab the shared application logger from the LibPool singleton.
     self.logger = LibPool().logger
# Example #18 (aggregator artifact; original marker: "Beispiel #18" / vote count 0)
class Invoker(object):
    '''Wire up CherryPy configuration and request-lifecycle hooks, and
    expose the CLI / WSGI entry points of the application server.
    '''

    def __init__(self):
        # Shared application logger from the library pool.
        self.logger = LibPool().logger

    def setup_server(self):
        '''Apply configuration and register all lifecycle hooks.'''
        self._set_config()
        self._set_cherrypy_hooks()

    def _set_config(self):
        '''Merge the application config with the built-in defaults.'''
        defaults = {
            'tools.encode.on': True,
            'tools.encode.encoding': 'utf-8',
            # Prevents CherryPy Checker: The Application mounted at '' has an empty config.
            'checker.check_skipped_app_config': False,
            # Allow hooks
            'tools.on_start_resource.on': True,
            'tools.before_finalize.on': True,
            'tools.before_error_response.on': True,
            'tools.after_error_response.on': True,
            'tools.on_end_request.on': True
        }
        # Defaults win on key clashes, matching toolz.dicttoolz.merge where
        # the later mapping takes precedence.
        cherrypy.config.update({**app_config['cherrypy'], **defaults})

    def _set_cherrypy_hooks(self):
        '''Register one cherrypy.Tool per supported lifecycle hook point.'''
        for point in ('on_start_resource', 'before_finalize',
                      'before_error_response', 'after_error_response',
                      'on_end_request'):
            wrapper = getattr(app_tools, '{}_wrapper'.format(point))
            setattr(cherrypy.tools, point, cherrypy.Tool(point, wrapper))

    def cli_cmd(self):
        '''Run the server in the foreground (CLI / development mode).'''
        self.setup_server()
        cherrypy.config.update({'engine.autoreload.on': True})

        if platform.system() == 'Windows':
            # This enables Ctrl+C on Windows
            for handler_name in ('signal_handler', 'console_control_handler'):
                if hasattr(cherrypy.engine, handler_name):
                    getattr(cherrypy.engine, handler_name).subscribe()

        cherrypy.engine.start()
        if app_config['app_log_factory'] == 'gcp':
            self.logger.info(
                'Starting Cherrypy server on "{}:{}" at "{}"\n'.format(
                    app_config['cherrypy']['server.socket_host'],
                    app_config['cherrypy']['server.socket_port'],
                    datetime.strftime(datetime.utcnow(), '%Y-%m-%d %H:%M:%S')))
        cherrypy.engine.block()

    def wsgi_cmd(self):
        '''Prepare the server for embedding and return a WSGI callable.'''
        self.setup_server()
        cherrypy.config.update({
            'environment': 'embedded',
            'engine.autoreload.on': False
        })

        cherrypy.server.unsubscribe()
        sys.stdout = sys.stderr

        def application(environ, start_response):
            return cherrypy.tree(environ, start_response)

        return application
class Watcher(WatcherInterface):
    '''Define concrete Adapter

    Watches provider2 games: registers freshly finished games in the
    `current_game_watch` table and collects team/player stats for them.
    '''
    _name = 'provider2'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        self.sql = self.lib_pool.libsql
        # The RestClient registered itself on the current CherryPy thread;
        # reuse its request method so all HTTP traffic shares one client.
        self.send_request = cherrypy.thread_data.client_obj.send_request
        if 'game_name' in kwargs:
            self._game_name = kwargs['game_name']
        if 'log' in kwargs:
            self._log = kwargs['log']
        # NOTE(review): `_game_name`/`_log` are only set when passed in
        # kwargs, yet are used unconditionally below -- presumably the
        # caller always supplies them; confirm.
        self.validator = get_validator_for_api(self._name, self._game_name,
                                               self._log)
        self.time_limit = int(get_time_limit())
        super().__init__(*args, **kwargs)

    def _log_msg(self, type: str, msg: str, **kwargs) -> None:
        '''Log *msg* at the given level ('warning'/'error'), honouring the
        per-provider log switch; extra kwargs are merged into the payload.
        '''
        if self._log:
            api_config[self._name]['log'] = True
        if api_config[self._name]['log'] is False:
            return
        if not hasattr(self, 'logger'):
            self.logger = LibPool().logger
        logdata = {'connection_details': api_config[self._name]}
        for name, value in kwargs.items():
            logdata[name] = value
        if type == 'warning':
            self.logger.warning(msg, logdata)
        elif type == 'error':
            self.logger.error(msg, logdata)

    def watch_current_games(self) -> None:
        '''Page through recently finished matches and register each valid
        game in `current_game_watch` for subsequent stat collection.
        '''
        self.sql.start_trans_mode()
        cur = self.sql.cursor()
        # If there are some new just finished games mentioned in specification founded,
        # save them in database table `current_game_watch`
        valid_league_ids = list(
            toolz.itertoolz.unique([
                y[self._name]['league_id'] for x, y in api_config[
                    '{}_tournaments'.format(self._game_name)].items()
                if y[self._name] and y[self._name]['league_id']
            ]))
        if not valid_league_ids:
            msg = 'No valid leagues were found for Provider 1'
            self._log_msg('error', msg)
            # NOTE(review): returns with the transaction still open;
            # presumably the rollback hook cleans up -- confirm.
            return
        matches_last_page = 1  # Fake for loop start
        matches_current_page = 0
        while matches_last_page > matches_current_page:  # Pagination
            matches_current_page += 1
            url_m = '{}/matches/past?filter[status]=finished&league_id={}&page[size]=100&page[number]={}'.format(
                api_config[self._game_name]['provider2_slug'],
                str(valid_league_ids), matches_current_page)
            matches_headers, matches = self.send_request(api_endpoint=url_m)
            matches_last_page = self._round_up(
                int(matches_headers['X-Total']) /
                int(matches_headers['X-Per-Page']))
            if not matches:
                # BUG FIX: the log call below was unreachable because it sat
                # after the `return`; log first, then return the same value.
                msg = 'No data were found for Provider 1'
                self._log_msg('error', msg, url=url_m)
                return {'return_msg': 'No data were found for Provider 1.'}

            limit_datetime = datetime.datetime.utcnow() - datetime.timedelta(
                minutes=self.time_limit)
            for match in matches:
                # Skip matches outside the watched leagues or older than the
                # configured time limit (or with no finish timestamp).
                if match['league_id'] not in valid_league_ids:
                    continue
                elif (match['end_at'] is None or datetime.datetime.strptime(
                        match['end_at'], '%Y-%m-%dT%H:%M:%SZ') <
                      limit_datetime):
                    continue
                if not self.validator.validate_match(url_m, match,
                                                     'current_game_invalid'):
                    continue

                for game in match['games']:
                    # Already-watched games are not inserted twice.
                    existing = cur.qfo('''
                        SELECT * FROM "current_game_watch"
                        WHERE "data_src_game_id" = %(src_game_id)s
                        AND "is_deleted" = false
                    ''',
                                       params={'src_game_id': game['id']})
                    if existing:
                        continue

                    common_data = {
                        'data_src': self._name,
                        'game_name': self._game_name,
                        'data_src_url': url_m,
                        'data_src_game_id': game['id'],
                        'data_src_game_title': match['name'],
                        'data_src_start_datetime': match['begin_at'],
                        'data_src_finish_datetime': match['end_at'],
                        'data_src_tournament_id': match['serie_id'],
                        'data_src_tournament_title':
                        match['serie']['full_name'],
                        'insert_datetime': datetime.datetime.utcnow(),
                        'is_watching': True
                    }
                    cur.insert('current_game_watch', common_data)
                    watch_game_id = cur.get_last_id('current_game_watch')

                    url_g = '{}/games/{}'.format(
                        api_config[self._game_name]['provider2_slug'],
                        game['id'])
                    headers, data = self.send_request(api_endpoint=url_g)

                    # Invalid games stay in the table but are soft-deleted.
                    if not self.validator.validate_game(
                            url_m, watch_game_id, data, match,
                            'current_game_invalid'):
                        cur.update('current_game_watch', {
                            'is_watching': False,
                            'is_deleted': True
                        }, {'id': watch_game_id})

        self.sql.finish_trans_mode()

    def collect_current_data(self):
        '''Collect and persist team/player stats for every game currently
        flagged `is_watching` for this provider and game name.
        '''
        # Look for finished games (not longer than hour ago) mentioned in database table `current_game_watch`
        # and collect data for them into tables with team and player stats
        self.sql.start_trans_mode()
        cur = self.sql.cursor()
        games = cur.qfa(
            '''
            SELECT * FROM "current_game_watch"
            WHERE "is_watching" = true
            AND "is_deleted" = false
            AND "data_src" = %s
            AND "game_name" = %s
        ''', [self._name, self._game_name])

        for game in games:
            url_g = '{}/games/{}'.format(
                api_config[self._game_name]['provider2_slug'],
                game['data_src_game_id'])
            headers, data = self.send_request(api_endpoint=url_g)

            # Save game stats connection object
            stats_game_id, unchanged_game_id = self._save_game_stats_connection_objects(
                game['id'])

            # Need to find last stats id
            prev_stats_game_id = self._get_previous_game_id(game['id'])

            # Save team and player game stats
            changes_count = {}
            changes_count['team'] = self._save_team_stats(
                url_g, stats_game_id, prev_stats_game_id, unchanged_game_id,
                game['data_src_game_id'], data, data['match']['games'])
            changes_count['player'] = self._save_player_stats(
                url_g, stats_game_id, prev_stats_game_id, unchanged_game_id,
                data)
            self._solve_changes_count_aftermath(changes_count, stats_game_id,
                                                unchanged_game_id)

            # Set watching false if its over limit now
            self._check_watching_limit(game['id'], game['insert_datetime'])

        self.sql.finish_trans_mode()

    def _save_team_stats(self, url: str, stats_game_id: int,
                         prev_stats_game_id: int, unchanged_game_id: int,
                         game_id: int, data: dict,
                         games_data: dict) -> DictNone:
        '''Transform provider team data and persist it, comparing against
        the previous snapshot to detect corrections.
        '''
        # Load last stats to have a data for comparison
        last_data = self._get_last_data(prev_stats_game_id, 'team')
        teams = [x['opponent']['id'] for x in data['match']['opponents']]
        # NOTE(review): `self.transformer` is not set in __init__ --
        # presumably provided by WatcherInterface; confirm.
        prepared_data = self.transformer.prepare_teams_data(
            teams, data, game_id, games_data)
        common_data = {'data_src_url': url}
        teams_data = self.transformer.get_teams_data(prepared_data, data,
                                                     teams)
        return self._save_team_stats_common(stats_game_id, unchanged_game_id,
                                            common_data, teams_data, last_data)

    def _round_up(self, n, decimals=0):
        '''Round *n* up to the given number of decimal places.'''
        multiplier = 10**decimals
        return math.ceil(n * multiplier) / multiplier
# Example #20 (aggregator artifact; original marker: "Beispiel #20" / vote count 0)
class Validator(ValidatorInterface):
    '''Concrete Adapter that validates provider2 match/game payloads,
    recording any problems in the corresponding "invalid" table.
    '''
    _name = 'provider2'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        self.sql = self.lib_pool.libsql
        # Optional settings stay unset unless explicitly provided.
        for option in ('game_name', 'log'):
            if option in kwargs:
                setattr(self, '_' + option, kwargs[option])

    def _log_msg(self, type: str, msg: str, **kwargs) -> None:
        '''Emit *msg* at the requested level, honouring the per-provider
        log switch; extra kwargs are merged into the log payload.
        '''
        if self._log:
            api_config[self._name]['log'] = True
        if api_config[self._name]['log'] is False:
            return
        if not hasattr(self, 'logger'):
            self.logger = LibPool().logger
        logdata = {'connection_details': api_config[self._name], **kwargs}
        emitters = {'warning': 'warning', 'error': 'error'}
        if type in emitters:
            getattr(self.logger, emitters[type])(msg, logdata)

    def validate_match(self, url: str, matches_data: dict, table: str) -> bool:
        '''Return True when the match payload contains games; otherwise
        record the problem in *table*, log it, and return False.
        '''
        if matches_data.get('games'):
            return True
        msg = '{} - "games" missing in data or empty'.format(
            self._game_name.upper())
        # Save it as invalid and log it
        insert_data = {
            'data_src_url': url,
            'insert_datetime': datetime.datetime.utcnow(),
            'problem_msg': msg
        }
        self.sql.cursor().insert(table, insert_data)
        self._log_msg('error', msg, url=url)
        return False

    def validate_game(self, url: str, parent_game_id: int, data: dict,
                      match_data: dict, table: str) -> bool:
        '''Return True when the game payload has exactly two opponents and
        a non-empty player list; otherwise record it in *table* and log.
        '''
        opponents = [x['opponent']['id'] for x in data['match']['opponents']]
        msg = None
        if len(opponents) != 2:
            msg = '{} - "opponents" empty in data or invalid for match {}'.format(
                self._game_name.upper(), match_data['id'])
        elif not data.get('players'):
            msg = '{} - "players" missing in data or empty for match {}'.format(
                self._game_name.upper(), match_data['id'])
        if msg is None:
            return True
        # Save it as invalid and log it
        insert_data = {
            'data_src_url': url,
            'insert_datetime': datetime.datetime.utcnow(),
            'problem_msg': msg
        }
        # Each invalid table links back to its parent via a different column.
        id_column = {
            'current_game_invalid': 'watch_game_id',
            'past_game_invalid': 'stats_game_id'
        }.get(table)
        if id_column:
            insert_data[id_column] = parent_game_id
        self.sql.cursor().insert(table, insert_data)
        self._log_msg('error', msg, url=url)
        return False
# Example #21 (aggregator artifact; original marker: "Beispiel #21" / vote count 0)
class Transformer(TransformerInterface):
    '''Define concrete Adapter

    Transforms raw provider1 match payloads into per-team and per-player
    stat dictionaries for csgo, dota2 and lol.
    '''
    _name = 'provider1'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        self.sql = self.lib_pool.libsql
        # Optional settings: only set when explicitly passed by the caller.
        if 'game_name' in kwargs:
            self._game_name = kwargs['game_name']
        if 'log' in kwargs:
            self._log = kwargs['log']

    def prepare_teams_data(self, data: dict) -> dict:
        '''Pre-compute per-team counters (rounds, objectives, win/lose)
        from the raw match payload, keyed by the game being processed.

        Returns a dict of {stat_name: {team_id: value}} mappings; values
        are None when the source section is missing from the payload.
        '''
        side1 = api_config[self._game_name]['provider1_sides'][0]
        side2 = api_config[self._game_name]['provider1_sides'][1]
        summary = data['match_summary']
        if self._game_name == 'csgo':
            teams = [
                data['match_summary'][x]
                for x in api_config[self._game_name]['provider1_sides']
            ]

            # We need to pre-count round score because of using oponnents win score to set lose
            round_win = {}
            round_lose = {}
            for team_id in teams:
                round_win[team_id] = None
                round_lose[team_id] = None
            # NOTE(review): the guard checks summary['scores'] but then
            # iterates data['scores'] -- confirm both keys exist and refer
            # to the same scores, otherwise this may KeyError or read
            # inconsistent data.
            if 'scores' in summary and summary['scores']:
                for key, score in data['scores'].items():
                    round_win[key] = score
                round_lose[teams[0]] = round_win[teams[1]]
                round_lose[teams[1]] = round_win[teams[0]]

            # We need to pre-count bomb exploded / defused
            bomb_exploded = {teams[0]: None, teams[1]: None}
            bomb_defused = {teams[0]: None, teams[1]: None}
            if 'rounds' in data and data['rounds']:
                bomb_defused = {teams[0]: 0, teams[1]: 0}
                bomb_exploded = {teams[0]: 0, teams[1]: 0}
                for game_round in data['rounds']:
                    for event in game_round['bomb_events']:
                        if event['type'] == 'exploded':
                            bomb_exploded[game_round['winner_team']] += 1
                        elif event['type'] == 'defused':
                            bomb_defused[game_round['winner_team']] += 1

            return {
                'bomb_plant': bomb_exploded,
                'bomb_defuse': bomb_defused,
                'round_win': round_win,
                'round_lose': round_lose
            }

        elif self._game_name == 'dota2':
            teams = [x['team_id'] for x in data['rosters']]
            team_win = {}
            team_lose = {}
            for team_id in teams:
                # Win/lose recorded as 1/0 ints per team.
                team_win[team_id] = int(data['winner'] == team_id)
                team_lose[team_id] = int(data['winner'] != team_id)
            return {'team_win': team_win, 'team_lose': team_lose}

        elif self._game_name == 'lol':
            teams = [x['team_id'] for x in data['rosters']]
            players = {
                teams[0]: [x['player_id'] for x in summary[side1]['players']],
                teams[1]: [x['player_id'] for x in summary[side2]['players']]
            }

            # We need to pre-count turrets
            turrets = {teams[0]: None, teams[1]: None}
            if ('objective_events' in summary
                    and 'towers' in summary['objective_events']
                    and summary['objective_events']['towers']):
                turrets = {teams[0]: 0, teams[1]: 0}
                for tower in summary['objective_events']['towers']:
                    # Attribute the kill to whichever team the killer plays for.
                    if tower['killer_id'] in players[teams[0]]:
                        turrets[teams[0]] += 1
                    elif tower['killer_id'] in players[teams[1]]:
                        turrets[teams[1]] += 1

            # We need to pre-count dragons
            dragons = {teams[0]: None, teams[1]: None}
            if ('objective_events' in summary
                    and 'dragons' in summary['objective_events']
                    and summary['objective_events']['dragons']):
                dragons = {teams[0]: 0, teams[1]: 0}
                for dragon in summary['objective_events']['dragons']:
                    if dragon['killer_id'] in players[teams[0]]:
                        dragons[teams[0]] += 1
                    elif dragon['killer_id'] in players[teams[1]]:
                        dragons[teams[1]] += 1

            # We need to pre-count barons
            barons = {teams[0]: None, teams[1]: None}
            if ('objective_events' in summary
                    and 'barons' in summary['objective_events']
                    and summary['objective_events']['barons']):
                barons = {teams[0]: 0, teams[1]: 0}
                for baron in summary['objective_events']['barons']:
                    if baron['killer_id'] in players[teams[0]]:
                        barons[teams[0]] += 1
                    elif baron['killer_id'] in players[teams[1]]:
                        barons[teams[1]] += 1

            return {'turret': turrets, 'dragon': dragons, 'baron': barons}

    def get_teams_data(self, prepared_data: dict, data: dict) -> dict:
        '''Assemble the final per-team stat rows from *prepared_data*,
        keyed by the provider team id.
        '''
        side1 = api_config[self._game_name]['provider1_sides'][0]
        side2 = api_config[self._game_name]['provider1_sides'][1]
        summary = data['match_summary']
        teams_data = {}
        for side in [side1, side2]:
            if self._game_name == 'csgo':
                team_id = summary[side]
                teams_data[team_id] = {
                    'bomb_plant': prepared_data['bomb_plant'][team_id],
                    'bomb_defuse': prepared_data['bomb_defuse'][team_id],
                    'round_win': prepared_data['round_win'][team_id],
                    'round_lose': prepared_data['round_lose'][team_id]
                }

            elif self._game_name == 'dota2':
                # NOTE(review): assumes rosters[0]/rosters[1] map to
                # side1/side2 in that order -- confirm against the provider.
                if side == side1:
                    team_id = data['rosters'][0]['team_id']
                elif side == side2:
                    team_id = data['rosters'][1]['team_id']
                teams_data[team_id] = {
                    'team_win': prepared_data['team_win'][team_id],
                    'team_lose': prepared_data['team_lose'][team_id]
                }

            elif self._game_name == 'lol':
                if side == side1:
                    team_id = data['rosters'][0]['team_id']
                elif side == side2:
                    team_id = data['rosters'][1]['team_id']
                teams_data[team_id] = {
                    'turret': prepared_data['turret'][team_id],
                    'dragon': prepared_data['dragon'][team_id],
                    'baron': prepared_data['baron'][team_id]
                }

            teams_data[team_id]['data_src_team_id'] = team_id

        return teams_data

    def prepare_players_data(self, data: dict) -> dict:
        '''Pre-compute player-id lookups needed by get_players_data.

        Only dota2 needs a roster-to-player mapping; other games return {}.
        '''
        if self._game_name == 'dota2':
            # Prepare players id list, specially for dota2
            return {
                'players': {
                    data['rosters'][0]['team_id']:
                    [x['id'] for x in data['rosters'][0]['players']],
                    data['rosters'][1]['team_id']:
                    [x['id'] for x in data['rosters'][1]['players']]
                }
            }
        return {}

    def get_players_data(self, prepared_data: dict, data: dict) -> dict:
        '''Assemble per-player stat rows keyed by the provider player id,
        adding game-specific fields (tower/roshan kills, creep score).
        '''
        summary = data['match_summary']
        side1 = api_config[self._game_name]['provider1_sides'][0]
        side2 = api_config[self._game_name]['provider1_sides'][1]
        players_data = {}
        for side in [side1, side2]:
            if self._game_name == 'csgo':
                for player in summary['scoreboard'][side]:
                    player_data = self._set_common_player_stats(player)
                    players_data[
                        player_data['data_src_player_id']] = player_data

            elif self._game_name == 'dota2':
                for player in summary['player_stats']:
                    side1_players = prepared_data['players'][data['rosters'][0]
                                                             ['team_id']]
                    side2_players = prepared_data['players'][data['rosters'][1]
                                                             ['team_id']]
                    if ((side == side1
                         and player['player_id'] not in side1_players)
                            or (side == side2
                                and player['player_id'] not in side2_players)):
                        continue  # Skip players from other team

                    player_data = self._set_common_player_stats(player)
                    # tower_kill/roshan_kill stay None when the source
                    # sections are absent, 0+ once any event is seen.
                    player_data['tower_kill'] = None
                    if 'structure_dest' in summary:
                        for dest in summary['structure_dest']:
                            if dest['structure_type'] == 'tower' and dest[
                                    'killer'] == player['player_id']:
                                if player_data['tower_kill'] is None:
                                    player_data['tower_kill'] = 0
                                player_data['tower_kill'] += 1
                    player_data['roshan_kill'] = None
                    if 'roshan_events' in summary:
                        for event in summary['roshan_events']:
                            if event['type'] == 'kill' and event[
                                    'killer'] == player['player_id']:
                                if player_data['roshan_kill'] is None:
                                    player_data['roshan_kill'] = 0
                                player_data['roshan_kill'] += 1
                    players_data[
                        player_data['data_src_player_id']] = player_data

            elif self._game_name == 'lol':
                for player in summary[side]['players']:
                    player_data = self._set_common_player_stats(player)
                    player_data['creep_score'] = None
                    if 'minion_kills' in player and 'total' in player[
                            'minion_kills']:
                        player_data['creep_score'] = player['minion_kills'][
                            'total']
                    players_data[
                        player_data['data_src_player_id']] = player_data

        return players_data

    def _set_common_player_stats(self, player: dict) -> dict:
        '''Map the provider's per-player fields onto our common stat keys.'''
        return {
            'data_src_player_id': player['player_id'],
            'kill': player['kills'],
            'assist': player['assists'],
            'death': player['deaths']
        }
class Transformer(TransformerInterface):
    '''Define concrete Adapter

    Transforms raw provider2 match payloads into the internal
    per-team / per-player statistics dictionaries.  Every public method
    branches on ``self._game_name`` ('csgo', 'dota2' or 'lol').
    '''
    _name = 'provider2'
    lib_pool = LibPool()

    def __init__(self, *args, **kwargs):
        '''Accept optional ``game_name`` and ``log`` keyword arguments.

        Both attributes are always assigned (falling back to any
        class-level default, then ``None``/``False``) so that later
        access such as ``self._game_name == 'csgo'`` cannot raise
        AttributeError when a kwarg was omitted.
        '''
        self.sql = self.lib_pool.libsql
        self._game_name = kwargs.get('game_name',
                                     getattr(self, '_game_name', None))
        self._log = kwargs.get('log', getattr(self, '_log', False))

    def prepare_teams_data(self, teams: list, data: dict, game_id: int,
                           games_data: dict) -> dict:
        '''Pre-compute game-specific team aggregates.

        :param teams: the two data-source team ids of the game
        :param data: raw game-detail payload from the provider
        :param game_id: provider id of the processed game
        :param games_data: iterable of game summaries (used to find the
                           dota2 winner)
        :return: dict of per-stat ``{team_id: value}`` maps, or ``None``
                 for an unsupported game name
        '''
        if self._game_name == 'csgo':
            # We need to pre-count round score because the opponent's
            # win score is used to set a team's lose score.
            round_win = {team_id: None for team_id in teams}
            round_lose = {team_id: None for team_id in teams}
            if data['rounds_score']:
                for score in data['rounds_score']:
                    round_win[score['team_id']] = score['score']
                # A team's losses equal its opponent's wins.
                round_lose[teams[0]] = round_win[teams[1]]
                round_lose[teams[1]] = round_win[teams[0]]

            # We need to pre-count bomb exploded / defused; None means
            # "no round data at all", 0 means "rounds seen, none such".
            bomb_exploded = {teams[0]: None, teams[1]: None}
            bomb_defused = {teams[0]: None, teams[1]: None}
            if data['rounds']:
                bomb_defused = {teams[0]: 0, teams[1]: 0}
                bomb_exploded = {teams[0]: 0, teams[1]: 0}
                for game_round in data['rounds']:
                    if game_round['outcome'] == 'exploded':
                        bomb_exploded[game_round['winner_team']] += 1
                    elif game_round['outcome'] == 'defused':
                        bomb_defused[game_round['winner_team']] += 1

            return {
                'bomb_plant': bomb_exploded,
                'bomb_defuse': bomb_defused,
                'round_win': round_win,
                'round_lose': round_lose
            }

        elif self._game_name == 'dota2':
            # We need to pre-find the winner team of this game.
            winner_team_id = None
            for game in games_data:
                if game['id'] == game_id:
                    winner_team_id = game['winner']['id']
                    break
            team_win = {}
            team_lose = {}
            for team_id in teams:
                # 1/0 flags: exactly one of win/lose is set per team
                # (both teams "lose" if no winner entry was found).
                team_win[team_id] = int(winner_team_id == team_id)
                team_lose[team_id] = int(winner_team_id != team_id)
            return {'team_win': team_win, 'team_lose': team_lose}

        elif self._game_name == 'lol':
            # We need to pre-find team turret, dragon and baron kills;
            # None is kept for a team missing from data['teams'].
            turrets = {teams[0]: None, teams[1]: None}
            dragons = {teams[0]: None, teams[1]: None}
            barons = {teams[0]: None, teams[1]: None}
            for team_id in teams:
                for var in data['teams']:
                    if var['team']['id'] == team_id:
                        turrets[team_id] = var['tower_kills']
                        dragons[team_id] = var['dragon_kills']
                        barons[team_id] = var['baron_kills']
                        break

            return {'turret': turrets, 'dragon': dragons, 'baron': barons}

    def get_teams_data(self, prepared_data: dict, data: dict,
                       teams: list) -> dict:
        '''Assemble the final per-team stats from the prepared maps.

        :param prepared_data: output of :meth:`prepare_teams_data`
        :param data: raw game-detail payload (unused here, kept for the
                     interface)
        :param teams: the two data-source team ids
        :return: ``{team_id: stats_dict}``; every stats dict carries at
                 least 'data_src_team_id'.  Unknown game names no longer
                 raise KeyError — they yield only that id field.
        '''
        teams_data = {}
        for team_id in teams:
            team_stats = {}
            if self._game_name == 'csgo':
                team_stats = {
                    'bomb_plant': prepared_data['bomb_plant'][team_id],
                    'bomb_defuse': prepared_data['bomb_defuse'][team_id],
                    'round_win': prepared_data['round_win'][team_id],
                    'round_lose': prepared_data['round_lose'][team_id]
                }

            elif self._game_name == 'dota2':
                team_stats = {
                    'team_win': prepared_data['team_win'][team_id],
                    'team_lose': prepared_data['team_lose'][team_id]
                }

            elif self._game_name == 'lol':
                team_stats = {
                    'turret': prepared_data['turret'][team_id],
                    'dragon': prepared_data['dragon'][team_id],
                    'baron': prepared_data['baron'][team_id]
                }

            team_stats['data_src_team_id'] = team_id
            teams_data[team_id] = team_stats

        return teams_data

    def prepare_players_data(self, data: dict) -> dict:
        '''No pre-computation is needed for provider2 player payloads;
        return an empty dict to satisfy the interface.'''
        return {}

    def get_players_data(self, prepared_data: dict, data: dict) -> dict:
        '''Build per-player stats keyed by the data-source player id.

        :param prepared_data: output of :meth:`prepare_players_data`
                              (unused for this provider)
        :param data: raw payload containing a 'players' list
        :return: ``{data_src_player_id: stats_dict}``
        '''
        players_data = {}
        for player in data['players']:
            player_data = {
                'data_src_player_id': player['player']['id'],
                'kill': player['kills'],
                'assist': player['assists'],
                'death': player['deaths']
            }
            if self._game_name == 'dota2':
                player_data['tower_kill'] = player['tower_kills']
                # NOTE(review): roshan kills sourced from 'neutral_creep'
                # — looks provider-specific; confirm against provider2
                # API docs.
                player_data['roshan_kill'] = player['neutral_creep']
            elif self._game_name == 'lol':
                player_data['creep_score'] = None
                if 'kill_counters' in player and 'neutral_minions' in player[
                        'kill_counters']:
                    player_data['creep_score'] = player['kill_counters'][
                        'neutral_minions']
            players_data[player_data['data_src_player_id']] = player_data

        return players_data