Exemple #1
0
 def __init__(self):
     """Build the match cache from the persisted global cache."""
     # Pull the latest persisted cache down before reading it locally.
     download_global_cache()
     log("creating match cache")
     cached_global = get_global_ranked_match_cache()
     self._global_data = GlobalRankedData(global_dict=cached_global)
     self._player_ticks = get_player_ticks()
     # Players with modified caches still pending a save().
     self._players_to_save = []
Exemple #2
0
def ensure_test_data():
    """Idempotently seed the test DB with one sample player and rank row."""
    global _test_data_created
    if _test_data_created:
        return
    _test_data_created = True
    ensure_test_tables()
    log('creating test data')
    # Skip seeding when the sample player is already present.
    existing = Player.select().where(Player.cfn_id == sample_player_id)
    if existing.exists():
        log('existing test data found')
        return
    create_test_player(
        cfn_id=sample_player_id,
        name='TestPlayer',
        subscribed=True,
    )
    Rank.create(
        id=1,
        created_at=datetime.utcnow(),
        player_cfn=sample_player_id,
        league_points=100,
        placement=1,
        favorite_character=1,
    )
Exemple #3
0
 def __init__(self):
     """Create the match cache.

     Downloads the persisted global cache, wraps it in GlobalRankedData,
     and initializes per-player bookkeeping.
     """
     # Fetch the persisted cache before reading it locally.
     download_global_cache()
     log("creating match cache")
     global_dict = get_global_ranked_match_cache()
     self._global_data = GlobalRankedData(global_dict=global_dict)
     self._player_ticks = get_player_ticks()
     # Players with modified caches pending a save() call.
     self._players_to_save = []
Exemple #4
0
def batch_query_match_history(session, pids):
    """Query CFN match history for *pids* in batches of 50.

    Persists fetched matches as it goes, stamps the queried players as
    updated, then rebuilds the match cache inside one transaction.

    Returns True if any per-player query failed (treated as non-fatal).
    """
    pids_list = list(pids)
    total = len(pids_list)
    batch = 50
    player_matches = {}
    any_error = False
    log("Begin querying %s players" % total)
    for idx in range(0, total, batch):
        # min() replaces the hand-rolled conditional clamp.
        bound = min(idx + batch, total)
        sub_pids = pids_list[idx:bound]
        log('Attempting to query players %s-%s of %s' % (idx, bound, total))
        is_error, matches, player_matches_dict = _bulk_query_match_history(
            session, sub_pids
        )
        any_error = any_error or is_error
        save_match_list(matches)
        player_matches.update(player_matches_dict)
    set_player_updated_at(player_matches.keys())
    with DATABASE.atomic():
        cache = MatchCache()
        for pid, matches in player_matches.items():
            cache.process_matches(pid, matches)
        cache.save()
    return any_error
Exemple #5
0
def ensure_test_data():
    """Seed the test database once per process with sample rows."""
    global _test_data_created
    if _test_data_created:
        return
    _test_data_created = True
    ensure_test_tables()
    log('creating test data')
    sample_query = Player.select().where(Player.cfn_id == sample_player_id)
    if sample_query.exists():
        log('existing test data found')
        return
    create_test_player(
        cfn_id=sample_player_id,
        name='TestPlayer',
        subscribed=True,
    )
    Rank.create(
        id=1,
        created_at=datetime.utcnow(),
        player_cfn=sample_player_id,
        league_points=100,
        placement=1,
        favorite_character=1,
    )
Exemple #6
0
def bulk_insert(model, data_source):
    """Insert *data_source* rows into *model* in batches of 1000.

    Runs inside a single transaction so a failure rolls everything back.
    """
    total = len(data_source)
    batch = 1000
    log("Trying to insert %s rows" % total)
    with DATABASE.atomic():
        for idx in range(0, total, batch):
            model.insert_many(data_source[idx:idx + batch]).execute()
            # Clamp so the final batch doesn't over-report progress
            # (idx + batch can exceed total).
            log("Inserted %s" % min(idx + batch, total))
Exemple #7
0
 def save(self):
     """Persist every dirty player cache and the global cache, then reset."""
     log("saving match cache")
     for dirty_player in self._players_to_save:
         save_player_cache(dirty_player)
     # this implicitly uploads
     save_global_ranked_match_cache(self.global_data)
     # Nothing is pending once everything above succeeded.
     self._players_to_save = []
     log("cache successfully saved")
Exemple #8
0
 def save(self):
     """Write all pending player caches plus the global cache to storage.

     Clears the pending-save list after a successful save.
     """
     log("saving match cache")
     for player in self._players_to_save:
         save_player_cache(player)
     # this implicitly uploads
     save_global_ranked_match_cache(self.global_data)
     self._players_to_save = []
     log("cache successfully saved")
Exemple #9
0
def set_player_updated_at(pids):
    """Stamp updated_at with the current time for every id in *pids*."""
    log("setting players as updated")
    if not pids:
        log('no players to update!')
        return
    timestamp = _get_now_str()
    update = Player.update(updated_at=timestamp)
    update.where(Player.cfn_id << list(pids)).execute()
Exemple #10
0
def prod_database():
    """Open a pooled MySQL connection to the production database."""
    log('connecting to prod db')
    pool_options = {
        'host': ENVARS.host,
        'user': ENVARS.username,
        'password': ENVARS.password,
        'max_connections': 32,  # 50 max on t2.micro
        'stale_timeout': 300,  # 5 minutes.
    }
    return PooledMySQLDatabase(ENVARS.database_name, **pool_options)
Exemple #11
0
def prod_database():
    """Return a PooledMySQLDatabase for production, configured from ENVARS."""
    log('connecting to prod db')
    return PooledMySQLDatabase(
        ENVARS.database_name,
        host=ENVARS.host,
        user=ENVARS.username,
        password=ENVARS.password,
        max_connections=32,  # 50 max on t2.micro
        stale_timeout=300,  # 5 minutes.
    )
Exemple #12
0
def set_subscribed(pids):
    """Mark every currently-unsubscribed player in *pids* as subscribed."""
    log("updating affected players to subscribed")
    if not pids:
        return
    update = Player.update(subscribed=1)
    update.where(
        Player.subscribed == 0, Player.cfn_id << list(pids)
    ).execute()
Exemple #13
0
def shut_down(process_id):
    """Shut the server down if *process_id* matches this process.

    Truncates the pid file, then schedules the kill one second out so
    the HTTP response can still be delivered.  Aborts with 404 on a
    mismatched id.
    """
    if process_id == str(PROCESS_ID):
        log('signal received! shutting down...')
        with open('temp/server.pid', 'wt') as f:
            f.write('')
        # Pass the callable directly; the lambda wrapper added nothing.
        threading.Timer(1, _kill_server).start()
        return 'killed: %s' % PROCESS_ID
    else:
        log('nice try: %r != %r' % (process_id, PROCESS_ID))
        abort(404)
Exemple #14
0
def _insert_missing_matches(match_source):
    """Bulk-insert only the matches not already present in the DB."""
    log("determining missing matches")
    source_ids = [row['cfn_id'] for row in match_source]
    existing_mids, missing_mids = _determine_missing_cfn_ids(
        Match, source_ids
    )
    to_insert = [
        row for row in match_source if row['cfn_id'] in missing_mids
    ]
    log("inserting missing matches")
    bulk_insert(Match, to_insert)
Exemple #15
0
def _insert_missing_players(player_source):
    """Bulk-insert players absent from the DB; return existing cfn_ids."""
    log("determining missing players")
    source_ids = [row['cfn_id'] for row in player_source]
    existing_pids, missing_pids = _determine_missing_cfn_ids(
        Player, source_ids
    )
    to_insert = [
        row for row in player_source if row['cfn_id'] in missing_pids
    ]
    log("inserting missing players")
    bulk_insert(Player, to_insert)
    return existing_pids
Exemple #16
0
def _query_rival(session, query_name, player_name):
    """Search CFN for *query_name*; subscribe to and return the canonical
    name of the case-insensitive match on *player_name*, or None."""
    url = RIVAL_URL % ('%s', query_name)
    res = query_cfn(session, url)
    candidates = res.json()['response'][0]['searchresult']
    target = player_name.lower()
    for candidate in candidates:
        cfn_model = CFNPlayerSearchModel(candidate)
        log('logging cfn player')
        log(cfn_model.__dict__)
        if cfn_model.name.lower() == target:
            subscribe_to_new_player(cfn_model)
            return cfn_model.name
    return None
Exemple #17
0
def _query_rival(session, query_name, player_name):
    """Search CFN for *query_name* and subscribe to the player whose name
    equals *player_name* case-insensitively.

    Returns the matched player's canonical name, or None when no search
    result matches.
    """
    url = RIVAL_URL % ('%s', query_name)
    res = query_cfn(session, url)
    matching_players = res.json()['response'][0]['searchresult']
    for player in matching_players:
        cfn_model = CFNPlayerSearchModel(player)
        log('logging cfn player')
        log(cfn_model.__dict__)
        if cfn_model.name.lower() == player_name.lower():
            subscribe_to_new_player(cfn_model)
            return cfn_model.name
    return None
Exemple #18
0
def bulk_insert(model, data_source):
    """Insert *data_source* rows into *model* in 1000-row batches
    within a single transaction."""
    total = len(data_source)
    batch = 1000
    log("Trying to insert %s rows" % total)
    with DATABASE.atomic():
        for idx in range(0, total, batch):
            (
                model
                .insert_many(data_source[idx:idx + batch])
                .execute()
            )
            # min() keeps the progress log accurate on the final batch.
            log("Inserted %s" % min(idx + batch, total))
Exemple #19
0
def _insert_missing_matches(match_source):
    """Insert any matches from *match_source* that the DB lacks."""
    log("determining missing matches")
    ids = [entry['cfn_id'] for entry in match_source]
    existing_mids, missing_mids = _determine_missing_cfn_ids(Match, ids)
    new_rows = [entry for entry in match_source
                if entry['cfn_id'] in missing_mids]
    log("inserting missing matches")
    bulk_insert(Match, new_rows)
Exemple #20
0
def set_player_updated_at(pids):
    """Set updated_at to the current time for each player id in *pids*."""
    log("setting players as updated")
    if not pids:
        log('no players to update!')
        return
    now_str = _get_now_str()
    (Player
     .update(updated_at=now_str)
     .where(Player.cfn_id << list(pids))
     .execute())
Exemple #21
0
def _insert_missing_players(player_source):
    """Insert unseen players; return the cfn_ids that already existed."""
    log("determining missing players")
    ids = [entry['cfn_id'] for entry in player_source]
    existing_pids, missing_pids = _determine_missing_cfn_ids(Player, ids)
    new_rows = [entry for entry in player_source
                if entry['cfn_id'] in missing_pids]
    log("inserting missing players")
    bulk_insert(Player, new_rows)
    return existing_pids
Exemple #22
0
def _query_player_match_history(session, pid):
    """Fetch and return the raw match results list for player *pid*.

    Raises MatchQueryException on a non-200 response or when the
    expected JSON structure cannot be extracted.
    """
    url = MATCH_URL % ('%s', pid)
    res = query_cfn(session, url)
    if res.status_code != 200:
        log("got a non-200 response:\n%s" % res.text)
        raise MatchQueryException()
    try:
        # Any failure here (bad JSON, missing keys) is treated the same.
        return res.json()['response'][0]['matchresults']
    except Exception as e:
        log_exception(e)
        log('failed to extract json, dumping res:\n%s' % res.text)
        raise MatchQueryException()
Exemple #23
0
def backload_all_players():
    """Backload match history for a batch of stale players, then save."""
    log('backloading match_last_updated players')
    log('building cache')
    cache = MatchCache()
    batch_size = 500
    stale_ids = load_player_ids_to_backfill(batch_size=batch_size)
    log('backloading %s players...' % batch_size)
    for pid in stale_ids:
        backload_player(cache, pid)
    log("saving")
    cache.save()
    log("done")
Exemple #24
0
def backload_volatile_players():
    """Backload match history for a batch of volatile players, then save."""
    log('backloading volatile players')
    log('building cache')
    cache = MatchCache()
    batch_size = 600
    volatile_ids = load_volatile_player_ids(batch_size=batch_size)
    log('backloading %s players...' % batch_size)
    for pid in volatile_ids:
        backload_player(cache, pid)
    log("saving")
    cache.save()
    log("done")
Exemple #25
0
def main():
    """Check and report the health of the stored CFN cookie."""
    with create_session() as session:
        cookie_data, res = query_cookie_status(session)
        # Dump the raw response pieces in order for inspection.
        for detail in (res.status_code, res.headers, res.text):
            log(detail)
        log("cookie is good: %s" % (not cookie_data.is_invalid))
Exemple #26
0
def _query_player_match_history(session, pid):
    """Return the match results list for player *pid* from CFN.

    Raises:
        MatchQueryException: on a non-200 response, or when the expected
            JSON structure cannot be extracted from the body.
    """
    url = MATCH_URL % ('%s', pid)
    res = query_cfn(session, url)
    if res.status_code != 200:
        log("got a non-200 response:\n%s" % res.text)
        raise MatchQueryException()
    try:
        match_results = res.json()['response'][0]['matchresults']
    except Exception as e:
        # Bad JSON or missing keys both land here; dump the body for triage.
        log_exception(e)
        log('failed to extract json, dumping res:\n%s' % res.text)
        raise MatchQueryException()
    else:
        return match_results
Exemple #27
0
def save_match_list(match_list):
    """De-duplicate *match_list* by match id and insert any new matches."""
    log("arranging fetched data")
    # Keyed by int match id; later duplicates overwrite earlier ones,
    # exactly as the original insertion loop did.
    matches = {int(m['matchid']): m for m in match_list}

    log("processing matches")
    match_source = [
        {
            'cfn_id': mid,
            'left_player_cfn_id': int(match['leftstartplayer']),
            'right_player_cfn_id': int(match['rightstartplayer']),
            'data': json.dumps(match),
        }
        for mid, match in matches.items()
    ]
    _insert_missing_matches(match_source)
Exemple #28
0
def save_match_list(match_list):
    """De-duplicate raw CFN matches by match id and insert the new ones."""
    log("arranging fetched data")
    # Keyed by int match id; duplicate fetches collapse to one entry.
    matches = dict()
    for match in match_list:
        mid = int(match['matchid'])
        matches[mid] = match

    log("processing matches")
    # Shape rows for the Match model; the raw payload is stored as JSON text.
    match_source = [
        {
            'cfn_id': mid,
            'left_player_cfn_id': int(match['leftstartplayer']),
            'right_player_cfn_id': int(match['rightstartplayer']),
            'data': json.dumps(match),
        } for mid, match in matches.items()
    ]
    _insert_missing_matches(match_source)
Exemple #29
0
def send_error_message(message):
    """Email *message* plus the recent logs via the Mailgun API.

    Returns the requests.post response.
    """
    log('sending error email')
    domain = ENVARS.mailgun_domain_name
    text = "server: %s\n\n%s\n\nlogging:\n\n%s" % (
        ENVARS.server_name, message, read_logs()
    )
    payload = {
        "from": "FGCBOT <robot@%s>" % domain,
        "to": [ENVARS.mailgun_recipient],
        "subject": "FGC STATUS %s" % convert_dt_to_nyc(),
        "text": text,
    }
    return requests.post(
        "https://api.mailgun.net/v3/%s/messages" % domain,
        auth=("api", ENVARS.mailgun_api_key),
        data=payload,
    )
Exemple #30
0
def _bulk_query_match_history(session, pids):
    """Fetch match history for each pid in *pids*.

    Returns (is_error, flat_match_list, {pid: matches}).  A failed query
    records an empty list for that player instead of aborting the batch.
    """
    match_list = []
    total = len(pids)
    player_matches = {}
    is_error = False
    # enumerate replaces the hand-rolled 1-based counter.
    for count, pid in enumerate(pids, start=1):
        log("Fetching match history for %s (%s/%s)" % (pid, count, total))
        try:
            matches = _query_player_match_history(session, pid)
        except MatchQueryException:
            # pretend we got 0 matches so that player gets marked as "updated"
            # this prevents a bunch of bad players from starving the rest
            matches = []
            is_error = True
        match_list.extend(matches)
        player_matches[pid] = matches
    return is_error, match_list, player_matches
Exemple #31
0
def _bulk_query_match_history(session, pids):
    """Fetch match history for each pid in *pids*.

    Returns (is_error, flat_match_list, {pid: matches}).  A failed query
    records an empty list for that player instead of aborting the batch.
    """
    match_list = []
    total = len(pids)
    player_matches = {}
    is_error = False
    # enumerate replaces the hand-rolled 1-based counter.
    for count, pid in enumerate(pids, start=1):
        log("Fetching match history for %s (%s/%s)" % (pid, count, total))
        try:
            matches = _query_player_match_history(session, pid)
        except MatchQueryException:
            # pretend we got 0 matches so that player gets marked as "updated"
            # this prevents a bunch of bad players from starving the rest
            matches = []
            is_error = True
        match_list.extend(matches)
        player_matches[pid] = matches
    return is_error, match_list, player_matches
Exemple #32
0
def start_server():
    """Select, configure, and launch the bottle app for this instance."""
    if ENVARS.is_web_testing:
        bottle_web.merge(bottle_api)
    if ENVARS.is_web_server():
        app, setup = bottle_web, setup_web
    elif ENVARS.is_api_server():
        app, setup = bottle_api, setup_api
    else:
        log('no webserver for instance type: %s' % ENVARS.instance_type)
        return
    setup()
    run(
        app,
        server='cherrypy',
        host='localhost',
        port=5555,
        debug=ENVARS.debug,
    )
Exemple #33
0
def fix_player_names(session):
    """Look up and store display names for players whose name is blank."""
    nameless = Player.select().where(Player.name == '')
    player_ids = [p.cfn_id for p in nameless]
    log("found %s players with missing names" % len(player_ids))

    if len(player_ids) == 0:
        return
    for pid in player_ids:
        url = LICENSE_URL % ('%s', pid)
        res = query_cfn(session, url)
        try:
            new_name = res.json()['response'][0]['displayid']
        except json.decoder.JSONDecodeError as e:
            # Dump the unparsable body before propagating.
            log(res.text)
            raise e
        log('%s -> %s' % (pid, new_name))
        Player.update(name=new_name).where(Player.cfn_id == pid).execute()
Exemple #34
0
def batch_query_match_history(session, pids):
    """Query CFN match history for *pids* in batches of 50.

    Persists fetched matches as it goes, stamps the queried players as
    updated, then rebuilds the match cache inside one transaction.

    Returns True if any per-player query failed (treated as non-fatal).
    """
    pids_list = list(pids)
    total = len(pids_list)
    batch = 50
    player_matches = {}
    any_error = False
    log("Begin querying %s players" % total)
    for idx in range(0, total, batch):
        # min() replaces the hand-rolled conditional clamp.
        bound = min(idx + batch, total)
        sub_pids = pids_list[idx:bound]
        log('Attempting to query players %s-%s of %s' % (idx, bound, total))
        is_error, matches, player_matches_dict = _bulk_query_match_history(
            session, sub_pids)
        any_error = any_error or is_error
        save_match_list(matches)
        player_matches.update(player_matches_dict)
    set_player_updated_at(player_matches.keys())
    with DATABASE.atomic():
        cache = MatchCache()
        for pid, matches in player_matches.items():
            cache.process_matches(pid, matches)
        cache.save()
    return any_error
Exemple #35
0
def fix_player_names(session):
    """Backfill Player.name from CFN for players stored with a blank name.

    Raises json.decoder.JSONDecodeError if a CFN response body is not JSON.
    """
    players = (Player.select().where(Player.name == ''))
    player_ids = [p.cfn_id for p in players]
    log("found %s players with missing names" % len(player_ids))

    if len(player_ids) == 0:
        return
    for pid in player_ids:
        url = LICENSE_URL % ('%s', pid)
        res = query_cfn(session, url)
        try:
            new_name = res.json()['response'][0]['displayid']
        except json.decoder.JSONDecodeError as e:
            # Dump the unparsable body before propagating.
            log(res.text)
            raise e
        log('%s -> %s' % (pid, new_name))
        (Player.update(name=new_name).where(Player.cfn_id == pid).execute())
Exemple #36
0
    log_exception,
    set_log_file,
)
from py.src.settings import (
    DATABASE,
)
from py.src.message import (
    send_error_message,
)
from py.src.cfn.api import (
    create_session,
    test_cookie_status,
    record_top_players,
)


if __name__ == "__main__":
    # Entry point for the scheduled ranking task.
    set_log_file("task_ranking")
    log("task_ranking begin")
    with create_session() as session:
        try:
            # Validate the CFN cookie before touching the database.
            test_cookie_status(session)
            DATABASE.connect()
            record_top_players(session)
        except Exception as e:
            # Any failure is fatal for this run: log it and email an alert.
            log_exception(e)
            send_error_message("FATAL ERROR when pulling rank data")
    # Close the DB even when the try block failed before/after connecting.
    if not DATABASE.is_closed():
        DATABASE.close()
    log("task_ranking complete")
Exemple #37
0
def ensure_test_tables():
    """Create the Player/Match/Rank tables if they don't exist yet."""
    ensure_testing()
    if Player.table_exists():
        return
    log('creating test tables')
    DATABASE.create_tables([Player, Match, Rank])
Exemple #38
0
def load_crontab():
    """Install this instance type's crontab file via the crontab CLI."""
    crontab_path = "install/crontab/crontab-%s" % ENVARS.instance_type
    process_out = subprocess.check_output(["crontab", crontab_path])
    log("crontab out: %s" % process_out)
Exemple #39
0
)
from py.src.cfn.api import (
    create_session,
    test_cookie_status,
    batch_query_match_history,
    fix_player_names,
)
from py.src.store import (
    load_subscribed_player_ids,
)

# How many subscribed players to refresh on each task run.
PLAYER_BATCH_SIZE = 300

if __name__ == "__main__":
    # Entry point for the scheduled match-pulling task.
    set_log_file("task_matches")
    log("task_matches begin")
    with create_session() as session:
        try:
            # Validate the CFN cookie before touching the database.
            test_cookie_status(session)
            DATABASE.connect()
            pids = load_subscribed_player_ids(batch_size=PLAYER_BATCH_SIZE)
            any_error = batch_query_match_history(session, pids)
            fix_player_names(session)
        except Exception as e:
            # Fatal failure: log it and email an alert.
            log_exception(e)
            send_error_message("FATAL ERROR when pulling match data")
        else:
            if any_error:
                # suppress non-fatal errors for now
                # send_error_message('non-fatal errors when pulling matches')
                pass
Exemple #40
0
def test_local_database():
    """Return a SqliteDatabase backed by the on-disk test database."""
    log('connecting to local test db')
    db_path = 'local/test.db'
    return SqliteDatabase(db_path)
Exemple #41
0
def _download_file(file_name, file_path):
    """Download *file_name* from the storage bucket to local *file_path*."""
    log("downloading %s to %s" % (file_name, file_path))
    bucket = _connect_to_bucket()
    bucket.download_file(Key=file_name, Filename=file_path)
Exemple #42
0
def ensure_test_tables():
    """Create the test tables on first use (no-op when already present)."""
    ensure_testing()
    # Player's existence is used as a proxy for all three tables.
    if not Player.table_exists():
        log('creating test tables')
        DATABASE.create_tables([Player, Match, Rank])
Exemple #43
0
def demo_database():
    """Return the local demo SqliteDatabase."""
    log('connecting to local demo db')
    demo_path = 'local/demo.db'
    return SqliteDatabase(demo_path)
Exemple #44
0
def load_crontab():
    """Install this instance type's crontab file using the crontab CLI."""
    crontab_path = "install/crontab/crontab-%s" % ENVARS.instance_type
    # check_output raises CalledProcessError on a nonzero exit status.
    process_out = subprocess.check_output(
        ["crontab", crontab_path],
    )
    log("crontab out: %s" % process_out)
Exemple #45
0
def test_temp_database():
    """Return an in-memory SqliteDatabase for fast, throwaway tests."""
    log('connecting to in-memory test db')
    memory_uri = ':memory:'
    return SqliteDatabase(memory_uri)
Exemple #46
0
def set_subscribed(pids):
    """Flip subscribed to 1 for any player in *pids* not already subscribed."""
    log("updating affected players to subscribed")
    if not pids:
        return
    (Player.update(subscribed=1).where(Player.subscribed == 0,
                                       Player.cfn_id << list(pids)).execute())
Exemple #47
0
def invalidate_cache():
    """Force a cache rebuild and log the refresh."""
    _refresh_cache()
    log('cache refreshed')
Exemple #48
0
def test_local_database():
    """Return a SqliteDatabase backed by the local test file."""
    log('connecting to local test db')
    return SqliteDatabase('local/test.db')
Exemple #49
0
def test_temp_database():
    """Return an in-memory SqliteDatabase for tests."""
    log('connecting to in-memory test db')
    return SqliteDatabase(':memory:')
Exemple #50
0
def _upload_file(file_name, file_path):
    """Upload the local file at *file_path* to the bucket as *file_name*."""
    log("uploading %s to %s" % (file_path, file_name))
    bucket = _connect_to_bucket()
    with open(file_path, 'rb') as data:
        bucket.put_object(Key=file_name, Body=data)
Exemple #51
0
def main():
    """Build the local demo database from csv files."""
    log('creating demo db from csv files')
    create_demo_db()
Exemple #52
0
def demo_database():
    """Return a SqliteDatabase backed by the local demo file."""
    log('connecting to local demo db')
    return SqliteDatabase('local/demo.db')