Example #1
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file.

# instructions for running this script:
# go to the gamedata/ directory
# PYTHONPATH=../gameserver ./make_ai_base_example.py
# (optional: use > to write output to a file instead of printing to the console)

import SpinJSON  # JSON reading/writing library
import SpinConfig
import AtomicFileWrite  # little library for safely overwriting files atomically
import sys, copy, getopt, os, random  # import some misc. Python libraries
import tr_base_generator_helper as bgh

# load in gamedata
gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename()))
ncells = 180 # presumably the base map size: placements happen on an ncells x ncells grid


class cannotFindCoordinateError(Exception):
    def __str__(self):
        return "could not find a valid coordinate for placement"


BASE_LAYOUT = {
    "sectors": [{
        "cluster": "toc_cluster",
        "positioning": "near_the_middle"
    }, {
Example #2
    do_unit_cost = False

    opts, args = getopt.gnu_getopt(
        sys.argv[1:], 'g:c:q', ['prune', 'optimize', 'dry-run', 'unit-cost'])

    for key, val in opts:
        if key == '-g': game_id = val
        elif key == '-c': commit_interval = int(val)
        elif key == '-q': verbose = False
        elif key == '--dry-run': dry_run = True
        elif key == '--prune': do_prune = True
        elif key == '--optimize': do_optimize = True
        elif key == '--unit-cost': do_unit_cost = True
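    # example invocation (the script's filename is not shown in this excerpt, so
    # 'store_to_sql.py' is a guess):
    #   ./store_to_sql.py -g tr -c 1000 --prune --dry-run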

    gamedata = SpinJSON.load(
        open(SpinConfig.gamedata_filename(override_game_id=game_id)))

    sql_util = SpinSQLUtil.MySQLUtil()
    if not verbose: sql_util.disable_warnings()

    cfg = SpinConfig.get_mysql_config(game_id + '_upcache')
    con = MySQLdb.connect(*cfg['connect_args'], **cfg['connect_kwargs'])
    store_table = cfg['table_prefix'] + game_id + '_store'
    store_daily_summary_table = cfg[
        'table_prefix'] + game_id + '_store_daily_summary'
    store_top_spenders_28d_table = cfg[
        'table_prefix'] + game_id + '_store_top_spenders_28d'
    unit_cost_table = cfg['table_prefix'] + game_id + '_unit_cost'
    unit_cost_daily_summary_table = cfg[
        'table_prefix'] + game_id + '_unit_cost_daily_summary'
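    # note: every derived table shares the cfg['table_prefix'] + game_id naming prefix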
Example #3
try:
    import simplejson as json # use simplejson when available, otherwise the stdlib json
except ImportError:
    import json
import SpinConfig

# load some standard Python libraries
import sys, time, calendar
import csv

# load SpinPunch Upcache S3 access library
import SpinS3, SpinUpcacheIO

# load gamedata so we can reference it if necessary
# e.g. gamedata['units']['motion_cannon']['armor']
gamedata = json.load(open(SpinConfig.gamedata_filename()))

time_now = int(time.time())


def stream_userdb():
    bucket, name = SpinConfig.upcache_s3_location(SpinConfig.game())
    return SpinUpcacheIO.S3Reader(SpinS3.S3(SpinConfig.aws_key_file()), bucket,
                                  name).iter_all()


# main program
if __name__ == '__main__':

    # days at which to evaluate spend_dX
    SPEND_MARKS = (0, 1, 3, 5, 7, 14, 30, 60, 90, 120)
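    # A minimal sketch of how SPEND_MARKS could be consumed (hypothetical field
    # names: this assumes each userdb row carries 'money_spent_at_time', a dict
    # mapping seconds-of-age to cumulative receipts, following the *_at_time
    # convention used elsewhere in upcache):
    def spend_at_mark(user, mark_days):
        cutoff = mark_days * 86400
        spent = 0.0
        for sage, amount in user.get('money_spent_at_time', {}).iteritems():
            if int(sage) < cutoff:
                spent = max(spent, amount) # cumulative, so keep the latest value inside the window
        return spent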
Example #4
    do_reset = False
    do_mongo_drop = False
    dry_run = 0

    opts, args = getopt.gnu_getopt(sys.argv[1:], 'g:c:q', ['reset','mongo-drop','dry-run','force'])

    for key, val in opts:
        if key == '-g': game_id = val
        elif key == '-c': commit_interval = int(val)
        elif key == '-q': verbose = False
        elif key == '--mongo-drop': do_mongo_drop = True
        elif key == '--reset': do_reset = True
        elif key == '--dry-run': dry_run = 1
        elif key == '--force': force = True

    gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id = game_id)))

    if not verbose: sql_util.disable_warnings()

    cfg = SpinConfig.get_pgsql_config(SpinConfig.config['game_id']+'_scores2')

    if (not force) and \
       (SpinConfig.in_maintenance_window(cfg, time_now = time_now) or SpinConfig.in_maintenance_window(cfg, time_now = time_now + 1800)): # allow for 30min to operate
        if verbose: print 'in database maintenance window, aborting'
        sys.exit(0)

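    # SpinSingletonProcess presumably ensures only one copy of this job runs at a time per game_id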
    with SpinSingletonProcess.SingletonProcess('scores2-to-sql-%s' % (game_id)):

        con = psycopg2.connect(*cfg['connect_args'], **cfg['connect_kwargs'])
        tbl = { 'player': cfg['table_prefix']+'player_scores2',
                'alliance': cfg['table_prefix']+'alliance_scores2' }
Example #5
def do_slave(task):
    date = task['date']
    game_id = task['game_id']
    verbose = task['verbose']
    dry_run = task['dry_run']
    commit_interval = task['commit_interval']

    start_time = SpinConfig.cal_to_unix((int(date[0:4]),int(date[4:6]),int(date[6:8])))
    end_time = start_time + 86400

    gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id=game_id)))
    STORE = {}
    for sku in gamedata['store']['catalog']: # populate STORE with catalog item prices
        get_store_items(STORE, sku)

    if verbose:
        print >> sys.stderr, 'converting date', date, 'start_time', start_time, 'end_time', end_time, '...'

    if not verbose: filterwarnings('ignore', category = MySQLdb.Warning)

    cfg = SpinConfig.get_mysql_config(game_id+'_upcache')
    con = MySQLdb.connect(*cfg['connect_args'], **cfg['connect_kwargs'])
    store_table = cfg['table_prefix']+game_id+'_store'

    s3 = SpinS3.S3(SpinConfig.aws_key_file())
    bucket = 'spinpunch-logs'

    batch = 0
    total = 0
    cur = con.cursor()

    for entry in s3.list_bucket(bucket, prefix='%s/%s-%s-metrics.json' % (date[0:6], SpinConfig.game_id_long(override_game_id=game_id), date)):
        filename = entry['name'].split('/')[-1]

        if verbose: print >> sys.stderr, 'reading', filename

        if entry['name'].endswith('.zip'):
            tf = tempfile.NamedTemporaryFile(prefix='old_metrics_to_mysql-'+filename, suffix='.zip')
            s3.get_file(bucket, entry['name'], tf.name)
            unzipper = subprocess.Popen(['unzip', '-q', '-p', tf.name],
                                        stdout = subprocess.PIPE)

        elif entry['name'].endswith('.gz'):
            fd = s3.get_open(bucket, entry['name'], allow_keepalive = False)
            unzipper = subprocess.Popen(['gunzip', '-c', '-'],
                                        stdin = fd.fileno(),
                                        stdout = subprocess.PIPE)
        else:
            continue # neither .zip nor .gz: skip, otherwise 'unzipper' would be stale or undefined below

        for line in unzipper.stdout.xreadlines():
            if '5120_buy_item' in line: # cheap substring pre-filter before paying for a JSON parse
                entry = SpinJSON.loads(line)
                if entry['event_name'] != '5120_buy_item': continue

                if 'price_currency' not in entry:
                    # old metric, need to fill in manually
                    if entry['items'][0]['spec'] in STORE:
                        entry['price_currency'] = 'item:token'
                        entry['price'] = STORE[entry['items'][0]['spec']]

                if verbose: print >> sys.stderr, SpinJSON.dumps(entry)

                if entry.get('price_currency','unknown') != 'item:token': continue


                if '_id' in entry:
                    entry_id = entry['_id']
                else:
                    id_generator.set_time(int(time.time()))
                    entry_id = id_generator.generate() # arbitrary

                assert len(entry['items']) == 1
                item = entry['items'][0]
                keyvals = [('_id', entry_id),
                           ('time', entry['time']),
                           ('user_id', entry['user_id']),
                           ('price', entry['price']),
                           ('currency', entry['price_currency']),
                           ('item', item['spec']),
                           ('stack', item.get('stack',1))]

                query = "INSERT INTO " + store_table + \
                            "("+', '.join(['`'+k+'`' for k,v in keyvals])+")"+ \
                            " VALUES ("+', '.join(['%s'] * len(keyvals)) +")"
                if dry_run:
                    print >> sys.stderr, query, [v for k,v in keyvals]
                else:
                    cur.execute(query, [v for k,v in keyvals])

                    batch += 1
                    total += 1
                    if commit_interval > 0 and batch >= commit_interval:
                        batch = 0
                        con.commit()
                        cur = con.cursor()
                        if verbose: print >> sys.stderr, total, 'inserted'

    if not dry_run:
        con.commit()
Example #6
def main(args):
    global gamedata
    global basedata
    global hivedata
    global lootdata
    global time_now

    opts, args = getopt.gnu_getopt(args, 'cg:', [])

    game_id = 'ALL'
    output = 'txt'

    for key, val in opts:
        if key == '-g':
            game_id = val
        elif key == '-c':
            output = 'csv'

    time_now = int(time.time())

    date_now = datetime.datetime.utcfromtimestamp(time_now)
    gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename()))
    cur_week = SpinConfig.get_pvp_week(gamedata['matchmaking']['week_origin'], time_now)
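    # day numbering apparently rolls over at 17:00 GMT rather than at midnight;
    # with these offsets, day 1 begins on Thursday at 5pm GMT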
    today5pm = SpinConfig.cal_to_unix(SpinConfig.unix_to_cal(time_now)) + 17*60*60
    if time_now < today5pm:
        cur_day = ((date_now.weekday() + 3) % 7 + 1)
    else:
        cur_day = ((date_now.weekday() + 4) % 7 + 1)

    if output == 'txt':
        print 'Sauron v0.01.015 | %s, %s %s (Week %s, Day %s) %s GMT:<br>' % (date_now.strftime('%A')[:3].upper(), date_now.strftime("%B")[:3].upper(), date_now.strftime('%d, %Y'), cur_week, cur_day, date_now.strftime('%H:%M:%S'))
        print '======================================================================='

        print '\n  TITLES :\n'
        print '    TR  - Thunder Run'
        print '    BFM - Battlefront Mars'
        print ''
        print '  ALL TITLES :\n'
        print '    TEST CALENDAR (Coming soon)'
        print '    EVENT CALENDAR (Coming soon)'
        print ''

        print '======================================================================='
        print '  LEGEND'
        print '=======================================================================\n'
        print '  EVENT DETAILS :\n'
        print '    ONP: Ops Needs Points event'
        print '    IMM: Immortal event'
        print '    TUT: Tutorial event'
        print '    ID:  AI base/attack id'
        print '    UNT: Number of AI units'
        print '    DPS: Total harmful damage per second of AI units/turrets'
        print '    HLT: Total max health of AI units/turrets'
        print '    SPC: Total unit space taken up by AI units'
        print '    TUR: Number of turrets'
        print '    SPT: Total sprite count of AI buildings, units, and scenery for gauging frame rates.'
        print '    CCL: Estimated difficulty in terms of CC level.'
        print ''

    if output == 'csv':
        print 'Level, Type, Base ID, Estimate CCL, ONP, Unit Count, Unit Health, Unit DPS, Unit Space, Turret Count, Turret Health, Turret DPS, Sprite Count, Loot'


    if game_id == 'ALL':
        for id in GAME_IDS:
            try:
                gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id = id)))
                basedata = SpinJSON.load(open('./%s/built/%s_ai_bases_compiled.json' % (id, id)))
                hivedata = SpinJSON.load(open('./%s/built/%s_hives_compiled.json' % (id, id)))
                lootdata = SpinJSON.load(open('./%s/built/%s_loot_tables_compiled.json' % (id, id)))
            except IOError:
                print 'ERROR: Can\'t find compiled gamedata files for %s. Please run cd ../gameserver; ./make-gamedata.sh -u -g %s\n' % (id, id)
                return

            if output == 'txt':
                print_event_details_txt(id)
            elif output == 'csv':
                print_event_details_csv(id)
    else:
        if game_id not in GAME_IDS:
            print 'Invalid game id: %s' % game_id
            return

        try:
            gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id = game_id)))
            basedata = SpinJSON.load(open('./%s/built/%s_ai_bases_compiled.json' % (game_id, game_id)))
            hivedata = SpinJSON.load(open('./%s/built/%s_hives_compiled.json' % (game_id, game_id)))
            lootdata = SpinJSON.load(open('./%s/built/%s_loot_tables_compiled.json' % (game_id, game_id)))
        except IOError:
            print 'ERROR: Can\'t find compiled gamedata files for %s. Please run cd ../gameserver; ./make-gamedata.sh -u -g %s\n' % (game_id, game_id)
            return

        if output == 'txt':
            print_event_details_txt(game_id)
        elif output == 'csv':
            print_event_details_csv(game_id)
Example #7
def do_slave(input):
    cache = open_cache(input['game_id'], input['cache_info'])
    batch = 0
    total = 0

    gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id = input['game_id'])))
    gamedata['ai_bases'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('ai_bases_compiled.json', override_game_id = input['game_id'])))
    gamedata['loot_tables'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('loot_tables.json', override_game_id = input['game_id'])))

    if input['mode'] == 'get_fields':
        fields = {'money_spent': 'FLOAT4', # force this column into existence because analytics_views.sql depends on it
                  'account_creation_time': 'INT8', # same here
                  'country_tier': 'CHAR(1)', 'country': 'CHAR(2)',
                  'acquisition_campaign': 'VARCHAR(64)',
                  'acquisition_ad_skynet': 'VARCHAR(128)',

                  # these fields are extracted from compound objects inside of "user"
                  'connection_method': 'VARCHAR(32)',
                  'last_ping': 'FLOAT4',
                  'last_direct_ssl_ping': 'FLOAT4',
                  'playfield_speed': 'INT2',
                  }
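        # every other column is discovered dynamically by scanning the fields of each user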
        for user in cache.iter_segment(input['segnum']):
            for key, val in user.iteritems():
                if key not in fields:
                    field = setup_field(gamedata, key, val, field_mode = input['field_mode'])
                    if field is not None:
                        fields[key] = field
            batch += 1
            total += 1
            if batch >= 1000:
                batch = 0
                if input['verbose']: print >> sys.stderr, 'seg', input['segnum'], 'user', total
        return fields

    elif input['mode'] == 'get_rows':
        sql_util = SpinSQLUtil.MySQLUtil()
        if not input['verbose']: sql_util.disable_warnings()
        sorted_field_names = input['sorted_field_names']
        cfg = input['dbconfig']
        con = MySQLdb.connect(*cfg['connect_args'], **cfg['connect_kwargs'])
        cur = con.cursor()

        # buffer up keyvals to be updated in the achievement tables
        upgrade_achievement_counters = {}

        def flush():
            con.commit() # commit other tables first

            # MySQL often throws deadlock exceptions when doing upserts that reference existing rows (!)
            # in the upgrade_achievements table, so we need to loop on committing these updates
            deadlocks = 0

            while True:
                try:
                    cur.executemany("INSERT INTO "+sql_util.sym(input['upgrade_achievement_table']) + \
                                    " (" + ','.join([x[0] for x in sql_util.summary_out_dimensions()]) + ", kind, spec, level, is_maxed, num_players) " + \
                                    " VALUES (" + ','.join(['%s'] * len(sql_util.summary_out_dimensions())) + ", %s, %s, %s, %s, %s) " + \
                                    " ON DUPLICATE KEY UPDATE num_players = num_players + %s",
                                    [k + (v,v) for k,v in upgrade_achievement_counters.iteritems()])
                    con.commit()
                    upgrade_achievement_counters.clear()
                    break
                except MySQLdb.OperationalError as e:
                    if e.args[0] == 1213: # deadlock
                        con.rollback()
                        deadlocks += 1
                        continue
                    else:
                        raise

            if input['verbose']: print >> sys.stderr, 'seg', input['segnum'], total, 'flushed', deadlocks, 'deadlocks'

        for user in cache.iter_segment(input['segnum']):
            user_id = user['user_id']
            keys = [x for x in sorted_field_names if x in user]
            values = [user[x] for x in keys]

            # manual parsing of sprobe fields
            if ('last_sprobe_result' in user):
                connection_method = None
                if ('connection' in user['last_sprobe_result']['tests']):
                    connection_method = user['last_sprobe_result']['tests']['connection'].get('method',None)
                    if connection_method:
                        keys.append('connection_method')
                        values.append(connection_method)
                        if (connection_method in user['last_sprobe_result']['tests']) and ('ping' in user['last_sprobe_result']['tests'][connection_method]):
                            keys.append('last_ping')
                            values.append(user['last_sprobe_result']['tests'][connection_method]['ping'])

                if ('direct_ssl' in user['last_sprobe_result']['tests']) and ('ping' in user['last_sprobe_result']['tests']['direct_ssl']):
                    keys.append('last_direct_ssl_ping')
                    values.append(user['last_sprobe_result']['tests']['direct_ssl']['ping'])

            # manual parsing of other compound fields
            prefs = user.get('player_preferences', None)
            if prefs:
                if 'playfield_speed' in prefs:
                    keys.append('playfield_speed')
                    values.append(prefs['playfield_speed'])

            cur.execute("INSERT INTO " + input['upcache_table'] + \
                        "(user_id, "+', '.join(['`'+x+'`' for x in keys])+")"+ \
                        " VALUES (%s, "+', '.join(['%s'] * len(values)) +")",
                        [user_id,] + values)

            # we need the summary dimensions for achievement tables
            summary_keyvals = [('frame_platform', user.get('frame_platform',None)),
                               ('country_tier', str(user['country_tier']) if user.get('country_tier',None) else None),
                               ('townhall_level', user.get(gamedata['townhall']+'_level',1)),
                               ('spend_bracket', sql_util.get_spend_bracket(user.get('money_spent',0)))]

            # parse townhall progression
            if input['do_townhall'] and ('account_creation_time' in user):
                ts_key = gamedata['townhall']+'_level_at_time'
                if ts_key in user:
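                    # keys of the *_at_time dicts are seconds-of-age ("sage") since account creation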
                    cur.executemany("INSERT INTO " +sql_util.sym(input['townhall_table']) + \
                                    " (user_id,townhall_level,time) VALUES (%s,%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                    [(user['user_id'], level, user['account_creation_time'] + int(sage)) for sage, level in user[ts_key].iteritems()]
                                    )

            # parse tech unlock timing
            if input['do_tech']:
                cur.executemany("INSERT INTO "+sql_util.sym(input['tech_table']) + " (user_id, tech_name, level, time) VALUES (%s,%s,%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                [(user['user_id'], tech, level, user['account_creation_time'] + int(sage)) \
                                 for tech in gamedata['tech'] \
                                 for sage, level in user.get('tech:'+tech+'_at_time', {}).iteritems()
                                 ])

                # summary dimensions, kind, spec, level, is_maxed
                for spec, level in user.get('tech',{}).iteritems():
                    if spec in gamedata['tech']:
                        is_maxed = 1 if (len(gamedata['tech'][spec]['research_time']) > 1 and level >= len(gamedata['tech'][spec]['research_time'])) else 0
                        k = tuple(x[1] for x in summary_keyvals) + ('tech', spec, level, is_maxed)
                        upgrade_achievement_counters[k] = upgrade_achievement_counters.get(k,0) + 1
                        if is_maxed:
                            # one row for "any" maxed tech
                            km = tuple(x[1] for x in summary_keyvals) + ('tech', 'ANY', None, 1)
                            upgrade_achievement_counters[km] = upgrade_achievement_counters.get(km,0) + 1

            # parse building upgrade timing
            if input['do_buildings']:
                cur.executemany("INSERT INTO "+sql_util.sym(input['buildings_table']) + " (user_id, building, max_level, time) VALUES (%s,%s,%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                [(user['user_id'], building, level, user['account_creation_time'] + int(sage)) \
                                 for building in gamedata['buildings'] \
                                 for sage, level in user.get(building+'_level_at_time', user.get('building:'+building+':max_level_at_time', {})).iteritems()
                                 ])

                # summary dimensions, kind, spec, level, is_maxed
                for spec in gamedata['buildings']:
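                    # the {'asdf': 0} placeholder makes max() return 0, rather than raising, when the field is missing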
                    level = max(user.get('building:'+spec+':max_level_at_time',{'asdf':0}).itervalues())
                    if level >= 1:
                        is_maxed = 1 if (len(gamedata['buildings'][spec]['build_time']) > 1 and level >= len(gamedata['buildings'][spec]['build_time'])) else 0
                        k = tuple(x[1] for x in summary_keyvals) + ('building', spec, level, is_maxed)
                        upgrade_achievement_counters[k] = upgrade_achievement_counters.get(k,0) + 1
                        if is_maxed:
                            # one row for "any" maxed building
                            km = tuple(x[1] for x in summary_keyvals) + ('building', 'ANY', None, 1)
                            upgrade_achievement_counters[km] = upgrade_achievement_counters.get(km,0) + 1

            # parse sessions
            if input['do_sessions'] and ('sessions' in user):
                cur.executemany("INSERT INTO "+sql_util.sym(input['sessions_table']) + " (user_id,start,end,frame_platform,country_tier,townhall_level,prev_receipts) VALUES (%s,%s,%s,%s,%s,%s,%s)",
                                [(user['user_id'], s[0], s[1], user.get('frame_platform','fb'), user.get('country_tier',None),
                                  SpinUpcache.building_level_at_age(user, gamedata['townhall'], s[1] - user['account_creation_time']),
                                  SpinUpcache.receipts_at_age(user, s[1] - user['account_creation_time'])) for s in user['sessions'] if (s[0] > 0 and s[1] > 0 and s[1]>=s[0])])

            # parse activity
            ACTIVITY_MIN_CC_LEVEL = 5 # only record for CCL5+ players (same as ANALYTICS2)
            # note! the source data, from gameserver, omits gamebucks_spent for players who never paid. This is by design to reduce bloat.

            if input['do_activity'] and ('activity' in user) and ('account_creation_time' in user) and user.get(gamedata['townhall']+'_level',1) >= ACTIVITY_MIN_CC_LEVEL:
                def parse_activity(user, stime, data):
                    ntime = long(stime)
                    age = ntime - user['account_creation_time']
                    cc_level = SpinUpcache.building_level_at_age(user, gamedata['townhall'], age)
                    if cc_level < ACTIVITY_MIN_CC_LEVEL: return None
                    act = SpinUpcache.classify_activity(gamedata, data)
                    return (user['user_id'], ntime, act['state'], act.get('ai_tag', None) or act.get('ai_ui_name', None), data.get('gamebucks_spent',None), data.get('money_spent',None),
                            user.get('frame_platform','fb'), user.get('country_tier',None), cc_level, SpinUpcache.receipts_at_age(user, age))

                cur.executemany("INSERT INTO "+sql_util.sym(input['activity_table']) + \
                                " (user_id, time, state, ai_ui_name, gamebucks_spent, receipts, frame_platform, country_tier, townhall_level, prev_receipts)" + \
                                " VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                                filter(lambda x: x is not None, (parse_activity(user, stime, data) for stime, data in user['activity'].iteritems() if data['state'] not in ('idle','harvest'))))

            # update LTV estimate
            if input['do_ltv']:
                ltv_est = SkynetLTV.ltv_estimate(input['game_id'], gamedata, user, cache.update_time(), use_post_install_data = 9999999)
                if ltv_est is not None:
                    cur.execute("INSERT INTO "+sql_util.sym(input['ltv_table']) + " (user_id, est_90d) VALUES (%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                (user['user_id'], ltv_est))

            batch += 1
            total += 1
            if input['commit_interval'] > 0 and batch >= input['commit_interval']:
                batch = 0
                flush()

        # flush last commits
        flush()
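
# A hypothetical sketch (not part of the original script) of how a master
# process might drive do_slave() across all upcache segments: one 'get_fields'
# pass to collect the union of column definitions, then a 'get_rows' pass to
# write the rows out. NUM_SEGMENTS and make_input() are illustrative; the real
# driver also supplies the table names, do_* flags, field_mode, and
# commit_interval seen above.
def run_master_sketch(game_id, cache_info, dbconfig, verbose):
    NUM_SEGMENTS = 100 # illustrative; the real count comes from the cache metadata

    def make_input(mode, segnum, extra):
        base = {'game_id': game_id, 'cache_info': cache_info, 'dbconfig': dbconfig,
                'mode': mode, 'segnum': segnum, 'verbose': verbose}
        base.update(extra)
        return base

    fields = {}
    for segnum in xrange(NUM_SEGMENTS): # pass 1: union of column definitions
        fields.update(do_slave(make_input('get_fields', segnum, {'field_mode': None})))

    sorted_field_names = sorted(fields.keys())
    for segnum in xrange(NUM_SEGMENTS): # pass 2: emit the rows themselves
        do_slave(make_input('get_rows', segnum, {'sorted_field_names': sorted_field_names}))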