def do_reminders(game_id, dry_run = False):
    filename = SpinConfig.gamedata_component_filename('dev_reminders.json', override_game_id = game_id)
    if not os.path.exists(filename): return

    data = SpinConfig.load(filename)
    time_now = int(time.time())
    sender_name = data['sender']

    for reminder in data['reminders']:
        subject = 'Automated reminder from %s' % sender_name

        if reminder['body'] == '$XXX':
            # special case for six-X comments
            body = get_six_X_comments(game_id, time_now)
        else:
            body = 'Due %s %s' % (pretty_print_time(abs(reminder['deadline']-time_now)),
                                  'from now' if reminder['deadline']>time_now else 'ago') + '\n' + reminder['body']

        if body:
            SpinReminders.send_reminders(sender_name, reminder['notify'], subject, body, dry_run = dry_run)
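# Minimal usage sketch (hypothetical invocation; this tool's real entry point is not shown
# in this excerpt and presumably parses command-line options like the other scripts here):
#   do_reminders(SpinConfig.game(), dry_run = True)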
def to_playerdb(player_filename, player, base_id):
    gamedata['ai_bases'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename("ai_bases_compiled.json")))
    base = gamedata['ai_bases']['bases'][str(base_id)]
    my_base = []
    townhall_level = -1

    for building in base['buildings']:
        if building['spec'] == gamedata['townhall']:
            townhall_level = building.get('level', 1)
        props = {'spec': building['spec'],
                 'xy': building['xy'],
                 'level': building.get('level', 1)}
        if 'equipment' in building:
            props['equipment'] = building['equipment']
        my_base.append(props)

    for unit in base['units']:
        props = {'spec': unit['spec'],
                 'level': unit.get('level', 1),
                 'xy': unit['xy']}
        if 'orders' in unit: props['orders'] = unit['orders']
        if 'patrol' in unit: props['patrol'] = unit['patrol']
        my_base.append(props)

    for scenery in base.get('scenery', []):
        my_base.append({'spec': scenery['spec'], 'xy': scenery['xy']})

    player['unit_repair_queue'] = []
    player['my_base'] = my_base
    player['tech'] = base['tech']
    if townhall_level > 0:
        player['history'][gamedata['townhall'] + '_level'] = townhall_level
    if 'base_climate' in base:
        player['base_climate'] = base['base_climate']
    if 'deployment_buffer' in base:
        player['deployment_buffer'] = base['deployment_buffer']

    atom = AtomicFileWrite.AtomicFileWrite(player_filename, 'w')
    SpinJSON.dump(player, atom.fd, pretty=True)
    atom.complete(fsync=False)
    print 'wrote contents of AI base %d to %s!' % (base_id, player_filename)
#!/usr/bin/env python

# Copyright (c) 2015 SpinPunch. All rights reserved.
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file.

import sys, os, time, getopt, re
import SpinJSON # fast JSON library
import SpinConfig

quarries = SpinConfig.load(SpinConfig.gamedata_component_filename('quarries_compiled.json'))
time_now = int(time.time())

def metrics_log_iterator(filename):
    for line in open(filename).xreadlines():
        if '3830_battle_end' not in line: continue
        event = SpinJSON.loads(line)
        base_id = event.get('base_id', '')
        if (not base_id.startswith('q')): continue
        quarry_id = int(base_id[1:])
        yield quarry_id

def battle_log_dir_iterator(dirname):
    log_re = re.compile('^[0-9]+-[0-9]+-vs-[0-9]+-at-(.+).json.*$')
    for filename in os.listdir(dirname):
def do_slave(input):
    cache = open_cache(input['game_id'], input['cache_info'])
    batch = 0
    total = 0

    gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id = input['game_id'])))
    gamedata['ai_bases'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('ai_bases_compiled.json', override_game_id = input['game_id'])))
    gamedata['loot_tables'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('loot_tables.json', override_game_id = input['game_id'])))

    if input['mode'] == 'get_fields':
        fields = {'money_spent': 'FLOAT4', # force this column into existence because analytics_views.sql depends on it
                  'account_creation_time': 'INT8', # same here
                  'country_tier': 'CHAR(1)', 'country': 'CHAR(2)',
                  'acquisition_campaign': 'VARCHAR(64)',
                  'acquisition_ad_skynet': 'VARCHAR(128)',

                  # these fields are extracted from compound objects inside of "user"
                  'connection_method': 'VARCHAR(32)',
                  'last_ping': 'FLOAT4',
                  'last_direct_ssl_ping': 'FLOAT4',
                  'playfield_speed': 'INT2',
                  }
        for user in cache.iter_segment(input['segnum']):
            for key, val in user.iteritems():
                if key not in fields:
                    field = setup_field(gamedata, key, val, field_mode = input['field_mode'])
                    if field is not None:
                        fields[key] = field
            batch += 1
            total += 1
            if batch >= 1000:
                batch = 0
                if input['verbose']: print >> sys.stderr, 'seg', input['segnum'], 'user', total
        return fields

    elif input['mode'] == 'get_rows':
        sql_util = SpinSQLUtil.MySQLUtil()
        if not input['verbose']: sql_util.disable_warnings()
        sorted_field_names = input['sorted_field_names']
        cfg = input['dbconfig']
        con = MySQLdb.connect(*cfg['connect_args'], **cfg['connect_kwargs'])
        cur = con.cursor()

        # buffer up keyvals to be updated in the achievement tables
        upgrade_achievement_counters = {}

        def flush():
            con.commit() # commit other tables first

            # MySQL often throws deadlock exceptions when doing upserts that reference existing rows (!)
            # in the upgrade_achievements table, so we need to loop on committing these updates
            deadlocks = 0
            while True:
                try:
                    cur.executemany("INSERT INTO "+sql_util.sym(input['upgrade_achievement_table']) + \
                                    " (" + ','.join([x[0] for x in sql_util.summary_out_dimensions()]) + ", kind, spec, level, is_maxed, num_players) " + \
                                    " VALUES (" + ','.join(['%s'] * len(sql_util.summary_out_dimensions())) + ", %s, %s, %s, %s, %s) " + \
                                    " ON DUPLICATE KEY UPDATE num_players = num_players + %s",
                                    [k + (v,v) for k,v in upgrade_achievement_counters.iteritems()])
                    con.commit()
                    upgrade_achievement_counters.clear()
                    break
                except MySQLdb.OperationalError as e:
                    if e.args[0] == 1213: # deadlock
                        con.rollback()
                        deadlocks += 1
                        continue
                    else:
                        raise

            if input['verbose']: print >> sys.stderr, 'seg', input['segnum'], total, 'flushed', deadlocks, 'deadlocks'

        for user in cache.iter_segment(input['segnum']):
            user_id = user['user_id']
            keys = [x for x in sorted_field_names if x in user]
            values = [user[x] for x in keys]

            # manual parsing of sprobe fields
            if 'last_sprobe_result' in user:
                connection_method = None
                if 'connection' in user['last_sprobe_result']['tests']:
                    connection_method = user['last_sprobe_result']['tests']['connection'].get('method', None)
                if connection_method:
                    keys.append('connection_method')
                    values.append(connection_method)
                if (connection_method in user['last_sprobe_result']['tests']) and ('ping' in user['last_sprobe_result']['tests'][connection_method]):
                    keys.append('last_ping')
                    values.append(user['last_sprobe_result']['tests'][connection_method]['ping'])
                if ('direct_ssl' in user['last_sprobe_result']['tests']) and ('ping' in user['last_sprobe_result']['tests']['direct_ssl']):
                    keys.append('last_direct_ssl_ping')
                    values.append(user['last_sprobe_result']['tests']['direct_ssl']['ping'])

            # manual parsing of other compound fields
            prefs = user.get('player_preferences', None)
            if prefs:
                if 'playfield_speed' in prefs:
                    keys.append('playfield_speed')
                    values.append(prefs['playfield_speed'])

            cur.execute("INSERT INTO " + input['upcache_table'] + \
                        "(user_id, "+', '.join(['`'+x+'`' for x in keys])+")"+ \
                        " VALUES (%s, "+', '.join(['%s'] * len(values)) +")",
                        [user_id,] + values)

            # we need the summary dimensions for achievement tables
            summary_keyvals = [('frame_platform', user.get('frame_platform', None)),
                               ('country_tier', str(user['country_tier']) if user.get('country_tier', None) else None),
                               ('townhall_level', user.get(gamedata['townhall']+'_level', 1)),
                               ('spend_bracket', sql_util.get_spend_bracket(user.get('money_spent', 0)))]

            # parse townhall progression
            if input['do_townhall'] and ('account_creation_time' in user):
                ts_key = gamedata['townhall']+'_level_at_time'
                if ts_key in user:
                    cur.executemany("INSERT INTO " +sql_util.sym(input['townhall_table']) + \
                                    " (user_id,townhall_level,time) VALUES (%s,%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                    [(user['user_id'], level, user['account_creation_time'] + int(sage)) for sage, level in user[ts_key].iteritems()])

            # parse tech unlock timing
            if input['do_tech']:
                cur.executemany("INSERT INTO "+sql_util.sym(input['tech_table']) + " (user_id, tech_name, level, time) VALUES (%s,%s,%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                [(user['user_id'], tech, level, user['account_creation_time'] + int(sage)) \
                                 for tech in gamedata['tech'] \
                                 for sage, level in user.get('tech:'+tech+'_at_time', {}).iteritems()])

                # summary dimensions, kind, spec, level, is_maxed
                for spec, level in user.get('tech', {}).iteritems():
                    if spec in gamedata['tech']:
                        is_maxed = 1 if (len(gamedata['tech'][spec]['research_time']) > 1 and level >= len(gamedata['tech'][spec]['research_time'])) else 0
                        k = tuple(x[1] for x in summary_keyvals) + ('tech', spec, level, is_maxed)
                        upgrade_achievement_counters[k] = upgrade_achievement_counters.get(k, 0) + 1
                        if is_maxed:
                            # one row for "any" maxed tech
                            km = tuple(x[1] for x in summary_keyvals) + ('tech', 'ANY', None, 1)
                            upgrade_achievement_counters[km] = upgrade_achievement_counters.get(km, 0) + 1

            # parse building upgrade timing
            if input['do_buildings']:
                cur.executemany("INSERT INTO "+sql_util.sym(input['buildings_table']) + " (user_id, building, max_level, time) VALUES (%s,%s,%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                [(user['user_id'], building, level, user['account_creation_time'] + int(sage)) \
                                 for building in gamedata['buildings'] \
                                 for sage, level in user.get(building+'_level_at_time', user.get('building:'+building+':max_level_at_time', {})).iteritems()])

                # summary dimensions, kind, spec, level, is_maxed
                for spec in gamedata['buildings']:
                    level = max(user.get('building:'+spec+':max_level_at_time', {'asdf': 0}).itervalues())
                    if level >= 1:
                        is_maxed = 1 if (len(gamedata['buildings'][spec]['build_time']) > 1 and level >= len(gamedata['buildings'][spec]['build_time'])) else 0
                        k = tuple(x[1] for x in summary_keyvals) + ('building', spec, level, is_maxed)
                        upgrade_achievement_counters[k] = upgrade_achievement_counters.get(k, 0) + 1
                        if is_maxed:
                            # one row for "any" maxed building
                            km = tuple(x[1] for x in summary_keyvals) + ('building', 'ANY', None, 1)
                            upgrade_achievement_counters[km] = upgrade_achievement_counters.get(km, 0) + 1

            # parse sessions
            if input['do_sessions'] and ('sessions' in user):
                cur.executemany("INSERT INTO "+sql_util.sym(input['sessions_table']) + " (user_id,start,end,frame_platform,country_tier,townhall_level,prev_receipts) VALUES (%s,%s,%s,%s,%s,%s,%s)",
                                [(user['user_id'], s[0], s[1], user.get('frame_platform', 'fb'), user.get('country_tier', None),
                                  SpinUpcache.building_level_at_age(user, gamedata['townhall'], s[1] - user['account_creation_time']),
                                  SpinUpcache.receipts_at_age(user, s[1] - user['account_creation_time'])) \
                                 for s in user['sessions'] if (s[0] > 0 and s[1] > 0 and s[1] >= s[0])])

            # parse activity
            ACTIVITY_MIN_CC_LEVEL = 5 # only record for CCL5+ players (same as ANALYTICS2)
            # note! the source data, from gameserver, omits gamebucks_spent for players who never paid. This is by design to reduce bloat.
            if input['do_activity'] and ('activity' in user) and ('account_creation_time' in user) and user.get(gamedata['townhall']+'_level', 1) >= ACTIVITY_MIN_CC_LEVEL:
                def parse_activity(user, stime, data):
                    ntime = long(stime)
                    age = ntime - user['account_creation_time']
                    cc_level = SpinUpcache.building_level_at_age(user, gamedata['townhall'], age)
                    if cc_level < ACTIVITY_MIN_CC_LEVEL: return None
                    act = SpinUpcache.classify_activity(gamedata, data)
                    return (user['user_id'], ntime, act['state'], act.get('ai_tag', None) or act.get('ai_ui_name', None),
                            data.get('gamebucks_spent', None), data.get('money_spent', None),
                            user.get('frame_platform', 'fb'), user.get('country_tier', None), cc_level,
                            SpinUpcache.receipts_at_age(user, age))

                cur.executemany("INSERT INTO "+sql_util.sym(input['activity_table']) + \
                                " (user_id, time, state, ai_ui_name, gamebucks_spent, receipts, frame_platform, country_tier, townhall_level, prev_receipts)" + \
                                " VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                                filter(lambda x: x is not None,
                                       (parse_activity(user, stime, data) for stime, data in user['activity'].iteritems() if data['state'] not in ('idle', 'harvest'))))

            # update LTV estimate
            if input['do_ltv']:
                ltv_est = SkynetLTV.ltv_estimate(input['game_id'], gamedata, user, cache.update_time(), use_post_install_data = 9999999)
                if ltv_est is not None:
                    cur.execute("INSERT INTO "+sql_util.sym(input['ltv_table']) + " (user_id, est_90d) VALUES (%s,%s) ON DUPLICATE KEY UPDATE user_id=user_id;",
                                (user['user_id'], ltv_est))

            batch += 1
            total += 1
            if input['commit_interval'] > 0 and batch >= input['commit_interval']:
                batch = 0
                flush()

        # flush last commits
        flush()
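# Standalone sketch of the retry-on-deadlock upsert pattern that flush() above relies on.
# Assumptions not taken from the original code: `con` is an open MySQLdb connection, the
# target table has columns (k, num_players), and MySQL error 1213 (ER_LOCK_DEADLOCK) is the
# only error worth retrying; anything else is re-raised.
def upsert_with_deadlock_retry(con, table, rows):
    cur = con.cursor()
    while True:
        try:
            cur.executemany("INSERT INTO " + table +
                            " (k, num_players) VALUES (%s, %s)" +
                            " ON DUPLICATE KEY UPDATE num_players = num_players + VALUES(num_players)",
                            rows)
            con.commit()
            return
        except MySQLdb.OperationalError as e:
            if e.args[0] == 1213: # deadlock detected - roll back and retry the whole batch
                con.rollback()
                continue
            raise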
import SpinJSON
import SpinConfig
import SpinNoSQL
import base64, lz4, SpinLZJB
import sys, time, getopt

if __name__ == '__main__':
    opts, args = getopt.gnu_getopt(sys.argv[1:], 'g:b:', [])
    game_id = SpinConfig.game()
    batch_size = None

    for key, val in opts:
        if key == '-g': game_id = val
        elif key == '-b': batch_size = int(val)

    gamedata = {} # SpinJSON.load(open(SpinConfig.gamedata_filename()))
    gamedata['regions'] = SpinConfig.load(SpinConfig.gamedata_component_filename("regions.json", override_game_id=game_id))

    nosql_client = SpinNoSQL.NoSQLClient(SpinConfig.get_mongodb_config(SpinConfig.config['game_id']))

    TRIALS = 10
    region = None
    region_list = [name for name, data in sorted(gamedata['regions'].items()) if \
                   data.get('enable_map', 1)]
    total_time = 0.0

    for region in region_list:
        start_time = time.time()
        db_time = -1
        result = list(
verbose = False
trials = 10000
game_id = SpinConfig.game()

opts, args = getopt.gnu_getopt(sys.argv, 'g:', [])
for key, val in opts:
    if key == '-g': game_id = val

gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id=game_id)))
tables = SpinJSON.load(open(SpinConfig.gamedata_component_filename("loot_tables.json", override_game_id=game_id)))

def pred_resolver(verbose, pred):
    if verbose: print 'RESOLVING', pred
    if pred['predicate'] == "ALWAYS_TRUE":
        return True
    return False

if len(args) >= 2:
    tables_to_test = args[1:]
else:
    tables_to_test = ("store_random_item",)

by_item = {}

for toplevel in tables_to_test:
    for i in xrange(trials):
            pat_ls.append(pat)

        pattern = '\\b(' + '|'.join(pat_ls) + ')\\b'
        flags = 0
        if 'i' in config['options']: flags |= re.I
        # no 'g' option needed, re.sub() replaces all matches by default
        self.bad_regex = re.compile(pattern, flags)

    def is_bad(self, input):
        return bool(self.bad_regex.search(input))

    def censor(self, input):
        return self.bad_regex.sub(lambda match: '*' * len(match.group()), input)

if __name__ == '__main__':
    import SpinConfig
    config = SpinConfig.load(SpinConfig.gamedata_component_filename('chat_filter.json'))
    cf = ChatFilter(config)
    TESTS = {'asdf': 'asdf',
             'sh!t': '****',
             'fu!ckers': '********',
             'dwarf shortage': 'dwarf shortage'}
    for input, expect in TESTS.iteritems():
        assert cf.censor(input) == expect
    print 'OK'
                                       ['prune', 'optimize'])

    for key, val in opts:
        if key == '-g': game_id = val
        elif key == '-c': commit_interval = int(val)
        elif key == '-q': verbose = False
        elif key == '--prune': do_prune = True
        elif key == '--optimize': do_optimize = True

    gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id=game_id)))
    # load some server-side-only pieces of gamedata for AI base parsing
    gamedata['ai_bases'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('ai_bases_compiled.json', override_game_id=game_id)))
    gamedata['quarries'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('quarries_compiled.json', override_game_id=game_id)))
    gamedata['hives'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('hives_compiled.json', override_game_id=game_id)))
    gamedata['loot_tables'] = SpinJSON.load(open(SpinConfig.gamedata_component_filename('loot_tables.json', override_game_id=game_id)))

    sql_util = SpinSQLUtil.MySQLUtil()
    if not verbose: sql_util.disable_warnings()
def do_slave(task):
    date = task['date']
    game_id = task['game_id']
    verbose = task['verbose']
    dry_run = task['dry_run']

    start_time = SpinConfig.cal_to_unix((int(date[0:4]), int(date[4:6]), int(date[6:8])))
    end_time = start_time + 86400

    if verbose:
        print >> sys.stderr, 'converting date', date, 'start_time', start_time, 'end_time', end_time, '...'

    # gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id = game_id)))

    if not verbose: filterwarnings('ignore', category=MySQLdb.Warning)

    quarries = SpinConfig.load(SpinConfig.gamedata_component_filename('quarries_compiled.json'))
    hives = SpinConfig.load(SpinConfig.gamedata_component_filename('hives_compiled.json'))

    # ensure that the spawn list is ordered by id_start - necessary for find_template() below
    for spawn_list in quarries['spawn'], hives['spawn']:
        spawn_list.sort(key=lambda x: x['id_start'])

    cfg = SpinConfig.get_mysql_config(game_id + '_upcache')
    con = MySQLdb.connect(*cfg['connect_args'], **cfg['connect_kwargs'])
    battles_table = cfg['table_prefix'] + game_id + '_battles'

    if 0:
        # find any already-converted battles
        cur = con.cursor()
        cur.execute("SELECT COUNT(*) FROM %s WHERE time >= %%s and time < %%s" % battles_table, (start_time, end_time))
        row = cur.fetchone()
        con.commit()
        if row and row[0] > 0:
            print >> sys.stderr, 'there are already', row[0], 'entries in this time range, aborting!'
            return

    s3 = SpinS3.S3(SpinConfig.aws_key_file())
    bucket = 'spinpunch-%sprod-battle-logs' % game_id

    for entry in s3.list_bucket(bucket, prefix='%s-battles-%s/%s' % (game_id, date[0:6], date)):
        filename = entry['name'].split('/')[-1]
        event_time, attacker_id, defender_id, base_id = parse_battle_log_filename(filename)
        if (not base_id) or event_time < start_time or event_time >= end_time: continue
        if base_id[0] != 'v': continue # only look at hives

        print >> sys.stderr, event_time, SpinLog.pretty_time(time.gmtime(event_time)), filename

        fd = s3.get_open(bucket, entry['name'], allow_keepalive=False)
        unzipper = subprocess.Popen(['gunzip', '-c', '-'],
                                    stdin=fd.fileno(),
                                    stdout=subprocess.PIPE)

        battle_start = None
        battle_end = None

        for line in unzipper.stdout.xreadlines():
            if '3820_battle_start' in line:
                battle_start = SpinJSON.loads(line)
            elif '3830_battle_end' in line:
                battle_end = SpinJSON.loads(line)

        if (not battle_start) or (not battle_end): continue

        base_template = find_template(hives['spawn'], int(base_id[1:]))
        if not base_template:
            sys.stderr.write('unknown hive %s\n' % base_id)
            continue

        # generate a fake summary
        summary = {'time': event_time,
                   'attacker_id': battle_start['attacker_user_id'],
                   'attacker_level': battle_start['attacker_level'],
                   'attacker_outcome': battle_end['battle_outcome'],
                   'defender_id': battle_start['opponent_user_id'],
                   'defender_level': battle_start['opponent_level'],
                   'defender_outcome': 'victory' if battle_end['battle_outcome'] == 'defeat' else 'defeat',
                   'base_damage': battle_end['base_damage'],
                   'base_id': battle_start['base_id'],
                   'base_type': 'hive',
                   'base_template': base_template,
                   'loot': battle_end['loot']}

        cur = con.cursor()
        cur.execute("SELECT battle_id FROM %s WHERE time = %%s and attacker_id = %%s and defender_id = %%s" % battles_table,
                    (event_time, battle_start['attacker_user_id'], battle_start['opponent_user_id']))
        row = cur.fetchone()
        con.commit()
        if row:
            sys.stderr.write('appears to be a duplicate, skipping!\n')
            continue

        id_generator.set_time(int(time.time()))
        battle_id = id_generator.generate() # arbitrary

        keys = ['battle_id',]
        values = [battle_id,]

        for kname, ktype in battle_fields.iteritems():
            path = kname.split(':')
            probe = summary
            val = None
            for i in xrange(len(path)):
                if path[i] not in probe:
                    break
                elif i == len(path) - 1:
                    val = probe[path[i]]
                    break
                else:
                    probe = probe[path[i]]

            if val is not None:
                keys.append(kname)
                values.append(val)

        query = "INSERT INTO " + battles_table + \
                "("+', '.join(['`'+x+'`' for x in keys])+")"+ \
                " VALUES ("+', '.join(['%s'] * len(values)) +")"
        print >> sys.stderr, query
        print >> sys.stderr, values

        if not dry_run:
            cur = con.cursor()
            cur.execute(query, values)
            con.commit()
    for key, val in opts:
        if key == '-g': game_id = val
        elif key == '-c': commit_interval = int(val)
        elif key == '-q': verbose = False
        elif key == '--dry-run': dry_run = True

    sql_util = SpinSQLUtil.MySQLUtil()
    if verbose or True:
        from warnings import filterwarnings
        filterwarnings('error', category=MySQLdb.Warning)
    else:
        sql_util.disable_warnings()

    gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename(override_game_id=game_id)))

    fishing_json_file = SpinConfig.gamedata_component_filename('fishing_slates.json', override_game_id=game_id)
    try:
        fishing_json_fd = open(fishing_json_file)
    except IOError:
        fishing_json_fd = None # no fishing in this game
    fishing_slates = SpinConfig.load_fd(fishing_json_fd, stripped=True) if fishing_json_fd else None

    cfg = SpinConfig.get_mysql_config(game_id + '_upcache')
    con = MySQLdb.connect(*cfg['connect_args'], **cfg['connect_kwargs'])
    stats_table = cfg['table_prefix'] + game_id + '_stats'
    recipes_table = cfg['table_prefix'] + game_id + '_crafting_recipes'
    fishing_slates_table = cfg['table_prefix'] + game_id + '_fishing_slates'

    cur = con.cursor(MySQLdb.cursors.DictCursor)
#!/usr/bin/env python

# Copyright (c) 2015 SpinPunch. All rights reserved.
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file.

# load some standard Python libraries
import sys, urllib, urllib2, getopt, socket
import SpinConfig
import SpinFacebook

# just load achievements.json, not all of gamedata, so we can populate before running make-gamedata.sh
gamedata = {'achievements': SpinConfig.load(SpinConfig.gamedata_component_filename('achievements.json'))}

def get_endpoint_url(params):
    port = SpinConfig.config['proxyserver']['external_http_port']
    port_str = (':%d' % port) if port != 80 else ''
    # note: use stable ordering of key/value pairs for the query string, so that the canonical URL is deterministic
    qs = urllib.urlencode(sorted(params.items(), key=lambda k_v: k_v[0]))
    return ("http://%s%s/OGPAPI?" % (SpinConfig.config['proxyserver'].get('external_host', socket.gethostname()), port_str)) + qs

if __name__ == '__main__':
    opts, args = getopt.gnu_getopt(sys.argv[1:], '', ['dry-run'])
    dry_run = False
#!/usr/bin/env python

# Copyright (c) 2015 SpinPunch. All rights reserved.
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file.

# obsolete AI base analytics tool

import SpinJSON
import SpinConfig
import sys, os, glob, re, gzip, traceback

gamedata = SpinJSON.load(open(SpinConfig.gamedata_filename()))
gamedata['ai_bases'] = SpinConfig.load(SpinConfig.gamedata_component_filename("ai_bases_compiled.json"))
ai_ids = sorted(map(int, gamedata['ai_bases']['bases'].keys()))
ai_id_set = set(ai_ids)

def get_leveled_quantity(qty, level):
    if type(qty) == list:
        return qty[level - 1]
    return qty

def check_bad_units():
    for id in ai_ids:
        base = gamedata['ai_bases']['bases'][str(id)]
        level = base['resources']['player_level']