def render_POST(self, request):
    # Handle a control/file-write POST from the master process.
    # Every request must carry the shared secret; the action is selected
    # by which args are present: shutdown, delete, or filename (write).
    assert request.args['secret'][0] == self.secret
    if ('shutdown' in request.args):
        # reply 'ok' first, then stop the reactor shortly after
        reactor.callLater(0.1, reactor.stop)
        return 'ok'
    if self.simulate_fault():
        # fault-injection hook for testing the master's error handling
        request.setResponseCode(http.BAD_REQUEST)
        return ''
    if 'delete' in request.args:
        filename = request.args['delete'][0]
        # best-effort delete: ignore filesystem errors only (was a bare
        # `except:` which could also swallow unrelated bugs like KeyboardInterrupt)
        try:
            os.unlink(filename)
        except OSError:
            pass
        return 'ok'
    try:
        # atomic write of the POST body to the requested filename
        filename = request.args['filename'][0]
        do_sync = ('fsync' in request.args and bool(request.args['fsync'][0]))
        buf = request.content.read()
        atom = AtomicFileWrite.AtomicFileWrite(filename, 'w')
        atom.fd.write(buf)
        atom.complete(fsync=do_sync)
        return 'ok'
    except Exception as e:
        # report the exception's own traceback (format_exc), not the current
        # call stack (format_stack), which would omit where the error was raised
        sys.stderr.write('ioslave exception: %s\n%s' % (repr(e), traceback.format_exc()))
        request.setResponseCode(http.INTERNAL_SERVER_ERROR)
        return 'error'
def do_split(game_id, game_name): GROUPS = ('all-A', 'all-B', 'payers-A', 'payers-B') info_file = '%s-raw/%s-upcache-info.json' % (game_id, game_name) info = SpinJSON.load(open(info_file)) for infile in info['segments']: print "PROCESSING", infile infile = '%s-raw/' % game_id + os.path.basename(infile) outfiles = dict([(name, outstream_open('%s-%s/%s' % (game_id, name, os.path.basename(infile)))) for name in GROUPS]) instream = FastGzipFile.Reader(infile) count = 0 payer_count = 0 for line in instream.xreadlines(): if 'EMPTY' in line: count += 1 continue data = SpinJSON.loads(line) group = 'A' if (count%2)==0 else 'B' outstream_write(outfiles['all-'+group], line) if data.get('money_spent',0)>0: if 'payers-'+group in outfiles: outstream_write(outfiles['payers-'+group], line) payer_count += 1 count += 1 for name, stream in outfiles.iteritems(): print 'CLOSING', name, 'count', count, 'payer_count', payer_count outstream_close(stream) for group in GROUPS: outfo = {'update_time': info['update_time'], 'segments': [os.path.basename(infile_name) for infile_name in info['segments']]} atom = AtomicFileWrite.AtomicFileWrite('%s-%s/%s-upcache-info.json' % (game_id, group, game_name), 'w') SpinJSON.dump(outfo, atom.fd, pretty=True, newline=True) atom.complete()
def do_apply(locale, gamedata, input_po_file, output_json_file, verbose = True):
    """Merge translations from input_po_file into gamedata, then write output_json_file.

    Mutates `gamedata` in place before serializing it.
    """
    po = polib.pofile(input_po_file, encoding = 'utf-8', wrapwidth = -1)
    # msgid -> msgstr lookup table
    translations = {entry.msgid: entry.msgstr for entry in po}

    # translate in place
    put_strings(gamedata['strings'], translations, filter = None, verbose = verbose)
    for category in TRANSLATE_CATEGORIES:
        # only 'ui_'-prefixed fields are translatable in these categories
        put_strings(gamedata[category], translations, filter = 'ui_', verbose = verbose)

    atom = AtomicFileWrite.AtomicFileWrite(output_json_file, 'w')
    SpinJSON.dump(gamedata, atom.fd, ordered=True, pretty=False, newline=True, size_hint = 8*1024*1024) # ,double_precision=5)
    atom.complete()
    if verbose:
        print >>sys.stderr, "wrote", atom.filename
def to_playerdb(player_filename, player, base_id): gamedata['ai_bases'] = SpinJSON.load( open(SpinConfig.gamedata_component_filename("ai_bases_compiled.json"))) base = gamedata['ai_bases']['bases'][str(base_id)] my_base = [] townhall_level = -1 for building in base['buildings']: if building['spec'] == gamedata['townhall']: townhall_level = building.get('level', 1) props = { 'spec': building['spec'], 'xy': building['xy'], 'level': building.get('level', 1) } if 'equipment' in building: props['equipment'] = building['equipment'] my_base.append(props) for unit in base['units']: props = { 'spec': unit['spec'], 'level': unit.get('level', 1), 'xy': unit['xy'] } if 'orders' in unit: props['orders'] = unit['orders'] if 'patrol' in unit: props['patrol'] = unit['patrol'] my_base.append(props) for scenery in base.get('scenery', []): my_base.append({'spec': scenery['spec'], 'xy': scenery['xy']}) player['unit_repair_queue'] = [] player['my_base'] = my_base player['tech'] = base['tech'] if townhall_level > 0: player['history'][gamedata['townhall'] + '_level'] = townhall_level if 'base_climate' in base: player['base_climate'] = base['base_climate'] if 'deployment_buffer' in base: player['deployment_buffer'] = base['deployment_buffer'] atom = AtomicFileWrite.AtomicFileWrite(player_filename, 'w') SpinJSON.dump(player, atom.fd, pretty=True) atom.complete(fsync=False) print 'wrote contents of AI base %d to %s!' % (base_id, player_filename)
def async_flush_begin(self): assert self.async_atom is None self.do_prune() self.async_atom = AtomicFileWrite.AtomicFileWrite(self.filename, 'w') self.async_atom.fd.write('{\n') # take a virtual copy-on-write snapshot of self.map self.async_map = {} # sort keys so that the file gets written in nice pretty sorted order # if using numeric keys, sort numerically # note: reverse the sort so that pop() returns keys in sorted order self.async_keys = sorted(self.map.keys(), key=int if self.sort_numeric else None, reverse=True) if self.verbose: print 'async flush of', self.name, 'start:', len( self.async_keys), 'keys'
def flush(self, fsync=True): assert self.allow_write if self.verbose: print 'flushing', self.name, '...', start_time = time.time() if not self.dirty: if self.verbose: print 'not dirty' return else: if self.verbose: print '(sync=%d)' % fsync, # prune map just before write self.do_prune() self.dirty = False atom = AtomicFileWrite.AtomicFileWrite(self.filename, 'w') # put keys in sorted order so the output is pretty keylist = sorted(self.map.keys(), key=int if self.sort_numeric else None, reverse=True) # write dictionary in streaming fashion to save RAM atom.fd.write('{\n') while len(keylist) > 0: key = keylist.pop() comma = ',' if len(keylist) > 0 else '' atom.fd.write('"' + str(key) + '":' + SpinJSON.dumps(self.map[key], double_precision=5) + comma + '\n') atom.fd.write('}\n') atom.complete(fsync=fsync) if self.verbose: end_time = time.time() print 'done (%.1f ms)' % (1000.0 * (end_time - start_time))
def do_slave(input):
    # Worker entry point: dump one upcache segment's users to a gzipped file
    # and/or a MongoDB table. `input` is a dict of job parameters
    # (seg, user_id_range, cache/S3/MongoDB config, filename, progress, ...).
    # NOTE(review): `input` shadows the builtin; kept as-is since callers
    # pass it positionally and the protocol dict's schema is external.
    seg = input['seg']
    ignore_users = set(input['ignore_users'])
    mod_users = set(input['mod_users']) if (input['mod_users'] is not None) else None
    if input['s3_userdb']:
        userdb_driver = SpinUserDB.S3Driver()
    else:
        userdb_driver = SpinUserDB.driver
    to_mongodb_config = input['to_mongodb_config']
    if to_mongodb_config:
        # lazy import: pymongo only needed when writing to MongoDB
        import pymongo
        nosql_client = pymongo.MongoClient(*input['to_mongodb_config']['connect_args'],
                                           **input['to_mongodb_config']['connect_kwargs'])[to_mongodb_config['dbname']]
        nosql_table = nosql_client[to_mongodb_config['tablename']]
    else:
        nosql_table = None
    nosql_deltas_only = input.get('nosql_deltas_only', False)
    cache = open_cache(input['cache_read'], input['cache_segments'],
                       input['from_s3_bucket'], input['from_s3_keyfile'], False)
    # list of all user_ids in this segment
    user_id_set = set([id for id in xrange(input['user_id_range'][0], input['user_id_range'][1] + 1)
                       if SpinUpcache.get_segment_for_user(id, input['cache_segments']) == seg])
    user_count = 0  # number updated so far
    user_total = len(user_id_set)  # total number to update
    # set up segment output
    if input['filename']:
        write_atom = AtomicFileWrite.AtomicFileWrite(input['filename'], 'w')
        write_process = FastGzipFile.WriterProcess(write_atom.fd)
        write_zipfd = write_process.stdin
    else:
        write_zipfd = None
    # first stream through upcache, in whatever order the segment upcache is in
    if cache:
        cache_seg = cache.iter_segment(seg)
        for entry in cache_seg:
            id = entry['user_id']
            if SpinUpcache.get_segment_for_user(id, input['cache_segments']) != seg:
                sys.stderr.write('\nuser %d does not belong in segment %d!\n' % (id, seg))
                continue
            if id not in user_id_set:
                continue
            # mark the user as already done so we skip him in the second pass
            user_id_set.remove(id)
            dump_user(seg, id, entry, 'cache', write_zipfd, nosql_table, nosql_deltas_only,
                      ignore_users, mod_users, input['progress'], user_count, user_total,
                      userdb_driver, input['time_now'])
            user_count += 1
        if input['progress']:
            sys.stderr.write('\ncached pass done seg %d\n' % seg)
    # now get the remaining users belonging to this segment from original userdb files
    for user_id in user_id_set:
        if SpinUpcache.get_segment_for_user(user_id, input['cache_segments']) == seg:
            dump_user(seg, user_id, None, 'source', write_zipfd, nosql_table, nosql_deltas_only,
                      ignore_users, mod_users, input['progress'], user_count, user_total,
                      userdb_driver, input['time_now'])
            user_count += 1
    if input['progress']:
        sys.stderr.write('\nuncached pass done seg %d\n' % seg)
    if write_zipfd:
        # drain and close the gzip pipeline, then commit the atomic file
        write_zipfd.flush()
        write_zipfd.close()
        write_process.communicate()  # force gzip to finish
        write_atom.complete()
    if input['to_s3_bucket']:
        # upload the finished segment file to S3
        SpinS3.S3(input['to_s3_keyfile'], verbose=False).put_file(input['to_s3_bucket'],
                                                                  os.path.basename(input['filename']),
                                                                  input['filename'])
# Script body fragment (invert_requirements.py): parse --game-id, load a
# partial gamedata, and begin building the inverted requirements table.
# NOTE(review): this chunk ends mid-literal — the `requirements` dict is
# closed outside this view.
game_id = None
for key, val in opts:
    if key == '--game-id' or key == '-g':
        game_id = val
assert game_id

# partial build of gamedata
gamedata = {
    'predicate_library': SpinConfig.load(args[0]),
    'buildings': SpinConfig.load(args[1]),
    'tech': SpinConfig.load(args[2]),
    'crafting': SpinConfig.load(args[3])
}

out_fd = AtomicFileWrite.AtomicFileWrite(args[4], 'w', ident=str(os.getpid()))
print >> out_fd.fd, "// AUTO-GENERATED BY invert_requirements.py"

# note pluralization of the keys - this matches what UpgradeBar expects
requirements = {
    'building': prune_dict(dict((name, get_requirements(gamedata, gamedata['buildings'][name]))
                                for name in gamedata['buildings'])),
    'tech': prune_dict(dict((name, get_requirements(gamedata, gamedata['tech'][name]))
                            for name in gamedata['tech'])),
# Fragment: continues a `try:` begun above this chunk, inside a per-user
# loop — the loop header and opening `try:` are outside this view.
# Fills in a missing account_creation_time from the first recorded session
# and (optionally) rewrites the user file.
        user = json.load(open(user_filename))
    except:
        # missing/unreadable user file: skip this user
        sys.stderr.write('user file missing for user_id %d!' % user_id)
        continue
    account_creation_time = user.get('account_creation_time', 0)
    if account_creation_time < 1 and (user_id in sessions) and len(sessions[user_id]) > 0:
        # fill in missing account_creation_time based on first session
        account_creation_time = sessions[user_id][0][0]
        if account_creation_time > 0:
            sys.stderr.write('fixing account_creation_time for user %d\n' % user_id)
            if do_write:
                # persist the repaired timestamp back to the user file
                user['account_creation_time'] = account_creation_time
                atom = AtomicFileWrite.AtomicFileWrite(user_filename, 'w')
                atom.fd.write(json.dumps(user, indent=2))
                atom.complete(fsync=False)
    if account_creation_time < 1:
        # still unknown: cannot compute day numbers, skip this user
        sys.stderr.write('no account_creation_time for user %d\n' % user_id)
        continue
    spent_by_day = {}
    spent_at_time = {}
    if user_id in spend:
        purchase_list = spend[user_id]
        for purchase in purchase_list:
            # purchase[0] is presumably a UNIX timestamp — day number relative to account creation
            daynum = int((purchase[0] - account_creation_time) / (60 * 60 * 24))
# Script body fragment (make_tech_mods.py): parse --game-id, bail out for
# any game other than legacy 'mf', load tech data, and open the output.
# NOTE(review): this chunk ends mid-literal — MOD_TYPES continues outside
# this view.
opts, args = getopt.gnu_getopt(sys.argv[1:], 'g:', ['game-id=',])
ident = str(os.getpid())
game_id = None
for key, val in opts:
    if key == '--game-id' or key == '-g':
        game_id = val
assert game_id
if game_id != 'mf':
    # no mods except in old MF
    sys.exit(0)

tech = SpinConfig.load(args[0], stripped=True)
out_fd = AtomicFileWrite.AtomicFileWrite(args[1], 'w', ident=ident)
print >> out_fd.fd, "// AUTO-GENERATED BY make_tech_mods.py"
out = {}

# table of procedurally-generated mod templates
MOD_TYPES = [{
    'name': '_health',
    'ui_name': ' Health',
    'stat': 'max_hp',
    'method': '*=(1+strength)',
    'strength': [0.01, 0.02, 0.03, 0.04, 0.05],
    'ui_description': 'Optimize defensive systems to withstand more damage',
    'ui_congrats': 'Defensive systems upgraded for more toughness'
}, {
# Fragment: continues an option-parsing if/elif chain begun outside this
# view, then rewrites AI-base wave files: forcing unit levels into
# [min-level, max-level] and spreading out large waves.
elif key == '--min-level':
    level_range[0] = int(val)
elif key == '--max-level':
    level_range[1] = int(val)
elif key == '--spread':
    spread = True

for filename in args[1:]:
    base = SpinConfig.load(filename, stripped=True)
    for wavenum in xrange(len(base['units'])):
        wave = base['units'][wavenum]
        count = 0
        for key, val in wave.iteritems():
            # only unit entries count; skip metadata keys
            if key not in gamedata['units']:
                continue
            if type(val) is not dict:
                # bare integer = unit quantity
                count += val
                continue
            count += val.get('qty', 1)
            if level_range[0] > 0 and level_range[1] > 0:
                # pick a uniformly random level within [min, max]
                val['force_level'] = int(level_range[0] + int((level_range[1] - level_range[0] + 1) * random.random()))
        # spread out waves containing large numbers of units
        if spread and count >= 7:
            wave['spread'] = 15
        print filename, "wave", wavenum, "units:", count
    atom = AtomicFileWrite.AtomicFileWrite(filename, 'w')
    atom.fd.write(SpinJSON.dumps(base, pretty=True)[1:-1] + '\n')  # note: get rid of surrounding {}
    atom.complete()
# Use of this source code is governed by an MIT-style license that can be # found in the LICENSE file. # this script makes various procedurally-generated inventory items import SpinConfig import SpinJSON import AtomicFileWrite import sys, re, traceback, os, getopt if __name__ == '__main__': opts, args = getopt.gnu_getopt(sys.argv[1:], '', []) ident = str(os.getpid()) gamedata = {'resources': SpinConfig.load(args[0])} items_out = AtomicFileWrite.AtomicFileWrite(args[1], 'w', ident=ident) items_fd = items_out.fd spells_out = AtomicFileWrite.AtomicFileWrite(args[2], 'w', ident=ident) spells_fd = spells_out.fd for fd in items_fd, spells_fd: print >> fd, "// AUTO-GENERATED BY make_items_auto.py" out = {'items': [], 'spells': []} # create all iron/water items for resource, resdata in gamedata['resources'].iteritems(): RESOURCE = resource.upper() # names seen by player ui_resource = resdata['ui_name_lower']
def _sync_write(self, filename, buf):
    """Atomically replace `filename` with the contents of `buf` (synchronous)."""
    writer = AtomicFileWrite.AtomicFileWrite(filename, 'w')
    writer.fd.write(buf)
    writer.complete()
# Fragment: continues a `try:` begun outside this view. Reads the input
# (stdin for '-'), either computes flattened dependencies or parses the
# input, then writes the result to stdout or an atomic output file.
    in_basename = in_filename
    in_file = sys.stdin if in_filename == '-' else open(in_basename)
    if get_deps:
        out = format_deps_flat(get_deps_as or in_filename,
                               get_deps_from(in_file, game_id, prefix=in_dirname),
                               base_path=save_dir)
    else:
        out = parse_input(in_file, in_filename, game_id, build_info)
    # return to the original working directory before writing output
    os.chdir(save_dir)
    if out_filename == '-':
        print out,
        if not get_deps:
            print
    else:
        atom = AtomicFileWrite.AtomicFileWrite(out_filename, 'w', ident=ident)
        atom.fd.write(out)
        if not get_deps:
            atom.fd.write('\n')
        atom.complete()
except Exception as e:
    # report the error, restore the working directory, and fail the build
    sys.stderr.write(str(e) + '\n')
    #sys.stderr.write(traceback.format_exc())
    os.chdir(save_dir)
    sys.exit(1)
def outstream_open(filename):
    """Open an atomic, gzip-compressed output stream for `filename`.

    Returns a (AtomicFileWrite, FastGzipFile.WriterProcess) pair: write
    through the process and complete the atom when finished.
    """
    atom = AtomicFileWrite.AtomicFileWrite(filename, 'w')
    gzip_proc = FastGzipFile.WriterProcess(atom.fd)
    return atom, gzip_proc