def get_stack():
    """Return nation ids sorted by total population (ascending).

    Snapshots the db under the lock so callers never see a half-updated
    entry; population is the sum of zombies, survivors and dead.
    """
    with dblock:
        snapshot = copy.deepcopy(db.values())
        # (population, id) pairs sort numerically first, name second.
        ranked = [(e['zombies'] + e['survivors'] + e['dead'], id_str(e['name']))
                  for e in snapshot]
        ranked.sort()
        return [nat for _pop, nat in ranked]
def get_stack():
    """Return nation ids ordered by total population, smallest first."""
    with dblock:
        entries = copy.deepcopy(db.values())
        # Pair each nation's total head-count with its canonical id and
        # let sorted() order the pairs; keep only the ids.
        keyed = sorted(
            (entry['zombies'] + entry['survivors'] + entry['dead'],
             id_str(entry['name']))
            for entry in entries)
        return map(lambda pair: pair[1], keyed)
def initialize_nations(nations): trawler.user_agent = api.user_agent res = trawler.request('GET', '/pages/nations.xml.gz', headers=True) if res.result != 200: raise Exception("Error {0}!".format(xf.result)) _ts = res.info().getdate('Last-Modified') dump_ts = struct_time(_ts) print "nations.xml date: {0} GMT".format(strftime('%Y-%m-%d %H:%M:%S', dump_ts)) dump_size = int(res.headers['content-length']) xf = gzip.GzipFile(fileobj=res) context = ET.iterparse(xf, events=('start','end')) ic = iter(context) db = dict() if progressbar: pb = progressbar.ProgressBar(maxval=dump_size).start() else: print "printing a . for every 100 nations processed (of about 100,000)" i = 0 try: event, root = ic.next() for event, elem in ic: if elem.tag == 'NATION' and event == 'end': if progressbar: pb.update(res.tell()) else: if i%100 == 0: print ".", i += 1 cs_name = elem.find('NAME').text nat = id_str(cs_name) if nat in nations: db[nat] = _init_nation(elem, dump_ts, cs_name) root.clear() except PE as e: lineno, column = e.position xf.seek(0) line = next(IT.islice(xf, lineno)) caret = '{:=>{}}'.format("^", column) print '' print line print caret print "i = {0}".format(i) raise except: print '' print "i = {0}".format(i) raise print '' # handle new nations new_nations = nations - frozenset(db.keys()) for nat in new_nations: db[nat] = init_nation(nat) return db
def default(self, *args, **params):
    """CherryPy catch-all: serve the region API for /<region> paths.

    400 for anything but exactly one path segment with no query
    parameters; 404 (empty JSON list) for unknown regions.
    """
    cherrypy.response.headers['Content-Type'] = JSON
    if len(args) != 1 or len(params) != 0:
        cherrypy.response.status = 400
        return ''
    reg = id_str(args[0])
    if reg in region_names:
        return self.region_api(reg)
    cherrypy.response.status = 404
    return '[]'
def default(self, *args, **params):
    """Route /<region> requests to region_api after validating the path.

    Responds 400 unless the URL has exactly one segment and no query
    string, and 404 with an empty JSON array for unknown regions.
    """
    cherrypy.response.headers['Content-Type'] = JSON
    malformed = (len(args) != 1) or (len(params) != 0)
    if malformed:
        cherrypy.response.status = 400
        return ''
    region = id_str(args[0])
    if region not in region_names:
        cherrypy.response.status = 404
        return '[]'
    return self.region_api(region)
def scan(mm, beg, end, idx):
    """Index each record delimited by `beg` markers in buffer mm.

    For every occurrence of `beg`, records (start, stop) byte offsets in
    `idx` keyed by the id_str of the record's <NAME> element. The final
    record is closed by `end` instead of the next `beg`.

    NOTE(review): assumes mm contains at least one `beg` marker and each
    record has a <NAME> element — confirm against callers.
    """
    start = mm.find(beg)
    while True:
        stop = mm.find(beg, start + len(beg))
        last = (stop == -1)
        if last:
            # No further record: close this one at the end marker.
            stop = mm.find(end, start + len(beg))
        name_lo = mm.find("<NAME>", start) + len("<NAME>")
        name_hi = mm.find("</NAME>", name_lo)
        idx[id_str(mm[name_lo:name_hi])] = (start, stop)
        if last:
            break
        start = stop
def api_result(key, val, idx, mm, q):
    """Serve one API record (nation/region) out of the memory-mapped dump.

    :param key: record kind, used to select the shard name table
    :param val: requested record name (any capitalization/spacing)
    :param idx: mapping id -> (start, stop) byte offsets into mm
    :param mm: buffer holding the raw XML dump
    :param q: iterable of requested shard names, or None for the raw record
    :returns: XML string (whole record or requested shards), or an HTML
        404 page if the record is unknown
    """
    name = id_str(val)
    if name in idx:
        cherrypy.response.headers['Content-Type'] = XML
        i, j = idx[name]
        if q is None:  # fixed: identity test for None (was `q == None`)
            return mm[i:j]
        else:
            src = ET.fromstring(mm[i:j])
            root = ET.Element(src.tag)
            for shard in q:
                if shard in shards[key]:
                    child = src.find(shards[key][shard])
                    # Guard: a recognized shard can still be missing from
                    # this particular record; appending None would break
                    # ET serialization, so skip absent shards.
                    if child is not None:
                        root.append(child)
            return ET.tostring(root)
    else:
        cherrypy.response.status = 404
        return """
<!DOCTYPE html>
<h1 style="color:red">Unknown {0}: "{1}".</h1>
<p style="font-size:small">Error: 404 Not Found
<p><a href="/pages/api.html">The NationStates API Documentation</a>
""".format(key, val)
parser.add_argument('-R', '--rows', default=25, help='number of rows to collect (default = collect top 25 for each column)') args = parser.parse_args() api.user_agent = "Trawler Python Region Scan (operated by {})".format(args.user) def get_nation_endos(nation): xmlnat = api.request({'nation':nation,'q':('endorsements','wa','name','censusscore-65')}) endos = xmlnat.find('ENDORSEMENTS').text name = xmlnat.find('NAME').text spdr = int(xmlnat.find('CENSUSSCORE').text) endocount = endos.count(',')+1 if endos else 0 return {'name':nation,'Name':name,'endos':endocount,'endorsers':endos.split(',') if endos else (),'influence_score':spdr} xmlreg = api.request({'region':id_str(args.region),'q':'nations'}) residents = xmlreg.find('NATIONS').text.split(':') if not args.all: resident_set = set(residents) xmlwa = api.request({'wa':'1','q':'members'}) all_wa_nations = xmlwa.find('MEMBERS').text.split(',') wa_nation_set=set(all_wa_nations) if args.influential_url: influential_nation_names = map( str.strip, urllib2.urlopen(args.influential_url).readlines() ) scanned_nations = [] endorser_counts = {} if args.all: to_scan = resident_set else: