def get_nation_endos(nation):
    """Fetch endorsement data for *nation* from the NationStates API.

    Queries the endorsements, wa, name and censusscore-65 (influence)
    shards in a single request.

    Returns a dict:
      name            -- the nation id as passed in
      Name            -- the nation's display name from the API
      endos           -- number of endorsements (int)
      endorsers       -- list of endorsing nation ids (empty list if none)
      influence_score -- census scale 65 score (int)
    """
    xmlnat = api.request({'nation': nation,
                          'q': ('endorsements', 'wa', 'name', 'censusscore-65')})
    endos = xmlnat.find('ENDORSEMENTS').text
    name = xmlnat.find('NAME').text
    spdr = int(xmlnat.find('CENSUSSCORE').text)
    # ENDORSEMENTS is a comma-separated list; the element text may be
    # None or empty when the nation has no endorsements.
    endocount = endos.count(',') + 1 if endos else 0
    # BUGFIX: the original returned an empty tuple () when there were no
    # endorsers but a list otherwise; always return a list so callers see
    # a consistent type (both are falsy/iterable, so this is compatible).
    endorsers = endos.split(',') if endos else []
    return {'name': nation,
            'Name': name,
            'endos': endocount,
            'endorsers': endorsers,
            'influence_score': spdr}
def _loop(user, audience, sinceid=None, period=2.0, no_reset=False):
    """Poll the NS happenings API forever, feeding each batch to wave().

    user     -- identity used to build the API user-agent (via _user_agent)
    audience -- passed through to wave() with each batch of events
    sinceid  -- optional event id to resume from; when None, bootstrap
                from the newest 200 happenings
    period   -- minimum seconds between polls once we are caught up
    no_reset -- when True, never reset the sinceid window after a long
                run of empty polls (presumably for replay use; the
                original accepted but never used this flag -- confirm)

    Raises RuntimeError if the bootstrap request returns no events.
    """
    _user_agent(user)
    consecutive_empty = 0
    if sinceid is None:
        # Bootstrap: fetch the newest batch to establish the id window.
        xml = api.request({'q': 'happenings', 'limit': '200'})
        last = time.time()
        lastevent = xml.find("HAPPENINGS").find("EVENT")
        if lastevent is None:
            # BUGFIX: the original did `raise "..."`; raising a string is
            # itself a TypeError on Python >= 2.6.  Raise a real exception.
            raise RuntimeError("No happenings available -- NS is down?")
        sinceid_s, beforeid_s = eventrange_s(lastevent)
        events = xml.find("HAPPENINGS").findall("EVENT")
        wave(events, audience)
    else:
        sinceid_s, beforeid_s = str(sinceid), str(int(sinceid) + 201)
        # BUGFIX: the original never assigned `events` or `last` on this
        # path, so the first loop iteration raised NameError.  An empty
        # batch forces the normal rate-limit sleep before the first poll.
        events = []
        last = time.time()
    while True:
        if len(events) < 200:
            # A short batch means we are caught up -- honour the poll period.
            tosleep = max(period - (time.time() - last), 0)
            logger.debug("sleeping %fs...", tosleep)
            time.sleep(tosleep)
        last = time.time()
        xml = api.request({
            'q': 'happenings',
            'sinceid': sinceid_s,
            'beforeid': beforeid_s,
            'limit': '200',
        }, retries=10)
        happenings = xml.find("HAPPENINGS")
        lastevent = happenings.find("EVENT")
        events = happenings.findall("EVENT")
        if lastevent is not None:
            # Advance the id window past the newest event and dispatch.
            sinceid_s, beforeid_s = eventrange_s(lastevent)
            wave(events, audience)
            consecutive_empty = 0
        else:
            consecutive_empty += 1
            # After ~90 empty polls assume our id window went stale and
            # start over from the beginning -- unless the caller opted out.
            # (Wiring up no_reset fixes the original's unused parameter.)
            if consecutive_empty > 90 and not no_reset:
                logger.warning("resetting sinceid!")
                sinceid_s = "0"
                beforeid_s = ""
# --- Tail of an enclosing lock-wrapper method (its `def` is above this
# chunk; presumably VerboseLock.release -- confirm).  Releases the wrapped
# lock and logs who released it; `caller` comes from the unseen method body.
        self.lock.release()
        ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f %Z")
        print "{0} - released {3} for {1} (line {2})".format(ts, caller.function, caller.lineno, self.name)

# Global db lock: the logging VerboseLock variant when --verbose is set,
# otherwise a plain threading.Lock.
if args.verbose:
    dblock = VerboseLock('dblock')
else:
    dblock = threading.Lock()

regions = dict()  # region name -> set of resident nation ids
rlocks = dict()   # region name -> per-region lock
rcache = dict()   # per-region cache, populated elsewhere

# Seed every watched region with its current resident list from the API
# (NATIONS is a colon-separated list).  The two branches differ only in
# which lock class they instantiate.
if args.verbose:
    for name in region_names:
        regions[name] = set(api.request({'q':'nations','region':name}).find('NATIONS').text.split(':'))
        rlocks[name] = VerboseLock("region."+name)
else:
    for name in region_names:
        regions[name] = set(api.request({'q':'nations','region':name}).find('NATIONS').text.split(':'))
        rlocks[name] = threading.Lock()

from calendar import timegm

def save():
    # Persist the in-memory db to zombies.json.  The deep copy is taken
    # under dblock so the snapshot is internally consistent.
    with dblock:
        db_copy = copy.deepcopy(db)
        # struct_time values are not JSON-serialisable; convert each "ts"
        # field to UTC epoch seconds before dumping.
        for nat in db_copy:
            if "ts" in db_copy[nat]:
                ts = timegm(db_copy[nat]["ts"])
                db_copy[nat]["ts"] = ts
        # NOTE(review): the file handle from open() is never explicitly
        # closed; relies on CPython refcounting.
        json.dump(db_copy,open("zombies.json","w"))
# Global db lock: the logging VerboseLock variant when --verbose is set,
# otherwise a plain threading.Lock.
if args.verbose:
    dblock = VerboseLock('dblock')
else:
    dblock = threading.Lock()

regions = dict()  # region name -> set of resident nation ids
rlocks = dict()   # region name -> per-region lock
rcache = dict()   # per-region cache, populated elsewhere

# Seed every watched region with its current resident list from the API
# (NATIONS is a colon-separated list).  The two branches differ only in
# which lock class they instantiate.
if args.verbose:
    for name in region_names:
        regions[name] = set( api.request({ 'q': 'nations', 'region': name }).find('NATIONS').text.split(':'))
        rlocks[name] = VerboseLock("region." + name)
else:
    for name in region_names:
        regions[name] = set( api.request({ 'q': 'nations', 'region': name }).find('NATIONS').text.split(':'))
        rlocks[name] = threading.Lock()

from calendar import timegm

# NOTE(review): this chunk is truncated here -- save()'s body continues
# beyond the visible source.
def save():
def init_nation(nat):
    """Fetch *nat*'s name/population shards and build its db record.

    The response's HTTP Date header is turned into a struct_time and
    handed to _init_nation so the record carries the server-side fetch
    time.
    """
    response = api.request({'nation': nat, 'q': ['name', 'population']})
    fetched_at = struct_time(response.headers.getdate('Date'))
    return _init_nation(response, fetched_at)
# --- CLI setup (tail): final option, then parse. ---
parser.add_argument('-R', '--rows', default=25, help='number of rows to collect (default = collect top 25 for each column)')
args = parser.parse_args()

# Identify this script to the NS API, as its terms of use require.
api.user_agent = "Trawler Python Region Scan (operated by {})".format(args.user)

def get_nation_endos(nation):
    """Fetch endorsement data for *nation*: display name, endorsement
    count, endorser list (comma-separated in ENDORSEMENTS, possibly
    empty/None) and the censusscore-65 influence score."""
    xmlnat = api.request({'nation':nation,'q':('endorsements','wa','name','censusscore-65')})
    endos = xmlnat.find('ENDORSEMENTS').text
    name = xmlnat.find('NAME').text
    spdr = int(xmlnat.find('CENSUSSCORE').text)
    endocount = endos.count(',')+1 if endos else 0
    return {'name':nation,'Name':name,'endos':endocount,'endorsers':endos.split(',') if endos else (),'influence_score':spdr}

# Resident list for the target region (NATIONS is colon-separated).
xmlreg = api.request({'region':id_str(args.region),'q':'nations'})
residents = xmlreg.find('NATIONS').text.split(':')

# When not scanning everyone, also pull the full WA member list
# (comma-separated) so residents can be filtered to WA members.
if not args.all:
    resident_set = set(residents)
    xmlwa = api.request({'wa':'1','q':'members'})
    all_wa_nations = xmlwa.find('MEMBERS').text.split(',')
    wa_nation_set=set(all_wa_nations)

# Optional externally-hosted list of influential nations, one per line.
if args.influential_url:
    influential_nation_names = map( str.strip, urllib2.urlopen(args.influential_url).readlines() )

scanned_nations = []
endorser_counts = {}

# NOTE(review): resident_set is only assigned above when NOT args.all,
# yet it is read here when args.all -- likely NameError; confirm against
# the full file.  Chunk is truncated at the dangling `else:`.
if args.all:
    to_scan = resident_set
else: