def get_game_statuses(self, sql_dao):
    urls = self.build_urls()
    game_statuses = []
    for day, url in urls.items():
        print('Checking {}: {}...'.format(day, url))
        try:
            request = Request(url, headers={'User-Agent': "Magic Browser"})
            connection = urlopen(request)
            soup = BeautifulSoup(connection, 'html.parser')
            game_status_button = soup.find(
                'div', attrs={'class': 'product-notes'})
            is_game_sold_out = bool(
                game_status_button
                and self.SOLD_OUT in game_status_button.text)
            game_status = GameStatus(day, url, is_game_sold_out, sql_dao)
            game_statuses.append(game_status)
            game_status.set_prior_game_availability()
            # If this game never existed, let's insert it.
            if game_status.was_game_sold_out is None:
                game_status.insert_game()
        except Exception as e:
            debug("Exception getting game status: {}".format(e))
            game_status = GameStatus(day, url, None, sql_dao)
            game_statuses.append(game_status)
    print("All Hockey Games from SQL: ")
    print(sql_dao.get_hockey_games())
    return game_statuses

def teamspeak_set_name(ts):
    name = settings["general"]["bot_name"]
    # If the bot already has this name, don't do anything
    if ts.whoami()[0]["client_nickname"] == name:
        return
    try:
        ts.clientupdate(client_nickname=name)
    except ts3.query.TS3QueryError as error:
        if error.resp.error["id"] == str(USERNAME_TAKEN_ERROR_ID):
            debug("Could not claim name `" + name + "` because someone else took it",
                  urgency=10, fatal=False)
            for client in Client.clients.values():
                if client.client_nickname == name:
                    debug("Clid " + str(client.clid) + ", Cldbid " + str(client.cldbid)
                          + ", steamid64 " + client.client_description
                          + " was the culprit, kicked!", urgency=10, fatal=False)
                    msg = "Please change your name"
                    ts.clientkick(
                        clid=client.clid,
                        reasonid=ts3.definitions.ReasonIdentifier.KICK_SERVER,
                        reasonmsg=msg)
                    ts.clientupdate(client_nickname=name)
                    return
        else:
            raise error

def move_channel(self, section_id, underneath_id, ts, db):
    if self.pid == section_id:
        debug("Attempting to move channel " + str(self.cid) + " to section "
              + str(section_id) + " under " + str(underneath_id)
              + " while already in that section.", urgency=10)
        return

    # Check if the name is available
    name = self.channel_name
    used_names_dest = {channel.channel_name
                       for channel in Channel.channels.values()
                       if channel.pid == section_id}
    if name in used_names_dest:
        used_names = {channel.channel_name
                      for channel in Channel.channels.values()
                      if channel.pid in {section_id, self.pid}}
        append_number = 2
        base_name = name
        while name in used_names:
            name = base_name + " " + str(append_number)
            append_number += 1
        self.edit_channel({"channel_name": name}, ts, db)

    # Execute the move
    ts.channelmove(cid=self.cid, cpid=section_id, order=underneath_id)

    # Update internal values to match again
    below_finder = {channel for channel in Channel.channels.values()
                    if channel.channel_order == self.cid}
    for below in below_finder:
        below.channel_order = self.channel_order
        below.action_executed("edited_var_channel_order", ts, db)
    self.pid = section_id
    self.channel_order = underneath_id
    self.action_executed("edited_var_pid", ts, db)
    self.action_executed("edited_var_channel_order", ts, db)
    for client in self.clients:
        Client.clients[client].load_info(ts)

def snrazel(snriter, year, doy, rxloc):
    """A generator that yields tuples of prn, el, az, sod, snr from an
    iterator that gives prn, sec of day, and SNR.

    Requires lat, lon in degrees, alt in meters.
    """
    satpos = SatPositions()
    satpos.rxadd(0, *rxloc)
    gts0 = int(gpstotsecyeardoy(year, doy))
    for prn, sod, snr in snriter:
        time = gts0 + int(sod)
        if time >= satpos.endgts:
            ntime = GPSepoch + np.timedelta64(time, 's')
            debug('Updating satellite locations;', ntime, ' > ', satpos.endtime)
            satpos.update(ntime, 4)
        idx = time - satpos.start
        try:
            az0, el0 = satpos.rxazel[0][idx, satpos.prndict[prn], :]
            az1, el1 = satpos.rxazel[0][idx + 1, satpos.prndict[prn], :]
        except KeyError:
            continue
        # Linearly interpolate azimuth and elevation to the fractional second
        frac = sod % 1
        az = (1 - frac) * az0 + frac * az1
        el = (1 - frac) * el0 + frac * el1
        yield prn, el, az, sod, snr

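# A minimal usage sketch for snrazel, under assumptions: the receiver location
# and SNR tuples below are illustrative values, not from the source; any
# iterable of (prn, sec-of-day, SNR) tuples works.
def snrazel_example():
    rxloc = (64.5, -165.4, 12.0)  # lat (deg), lon (deg), alt (m) -- assumed
    snr_tuples = [(12, 3600.0, 42.0), (12, 3601.5, 42.5)]  # hypothetical data
    for prn, el, az, sod, snr in snrazel(iter(snr_tuples), 2017, 73, rxloc):
        # e.g. apply a 10-degree elevation mask before further processing
        if el > 10:
            print(prn, el, az, sod, snr)
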
def getHtmlContent(respInfo):
    htmlContent = ''
    try:
        respHtml = respInfo.read()
        if (("Content-Encoding" in respInfo.headers)
                and (respInfo.headers['Content-Encoding'] == "gzip")):
            htmlContent = zlib.decompress(respHtml, 16 + zlib.MAX_WBITS)
        else:
            htmlContent = respHtml
    except BaseException, e:
        debug(logger, traceback.format_exc())
    return htmlContent

def edit_channel(self, changes, ts, db):
    try:
        ts.channeledit(cid=self.cid, **changes)
    except Exception as e:
        debug("Error editing channel " + str(self.cid) + " to " + str(changes),
              urgency=10)
        raise e
    for key in changes:
        setattr(self, key, changes[key])
        self.action_executed("edited_var_" + key, ts, db)

def main():
    with ts3.query.TS3Connection(settings["teamspeak"]["host"]) as ts:
        try:
            teamspeak_login(ts)
        except Exception as e:
            debug("TeamSpeak login failed:", traceback.format_exc(),
                  urgency=20, fatal=True, error=e)
        try:
            db = database_login()
        except Exception as e:
            debug("Database login failed:", traceback.format_exc(),
                  urgency=20, fatal=True, error=e)
        try:
            Channel.init(ts)
        except Exception as e:
            debug("Channel initialization failed:", traceback.format_exc(),
                  urgency=20, fatal=True, error=e)
        try:
            Client.init(ts)
        except Exception as e:
            debug("Client initialization failed:", traceback.format_exc(),
                  urgency=20, fatal=True, error=e)
        try:
            teamspeak_set_name(ts)
        except Exception as e:
            debug("Bot name setting failed:", traceback.format_exc(),
                  urgency=10, fatal=False, error=e)

        # Find an appropriate topological module order
        order_modules(loaded_modules)
        execute_modules_function("init", ts, db)
        execute_modules_function("execute", ts, db)
        execute_modules_function("finalize", ts, db)
        finalize_main(ts, db)

def load_modules(modules):
    for module in modules:
        if module not in Settings.loaded_modules:
            try:
                loaded_module = __import__("modules." + module,
                                           fromlist=["modules"])
                if hasattr(loaded_module, "required_settings"):
                    Settings.load_settings(loaded_module.required_settings)
                Settings.loaded_modules[module] = loaded_module
                if hasattr(loaded_module, "required_modules"):
                    Settings.load_modules(loaded_module.required_modules)
            except Exception as e:
                debug("Exception while loading module " + module + ":", repr(e),
                      urgency=20, fatal=True, error=e)

def teamspeak_login(ts):
    try:
        ts.login(client_login_name=settings["teamspeak"]["username"],
                 client_login_password=settings["teamspeak"]["password"])
    except ts3.query.TS3QueryError as err:
        debug("TeamSpeak login failed:", repr(err.resp.error["msg"]),
              urgency=20, fatal=True, error=err)
        exit(1)
    ts.use(sid=settings["teamspeak"]["sid"])

def move(self, cid, ts, db):
    if not self.confirm_online(ts):
        return
    if self.cid == cid:
        debug("Tried moving client " + str(self.cldbid) + " to channel "
              + str(cid) + " but already in that channel")
        return
    ts.clientmove(cid=cid, clid=self.clid)
    Channel.channels[self.cid].clients.remove(self.cldbid)
    Channel.channels[self.cid].action_executed("clients_changed", ts, db)
    self.load_info(ts)
    Channel.channels[cid].clients.add(self.cldbid)
    Channel.channels[cid].action_executed("clients_changed", ts, db)

def init(ts, db):
    whitelist_enabled = Settings.settings["purge_ranks"]["whitelist_enabled"]
    blacklist_enabled = Settings.settings["purge_ranks"]["blacklist_enabled"]
    if whitelist_enabled and blacklist_enabled:
        # Both whitelist and blacklist enabled. Makes no sense
        debug("Both whitelist and blacklist are enabled for purging ranks. "
              "Removing as many ranks as I can.", urgency=10)
    if whitelist_enabled or blacklist_enabled:
        Client.subscribe("do_purge_ranks")
        # Initially mark all clients as not rank-purged
        for client in Client.clients.values():
            client.purge_ranks = False

def load_settings(required_settings):
    path = os.path.dirname(os.path.realpath(__file__))
    for required_setting in required_settings:
        if required_setting not in Settings.loaded_settings:
            try:
                with open(path + "/conf/" + required_setting + ".json") as settings_file:
                    data = json.load(settings_file)
                    for key in data:
                        if key in Settings.settings and isinstance(data[key], (dict, list)):
                            Settings.settings[key].update(data[key])
                        else:
                            Settings.settings[key] = data[key]
                Settings.loaded_settings.add(required_setting)
            except Exception as e:
                debug("Exception while loading settings " + required_setting + ":",
                      repr(e), urgency=20, fatal=True, error=e)

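# A minimal sketch of the merge semantics above (hypothetical keys, not from
# any real conf file): when a key already exists and the incoming value is a
# dict (or a list of key/value pairs), it is merged into the existing value
# via dict.update() instead of replacing it wholesale.
def _settings_merge_example():
    current = {"teamspeak": {"host": "localhost"}}
    incoming = {"teamspeak": {"sid": 1}, "general": {"bot_name": "Bot"}}
    for key in incoming:
        if key in current and isinstance(incoming[key], (dict, list)):
            current[key].update(incoming[key])
        else:
            current[key] = incoming[key]
    assert current == {"teamspeak": {"host": "localhost", "sid": 1},
                       "general": {"bot_name": "Bot"}}
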
def confirm_online(self, ts):
    if self.online:
        try:
            ts.clientgetuidfromclid(clid=self.clid)
        except ts3.query.TS3QueryError as e:
            if e.resp.error["id"] != str(TS_INVALID_CLID_ERROR_ID):
                raise e
            else:
                debug("Checked client with clid " + str(self.clid)
                      + " which is not online.", urgency=1)
                self.online = False
                clients_copy = Client.clients.copy()
                clients_copy.pop(self.cldbid, None)
                Client.clients = clients_copy
    return self.online

def execute_modules_function(function_name, ts, db, reverse=False):
    order = modules_ordered
    if reverse:
        # reversed() returns an iterator with no .copy(); materialize it
        order = list(reversed(order))
    # Iterate over a copy, since unload_module may mutate the ordering
    for module_name in order.copy():
        if module_name not in loaded_modules:
            continue
        module = loaded_modules[module_name]
        if hasattr(module, function_name):
            function = getattr(module, function_name)
            try:
                function(ts, db)
            except Exception as e:
                unload_module(module_name)
                debug("An exception occurred during execution of `"
                      + function_name + "` on module `" + module_name + "`:",
                      traceback.format_exc(), urgency=10, fatal=False, error=e)

def read_record(strm):
    readsync(strm)
    idbytes, recid = read_ubnxi(strm)
    lenbytes, msglen = read_ubnxi(strm)
    if msglen > 128:
        # We have no messages this long; this is probably corruption.
        # Instead of reading further, raise a ValueError so the enclosing
        # loop can try for the next sync byte.
        raise ValueError('Message too long (id {}, length {})'.format(
            recid, msglen))
    # FIXME: KLUDGE: currently length is wrong in the stream for SNR records
    if recid == 192:
        corlen = round(msglen / 3) * 3 + 1  # should always be 1 mod 3
        if corlen != msglen:
            debug('Correcting SNR message length from', msglen, 'to', corlen)
            msglen = corlen
    msg = strm.read(msglen)
    tries = 0
    while len(msg) < msglen:
        tries += 1
        if tries > 5:
            break
        time.sleep(.1)
        msg += strm.read(msglen - len(msg))
    if len(msg) != msglen:
        raise ValueError(
            'Could not read full message (received {} bytes out of {})'.format(
                len(msg), msglen))
    verify(strm, idbytes + lenbytes + msg)
    # When verification fails, we could try advancing to the next sync byte
    # within the already-read message, and attempt to read a record there.
    # (Currently, we start trying after the end of all data previously read.)
    if recid not in RECS:
        info('Record ID', recid, 'unknown')
        return recid, msg
    return recid, RECS[recid](msg)

def stackpop(fin: IO, stack: list) -> Any:
    popped = '[Empty]'
    if len(stack) > 0:
        popped = stack.pop()
    debug(fin, stack)
    return popped

def stackpush(fin: IO, stack: list, item: Any) -> None:
    stack.append(item)
    debug(fin, stack)

def stackpopN(fin: IO, stack: list, N: int) -> None:
    debug(fin, 'stackpopN')
    # N > 0 (rather than N != 0) guards against looping forever on negative N
    while N > 0:
        N -= 1
        stackpop(fin, stack)

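# A quick usage sketch for the stack helpers above. `fin` is whatever handle
# the source's debug() expects for position tracing (the open input file in
# strip_cpp); here it is simply passed through.
def _stack_example(fin):
    stack = []
    stackpush(fin, stack, 'a')
    stackpush(fin, stack, 'b')
    assert stackpop(fin, stack) == 'b'
    stackpopN(fin, stack, 2)  # pops 'a'; the extra pop on an empty stack just logs
    assert stackpop(fin, stack) == '[Empty]'
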
def plotupdate(fname=None, handover=None, oldstate=None, write88=False):
    """Follow stream and update web plot periodically.

    With no arguments, follow the current file and attempt handover at UTC
    midnight. With handover=False, stop when the file is exhausted. If fname
    is given, follow that file, updating plots until it's exhausted; if
    handover=True, then attempt to switch to the current file (this only
    makes sense if fname is yesterday's data.)
    """
    yesterday = False
    if oldstate:
        recgen, fid = oldstate
    elif fname is None and handover is not False:
        yesterday = np.datetime64('now') - np.timedelta64(1, 'D')
        try:
            fid = open(current_binex(yesterday), 'rb')
        except FileNotFoundError:
            yesterday = False
            fid = open(current_binex(), 'rb')
    elif fname is None:
        fid = open(current_binex(), 'rb')
    else:
        fid = open(fname, 'rb')
    ofile = os.path.abspath(fid.name)
    olens = defaultdict(int)
    rxfids = {}
    oldtic = rectic = dattic = np.datetime64('2000-01-01', 'ms')
    attempt = 0
    if oldstate is None:
        recgen = reader(fid)
    try:
        for SNRs, HK in recgen:
            if yesterday:
                if write88:
                    write_snr88(SNRs, rxfids, olens, ofile)
                    for rx in list(rxfids):
                        rxfids[rx].close()
                        del rxfids[rx]
                olens = defaultdict(int, {rx: len(SNRs[rx]) for rx in SNRs},
                                    hk=len(HK))
                info("Done prepopulating. Switching to today's file.")
                fid.close()
                fid = open(current_binex(), 'rb')
                ofile = os.path.abspath(current_binex())
                recgen.send(fid)
                yesterday = False
                continue
            nlens = defaultdict(int, {rx: len(SNRs[rx]) for rx in SNRs},
                                hk=len(HK))
            tic = np.datetime64('now')
            if nlens == olens:
                attempt += 1
                if attempt > 6:
                    if handover or (fname is None and handover is not False
                                    and after_midnight()):
                        if ofile != os.path.abspath(current_binex()):
                            info('No new records at', tic, '. Attempting handover.')
                            fid.close()
                            guard_and_time('midnight plotting', midnightplots,
                                           tic, SNRs, HK)
                            fid = open(current_binex(), 'rb')
                            ofile = os.path.abspath(current_binex())
                            recgen.send(fid)
                            for rx in list(rxfids):
                                rxfids[rx].close()
                                del rxfids[rx]
                            attempt = 0
                            continue
                    info('No new records at', tic, '. Reporting.')
                    report_failure(rectic, dattic)
                    time.sleep(config.PLOT_IVAL / np.timedelta64(1, 's'))
                info('No new records at', tic, '. Sleeping', attempt * 5)
                time.sleep(attempt * 5)
                continue
            attempt = 0
            rectic = tic
            if SNRs:
                dattic = max(s[-1]['time'] for s in SNRs.values())
            if HK.size and HK[-1]['time'] > dattic:
                dattic = HK[-1]['time']
            debug('{:2} new records {} at'.format(
                      sum(nlens.values()) - sum(olens.values()),
                      [nlens[rx] - olens[rx] for rx in nlens]),
                  tic, 'timestamped', dattic)
            if write88:
                write_snr88(SNRs, rxfids, olens, ofile)
            olens = nlens
            if tic - oldtic > config.PLOT_IVAL:
                guard_and_time('plotting', makeplots, tic, SNRs, HK, endtime=tic)
                oldtic = tic
            else:
                time.sleep(2)
    except KeyboardInterrupt:
        return recgen, fid

def strip_cpp(in_filepath: str, out_filepath: str,
              single_line_comments: bool = True,
              multiline_comments: bool = True,
              strings: bool = True,
              ppd_includes: bool = True,
              ppd_defines: bool = True,
              skip_newline: bool = False,
              qt_macros=True) -> None:
    ''' Function to remove certain tokens from the cpp file '''
    QT_Macros = ['Q_OBJECT', 'Q_ENUM']
    clear_file(debug_logs)
    clear_file(debug_read1_file)
    with open(out_filepath, 'w+') as fout:
        with open(in_filepath) as fin:
            while True:
                c = read1(fin)
                if not c:
                    break
                # possible comment ahead
                elif c == '/':
                    debug(fin, 'possible comment ahead')
                    c = peek1(fin)
                    # single line comment
                    if c == '/':
                        read1(fin)
                        debug(fin, 'single line comment starts')
                        if not single_line_comments:
                            write(fout, '//')
                        while c and c != '\n':
                            c = read1(fin)
                            if c == '\n' and not skip_newline:
                                write(fout, '\n')
                            elif not single_line_comments:
                                write(fout, c)
                        debug(fin, 'single line comment ends')
                    # multi-line comment
                    elif c == '*':
                        read1(fin)
                        debug(fin, 'multiline comment starts')
                        if not multiline_comments:
                            write(fout, '/*')
                        while True:
                            c = read1(fin)
                            if c == '*':
                                c = peek1(fin)
                                if c == '/':  # closing */
                                    read1(fin)
                                    if not multiline_comments:
                                        write(fout, '*/')
                                    debug(fin, 'multiline comment exited due to */')
                                    # if there is a newline right after the
                                    # multiline comment, ignore it
                                    if skip_newline:
                                        c = peek1(fin)
                                        if c == '\n':
                                            skip1(fin)
                                    break
                                elif not multiline_comments:
                                    # lone '*'; the peeked char is handled on
                                    # the next iteration
                                    write(fout, '*')
                            elif c == '\n':
                                if not skip_newline:
                                    write(fout, c)
                            elif not c:
                                debug(fin, 'multiline comment exited due to EOF')
                                break
                            elif not multiline_comments:
                                write(fout, c)
                    # false alarm
                    else:
                        debug(fin, 'false alarm')
                        # write only the '/'; the peeked char was not consumed
                        # and is handled normally on the next iteration
                        write(fout, '/')
                # string
                elif c == '"':
                    debug(fin, 'string starts')
                    if not strings:
                        write(fout, '"')
                    debug(fin, 'entering infinite loop')
                    while True:
                        c = read1(fin)
                        if not strings:
                            write(fout, c)
                        if c == '\\':
                            c = read1(fin)
                            if not strings:
                                write(fout, c)
                        elif c == '"':
                            debug(fin, 'exiting infinite loop')
                            debug(fin, 'string ends')
                            break
                        elif not c:
                            debug(fin, 'exiting infinite loop')
                            debug(fin, 'EOF')
                            break
                # preprocessor directives
                elif c == '#':
                    debug(fin, 'possible preprocessor directive ahead')
                    whitespaces = skipwhitespaces(fin)
                    word = extract_word(fin)
                    # include directive
                    if word == 'include':
                        debug(fin, 'include directive')
                        if not ppd_includes:
                            write(fout, '#' + whitespaces + 'include')
                        debug(fin, 'entering infinite loop')
                        while True:
                            c = read1(fin)
                            if c == '\\':
                                c = read1(fin)
                                if not ppd_includes:
                                    write(fout, '\\' + c)
                            elif c == '\n':
                                if not skip_newline:
                                    write(fout, '\n')
                                debug(fin, 'exiting infinite loop')
                                debug(fin, 'newline')
                                break
                            elif not ppd_includes and c:
                                write(fout, c)
                            elif not c:
                                debug(fin, 'exiting infinite loop')
                                debug(fin, 'EOF')
                                break
                    # define directive
                    elif word == 'define':
                        debug(fin, 'define directive')
                        if not ppd_defines:
                            write(fout, '#' + whitespaces + 'define')
                        debug(fin, 'entering infinite loop')
                        while True:
                            c = read1(fin)
                            if c == '\\':
                                c = read1(fin)
                                if not ppd_defines:
                                    write(fout, '\\' + c)
                            elif c == '\n':
                                if not skip_newline:
                                    write(fout, '\n')
                                debug(fin, 'exiting infinite loop')
                                debug(fin, 'newline')
                                break
                            elif not ppd_defines and c:
                                write(fout, c)
                            elif not c:
                                debug(fin, 'exiting infinite loop')
                                debug(fin, 'EOF')
                                break
                    # false alarm
                    else:
                        debug(fin, 'false alarm -- resetting position')
                        write(fout, '#' + whitespaces + word)
                # possible qt enum
                elif isalpha(c):
                    word = c + extract_word(fin)
                    if word in QT_Macros:
                        if not qt_macros:
                            write(fout, word)
                    else:
                        write(fout, word)
                # meets no specified category
                else:
                    write(fout, c)

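# A usage sketch for strip_cpp (file names are hypothetical). Note the flag
# semantics: True means "strip this token class", so strings=False below
# keeps string literals while comments and #include/#define lines are removed.
def _strip_cpp_example():
    strip_cpp('widget.cpp', 'widget_stripped.cpp',
              single_line_comments=True, multiline_comments=True,
              strings=False, ppd_includes=True, ppd_defines=True)
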
    map = json.loads(html)
    return map['errno'] == '0' and map['errmsg'] == 'OK'


if __name__ == '__main__':
    try:
        send_mail(['*****@*****.**'], 'didi catcher starts', 'didi catcher starts')
    except Exception, e:
        pass
    log_path = os.path.dirname(os.path.realpath(__file__))
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    logger = getPyLogger('didi', 'debug',
                         os.path.join(log_path, os.path.basename(__file__) + '.log'),
                         'd', 1, 99999)
    debug(logger, 'start to work...')
    while True:
        try:
            # Load the latest parameters from Jianguoyun (Nutstore)
            loadTimeConfig()
            debug(logger, 'FLOW: get my orders')
            orders = getOrders_432()
            debug(logger, 'FLOW: GOT =================' + str(len(orders))
                  + ' ==================orders')
            for order in orders:
                departure_time = order["trip_info"]['text_setup_time']
                # route_id = order['route_id']
                order_id = order["order_info"]['order_id']
                from_name = order["trip_info"]['from_name']
                from_address = order["trip_info"]['from_address']
                to_name = order["trip_info"]['to_name']

def reader(fid, preSNRs=None, preHK=None):
    """A generator that yields all the available records from fid whenever
    the stream ends. After resuming, the next values are the arrays enlarged
    by subsequently received records (the entire arrays are returned each
    time, not just the new records).
    """
    SNRs = defaultdict(growSNR)
    if preSNRs:
        for rx, SNR in preSNRs.items():
            SNRs[rx] = growSNR(SNR)
    HK = growHK(preHK)
    satpos = SatPositions()
    if preHK is not None:
        rx_locations(preHK, satpos)
    numtot, numempty, numearly, numnoloc = [defaultdict(int) for _ in range(4)]
    thisweek = gpsweekgps(np.datetime64('now'))
    while True:
        try:
            rid, vals = read_record(fid)
        except EOFError:
            sliced = ({rxid: SNR.sliced() for rxid, SNR in SNRs.items()},
                      HK.sliced())
            fid = (yield sliced) or fid
            continue
        except ValueError as e:
            info(e)
            continue
        if rid == 192:
            rxid, weeksow, snrs = vals
            if not snrs or weeksow[0] < 1000 or weeksow[0] > thisweek + 1:
                numempty[rxid] += (not snrs)
                numearly[rxid] += (weeksow[0] < 1000)
                numtot[rxid] += 1
                continue
            time = weeksow_to_np(*weeksow)
            if numempty[rxid] or numearly[rxid]:
                info("Skipped {:2} records ({:2} empty, {:2} early) at {:%H:%M:%S}."
                     .format(numtot[rxid], numempty[rxid], numearly[rxid],
                             time.tolist()))
                numtot[rxid] = numempty[rxid] = numearly[rxid] = 0
            if rxid not in satpos.rxlocs:
                numnoloc[rxid] += 1
                continue
            if numnoloc[rxid]:
                info("Skipped", numnoloc[rxid], "records before receiver",
                     rxid, "location was known.")
                numnoloc[rxid] = 0
            if time > satpos.endtime:
                debug('Updating satellite locations;', time, ' > ', satpos.endtime)
                satpos.update(time, 4)
            addrecords(SNRs[rxid], time, snrs, satpos, rxid)
        elif rid == 193:
            rxid = vals[0]
            vals = rxid, weeksow_to_np(*vals[1]), *vals[2:]
            HK.append(vals)
            if rxid not in satpos.rxlocs:
                if vals[1] > np.datetime64('now') + np.timedelta64(1, 'h'):
                    info('Rx', rxid, ': Not using location from future time',
                         vals[1])
                    continue
                lon, lat, alt = vals[3:6]
                if lon == 0 or lat == 0:
                    info('Rx', rxid, ': Not using location with 0')
                    continue
                lon /= 1e7
                if lon > 0 and vals[1] < np.datetime64('2017-03-14'):
                    # All our data before this date is in the western hemisphere,
                    # but some was reported with the wrong sign; fix it.
                    lon *= -1
                    info('Forcing longitude to western hemisphere, to correct '
                         'bad data before 2017-03-14')
                lat /= 1e7
                if not (-90 <= lat <= 90 and -180 <= lon <= 360):
                    info('Not using bad location {}°E, {}°N'.format(lon, lat))
                    continue
                if alt:
                    alt /= 1000
                else:
                    info('Obtaining terrain altitude from Google and Unavco.')
                    alt = get_ellipsoid_ht(lat, lon)
                info("Receiver", rxid, "reported at", lon, "°E, ", lat, "°N, ",
                     alt, " m.")
                satpos.rxadd(rxid, lat, lon, alt)
        else:
            info('Unknown record {}:'.format(rid), vals.hex())

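# A minimal driver sketch for reader() (file names are assumptions). Each
# next()/send() resumes parsing; when the stream runs dry the generator yields
# the full accumulated (SNRs, HK) arrays and pauses. Sending a new file object
# switches streams, which is how plotupdate() hands over at midnight.
def _reader_example():
    fid = open('today.bnx', 'rb')  # hypothetical BINEX file
    recgen = reader(fid)
    SNRs, HK = next(recgen)  # parse until EOF; get everything read so far
    SNRs, HK = next(recgen)  # pick up any bytes appended since the last call
    new_fid = open('tomorrow.bnx', 'rb')  # hypothetical follow-on file
    SNRs, HK = recgen.send(new_fid)  # continue parsing from the new stream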