def dig_info(flight):
    """Extract one direct-flight record from an Air China result row.

    Returns [airline name, flight no, departure airport, 'HH:MM',
    [dep_hour, dep_min], arrival airport, 'HH:MM', [arr_hour, arr_min],
    lowest price], or None when the row cannot be parsed.

    NOTE(review): a second, identically-named copy of this function exists
    elsewhere in this file — consider consolidating.
    """
    try:
        # A row with more than one 'colAirports' cell is a connecting
        # flight; only direct flights are collected.
        if len(flight.find_elements_by_class_name('colAirports')) != 1:
            util.log_info(u'AIRCHINA: 非直达航班')  # not a direct flight
            return None
        from_airport, to_airport = flight.\
            find_elements_by_class_name('colAirports')[0].\
            text.strip().split('-')
        flight_no = flight.find_element_by_class_name('colFlight').text.\
            strip()
        # Time cells start with 'HH:MM'; keep only the first 5 characters.
        from_time = flight.find_element_by_class_name('colDepart').text.\
            strip()[0:5]
        to_time = flight.find_element_by_class_name('colArrive').text.\
            strip()[0:5]
        # Prices are rendered with thousands separators, e.g. '1,234'.
        prices = [int(x.text.strip().replace(',', ''))
                  for x in flight.find_elements_by_class_name('colPrice')]
        if not prices:
            util.log_error(u'AIRCHINA: 抓取价格失败')  # price scrape failed
            return None
        price = min(prices)
        from_time_pair = [int(x) for x in from_time.split(':')]
        to_time_pair = [int(x) for x in to_time.split(':')]
        # Overnight arrival: normalise by pushing the arrival hour past 24.
        if to_time_pair[0] < from_time_pair[0]:
            to_time_pair[0] += 24
        return [AIRLINE_NAME[Website.AIRCHINA], flight_no, from_airport,
                from_time, from_time_pair, to_airport, to_time,
                to_time_pair, price]
    except Exception as e:
        util.log_error('AIRCHINA: ' + str(e))
        return None
def export_excels(self):
    """Convert every excel file matched by xlsconfig.CONVENTION_TABLE.

    Each table entry is (pattern, converter_name[, new_name[, sheet_index]]).
    The output name is the input without its extension, or the pattern
    substitution when new_name is given.  Conversion failures abort unless
    xlsconfig.FORCE_RUN is set.
    """
    file_2_converter = {}
    # Input files already claimed by some pattern, used to detect a file
    # that matches more than one converter.
    matched_files = set()
    for value in xlsconfig.CONVENTION_TABLE:
        pattern = value[0]
        converter_name = value[1]
        new_name = value[2] if len(value) > 2 else None
        sheet_index = value[3] if len(value) > 3 else 0
        compiled_pattern = re.compile(pattern)
        for infile in self.excel_files:
            if not compiled_pattern.match(infile):
                continue
            # FIX: the duplicate check used to test `infile` against a dict
            # keyed by *output* names, so it could never fire.
            if infile in matched_files:
                util.log_error("文件'%s'匹配到了多个转换器", infile)
            matched_files.add(infile)
            if new_name is None:
                outfile = os.path.splitext(infile)[0]
            else:
                outfile = compiled_pattern.sub(new_name, infile)
            if self.export_excel_to_python(infile, outfile, converter_name,
                                           sheet_index):
                pass
            elif not xlsconfig.FORCE_RUN:
                return
            file_2_converter[outfile] = value[1]
    return
def dig_info(flight):
    """Parse one Ctrip flight row.

    Returns [airline, flight_no, from_airport, 'HH:MM', [h, m],
    to_airport, 'HH:MM', [h, m], price], or None on any failure.
    """
    try:
        # The 'logo' cell text starts with "<airline> <flight number> ...".
        info = flight.find_element_by_class_name('logo').text.split()
        airline = info[0].strip() if len(info) >= 1 else ''
        flight_no = info[1].strip() if len(info) >= 2 else ''
        # Departure side: div[0] holds the time, div[1] the airport name.
        divs = flight.find_element_by_class_name('right').\
            find_elements_by_tag_name('div')
        from_time = divs[0].find_element_by_class_name('time').text
        from_time_pair = [int(x) for x in from_time.split(':')]
        from_airport = divs[1].text
        # Arrival side mirrors the departure layout.
        divs = flight.find_element_by_class_name('left').\
            find_elements_by_tag_name('div')
        to_time = divs[0].find_element_by_class_name('time').text
        to_time_pair = [int(x) for x in to_time.split(':')]
        to_airport = divs[1].text
        # Overnight arrival: push the arrival hour past 24.
        if to_time_pair[0] < from_time_pair[0]:
            to_time_pair[0] += 24
        # [1:] drops the leading character (presumably the currency sign),
        # then the leading run of digits is kept as the price.
        price = flight.find_element_by_class_name('price').text[1:]
        tmp_price = ''
        for ch in price:
            if ch.isdigit():
                tmp_price += ch
            else:
                break
        # An empty digit run raises ValueError here, which is caught below.
        price = int(tmp_price)
        return [airline, flight_no, from_airport, from_time, from_time_pair,
                to_airport, to_time, to_time_pair, price]
    except Exception as e:
        util.log_error('CTRIP: ' + str(e))
        return None
def dig_info(flight):
    """Extract one direct-flight record from an Air China result row.

    Returns [airline name, flight no, departure airport, 'HH:MM',
    [dep_hour, dep_min], arrival airport, 'HH:MM', [arr_hour, arr_min],
    lowest price], or None when the row cannot be parsed.

    NOTE(review): this is a duplicate of another dig_info in this file —
    consider keeping only one copy.
    """
    try:
        # More than one 'colAirports' cell means a connecting flight.
        if len(flight.find_elements_by_class_name('colAirports')) != 1:
            util.log_info(u'AIRCHINA: 非直达航班')  # not a direct flight
            return None
        from_airport, to_airport = flight.\
            find_elements_by_class_name('colAirports')[0].\
            text.strip().split('-')
        flight_no = flight.find_element_by_class_name('colFlight').text.\
            strip()
        # Time cells start with 'HH:MM'.
        from_time = flight.find_element_by_class_name('colDepart').text.\
            strip()[0:5]
        to_time = flight.find_element_by_class_name('colArrive').text.\
            strip()[0:5]
        # Strip thousands separators before converting prices.
        prices = [
            int(x.text.strip().replace(',', ''))
            for x in flight.find_elements_by_class_name('colPrice')
        ]
        if not prices:
            util.log_error(u'AIRCHINA: 抓取价格失败')  # price scrape failed
            return None
        price = min(prices)
        from_time_pair = [int(x) for x in from_time.split(':')]
        to_time_pair = [int(x) for x in to_time.split(':')]
        # Overnight arrival: push the arrival hour past 24.
        if to_time_pair[0] < from_time_pair[0]:
            to_time_pair[0] += 24
        return [
            AIRLINE_NAME[Website.AIRCHINA], flight_no, from_airport,
            from_time, from_time_pair, to_airport, to_time, to_time_pair,
            price
        ]
    except Exception as e:
        util.log_error('AIRCHINA: ' + str(e))
        return None
def create_post(post, msg, post_type, msg_type):
    """Post the message chunks in `msg` as comments/replies, append the
    bot signature, then self-edit each new comment to inject the
    delete-request PM link.

    post      -- praw submission or comment being answered
    msg       -- list of message chunks (several for long monster tables)
    post_type -- 'SUBMISSIONS' (comment on a submission) or 'COMMENTS' (reply)
    msg_type  -- label recorded in logs and stats
    """
    try:
        for i, m in enumerate(msg):
            # Multi-chunk messages carry a "(k of n)" marker where the
            # ___MTABLE___ placeholder sits; single chunks drop it.
            if len(msg) > 1:
                m = m.replace('___MTABLE___', "(%s of %s)" % (i + 1, len(msg)))
            else:
                m = m.replace('___MTABLE___', "")
            if post_type == 'SUBMISSIONS':
                if debug:
                    print m + signature_intro + signature
                else:
                    c = post.add_comment(m + signature_intro + signature)
                    log_msg("Made a %s comment in %s" % (msg_type, post.short_link))
                    update_db(log_coll, stat_coll, msg_type, post.short_link, '')
            elif post_type == 'COMMENTS':
                if debug:
                    print m + signature_intro + signature
                else:
                    c = post.reply(m + signature_intro + signature)
                    log_msg("Made a %s reply to %s" % (msg_type, post.permalink))
                    update_db(log_coll, stat_coll, msg_type, post.permalink, '')
            # NOTE(review): when debug is true `c` is never assigned, so the
            # next line raises NameError (silently caught by the outer
            # except) — confirm this section should be skipped in debug mode.
            sig_temp = signature_add.replace('___CID___', str(c.id))
            sig_temp = sig_temp.replace('___PID___', str(c.link_id)[3:])
            if not debug:
                sleep(SLEEP)
                m_tmp = c.body.replace('^^Processing...', sig_temp)
                # NOTE(review): replacing '&#' with itself is a no-op —
                # possibly intended to un-escape '&amp;#'; confirm.
                m_tmp = m_tmp.replace('&#', '&#')
                r.get_info(thing_id='t1_' + str(c.id)).edit(m_tmp)
            sleep(SLEEP_LONG)
    except Exception as e:
        log_error(e)
def dig_info(flight):
    """Build a flight record from one CH JSON flight dict.

    Returns [airline name, flight no, departure airport, 'HH:MM',
    [h, m], arrival airport, 'HH:MM', [h, m], lowest cabin price],
    or None when the entry cannot be parsed.
    """
    try:
        number = flight['No'].strip()
        # City name with the station code appended.
        from_airport = flight['Departure'] + flight['DepartureStation']
        to_airport = flight['Arrival'] + flight['ArrivalStation']
        # Timestamps look like 'YYYY-MM-DDTHH:MM:SS'; slice out 'HH:MM'.
        from_time = flight['DepartureTime'][11:16]
        to_time = flight['ArrivalTime'][11:16]
        # Collect the first cabin's price from every cabin group that has
        # one, keeping only integer prices.
        prices = []
        for cabin_info in flight['CabinInfos']:
            if not cabin_info.get('Cabins'):
                continue
            cabin_price = cabin_info['Cabins'][0].get('CabinPrice')
            if isinstance(cabin_price, int):
                prices.append(cabin_price)
        if not prices:
            util.log_error(u'CH: 抓取价格失败')
            return None
        from_time_pair = [int(part) for part in from_time.split(':')]
        to_time_pair = [int(part) for part in to_time.split(':')]
        if to_time_pair[0] < from_time_pair[0]:
            to_time_pair[0] += 24  # overnight arrival
        return [AIRLINE_NAME[Website.CH], number, from_airport, from_time,
                from_time_pair, to_airport, to_time, to_time_pair,
                min(prices)]
    except Exception as e:
        util.log_error('CH: ' + str(e))
        return None
def parse_header(self, rows):
    """Parse the header / field-name / type rows and build a ConverterInfo
    per column.

    Fills self.converters, self.field_2_col and self.sheet_types, and sets
    self.key_name from column 0.  Parsing stops at the first empty field
    cell; duplicate or invalidly-typed columns are logged and skipped.
    """
    header_row = [self.extract_cell_value(cell) for cell in rows[self.header_row_index]]
    field_row = [self.extract_cell_value(cell) for cell in rows[self.field_row_index]]
    type_row = [self.extract_cell_value(cell) for cell in rows[self.type_row_index]]
    for col, field in enumerate(field_row):
        if field == "":
            break  # an empty field cell terminates the header
        self.converters[col] = None
        if field in self.field_2_col:
            util.log_error("列名'%s'重复,列:%s", field, util.int_to_base26(col))
            continue
        self.field_2_col[field] = col
        header = header_row[col] or field
        type = type_row[col] or "String"
        try:
            method = FAST_CONVERTER[type.lower()]
        except (KeyError, AttributeError):
            # FIX: was a bare `except:`; narrowed so unrelated errors
            # (e.g. KeyboardInterrupt) are no longer swallowed.
            util.log_error("无效的类型'%s',列:%s", type, util.int_to_base26(col))
            continue
        self.converters[col] = ConverterInfo((header, field, method, True))
        self.sheet_types[field] = (col, field, header, type)
    self.key_name = self.converters[0].field
    return
def set_boot_power(self):
    """Report battery voltage to the server, then apply the device's
    power-on/off schedule over the serial port if configured to do so.

    Both halves are best-effort: failures are logged, never raised.
    """
    try:
        # Read the battery status string from the serial port.
        bat = serial_port.instance.getBat()
        util.log_info("bat", bat)
        if bat is not None:
            update_voltage_url = '%s/%s/Device/UpdateVoltage?id=%s' % (
                util.util_remote_service(config.const_api_name_resouce),
                config.const_api_name_resouce, config.const_service_id)
            # Characters 3..6 carry the voltage reading
            # NOTE(review): slice bounds assumed from the code — confirm
            # against the serial protocol.
            batValue = bat[3:7]
            feedback = requests.post(
                update_voltage_url, data=batValue,
                headers={'Content-Type': 'application/json'})
    except Exception as err:
        util.log_error("get_bat", err)
    try:
        device_info = self.get_device_info()
        config.const_is_set_powerofboot = util.get_cached_version(
            config.const_is_set_powerofboot_name)
        # NOTE(review): `and` binds tighter than `or`, so this reads as
        # (cached is None AND has_power_control is not None) OR
        # (cached != str(has_power_control)) — confirm that grouping is
        # the intended refresh condition.
        if config.const_is_set_powerofboot is None and device_info[
                'has_power_control'] is not None or config.const_is_set_powerofboot != str(
                    device_info['has_power_control']):
            config.const_is_set_powerofboot = device_info[
                'has_power_control']
            util.set_cached_version(config.const_is_set_powerofboot_name,
                                    device_info['has_power_control'])
        if config.const_is_set_powerofboot == 'True':
            is_set_successfully = False
            if device_info['power_setting'] == 1:
                # switch_mode 0: manual mode; 1: daily boot/shutdown timer.
                if device_info['timer']['switch_mode'] == 0:
                    is_set_successfully = serial_port.instance.setModeM()
                if device_info['timer']['switch_mode'] == 1:
                    # Times arrive as ISO 'YYYY-MM-DDTHH:MM:SS'; the serial
                    # command wants 'HHMMSS'.
                    start = device_info['timer']['boot_time'].replace(
                        'T', ' ')
                    start_yms = time.strftime(
                        '%H%M%S', time.strptime(start, '%Y-%m-%d %H:%M:%S'))
                    end = device_info['timer']['shutdown_time'].replace(
                        'T', ' ')
                    end_yms = time.strftime(
                        '%H%M%S', time.strptime(end, '%Y-%m-%d %H:%M:%S'))
                    is_set_successfully = serial_port.instance.setDailyTIme(
                        start_yms, end_yms)
            if is_set_successfully:
                # Acknowledge to the server that the schedule was applied.
                url = '%s/%s/Device/SetupPowerControl?id=%s' % (
                    util.util_remote_service(config.const_api_name_resouce),
                    config.const_api_name_resouce, config.const_service_id)
                feedback = requests.post(
                    url, data='2',
                    headers={'Content-Type': 'application/json'})
    except Exception as err:
        util.log_error("set_boot", err)
def compare_types(self, types, filename):
    """Check that every column type recorded for this sheet also appears,
    with an equal definition, in `types`.

    Logs the first mismatch (including both file names) and returns
    False; returns True when every column agrees.
    """
    for field, own_info in self.types.iteritems():
        other_info = types.get(field)
        if not (other_info and self.if_info_equal(own_info, other_info)):
            util.log_error("'%s':'%s'与'%s':'%s'表头不一致",
                           filename, other_info, own_info, self.filename)
            return False
    return True
def post_convert_row(self, field_2_cfg, key_value, row):
    """Run the second-pass conversion for every configured field of one
    row, mutating `row` in place.

    A failing field is logged (with the row key and column header) and
    skipped; the rest of the row is still processed.
    """
    for field, cfg in field_2_cfg.iteritems():
        raw_value = row.pop(field, None)
        try:
            self.post_convert_value(cfg, raw_value, row)
        except:
            # Deliberately broad: one bad cell must not abort the row.
            traceback.print_exc()
            util.log_error("列(%s, %s)二次转换失败,value = %s",
                           str(key_value), cfg.header, tp0.to_str(raw_value))
    return
def refresh_config(self):
    '''
    Re-read the data-collector config file and apply any changes.

    Validates required fields with assertions; on the first run the whole
    config is adopted, afterwards it is merged key by key.  Assertion and
    missing-key problems are logged, never raised.
    '''
    try:
        logging.debug("reading config file from '%s'", self.config_filepath)
        # read in the config from the given path
        with open(self.config_filepath, 'r') as fin:
            # NOTE(review): yaml.load without an explicit Loader is unsafe
            # on untrusted input — consider yaml.safe_load.
            conf = yaml.load(fin)
            dc_conf = conf['data_collector']
        # Counters either live in a separate file referenced by path, or
        # are embedded at the top level of the main config.
        if 'counters' in dc_conf:
            dc_conf['counters'] = normalise_path(dc_conf['counters'])
            assert os.path.exists(os.path.dirname(dc_conf['counters']))
            with open(dc_conf['counters'], 'r') as fin:
                counters_conf = yaml.load(fin)
            dc_conf['counters'] = counters_conf['counters']
        else:
            dc_conf['counters'] = conf['counters']
        dc_conf['state'] = normalise_path(dc_conf['state'])
        assert os.path.exists(os.path.dirname(dc_conf['state']))
        # Sanity-check the required fields.
        assert dc_conf['name'] != ''
        assert dc_conf['noise_weight'] >= 0
        assert dc_conf['tally_server_info']['ip'] is not None
        assert dc_conf['tally_server_info']['port'] > 0
        assert dc_conf['event_source'] is not None
        assert dc_conf['event_source'] > 0
        for key in dc_conf['counters']:
            assert dc_conf['counters'][key]['sigma'] >= 0.0
        assert 'share_keepers' in dc_conf
        if self.config == None:
            self.config = dc_conf
            logging.info("using config = %s", str(self.config))
        else:
            # Merge key by key so unchanged entries are left untouched.
            changed = False
            for k in dc_conf:
                if k not in self.config or dc_conf[k] != self.config[k]:
                    # NOTE(review): self.config[k] here raises KeyError for
                    # a brand-new key, which is then misreported below as
                    # "missing required keys" — confirm.
                    logging.info("updated config for key {} from {} to {}".format(k, self.config[k], dc_conf[k]))
                    self.config[k] = dc_conf[k]
                    changed = True
            if not changed:
                logging.debug('no config changes found')
    except AssertionError:
        logging.warning("problem reading config file: invalid data")
        log_error()
    except KeyError:
        logging.warning("problem reading config file: missing required keys")
        log_error()
def worker(task):
    """Run one crawl task.

    `task` is a (module, period, website) triple; the module's fetch() is
    invoked for the period.  Failures are logged with the website's
    display name and a flattened traceback, followed by a short back-off.
    """
    module, period, website = task
    util.log_info('deal at %s %s %s' % (str(module), str(period), str(website)))
    try:
        module.fetch(period)
    except Exception as e:
        flat_trace = traceback.format_exc().replace('\n', ' | ')
        util.log_error(WEBSITE_NAME[website] + ' ' + str(e) + ' '
                       + str(period) + flat_trace)
        time.sleep(5)
def check_self_posts(posts):
    """Check the scores of the bot's own posts and delete any that were
    downvoted below a score of 1.
    """
    try:
        for post in posts:
            if post.score < 1:
                delete_post(post, 'SCORE', '', '')
                # NOTE(review): sleep placement reconstructed — assumed to
                # throttle only after a deletion; confirm it should not run
                # on every iteration.
                sleep(SLEEP_LONG)
    except Exception as e:
        log_error(e)
def __build_for(self, platform, variant, min_platform_version):
    """Compile this module's Swift sources for one (platform, variant)
    pair into .tmp/<name>/<variant>, and write an OutputFileMap json
    listing the compiled inputs.

    Returns the module output directory path.
    """
    # NOTE(review): `is False` works for the bool singletons but
    # `not os.path.exists(...)` is the idiomatic form.
    if os.path.exists('.tmp') is False:
        os.mkdir('.tmp')
    if os.path.exists('.tmp/{}'.format(self.name)) is False:
        os.mkdir('.tmp/{}'.format(self.name))
    module_dir = '.tmp/{}/{}'.format(self.name, variant.name)
    os.mkdir(module_dir)
    # Assemble the swiftc invocation for this sdk/arch.
    cmd = ['xcrun', '-sdk', variant.name, 'swiftc']
    if self.framework_search_paths is not None:
        cmd += ['-F./{}'.format(self.framework_search_paths)]
    cmd += self.source_paths
    cmd += self.flags
    cmd += [
        '-o', '{}/{}'.format(module_dir, self.name), '-target',
        '{}-apple-{}{}'.format(variant.arch, platform.name,
                               min_platform_version)
    ]
    print '{}-apple-{}{}'.format(variant.arch, platform.name,
                                 min_platform_version)
    cmd += ['-emit-module', '-emit-objc-header', '-emit-library']
    print ' '.join(cmd)
    # swiftc driver diagnostics arrive on stderr; capture them all.
    output = Popen(cmd, stderr=PIPE).stderr.read()
    # Digit-only lines separate the emitted json fragments; turn them
    # into commas so the whole stream parses as one json array below.
    output = re.sub(r'^\d+$', ',', output, flags=re.MULTILINE)
    if os.path.exists('OutputFileMaps') is False:
        os.mkdir('OutputFileMaps')
    if output is not None:
        # Skip the first line of the captured stream, parse the rest as a
        # json array, and keep the first input of every 'compile' job.
        if len(output.split('\n', 1)) > 1:
            try:
                output = json.loads('[{}]'.format(
                    output.split('\n', 1)[1]))
                open(
                    'OutputFileMaps/{}-OutputFileMap.json'.format(
                        self.name), 'w').write(
                            json.dumps(
                                map(
                                    lambda inner: inner['inputs'][0],
                                    filter(
                                        lambda obj: obj['name'] == 'compile'
                                        and 'inputs' in obj, output))))
            except:
                log_error(
                    'could not generate output file map for {}'.format(
                        self.name))
    return module_dir
def refresh_config(self):
    '''
    Re-read the share-keeper config file and apply any changes.

    Loads (or generates) the RSA key, derives the node name from the key
    digest, validates required fields, then adopts the config on first
    run or merges it key by key afterwards.  Problems are logged, never
    raised.
    '''
    try:
        logging.debug("reading config file from '%s'", self.config_filepath)
        # read in the config from the given path
        with open(self.config_filepath, 'r') as fin:
            # NOTE(review): yaml.load without an explicit Loader is unsafe
            # on untrusted input — consider yaml.safe_load.
            conf = yaml.load(fin)
            sk_conf = conf['share_keeper']
        # if key path is not specified, look at default path, or generate a new key
        if 'key' in sk_conf:
            sk_conf['key'] = normalise_path(sk_conf['key'])
            assert os.path.exists(sk_conf['key'])
        else:
            sk_conf['key'] = normalise_path('privcount.rsa_key.pem')
            if not os.path.exists(sk_conf['key']):
                generate_keypair(sk_conf['key'])
        # The node's name is the digest of its public key.
        sk_conf['name'] = get_public_digest(sk_conf['key'])
        sk_conf['state'] = normalise_path(sk_conf['state'])
        assert os.path.exists(os.path.dirname(sk_conf['state']))
        assert sk_conf['tally_server_info']['ip'] is not None
        assert sk_conf['tally_server_info']['port'] > 0
        if self.config == None:
            self.config = sk_conf
            logging.info("using config = %s", str(self.config))
        else:
            # Merge key by key so unchanged entries are left untouched.
            changed = False
            for k in sk_conf:
                if k not in self.config or sk_conf[k] != self.config[k]:
                    # NOTE(review): self.config[k] raises KeyError for a
                    # brand-new key, misreported below as missing keys.
                    logging.info(
                        "updated config for key {} from {} to {}".format(
                            k, self.config[k], sk_conf[k]))
                    self.config[k] = sk_conf[k]
                    changed = True
            if not changed:
                logging.debug('no config changes found')
    except AssertionError:
        logging.warning("problem reading config file: invalid data")
        log_error()
    except KeyError:
        logging.warning(
            "problem reading config file: missing required keys")
        log_error()
def __worker_thread(self):
    """Background loop: roughly once a minute, scan the built-in log
    directory and upload every log file except the one currently being
    written.  Errors are logged and the loop keeps running.
    """
    while True:
        try:
            log_dir = '%s/buildin/logs' % (config.const_client_root())
            current_log_prefix = 'service_log_' + util.current_log_file
            for root, dirs, files in os.walk(log_dir, topdown=False):
                for name in files:
                    # FIX: idiomatic `not ...` instead of `== False`.
                    # Skip the active log file; it is still growing.
                    if not name.startswith(current_log_prefix):
                        self.upload_file(name)
            # NOTE(review): sleep assumed to pace the outer scan cycle
            # (one pass per minute), not each uploaded file — confirm.
            time.sleep(60)
        except Exception as err:
            util.log_error('downloader', err)
            time.sleep(60)
def processCmd(self, cmd):
    """Send one command over the serial port and return the raw response.

    Appends a carriage return, writes the encoded command, and reads one
    response line.  Returns None when there is no serial port or on any
    error (which is logged).
    """
    try:
        if self.serialport is not None:
            # FIX: idiomatic truth test instead of `== False`.
            if not self.serialport.is_open:
                self.serialport.open()
            cmd += '\r'
            self.serialport.write(cmd.encode())
            result = self.serialport.readline()
            util.log_info("fck_result", result.decode())
            return result
        return None
    except Exception as err:
        util.log_error("fck", err)
        return None
def get_mods(sub): """ function for grabbing mod list of subreddit (only works when one subreddit is specified) """ mod_list = [] try: mods = r.get_moderators(sub).children for mod in mods: mod_list.append(str(mod)) if debug: print "mods: %s" % ', '.join(mod_list) except Exception as e: log_error(e)
def delete_post(post, type, parent_url, user):
    """Delete one of the bot's own posts.

    Logs the reason first — 'SCORE' for a downvoted post, 'PM' for a
    delete request received by private message — records it in the
    database, then deletes and sleeps to respect rate limits.
    """
    try:
        if type == 'SCORE':
            reason = "Deleting post under %s due to downvote" % parent_url
            log_msg(reason)
            update_db(log_coll, stat_coll, 'DEL_SCORE', parent_url, user)
        if type == 'PM':
            reason = ("Deleting post under %s due to PM request from %s"
                      % (parent_url, user))
            log_msg(reason)
            update_db(log_coll, stat_coll, 'DEL_PM', parent_url, user)
        post.delete()
        sleep(SLEEP)
    except Exception as e:
        log_error(e)
def post_check_sheet(self, data_module):
    """Run the optional `post_check` hook of the module's converter.

    Does nothing when the module has no converter, or the converter does
    not define `post_check`.  Hook failures are logged and swallowed.
    """
    # getattr on a None converter also yields None, covering both guards.
    check_method = getattr(data_module.converter, "post_check", None)
    if check_method is None:
        return
    try:
        check_method(data_module, self)
    except:
        traceback.print_exc()
        util.log_error("数据检查失败 '%s'", data_module.path)
    return
def post_process_sheet(self, data_module):
    """Run the optional `post_process` hook of the module's converter and
    store its result as the module's extra sheets.

    No-op when there is no converter or no hook; hook errors are logged
    and swallowed.
    """
    # getattr on a None converter also yields None, covering both guards.
    process_method = getattr(data_module.converter, "post_process", None)
    if process_method is None:
        return
    try:
        data_module.extra_sheets = process_method(data_module)
    except:
        traceback.print_exc()
        util.log_error("后处理执行失败 '%s'", data_module.path)
    return
def login(mongo_cl_users, in_json, session_life, mongo_cl_log):
    """Validate an email/password pair against the users collection.

    Returns {"status": 1, "email": ..., "sessionid": ...} on success, or
    {"status": 0, "errormsg": ...} when the account is unknown, the
    password is wrong, or the account has not been activated.
    """
    try:
        login_user = mongo_cl_users.find_one({'email': in_json["email"]})
        out_json = {}
        if login_user:
            stored_password = login_user['password'].encode('utf-8')
            submitted_password = in_json['password'].encode('utf-8')
            # bcrypt.hashpw(pw, stored_hash) reproduces stored_hash only
            # when the password matches.
            if bcrypt.hashpw(submitted_password, stored_password) == stored_password:
                # NOTE(review): this cookie is built but never emitted from
                # here — confirm the caller sets the response headers.
                c = Cookie.SimpleCookie()
                session_id = make_hash_string() + make_hash_string()
                c['sessionid'] = session_id
                c['email'] = in_json["email"]
                c['sessionid']['expires'] = session_life
                email = in_json["email"]
                msg = "Signed in as " + email
                out_json = {"status": 1, "email": email, "sessionid": session_id}
                # Correct password, but the account is still inactive.
                if login_user["status"] == 0:
                    out_json = {"status": 0, "errormsg": "Looks like your account has not been activated yet!"}
            else:
                out_json = {"status": 0, "errormsg": "Login failed! Invalid email/password combination"}
        else:
            out_json = {"status": 0, "errormsg": "Login failed! Invalid email/password combination"}
    except Exception as e:
        out_json = util.log_error(mongo_cl_log, traceback.format_exc())
    # FIX: the response dict was previously computed and then discarded;
    # return it so the caller can send it to the client.
    return out_json
async def __send_handler(self, ws, path):
    """Per-connection send loop for the websocket server.

    Lazily creates an outbound queue.Queue keyed by hash(ws), drains it
    about once per second while the socket stays open, and removes the
    queue (ending the task) once the socket closes.  Errors are logged
    and the loop keeps running.
    """
    while True:
        try:
            # Each client gets its own outbound buffer, keyed by the
            # socket's hash.
            key = hash(ws)
            if self.__outbound.get(key) is None:
                self.__outbound[key] = queue.Queue()
            if ws.open:
                # Flush everything currently queued for this client.
                while not self.__outbound[key].empty():
                    msg = self.__outbound[key].get()
                    await ws.send(msg)
                await asyncio.sleep(1)
            else:
                # Connection closed: drop the queue and finish the task.
                self.__outbound.pop(key)
                return
        except Exception as err:
            util.log_error('web_socket_server', err)
def authenticate(mongo_cl_log):
    """Authenticate the current CGI request from its session cookie.

    Reads HTTP_COOKIE from the environment.  Returns {"status": 1,
    "email": ..., "sessionid": ...} when a session id is present,
    otherwise {"status": 0, "errormsg": ...}.
    """
    out_json = {}
    try:
        if 'HTTP_COOKIE' in os.environ:
            string = os.environ.get('HTTP_COOKIE')
            c = Cookie.SimpleCookie()
            c.load(string)
            if 'sessionid' in c:
                session_id = c['sessionid'].value
                email = c['email'].value
                msg = "Signed in as " + email
                out_json = {
                    "status": 1,
                    "email": email,
                    "sessionid": session_id
                }
            else:
                out_json = {
                    "status": 0,
                    "errormsg": "session ID not in cookie"
                }
        else:
            out_json = {"status": 0, "errormsg": "No cookie found!"}
    except Exception as e:
        out_json = util.log_error(mongo_cl_log, traceback.format_exc())
    # FIX: the response dict was previously computed but never returned.
    return out_json
def save_user(mongo_cl_users, in_json, logged_email, mongo_cl_log):
    """Update the logged-in user's document with the submitted fields.

    Performs a $set update keyed by email (no upsert).  Returns
    {"taskstatus": 1} on success, or the error payload produced by
    util.log_error on failure.
    """
    out_json = {}
    try:
        result = mongo_cl_users.update_one({"email": logged_email},
                                           {'$set': in_json}, upsert=False)
        return {"taskstatus": 1}
    except Exception as e:
        out_json = util.log_error(mongo_cl_log, traceback.format_exc())
    # FIX: previously fell off the end and returned None on failure.
    return out_json
def import_bcos(in_json, logged_email): bco_domain = "https://data.glygen.org" bco_version = "v-1.0.13" file_obj = open('input1.json', 'a') file_obj.write('[') for i in xrange(1, 1000): bco_id = "DSBCO_000000"[0:12 - len(str(i))] + str(i) bco_url = "%s/%s/%s" % (bco_domain, bco_id, bco_version) try: with warnings.catch_warnings(): warnings.filterwarnings('ignore', category=InsecureRequestWarning) response = requests.get(bco_url, verify=False) if response.content.strip() != "": bco_obj = json.loads(response.content) print response.content file_obj.write(response.content) file_obj.write(', \n') except Exception as e: pdb.set_trace() util.log_error(mongo_cl_log, str(e), str(e)) bco_domain = "https://data.oncomx.org" bco_version = "v-1.0.6" for i in xrange(1, 1000): bco_id = "DSBCO_000000"[0:12 - len(str(i))] + str(i) bco_url = "%s/%s/%s" % (bco_domain, bco_id, bco_version) try: with warnings.catch_warnings(): warnings.filterwarnings('ignore', category=InsecureRequestWarning) response = requests.get(bco_url, verify=False) if response.content.strip() != "": bco_obj = json.loads(response.content) print response.content file_obj.write(response.content) file_obj.write(', \n') except Exception as e: pdb.set_trace() print "-" * 100 print e util.log_error(mongo_cl_log, str(e), str(e)) file_obj.write(']') file_obj.close() return {"result": True}
def dig_info(flight):
    """Parse one China Eastern (CEAIR) result row.

    Returns [airline, flight_no, from_airport, 'HH:MM', [h, m],
    to_airport, 'HH:MM', [h, m], price], or None on any failure.
    """
    try:
        # First line of the 'f-i' cell: "<airline>|<flight number> ...".
        part1 = flight.find_element_by_class_name('f-i').text
        elements = [x.strip() for x in part1.split('\n')]
        info = elements[0].replace('|', ' ').strip().split()
        airline = info[0].strip() if len(info) >= 1 else ''
        flight_no = info[1].strip() if len(info) >= 2 else ''
        # Each 'airport' cell holds "<time> <airport name>".
        airports = flight.find_elements_by_class_name('airport')
        from_infos = airports[0].text.split()
        to_infos = airports[1].text.split()
        from_time, from_airport = from_infos[0].strip(), from_infos[1].strip()
        to_time, to_airport = to_infos[0].strip(), to_infos[1].strip()
        from_time_pair = [int(x) for x in from_time.split(':')]
        to_time_pair = [int(x) for x in to_time.split(':')]
        # Overnight arrival: push the arrival hour past 24.
        if to_time_pair[0] < from_time_pair[0]:
            to_time_pair[0] += 24
        # Collect every '¥...' amount on the row, keeping leading digits.
        elements = [x.strip() for x in flight.text.split('\n')]
        prices = []
        for element in elements:
            if element.startswith(u'¥'):
                price = element[1:].replace(',', '')
                digits = []
                for digit in price:
                    if digit.isdigit():
                        digits.append(digit)
                    else:
                        break
                if not digits:
                    continue
                prices.append(int(''.join(digits)))
        # FIX: check for an empty list BEFORE peeking at prices[-1]; the
        # old order raised IndexError on empty results, which was then
        # misreported as a generic exception.
        if not prices:
            util.log_error(u'CEAIR: 抓取价格失败')
            return None
        # NOTE(review): trailing/minimum 160 presumably filters a fixed
        # non-fare widget on the page — confirm.
        if prices[-1] == 160:
            prices = prices[0:-1]
        if not prices:
            util.log_error(u'CEAIR: 抓取价格失败')
            return None
        price = min(prices)
        if len(prices) >= 2 and price == 160:
            price = prices[-2]
        return [airline, flight_no, from_airport, from_time, from_time_pair,
                to_airport, to_time, to_time_pair, price]
    except Exception as e:
        util.log_error('CEAIR: ' + str(e))
        return None
def search_objects(in_json):
    """Search BCO documents by id, name or contributor name and build the
    rows of the search-results table.

    The query value is matched case-insensitively as a regex against
    bco_id, provenance_domain.name and contributor names; an empty query
    matches everything.  Returns {"searchresults": rows, "taskstatus": 1}
    where the first two rows are the table header label/type lists.
    """
    try:
        query_obj = {}
        if in_json["queryvalue"] != "":
            query = str(in_json["queryvalue"])
            cond_objs = []
            cond_objs.append({"bco_id": {'$regex': query, '$options': 'i'}})
            cond_objs.append(
                {"provenance_domain.name": {
                    '$regex': query,
                    '$options': 'i'
                }})
            cond_objs.append({
                "provenance_domain.contributors.name": {
                    '$regex': query,
                    '$options': 'i'
                }
            })
            query_obj = {"$or": cond_objs}
        row = []
        obj_list = []
        # First two rows carry the table header metadata.
        obj_list.append(
            config_json["tableheaders"]["searchresults"]["labellist"])
        obj_list.append(
            config_json["tableheaders"]["searchresults"]["typelist"])
        for doc in mongo_cl_bco.find(query_obj):
            # Skip documents missing the provenance fields the table needs.
            if "provenance_domain" not in doc:
                continue
            tv_list = ["name" in doc["provenance_domain"]]
            tv_list += ["created" in doc["provenance_domain"]]
            tv_list += ["contributors" in doc["provenance_domain"]]
            if False in tv_list:
                continue
            tv_list = [len(doc["provenance_domain"]["contributors"]) > 0]
            if False in tv_list:
                continue
            doc.pop("_id")
            created_by = doc["provenance_domain"]["contributors"][0]["email"]
            bco_id = str(doc["bco_id"])
            # Show each contributor's name, falling back to the email.
            creators = []
            for o in doc["provenance_domain"]["contributors"]:
                val = o["name"] if o["name"].strip() != "" else o["email"]
                creators.append(val)
            row = [
                bco_id, doc["provenance_domain"]["name"],
                doc["provenance_domain"]["created"], ", ".join(creators)
            ]
            obj_list.append(row)
        taskstatus = 1
        out_json = {}
        out_json["searchresults"] = obj_list
        out_json["taskstatus"] = taskstatus
    except Exception as e:
        # FIX: modernised `except Exception, e` to the `as e` form used
        # elsewhere in this file.
        out_json = util.log_error(mongo_cl_log, traceback.format_exc())
    # FIX: the response was built but never returned.
    return out_json
def do_parse(self):
    """Load the workbook and parse the configured sheet.

    Parses the argument/header/default rows, then converts every data row
    until the first blank row.  Per-cell conversion failures are logged
    and the rest of the row is still processed.
    """
    import openpyxl
    self.workbook = openpyxl.load_workbook(self.filename)
    sheets = self.workbook.worksheets
    if self.sheet_index >= len(sheets):
        log_error("Excel表'%s'没有子表'%d'", self.filename, self.sheet_index)
        return
    table = sheets[self.sheet_index]
    self.worksheet = table
    rows = list(table.rows)
    self.parse_arguments(rows)
    self.parse_header(rows)
    self.parse_defaults(rows)
    # No data rows at all: nothing more to do.
    if self.data_row_index >= len(rows):
        return
    ncols = len(self.converters)
    # The remaining rows are raw data.
    for r in xrange(self.data_row_index, len(rows)):
        cells = rows[r]
        # A blank first cell marks the end of the data rows.
        first_value = cells[0].value
        if first_value == '' or first_value is None:
            break
        current_row_data = {}
        for c in xrange(ncols):
            value = self.extract_cell_value(cells[c])
            try:
                self.convert_cell(r, c, value, current_row_data)
            except:
                traceback.print_exc()
                # Report the cell in spreadsheet coordinates (1-based row,
                # base-26 column letters).
                util.log_error("单元格(%d, %s) = [%s] 数据解析失败", r + 1,
                               util.int_to_base26(c), value)
        self.add_row(current_row_data)
    return
def lipo(self, platform, *frameworks):
    """Lipo frameworks that share the same sdk/arch.

    Create a framework that combines the variants of a given platform
    (this framework plus every one passed in), writing the merged binary
    under Frameworks/<platform>/<name>.framework.

    Arguments:
        platform: Platform to build for
        frameworks: additional Framework variants to lipo together

    Returns the newly created combined Framework, or the log_error result
    when preconditions fail.
    """
    # FIX: was `len(frameworks) is 0` — identity comparison against an
    # int literal; use a plain truth test.
    if not frameworks:
        return log_error('cannot lipo: no frameworks created')
    # add tupled self
    frameworks = frameworks + (self,)
    # FIX: the old reduce(lambda a, b: a.is_created and b.is_created, ...)
    # produced a bool after the first step and crashed with AttributeError
    # for three or more frameworks; all() expresses the intent directly.
    if not all(framework.is_created for framework in frameworks):
        return log_error('cannot lipo: not all frameworks created')
    cmd = ['lipo', '-create'] + [framework.bin_path
                                 for framework in frameworks]
    # output everything to the initial fmk. this is
    # arbitrary. we are going to move everything to
    # the top level anyway
    cmd += ['-o', '{}/{}'.format(self.abs_path, self.name)]
    # call the lipo command
    call(cmd)
    # copy them to the new top level
    copytree(
        self.abs_path,
        'Frameworks/{}/{}.framework'.format(platform.name, self.name))
    created_framework = Framework(
        self.name,
        'Frameworks/{}/{}.framework/{}'.format(platform.name, self.name,
                                               self.name), False)
    created_framework.framework_path = 'Frameworks/{}/{}.framework'.format(
        platform.name, self.name)
    created_framework.is_created = True
    created_framework.abs_path = os.path.abspath(
        created_framework.framework_path)
    return created_framework
def dig_info(flight, pair):
    """Parse one China Southern (CSAIR) result row.

    `pair` indexes the (origin, destination) airports in
    AIRPORT_NAME_PARAMS.  Returns [airline, flight_no, from_airport,
    'HH:MM', [h, m], to_airport, 'HH:MM', [h, m], price], or None when
    any required piece is missing.
    """
    try:
        elements = [x.strip() for x in flight.text.split('\n')]
        flight_no, from_time, from_airport, to_time, to_airport, prices = \
            '', None, None, None, None, []
        flight_no = flight.find_element_by_class_name('sp-trip-stops').text.\
            strip()
        for element in elements:
            # A line starting with 'HH:MM' is a time; the first one seen
            # is the departure, the second the arrival.
            if len(element) >= 5 and '00:00' <= element[0:5] <= '23:59':
                if not from_time:
                    from_time = element[0:5]
                else:
                    to_time = element[0:5]
            # Airport names are matched literally against the configured
            # origin/destination of this crawl pair.
            if element == AIRPORT_NAME_PARAMS[Website.CSAIR][pair[0]]:
                from_airport = element
            if element == AIRPORT_NAME_PARAMS[Website.CSAIR][pair[1]]:
                to_airport = element
            # Price lines look like '¥ 1,234...'; keep leading digits.
            if element.startswith(u'¥ '):
                price = element[2:].replace(',', '').strip()
                digits = []
                for digit in price:
                    if digit.isdigit():
                        digits.append(digit)
                    else:
                        break
                if not digits:
                    continue
                prices.append(int(''.join(digits)))
        if not (from_time and from_airport and to_time and to_airport
                and prices):
            util.log_error(u'CSAIR: 抓取内容失败')  # scrape incomplete
            return None
        from_time_pair = [int(x) for x in from_time.split(':')]
        to_time_pair = [int(x) for x in to_time.split(':')]
        # Overnight arrival: push the arrival hour past 24.
        if to_time_pair[0] < from_time_pair[0]:
            to_time_pair[0] += 24
        price = min(prices)
        return [AIRLINE_NAME[Website.CSAIR], flight_no, from_airport,
                from_time, from_time_pair, to_airport, to_time,
                to_time_pair, price]
    except Exception as e:
        util.log_error('CSAIR: ' + str(e))
        return None
def __worker_thread(self):
    """Endless maintenance loop.

    Each round pushes the boot-power settings, then refreshes the QR
    code, product data and installed apps, sleeping 40-60 seconds
    between rounds.  Every step is best-effort: failures are logged and
    the loop continues.
    """
    while True:
        try:
            self.set_boot_power()
            util.log_info("set_boot_power", 'completed set boot power')
        except Exception as err:
            util.log_error('set boot power error', err)
        try:
            self.update_qr_code()
            util.log_info('downloader', 'completed qr code update')
            self.update_product()
            util.log_info('downloader', 'completed product update')
            self.update_apps()
            util.log_info('downloader', 'completed apps update')
            time.sleep(random.randint(40, 60))
        except Exception as err:
            util.log_error('downloader', err)
            time.sleep(random.randint(40, 60))
def register_user(mongo_cl_users, user_obj, mongo_cl_log):
    """Create a new, unactivated user account.

    Hashes the password with bcrypt before storing, and rejects emails
    that are already registered.  Returns {"taskstatus": 1} on success,
    {"taskstatus": 0, "errormsg": ...} for duplicates, or the payload
    produced by util.log_error on unexpected failure.
    """
    user_obj["status"] = 0  # new accounts start unactivated
    out_json = {}
    try:
        user_obj["password"] = bcrypt.hashpw(
            user_obj["password"].encode('utf-8'), bcrypt.gensalt())
        if mongo_cl_users.find({"email": user_obj["email"]}).count() != 0:
            out_json = {"taskstatus": 0,
                        "errormsg": "The email submitted is already registered!"}
        else:
            res = mongo_cl_users.insert_one(user_obj)
            out_json = {"taskstatus": 1}
    except Exception as e:
        out_json = util.log_error(mongo_cl_log, traceback.format_exc())
    # FIX: the result dict was previously computed but never returned.
    return out_json
def generate_code(): print "=== 生成代码类文件 ..." configure_file_path = os.path.join(xlsconfig.TEMP_PATH, "configures.py") if not os.path.exists(configure_file_path): return util.log_error("配置文件'%s'不存在", configure_file_path) configure_module = imp.load_source("temp_configures", configure_file_path) for key, cfg in configure_module.configures.iteritems(): _generate(cfg["types"], key)
def randomize(self):
    """Shuffle the positions of both players' first eight pieces with a
    Fisher-Yates pass, applying the same swap to the mirrored piece set
    (p16..p23), then persist the entity.

    Raises HttpCodeException(403) unless the game is in STATE_START.
    """
    if self.state != STATE_START:
        log_error(self, "Can only randomize in STATE_START.")
        raise HttpCodeException(403)
    for i in range(8):
        # Fisher-Yates: pick a swap partner from the remaining range.
        j = random.randint(i, 7)
        if i != j:
            # Swap the positions of pieces i and j (stored as serialized
            # states in attributes p0..p7).
            p1 = Piece(getattr(self, "p" + str(i)))
            p2 = Piece(getattr(self, "p" + str(j)))
            p1.pos, p2.pos = p2.pos, p1.pos
            setattr(self, "p" + str(i), p1.state())
            setattr(self, "p" + str(j), p2.state())
            # Mirror the identical swap for the second set (p16..p23) so
            # both sides stay in the same permutation.
            p1 = Piece(getattr(self, "p" + str(16 + i)))
            p2 = Piece(getattr(self, "p" + str(16 + j)))
            p1.pos, p2.pos = p2.pos, p1.pos
            setattr(self, "p" + str(16 + i), p1.state())
            setattr(self, "p" + str(16 + j), p2.state())
    self.put()
def dig_info(flight):
    """Build a flight record from one CH JSON flight dict.

    Returns [airline name, flight no, departure airport, 'HH:MM', [h, m],
    arrival airport, 'HH:MM', [h, m], lowest cabin price], or None when
    the entry cannot be parsed.

    NOTE(review): this is a duplicate of another dig_info in this file —
    consider keeping only one copy.
    """
    try:
        number = flight['No'].strip()
        # City name with the station code appended.
        from_airport = flight['Departure'] + flight['DepartureStation']
        # Timestamps look like 'YYYY-MM-DDTHH:MM:SS'; slice out 'HH:MM'.
        from_time = flight['DepartureTime'][11:16]
        to_airport = flight['Arrival'] + flight['ArrivalStation']
        to_time = flight['ArrivalTime'][11:16]
        # First cabin's price from every cabin group that has an integer one.
        prices = [x['Cabins'][0]['CabinPrice'] for x in flight['CabinInfos']
                  if x.get('Cabins') and
                  isinstance(x['Cabins'][0].get('CabinPrice'), int)]
        if not prices:
            util.log_error(u'CH: 抓取价格失败')  # price scrape failed
            return None
        from_time_pair = [int(x) for x in from_time.split(':')]
        to_time_pair = [int(x) for x in to_time.split(':')]
        # Overnight arrival: push the arrival hour past 24.
        if to_time_pair[0] < from_time_pair[0]:
            to_time_pair[0] += 24
        price = min(prices)
        return [AIRLINE_NAME[Website.CH], number, from_airport, from_time,
                from_time_pair, to_airport, to_time, to_time_pair, price]
    except Exception as e:
        util.log_error('CH: ' + str(e))
        return None
# Per-expedition data: id -> (area image number, duration).  The name image
# is always 'ensei_name_<id:02d>.png' and the area image
# 'ensei_area_<area:02d>.png'.  Ids 33 and 34 are intentionally absent,
# matching the original branch chain.
_ENSEI_TABLE = {
    1:  (1, datetime.timedelta(minutes=14, seconds=25)),
    2:  (1, datetime.timedelta(minutes=29, seconds=25)),
    3:  (1, datetime.timedelta(minutes=19, seconds=25)),
    4:  (1, datetime.timedelta(minutes=49, seconds=25)),
    5:  (1, datetime.timedelta(minutes=89, seconds=25)),
    6:  (1, datetime.timedelta(minutes=39, seconds=25)),
    7:  (1, datetime.timedelta(minutes=59, seconds=25)),
    8:  (1, datetime.timedelta(hours=2, minutes=59, seconds=25)),
    9:  (2, datetime.timedelta(hours=3, minutes=59, seconds=25)),
    10: (2, datetime.timedelta(hours=1, minutes=29, seconds=25)),
    11: (2, datetime.timedelta(hours=4, minutes=59, seconds=25)),
    12: (2, datetime.timedelta(hours=7, minutes=59, seconds=25)),
    13: (2, datetime.timedelta(hours=3, minutes=59, seconds=25)),
    14: (2, datetime.timedelta(hours=5, minutes=59, seconds=25)),
    15: (2, datetime.timedelta(hours=11, minutes=59, seconds=25)),
    16: (2, datetime.timedelta(hours=14, minutes=59, seconds=25)),
    17: (3, datetime.timedelta(minutes=44, seconds=25)),
    18: (3, datetime.timedelta(hours=4, minutes=59, seconds=25)),
    19: (3, datetime.timedelta(hours=5, minutes=59, seconds=25)),
    20: (3, datetime.timedelta(hours=1, minutes=59, seconds=25)),
    21: (3, datetime.timedelta(hours=2, minutes=19, seconds=25)),
    22: (3, datetime.timedelta(hours=2, minutes=59, seconds=25)),
    23: (3, datetime.timedelta(hours=3, minutes=59, seconds=25)),
    24: (3, datetime.timedelta(hours=8, minutes=19, seconds=25)),
    25: (4, datetime.timedelta(hours=39, minutes=59, seconds=25)),
    26: (4, datetime.timedelta(hours=79, minutes=59, seconds=25)),
    27: (4, datetime.timedelta(hours=19, minutes=59, seconds=25)),
    28: (4, datetime.timedelta(hours=24, minutes=59, seconds=25)),
    29: (4, datetime.timedelta(hours=23, minutes=59, seconds=25)),
    30: (4, datetime.timedelta(hours=47, minutes=59, seconds=25)),
    31: (4, datetime.timedelta(hours=1, minutes=59, seconds=25)),
    32: (4, datetime.timedelta(hours=23, minutes=59, seconds=25)),
    35: (5, datetime.timedelta(hours=6, minutes=59, seconds=25)),
    36: (5, datetime.timedelta(hours=8, minutes=59, seconds=25)),
    37: (5, datetime.timedelta(hours=2, minutes=44, seconds=25)),
    38: (5, datetime.timedelta(hours=2, minutes=54, seconds=25)),
    39: (5, datetime.timedelta(hours=29, minutes=59, seconds=25)),
    40: (5, datetime.timedelta(hours=6, minutes=49, seconds=25)),
}


def ensei_factory(ensei_id):
    """Build the Ensei descriptor for the given expedition id.

    Data-table replacement for the original 38-branch if/elif chain.
    Unknown ids (including the unsupported 33 and 34) log an error and
    fall back to expedition 2, exactly as before.
    """
    entry = _ENSEI_TABLE.get(ensei_id)
    if entry is None:
        log_error("%s is an invalid/unsupported expedition! Defaulting to expedition 2!" % ensei_id)
        return ensei_factory(2)
    area, duration = entry
    return Ensei(ensei_id, 'ensei_name_%02d.png' % ensei_id,
                 'ensei_area_%02d.png' % area, duration)
print "mods: %s" % ', '.join(mod_list) except Exception as e: log_error(e) r = praw.Reddit(user_agent = user_agent) # log in LOGGING_IN = True while LOGGING_IN: try: r.login(USERNAME, PASSWORD) log_success("Login Successful") LOGGING_IN = False sleep(SLEEP) except praw.errors.InvalidUserPass: log_error("Incorrect login information") exit() except Exception as e: log_error(e) sleep(SLEEP_LONG) # get subreddit try: sub = r.get_subreddit(SUBREDDIT) except Exception as e: log_error(e) exit() try: mdb = pymongo.MongoClient(MONGO_URL, MONGO_PORT) db = mdb[MONGO_DB]
def check_pm(msgs): """ function for checking bot's private messages; delete if parent commentor (or subreddit moderator) has requested post deletion; stops bot if moderator has requested a halt """ for msg in msgs: # check for delete request m = re.search(ur'^\+delete\s(.+?)$', msg.body.lower()) if m: id = "t1_%s" % m.group(1) c = r.get_info(thing_id = id) if c is not None: c_parent = r.get_info(thing_id = c.parent_id) if c_parent.author is None: delete_post(c, 'PM', '', msg.author.name) else: if msg.author.name == c_parent.author.name or msg.author.name in mod_list or msg.author.name == 'mrmin123': if "Please request this post to be deleted to un-ignore." in c.body: log_msg("Un-ignoring posts under %s by %s's request" % (c_parent.permalink, msg.author.name)) update_db(log_coll, stat_coll, 'UNIGNORE', c_parent.permalink, msg.author.name) try: ignored_submissions.remove(str(c.parent_id)[3:]) except Exception as e: log_error(e) delete_post(c, 'PM', c_parent.permalink, msg.author.name) else: log_warning("Incorrect delete request from %s for %s" % (msg.author.name, c.permalink)) update_db(log_coll, stat_coll, 'DEL_BAD', c.permalink, msg.author.name) # check for ignore request m = re.search(ur'^\+ignore\s(.+?)$', msg.body.lower()) if m: id = "t3_%s" % m.group(1) c = r.get_info(thing_id = id) if c.author is not None and (msg.author.name == c.author.name or msg.author.name in mod_list or msg.author.name == 'mrmin123'): if m.group(1) not in ignored_submissions: ignored_submissions.append(m.group(1)) log_msg("Ignoring posts under %s by %s's request" % (c.short_link, msg.author.name)) update_db(log_coll, stat_coll, 'IGNORE_PM', c.short_link, msg.author.name) temp_msg = "%s\nI am ignoring any new posts in this thread by OP/moderator's request! 
Please request this post to be deleted to un-ignore.\n" % signature_intro create_post(c, [temp_msg], 'SUBMISSIONS', 'IGNORE_POST') else: if type(c) is praw.objects.Submission: tempLink = c.short_link elif type(c) is praw.objects.Comment: tempLink = c.permalink else: tempLink = m.group(1) log_warning("Incorrect ignore request from %s for %s" % (msg.author.name, tempLink)) update_db(log_coll, stat_coll, 'IGNORE_BAD', tempLink, msg.author.name) # check for revisit m = re.search(ur'^\+visit\s(.+?)$', msg.body.lower()) if m: temp_type = 'SUBMISSIONS' id = "t3_%s" % m.group(1) c = r.get_info(thing_id = id) if c is None: temp_type = 'COMMENTS' id = id = "t1_%s" % m.group(1) c = r.get_info(thing_id = id) if c is not None and c.subreddit.display_name.lower() == SUBREDDIT.lower() and (msg.author.name == c.author.name or msg.author.name in mod_list or msg.author.name == 'mrmin123'): log_msg("Revisiting %s under %s's request" % (c.permalink, msg.author.name)) update_db(log_coll, stat_coll, 'REVISIT', c.permalink, msg.author.name) check_posts([c], temp_type, True) else: if type(c) is praw.objects.Submission: tempLink = c.short_link elif type(c) is praw.objects.Comment: tempLink = c.permalink else: tempLink = m.group(1) log_msg("Incorrect revisit request for %s by %s" % (tempLink, msg.author.name)) update_db(log_coll, stat_coll, 'REVISIT_BAD', tempLink, msg.author.name) # check for moderator halt request if msg.author.name in mod_list or msg.author.name == 'mrmin123': m = re.search(ur'^\+halt$', msg.body.lower()) if m: msg.mark_as_read() log_error("Bot halt requested by %s" % msg.author.name) update_db(log_coll, stat_coll, 'HALT', '', msg.author.name) exit() msg.mark_as_read() sleep(SLEEP) sleep(SLEEP_LONG)