def get_gg_apk_info():
    """Return newest enabled GG APK rows keyed by game_id.

    Returns:
        dict: {game_id: {'pkg_name':, 'game_name':, 'ver_code':, 'ver_name':}}
        Empty when the query fails (the error is logged).
    """
    gg_apk_info = {}
    conn = connections("GGCursor")
    cursor = conn.cursor()
    sql = """
        SELECT pkg_name, game_name, ver_code, ver_name, game_id
        FROM iplay_game_pkg_info
        WHERE source=4
        AND is_max_version=1
        AND enabled=1
    """
    # BUG FIX: default to an empty tuple — the original left rows as None on
    # query failure, so the iteration below raised TypeError.
    rows = ()
    try:
        cursor.execute(sql)
        rows = cursor.fetchall()
    except Exception as e:
        logger.debug("get_gg_apk_info error: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
    for row in rows:
        pkg_name = row[0]
        game_name = row[1]
        ver_code = int(row[2])
        ver_name = row[3]
        game_id = row[4]
        gg_apk_info[game_id] = {
            'pkg_name': pkg_name,
            'game_name': game_name,
            'ver_code': ver_code,
            'ver_name': ver_name
        }
    return gg_apk_info
def get_common_pkg_name():
    """Return pkgName values that appear more than once among parsed rows."""
    conn = connections('Cursor')
    cursor = conn.cursor()
    sql = """SELECT pkgName, count(pkgName) as cnt, is_parse, is_merge
        FROM crawler_info
        GROUP BY pkgName
        HAVING cnt>1
        AND is_parse=1
        # AND is_merge=0
        AND pkgName != ""
        AND pkgName IS NOT NULL
    """
    names = []
    try:
        cursor.execute(sql)
        # First column of every record is the duplicated package name.
        names = [record[0] for record in cursor.fetchall()]
    except Exception as e:
        logger.debug("get_common_pkg_name: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
    return names
def get_appmarket_apk_info():
    """Return enabled app-market APK rows keyed by apk_id.

    Returns:
        dict: {apk_id: {'pkg_name':, 'ver_code':, 'ver_name':}}
        Empty when the query fails (the error is logged).
    """
    appmarket_apk_info = {}
    conn = connections("AppMarketCursor")
    cursor = conn.cursor()
    sql = """
        SELECT pkg_name, ver_code, ver_name, id
        FROM apk_info
        WHERE enabled=1
    """
    # BUG FIX: default to an empty tuple — the original left rows as None on
    # query failure, so the iteration below raised TypeError.
    rows = ()
    try:
        cursor.execute(sql)
        rows = cursor.fetchall()
    except Exception as e:
        logger.debug("get_appmarket_apk_info error: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
    for row in rows:
        pkg_name = row[0]
        ver_code = int(row[1])
        ver_name = row[2]
        apk_id = row[3]
        appmarket_apk_info[apk_id] = {
            'pkg_name': pkg_name,
            'ver_code': ver_code,
            'ver_name': ver_name
        }
    return appmarket_apk_info
def send_email(from_='*****@*****.**', to='', subject='', message='', server='', port=25, username='', password='', use_ssl=False, starttls=False):
    """Send a plain-text email via SMTP.

    No-op when `server` is empty. Sending failures are logged, not raised.

    Args:
        from_/to/subject/message: standard mail fields (single recipient).
        server/port: SMTP host and port.
        username/password: credentials; login is skipped when either is ''.
        use_ssl: connect with SMTP_SSL (ignored when starttls is set).
        starttls: upgrade a plain connection with STARTTLS.
    """
    logger.debug('Emailing %s' % to)
    if not server:
        return
    msg = MIMEText(message)
    msg['Subject'] = subject
    msg['From'] = from_
    msg['To'] = to
    if (use_ssl and not starttls):
        s = smtplib.SMTP_SSL(host=server, port=port)
    else:
        s = smtplib.SMTP(server, port)
    # BUG FIX: quit the session in a finally block — the original leaked the
    # SMTP connection whenever starttls/login raised before reaching quit().
    try:
        if (starttls):
            s.ehlo()
            s.starttls()
            s.ehlo()
        if (not username == '' and not password == ''):
            s.login(username, password)
        try:
            s.sendmail(from_, [to], msg.as_string())
            logger.info('Email sent to %s' % (to))
        except Exception as e:
            logger.error('Email sending produced exception %r' % e)
    finally:
        s.quit()
def get_id_from_url(channel, url):
    """Extract a market-specific identifier from a detail-page URL.

    Returns "0" when no identifier can be found; for channels that are not
    expected to carry one, the miss is not logged.
    """
    result = "0"
    if not url:
        return result
    if channel == "百度":
        parts = url.split("item?")
        if len(parts) == 2:
            for query_param in parts[1].split("&"):
                if "docid=" in query_param:
                    result = query_param.replace("docid=", "")
                    break
    elif channel == "360":
        parts = url.split("soft_id/")
        if len(parts) == 2:
            result = parts[1]
    elif channel == "91":
        # For 91 the identifier is the leading package-name segment.
        parts = url.split("Android/")
        if len(parts) == 2:
            segments = parts[1].split("-")
            if segments:
                result = segments[0]
    elif channel == "小米":
        parts = url.split("detail/")
        if len(parts) == 2:
            result = parts[1]
    # These channels never carry an id here, so a "0" result is expected.
    exempt = channel in ("安卓市场", "应用宝", "豌豆荚", "拇指玩")
    if not exempt and result == "0":
        logger.debug("url4details get id error. URL: %s", url)
    return result
def get_ms_fp_parameters(handler_input) -> str:
    """Returns message about the current free play parameters."""
    params = FreePlayAttr.get_fp_parameters(handler_input)

    num_questions = params['num_questions']
    ms_ask_q = (
        free_play.data.MS_ASK_NUM_Q.format(num_questions)
        if num_questions
        else free_play.data.MS_ASK_Q)

    tables = params['times_tables']
    if tables:
        tables = [str(table) for table in tables]
    else:
        tables = [" You have not set any times tables"]
    ms_tables = ''.join(
        [free_play.data.MS_FROM_TABLES, ', and '.join(tables), '.']
    )

    lower_b = params['lower_bound']
    upper_b = params['upper_bound']
    if lower_b is not None and upper_b is not None:
        ms_bound = free_play.data.MS_LOWER_UPPER_BOUND.format(lower_b, upper_b)
    elif lower_b is not None:
        ms_bound = free_play.data.MS_LOWER_BOUND.format(lower_b)
    elif upper_b is not None:
        ms_bound = free_play.data.MS_UPPER_BOUND.format(upper_b)
    else:
        ms_bound = ''

    speech_list = (ms_ask_q, ms_tables, ms_bound)
    logger.debug(speech_list)
    return ' '.join(speech_list)
def url_domain_method(self):
    """Compute URL- and domain-based features for the current keyword/URL.

    Reads self.keyword, self.root, self.url and fills the self.url_* /
    self.domain_* feature counters used downstream by the analyzer.
    """
    # Transliterate (Cyrillic -> Latin) with '-' replacing spaces.
    self.keywordslug = translit(self.keyword.replace(' ', '-'), 'ru', reversed=True)
    self.rootslug = [translit(x.replace(' ', '-'), 'ru', reversed=True) for x in self.root]
    # Percent-decoded URL used for substring counting.
    clean_url = unquote(self.url)
    p = urlparse(self.url)
    if p.scheme == 'https':
        self.url_https = 1
    # Occurrences of each root phrase (and its transliterated slug) in the URL.
    for r in self.root:
        self.url_root[r] = clean_url.count(r)
    for rs in self.rootslug:
        self.url_root_slug[rs] = clean_url.count(rs)
    self.url_length = len(self.url)
    # Path depth: number of '/'-separated segments after the domain.
    self.url_level = len(p.path.split('/')) - 1
    if p.query:
        self.url_params_count = len(p.query.split('&'))
    self.domain = p.netloc
    # NOTE(review): this loop overwrites domain_root each iteration, so only
    # the count for the LAST root survives — confirm whether a sum was intended.
    for r in self.root:
        self.domain_root = self.domain.count(r)
    self.domain_level = len(self.domain.split('.'))
    if self.domain.split('.')[-1] == 'ua':
        self.domain_country = 1
    if self.domain.split('.')[0] == 'www':
        self.domain_is_www = 1
    # Main page iff the URL ends at the bare domain (optionally with '/').
    if self.url.endswith(self.domain + '/') or self.url.endswith(self.domain):
        self.url_ismain = 1
    logger.debug('Analyzer url_domain_method done')
def read_proxy(url, referer): proxy = random.choice(utils.read_proxy_file()) print proxy protocol, pro = proxy.split('=') proxy_support = urllib2.ProxyHandler({protocol.lower(): '%s//%s' % (protocol.lower(), pro)}) http_handler = urllib2.HTTPHandler(debuglevel=1) https_handler = urllib2.HTTPSHandler(debuglevel=1) handlers = [proxy_support, http_handler, https_handler] # handlers = [proxy_support] headers = { 'User-Agent': choose(), 'Referer': referer, 'Content-Type': 'text/html', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 'Accept-encoding': 'gzip' } req = urllib2.Request( url=url, headers=headers ) content = "" try: opener = urllib2.build_opener(*handlers) response = opener.open(req, timeout=10) content = response.read() response.close() except Exception, e: logger.debug("search(%s): %s .error.", url, str(e))
def update_label_and_pkg_img(info):
    """Update screenshot and icon URLs for one game in both tables.

    Args:
        info: dict with keys 'screen_shot_urls', 'icon_url', 'game_id'.

    Commits both UPDATEs atomically; rolls back and logs on any error.
    (Python 2 module — note the print statement.)
    """
    conn = connections('RemoteCursor')
    cursor = conn.cursor()
    print info
    try:
        label_sql = """
            UPDATE iplay_game_label_info
            SET screen_shot_urls = %(screen_shot_urls)s
            , icon_url = %(icon_url)s
            where game_id = %(game_id)s;
        """
        pkg_sql = """
            UPDATE iplay_game_pkg_info
            SET screen_shot_urls = %(screen_shot_urls)s
            , icon_url = %(icon_url)s
            where game_id = %(game_id)s;
        """
        cursor.execute(label_sql, info)
        cursor.execute(pkg_sql, info)
        conn.commit()
    except Exception as e:
        # Either statement failing undoes both updates.
        conn.rollback()
        logger.debug("update_label_and_pkg_img: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
def get_uc_apk_info():
    """Return UC APK rows (data_source=4) keyed by package name.

    Returns:
        dict: {pkg_name: {'game_name':, 'ver_code':, 'ver_name':}}
        Empty when the query fails (the error is logged).
    """
    uc_apk_info = {}
    conn = connections("UCApkInfoCursor")
    cursor = conn.cursor()
    sql = """
        SELECT pkg_name, label, ver_code, ver_name
        FROM uc_apk_info
        WHERE data_source=4
    """
    # BUG FIX: default to an empty tuple — the original left rows as None on
    # query failure, so the iteration below raised TypeError.
    rows = ()
    try:
        cursor.execute(sql)
        rows = cursor.fetchall()
    except Exception as e:
        logger.debug("get_uc_apk_info error: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
    for row in rows:
        pkg_name = row[0]
        game_name = row[1]
        ver_code = int(row[2])
        ver_name = row[3]
        uc_apk_info[pkg_name] = {
            "game_name": game_name,
            "ver_code": ver_code,
            "ver_name": ver_name
        }
    return uc_apk_info
def generate_plugin_outputs_from_dict(self, results, outputs=None):
    """Persist a PluginOutput for every (file_key, plugin_key) vector.

    Takes the result of self.generate_plugin_vectors and saves one
    PluginOutput per entry. Immediately returns a DeferredList that fires
    once every save has completed. If `outputs` (a list) is supplied, each
    created PluginOutput is appended to it.
    """
    if outputs is None:
        outputs = []
    save_deferreds = []
    for key, vector in results.items():
        file_key, plugin_key = key
        logger.debug("Created plugin output of length %d", len(vector))
        plugin_output = PluginOutput(vector, plugin_key, file_key)
        save_deferreds.append(self.model.save(plugin_output))
        outputs.append(plugin_output)
    return defer.DeferredList(save_deferreds)
def load_emulators(filesystem, override = None):
    """Build a BackedObjectManager over the emulators.txt config file."""
    config_path = settings_file_path('emulators.txt', filesystem, override)
    logger.debug("Loading emulators from path: %s" % config_path)
    store = ConfigFileBackingStore(config_path)
    adapter = EmulatorBackedObjectAdapter(filesystem)
    return BackedObjectManager(store, adapter)
def load_consoles(emulators, filesystem, override = None):
    """Build a BackedObjectManager over the consoles.txt config file."""
    config_path = settings_file_path('consoles.txt', filesystem, override)
    logger.debug("Loading consoles from path: %s" % config_path)
    store = ConfigFileBackingStore(config_path)
    adapter = ConsoleBackedObjectAdapter(emulators)
    return BackedObjectManager(store, adapter)
def load_emulators(filesystem, override=None):
    """Return the manager backing emulator definitions (emulators.txt)."""
    emulators_path = settings_file_path('emulators.txt', filesystem, override)
    logger.debug("Loading emulators from path: %s" % emulators_path)
    return BackedObjectManager(
        ConfigFileBackingStore(emulators_path),
        EmulatorBackedObjectAdapter(filesystem),
    )
def load_consoles(emulators, filesystem, override=None):
    """Return the manager backing console definitions (consoles.txt)."""
    consoles_path = settings_file_path('consoles.txt', filesystem, override)
    logger.debug("Loading consoles from path: %s" % consoles_path)
    return BackedObjectManager(
        ConfigFileBackingStore(consoles_path),
        ConsoleBackedObjectAdapter(emulators),
    )
def get_vectors_eventually(self, audio_file):
    """Try immediate model vector generation for audio_file, else queue it.

    Returns a deferred that fires once the file's plugin outputs exist, or
    once the file has been placed on the generation queue for a later sweep.
    """
    def got_vectors(result):
        global go_through_scheduled
        logger.debug(" NB: Model generation returned %r for %r", type(result), audio_file)
        # Create new PluginOutput objects and store them on the network
        if result is None:
            # if the model couldn't do its special generation, immediately,
            # add the file to the generation queue.
            logger.debug(" NB: Adding %r to the queue", audio_file)
            df_p = defer.Deferred()
            generating_queue.append((df_p, audio_file))
            # Schedule another queue sweep roughly once per network_size
            # files queued (module-level counters/queue — confirm semantics).
            if go_through_scheduled * network_size < len(generating_queue):
                generating_df.addCallback(self.go_through_queue)
                go_through_scheduled += 1
        else:
            logger.debug(" NB: Got the plugin outputs for %r!", audio_file)
            df_p = self.generate_plugin_outputs_from_dict(result)
        return df_p
    logger.debug("NB: Trying model generation for %r", audio_file)
    df = self.model.special_generate_plugin_vectors(audio_file)
    df.addCallback(got_vectors)
    return df
def update_file_vectors(self, audio_file):
    """Create new PluginOutputs and a fresh vector for `audio_file`.

    Pipeline: ensure plugin vectors exist (get_vectors_eventually), collapse
    them into a single file vector, apply it, and save the file object.
    Immediately returns a deferred that fires with the saved file object.
    """
    # FIX: removed the unused `outer_df = defer.Deferred()` local — it was
    # never chained or returned.
    def get_file_vector(val):
        # Take all the new PluginOutput objects and generate and
        # apply a single vector to represent the file.
        df = self.mine.calculate_file_vector(audio_file)
        return df
    def save_file(vector):
        logger.debug("--> Applying vector to %r %r", audio_file, vector)
        logger.debug("--> Saving %r", audio_file)
        audio_file.vector = vector
        df_s = self.model.save(audio_file)
        return df_s
    logger.debug("NB: Updating File Vectors for %r", audio_file)
    df = self.get_vectors_eventually(audio_file)
    df.addCallback(get_file_vector)
    df.addCallback(save_file)
    return df
def read_proxy(url, referer): proxy = random.choice(utils.read_proxy_file()) print proxy protocol, pro = proxy.split('=') proxy_support = urllib2.ProxyHandler( {protocol.lower(): '%s//%s' % (protocol.lower(), pro)}) http_handler = urllib2.HTTPHandler(debuglevel=1) https_handler = urllib2.HTTPSHandler(debuglevel=1) handlers = [proxy_support, http_handler, https_handler] # handlers = [proxy_support] headers = { 'User-Agent': choose(), 'Referer': referer, 'Content-Type': 'text/html', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 'Accept-encoding': 'gzip' } req = urllib2.Request(url=url, headers=headers) content = "" try: opener = urllib2.build_opener(*handlers) response = opener.open(req, timeout=10) content = response.read() response.close() except Exception, e: logger.debug("search(%s): %s .error.", url, str(e))
def func(self):
    """Worker loop: repeatedly take (url, savepath) tasks and download them.

    Timeouts delete the partial file and re-queue the task; other errors
    re-queue without deleting. Runs forever.
    """
    # headers = {"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36"}
    while True:
        url, savepath = self.taskqueue.get()
        block = 1 << 14  # 16 KiB read chunk
        try:
            req = request.Request(url, headers=USER_AGENT)
            with request.urlopen(req, timeout=60) as response:
                with open(savepath, "wb") as f:
                    # Stream fixed-size chunks until EOF (empty bytes sentinel).
                    for data in iter(partial(response.read, block), b""):
                        f.write(data)
        except socket.timeout:
            logger.warning("下载超时:{}".format(url))
            os.remove(savepath)
            self.count += 1
            self.taskqueue.put((url, savepath))
            # `finally` below still runs before this continue takes effect.
            continue
        except Exception as e:
            logger.error("出错:{}".format(e))
            logger.error("出错url:{}".format(url))
            self.count += 1
            self.taskqueue.put((url, savepath))
        finally:
            # response.close()
            self.taskqueue.task_done()
            self.count -= 1
            # NOTE(review): this block also runs on failures, so the
            # completion log and counter decrement fire even for re-queued
            # tasks — confirm that is intended.
            logger.info("下载完成:{}".format(savepath))
            logger.debug("当前队列任务数:{}".format(self.count))
def select_google_pkg_info():
    """Select (game_id, icon_url, screen_shot_urls) rows whose icon is still
    hosted on ggpht.com (Google's image CDN), i.e. not yet re-hosted.

    Returns an empty tuple when the query fails (error logged as dict rows).
    """
    conn = connections('DictRemoteCursor')
    cursor = conn.cursor()
    rows = tuple()
    try:
        # sql = """
        #     SELECT game_id, icon_url, screen_shot_urls
        #     FROM iplay_game_pkg_info
        #     # WHERE market_channel = "google"
        #     WHERE market_channel in ("GG官方", "三星", "高通版", "英伟达", "PowerVR", "Mali", "全球版", "GG官方原包", "三星原包", "高通版原包", "英伟达原包", "PowerVR原包", "Mali原包")
        #     AND enabled = 1
        #     AND icon_url like '%ggpht.com%'
        # """
        sql = """
            SELECT game_id, icon_url, screen_shot_urls
            FROM iplay_game_pkg_info
            # WHERE market_channel = "google"
            WHERE enabled = 1
            AND icon_url like '%ggpht.com%'
        """
        cursor.execute(sql)
        rows = cursor.fetchall()
    except Exception as e:
        logger.debug("select_google_pkg_info: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
    return rows
def handle(self, handler_input):
    """Handle a wrong free-play answer: record the miss, update stats, and
    re-ask the same question with an 'incorrect' buzz."""
    logger.info("HAN FP_WrongAnswerHandler")
    speech_list = []
    player_obj = PlayerDict.load_player_obj(handler_input)
    # Persist the miss on the player's lifetime stats and wrong-answer log.
    UserStats.update_player_stats(handler_input, correct=False, player_obj=player_obj)
    WrongAnswer.record_wrong_question(handler_input)
    ms_incorrect = IncorrectAnsUtils.get_buzz_and_incorrect()
    ms_retry_question = AllQuestionIntros.get_retry_question(handler_input)
    reprompt = GenQuestions.get_same_question(handler_input)
    # Buzz, a level-1 pause, then the retry prompt.
    speech_list += Pauser.make_ms_pause_level_list(ms_incorrect, 1, ms_retry_question)
    # Session-scoped streak and per-mode statistics.
    SessionStats.update_consecutive_correct(handler_input, correct=False)
    ModeStats.update_mode_stats(handler_input, correct=False)
    logger.debug(speech_list)
    speech = ' '.join(speech_list)
    card_title, card_text = CardFuncs.get_card_info(handler_input, speech)
    return (handler_input.response_builder.speak(speech).ask(
        reprompt).set_card(SimpleCard(card_title, card_text)).response)
def udpate_label_and_pkg_info(game_id):
    """Disable (enabled=0) label and pkg rows for the given game ids.

    Args:
        game_id: despite the singular name, executemany() below implies this
            is a sequence of parameter tuples, e.g. [(id1,), (id2,)] —
            confirm with callers.

    Commits both UPDATEs atomically; rolls back and logs on error.
    (Function name typo "udpate" kept — renaming would break callers.)
    """
    conn = connections('RemoteCursor')
    cursor = conn.cursor()
    try:
        label_sql = """
            UPDATE iplay_game_label_info
            SET enabled = 0
            where game_id = %s;
        """
        pkg_sql = """
            UPDATE iplay_game_pkg_info
            SET enabled = 0
            where game_id = %s;
        """
        cursor.executemany(label_sql, game_id)
        cursor.executemany(pkg_sql, game_id)
        conn.commit()
    except Exception as e:
        conn.rollback()
        logger.debug("udpate_label_and_pkg_info: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
def get_ms_corresponding_help(handler_input) -> str:
    """Returns corresponding help message depending on mode.

    TODO:
        - Add other help pointers & corresponding functions.
        - Add corresponding prompts, etc for other functions.
    """
    attr = handler_input.attributes_manager.session_attributes
    help_pointer = attr.get('help_pointer', 'overview')
    logger.debug(f"help_pointer {help_pointer}")

    pointer_to_help_func = {
        'overview': HelpUtils.get_ms_help_overview,
        'user_profile': HelpUtils.get_ms_user_profile,
        'act_descript': HelpUtils.get_ms_act_descript,
        'free_play': HelpUtils.get_ms_free_play,
        'fp_input': HelpUtils.get_ms_fp_table_input,
        'custom_practice': HelpUtils.get_ms_custom_practice,
        'survival_mode': HelpUtils.get_ms_survival_mode,
        'speed_challenge': HelpUtils.get_ms_speed_challenge,
    }
    ms_help_func = pointer_to_help_func.get(help_pointer)
    if ms_help_func is None:
        # Unknown pointer: log it and fall back to the activity description.
        logger.warning(
            f"get_ms_corresponding_help: KeyError {help_pointer}")
        ms_help_func = HelpUtils.get_ms_act_descript
    return ms_help_func()
def handle(self, handler_input):
    """Handle a correct free-play answer: update stats, optionally congratulate,
    and ask the next question."""
    logger.info("HAN FP_CorrectAnswerHandler")
    speech_list = []
    player_obj = PlayerDict.load_player_obj(handler_input)
    # Persist the correct answer on the player's lifetime stats.
    UserStats.update_player_stats(handler_input, correct=True, player_obj=player_obj)
    ms_congrats = CongratUtils.get_answer_congrats(handler_input, player_obj=player_obj)
    ms_question = FPQuestions.get_question(handler_input)
    reprompt = FPQuestions.get_rephrased_question(handler_input)
    logger.debug(ms_congrats)
    # Congratulation (if any), a level-1 pause, then the next question.
    if ms_congrats != "":
        speech_list += Pauser.make_ms_pause_level_list(
            ms_congrats, 1, ms_question)
    else:
        speech_list.append(ms_question)
    # Session-scoped streak and per-mode statistics.
    SessionStats.update_consecutive_correct(handler_input, correct=True)
    ModeStats.update_mode_stats(handler_input, correct=True)
    logger.debug(speech_list)
    speech = ' '.join(speech_list)
    card_title, card_text = CardFuncs.get_card_info(handler_input, speech)
    return (handler_input.response_builder.speak(speech).ask(
        reprompt).set_card(SimpleCard(card_title, card_text)).response)
def from_contigs(cls, contigs: list) -> "ReadGraph":
    """Create the initial ReadGraph from a list of contig objects.

    Every contig pair gets an edge weighted by read-set overlap normalized
    by each contig's mapped-read count: ((o/|A|) + (o/|B|)) / 2. Pairs with
    zero weight are added as disconnected nodes.

    Args:
        contigs: List of contigs.

    Returns:
        A ReadGraph object.
    """
    graph = nx.Graph()
    logger.debug("Initial graph calculation.")
    for contig_a, contig_b in itertools.combinations(contigs, 2):
        shared_reads = len(contig_a.readset.intersection(contig_b.readset))
        reads_a = len(contig_a.readset)
        reads_b = len(contig_b.readset)
        try:
            weight = (shared_reads / reads_a + shared_reads / reads_b) / 2
        except ZeroDivisionError:
            # A contig with no mapped reads contributes no edge.
            weight = 0
        if weight > 0:
            graph.add_edge(contig_a.name, contig_b.name, weight=weight)
        else:
            graph.add_nodes_from([contig_a.name, contig_b.name])
    return cls(incoming_graph_data=graph)
def get_top_z_score_err_tables(handler_input) -> list:
    """Returns list of the top 3 z-score error tables.

    Reads 'tbl_list_mean_errs' (a list of (table, mean_error) pairs,
    presumably sorted worst-first — TODO confirm) from session attributes and
    returns, sorted ascending, the tables among the first three whose error
    z-score exceeds the configured threshold.
    """
    attr = handler_input.attributes_manager.session_attributes
    tbl_list_mean_errs = attr['tbl_list_mean_errs']
    tbl_means = [ num[1] for num in tbl_list_mean_errs]
    tables_to_practice = []
    tbl_mean_err = mean(tbl_means)
    # NOTE(review): statistics.stdev raises with fewer than two values —
    # confirm callers guarantee at least two tables here.
    tbl_mean_stdev = stdev(tbl_means, xbar = tbl_mean_err)
    # Only the first three entries are examined.
    for i in range(3):
        table, table_err = tbl_list_mean_errs[i]
        tbl_z_score = calc_z_score(
            data_point = table_err,
            data_mean= tbl_mean_err,
            data_stdev= tbl_mean_stdev,
        )
        if tbl_z_score > custom_practice.data.HIGH_Z_SCORE_ERR:
            tables_to_practice.append( table )
    tables_to_practice.sort()
    logger.debug( tbl_list_mean_errs)
    logger.debug( tables_to_practice)
    return tables_to_practice
def microsoft_account_login(self):
    """Open the Microsoft OAuth consent page and prompt for the redirect URL.

    Returns:
        str: the authorization `code` extracted from the pasted redirect URL
        (matched by the module-level `find_code` regex).
    """
    # client_id is the Minecraft app registered in Azure; response_type is the
    # expected result type; scope selects the Xbox Live auth service;
    # redirect_uri is the post-login redirect target.
    URL = "https://login.live.com/oauth20_authorize.srf"
    param = {
        "client_id": self.CLIENT_ID,
        "response_type": "code",
        "grant_type": "authorization_code",
        "redirect_uri": "https://login.live.com/oauth20_desktop.srf",
        "scope": "service::user.auth.xboxlive.com::MBI_SSL"
    }
    data = parse.urlencode(param)
    webbrowser.open_new(URL + "?" + data)
    code = ""
    while code == "":
        # login_url = getpass.getpass(...)  -- hidden-input variant, disabled
        print("如果之前登录过,浏览器可能已经获取到了URL类似:")
        print(
            "https://login.live.com/oauth20_desktop.srf?code=M.R3_BAY.xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxx&lc=xxxx"
        )
        login_url = input("将成功登录后的浏览器URL复制粘贴到这: ")
        re_code = find_code.match(login_url)
        if re_code:
            code = re_code.group(1)
            logger.debug("Microsoft_auth ↓")
            logger.debug(pformat(code))
            break
        else:
            # Not a valid post-login URL; loop and re-prompt.
            print("不是成功登录后的浏览器URL,请重新输入。")
    return code
def update_graph(self, contigs: list) -> None:
    """Add new contig objects to the graph, weighting edges against the
    original contigs by normalized read-set overlap.

    Args:
        contigs: List of new contigs to merge in.
    """
    logger.debug("Updating graph.")
    logger.debug(f"{self.original_contigs}")
    for existing, incoming in itertools.product(self.original_contigs, contigs):
        shared_reads = len(existing.readset.intersection(incoming.readset))
        # Normalized weight: ((overlap/|A|) + (overlap/|B|)) / 2.
        reads_existing = len(existing.readset)
        reads_incoming = len(incoming.readset)
        try:
            weight = (shared_reads / reads_existing + shared_reads / reads_incoming) / 2
        except ZeroDivisionError:
            weight = 0
        if weight > 0:
            self.add_edge(existing.name, incoming.name, weight=weight)
        else:
            self.add_node(incoming.name)
def get_data_size(url, referer): ua = choose() headers = { 'User-Agent': ua, 'Referer': referer, 'Content-Type': 'text/html', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 'Accept-Encoding': 'gzip,deflate,sdch' } req = urllib2.Request( url=referer, headers=headers ) try: opener = urllib2.build_opener(RedirectHandler) response = opener.open(req, timeout=10) # print "search..." # print "%s ===> %s" % (url, response) # print dir(response) # print response.headers except Exception, e: response = None logger.debug("search(%s): %s .error.", url, str(e))
def upload(path, name):
    """Upload one local image to Qiniu, skipping ids already uploaded.

    :param path: directory containing the image
    :param name: image file name, expected as "<key>.<suffix>"
    """
    local_file = os.path.join(path, name)
    print '\n'
    print name,path
    try:
        key, suffix = name.split(".")
        # Skip if this image id was recorded as uploaded; otherwise record it
        # first, then upload.
        image_ids = read_file("uploaded_image_ids.txt")
        if key in image_ids:
            return
        else:
            write_file(key, "uploaded_image_ids.txt")
        extra = qiniu.io.PutExtra()
        # Mime type derived from the file extension.
        extra.mime_type = "image/%s" % suffix
        ret, err = qiniu.io.put_file(uptoken, key, local_file, extra)
        print ret
        print err
        if err:
            logger.debug("err: %s", err)
    except Exception as e:
        logger.debug("upload Error: %s", str(e))
def check_more_than_one_incorrect_question(handler_input) -> bool:
    """Returns boolean if > 1 incorrect question to practice.

    NOTE: Questions are removed once answered correctly, so when exactly one
    remains it is necessarily the last question.
    """
    attr = handler_input.attributes_manager.session_attributes
    wrong_by_date = attr['wrong_quest_by_date']
    logger.debug(wrong_by_date)

    # More than one date bucket means more than one question.
    if len(wrong_by_date.keys()) > 1:
        return True
    date_dict = wrong_by_date[list(wrong_by_date.keys())[0]]
    if len(date_dict.keys()) > 1:
        return True
    first_table_dict = date_dict[list(date_dict.keys())[0]]
    if len(first_table_dict.keys()) > 1:
        return True
    # Single leaf: more than one only if its incorrect count exceeds 1.
    leaf_value = first_table_dict[list(first_table_dict.keys())[0]]
    return CP_Utils.reduce_num_incorrect(leaf_value) != 1
def handle(self, handler_input):
    """Handle a correct survival-mode answer: bump the difficulty ceiling,
    update stats, and ask the next question."""
    logger.info("HAN SM_CorrectQuestionHandler")
    speech_list = []
    player_obj = PlayerDict.load_player_obj(handler_input)
    # Persist the correct answer on the player's lifetime stats.
    UserStats.update_player_stats(handler_input, correct = True, player_obj= player_obj)
    # Raise the survival-mode upper bound after a correct answer.
    SM_Attr.increment_sm_upper(handler_input)
    ms_congrats = CongratUtils.get_answer_congrats(
        handler_input, player_obj= player_obj, survival_mode=True)
    logger.debug(ms_congrats)
    ms_question = SMQuestions.get_question(
        handler_input, first_question= False, player_obj= player_obj)
    reprompt = GenQuestions.get_same_question(handler_input)
    # Congratulation (if any) followed by a mode-specific pause.
    if len(ms_congrats):
        sm_pause = Pauser.get_sm_pause_length(handler_input)
        speech_list += Pauser.make_ms_pause_level_list(ms_congrats, sm_pause)
    speech_list.append( ms_question)
    # Session-scoped streak and per-mode statistics.
    SessionStats.update_consecutive_correct(handler_input, True)
    ModeStats.update_mode_stats(handler_input, True)
    speech = ' '.join(speech_list)
    card_title, card_text = CardFuncs.get_card_info(handler_input, speech)
    return (
        handler_input.response_builder
        .speak(speech)
        .ask(reprompt)
        .set_card( SimpleCard( card_title, card_text))
        .response)
def handleConnected(self):
    """Register a newly connected websocket client and give it a send queue."""
    # Log
    logger.debug('{} Connected'.format(self.address))
    # Track this connection in the module-level registry.
    global WebSocketConnections
    WebSocketConnections.append(self)
    # Per-connection outbound message buffer.
    self.msg_queue = []
def build_index(self) -> None:
    """Builds the salmon index."""
    logger.debug("Build salmon index.")
    # TODO: Implement check to avoid duplicate runs
    index_command = Cmd(
        f"salmon index -p {self.threads} -t {self.input_file} -i {self.index_name} --keepDuplicates"
    )
    index_command.run()
def backup_directory(config):
    """Resolve the backup directory from `config`.

    Returns None when config.backup_directory is None (backups disabled).
    An empty string falls back to the default backups directory.
    """
    chosen = config.backup_directory
    if chosen is None:
        return None
    if chosen == "":
        chosen = default_backups_directory()
        logger.debug("Specified empty string as backup directory. Defaulting to %s" % chosen)
    return chosen
def nugong_callback():
    """Cycle through hotbar slots args.start..args.end, selecting each and
    right-clicking, then press '1'.

    NOTE(review): the collapsed source makes the indentation of the trailing
    kbm.key("1") ambiguous — it is placed after the loop here (reset to slot 1
    once); confirm whether it belongs inside the loop instead.
    """
    for i in range(args.start, args.end + 1):
        logger.debug(f"使用快捷栏:{i}")
        time.sleep(0.05)
        # Select hotbar slot i, then use it with a right click.
        kbm.key(str(i))
        time.sleep(0.05)
        kbm.mouseclick("right")
        time.sleep(args.interval)
    kbm.key("1")
def enforce_allowed_answered_tables(answered_tables: list) -> list:
    """Enforces the allowed length of answered_tables.

    Trims the oldest entries (from the front) when the list exceeds
    players.data.ALLOWED_ANSWERED_TABLES; mutates and returns the list.
    """
    overflow = len(answered_tables) - players.data.ALLOWED_ANSWERED_TABLES
    if overflow <= 0:
        return answered_tables

    log_all(overflow, answered_tables)
    # Drop the oldest entries from the front of the list.
    del answered_tables[:overflow]
    logger.debug(answered_tables)
    return answered_tables
def main(channel, html, url4details):
    """Analyze a channel page via execute() and return the parsed info dict.

    Logs and re-raises any parsing error. (Python 2 module.)
    NOTE(review): url4details is unused in this body — confirm with callers.
    """
    info = dict()
    print 'start main'
    try:
        info = execute(0, channel, html)
        print '23'
    except Exception as e:
        logger.debug("analyze error. channel(%s),ERROR: %s" % (channel, str(e.args)))
        raise e
    return info
def got_tags(tags):
    """Fetch a vector for every tag; return a DeferredList over the fetches.

    BUG FIX: the original built `dfs = []` but never appended to it (and left
    an unused `partial`), so the DeferredList fired immediately without
    waiting for any tag vector. Each per-tag deferred is now collected.
    """
    dfs = []
    for tag in tags:
        logger.debug("-> Fetching vector for %r", tag)
        t_df = self.mine.calculate_tag_vector(tag)
        t_df.addCallback(got_vector, tag)
        dfs.append(t_df)
    list_df = defer.DeferredList(dfs)
    return list_df
def got_all_files(val):
    """Intersect the per-tag file lists: keep files present under EVERY tag."""
    files = None
    for tag_name in self.file_lists:
        tag_files = self.file_lists[tag_name]
        if files is None:
            # Seed the intersection with the first tag's files.
            logger.debug(tag_files)
            files = Set(tag_files)
        else:
            files = files.intersection(tag_files)
    return list(files)
def got_tags(tags):
    """Stash the fetched tags, then produce a deferred yielding the audio
    files to guess against."""
    logger.debug("guess_tags")
    # Stash tags for later callbacks in this chain (closure-shared list).
    scoped_tags[0] = tags
    # take care of fetching the tag and audio file objects...
    if audio_file is None:
        # No specific file supplied: fetch all of this user's audio files.
        f_df = self.model.get_audio_files(user_name=user_name)
    else:
        # A file was supplied: wrap it in an already-fired deferred.
        f_df = defer.Deferred()
        f_df.callback([audio_file])
    return f_df
def insert_pkg_info(conn, infos):
    """Bulk-insert package rows into iplay_game_pkg_info (INSERT IGNORE).

    Args:
        conn: an open DB connection; NOT closed here — caller owns it.
        infos: sequence of dicts. NOTE(review): two param keys differ from
            their column names — 'download_urls' fills download_url and
            'min_sdk_version' fills min_sdk; confirm the dict keys callers
            supply match these placeholders.

    Commits on success; rolls back and logs on error. Only the cursor is
    closed in finally (the connection is reused by the caller).
    """
    # conn = connections('Cursor')
    cursor = conn.cursor()
    try:
        sql = """
            INSERT IGNORE INTO iplay_game_pkg_info(
                apk_id
                , game_id
                , market_channel
                , game_name
                , pkg_name
                , ver_code
                , ver_name
                , file_size
                , download_url
                , game_desc
                , game_types
                , downloaded_cnts
                , game_language
                , screen_shot_urls
                , icon_url
                , save_timestamp
                , is_crack_apk
                , min_sdk
            ) VALUES (
                %(apk_id)s
                , %(game_id)s
                , %(market_channel)s
                , %(game_name)s
                , %(pkg_name)s
                , %(ver_code)s
                , %(ver_name)s
                , %(file_size)s
                , %(download_urls)s
                , %(game_desc)s
                , %(game_types)s
                , %(downloaded_cnts)s
                , %(game_language)s
                , %(screen_shot_urls)s
                , %(icon_url)s
                , %(now)s
                , %(is_crack_apk)s
                , %(min_sdk_version)s
            )
        """
        cursor.executemany(sql, infos)
        conn.commit()
    except Exception as e:
        conn.rollback()
        logger.debug("insert_pkg_info name: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
def got_files(files):
    """Guess tags for every (file, tag) pair the miner says matches; return
    a DeferredList over the guesses."""
    logger.debug("guess_tags")
    tags = scoped_tags[0]
    guess_deferreds = []
    for audio in files:
        for candidate in tags:
            if self.mine.does_tag_match(audio, candidate):
                logger.debug("-> GENERATED: %r %r", audio, candidate)
                guess_deferreds.append(
                    self.model.guess_tag_for_file(audio, candidate))
    return defer.DeferredList(guess_deferreds)
def image_provider(config):
    """Build the CombinedProvider chain named in config.provider_spec.

    provider_spec is a comma-separated list of provider names ("local",
    "consolegrid"), case- and whitespace-insensitive.

    BUG FIX: `map()` returns a lazy iterator on Python 3, so `len(instances)`
    raised TypeError and the iterator would be exhausted after a single use;
    real lists are built instead.
    """
    providerByName = {
        "local": LocalProvider,
        "consolegrid": ConsoleGridProvider,
    }
    names = [s.strip().lower() for s in config.provider_spec.split(",")]
    instances = [providerByName[name]() for name in names]
    logger.debug("Creating with component providers: %s" % str(instances))
    if len(instances) == 0:
        logger.error("No image providers specified. Ice will run, but will not \
          find grid images for your ROMs. If this wasnt intentional, \
          see config.txt.")
    return CombinedProvider(*instances)
def blocking_generate_vectors(self, plugins, file_name, file_key):
    """ Blocking function. Generates a dict of vectors for each
    file_key/plugin pair.

    Returns:
        dict: {(file_key, plugin_key): vector} for every plugin.
    """
    # FIX: removed the unused `outer_df = defer.Deferred()` local — this is a
    # blocking function that returns the results dict directly.
    results = {}
    for plugin in plugins:
        vec = plugin.create_vector(file_name)
        key = (file_key, plugin.get_key())
        logger.debug("Got plugin vector of length %d", len(vec))
        results[key] = vec
    return results
def google_apk(apk_info, channel, detail_url):
    """Run the Google Play game ingestion pipeline.

    Steps: insert the game record, download its images, upload them, then
    update image URLs. Returns a status code: 301-304 identify the failing
    step; 399 means all steps succeeded.
    """
    try:
        # Step 1: insert the google game record.
        google.main(apk_info, channel, detail_url)
        time.sleep(5)
    except Exception as e:
        logger.debug("upload_data入库google游戏 ERROR: %s", str(e.args))
        return 301
    try:
        # Step 2: download images.
        step03_02_01_download_images.main()
        time.sleep(5)
    except Exception as e:
        logger.debug("upload_data下载图片 ERROR: %s", str(e.args))
        return 302
    try:
        # Step 3: upload images.
        step03_02_02_upload_images.main()
        time.sleep(5)
    except Exception as e:
        logger.debug("upload_data上传图片 ERROR: %s", str(e.args))
        return 303
    try:
        # Step 4: update image URLs.
        step03_02_03_update_image_url.execute()
        time.sleep(5)
    except Exception as e:
        logger.debug("upload_data更新图片 ERROR: %s", str(e.args))
        return 304
    return 399
def roms_for_console(self, config, console):
    """
    @param console - A console object
    @returns A list of ROM objects representing all of the valid ROMs for a
    given console. Valid ROMs are those for which `path_is_rom` returns True
    for the console. Returns an empty list if `console` is not enabled.
    """
    rom_dir = consoles.console_roms_directory(config, console)
    logger.debug("[%s] Using `%s` as ROMs directory" % (console.shortname, rom_dir))
    return self._search(rom_dir, console)
def request_url(url, path):
    """Download the image at `url` into `path`, named from the URL.

    Skips empty args, non-http URLs, and images already present in `path`.
    """
    if not url or not path:
        return
    if not ("http" in url):
        return
    image_name = gen_image_name(url)
    file_list = os.listdir(path)
    # Already downloaded: any file containing this image name exists.
    for f in file_list:
        if image_name in f:
            return
    # NOTE(review): this headers dict is built but never passed to
    # requests.get below — the download goes out with default headers (and
    # the embedded cookie is stale). Confirm whether it should be sent.
    headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:33.0) Gecko/20100101 Firefox/33.0',
        'Referer': 'https://play.google.com/store/apps/category/GAME',
        'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip',
        'Accept-Language': 'zh-cn,en-us;q=0.7,en;q=0.3',
        'Content-Length': '86',
        'Cookie': 'PREF=ID=54d3e46c668f74ce:U=0806b1988518afdd:TM=1411874099:LM=1416486632:S=qv9drTd2LIVCahAX; NID=67=J6fg2e3bnpbDdaN_xcMZXrH7E-VvzYuXKf4jLz-oLWCJ-O_xEzG9JQ2nng236XgOwpm3DCq0JL6m_RIveJM4ASXqy8-7xkuEI-CK4Gz_Imbr3Wt9I8iUvuaf3GwH7pCD; _ga=GA1.3.86026158.1413180514; _gat=1; PLAY_PREFS=CgJVUxD2z9aCnSkosra56Zwp:S:ANO1ljJYk5JjleSs',
        'Host': 'play.google.com',
    }
    r = requests.get(url, stream=True, proxies=proxyDict, verify=False)
    content_type = r.headers.get('content-type')
    # Default extension .webp; use the content-type subtype when available.
    name = os.path.join(path, "%s.webp" % image_name)
    if content_type:
        suffix = content_type.split("/")[1]
        name = os.path.join(path, "%s.%s" % (image_name, suffix))
    # Record non-image responses for debugging.
    # NOTE(review): raises TypeError when content_type is None — confirm the
    # server always sends a content-type.
    if not "image" in content_type:
        logger.debug("file: %s is not webp,is %s", name, content_type)
    if r.status_code == 200:
        with open(name, 'wb') as f:
            for chunk in r.iter_content(1024):
                f.write(chunk)
def get_data(page_url, referer):
    """Fetch a page via browser.read and return its text.

    Returns an 'ERROR:...' marker string when the crawler is banned or the
    page is an XML failure response, and '' on exception or empty page.
    """
    content = ''
    try:
        page = browser.read(page_url, referer)
        # Ban page served by 91/360 etc. when the crawler is blocked.
        if "很抱歉..." in page:
            content = 'ERROR:已被91封禁,等段时间在试'
        elif '<?xml version="1.0" encoding="UTF-8"?>' in page:
            content = 'ERROR:获取页面失败,请查看url是否正确'
        elif page and page.strip():
            content = page
    except Exception as e:
        # raise e
        logger.debug("get_data ERROR: %s", str(e.args))
    return content
def get_game_name():
    """Fetch all distinct game names from crawler_info.

    Returns the raw fetchall() rows, or None when the query fails
    (the error is logged).
    """
    conn = connections('Cursor')
    cursor = conn.cursor()
    rows = None
    try:
        cursor.execute("SELECT distinct name FROM crawler_info;")
        rows = cursor.fetchall()
    except Exception as e:
        logger.debug("get_game_name: %s" % str(e.args))
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
    return rows