def archive_file(filepath=Path.SCREENSHOT_PATH, pattern=r"[\w\]\[]*\.png") -> None:
    """Archive screenshot files into per-directory history folders.

    For every sub-directory of *filepath*, file names matching *pattern* are
    moved into ``filepath/history/<dir_name>/<n>`` where ``<n>`` is the first
    unused run index.  Sub-directories with no matching file are removed.

    :param filepath: root directory holding the screenshot sub-directories
    :param pattern: regex matched against the contained file names
    """
    # TODO: implement recursively
    if not os.path.exists(filepath):
        os.makedirs(filepath)
        return
    for dir_name in os.listdir(filepath):
        path = os.path.join(filepath, dir_name)
        if not os.path.isdir(path):
            continue
        # Join all entry names so a single findall() collects every match.
        mv_dirs = re.findall(pattern, ';'.join(os.listdir(path)))
        if mv_dirs:
            history = os.path.join(filepath, "history", dir_name)
            if not os.path.exists(history):
                os.makedirs(history)
            # Pick the first unused numeric run folder under history.
            times = 1
            while True:
                existing = os.path.join(history, str(times))
                if not os.path.exists(existing):
                    os.makedirs(existing)
                    break
                times += 1
            for name in mv_dirs:
                shutil.move(os.path.join(path, name), existing)
        else:
            try:
                # FIX: os.removedirs() also prunes now-empty parent
                # directories (potentially removing *filepath* itself);
                # os.rmdir() deletes only the empty leaf directory.
                os.rmdir(path)
            except OSError as e:
                logger.warning(
                    f"Delete directory path:{path} error! Track:{e}")
def find_one(self, sql, expected=None, times=None) -> tuple:
    """Poll the database with *sql* until a row (optionally matching
    *expected*) is fetched or the retry budget is exhausted.

    :param sql: SQL statement to execute -> str
    :param expected: if given, keep polling until the first column of the
        fetched row equals this value
    :param times: maximum number of polling attempts (default 20), with a
        6-second sleep + commit between attempts
    :return: the matching row -> tuple, or the last fetched row / ``None``
        when nothing matched within the budget
    :raises pymysql.err.Error, TypeError: re-raised after a rollback
    """
    res = None
    if not times:
        times = 20
    try:
        logger.info("Model: fetchone, SQL: 【{}】".format(sql))
        for i in range(times):
            row = self.cur.execute(sql)
            logger.debug("row: {}".format(row))
            if not row:
                # No rows yet: commit so the next execute sees a fresh
                # snapshot (required under REPEATABLE READ), then retry.
                time.sleep(6)
                self.con.commit()
                continue
            res = self.cur.fetchone()
            logger.info("result: {}".format(res))
            if not expected or res[0] == expected:
                return res
            # Row found but value not yet the expected one: wait and retry.
            time.sleep(6)
            self.con.commit()
        return res
    except pymysql.err.InterfaceError as e:
        # Connection dropped: reconnect and retry the whole lookup.
        self.con.ping(reconnect=True)
        logger.warning(f"Database connection failed: {e}")
        return self.find_one(sql, expected, times)
    except (pymysql.err.Error, TypeError) as e:
        logger.error("Database error rolling back: {}".format(e))
        self.con.rollback()
        raise e
def run(self):
    """Consume goods codes from the input queue, check whether an image
    exists for each, and push ``[goodscode, exists]`` onto the result
    queue.  Forwards the sentinel and stops when it arrives.
    """
    in_q = self.input_queue
    out_q = self.result_queue
    sentinel = self.queue_endian
    if in_q is None:
        logger.critical("Queue is Empty")
        return
    while True:
        message = in_q.get()
        if message == sentinel:
            logger.debug("ImageCheckWorker Done")
            out_q.put(sentinel)
            return
        # print(message)
        goodscode = message.strip()
        if goodscode is None:
            logger.warning(
                str(os.getpid()) + " / Warning with Queue " + str(message))
            continue
        out_q.put([goodscode, exist_check(goodscode)])
def run(self):
    """Pull ``[save_path, goodscode]`` pairs from the input queue and
    download each image via ``url_image_download`` until the sentinel
    arrives."""
    in_q = self.input_queue
    sentinel = self.queue_endian
    overwrite = self.overwrite
    if in_q is None:
        logger.critical("Queue is Empty")
        return
    while True:
        message = in_q.get()
        if message == sentinel:
            logger.debug("ImageDownloader Done")
            return
        # print(message)
        save_path = message[0].strip()
        goodscode = message[1].strip()
        if save_path is None or goodscode is None:
            logger.warning(str(os.getpid()) +
                           " / Warning with Queue " + str(message))
            continue
        if url_image_download(save_path, goodscode, overwrite) is False:
            logger.warning("Image Donwload Error " + goodscode)
def get_glow():
    """Collect the daily glow sticks and record the count in global state.

    :return: the raw response of the backpack query
    """
    # The prop is only granted after visiting the live room once.
    logger.info("------正在获取荧光棒------")
    go_room()
    glow_res = dyreq.request("get", "/japi/prop/backpack/web/v1?rid=12306")
    global Bags
    logger.info("------背包检查开始------")
    try:
        # Both the HTTP status and the business code must signal success.
        assert glow_res.status_code == 200
        assert glow_res.json()['msg'] == "success"
        if not glow_res.json()['data']['list']:
            # Empty backpack: nothing to count.
            logger.warning("当前背包中没有任何道具")
            logger.info("------背包检查结束------")
        else:
            global Own
            Own = jsonpath(glow_res.json(), '$..list[?(@.id == 268)].count')
            logger.info("当前拥有荧光棒%s个,给你喜欢的主播进行赠送吧" % Own)
            Bags = 1
            logger.info("------背包检查结束------")
    except AssertionError:
        if glow_res.json()['msg'] == '请登录':
            logger.error("请更新COOKIE")
        else:
            logger.error("领取荧光棒时发生错误")
        logger.info("------背包检查结束------")
    return glow_res
def _wait_with_log(self, update_on):
    """Wait for the number of seconds stored in the *update_on* setting,
    logging progress every ``WAIT_SECS`` seconds and re-reading the setting
    so a live change to the wait time takes effect mid-wait.

    :param update_on: name of the realtime setting holding the wait seconds
    """
    seconds = get_realtime_setting(update_on, int)
    # Log every `WAIT_SECS` seconds
    wait_secs = get_realtime_setting("WAIT_SECS", int, 10)
    spin_count = seconds // wait_secs
    leap = seconds % wait_secs
    # Sleep the remainder first so the loop below deals in whole chunks.
    time.sleep(leap)
    total_waited_time = leap
    while spin_count > 0 and self.is_active:
        wait_secs = get_realtime_setting("WAIT_SECS", int, 10)
        remaining_time = spin_count * wait_secs
        logger.debug("Waiting... %d seconds remained.", remaining_time)
        time.sleep(wait_secs)
        total_waited_time += wait_secs
        wait_time_s = get_realtime_setting(update_on, int)
        if wait_time_s != seconds:
            # The setting changed mid-wait: re-plan the remaining spins
            # against the time already waited.
            logger.warning("Time has been updated!")
            if total_waited_time >= wait_time_s:
                break
            spin_count = (wait_time_s - total_waited_time) // wait_secs
            seconds = wait_time_s
            continue
        spin_count -= 1
    if self.is_active:
        logger.debug("Time is up.")
def glow_donate(num=1, room_id=12306):
    """Donate glow sticks to a room and keep the global counter in sync.

    :param num: number of glow sticks to donate to the room
    :param room_id: target room id
    """
    # Donating with an empty backpack would fail, so bail out early.
    if not Bags:
        return
    DATA = "propId=268&propCount=%s&roomId=%s&bizExt={\"yzxq\":{}}" % (num, room_id)
    donate_res = dyreq.request(method="post",
                               path="/japi/prop/donate/mainsite/v1",
                               data=DATA)
    global Own
    try:
        assert donate_res.status_code == 200
        assert donate_res.json()['msg'] == "success"
        # Update the remaining-stick bookkeeping.
        now_left = int(Own) - int(num)
        Own = now_left
        logger.info("向房间号%s赠送荧光棒%s个成功,当前剩余%s个" % (room_id, num, now_left))
    except AssertionError:
        if donate_res.json()['msg'] == "用户没有足够的道具":
            logger.warning("向房间号%s赠送荧光棒失败,当前背包中荧光棒数量为:%s,而设定捐赠数量为%s" % (room_id, Own, num))
        else:
            logger.warning(donate_res.json()['msg'])
def __error_screenshot(driver, screenshot_path):
    """Capture a screenshot after an exception was raised.

    :param driver: the initialized device driver (self.driver)
    :param screenshot_path: file path the screenshot is written to
    """
    driver.get_screenshot_as_file(screenshot_path)
    logger.warning('当前生成了一张错误截图!')
def find_elements(self, *loc):
    """Wait up to 30 seconds for elements matching *loc*, then return them.

    :param loc: locator tuple, star-unpacked into ``find_elements``
    :raises NoSuchElementException: re-raised after logging a warning and
        taking a screenshot
    """
    try:
        WebDriverWait(self.driver, 30).until(
            lambda drv: drv.find_elements(*loc))
        return self.driver.find_elements(*loc)
    except NoSuchElementException:
        logger.warning('Can not find element: %s' % loc[1])
        self.get_screenshot()
        raise
def stop(self):
    """Stop the application. Usage: stop"""
    self.queue.state = False
    # Join slave first, then master, mirroring start order.
    for worker in (self.slave, self.master):
        if worker:
            worker.join()
    start_log_level_checker()
    logger.warning("Application stopped.")
def start_app():
    """Start the console reader and log-level checker, then block on the
    exception listener until the user interrupts."""
    reader = Thread(target=read_console_commands,
                    name="console_reader", daemon=True)
    reader.start()
    start_log_level_checker()
    try:
        listen_exceptions()
    except (KeyboardInterrupt, EOFError):
        Q.state = False
        logger.warning("Application terminated")
def is_login():
    """Validate the cookie by calling the login endpoint.

    :return: the login result for the main program to check (1 when the
        cookie is valid)
    """
    global Is_login
    response = dyreq.request("get", login_url).json()
    if response['error'] != 0:
        logger.warning("登陆失败,请检查Cookie有效性")
    else:
        Is_login = 1
        logger.info("Cookie有效,登陆成功")
    return Is_login
def put_goodscode(self, goodscode):
    """Re-activate *goodscode* by removing it from the pending-delete map.

    :param goodscode: code to restore; must exist in ``item_dict``
    :return: False for an unknown code, True otherwise
    """
    if goodscode not in self.item_dict:
        # print("Wrong Goodscode, " + goodscode)
        return False
    pending = self.item_del_dict
    try:
        del pending[goodscode]
        # print("Remove from Delete list, Goodscode :" + goodscode)
    except KeyError:
        logger.warning("Already Delete, Goodscode :" + goodscode)
    self.item_del_dict = pending
    return True
def log_level_checker(queue):
    """Poll the LOG_LEVEL realtime setting while *queue* is active, applying
    changes to the logger as they appear."""
    while queue.state:
        current = logger.level
        wanted = get_realtime_setting("LOG_LEVEL", int)
        if current != wanted:
            logger.setLevel(wanted)
            logger.warning("Log level has been changed from %s to %s",
                           logging.getLevelName(current),
                           logging.getLevelName(wanted))
        # Poll every 5 seconds.
        time.sleep(5)
def load(self):
    """Load model weights from ``self.file_model`` and, when present, the
    persisted state from ``STATE``; the outcome is recorded in
    ``self.status_model_weights``."""
    logger.debug("Model Path: %s" % self.file_model)
    if not os.path.exists(self.file_model):
        self.status_model_weights = ModelWeightsStatus.MODEL_NOT_FOUND
        logger.warning("Weights files not found.")
    else:
        self.model.load_weights(self.file_model)
        self.status_model_weights = ModelWeightsStatus.SUCCESS
        logger.debug("Weights Loaded")
    if os.path.exists(STATE):
        with open(STATE) as f:
            self.state = json.load(f)
        logger.debug("State Loaded")
def find_element(self, *loc):
    """Wait up to 30 seconds until the element located by *loc* is
    displayed, then return it.

    :param loc: locator tuple, e.g. ``(By.ID, "name")`` — star-unpacked
    :return: the located WebElement
    :raises NoSuchElementException: when the element does not exist
    :raises TimeoutException: when the element never becomes displayed
    """
    try:
        # Wait until the element is displayed; the locator tuple must be
        # star-unpacked into find_element.
        WebDriverWait(self.driver, 30).until(
            lambda driver: driver.find_element(*loc).is_displayed())
        return self.driver.find_element(*loc)
    except NoSuchElementException:
        # FIX: previously this branch logged the identical message twice
        # (warning then error); log it once, as the sibling find_elements does.
        logger.warning('Can not find element: %s' % loc[1])
        raise
    except TimeoutException:
        logger.error('Can not find element: %s' % loc[1])
        raise
def delete_goodscode(self, goodscode):
    """Mark *goodscode* for deletion by adding it to the pending-delete map.

    :param goodscode: code to mark; must exist in ``item_dict``
    :return: False for an unknown code, True otherwise
    """
    if goodscode not in self.item_dict:
        # print("Wrong Goodscode, " + goodscode)
        return False
    pending = self.item_del_dict
    if goodscode not in pending:
        # print("Added to Delete list, Goodscode :" + goodscode)
        pending[goodscode] = None
    else:
        logger.warning("Already Added, Goodscode :" + goodscode)
    self.item_del_dict = pending
    return True
def is_login():
    """Validate the cookie via the login endpoint and log the user name.

    :return: the login result for the main program to check (1 when
        logged in)
    """
    global Is_login
    info = dyreq.request("get", login_url).json()['msg']['info']
    if not info:
        logger.warning("登陆失败,请检查Cookie有效性")
    else:
        Is_login = 1
        user_name = info['nn']
        logger.info("Cookie有效,登陆成功")
        logger.info("用户名称:{}".format(user_name))
    return Is_login
def __handle_popup(driver):
    """Dismiss the system permission-request dialog by accepting it.

    :param driver: the initialized device driver (self.driver)
    """
    try:
        logger.warning('正在尝试关闭系统权限申请弹窗......')
        driver.switch_to.alert.accept()
        logger.debug('已关闭系统权限申请弹窗!')
    except Exception as error:
        logger.error("handle permission popup exception: %s", error)
def set_proxy(obj, proxy=settings.DEFAULT_PROXY):
    """Sets the proxy on *obj* and installs it into urllib, but only when a
    direct connection probe fails."""
    try:
        # Quick probe: if this succeeds we are online and no proxy is needed.
        urllib.request.urlopen("http://www.google.com", timeout=1)
        return
    except urllib.error.URLError:
        logger.warning("Setting proxy")
        obj.setProxy(proxy)
    proxies = {"http": f"http://{proxy}", "https": f"http://{proxy}"}
    handler = urllib.request.ProxyHandler(proxies)
    opener = urllib.request.build_opener(handler,
                                         urllib.request.HTTPBasicAuthHandler(),
                                         urllib.request.CacheFTPHandler)
    urllib.request.install_opener(opener)
async def post(self, *args, **kwargs):
    """Handle an incoming chat callback: extract the message text and user
    id from the JSON body, hand them to the AI reply pipeline, and
    acknowledge with "success"."""
    payload = json.loads(self.request.body)
    content = payload["msg_body"]["text"]["content"]
    user_id = payload["user_id"]
    logger.warning(f"{content}")
    logger.warning(f"{user_id}")
    # NOTE(review): `msg` is only consumed by the commented-out websocket
    # forwarding below; kept for behavioural parity.
    msg = {"user_id": user_id, "data": content}
    # websocket forwarding (disabled):
    # ws = websocket.WebSocket()
    # ws.connect(f"ws://39.96.21.121:8646/chat/{user_id}")
    # ws.send(f"{msg}")
    # ws.close()
    # ws.recv()
    await ChatHandler.ai_reply(user_id, content)
    self.write("success")
def download_image(url, path, filename):
    """Download *url* into *filename*, OCR the image and return the text.

    On download failure the whole *path* directory is discarded and ``None``
    is returned so the caller can skip the remaining media.
    """
    try:
        urllib.request.urlretrieve(url, filename=filename)
    except urllib.error.URLError:
        # Download failed: step out of the directory, clean it up, signal.
        os.chdir(settings.BASE_DIR)
        shutil.rmtree(path)
        return None
    image_file = os.path.join(path, filename)
    text_in_image = convert_jpg_to_text(image_file, 'TURKISH')
    if text_in_image.strip():
        logger.warning("The text of the image is: %s", text_in_image)
    return text_in_image
def create_driver(request):
    """Pytest fixture: build a WebDriver for the browser named by
    ``request.param`` and yield it maximized.

    Supported values: Chrome ('google chrome'/'chrome'/'gc', the default),
    mobile emulation ('mobile'/'gcm'), Firefox ('firefox'/'ff'),
    IE ('ie'/'internet explorer'); anything else falls back to Edge.
    When the automatic driver download fails, the system-installed driver
    is used instead.
    """
    # TODO: 1. create the driver from passed parameters; 2. support custom
    # driver-creation options.
    browser = request.param if request.param else 'gc'
    if browser in ['google chrome', 'chrome', 'gc']:
        try:
            driver = webdriver.Chrome(
                chrome.ChromeDriverManager(log_level=logging.WARN).install())
        except requests.exceptions.ConnectionError:
            logger.warning('自动化下载驱动失败,使用系统配置驱动')
            driver = webdriver.Chrome()
    elif browser in ['mobile', 'gcm']:
        mobileEmulation = {'deviceName': 'iPhone X'}
        options = webdriver.ChromeOptions()
        options.add_experimental_option('mobileEmulation', mobileEmulation)
        try:
            driver = webdriver.Chrome(
                chrome.ChromeDriverManager(log_level=logging.WARN).install(),
                chrome_options=options)
        # FIX: previously caught the builtin ConnectionError, which is a
        # sibling (not an ancestor) of requests.exceptions.ConnectionError,
        # so a failed driver download was never caught here; also log the
        # fallback like the other branches do.
        except requests.exceptions.ConnectionError:
            logger.warning('自动化下载驱动失败,使用系统配置驱动')
            driver = webdriver.Chrome(chrome_options=options)
    elif browser in ['firefox', 'ff']:
        driver = webdriver.Firefox(
            executable_path=firefox.GeckoDriverManager().install())
    elif browser in ['ie', 'internet explorer']:
        caps = DesiredCapabilities.INTERNETEXPLORER
        caps['nativeEvents'] = False
        try:
            driver = webdriver.Ie(microsoft.IEDriverManager(
                os_type='Win32', log_level=logging.WARN).install(),
                capabilities=caps)
        except requests.exceptions.ConnectionError:
            logger.warning('自动化下载驱动失败,使用系统配置驱动')
            driver = webdriver.Ie(capabilities=caps)
    else:
        driver = webdriver.Edge()
    driver.maximize_window()
    driver.speed = 1
    yield driver
def _create_db_for_first_use(self, skip_error=False):
    """Create all tables and insert the default settings row.

    :param skip_error: when True, a failed table creation is only logged
        as a warning instead of aborting with a raise
    """
    logger.debug("Creating tables")
    for table in self.tables:
        try:
            self.database.query(table).create().execute()
        except sqlite3.OperationalError:
            if skip_error:
                logger.warning(
                    "Could not created table %s", table.__table_name__)
            else:
                logger.error(
                    "Could not created table %s", table.__table_name__)
                raise
    self.insert(Settings())
async def get(self, path, headers=None, cache_ttl=Config.CACHE_TTL, **args):
    """Fetch *path* with query *args*, serving from cache when possible and
    retrying failed requests before giving up.

    :param path: URL path to request
    :param headers: optional request headers (defaults to an empty dict)
    :param cache_ttl: seconds to cache a successful response
    :param args: query-string parameters (urlencoded with doseq=True)
    :return: the decoded JSON payload
    :raises UnexpectedResponseException: when every retry returns non-200
    """
    headers = headers if headers else {}
    params = urlencode(args, True)
    # Serve a cached response when one exists for this url/params/headers.
    cached_data = await self.check_cache(url=path, params=params, headers=headers)
    if cached_data:
        return ujson.loads(cached_data.decode("utf8"))
    url = "{path}?{params}".format(
        path=path, params=params) if params else "{path}".format(path=path)
    data = None
    for _ in range(self.number_of_retries):
        data, status_code = await self.make_http_request("GET", url=url, headers=headers)
        if status_code == 200:
            logger.debug(f"Data is Successfully retrieved")
            break
        logger.debug(
            f"Could not retrieve data. Response status code: {status_code}"
        )
        await asyncio.sleep(self.retry_sleep_time)
    else:
        # for/else: every retry failed without hitting the break -> give up.
        # NOTE(review): with number_of_retries == 0 this would reference
        # status_code before assignment — presumably always >= 1; confirm.
        logger.warning(
            f"Could not retrieve data from {self.name}. Response status code: {status_code}"
        )
        raise UnexpectedResponseException(data, status_code)
    if data:
        # Cache the successful payload for subsequent calls.
        await self.cache_response(cache_ttl, url=path, params=params,
                                  headers=headers, data=ujson.dumps(data))
    return data
def copy_open(self, srcfile, dstfile):
    """Open *srcfile* with xlrd (keeping formatting) and prepare a writable
    copy that will later be saved to *dstfile*.

    :param srcfile: existing workbook to copy
    :param dstfile: target path, recorded in ``self.df`` for later saving
    """
    # The source workbook must exist.
    if not os.path.isfile(srcfile):
        logger.error(srcfile + " not exist!")
        return
    # Warn when the destination already exists.
    if os.path.isfile(dstfile):
        logger.warning(dstfile + " file already exist!")
    # Remember where the copy should be saved.
    self.df = dstfile
    # formatting_info=True keeps cell formatting so the copy is faithful.
    self.workbook = xlrd.open_workbook(filename=srcfile, formatting_info=True)
    self.wb = copy(self.workbook)
    # The caller picks a sheet later, e.g. wb.get_sheet('Sheet1').
def download_images(self, username, media_urls):
    """Downloads the images from given url list.

    :param username: account name, used in the generated file names
    :param media_urls: list of single-key dicts mapping a media key to a
        list of ``(url, media_type)`` pairs — TODO confirm against caller
    """
    downloaded_media_count = 0
    is_shared_before = False
    for item in media_urls:
        # Each item has exactly one key, used to name the download folder.
        key = list(item)[0]
        path = os.path.join(settings.DOWNLOADS, f"downloaded_images_{key}")
        os.makedirs(path, exist_ok=True)
        # NOTE(review): LockDir presumably serialises concurrent access to
        # the download directory — confirm its semantics.
        with LockDir(path):
            os.chdir(path)
            for index, (url, media_type) in enumerate(item[key]):
                filename = f"{index}_{key}_{username}{MediaTypes.get_extension(media_type)}"
                text_in_image = self.download_image(url, path, filename)
                if text_in_image is None:
                    # Failed to download image
                    logger.error("Image download failed.")
                    continue
                downloaded_media_count += 1
            if is_any_photo_shared(path):
                logger.warning(
                    "The image shared before, filtered on image"
                )
                is_shared_before = True
            os.chdir(settings.BASE_DIR)
        if is_shared_before:
            # Retry the removal: the directory handle may still be held
            # briefly (e.g. on Windows), raising PermissionError.
            for _ in range(10):
                try:
                    shutil.rmtree(path)
                except PermissionError:
                    time.sleep(0.5)
                else:
                    break
    logger.info("%s media has been downloaded.", downloaded_media_count)
def get_realtime_setting(setting, convert=lambda x: x, default=None):
    """Return the value of *setting*, preferring the database copy over the
    static settings module, and *default* when neither has a value.

    :param setting: attribute name to look up
    :param convert: callable applied to the database value (e.g. ``int``)
    :param default: fallback when the attribute exists nowhere
    """
    # pylint: disable=bare-except
    try:
        with DB() as database:
            rows = database.database.query(Settings).select().execute()
            record = None
            for row in rows:
                record = Settings.to_user_class(row)
                # Could be only one record
                break
            if record is not None:
                value = getattr(record, setting)
                if value:
                    return convert(value)
    except:
        logger.warning("Could not get the %s from database", setting)
    # Fall back to the static settings module, then to the default.
    return getattr(settings, setting, default)
def enable_tornado_log():
    """Enable Tornado's built-in loggers.

    * ``tornado.access``: per-request logging for Tornado's HTTP servers
    * ``tornado.application``: errors from application code (uncaught
      exceptions from callbacks)
    * ``tornado.general``: general-purpose logging, including any errors
      or warnings from Tornado itself
    """
    try:
        # Attach the shared file handler and mirror our logger's level.
        for tornado_logger in (access_log, app_log, gen_log):
            tornado_logger.addHandler(filehandler)
            tornado_logger.setLevel(logger.level)
    except Exception:
        error_msg = traceback.format_exc()
        logger.warning(f'enable tornado log fail.\t{error_msg}')
        logger.error(f'enable tornado log fail.')
def url_image_download(save_path, goodscode, overwrite):
    """Download the product image for *goodscode* into *save_path*.

    :param save_path: directory for the saved jpg
    :param goodscode: product code; the file is named "<code>.jpg"
    :param overwrite: when False an existing file is kept and counted as
        success
    :return: True when the file exists or was saved, False on a non-200
        response
    """
    item_id = str(goodscode)
    target = os.path.normcase(os.path.join(save_path, item_id + ".jpg"))
    # Skip the download when the file is present and overwrite is off.
    if overwrite is False and filesystem.file_exist(target):
        return True
    http = urllib3.PoolManager()
    req = http.request('GET', "http://image.g9.co.kr/g/" + str(item_id) + "/o")
    try:
        if req.status != 200:
            logger.warning("Image URL Connection Error")
            return False
        return filesystem.file_save(target, req.data)
    finally:
        # Always return the connection to the pool.
        req.release_conn()
#解析字符串 try: infos = content.strip().split('&') dept_id = infos[0] #机场三字码 dest_id = infos[1] #机场三字码 day, month, year = infos[2][6:], infos[2][4:6], infos[2][0:4] dept_date = year+'-'+month+'-'+day dept_date_url = year[-2:] + month + day #140627 except Exception, e: logger.error('ceairFlight: Wrong Content Format with %s'%content) result['error'] = TASK_ERROR return result if AIRPORT_CITY_DICT.has_key(dept_id) == False or AIRPORT_CITY_DICT.has_key(dest_id) == False: logger.warning('ceairFlight: airport not in AIRPORT_CITY_DICT') result['error'] = DATA_NONE return result p = get_proxy(source = 'ceairFlight') if p == None: result['error'] = PROXY_NONE return result postdata = getPostData(dept_id,dest_id,dept_date) if postdata == '': result['error'] = UNKNOWN_TYPE return result
# Parse the task content string (fragment of a larger Python 2 function:
# `content` and `result` are defined by the enclosing scope, not shown here).
try:
    infos = content.strip().split('&')
    dept_id = infos[0]  # airport three-letter code
    dest_id = infos[1]  # airport three-letter code
    dept_day = infos[2]
    return_day = infos[3]
    # Days are YYYYMMDD; reformat as YYYY-MM-DD.
    dept_date = dept_day[0:4] + '-' + dept_day[4:6] + '-' + dept_day[6:]
    return_date = return_day[0:4] + '-' + return_day[4:6] + '-' + return_day[6:]
except Exception,e:
    logger.info('lcairRoundFlight: Wrong Content Format with %s'%content)
    return result
# Both endpoints must be known airports.
if AIRPORT_CITY_DICT.has_key(dept_id) == False or AIRPORT_CITY_DICT.has_key(dest_id) == False:
    logger.warning('lcairRoundFlight: airport not in AIRPORT_CITY_DICT')
    result['error'] = DATA_NONE
    return result
# A proxy is mandatory for the crawl.
p = get_proxy(source = 'lcairRoundFlight')
if p == None:
    result['error'] = PROXY_NONE
    return result
postdata = getPostData(dept_id, dest_id, dept_date, return_date)
if postdata == None:
    result['error'] = UNKNOWN_TYPE
    return result
uc = UrllibCrawler(p = p)