def start(self):
    """Main accept/serve loop for the epoll-based TCP server.

    New connections are made non-blocking and registered edge-triggered;
    readable events are dispatched to ``self.handler``.  On any receive or
    handler error the offending client socket is closed.
    (Python 2 syntax: ``except Exception, e``.)
    """
    self.running = True
    logger.info("TCP socket server [%s:%s] start." % (self.host, self.port))
    while self.running:
        selectors = self.selector.poll()
        for fd, event in selectors:
            if fd == self.server_socket.fileno():
                # Readable listener fd means a pending connection.
                sock, addr = self.server_socket.accept()  # new client
                sock.setblocking(False)
                self._set_socket_opt(sock)
                self.clients.update({sock.fileno(): sock})
                self.clients_ext.update({sock.fileno(): addr})
                self.selector.register(sock.fileno(), select.EPOLLIN | select.EPOLLPRI | select.EPOLLET)
                logger.info("new socket client: [%s]" % str(addr))
            elif event & select.EPOLLIN:
                try:
                    recv_data = self.recv(self.clients[fd])
                    if self.handler and recv_data:
                        self.handler.handle(self, self.clients[fd], recv_data)
                except Exception, e:
                    # NOTE(review): nesting reconstructed from a collapsed
                    # source line -- the close-on-error block is assumed to
                    # live inside this except; confirm against the original.
                    logger.error("recv data and handle data with exception: %s" % e)
                    if fd in self.clients_ext.keys():
                        logger.info("close socket [%s]" % str(self.clients_ext[fd]))
                        #self.send(sock, "close you!", 10)
                        self.close_socket(self.clients[fd])
            elif event & select.EPOLLPRI:
                logger.info("EPOLLPRI")
            elif event & select.EPOLLERR or event & select.EPOLLHUP:
                logger.info("socket [%s] is error, closed it." % str(self.clients_ext[fd]))
                if fd in self.clients_ext.keys():
                    self.close_socket(self.clients[fd])
def dowload_pdf_and_convert():
    """Download each pending project's PDF and convert it to UTF-8 text.

    Uses the shared selenium ``browser`` to locate the download link, streams
    the PDF into ..\\data\\<title>.pdf, extracts its text with pdfplumber into
    a sibling .txt, stamps the project as downloaded, and finally writes the
    project list back to the DB.

    NOTE(review): the function name has a typo ("dowload"); rename together
    with its call sites, not here.
    """
    projects = db.read_data()
    for project in projects:
        if project.get("downloaded"):
            # Already fetched on a previous run.
            pass
        else:
            browser.get(project["link"])
            time.sleep(5)  # wait for the detail page to render
            href = browser.find_elements_by_css_selector("tr#tile30 td.vs1 a")
            if len(href) != 1:
                # Page layout changed: abort the whole batch rather than
                # guessing at the wrong link.
                log.error("web format changed, cannot find downloadlink")
                break
            else:
                href = href[0]
                pdf_file = requests.get(href.get_attribute("href"), stream=True)
                with open("..\\data\\{}.pdf".format(project.get("title")), "wb") as f:
                    for chunk in pdf_file.iter_content(chunk_size=128):
                        if chunk:
                            f.write(chunk)
                            f.flush()
                project["downloaded"] = time.strftime("%d_%m_%Y")
                log.info("{} has been downloaded.".format(project.get("title")))
                try:
                    with pdfplumber.open("..\\data\\{}.pdf".format(project.get("title"))) as pdf:
                        txt_filename = project.get("title") + ".txt"
                        with codecs.open("..\\data\\{}".format(txt_filename), "w", encoding="utf-8") as new_f:
                            for page in pdf.pages:
                                new_f.write(page.extract_text())
                    log.info("{} had been converted.".format(project.get("title")))
                except:
                    # NOTE(review): bare except hides real failures; consider
                    # narrowing and logging the traceback.
                    log.error("{} had convert failed.".format(project.get("title")))
    db.write_data(projects)
def make_order(order_num):
    """Replay part of the xiaomi youpin mobile-web ordering flow for a product id.

    Mirrors the browser's requests: (1) post a visit statistic, (2) check the
    login state, (3) a further stat call that is left unfinished below.
    """
    order_num = str(order_num)
    ts = int(time.time())
    # step1
    r = requests.post("https://m.xiaomiyoupin.com/app/stat/visitv2",
        data=[{"e":{"et":"shop","ref":"$Detail$?v=8&id=https%3A%2F%2Fm.xiaomiyoupin.com%2Fdetail%3Fgid%3D{order_num}&event=TOUCH&time={ts}&area=buy_confirm&iid=g%3D{order_num}%26pid%3D87152".format(order_num=order_num, ts=ts),
            "t":ts,"spm":"YouPinM.$Detail$_{order_num}.buy_confirm.0.50164756".format(order_num=order_num)}}],
        headers={
            "Content-Type":"application/x-www-form-urlencoded",
            "DToken":"",
            "Origin":"https://m.xiaomiyoupin.com",
            "Referer":"https://m.xiaomiyoupin.com/detail?gid=115147",
            "User-Agent":"Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.119 Mobile Safari/537.36",
            "X-User-Agent":"channel/youpin platform/youpin.m",
            "X-Yp-App-Source":"front-RNWeb-old"
        })
    log.info("step1 result: "+str(r.status_code))
    # step2
    r = requests.post("https://m.xiaomiyoupin.com/api/auth/login/isloggedin",
        headers={
            # Referer carries an encrypted field; reproducing it was abandoned for now
            "Content-Type":"application/x-www-form-urlencoded",
            "DToken":"",
            "Origin":"https://m.xiaomiyoupin.com",
            "User-Agent":"Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.119 Mobile Safari/537.36",
            "X-User-Agent":"channel/youpin platform/youpin.m",
            "X-Yp-App-Source":"front-RNWeb-old"
        })
    log.info("step2 result: "+str(r.text))
    # step3
    # NOTE(review): this call is clearly unfinished (no payload, result
    # unused) -- confirm intent before relying on make_order end-to-end.
    r = requests.post("https://m.xiaomiyoupin.com/app/stat/visitv2", )
def get(self):
    """Search images by keyword across several columns and render the results.

    Reads the ``page`` and ``q`` query arguments; redirects to /images_info
    when the query is empty.  Images whose container is already running for
    the current user are filtered out of both the result list and the count.
    """
    page = int(self.get_argument("page", 1))
    q = self.get_argument('q', '')
    if not q:
        self.redirect('/images_info')
        return
    # Build the LIKE pattern once instead of repeating the concatenation.
    like = '%' + q + '%'
    # BUG FIX: the original compared the raw column against upper(?), which
    # only matched rows whose stored text happened to be upper case.  Upper-
    # case both sides so the search is genuinely case-insensitive.
    sql = ('SELECT * FROM tb_images WHERE upper(name) LIKE upper(?) '
           'OR upper(tags) LIKE upper(?) OR upper(info) LIKE upper(?) '
           'OR upper(author) LIKE upper(?) OR upper(types) LIKE upper(?) ;')
    images_count = self.db_select(sql, [like] * 5)
    sql = ('SELECT * FROM tb_images WHERE upper(name) LIKE upper(?) '
           'OR upper(tags) LIKE upper(?) OR upper(info) LIKE upper(?) '
           'OR upper(author) LIKE upper(?) OR upper(types) LIKE upper(?) LIMIT ?,?;')
    images_result = self.db_select(sql, [like] * 5 + [(page - 1) * page_size, page_size])
    # NOTE: 'runing' is the spelling stored in tb_status throughout this
    # project -- do not "fix" it here without a data migration.
    sql = 'SELECT images_id FROM tb_status WHERE containers_user = ? AND containers_status = "runing";'
    statrt_result = self.db_select(sql, [self.current_user.decode()])
    result = []
    for x in images_result:
        if {'images_id': x['images_id']} not in statrt_result:
            x['json_images_port'] = json.loads(x['images_port'])
            result.append(x)
    result_count = []
    for x in images_count:
        if {'images_id': x['images_id']} not in statrt_result:
            result_count.append(x)
    logger.info('获取用户%s搜索的可以使用的镜像!' % self.current_user.decode())
    self.render('images.html', cursor = result, count = len(result_count))
def get(self):
    """Start a container for the image given by the ``images_id`` argument.

    Renders docker.html with error=True on a bad id or a start failure; on
    success shows the mapped ports.  If the DB insert fails after a fresh
    start, the container is stopped again so no orphan keeps running.
    """
    images_id = self.get_argument("images_id", '').strip()
    sql = 'SELECT * FROM tb_images WHERE images_id = ? ;'
    result = self.db_select(sql, [images_id])
    if not result :
        logger.error('用户%s提交的镜像id错误' % self.current_user.decode())
        self.render('docker.html', status = '', port = '', error = True)
        return
    # __start returns (port mapping, container id, flag).  NOTE(review): the
    # exact meaning of ``tf`` is inferred from usage (falsy -> needs a DB
    # insert, i.e. freshly started) -- confirm against __start's definition.
    port, containers_id, tf = self.__start(result[0]['images_id'], result[0]['images_port'])
    if not containers_id:
        logger.error('用户%s开启容器时错误,错误镜像id是%s' % (self.current_user.decode(), result[0]['images_id']))
        self.render('docker.html', status = '', port = '', error = True)
        return
    if not tf :
        r = self.__insert_sql(containers_id, result[0]['images_id'], json.dumps(port))
        if r :
            logger.info('用户%s开启容器成功,容器id是%s,数据写入数据库成功!' % (self.current_user.decode(), containers_id))
        else:
            logger.error('用户%s开启容器成功,容器id是%s,数据写入数据库失败!' % (self.current_user.decode(), containers_id))
            # Roll back: stop the container we just started since we could
            # not record it in the database.
            con = Dockers_Info.Get_Containers_Message(containers_id)
            Dockers_Stop.Stop_Containers(con)
            self.render('docker.html', status = '', port = '', error = True)
            return
    self.render('docker.html', status = 'start', port = self.__get_http(port), error = False)
def __re_port(self, strings):
    """Parse a port specification like "80/tcp,53/udp" into a JSON list.

    Entries may be separated by ',' or ';'.  Returns a JSON-encoded list of
    {'port': ..., 'protocol': ...} dicts, or '' when any entry is malformed.
    """
    result = []
    logger.info('处理传入的环境端口信息!')
    string = re.split(r',|;', strings.lower())
    if not string:
        logger.error('传入的环境端口信息有误!错误端口信息为:%s' % strings)
        return ''
    res = r'(\d{1,5})/(tcp|udp)'
    # BUG FIX: removed a leftover debug print(string) that leaked every
    # parsed specification to stdout.
    for _ in string:
        if _ == '':
            # Tolerate trailing/duplicate separators.
            continue
        r = re.search(res, _)
        if not r :
            logger.error('传入的环境端口信息有误!错误端口信息为:%s' % _)
            return ''
        port = r.group(1)
        protocol = r.group(2)
        result.append({'port': port, 'protocol': protocol})
    logger.info('成功处理传入的环境端口信息,获取到端口信息为:%s' % json.dumps(result))
    return json.dumps(result)
def check_all_shops():
    """Run keyword checks against every configured shop.

    Reads config/shop.json next to this module; each entry holds a list of
    shops plus a comma-separated "key_word" string.
    """
    with open(os.path.join(os.path.dirname(__file__),"config","shop.json"), "r", encoding='UTF-8') as f:
        infos = json.loads(f.read())
    for info in infos:
        for shop in info["shop"]:
            # BUG FIX: the log line used info.get("keyword") while the code
            # below reads "key_word", so the log always printed None.
            log.info("checking {} / {}".format(shop, info.get("key_word")))
            keywords = info.get("key_word").split(",")
            check_shop(shop, keywords)
def getZhuanXianPages(self, pages=pages):
    """Crawl every ZhuanXian listing page for every city, cities in random order.

    For each city, walks ``pages`` listing pages, then for every listing URL
    walks its detail pages and saves each found id.
    """
    city_list = self.getZhuanXianCityList()
    logger.info(city_list)
    # BUG FIX: random.shuffle() shuffles in place and returns None, so the
    # original `for city in random.shuffle(city_list)` raised TypeError on
    # the first call.  Shuffle first, then iterate the list.
    random.shuffle(city_list)
    for city in city_list:
        for page in range(1, pages):
            for url in self.getUrlsForCity(city, page):
                # BUG FIX: the inner loop reused the name `page`, clobbering
                # the outer page counter; use a distinct variable.
                for detail_page in range(1, pages):
                    for id in self.getIdsForAUrl(url, detail_page):
                        self.getAndSaveZhuanXianFromId(id)
def __get_port(self):
    """Return a random TCP port in [1024, 65535] that is not currently listening.

    Listening ports are read once via ``netstat``; random candidates are
    drawn until one not in that list is found.
    """
    logger.info('获取系统已经开启的端口,并返回一个随机端口')
    cmd = "netstat -ntl | grep -v Active | grep -v Proto | awk '{print $4}' | awk -F: '{print $NF}'"
    ports = os.popen(cmd).read().split('\n')
    # BUG FIX: the original recursed on a collision but did not return the
    # recursive call's value, so the method returned None whenever the first
    # random port was taken.  A loop fixes that and avoids deep recursion.
    while True:
        port = random.randint(1024, 65535)
        if str(port) not in ports:
            return port
def stop(self):
    """Shut the TCP server down: drop every client, then close the listener."""
    self.running = False
    logger.info("TCP socket server [%s:%s] stop." % (self.host, self.port))
    # Tear down each connected client before touching the listening socket.
    for client_fd, client_sock in self.clients.items():
        self.selector.unregister(client_fd)
        self.clients_ext.pop(client_fd)
        client_sock.close()
    self.server_socket.close()
    self.selector.close()
def stop(self):
    """Stop the TCP server.

    Flips the run flag so the accept loop exits, unregisters and closes each
    connected client, then closes the listening socket and the selector.

    NOTE(review): this definition appears twice in this file with identical
    bodies -- likely a copy/paste duplicate; confirm and remove one.
    """
    self.running = False
    logger.info("TCP socket server [%s:%s] stop." % (self.host, self.port))
    for fd,client in self.clients.items():
        self.selector.unregister(fd)
        # Drop the per-client metadata before closing the socket.
        self.clients_ext.pop(fd)
        client.close()
    self.server_socket.close()
    self.selector.close()
def __send_messages(cls, que):
    """Background loop that pushes queued system-status JSON to every websocket client.

    Runs forever: blocks on ``que.get()``, serialises the payload, and writes
    it to each registered waiter.  Per-client send failures are swallowed so
    one dead connection cannot stall the broadcast.
    """
    # Each worker thread needs its own asyncio event loop for tornado's
    # websocket writes.
    asyncio.set_event_loop(asyncio.new_event_loop())
    while 1:
        status = json.dumps(que.get())
        for waiters in cls.waiters:
            try:
                waiters.write_message(status)
                logger.info('向客户端发送系统信息成功!')
            except Exception as e:
                # Best-effort delivery: skip clients that already closed.
                continue
        # NOTE(review): clearing the underlying deque directly bypasses the
        # Queue's locking -- presumably intentional backlog dropping; confirm.
        que.queue.clear()
def get(self):
    """Log the current user out.

    Refuses while the user still has running containers; otherwise clears the
    login cookie and redirects to the home page.
    """
    user = self.current_user.decode()
    sql = 'SELECT containers_id FROM tb_status WHERE containers_user = ? and containers_status = ?;'
    running = self.db_select(sql, [user, 'runing'])
    if running:
        logger.error('用户%s没有关闭所有的容器,无法退出登陆!' % user)
        self.render('logout.html')
    else:
        logger.info('用户%s退出登录!' % user)
        self.clear_cookie("cookie_user")
        self.redirect("/")
def __re_hub(self, hub):
    """Split a docker hub reference into (name, tag), defaulting tag to 'latest'.

    Returns the (name, tag) tuple on success, or None when the name part is
    empty (the original fell off the end, returning None implicitly; this is
    the same contract made explicit).
    """
    result = hub.strip().split(':')
    if len(result) > 1 :
        images_name = result[0]
        images_tag = result[1]
        # BUG FIX: a reference like "ubuntu:" used to yield an empty tag;
        # fall back to 'latest' as docker itself does.
        if not images_tag:
            images_tag = 'latest'
    else:
        images_name = hub.strip()
        images_tag = 'latest'
    if images_name :
        logger.info('成功获取到镜像名称为:%s:%s' % (images_name, images_tag))
        return images_name, images_tag
    # Explicitly signal failure instead of an implicit fall-through.
    return None
def get(self):
    """Render the status page: running containers joined with their image rows."""
    user = self.current_user.decode()
    sql = 'SELECT * FROM tb_status WHERE containers_user = ? AND containers_status = "runing";'
    status_result = self.db_select(sql, [user])
    images_result = self.db_select('SELECT * FROM tb_images;')
    lookup_sql = 'SELECT * FROM tb_images WHERE images_id = ?'
    for row in status_result:
        # Attach the image row and the decoded port mapping to each status row.
        row['images_info'] = self.db_select(lookup_sql, [row['images_id']])[0]
        row['json_containers_port'] = json.loads(row['containers_port'])
    logger.info('获取用户%s已开启的镜像名字和端口' % user)
    self.render('status.html', sysinfo = self.status, cursor = status_result,
                start_counts = len(status_result), images_counts = len(images_result))
async def post_json(url, params, semaphore_num=500):
    """POST ``params`` as JSON to ``url`` and return the decoded JSON response.

    Returns None when the response body is not JSON or the request times out.

    NOTE(review): the semaphore is created per call, so it never actually
    limits concurrency across concurrent calls -- a module-level semaphore
    would; confirm the intended behaviour of ``semaphore_num``.
    """
    logger.info(f"async_post_json url is {url} params are {params}")
    semaphore = asyncio.Semaphore(semaphore_num)
    async with semaphore:
        # A fresh session per request works, though sessions are designed to
        # be reused -- consider sharing one if this path gets hot.
        async with ClientSession() as session:
            try:
                async with session.post(url, json=params) as resp:
                    try:
                        return await resp.json()
                    except ContentTypeError as e:
                        logger.exception(f'Result is not json formatted, {e}')
                        return # await resp.text()
            except asyncio.TimeoutError as e:
                logger.exception(f'current request timeout. params is {params}')
def getAndSaveZXFromId(self, id):
    """Fetch one 56gate ZhuanXian detail page by site id and parse it.

    Skips ids already stored in GateZhuanXian; parse failures are logged and
    leave ``result`` empty.  (Python 2 syntax.)

    NOTE(review): ``result`` is never persisted here -- the save step seen in
    sibling crawlers appears to be missing or lives elsewhere; confirm.
    """
    url = "http://56gate.com/html/zx/zxdetail_%s.html" % id
    try:
        GateZhuanXian.objects.get(webSiteId=id)
        logger.info("Zhuanxian %s already exsists" % id)
        return
    except:
        # Any lookup failure (typically DoesNotExist) means "not stored yet".
        pass
    text = self.httpClient.geturlcon(url)
    result = {}
    try:
        result = self.phaseLine(text, id)
    except Exception, e:
        logger.error(e)
def getAndSaveHYFromId(self, id):
    """Fetch one TK56 huoyuan (cargo) detail page by site id and parse it.

    Skips ids already stored in TK56HuoYuan; parse failures are logged with a
    full traceback and leave ``result`` empty.  (Python 2 syntax.)
    """
    url = "http://www.tk56.com/goodsdetail.aspx?id=%s&t=1" % id
    try:
        TK56HuoYuan.objects.get(webSiteId=id)
        logger.info("%s already exsists" % id)
        return
    except:
        # Lookup failure (typically DoesNotExist) -> not stored yet.
        pass
    text = self.httpClient.geturlcon(url)
    result = {}
    try:
        result = self.pharseHYMeta(text, id)
    except Exception, e:
        logger.info(traceback.format_exc())
def getAndSaveZhuanXianFromId(self, id):
    """Fetch one chinawutong ZhuanXian detail page by site id and parse it.

    Skips ids already stored in WTZhuanXian; parse failures are logged with a
    full traceback and leave ``result`` empty.  (Python 2 syntax.)
    """
    url = "http://www.chinawutong.com/201/%s.html" % id
    result = {}
    try:
        WTZhuanXian.objects.get(webSiteId=id)
        logger.info("%s already exsists" % id)
        return
    except:
        # Lookup failure (typically DoesNotExist) -> not stored yet.
        pass
    text = self.httpClient.geturlcon(url)
    try:
        result = self.pharseZhuanXianMeta(text, id)
    except Exception, e:
        logger.error(traceback.format_exc())
def post(self):
    """Handle the change-password form.

    Renders change_pass.html with error codes: 0 success, 1 empty input,
    2 database update failure, 3 wrong old password.

    NOTE(review): passwords are stored as unsalted MD5 -- weak; migrating to
    a salted KDF (e.g. hashlib.pbkdf2_hmac) would need a data migration.
    """
    logger.info('提交数据进行修改密码')
    old_password = self.get_argument("old_password", '')
    new_password = self.get_argument("new_password", '')
    if not old_password.strip() or not new_password.strip():
        logger.info('密码不能为空')
        self.render('change_pass.html', error = 1)
        return
    user = self.current_user.decode()
    old_md = hashlib.md5()
    old_md.update(old_password.encode('utf-8'))
    sql = 'SELECT password FROM tb_userinfo WHERE username = ? LIMIT 1;'
    result = self.db_select(sql, [user])
    if old_md.hexdigest() != result[0]['password']:
        logger.info('原来密码错误')
        self.render('change_pass.html', error = 3)
        return
    new_md = hashlib.md5()
    new_md.update(new_password.encode('utf-8'))
    sql = 'UPDATE tb_userinfo SET password = ? WHERE username = ?'
    if not self.db_update_insert(sql, [new_md.hexdigest(), user]):
        logger.warning('数据库更新失败,密码更改失败')
        self.render('change_pass.html', error = 2)
        return
    logger.info('密码修改成功!')
    self.render('change_pass.html', error = 0)
def read_data():
    """Load data.json sitting next to this module.

    Returns the decoded list, or [] when the file is missing or unreadable
    as JSON.  A missing file is created empty for the next run.
    """
    data_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "data.json")
    data = []
    try:
        with open(data_path, "r") as f:
            try:
                data = json.load(f)
            except json.decoder.JSONDecodeError:
                log.info("data file is empty or cannot decode, reseted.")
    except FileNotFoundError:
        log.info("data file not exist, created one.")
        # Touch an empty file so subsequent runs find it.
        with open(data_path, "w") as f:
            pass
    return data
def get(self):
    """Stop a container identified by the ``containers_id`` query argument.

    Renders docker.html with error=True on any failure (bad id, stop failure,
    DB write failure) and status='close' on success.
    """
    user = self.current_user.decode()
    containers_id = self.get_argument("containers_id", '').strip()
    if not containers_id:
        logger.error('用户%s提交的容器id错误' % user)
        self.render('docker.html', status = '', port = '', error = True)
        return
    logger.info('用户%s提交的容器id是%s。' % (user, containers_id))
    con = Dockers_Info.Get_Containers_Message(containers_id)
    if not con:
        logger.error('用户%s提交的容器id错误' % user)
        self.render('docker.html', status = '', port = '', error = True)
        return
    logger.info('获取到用户%s提交的容器信息。' % user)
    if not Dockers_Stop.Stop_Containers(con):
        logger.error('获取用户%s提交的容器信息,但是在关闭的时候出错。' % user)
        self.render('docker.html', status = '', port = '', error = True)
        return
    logger.info('成功关闭用户%s提交的容器。' % user)
    sql = "UPDATE tb_status SET containers_status = 'closed' WHERE containers_id = ? ;"
    if not self.db_update_insert(sql, [containers_id]):
        logger.error('成功关闭用户%s提交的容器,但是在写入数据库时出错。' % user)
        self.render('docker.html', status = '', port = '', error = True)
        return
    logger.info('成功关闭用户%s提交的容器,并成功写入数据库。' % user)
    self.render('docker.html', status='close', port='', error = False)
def __init__(self): ''' 初始化tornado ''' # tornado的路由信息 handlers = [ (r"/", HomeHandler), (r"/login", LoginHandler), (r"/logout", LogoutHandler), (r"/stop_all_containers", StopAllContainers), (r"/search_images", SearchImagesHandler), (r"/images_info", ImagesHandler), (r"/status_info", StatusHandler), (r"/websocket", SocketHandler), (r"/setting", SettingHandler), (r"/add_images", AddImagesHandler), (r"/start_containers", StartContainersHandler), (r"/stop_containers", StopContainersHandler), (r"/change_pass", Change_Pass_Handler), (r"/add_user", Add_User_Handler), (r".*", ErrorHandler), ] # 初始化tornado的设置 settings = dict( template_path = os.path.join(os.path.dirname(__file__), "templates"), static_path = os.path.join(os.path.dirname(__file__), "static"), xsrf_cookies = False, cookie_secret = "__TODO:_TORNADO_MY_OWN_RANDOM_VALUE_HERE__", login_url = "/login", debug = True, ) logger.info('获取tronado基础配置') tornado.web.Application.__init__(self, handlers, **settings) logger.info('初始化tornado对象,初始化路由') self.db = sqlite3.connect(options.sqlite_path, check_same_thread = False) self.db.row_factory = self.__dict_factory logger.info('链接数据库') threading.Thread(target = Start_Get_Sysinfo, args = (que,)).start() logger.info('开启后台监控进程') self.status = que.get() logger.info('获取系统信息!')
def getAndSaveCYFromId(self, id): url = "http://56gate.com/html/cy/cydetail_%s.html" % id try: GateCYSource.objects.get(webSiteId=id) logger.info("Zhuanxian %s already exsists" % id) return except: pass text = self.httpClient.geturlcon(url) result = {} try: result = self.parseCYDetailsMeta(text, id) except Exception, e: logger.error(e)
def send(self, sock, data, size):
    """Send ``data`` framed as a DataMsg over ``sock``.

    ``size`` must equal len(data) (caller-supplied sanity check).  Returns
    the number of payload bytes sent (socket send() result minus the header
    size), or -1 on the size mismatch.  On ECONNRESET/EBADF the client socket
    is closed; other socket errors are only logged (implicit None return).
    """
    if size != len(data):
        logger.error("data size [%d] not equal real size [%d]", size, len(data))
        return -1
    logger.info("send-DATA: %s" % data)
    msg = DataMsg(data)
    buf = msg.pack()
    try:
        ret = sock.send(buf)
        # Report payload bytes, excluding the framing header.
        return ret - SOCKET_HEADER_SIZE
    except socket.error as err:
        logger.error("socket exception: %s" % err)
        if err.errno == errno.ECONNRESET or err.errno == errno.EBADF:
            # Peer reset or bad descriptor: drop our bookkeeping for it.
            logger.info("client [%s:%s] is closed." % self.clients_ext[sock.fileno()])
            self.close_socket(sock)
def recode_name(name):
    """Repair mojibake in an archive member name.

    Zip entries are commonly stored as cp437; try re-decoding as utf-8 first,
    then gbk, logging each failed attempt, and fall back to the name
    unchanged.  Names already containing Chinese characters are assumed
    correct and returned as-is.
    """
    if have_chinese_(name):
        return name
    for encoding in ('utf-8', 'gbk'):
        try:
            return name.encode('cp437').decode(encoding)
        except Exception as error:
            logger.info(f'当前编码方式不正确,错误信息为:{error}')
    return name
def getAndSaveCYFromId(self, id): url = "http://wl.kywmall.com/wl_pages/wl_find_truck_details.aspx?id=%s" % id try: KYVehicle.objects.get(webSiteId=id) logger.info("HuoYuan %s already exsists" % id) return except: pass text = self.httpClient.geturlcon(url) result = {} try: result = self.parseCheyuanMeta(text, id) except Exception, e: logger.error(e)
def getAndSaveFromId(self, id):
    """Fetch one jctrans road-details page by site id and parse it.

    Skips ids already stored in JctransRoadDetails; parse failures are
    logged and leave ``result`` empty.  (Python 2 syntax.)
    """
    url = "http://land.jctrans.com/CarsInfo/roaddetails_%s.html" % id
    try:
        JctransRoadDetails.objects.get(webSiteId=id)
        logger.info("%s already exsists" % id)
        return
    except:
        # Lookup failure (typically DoesNotExist) -> not stored yet.
        pass
    result = {}
    text = self.httpClient.geturlcon(url)
    try:
        result = self.parseRoadDetailsMeta(text, id)
    except Exception, e:
        logger.error(e)
def getAndSaveZXFromId(self, id):
    """Fetch one kywmall ZhuanXian (line) detail page by site id and parse it.

    Skips ids already stored in KYZhuanXian; parse failures are logged and
    leave ``result`` empty.  (Python 2 syntax.)
    """
    url = "http://wl.kywmall.com/wl_pages/wl_find_line_details.aspx?id=%s" % id
    try:
        KYZhuanXian.objects.get(webSiteId=id)
        logger.info("Zhuanxian %s already exsists" % id)
        return
    except:
        # Lookup failure (typically DoesNotExist) -> not stored yet.
        pass
    text = self.httpClient.geturlcon(url)
    result = {}
    try:
        result = self.parseZXMeta(text, id)
    except Exception, e:
        logger.error(e)
def getAndSaveHYFromId(self, id):
    """Fetch one 56gate huoyuan (cargo) detail page by site id and parse it.

    Skips ids already stored in GateHuoYuanSource; parse failures print the
    traceback and are logged.  (Python 2 syntax.)
    """
    url = "http://56gate.com/html/hy/sgdetail_%s.html" % id
    try:
        GateHuoYuanSource.objects.get(webSiteId=id)
        logger.info("HuoYuan %s already exsists" % id)
        return
    except:
        # Lookup failure (typically DoesNotExist) -> not stored yet.
        pass
    text = self.httpClient.geturlcon(url)
    result = {}
    try:
        result = self.parseHYDetailsMeta(text, id)
    except Exception, e:
        # Print the traceback for debugging in addition to logging the error.
        traceback.print_exc()
        logger.error(e)
def geturlcon(self, url, data=None, headers={}, timeout=20, retries=5): logger.info(url) tries = 0 while tries < retries: try: request = urllib2.Request(url, data=self.getPostData(data), headers=headers) url = urllib2.urlopen(request, timeout=timeout) time.sleep(1) #page = url.read().decode('utf8', 'ignore') page = url.read() logger.warn(page) return page except Exception, e: print traceback.format_exc() tries += 1 time.sleep(5 * random.random() + 5)
def send_mail(subject, content, receiver):
    """Send a plain-text UTF-8 mail through the configured SMTP relay.

    NOTE(review): the To header is taken from config.email while the envelope
    recipient is ``receiver`` -- confirm this mismatch is intentional.
    """
    message = MIMEText(content, 'plain', 'utf-8')
    message['From'] = Header(mail_config["user"], 'utf-8')
    message['To'] = Header(config.email, 'utf-8')
    message['Subject'] = Header(subject, 'utf-8')
    smtpObj = None
    try:
        smtpObj = smtplib.SMTP()
        smtpObj.connect(mail_config["host"], 25)  # 25 is the SMTP port
        smtpObj.login(mail_config["user"], mail_config["passwd"])
        smtpObj.sendmail(mail_config["user"], receiver, message.as_string())
        log.info("mail sent success.")
    except smtplib.SMTPException as e:
        log.error("mail sent failed")
        log.error(str(e))
        log.error(traceback.format_exc())
    finally:
        # BUG FIX: the SMTP connection was never closed; quit() ends the
        # session politely (errors ignored if connect itself failed).
        if smtpObj is not None:
            try:
                smtpObj.quit()
            except Exception:
                pass
def __re_risk(self, string):
    """Normalise a difficulty level string.

    Returns the lower-cased value when it is one of 'simple', 'medium',
    'advanced'; returns '' for blank or unrecognised input.
    """
    cleaned = string.strip().lower()
    if not cleaned:
        return ''
    logger.info('处理传入的环境难度等级信息!')
    if cleaned in ('simple', 'medium', 'advanced'):
        logger.info('成功处理传入的环境难度等级信息,获取到难度等级为:%s' % cleaned)
        return cleaned
    logger.error('传入的环境难度等级信息有误!')
    return ''
def get(self):
    """Stop every running container owned by the current user, mark them
    closed in the DB, then hand off to /logout to finish the logout."""
    user = self.current_user.decode()
    logger.info('用户%s退出登陆并尝试关闭所有的容器!' % user)
    sql = 'SELECT containers_id FROM tb_status WHERE containers_user = ? and containers_status = ?;'
    for row in self.db_select(sql, [user, 'runing']):
        con = Dockers_Info.Get_Containers_Message(row['containers_id'])
        if con:
            Dockers_Stop.Stop_Containers(con)
    sql = 'UPDATE tb_status SET containers_status = ? WHERE containers_user = ? AND containers_status = ?;'
    self.db_update_insert(sql, ['closed', user, 'runing'])
    logger.info('关闭用户%s所有的容器,并更新数据库状态!' % user)
    self.redirect("/logout")
def run(self):
    """Main pygame loop: translate mouse clicks into piece picks and moves.

    First click picks a piece (when it belongs to the side to move); second
    click either re-picks another own piece or attempts the move.

    NOTE(review): nesting reconstructed from a collapsed source line -- the
    `break` after re-picking leaves the event loop for this frame; confirm.
    """
    board = Board()
    is_piece_picked = False
    piece_src_position = None
    while True:
        for event in pygame.event.get():
            if event.type == MOUSEBUTTONUP:
                button_up_pos = pygame.mouse.get_pos()
                if not is_piece_picked:
                    which_piece_is_picked = self.getPieceByPosition(
                        board, button_up_pos)
                    # Only allow picking a piece of the side whose turn it is.
                    if which_piece_is_picked != 0 and board.isTurnRight(
                            self.toBoardPos(button_up_pos)):
                        is_piece_picked = True
                        piece_src_position = button_up_pos
                        logger.info("pick an chess %s, src pos is %s",
                                    str(which_piece_is_picked), str(piece_src_position))
                else:
                    src = self.toBoardPos(piece_src_position)
                    dst = self.toBoardPos(button_up_pos)
                    another_pick = self.getPieceByPosition(
                        board, button_up_pos)
                    # pick another self chess
                    if another_pick != 0 and board.isSameSide(src, dst):
                        which_piece_is_picked = another_pick
                        piece_src_position = button_up_pos
                        logger.info("pick another chess %s, src pos is %s",
                                    str(which_piece_is_picked), str(piece_src_position))
                        break
                    else:
                        isValid, isFinished = board.move(
                            which_piece_is_picked, src, dst)
                        if isValid:
                            # Move accepted: clear the selection.
                            is_piece_picked = False
                            piece_src_position = None
                        else:
                            logger.info('invalid move src %s, dst %s',
                                        str(src), str(dst))
                        if isFinished:
                            # NOTE(review): game end only logs; nothing stops
                            # the loop -- confirm that is intended.
                            logger.info('finished.')
            if event.type == QUIT:
                exit()
        # Redraw background and all pieces every frame.
        self.__screen.blit(self.__background, (0, 0))
        for c in board.getPieces():
            self.putPiece(c[0], c[1])
        pygame.display.update()
def unpack_zip(zipfilename='', path_from_local=''): index, ext = arg_first( ZIP_FORMAT, lambda n: n.lower() == zipfilename[-len(n):].lower()) # check if the file format is need unpack if index is None: return filepath = os.path.join(path_from_local, zipfilename) extract_path = filepath[:-len(ext)] + '/' if not os.path.exists(extract_path): os.makedirs(extract_path) # else: # shutil.rmtree(extract_path, True) # os.makedirs(extract_path) patoolib.extract_archive(filepath, verbosity=0, outdir=extract_path) file_name, full_name = os.path.split(filepath) filename, ext = os.path.splitext(full_name) file_dir = os.path.join(root, file_name, filename) # 先修正文件夹名称乱码 fix_folder_name(file_dir) # 修正文件名乱码 fix_file_name(file_dir) # 解压后的文件列表 name_list = [] for name in get_all_files(extract_path): name_list.append(recode_name(name)) for name in name_list: try: new_zip_filepath = os.path.join(extract_path, name) unpack_zip(zipfilename=new_zip_filepath) except NotImplementedError as e: logger.info(e) except OSError as e: logger.info(e) return extract_path
def get(self):
    """Render the images page, hiding images the user already has running."""
    page = int(self.get_argument("page", 1))
    user = self.current_user.decode()
    images_result = self.db_select('SELECT * FROM tb_images LIMIT ?,?;',
                                   [(page - 1) * page_size, page_size])
    images_count = self.db_select('SELECT id FROM tb_images')
    sql = 'SELECT images_id FROM tb_status WHERE containers_user = ? AND containers_status = "runing";'
    statrt_result = self.db_select(sql, [user])
    result = []
    for row in images_result:
        if {'images_id': row['images_id']} in statrt_result:
            # Skip images whose container is already running for this user.
            continue
        row['json_images_port'] = json.loads(row['images_port'])
        result.append(row)
    logger.info('获取用户%s可以使用的镜像!' % user)
    remaining = len(images_count) - len(statrt_result)
    self.render('images.html', cursor = result,
                count = remaining if remaining > 0 else 0)
def start(self):
    """Accept/serve loop for the epoll-based TCP server.

    Registers each new client non-blocking and edge-triggered, dispatches
    readable events to ``self.handler``, and closes clients on error.
    (Python 2 syntax.)

    NOTE(review): this method appears twice in this file with identical
    bodies -- likely a copy/paste duplicate; confirm and remove one.
    """
    self.running = True
    logger.info("TCP socket server [%s:%s] start." % (self.host, self.port))
    while self.running:
        selectors = self.selector.poll()
        for fd, event in selectors:
            if fd == self.server_socket.fileno():
                sock, addr = self.server_socket.accept()  # new client
                sock.setblocking(False)
                self._set_socket_opt(sock)
                self.clients.update({sock.fileno(): sock})
                self.clients_ext.update({sock.fileno(): addr})
                self.selector.register(
                    sock.fileno(),
                    select.EPOLLIN | select.EPOLLPRI | select.EPOLLET)
                logger.info("new socket client: [%s]" % str(addr))
            elif event & select.EPOLLIN:
                try:
                    recv_data = self.recv(self.clients[fd])
                    if self.handler and recv_data:
                        self.handler.handle(self, self.clients[fd], recv_data)
                except Exception, e:
                    # Any failure while reading/handling drops the client.
                    logger.error(
                        "recv data and handle data with exception: %s" % e)
                    if fd in self.clients_ext.keys():
                        logger.info("close socket [%s]" % str(self.clients_ext[fd]))
                        #self.send(sock, "close you!", 10)
                        self.close_socket(self.clients[fd])
            elif event & select.EPOLLPRI:
                logger.info("EPOLLPRI")
            elif event & select.EPOLLERR or event & select.EPOLLHUP:
                logger.info("socket [%s] is error, closed it." % str(self.clients_ext[fd]))
                if fd in self.clients_ext.keys():
                    self.close_socket(self.clients[fd])
def __thread_addimages(self, data, images_name, images_tag, db_update_insert):
    """Worker-thread body: pull/build a docker image, then upsert its DB row.

    ``data`` must hold the first ten column values in INSERT order; note that
    this method APPENDS images_name and images_id to it, mutating the
    caller's list, before running the INSERT.  Returns False when the image
    could not be created; otherwise returns None after logging the outcome.
    """
    logger.info('开启一个线程去创建images镜像,镜像地址为:%s:%s' % (images_name, images_tag))
    images = Dockers_Start.Add_Images(images_name, images_tag)
    if not images:
        return False
    # Prefer the canonical tag/id reported by docker over the user's input.
    images_name = images.tags[0]
    images_id = images.id
    data.append(images_name)
    data.append(images_id)
    # Upsert by hand: delete any previous row for the same name, then insert.
    sql = 'DELETE FROM tb_images WHERE images_name = ? ;'
    if db_update_insert(sql, [images_name,]):
        logger.info('成功创建一个images镜像并删除数据库中旧的数据!')
    else:
        logger.error('成功创建一个images镜像,但是在删除数据库中旧的数据时出错。')
    sql = 'INSERT INTO tb_images (name, info, isupload, types, tags, difficulty, author, images_port, flag, images_start_mode, images_name, images_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);'
    if db_update_insert(sql, data):
        logger.info('成功把images镜像信息写入数据库,镜像名字为:%s' % images_name)
    else:
        logger.error('把images镜像信息写入数据库时出错,镜像名字为:%s' % images_name)
def _get_token_(url, file_path):
    """Upload ``file_path`` to the document service and return its text tokens.

    The file is first copied to a temp path with a lower-cased extension.
    Returns the token list, or [] when the upload or decoding fails.
    """
    _, ext = os.path.splitext(file_path)
    tmp_file_name = get_tmp_file_path(str(uuid.uuid1()) + ext.lower())
    shutil.copy(file_path, tmp_file_name)
    # NOTE(review): assert is stripped under `python -O`; use an explicit
    # check if this copy verification must always run.
    assert filecmp.cmp(file_path, tmp_file_name) is True
    token = []
    with open(tmp_file_name, 'rb') as f:
        logger.info(
            f"call get tokens from document service started. url is {url} ")
        response = requests.post(url, files={'file': f})
        logger.info(
            f"call get tokens from document service ended. result status code is {response.status_code}")
        if response.status_code == 200:
            result = json.loads(response.text)
            token = result['text_tokens']
            # NOTE(review): if ``result`` is a str, the subscript above would
            # already have raised; also logger.exception outside an except
            # block logs a bogus traceback -- probably meant logger.error.
            if isinstance(result, str):
                logger.exception(f"{tmp_file_name}文件可能损坏,请将订单文件另存为正确的格式后,重新上传。")
        else:
            logger.warning(response.status_code)
            logger.info(f"get tokens from windows server error. {response.content}")
    # os.remove(tmp_file_name)
    # os.remove(tmp_file_name)
    # NOTE(review): temp-file cleanup is commented out, leaking one file per
    # call -- confirm before re-enabling.
    return token
def get_all_info():
    '''
    Scrape the SSE renewal listing with the shared selenium browser.
    (The page loads its table via ajax, so driving a real browser is the
    practical approach.)

    Walks the paginated table newest-first, stopping as soon as it reaches
    the most recent project already stored, then prepends the new projects
    so the first DB record stays the newest.
    :return:
    '''
    current_projects = db.read_data()
    projects = []
    browser.get("http://kcb.sse.com.cn/renewal/")
    work_flag = True
    while work_flag:
        time.sleep(5)  # wait for the page to load
        hrefs = browser.find_elements_by_css_selector("#dataList1_container>tbody>tr:not(:first-child) td:nth-child(2)>a")
        for href in hrefs:
            title = href.text.replace("<br>","").replace("\n","")
            if len(current_projects) and title == current_projects[0]["title"]:
                # Reached the newest already-stored project: stop paging.
                work_flag = False
                break
            else:
                projects.append({
                    "title": title,
                    "link": href.get_attribute("href")
                })
        if work_flag:
            try:
                browser.find_element_by_css_selector("a.paging_next").click()
            except selenium.common.exceptions.NoSuchElementException:
                # No "next page" link: every page has been read.
                work_flag = False
                log.info("all data loaded!")
                break
    log.info("reading projects...")
    log.info(projects)
    # Prepend so the first record in the DB stays the newest.
    current_projects = projects + current_projects
    db.write_data(current_projects)
def compute(self, node, level):
    """Recursively expand the game tree below ``node`` to depth ``level``.

    Every legal movement spawns a child node holding a cloned board with that
    movement applied; recursion stops at depth zero or when no movements
    remain.
    """
    if level == 0:
        return
    board = node.getBoard()
    movements = board.nextPossibleMovements()
    if not movements:
        logger.info("board.nextPossibleMovements return empty list.")
        return
    for row_src, col_src, row_dst, col_dst in movements:
        # Apply the movement on a clone so siblings start from the same state.
        snapshot = board.clone()
        snapshot.move(snapshot.get(row_src, col_src),
                      (row_src, col_src), (row_dst, col_dst))
        child = TreeNode()
        child.setBoard(snapshot)
        node.addChild(child)
        self.compute(child, level - 1)
logger.info("%s already exsists" % id) return except: pass text = self.httpClient.geturlcon(url) result = {} try: result = self.pharseHYMeta(text, id) except Exception, e: logger.info(traceback.format_exc()) if (result.get("startPlace")): source = TK56HuoYuan() try: source.save_from_a_source(result) except Exception, e: logger.info(traceback.format_exc()) else: logger.info("saved") else: logger.error(url + "is a null page") def getPage(self, type, page): url = "http://www.tk56.com/search%s.aspx?&page=%s" % (type, page) text = self.httpClient.geturlcon(url) soup = BeautifulSoup(text) soup.prettify() hrefs = soup.findAll(name="a", href=re.compile(type + 'detail')) id_list = [] for href in hrefs: str_url = str(href).split('"')
return except: pass text = self.httpClient.geturlcon(url) try: result = self.pharseHYMeta(text, id) except Exception, e: logger.info(traceback.format_exc()) if (result.get("startPlace")) and (result.get("destPlace")): huoyuan = WTHuoYuan() try: huoyuan.save_from_dic(result) except Exception, e: logger.error(traceback.format_exc()) else: logger.info("saved %s" % (id)) else: logger.info(url + " is a null page") def getAndSaveZhuanXianFromId(self, id): url = "http://www.chinawutong.com/201/%s.html" % id result = {} try: WTZhuanXian.objects.get(webSiteId=id) logger.info("%s already exsists" % id) return except: pass text = self.httpClient.geturlcon(url) try:
pass result = {} text = self.httpClient.geturlcon(url) try: result = self.parseRoadDetailsMeta(text, id) except Exception, e: logger.error(e) if ( result.get("startPlace")) and (result.get("phone")): jctranRoadDetails = JctransRoadDetails() try: jctranRoadDetails.save_from_dic(result) except Exception, e: logger.error(e) else: logger.info(url + " is a null page") def getPage(self, page): url = "http://land.jctrans.com/carsinfo/newlist-0-------1---------40-%s.html" % page text = self.httpClient.geturlcon(url) soup = BeautifulSoup(text) soup.prettify() hrefs = soup.findAll(name="a", href=re.compile('roaddetails')) id_list = [] for href in hrefs: str_url = str(href).split('"') id_list.append(str_url[1].split("_")[1].replace(".html", "")) return id_list def getRoadDetailPages(self, pages=pages):
return except: pass text = self.httpClient.geturlcon(url) result = {} try: result = self.parseHYDetailsMeta(text, id) except Exception, e: traceback.print_exc() logger.error(e) if True: source = GateHuoYuanSource() try: for k, v in result.iteritems(): logger.info("gateee " + str(k) + " " + str(v)) source.save_from_a_source(result) except Exception, e: traceback.print_exc() logger.error(e) else: logger.info("%s saved" % (id)) else: logger.error(url + " is a null page") def getAndSaveCYFromId(self, id): url = "http://56gate.com/html/cy/cydetail_%s.html" % id try: GateCYSource.objects.get(webSiteId=id) logger.info("Zhuanxian %s already exsists" % id) return
def recv(self, sock, timeout=5):
    """Read one length-prefixed message from ``sock``.

    Reads the fixed-size header first, then loops (the socket is
    non-blocking) until the advertised payload size has arrived or
    ``timeout`` seconds elapse.  Returns the payload, or None on peer close,
    timeout, or fatal socket error.
    """
    end_time = time.time() + timeout
    head = sock.recv(SOCKET_HEADER_SIZE)
    if head is None or len(head) == 0:
        # Zero-byte read: the peer closed the connection.
        logger.info("client [%s] is closed." % str(self.clients_ext[sock.fileno()]))
        self.close_socket(sock)
        return None
    header_data = struct.unpack("=%dsi" % len(SOCKET_HEADER_NAME), head[:SOCKET_HEADER_SIZE])
    logger.info("read-HEAD: %s" % str(header_data))
    header = HeaderMsg(header_data[0], header_data[1])
    # header.size counts the header itself; the payload is the remainder.
    total_size = data_size = header.size - SOCKET_HEADER_SIZE
    data = ""
    count = 0
    while end_time > time.time():
        try:
            buf = sock.recv(data_size)
            if buf is None or len(buf) == 0:
                logger.info("client [%s] is closed." % str(self.clients_ext[sock.fileno()]))
                self.close_socket(sock)
                return None
            data = data + buf
            if len(buf) < data_size:
                # Partial read: shrink the outstanding byte count and retry.
                data_size = data_size - len(buf)
                time.sleep(0.1)
                continue
            if total_size == len(data):
                logger.info("read-DATA len(%d)" % (len(data)))
                return data
            if count > 20:
                # Too many EAGAIN retries: give up on this message.
                logger.error("read socket timeout.")
                return None
        except socket.error as err:
            logger.error("socket exception: %s" % str(err))
            if err.errno == errno.ECONNRESET or err.errno == errno.EBADF:
                logger.info("client [%s] is closed." % str(self.clients_ext[sock.fileno()]))
                self.close_socket(sock)
                return None
            if err.errno == errno.EAGAIN:
                # Non-blocking socket has no data yet; back off and retry.
                logger.info("data is coming")
                count += 1
                time.sleep(0.2)
                continue
text = self.httpClient.geturlcon(url) result = {} try: result = self.parseHYDetailsMeta(text, id) except Exception, e: print traceback.format_exc() logger.info(e) if True: source = GateHuoYuanSource() try: source.save_from_a_source(result) except Exception, e: logger.error(e) else: logger.info("%s saved" % (id)) else: logger.error(url + " is a null page") def getAndSaveCYFromId(self, id): url = "http://56gate.com/html/cy/cydetail_%s.html" % id try: GateCYSource.objects.get(webSiteId=id) logger.info("Zhuanxian %s already exsists" % id) return except: pass text = self.httpClient.geturlcon(url) result = {} try:
from third_party.reflection.reflection import *
from log.logger import logger
from serialize.serializer import Serializer
from config.arguments_parser import argparser
from serialize.abstract_serializer import AbstractSerializer


class StatusLogStarter(Starter):
    """Starter that wires a log generator to a test framework.

    NOTE(review): test execution (_test) is currently a stub; the intended
    behaviour survives only in the commented block.  (Python 2 script.)
    """

    def __init__(self, generator=None, test_framework=None):
        Starter.__init__(self)
        self.generator = generator
        self.test_framework = test_framework
        if test_framework is None:
            # Fall back to the default test framework when none is supplied.
            self.test_framework = tests.tests()

    def _test(self):
        pass
        # for test in self.test_framework:
        #     assert isinstance(test, AbstractTest)
        #     Helper.run_test(test, self.generator.connection, self.generator.container,
        #                     [sys.stdout, open('result/res.html', 'w')], True)


# Script entry: start the starter, then serialize a batch of generated log
# records with the serializer selected on the command line.
starter = StatusLogStarter()
starter.start()
s = Serializer().serializer(argparser['serializer'])
for d in DLogGenerator().generate(20):
    logger.assert_true(isinstance(s, AbstractSerializer))
    logger.info('Serialized some stuff: {}'.format(s.serialize(d)))
    print s.serialize(d)