def get_data():
    """Handle one labeling action submitted from the UI.

    Reads ``aname`` (chosen action/actor name), ``img_hash`` and ``title``
    from the request, then:
      * "不保留" (discard): mark the picture invalid and labeled;
      * "上一张" (previous): move the "current" pointer back one picture;
      * any other value except "跳过" (skip): store the actor name as label.

    Returns JSON ``{'error_code': 0}``.
    """
    rdict = {"aname": '', "img_hash": '', "title": ''}
    for item in rdict:
        rdict[item] = request.values.get(item, '')
    row_update_dict = {}
    row2_update_dict = {}
    if rdict["aname"] == u"不保留":
        row_update_dict['valid'] = 0
        row_update_dict['label'] = 1
    elif rdict["aname"] == u"上一张":
        session = DBSession()
        session.expire_on_commit = False
        try:
            # Drop the current flag from the picture being shown.
            session.query(PicLabel).filter(
                PicLabel.image_hash == rdict["img_hash"]).update(
                    {PicLabel.current: 0})
            # Ordered list of still-unlabeled pictures plus the current one.
            uquerys = session.query(PicLabel).filter(
                PicLabel.title == rdict["title"]).filter(
                    or_(PicLabel.label == 0,
                        PicLabel.image_hash == rdict["img_hash"])).order_by(
                            PicLabel.episode, PicLabel.frame_timestamp).all()
            for idx, uquery in enumerate(uquerys):
                if uquery.image_hash == rdict["img_hash"]:
                    index = idx
            length = len(uquerys)
            # Step back two positions with wrap-around and mark it current.
            uquery = uquerys[(length + index - 2) % length]
            session.query(PicLabel).filter(
                PicLabel.image_hash == uquery.image_hash).update(
                    {PicLabel.current: 1})
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()  # original leaked the session on errors
    elif rdict["aname"] != u"跳过":
        # The original also re-tested != "上一张" here, which is redundant
        # inside this elif chain.
        row_update_dict['aname'] = rdict['aname']
        row_update_dict['label'] = 1
    session = DBSession()
    try:
        if row_update_dict:
            session.query(PicLabel).filter_by(
                image_hash=rdict["img_hash"]).update(row_update_dict)
        if row2_update_dict:
            session.query(CheckActor).filter(
                CheckActor.actor == rdict["aname"]).update(row2_update_dict)
        session.commit()
    except Exception:  # narrowed from a bare except
        session.rollback()
    finally:
        session.close()
    return jsonify({'error_code': 0})
def get_by_key(cls, key):
    """Look up the first row of *cls* whose key equals *key*.

    Normalizes *key* to the storage representation: on Python 2 the key is
    utf-8 encoded bytes, on Python 3 it is decoded to str. Returns the row
    or None.
    """
    session = DBSession()
    try:
        if py_ver == 2:
            the_key = (key or b'').encode('utf-8')
        else:
            the_key = str(key or b'', 'utf-8')
        return session.query(cls).filter(cls.key == the_key).first()
    finally:
        session.close()  # original never closed the session
def get(self):
    """Return one page (5 rows) of article classes as JSON.

    Query arg: ``cur_page`` — 1-based page number, default '1'.
    Response: ``{'data': [...], 'page_size': 5, 'total': N}``.
    """
    cur_page = self.get_argument('cur_page', '1')
    page_size = 5
    log.info('获取文章分类列表cur_page:' + cur_page)
    # Interpolated values are locally computed ints, so %d formatting is safe.
    sql = """
        select ( select count(id) from article_class ) as total, *
        from article_class
        order by id desc
        limit %d offset %d
    """ % (page_size, (int(cur_page) - 1) * page_size)
    session = DBSession()
    try:
        data = session.execute(sql).fetchall()
    finally:
        session.close()  # original never closed the session
    table_data = {
        'data': [],
        'page_size': page_size,
        'total': data[0]['total'] if data else 0,
    }
    for d in data:
        table_data['data'].append({
            'id': d.id,
            'name': d.name,
            'create_date': d.create_date.strftime('%Y-%m-%d %H:%M:%S'),
            'write_date': d.write_date.strftime('%Y-%m-%d %H:%M:%S'),
        })
    return self.finish(json.dumps(table_data))
def put(self):
    """Rename an article class. Body args: ``class_id``, ``name``."""
    class_id = self.get_body_argument('class_id', None)
    name = self.get_body_argument('name', None)
    session = DBSession()
    try:
        article_class = session.query(ArticleClass).filter_by(id=class_id).first()
        if not article_class:
            return self.finish(json.dumps({'code': -1, 'msg': '该分类不存在'}))
        now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        data = {
            'name': name,
            'write_date': now
        }
        log.info('修改文章分类:class_id ' + class_id)
        log.info('修改文章分类:name ' + article_class.name + ' => ' + name)
        try:
            session.query(ArticleClass).filter_by(id=class_id).update(data)
            session.commit()
        except Exception as e:
            log.error(e)
            return self.finish(json.dumps({'code': -1, 'msg': '修改失败'}))
        return self.finish(json.dumps({'code': 0, 'msg': '修改成功'}))
    finally:
        session.close()  # original leaked the session on not-found/error paths
def get(self):
    """Return a single article's details as JSON. Query arg: ``article_id``."""
    article_id = self.get_argument('article_id', None)
    log.info('获取文章信息:article_id ' + article_id)
    session = DBSession()
    try:
        article = session.query(Article).filter_by(id=article_id).first()
        if not article:
            return self.finish(json.dumps({'code': -1, 'msg': '该文章不存在'}))
        result = {
            'id': article.id,
            'class_id': article.class_id,
            # image_url is stored as a path; prefix the serving domain when set.
            'image_url': (domain_name + article.image_url) if article.image_url else '',
            'title': article.title,
            'note': article.note,
            'content': article.content
        }
        return self.finish(json.dumps({'code': 0, 'data': result}))
    finally:
        session.close()  # original never closed the session
def get_cur_actors(sid):
    """Return the show's actors that have not been labeled yet, plus the
    three UI pseudo-actions ("不保留", "上一张", "跳过").
    """
    actors = set(get_actors(sid))
    session = DBSession()
    session.expire_on_commit = False
    try:
        # Distinct actor names that already appear as labels for this sid.
        labeled = {
            obj.aname
            for obj in session.query(PicLabel.aname).filter(
                PicLabel.sid == sid).distinct()
            if obj.aname
        }
        session.commit()
    finally:
        session.close()  # original left the session open on query errors
    return list(actors - labeled) + [u"不保留", u"上一张", u"跳过"]
def crawl_transaction_by_search(self, args):
    """爬取一页历史成交房源 — crawl one page of historical transactions.

    args: ``(search_key, page)``. Upserts a TransactionInfo row per listing.
    """
    search_key, page = args
    url_page = self.base_url + f"chengjiao/pg{page}rs{search_key}/"
    content = self.request_fn(url_page)
    soup = BeautifulSoup(content, self.bs4_parser)
    logging.debug('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(search_key, page, url_page))
    session = DBSession()
    for ul_tag in soup.find_all("ul", class_="listContent"):
        for item_tag in ul_tag.find_all("li"):
            try:
                info_dict = self.parse_transaction_content(item_tag)
                # Update-or-insert keyed on the listing id.
                query = session.query(TransactionInfo).filter(TransactionInfo.id == info_dict['id'])
                if query.first():
                    query.update(info_dict)
                else:
                    session.add(TransactionInfo(**info_dict))
                session.commit()
                logging.debug('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(
                    search_key, page, info_dict))
            except Exception as e:
                # rollback added for consistency with crawl_community_by_district
                session.rollback()
                logging.exception('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(
                    search_key, page, e))
    time.sleep(3)
    session.close()  # original never closed the session
    logging.info('@crawl_transaction_by_search: {0} - page - {1} complete.'.format(search_key, page))
def crawl_sale_by_search(self, args):
    """根据商圈或社区爬取一页在售房源 — crawl one page of for-sale listings.

    args: ``(search_key, page)``. Adds a SaleInfo row per complete listing;
    a single commit happens after the page is processed.
    """
    search_key, page = args
    page_url = self.base_url + f"ershoufang/pg{page}rs{search_key}/"
    markup = self.request_fn(page_url)
    page_soup = BeautifulSoup(markup, self.bs4_parser)
    logging.debug('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, page_url))
    session = DBSession()
    for list_tag in page_soup.find_all("ul", class_="sellListContent"):
        for li_tag in list_tag.find_all("li"):
            try:
                parsed = self.parse_sale_content(li_tag)
                logging.debug('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, parsed))
                record = SaleInfo(**parsed)
                # Only keep records carrying all three identifying fields.
                if record.house_id and record.community_id and record.district:
                    session.add(record)
            except Exception as e:
                session.rollback()
                logging.exception('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, e))
    time.sleep(3)
    session.commit()
    session.close()
    logging.info('@crawl_sale_by_search: {0} - page - {1} complete.'.format(search_key, page))
def crawl_community_by_district(self, args):
    """根据区县爬取一页小区信息 — crawl one page of community info.

    args: ``(district, page)``. Upserts a CommunityInfo row per listing,
    committing after each one.
    """
    district, page = args
    page_url = self.base_url + f"xiaoqu/{district}/pg{page}/"
    markup = self.request_fn(page_url)
    page_soup = BeautifulSoup(markup, self.bs4_parser)
    logging.debug('@crawl_community_by_district: {0} - page - {1}: {2}'.format(district, page, page_url))
    session = DBSession()
    for list_tag in page_soup.find_all("ul", class_="listContent"):
        for li_tag in list_tag.find_all("li"):
            try:
                parsed = self.parse_community_content(li_tag)
                existing = session.query(CommunityInfo).filter(CommunityInfo.id == parsed['id'])
                if existing.first() is None:
                    session.add(CommunityInfo(**parsed))
                else:
                    existing.update(parsed)
                session.commit()
                logging.debug('@crawl_community_by_district: {0} - page - {1}: {2}'.format(district, page, parsed))
            except Exception as e:
                session.rollback()
                logging.exception('@crawl_community_by_district: {0} - page - {1}: {2}'.format(district, page, e))
    time.sleep(3)
    session.close()
    logging.info('@crawl_community_by_district: {0} - page - {1} complete.'.format(district, page))
def post(self):
    """Create a new article from the POSTed fields."""
    class_id = self.get_body_argument('class_id', None)
    title = self.get_body_argument('title', None)
    image_url = self.get_body_argument('image_url', None)
    note = self.get_body_argument('note', None)
    content = self.get_body_argument('content', None)
    now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    data = {
        'class_id': class_id,
        'title': title,
        'image_url': image_url,
        'note': note,
        'content': content,
        'author': 'LiJiaF',
        'create_date': now,
        'write_date': now
    }
    log.info('添加文章:' + json.dumps(data))
    session = DBSession()
    try:
        session.add(Article(**data))
        session.commit()
    except Exception as e:
        log.error(e)
        return self.finish(json.dumps({'code': -1, 'msg': '添加失败'}))
    finally:
        session.close()  # original leaked the session on the failure path
    return self.finish(json.dumps({'code': 0, 'msg': '添加成功'}))
def get_sid(file_name):
    """获取sid — return the sid of the first unlabeled picture whose title is
    *file_name*, or None when there is none."""
    session = DBSession()
    session.expire_on_commit = False
    row = session.query(PicLabel).filter(
        PicLabel.label == 0, PicLabel.title == file_name).first()
    session.commit()
    session.close()
    return row.sid if row else None
def auth(cls, username, password):
    """Authenticate *username*/*password*.

    Returns the user row on success, ``False`` when the user does not
    exist, and ``None`` (implicit) on a wrong password — the original
    tri-state contract is preserved.
    """
    session = DBSession()
    try:
        u = session.query(cls).filter(cls.username == username).first()
    finally:
        session.close()  # original never closed the session
    if not u:
        return False
    # Stored scheme: md5(md5(password) + salt).
    password_md5 = md5(password.encode('utf-8')).hexdigest()
    password_final = md5((password_md5 + u.salt).encode('utf-8')).hexdigest()
    if u.password == password_final:
        return u
def get_actors(sid):
    """获取至少10个主演名单 — return the list of actor names registered for
    *sid* in the CheckActor table."""
    session = DBSession()
    session.expire_on_commit = False
    rows = session.query(CheckActor).filter(CheckActor.sid == sid).all()
    session.commit()
    session.close()
    return [row.actor for row in rows]
def set_password(self, new_password):
    """Re-salt this user and store md5(md5(new_password) + salt).

    Persists the change immediately. The session is deliberately left
    unclosed, matching the original: closing it would detach ``self``.
    """
    salt = random_str()
    digest = md5(new_password.encode('utf-8')).hexdigest()
    final = md5((digest + salt).encode('utf-8')).hexdigest()
    session = DBSession()
    self.salt = salt
    self.password = final
    session.add(self)
    session.commit()
def get_home_items():
    """获取剧名列表 — build the home list: one ``{title: badge}`` per show.

    badge 0: the show still needs labeling (more registered actors than
    distinct labeled actor names, and at least one unlabeled picture);
    badge 1: the show is done.
    """
    session = DBSession()
    # sid -> number of registered actors.
    sid_actor_num = {
        obj.sid: obj.num
        for obj in session.query(
            CheckActor.sid,
            sqlalchemy.sql.label('num', sqlalchemy.func.count(
                CheckActor.actor))).group_by(CheckActor.sid).all()
    }
    # sid -> number of distinct labeled actor names.
    sid_label_num = {
        obj.sid: obj.num
        for obj in session.query(
            PicLabel.sid,
            sqlalchemy.sql.label('num', sqlalchemy.func.count(
                PicLabel.aname))).distinct().group_by(PicLabel.sid).all()
    }
    sid_title = {
        obj.sid: obj.title
        for obj in session.query(PicLabel.sid, PicLabel.title).distinct()
    }
    # sids that still have at least one unlabeled picture.
    nonempty_sid = {
        obj.sid
        for obj in session.query(PicLabel.sid).filter(
            PicLabel.label == 0).distinct()
    }
    session.commit()
    session.close()
    items = []
    for sid, title in sid_title.items():
        # .get(sid, 0) guards sids with pictures but no CheckActor rows;
        # the original raised KeyError on sid_actor_num[k] in that case.
        if sid_actor_num.get(sid, 0) > sid_label_num.get(sid, 0) and sid in nonempty_sid:
            badge = 0
        else:
            badge = 1
        items.append({title: badge})
    return items
def query_biz_circle(cls, districts):
    """查商圈 — return the sorted, de-duplicated biz circles found in the
    given *districts*."""
    session = DBSession()
    rows = session.query(CommunityInfo.biz_circle) \
        .filter(CommunityInfo.district.in_(districts)) \
        .all()
    session.commit()
    session.close()
    return sorted({row[0] for row in rows})
def reply_text_and_get_user_profile(event):
    """Fetch the sender's LINE profile, store it while the user table is
    empty, and push a greeting message."""
    # 取出消息內User的資料 (fetch the sender's profile)
    user_profile = line_bot_api.get_profile(event.source.user_id)
    date = strftime("%Y-%m-%d", gmtime())
    db = DBSession()
    try:
        user_ids = db.query(userinfo.user_id).all()
        # NOTE(review): this only inserts when the table has NO users at all,
        # not when *this* user is missing — preserved as-is; confirm intent.
        if not user_ids:
            user = userinfo(date, user_profile.display_name,
                            user_profile.picture_url,
                            user_profile.status_message,
                            user_profile.user_id)
            db.add(user)
            db.commit()
    finally:
        # Original leaked the first session and opened a redundant second one.
        db.close()
    line_bot_api.push_message(user_profile.user_id, TextSendMessage(text='Hello'))
def get(self):
    """Return every article class as a JSON list of ``{id, name}``."""
    session = DBSession()
    try:
        rows = session.query(ArticleClass).all()
        result = [{'id': d.id, 'name': d.name} for d in rows]
    finally:
        session.close()  # original never closed the session
    return self.finish(json.dumps(result))
def _restore_pending_data(self):
    """Reload the first PENDING payment transaction and return
    ``(data, data["pool_amounts"])`` with amounts converted to Decimal.

    Assumes a pending transaction exists — AttributeError otherwise,
    matching the original behavior.
    """
    db_session = DBSession()
    try:
        transaction = db_session.query(PaymentTransaction)\
            .filter_by(status=PaymentTransaction.PENDING).first()
        data = json.loads(transaction.transaction_data)
    finally:
        db_session.close()  # original never closed the session
    amounts = []
    # Iterate miners/amounts in lockstep instead of range(len(...)).
    for miner, amount in zip(data["miners"], data["amounts"]):
        amounts.append(Decimal(amount))
        self._logger.info(f'miner {miner.lower()} unpaid rewards {amount}')
    data["amounts"] = amounts
    return data, data["pool_amounts"]
def save_stock_list(stock_list):
    """Persist each ``{"symbol", "name"}`` dict as a Stock row.

    Commits after every row (preserving the original per-item commit
    semantics) and always returns True.
    """
    session = DBSession()
    for entry in stock_list:
        session.add(Stock(entry["symbol"], entry["name"]))
        session.commit()
    session.close()
    return True
def new(cls, username, password):
    """Create, persist and return a new user row.

    Password is stored as md5(md5(password) + salt). The very first
    account registered is granted admin level.
    """
    salt = random_str()
    digest = md5(password.encode('utf-8')).hexdigest()
    final = md5((digest + salt).encode('utf-8')).hexdigest()
    # 首个用户赋予admin权限 (first user becomes admin)
    level = USER_LEVEL.ADMIN if cls.count() == 0 else USER_LEVEL.NORMAL
    now = int(time.time())
    user = cls(username=username, password=final, salt=salt, level=level,
               key=random_str(32), key_time=now, reg_time=now)
    session = DBSession()
    session.add(user)
    session.commit()
    return user
def save_comment_list(symbol, comment_list):
    """Persist each comment dict as a Comment row for *symbol*.

    Commits after every row (preserving the original per-item commit
    semantics) and always returns True.
    """
    session = DBSession()
    for entry in comment_list:
        record = Comment(entry["id"], symbol, entry["title"],
                         entry["text"], entry["description"])
        session.add(record)
        session.commit()
    session.close()
    return True
def get(self):
    """Return one article class's ``{id, name}`` as JSON. Query arg: ``class_id``."""
    class_id = self.get_argument('class_id', None)
    log.info('获取文章分类信息:class_id ' + class_id)
    session = DBSession()
    try:
        article_class = session.query(ArticleClass).filter_by(id=class_id).first()
    finally:
        session.close()  # original never closed the session
    if not article_class:
        return self.finish(json.dumps({'code': -1, 'msg': '该分类不存在'}))
    result = {
        'id': article_class.id,
        'name': article_class.name
    }
    return self.finish(json.dumps({'code': 0, 'data': result}))
def get(self):
    """Return one page (5 rows) of articles joined with their class name.

    Query arg: ``cur_page`` — 1-based page number, default '1'.
    """
    cur_page = self.get_argument('cur_page', '1')
    page_size = 5
    log.info('获取文章列表cur_page:' + cur_page)
    # Interpolated values are locally computed ints, so %d formatting is safe.
    sql = """
        select ( select count(id) from article ) as total,
               a.id, ac.name, a.image_url, a.title, a.author,
               a.create_date, a.write_date
        from article a
        inner join article_class ac on ac.id = a.class_id
        order by id desc
        limit %d offset %d
    """ % (page_size, (int(cur_page) - 1) * page_size)
    session = DBSession()
    try:
        data = session.execute(sql).fetchall()
    finally:
        session.close()  # original never closed the session
    table_data = {
        'data': [],
        'page_size': page_size,
        'total': data[0]['total'] if data else 0,
    }
    for d in data:
        table_data['data'].append({
            'id': d.id,
            'class_name': d.name,
            'image_url': domain_name + d.image_url,
            'title': d.title,
            'author': d.author,
            'create_date': d.create_date.strftime('%Y-%m-%d %H:%M:%S'),
            'write_date': d.write_date.strftime('%Y-%m-%d %H:%M:%S'),
        })
    return self.finish(json.dumps(table_data))
def delete(self):
    """Delete an article by id. Query arg: ``article_id``."""
    article_id = self.get_argument('article_id', None)
    log.info('删除文章:article_id ' + article_id)
    session = DBSession()
    try:
        # .first() returns None when missing; the original used .one(), which
        # raises NoResultFound instead, making the not-found branch unreachable
        # (and inconsistent with the class-delete handler, which uses .first()).
        article = session.query(Article).filter_by(id=article_id).first()
        if not article:
            return self.finish(json.dumps({'code': -1, 'msg': '删除失败'}))
        try:
            session.query(Article).filter_by(id=article_id).delete()
            session.commit()
        except Exception as e:
            log.error(e)
            return self.finish(json.dumps({'code': -1, 'msg': '删除失败'}))
        return self.finish(json.dumps({'code': 0, 'msg': '删除成功'}))
    finally:
        session.close()  # original never closed the session
def _check_pending_transactions(self) -> bool:
    """Wait for a receipt for every PENDING payment transaction and record it.

    Returns False on the first transaction whose receipt cannot be
    obtained (remaining ones are not checked, as before); True otherwise.
    """
    db_session = DBSession()
    try:
        pending_transactions = db_session.query(PaymentTransaction)\
            .filter_by(status=PaymentTransaction.PENDING).all()
        for transaction in pending_transactions:
            try:
                tx_receipt = self._web3.eth.waitForTransactionReceipt(
                    transaction.transaction_hash, timeout=config.WAIT_TIMEOUT)
                data = json.loads(transaction.transaction_data)
                amounts = [Decimal(amount) for amount in data["amounts"]]
                self._save_payments_info(tx_receipt, data["miners"], amounts,
                                         data["pool_amounts"])
            except Exception as e:
                self._logger.fatal(
                    f"get trasaction fail! tx_hash:{transaction.transaction_hash}, err:{e}")
                return False
        return True
    finally:
        db_session.close()  # original never closed the session
def publish():
    """Create a Blog row from the submitted form fields.

    Best-effort, as in the original: any error is logged and swallowed,
    not re-raised.
    """
    try:
        title = request.form['title']
        subtitle = request.form['subtitle']
        description = request.form['description']
        content = request.form['content']
        now = int(time.time())
        session = DBSession()
        new_blog = Blog(title=title, description=description, subtitle=subtitle,
                        content=content, time=now)
        session.add(new_blog)
        session.commit()
        session.close()
    except Exception as e:
        # `except Exception, e` is Python-2-only syntax; `as e` works on 2.6+ and 3.
        app.logger.error(e)
def delete(self):
    """Delete an article class by id. Query arg: ``class_id``."""
    class_id = self.get_argument('class_id', None)
    log.info('删除文章分类:class_id ' + class_id)
    session = DBSession()
    try:
        article_class = session.query(ArticleClass).filter_by(id=class_id).first()
        if not article_class:
            return self.finish(json.dumps({'code': -1, 'msg': '删除失败'}))
        try:
            session.query(ArticleClass).filter_by(id=class_id).delete()
            session.commit()
        except Exception as e:
            log.error(e)
            return self.finish(json.dumps({'code': -1, 'msg': '删除失败'}))
        return self.finish(json.dumps({'code': 0, 'msg': '删除成功'}))
    finally:
        session.close()  # original never closed the session
def get_url(file_name):
    """获取图片信息 — return ``(url, img_hash, title, episode,
    frame_timestamp)`` for the next picture of *file_name* to label,
    advancing the per-title "current" pointer, or None on failure.
    """
    session = DBSession()
    session.expire_on_commit = False
    # Find the picture currently being shown for this title, if any.
    curq = session.query(PicLabel).filter(
        PicLabel.current == 1, PicLabel.title == file_name).first()
    if not curq:
        # No pointer yet: start from the first unlabeled picture in order.
        uquery = session.query(PicLabel).filter(
            PicLabel.title == file_name,
            PicLabel.label == 0).order_by(PicLabel.episode,
                                          PicLabel.frame_timestamp).first()
    else:
        # Clear the old pointer, then step forward one position (with
        # wrap-around) through the ordered unlabeled pictures plus the
        # current one.
        session.query(PicLabel).filter(
            PicLabel.image_hash == curq.image_hash).update(
                {PicLabel.current: 0})
        uquerys = session.query(PicLabel).filter(
            PicLabel.title == file_name).filter(
                or_(PicLabel.label == 0,
                    PicLabel.image_hash == curq.image_hash)).order_by(
                        PicLabel.episode, PicLabel.frame_timestamp).all()
        for idx, uquery in enumerate(uquerys):
            if uquery.image_hash == curq.image_hash:
                index = idx
        length = len(uquerys)
        uquery = uquerys[(index + 1) % length]
        session.query(PicLabel).filter(
            PicLabel.image_hash == uquery.image_hash).update(
                {PicLabel.current: 1})
    session.commit()
    session.close()
    try:
        url = baseurl + uquery.image_path
        img_hash = uquery.image_hash
        title = uquery.title
        episode = uquery.episode
        frame_timestamp = uquery.frame_timestamp
        return url, img_hash, title, episode, frame_timestamp
    except Exception:  # narrowed from bare except; uquery may be None
        return None
def query_community(cls, districts=None, biz_circle=None):
    """查小区 — return sorted, de-duplicated community names filtered by
    either *districts* or *biz_circle*.

    When neither filter is given, logs an error and returns []. (The
    original fell back to ``[[]]``, which made ``x[0]`` below raise
    IndexError.)
    """
    session = DBSession()
    if districts:
        query = session.query(CommunityInfo.community) \
            .filter(CommunityInfo.district.in_(districts)) \
            .all()
    elif biz_circle:
        query = session.query(CommunityInfo.community) \
            .filter(CommunityInfo.biz_circle.in_(biz_circle)) \
            .all()
    else:
        query = []
        # logging.error, not logging.exception: we are not inside an
        # except block, so there is no active exception to attach.
        logging.error("@query_community: query condition un-defined.")
    session.commit()
    session.close()
    result = list(set([x[0] for x in query]))
    result.sort()
    return result