def get_by_key(cls, key):
    """Look up the first row of this model whose ``key`` column matches."""
    session = DBSession()
    if py_ver == 2:
        normalized = (key or b'').encode('utf-8')
    else:
        normalized = str(key or b'', 'utf-8')
    return session.query(cls).filter(cls.key == normalized).first()
def get(self):
    """Return one article's full data as a JSON envelope, by ``article_id``."""
    article_id = self.get_argument('article_id', None)
    # str() guards against a missing article_id (None), which would raise
    # TypeError when concatenated into the log message.
    log.info('获取文章信息:article_id ' + str(article_id))
    session = DBSession()
    article = session.query(Article).filter_by(id=article_id).first()
    if not article:
        return self.finish(json.dumps({'code': -1, 'msg': '该文章不存在'}))
    result = {
        'id': article.id,
        'class_id': article.class_id,
        # image_url is stored relative; prefix the site domain when present.
        'image_url': (domain_name + article.image_url) if article.image_url else '',
        'title': article.title,
        'note': article.note,
        'content': article.content
    }
    return self.finish(json.dumps({'code': 0, 'data': result}))
def FlushofGamelist(self):
    """Rebuild the local GameList cache from the configured remote table."""
    ''' No.1 clear table of gamelist '''
    # Drop every cached row so the table can be repopulated from scratch.
    localgamelist = DBSession.query(GameList).all()
    if len(localgamelist) != 0:
        for eachlist in range(len(localgamelist)):
            DBSession.delete(localgamelist[eachlist])
        DBSession.commit()
    ''' No.2 analyst Config and create conn '''
    # Connection settings come from the service INI file as (key, value)
    # tuples; copy the recognised keys into self.change.
    self.getConfig = readFromConfigFile().get_config_zonelist('/WebserviceInterface/ServiceConfig/setting.ini')
    for eachTuple in range(len(self.getConfig['Zonelist'])):
        if self.getConfig['Zonelist'][eachTuple][0] == 'username':
            self.change['username'] = self.getConfig['Zonelist'][eachTuple][1]
        elif self.getConfig['Zonelist'][eachTuple][0] == 'password':
            self.change['password'] = self.getConfig['Zonelist'][eachTuple][1]
        elif self.getConfig['Zonelist'][eachTuple][0] == 'ipaddress':
            self.change['ipaddress'] = self.getConfig['Zonelist'][eachTuple][1]
        elif self.getConfig['Zonelist'][eachTuple][0] == 'port':
            # Port is the only numeric field.
            self.change['port'] = int(self.getConfig['Zonelist'][eachTuple][1])
        elif self.getConfig['Zonelist'][eachTuple][0] == 'dbname':
            self.change['dbname'] = self.getConfig['Zonelist'][eachTuple][1]
        elif self.getConfig['Zonelist'][eachTuple][0] == 'tablename':
            self.change['tablename'] = self.getConfig['Zonelist'][eachTuple][1]
    (conn, cursor) = Connect().create(self.change)
    sql = 'select * from %s' % self.change['tablename']
    cursor.execute(sql)
    result = cursor.fetchall()
    # Python-2 print statement: dump the fetched rows for inspection.
    for i in result:
        print i
def get_cur_actors(sid):
    """Return the lead actors of show *sid* that still need labelling,
    plus the three special UI choices (discard / previous / skip).
    """
    actors = set(get_actors(sid))
    session = DBSession()
    session.expire_on_commit = False
    #querycs = session.query(CheckActor).filter(CheckActor.sid == sid).all()
    # Actor names that already appear on labelled pictures for this show.
    querycs = {
        obj.aname
        for obj in session.query(PicLabel.aname).filter(
            PicLabel.sid == sid).distinct() if obj.aname
    }
    session.commit()
    session.close()
    actors = actors - querycs
    '''
    for qactor in querycs:
        if qactor.status == 1:
            try:
                actors.remove(qactor.actor)
            except:
                continue
    '''
    return list(actors) + [u"不保留", u"上一张", u"跳过"]
def get_by_key(cls, key):
    """Fetch the row matching *key*, decoding per the running Python version."""
    if py_ver == 2:
        wanted = (key or b'').encode('utf-8')
    else:
        wanted = str(key or b'', 'utf-8')
    return DBSession().query(cls).filter(cls.key == wanted).first()
def init_db(path='eta.db'):
    """Create the SQLite schema at *path* and bind the session factory to it."""
    db_engine = create_engine("sqlite:///%s" % path)
    DBSession.configure(bind=db_engine)
    Base.metadata.create_all(db_engine)
    log.info("Initialized database %s", path)
def get(self):
    """Return one page of article classes (plus total count) as JSON."""
    cur_page = self.get_argument('cur_page', '1')
    page_size = 5
    log.info('获取文章分类列表cur_page:' + cur_page)
    sql = """
        select ( select count(id) from article_class ) as total, *
        from article_class
        order by id desc
        limit %d offset %d
    """ % (page_size, (int(cur_page) - 1) * page_size)
    session = DBSession()
    rows = session.execute(sql).fetchall()
    table_data = {
        'data': [
            {
                'id': row.id,
                'name': row.name,
                'create_date': row.create_date.strftime('%Y-%m-%d %H:%M:%S'),
                'write_date': row.write_date.strftime('%Y-%m-%d %H:%M:%S'),
            }
            for row in rows
        ],
        'page_size': page_size,
        'total': rows[0]['total'] if rows else 0,
    }
    return self.finish(json.dumps(table_data))
def crawl_sale_by_search(self, args):
    """Crawl one result page of on-sale listings for a search key.

    ``args`` is a ``(search_key, page)`` tuple so the method can be driven
    by map-style parallel runners.
    """
    search_key, page = args
    url_page = self.base_url + f"ershoufang/pg{page}rs{search_key}/"
    content = self.request_fn(url_page)
    soup = BeautifulSoup(content, self.bs4_parser)
    logging.debug('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, url_page))
    session = DBSession()
    for ul_tag in soup.find_all("ul", class_="sellListContent"):
        for item_tag in ul_tag.find_all("li"):
            try:
                info_dict = self.parse_sale_content(item_tag)
                logging.debug('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, info_dict))
                sale_info = SaleInfo(**info_dict)
                # Skip records missing the identifiers needed downstream.
                if not sale_info.house_id or not sale_info.community_id or not sale_info.district:
                    continue
                session.add(sale_info)
            except Exception as e:
                session.rollback()
                logging.exception('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, e))
    # Politeness delay before committing the whole page in one transaction.
    time.sleep(3)
    session.commit()
    session.close()
    logging.info('@crawl_sale_by_search: {0} - page - {1} complete.'.format(search_key, page))
def create_entry(data):
    """Persist a new ``Entry`` built from *data*, stamped with the current time."""
    mapped = map_data(data)
    DBSession.add(Entry(date=datetime.now(), **mapped))
    DBSession.commit()
    log.debug("Write entry %r", mapped)
def set_password(self, new_password):
    """Salt and double-MD5 the new password, then persist it on this user."""
    self.salt = random_str()
    first_digest = md5(new_password.encode('utf-8')).hexdigest()
    self.password = md5((first_digest + self.salt).encode('utf-8')).hexdigest()
    session = DBSession()
    session.add(self)
    session.commit()
def auth(cls, username, password):
    """Validate credentials; return the user row on success.

    Returns ``False`` when the user does not exist, and falls through
    (implicit ``None``) on a wrong password — original truthiness contract.
    """
    session = DBSession()
    user = session.query(cls).filter(cls.username == username).first()
    if not user:
        return False
    hashed = md5(password.encode('utf-8')).hexdigest()
    final = md5((hashed + user.salt).encode('utf-8')).hexdigest()
    if user.password == final:
        return user
def get_actors(sid):
    """Return the lead-actor names recorded for show *sid* (at least 10 expected)."""
    session = DBSession()
    session.expire_on_commit = False
    rows = session.query(CheckActor).filter(CheckActor.sid == sid).all()
    session.commit()
    session.close()
    return [row.actor for row in rows]  #[:10]
def flush(self, oracleCursor):
    """Copy the Oracle MEP_ASSETREPORT view into the local ASSET table.

    Bug fix: the query was executed on *oracleCursor* but the rows were
    fetched from ``self.cursor`` — the fetch now uses the cursor that ran
    the query.
    """
    # execute in oracle VIEW
    oracleCursor.execute('select * from MEP_ASSETREPORT')
    self.all = oracleCursor.fetchall()
    # Columns 12, 16 and 18-21 of the view are intentionally skipped —
    # presumably unused by ASSET; confirm against the model's columns.
    for idx, row in enumerate(self.all):
        DBSession.add(ASSET(idx, row[1], row[2], row[3], row[4], row[5],
                            row[6], row[7], row[8], row[9], row[10], row[11],
                            row[13], row[14], row[15], row[17], row[22]))
    DBSession.commit()
def get_sid(file_name):
    """Return the sid of the first unlabelled picture titled *file_name*, if any."""
    session = DBSession()
    session.expire_on_commit = False
    row = session.query(PicLabel).filter(
        PicLabel.label == 0, PicLabel.title == file_name).first()
    session.commit()
    session.close()
    return row.sid if row else None
def get_data():
    """AJAX endpoint: record the label the user picked for the current picture.

    Special choices: "不保留" (discard) marks the picture invalid but
    labelled, "上一张" (previous) moves the ``current`` pointer backwards,
    "跳过" (skip) changes nothing; any other value is stored as the actor
    name and the picture is marked labelled.
    """
    #if request.method == "POST":
    rdict = {"aname": '', "img_hash": '', "title": ''}
    for item in rdict:
        rdict[item] = request.values.get(item, '')
    row_update_dict = {}
    row2_update_dict = {}
    if rdict["aname"] == u"不保留":
        # Discard: keep the row but mark it invalid and labelled.
        row_update_dict['valid'] = 0
        row_update_dict['label'] = 1
    elif rdict["aname"] == u"上一张":
        # Previous: clear the current flag, then step back through the
        # episode/timestamp ordering.  Offset is -2 rather than -1,
        # presumably because the pointer already advanced past the shown
        # picture — TODO confirm against get_url's advancing logic.
        session = DBSession()
        session.expire_on_commit = False
        session.query(PicLabel).filter(
            PicLabel.image_hash == rdict["img_hash"]).update(
                {PicLabel.current: 0})
        uquerys = session.query(PicLabel).filter(
            PicLabel.title == rdict["title"]).filter(
                or_(PicLabel.label == 0,
                    PicLabel.image_hash == rdict["img_hash"])).order_by(
                        PicLabel.episode, PicLabel.frame_timestamp).all()
        # Locate the shown picture inside the ordered candidate list.
        for idx, uquery in enumerate(uquerys):
            if uquery.image_hash == rdict["img_hash"]:
                index = idx
        length = len(uquerys)
        uquery = uquerys[(length + index - 2) % length]
        session.query(PicLabel).filter(
            PicLabel.image_hash == uquery.image_hash).update(
                {PicLabel.current: 1})
        session.commit()
        session.close()
    elif rdict["aname"] != u"跳过" and rdict["aname"] != u"上一张":
        # A real actor name: store it and mark the picture labelled.
        row_update_dict['aname'] = rdict['aname']
        row_update_dict['label'] = 1
        # row2_update_dict['status'] = 1
    session = DBSession()
    try:
        if row_update_dict:
            session.query(PicLabel).filter_by(
                image_hash=rdict["img_hash"]).update(row_update_dict)
        if row2_update_dict:
            session.query(CheckActor).filter(
                CheckActor.actor == rdict["aname"]).update(row2_update_dict)
        session.commit()
    except:
        session.rollback()
    finally:
        session.close()
    #url, img_hash, title, episode, frame_timestamp = get_url(rdict["title"])
    #return jsonify(url=url, img_hash=img_hash, title=title, episode=episode, frame_timestamp=frame_timestamp)
    return jsonify({'error_code': 0})
def put(self):
    """Rename an article class; responds with a JSON status envelope."""
    class_id = self.get_body_argument('class_id', None)
    name = self.get_body_argument('name', None)
    session = DBSession()
    article_class = session.query(ArticleClass).filter_by(id=class_id).first()
    if not article_class:
        return self.finish(json.dumps({'code': -1, 'msg': '该分类不存在'}))
    log.info('修改文章分类:class_id ' + class_id)
    log.info('修改文章分类:name ' + article_class.name + ' => ' + name)
    stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    try:
        session.query(ArticleClass).filter_by(id=class_id).update(
            {'name': name, 'write_date': stamp})
        session.commit()
        session.close()
    except Exception as e:
        log.error(e)
        return self.finish(json.dumps({'code': -1, 'msg': '修改失败'}))
    return self.finish(json.dumps({'code': 0, 'msg': '修改成功'}))
def get(self):
    """List every article class as ``[{'id', 'name'}, ...]`` JSON."""
    session = DBSession()
    payload = [{'id': row.id, 'name': row.name}
               for row in session.query(ArticleClass).all()]
    return self.finish(json.dumps(payload))
def _restore_pending_data(self):
    """Reload the payload of the first PENDING payment transaction.

    Returns ``(data, pool_amounts)`` where the string amounts stored in
    the JSON blob are converted back to ``Decimal``.
    """
    db_session = DBSession()
    transaction = db_session.query(PaymentTransaction)\
        .filter_by(status = PaymentTransaction.PENDING).first()
    data = json.loads(transaction.transaction_data)
    amounts = []
    # Walk miners/amounts in lockstep instead of indexing parallel lists.
    for miner, amount in zip(data["miners"], data["amounts"]):
        amounts.append(Decimal(amount))
        self._logger.info(f'miner {miner.lower()} unpaid rewards {amount}')
    data["amounts"] = amounts
    return data, data["pool_amounts"]
def new(cls, username, password):
    """Create and persist a new user; the very first user becomes ADMIN."""
    salt = random_str()
    digest = md5(password.encode('utf-8')).hexdigest()
    final = md5((digest + salt).encode('utf-8')).hexdigest()
    # The first registered account is granted admin rights.
    level = USER_LEVEL.ADMIN if cls.count() == 0 else USER_LEVEL.NORMAL
    now = int(time.time())
    session = DBSession()
    user = cls(username=username, password=final, salt=salt, level=level,
               key=random_str(32), key_time=now, reg_time=now)
    session.add(user)
    session.commit()
    return user
def add_article(request):
    """Handle the add-article form: create on submit, otherwise render it."""
    if 'form.submitted' not in request.POST:
        return render_to_response(
            'templates/add_article.pt',
            {'back_url': route_url('article_list', request)},
            request=request)
    with transaction.manager:
        DBSession.add(Article(title=request.POST['title'],
                              body=request.POST['body']))
    return HTTPFound(location=route_url('article_list', request))
def post(self):
    """Create a new article from the posted form fields."""
    now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    data = {
        'class_id': self.get_body_argument('class_id', None),
        'title': self.get_body_argument('title', None),
        'image_url': self.get_body_argument('image_url', None),
        'note': self.get_body_argument('note', None),
        'content': self.get_body_argument('content', None),
        'author': 'LiJiaF',
        'create_date': now,
        'write_date': now,
    }
    log.info('添加文章:' + json.dumps(data))
    try:
        session = DBSession()
        session.add(Article(**data))
        session.commit()
        session.close()
    except Exception as e:
        log.error(e)
        return self.finish(json.dumps({'code': -1, 'msg': '添加失败'}))
    return self.finish(json.dumps({'code': 0, 'msg': '添加成功'}))
def new(cls, username, password):
    """Register a user (usernames are case-insensitive); first user is admin."""
    uname = username.lower()
    salt = random_str()
    final = md5((md5(password.encode('utf-8')).hexdigest() + salt).encode('utf-8')).hexdigest()
    state = USER_STATE.ADMIN if cls.count() == 0 else USER_STATE.NORMAL  # first user is admin
    stamp = int(time.time())
    session = DBSession()
    user = cls(username=uname, password=final, salt=salt, state=state,
               key=random_str(32), key_time=stamp, reg_time=stamp)
    session.add(user)
    session.commit()
    return user
def Recovery(self):
    """Rebuild the local ASSET cache from Oracle, clearing it first when full.

    The table is wiped only when it holds more than 100 rows; the refill
    ran in both branches of the original if/else, so it is hoisted out.
    """
    localAsset = DBSession.query(ASSET).all()
    if len(localAsset) > 100:
        for row in localAsset:
            DBSession.delete(row)
        DBSession.commit()
    self.flush(self.cursor)
    self.cursor.close()
    self.OracleConn.close()
def get(self):
    """Return a single article class (id + name) as JSON."""
    class_id = self.get_argument('class_id', None)
    # str() avoids a TypeError when class_id is absent (None).
    log.info('获取文章分类信息:class_id ' + str(class_id))
    session = DBSession()
    article_class = session.query(ArticleClass).filter_by(id=class_id).first()
    if not article_class:
        return self.finish(json.dumps({'code': -1, 'msg': '该分类不存在'}))
    result = {'id': article_class.id, 'name': article_class.name}
    return self.finish(json.dumps({'code': 0, 'data': result}))
def get(self):
    """Return one page of articles joined with their class names as JSON."""
    cur_page = self.get_argument('cur_page', '1')
    page_size = 5
    log.info('获取文章列表cur_page:' + cur_page)
    sql = """
        select ( select count(id) from article ) as total,
               a.id, ac.name, a.image_url, a.title, a.author,
               a.create_date, a.write_date
        from article a
        inner join article_class ac on ac.id = a.class_id
        order by id desc
        limit %d offset %d
    """ % (page_size, (int(cur_page) - 1) * page_size)
    session = DBSession()
    cursor = session.execute(sql)
    data = cursor.fetchall()
    table_data = {
        'data': [],
        'page_size': page_size,
        'total': data[0]['total'] if data else 0
    }
    for d in data:
        table_data['data'].append({
            'id': d.id,
            'class_name': d.name,
            # Guard NULL image_url (same rule as the article-detail handler),
            # which previously raised TypeError on concatenation.
            'image_url': (domain_name + d.image_url) if d.image_url else '',
            'title': d.title,
            'author': d.author,
            'create_date': d.create_date.strftime('%Y-%m-%d %H:%M:%S'),
            'write_date': d.write_date.strftime('%Y-%m-%d %H:%M:%S'),
        })
    return self.finish(json.dumps(table_data))
def crawl_transaction_by_search(self, args):
    """Crawl one page of historical transaction listings for a search key.

    Existing rows (matched by listing id) are updated in place; new ones
    are inserted.  ``args`` is a ``(search_key, page)`` tuple.
    """
    search_key, page = args
    url_page = self.base_url + f"chengjiao/pg{page}rs{search_key}/"
    content = self.request_fn(url_page)
    soup = BeautifulSoup(content, self.bs4_parser)
    logging.debug('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(search_key, page, url_page))
    session = DBSession()
    for ul_tag in soup.find_all("ul", class_="listContent"):
        for item_tag in ul_tag.find_all("li"):
            try:
                info_dict = self.parse_transaction_content(item_tag)
                # Upsert keyed on the listing id, committed per item.
                query = session.query(TransactionInfo).filter(TransactionInfo.id == info_dict['id'])
                if query.first():
                    query.update(info_dict)
                else:
                    session.add(TransactionInfo(**info_dict))
                session.commit()
                logging.debug('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(
                    search_key, page, info_dict))
            except Exception as e:
                logging.exception('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(
                    search_key, page, e))
    # Politeness delay between pages.  NOTE(review): unlike
    # crawl_sale_by_search, the session is never closed here — confirm.
    time.sleep(3)
    logging.info('@crawl_transaction_by_search: {0} - page - {1} complete.'.format(search_key, page))
def _check_pending_transactions(self) -> bool:
    """Wait for every PENDING payment transaction to be mined.

    For each receipt obtained, the stored miners/amounts payload is
    replayed into ``_save_payments_info``.  Returns False on the first
    transaction whose receipt cannot be fetched within the timeout.
    """
    db_session = DBSession()
    pending_transactions = db_session.query(PaymentTransaction)\
        .filter_by(status = PaymentTransaction.PENDING).all()
    for transaction in pending_transactions:
        try:
            tx_receipt = self._web3.eth.waitForTransactionReceipt(transaction.transaction_hash,
                                                                  timeout=config.WAIT_TIMEOUT)
            data = json.loads(transaction.transaction_data)
            # Amounts are persisted as strings; restore Decimal precision.
            amounts = []
            for amount in data["amounts"]:
                amounts.append(Decimal(amount))
            self._save_payments_info(tx_receipt, data["miners"], amounts, data["pool_amounts"])
        except Exception as e:
            self._logger.fatal(
                f"get trasaction fail! tx_hash:{transaction.transaction_hash}, err:{e}")
            return False
    return True
def add_article(request):
    """Render the add-article form, or persist a new article on submit."""
    submitted = 'form.submitted' in request.POST
    if submitted:
        with transaction.manager:
            new_article = Article(
                title=request.POST['title'],
                body=request.POST['body'],
            )
            DBSession.add(new_article)
        return HTTPFound(location=route_url('article_list', request))
    return render_to_response(
        'templates/add_article.pt',
        {'back_url': route_url('article_list', request)},
        request=request,
    )
def get_url(file_name):
    """Return ``(url, img_hash, title, episode, frame_timestamp)`` for the
    next picture to label in show *file_name*, advancing the ``current``
    pointer.  Returns None when no candidate picture exists.
    """
    session = DBSession()
    session.expire_on_commit = False
    # Old random-offset selection, kept for reference:
    # offset_limit = session.query(PicLabel).filter(PicLabel.label == 0, PicLabel.title == file_name).count()
    # offset_row = int(hashlib.sha1(os.urandom(10)).hexdigest(), 16) % offset_limit
    # #offset_row = random.randint(0, offset_limit)
    # uquery = session.query(PicLabel).filter(PicLabel.label == 0, PicLabel.title == file_name).offset(offset_row).first()
    curq = session.query(PicLabel).filter(PicLabel.current == 1, PicLabel.title == file_name).first()
    if not curq:
        # No current pointer yet: start from the first unlabelled picture
        # in episode/timestamp order.
        uquery = session.query(PicLabel).filter(
            PicLabel.title == file_name,
            PicLabel.label == 0).order_by(PicLabel.episode,
                                          PicLabel.frame_timestamp).first()
    else:
        # Clear the old pointer, then step one picture forward within the
        # ordered candidate list (unlabelled ones plus the current one).
        session.query(PicLabel).filter(
            PicLabel.image_hash == curq.image_hash).update(
                {PicLabel.current: 0})
        uquerys = session.query(PicLabel).filter(
            PicLabel.title == file_name).filter(
                or_(PicLabel.label == 0,
                    PicLabel.image_hash == curq.image_hash)).order_by(
                        PicLabel.episode, PicLabel.frame_timestamp).all()
        for idx, uquery in enumerate(uquerys):
            if uquery.image_hash == curq.image_hash:
                index = idx
        length = len(uquerys)
        uquery = uquerys[(index + 1) % length]
    # Mark the chosen picture as current for the next round-trip.
    session.query(PicLabel).filter(
        PicLabel.image_hash == uquery.image_hash).update({PicLabel.current: 1})
    session.commit()
    session.close()
    try:
        url = baseurl + uquery.image_path
        img_hash = uquery.image_hash
        title = uquery.title
        episode = uquery.episode
        #print episode
        frame_timestamp = uquery.frame_timestamp
        return url, img_hash, title, episode, frame_timestamp
    except:
        # uquery can be None when the show has no candidates at all.
        return None
def searchObject(self, machineroom, requestobject): try: allsearch = DBSession.query(ASSET).filter(and_(ASSET.partNum.like("%"+requestobject+"%"), ASSET.machineRoom.like("%"+machineroom+"%"), ASSET.useNow == 'Y')).all() for eachAsset in range(len(allsearch)): self.assetBack[allsearch[eachAsset].id] = dict(confNum=allsearch[eachAsset].confNum, serialNum=allsearch[eachAsset].serialNum, hostname=allsearch[eachAsset].hostname, companyIp=allsearch[eachAsset].companyIp, outIp=allsearch[eachAsset].outIp, storeIp=allsearch[eachAsset].storeIp, zcbm=allsearch[eachAsset].zcbm) except Exception, e: return dict(Status='false', Return=e)
def getDatabase(self): searchLocal = DBSession.query(ASSET).all() if len(searchLocal) > 100: print "local table.asset has data. needn't." else: self.flush(self.cursor) self.cursor.close() self.OracleConn.close()
def main(global_config, **settings):
    """Pyramid WSGI entry point: register routes, bind the DB, seed one article."""
    config = Configurator(settings=settings)
    config.add_route('article_list', '/')
    config.add_route('show_article', '/article/{article}')
    config.add_route('add_article', '/add_article')
    config.scan()
    # SQL Alchemy stuff.
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    try:
        Base.metadata.create_all(engine)
        # Seed a demo article; an IntegrityError means it already exists.
        with transaction.manager:
            article = Article(title='Test article', body="Test test test")
            DBSession.add(article)
    except IntegrityError:
        print "Skipping creating, integrity error was thrown"
    return config.make_wsgi_app()
def get_home_items():
    """Build the home-page list of show titles with a labelling badge.

    badge 0 = the show still has unlabelled pictures and fewer labelled
    names than registered actors; badge 1 = labelling considered complete.
    """
    session = DBSession()
    # Registered actor count per show.
    sid_actor_num = {
        obj.sid: obj.num
        for obj in session.query(
            CheckActor.sid,
            sqlalchemy.sql.label('num', sqlalchemy.func.count(
                CheckActor.actor))).group_by(CheckActor.sid).all()
    }
    # Distinct labelled-name count per show.
    sid_label_num = {
        obj.sid: obj.num
        for obj in session.query(
            PicLabel.sid,
            sqlalchemy.sql.label('num', sqlalchemy.func.count(
                PicLabel.aname))).distinct().group_by(PicLabel.sid).all()
    }
    #sid_need_tag = {obj.sid for obj in session.query(CheckActor.sid, CheckActor.status).all() if not obj.status}
    # sid -> display title mapping.
    sid_title = {
        obj.sid: obj.title
        for obj in session.query(PicLabel.sid, PicLabel.title).distinct()
    }
    # Shows that still have at least one unlabelled picture.
    nonempty_sid = {
        obj.sid
        for obj in session.query(PicLabel.sid).filter(
            PicLabel.label == 0).distinct()
    }
    session.commit()
    session.close()
    items = []
    for k, v in sid_title.items():
        if k not in sid_label_num:
            sid_label_num[k] = 0
        if sid_actor_num[k] > sid_label_num[k] and k in nonempty_sid:
            badge = 0
        else:
            badge = 1
        items.append({v: badge})
    '''
    querycs = session.query(CheckActor.sid).distinct()
    items = []
    for qs in querycs:
        title = query.filter(PicLabel.sid == qs.sid).first().title
        if title not in items:
            badge = 0
            status_set = queryc.filter(CheckActor.sid == qs.sid).all()
            cnt = 0
            for st in status_set:
                if st.status == 1:
                    cnt = cnt + 1
            if cnt == len(status_set):
                badge = 1
            items.append({title: badge})
    '''
    return items
def get(self):
    """Render the 404 page with the classes that actually contain articles."""
    class_sql = """
        select ac.id, ac.name
        from article_class as ac
        where ac.id in (select distinct class_id from article)
        order by ac.id
    """
    session = DBSession()
    rows = session.execute(class_sql).fetchall()
    session.close()
    class_result = [{'id': row.id, 'name': row.name} for row in rows]
    self.render("404.html", data=class_result)
def list(): session = DBSession() try: filter = request.args.get('filter', None) _blogs = [] if not filter: _blogs = session.query(Blog.time).order_by(Blog.time).all() else: _blogs = session.query(Blog.time).filter( Blog.title.like('%filter%')).all() blogs = [] for blog in _blogs: blogs.append({'time': blog.time}) print json.dumps(blogs) return json.dumps(blogs) except Exception, e: app.logger.error(e) return 'error'
def save_stock_list(stock_list):
    """Insert each ``{'symbol', 'name'}`` dict from *stock_list* as a Stock row."""
    session = DBSession()
    for entry in stock_list:
        session.add(Stock(entry["symbol"], entry["name"]))
    session.commit()
    session.close()
    return True
def _get_miner_unpaid_reward(self):
    """Compute each miner's unpaid mining reward for the configured round.

    Joins mature rewards against what has already been paid; miners whose
    outstanding balance reaches MIN_PAY_AMOUNT are returned as parallel
    ``miners`` / ``amounts`` lists (checksum addresses / Decimals).
    """
    db_session = DBSession()
    items = db_session.query(MatureMiningReward)\
        .outerjoin(RoundPaymentSummary, MatureMiningReward.holder == RoundPaymentSummary.holder)\
        .filter(MatureMiningReward.mining_round == config.MINING_ROUND)\
        .with_entities(MatureMiningReward.holder, MatureMiningReward.mcb_balance, RoundPaymentSummary.paid_amount)\
        .all()
    result = {
        "miners": [],
        "amounts": [],
    }
    for item in items:
        # paid_amount is NULL (outer join) when the holder was never paid.
        unpaid = item.mcb_balance
        if item.paid_amount is not None:
            unpaid = item.mcb_balance - item.paid_amount
        if unpaid >= Decimal(config.MIN_PAY_AMOUNT):
            result["miners"].append(self._web3.toChecksumAddress(item.holder))
            result["amounts"].append(unpaid)
            self._logger.info(f'miner {item.holder} unpaid rewards {unpaid}')
    return result
def blog():
    """Return one blog entry as JSON: by ``blog_id``, or the default 'latest'.

    NOTE(review): the 'latest' branch orders by time ascending and calls
    ``.one()``, which raises when more than one row exists — confirm this
    is the intended behaviour (``first()`` / descending seems more likely).
    """
    try:
        session = DBSession()
        blog_id = request.args.get('blog_id', 'latest')
        _blog = None
        if blog_id != 'latest':
            _blog = session.query(Blog).filter(Blog.time == int(blog_id)).one()
        else:
            _blog = session.query(Blog).order_by(Blog.time.asc()).one()
        blog = {
            'time': _blog.time,
            'title': _blog.title,
            'description': _blog.description,
            'subtitle': _blog.subtitle,
            'content': _blog.content
        }
        print json.dumps(blog)
        return json.dumps(blog)
    except Exception, e:
        app.logger.error(e)
        return 'error'
def query_biz_circle(cls, districts):
    """Return the sorted, de-duplicated biz circles of the given districts."""
    session = DBSession()
    rows = session.query(CommunityInfo.biz_circle) \
        .filter(CommunityInfo.district.in_(districts)) \
        .all()
    session.commit()
    session.close()
    return sorted({row[0] for row in rows})
def analyNode(self, AgentZone):
    """Load every NodeList row of *AgentZone* into ``self.Nodeinform``.

    Returns an error string when the zone has no nodes, the exception on
    failure, and None (implicitly) on success — the populated dict is the
    real result.
    """
    try:
        getNodeInform = DBSession.query(NodeList).filter_by(NodeZone = AgentZone).all()
        if getNodeInform:
            # Keyed by NodeID; each value bundles the display/connection fields.
            for eachNodeInform in range(len(getNodeInform)):
                self.Nodeinform[getNodeInform[eachNodeInform].NodeID]=dict(NodeType=getNodeInform[eachNodeInform].NodeType,
                                                                           NodeZone=getNodeInform[eachNodeInform].NodeZone,
                                                                           NodeName=getNodeInform[eachNodeInform].NodeName,
                                                                           IP=getNodeInform[eachNodeInform].IP,
                                                                           Port=getNodeInform[eachNodeInform].Port)
        else:
            msg = 'MySQLdb : select from table.nodelist error.'
            return msg
    except Exception, e:
        return e
def show_article(context, request):
    """Fetch the article addressed by the route context or raise 404."""
    db_result = DBSession.query(Article).filter(
        Article.id == context.article).first()
    if not db_result:
        raise HTTPNotFound("Article could not be found")
    article = {
        'id': db_result.id,
        'title': db_result.title,
        'body': db_result.body,
    }
    return {'article': article,
            'back_url': route_url('article_list', request)}
def save_comment_list(symbol, comment_list):
    """Insert each comment dict belonging to *symbol* as a Comment row."""
    session = DBSession()
    for item in comment_list:
        # 'item' avoids shadowing the builtin that 'id' did.
        session.add(Comment(item["id"], symbol, item["title"],
                            item["text"], item["description"]))
    session.commit()
    session.close()
    return True
def put(self):
    """Update an existing article from the posted form fields."""
    article_id = self.get_body_argument('article_id', None)
    class_id = self.get_body_argument('class_id', None)
    title = self.get_body_argument('title', None)
    image_url = self.get_body_argument('image_url', None)
    note = self.get_body_argument('note', None)
    content = self.get_body_argument('content', None)
    session = DBSession()
    article = session.query(Article).filter_by(id=article_id).first()
    if not article:
        return self.finish(json.dumps({'code': -1, 'msg': '该文章不存在'}))
    now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    data = {
        'class_id': class_id,
        'title': title,
        'note': note,
        'content': content,
        'write_date': now
    }
    log.info('修改文章:article_id ' + article_id)
    log.info('修改文章:class_id ' + str(article.class_id) + ' => ' + class_id)
    log.info('修改文章:title ' + article.title + ' => ' + title)
    log.info('修改文章:note ' + article.note + ' => ' + note)
    try:
        # Bug fix: lstrip(domain_name) strips any of the *characters* in
        # domain_name, not the prefix string — it could eat leading letters
        # of the relative path.  Remove the exact prefix instead.
        if image_url.startswith(domain_name):
            image_url = image_url[len(domain_name):]
        if image_url != article.image_url:
            data['image_url'] = image_url
            if article.image_url:
                # Delete the superseded image file from disk.
                os.remove(article.image_url)
        session.query(Article).filter_by(id=article_id).update(data)
        session.commit()
        session.close()
    except Exception as e:
        log.error(e)
        return self.finish(json.dumps({'code': -1, 'msg': '修改失败'}))
    return self.finish(json.dumps({'code': 0, 'msg': '修改成功'}))
def flushAgent(self, machineroomSimple, Agent='Agent'):
    """Resolve the agents of a machine room and derive per-agent fields.

    NOTE(review): the loop computes AgentID/AgentZone/AgentName/IP/Port/
    IsUse but never stores or returns them — the persistence step appears
    to be missing or lives outside this excerpt; confirm before relying
    on this method.  Returns the exception on failure, None otherwise.
    """
    try:
        # Translate the short codes into full names via the Translate table.
        getSearchAgent = DBSession.query(Translate).filter_by(simple = Agent).first()
        AgentSimple = getSearchAgent.detail
        getSearchMachineroom = DBSession.query(Translate).filter_by(simple = machineroomSimple).first()
        MrDetail = getSearchMachineroom.detail
        getAgentList = AssetInformation().searchObject(MrDetail, AgentSimple)
        print "######## LEN ", len(getAgentList['Return'])
        for key,value in getAgentList['Return'].items():
            AgentID = key
            AgentZone = machineroomSimple
            AgentName = value['hostname']
            # Pick the reachable IP out of company/out/store addresses.
            IP = self.iprule(key, machineroomSimple, value['companyIp'], value['outIp'], value['storeIp'])
            Port = 'NULL'
            IsUse = 1
    except Exception, e:
        return e
def getFlushoftable(self, signal='ALL'):
    """Rebuild the Gameinform table from the crawled game information."""
    # No.1 clear table of gamename
    localGameName = DBSession.query(Gameinform).all()
    if len(localGameName) != 0:
        for eachGamename in range(len(localGameName)):
            DBSession.delete(localGameName[eachGamename])
        DBSession.commit()
    # No.2 get each Game information & fill in table
    # self.count provides a running primary-key value across inserts.
    self.flush = Urlex().getInformationMultiple(signal)
    for key,value in self.flush.items():
        if key != 'NULL':
            DBSession.add(Gameinform(self.count, key, value['ipaddress'], value['port'], value['dbname']))
            self.count += 1
    DBSession.commit()
def article_list(request):
    """List all articles with their detail URLs for the index template."""
    articles = [
        {
            'id': row.id,
            'url': route_url('show_article', request, article=str(row.id)),
            'title': row.title,
        }
        for row in DBSession.query(Article).all()
    ]
    return {
        'articles': articles,
        'add_article': route_url('add_article', request),
    }
def show_article(context, request):
    """Look up the requested article; 404 when it does not exist."""
    row = DBSession.query(Article).filter(
        Article.id == context.article
    ).first()
    if not row:
        raise HTTPNotFound(
            "Article could not be found"
        )
    return {
        'article': {'id': row.id, 'title': row.title, 'body': row.body},
        'back_url': route_url('article_list', request),
    }
def FromConfNum(self, confNum):
    """Collect the device/server/network details of one configuration number.

    Returns a dict with State/Device/ServerInform/Internet on success, an
    error string when the confNum is unknown, or the exception on failure.
    """
    Attitude = ""
    ServerInform = {}
    Internet = {}
    Device = {}
    try:
        self.allLineofConfnum = DBSession.query(ASSET).filter_by(confNum = confNum).first()
        print self.allLineofConfnum
        if self.allLineofConfnum:
            # All information collect
            Attitude = self.allLineofConfnum.useNow
            # Physical identity of the device.
            Device['confNum'] = self.allLineofConfnum.confNum
            Device['partNum'] = self.allLineofConfnum.partNum
            Device['serialNum'] = self.allLineofConfnum.serialNum
            Device['zcbm'] = self.allLineofConfnum.zcbm
            Device['machineRoom'] = self.allLineofConfnum.machineRoom
            # Usage / ownership details.
            ServerInform['useProperty'] = self.allLineofConfnum.useProperty
            ServerInform['conProject'] = self.allLineofConfnum.conProject
            ServerInform['username'] = self.allLineofConfnum.username
            ServerInform['userId'] = self.allLineofConfnum.userId
            ServerInform['usage'] = self.allLineofConfnum.usage
            # Network addressing.
            Internet['hostname'] = self.allLineofConfnum.hostname
            Internet['companyIp'] = self.allLineofConfnum.companyIp
            Internet['outIp'] = self.allLineofConfnum.outIp
            Internet['storeIp'] = self.allLineofConfnum.storeIp
            return dict(State=Attitude, Device=Device, ServerInform=ServerInform, Internet=Internet)
        else:
            msg = 'MySQLdb : could not found any confNum.'
            return msg
    except Exception, e:
        msg = e
        return msg
def crawling(company_short_name):
    """Crawl a company's job postings page by page and store them.

    NOTE(review): this function references several names not defined
    locally (``page``, ``max_iter``, ``job_url``, ``company``, ``Job``);
    they must come from the enclosing module/globals.  The second
    ``param`` assignment also discards the first ('kd'/'first' keys lost).
    Confirm against the module this was extracted from.
    """
    param = {'first': 'false', 'pn': 0, 'kd': ''}
    param = {'first': 'false', 'pn': page, 'sortField': 0, 'havemark': 0}
    log = "[{}]\t抓取第 {} 页完毕, 当前页职位个数{}, 累计已抓{}, 该公司总计{}"
    count = 0
    for i in range(max_iter):
        param['pn'] += 1
        req = requests.post(job_url, data=param)
        info = json.loads(req.content)
        total_count = int(info['content']['positionResult']['totalCount'])
        job_list = info['content']['positionResult']['result']
        count += len(job_list)
        print log.format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), param['pn'], len(job_list), count, total_count)
        session = DBSession()
        # NOTE(review): every stored field comes from ``company``, not from
        # the individual ``job`` — looks like a copy/paste slip; confirm.
        for job in job_list:
            c = Job(
                company_id=company.get("companyId"),
                company_full_name=company.get("companyFullName"),
                company_short_name=company.get("companyShortName"),
                city=company.get("city"),
                position_num=company.get("positionNum"),
                city_score=company.get("cityScore"),
                finance_stage=company.get("financeStage"),
                industry_field=company.get("industryField"),
                country_score=company.get("countryScore"),
                company_features=company.get("companyFeatures"),
                process_rate=company.get("processRate"),
                interview_remark_num=company.get("interviewRemarkNum"),
                approve=company.get("approve"),
                create_time=datetime.now()
            )
            session.add(c)
        session.commit()
        session.close()
        # Politeness delay between pages; stop when a page comes back empty.
        time.sleep(3)
        if len(job_list) == 0:
            break
    return count
def searchfinisheddoingeventofbefore(self, startpoint):
    """Collect up to 10 finished events with Eid below *startpoint*.

    Rows are taken newest-first and enriched with display names, the
    base64/eval-decoded payload re-serialised as JSON, and formatted
    timestamps.  NOTE(review): the success path builds ``tmpArray`` but no
    return of it is visible here — the success return appears to be
    missing or truncated; only the empty-result and error dicts are
    returned.  ``eval`` on stored payloads is also a code-injection risk
    if the Data column can ever hold untrusted content — confirm.
    """
    tmpArray = []
    # Coerce string page cursors to int before comparing against Eid.
    if type(startpoint).__name__ != 'int':
        startpoint = int(startpoint)
    try:
        getsearchfinisheddoingeventofall = DBSession.query(EventFinished).filter((EventFinished.Eid < startpoint)).order_by(desc(EventFinished.Eid)).all()
        if getsearchfinisheddoingeventofall:
            if len(getsearchfinisheddoingeventofall) > 10:
                # More than a page: take only the first 10 rows by index.
                for eachline in range(10):
                    # GamePYname
                    tmpGamePYname = EventSearch().searchGamelistAboutPYname(getsearchfinisheddoingeventofall[eachline].GameID)
                    if tmpGamePYname['Status'] != 'Success':
                        GamePYname = 'None'
                    else:
                        GamePYname = tmpGamePYname['FullName']
                    # OidPYname
                    tmpOidPYname = EventSearch().searchOIDdetailinTemplate(getsearchfinisheddoingeventofall[eachline].Oid)
                    if tmpOidPYname['Status'] != 'Success':
                        OidPYname = 'None'
                    else:
                        OidPYname = tmpOidPYname['TemplateName']
                    # Data: base64 -> python literal (eval) -> JSON string.
                    newData = base64Data().decode64(getsearchfinisheddoingeventofall[eachline].Data)
                    newData = eval(newData)
                    newData = json.dumps(newData)
                    # Timestamp
                    closeTime = getsearchfinisheddoingeventofall[eachline].CloseTime
                    newclosetime = TimeBasic().timeControl(getsearchfinisheddoingeventofall[eachline].CloseTime, 5)
                    # OccurTime
                    occurTime = getsearchfinisheddoingeventofall[eachline].OccurTime
                    newoccurTime = TimeBasic().timeControl(getsearchfinisheddoingeventofall[eachline].OccurTime, 5)
                    # DealTime
                    tmpDealTime = TimeBasic().TimeMinus(getsearchfinisheddoingeventofall[eachline].OccurTime, getsearchfinisheddoingeventofall[eachline].CloseTime)
                    # username
                    username = getsearchfinisheddoingeventofall[eachline].Username
                    # close information
                    Detail = getsearchfinisheddoingeventofall[eachline].Detail
                    tmpArray.append(dict(occurTime = occurTime, occurPYtime = newoccurTime, DealTime = tmpDealTime, closeTime = closeTime, closePYtime = newclosetime, Username = username, CloseDetail=Detail, Eid = getsearchfinisheddoingeventofall[eachline].Eid, GameID = getsearchfinisheddoingeventofall[eachline].GameID, GamePYname = GamePYname, Oid = getsearchfinisheddoingeventofall[eachline].Oid, OidPYname = OidPYname, Data = newData))
            else:
                # A short final page: iterate the rows directly.
                for eachline in getsearchfinisheddoingeventofall:
                    # GamePYname
                    tmpGamePYname = EventSearch().searchGamelistAboutPYname(eachline.GameID)
                    if tmpGamePYname['Status'] != 'Success':
                        GamePYname = 'None'
                    else:
                        GamePYname = tmpGamePYname['FullName']
                    # OidPYname
                    tmpOidPYname = EventSearch().searchOIDdetailinTemplate(eachline.Oid)
                    if tmpOidPYname['Status'] != 'Success':
                        OidPYname = 'None'
                    else:
                        OidPYname = tmpOidPYname['TemplateName']
                    # Data: base64 -> python literal (eval) -> JSON string.
                    newData = base64Data().decode64(eachline.Data)
                    newData = eval(newData)
                    newData = json.dumps(newData)
                    # Timestamp
                    closeTime = eachline.CloseTime
                    newclosetime = TimeBasic().timeControl(eachline.CloseTime, 5)
                    # OccurTime
                    occurTime = eachline.OccurTime
                    newoccurTime = TimeBasic().timeControl(eachline.OccurTime, 5)
                    # DealTime
                    tmpDealTime = TimeBasic().TimeMinus(eachline.OccurTime, eachline.CloseTime)
                    # username
                    username = eachline.Username
                    # close information
                    Detail = eachline.Detail
                    tmpArray.append(dict(occurTime = occurTime, occurPYtime = newoccurTime, DealTime = tmpDealTime, closeTime = closeTime, closePYtime = newclosetime, Username = username, CloseDetail=Detail, Eid = eachline.Eid, GameID = eachline.GameID, GamePYname = GamePYname, Oid = eachline.Oid, OidPYname = OidPYname, Data = newData))
        else:
            return dict(Status='False', msg='MySQL could not found any thing in Eventfinshed.')
    except Exception, e:
        DBSession.rollback()
        return dict(Status='False', msg=str(e))
def init_db(config):
    """Create the SQLite engine from configuration and bind the session factory.

    Reads ``db.path`` from the deployment settings (defaulting to
    ``var/eta.db``), creates all declared tables, and configures the
    global DBSession to use the new engine.
    """
    db_path = config.get_settings().get('db.path', 'var/eta.db')
    engine = create_engine("sqlite:///%s" % db_path)
    Base.metadata.create_all(engine)
    DBSession.configure(bind=engine)
def get_by_key(cls, key):
    """Look up the first row whose ``key`` column matches *key*.

    Mirrors the py2/py3 key normalisation used by the other ``get_by_key``
    in this file: the original unconditional ``str(key or b'', 'utf-8')``
    raises TypeError on Python 2, where ``str`` takes no encoding argument.
    """
    session = DBSession()
    if py_ver == 2:
        the_key = (key or b'').encode('utf-8')
    else:
        the_key = str(key or b'', 'utf-8')
    return session.query(cls).filter(cls.key == the_key).first()
def refresh_key(self):
    """Issue a fresh 32-character random key and persist it.

    Also records the issue time (epoch seconds) in ``key_time``.
    """
    db = DBSession()
    self.key_time = int(time.time())
    self.key = random_str(32)
    db.add(self)
    db.commit()
def count(cls):
    """Count rows with a positive ``state`` (presumably the active ones —
    confirm the state-flag semantics against the model definition)."""
    return DBSession().query(cls).filter(cls.state > 0).count()
def get_by_username(cls, username):
    """Fetch the first row matching *username*, case-insensitively
    (usernames are compared in lowercase)."""
    db = DBSession()
    return db.query(cls).filter(cls.username == username.lower()).first()
def DetailforEachOid(self, name, message):
    """Handle an agent asset report for OID 1.1.

    For ``name == 'insert'``: parse the message's Project / HwInfo /
    EthInfo sections, upsert the per-NIC Ethdetail rows and the EthInfo
    summary, insert-or-refresh the AssetForAgent row keyed by ZCBM, and
    link asset to NICs via AssetidtoEid.

    Returns ``dict(Status='Success', ...)`` or ``dict(Status='False',
    msg=...)``.  For any other *name* the function returns None, matching
    the original behavior.

    NOTE(review): row ids are allocated as ``count() + 1``, which is unsafe
    under concurrent writers or after deletions — preserved as-is.
    """
    if name != 'insert':
        return None

    # --- Step 1: extract fields from the message ------------------------
    # The original only failed when a Status key was present and not
    # 'Success'; a missing Status key is tolerated.
    if 'Status' in message and message['Status'] != 'Success':
        return dict(Status='False', msg='Message check Failed.')
    if type(message['Result']).__name__ == 'str':
        Resultofbody = changeDict().strtodict(message['Result'])
    else:
        Resultofbody = message['Result']

    project = Resultofbody.get('Project', {})
    tmpProjectName = self.changestr(project['Name']) if 'Name' in project else ""
    tmpProjectFunc = self.changestr(project['Func']) if 'Func' in project else ""

    hwinfo = Resultofbody.get('HwInfo', {})

    def _hw(field):
        # Missing hardware fields fall back to "" exactly as the original did.
        return self.changestr(hwinfo[field]) if field in hwinfo else ""

    tmpKernel = _hw('Kernel')
    tmpCpuCoreNum = _hw('CpuCoreNum')
    tmpSerialNum = _hw('SN')
    tmpZCBM = _hw('ZCBM')
    tmpMemory = _hw('Memory')
    tmpCpuType = _hw('CpuType')
    tmpModel = _hw('Model')
    tmpHostName = _hw('HostName')
    tmpOS = _hw('OS')
    tmpManufacturer = _hw('Manufacturer')

    # EthInfo is a list of NIC dicts; index them by interface name.
    tmpEthInfo = {}
    for eth in Resultofbody.get('EthInfo', []):
        ethname = self.changestr(eth['ethname']) if 'ethname' in eth else ''
        tmpEthInfo[ethname] = dict(
            status=self.changestr(eth['status']) if 'status' in eth else '',
            ip=self.changestr(eth['ip']) if 'ip' in eth else '',
            mask=self.changestr(eth['mask']) if 'mask' in eth else '',
        )

    # ZCBM (asset tag) is the primary business key; refuse reports without it.
    if tmpZCBM == '':
        return dict(Status='False', msg='Input Server has not ZCBM.')

    # --- Resolve / create the per-NIC Ethdetail rows --------------------
    # Only eth0/eth1 are tracked, as in the original.
    tmpEthDict = {}
    for ethname in ('eth0', 'eth1'):
        if ethname not in tmpEthInfo:
            continue
        nic = tmpEthInfo[ethname]
        existing = DBSession.query(Ethdetail).filter(
            Ethdetail.ip == nic['ip'], Ethdetail.mask == nic['mask']).first()
        if existing:
            tmpEthDict[ethname] = existing.eid
        else:
            new_eid = DBSession.query(Ethdetail).count() + 1
            DBSession.add(Ethdetail(new_eid, nic['status'], nic['ip'],
                                    nic['mask'], ethname))
            tmpEthDict[ethname] = new_eid

    # --- Step 2: does this asset already exist? -------------------------
    getSearchofHardware = DBSession.query(AssetForAgent).filter(
        AssetForAgent.ZCBM == tmpZCBM).first()
    if getSearchofHardware:
        try:
            if int(getSearchofHardware.Timestamp) >= message['SendTime']:
                # Stored record is at least as new as this report.
                return dict(Status='Success', msg='Input Hostname Need not fresh.')
            # Newer report: replace the old asset row entirely.
            DBSession.delete(getSearchofHardware)
            DBSession.commit()
            tmpeth0 = tmpEthDict.get('eth0', "")
            tmpeth1 = tmpEthDict.get('eth1', "")
            getCountofeth = DBSession.query(EthInfo).count() + 1
            DBSession.add(EthInfo(getCountofeth, tmpeth0, tmpeth1, 'None', 'None'))
            DBSession.add(AssetForAgent(
                tmpProjectName, tmpProjectFunc, tmpKernel, tmpCpuCoreNum,
                tmpSerialNum, tmpZCBM, tmpMemory, tmpCpuType, tmpModel,
                tmpHostName, tmpOS, tmpManufacturer, message['SendTime']))
            DBSession.commit()
            refreshed = DBSession.query(AssetForAgent).filter_by(ZCBM=tmpZCBM).first()
            if not refreshed:
                DBSession.rollback()
                return dict(Status='False', msg='flush assetforagent Error.')
            getCountofrelation = DBSession.query(AssetidtoEid).count() + 1
            DBSession.add(AssetidtoEid(getCountofrelation, refreshed.Hid, getCountofeth))
            DBSession.commit()
            return dict(Status='Success')
        except Exception as e:
            # FIX: the original called commit() here, persisting a half-done
            # update on failure; an error path must roll back.
            DBSession.rollback()
            return dict(Status='False', msg=str(e))
    else:
        try:
            # First report for this ZCBM: insert asset, NIC summary, link row.
            DBSession.add(AssetForAgent(
                tmpProjectName, tmpProjectFunc, tmpKernel, tmpCpuCoreNum,
                tmpSerialNum, tmpZCBM, tmpMemory, tmpCpuType, tmpModel,
                tmpHostName, tmpOS, tmpManufacturer, message['SendTime']))
            DBSession.commit()
            inserted = DBSession.query(AssetForAgent).filter_by(ZCBM=tmpZCBM).first()
            if not inserted:
                DBSession.rollback()
                return dict(Status='False', msg='insert into assetforagent error.')
            # FIX: tmpeth0/tmpeth1 were unbound in the original when a NIC
            # was missing, raising NameError; default to "" like the
            # update branch does.
            tmpeth0 = tmpEthDict.get('eth0', "")
            tmpeth1 = tmpEthDict.get('eth1', "")
            getCountofeth = DBSession.query(EthInfo).count() + 1
            DBSession.add(EthInfo(getCountofeth, tmpeth0, tmpeth1, 'None', 'None'))
            getCountofrelation = DBSession.query(AssetidtoEid).count() + 1
            DBSession.add(AssetidtoEid(getCountofrelation, inserted.Hid, getCountofeth))
        except Exception as e:
            DBSession.rollback()
            return dict(Status='False', msg=str(e))
        DBSession.commit()
        return dict(Status='Success')
def exist(cls, username):
    """Return True when a row with the (case-folded) username exists."""
    db = DBSession()
    matches = db.query(cls).filter(cls.username == username.lower()).count()
    return matches > 0
def searchdoingeventofall(self):
    """Collect up to 10 in-progress alarm events, newest first.

    Resolves game/template names, decodes the base64 payload and event
    name, formats the operation/occur timestamps, and looks up the
    assigned operator for each row, appending display dicts to
    ``tmpArray``.

    Returns a ``dict(Status='False', msg=...)`` when the table is empty or
    a database error occurs.

    NOTE(review): as in the original, no success return of the assembled
    list is present (likely lost); behavior is preserved pending
    confirmation of the expected success payload.
    """
    tmpArray = []
    try:
        rows = DBSession.query(EventAlarmDoing).order_by(
            desc(EventAlarmDoing.Eid)).all()
        if not rows:
            return dict(Status='False', msg='MySQL could not found any thing in Eventalarmdoing.')
        # One loop over a slice replaces the original's duplicated >10 branch.
        for row in rows[:10]:
            # Resolve the human-readable game name.
            tmpGamePYname = EventSearch().searchGamelistAboutPYname(row.GameID)
            if tmpGamePYname['Status'] != 'Success':
                GamePYname = 'None'
            else:
                GamePYname = tmpGamePYname['FullName']
            # Resolve the template (OID) name.
            tmpOidPYname = EventSearch().searchOIDdetailinTemplate(row.Oid)
            if tmpOidPYname['Status'] != 'Success':
                OidPYname = 'None'
            else:
                OidPYname = tmpOidPYname['TemplateName']
            # Payload is a base64-encoded repr of a dict; eval() is kept for
            # compatibility but is only safe on trusted, self-produced data.
            newData = base64Data().decode64(row.Data)
            newData = eval(newData)
            newData = json.dumps(newData)
            newEventName = base64Data().decode64(row.EventName)
            # Raw + formatted operation/occur timestamps.
            tmpOpTime = row.Timestamp
            tmpOpPYTime = TimeBasic().timeControl(tmpOpTime, 5)
            tmpOccurTime = row.OccurTime
            tmpOccurPYTime = TimeBasic().timeControl(tmpOccurTime, 5)
            # The operator the event was dispatched to; masked when unknown.
            tmpUser = EventTransportExpand().searcheventindesigntoother(row.Eid)
            if tmpUser['Status'] == 'Success':
                tmpOpuser = tmpUser['ToUser']
            else:
                tmpOpuser = '******'
            tmpArray.append(dict(
                OperationTime=tmpOpTime,
                OperationPYTime=tmpOpPYTime,
                OccurTime=tmpOccurTime,
                OccurPYTime=tmpOccurPYTime,
                opUser=tmpOpuser,
                Eid=row.Eid,
                GameID=row.GameID,
                GamePYname=GamePYname,
                Oid=row.Oid,
                OidPYname=OidPYname,
                eventGrade=row.eventGrade,
                Data=newData,
                EventName=newEventName,
            ))
    except Exception as e:
        DBSession.rollback()
        return dict(Status='False', msg=str(e))