def execute_comments_for_user_rss(self, request, **kwargs):
    # Build an RSS feed of comments that OTHER people left on content items
    # the user (kwargs["id"]) has commented on.
    user = db.query(User).get(kwargs["id"])
    # Subquery: ids of content items this user has commented on (via any of
    # the user's identities).
    content_item_ids_user_commented = db.query(distinct(Comment.content_item_id)).filter(Comment.identity_id.in_([identity.id for identity in user.identities]))
    rss = PyRSS2Gen.RSS2(
        title = config.build_title(u"Новые комментарии для %s" % (all_social_service[user.default_identity.service].get_user_name(user.default_identity.service_data))),
        link = config.url + request.path,
        description = "",
        lastBuildDate = datetime.now(),
        items = [
            PyRSS2Gen.RSSItem(
                title = u"%(username)s - %(title)s" % {
                    "username" : all_social_service[comment.identity.user.default_identity.service].get_user_name(comment.identity.user.default_identity.service_data),
                    "title" : self._item_dict(comment.content_item)["title"],
                },
                # "#comment-N" anchor: the comment's 1-based position, found by
                # counting earlier comments on the same content item.
                link = self._item_dict(comment.content_item)["url"] + "#comment-%d" % (
                    db.query(func.count(Comment)).filter(Comment.content_item == comment.content_item, Comment.created_at < comment.created_at).scalar() + 1
                ),
                description = self._process_comment_text(comment.text),
                guid = PyRSS2Gen.Guid(self._item_dict(comment.content_item)["url"] + "#comment-%d" % (
                    db.query(func.count(Comment)).filter(Comment.content_item == comment.content_item, Comment.created_at < comment.created_at).scalar() + 1
                )),
                pubDate = comment.created_at
            )
            # Latest 50 comments on those items, excluding the user's own.
            for comment in db.query(Comment).filter(
                Comment.content_item_id.in_(content_item_ids_user_commented),
                ~Comment.identity_id.in_([identity.id for identity in user.identities])
            ).order_by(Comment.created_at.desc())[:50]
        ]
    )
    rss_string = StringIO.StringIO()
    rss.write_xml(rss_string, "utf-8")
    return Response(rss_string.getvalue(), mimetype="application/rss+xml")
async def profile(username, current_user: str = Depends(get_current_active_user)):
    """Show the profile for a particular user: their shows and attachment count.

    Returns an error payload when no such username exists.
    """
    try:
        user = db.query(User).filter(User.username == username).one()
        num_attachments = db.query(Attachment).filter(Attachment.creator_id == user.id).count()
    except sqlalchemy.orm.exc.NoResultFound:
        return {"code": "error", "message": "No result found"}
    # (.one() never returns None, so the original `if not user` check was dead
    # code; leftover debug prints removed as well.)
    shows = db.query(Show).filter(Show.creator_id == user.id).order_by(Show.date).all()
    return {
        "shows": shows,
        "num_shows": len(shows),
        "num_attachments": num_attachments,
        "user": user,
    }
def middleware(request):
    # Attach the authenticated user (if any) and an anonymous-visitor record
    # to the request, keeping activity/visit timestamps current.
    request.user = None
    if "u" in request.cookies:
        # "u" cookie carries the login token.
        request.user = db.query(User).filter(User.token == request.cookies["u"]).first()
        if request.user:
            # If the user was idle longer than the configured threshold, the
            # previous activity timestamp becomes their "last visit".
            if datetime.now() > request.user.last_activity + config.user_inactivity_till_leave:
                request.user.last_visit = request.user.last_activity
            request.user.last_activity = datetime.now()
            db.flush()
    request.anonymous = None
    if request.remote_addr != "127.0.0.1":
        if request.user is None:
            if "a" in request.cookies:
                request.anonymous = db.query(Anonymous).filter(Anonymous.token == request.cookies["a"]).first()
            if request.anonymous is None:
                # NOTE(review): the token comes from the non-cryptographic
                # `random` module (and Python 2's string.letters / xrange) —
                # consider `secrets` for anything security-sensitive.
                request.anonymous = Anonymous()
                request.anonymous.token = "".join(random.choice(string.letters + string.digits + string.punctuation) for i in xrange(32))
                db.add(request.anonymous)
                db.flush()
            if request.remote_addr not in request.anonymous.ip_addresses:
                # Rebuild the dict so the ORM notices the mutation —
                # presumably ip_addresses is a serialized column; TODO confirm.
                request.anonymous.ip_addresses = {a: True for a in request.anonymous.ip_addresses.keys()}
                request.anonymous.ip_addresses[request.remote_addr] = True
                db.flush()
    return request
def delete(id: int):
    """Delete a single show by id."""
    matching_shows = db.query(Show).filter(Show.id == id)
    matching_shows.delete()
    db.commit()
    return {"delete": True}
def GET(self):
    """Render the tag-edit page (requires auth); loads all tags."""
    auth()
    db.query('select * from tag')
    res = db.fetchAllRows()
    if res is None:  # normalize "no rows" to an empty list
        res = []
    # NOTE(review): `res` is never handed to the template — presumably
    # edit.html should receive the tag rows; confirm and wire it through.
    return render_template("edit.html")
def block(request, feed, item):
    """Return the neighbouring content items (next/prev by creation time) of
    `item` within `feed`, or None when it has no neighbours."""
    # Hoisted: the feed's type list is invariant across both queries.
    feed_types = get_content_feeds()[feed]["types"]
    next = db.query(ContentItem).filter(ContentItem.type.in_(feed_types), ContentItem.permissions_for(request.user), ContentItem.created_at > item["created_at"]).order_by(ContentItem.created_at).first()
    prev = db.query(ContentItem).filter(ContentItem.type.in_(feed_types), ContentItem.permissions_for(request.user), ContentItem.created_at < item["created_at"]).order_by(ContentItem.created_at.desc()).first()
    if next or prev:
        return {
            "next" : process_content_item(next) if next else None,
            "prev" : process_content_item(prev) if prev else None,
        }
    else:
        return None
def execute_user_comments(self, request, **kwargs):
    """Render the page listing every comment left by one user."""
    user = db.query(User).get(kwargs["id"])
    identity_ids = [identity.id for identity in user.identities]
    comments = (db.query(Comment)
                .filter(Comment.identity_id.in_(identity_ids))
                .order_by(Comment.created_at.desc())
                .all())
    context = {
        "breadcrumbs" : [u"Комментарии пользователя %s" % all_social_service[user.default_identity.service].get_user_name(user.default_identity.service_data)],
        "target_user" : user,
        "comments" : comments,
    }
    return self.render_to_response(request, "content/user_comments.html", **context)
async def re_notify_to_user(send_user, recive_user, msg, id):
    """Retry delivery of stored message `id` to `recive_user`.

    Marks the row delivered (status=1) on success, failed (status=2) when the
    recipient is offline or sending raises.
    """
    try:
        user = users[str(recive_user)]  # KeyError when the recipient is offline
        message = msg_event(send_user, msg)
        await user.send(message)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt etc. propagate.
        db.query("""update msg set status = 2 where msg_id = $id""", vars=locals())
    else:
        db.query("""update msg set status = 1 where msg_id = $id""", vars=locals())
def analyze(self, callback):
    """Run each pattern's aggregate query and report every matching visitor
    IP via callback(ip, action, description)."""
    for pattern in self.list_patterns:
        # Table/field names are trusted module constants; the pattern itself
        # is passed as a bound parameter.
        db.query("SELECT ip, count(*) FROM %s JOIN %s WHERE %s.id = %s.visitor_id AND REGEXP(?, HEX(%s)) == 1 GROUP BY %s.ip" %(tbl_visitor, tbl_request, tbl_visitor, tbl_request, pattern.field, tbl_visitor), [pattern.pattern])
        # The original wrapped this in a no-op `try/except: raise` — removed.
        while True:
            data = db.fetch_one()
            if not data:
                break
            callback(data[0], self.action, pattern.description)
def analyze(self, callback):
    """Report every distinct visitor IP whose request field matches a
    Shellshock pattern via callback(ip, action, description)."""
    for pattern in self.list_patterns:
        # Table/field names are trusted module constants; the pattern itself
        # is passed as a bound parameter.
        db.query("SELECT DISTINCT ip FROM %s JOIN %s WHERE %s.id = %s.visitor_id AND REGEXP(?, HEX(%s)) == 1" %(tbl_visitor, tbl_request, tbl_visitor, tbl_request, pattern.field), [pattern.pattern])
        # The original wrapped this in a no-op `try/except: raise` — removed.
        while True:
            data = db.fetch_one()
            if not data:
                break
            callback(data[0], self.action, "Shell_shock attack detected")
def get_foreign_key_value(tab_name, col_name, col_val, id='id'):
    """Return the id of the row in `tab_name` where col_name == col_val,
    inserting the row first when it does not exist yet.

    tab_name / col_name / id are trusted identifiers (interpolated into the
    SQL); col_val is bound as a $-parameter.
    """
    sql = 'select ' + id + ' from ' + tab_name + ' where ' + col_name + '=$' + col_name
    rows = db.query(sql, vars={col_name: col_val}).list()
    if not rows:
        # Row missing: insert it, then read back the freshly assigned id.
        sql = 'insert into ' + tab_name + ' (' + col_name + ') values ($' + col_name + ')'
        db.query(sql, vars={col_name: col_val})
        # NOTE(review): max(id) is racy under concurrent inserts — prefer the
        # driver's last-insert-id facility if available.
        rs = db.query('select max(' + id + ') id from ' + tab_name)
        return rs[0].id
    return _int(rows[0][id])
def POST(self):
    """Handle a login form: verify email/password and start a session."""
    session = web.ctx.session
    data = web.input(email='', passwd='')
    if data.email != '' and data.passwd != '':
        # SECURITY: the original interpolated the raw email into the SQL
        # (injection). Escape backslashes and quotes before interpolation;
        # switch to a truly parameterized query if this db wrapper supports it.
        email = data.email.replace('\\', '\\\\').replace("'", "''")
        db.query('select userpasswd from user where username=\'%s\'' % email)
        res = db.fetchOneRow()
        if res is not None and res[0] == data.passwd:
            # NOTE(review): passwords are compared in plaintext — they should
            # be hashed (see check_login elsewhere in this codebase).
            session.loggedin = True
            session.username = data.email
            return web.seeother("/edit")
        else:
            return web.redirect('/login')
def block(request, limit=10):
    # Sidebar block: the `limit` most recently commented content items, each
    # with its newest comment, its unread-comment count, and the 1-based
    # index of the first unread comment for the current user.
    return {
        "last_comments" : [
            {
                # Newest comment on this item (content_item eagerly loaded).
                "comment" : db.query(Comment).options(joinedload("content_item")).filter(Comment.content_item_id == content_item_id).order_by(Comment.created_at.desc()).first(),
                "content_item" : process_content_item(db.query(ContentItem).get(content_item_id)),
                # Comments newer than the user's last visit (0 for guests).
                "count_new" : db.query(func.count(Comment)).filter(Comment.content_item_id == content_item_id, Comment.created_at > request.user.last_visit).scalar() if request.user else 0,
                # Anchor index of the first unseen comment ("#comment-N").
                "new_index" : (db.query(func.count(Comment)).filter(Comment.content_item_id == content_item_id, Comment.created_at <= request.user.last_visit).scalar() if request.user else 0) + 1,
            }
            # Item ids ordered by the time of their most recent comment.
            for content_item_id, in db.query(Comment.content_item_id).group_by(Comment.content_item_id).order_by(-func.max(Comment.created_at))[:limit]
        ],
    }
def get_alerts():
    """Return due alerts for the logged-in waiter and push their next firing
    time 10 minutes into the future."""
    session = web.config._session
    params = dict(wid=session.userid, ts="datetime('now')")
    # NOTE(review): $ts binds the literal *string* "datetime('now')" — the
    # update therefore compares stime against that string, not the current
    # time, while the select inlines datetime("now") directly. Confirm which
    # behaviour is intended.
    alerts = db.select('Alerts JOIN Orders', vars=params, where='waiter=$wid and stime<= datetime("now")').list()
    update_query = """ update Alerts set stime=datetime('now', '+10 minutes') where exists (select Alerts.id as aid from Alerts JOIN Orders where waiter=$wid and stime<= $ts) """
    db.query(update_query, vars=params)
    return alerts
def bank_card():
    """Summarise VTB24 card purchases between `start` and `end`.

    Returns an HTML snippet listing the top-5 shops by spend (in RUR),
    or None when no purchases were found.
    """
    # Hoisted: compile the notification-parsing regex once, not per item.
    transaction_re = re.compile(u"произведена транзакция по (.+) на сумму ([0-9.]+) (.+?)\..+Детали платежа: (.+)\. Код авторизации", re.DOTALL)
    shops = {}
    for content_item in db.query(ContentItem).filter(ContentItem.type == "vtb24_transaction", ContentItem.created_at >= start, ContentItem.created_at <= end):
        m = transaction_re.search(content_item.data["notification"])
        if m is None:
            # Robustness: the original crashed on an unparseable notification.
            logger.warning("Unparseable vtb24 notification in content item %r" % content_item.id)
            continue
        if m.group(1) == u"оплате":
            amount = float(m.group(2))
            if m.group(3) == "USD":
                amount *= 30  # hard-coded USD->RUR conversion rate
            elif m.group(3) == "RUR":
                pass
            else:
                logger.warning("Unknown currency: %s" % m.group(3))
            shop_name = m.group(4)
            if shop_name not in shops:
                shops[shop_name] = 0
            shops[shop_name] += amount
    if shops:
        text = u"Покупал товары. Топ магазинов:\n"
        text += u"<ul>\n"
        for shop_name, amount in sorted(shops.items(), key=lambda kv: -kv[1])[:5]:
            text += u"<li>%s (%s)</li>\n" % (shop_name, pytils.numeral.get_plural(int(amount), (u"рубль", u"рубля", u"рублей")))
        text += u"</ul>\n"
        return text
    else:
        return None
def parse(resort_id):
    """Fetch the resort's lift-status feed and return its 'data' HTML parsed
    with BeautifulSoup."""
    rows = db.query('SELECT url FROM resorts WHERE id=' + str(resort_id))
    page = requests.get(rows[0]['url'])
    payload = json.loads(page.content)
    return BeautifulSoup(payload['data'], 'html.parser')
def fetchMaxId():
    """Return the highest account id, or 0 when the table is empty."""
    result = db.query("SELECT id FROM account ORDER BY id DESC LIMIT 1")
    row = result.fetchone()
    return row[0] if row else 0
def parse(resort_id):
    """Fetch the resort's conditions page and return its full-report element."""
    rows = db.query('SELECT url FROM resorts WHERE id=' + str(resort_id))
    page = requests.get(rows[0]['url'])
    document = BeautifulSoup(page.content, 'html.parser')
    return document.find(id='conditions_report_full')
def get_all_enters(city_id, limit_start):
    """List enterprises (newest first), optionally filtered by city, paged 10
    at a time from `limit_start` (-1 means the first page)."""
    # SECURITY: coerce to int before interpolating into the SQL string.
    city_id = int(city_id)
    limit_start = int(limit_start)
    sub_sql_where = '' if city_id == 0 else 'where e.city = %s ' % city_id
    sub_sql_limit = '' if limit_start == -1 else '%s, ' % limit_start
    sql = 'select ' \
          'e.id, e.company as companyId, e.name, e.address, e.time, e.nick ' \
          'from company_info as e %s order by id desc limit %s 10;' % (sub_sql_where, sub_sql_limit)
    return _db.query(sql)
def get_last_page_id():
    """Return the most recently recorded badge page_id, or 0 if none."""
    cursor = db.query("SELECT page_id FROM so_badges ORDER BY page_id DESC LIMIT 1")
    row = cursor.fetchone()
    return row[0] if row else 0
def read(id: int):
    """Return a single show by id together with its creator and attachments.

    Raises sqlalchemy NoResultFound when the show or its creator is missing.
    """
    # (leftover debug print removed)
    show = db.query(Show).filter(Show.id == id).one()
    user = db.query(User).filter(User.id == show.creator_id).one()
    attachments = db.query(Attachment).join(
        Show, Attachment.shows).filter(Show.id == id).all()
    return {
        "user_id": user.id,
        "username": user.username,
        "show": show,
        "attachments": attachments,
    }
def maxid():
    """Return the largest node id, or 0 when the table is empty."""
    # LIMIT 1: only the top row is needed; avoids materializing the full sort.
    sql = "SELECT id from node ORDER BY id DESC LIMIT 1"
    r = db.query(sql).fetchone()
    return r[0] if r else 0
def execute_admin(self, request):
    # Admin listing of content items: optional type filter, 100-per-page
    # paging, and an edit link for types that provide an editor.
    filter_types = [("", u"все")] + sorted([(k, v["type"].item_cases[0]) for k, v in self.types.items()], key=operator.itemgetter(1))
    # "Create new" menu entries — only for types that have an editor.
    create_types = sorted([("/admin/content/new/%s/" % k, v["type"].item_cases[3]) for k, v in self.types.items() if v["type"].get_editor() is not None], key=operator.itemgetter(1))
    q = db.query(ContentItem).filter(ContentItem.type.in_(self.types.keys())).order_by(ContentItem.created_at.desc())
    if request.args.get("type", ""):
        filter_type = request.args["type"]
        q = q.filter(ContentItem.type == filter_type)
    else:
        filter_type = ""
    total_pages = int(ceil(q.count() / 100.0))
    page = request.args.get("page", 1, type=int)
    # One (display name, edit URL or None, rendered item dict) per row.
    content_items = [(
        self.types[content_item.type]["type"].item_cases[0],
        "/admin/content/edit/%d/" % content_item.id if self.types[content_item.type]["type"].get_editor() else None,
        self._item_dict(content_item),
    ) for content_item in q[(page - 1) * 100 : page * 100]]
    return self.render_to_response(request, "content/admin.html", **{
        "breadcrumbs" : [u"Управление контентом"],
        "filter_types" : filter_types,
        "create_types" : create_types,
        "filter_type" : filter_type,
        "total_pages" : total_pages,
        "page" : page,
        "content_items" : content_items,
    })
async def users():
    """Browse all users on the site."""
    return db.query(User).all()
def main():
    """
    For each table in TABLES: load its rows (optionally via a custom query),
    convert field names, render the configured template, and write the pages
    to the output folder (chunked/paginated when the schema requests it).
    """
    for name, schema in TABLES.iteritems():
        if not schema:
            continue
        template = env.get_template(schema['template'])
        fields = schema['fields']
        table = db.load_table(name)  # fail if the name is wrong
        if "query" in schema:
            query = db.query(schema['query'])
        else:
            query = table.all()
        if "version" in table.columns:
            # Versioned tables: keep only the latest revision of each row.
            query = get_most_recent(query)
        posts = [convert(article, fields) for article in query]
        if "chunks" in schema:
            # Paginate: one rendered page per chunk of posts.
            for page, chunk in enumerate(grouper(posts, schema['chunks']), 1):
                render(template, chunk, name=name, page=page)
        else:
            render(template, posts, name=name)
def execute_admin_edit(self, request, id):
    # Admin edit form for one content item: GET renders the type-specific
    # editor, POST persists metadata, tags, and the editor's data payload.
    c = db.query(ContentItem).get(id)
    if c is None:
        raise NotFound()
    editor = self.types[c.type]["type"].get_editor()
    if request.method == "POST":
        c.type_key = request.form["type_key"]
        # Optional start date; blank field means "not started".
        c.started_at = dateutil.parser.parse(request.form["started_at"]) if request.form.get("started_at", "").strip() else None
        c.created_at = dateutil.parser.parse(request.form["created_at"])
        c.permissions = ContentItem.permissions_PUBLIC if "public" in request.form else ContentItem.permissions_NOT_READY
        # Rebuild the tag list from the comma-separated field; a tag may be
        # written as "url:title", otherwise url == title.
        c.tags = []
        for tag in request.form["tags"].split(","):
            tag = tag.strip()
            db_tag = db.query(Tag).filter(Tag.title == tag).first()
            if db_tag is None:
                db_tag = Tag()
                db_tag.url = tag.split(":", 2)[0] if ":" in tag else tag
                db_tag.title = tag.split(":", 2)[1] if ":" in tag else tag
                db.add(db_tag)
                db.flush()
            c.tags.append(db_tag)
        data = editor.form_to_db(request, c.data)
        # Null out then reassign `data` — presumably forces the ORM to detect
        # the change on a mutable column; TODO confirm.
        c.data = None
        db.flush()
        c.data = data
        db.flush()
        # Drop the cached formatter output so the new data is re-rendered.
        cache.get_cache("content_item_%d" % c.id).remove_value(key="formatter_output")
        return redirect(request.path)
    else:
        form = editor.db_to_form(c.data)
        # Try the type-specific template first, then the base type's.
        return self.render_to_response(request, [
            "content/type/%s/edit.html" % (c.type,),
            "content/type/%s/edit.html" % (self._base_type(c.type),),
        ], **{
            "breadcrumbs" : [u"Редактирование %s" % self.types[c.type]["type"].item_cases[1]],
            "form" : form,
            "content_item" : c,
            "tags" : u",".join([t.title for t in c.tags]),
        })
def get_user_by_name(username):
    """Fetch the user row matching `username`; {} when no such user exists."""
    rows = db.query(
        r'select * from users where username = $username ',
        vars={'username': username})
    if not rows:
        return {}
    return rows[0]
def get_job_applicants(job_id, start_id):
    """List a job's applicants (newest first), 10 per page starting after
    `start_id` (-1 means the first page)."""
    # SECURITY: coerce to int before interpolating into the SQL string.
    job_id = int(job_id)
    start_id = int(start_id)
    sql_limit = '' if start_id == -1 else '%s,' % start_id
    sql = 'select ' \
          'a.id, a.user as userId, r.name as userName, a.time, a.status ' \
          'from apply_job as a ' \
          'join resume as r on a.job = %s and a.user = r.user ' \
          'order by a.id desc limit %s 10 ' % (job_id, sql_limit)
    return _db.query(sql)
def loadFromDb(self, id):
    """Populate this node's fields from the DB row with the given id.

    Raises InvalidNode when no such row exists.
    """
    row = db.query("SELECT id, name, model, userid FROM node WHERE id=?", (id, )).fetchone()
    if row is None:
        raise InvalidNode
    self.id, self.name, self.model, self.userid = row
def execute_edit_comment(self, request, **kwargs):
    # Edit (or delete, when the text is emptied) one of the current user's
    # own comments; editing is refused once somebody has commented after it.
    comment = db.query(Comment).get(kwargs["id"])
    # NOTE(review): for an unknown id, `comment` is None and the access check
    # below raises AttributeError rather than a clean 404 — confirm intended.
    if request.user and request.user.id == comment.identity.user.id:
        if db.query(Comment).filter(Comment.content_item_id == comment.content_item_id, Comment.created_at > comment.created_at).count() > 0:
            # A later comment exists: refuse the edit.
            response = {"error" : u"К сожалению, пока вы редактировали комментарий, его успели увидеть другие пользователи. Обновите страницу, чтобы увидеть их реакцию на ваш позор."}
        else:
            if (request.form["text"].strip() == ""):
                # Emptying the text deletes the comment entirely.
                db.delete(comment)
                response = {"deleted" : True}
            else:
                comment.text = request.form["text"]
                response = {"text" : self._process_comment_text(comment.text)}
            db.flush()
        return Response(simplejson.dumps(response), mimetype="application/json")
    raise Forbidden()
def get_user_collect_enters(user_id, start_id):
    """List enterprises collected (bookmarked) by a user, newest first,
    10 per page from `start_id` (-1 means the first page)."""
    # SECURITY: coerce to int before interpolating into the SQL string.
    user_id = int(user_id)
    start_id = int(start_id)
    sql_limit = '' if start_id == -1 else '%s,' % start_id
    sql = 'select ' \
          'e.id, e.company as companyId, e.name, e.address, e.time, e.nick ' \
          'from company_info as e ' \
          'join collect_enterprise as a on a.company = e.company and a.user = %s ' \
          'order by a.id desc limit %s 10' % (user_id, sql_limit)
    return _db.query(sql)
def get_all_jobs(company_id, start_id):
    """List a company's jobs, newest first, 10 per page from `start_id`
    (values < 1 mean the first page)."""
    # SECURITY: coerce to int before interpolating into the SQL string.
    company_id = int(company_id)
    start_id = int(start_id)
    sql_limit = '' if start_id < 1 else '%s, ' % start_id
    sql = 'select ' \
          'j.id, j.name, j.address, j.time, j.apply, j.collect ' \
          'from job as j ' \
          'where company = %s ' \
          'order by id desc limit %s 10;' % (company_id, sql_limit)
    return _db.query(sql)
def search_enters(city_id, enter_name, limit_start):
    """Search enterprises by name or nick (substring match), optionally
    filtered by city, 10 per page from `limit_start` (-1 = first page)."""
    # SECURITY: the original interpolated enter_name raw into the LIKE clause
    # (SQL injection). Ints are coerced; the search term is bound as $kw —
    # assumes the web.py-style `vars=` binding used elsewhere in this code.
    city_id = int(city_id)
    limit_start = int(limit_start)
    sub_sql_where = '' if city_id == 0 else 'and e.city = %s ' % city_id
    sub_sql_limit = '' if limit_start == -1 else '%s, ' % limit_start
    sql = 'select ' \
          'e.id, e.company as companyId, e.name, e.address, e.time, e.nick ' \
          'from company_info as e where (e.name like $kw or e.nick like $kw) %s order by id desc limit %s 10;' % (
              sub_sql_where, sub_sql_limit)
    return _db.query(sql, vars={'kw': '%' + enter_name + '%'})
def analyze(self, callback):
    """Report LFI attempts: either a '../' path traversal (extracting the
    targeted file from the request path) or a php filter wrapper."""
    for pattern in self.list_patterns:
        db.query("SELECT ip, path FROM %s JOIN %s WHERE %s.id = %s.visitor_id AND REGEXP(?, HEX(%s)) == 1" %(tbl_visitor, tbl_request, tbl_visitor, tbl_request, pattern.field), [pattern.pattern])
        # The original wrapped this in a no-op `try/except: raise` — removed.
        while True:
            data = db.fetch_one()
            if not data:
                break
            if pattern.pattern == "\.\./":
                # Pull the traversal target out of the matched path.
                m = re.search("[^&=]*\.\./[^&]*", data[1])
                # Robustness: the DB-side REGEXP matched, but the Python-side
                # search can still miss (the original crashed on m.group).
                target = m.group(0) if m else data[1]
                description = "LFI attack detected: Trying to read '%s'" % target
            else:
                description = "LFI attack detected: Using php filter"
            callback(data[0], self.action, description)
def get_user_collect_jobs(user_id, start_id):
    """List jobs collected (bookmarked) by a user with company info, newest
    first, 10 per page from `start_id` (-1 means the first page)."""
    # SECURITY: coerce to int before interpolating into the SQL string.
    user_id = int(user_id)
    start_id = int(start_id)
    sql_where = '' if start_id == -1 else '%s,' % start_id
    sql = 'select ' \
          'j.id, j.name, j.address, j.time, j.company as companyId , c.name as company ,c.nick ' \
          'from job as j ' \
          'join collect_job as a on a.job = j.id and a.user = %s ' \
          'join company_info as c on j.company = c.company order by a.id desc limit %s 10 ' % (user_id, sql_where)
    return _db.query(sql)
def get_enter_jobs(enterprise_id, start_id):
    """List an enterprise's jobs with company info, newest first, 10 at a
    time; `start_id` < 1 means the first page, otherwise only jobs with id
    below it (keyset pagination)."""
    # SECURITY: coerce to int before interpolating into the SQL string.
    enterprise_id = int(enterprise_id)
    start_id = int(start_id)
    sql_where_id = '' if start_id < 1 else 'and j.id < %s' % start_id
    sql = 'select ' \
          'j.id, j.name, j.address, j.time, j.company as companyId , c.name as company ,c.nick ' \
          'from job as j ' \
          'join company_info as c ' \
          'on j.company = c.company and c.company = %s %s order by id desc limit 10;' % (enterprise_id, sql_where_id)
    return _db.query(sql)
def analyze(self, callback):
    """
    Default analyze method for ATTACK modules.

    Runs each pattern's aggregate query and reports every matching visitor
    IP via callback(ip, action, description). Modules override this when
    they need different reporting.
    """
    for pattern in self.list_patterns:
        # Table/field names are trusted module constants; the pattern itself
        # is passed as a bound parameter.
        db.query("SELECT ip, count(*) FROM %s JOIN %s WHERE %s.id = %s.visitor_id AND REGEXP(?, HEX(%s)) == 1 GROUP BY %s.ip" %(tbl_visitor, tbl_request, tbl_visitor, tbl_request, pattern.field, tbl_visitor), [pattern.pattern])
        # The original wrapped this in a no-op `try/except: raise` — removed.
        while True:
            data = db.fetch_one()
            if not data:
                break
            callback(data[0], self.action, pattern.description)
def parse(resort_id):
    """Fetch the resort page and return its '<sub_parser>-lifts' element."""
    rows = db.query('SELECT url, sub_parser FROM resorts WHERE id=' + str(resort_id))
    page = requests.get(rows[0]['url'])
    document = BeautifulSoup(page.content, 'html.parser')
    element_id = rows[0]['sub_parser'] + '-lifts'
    return document.find(id=element_id)
def cs_offline_friends(p, req):
    """Fetch character rows (id, lead_id, nick_name, level, might) for the
    requested offline friend cids; returns [] when none are found."""
    cid, offline_cids = req
    fields = ['id', 'lead_id', 'nick_name', 'level', 'might']
    # Robustness: an empty cid list produced invalid SQL ("WHERE 1 AND");
    # SECURITY: each cid is coerced to int before interpolation.
    if not offline_cids:
        defer.returnValue([])
    id_list = ','.join(str(int(_cid)) for _cid in offline_cids)
    _sql = 'SELECT {0} FROM tb_character WHERE id IN ({1});'.format(','.join(fields), id_list)
    _dataset = yield db.query(_sql)
    defer.returnValue(_dataset if _dataset else [])
def get_nearby_enters(city_id, lat, lng, limit_start):
    """List enterprises ordered by great-circle distance from (lat, lng),
    optionally filtered by city, 10 per page from `limit_start`."""
    # SECURITY: coerce all interpolated values to numbers.
    city_id = int(city_id)
    lat = float(lat)
    lng = float(lng)
    limit_start = int(limit_start)
    sub_sql_where = '' if city_id == 0 else 'where e.city = %s ' % city_id
    sub_sql_limit = '' if limit_start == -1 else '%s, ' % limit_start
    # 6371.004 = earth radius (km); 565.486678 appears to convert the stored
    # coordinate units to radians — presumably scaled degrees; TODO confirm.
    sql = 'select ' \
          'e.id, e.company as companyId, e.name, e.address, e.time, e.nick, ' \
          '(6371.004*ACOS(SIN(%s/565.486678)*SIN(e.lat/565.486678)+COS(%s/565.486678)*COS(e.lat/565.486678)*COS((%s-e.lng)/565.486678))) as distance ' \
          'from company_info as e %s ' \
          'order by distance limit %s 10; ' % (lat, lat, lng, sub_sql_where, sub_sql_limit)
    return _db.query(sql)
async def notify_to_user(send_user, recive_user, msg):
    """Deliver a chat message to `recive_user` if online (stored status=1);
    otherwise tell the sender and store it undelivered (status=0)."""
    now = int(time.time())
    try:
        user = users[str(recive_user)]  # KeyError when the recipient is offline
        message = msg_event(send_user, msg)
        await user.send(message)
        db.query(
            """insert into msg (send_user_id, recv_user_id, msg, create_time, status) values ($send_user, $recive_user,$msg, $now, 1)""",
            vars=locals())
    except Exception:
        # Narrowed from a bare `except:`. Recipient offline (or send failed):
        # notify the sender and queue the message for redelivery.
        user = users[send_user]
        message = msg_event("系统提示", "该用户已离线,消息会在他登录后发送")
        # Direct await replaces asyncio.wait([coroutine]) — passing bare
        # coroutines to asyncio.wait is deprecated (removed in 3.11).
        await user.send(message)
        db.query(
            """insert into msg (send_user_id, recv_user_id, msg, create_time, status, status_remark) values ($send_user,$recive_user,$msg, $now, 0, "该用户已离线")""",
            vars=locals())
def get_recipes(self):
    """Resolve ingredient ids, build the recipe query, and return all rows."""
    self._get_ingredient_ids()
    sql = self._get_sql()
    # list() replaces the manual append loop over the result cursor.
    return list(db.query(sql))
def parse(resort_id):
    # Extract the resort's terrain-status JSON, embedded in the page as a JS
    # assignment between the FR.TerrainStatusFeed and FR.LiftStatusFilters
    # markers.
    res = db.query('SELECT url FROM resorts WHERE id='+str(resort_id))
    url = res[0]['url']
    page = requests.get(url)
    beg = page.content.find('FR.TerrainStatusFeed')
    # +23 skips the 20-char marker plus the assignment syntax — presumably
    # "FR.TerrainStatusFeed = {..."; TODO confirm against the live page.
    sub = page.content[beg+23:]
    end = sub.find('FR.LiftStatusFilters')
    # strip()[:-1] drops surrounding whitespace and the trailing ";" of the
    # JS statement, leaving bare JSON.
    jsonstr = sub[:end].strip()[:-1]
    parsed_json = json.loads(jsonstr)
    return parsed_json
def _loadFromDb(self, id):
    """Populate this account's fields from the DB row with the given id.

    Raises AccountNotFound when no such row exists.
    """
    row = db.query("SELECT id, username, password, authkey FROM account WHERE id=?", (id, )).fetchone()
    if row is None:
        raise AccountNotFound
    self.id, self.username, self.password, self.authkey = row
def set(self, directory, key, value):
    """Upsert the (directory, key) entry so that it holds `value`."""
    entry = db.query(KV).filter_by(directory=directory, key=key).first()
    if entry is None:
        # Not present yet: create the row before assigning the value.
        entry = KV()
        entry.directory = directory
        entry.key = key
        db.add(entry)
    entry.value = value
    db.flush()
def check_login(username, password):
    """Return True when `username` exists and the salted double-md5 of
    `password` matches the stored hash."""
    user = get_user_by_name(username)
    # Robustness: get_user_by_name returns {} for an unknown user, so the
    # original crashed on user['salt'] (KeyError) instead of failing cleanly.
    if not user:
        return False
    sql = (
        r'select count(*) as count from users where username = $username '
        r'and password = md5(concat($salt, md5($password)))')
    result = db.query(
        sql,
        vars={'username': username, 'password': password, 'salt': user['salt']})
    return result[0]['count'] != 0
def search_job(city_id, job_name, limit_start):
    """Search jobs by name substring, optionally filtered by city, 10 per
    page from `limit_start` (-1 means the first page)."""
    # SECURITY: the original interpolated job_name raw into the LIKE clause
    # (SQL injection). Ints are coerced; the search term is bound as $kw —
    # assumes the web.py-style `vars=` binding used elsewhere in this code.
    city_id = int(city_id)
    limit_start = int(limit_start)
    sub_sql_city = '' if city_id == 0 else 'and j.city = %s ' % city_id
    sub_sql_limit = '' if limit_start == -1 else '%s, ' % limit_start
    sql = 'select ' \
          'j.id, j.name, j.address, j.time, j.company as companyId , c.name as company ,c.nick ' \
          'from job as j ' \
          'join company_info as c ' \
          'on j.company = c.company and j.name like $kw %s order by id desc limit %s 10;' % (
              sub_sql_city, sub_sql_limit)
    return _db.query(sql, vars={'kw': '%' + job_name + '%'})
def get_hot_jobs(city_id, limit_start):
    """List jobs ordered by application count (hottest first), optionally
    filtered by city, 10 per page from `limit_start` (-1 = first page)."""
    # SECURITY: coerce to int before interpolating into the SQL string.
    city_id = int(city_id)
    limit_start = int(limit_start)
    sub_sql_where = '' if city_id == 0 else 'and j.city = %s ' % city_id
    sub_sql_limit = '' if limit_start == -1 else '%s, ' % limit_start
    sql = 'select * from (select ' \
          'j.id, j.name, j.address, j.time, j.company as companyId ,j.apply, c.name as company ,c.nick ' \
          'from job as j ' \
          'join company_info as c ' \
          'on j.company = c.company %s order by j.apply desc) as r limit %s 10;' % (
              sub_sql_where, sub_sql_limit)
    return _db.query(sql)
def cs_offline_friends(p, req):
    """Fetch character rows (id, lead_id, nick_name, level, might) for the
    requested offline friend cids; returns [] when none are found."""
    cid, offline_cids = req
    fields = ['id', 'lead_id', 'nick_name', 'level', 'might']
    # Robustness: an empty cid list produced invalid SQL ("WHERE 1 AND");
    # SECURITY: each cid is coerced to int before interpolation.
    if not offline_cids:
        defer.returnValue([])
    id_list = ','.join(str(int(_cid)) for _cid in offline_cids)
    _sql = 'SELECT {0} FROM tb_character WHERE id IN ({1});'.format(
        ','.join(fields), id_list)
    _dataset = yield db.query(_sql)
    defer.returnValue(_dataset if _dataset else [])
def authenticate_user(username: str, password: str):
    """Return the User when the credentials are valid, otherwise False."""
    try:
        user = db.query(User).filter(User.username == username).one()
    except sqlalchemy.orm.exc.NoResultFound:
        return False
    # (.one() never returns None, so the original `if not user` was dead code.)
    if not verify_password(password, user.hashed_password):
        return False
    return user
def cs_offline_rand_friends(p, req):
    """Pick up to `count` random candidate friends near the requester's
    level, progressively widening the level window (±5, ±10, then 0..1000)
    until enough rows are found; excluded cids are never returned.

    Returns fewer than `count` rows only when even the widest window is
    short (then whatever was found is returned un-sampled).
    """
    cid, count, level, except_cids = req
    _sql = "SELECT id,lead_id,nick_name,level,might FROM tb_character WHERE 1 AND level >= %s AND level <= %s AND id not in ('%s');"
    # SECURITY: coerce excluded ids to int before interpolation.
    _limit = "', '".join(str(int(_cid)) for _cid in except_cids)
    _dataset = []
    # The widening passes replace the original's three copy-pasted branches.
    for lo, hi in ((level - 5, level + 5), (level - 10, level + 10), (0, 1000)):
        _dataset = yield db.query(_sql % (lo, hi, _limit))
        if len(_dataset) >= count:
            defer.returnValue(random.sample(_dataset, count))
    defer.returnValue(_dataset)
def cs_search_nick_name(p, req):
    """Search characters whose nick_name contains the given substring,
    returning at most FRIEND_RAND_MAX_COUNT rows."""
    cid, nick_name = req
    # SECURITY: the original interpolated nick_name raw into the LIKE clause
    # (SQL injection). Escape backslashes and quotes before interpolation; a
    # parameterized query would be better if this db wrapper supports one —
    # TODO confirm.
    safe_name = nick_name.replace('\\', '\\\\').replace("'", "''")
    _sql = "select id,lead_id,nick_name,level,might FROM tb_character WHERE nick_name like '%s'"
    _query = _sql % ('%' + safe_name + '%')
    _dataset = yield db.query(_query)
    if _dataset:
        defer.returnValue(_dataset[:FRIEND_RAND_MAX_COUNT])
    else:
        defer.returnValue([])
def get_weather_for_resort(resort_id):
    # Fetch current weather for a resort from OpenWeatherMap (using the
    # resort's stored coordinates), flatten the fields of interest into
    # data_map, and insert one row into weather_reports.
    apikey = config['appid']
    res = db.query('SELECT geo_lat, geo_lon FROM resorts WHERE id='+str(resort_id))
    latitude = res[0]['geo_lat']
    longitude = res[0]['geo_lon']
    url = 'http://api.openweathermap.org/data/2.5/weather?lat={}&lon={}&APPID={}'.format(latitude, longitude, apikey)
    report = requests.get(url)
    weather_data = json.loads(report.content)
    # Columns always present in the report row.
    data_map = {
        'source': 'openweathermap',
        'updated_at': time.strftime('%Y-%m-%d %H:%M:%S'),
        'resort_id': resort_id
    }
    # Every field below is optional in the API response, hence the guards.
    if len(weather_data) > 0:
        if len(weather_data['weather']) > 0:
            weather = weather_data['weather'][0]
            if 'id' in weather:
                data_map['label_id'] = weather['id']
            if 'main' in weather:
                data_map['label'] = weather['main']
            if 'description' in weather:
                data_map['description'] = weather['description']
        if 'main' in weather_data:
            main = weather_data['main']
            if 'temp' in main:
                data_map['temperature'] = (main['temp'] - 273.15) #kelvin->centigrade
            if 'pressure' in main:
                data_map['pressure'] = main['pressure']
            if 'humidity' in main:
                data_map['humidity'] = main['humidity']
            if 'temp_min' in main:
                data_map['temperature_min'] = (main['temp_min'] - 273.15) #kelvin->centigrade
            if 'temp_max' in main:
                data_map['temperature_max'] = (main['temp_max'] - 273.15) #kelvin->centigrade
        if 'visibility' in weather_data:
            data_map['visibility'] = weather_data['visibility']
        if 'wind' in weather_data:
            wind = weather_data['wind']
            if 'speed' in wind:
                data_map['wind_speed'] = wind['speed']
            if 'deg' in wind:
                data_map['wind_dir'] = wind['deg']
        if 'clouds' in weather_data and 'all' in weather_data['clouds']:
            data_map['cloudiness'] = weather_data['clouds']['all']
        if 'rain' in weather_data:
            rain = weather_data['rain']
            if '1h' in rain:
                data_map['rain_last_1h'] = rain['1h']
            if '3h' in rain:
                data_map['rain_last_3h'] = rain['3h']
        if 'snow' in weather_data:
            snow = weather_data['snow']
            if '1h' in snow:
                data_map['snow_last_1h'] = snow['1h']
            if '3h' in snow:
                data_map['snow_last_3h'] = snow['3h']
        if 'dt' in weather_data:
            data_map['data_calculated_at'] = datetime.utcfromtimestamp(weather_data['dt']).strftime('%Y-%m-%d %H:%M:%S')
    # Build a parameterized INSERT from whatever fields were collected.
    cols = ','.join(data_map.keys())
    vals = data_map.values()
    vals = tuple(vals)
    tmp = ','.join(['%s'] * len(vals))
    sql = 'INSERT INTO weather_reports ({}) VALUES ({})'.format(cols,tmp)
    db.execute(sql, vals)
    db.commit()
    db.close_db_connection()
def to_sql():
    # GUI callback: read the natural-language query from the entry widget,
    # segment it, translate it to SQL, execute it, and display each stage's
    # output in the corresponding text widgets; shows a message box with the
    # success/failure result.
    raw = entry.get()
    seg = nl2sql.cut_words(raw)
    show_segments.insert('insert', '/'.join(seg))
    # db.info() supplies metadata for the translator — presumably schema
    # (table/column names); TODO confirm.
    a = db.info()
    sql = nl2sql.nl2sql(seg, a)
    show_sql.insert('insert', sql)
    result = db.query(sql)
    show_result.insert('insert', result)
    if result:
        tkinter.messagebox.showinfo(title='成功', message='查询成功!')
    else:
        tkinter.messagebox.showerror(title='失败', message='查询失败!')
def exam1_auth(level, check=False):
    # Dual-purpose access guard for the exam1 pages.
    #
    # With check=True: return True/False — is there a logged-in exam user of
    # at least `level` whose account is younger than 4 hours (14400 s)?
    # With check=False (default): act as a decorator factory that redirects
    # to the index page (with an error message) instead of returning False.
    if check:
        if 'userid' not in session:
            return False
        iso, dlevel = db.query(
            'SELECT creation, level FROM exam1_users WHERE id=%s',
            session['userid'])[0]
        # The creation timestamp may be stored with or without microseconds.
        try:
            date = datetime.strptime(iso, '%Y-%m-%dT%H:%M:%S')
        except:
            date = datetime.strptime(iso, '%Y-%m-%dT%H:%M:%S.%f')
        if dlevel < level:
            return False
        if (datetime.now() - date).total_seconds() > 14400:
            return False
        return True
    def xsub(func):
        # Decorator: wrap `func` with the same three checks, redirecting on
        # failure instead of returning False.
        def sub(*args, **kwargs):
            if 'userid' not in session:
                redirect(handler.exam1.get_index.url(error='Session expired'))
            iso, dlevel = db.query(
                'SELECT creation, level FROM exam1_users WHERE id=%s',
                session['userid'])[0]
            try:
                date = datetime.strptime(iso, '%Y-%m-%dT%H:%M:%S')
            except:
                date = datetime.strptime(iso, '%Y-%m-%dT%H:%M:%S.%f')
            if dlevel < level:
                redirect(
                    handler.exam1.get_index.url(
                        error='You have not yet achieved this level'))
            if (datetime.now() - date).total_seconds() > 14400:
                redirect(
                    handler.exam1.get_index.url(error='Time has expired!'))
            return func(*args, **kwargs)
        # Expose the undecorated function for introspection.
        sub.__delegated__ = func
        return sub
    return xsub
def get_lift_status(resort_id):
    # Pull current lift statuses from the resort's JSON feed and insert one
    # lift_status row per lift.
    #get mapping of lift name to lift id
    lift_map_sql = db.query('SELECT id, name FROM lifts WHERE resort_id='+str(resort_id))
    lift_map = {}
    for lift in lift_map_sql:
        lift_map[lift['name']] = int(lift['id'])
    parsed_json = parse(resort_id)
    lifts = parsed_json['Lifts']
    # NOTE(review): a feed lift name missing from our lifts table raises
    # KeyError here — confirm names are kept in sync.
    lift_status = list(map(lambda l: {'lift_id': lift_map[l['Name']], 'lift_status': l['Status']}, lifts))
    #save lift statuses in db
    for status in lift_status:
        sql = 'INSERT INTO lift_status (lift_id, status, updated_at) VALUES (%s, %s, %s)'
        val = (status['lift_id'], status['lift_status'], time.strftime('%Y-%m-%d %H:%M:%S'))
        db.execute(sql, val)
    db.commit()
    db.close_db_connection()
def _get_ingredient_ids(self):
    # Resolve every ingredient name (extracted + explicit) to its id via the
    # ingredient_translation table, optionally restricted to self.lang.
    # Resolved ids are appended to self._founds, unknown names to
    # self._not_founds.
    total_ingredients = self._extract_ingredients() + self._ingredients
    lang = 'and lang = "{}"'.format(self.lang) if self.lang else ''
    for ingredient in total_ingredients:
        # SECURITY NOTE(review): the ingredient name is interpolated into the
        # SQL unescaped — injectable if names can come from user input;
        # switch to a parameterized query.
        sql = u'select ingredient_id from ingredient_translation where name = "{}" {}'.format(
            ingredient, lang)
        res = db.query(sql)
        found = None
        # Take the first matching row, if any.
        for row in res:
            found = row['ingredient_id']
            break
        if found is None:
            self._not_founds.append(ingredient)
        else:
            self._founds.append(found)
def get_lift_status(resort_id):
    # Scrape the resort's lift DOM and insert one lift_status row per lift.
    # NOTE(review): this mutates the module-level lift_map (presumably read
    # by get_lift_status_obj) instead of passing it along — confirm.
    global lift_map
    lift_map_sql = db.query('SELECT id, name FROM lifts WHERE resort_id='+str(resort_id))
    for sl in lift_map_sql:
        lift_map[sl['name']] = int(sl['id'])
    lifts_dom = parse(resort_id)
    lifts = lifts_dom.find_all('div','lift')
    lift_status = list(map(get_lift_status_obj, lifts))
    #save lift statuses in db
    for status in lift_status:
        sql = 'INSERT INTO lift_status (lift_id, status, updated_at) VALUES (%s, %s, %s)'
        val = (status['lift_id'], status['lift_status'], time.strftime('%Y-%m-%d %H:%M:%S'))
        db.execute(sql, val)
    db.commit()
    db.close_db_connection()