def cmp_solve(env, headers, cmp_id):
    """Persist a logged-in user's quiz answers for competition `cmp_id`,
    then redirect to the results page.  Anonymous users are sent to '/'.
    Returns '' normally, or a (page, status) tuple for a 404."""
    cookies = _get_cookies(env)
    user_id = auth.is_logined(cookies)
    if user_id is not None:
        # NOTE(review): conditions are built by string formatting; safe
        # only if cmp_id is validated as an integer upstream — confirm.
        sql_str = 'id={}'
        sql_str = sql_str.format(cmp_id)
        rows = db.select('CMPS', sql_str)
        if rows:
            post_data = _get_post_data(env)
            format_a = 'answer-{}'
            sql_str = 'cmp_id={}'
            sql_str = sql_str.format(cmp_id)
            # Pair each question id with its (escaped) posted answer;
            # generator is consumed lazily by solver.save_answers.
            answers = (
                (r[0], common.escape(post_data[format_a.format(r[0])][0]))
                for r in db.select('QUESTIONS', sql_str))
            username = db.username_by_id(user_id)
            solver.save_answers(username, answers, cmp_id)
            headers.append(('Location', '/quiz/{}/results'.format(cmp_id)))
        else:
            err_msg = "User ID {} tried to solve non-existing cmp {}"
            err_msg = err_msg.format(str(user_id), str(cmp_id))
            common.dbg_log(err_msg)
            return (ui.error_page({'error_msg': '404: Competition not found'}),
                    '404 Not Found')
    else:
        headers.append(('Location', '/'))
    return ''
def dashboard(env, headers):
    """Render the dashboard for a logged-in user.

    Shows optional search results, the competitions the user created,
    and the competitions the user has solved.  Anonymous users are
    redirected to '/' and get an empty body.
    """
    cookies = _get_cookies(env)
    user_id = auth.is_logined(cookies)
    if user_id is None:
        headers.append(('Location', '/'))
        return ''
    headers.append(('Location', '/dashboard'))
    post_data = _get_post_data(env)
    search_list = None
    if 'search' in post_data:
        request = post_data['search'][0]
        search_list = ui.search_page({'results': common.search(request)})
    cond_f = 'user_id={}'
    cond = cond_f.format(user_id)
    users_cmps = db.select('cmps', cond)
    user_answers = db.select('user_answers', cond)
    cond_f = 'id={}'
    solved_cmps = []
    # Seed with None so a missing/None task id is never treated as new.
    used_tasks = {None}
    for user_answer in user_answers:
        cond = cond_f.format(user_answer[2])
        task_id = db.select('questions', cond)[0][2]
        if task_id not in used_tasks:
            used_tasks.add(task_id)
            # FIX: reuse task_id instead of re-running the identical
            # 'questions' query a second time for the same value.
            cond = cond_f.format(task_id)
            solved_cmps += db.select('cmps', cond)
    return ui.dashboard_page({
        'user_id': user_id,
        'search_page': search_list,
        'users_cmps': users_cmps,
        'solved_cmps': solved_cmps,
        'is_op': auth.is_op(user_id)
    })
def getuser(loginorid, secret=''):
    """Fetch a User either by login name (str) or by numeric id.

    The user's stored secret must equal `secret`; otherwise None is
    returned.  None is also returned when no matching row exists.
    """
    # SECURITY NOTE: both queries interpolate values directly into SQL;
    # the login/id should be escaped or parameterized upstream.
    if isinstance(loginorid, str):  # FIX: idiomatic type check
        login = loginorid
        theUser = dataobjects.User(login, '')
        if theUser.secret() != secret:
            return None
        qstring = "SELECT * FROM users WHERE login_name='%s'" % theUser.login
        item = db.select(qstring)
        if item is None:  # FIX: identity comparison with None
            return None
        return dataobjects.User(item)
    # Numeric id lookup: fetch first, then verify the secret.
    ident = loginorid
    qstring = "SELECT * FROM users WHERE id=%d" % ident
    item = db.select(qstring)
    if item is None:
        return None
    theUser = dataobjects.User(item)
    if theUser.secret() == secret:
        return theUser
    return None
def latest_temperature():
    """Return a JSON string mapping each temperature sensor's label to
    its most recent [timestamp_ms, temperature] reading."""
    readings = {}
    # All known temperature-capable device types.
    devices = db.select(
        "SELECT DeviceID, Label FROM Devices "
        "WHERE Type IN ('ds18b20', 'dht22', 'dht11', 'am2302')"
    )
    for device in devices:
        device_id, label = device[0], device[1]
        rows = db.select(
            "SELECT Timestamp, " + settings["t_unit"]
            + " FROM Temperature WHERE DeviceID = ?"
            + " ORDER BY Timestamp DESC LIMIT 1",
            (device_id,),
        )
        for row in rows:
            # Timestamps are stored in seconds; append "000" for millis.
            readings[label] = [int(str(row[0]) + "000"), row[1]]
    return json.dumps(readings)
def latest_temperature():
    """Return a JSON string of the latest reading per temperature sensor."""
    # Build a dictionary of data.
    data = {}
    # Select all temperature devices.
    devices = db.select("SELECT DeviceID, Label FROM Devices WHERE Type IN ('ds18b20', 'dht22', 'dht11', 'am2302')")
    # Iterate through the devices.
    for device in devices:
        # Get the latest temperature.
        args = (device[0],)
        rows = db.select("SELECT Timestamp, " + settings['t_unit'] + " FROM Temperature WHERE DeviceID = ? ORDER BY Timestamp DESC LIMIT 1", args)
        # Fill in the data.
        for row in rows:
            # DB stores epoch seconds; appending "000" yields milliseconds.
            timestamp = int(str(row[0]) + '000')
            label = device[1]
            temperature = row[1]
            data[label] = [timestamp, temperature]
    # Return as a string.
    logger.info("/data/latest/temperature: {}".format(json.dumps(data)))
    # /data/latest/temperature: {"28-000003ea01f5": [1461046802000, 4.38]}
    return json.dumps(data)
def test_with(self):
    """A delete inside an open `with db.transaction()` block must only
    become visible once the context manager commits on exit."""
    with db.transaction() as t:
        t.delete('users').condition('uid', 1).execute()
        res = db.select('users').condition('uid', 1).execute()
        # Not yet committed: the row is still visible to readers.
        self.assertEqual(len(res), 1)
    # After the with-block commits, the row is gone.
    res = db.select('users').condition('uid', 1).execute()
    self.assertEqual(len(res), 0)
def cmp_page(env, headers, cmp_id):
    """Render the solve page for competition `cmp_id`.

    Anonymous users are redirected to '/'; users who already solved the
    competition are redirected to its results page; a missing
    competition yields a (page, '404 Not Found') tuple.
    """
    cookies = _get_cookies(env)
    user_id = auth.is_logined(cookies)
    if user_id is not None:
        sql_str = 'id={}'
        sql_str = sql_str.format(cmp_id)
        rows = db.select('CMPS', sql_str)
        # FIX: `rows and len(rows)` was redundant — a non-empty list is
        # already truthy.
        if rows:
            row = rows[0]
            if common.is_user_solve_cmp(cmp_id, user_id):
                headers.append(('Location', '/quiz/{}/results'.format(cmp_id)))
            else:
                username = db.username_by_id(user_id)
                sql_str = 'cmp_id={}'
                sql_str = sql_str.format(cmp_id)
                questions = [(r[0], r[1])
                             for r in db.select('QUESTIONS', sql_str)]
                return ui.solve_page({
                    'cmp_id': row[0],
                    'title': row[1],
                    'description': row[2],
                    'user_name': username,
                    'questions': questions})
        else:
            err_msg = "User ID {} tried to solve non-existing cmp {}"
            err_msg = err_msg.format(str(user_id), str(cmp_id))
            common.dbg_log(err_msg)
            return (ui.error_page({'error_msg': '404: Competition not found'}),
                    '404 Not Found')
    else:
        headers.append(('Location', '/'))
    return ''
def questions_overview(request):
    """Render the questions overview: every thread with its question and
    answer counts, plus the list of questions with no answer yet."""
    matches = db.select("""SELECT question_threads.id AS tid, question_threads.title AS ttitle, (SELECT COUNT(*) FROM question WHERE thread_id = question_threads.id) AS qcount, (SELECT COUNT(*) FROM answer WHERE thread_id = question_threads.id) AS acount FROM question_threads """)
    # Questions whose LEFT JOIN matches zero answers (checker = 0).
    unanswered = db.select("""SELECT question.thread_id as tid, question.id as qid, question.title AS qtitle, COUNT(answer.question) AS checker FROM question LEFT JOIN answer ON answer.question = question.id GROUP BY question.id HAVING checker = 0""")
    return render_template('questions.xml', questions=matches, unanswered=unanswered, session=request.session)
def select(cls, where, *args):
    """Run a SELECT against the class table, optionally filtered by the
    given where clause, and return the rows as model instances."""
    if where:
        rows = db.select('select * from %s %s' % (cls.__table__, where), *args)
    else:
        rows = db.select('select * from %s' % cls.__table__)
    return [cls(**row) for row in rows]
def main():
    """Send a birthday greeting mail to everyone whose solar or lunar
    birthday is today."""
    now = time.localtime()
    now = datetime.datetime(now.tm_year, now.tm_mon, now.tm_mday)
    lunar_now = lunar.get_lunar_date(now)
    now = (now.year, now.month, now.day)
    log('now', now)
    log('lunar_now', lunar_now)
    personGroup = []
    # One query for solar-calendar birthdays ("阳历"), one for lunar ("阴历").
    sql1 = 'select name, sex, birthday, calendar, mail from person where birthday="%04d-%02d-%02d" and calendar="阳历"' % (now[0], now[1], now[2])
    sql2 = 'select name, sex, birthday, calendar, mail from person where birthday="%04d-%02d-%02d" and calendar="阴历"' % (lunar_now[0], lunar_now[1], lunar_now[2])
    for sql in (sql1, sql2):
        db.select(sql, lambda item: personGroup.append({'name': item[0], 'sex': item[1], 'birthday': item[2], 'calendar': item[3], 'mail': item[4]}))
    init_template_pathname_group()
    for person in personGroup:
        # Render a randomly chosen mail template for this person.
        templatePathname = random.choice(templatePathnameGroup)
        template = Template(open(templatePathname, 'rt').read())
        text = template.render(person=create_person(person['name'], person['sex'], person['birthday'], person['calendar'], person['mail'], now, lunar_now))
        send_mail('*****@*****.**', person['mail'], 'Happy Birthday To You!', text)
def user_cells():
    # Walk every stored GSM cell location page by page and, for each cell,
    # copy per-user observation counts into the 'userlocations' resource.
    # NOTE: Python 2 (print statements).
    searching = True
    page = 0
    while searching:
        sql_query = select(['mcc', 'mnc', 'lac', 'cid', 'lat', 'lon', '_id'], 'gsmlocation', page=page)
        cells = local.action.datastore_search_sql(sql=sql_query)['records']
        if len(cells) == 0:
            searching = False
        page += 1
        for cell in cells:
            mcc, mnc, lac, cid = [str(cell[key]) for key in ['mcc', 'mnc', 'lac', 'cid']]
            ref, lat, lon = [cell[key] for key in ['_id', 'lat', 'lon']]
            user_search = True
            user_page = 0
            print ','.join([mcc, mnc, lac, cid])
            while user_search:
                # Count observations of this cell per user, paged.
                sql_query = select(['COUNT(*)', 'uid'], 'gsmcell', eq={'mcc': mcc, 'mnc': mnc, 'lac': lac, 'cid': cid}, group='uid', page=user_page)
                users = local.action.datastore_search_sql(sql=sql_query)['records']
                if len(users) == 0:
                    user_search = False
                user_page += 1
                for uid in [u['uid'] for u in users]:
                    print uid
                    # Drop stale rows for this (user, cell) pair first.
                    local.action.datastore_delete(resource_id=resources['userlocations'], filters={'uid': uid, 'cid': ref})
                local.action.datastore_upsert(resource_id=resources['userlocations'], records=[{'uid': user['uid'], 'count': user['count'], 'cid': ref, 'lat': lat, 'lon': lon} for user in users], method='insert')
def login(username, password, headers):
    """Log in user into his account(if it is his account)

    On success: appends a 14-day session cookie to `headers`, stores the
    session in SESSIONS, and returns the user id.  Returns None on
    failure.  headers - list of (name, value) tuples.
    """
    username = common.escape(username)
    # FIX: the condition template had lost its '{}' placeholder (it read
    # "username='******'"), so .format() was a no-op and the query
    # matched a literal string instead of the given username.
    sql_str = "username='{}'"
    sql_str = sql_str.format(username)
    user = db.select('USERS', sql_str)
    if user:
        user = user[0]
        # Row layout (as used here): [0]=id, [2]=password hash, [3]=salt.
        pass_hash = _generate_hash(password, user[3])
        if user[2] == pass_hash:
            # Generate a session id that is not already taken.
            session_id = None
            sql_str = "session_id='{}'"
            while (session_id is None
                   or db.select('SESSIONS', sql_str.format(session_id))):
                session_id = _generate_string(256)
            # Cookie (and DB session) valid for 14 days.
            td = datetime.timedelta(14)
            dt = datetime.datetime.today()
            dt = dt + td
            expires = common.utctime(dt)
            cookie = "session_id={}; expires={}; path=/"
            cookie = cookie.format(session_id, expires)
            headers.append(('Set-Cookie', cookie))
            user_id = user[0]
            db.insert('SESSIONS', (session_id, user_id))
            return user_id
    return None
def admin_view(request):
    """Admin overview: per-user shift statistics, overlapping-shift
    conflicts, and wanted-vs-filled shift totals."""
    if request.method == 'GET':
        users = db.select('SELECT * FROM person')
        schichten = {}
        for u in users:
            schichten[u['username']] = db.select('''SELECT ? AS userid, p.mobile AS mobile, p.email AS email, p.signed_hygiene as signed_hygiene, p.signed_fire as signed_fire, COUNT(ps.schicht_id) AS schicht_count, p.tshirt_size as tshirt, p.pullover_size as zipper FROM person AS p LEFT OUTER JOIN person_schicht AS ps ON p.id = ps.pers_id WHERE p.id = ?''', (u['id'], u['id']))[0]
            ## that query returns a one-element
            ## list for everything
        # Per person, the ids of shifts whose time ranges overlap.
        conflicts = db.select('''SELECT ps.pers_id as pid, group_concat(DISTINCT s.id) as sids FROM schicht as s JOIN person_schicht as ps ON s.id = ps.schicht_id JOIN person_schicht as ps2 ON ps.pers_id = ps2.pers_id JOIN schicht as s2 ON s2.id = ps2.schicht_id AND s.id != s2.id WHERE ((s.from_day * 24 + s.from_hour <= s2.from_day * 24 + s2.from_hour) AND (s.until_day * 24 + s.until_hour > s2.from_day * 24 + s2.from_hour)) OR ((s.from_day * 24 + s.from_hour < s2.until_day * 24 + s2.until_hour) AND (s.until_day * 24 + s.until_hour >= s2.until_day * 24 + s2.until_hour)) OR ((s.from_day * 24 + s.from_hour >= s2.from_day * 24 + s2.from_hour) AND (s.until_day * 24 + s.until_hour <= s2.until_day * 24 + s2.until_hour)) GROUP BY ps.pers_id;''')
        # Total helper slots wanted vs. already assigned.
        howmany = db.select('''SELECT sum(s.needed_persons) AS want, (SELECT count(ps.schicht_id) FROM person_schicht AS ps) AS have FROM schicht AS s;''')
        return render_template('admin.xml', session=request.session, users=users, schichten=schichten, conflicts=conflicts, howmany=howmany[0])
    else:
        return render_template('error.xml', error_short='invalid request', error_long='you sent neither a POST nor a GET request. we don\'t understand anything else', session=request.session)
def update_book(self):
    """Compare the link table with the book table and crawl every link
    that does not have a book entry yet."""
    link_list = select(p.link for p in Link)[:]
    # PERF FIX: membership tests against a list were O(n) each, making
    # the loop O(n^2); a set gives O(1) lookups.
    known_links = set(select(p.link for p in Book)[:])
    for link in link_list:
        if link not in known_links:
            crawler.get_book(link, 'div.btitle>h1')
            # Throttle between crawls.
            sleep(book_cd)
def build_user_apps():
    # For every user, collect app/process names seen in the socket,
    # networktraffic and notification tables that are not yet recorded in
    # 'userapps', and insert them.
    for uid in get_users():
        #print uid
        for table, field in [('socket', 'process'), ('networktraffic', 'process'), ('notification', 'package')]:
            # Subquery excluding processes already stored for this user.
            ex_query = field + ' NOT IN (' + select(['process'], 'userapps', eq={'uid': uid}) + ')'
            sql_query = select([field], table, eq={'uid': uid}, where=[ex_query])
            #print sql_query
            records = [{'uid': uid, 'process': r[field]} for r in local.action.datastore_search_sql(sql=sql_query)['records']]
            local.action.datastore_upsert(resource_id=resources['userapps'], records=records, method='insert')
def test_begin_commit(self):
    """With an explicit begin/commit transaction, a delete becomes
    visible to other readers only after commit()."""
    t = db.transaction()
    t.begin()
    t.delete('users').condition('uid', 1).execute()
    res = db.select('users').condition('uid', 1).execute()
    # Not yet committed — the row is still visible.
    self.assertEqual(len(res), 1)
    t.commit()
    res = db.select('users').condition('uid', 1).execute()
    self.assertEqual(len(res), 0)
def updateMap(self, worldSizeX, worldSizeY):
    """Rebuild this cell's map image from its highest visible level and
    refresh the zoom-pyramid images that contain it."""
    #grab all the levels that belong to this cell
    c = db.levelData.columns
    statement = db.select([db.levelData], (c.cellIdX == self.cellIdX) & (c.cellIdY == self.cellIdY))
    result = self.conn.execute(statement)
    rows = result.fetchall()
    result.close()
    highest = None
    highestImage = None
    levels = {}
    for row in rows:
        #create an instance of each and update it's image
        levels[row['levelIdZ']] = level.Update(data=row, conn=self.conn)
        levels[row['levelIdZ']].updateMap()
        #check if it's the highest and save the image name if it is
        # Levels that are (almost) entirely air are skipped.
        if (highest == None or row['levelIdZ'] > highest) and row['air'] < 0.99:
            highest = row['levelIdZ']
            highestImage = levels[row['levelIdZ']].data['baseImage']
    if highest == None:
        # No visible level at all: use the void placeholder image.
        highestImage = 'img-src/void.png'
    self.data['cellImage'] = highestImage
    c = db.cellData.columns
    up = db.cellData.update().where((c.cellIdX == self.cellIdX) & (c.cellIdY == self.cellIdY)).values(cellImage=self.data['cellImage'])
    self.conn.execute(up)
    #and update the zoom images
    # Snap to the even-numbered corner of this cell's 2x2 zoom group.
    # NOTE(review): relies on integer division — under Python 3 `/` would
    # yield floats; presumably this runs on Python 2. Confirm.
    x = (self.cellIdX / 2) * 2
    y = (self.cellIdY / 2) * 2
    c = db.cellData.columns
    statement = db.select([db.cellData], ((c.cellIdX == x) | (c.cellIdX == (x + 1))) & ((c.cellIdY == y) | (c.cellIdY == (y + 1))))
    result = self.conn.execute(statement)
    rows = result.fetchall()
    result.close()
    images = {}
    for row in rows:
        images[(row['cellIdX'], row['cellIdY'])] = row['cellImage']
    keepGoing = zoomMap(2, x, y, images[(x, y)], images[(x + 1, y)], images[(x, y + 1)], images[(x + 1, y + 1)])
    zoom = 3
    nCells = 2 ** (zoom - 1)
    # NOTE(review): this overwrites the zoomMap() result just computed —
    # possibly intentional (always climb at least one zoom level); verify.
    keepGoing = True
    # Walk up the zoom pyramid until the world edge is reached or a level
    # reports no change.
    while nCells < worldSizeX and nCells < worldSizeY and keepGoing:
        keepGoing = prepareZoomMap(zoom, x, y)
        zoom = zoom + 1
        nCells = 2 ** (zoom - 1)
def setData(self, sourceNum, startDate, forestNum):
    '''
    Args:
        sourceNum(int) : number of historical days
        startDate(str) : date to forecast (format: '2007-4-5')
        forestNum(str) : number of days to forecast, counting forward
                         from startDate inclusive
    '''
    d = datetime.strptime(startDate, "%Y-%m-%d")
    # First day of the historical window.
    preDate = (d + timedelta(-sourceNum)).strftime("%Y-%m-%d")
    # Last day of the forecast window.
    endDate = (d + timedelta(forestNum - 1)).strftime("%Y-%m-%d")
    # Historical raw rows: weather joined with consumption (96 samples/day).
    sourceDates = db.select("select ah.*,sh.daytype,sh.detailNum,sh.powerConsume from atmosphere_history ah INNER JOIN sw_history sh on ah.date = sh.date where ah.date>=? and ah.date<? and sh.detailNum=96 ORDER BY ah.date", preDate, startDate)
    # Forecast-day raw rows.
    forestDates = db.select("select ah.*,sh.daytype,sh.detailNum,sh.powerConsume from atmosphere_history ah INNER JOIN sw_history sh on ah.date = sh.date where ah.date>=? and ah.date<=? and sh.detailNum=96 ORDER BY ah.date", startDate, endDate)
    utiles.clearList(self.source)
    utiles.clearList(self.expect)
    utiles.clearList(self.forest)
    # 1. Convert each historical row into an AP object.
    # 2. Append to self.source.  (Python 2: dict.iterkeys.)
    for one in sourceDates:
        for key in one.iterkeys():
            if key == 'powerConsume':
                one[key] = float('%5.6f' % one[key])
                continue
            elif (key == 'date' or key == 'cityName'):
                continue
            one[key] = int(one[key])
        ap = entity.AP(**one)
        self.source.append(ap)
    # 1. Convert each forecast row into an AP object; the real consumption
    #    value also goes to self.expect.
    # 2. Append to self.forest.
    for one in forestDates:
        for key in one.iterkeys():
            if key == 'powerConsume':
                one[key] = float('%5.6f' % one[key])
                self.expect.append(one[key])
                continue
            elif (key == 'date' or key == 'cityName'):
                continue
            one[key] = int(one[key])
        ap = entity.AP(**one)
        self.forest.append(ap)
    self.stage = Stage.SETDATA
def truck_list(status=None):
    """Return {'data': {truck_id: summary}} for all trucks, or only the
    trucks matching `status` when one is given."""
    if status:
        trucks = select(t for t in Truck if t.status == status)
    else:
        trucks = select(t for t in Truck)
    data = {}
    for truck in trucks:
        data[truck.id] = truck.to_dict(only=['id', 'number', 'model', 'status'])
    return {'data': data}
def driver_list(status=None):
    """Return {'data': {driver_id: dict}} for all drivers (or only those
    with the given status), adding a combined 'full_name' field."""
    if status:
        drivers = select(d for d in Driver if d.status == status)
    else:
        drivers = select(d for d in Driver)
    data = {}
    for driver in drivers:
        entry = driver.to_dict()
        entry['full_name'] = driver.name + ' ' + driver.patronymic + ' ' + driver.surname
        data[driver.id] = entry
    return {'data': data}
def __init__(self):
    """Load city-wide ('shanwei') min/max bounds from the database and
    build per-feature min-max normalization lambdas in self._normalize."""
    self._res = db.select("SELECT " + "MAX(ah.airPressureMax) as airPressureMax, " + "MIN(ah.airPressureMin) as airPressureMin, " + "MAX(ah.temperatureMax) as temperatureMax, " + "MIN(ah.temperatureMin) as temperatureMin, " + "MAX(ah.waterPressureAve) as waterPressureMax, " + "MIN(ah.waterPressureAve) as waterPressureMin, " + "MAX(ah.relativeHumidityAve) as relativeHumidityMax, " + "MIN(ah.relativeHumidityMin) as relativeHumidityMin, " + "MAX(ah.precipitation) as precipitationMax, " + "MIN(ah.precipitation) as precipitationMin, " + "MAX(ah.smallEvaporation) as smallEvaporationMax, " + "MIN(ah.smallEvaporation) as smallEvaporationMin, " + "MAX(ah.largeEvaporation) as largeEvaporationMax, " + "MIN(ah.largeEvaporation) as largeEvaporationMin, " + "(CASE WHEN MAX(ah.windVelocityMax)>MAX(ah.extremeWindVelocity) THEN MAX(ah.windVelocityMax) ELSE MAX(ah.extremeWindVelocity) END) as windVelocityMax, " + "(CASE WHEN MIN(ah.windVelocityAve)<MIN(ah.extremeWindVelocity) THEN MIN(ah.windVelocityAve) ELSE MIN(ah.extremeWindVelocity) END) as windVelocityMin, " + "MAX(ah.hoursOfSunshine) as hoursOfSunshineMax, " + "MIN(ah.hoursOfSunshine) as hoursOfSunshineMin " + "FROM " + "atmosphere_history ah " + "where " + "ah.cityName = 'shanwei';")
    # The aggregate query returns a single row.
    self._res = self._res[0]
    pc = db.select("SELECT " + "MAX(powerConsume) as powerConsumeMax, " + "MIN(powerConsume) as powerConsumeMin " + "FROM " + "powerconsume " + "where " + "cityName = 'shanwei';")
    # Merge the consumption bounds into the weather bounds.
    self._res = dict(self._res, **pc[0])
    # self._res.extend(pc[0])
    # Coerce bounds: floats for consumption, ints for everything else.
    # (Python 2: dict.iterkeys.)
    for key in self._res.iterkeys():
        if key == 'powerConsumeMax' or key == 'powerConsumeMin':
            self._res[key] = float('%5.6f' % self._res[key])
        else:
            self._res[key] = int(self._res[key])
    # Per-feature min-max normalizers: value -> [0, 1].
    # NOTE(review): the 'largeEvaporationMax' key breaks the naming
    # pattern of the other features ('largeEvaporation' would match) —
    # verify against callers.
    self._normalize = {
        'airPressure': lambda airPressure: float((airPressure - self._res['airPressureMin'])) / (self._res['airPressureMax'] - self._res['airPressureMin']),
        'temperature': lambda temperature: float((temperature - self._res['temperatureMin'])) / (self._res['temperatureMax'] - self._res['temperatureMin']),
        'waterPressure': lambda waterPressure: float((waterPressure - self._res['waterPressureMin'])) / (self._res['waterPressureMax'] - self._res['waterPressureMin']),
        'relativeHumidity': lambda relativeHumidity: float((relativeHumidity - self._res['relativeHumidityMin'])) / (self._res['relativeHumidityMax'] - self._res['relativeHumidityMin']),
        'precipitation': lambda precipitation: float((precipitation - self._res['precipitationMin'])) / (self._res['precipitationMax'] - self._res['precipitationMin']),
        'smallEvaporation': lambda smallEvaporation: float((smallEvaporation - self._res['smallEvaporationMin'])) / (self._res['smallEvaporationMax'] - self._res['smallEvaporationMin']),
        'largeEvaporationMax': lambda largeEvaporationMax: float((largeEvaporationMax - self._res['largeEvaporationMin'])) / (self._res['largeEvaporationMax'] - self._res['largeEvaporationMin']),
        'windVelocity': lambda windVelocity: float((windVelocity - self._res['windVelocityMin'])) / (self._res['windVelocityMax'] - self._res['windVelocityMin']),
        'hoursOfSunshine': lambda hoursOfSunshine: float((hoursOfSunshine - self._res['hoursOfSunshineMin'])) / (self._res['hoursOfSunshineMax'] - self._res['hoursOfSunshineMin']),
        # Wind direction is a fixed 1..16 compass scale.
        'windVelocityDir': lambda windVelocityDir: float((windVelocityDir - 1)) / (16 - 1),
        'powerConsume': lambda powerConsume: float((powerConsume - self._res['powerConsumeMin'])) / (self._res['powerConsumeMax'] - self._res['powerConsumeMin'])
    }
def user_answers_for_cmp(cmp_id, user_id):
    """Collect every answer the given user submitted for the given
    competition, as a flat list of rows."""
    questions = db.select('questions', 'cmp_id={}'.format(cmp_id))
    answers = []
    for task in questions:
        cond = 'user_id={} AND task_id={}'.format(user_id, task[0])
        answers += db.select('user_answers', cond)
    return answers
def get(data=None):
    """Prints a person's introduction.

    Looks up `data` (or the default name "Rob0tnik") in bot_people and
    returns "name: details", or a not-found message.
    """
    if data is None:  # FIX: identity comparison with None
        name = "Rob0tnik"
    else:
        name = data.strip()
    # FIX: query once instead of twice — the original ran the identical
    # db.select a second time just to read the details column.
    # SECURITY NOTE: `name` is interpolated into the condition string;
    # prefer a parameterized query if db.select supports one.
    rows = db.select("bot_people", "`name` = \"%s\"" % name)
    if not rows:
        return "Person does not exist in database."
    details = rows[0][2]
    return "%s: %s" % (name, details)
def is_user_solve_cmp(cmp_id, user_id):
    """Return True when the user has an answer recorded for the first
    question of the competition, else False."""
    questions = db.select('questions', 'cmp_id={}'.format(cmp_id))
    # Guard clause: no questions (or an empty first row) means unsolved.
    if not (questions and len(questions) and len(questions[0])):
        return False
    cond = 'user_id={} AND task_id={}'.format(user_id, questions[0][0])
    return bool(db.select('user_answers', cond))
def setData(self, startDate, forestNum):
    # Load history and forecast rows and convert them into AP objects,
    # attaching a sliding 7-day power-consumption window to each day.
    self.forestNum = forestNum
    '''
    参数:
        startDate(str) : 需要预测的日期(格式:'2007-4-5')
        forestNum(str) : 需要预测的天数,包含startDate向后数forestNum天
    '''
    d = datetime.strptime(startDate, "%Y-%m-%d")
    sevenday = datetime(2007, 1, 1)
    # First day of the historical window (hard-coded to 2007-01-08).
    preDate = datetime(2007, 1, 8)
    # Last day of the forecast window.
    endDate = (d + timedelta(forestNum - 1)).strftime("%Y-%m-%d")
    # Seed consumption values for the first seven days.
    sevenDates = db.select("select powerConsume from powerConsume sh where date>=? and date<? ORDER BY date", sevenday, preDate)
    # Historical weather + consumption rows.
    sourceDates = db.select("select ah.*, sh.daytype, sh.powerConsume, sh.pc_id from atmosphere_history ah INNER JOIN powerConsume sh on ah.date = sh.date where ah.date>=? and ah.date<? ORDER BY ah.date", preDate, startDate)
    # Forecast-day rows.
    forestDates = db.select("select ah.*, sh.daytype, sh.powerConsume, sh.pc_id from atmosphere_history ah INNER JOIN powerConsume sh on ah.date = sh.date where ah.date>=? and ah.date<=? ORDER BY ah.date", startDate, endDate)
    # print forestDates[0]
    utiles.clearList(self._powerConsumes)
    utiles.clearList(self._expect)
    # NOTE(review): self._expect is cleared twice while self._source is
    # never cleared — one of these probably should be
    # clearList(self._source); verify.
    utiles.clearList(self._expect)
    utiles.clearList(self._forest)
    sevenday_index = 0
    for row in sevenDates:
        self._powerConsumes.append(float('%5.6f' % row['powerConsume']))
    # 1. Convert each historical row into an AP object.
    # 2. Attach its preceding 7-day consumption window; append to _source.
    for row in sourceDates:
        ap = entity.AP(**row)
        ap.sevendayPowerConsume = self._powerConsumes[sevenday_index:sevenday_index + 7]
        sevenday_index += 1
        self._powerConsumes.append(ap.powerConsume['real'])
        self._expect.append(ap.powerConsume['real'])
        self._source.append(ap)
    # 1. Convert each forecast row into an AP object.
    # 2. Same windowing; append to _forest instead.
    for row in forestDates:
        ap = entity.AP(**row)
        ap.sevendayPowerConsume = self._powerConsumes[sevenday_index:sevenday_index + 7]
        sevenday_index += 1
        self._powerConsumes.append(ap.powerConsume['real'])
        self._expect.append(ap.powerConsume['real'])
        self._forest.append(ap)
def helfer_overview(request):
    """Render the helper overview: the shift names assigned to each
    helper, keyed by username."""
    helfer = db.select('SELECT id, username FROM person')
    schichten = {}
    for h in helfer:
        # h[0] = person id, h[1] = username.
        schichten[h[1]] = db.select('''SELECT schicht.name AS schichtname FROM person_schicht JOIN schicht ON person_schicht.schicht_id = schicht.id WHERE person_schicht.pers_id=?''', (h[0],))
    return render_template('helferuebersicht.xml', schichten=schichten, session=request.session)
def order_list(status=None):
    """Return {'data': {order_id: dict}} for all orders (optionally
    filtered by status); each entry gains 'processed_value', the summed
    float value of the order's transportations."""
    if status:
        orders = select(o for o in Order if o.status == status)
    else:
        orders = select(o for o in Order)
    data = {}
    for order in orders:
        entry = order.to_dict()
        entry['processed_value'] = sum(float(t.value) for t in order.transportations)
        data[order.id] = entry
    return {'data': data}
def pollImageUpdates(self):
    """Process up to 100 levels flagged with a dirty image: clear the
    flag inside a transaction, then rebuild the owning cells' maps.
    Returns True if any work was done, False otherwise."""
    trans = self.conn.begin()
    try:
        c = db.levelData.columns
        statement = db.select([db.levelData], c.dirtyImage == True).limit(100)
        result = self.conn.execute(statement)
        rows = result.fetchall()
        result.close()
        if len(rows) == 0:
            trans.rollback()
            return False
        for row in rows:
            #update the entire column at once
            r = dict(row)
            c = db.levelData.columns
            up = db.levelData.update().where((c.cellIdX == r['cellIdX']) & (c.cellIdY == r['cellIdY'])).values(dirtyImage=False)
            self.conn.execute(up)
        trans.commit()
        # Rebuild the cell maps outside the transaction.
        for row in rows:
            r = dict(row)
            cell = khopeshpy.cell.Update(position=(row['cellIdX'], row['cellIdY']), conn=self.conn)
            cell.updateMap(self.world.data['worldSizeX'], self.world.data['worldSizeY'])
        return True
    except:
        # Roll back on any failure and re-raise for the caller.
        trans.rollback()
        raise
def pollCellIdle(self):
    """Claim one never-updated, above-sea-level cell (stamping
    lastUpdated inside a transaction) and enqueue it as idle.
    Returns True if a cell was claimed, else False."""
    trans = self.conn.begin()
    try:
        c = db.cellData.columns
        statement = db.select([db.cellData], (c.lastUpdated == None) & (c.cellGroundlevel > khopeshpy.config.worldSeaLevel)).limit(1)
        result = self.conn.execute(statement)
        row = result.fetchone()
        result.close()
        if row == None:
            trans.rollback()
            return False
        else:
            row = dict(row)
            c = db.cellData.columns
            # Stamp the cell so concurrent pollers skip it.
            up = db.cellData.update().where(c.cellId == row['cellId']).values(lastUpdated=time.time())
            self.conn.execute(up)
            trans.commit()
            cell = khopeshpy.cell.Update(data=row, conn=self.conn)
            cell.addIdleCell()
            return True
    except:
        # Roll back on any failure and re-raise for the caller.
        trans.rollback()
        raise
def pollCellUpdates(self):
    """Claim up to 100 cells whose lastUpdated is older than one world
    tick, stamp them inside a transaction, then update their levels.
    Returns True if any work was done, False otherwise."""
    trans = self.conn.begin()
    try:
        c = db.cellData.columns
        statement = db.select([db.cellData], (c.lastUpdated < int(time.time() - khopeshpy.config.worldTick))).limit(100)
        result = self.conn.execute(statement)
        rows = result.fetchall()
        result.close()
        if len(rows) == 0:
            trans.rollback()
            return False
        ids = []
        for row in rows:
            ids.append(row['cellId'])
        c = db.cellData.columns
        # Stamp all claimed cells in one UPDATE so other pollers skip them.
        up = db.cellData.update().where(c.cellId.in_(ids)).values(lastUpdated=time.time())
        self.conn.execute(up)
        trans.commit()
        # Run the actual level updates outside the transaction.
        for row in rows:
            r = dict(row)
            cell = khopeshpy.cell.Update(data=row, conn=self.conn)
            cell.updateLevels()
        return True
    except:
        # Roll back on any failure and re-raise for the caller.
        trans.rollback()
        raise
def test_should_call_cursor_and_methods(self):
    """select() must obtain a cursor from the given connection exactly once."""
    connection = MockConector(test_env)
    select(connection, ['test_selector_1'], 'test_table')
    connection.cursor.assert_called_once()
def recv(code, *args):
    """Check whether this tracking code is done.

    No result in the database would return none, so do a query and insert.

    :param code: express id
    :param args: usually Telegram message_id and chat_id(user_id)
    :return: message to be sent to the client
    """
    # check the undone job length and send warning if necessary.
    sql_cmd = 'SELECT track_id,message_id,chat_id,content FROM job WHERE done=?'
    # recommend 4-6 hours on cron
    message = ''
    if len(db.select(sql_cmd, (0, ))) > 300:
        message += u'由于快递100的免费版接口存在每IP每日最高2000查询的限制,查询次数即将接近上限。*您的查询可能会失败*'
    try:
        db_res = db.select("SELECT * FROM job WHERE track_id=?", (code, ))[0]
    except IndexError:
        # No row: fall back to the (empty) result list.
        db_res = db.select("SELECT * FROM job WHERE track_id=?", (code, ))
    if len(db_res) == 0:
        # Unknown code: detect the carrier, query it, insert a job row.
        com_code, real_com_name = auto_detect(code)
        if com_code == u'shunfeng':
            return u'不好意思,快递100说顺丰的接口有一点点小问题。俺会尽快调整API的。'
        if not com_code:
            # TODO: Is it the pythonic way?
            return utils.reply_not_found()
        res = query_express_status(com_code, code)
        # state '3'/'4' mean the delivery is finished.
        done = 1 if (res.get('state') == '3' or res.get('state') == '4') else 0
        try:
            sql_cmd = "INSERT INTO job VALUES (NULL ,?,?,?,?,?,?,?,?)"
            db.upsert(
                sql_cmd,
                (args[0], args[1], com_code, code,
                 res.get('data')[0].get('context'),
                 STATE.get(res.get('state')),
                 res.get('data')[0].get('time'), done))
            message += code + ' ' + real_com_name + '\n' + res.get('data')[0].get('time') + ' ' + res.get('data')[0].get('context')
        except IndexError:
            # Carrier returned no tracking events: relay its message.
            message += res.get('message')
    elif db_res[8] == 0:
        # Known but unfinished job: refresh its status.
        com_code, real_com_name = auto_detect(code)
        if com_code == u'shunfeng':
            return u'不好意思,快递100说顺丰的接口有一点点小问题。俺会尽快调整API的。'
        if not com_code:
            return utils.reply_not_found()
        res = query_express_status(com_code, code)
        done = 1 if (res.get('state') == '3' or res.get('state') == '4') else 0
        try:
            sql_cmd = "UPDATE job SET content=?,status=?,date=?,done=? WHERE track_id=?"
            db.upsert(sql_cmd, (res.get('data')[0].get('context'), STATE.get(res.get('state')), res.get('data')[0].get('time'), done, code))
            message += code + ' ' + real_com_name + '\n' + res.get('data')[0].get('time') + ' ' + res.get('data')[0].get('context')
        except IndexError:
            message += res.get('message')
    else:
        # Finished job: answer from the cached row.
        message += db_res[4] + ' ' + PROVIDER.get(db_res[3], 'Default') + '\n' + db_res[7] + ' ' + db_res[5]
    # TODO: Kuaidi100's SF (shunfeng) endpoint is broken; switch to the mobile API.
    if message == u'非法访问:IP禁止访问':
        message = '''由于快递100的免费版接口存在每IP每日最高2000查询的限制,目前已经超过此限制。 因此您此次的查询被取消。\n 建议稍后尝试,或者按照 https://github.com/BennyThink/ExpressBot 部署自己的机器人'''
    return message
def find(self, key):
    """Look up a row by `key` and return it loaded into the model, or
    None when no row matches."""
    rows = db.select(self.table).condition('key', key).execute()
    if not rows:
        return None
    return self.load(rows[0], self.model)
def find_all(cls, *args):
    """Fetch every row of the class table as model instances."""
    rows = db.select('select * from `%s`' % cls.__table__)
    return [cls(**row) for row in rows]
def find_all(cls, *args):
    """Select all rows of the table and return them as a list of model
    instances."""
    L = db.select('select * from `%s`' % cls.__table__)
    return [cls(**d) for d in L]
db_conf = params['db'] connection_string = f'dbname={db_conf["dbname"]} user={db_conf["user"]}' with psycopg2.connect(connection_string) as conn: ======= dbp = params['db'] with psycopg2.connect(dbp['connection_string']) as conn: >>>>>>> 7ccf25c06a5f51c36df8b79b70c79a45014bb9ed conn.autocommit = True with conn.cursor() as cur: l = tables.LearnerData('ld') a = tables.ActorData('ad') <<<<<<< HEAD s = db.select([l.train_num, l.loss, a.ep_reward]).\ frm(l).\ join(a).on(f'{l.train_num}={a.train_num}').\ where(f'{l.param_set_id}=0 {a.actor_id}=0').\ order_by([l.train_num, l.timestamp, a.timestamp]) df = pd.read_sql(s.sql(), conn) plt.figure() df.plot(x='train_num') ======= s = db.select([l.train_num, a.ep_reward]).\ frm(l).\ join(a).on(f'{l.train_num}={a.train_num}').\ where(f'{l.param_set_id}=2 AND {a.actor_id}=2').\ order_by([l.train_num, l.timestamp, a.timestamp]) df1 = pd.read_sql(s.sql(), conn) s = db.select([l.train_num, a.ep_reward]).\
def get_notes():
    """Return all non-deleted notes as {'data': rows}."""
    sql = db.select("*", 'note', 'deleted = 0')
    rows = db.run_query(sql, default_params, select=True)
    return {'data': rows}
def get_categories():
    """Return every category row as {'data': rows}."""
    sql = db.select("*", 'category')
    rows = db.run_query(sql, default_params, select=True)
    return {'data': rows}
def count_all(cls):
    """Execute `select count(pk) from table` and return the result.

    NOTE(review): db.select appears to return a row list elsewhere in
    this codebase, not a bare number as the original docstring claimed —
    verify what callers expect.
    """
    return db.select('select count(`%s`) from `%s`' % (cls.__primary_key__.name, cls.__table__))
def find_by(cls, where, *args):
    """Query rows matching the given where clause and return them as a
    list of model instances."""
    # FIX: the query read 'select *from' (missing space); normalized to
    # 'select * from' to match the sibling finders.
    L = db.select('select * from `%s` %s' % (cls.__table__, where), *args)
    return [cls(**d) for d in L]
def get_kpk_history(database, kpk):
    """All historical values of the given kpk, newest period first."""
    # NOTE: kpk is interpolated straight into the SQL string; callers
    # must ensure it is trusted/sanitized.
    sql = f"SELECT * FROM kpk WHERE kpk='{kpk}' ORDER by period DESC"
    return select(database, sql, howmany='many', grup_on=False)
def get_eid_by_name(database, name):
    """Look up eid rows whose normalized name contains `name`."""
    upname = grup(name)
    # NOTE: upname is interpolated straight into the SQL string; callers
    # must ensure it is trusted/sanitized.
    sql = f"SELECT * from eid WHERE grup(eidp) like '%{upname}%'"
    return select(database, sql, howmany='many', grup_on=True)
def ahp_score(**kwargs):
    """Fetch AHP result rows from the 'result' table, filtered by the
    given keyword arguments."""
    return database.select("result", **kwargs)
def find_all(cls, *args):
    """Usage: Class.find_all() — fetch every row as model instances."""
    rows = db.select('select * from %s' % cls.__table__)
    return [cls(**row) for row in rows]
def find_by(cls, where='', *args):
    """Usage: Class.find_by(where) — return all matching rows as a list
    of model instances."""
    rows = db.select('select * from %s %s' % (cls.__table__, where), *args)
    return [cls(**row) for row in rows]
def users():
    """Render the user list page, ordered by id."""
    rows = db.select('users', order=('id',))
    cols = ('id', 'name', 'active', 'admin', 'properties')
    return render_template('users.html', rows=rows, columns=cols)
def get_note(note_id):
    """Return the non-deleted note with the given id as {'data': rows}.

    Raises ValueError if note_id is not an integer string.
    """
    # SECURITY FIX: note_id comes from the caller and was concatenated
    # directly into SQL; coercing through int() blocks injection while
    # keeping the same query text for valid ids.
    safe_id = str(int(note_id))
    query = db.select("*", 'note', 'deleted = 0 and id = ' + safe_id)
    result = db.run_query(query, default_params, select=True)
    return {'data': result}
def find_all(cls, *args):
    """Return every row of the table as a list of model instances."""
    result = db.select('select * from `%s`' % cls.__table__)
    return [cls(**row) for row in result]
def server():
    """Server list page; requires an active session."""
    if not session:
        return redirect('/login/')
    result = select('server', filed3)
    return render_template('server.html', users=session, result=result['msg'])
def idc():
    """IDC list page; requires an active session."""
    if not session:
        return redirect('/login/')
    # FIX: removed the unused `info` dict built from request.form — it
    # was never read.  (If POST handling was intended here, it was
    # incomplete; see the sibling views, which don't read the form.)
    res = select('idc', filed1)
    return render_template('idc.html', users=session, result=res['msg'])
def cabinet():
    """Cabinet list page; requires an active session."""
    if not session:
        return redirect('/login/')
    result = select('cabinet', filed2)
    return render_template('cabinet.html', users=session, result=result['msg'])
def find_all(cls):
    # return db.select('select * from `%s`' % cls.__table__)
    # Returning db.select directly would yield a list of dicts, not table
    # objects, so each row dict is expanded into a cls instance instead.
    l = db.select('select * from `%s`' % cls.__table__)
    return [cls(**d) for d in l]  # a list of model objects
def find_all(cls, *args):
    """Fetch all rows of the table and return them as model instances."""
    records = db.select('select * from %s' % cls.__table__)
    return [cls(**rec) for rec in records]
import db
from processing import TermClassifier

# k-NN neighbour counts to evaluate; `rank` is passed to TermClassifier
# (presumably the term-vector dimensionality — confirm in processing).
neighbors = [1, 2, 4, 5, 10]
rank = 300

db.connect()
# Tweet texts from the 2016-12-18 .. 2017-01-07 window.
tweets = db.select(
    "SELECT clean_text FROM Tweets WHERE date BETWEEN '2016-12-18' AND '2017-01-07'"
)
# Flatten the single-column rows into a plain list of strings.
tweets_ = []
for t in tweets:
    tweets_.append(t[0])

tc = TermClassifier(tweets_, rank)
for n in neighbors:
    print('\n\nNeighbors: ', n)
    print('Waiting... :/ Be patient ;)')
    tc.classify_terms(n)
def find_by(cls, where, *args):
    """Return the rows matching `where` as a list of model instances."""
    result = db.select('select * from `%s` %s' % (cls.__table__, where), *args)
    return [cls(**row) for row in result]
import matplotlib.pyplot as plt from datetime import datetime, timedelta from random import randint from db import select, select old sql = ''' select id from bulletins ''' result_sql = select_old(sql)['id'] sql = f''' select sum* from bulletins where id not in [{result_sql}] ''' result_sql = select(sql)['id'] graph = { } delta = 0 for row in result_sql: qty = graph.get(row['date_time']) or 0 qty += 1 graph[row['date_time']] = qty fig, ax = plt.subplots() ax.plot(graph.keys(), graph.values(), 'o-b') ax.set_xlabel('Время', fontsize=15
def find_by(cls, where, *args):
    """Select rows matching the given where clause and wrap each one in
    a cls instance."""
    matched = db.select('select * from `%s` %s' % (cls.__table__, where), *args)
    return [cls(**row) for row in matched]
def category_count(self, category_id):
    """Count published posts in the given category."""
    query = db.select(self.table)
    query = query.fields(db.expr('count(*)', 'total'))
    query = query.condition('category', category_id)
    query = query.condition('status', 'published')
    return query.execute()[0][0]
def find_by(cls, where, *args):
    """Run a conditional query using the given where clause and return
    the results as a list of model instances."""
    L = db.select('select * from `%s` %s' % (cls.__table__, where), *args)
    return [cls(**d) for d in L]
def count(self, status=None):
    """Count rows in the table, optionally restricted to a status."""
    query = db.select(self.table).fields(db.expr('COUNT(*)'))
    if status:
        query.condition('status', status)
    return query.execute()[0][0]