def admin():
    """Render the admin dashboard (API keys + applications) for admin users.

    Non-admins are shown the 'gitout' page. The key list is sortable via the
    ``sort`` query parameter; unknown values (including the default ``age``)
    fall back to ascending creation time.
    """
    user = session['user']
    if user['id'] not in config['admins']:
        return render_template('gitout.html')
    sort = request.args.get('sort', 'age')
    # Dispatch table replaces the original six-branch if/elif chain; every
    # branch differed only in the ordering term passed to order_by.
    orderings = {
        'age_asc': r.asc('creation_time'),
        'age_desc': r.desc('creation_time'),
        'usage_asc': r.asc('total_usage'),
        'usage_desc': r.desc('total_usage'),
        'accept_asc': r.asc('acceptance_time'),
        'accept_desc': r.desc('acceptance_time'),
    }
    # Default (covers the bare 'age' value too) is ascending creation time.
    ordering = orderings.get(sort, r.asc('creation_time'))
    keys = r.table('keys').order_by(ordering).run(get_db())
    apps = r.table('applications').order_by('time').run(get_db())
    return render_template('admin.html', name=user['username'], apps=apps,
                           keys=keys, sort=sort)
def get_key_frames(self, weighted_nodes, page, include_total_pages=False):
    """
    Fetch the keyframe metadata for the given concepts from the database.

    :param weighted_nodes: the given concepts with their weights
    :param page: the page to query
    :param include_total_pages: when True, also compute the total page count
    :return: keyframe metadata (and the total page count, or None)
    """
    total_pages = None
    nodes = weighted_nodes.map(lambda item: item[0])
    weighted_nodes_dict = weighted_nodes.to_dict()
    if include_total_pages:
        # Count distinct keyframes matching any requested concept to derive
        # the number of result pages.
        total_count = (
            r.table('key_frame_predictions')
            .get_all(r.args(nodes), index='node')
            .group('key_frame_id')
            .ungroup()
            .count()
            .run(r.connect(self.db_host, self.db_port, 'vor'))
        )
        total_pages = math.ceil(float(total_count)/PAGE_LENGTH)
    key_frames = (
        r.table('key_frame_predictions')
        .get_all(r.args(nodes), index='node')  # fetch all keyframe predictions assigned to one of the given concepts
        .map(lambda row: {
            'key_frame_id': row['key_frame_id'],
            'weighted_score': r.expr(weighted_nodes_dict)[row['node']].mul(row['score'].coerce_to('number'))  # multiply the given concept weight by the keyframe's score
        })
        .group('key_frame_id').reduce(lambda left, right: {  # group by keyframe
            'weighted_score': left['weighted_score'].add(right['weighted_score'])  # sum the scores across multiple concepts
        })
        .ungroup()
        .map(lambda row: {
            'key_frame_id': row['group'],
            'weighted_score_sum': row['reduction']['weighted_score']
        })
        .order_by(r.desc('weighted_score_sum'))  # sort descending
        .slice(*KeyFrameRepository.__pagination(page))  # skip to the requested page
        .eq_join('key_frame_id', r.table('key_frames'))  # pull the metadata from the keyframe table
        .without({'right': 'id'})
        .zip()
        .order_by(r.desc('weighted_score_sum'))  # sort again, since the join changes the order
        .run(r.connect(self.db_host, self.db_port, 'vor'))
    )
    return key_frames, total_pages
def home(page):
    """Render the paginated post index; shows the user's name when logged in.

    :param page: zero-based page number; the ``count`` template variable is
        set to None when there is no further page.
    """
    connection = r.connect('localhost', 28015)
    name = ''
    logout = ''
    try:
        c = r.db('hackjobs').table('post').count().run(connection)
        skip_no = PAGE_LIMIT * page
        result = list(
            r.db('hackjobs').table('post').order_by(
                index=r.desc('time')).run(connection))
        # Use PAGE_LIMIT instead of the magic number 30 so the last-page
        # check stays in sync with the configured page size.
        if skip_no + PAGE_LIMIT >= c:
            page = None
        if session.get('id', None):
            count = r.db('hackjobs').table('user').filter(
                r.row['id'] == session['id']).count().run(connection)
            # BUG FIX: was `count > 1`, which skipped the normal case of
            # exactly one user matching the (unique) session id.
            if count >= 1:
                user = list(
                    r.db('hackjobs').table('user').filter(
                        r.row['id'] == session['id']).run(connection))
                name = user[0].get('name', '')
                logout = '(logout)'
    finally:
        # Close the connection even when a query raises (was leaked before).
        connection.close()
    return render_template('index.html', results=result, count=page,
                           name=name, logout=logout)
def history(
        self, method=None, hid=None, time=None, start=None,
        limit=None, rdb=None):
    ''' This will pull a monitors history from rethinkDB

    method: "mon-history"    -> paged list of this cid's monitor events with
                                starttime >= time, newest first
            "detail-history" -> one monitor event (by hid) with the reaction
                                events sharing its starttime and zone
            "count"          -> number of monitor events with starttime >= time
    hid:    history document id (detail-history only)
    time:   epoch lower bound on starttime (mon-history / count)
    start, limit: paging controls (mon-history only)
    rdb:    open rethinkdb connection
    Returns a list (or an int for "count"); False for unknown methods.
    '''
    retdata = False  # default result for unrecognized method values
    if method == "mon-history":
        retdata = []
        monitors = r.table('history').filter(
            (r.row['cid'] == self.cid) &
            (r.row['starttime'] >= time) &
            (r.row['type'] == "monitor")).order_by(
                r.desc('starttime')).pluck(
                    'starttime', 'id', 'cid', 'zone', 'status',
                    'failcount', 'method',
                    'name').skip(start).limit(limit).run(rdb)
        for mon in monitors:
            # Convert the epoch starttime to a display string.
            mon['starttime'] = datetime.datetime.fromtimestamp(
                mon['starttime']).strftime('%Y-%m-%d %H:%M:%S')
            retdata.append(mon)
    elif method == "detail-history":
        retdata = []
        mon = r.table('history').get(hid).pluck(
            'starttime', 'cid', 'zone', 'status', 'failcount',
            'method', 'name').run(rdb)
        mon['reactions'] = []
        # Reactions are correlated with the monitor event by identical
        # starttime and zone (there is no direct foreign key).
        reactions = r.table('history').filter(
            (r.row['cid'] == self.cid) &
            (r.row['starttime'] == mon['starttime']) &
            (r.row['zone'] == mon['zone']) &
            (r.row['type'] == "reaction")).pluck('name', 'rstatus', 'time',
                                                 'starttime').run(rdb)
        for react in reactions:
            # Convert epoch timestamps to display strings.
            react['starttime'] = datetime.datetime.fromtimestamp(
                react['starttime']).strftime('%Y-%m-%d %H:%M:%S')
            react['time'] = datetime.datetime.fromtimestamp(
                react['time']).strftime('%Y-%m-%d %H:%M:%S')
            mon['reactions'].append(react)
        # Convert the parent event's starttime only after the reactions are
        # matched — they compare against the raw epoch value above.
        mon['starttime'] = datetime.datetime.fromtimestamp(
            mon['starttime']).strftime('%Y-%m-%d %H:%M:%S')
        retdata.append(mon)
    elif method == "count":
        retdata = r.table('history').filter(
            (r.row['cid'] == self.cid) &
            (r.row['starttime'] >= time) &
            (r.row['type'] == "monitor")).count().run(rdb)
    return retdata
def get_latest_with_keyword(cls, search_keyword):
    """Return the most recently pushed record matching *search_keyword*."""
    # get_all cannot be chained with order_by, so the search_keyword index
    # is unusable here; order by the pushed_at index and filter instead.
    newest_first = r.table(cls._TABLE_NAME).order_by(index=r.desc('pushed_at'))
    query = newest_first.filter({'search_keyword': search_keyword})
    return db.util.get_first(query)
def list_public(player):
    """List open public games that *player* is not already seated in,
    newest first (by 'uts')."""
    digest = hex_digest(player)
    is_public = r.row["public"] == True
    has_open_seat = ((r.row["white_md5uuid"] == "_") |
                     (r.row["black_md5uuid"] == "_"))
    not_playing = ((r.row['white_md5uuid'] != digest) &
                   (r.row['black_md5uuid'] != digest))
    return (r.db("chess").table("games")
            .filter(is_public & has_open_seat & not_playing)
            .order_by(r.desc("uts"))
            .run(conn))
def get_latest_with_keyword(cls, search_keyword):
    """Fetch the newest (by pushed_at) row whose search_keyword matches."""
    # A get_all can't be chained with order_by, so the search_keyword index
    # is not usable; order by pushed_at and filter the result instead.
    query = (
        r.table(cls._TABLE_NAME)
        .order_by(index=r.desc('pushed_at'))
        .filter({'search_keyword': search_keyword})
    )
    return db.util.get_first(query)
def list_notes(hostname):
    """Retrieve a list of notes associated with a host. Or given
    {'user': '******', 'note': 'some message'} post a note."""
    if request.method == 'GET':
        try:
            # someday i should probably add offset support here and in the statelog
            limit = request.args.get('limit', 50, type=int)
        except ValueError:
            abort(400)
        notes = list(r.table("notes").filter({"hostname": hostname})
                     .order_by(r.desc("ts")).limit(limit).run(rdb.conn))
        if notes:
            return jsonify({'notes': sorted(notes, key=lambda k: k['ts'])})
        else:
            abort(404)
    elif request.method == 'POST':
        if not request.json:
            abort(400)
        if not request.json.get("user") or not request.json.get("note"):
            abort(400)
        if not r.table("hosts").get_all(hostname, index="hostname").run(rdb.conn):
            abort(404)
        # BUG FIX: the filter key was misspelled "h stname", which matched
        # no documents, so 'alerting' was always recorded as an empty list.
        alerting = [x["check"] for x in r.table("checks").filter(
            {"hostname": hostname, "status": False}).run(rdb.conn)]
        q = r.table("notes").insert({'hostname': hostname,
                                     'user': request.json.get("user"),
                                     'note': request.json.get("note"),
                                     'ts': time(),
                                     'alerting': alerting}).run(rdb.conn)
        if q["inserted"] == 1:
            return jsonify({'success': True})
        else:
            logger.error(q)
            abort(500)
    else:
        abort(400)
def get(self):
    # Render the home page: the nine newest posts (by created_time) plus the
    # Instagram subscription list and the OAuth authorize URL.
    conn = yield self.rethinkdb_conn
    # Pluck only the fields the template needs, newest first.
    posts = yield r.table("posts")\
        .order_by(index=r.desc("created_time"))\
        .pluck(
            {"images":{"low_resolution":{"url":True}}},
            {"user":{"username":True}},
            "created_time",
            "link",
            {"caption":{"text":True}})\
        .limit(9)\
        .run(conn)
    output_posts = []
    # Async cursor protocol: fetch_next()/next() must each be yielded,
    # pulling one document at a time; cap at 9 as a belt-and-braces limit.
    while(yield posts.fetch_next()):
        if len(output_posts) >= 9:
            break;
        p = yield posts.next()
        output_posts.append(p)
    home_template = template_env.get_template("home.html")
    subscriptions_raw = insta_api.list_subscriptions()
    # Keep only the subscribed object ids for the template.
    subscriptions = [x['object_id'] for x in subscriptions_raw['data']]
    self.write(home_template.render(
        auth_url = insta_api.get_authorize_url(),
        posts = output_posts,
        subscriptions = subscriptions
    ))
def getEvents(self, rdb):
    """Return the 50 most recent events for this user, newest first."""
    query = r.table('events').filter({'uid': self.uid})
    query = query.order_by(r.desc('time')).limit(50)
    return query.run(rdb)
def renderScores(nameOfTable, GameName):
    """Render the high-score page: top-10 per-player bests and top-10 of
    all time for the given score table."""
    r.connect().repl()
    try:
        r.db("test").table_create(nameOfTable).run()
    except r.ReqlOpFailedError:
        # Table already exists; nothing to do.
        pass
    best_per_player = r.table(nameOfTable).group("username").max("score").run()
    top_per_player = (r.expr(best_per_player.values())
                      .order_by(r.desc("score")).limit(10).run())
    top_all_time = r.table(nameOfTable).order_by(r.desc("score")).limit(10).run()
    tabifyResult(top_per_player)
    tabifyResult(top_all_time)
    return render_template('bshighScores.html', gameName=GameName,
                           topscoresByPlayerTable=top_per_player,
                           topscoresalltimeTable=top_all_time)
def get_all(self):
    """Render the chat list ordered by most recently updated."""
    chats = r.db(rdb['chatdb']).table('chats').order_by(
        r.desc(lambda doc: doc['meta']['updated_at']))
    selection = list(chats.run(g.rdb_conn))
    # NOTE(review): list() never returns None, so the 404 branch below is
    # effectively unreachable; kept to preserve the original control flow.
    if selection is not None:
        print("Chat.get_all: Retrieved Chat messages from DB: {}".format(selection))
        return render_template('chat/chatlist.html', results=selection)
    else:
        return "Not Found", 404
def get(self):
    # list teams a user is (or was) a part of
    if not self.user_data:
        return self.redirect_login()
    # For every code-jam team containing this user, merge in:
    #  - "members": the full user documents, each annotated with the
    #    gitlab_username looked up in code_jam_participants
    #  - "jam": the full code_jams document for the team's jam
    # Results are ordered by descending jam number.
    query = self.db.query("code_jam_teams").filter(lambda team: team[
        "members"].contains(self.user_data["user_id"])).merge(
        lambda team: {
            "members": self.db.query("users").filter(lambda user: team[
                "members"].contains(user["user_id"])).merge(
                lambda user: {
                    "gitlab_username":
                        self.db.query("code_jam_participants").filter({
                            "id": user["user_id"]
                        }).coerce_to("array")[0]["gitlab_username"]
                }).coerce_to("array"),
            "jam": self.db.query("code_jams").get(team["jam"])
        }).order_by(rethinkdb.desc("jam.number"))
    teams = self.db.run(query)
    return self.render("main/jams/team_list.html", user_teams=True,
                       teams=teams)
def get_peak_records(self, account_year_pairs):
    """Return the 50 highest-demand readings for the given
    (account, year) pairs, descending by demand."""
    def _shape(record):
        # Project just the fields the report needs.
        return {'account_id': record['account_id'],
                'readingdateutc': record['readingdateutc'],
                'demand': record['energy']['demand'],
                'weather': record['weather']}
    query = (self.table
             .get_all(*account_year_pairs, index='peak_report')
             .map(_shape)
             .order_by(r.desc('demand'))
             .limit(50))
    return self.uow.run_list(query)
def get_filter(db_name, table_name, query, limit=None, order_by=None,
               sort_order='desc'):
    """Run a filtered query against ``db_name.table_name`` and return a list.

    :param query: ReQL filter predicate or match document
    :param limit: optional row cap; ordering is only applied when set
        (preserving the original behavior)
    :param order_by: optional field to sort by
    :param sort_order: ascending when it contains 'asc', else descending
    """
    # Build one selection incrementally instead of four duplicated branches.
    selection = r.db(db_name).table(table_name).filter(query)
    if limit:
        if order_by:
            # Original semantics preserved: substring test on sort_order.
            direction = r.asc if 'asc' in sort_order else r.desc
            selection = selection.order_by(direction(order_by))
        selection = selection.limit(limit)
    # list() replaces the manual for/append accumulation loop.
    return list(selection.run(conn))
def get(self, table):  # pylint: disable=arguments-differ
    """Reply (JSONP) with up to 10000 non-deleted docs from *table*,
    ordered by descending mp3_author, then '_item'."""
    conn = None  # so the finally clause is safe when connect() itself fails
    try:
        conn = yield r.connect(options.rethinkdb_host, 28015,
                               options.rethinkdb_db)
        orderList = [r.desc('mp3_author'), '_item']
        alldocs = yield r.table(table).filter(
            ~r.row.has_fields('_deleted')).order_by(
                *orderList).limit(10000).run(conn)
        # alldocs[:] = [x for x in alldocs if not ValidBook(x)]
        result = {
            'suscess': True,  # (sic) key spelling preserved — clients expect it
            'rows': alldocs,
            'totalcount': len(alldocs),
            'rowcount': len(alldocs)
        }
        replyWithJsonP(self, result)
    except Exception:
        # Reply 404 on any failure and re-raise so the framework logs it.
        self.clear()
        self.set_status(404)
        raise
    finally:
        # BUG FIX: if connect() raised, `conn` was unbound and the finally
        # clause itself raised NameError, masking the original error.
        if conn is not None:
            yield conn.close()
def admin(self):
    """Render the ticket list ordered by most recently updated."""
    tickets = r.db(RDB['ticketsdb']).table('tickets').order_by(
        r.desc(lambda doc: doc['meta']['updated_at']))
    selection = list(tickets.run(g.rdb_conn))
    # NOTE(review): list() never returns None, so the else branch is
    # effectively dead; kept as-is to preserve behavior.
    if selection is not None:
        print(selection)
        return render_template('tickets/ticketslist.html', results=selection)
    else:
        return "Not Found", 404
def load_ebook(self, ebook_id):
    # query returns dict with ebook->versions->formats nested document
    # versions are ordered by popularity
    try:
        ebook = (
            r.table("ebooks")
            .get(ebook_id)
            .merge(
                lambda ebook: {
                    # attach every version of this ebook, best ranking first
                    "versions": r.table("versions")
                    .get_all(ebook["ebook_id"], index="ebook_id")
                    .order_by(r.desc("ranking"))
                    .coerce_to("array")
                    .merge(
                        lambda version: {
                            # attach the formats belonging to each version
                            "formats": r.table("formats")
                            .get_all(version["version_id"], index="version_id")
                            .coerce_to("array")
                        }
                    )
                }
            )
            .run()
        )
    except RqlRuntimeError as e:
        # get() on an unknown id yields null; merging into null raises this
        # specific server error, which we translate into a None return.
        if "Cannot perform merge on a non-object non-sequence `null`" in str(e):
            return None
        else:
            raise e
    return ebook
def get_messages():
    """Return the 25 newest chat messages as JSON, oldest first."""
    conn = r.connect(host='localhost', port=28015, db='chatrethink')
    try:
        messages = r.table('messages').order_by(
            r.desc('added')).limit(25).run(conn)
        # Reverse so the newest message ends up last (chat display order).
        messages = [{'text': message['text'], 'username': message['by']}
                    for message in reversed(list(messages))]
    finally:
        # BUG FIX: the connection was never closed, leaking one database
        # connection per request.
        conn.close()
    return jsonify({'messages': messages})
def claim_sites(self, n=1):
    # Atomically claim up to n ACTIVE sites for this worker.
    #
    # The inner sub-query selects candidate site ids: ACTIVE sites ordered
    # so already-claimed ones sort last, folding over them to count claims
    # per job_id and emitting a site only when it is unclaimed (or its
    # claim is more than an hour stale) and its job is still under
    # max_claimed_sites. The outer update() then marks those sites claimed.
    result = (
        self.rr.table('sites').get_all(
            r.args(
                r.db(self.rr.dbname).table(
                    'sites', read_mode='majority').between(
                        ['ACTIVE', r.minval], ['ACTIVE', r.maxval],
                        index='sites_last_disclaimed').order_by(
                            r.desc('claimed'), 'last_disclaimed').
                fold({}, lambda acc, site: acc.merge(
                        # running per-job claim counts, keyed by job_id
                        r.branch(
                            site.has_fields('job_id'),
                            r.object(
                                site['job_id'].coerce_to('string'), acc[
                                    site['job_id'].coerce_to('string')].
                                default(0).add(1)),
                            {})),
                    emit=lambda acc, site, new_acc: r.branch(
                        r.and_(
                            # unclaimed, or claim older than one hour
                            r.or_(
                                site['claimed'].not_(), site[
                                    'last_claimed'].lt(r.now().sub(60 * 60
                                                                   ))),
                            # job has room for another claimed site
                            r.or_(
                                site.has_fields('max_claimed_sites').not_(
                                ),
                                new_acc[site['job_id'].coerce_to(
                                    'string')].le(site['max_claimed_sites'
                                                       ]))),
                        [site['id']],
                        [])).limit(n))).
        update(
            # try to avoid a race condition resulting in multiple
            # brozzler-workers claiming the same site
            # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
            r.branch(
                r.or_(r.row['claimed'].not_(),
                      r.row['last_claimed'].lt(r.now().sub(60 * 60))),
                {
                    'claimed': True,
                    'last_claimed': r.now()
                },
                {}),
            return_changes=True)).run()
    # Any replaced/unchanged count from 0..n is a valid outcome.
    self._vet_result(result, replaced=list(range(n + 1)),
                     unchanged=list(range(n + 1)))
    sites = []
    for i in range(result["replaced"]):
        if result["changes"][i]["old_val"]["claimed"]:
            # A stale claim was overridden; the previous worker presumably
            # died before disclaiming the site.
            self.logger.warn(
                "re-claimed site that was still marked 'claimed' "
                "because it was last claimed a long time ago "
                "at %s, and presumably some error stopped it from "
                "being disclaimed",
                result["changes"][i]["old_val"]["last_claimed"])
        site = brozzler.Site(self.rr, result["changes"][i]["new_val"])
        sites.append(site)
    if sites:
        return sites
    else:
        raise brozzler.NothingToClaim
def on_get(self, req, resp):
    """Serve a JSON array of recently launched (or otherwise filtered)
    projects, newest first."""
    # Renamed locals: the originals shadowed the builtins filter/limit.
    state = (req.get_param('filter') or 'launched').capitalize()
    count = req.get_param_as_int('limit') or 1
    projects = (r.table('projectsRecently%s' % state)
                .order_by(r.desc('launched_at'))
                .limit(count)
                .run(self.connection))
    resp.body = json.dumps(projects)
def select():
    """Return the top-10 rows ordered by descending 'point'.

    Aborts with 503 when the database is unreachable.
    """
    rdb_conn = None
    try:
        rdb_conn = rdb.connect(host=RDB_HOST, port=RDB_PORT, db=DB_NAME)
        result = rdb.db(DB_NAME).table(TABLE_NAME).order_by(
            index=rdb.desc('point')).limit(10).run(rdb_conn)
        return result
    except RqlDriverError:
        abort(503, "No database connection could be established.")
    finally:
        # BUG FIX: the connection leaked whenever the query raised; close it
        # on every path (guard handles a failed connect()).
        if rdb_conn is not None:
            rdb_conn.close()
def languages_db():
    """Get the languages stored in the database"""
    snapshots = (r.db("indielangs").table("languages")
                 .order_by(r.desc("timestamp")).limit(1).run(DB))
    # Return the newest snapshot, or an empty dict when the table is empty.
    return snapshots[0] if snapshots else {}
def getordered(table, conn, key, index, order, direction=None):
    '''Fetch all results out of the database by Secondary Key and order
    them by non-index

    :param direction: 'desc' for descending; anything else sorts ascending
    '''
    # BUG FIX: the original used `direction is 'desc'`, an identity check
    # that only works when CPython happens to intern the string; use
    # equality instead.
    if direction == 'desc':
        ordering = r.desc(order)
    else:
        ordering = r.asc(order)
    return r.table(table).get_all(key, index=index).order_by(
        ordering).coerce_to('array').run(conn)
def test_sort_1_attr_2_desc(self, conn):
    """order_by(r.desc(field)) sorts by a single attribute, descending."""
    result = r.db('y').table('scores').order_by(r.desc('score')).run(conn)
    expected = [
        {'id': 'bill', 'age': 35, 'score': 78},
        {'id': 'joe', 'age': 26, 'score': 60},
        {'id': 'todd', 'age': 52, 'score': 15},
    ]
    assertEqual(expected, list(result))
def get_index():
    """Render the ten strongest quakes (by properties.mag), descending."""
    conn = r.connect()
    strongest = (r.table("quakes")
                 .order_by(r.desc(r.row["properties"]["mag"]))
                 .limit(10)
                 .run(conn))
    quakes = list(strongest)
    conn.close()
    return flask.render_template("quakes.html", quakes=quakes)
def get_all_hypervisor_status(hyp_id, start=None, end=None):
    """Return status rows for a hypervisor, newest first ('when' descending).

    :param start: optional inclusive lower bound on the 'when' field
    :param end: optional inclusive upper bound on the 'when' field
    """
    r_conn = new_rethink_connection()
    rtable = r.table('hypervisors_status')
    selection = rtable.filter({'hyp_id': hyp_id})
    # BUG FIX: the original combined both bounds with Python's `and` inside
    # a ReQL lambda; `and` forces bool() on a ReQL term and does not build a
    # server-side conjunction. Chained filters apply each bound correctly
    # (and also remove the four duplicated query branches).
    if start:
        selection = selection.filter(lambda s: start <= s['when'])
    if end:
        selection = selection.filter(lambda s: s['when'] <= end)
    results = selection.order_by(r.desc('when')).run(r_conn)
    results = list(results)
    close_rethink_connection(r_conn)
    return results
def players_by_score(cls):
    """Players who beat within the last minute, ordered by descending score."""
    cutoff = datetime.datetime.now() - datetime.timedelta(minutes=1)
    recent = cls.query(rdb.row['beat'] > pytz.utc.localize(cutoff))
    return recent.order_by(rdb.desc('score')).fetch()
def latest():
    # Return, as a JSON string, the per-milestone section of the newest
    # stats report, restricted to the configured MILESTONES and stamped
    # with the parent report's datetime.
    last_report = r.table(STATS_TABLE).order_by(r.desc('datetime'))[0]
    selection = last_report['by_milestone'].filter(
        # keep only tracked milestones (is_in_array is a project helper —
        # presumably a ReQL membership test; verify at its definition)
        lambda report_by_m: is_in_array(report_by_m['milestone'], MILESTONES)
    ).map(
        # attach the parent report's timestamp to each milestone entry
        lambda filtered_report: filtered_report.merge({'datetime': last_report['datetime']})
    ).run(g.rdb_conn)
    return json.dumps(selection)
def get_lastest_price_data(self, currency):
    """Return the 75 most recent ticker rows for *currency*, newest first.

    (Method name typo 'lastest' preserved for caller compatibility.)
    """
    prices = (
        r.db('bitfinex')
        .table('tickers')
        # Filter before sorting: same rows in the same order as the original
        # sort-then-filter, but the server only sorts the matching currency's
        # documents instead of the entire table.
        .filter({'currency': currency.upper()})
        .order_by(r.desc('timestamp'))
        .limit(75)
        .run()
    )
    return prices
def get(self, ds_id):
    """Render a dataset's page together with its visualizations,
    newest visualization first."""
    dataset = chocapic(r.table('datasets').get(ds_id).run(db.conn))
    vis_query = (r.table('visualizations')
                 .filter({'dataset_id': ds_id})
                 .order_by(r.desc('added_at')))
    visualizations = cornflakes(list(vis_query.run(db.conn)))
    return render_template('datasets/get.html', dataset=dataset,
                           visualizations=visualizations)
def dashboard():
    # Grading dashboard: shows the user's submission history; on POST,
    # grades the submitted predictions, records the submission, and keeps
    # the leaderboard row at the user's best F1.
    scores = None
    email = session['email']
    history = r.table('submissions')\
        .filter(r.row["email"].eq(email))\
        .order_by(r.desc('timestamp')) \
        .run(g.rdb_conn)
    if request.method == "POST":
        predictions = request.form.get("predictions")
        code = request.form.get("code")
        try:
            scores = grader.grader_text(predictions, validator)
            nyc = pytz.timezone('America/New_York')
            # Submissions are timestamped in New York local time.
            submission = {
                'timestamp': nyc.localize(datetime.now(), is_dst=False),
                'email': email,
                'predictions': predictions,
                'code': code,
                'precision': scores['precision'],
                'recall': scores['recall'],
                'F1': scores['F1']
            }
            inserted = r.table('submissions').insert(submission).run(
                g.rdb_conn)
            ## leaderboard
            best = r.table('leaderboard')\
                .filter(r.row['email'].eq(email))\
                .run(g.rdb_conn)
            name = r.table('users')\
                .filter(r.row['email'].eq(email))\
                .get_field('name').run(g.rdb_conn)
            # NOTE(review): `.items` looks like access to a cursor's internal
            # buffer rather than a public API — confirm against the installed
            # rethinkdb driver version.
            if not best.items:
                # First submission for this user: create the leaderboard row.
                ins = r.table('leaderboard').insert({
                    "name": name.items[0],
                    "email": email,
                    "F1": scores["F1"]
                }).run(g.rdb_conn)
            else:
                best = best.items[0]
                if scores["F1"] > best["F1"]:
                    # update the best score
                    up = r.table('leaderboard')\
                        .filter(r.row["email"].eq(email))\
                        .update({"F1": scores["F1"]}).run(g.rdb_conn)
            if inserted['generated_keys']:
                flash("Submission Successful!", "success")
                return redirect(url_for('dashboard'))
            else:
                flash("Submission Unsuccessful!", "danger")
        except grader.InputFormatError as e:
            flash(e.msg, "danger")
    return render_template("dashboard.html", history=history,
                           page="dashboard", logged_in=True)
def claim_sites(self, n=1):
    # Atomically claim up to n ACTIVE sites for this worker. The inner
    # sub-query picks candidate site ids (per-job claim counts are built
    # with fold; a site is emitted only when unclaimed or stale-claimed and
    # its job is under max_claimed_sites); the outer update marks them
    # claimed and returns the changed documents.
    self.logger.trace('claiming up to %s sites to brozzle', n)
    result = (
        self.rr.table('sites').get_all(r.args(
            r.db(self.rr.dbname).table('sites', read_mode='majority')
            .between(
                ['ACTIVE', r.minval], ['ACTIVE', r.maxval],
                index='sites_last_disclaimed')
            .order_by(r.desc('claimed'), 'last_disclaimed')
            .fold(
                {}, lambda acc, site: acc.merge(
                    # running per-job claim counts, keyed by job_id string
                    r.branch(
                        site.has_fields('job_id'),
                        r.object(
                            site['job_id'].coerce_to('string'),
                            acc[site['job_id'].coerce_to('string')].default(0).add(1)),
                        {})),
                emit=lambda acc, site, new_acc: r.branch(
                    r.and_(
                        # unclaimed, or claim is more than an hour stale
                        r.or_(
                            site['claimed'].not_(),
                            site['last_claimed'].lt(r.now().sub(60*60))),
                        # job still has room for another claimed site
                        r.or_(
                            site.has_fields('max_claimed_sites').not_(),
                            new_acc[site['job_id'].coerce_to('string')].le(site['max_claimed_sites']))),
                    [site['id']], []))
            .limit(n)))
        .update(
            # try to avoid a race condition resulting in multiple
            # brozzler-workers claiming the same site
            # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
            r.branch(
                r.or_(
                    r.row['claimed'].not_(),
                    r.row['last_claimed'].lt(r.now().sub(60*60))),
                {'claimed': True, 'last_claimed': r.now()}, {}),
            return_changes=True)).run()
    # Any replaced/unchanged count from 0..n is a valid outcome.
    self._vet_result(
        result, replaced=list(range(n+1)),
        unchanged=list(range(n+1)))
    sites = []
    for i in range(result["replaced"]):
        if result["changes"][i]["old_val"]["claimed"]:
            # A stale claim was overridden; the previous worker presumably
            # died before disclaiming the site.
            self.logger.warn(
                "re-claimed site that was still marked 'claimed' "
                "because it was last claimed a long time ago "
                "at %s, and presumably some error stopped it from "
                "being disclaimed",
                result["changes"][i]["old_val"]["last_claimed"])
        site = brozzler.Site(self.rr, result["changes"][i]["new_val"])
        sites.append(site)
    self.logger.debug('claimed %s sites', len(sites))
    if sites:
        return sites
    else:
        raise brozzler.NothingToClaim
def get(self, params, user_id, infraction_type):
    """Return the newest active infraction of *infraction_type* for the
    given user (or None) as JSON."""
    params = params or {}
    expand = parse_bool(params.get("expand"), default=False)
    query_filter = {"user_id": user_id, "type": infraction_type}
    base = _merged_query(self, expand, query_filter)
    newest_active = (base
                     .filter({"active": True})
                     .order_by(rethinkdb.desc("data"))
                     .limit(1)
                     .nth(0)
                     .default(None))
    return jsonify({"infraction": self.db.run(newest_active)})
def get_all_domain_status(name, start=None, stop=None, history=False):
    """Return status rows for a domain, newest first ('when' descending).

    :param name: domain name to match
    :param start: optional inclusive lower bound on the 'when' field
    :param stop: optional inclusive upper bound on the 'when' field
    :param history: query the history table instead of the live one
    """
    r_conn = new_rethink_connection()
    table = "domains_status" if not history else "domains_status_history"
    selection = r.table(table).filter({'name': name})
    # BUG FIX: the original combined both bounds with Python's `and` inside
    # a ReQL lambda; `and` forces bool() on a ReQL term and does not build a
    # server-side conjunction. Chained filters apply each bound correctly
    # (and remove the four duplicated branches).
    if start:
        selection = selection.filter(lambda s: start <= s['when'])
    if stop:
        selection = selection.filter(lambda s: s['when'] <= stop)
    results = selection.order_by(r.desc('when')).run(r_conn)
    results = list(results)
    close_rethink_connection(r_conn)
    return results
async def task_all(self, filter=None):
    """Fetch every task ordered by createdAt, newest first.

    Requires Python 3.6+. (The original note says "Async Generator", but
    this returns whatever ``self.run`` yields for the query — verify
    cursor vs. list against the driver in use.)
    """
    query = r.table("tasks").order_by(r.desc("createdAt"))
    return await self.run(query)
async def old_messages_handler(request):
    """Return the five most recent messages as a JSON array."""
    recent = await (r.table("messages")
                    .order_by(r.desc('time'))
                    .limit(5)
                    .run(connection))
    return web.Response(text=json.dumps(recent, default=json_serial))
def list(self, user_id: str, agent=0):
    """List conversations, newest first: all of them when *agent* is
    truthy, otherwise only the given user's, each annotated with
    date/unread/updated summary fields."""
    if int(agent):
        query = r.table('conversation').order_by(r.desc('stampdate'))
    else:
        query = r.table('conversation').filter(
            {'user_id': user_id}).order_by(r.desc('stampdate'))
    rows = list(query.run(db.c()))
    for row in rows:
        row['date'] = arrow.get(row.get('stampdate')).datetime
        summary = self.summary(user_id=user_id,
                               conversation_id=row['conversation_id'])
        row['unread'] = summary['unread']
        row['updated'] = summary['updated']
    return rows
def query(etype): print "query", etype t = time.time() if rdb == None: return flask.jsonify({'error': 'No DB', 't': t, 'records': []}) args = request.args id = args.get("id", None) #if id != None: # recs = rdb.table(etype).filter({'id': id}).run(conn) # obj = recs.next() # return flask.jsonify(obj) tMin = args.get("tMin", None) limit = args.get("limit", None) if limit != None: limit = int(limit) if tMin != None: tMin = float(tMin) try: q = rdb.table(etype) if id != None: q = q.filter({'id': id}) if tMin != None: q = q.filter(rdb.row["t"].gt(tMin)) q = q.order_by(rdb.desc('t')) if limit != None: q = q.limit(limit) print q recs = q.run(conn) except: traceback.print_exc() return """ try: if tMin != None: if limit == None: recs = rdb.table(etype).filter(rdb.row["t"].gt(tMin)).order_by( rdb.desc('t')).run(conn) else: recs = rdb.table(etype).filter(rdb.row["t"].gt(tMin)).order_by( rdb.desc('t')).limit(limit).run(conn) else: if limit == None: recs = rdb.table(etype).order_by(rdb.desc('t')).run(conn) else: recs = rdb.table(etype).order_by(rdb.desc('t')).limit(limit).run(conn) except: traceback.print_exc() return """ #print "recs:", recs #items = [x for x in recs] items = list(recs) obj = {'type': etype, 't' : t, 'records': items} return flask.jsonify(obj)
def rfid_to_user(self, rfid):
    """Resolve an RFID tag to its most recent user record, or None on any
    failure (no match, DB error, ...)."""
    conn = yield self.connection()
    try:
        rows = yield r.table('rfid').get_all(rfid, index='rfid') \
            .order_by(r.desc('showdate')).limit(1).run(conn)
        return self.userdata_to_obj(rows[0])
    except:
        # Broad catch is deliberate: log and fall back to None rather than
        # propagating lookup failures to the caller.
        self.logger.exception("Unable to get user for "
                              "rfid: {}".format(rfid))
        return None
def recent_builds(n=10, show_all=False):
    """Return the *n* most recently uploaded builds.

    When show_all is False, only the newest build per package is considered.
    """
    with DB.connect() as conn:
        query = r.table("builds")
        if not show_all:
            # Collapse to the latest build of each package.
            query = query.group("pkg").max("time_upload").ungroup()["reduction"]
        newest_first = query.order_by(r.desc("time_upload")).limit(n)
        return list(newest_first.run(conn))
def test_sort_multi_1_desc_2(self, conn):
    """Multi-key sort: descending on the first field, ascending on the
    second."""
    result = r.db('y').table('scores').order_by(
        r.desc('age'), 'score').run(conn)
    expected = [
        {'id': 'todd', 'age': 52, 'score': 15},
        {'id': 'pale', 'age': 52, 'score': 30},
        {'id': 'bill', 'age': 35, 'score': 78},
        {'id': 'glen', 'age': 26, 'score': 15},
        {'id': 'joe', 'age': 26, 'score': 60}
    ]
    assertEqual(expected, list(result))
def get_last_messages(request, event_id):
    """ List all messages given a certain event """
    payload = {STATUS: None, DATA: None, MESSAGE: None}
    table_name = "event_" + str(event_id)
    payload[STATUS] = SUCCESS
    payload[DATA] = (r.table(table_name)
                     .order_by(r.desc('message_id'))
                     .limit(LAST_MESSAGES)
                     .run(CONN))
    return Response(payload, status=status.HTTP_200_OK)
def dashboard():
    # Grading dashboard: shows submission history for the logged-in user;
    # on POST, grades the predictions, stores the submission, and keeps the
    # leaderboard row at the user's best F1 score.
    scores = None
    email = session['email']
    history = r.table('submissions')\
        .filter(r.row["email"].eq(email))\
        .order_by(r.desc('timestamp')) \
        .run(g.rdb_conn)
    if request.method == "POST":
        predictions = request.form.get("predictions")
        code = request.form.get("code")
        try:
            scores = grader.grader_text(predictions, validator)
            nyc = pytz.timezone('America/New_York')
            # Timestamps are recorded in New York local time.
            submission = {
                'timestamp': nyc.localize(datetime.now(), is_dst=False),
                'email': email,
                'predictions': predictions,
                'code': code,
                'precision': scores['precision'],
                'recall': scores['recall'],
                'F1': scores['F1']
            }
            inserted = r.table('submissions').insert(submission).run(g.rdb_conn)
            ## leaderboard
            best = r.table('leaderboard')\
                .filter(r.row['email'].eq(email))\
                .run(g.rdb_conn)
            name = r.table('users')\
                .filter(r.row['email'].eq(email))\
                .get_field('name').run(g.rdb_conn)
            # NOTE(review): `.items` appears to reach into the cursor's
            # internal buffer — confirm against the rethinkdb driver version.
            if not best.items:
                # No leaderboard row yet: create one for this user.
                ins = r.table('leaderboard').insert({
                    "name": name.items[0],
                    "email": email,
                    "F1": scores["F1"]
                }).run(g.rdb_conn)
            else:
                best = best.items[0]
                if scores["F1"] > best["F1"]:
                    # update the best score
                    up = r.table('leaderboard')\
                        .filter(r.row["email"].eq(email))\
                        .update({"F1": scores["F1"]}).run(g.rdb_conn)
            if inserted['generated_keys']:
                flash("Submission Successful!", "success")
                return redirect(url_for('dashboard'))
            else:
                flash("Submission Unsuccessful!", "danger")
        except grader.InputFormatError as e:
            flash(e.msg, "danger")
    return render_template("dashboard.html", history=history,
                           page="dashboard", logged_in=True)
def list_movements(page):
    """Return one page (50 rows) of train movements as JSON, newest first.

    The optional ``type`` query parameter filters by movement type.
    """
    conn = getattr(g, 'db_conn')
    movements = r.table('train_movements').order_by(
        r.desc('actual_timestamp'))
    requested_type = request.args.get('type', '')
    if requested_type:
        movements = movements.filter({'type': requested_type})
    page_size = 50
    paged = movements.skip(page * page_size).limit(page_size)
    rows = list(paged.run(conn))
    return json.dumps(rows, default=json_formats.date_handler)
def apply_query_parameter_sorts(cls, q, query_parameters):
    """Apply the requested sorts to query *q*; defaults to sorting by id.

    BUG FIX: the original called order_by once per sort inside the loop.
    Each ReQL order_by re-sorts the whole sequence, so only the last sort
    term actually took effect. All terms are now passed to a single
    order_by so earlier fields act as the primary sort keys.
    """
    if len(query_parameters.sort) == 0:
        return q.order_by("id")
    terms = []
    for sort in query_parameters.sort:
        if sort["dir"] == "desc":
            terms.append(rethinkdb.desc(sort["field"]))
        else:
            terms.append(rethinkdb.asc(sort["field"]))
    return q.order_by(*terms)
def get_messagage_history(request, event_id, offset):
    """ Get last 10 messages from index """
    # Page backwards through an event's messages: skip the LAST_MESSAGES
    # most recent plus `offset` earlier pages of LAST_MESSAGES_OFFSET each.
    payload = {STATUS: None, DATA: None, MESSAGE: None}
    table_name = "event_" + str(event_id)
    skip_offset = LAST_MESSAGES + LAST_MESSAGES_OFFSET * int(offset)
    payload[STATUS] = SUCCESS
    payload[DATA] = (r.table(table_name)
                     .order_by(r.desc('message_id'))
                     .skip(skip_offset)
                     .limit(LAST_MESSAGES_OFFSET)
                     .run(CONN))
    return Response(payload, status=status.HTTP_200_OK)
def get_all(self):
    """Render every chat, most recently updated first."""
    by_update_time = r.db(rdb['chatdb']).table('chats').order_by(
        r.desc(lambda doc: doc['meta']['updated_at']))
    selection = list(by_update_time.run(g.rdb_conn))
    # NOTE(review): list() cannot return None, so the 404 path below never
    # triggers; preserved unchanged to keep behavior identical.
    if selection is not None:
        print("Chat.get_all: Retrieved Chat messages from DB: {}".format(
            selection))
        return render_template('chat/chatlist.html', results=selection)
    else:
        return "Not Found", 404
def get_top_for_board(board, limit=10):
    """Return up to *limit* Player objects for *board*, highest score first."""
    # Filter before sorting: same rows in the same order as the original
    # sort-then-filter, but the server only sorts this board's players.
    # (ReQL order_by without an index materializes the sequence in memory,
    # so sorting the whole table first was needlessly expensive.)
    rows = (Player.get_table()
            .filter({'board': board.id})
            .order_by(r.desc('score'))
            .limit(limit)
            .run(_get_conn()))
    top_players = []
    for row in rows:
        player = Player(board=board)
        player.apply_rowdata(row)
        top_players.append(player)
    return top_players