def manage(self, type, id):
    """Render the admin management page for the given entity type.

    :param type: either 'article' (list all articles) or 'comments'
        (manage comments of one article).  NOTE: the parameter names
        shadow the ``type``/``id`` builtins but are kept for caller
        compatibility.
    :param id: article primary key, used only when ``type == 'comments'``.
    """
    if type == 'article':
        # Newest articles first for the management listing.
        c.articles = Session.query(Article).order_by(desc(Article.pub_date)).all()
        return render('/admin/manage_news.html')
    elif type == 'comments':
        # Filtering by primary key yields at most one row, so the
        # previous order_by(desc(pub_date)) was redundant and has been
        # dropped; .one() raises if the id is unknown.
        c.article = Session.query(Article).filter_by(id=id).one()
        return render('/admin/manage_comments.html')
def __get_result(model, flight_field, **kw):
    """Build a ranked per-``model`` aggregation of flights.

    Groups flights by ``flight_field``, counts them and sums their
    index score (optionally restricted to a single year), then joins
    the aggregate back to ``model`` with a rank ordered by total score.

    :raises HTTPBadRequest: if the ``year`` keyword is not an integer.
    """
    subq = (
        DBSession.query(
            getattr(Flight, flight_field),
            func.count("*").label("count"),
            func.sum(Flight.index_score).label("total"),
        )
        .group_by(getattr(Flight, flight_field))
        .outerjoin(Flight.model)
    )

    if "year" in kw:
        try:
            year = int(kw["year"])
        except (TypeError, ValueError):
            # BUG FIX: narrowed from a bare ``except`` so unrelated
            # exceptions (e.g. KeyboardInterrupt) are not turned into
            # a 400 response.
            raise HTTPBadRequest

        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        subq = subq.filter(Flight.date_local >= year_start).filter(
            Flight.date_local <= year_end
        )

    subq = subq.subquery()

    result = DBSession.query(
        model,
        subq.c.count,
        subq.c.total,
        over(func.rank(), order_by=desc("total")).label("rank"),
    ).join((subq, getattr(subq.c, flight_field) == model.id))

    result = result.order_by(desc("total"))
    return result
def top_last_month(cls, key, ids=None, num=None):
    """Aggregate a listing of the top items (by pageviews) last month.

    We use the last month because it's guaranteed to be fully computed
    and therefore will be more meaningful.
    """
    # First day of the current month, stepped back one month to get the
    # most recent fully-aggregated period.
    cur_month = datetime.date.today().replace(day=1)
    last_month = decrement_month(cur_month)
    q = (
        Session.query(cls)
        .filter(cls.date == last_month)
        .filter(cls.interval == "month")
        .order_by(desc(cls.date), desc(cls.pageview_count))
    )
    if ids:
        # Restrict to the caller-supplied identifiers (no row limit).
        q = q.filter(getattr(cls, key).in_(ids))
    else:
        # Unrestricted listing: cap the result size (default 55).
        num = num or 55
        q = q.limit(num)
    # Each result: (key value, (uniques, pageviews)).
    return [(getattr(r, key), (r.unique_count, r.pageview_count)) for r in q.all()]
def top_maps(self):
    """Returns the raw data shared by all renderers."""
    try:
        # Rank maps on this server by how often they were played within
        # the configured lifetime window.
        top_maps_q = DBSession.query(
            fg.row_number().over(order_by=expr.desc(func.count())).label("rank"),
            Game.map_id,
            Map.name,
            func.count().label("times_played"))\
            .filter(Map.map_id == Game.map_id)\
            .filter(Game.server_id == self.server_id)\
            .filter(Game.create_dt > (self.now - timedelta(days=self.lifetime)))\
            .group_by(Game.map_id)\
            .group_by(Map.name)\
            .order_by(expr.desc(func.count()))

        # Optional pagination: skip past already-shown rows, then cap.
        if self.last:
            top_maps_q = top_maps_q.offset(self.last)
        if self.limit:
            top_maps_q = top_maps_q.limit(self.limit)
        top_maps = top_maps_q.all()
    except Exception as e:
        # Any query failure is surfaced to the client as a 404.
        log.debug(e)
        raise HTTPNotFound
    return top_maps
def mostFrequentVariableAndValue(self, variableNameList):
    """Return the single most frequent (variable, value) pair.

    :type variableNameList: list(str)
    :returns: ``{variableName: value}`` for the most common value over
        all requested variables, or ``None`` when nothing matched.
    :raises EmptyVariableNameListError: if no variable names are given.
    """
    subQueryList = []

    if len(variableNameList) == 0:
        raise EmptyVariableNameListError()

    with closing(self._sessionMaker()) as session:
        # For each variable, retrieve all possible values and their occurrence count.
        for variableName in variableNameList:
            variableNameColumn = literal(variableName).label(self._VARIABLE_NAME_KEY)
            variableValueColumn = getattr(SQLModsecurityAuditEntryMessage, variableName).label(self._VARIABLE_VALUE_KEY)
            variableValueCountColumn = count().label(self._VARIABLE_VALUE_COUNT_KEY)

            # Subquery of each variable.
            subQuery = self._makeQuery(session, [variableNameColumn, variableValueColumn, variableValueCountColumn])
            subQuery = subQuery.group_by(self._VARIABLE_NAME_KEY, self._VARIABLE_VALUE_KEY)
            subQueryList.append(subQuery)

        # Merge all subqueries, sort by reverse count and keep the top
        # row.  BUG FIX: the order_by/limit pair used to be applied a
        # second time immediately afterwards; the redundant duplicate
        # application has been removed.
        query = union(*subQueryList).order_by(desc(self._VARIABLE_VALUE_COUNT_KEY)).limit(1)

        # ... then picking the first one.
        item = session.execute(query).fetchone()

        if item is not None:
            return {str(item.variableName): item.variableValue}
        else:
            return None
def get(self):
    """Render the admin console dashboard with aggregate counts."""
    # TODO:
    TOTAL_INSTANCE = self.db.query(Instance.id).count()
    TOTAL_APPLIANCE = self.db.query(Appliance.id).count()
    TOTAL_USER = self.db.query(User.id).count()
    TOTAL_JOB = self.db.query(Job.id).count()
    TOTAL_NODE = self.db.query(Node.id).count()

    # Ten most recently created users/jobs (highest ids).
    new_users = self.db.query(User).order_by(desc(User.id)).limit(10)
    new_jobs = self.db.query(Job).order_by(desc(Job.id)).limit(10)

    # BUG FIX: the resource snapshot used to be fetched twice (once as
    # ``ud`` and once more inside ``d.update(self._get_data())``);
    # fetch it once and merge it.
    ud = self._get_data()

    d = {
        'title': self.trans(_('Admin Console')),
        'human_size': human_size,
        'TOTAL_APPLIANCE': TOTAL_APPLIANCE,
        'TOTAL_INSTANCE': TOTAL_INSTANCE,
        'TOTAL_USER': TOTAL_USER,
        'TOTAL_JOB': TOTAL_JOB,
        'TOTAL_NODE': TOTAL_NODE,
        'NEW_USER_LIST': new_users,
        'NEW_JOB_LIST': new_jobs,
    }
    # ``ud`` supplies TOTAL_MEMORY / USED_MEMORY / TOTAL_CPU / USED_CPU
    # (these were previously listed explicitly and then overwritten by
    # the second _get_data() call anyway).
    d.update(ud)

    self.render('admin/index.html', **d)
def get_notifications(count, to_user_id, get_older=False, than_id=None):
    """Fetch a page of notifications for a user.

    Queries ``count + 1`` rows to detect whether another page exists,
    then trims to ``count``.  Returns a dict with an ``items`` list plus
    a ``there_is_older``/``there_is_newer`` boolean flag.
    """
    notification_query = g.db().query(Notification).filter(Notification.to_user_id == to_user_id)
    if than_id:
        if get_older:
            # Page backwards: strictly older than the anchor id, newest first.
            notifications = notification_query.filter(Notification.id < than_id).order_by(
                expression.desc(Notification.cr_tm)).limit(count + 1).all()
            there_is_more = ['there_is_older', len(notifications) > count]
            notifications = notifications[0:count]
            # notifications.reverse()
        else:
            # Page forwards: strictly newer than the anchor id, oldest first.
            notifications = notification_query.filter(Notification.id > than_id).order_by(
                expression.asc(Notification.cr_tm)).limit(count + 1).all()
            there_is_more = ['there_is_newer', len(notifications) > count]
            notifications = notifications[0:count]
    else:
        # No anchor: first page, newest first.
        notifications = notification_query.order_by(expression.desc(Notification.cr_tm)).limit(
            count + 1).all()
        there_is_more = ['there_is_older', len(notifications) > count]
        notifications = notifications[0:count]
        # notifications.reverse()
    return {
        there_is_more[0]: there_is_more[1],
        'items': notifications
    }
def get_entries(self, category_id=None, category_name=None, date_from=None, date_to=None, limit=None):
    """Return the expense entries query for this user, newest first.

    The base query joins category name/slug and — via the loans link
    table — the user an expense was shared with.  Optional arguments
    narrow by category id/name, date range and row count.
    NOTE(review): filters accumulate on the cached ``self.entries``
    query across calls — confirm that repeat calls are intended to
    narrow further rather than start fresh.
    """
    if not self.entries:
        self.entries = ExpensesTable.query\
            .filter(ExpensesTable.user == self.user_id)\
            .join(ExpenseCategoriesTable)\
            .add_columns(ExpenseCategoriesTable.name, ExpenseCategoriesTable.slug)\
            .order_by(desc(ExpensesTable.date)).order_by(desc(ExpensesTable.id))\
            .outerjoin(ExpensesToLoansTable)\
            .outerjoin((UsersTable, (UsersTable.id == ExpensesToLoansTable.shared_with)))\
            .add_columns(UsersTable.name, UsersTable.slug)

    # provided category id
    if category_id:
        self.entries = self.entries.filter(ExpensesTable.category == category_id)

    # provided category name
    if category_name:
        self.entries = self.entries.filter(ExpenseCategoriesTable.name == category_name)

    # provided date range
    if date_from and date_to:
        self.entries = self.entries.filter(ExpensesTable.date >= date_from).filter(ExpensesTable.date <= date_to)

    # provided count
    if limit:
        self.entries = self.entries.limit(limit)

    return self.entries
def execute(self):
    """Return an iterator over one JSON page of JP titles.

    Reads ``limit``/``page`` query arguments, serves from the cache
    (``self.emc``) when possible, otherwise queries and caches.
    """
    try:
        limit = int(self.get_argument('limit', default=15))
        page = int(self.get_argument('page', default=0))
        """ for Exception test """
        #limit = self.get_argument('limit')
        #page = self.get_argument('page')
    except Exception as e:
        return self.handle_request_exception(e)
    else:
        # Cache key is derived from (page, limit).
        data = self.emc.get(self.emc.get_key_format() % (page, limit))
        if not data:
            # Newest releases first; ties broken by title id.
            titles = session.query(EVJPTitle).\
                order_by(desc(EVJPTitle.released_date)).\
                order_by(desc(EVJPTitle.ev_title_id)).\
                slice(page * limit, page * limit + limit).\
                all()
            try:
                data = self.to_json(titles, limit, page)
            except RuntimeError:
                # Serialization failure degrades to an empty page.
                data = self.to_json([], limit, page)
            else:
                # Only cache successfully serialized pages.
                self.emc.set(self.emc.get_key_format() % (page, limit), data)
        self.start_response(self.get_status(), self.get_response_headers())
        return iter([data])
def get_messages(self, count, get_older=False, than_id=None):
    """Fetch a page of this contact's messages.

    Queries ``count + 1`` rows to detect whether another page exists,
    then trims to ``count``.  Returns a dict with an ``items`` list plus
    a ``there_is_older``/``there_is_newer`` boolean flag.
    """
    messages_filter = (Message.contact_id == self.id)
    messages_query = g.db().query(Message)
    if than_id:
        if get_older:
            # Page backwards: strictly older than the anchor, newest first.
            messages = messages_query.filter(and_(messages_filter, Message.id < than_id)).order_by(
                expression.desc(Message.cr_tm)).limit(count + 1).all()
            there_is_more = ['there_is_older', len(messages) > count]
            messages = messages[0:count]
            # messages.reverse()
        else:
            # Page forwards: strictly newer than the anchor, oldest first.
            messages = messages_query.filter(and_(messages_filter, Message.id > than_id)).order_by(
                expression.asc(Message.cr_tm)).limit(count + 1).all()
            there_is_more = ['there_is_newer', len(messages) > count]
            messages = messages[0:count]
    else:
        # No anchor: first page, newest first.
        messages = messages_query.filter(messages_filter).order_by(expression.desc(Message.cr_tm)).limit(
            count + 1).all()
        there_is_more = ['there_is_older', len(messages) > count]
        messages = messages[0:count]
        # messages.reverse()
    return {
        there_is_more[0]: there_is_more[1],
        'items': messages
    }
def __get_result(model, flight_field, **kw):
    """Rank ``model`` rows by summed flight index score.

    Flights are grouped by ``flight_field`` (count + summed
    index_score), optionally restricted to a single year, and the
    aggregate is joined back to ``model`` ordered by descending total.

    :raises HTTPBadRequest: when the ``year`` keyword is not an integer.
    """
    subq = DBSession \
        .query(getattr(Flight, flight_field),
               func.count('*').label('count'),
               func.sum(Flight.index_score).label('total')) \
        .group_by(getattr(Flight, flight_field)) \
        .outerjoin(Flight.model)

    if 'year' in kw:
        try:
            year = int(kw['year'])
        except (TypeError, ValueError):
            # BUG FIX: narrowed from a bare ``except`` so that system
            # exceptions are not converted into a 400 response.
            raise HTTPBadRequest

        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        subq = subq.filter(Flight.date_local >= year_start) \
                   .filter(Flight.date_local <= year_end)

    subq = subq.subquery()

    result = DBSession \
        .query(model, subq.c.count, subq.c.total,
               over(func.rank(), order_by=desc('total')).label('rank')) \
        .join((subq, getattr(subq.c, flight_field) == model.id))

    result = result.order_by(desc('total'))
    return result
def get_all_datatypes_in_burst(self, burst_id):
    """
    Get all dataTypes in burst, order by their creation, desc.

    :param burst_id BurstConfiguration Identifier.
    :returns: list dataType GIDs or empty list.
    """
    try:
        # Datatype groups reachable from the burst's workflow steps.
        groups = (
            self.session.query(model.DataTypeGroup)
            .join(model.Operation, model.DataTypeGroup.fk_from_operation == model.Operation.id)
            .join(model.WorkflowStep, model.Operation.id == model.WorkflowStep.fk_operation)
            .join(model.Workflow)
            .filter(model.Workflow.fk_burst == burst_id)
            .order_by(desc(model.DataTypeGroup.id))
            .all()
        )
        # Stand-alone datatypes of the burst (not in a group and not the
        # synthetic exception placeholder type).
        result = (
            self.session.query(model.DataType)
            .filter(model.DataType.fk_parent_burst == burst_id)
            .filter(model.DataType.fk_datatype_group == None)
            .filter(model.DataType.type != self.EXCEPTION_DATATYPE_GROUP)
            .order_by(desc(model.DataType.id))
            .all()
        )
        result.extend(groups)
    except SQLAlchemyError as exc:
        self.logger.exception(exc)
        result = []
    # BUG FIX: the computed list was previously dropped on the floor —
    # return it as the docstring promises.
    return result
def get_entries(self, category_id=None, category_name=None, date_from=None, date_to=None, limit=None):
    """Return the income entries query for this user, newest first.

    The base query joins income-category name/slug columns.  Optional
    arguments narrow by category id/name, date range and row count.
    NOTE(review): filters accumulate on the cached ``self.entries``
    query across calls — confirm repeat-call behavior is intended.
    """
    if not self.entries:
        self.entries = IncomeTable.query\
            .filter(IncomeTable.user == self.user_id)\
            .join(IncomeCategoriesTable)\
            .add_columns(IncomeCategoriesTable.name, IncomeCategoriesTable.slug)\
            .order_by(desc(IncomeTable.date)).order_by(desc(IncomeTable.id))

    # provided category id
    if category_id:
        self.entries = self.entries.filter(IncomeTable.category == category_id)

    # provided category name
    if category_name:
        self.entries = self.entries.filter(IncomeCategoriesTable.name == category_name)

    # provided date range
    if date_from and date_to:
        self.entries = self.entries.filter(IncomeTable.date >= date_from).filter(IncomeTable.date <= date_to)

    # provided count
    if limit:
        self.entries = self.entries.limit(limit)

    return self.entries
def _get_result(model, flight_field, year=None):
    """Rank ``model`` rows by summed flight index score.

    Aggregates flights grouped by ``flight_field`` (count + summed
    index_score, optionally restricted to ``year``) and joins the
    aggregate back to ``model`` with a descending rank over the total.
    """
    subq = db.session \
        .query(getattr(Flight, flight_field),
               func.count('*').label('count'),
               func.sum(Flight.index_score).label('total')) \
        .group_by(getattr(Flight, flight_field)) \
        .outerjoin(Flight.model)

    # Only filter by year when an actual int is given (None skips).
    if isinstance(year, int):
        year_start = date(year, 1, 1)
        year_end = date(year, 12, 31)
        subq = subq.filter(Flight.date_local >= year_start) \
                   .filter(Flight.date_local <= year_end)

    subq = subq.subquery()

    result = db.session \
        .query(model, subq.c.count, subq.c.total,
               over(func.rank(), order_by=desc('total')).label('rank')) \
        .join((subq, getattr(subq.c, flight_field) == model.id))

    if model == User:
        # Eager-load the club relationship to avoid N+1 queries when
        # rendering user listings.
        result = result.options(subqueryload(model.club))

    result = result.order_by(desc('total'))
    return result
def production_export(self, **kw):
    """Export approved production items to an Excel report.

    Builds filter conditions from the keyword arguments, renders the
    matching rows into the version-appropriate XLS/XLSX template and
    serves the generated file.

    :raises ReportGenerationException: when Excel is unavailable or the
        report generation fails.
    """
    ws = [Item.active == 0, Item.status == STATUS_APPROVE, ]
    if kw.get("jobNo", False):
        ws.append(Item.jobNo.op("ilike")("%%%s%%" % kw["jobNo"]))
    if kw.get("systemNo", False):
        ws.append(Item.systemNo.op("ilike")("%%%s%%" % kw["systemNo"]))
    if kw.get("desc", False):
        ws.append(Item.desc.op("ilike")("%%%s%%" % kw["desc"]))
    if kw.get("approve_time_from", False):
        ws.append(Item.approveTime >= kw["approve_time_from"])
    if kw.get("approve_time_to", False):
        ws.append(Item.approveTime <= kw["approve_time_to"])

    # BUG FIX: the query used to be executed twice (an unused ``.all()``
    # call followed by this iteration); run it only once.
    data = []
    for h in qry(Item).filter(and_(*ws)).order_by(desc(Item.createTime)):
        data.append(map(unicode, [
            h.systemNo,
            h.jobNo,
            h.desc,
            h.createTime.strftime("%Y/%m/%d %H:%M"),
            h.showStatus(),
            h.approveTime.strftime("%Y/%m/%d %H:%M") if h.approveTime else '',
        ]))

    # BUG FIX: ``sdexcel`` could be referenced before assignment in the
    # error path (if the exception fired before its creation);
    # initialise it up front.
    sdexcel = None
    try:
        v = getExcelVersion()
        if not v:
            raise ReportGenerationException()
        if v <= "2003":  # version below 2003
            templatePath = os.path.join(config.get("public_dir"), "TEMPLATE", "CAB_PRODUCTION_REPORT_TEMPLATE.xls")
        else:  # version above 2003
            templatePath = os.path.join(config.get("public_dir"), "TEMPLATE", "CAB_PRODUCTION_REPORT_TEMPLATE.xlsx")
        tempFileName, realFileName = self._getReportFilePath(templatePath)
        sdexcel = CABItemReport(templatePath=tempFileName, destinationPath=realFileName)
        sdexcel.inputData(data)
        sdexcel.outputData()
    except:
        traceback.print_exc()
        logError()
        if sdexcel:
            sdexcel.clearData()
        raise ReportGenerationException()
    else:
        return serveFile(realFileName)
def shipment():
    """Admin shipment view: update product statuses on POST, list orders.

    POST form keys look like ``'shipment-<order_id>-<product_id>'`` and
    carry the new status id.  GET optionally filters orders by the
    ``status`` query argument ('accepted' or 'prepared').
    """
    if request.method == 'POST':
        for v in request.form:
            '''v : 'shipment-743-0' '''
            splited = v.split('-')
            order_id = int(splited[1])
            product_id = int(splited[2])
            order = Order.query.filter_by(id=order_id).one()
            #order.products[key].status_id=int(request.form[v])
            for p in order.products:
                if p.product.id == product_id:
                    # BUG FIX: was ``is not``, an identity comparison
                    # that is unreliable for ints outside CPython's
                    # small-int cache; use value inequality.
                    if p.status_id != int(request.form[v]):
                        p.status_id = int(request.form[v])
                        db_session.add(order)
                        db_session.commit()
        # Redirect back, preserving any status filter.
        q_str = request.args.get('status', '')
        if q_str:
            url = '/shipment/?status=' + q_str
        else:
            url = '/shipment/'
        return redirect(url)
    elif request.args.get('status') == 'accepted':
        orders = Order.query.options(joinedload(Order.products)).filter(Order.products.any(status_id=1)) \
            .order_by(desc(Order.created_at)).all()
    elif request.args.get('status') == 'prepared':
        orders = Order.query.options(joinedload(Order.products)).filter(Order.products.any(status_id=2)) \
            .order_by(desc(Order.created_at)).all()
    else:
        #orders=Order.query.order_by(desc(Order.created_at)).all()
        orders = Order.query.options(joinedload(Order.products)).order_by(desc(Order.created_at)).all()
    statuses = Status.query.order_by(Status.id).all()
    return render_template('admin_shipment.html', orders=orders, statuses=statuses)
def dashboard(request):
    """Player dashboard: win/loss record, nearby games and leaderboard."""
    player = request.player
    db = request.db
    wins, losses, ratio = player.wins_losses()

    # Upcoming games.  NOTE(review): ordered by descending time, which
    # lists the furthest-away games first — confirm this is intended.
    upcoming_games = []
    results = db.query(Game).filter(Game.time > datetime.now()).order_by(desc(Game.time)).limit(4)
    for game in results:
        # Attach pre-formatted display-time components to each row.
        game.hour, game.min, game.m = game.get_printed_time()
        upcoming_games.append(game)

    # Previous Games (most recent first).
    previous_games = []
    results = db.query(Game).filter(Game.time < datetime.now()).order_by(desc(Game.time)).limit(4)
    for game in results:
        game.hour, game.min, game.m = game.get_printed_time()
        previous_games.append(game)

    leaders = get_leaderboard(request)

    return {
        'wins': wins,
        'losses': losses,
        'ratio': ratio,
        'upcoming_games': upcoming_games,
        'previous_games': previous_games,
        'leaders': leaders
    }
def get_top_scorers(self):
    """Top players by score. Shared by all renderers."""
    # NOTE(review): the lifetime-based cutoff
    # (self.now - timedelta(days=self.lifetime)) was dead code — it was
    # immediately overwritten by this hard-coded 120-day window.  The
    # dead assignment has been removed; consider restoring
    # self.lifetime if the hard-coded value was a leftover experiment.
    cutoff = self.now - timedelta(days=120)

    # Rank players by summed score on this map within the window,
    # excluding the reserved low player ids (<= 2).
    top_scorers_q = DBSession.query(
        fg.row_number().over(order_by=expr.desc(func.sum(PlayerGameStat.score))).label("rank"),
        Player.player_id, Player.nick,
        func.sum(PlayerGameStat.score).label("total_score"))\
        .filter(Player.player_id == PlayerGameStat.player_id)\
        .filter(Game.game_id == PlayerGameStat.game_id)\
        .filter(Game.map_id == self.map_id)\
        .filter(Player.player_id > 2)\
        .filter(PlayerGameStat.create_dt > cutoff)\
        .order_by(expr.desc(func.sum(PlayerGameStat.score)))\
        .group_by(Player.nick)\
        .group_by(Player.player_id)

    # Optional pagination.
    if self.last:
        top_scorers_q = top_scorers_q.offset(self.last)
    if self.limit:
        top_scorers_q = top_scorers_q.limit(self.limit)

    top_scorers = top_scorers_q.all()
    return top_scorers
def top_players(self):
    """Top players on this server by total playing time."""
    try:
        # Rank players by summed alive time within the lifetime window,
        # excluding the reserved low player ids (<= 2).
        top_players_q = DBSession.query(
            fg.row_number().over(
                order_by=expr.desc(func.sum(PlayerGameStat.alivetime))).label("rank"),
            Player.player_id, Player.nick,
            func.sum(PlayerGameStat.alivetime).label("alivetime"))\
            .filter(Player.player_id == PlayerGameStat.player_id)\
            .filter(Game.game_id == PlayerGameStat.game_id)\
            .filter(Game.server_id == self.server_id)\
            .filter(Player.player_id > 2)\
            .filter(PlayerGameStat.create_dt > (self.now - timedelta(days=self.lifetime)))\
            .order_by(expr.desc(func.sum(PlayerGameStat.alivetime)))\
            .group_by(Player.nick)\
            .group_by(Player.player_id)

        # Optional pagination.
        if self.last:
            top_players_q = top_players_q.offset(self.last)
        if self.limit:
            top_players_q = top_players_q.limit(self.limit)

        top_players = top_players_q.all()
    except Exception as e:
        # Query failures surface as 404 to the caller.
        log.debug(e)
        raise HTTPNotFound
    return top_players
def parse_custom_query(session, args, group, order):
    """Build a ticket query from request args with grouping/ordering.

    Unknown ``group``/``order`` column names fall back to 'status' and
    'priority'.  Every request arg whose key is a ticket column becomes
    an ``IN`` predicate; ``groupdesc``/``desc`` flags ('1') flip the
    respective sort direction.
    """
    table = Ticket.__table__
    cols = table.columns
    if group not in cols:
        group = 'status'
    if order not in cols:
        order = 'priority'
    groupcol = cols[group]
    ordercol = cols[order]

    # Each recognised arg key contributes "col IN (values)".
    preds = [cols[key].in_(values)
             for (key, values) in args.iterlists()
             if key in cols]

    q = session.query(Ticket.id.label('ticket'), Ticket.summary,
                      Ticket.owner, Ticket.type, Ticket.priority,
                      Ticket.component, Ticket.time.label('created'),
                      Enum.value.label('__color__'),
                      groupcol.label('__group__')
                      ).filter(and_(Enum.type == 'priority',
                                    Ticket.priority == Enum.name,
                                    *preds))

    # BUG FIX: Query.order_by takes its criteria as positional
    # arguments; passing a single list is rejected by modern SQLAlchemy.
    return q.order_by(
        desc(groupcol) if args.get('groupdesc') == '1' else groupcol,
        desc(ordercol) if args.get('desc') == '1' else ordercol,
    )
def guess_model(self):
    """Heuristically determine the aircraft model for this IGC file.

    Tries, in order: a previous flight with the same registration, a
    previous flight from the same logger, and finally a fuzzy text
    match of the IGC glider-type header against known model names.

    :returns: an AircraftModel id, or None when nothing matches.
    """
    from skylines.model import Flight, AircraftModel

    # first try to find the reg number in the database
    if self.registration is not None:
        glider_reg = self.registration

        result = DBSession.query(Flight) \
            .filter(func.upper(Flight.registration) == func.upper(glider_reg)) \
            .order_by(desc(Flight.id)) \
            .first()

        if result and result.model_id:
            return result.model_id

    # try to find another flight with the same logger and use it's aircraft type
    if (self.logger_id is not None
            and self.logger_manufacturer_id is not None):
        logger_id = self.logger_id
        logger_manufacturer_id = self.logger_manufacturer_id

        # BUG FIX: the filter used ``Flight.model_id == None``, i.e.
        # flights *without* a model, which made the subsequent
        # ``result.model_id`` check impossible to satisfy; we need
        # flights that *have* a model to copy it from.
        result = DBSession.query(Flight).outerjoin(IGCFile) \
            .filter(func.upper(IGCFile.logger_manufacturer_id) == func.upper(logger_manufacturer_id)) \
            .filter(func.upper(IGCFile.logger_id) == func.upper(logger_id)) \
            .filter(Flight.model_id != None) \
            .order_by(desc(Flight.id))

        # 'X…' manufacturer ids look unofficial: only trust flights by
        # the same pilot in that case.
        if self.logger_manufacturer_id.startswith('X'):
            result = result.filter(Flight.pilot == self.owner)

        result = result.first()

        if result and result.model_id:
            return result.model_id

    if self.model is not None:
        glider_type = self.model.lower()

        # otherwise, try to guess the glider model by the glider type igc header
        text_fragments = ['%{}%'.format(v) for v in re.sub(r'[^a-z]', ' ', glider_type).split()]
        digit_fragments = ['%{}%'.format(v) for v in re.sub(r'[^0-9]', ' ', glider_type).split()]

        if not text_fragments and not digit_fragments:
            return None

        glider_type_clean = re.sub(r'[^a-z0-9]', '', glider_type)

        # Require any letter fragment and all digit fragments to match,
        # then prefer the smallest edit distance to the cleaned type.
        result = DBSession \
            .query(AircraftModel) \
            .filter(and_(
                func.regexp_replace(func.lower(AircraftModel.name), '[^a-z]', ' ').like(func.any(text_fragments)),
                func.regexp_replace(func.lower(AircraftModel.name), '[^0-9]', ' ').like(func.all(digit_fragments)))) \
            .order_by(func.levenshtein(func.regexp_replace(func.lower(AircraftModel.name), '[^a-z0-9]', ''), glider_type_clean))

        if result.first():
            return result.first().id

    # nothing found
    return None
def execute(self):
    """Return an iterator over one cached JSON page of JP titles.

    ``self.kwargs['site_ids']`` is a '+'- or space-separated id list;
    ``limit``/``page`` come from the query string.  Results are cached
    per (site-id string, page, limit).
    """
    try:
        limit = int(self.get_argument('limit', default=15))
        page = int(self.get_argument('page', default=0))
    except Exception as e:
        return self.handle_request_exception(e)
    else:
        # Cache key includes the raw site-id string plus paging.
        data = self.emc.get(self.emc.get_key_format() % (str(self.kwargs['site_ids']), page, limit))
        if not data:
            site_ids = [int(site_id) for site_id in re.split(r' |\+', self.kwargs['site_ids'])]
            # One query across all sites, newest release (then id) first.
            # (Superseded per-site query loops and re-sorting, left here
            # as commented-out code, have been removed.)
            title_list = session.query(EVJPTitle).\
                join(SiteJPTitle).\
                filter(SiteJPTitle.site_id.in_(site_ids)).\
                order_by(desc(EVJPTitle.released_date), desc(EVJPTitle.ev_title_id)).\
                slice(page * limit, page * limit + limit).\
                all()
            try:
                data = self.to_json(title_list, limit, page)
            except RuntimeError:
                # Serialization failure degrades to an empty page.
                data = self.to_json([], limit, page)
            else:
                self.emc.set(self.emc.get_key_format() % (str(self.kwargs['site_ids']), page, limit), data)
        self.start_response(self.get_status(), self.get_response_headers())
        return iter([data])
def _get_world_rank():
    """Combine the top-100 friends and top-100 users, best score first."""
    db = DBSession()
    # Pull the top 100 from each table, merge, then re-sort the union.
    leaders = db.query(Friend).order_by(desc(Friend.score)).limit(100).all()
    leaders.extend(db.query(User).order_by(desc(User.score)).limit(100).all())
    return sorted(leaders, key=lambda entry: entry.score, reverse=True)
def barcode(self):
    """Admin barcode dispatcher: LIST / NEW / PRINT / SAVE_NEW views.

    The sub-action comes from the ``m`` request parameter; unknown
    actions flash an error and redirect to the index page.
    """
    method = _g('m', 'LIST')
    if method not in ['LIST', 'NEW', 'PRINT', 'SAVE_NEW']:
        flash(MSG_NO_SUCH_ACTION, MESSAGE_ERROR); return redirect(url_for('.view', action = 'index'))

    DBObj = Barcode
    index_page = 'admin/barcode_index.html'
    new_page = 'admin/barcode_new.html'
    print_page = 'admin/barcode_print.html'
    action = 'barcode'

    if method == 'LIST':
        if _g('SEARCH_SUBMIT'):  # come from search
            values = {'page' : 1}
            for f in ['value', 'ref_no', 'status', 'create_time_from', 'create_time_to'] :
                values[f] = _g(f)
            values['field'] = _g('field', None) or 'create_time'
            values['direction'] = _g('direction', None) or 'desc'
        else:  #come from paginate or return
            # Re-use the last search criteria stored in the session.
            values = session.get('master_barcode_values', {})
            if _g('page') :
                values['page'] = int(_g('page'))
            elif 'page' not in values :
                values['page'] = 1
        session['master_barcode_values'] = values

        # Build SQL conditions from the (possibly session-saved) values.
        conditions = [DBObj.active == 0]
        if values.get('value', None):
            conditions.append(DBObj.value.op('like')('%%%s%%' % values['value']))
        if values.get('ref_no', None):
            conditions.append(DBObj.ref_no.op('like')('%%%s%%' % values['ref_no']))
        if values.get('status', None):
            conditions.append(DBObj.status == values['status'])
        if values.get('create_time_from', None):
            conditions.append(DBObj.create_time > values['create_time_from'])
        if values.get('create_time_to', None):
            # Inclusive end-of-day bound for the upper date limit.
            conditions.append(DBObj.create_time < '%s 23:59' % values['create_time_to'])

        field = values.get('field', 'create_time')
        if values.get('direction', 'desc') == 'desc':
            result = DBSession.query(DBObj).filter(and_(*conditions)).order_by(desc(getattr(DBObj, field)))
        else:
            result = DBSession.query(DBObj).filter(and_(*conditions)).order_by(getattr(DBObj, field))

        def url_for_page(**params):
            # Callback the paginator uses to build page links.
            return url_for('bpAdmin.view', action = action, m = 'LIST', page = params['page'])

        records = paginate.Page(result, values['page'], show_if_single_page = True, items_per_page = 100, url = url_for_page)
        return render_template(index_page, records = records, action = action, values = values)
    elif method == 'NEW':
        return render_template(new_page, action = action)
    elif method == 'PRINT':
        # Print the selected barcodes, newest first.
        ids = _gl('ids')
        records = DBSession.query(DBObj).filter(DBObj.id.in_(ids)).order_by(desc(DBObj.create_time))
        return render_template(print_page, records = records)
    elif method == 'SAVE_NEW':
        # Bulk-create ``qty`` fresh barcodes (status=1 presumably marks
        # freshly created codes — confirm against Barcode model).
        qty = _g('qty')
        records = [DBObj.getOrCreate(None, None, status = 1) for i in range(int(qty))]
        DBSession.commit()
        if _g('type') == 'CREATE':
            flash(MSG_SAVE_SUCC, MESSAGE_INFO)
            return redirect(url_for('.view', action = action))
        else:
            return render_template(print_page, records = records)
def get_all_analysis(self):
    """
    Get all analysis
    @return ALL analysis
    """
    try:
        # Status descending first, newest creation date within status.
        analysis = s.query(Analysis).order_by(desc(Analysis.status), desc(Analysis.created_on)).all()
        return analysis
    except SQLAlchemyError as e:
        # BUG FIX: the original format string had no %s placeholder, so
        # applying ``%`` raised TypeError instead of producing the
        # intended error message; interpolate the reason properly.
        raise CuckooDatabaseError("Unable to get all analysis, reason: %s" % e)
def read_many_byuser(self, request):
    """Return a paginated activity/stakeholder changelog for one user.

    Unions the user's activity versions and stakeholder versions
    (statuses 2 and 3 only), ordered by changeset timestamp and version
    descending.

    :raises HTTPNotFound: when the username does not exist.
    """
    username = request.matchdict['username']

    # Pagination parameters.
    page = int(request.params.get("page", 1))
    pagesize = int(request.params.get("pagesize", 10))

    # ``is None`` instead of ``== None`` for the Python-side check.
    if self.Session.query(User).filter(User.username == username).first() is None:
        raise HTTPNotFound("Requested user does not exist.")

    items = []

    activities_sub_query = self.Session.query(Activity.activity_identifier.label("identifier"),
                                              Activity.version,
                                              Changeset.timestamp,
                                              Changeset.fk_user).\
        join(Changeset).\
        filter(or_(Activity.fk_status == 2, Activity.fk_status == 3)).subquery(name="sub_act")

    activities_query = self.Session.query(activities_sub_query, User.username).\
        join(User).filter(User.username == username).subquery(name="act")

    # All active and inactive stakeholders
    stakeholder_active = self.Session.query(Stakeholder).\
        filter(or_(Stakeholder.fk_status == 2, Stakeholder.fk_status == 3)).\
        subquery("st_active")

    # Get the five latest stakeholder by changeset
    stakeholder_sub_query = self.Session.query(stakeholder_active.c.stakeholder_identifier.label("identifier"),
                                               stakeholder_active.c.version,
                                               Changeset.timestamp,
                                               Changeset.fk_user).\
        join(Changeset, Changeset.id == stakeholder_active.c.fk_changeset).\
        subquery(name="sub_st")

    # Join the resulting set to the user table
    stakeholder_query = self.Session.query(stakeholder_sub_query, User.username).\
        join(User).filter(User.username == username).subquery(name="st")

    query = self.Session.query(activities_query, literal_column("\'activity\'").label("type")).\
        union(self.Session.query(stakeholder_query, literal_column("\'stakeholder\'").label("type"))).\
        order_by(desc(activities_query.c.timestamp)).order_by(desc(activities_query.c.version))

    for i in query.offset((page - 1) * pagesize).limit(pagesize).all():
        items.append({
            "type": i.type,
            "author": i.username,
            "timestamp": i.timestamp,
            "version": i.version,
            "identifier": str(i.identifier)
        })

    # BUG FIX: an unreachable ``return {}`` that followed this return
    # has been removed.
    return {
        "items": items,
        "username": username,
        "totalitems": query.count(),
        "pagesize": pagesize,
        "currentpage": page
    }
def _all_events(self, from_time, to_time):
    """Events in ``[from_time, to_time)`` that reference a file or a
    page, or are moderated-post creations — newest first."""
    relevant = or_(Event.file_id != None,
                   Event.page_id != None,
                   Event.event_type == 'moderated_post_created')
    query = (meta.Session.query(Event)
             .filter(Event.created < to_time)
             .filter(Event.created >= from_time)
             .filter(relevant)
             .order_by(desc(Event.created), desc(Event.id)))
    return query.all()
def main_index(request):
    """Front-page stats: player/server/map leaderboards plus recent
    games, padded with ('-', '-', '-') placeholder rows so templates
    always receive fixed-length lists."""
    leaderboard_count = 10
    recent_games_count = 32

    # top players by score
    top_players = DBSession.query(Player.player_id, Player.nick,
                                  func.sum(PlayerGameStat.score)).\
        filter(Player.player_id == PlayerGameStat.player_id).\
        filter(Player.player_id > 2).\
        order_by(expr.desc(func.sum(PlayerGameStat.score))).\
        group_by(Player.nick).\
        group_by(Player.player_id).all()[0:10]

    top_players = [(player_id, html_colors(nick), score)
                   for (player_id, nick, score) in top_players]

    for i in range(leaderboard_count - len(top_players)):
        top_players.append(('-', '-', '-'))

    # top servers by number of total players played
    top_servers = DBSession.query(Server.server_id, Server.name, func.count()).\
        filter(Game.server_id == Server.server_id).\
        order_by(expr.desc(func.count(Game.game_id))).\
        group_by(Server.server_id).\
        group_by(Server.name).all()[0:10]

    for i in range(leaderboard_count - len(top_servers)):
        top_servers.append(('-', '-', '-'))

    # top maps by total times played
    # BUG FIX: the join condition compared Map.map_id to Game.game_id,
    # which silently produced nonsense counts; join on Game.map_id.
    top_maps = DBSession.query(Map.map_id, Map.name, func.count(Game.game_id)).\
        filter(Map.map_id == Game.map_id).\
        order_by(expr.desc(func.count(Game.game_id))).\
        group_by(Map.map_id).\
        group_by(Map.name).all()[0:10]

    for i in range(leaderboard_count - len(top_maps)):
        top_maps.append(('-', '-', '-'))

    recent_games = DBSession.query(Game, Server, Map).\
        filter(Game.server_id == Server.server_id).\
        filter(Game.map_id == Map.map_id).\
        order_by(expr.desc(Game.start_dt)).all()[0:recent_games_count]

    for i in range(recent_games_count - len(recent_games)):
        recent_games.append(('-', '-', '-'))

    return {'top_players': top_players,
            'top_servers': top_servers,
            'top_maps': top_maps,
            'recent_games': recent_games,
            }
def index(self):
    ''' Show all wanted, snatched, downloaded movies '''
    movie_query = Db.query(Movie)

    # Wanted/waiting movies, alphabetical by name.
    movies = movie_query.order_by(Movie.name).filter(
        or_(Movie.status == u'want', Movie.status == u'waiting')).all()

    # History lists: most recently changed first, then by name.
    snatched = movie_query.order_by(
        desc(Movie.dateChanged), Movie.name).filter_by(status = u'snatched').all()
    downloaded = movie_query.order_by(
        desc(Movie.dateChanged), Movie.name).filter_by(status = u'downloaded').all()

    return self.render({'movies': movies, 'snatched': snatched, 'downloaded': downloaded})
def annotation_object_next(self, current_frame, current_annotation_object=None, to_future=True):
    """
    When current_annotation_object is None, nearest AnnotationObject in the future (in respect
    to current_frame) is returned. With current_annotation_object given, "next" AnnotationObject
    is returned, i.e. an object with higher ID in the current frame or the nearest in the future.
    If to_future is false, the search is done in the past instead of the future.
    Returned tuple contains the next object and its starting and ending frame.
    :rtype: (AnnotationObject, int, int)
    """
    # Each row: (object, first value frame, last value frame).
    q = database.db.session.query(AnnotationObject, func.min(AnnotationValue.frame_from), func.max(AnnotationValue.frame_from))
    q = q.filter_by(video_id=self.id)
    q = q.join(AnnotationObject.annotation_values)
    q = q.group_by(AnnotationObject.id)

    if to_future:
        if current_annotation_object is None:
            # nearest AnnotationObject in future
            q = q.having(func.min(AnnotationValue.frame_from) > current_frame)
            q = q.order_by(func.min(AnnotationValue.frame_from), AnnotationObject.id)
        else:
            # nearest AnnotationObject in future, or in the same frame with bigger ID
            current_id = current_annotation_object.id
            q = q.having(
                (func.min(AnnotationValue.frame_from) > current_frame) |
                ((AnnotationObject.id > current_id) &
                 (func.min(AnnotationValue.frame_from) <= current_frame) &
                 (func.max(AnnotationValue.frame_from) >= current_frame))
            )
            q = q.order_by(func.min(AnnotationValue.frame_from), AnnotationObject.id)
    else:
        if current_annotation_object is None:
            # nearest AnnotationObject in past
            q = q.having(func.max(AnnotationValue.frame_from) < current_frame)
            q = q.order_by(desc(func.max(AnnotationValue.frame_from)), desc(AnnotationObject.id))
        else:
            # nearest AnnotationObject in past, or in the same frame with lower ID
            current_id = current_annotation_object.id
            q = q.having(
                (func.max(AnnotationValue.frame_from) < current_frame) |
                ((AnnotationObject.id < current_id) &
                 (func.min(AnnotationValue.frame_from) <= current_frame) &
                 (func.max(AnnotationValue.frame_from) >= current_frame))
            )
            q = q.order_by(desc(func.max(AnnotationValue.frame_from)), desc(AnnotationObject.id))

    q = q.limit(1)
    q = q.all()

    # No candidate found.
    if len(q) < 1:
        return None
    # Defensive: LIMIT 1 guarantees at most one row.
    if len(q) > 1:
        raise Exception('Returned collection cannot contain more than 1 item.')
    return q[0]
def get_entries(self, category_id=None, category_name=None, date_from=None, date_to=None, limit=None):
    """Return the expense entries query for this user, newest first.

    Joins category name/slug and — via the loans link table — the user
    each expense was shared with.  Optional arguments narrow the result
    by category id/name, date range and row count.
    NOTE(review): filters accumulate on the cached ``self.entries``
    query across calls — confirm repeat-call behavior is intended.
    """
    if not self.entries:
        self.entries = ExpensesTable.query\
            .filter(ExpensesTable.user == self.user_id)\
            .join(ExpenseCategoriesTable)\
            .add_columns(ExpenseCategoriesTable.name, ExpenseCategoriesTable.slug)\
            .order_by(desc(ExpensesTable.date)).order_by(desc(ExpensesTable.id))\
            .outerjoin(ExpensesToLoansTable)\
            .outerjoin((UsersTable, (UsersTable.id == ExpensesToLoansTable.shared_with)))\
            .add_columns(UsersTable.name, UsersTable.slug)

    # provided category id
    if category_id:
        self.entries = self.entries.filter(
            ExpensesTable.category == category_id)

    # provided category name
    if category_name:
        self.entries = self.entries.filter(
            ExpenseCategoriesTable.name == category_name)

    # provided date range
    if date_from and date_to:
        self.entries = self.entries.filter(
            ExpensesTable.date >= date_from).filter(
            ExpensesTable.date <= date_to)

    # provided count
    if limit:
        self.entries = self.entries.limit(limit)

    return self.entries
def get_values_of_datatype(self, project_id, datatype_class, filters=None, page_size=50): """ Retrieve a list of dataTypes matching a filter inside a project. :returns: (results, total_count) maximum page_end rows are returned, to avoid endless time when loading a page """ result = [] count = 0 if not issubclass(datatype_class, model.Base): self.logger.warning("Trying to filter not DB class:" + str(datatype_class)) return result, count try: #Prepare generic query: query = self.session.query( datatype_class.id, func.max(datatype_class.type), func.max(datatype_class.gid), func.max(datatype_class.subject), func.max(model.Operation.completion_date), func.max(model.Operation.user_group), func.max(text('"OPERATION_GROUPS_1".name')), func.max(model.DataType.user_tag_1)).join( (model.Operation, datatype_class.fk_from_operation == model.Operation.id) ).outerjoin(model.Links).outerjoin( (model.OperationGroup, model.Operation.fk_operation_group == model.OperationGroup.id), aliased=True).filter( model.DataType.invalid == False).filter( or_(model.Operation.fk_launched_in == project_id, model.Links.fk_to_project == project_id)) if filters: filter_str = filters.get_sql_filter_equivalent( datatype_to_check='datatype_class') if filter_str is not None: query = query.filter(eval(filter_str)) #Retrieve the results query = query.group_by(datatype_class.id).order_by( desc(datatype_class.id)) result = query.limit(max(page_size, 0)).all() count = query.count() except Exception, excep: self.logger.exception(excep)
def page_view_instances(self, app): view = self.get_argument('view', 'all') by = self.get_argument('by', 'updated') sort = self.get_argument('sort', 'desc') status = self.get_argument('status', 'all') page_size = int( self.get_argument('sepa', settings.APPLIANCE_INSTANCE_LIST_PAGE_SIZE)) cur_page = int(self.get_argument('p', 1)) start = (cur_page - 1) * page_size stop = start + page_size if status == 'running': slist = settings.INSTANCE_SLIST_RUNING elif status == 'stoped': slist = settings.INSTANCE_SLIST_STOPED else: slist = settings.INSTANCE_SLIST_ALL instances = self.db2.query(Instance).filter( Instance.isprivate != True).filter( Instance.status.in_(slist)).filter( Instance.appliance_id == app.id) if view == 'self' and self.current_user: instances = instances.filter_by(user_id=self.current_user.id) if by == 'created': by_obj = Instance.created else: by_obj = Instance.updated sort_by_obj = desc(by_obj) if sort == 'desc' else asc(by_obj) instances = instances.order_by(sort_by_obj) total = instances.count() instances = instances.slice(start, stop).all() if total > page_size: page_html = Pagination(total=total, page_size=page_size, cur_page=cur_page).html(self.get_page_url) else: page_html = "" return instances, page_html
def _order_by(self, query, joins, sort_field, sort_desc): """ Apply order_by to the query :param query: Query :param joins: Joins set :param sort_field: Sort field :param sort_desc: Ascending or descending """ # TODO: Preprocessing for joins # Try to handle it as a string if isinstance(sort_field, basestring): # Create automatic join against a table if column name # contains dot. if '.' in sort_field: parts = sort_field.split('.', 1) if parts[0] not in joins: query = query.join(parts[0]) joins.add(parts[0]) elif isinstance(sort_field, InstrumentedAttribute): # SQLAlchemy 0.8+ uses 'parent' as a name mapper = getattr(sort_field, 'parent', None) if mapper is None: # SQLAlchemy 0.7.x uses parententity mapper = getattr(sort_field, 'parententity', None) if mapper is not None: table = mapper.tables[0] if table.name not in joins: query = query.join(table) joins.add(table.name) elif isinstance(sort_field, Column): pass else: raise TypeError('Wrong argument type') if sort_field is not None: if sort_desc: query = query.order_by(desc(sort_field)) else: query = query.order_by(sort_field) return query, joins
def signup_user( self, email: str, login_provider: UserLoginProviderType, login_provider_uid: str, login_provider_data: Optional[LoginDataABC] = None, first_name: str = None, last_name: str = None, ) -> User: """Returns new user signup if successful. Args: email (str) login_provider (UserLoginProviderType): what login channel login_provider_uid (str): login provider username, often same as email login_provider_data (Optional[dict], optional): Includes information for authentication (like password). Defaults to None. first_name (str, optional): Defaults to None. last_name (str, optional): Defaults to None. Raises: HawkAuthException: when account exists, login id exists, or credentials aren't valid Returns: User: Newly signed up User """ if (self.session.query(User).filter(User.email == email).one_or_none() is None and (self.session.query(UserLoginProvider). filter(UserLoginProvider.provider == login_provider.value).filter( UserLoginProvider.unique_id == login_provider_uid).order_by( desc(UserLoginProvider.created_at))).first() is None): new_user = User() new_user.email = email new_user.first_name = first_name new_user.last_name = last_name self.session.add(new_user) self.session.flush() new_login = UserLoginProvider() new_login.provider = login_provider.value new_login.unique_id = login_provider_uid new_login.user_id = new_user.id new_login.data = self._save_login_data(login_provider, login_provider_data) self.session.add(new_login) self.session.flush() return new_user else: raise HawkAuthException(1002)
def my_box_list(self, show="inbox"): UID = self.current_user.id page_size = self.get_argument_int('sepa', 20) cur_page = self.get_argument_int('p', 1) by = self.get_argument('by', 'id') sort = self.get_argument('sort', 'DESC') if by == 'status': by = Message.status elif by == 'isread': by = Message.isread elif by == 'readtime': by = Message.readtime elif by == 'sender': by = Message.sender_id else: by = Message.id by_exp = desc(by) if sort == 'DESC' else asc(by) start = (cur_page - 1) * page_size stop = start + page_size if show == "inbox": ml = self.db.query(Message).filter_by(receiver_id=UID, isinbox=True) elif show == "outbox": ml = self.db.query(Message).filter_by( sender_id=UID, isinbox=False).filter(Message.receiver_id != None) elif show == "notice": ml = self.db.query(Message).filter(Message.receiver_id == None) else: return {} total = ml.count() ml = ml.order_by(by_exp).slice(start, stop).all() page_html = pagination(self.request.uri, total, page_size, cur_page) return { 'MESSAGE_LIST': ml, 'PAGE_HTML': page_html, 'TOTAL': total, 'SORT': sort, 'BY': by }
def list(self): if not 'page' in request.params: page = 1 else: page = request.params['page'] c.users = webhelpers.paginate.Page( meta.Session.query(User).order_by(desc(User.login)), page = int(page), items_per_page = 15) if 'partial' in request.params: return render('user_list_ajax.mako') else: # Render the full page return render('user_list.mako')
def top_last_month(cls, key, ids=None, num=None): """Aggregate a listing of the top items (by pageviews) last month. We use the last month because it's guaranteed to be fully computed and therefore will be more meaningful. """ cur_month = datetime.date.today().replace(day=1) last_month = decrement_month(cur_month) q = (Session.query(cls) .filter(cls.date == last_month) .filter(cls.interval == "month") .order_by(desc(cls.date), desc(cls.pageview_count))) if ids: q = q.filter(getattr(cls, key).in_(ids)) else: num = num or 55 q = q.limit(num) return [(getattr(r, key), (r.unique_count, r.pageview_count)) for r in q.all()]
def _get_tasks_for_responsible_query(self, responsible, sort_on='modified', sort_order='reverse'): """Returns a sqlalchemy query of all tasks assigned to the given responsible. """ sort_on = getattr(Task, sort_on) if sort_order == 'reverse': return Session().query(Task).filter( Task.responsible == responsible).order_by(desc(sort_on)) else: return Session().query(Task).filter( Task.responsible == responsible).order_by(asc(sort_on))
def history(min_mins: int, max_mins: int): if max_mins <= min_mins: return render_template("waitlist/history_cut.html", history=[]) # only officer and leadership can go back more then 4h if max_mins > 240 and not (perm_comp_unlimited.can()): redirect(url_for("fittings.history", min_mins=min_mins, max_mins=240)) tnow: datetime = datetime.utcnow() max_time: datetime = tnow - timedelta(minutes=max_mins) min_time: datetime = tnow - timedelta(minutes=min_mins) history_entries = db.session.query(HistoryEntry) \ .filter((HistoryEntry.time <= min_time) & (HistoryEntry.time > max_time)) \ .order_by(desc(HistoryEntry.time)).limit(1000).all() return render_template("waitlist/history_cut.html", history=history_entries)
def get_bursts_for_project(self, project_id, page_start=0, page_end=None, count=False): """Get latest 50 BurstConfiguration entities for the current project""" try: bursts = self.session.query(model.BurstConfiguration ).filter_by(fk_project=project_id ).order_by(desc(model.BurstConfiguration.id)) if count: return bursts.count() if page_end is not None: bursts = bursts.offset(max(page_start, 0)).limit(page_end) bursts = bursts.all() except Exception, excep: self.logger.exception(excep) bursts = None
def cmd_score(self, sender, message): classement = self.bot.session.query(HiddenBase).order_by( desc(HiddenBase.score)).all() if classement != []: sc = "\nHidden Word Score :" pseudo = "" sc += "\n┌" + 39 * "─" + "┐" for hidden in classement: pseudo = self.bot.occupants.jid_to_pseudo(hidden.jid) sc += "\n│ %-4s - %-30s │" % (hidden.score, pseudo) sc += "\n└" + 39 * "─" + "┘" return {"text": sc, "monospace": True} else: return "Aucun mot caché trouvé..."
def get_last_account_status(self, account_id: int, end_date: date = None) -> StatusDbo: """Gets the last defined status of a given account at a given date. :param account_id: the account id :param end_date: the latest allowed date for the status to search :return: the status """ status = self.__entity_manager.query(StatusDbo).filter( StatusDbo.account_id == account_id) status = status.order_by(desc(StatusDbo.date)) if end_date is not None: status = status.filter(StatusDbo.date < end_date) return status.first()
def portal_division_dict(article, tags=None): if (not hasattr(article, 'portal_division_id')) or ( article.portal_division_id is None): return {'positioned_articles': []} else: filter = article.position_unique_filter() return { 'positioned_articles': [ pda.get_client_side_dict(fields='id|position|title') for pda in db(ArticlePortalDivision).filter(filter).order_by( expression.desc(ArticlePortalDivision.position)).all() ], 'availableTags': tags }
def get_from_database(): session = DBSession() query = session.query(cls) if device is not None: query = query.select_from(join(File, Device)). \ filter(Device.name == device) if type is not None: query = query.filter(cls.type == type) # Limit the query and order it query = query.order_by(desc(cls.date_created))[:20] return query
def alarms_by_contract_resource_project(meter, project=None, resource=None): """ Retrieve ordered by priority. The first one should be triggered. because is the best match. """ contract_q = model_query(m.SLAContract).\ filter( or_((m.SLAContract.project_id == project) & (m.SLAContract.resource_id == resource)) | or_((m.SLAContract.resource_id == None) & (m.SLAContract.project_id == project)) | or_((m.SLAContract.project_id == None) & (m.SLAContract.resource_id == None))). \ subquery() query = model_query(m.AlarmTrack) query = query.filter(m.AlarmTrack.meter==meter) obj = query.join(contract_q, m.AlarmTrack.contract_id==contract_q.c.id) obj = obj.order_by(desc(contract_q.c.resource_id)) obj = obj.order_by(desc(contract_q.c.project_id)).all() if not obj: raise exc.NotFoundException() return obj
def index(): run_count = g.tdb.session.query(Run).count() run_last = g.tdb.session.query(Run).order_by(desc(Run.number)).first() boards_count = 0 crates_count = 0 db_versions = g.tdb.session.query(SchemaVersion).order_by(SchemaVersion.version.desc()) return render_template("statistics/index.html", run_count=run_count, run_last=run_last, boards_count=boards_count, crates_count=crates_count, db_versions=db_versions) pass
def get_swupd_msgs(most_recent=None): q = db.session.query(Record.tsp, Record.machine_id, Record.backtrace).join(Classification) q = q.filter( Classification.classification.like( 'org.clearlinux/swupd-client/%')) if most_recent: interval_sec = 24 * 60 * 60 * int(most_recent) current_time = time() sec_in_past = current_time - interval_sec q = q.filter(Record.tsp > sec_in_past) q = q.order_by(desc(Record.tsp)) return q
def get_changes_of_feed(token_id, limit, user_id=None): query = DBSession.query(DocumentVersion.history_metadata_id) \ .join(HistoryMetaData) \ .join(Document) \ .filter(Document.type.notin_([OUTING_TYPE, USERPROFILE_TYPE])) \ .order_by(desc(DocumentVersion.history_metadata_id)) # pagination filter if token_id is not None: query = query.filter(DocumentVersion.history_metadata_id < token_id) if user_id is not None: query = query.filter(HistoryMetaData.user_id == user_id) return query.limit(limit).all()
def __table_args__(cls): return ( Index( f"{cls.__tablename__}_unique_idx", "name", "version", "language", unique=True, ), Index( f"{cls.__tablename__}_inserted_idx", "inserted_at", expression.desc(cls.inserted_at), ), )
def get_top_crash_guilties(classes=None): q = db.session.query(Guilty.function, Guilty.module, Record.build, db.func.count(Record.id).label('total'), Guilty.id, Guilty.comment) q = q.join(Record) if not classes: classes = ['org.clearlinux/crash/clr'] q = q.filter(Record.classification.in_(classes)) q = q.filter(Record.build.op('~')('^[0-9][0-9]+$')) q = q.filter(cast(Record.build, db.Integer) <= 100000) q = q.filter(Guilty.hide == False) q = q.group_by(Guilty.function, Guilty.module, Guilty.comment, Guilty.id, Record.build) q = q.order_by(desc(cast(Record.build, db.Integer)), desc('total')) # query for records created in the last week (~ 10 Clear builds) q = q.filter( Record.build.in_( sorted(tuple(set([x[2] for x in q.all()])), key=lambda x: int(x))[-8:])) interval_sec = 24 * 60 * 60 * 7 current_time = time() sec_in_past = current_time - interval_sec q = q.filter(Record.timestamp_client > sec_in_past) return q.all()
def list(self): if not 'page' in request.params: page = 1 else: page = request.params['page'] c.appliances = webhelpers.paginate.Page( meta.Session.query(Appliance).order_by(desc(Appliance.name)), page=int(page), items_per_page=15) if 'partial' in request.params: return render('appliance_list_ajax.mako') else: # Render the full page return render('appliance_list.mako')
def totals(cls, interval): """Aggregate sitewide totals for self-serve promotion traffic. We only aggregate codenames that start with a link type prefix which effectively filters out all DART / 300x100 etc. traffic numbers. """ start_time, stop_time = time_range(interval) q = (Session.query(cls.date, sum(cls.pageview_count).label("sum")) .filter(cls.interval == interval) .filter(cls.date > start_time) .filter(cls.codename.startswith(Link._type_prefix)) .group_by(cls.date) .order_by(desc(cls.date))) return fill_gaps(interval, start_time, stop_time, q, "sum")
def parse_custom_query(session, args, group, order): table = Ticket.__table__ cols = table.columns if group not in cols: group = 'status' if order not in cols: order = 'priority' groupcol = cols[group] ordercol = cols[order] preds = [ cols[key].in_(values) for (key, values) in args.iterlists() if key in cols ] q = session.query(Ticket.id.label('ticket'), Ticket.summary, Ticket.owner, Ticket.type, Ticket.priority, Ticket.component, Ticket.time.label('created'), Enum.value.label('__color__'), groupcol.label('__group__')).filter( and_(Enum.type == 'priority', Ticket.priority == Enum.name, *preds)) return q.order_by([ desc(groupcol) if args.get('groupdesc') == '1' else groupcol, desc(ordercol) if args.get('desc') == '1' else ordercol, ])
def protocol(): form = ProtocolForm(request.form) if request.method == 'POST': if form.validate(): last_protocol = [] if Protocol.query.filter(Protocol.patient_id == form.patient_id.data).first() is not None: last_form_id = Protocol.query \ .filter(Protocol.patient_id == form.patient_id.data) \ .order_by(desc(Protocol.form_id)) \ .first().form_id last_protocol = sorted(Protocol.query \ .filter(Protocol.patient_id == form.patient_id.data, Protocol.form_id == int(last_form_id)) \ .order_by(desc(Protocol.form_id)) \ .all(), key=lambda x: x.row) return render_template('admin/protocol.html', patient_id=clean(form.patient_id.data), form_id=clean(form.form_id.data), last=last_protocol) else: flash(u'Please check form entries', 'error') return redirect(url_for('admin.protocol')) return render_template('admin/protocol_init.html', form=form)
def get_frequent_words(self, params, project, part_of_speech, is_lemmatized): words_query = None like_query = part_of_speech + "%" if "query_id" in params: query = Query.query.get(params["query_id"]) words_query = db.session.query( Word.id, Word.lemma.label("lemma"), Word.surface.label("word"), func.count(Sentence.id).label("sentence_count"), func.count(Sentence.document_id.distinct()).label("document_count")).\ group_by(Word.lemma if is_lemmatized else Word.surface).\ filter(Sentence.project_id == project.id).\ join(SentenceInQuery, SentenceInQuery.sentence_id == Sentence.id).\ filter(SentenceInQuery.query_id == query.id).\ filter(Word.part_of_speech.like(like_query)).\ filter(Word.id == WordInSentence.word_id).\ filter(Sentence.id == WordInSentence.sentence_id) else: # There's no query id, we just want the most frequent words in # the whole collection. is_lemmatized = False count_docs = False words_query = FrequentWord.query.\ filter(FrequentWord.project_id == project.id).\ filter(FrequentWord.pos.like(like_query)) words_query = words_query.order_by(desc("sentence_count")) results = [] for word in words_query: result = { "word": word.lemma if is_lemmatized else word.word, "is_lemmatized": 1 if is_lemmatized else 0, "count": word.sentence_count, } if "query_id" in params: result["id"] = "." + str(word.id) if is_lemmatized else str( word.id) result["document_count"] = word.document_count else: result["id"] = str(word.word_id) results.append(result) return results
class WorkFlow(db.Model, TimestampMixin): """Define WorkFlow.""" __tablename__ = 'workflow_workflow' id = db.Column(db.Integer(), nullable=False, primary_key=True, autoincrement=True) """Flows identifier.""" flows_id = db.Column(UUIDType, nullable=False, unique=True, index=True, default=uuid.uuid4()) """the id of flows.""" flows_name = db.Column(db.String(255), nullable=True, unique=False, index=False) """the name of flows.""" itemtype_id = db.Column(db.Integer(), db.ForeignKey(ItemType.id), nullable=False, unique=False) """the id of itemtype.""" itemtype = db.relationship(ItemType, backref=db.backref( 'workflow', lazy='dynamic', order_by=desc('item_type.tag'))) index_tree_id = db.Column(db.BigInteger, nullable=True, unique=False) """Index tree id that this workflow will belong to""" flow_id = db.Column(db.Integer(), db.ForeignKey(FlowDefine.id), nullable=False, unique=False) """the id of flow.""" flow_define = db.relationship(FlowDefine, backref=db.backref('workflow', lazy='dynamic'))
def generate_mac(self, session, dbmachine): """ Generate a mac address for virtual hardware. Algorithm: * Query for first mac address in aqdb starting with vendor prefix, order by mac descending. * If no address, or address less than prefix start, use prefix start. * If the found address is not suffix end, increment by one and use it. * If the address is suffix end, requery for the full list and scan through for holes. Use the first hole. * If no holes, error. [In this case, we're still not completely dead in the water - the mac address would just need to be given manually.] """ if dbmachine.model.machine_type != "virtual_machine": raise ArgumentError("Can only automatically generate MAC " "addresses for virtual hardware.") if not dbmachine.cluster or dbmachine.cluster.cluster_type != 'esx': raise UnimplementedError("MAC address auto-generation has only " "been enabled for ESX Clusters.") # FIXME: These values should probably be configurable. mac_prefix_esx = "00:50:56" mac_start_esx = mac_prefix_esx + ":01:20:00" mac_end_esx = mac_prefix_esx + ":3f:ff:ff" mac_start = MACAddress(mac_start_esx) mac_end = MACAddress(mac_end_esx) q = session.query(Interface.mac) q = q.filter(Interface.mac.between(str(mac_start), str(mac_end))) # This query (with a different order_by) is used below. mac = q.order_by(desc(Interface.mac)).first() if not mac: return str(mac_start) highest_mac = MACAddress(mac[0]) if highest_mac < mac_start: return str(mac_start) if highest_mac < mac_end: return str(highest_mac.next()) potential_hole = mac_start for mac in q.order_by(asc(Interface.mac)).all(): current_mac = MACAddress(mac[0]) if current_mac < mac_start: continue if potential_hole < current_mac: return str(potential_hole) potential_hole = current_mac.next() raise ArgumentError("All MAC addresses between %s and %s inclusive " "are currently in use." % (mac_start, mac_end))
def _recordActionOnWorkTask(self, task_id, employee, action_time, location): # We're going to use the last action report on the same task we're # working now to determine if the pointage is a task start or task stop # BUG For some reason if I only ask the action kind # the query returns no row at all... q = self.dao.session.query(TaskActionReport.task_action_report_id, TaskActionReport.kind).\ filter(and_(TaskActionReport.task_id == task_id, TaskActionReport.reporter == employee)).order_by(desc(TaskActionReport.time),desc(TaskActionReport.task_action_report_id)) last_action_report = q.first() last_action_report_kind = None if last_action_report: # There is a last action report last_action_report_kind = last_action_report[1] # Who else works on the task ? # q = self.dao.session.query(TaskActionReport.task_action_report_id, TaskActionReport.reporter_id).filter(and_(TaskActionReport.task_id == task_id,TaskActionReport.reporter_id != employee_id)).order_by(desc(TaskActionReport.time),desc(TaskActionReport.task_action_report_id)) # When pointage is on started task, then we *guess* # the intention of the user is to stop the task. # and vice versa... action_kind = TaskActionReportType.start_task if last_action_report_kind == TaskActionReportType.start_task: action_kind = TaskActionReportType.stop_task elif last_action_report_kind == TaskActionReportType.stop_task: action_kind = TaskActionReportType.start_task mainlog.debug( "Constructing task action report on task {}".format(task_id)) self.dao.task_action_report_dao.fast_create(task_id, employee, action_time, action_kind, location) # No commit self.dao.commit() task_action_reports = self.dao.task_action_report_dao.get_reports_for_employee_on_date( employee, action_time.date()) # if len(task_action_reports) > 0: # mainlog.debug(u"_recordActionOnWorkTask : {}".format(task_action_reports[-1])) return action_kind
def _attach_visit_data(self) -> "TopicQuery": """Join the data related to the user's last visit to the topic(s).""" # subquery using LATERAL to select only the newest visit for each topic lateral_subquery = (self.request.db_session.query( TopicVisit.visit_time, TopicVisit.num_comments).filter( TopicVisit.topic_id == Topic.topic_id, TopicVisit.user == self.request.user, ).order_by(desc(TopicVisit.visit_time)).limit(1).correlate( Topic).subquery().lateral()) # join on "true" since the subquery already restricts to the row we want query = self.outerjoin(lateral_subquery, text("true")) query = query.add_columns(lateral_subquery) return query