def domain_pie_query(domain, reportid, num_of_days=0):
    "Run domain query"
    queryfield = getattr(Message, REPORTS[reportid]['address'])
    orderby = REPORTS[reportid]['sort']
    query = Session.query(queryfield.label('address'),
                          func.count(queryfield).label('count'),
                          func.sum(Message.size).label('size'))
    if reportid == '10':
        query = query.filter(queryfield != u'127.0.0.1')\
                     .group_by(queryfield)\
                     .order_by(desc(orderby))
    else:
        query = query.filter(queryfield != u'')\
                     .group_by(queryfield)\
                     .order_by(desc(orderby))
    if reportid in ['5', '6', '7', '8']:
        query = query.filter(Message.to_domain == domain)
    else:
        query = query.filter(or_(Message.from_domain == domain,
                                 Message.to_domain == domain))
    if int(num_of_days) > 0:
        numofdays = datetime.timedelta(days=num_of_days)
        startdate = now().date() - numofdays
        query = query.filter(Message.timestamp > str(startdate))
    data = query[:10]
    return data

def most(filter_by, time_by, page_offset):
    results = Memorial.query.filter(Memorial.status == 3)

    if filter_by == 0:
        results = results.order_by(desc(Memorial.views))
    elif filter_by == 1:
        results = results.order_by(desc(Memorial.condolences))
    elif filter_by == 2:
        results = results.order_by(desc(Memorial.remembers))

    # we don't need a date constraint on 'all time'
    # if time_by in range(0, 4):
    if False:
        today = date.today()
        if time_by == 0:
            delta = today
        elif time_by == 1:
            delta = today - timedelta(days=1)
        elif time_by == 2:
            delta = today - timedelta(weeks=1)
        else:
            delta = today - timedelta(weeks=4)

        results = results.filter(Memorial.created >= delta)

    page_offset = page_offset * RESULTS_COUNT

    # we grab one extra so we know whether there is more left after this query
    results = results.limit(RESULTS_COUNT + 1).offset(page_offset)
    results = results.all()  # use all so it returns as a list

    for memorial in results:
        # first() for now since each memorial will have just one picture
        memorial.media = Media.query.filter_by(memorial_id=memorial.id).first()

    return results

def test_order_by(self):
    user2 = self.user_manager.create(**user2_data)
    history = self.history_manager.create(name='history', user=user2)
    contents = []
    contents.extend([self.add_hda_to_history(history, name=('hda-' + str(x)))
                     for x in xrange(3)])
    contents.append(self.add_list_collection_to_history(history, contents[:3]))
    contents.extend([self.add_hda_to_history(history, name=('hda-' + str(x)))
                     for x in xrange(4, 6)])
    contents.append(self.add_list_collection_to_history(history, contents[4:6]))

    self.log("should default to hid order_by")
    self.assertEqual(self.contents_manager.contents(history), contents)

    self.log("should allow asc, desc order_by")
    self.assertEqual(self.contents_manager.contents(history, order_by=desc('hid')),
                     contents[::-1])

    def get_create_time(item):
        create_time = getattr(item, 'create_time', None)
        if not create_time:
            create_time = item.collection.create_time
        return create_time

    self.log("should allow create_time order_by")
    newest_first = sorted(contents, key=get_create_time, reverse=True)
    results = self.contents_manager.contents(history, order_by=desc('create_time'))
    self.assertEqual(newest_first, results)

    self.log("should allow update_time order_by")
    # change the oldest created to update the update_time
    contents[0].name = 'zany and/or wacky'
    self.app.model.context.flush()
    results = self.contents_manager.contents(history, order_by=desc('update_time'))
    self.assertEqual(contents[0], results[0])

def get(self, tablename):
    try:
        session = Session()
        M = make_table_model(tablename.encode('utf-8'))
        # filter_args = [getattr(M, k) == v[0] for k, v in self.request.arguments.items()]
        _params = {}
        for k, v in self.request.arguments.items():
            _params[k] = v[0]
        logger.info(tablename + str(_params))
        filter_args = utils.get_filter_args(M, _params)
        if filter_args:
            models = session.query(M).filter(*filter_args).order_by(desc('id')).limit(100)
        else:
            models = session.query(M).order_by(desc('id')).limit(100)
        logger.debug(models)
        models = [[getattr(model, c.name) for c in M.__table__._columns]
                  for model in models]
        clms = [c.name for c in M.__table__._columns]
        # hide the primary_key
        result = [dict(zip(clms[1:], row[1:])) for row in models]
        for item in result:
            for k in item:
                if type(item[k]) == datetime:
                    item[k] = item[k].strftime("%Y-%m-%d %H:%M:%S")
                elif type(item[k]) == unicode:
                    item[k] = item[k].strip()
        self.write(json.dumps(result))
    except BaseException, e:
        self.write(json.dumps({'msg': 'Request Error'}))
        logger.error(traceback.format_exc(e))

def general(env):
    """ Default page. """
    if loggedin(env):
        tmpl = jinjaenv.get_template('meinpage.html')
        userid = env['beaker.session']['loggedin_id']
        userinfo = ut.info(userid)
        user_commits = Session.query(Commit).filter(
            Commit.user_id == userid).order_by(
            sqlalchemy.desc(Commit.id)).limit(5).all()
        script_commits = Session.query(Commit).join(
            (Script, Script.id == Commit.script_id)).filter(
            Script.owner_id == userid).order_by(
            sqlalchemy.desc(Commit.id)).limit(5).all()
        return template_render(tmpl, {
            'session': env['beaker.session'],
            'user': userinfo['user'],
            'ttc': userinfo['time']['commit_time'],
            'tc': userinfo['time']['commit_amount'],
            'own_commits': user_commits,
            'script_commits': script_commits,
        })
    else:
        tmpl = jinjaenv.get_template('base.html')
        return template_render(tmpl, {'session': env['beaker.session']})

def parse_order_by(self, order_by_string, default=None):
    """Return an ORM compatible order_by using the given string"""
    # TODO: generalize into class
    # TODO: general (enough) columns
    if order_by_string in ('create_time', 'create_time-dsc'):
        return desc(self.model_class.create_time)
    if order_by_string == 'create_time-asc':
        return asc(self.model_class.create_time)
    if order_by_string in ('update_time', 'update_time-dsc'):
        return desc(self.model_class.update_time)
    if order_by_string == 'update_time-asc':
        return asc(self.model_class.update_time)
    if order_by_string in ('name', 'name-asc'):
        return asc(self.model_class.name)
    if order_by_string == 'name-dsc':
        return desc(self.model_class.name)
    # TODO: history columns
    if order_by_string in ('size', 'size-dsc'):
        return desc(self.model_class.disk_size)
    if order_by_string == 'size-asc':
        return asc(self.model_class.disk_size)
    # TODO: add functional/non-orm orders (such as rating)
    if default:
        return self.parse_order_by(default)
    raise glx_exceptions.RequestParameterInvalidException(
        'Unknown order_by', order_by=order_by_string,
        available=['create_time', 'update_time', 'name', 'size'])

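# A minimal usage sketch for parse_order_by() above (hypothetical `manager`
# and `session` names, not from the source; the method only builds the
# ordering clause, the caller applies it to a query):
def list_sorted(manager, session, requested='update_time-dsc'):
    # falls back to the default ordering when the requested string is unknown
    order_by = manager.parse_order_by(requested, default='create_time')
    return session.query(manager.model_class).order_by(order_by).all()
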
def get_cases(status, current_user, user=False, QA=False, current_user_perms=False,
              case_perm_checker=None, case_man=False):
    q = session.query(Case)
    if status != 'All' and status != "Queued":
        q = q.filter_by(currentStatus=status)
    elif status == "Queued":
        q = q.filter_by(currentStatus=CaseStatus.OPEN).join('tasks').filter(
            Task.currentStatus == TaskStatus.QUEUED)
    if user is True:
        q = q.join('tasks').join(Task.task_roles)
        if QA:
            q = q.filter(and_(UserTaskRoles.user_id == current_user.id,
                              UserTaskRoles.role.in_(UserTaskRoles.qa_roles)))
        else:
            q = q.filter(and_(UserTaskRoles.user_id == current_user.id,
                              UserTaskRoles.role.in_(UserTaskRoles.inv_roles)))
        return q.order_by(desc(Case.creation_date)).all()
    else:
        cases = q.order_by(desc(Case.creation_date)).all()
        output = []
        for case in cases:
            if (case_man is True and case.principle_case_manager is None
                    and case.secondary_case_manager is None) or case_man is False:
                try:
                    case_perm_checker(current_user, case, "view")
                    output.append(case)
                except Forbidden:
                    pass
        return output

def GetProducts(self, tenantId, pageNo=0, pageSize=50, searchField=None,
                searchValue=None, status=None, supplierId=None):
    if not tenantId:
        return None
    query = DBSession.query(Product).filter(Product.TenantId == tenantId)
    if searchField:
        if searchField == "Name" and searchValue:
            query = query.filter(Product.Name.like("%s%%" % searchValue)).order_by(Product.Name)
        elif searchField == "Barcode" and searchValue:
            query = query.filter(Product.Barcode == searchValue)
        elif searchField == "MRP" and searchValue:
            query = query.filter(Product.MRP == searchValue)
        elif searchField == "SellPrice" and searchValue:
            query = query.filter(Product.SellPrice == searchValue)
        elif searchField == "SupplierName" and searchValue:
            query = query.join(Supplier).filter(Supplier.Name == searchValue)
    if status:
        query = query.filter(Product.Status == status)
    if supplierId:
        query = query.filter(Product.SupplierId == supplierId)
    query = query.order_by(desc(Product.UpdatedOn), desc(Product.CreatedOn))
    lstItems = query.offset(pageNo).limit(pageSize).all()
    return lstItems, query.count()

def _changeOrderBy(self, widget, fieldsString):
    """ Changes results order

    Tweak made for SA 0.6.3 ... still needs further investigation
    """
    if sqlalchemy.__version__ > "0.6.0":
        if (self.orderBy is not None) == (fieldsString[1] is not None):
            if self.flag == False:
                self.orderBy = asc(fieldsString[1])
                self.flag = True
            else:
                self.orderBy = desc(fieldsString[1])
                self.flag = False
        else:
            self.orderBy = fieldsString[1]
        if fieldsString[0] is not None:
            self.join = fieldsString[0]
    else:
        if self.orderBy == fieldsString[1]:
            if self.flag == False:
                self.orderBy = asc(fieldsString[1])
                self.flag = True
            else:
                self.orderBy = desc(fieldsString[1])
                self.flag = False
        else:
            self.orderBy = fieldsString[1]
        if fieldsString[0]:
            self.join = fieldsString[0]
    self.refresh()

def get(self, groupname, name):
    """
    Review the articles for a specific feed on this key.
    """
    key = auth()
    feed = Feed.query.filter(and_(Feed.name == name, Feed.key == key)).first()
    if not feed:
        abort(404)

    per_page = 10
    parser = restful.reqparse.RequestParser()
    parser.add_argument("page", type=int, help="", required=False, default=1)
    parser.add_argument("content", type=bool, help="", required=False, default=None)
    args = parser.parse_args()

    # Return a list of the JSONified Articles ordered by descending creation
    # date and paginated.
    if args.content == True:
        return [a.jsonify() for a in
                Article.query.filter(and_(Article.key == key,
                                          Article.content != None,
                                          Article.feed == feed))
                .order_by(desc(Article.created)).paginate(args.page, per_page).items]
    elif args.content == False:
        return [a.jsonify() for a in
                Article.query.filter(and_(Article.key == key,
                                          Article.content == None,
                                          Article.feed == feed))
                .order_by(desc(Article.created)).paginate(args.page, per_page).items]
    return [a.jsonify() for a in
            Article.query.filter(and_(Article.key == key, Article.feed == feed))
            .order_by(desc(Article.created)).paginate(args.page, per_page).items]

def candidate(name):
    if g.user_email == None:
        flash(u'로그인 후에 이용해주세요', 'danger')  # "Please log in first"
        return redirect(url_for('login'))
    else:
        cand_data = {}
        # We don't know whether the candidate is stored in slot A or slot B,
        # so we use the following technique: try A first, fall back to B.
        try:
            each_match = Match.query.filter(Match.candidate_A_namename == name).all()
            each_match = each_match[0]
            cand_data['name'] = name
            cand_data['school'] = each_match.candidate_A_school
            cand_data['photo'] = each_match.candidate_A_photolink
            cand_data['group'] = each_match.group
        except:
            each_match = Match.query.filter(Match.candidate_B_namename == name).all()
            each_match = each_match[0]
            cand_data['name'] = name
            cand_data['school'] = each_match.candidate_B_school
            cand_data['photo'] = each_match.candidate_B_photolink
            cand_data['group'] = each_match.group

        comments_A = Comment.query.order_by(desc(Comment.date_created)).filter(
            Comment.comment_A == name).all()
        comments_B = Comment.query.order_by(desc(Comment.date_created)).filter(
            Comment.comment_B == name).all()
        comments_indiv = Indiv_Comment.query.order_by(desc(Indiv_Comment.date_created)).filter(
            Indiv_Comment.comment_name == name).all()
        comments = comments_A + comments_B

        return render_template("candidate_page.html", cand_data=cand_data,
                               comments=comments, comments_indiv=comments_indiv,
                               active_tab="candidate")

def indiv_details(request):
    params = int(request.matchdict['id'])
    join_table = join(SatTrx, ObjectsCaracValues,
                      SatTrx.ptt == cast(ObjectsCaracValues.value, Integer)
                      ).join(Individual, ObjectsCaracValues.object == Individual.id)
    query = select([ObjectsCaracValues.value.label('id'),
                    Individual.id.label('ind_id'),
                    Individual.survey_type.label('survey_type'),
                    Individual.status.label('status'),
                    Individual.monitoring_status.label('monitoring_status'),
                    Individual.birth_date.label('birth_date'),
                    Individual.ptt.label('ptt'),
                    ObjectsCaracValues.begin_date.label('begin_date'),
                    ObjectsCaracValues.end_date.label('end_date')]
                   ).select_from(join_table
                   ).where(and_(SatTrx.model.like('GSM%'),
                                ObjectsCaracValues.carac_type == 19,
                                ObjectsCaracValues.object_type == 'Individual')
                   ).where(ObjectsCaracValues.value == params
                   ).order_by(desc(ObjectsCaracValues.begin_date))

    data = DBSession.execute(query).first()
    transaction.commit()

    if data['end_date'] == None:
        end_date = datetime.datetime.now()
    else:
        end_date = data['end_date']

    result = dict([(key[0], key[1]) for key in data.items()])
    print(result)
    result['duration'] = ((end_date.month - data['begin_date'].month)
                          + (end_date.year - data['begin_date'].year) * 12)

    query = select([V_Individuals_LatLonDate.c.date]
                   ).where(V_Individuals_LatLonDate.c.ind_id == result['ind_id']
                   ).order_by(desc(V_Individuals_LatLonDate.c.date)).limit(1)
    lastObs = DBSession.execute(query).fetchone()
    result['last_observation'] = lastObs['date'].strftime('%d/%m/%Y')
    if result['birth_date'] != None:
        result['birth_date'] = result['birth_date'].strftime('%d/%m/%Y')
    del result['begin_date'], result['end_date']
    print(result)
    return result

def sorter(default_sort_id, kwd):
    """
    Initialize sorting variables
    """
    SortSpec = namedtuple('SortSpec', ['sort_id', 'order', 'arrow', 'exc_order'])

    sort_id = kwd.get('sort_id')
    order = kwd.get('order')

    # Parse the default value
    if sort_id == "default":
        sort_id = default_sort_id

    # Create the sort
    if order == "asc":
        _order = sa.asc(sort_id)
    elif order == "desc":
        _order = sa.desc(sort_id)
    else:
        # In case of default
        order = "desc"
        _order = sa.desc(sort_id)

    # Create an arrow icon to put beside the ordered column
    up_arrow = "↑"
    down_arrow = "↓"
    arrow = " "
    if order == "asc":
        arrow += down_arrow
    else:
        arrow += up_arrow

    return SortSpec(sort_id, order, arrow, _order)

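# A minimal usage sketch for sorter() above (the kwd values and the `jobs`
# table are hypothetical, not from the source; exc_order is the SQLAlchemy
# ordering clause, the other fields drive the grid UI):
spec = sorter('update_time', {'sort_id': 'default', 'order': 'desc'})
query = sa.select([jobs]).order_by(spec.exc_order)
header = 'Updated' + spec.arrow  # column header plus the direction arrow
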
def index(self):
    documentation = "https://github.com/ceph/shaman#shaman"
    projects = Project.query.all()
    now = datetime.datetime.utcnow()
    # ten days' worth of data
    periods = [
        (now - datetime.timedelta(days=day - 1), now - datetime.timedelta(days=day + 1))
        for day in range(0, 10)
    ]
    area_data = []
    for upper_range, lower_range in periods:
        day = lower_range + datetime.timedelta(days=1)
        day_data = {'period': day.strftime("%Y-%m-%d")}
        for project in projects:
            repository_count = Repo.filter_by(project=project).filter(
                Repo.status == "ready").filter(
                and_(Repo.modified > lower_range),
                and_(Repo.modified < upper_range)
            ).count()
            day_data[str(project.name)] = int(repository_count)
        area_data.append(day_data)
    latest_repos = Repo.query.filter_by(status="ready").order_by(
        desc(Repo.modified)).limit(10).all()
    latest_builds = Build.query.filter_by(status="completed").order_by(
        desc(Build.modified)).limit(10).all()
    return dict(
        description=description,
        documentation=documentation,
        area_data=str(area_data),
        projects=[str(p.name) for p in projects],
        latest_repos=latest_repos,
        latest_builds=latest_builds,
    )

def get_organizations(name=None):
    '''
    Regular response option for organizations.
    '''
    filters = request.args
    filters, querystring = get_query_params(request.args)

    if name:
        # Get one named organization.
        filter = Organization.name == raw_name(name)
        org = db.session.query(Organization).filter(filter).first()
        if org:
            return jsonify(org.asdict(True))
        else:
            # If no org found
            return jsonify({"status": "Resource Not Found"}), 404

    # Get a bunch of organizations.
    query = db.session.query(Organization)
    # Default ordering of results
    ordering = desc(Organization.last_updated)

    for attr, value in filters.iteritems():
        if 'q' in attr:
            query = query.filter("organization.tsv_body @@ plainto_tsquery('%s')" % value)
            ordering = desc(func.ts_rank(Organization.tsv_body,
                                         func.plainto_tsquery('%s' % value)))
        else:
            query = query.filter(getattr(Organization, attr).ilike('%%%s%%' % value))

    query = query.order_by(ordering)
    response = paged_results(query, int(request.args.get('page', 1)),
                             int(request.args.get('per_page', 10)), querystring)
    return jsonify(response)

def get(self, session=None):
    """ List of previously accepted entries """
    args = history_parser.parse_args()
    page = args['page']
    max_results = args['max']
    task = args['task']

    if task:
        count = session.query(History).filter(History.task == task).count()
    else:
        count = session.query(History).count()

    if not count:
        return {'entries': [], 'pages': 0}

    pages = int(ceil(count / float(max_results)))

    if page > pages:
        return {'error': 'page %s does not exist' % page}, 404

    start = (page - 1) * max_results
    finish = start + max_results

    if task:
        items = session.query(History).filter(History.task == task).order_by(
            desc(History.time)).slice(start, finish)
    else:
        items = session.query(History).order_by(desc(History.time)).slice(start, finish)

    return jsonify({
        'entries': [item.to_dict() for item in items],
        'pages': pages
    })

def filter(self, sql):
    plain = []
    sort = "id"
    for item in self.query:
        if item.startswith("sort:"):
            sort = item.partition(":")[2]
        elif item.startswith("score:"):
            score = int(item.partition(":")[2])
            sql = sql.filter(Post.score == score)
        else:
            t = Tag.get(item)
            if not t:
                logger.info("Couldn't find tag for '%s' - shortcutting", item)
                sql = sql.filter("1=0")
            elif t.is_plain_tag():
                plain.append(t.name)
    if plain:
        sql = sql.join(Post.tags).filter(Tag.name.in_(plain)).group_by(
            Post.id).having(func.count(Post.id) == len(plain))
    if sort == "score":
        sql = sql.order_by(desc(Post.score))
    elif sort == "-posted":
        sql = sql.order_by(asc(Post.id))
    else:  # "posted"
        sql = sql.order_by(desc(Post.id))
    return sql

def index(self, trans, **kwd):
    """
    GET /api/workflows

    Displays a collection of workflows.
    """
    rval = []
    for wf in (
        trans.sa_session.query(trans.app.model.StoredWorkflow)
        .filter_by(user=trans.user, deleted=False)
        .order_by(desc(trans.app.model.StoredWorkflow.table.c.update_time))
        .all()
    ):
        item = wf.to_dict(value_mapper={"id": trans.security.encode_id})
        encoded_id = trans.security.encode_id(wf.id)
        item["url"] = url_for("workflow", id=encoded_id)
        rval.append(item)
    for wf_sa in (
        trans.sa_session.query(trans.app.model.StoredWorkflowUserShareAssociation)
        .filter_by(user=trans.user)
        .join("stored_workflow")
        .filter(trans.app.model.StoredWorkflow.deleted == False)
        .order_by(desc(trans.app.model.StoredWorkflow.update_time))
        .all()
    ):
        item = wf_sa.stored_workflow.to_dict(value_mapper={"id": trans.security.encode_id})
        encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
        item["url"] = url_for("workflow", id=encoded_id)
        rval.append(item)
    return rval

def list_project_searches(project_external_id):
    project = get_project_by_id_or_404(project_external_id)

    page = get_valid_page_or_1()

    searches = DirectAwardSearch.query.filter(DirectAwardSearch.project_id == project.id)

    if 'latest-first' in request.args:
        if convert_to_boolean(request.args.get('latest-first')):
            searches = searches.order_by(desc(DirectAwardSearch.created_at),
                                         desc(DirectAwardSearch.id))
        else:
            searches = searches.order_by(asc(DirectAwardSearch.created_at),
                                         asc(DirectAwardSearch.id))
    else:
        searches = searches.order_by(asc(DirectAwardSearch.id))

    if convert_to_boolean(request.args.get('only-active', False)):
        searches = searches.filter(DirectAwardSearch.active == True)  # noqa

    pagination_params = request.args.to_dict()
    pagination_params['project_external_id'] = project.external_id
    return paginated_result_response(
        result_name="searches",
        results_query=searches,
        page=page,
        per_page=current_app.config['DM_API_PROJECTS_PAGE_SIZE'],
        endpoint='.list_project_searches',
        request_args=pagination_params,
    ), 200

def two_people_show(name1, name2):
    if g.user_email == None:
        flash(u'로그인 후에 이용해주세요', 'danger')  # "Please log in first"
        return redirect(url_for('login'))
    else:
        two_people = Comment.query.order_by(desc(Comment.date_created)).filter(
            Comment.comment_A == name1, Comment.comment_B == name2).all()
        if len(two_people) == 0:
            two_people = Comment.query.order_by(desc(Comment.date_created)).filter(
                Comment.comment_A == name2, Comment.comment_B == name1).all()

        # Each candidate may be stored in slot A or slot B, so try A first
        # and fall back to B.
        try:
            cand1 = Match.query.filter(Match.candidate_A_namename == name1).all()
            cand1 = cand1[0]
            photo1 = cand1.candidate_A_photolink
        except:
            cand1 = Match.query.filter(Match.candidate_B_namename == name1).all()
            cand1 = cand1[0]
            photo1 = cand1.candidate_B_photolink

        try:
            cand2 = Match.query.filter(Match.candidate_A_namename == name2).all()
            cand2 = cand2[0]
            photo2 = cand2.candidate_A_photolink
        except:
            cand2 = Match.query.filter(Match.candidate_B_namename == name2).all()
            cand2 = cand2[0]
            photo2 = cand2.candidate_B_photolink

        return render_template('two_people.html', two_people=two_people,
                               photo1=photo1, photo2=photo2,
                               name1=name1, name2=name2)

def getContacts(search, baoxiang):
    start = 0
    limit = 30
    if search != '':
        search = "%" + search + "%"
        if baoxiang != "":
            baoxiang = "%" + baoxiang + "%"
            # objs = Contact.objects.filter((Q(hetongbh__icontains=search) | Q(yiqibh__icontains=search)) & Q(baoxiang=baoxiang)).order_by('-yujifahuo_date')[start:start+limit]
            objs = session.query(PartsContact).filter(
                and_(
                    or_(PartsContact.hetongbh.like(search),
                        PartsContact.yiqibh.like(search)),
                    PartsContact.baoxiang.like(baoxiang)
                )
            ).order_by(desc(PartsContact.yujifahuo_date))  # [start:start+limit]
        else:
            objs = session.query(PartsContact).filter(
                or_(PartsContact.hetongbh.like(search),
                    PartsContact.yiqibh.like(search))
            ).order_by(desc(PartsContact.yujifahuo_date))  # [start:start+limit]
            # Contact.objects.filter(Q(hetongbh__icontains=search) | Q(yiqibh__icontains=search)).order_by('-yujifahuo_date')[start:start+limit]
    else:
        if baoxiang != "":
            baoxiang = "%" + baoxiang + "%"
            objs = session.query(PartsContact).filter(
                PartsContact.baoxiang.like(baoxiang)
            ).order_by(desc(PartsContact.yujifahuo_date))  # [start:start+limit]
        else:
            objs = session.query(PartsContact).order_by(
                desc(PartsContact.yujifahuo_date))  # [start:start+limit]
    return objs

def _get_traps(db, offset=0, limit=50, host=None, oid=None, severity=None):
    now = datetime.utcnow()
    active_query = (db
                    .query(Notification)
                    .filter(or_(
                        Notification.expires >= now,
                        Notification.expires == None
                    ))
                    .order_by(desc(Notification.sent)))
    active_query = filter_query(active_query, host, oid, severity)
    total_active = active_query.count()

    traps = active_query.offset(offset).limit(limit).all()
    num_active = len(traps)

    if num_active:
        remaining_offset = 0
    else:
        remaining_offset = offset - total_active
        if remaining_offset < 0:
            remaining_offset = 0

    if num_active < limit:
        expired_query = (db
                         .query(Notification)
                         .filter(Notification.expires < now)
                         .order_by(desc(Notification.sent)))
        expired_query = filter_query(expired_query, host, oid, severity)
        traps += expired_query.offset(remaining_offset).limit(limit - num_active).all()

    return traps, num_active

def _set_orderby_desc(self, query, model, limit, last_id, offset,
                      descending, orderby):
    """Return an updated query with the proper orderby and desc."""
    if orderby == 'fav_user_ids':
        n_favs = func.coalesce(func.array_length(model.fav_user_ids, 1), 0).label('n_favs')
        query = query.add_column(n_favs)
    if orderby in ['created', 'updated', 'finish_time']:
        if descending:
            query = query.order_by(desc(cast(getattr(model, orderby), TIMESTAMP)))
        else:
            query = query.order_by(cast(getattr(model, orderby), TIMESTAMP))
    else:
        if orderby != 'fav_user_ids':
            if descending:
                query = query.order_by(desc(getattr(model, orderby)))
            else:
                query = query.order_by(getattr(model, orderby))
        else:
            if descending:
                query = query.order_by(desc("n_favs"))
            else:
                query = query.order_by("n_favs")

    if last_id:
        query = query.limit(limit)
    else:
        query = query.limit(limit).offset(offset)
    return query

def __init__(self, user, reportid, filters=None):
    "Init"
    self.dbsession = Session
    self.user = user
    self.reportid = reportid
    self.model = None
    self.isaggr = False
    self.filters = filters
    queryfield = getattr(Message, REPORTS[self.reportid]['address'])
    orderby = REPORTS[reportid]['sort']
    if (self.reportid in ['3', '4', '7', '8'] and
            self.user.is_superadmin and not self.filters):
        # domains
        self.isaggr = True
        if self.reportid in ['3', '4']:
            # src
            self.model = SrcMessageTotals
            self.query = self.dbsession\
                .query(SrcMessageTotals.id.label('address'),
                       SrcMessageTotals.total.label('count'),
                       SrcMessageTotals.volume.label('size'))\
                .order_by(desc(orderby))
        else:
            # dst
            self.model = DstMessageTotals
            self.query = self.dbsession\
                .query(DstMessageTotals.id.label('address'),
                       DstMessageTotals.total.label('count'),
                       DstMessageTotals.volume.label('size'))\
                .order_by(desc(orderby))
    else:
        # emails & relays
        self.query = self.dbsession.query(queryfield.label('address'),
                                          func.count(queryfield).label('count'),
                                          func.sum(Message.size).label('size'))
        if self.reportid != '10':
            self.query = self.query.filter(queryfield != u'')\
                .group_by(queryfield).order_by(desc(orderby))
        else:
            self.query = self.query.filter(queryfield != u'127.0.0.1')\
                .group_by(queryfield).order_by(desc(orderby))
    if self.isaggr:
        uquery = AggrFilter(self.query)
    else:
        uquery = UserFilter(self.dbsession, self.user, self.query)
    if self.reportid not in ['5', '6', '7', '8']:
        self.query = uquery()
    if self.reportid in ['5', '6', '7', '8']:
        if not self.user.is_superadmin:
            uquery.setdirection('in')
            self.query = uquery()
        else:
            flf = self.model.id if self.isaggr else Message.to_domain
            self.query = self.query.filter(flf
                .in_(self.dbsession.query(Domain.name)
                     .filter(Domain.status == True)))

def show_all(sid, Rss=False):
    if Rss == True:
        article = se.query(Article).order_by(desc('date')).all()
    else:
        article = se.query(Article).order_by(desc('date')).limit(SKIP).offset(sid * SKIP).all()
    se.close()
    return article

def get_measurements_by_count(device_id, count, offset=1, interval=LIVE):
    """
    Get specific count of measurements
    :param device_id: uuid of device
    :param count: count of measurements
    :param offset: offset in page
    :param interval: interval of aggregation
    :return: list of measurements dto
    """
    device = Device.query.filter_by(uuid=device_id).first()
    measurements = []
    # Extract specific count of measurements.
    if interval == LIVE:
        measurements = device.measurements.order_by(desc(Measurement.timestamp)).\
            limit(count).offset(count * (offset - 1)).all()
    elif interval == HOUR:
        measurements = Hour.query.filter(Hour.device_id == device.id).\
            order_by(desc(Hour.timestamp)).\
            limit(count).offset(count * (offset - 1)).all()
    elif interval == DAY:
        measurements = Day.query.filter(Day.device_id == device.id).\
            order_by(desc(Day.timestamp)).\
            limit(count).offset(count * (offset - 1)).all()
    elif interval == WEEK:
        measurements = Week.query.filter(Week.device_id == device.id).\
            order_by(desc(Week.timestamp)).\
            limit(count).offset(count * (offset - 1)).all()
    return measurements_to_dto(measurements, count=count, offset=offset)

def index(self, trans, **kwd):
    """
    GET /api/workflows

    Displays a collection of workflows.

    :param show_published: if True, show also published workflows
    :type show_published: boolean
    """
    show_published = util.string_as_bool(kwd.get('show_published', 'False'))
    rval = []
    filter1 = (trans.app.model.StoredWorkflow.user == trans.user)
    if show_published:
        filter1 = or_(filter1, (trans.app.model.StoredWorkflow.published == true()))
    for wf in trans.sa_session.query(trans.app.model.StoredWorkflow).filter(
            filter1, trans.app.model.StoredWorkflow.table.c.deleted == false()).order_by(
            desc(trans.app.model.StoredWorkflow.table.c.update_time)).all():
        item = wf.to_dict(value_mapper={'id': trans.security.encode_id})
        encoded_id = trans.security.encode_id(wf.id)
        item['url'] = url_for('workflow', id=encoded_id)
        item['owner'] = wf.user.username
        rval.append(item)
    for wf_sa in trans.sa_session.query(
            trans.app.model.StoredWorkflowUserShareAssociation).filter_by(
            user=trans.user).join('stored_workflow').filter(
            trans.app.model.StoredWorkflow.deleted == false()).order_by(
            desc(trans.app.model.StoredWorkflow.update_time)).all():
        item = wf_sa.stored_workflow.to_dict(value_mapper={'id': trans.security.encode_id})
        encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id)
        item['url'] = url_for('workflow', id=encoded_id)
        item['owner'] = wf_sa.stored_workflow.user.username
        rval.append(item)
    return rval

def user_page(user_name):
    # Munge the user_name, and hope
    user_name = user_name.replace('_', ' ').lower()
    user = g.conn.session.query(User).filter(
        func.lower(User.username) == user_name).one()
    stats = {
        'query_count': g.conn.session.query(func.count(Query.id)).filter(
            Query.user_id == user.id).scalar(),
        'stars_count': g.conn.session.query(func.count(Star.id)).filter(
            Star.user_id == user.id).scalar()
    }
    draft_queries = g.conn.session.query(Query) \
        .filter(Query.user_id == user.id) \
        .filter_by(published=False) \
        .order_by(desc(Query.last_touched)) \
        .limit(10)
    published_queries = g.conn.session.query(Query) \
        .filter(Query.user_id == user.id) \
        .filter_by(published=True) \
        .order_by(desc(Query.last_touched)) \
        .limit(10)
    stars = g.conn.session.query(Star).join(Star.query) \
        .options(joinedload(Star.query)) \
        .filter(Star.user_id == user.id) \
        .order_by(desc(Star.timestamp)) \
        .limit(10)
    return render_template(
        "user.html",
        display_user=user,
        user=get_user(),
        stats=stats,
        draft_queries=draft_queries,
        published_queries=published_queries,
        stars=stars
    )

def renderGroupId(sid, page=1):
    group = Translators.query.filter(Translators.id == sid).scalar()
    if group is None:
        flash(gettext('Group/Translator not found? This is probably an error!'))
        return redirect(url_for('renderGroupsTable'))

    names = [tmp.name for tmp in group.alt_names]

    feeds = Feeds.query.options(joinedload('tags')) \
        .filter(Feeds.srcname.in_(names)) \
        .order_by(desc(Feeds.published))

    items = Releases.query.filter(Releases.tlgroup == group.id).order_by(
        desc(Releases.published)).all()

    feed_entries = feeds.paginate(page, app.config['SERIES_PER_PAGE'])

    ids = []
    for item in items:
        ids.append(item.series)

    series = Series.query.filter(Series.id.in_(ids)).order_by(Series.title).all()

    return render_template('group.html',
                           series=series,
                           releases=items,
                           sequence_item=feed_entries,
                           group=group,
                           wiki=wiki_views.render_wiki("Group", group.name))

def _get_last_trx_id_for_namespace(namespace_id, db_session):
    q = bakery(lambda session: session.query(Transaction.id))
    q += lambda q: q.filter(
        Transaction.namespace_id == bindparam('namespace_id'))
    q += lambda q: q.order_by(desc(Transaction.created_at)).\
        order_by(desc(Transaction.id)).limit(1)
    return q(db_session).params(namespace_id=namespace_id).one()[0]

def overview(username):
    player_names = {}
    user = User.query.filter(func.lower(User.username) == func.lower(username)).first()
    if not user:
        return redirect(url_for('profile.user_not_found', username=username))

    player1 = aliased(User)
    player2 = aliased(User)
    games = (
        Game.query
        .filter(
            ((Game.player1 == user.id) | (Game.player2 == user.id))
            & (Game.status != 'challenged')
            & (Game.status != 'declined')
            & (Game.status != 'aborted')
        )
        .join(player1, Game.player1 == player1.id).add_columns(player1.username)
        .join(player2, Game.player2 == player2.id, isouter=True).add_columns(player2.username)
        .order_by(desc(Game.id)).limit(10).all()
    )
    cricket_games = (
        CricketGame.query
        .filter(
            ((CricketGame.player1 == user.id) | (CricketGame.player2 == user.id))
            & (CricketGame.status != 'challenged')
            & (CricketGame.status != 'declined')
            & (CricketGame.status != 'aborted')
        )
        .join(player1, CricketGame.player1 == player1.id).add_columns(player1.username)
        .join(player2, CricketGame.player2 == player2.id, isouter=True).add_columns(player2.username)
        .order_by(desc(CricketGame.id)).limit(10).all()
    )
    games.extend(cricket_games)
    games.sort(key=lambda game: game[0].begin, reverse=True)

    stats = UserStatistic.query.filter_by(user=user.id).first()
    if not stats:
        stats = UserStatistic(user=user.id)
        db.session.add(stats)
        db.session.commit()

    settings = (
        UserSettings.query
        .with_entities(UserSettings.country, UserSettings.profile_text)
        .filter_by(user=user.id)
        .first()
    )
    if not settings:
        settings = UserSettings(user=user.id)
        db.session.add(settings)
        db.session.commit()
        country = None
        profile_text = None
    else:
        country, profile_text = settings

    if profile_text:
        profile_text = re.sub(
            r'(\bhttps:\/\/i\.imgur\.com\/\w+.\w+)',
            '<img src="' + r'\1' + '" style="max-width: 100%">',
            profile_text,
        )
        profile_text = linker.linkify(profile_text)
        profile_text = profile_text.replace('\n', '<br>')

    friend_query1 = Friendship.query.with_entities(Friendship.user2_id).filter_by(user1_id=current_user.id)
    friend_query2 = Friendship.query.with_entities(Friendship.user1_id).filter_by(user2_id=current_user.id)
    friend_list = friend_query1.union(friend_query2).all()
    friend_list = [r for (r,) in friend_list]

    avatar_url = avatars.url(f'{user.id}_thumbnail.jpg') if user.avatar else avatars.url('default.png')

    return render_template('profile/overview.html', user=user, games=games[:10],
                           player_names=player_names, friend_list=friend_list,
                           recently_online=user.recently_online(), country=country,
                           profile_text=profile_text, stats=stats,
                           avatar_url=avatar_url, title=lazy_gettext('Profile'))

def get(userId, imageDigest, tag, policyId=None, session=None):
    if not session:
        session = db.Session

    ret = {}

    if policyId:
        result = session.query(PolicyEval).filter_by(
            policyId=policyId, userId=userId, imageDigest=imageDigest,
            tag=tag).order_by(desc(PolicyEval.created_at)).first()
    else:
        result = session.query(PolicyEval).filter_by(
            userId=userId, imageDigest=imageDigest,
            tag=tag).order_by(desc(PolicyEval.created_at)).first()
    if result:
        obj = dict((key, value) for key, value in vars(result).items()
                   if not key.startswith('_'))
        ret = obj

    return ret

def all(cls, request):
    query = request.dbsession.query(BlogRecord)
    return query.order_by(sa.desc(BlogRecord.created))

def order_notes():
    """Notes to seller given during subscription creation"""
    subscriptions = Subscription.query.order_by(desc('created_at')).all()
    return render_template("admin/order-notes.html", subscriptions=subscriptions)

def newest(num):
    return Bookmark.query.order_by(desc(Bookmark.date)).limit(num)

def showCatalog():
    categories = session.query(Category).order_by(asc(Category.name))
    items = session.query(Items).order_by(desc(Items.date)).limit(5)
    return render_template('catalog.html', categories=categories, items=items)

class HostsView(PaginatedMixin,
                FilterAlchemyMixin,
                ReadWriteWorkspacedView,
                FilterWorkspacedMixin):
    route_base = 'hosts'
    model_class = Host
    order_field = desc(Host.vulnerability_critical_generic_count), \
                  desc(Host.vulnerability_high_generic_count), \
                  desc(Host.vulnerability_medium_generic_count), \
                  desc(Host.vulnerability_low_generic_count), \
                  desc(Host.vulnerability_info_generic_count), \
                  desc(Host.vulnerability_unclassified_generic_count), \
                  Host.ip.asc()
    schema_class = HostSchema
    filterset_class = HostFilterSet
    get_undefer = [Host.credentials_count,
                   Host.open_service_count,
                   Host.vulnerability_count]
    get_joinedloads = [Host.hostnames, Host.services, Host.update_user]

    def _get_base_query(self, workspace_name):
        return Host.query_with_count(None, None, workspace_name)

    @route('/filter')
    def filter(self, workspace_name):
        """
        ---
        get:
          tags: ["Filter", "Host"]
          description: Filters, sorts and groups hosts using a json with parameters. These parameters must be part of the model.
          parameters:
          - in: query
            name: q
            description: Recursive json with filters that supports operators. The json could also contain sort and group.
          responses:
            200:
              description: Returns filtered, sorted and grouped results
              content:
                application/json:
                  schema: FlaskRestlessSchema
            400:
              description: Invalid q was sent to the server
        tags: ["Filter", "Host"]
        responses:
          200:
            description: Ok
        """
        filters = flask.request.args.get('q', '{"filters": []}')
        filtered_objs, count = self._filter(filters, workspace_name,
                                            severity_count=True)

        class PageMeta:
            total = 0

        pagination_metadata = PageMeta()
        pagination_metadata.total = count
        return self._envelope_list(filtered_objs, pagination_metadata)

    @route('/bulk_create', methods=['POST'])
    def bulk_create(self, workspace_name):
        """
        ---
        post:
          tags: ["Bulk", "Host"]
          description: Creates hosts in bulk
          responses:
            201:
              description: Created
              content:
                application/json:
                  schema: HostSchema
            400:
              description: Bad request
            403:
              description: Forbidden
        tags: ["Bulk", "Host"]
        responses:
          200:
            description: Ok
        """
        try:
            validate_csrf(flask.request.form.get('csrf_token'))
        except wtforms.ValidationError:
            flask.abort(403)

        def parse_hosts(list_string):
            items = re.findall(r"([.a-zA-Z0-9_-]+)", list_string)
            return items

        workspace = self._get_workspace(workspace_name)

        logger.info("Create hosts from CSV")
        if 'file' not in flask.request.files:
            abort(400, "Missing File in request")
        hosts_file = flask.request.files['file']
        stream = StringIO(hosts_file.stream.read().decode("utf-8"), newline=None)
        FILE_HEADERS = {'description', 'hostnames', 'ip', 'os'}
        try:
            hosts_reader = csv.DictReader(stream)
            if set(hosts_reader.fieldnames) != FILE_HEADERS:
                logger.error("Missing Required headers in CSV (%s)", FILE_HEADERS)
                abort(400, f"Missing Required headers in CSV ({FILE_HEADERS})")
            hosts_created_count = 0
            hosts_with_errors_count = 0
            for host_dict in hosts_reader:
                try:
                    hostnames = parse_hosts(host_dict.pop('hostnames'))
                    other_fields = {'owned': False,
                                    'mac': u'00:00:00:00:00:00',
                                    'default_gateway_ip': u'None'}
                    host_dict.update(other_fields)
                    host = super()._perform_create(host_dict, workspace_name)
                    host.workspace = workspace
                    for name in hostnames:
                        get_or_create(db.session, Hostname, name=name,
                                      host=host, workspace=host.workspace)
                    db.session.commit()
                except Exception as e:
                    logger.error("Error creating host (%s)", e)
                    hosts_with_errors_count += 1
                else:
                    logger.debug("Host Created (%s)", host_dict)
                    hosts_created_count += 1
            return make_response(
                jsonify(hosts_created=hosts_created_count,
                        hosts_with_errors=hosts_with_errors_count), 200)
        except Exception as e:
            logger.error("Error parsing hosts CSV (%s)", e)
            abort(400, f"Error parsing hosts CSV ({e})")

    @route('/<host_id>/services')
    def service_list(self, workspace_name, host_id):
        """
        ---
        get:
          tags: ["Host", "Service"]
          summary: Get the services of a host
          responses:
            200:
              description: Ok
              content:
                application/json:
                  schema: ServiceSchema
        tags: ["Host", "Service"]
        responses:
          200:
            description: Ok
        """
        services = self._get_object(host_id, workspace_name).services
        return ServiceSchema(many=True).dump(services)

    @route('/countVulns')
    def count_vulns(self, workspace_name):
        """
        ---
        get:
          tags: ["Host"]
          summary: Counts Vulnerabilities per host
          responses:
            200:
              description: Ok
              content:
                application/json:
                  schema: HostCountSchema
        tags: ["Host"]
        responses:
          200:
            description: Ok
        """
        host_ids = flask.request.args.get('hosts', None)
        if host_ids:
            host_id_list = host_ids.split(',')
        else:
            host_id_list = None

        res_dict = {'hosts': {}}

        host_count_schema = HostCountSchema()
        host_count = Host.query_with_count(None, host_id_list, workspace_name)

        for host in host_count.all():
            res_dict["hosts"][host.id] = host_count_schema.dump(host)
        # return counts.data

        return res_dict

    @route('/<host_id>/tools_history')
    def tool_impacted_by_host(self, workspace_name, host_id):
        """
        ---
        get:
          tags: ["Host", "Command"]
          summary: "Get the command impacted by a host"
          responses:
            200:
              description: Ok
              content:
                application/json:
                  schema: CommandSchema
        tags: ["Host", "Command"]
        responses:
          200:
            description: Ok
        """
        workspace = self._get_workspace(workspace_name)
        query = db.session.query(Host, Command).filter(
            Host.id == CommandObject.object_id,
            CommandObject.object_type == 'host',
            Command.id == CommandObject.command_id,
            Host.workspace_id == workspace.id,
            Host.id == host_id).order_by(desc(CommandObject.create_date))
        result = query.all()
        res_dict = {'tools': []}
        for row in result:
            _, command = row
            res_dict['tools'].append({
                'command': command.tool,
                'user': command.user,
                'params': command.params,
                'command_id': command.id,
                'create_date': command.create_date.replace(tzinfo=pytz.utc).isoformat()
            })
        return res_dict

    def _perform_create(self, data, **kwargs):
        hostnames = data.pop('hostnames', [])
        host = super()._perform_create(data, **kwargs)
        for name in hostnames:
            get_or_create(db.session, Hostname, name=name, host=host,
                          workspace=host.workspace)
        db.session.commit()
        return host

    def _update_object(self, obj, data, **kwargs):
        try:
            hostnames = data.pop('hostnames')
        except KeyError:
            pass
        else:
            obj.set_hostnames(hostnames)
        # A commit is required here, otherwise it breaks (i'm not sure why)
        db.session.commit()
        return super()._update_object(obj, data)

    def _filter_query(self, query):
        query = super()._filter_query(query)
        search_term = flask.request.args.get('search', None)
        if search_term is not None:
            like_term = '%' + search_term + '%'
            match_ip = Host.ip.ilike(like_term)
            match_service_name = Host.services.any(Service.name.ilike(like_term))
            match_os = Host.os.ilike(like_term)
            match_hostname = Host.hostnames.any(Hostname.name.ilike(like_term))
            query = query.filter(match_ip
                                 | match_service_name
                                 | match_os
                                 | match_hostname)
        return query

    def _envelope_list(self, objects, pagination_metadata=None):
        hosts = []
        for index, host in enumerate(objects):
            # we use index when the filter endpoint uses group by and
            # the _id was not used in the group by
            hosts.append({
                'id': host.get('_id', index),
                'key': host.get('_id', index),
                'value': host
            })
        return {
            'rows': hosts,
            'count': (pagination_metadata and pagination_metadata.total or len(hosts)),
        }

def get_ratings_query(self):
    ratings = Ratings.query.filter_by(vendor_id=self.id)
    sorted_ratings = ratings.order_by(desc(Ratings.date_reviewed))
    return sorted_ratings

async def list_logs(request: web.Request, params: Any) -> web.Response:
    resp: MutableMapping[str, Any] = {'logs': []}
    dbpool = request.app['dbpool']
    domain_name = request['user']['domain_name']
    user_role = request['user']['role']
    user_uuid = request['user']['uuid']

    requester_access_key, owner_access_key = await get_access_key_scopes(request, params)
    log.info('LIST (ak:{0}/{1})',
             requester_access_key,
             owner_access_key if owner_access_key != requester_access_key else '*')
    async with dbpool.acquire() as conn:
        is_admin = True
        query = (sa.select('*')
                   .select_from(error_logs)
                   .order_by(sa.desc(error_logs.c.created_at))
                   .limit(params['page_size']))
        count_query = (sa.select([sa.func.count(error_logs.c.message)])
                         .select_from(error_logs))
        if params['page_no'] > 1:
            query = query.offset((params['page_no'] - 1) * params['page_size'])
        if request['is_superadmin']:
            pass
        elif user_role == UserRole.ADMIN or user_role == 'admin':
            j = (groups.join(agus, groups.c.id == agus.c.group_id))
            usr_query = (sa.select([agus.c.user_id])
                           .select_from(j)
                           .where(groups.c.domain_name == domain_name))
            result = await conn.execute(usr_query)
            usrs = await result.fetchall()
            user_ids = [g.user_id for g in usrs]
            where = error_logs.c.user.in_(user_ids)
            query = query.where(where)
            count_query = count_query.where(where)
        else:
            is_admin = False
            where = ((error_logs.c.user == user_uuid) &
                     (~error_logs.c.is_cleared))
            query = query.where(where)
            count_query = count_query.where(where)

        result = await conn.execute(query)
        async for row in result:
            result_item = {
                'log_id': str(row['id']),
                'created_at': datetime.timestamp(row['created_at']),
                'severity': row['severity'],
                'source': row['source'],
                'user': row['user'],
                'is_read': row['is_read'],
                'message': row['message'],
                'context_lang': row['context_lang'],
                'context_env': row['context_env'],
                'request_url': row['request_url'],
                'request_status': row['request_status'],
                'traceback': row['traceback'],
            }
            if result_item['user'] is not None:
                result_item['user'] = str(result_item['user'])
            if is_admin:
                result_item['is_cleared'] = row['is_cleared']
            resp['logs'].append(result_item)
        resp['count'] = await conn.scalar(count_query)
        if params['mark_read']:
            update = (sa.update(error_logs)
                        .values(is_read=True)
                        .where(error_logs.c.id.in_([x['log_id'] for x in resp['logs']])))
            await conn.execute(update)
        return web.json_response(resp, status=200)

def dao_get_provider_versions(provider_id):
    return ProviderDetailsHistory.query.filter_by(id=provider_id).order_by(
        desc(ProviderDetailsHistory.version)).all()

results


# In[28]:

s = select([cookies.c.cookie_name, cookies.c.quantity])
s = s.order_by(cookies.c.quantity, cookies.c.cookie_name)
rp = connection.execute(s)
for cookie in rp:
    print('{} - {}'.format(cookie.quantity, cookie.cookie_name))


# In[35]:

from sqlalchemy import desc
s = select([cookies.c.cookie_name, cookies.c.quantity])
s = s.order_by(desc(cookies.c.quantity))
rp = connection.execute(s)
for cookie in rp:
    print('{} - {}'.format(cookie.quantity, cookie.cookie_name))


# In[37]:

from sqlalchemy.sql import func


# In[38]:

s = select([func.count(cookies.c.cookie_name)])
rp = connection.execute(s)
record = rp.first()
print(record.keys())
print(record.count_1)

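# In[ ]:

# A follow-on sketch, not part of the original notebook: the anonymous
# count() column above is only reachable through the auto-generated
# `count_1` key; labeling the expression gives it a stable, readable name.
s = select([func.count(cookies.c.cookie_name).label('inventory_count')])
rp = connection.execute(s)
record = rp.first()
print(record.inventory_count)
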
def blog():
    username = request.args.get('user')
    # print('username : ' ...)
    if username:
        # (a span redacted in the source resolved `username` to `currentUser`
        # and `user_id` here, and originally rendered 'userposts.html' with
        # title="Blogz | Posts", blogs=blogs_ordered_most_recent,
        # deleted_blogs=deleted_blogs, user=currentUser)

        # ---------- With pagination -------------
        page = request.args.get('page', 1, type=int)
        posts = Blog.query.filter_by(deleted=False, owner_id=user_id).order_by(
            desc(Blog.pub_date)).paginate(page, app.config['POSTS_PER_PAGE_USER'], False)
        next_url = url_for('blog', user=username, page=posts.next_num) \
            if posts.has_next else None
        prev_url = url_for('blog', user=username, page=posts.prev_num) \
            if posts.has_prev else None
        return render_template('userposts.html', title="Blogz | Posts",
                               blogs=posts.items, user=currentUser,
                               next_url=next_url, prev_url=prev_url)
    else:
        username = ''
        blogs = Blog.query.filter_by(deleted=False).all()
        blogs_ordered_most_recent = Blog.query.filter_by(
            deleted=False).order_by(desc(Blog.pub_date)).all()
        deleted_blogs = Blog.query.filter_by(deleted=True).all()
        blog_id = request.args.get('id')
        if blog_id:
            indv_blog = Blog.query.get(blog_id)
            return render_template('individualblog.html', title="Blogz | Post",
                                   blogs=blogs, blog=indv_blog)
        else:
            page = request.args.get('page', 1, type=int)
            posts = Blog.query.filter_by(deleted=False).order_by(
                desc(Blog.pub_date)).paginate(page, app.config['POSTS_PER_PAGE'], False)
            next_url = url_for('blog', page=posts.next_num) \
                if posts.has_next else None
            prev_url = url_for('blog', page=posts.prev_num) \
                if posts.has_prev else None
            return render_template('blog.html', title="Blogz | Posts",
                                   blogs=posts.items,
                                   next_url=next_url, prev_url=prev_url)

def _filter_projects(search_dto: ProjectSearchDTO, user):
    """ Filters all projects based on criteria provided by user"""
    query = ProjectSearchService.create_search_query(user)

    query = query.join(ProjectInfo).filter(
        ProjectInfo.locale.in_([search_dto.preferred_locale, "en"])
    )
    project_status_array = []

    if search_dto.project_statuses:
        for project_status in search_dto.project_statuses:
            project_status_array.append(ProjectStatus[project_status].value)
        query = query.filter(Project.status.in_(project_status_array))
    else:
        if not search_dto.created_by:
            project_status_array = [ProjectStatus.PUBLISHED.value]
            query = query.filter(Project.status.in_(project_status_array))

    if search_dto.interests:
        query = query.join(
            project_interests, project_interests.c.project_id == Project.id
        ).filter(project_interests.c.interest_id.in_(search_dto.interests))

    if search_dto.created_by:
        query = query.filter(Project.author_id == search_dto.created_by)

    if search_dto.mapped_by:
        projects_mapped = UserService.get_projects_mapped(search_dto.mapped_by)
        query = query.filter(Project.id.in_(projects_mapped))

    if search_dto.favorited_by:
        user = UserService.get_user_by_id(search_dto.favorited_by)
        projects_favorited = user.favorites
        query = query.filter(
            Project.id.in_([project.id for project in projects_favorited])
        )

    if search_dto.mapper_level and search_dto.mapper_level.upper() != "ALL":
        query = query.filter(
            Project.mapper_level == MappingLevel[search_dto.mapper_level].value
        )

    if search_dto.organisation_name:
        query = query.filter(Organisation.name == search_dto.organisation_name)

    if search_dto.organisation_id:
        query = query.filter(Organisation.id == search_dto.organisation_id)

    if search_dto.team_id:
        query = query.join(
            ProjectTeams, ProjectTeams.project_id == Project.id
        ).filter(ProjectTeams.team_id == search_dto.team_id)

    if search_dto.campaign:
        query = query.join(Campaign, Project.campaign).group_by(
            Project.id, Campaign.name
        )
        query = query.filter(Campaign.name == search_dto.campaign)

    if search_dto.mapping_types:
        # Construct array of mapping types for query
        mapping_type_array = []
        for mapping_type in search_dto.mapping_types:
            mapping_type_array.append(MappingTypes[mapping_type].value)
        query = query.filter(Project.mapping_types.contains(mapping_type_array))

    if search_dto.text_search:
        # We construct an OR search, so any projects that contain one or more
        # of the search terms should be returned
        or_search = " | ".join(
            [x for x in search_dto.text_search.split(" ") if x != ""]
        )
        opts = [
            ProjectInfo.text_searchable.match(
                or_search, postgresql_regconfig="english"
            ),
            ProjectInfo.name.like(f"%{or_search}%"),
        ]
        try:
            opts.append(Project.id == int(search_dto.text_search))
        except ValueError:
            pass
        query = query.filter(or_(*opts))

    if search_dto.country:
        # Unnest country column array.
        sq = Project.query.with_entities(
            Project.id, func.unnest(Project.country).label("country")
        ).subquery()
        query = query.filter(
            sq.c.country.ilike("%{}%".format(search_dto.country))
        ).filter(Project.id == sq.c.id)

    order_by = search_dto.order_by
    if search_dto.order_by_type == "DESC":
        order_by = desc(search_dto.order_by)

    query = query.order_by(order_by).group_by(Project.id)

    if search_dto.managed_by and user.role != UserRole.ADMIN.value:
        team_projects = query.join(ProjectTeams).filter(
            ProjectTeams.role == TeamRoles.PROJECT_MANAGER.value,
            ProjectTeams.project_id == Project.id,
        )
        user_orgs_list = OrganisationService.get_organisations_managed_by_user(
            search_dto.managed_by
        )
        orgs_managed = [org.id for org in user_orgs_list]
        org_projects = query.filter(Project.organisation_id.in_(orgs_managed))
        query = org_projects.union(team_projects)

    all_results = query.all()
    paginated_results = query.paginate(search_dto.page, 14, True)

    return all_results, paginated_results

def largest_files_query(session):
    return session.query(AnnexFile).order_by(desc(AnnexFile.bytesize))

def post_list_all_users(self):
    domain_id = self.json_args.get('domain_id')
    size = int(self.json_args.get('size'))
    page = int(self.json_args.get('page'))
    search = self.json_args.get('search')
    session = self.session('repo')
    result = list()
    try:
        level_map = get_user_level(session, domain_id)
        q = session.query(RepoUser).filter(
            RepoUser.domain_id == domain_id).order_by(RepoUser.user_id)
        if search and len(search) > 0:
            q = q.filter(
                or_(RepoUser.tags.like("%" + search + "%"),
                    RepoUser.user_id.like("%" + search + "%")))
        count = q.count()
        max_page = int(math.ceil(count / size))
        q = q.order_by(desc(RepoUser.id)).offset((page - 1) * size).limit(size)
        for user in q.all():
            result.append({
                'id': user.user_id,
                'domain_id': user.domain_id,
                'name': user.name,
                'master_id': user.master_id,
                'shard_id': user.shard_id,
                'type': user.type,
                'password': user.password,
                'secret': user.secret,
                'iv': user.iv,
                'back_url': user.back_url,
                'prefix': user.prefix,
                'status': user.status,
                'cooperation': user.cooperation,
                'qq': user.qq,
                'mobile': user.mobile,
                'notes': user.notes,
                'services': user.services,
                'tsp_n': str(user.create_time),
                'plevel_n': level_map.get(user.level),
                'status_n': STATUS.get(user.status),
            })
    finally:
        session.close()
    return self.write(
        json.dumps({
            'list': result,
            'max': max_page,
            'page': page,
            'size': size
        }))

def get_most_recent_record(cls):
    return cls.query().order_by(desc(cls.id)).first()

def read_bills(page=1):
    def billMap(bill):
        return bill.get_api_dict()

    return api.paginate(
        Bill.query.filter(Bill.householdId == g.user.householdId).order_by(desc(Bill.date)),
        billMap, page)

def post_add_user(self):
    domain_id = self.json_args.get('domain_id')
    template_id = self.json_args.get('template_id')
    user_name = self.json_args.get('name')
    # request_id = self.json_args.get('request_id')
    login = self.json_args.get('login')
    level = self.json_args.get('plevel', '1')
    notes = self.json_args.get('notes', '')
    qq = self.json_args.get('qq', '')
    mobile = self.json_args.get('mobile', '')
    session = self.session('repo')
    try:
        if len(user_name) < 2 or len(user_name) > MAX_NAME:
            # "reseller name length out of range (2-%d chars)"
            raise ValueError('代理商名称长度不符合要求(2-%d字符) (%s)' % (MAX_NAME, user_name))
        if not login_re.match(login):
            # "login must be 4-12 lowercase letters or digits"
            raise ValueError('登录名不符合规则:小写字母或数字长度4~12位')
        exist_user = session.query(RepoUser).filter(
            RepoUser.domain_id == domain_id).filter(
            RepoUser.name == user_name).first()
        if exist_user:
            # "reseller user name already exists"
            raise ValueError('代理商用户名已经存在 (%s)' % user_name)
        exist_operator = session.query(RepoOperator).filter(
            RepoOperator.domain_id == domain_id).filter(
            RepoOperator.login == login).first()
        if exist_operator:
            # "reseller login already exists"
            raise ValueError('代理商登录名已经存在 (%s)' % login)
        q = session.query(RepoTemplate).filter(
            RepoTemplate.domain_id == domain_id).filter(
            RepoTemplate.template_id == template_id)
        user_template = q.one()
        if user_template is None:
            # "bad reseller template"
            raise ValueError('代理商模板异常')
        # Next UserID
        latest_user = session.query(RepoUser).filter(
            RepoUser.user_id >= user_template.user_id_start).filter(
            RepoUser.user_id <= user_template.user_id_end).order_by(
            desc(RepoUser.user_id)).first()
        if latest_user:
            if latest_user.user_id == user_template.user_id_end:
                # "user quota exceeded, please contact the developers"
                raise ValueError('用户数已超出限额,请联系开发人员!')
            user_id = str(int(latest_user.user_id) + 1)
        else:
            user_id = user_template.user_id_start

        user = RepoUser()
        user.user_id = user_id
        user.domain_id = domain_id
        user.name = user_name
        user.master_id = user_template.master_id or user_id
        user.shard_id = user_template.shard_id or user_id
        user.type = user_template.type
        user.password = gen_key(
            16, string.ascii_uppercase + string.ascii_lowercase + string.digits)
        user.secret = gen_key(
            32, string.ascii_uppercase + string.ascii_lowercase + string.digits)
        user.iv = gen_key(16, string.digits)
        user.back_url = user_template.back_url
        user.tags = get_initial(user_name)
        user.level = level
        user.prefix = user_template.prefix
        user.status = user_template.status
        user.create_time = dt.now()
        user.qq = qq
        user.mobile = mobile
        user.notes = notes
        user.services = user_template.services
        session.add(user)

        rand_pass = ''.join(
            random.sample(string.ascii_letters + string.digits, 6))
        signed = sign_request(rand_pass.encode())
        operator = RepoOperator()
        operator.domain_id = domain_id
        operator.user_id = user_id
        operator.login = login
        operator.name = user_name
        operator.password = signed
        operator.role = user_template.role
        operator.status = user_template.status
        session.add(operator)
        session.commit()
        self.finish({'status': 'ok', 'msg': 'OK', 'password': rand_pass})
        # sync user
        yield core.sync_user(session, domain_id)
        # sync pricing
        # yield core.sync_pricing(session, domain_id, filter_user=user_id)
        self.master.lpush(
            'list:sync:pricing',
            '{domain_id},{product_id},{user_id}'.format(
                domain_id=domain_id, product_id='', user_id=user_id))
    except ValueError as ve:
        self.finish({'status': 'fail', 'msg': str(ve)})
    except Exception as ee:
        request_log.exception('ADD USER')
        self.finish({'status': 'fail', 'msg': 'EXCEPTION'})
    finally:
        session.close()

def search(user_id, title):
    results = Products.query.filter(Products.title.like('%' + title + '%')).order_by(
        desc(Products.boost_date))
    if user_id is not None:
        results = results.filter(Products.user_id != user_id)
    results = results.filter(Products.is_removed == False)
    results = results.filter(Products.ban_reason == None)
    return results.all()

def get(self, produk_kategori=None, produk_subkategori=None):
    """Return the most-purchased ("popular") products, optionally narrowed to a
    category and subcategory. The original three branches differed only in
    their filters, cache key and response message, so they share one path here.
    Response messages are translated from Indonesian."""
    # user = get_jwt_identity()
    # identity = marshal(user, Pembeli_dbStructure.response_field)
    if produk_kategori is None and produk_subkategori is not None:
        # A subcategory without a category is not a valid URL.
        # (The original returned 401; 404 Not Found is the appropriate status.)
        return {'status': 'URL warning', 'message': 'Unrecognized URL'}, 404

    # The memcache lookup is stubbed out, so the database is always queried.
    # client = memcache.Client([('127.0.0.1', 11211)])
    # isiMem = client.get("getAllPopMem")  # per-branch keys: getAllPopCatMem, getAllPopCatSubMem
    isiMem = None

    if isiMem is None:
        # Pagination: p = page number (1-based), rp = rows per page.
        parser = reqparse.RequestParser()
        parser.add_argument('p', type=int, location='args', default=1)
        parser.add_argument('rp', type=int, location='args', default=5)
        args = parser.parse_args()
        offset = (args['p'] * args['rp']) - args['rp']

        qry = Penjual_Produk_dbStructure.query
        if produk_kategori is not None:
            qry = qry.filter_by(produk_kategori=produk_kategori)
        if produk_subkategori is not None:
            qry = qry.filter_by(produk_subkategori=produk_subkategori)
        # Most popular first: order by purchase count, descending.
        qry = qry.order_by(desc("produk_Jumlahdibeli"))

        produk = [marshal(row, Penjual_Produk_dbStructure.response_field)
                  for row in qry.limit(args['rp']).offset(offset).all()]

        # Writing the result back to memcache is likewise stubbed out:
        # client.set("getAllPopMem", produk, time=15)
        source = 'DB'
    else:
        produk = isiMem
        source = 'MEM'

    if produk_subkategori is not None:
        scope = 'in the product category & subcategory'
    elif produk_kategori is not None:
        scope = 'in the product category'
    else:
        scope = 'across all products'
    return {
        'Status': 'OK',
        'Message': 'Buyer searched for popular products %s (%s)' % (scope, source),
        'Produk': produk,
    }, 200, {'Content-Type': 'application/json'}
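# Aside, not from the original code: the offset arithmetic above is the standard
# 1-based paging formula, (p * rp) - rp == (p - 1) * rp. A tiny self-contained check:
def page_offset(page, rows_per_page):
    """Offset of the first row on a 1-based page."""
    return (page - 1) * rows_per_page

assert page_offset(1, 5) == 0   # page 1 starts at row 0
assert page_offset(3, 5) == 10  # page 3 skips two full pages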
def get_popular_communities():
    # Top 5 communities ranked by how many posts they contain.
    communities = (Community.query.join(Post)
                   .group_by(Community.id)
                   .order_by(desc(func.count(Post.community_id)))
                   .limit(5).all())
    return jsonify([community.to_simple_dict() for community in communities])
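# Aside, not from the original app: the inner join drops communities with zero
# posts. If empty communities should still rank (with count 0), an outer join
# works, since func.count only counts non-NULL values. Sketch, same models assumed:
def get_popular_communities_including_empty():
    return (Community.query.outerjoin(Post)
            .group_by(Community.id)
            .order_by(desc(func.count(Post.community_id)))
            .limit(5).all())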
def view():
    limit = int(request.values.get('limit', default=7))  # default: last 7 days of data
    print(limit)  # debug output of the requested window
    if limit in [3, 7]:
        some_days_ago = (datetime.datetime.now()
                         - datetime.timedelta(days=limit)).strftime("%Y-%m-%d")
        records = (BDRecord.query
                   .filter_by(bd_username=session.get('username'))
                   .filter(BDRecord.visit_time >= some_days_ago)
                   .order_by(desc(BDRecord.visit_time)).all())
    else:
        # any other value returns all records
        records = (BDRecord.query
                   .filter_by(bd_username=session.get('username'))
                   .order_by(desc(BDRecord.visit_time)).all())
    return render_template('hmk/view.html', records=records)
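# Aside (assumption: BDRecord.visit_time is a DATE/DATETIME column, not text):
# comparing against a date object avoids coupling to the "%Y-%m-%d" string format.
def records_since(days):
    cutoff = datetime.date.today() - datetime.timedelta(days=days)
    return (BDRecord.query
            .filter(BDRecord.visit_time >= cutoff)
            .order_by(desc(BDRecord.visit_time)).all())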
def get_comments_by_post(post_id):
    # All comments on a post, newest first.
    return (db.session.query(Comment).filter_by(post_id=post_id)
            .order_by(desc(Comment.created)).all())
def list_by_id(user_id):
    """Return a query for the given user's products, most recently boosted first."""
    return Products.query.filter_by(user_id=user_id).order_by(desc(Products.boost_date))
def max_id_in_table(session):
    """ Return the maximum ID in the player table """
    # Assumes the table is non-empty: .first() returns None on an empty table,
    # and the attribute access would then raise AttributeError.
    return session.query(Player).order_by(desc("player_id")).first().player_id
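# Aside, not from the original module: SQLAlchemy's func.max pushes the work to
# the database and degrades gracefully, returning None for an empty table.
def max_id_or_none(session):
    return session.query(func.max(Player.player_id)).scalar()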
def get_posts(limit, offset):
    # .slice(offset, offset + limit) is equivalent to .limit(limit).offset(offset).
    posts = (db.session.query(Post).order_by(desc(Post.created))
             .slice(offset, offset + limit).all())
    return posts
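# Aside (hypothetical helper, same Post model): OFFSET cost grows with the page
# number because skipped rows are still scanned; a keyset variant paginates on
# the ordering column instead, using the last-seen timestamp as the cursor.
def get_posts_before(created_before, limit):
    return (db.session.query(Post)
            .filter(Post.created < created_before)
            .order_by(desc(Post.created))
            .limit(limit).all())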
def pending():
    # Drifts where the current user is either the requester or the gifter,
    # newest first.
    drifts = (Drift.query
              .filter(or_(Drift.requester_id == current_user.id,
                          Drift.gifter_id == current_user.id))
              .order_by(desc(Drift.create_time)).all())
    view_model = DriftCollection(drifts, current_user.id)
    return render_template('pending.html', drifts=view_model.data)
def get_posts_by_tag(tagname, limit=5, offset=0):
    # Given a tag, return matching posts, most recent first. Note the slice
    # fetches one row beyond `limit`, presumably so the caller can tell
    # whether another page exists.
    return (Post.query.filter(Post.tags.any(name=tagname))
            .order_by(desc(Post.created))
            .slice(offset, offset + limit + 1).all())
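# Aside (hypothetical wrapper): the extra row fetched above doubles as a
# has-more flag, so the caller never renders the look-ahead row itself.
def get_tag_page(tagname, limit=5, offset=0):
    rows = get_posts_by_tag(tagname, limit, offset)
    has_more = len(rows) > limit
    return rows[:limit], has_more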
def _get_statistics(self):
    # Overridden base method: a generic honeypot can run on any port, so
    # statistics are broken down per destination port.
    stats = {}
    for attack in self.session.query(
            distinct(self.Attack.dst_port).label('dport')):
        stats[attack.dport] = {}
    # Number of attacking source IDs per port.
    for attack in self.session.query(
            func.count(self.Attack.src_id).label('src_count'),
            self.Attack.dst_port.label('dport')).group_by('dport'):
        stats[attack.dport]['total_ips'] = attack.src_count
    # Attack row count and total packet count per port.
    for attack in self.session.query(
            func.count('*').label('total_count'),
            func.sum(self.Attack.count).label('total_packets'),
            self.Attack.dst_port.label('dport')).group_by('dport'):
        stats[attack.dport]['total_attacks'] = attack.total_count
        stats[attack.dport]['total_packets'] = attack.total_packets
    # Earliest attack per port. Selecting src_id alongside min()/max() relies
    # on SQLite's bare-column rule: the value comes from the row holding the
    # min/max.
    for attack in self.session.query(
            self.Attack.src_id.label('src'),
            func.min(self.Attack.start).label('first_start'),
            self.Attack.dst_port.label('dport')).group_by('dport'):
        stats[attack.dport]['first_attack'] = (attack.src, attack.first_start)
    # Latest attack per port.
    for attack in self.session.query(
            self.Attack.src_id.label('src'),
            func.max(self.Attack.latest).label('last_start'),
            self.Attack.dst_port.label('dport')).group_by('dport'):
        stats[attack.dport]['last_attack'] = (attack.src, attack.last_start)
    # Longest continuous attack per port (duration in days via julianday).
    for attack in self.session.query(
            self.Attack.src_id.label('src'),
            func.max(func.julianday(self.Attack.latest)
                     - func.julianday(self.Attack.start)).label('duration'),
            self.Attack.start, self.Attack.latest, self.Attack.count,
            self.Attack.dst_port.label('dport')).group_by('dport'):
        stats[attack.dport]['longest_cont_attack'] = (
            attack.src, attack.duration, attack.start, attack.latest,
            attack.count)
    # Continuous attack with the largest packet count per port.
    for attack in self.session.query(
            self.Attack.src_id.label('src'),
            (func.julianday(self.Attack.latest)
             - func.julianday(self.Attack.start)).label('duration'),
            self.Attack.start, self.Attack.latest,
            func.max(self.Attack.count).label('pkt_sum'),
            self.Attack.dst_port.label('dport')).group_by('dport'):
        stats[attack.dport]['largest_cont_attack'] = (
            attack.src, attack.duration, attack.start, attack.latest,
            attack.pkt_sum)
    # Average attack duration per port.
    for attack in self.session.query(
            func.avg(func.julianday(self.Attack.latest)
                     - func.julianday(self.Attack.start)).label('avg_duration'),
            self.Attack.dst_port.label('dport')).group_by('dport'):
        stats[attack.dport]['avg_attack_duration'] = attack.avg_duration
    # Heaviest attacker per port: aggregate per (src, port), then keep the
    # source with the highest total packet count.
    for attack in self.session.query(
            self.Attack.src_id.label('src'),
            func.min(self.Attack.start).label('start_min'),
            func.max(self.Attack.latest).label('latest_max'),
            func.sum(self.Attack.count).label('total_count'),
            self.Attack.dst_port.label('dport'))\
            .group_by('src', 'dport')\
            .order_by(desc('total_count')):
        if ('top_attack' not in stats[attack.dport]
                or attack.total_count > stats[attack.dport]['top_attack'][3]):
            stats[attack.dport]['top_attack'] = (
                attack.src, attack.start_min, attack.latest_max,
                attack.total_count)
    # Detailed (honeypot-specific) details live under the 'specific' key.
    stats['specific'] = self._get_detailed_statistics()
    return stats
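# Aside demonstrating the SQLite bare-column rule the first/last-attack queries
# above lean on: when a query uses a single min() or max() aggregate, SQLite
# documents that non-aggregated columns are taken from the row containing the
# min/max. Self-contained check with the stdlib driver:
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE attack (src TEXT, start INTEGER, dport INTEGER)')
conn.executemany('INSERT INTO attack VALUES (?, ?, ?)',
                 [('a', 5, 22), ('b', 1, 22), ('c', 9, 80)])
row = conn.execute(
    'SELECT src, min(start) FROM attack GROUP BY dport ORDER BY dport').fetchone()
assert row == ('b', 1)  # src comes from the row holding min(start) for port 22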
@classmethod
def get_user_wishes(cls, uid):
    # A user's open (not yet launched) wishes, newest first.
    wishes = Wish.query.filter_by(uid=uid, launched=False).order_by(
        desc(Wish.create_time)).all()
    return wishes