def chat(request, username):
    """Chat page between the logged-in user and *username*.

    Handles both displaying the conversation (GET) and posting a new
    message (POST).  Raises Http404 when the addressee does not exist,
    matches more than one user, or is the requesting user himself.
    """
    try:
        # Addressee lookup; pk__ne excludes the requesting user so a
        # user can never open a chat with himself.
        user = User.objects.get(pk__ne=request.user.pk, username=username)
    except (DoesNotExist, MultipleObjectsReturned):
        raise Http404
    # Full two-way conversation, oldest first.
    all_messages = Message.objects.filter(
        Q(author=request.user, addressee=user)
        | Q(author=user, addressee=request.user)).order_by("created")
    # Mark unread messages (sent by the addressee) as read.
    all_messages.filter(author=user, read=False).update(set__read=True)
    if request.method == "POST":
        form = MessageForm(request.POST)
        if form.is_valid():
            message = form.save(commit=False)
            message.author = request.user
            message.addressee = user
            message.save()
            # Redirect-after-POST so a page refresh does not resend.
            return redirect("chat", user.username)
    else:
        form = MessageForm()
    return render(request, 'chat.html', {
        "addressee": user,
        "form": form,
        "all_messages": all_messages
    })
def eventmatch_mine(_userid=None):
    """Return a JSON list of all 'matched' event matches that involve
    *_userid*, either as the event owner or as the requesting user."""
    matched_as_owner = Q(eventOwnerId=_userid, status="matched")
    matched_as_requester = Q(reqUserId=_userid, status="matched")
    matches = EventMatchModel.objects(matched_as_owner | matched_as_requester)
    raw_docs = [match.to_mongo() for match in matches]
    return json_util.dumps(raw_docs, default=json_util.default)
def process_item(self, item, spider):
    """Validate a scraped developer item and persist it as an AndroidApp.

    :param item: scraped item dict with developer/app fields.
    :param spider: the spider that produced the item (unused, Scrapy API).
    :raises DropItem: when the email is invalid, when neither an email
        nor a developer website is present, or when the developer
        already exists in the database.
    :returns: the item unchanged on success.
    """
    # Guard clauses: raise ends the flow, so no elif/else nesting needed.
    if item['email'] and not is_valid_email(item['email']):
        raise DropItem(
            "DROPPING ITEM: Developer's email address was not a valid one: %s"
            % item['email'])
    if not item['email'] and not item['developer_website']:
        raise DropItem(
            "DROPPING ITEM: Couldn't find a valid email or website for this developer: %s"
            % item['name'])
    # Duplicate check: same email or same company counts as the same
    # developer.  first() avoids materialising the whole queryset (the
    # original len(results) fetched every matching document just to test
    # existence).
    duplicate = AndroidApp.objects(
        Q(email=item['email']) | Q(company=item['company'])).first()
    if duplicate is not None:
        raise DropItem(
            "DROPPING ITEM: Duplicate developer was found: %s" % item['email'])
    app = AndroidApp()
    app.name = item['name']
    app.category = item['category']
    app.company = item['company']
    app.email = item['email']
    app.developer_website = item['developer_website']
    app.min_downloads = item['min_downloads']
    app.max_downloads = item['max_downloads']
    app.is_free = item['is_free']
    app.store_url = item['store_url']
    app.save()
    return item
def delete(self):
    """Delete every comment that is still pending review or is flagged
    as spam."""
    pending_or_spam = Q(status='pending') | Q(status='spam')
    Comment.objects(pending_or_spam).delete()
    flash('All pending comments and spams has been deleted', 'success')
    return jsonify(
        message='All pending comments and spams has been deleted')
def clean(self):
    """Validate and normalise the phone range, then detect conflicts.

    Normalises both endpoints, swaps them if given in reverse order, and
    rejects any range that *partially* overlaps or exactly duplicates an
    existing range on the same dialplan.  Full containment is NOT
    rejected: nested ranges appear to be allowed deliberately and are
    resolved into a hierarchy via ``parent`` / get_closest_range below.
    """
    self.from_number = clean_number(self.from_number or "")
    if not self.from_number:
        raise ValidationError("Empty from_number")
    self.to_number = clean_number(self.to_number or "")
    if not self.to_number:
        raise ValidationError("Empty to_number")
    # Keep the range ordered: from_number <= to_number.
    if self.to_number < self.from_number:
        self.from_number, self.to_number = self.to_number, self.from_number
    super(PhoneRange, self).clean()
    # Check overlapped ranges
    # Conflict cases, within the same dialplan:
    #   1. a range starting inside ours and ending after it,
    #   2. a range ending inside ours and starting before it,
    #   3. an exact duplicate of ours.
    q = Q(dialplan=self.dialplan.id) & (
        Q(from_number__gt=self.from_number,
          from_number__lte=self.to_number,
          to_number__gt=self.to_number)
        | Q(to_number__lt=self.to_number,
            from_number__lt=self.from_number,
            to_number__gte=self.from_number)
        | Q(from_number=self.from_number, to_number=self.to_number))
    if self.id:
        # Exclude ourselves when re-validating an existing record.
        q &= Q(id__ne=self.id)
    rr = PhoneRange.objects.filter(q).first()
    if rr:
        raise ValidationError("Overlapped ranges: %s - %s (%s)" % (
            rr.from_number, rr.to_number, rr.name))
    # Attach to the closest enclosing range (nesting support).
    q = {
        "dialplan": self.dialplan,
        "from_number": self.from_number,
        "to_number": self.to_number
    }
    if self.id:
        q["exclude_range"] = self
    self.parent = PhoneRange.get_closest_range(**q)
def get_user_notifications(username, count=False, newer_than=None):
    """
    Get the notifications for a user.

    :param username: The user to get notifications for.
    :type username: str
    :param count: Only return the count.
    :type count: bool
    :param newer_than: Only consider notifications created after this time.
    :returns: int, :class:`crits.core.crits_mongoengine.CritsQuerySet`
    """
    # The original tested "newer_than is None or newer_than == None"
    # (redundant) and duplicated each query for the count/no-count case;
    # build the queryset once and count at the end.
    if newer_than is None:
        n = Notification.objects(users=username).order_by('-created')
    else:
        n = Notification.objects(
            Q(users=username)
            & Q(created__gt=newer_than)).order_by('-created')
    return n.count() if count else n
def get_queryset(self):
    """Build the Data queryset from request query parameters.

    Supports filtering by a comma-separated ``import_ids`` list, a
    free-text ``search`` over a comma-separated ``search_fields`` list
    (OR-combined, case-insensitive substring match), and an arbitrary
    ``order_by`` field.  Removed leftover debug print, an unused
    ``data_format`` local, and commented-out dead code.
    """
    import_ids = self.request.QUERY_PARAMS.get('import_ids')
    order_by = self.request.QUERY_PARAMS.get('order_by')
    search = self.request.QUERY_PARAMS.get('search')
    search_fields = self.request.QUERY_PARAMS.get('search_fields')
    if import_ids:
        queryset = Data.objects.filter(import_id__in=import_ids.split(","))
    else:
        queryset = Data.objects.all()
    if search and search_fields:
        # OR the per-field substring searches together.
        qset = Q()
        for field in search_fields.split(","):
            qset = qset | Q(**{field + "__icontains": search})
        queryset = queryset.filter(qset)
    if order_by:
        queryset = queryset.order_by(order_by)
    return queryset
def from_cybox(cls, cybox_object):
    """
    Convert a Cybox DefinedObject to a MongoEngine Indicator object.

    Returns the existing Indicator when one with the same type and value
    is already stored; otherwise builds a new, unsaved instance.

    :param cybox_object: The cybox object to create the indicator from.
    :type cybox_object: :class:`cybox.core.Observable``
    :returns: :class:`crits.indicators.indicator.Indicator`
    """
    obj = make_crits_object(cybox_object)
    # Qualify the type with the object name when they differ.
    if obj.name and obj.name != obj.object_type:
        ind_type = "%s - %s" % (obj.object_type, obj.name)
    else:
        ind_type = obj.object_type
    existing = Indicator.objects(
        Q(ind_type=ind_type) & Q(value=obj.value)).first()
    if existing:
        return existing
    indicator = cls()
    indicator.value = obj.value
    indicator.created = obj.date
    indicator.modified = obj.date
    return indicator
def get_queryset(self):
    """Build a queryset from the parsed request arguments.

    A 'search' argument becomes a case-insensitive regex matched against
    both ``name`` and ``aliases.fid``; the remaining non-empty arguments
    pass through as plain field filters.
    """
    # Drop falsy values so they are not sent to the database as filters.
    params = self.parser.parse_args()
    params = {key: value for key, value in params.items() if value}
    search_args = []
    if 'search' in params:
        regex = re.compile(params.pop("search"), re.IGNORECASE)
        search_args = [Q(name=regex) | Q(aliases__fid=regex)]
        # 'name' is already covered by the regex search above.
        params.pop('name', None)
    current_app.logger.info(f"{search_args}, {params}")
    if search_args or params:
        return self.model.objects.filter(*search_args, **params)
    return self.model.objects.all()
def ensure_platform(cls, vendor, name): """ Get or create platform by vendor and code :param vendor: :param name: :return: """ # Try to find platform q = Q(vendor=vendor.id, name=name) | Q(vendor=vendor.id, aliases=name) platform = Platform.objects.filter(q).first() if platform: return platform # Try to create pu = uuid.uuid4() d = Platform._get_collection().find_one_and_update( { "vendor": vendor.id, "name": name }, { "$setOnInsert": { "uuid": pu, "full_name": "%s %s" % (vendor.name, name), "bi_id": Int64(new_bi_id()), "aliases": [] } }, upsert=True, return_document=ReturnDocument.AFTER) d["id"] = d["_id"] del d["_id"] p = Platform(**d) p._changed_fields = [] return p
def backlogged(klass, date=None):
    """
    Cards that are backlogged as of the supplied date (or now).

    Backlogged is a semi-tricky calculation. If the date is today, it's
    easy: it's any ticket that doesn't have a done_date or a start_date.

    If the date is earlier, then it's:
        a.) any ticket without a start_date whose backlog_date is lte
            the reference date, **plus**
        b.) any ticket with a start_date greater than the reference date
            and a backlog_date lte the reference date.
    """
    if not date:
        return klass.objects.filter(start_date=None)
    never_started = Q(start_date=None) & Q(backlog_date__lte=date)
    started_after = Q(start_date__gt=date) & Q(backlog_date__lte=date)
    # Collect the distinct matching ids from both queries, then issue a
    # single id__in query for the final result.
    matching_ids = {card.id for card in klass.objects.filter(never_started)}
    matching_ids.update(
        card.id for card in klass.objects.filter(started_after))
    return klass.objects.filter(id__in=matching_ids)
def for_team_board(self, team, backlog_limit, done_days):
    """Return the cards shown on a team's board.

    The result is: up to *backlog_limit* backlog cards (prioritised ones
    first, then unprioritised ones by age), followed by every
    in-progress card and every card completed in the last *done_days*
    days.
    """
    states = States()
    in_progress_q = Q(state__in=states.in_progress, team=team)
    done_q = Q(done_date__gte=now() - relativedelta(days=done_days),
               team=team)
    cards_query = in_progress_q | done_q
    # _ticket_system_data is excluded everywhere: not needed for the
    # board and presumably large — TODO confirm.
    wip_and_done = list(
        self.filter(cards_query).exclude('_ticket_system_data'))
    ordered_backlog_q = Q(state=states.backlog,
                          team=team,
                          priority__exists=True)
    unordered_backlog_q = Q(state=states.backlog,
                            team=team,
                            priority__exists=False)
    # Prioritised backlog cards first, ordered by priority then age.
    ordered_backlog_cards = self.filter(ordered_backlog_q).order_by(
        'priority', 'created_at')
    ordered_backlog_cards = ordered_backlog_cards.limit(
        backlog_limit).exclude('_ticket_system_data')
    unordered_backlog_cards = []
    # Only fetch unprioritised cards when the prioritised ones do not
    # already fill the backlog quota.
    if len(ordered_backlog_cards) < backlog_limit:
        ordered_backlog_cards = list(ordered_backlog_cards)
        unordered_backlog_cards = Kard.objects.filter(
            unordered_backlog_q).order_by('created_at')
        unordered_backlog_cards = unordered_backlog_cards.limit(
            backlog_limit).exclude('_ticket_system_data')
    backlog = list(ordered_backlog_cards) + list(unordered_backlog_cards)
    # Trim in case both slices together exceed the quota.
    backlog = backlog[:backlog_limit]
    return backlog + wip_and_done
def filter_queryset(self, queryset):
    """Filter the queryset by the search string received from the
    client (DataTables ``sSearch`` parameter), OR-combining a
    case-insensitive substring match over the configured filter fields."""
    search_term = self.request.GET.get('sSearch', '')
    if hasattr(self, 'get_filter_fields'):
        filter_fields = self.get_filter_fields()
    elif hasattr(self, 'filter_fields'):
        filter_fields = self.filter_fields
    else:
        filter_fields = None
    if search_term != '' and filter_fields:
        combined_q = None
        for field_name in filter_fields:
            field_q = Q(**{field_name + "__icontains": search_term})
            combined_q = (combined_q | field_q) if combined_q else field_q
        queryset = queryset.filter(combined_q)
    return queryset
def fix():
    """Re-save every Link that is missing linked_objects,
    linked_segments or type, reporting links whose save() trips an
    assertion."""
    missing_fields = (Q(linked_objects__exists=False)
                      | Q(linked_segments__exists=False)
                      | Q(type__exists=False))
    for link in Link.objects.filter(missing_fields).timeout(False):
        try:
            link.save()
        except AssertionError:
            print("Assertion Error, check link with id: %s" % link.id)
def filter_params(self, value):
    # Build a range-overlap filter from a {'min': ..., 'max': ...} dict.
    # A record matches when its [attr_min, attr_max] interval overlaps
    # the requested [val_min, val_max] interval; a stored None bound on
    # either side is treated as open-ended.
    if value is None:
        return {}
    val_min = value.get('min', None)
    val_max = value.get('max', None)
    attr_min, attr_max = self.target
    # (attr_min unset OR attr_min <= val_max) AND
    # (attr_max unset OR attr_max >= val_min)
    # NOTE(review): when val_max/val_min is missing (None) the
    # __lte/__gte clause compares against null — confirm this yields the
    # intended open-bound behaviour in the underlying ODM.
    return (Q(**{attr_min: None}) | Q(**{attr_min + "__lte": val_max})) & \
           (Q(**{attr_max: None}) | Q(**{attr_max + "__gte": val_min}))
def test_or_combination(self):
    """Ensure that Q-objects correctly OR together."""

    class TestDoc(Document):
        x = IntField()

    below = Q(x__lt=3)
    above = Q(x__gt=7)
    compiled = (below | above).to_query(TestDoc)
    assert compiled == {"$or": [{"x": {"$lt": 3}}, {"x": {"$gt": 7}}]}
def profile_mylocations_view(request):
    """Render the profile page listing the locations the user either
    created or is allowed to edit."""
    data = init_data(request)
    uid = str(request.user.id)
    mine = (Q(created_by__contains=uid)
            | Q(also_editable_by__contains=uid))
    data['locations'] = Location.objects.filter(mine)
    return render_to_response('maps-admin/profile/profile-mylocations.html',
                              data,
                              context_instance=RequestContext(request))
def get_templates_api():
    """Return templates visible to the current user, newest first.

    Admins see everything; editors see public templates plus their own
    contributions; everyone else sees only public templates.
    """
    if current_user.has_role('admin'):
        visible = Templates.objects.all()
    elif current_user.has_role('editor'):
        visible = Templates.objects(
            Q(status='public') | Q(contributor=current_user.email))
    else:
        visible = Templates.objects(Q(status='public'))
    return jsonify({'result': visible.order_by('-pub_date')})
def get_visualizations_api():
    """Return visualizations visible to the current user, newest first.

    Admins see everything; editors see public visualizations plus their
    own contributions; everyone else sees only public ones.
    """
    if current_user.has_role('admin'):
        visible = Visualizations.objects.all()
    elif current_user.has_role('editor'):
        visible = Visualizations.objects(
            Q(status='public') | Q(contributor=current_user.email))
    else:
        visible = Visualizations.objects(Q(status='public'))
    return jsonify({'result': visible.order_by('-pub_date')})
def validateLogin(cls, username=None, email=None, password=None):
    """Authenticate a user by username and/or email plus password.

    Bug fix: the lookup previously always OR-ed both fields, so a
    missing credential was compared as None — Q(email=None) can match
    documents whose email field is null and either fetch the wrong user
    or raise an uncaught MultipleObjectsReturned.  Now only the
    identifiers actually supplied are included in the query.

    :returns: the matching User on success, False otherwise.
    """
    if password is None or (username is None and email is None):
        return False
    # Build the lookup only from the identifiers that were provided.
    query = Q()
    if email is not None:
        query |= Q(email=email)
    if username is not None:
        query |= Q(username=username)
    try:
        user = User.objects.get(query)
    except User.DoesNotExist:
        return False
    if user and bcrypt.verify(password, user.password):
        return user
    return False
def test_q_merge_queries_edge_case(self):
    class User(Document):
        email = EmailField(required=False)
        name = StringField()

    User.drop_collection()
    pk = ObjectId()
    User(email='*****@*****.**', pk=pk).save()

    # An OR query followed by limit() and a further filter() must still
    # find the saved document.
    matches = User.objects.filter(
        Q(email='*****@*****.**') | Q(name='John Doe'))
    self.assertEqual(1, matches.limit(2).filter(pk=pk).count())
def fetch_excluded_topics(self,
                          max_posts_count: int = 15,
                          max_views_count: int = 2000,
                          time_limit: int = 15):
    """
    Return Topic ID's that need to be excluded (no requests for topic
    page content scraping): too many posts or views, recently updated,
    closed, or without a location.
    """
    freshness_cutoff = datetime.utcnow() - timedelta(minutes=time_limit)
    excluded = (Q(posts_count__gte=max_posts_count)
                | Q(views_count__gte=max_views_count)
                | Q(updated__gte=freshness_cutoff)
                | Q(closed=True)
                | Q(location=None))
    return self.filter(excluded).only('topic_id')
def test_q_merge_queries_edge_case(self):
    class User(Document):
        email = EmailField(required=False)
        name = StringField()

    User.drop_collection()
    pk = ObjectId()
    User(email="*****@*****.**", pk=pk).save()

    # An OR query followed by limit() and a further filter() must still
    # find the saved document.
    query = Q(email="*****@*****.**") | Q(name="John Doe")
    count = User.objects.filter(query).limit(2).filter(pk=pk).count()
    assert count == 1
def logistics(self):
    """List logistics records, paginated via the HTTP Range header.

    Filters come from the query string; the ``receiver`` and
    ``order_id`` filters *replace* the queryset built above with a plain
    list assembled from the matching orders' logistics.
    """
    # Pagination: "start-end" items range, e.g. "0-9".
    items_range = request.headers.get('Range', "0-9")
    start, end = items_range.split('-')
    per_page = int(end) - int(start) + 1  # NOTE(review): computed but unused
    query = restruct_query(request.args)
    tracking_no = query.pop("tracking_no", "")
    # Pop include_closed so it is not passed on as a field filter.
    include_closed = query.get('include_closed') and query.pop(
        'include_closed')
    try:
        if include_closed:
            los = Models.Logistic.objects(**query)
        else:
            los = Models.Logistic.objects(is_closed=False, **query)
        if tracking_no:
            # A tracking number may be either the US or the CN one.
            los = los.filter(
                Q(detail__us_tracking_no=tracking_no)
                | Q(detail__cn_tracking_no=tracking_no))
        if request.args.get('status'):
            # Order by the date field corresponding to the given status.
            los = los.order_by('detail__%s' %
                               Models.LogisticDetail.attr_by_log_stat[
                                   request.args.get('status')])
    except:
        # NOTE(review): bare except silently swallows bad filter input —
        # consider narrowing the exception type and logging.
        pass
    if query.get('receiver'):
        # Receiver search goes addresses -> orders -> logistics and
        # replaces the queryset above with a plain list.
        addrs = Models.Address.objects(
            receiver=query.get('receiver')).distinct('id')
        orders = Models.Order.commodities(address__in=addrs)
        los = list(chain.from_iterable(order.logistics for order in orders))
    if query.get('order_id'):
        orders = Models.Order.commodities(
            short_id=int(query.get('order_id')))
        los = list(chain.from_iterable(order.logistics for order in orders))
    try:
        # A queryset has .count(); a plain list does not.
        los_size = los.count()
    except:
        los_size = len(los)
    data = los[int(start):int(end)]
    data = [to_json(l) for l in data]
    resp = make_response(json_util.dumps(data), 200)
    resp.headers['Accept-Range'] = 'items'
    resp.headers['Content-Range'] = '%s-%s/%s' % (start, end, los_size)
    resp.headers['Content-Type'] = 'application/json'
    return resp
def logistics_delay(self, status=None, delay_type=None):
    """Report delayed logistics.

    With *status*: return the paginated list of open logistics stuck in
    that status longer than its allowed number of days.  Without
    *status*: return per-status counts of delayed logistics.
    """
    utcnow = datetime.datetime.utcnow()
    if status:
        # Pagination via the HTTP Range header, e.g. "0-9".
        items_range = request.headers.get('Range', "0-9")
        start, end = items_range.split('-')
        per_page = int(end) - int(start) + 1  # NOTE(review): unused
        query = restruct_query(request.args)
        tracking_no = query.pop("tracking_no", "")
        # Date field marking entry into this status, and the number of
        # days after which the status counts as delayed.
        date_field = Models.LogisticDetail.attr_by_log_stat[status]
        delay_days = datetime.timedelta(days=delay_status_by_date[status])
        query.update({
            'detail__%s__lt' % date_field: utcnow - delay_days,
            'detail__status': status,
        })
        los = Models.Logistic.objects(is_closed=False, **query).order_by(
            'detail__%s' % date_field)
        if tracking_no:
            # Either the US or the CN tracking number may match.
            los = los.filter(
                Q(detail__us_tracking_no=tracking_no)
                | Q(detail__cn_tracking_no=tracking_no))
        if delay_type:
            los = los.filter(
                detail__delay_details__reason__contains=delay_type)
        data = los[int(start):int(end)]
        data = [to_json(l) for l in data]
        resp = make_response(json_util.dumps(data), 200)
        resp.headers['Accept-Range'] = 'items'
        resp.headers['Content-Range'] = '%s-%s/%s' % (start, end,
                                                      los.count())
        resp.headers['Content-Type'] = 'application/json'
        return resp
    # No status given: summarise delayed counts per status.
    data = {}
    for status in [
            "PAYMENT_RECEIVED", 'PROCESSING', 'SHIPPING', "PORT_ARRIVED"
    ]:
        los = Models.Logistic.objects(is_closed=False)
        date_field = Models.LogisticDetail.attr_by_log_stat[status]
        delay_days = datetime.timedelta(days=delay_status_by_date[status])
        query = {
            'detail__%s__lt' % date_field: utcnow - delay_days,
            'detail__status': status,
        }
        count = los.filter(**query).count()
        data.update({status: count})
    return jsonify(results=data)
def get_queryset(self):
    """Questions visible to the user, optionally narrowed by the current
    category and by a title/body substring search."""
    search_term = self.request.GET.get('title', None)
    questions = Question.objects.can_view(self.request.user)
    category = self.get_category()
    if category:
        questions = questions.filter(categories=category)
    if search_term:
        questions = questions.filter(
            Q(title__icontains=search_term)
            | Q(body__icontains=search_term))
    return questions
def lookup(self, value):
    """Fetch the single document whose configured lookup fields match
    *value*.

    The document's ``lookup_fields`` meta entry lists the fields that
    are OR-combined into the query.

    :raises DoesNotExist: when *value* is None, no lookup fields are
        configured, or no document matches.
    """
    lookup_fields = self._document._meta['lookup_fields']
    if value is None or not lookup_fields:
        # Bug fix: this previously read self.__class__._meta.object_name,
        # but this queryset class has no _meta (and _meta is accessed as
        # a dict above), so raising the error itself raised an
        # AttributeError.  Use the document class name instead.
        raise self.DoesNotExist(
            '{} matching query does not exist.'.format(
                self._document.__name__))
    query = Q()
    for key in lookup_fields:
        query |= Q(**{key: value})
    return self.get(query)
def test_custom_filter_not_equal(self):
    journal = makeOneJournal({'title': 'title-%s' % str(uuid4().hex)})
    makeOneIssue({'journal': journal})
    column = Issue.journal
    custom_filter = CustomFilterNotEqual(column=column,
                                         name=__('Periódico'))
    result = custom_filter.apply(Issue.objects, journal.title)

    # Expected: issues whose journal is any journal with a different title.
    other_journals = Journal.objects.filter(
        Q(**{'title__ne': journal.title}))
    expected = Issue.objects.filter(
        Q(**{'%s__in' % column.name: other_journals}))
    self.assertListEqual(list(expected), list(result))
def test_multiple_occurence_in_field(self):
    class Test(Document):
        name = StringField(max_length=40)
        title = StringField(max_length=40)

    # Two OR groups touching the same fields must compile into an $and
    # of the individual group queries.
    either_te = Q(name__contains="te") | Q(title__contains="te")
    either_12 = Q(name__contains="12") | Q(title__contains="12")
    compiled = (either_te & either_12).to_query(Test)

    self.assertEqual(compiled["$and"][0], either_te.to_query(Test))
    self.assertEqual(compiled["$and"][1], either_12.to_query(Test))
def queryset(self):
    """Restrict the base queryset to the teams configured for this
    report group; an unknown or empty group leaves it unfiltered."""
    groups_config = app.config.get('REPORT_GROUPS', {})
    group = groups_config.get(self.group, ())
    team_filter = Q()
    if group:
        # group[0] holds the team list for this report group.
        for team in group[0]:
            team_filter = Q(team=team) | team_filter
    if team_filter:
        return self.qs.filter(team_filter)
    return self.qs