def conversation_exist(conversation_id=None, users=None, about=None, include_public=False):
    """
    Check whether a conversation with same id or both users and about exists.

    :param conversation_id: pk of a specific conversation to look up directly
    :param users: list of users that must be *exactly* the conversation's members
    :param about: optional attached object the conversation must be about
    :param include_public: when False, public chats are excluded from matching
    :return: the matching `Conversation` if one exists, otherwise False
    :raises ValueError: if `users` is truthy but not a list
    """
    if conversation_id:
        try:
            return Conversation.objects.get(pk=conversation_id)
        except Conversation.DoesNotExist:
            return False
    elif users:
        # `assert` is stripped when Python runs with -O; validate explicitly
        if not isinstance(users, list):
            raise ValueError("`users` must be a list")
        if about:
            conversations = Conversation.objects.with_attached_object(about)
        else:
            conversations = Conversation.objects.filter(object_id=None)
        if not include_public:
            conversations = conversations.exclude(type=CONVERSATION_TYPE_PUBLIC_CHAT)
        users = arrays.unique(users)
        # Require the exact member set: same member count AND every user present
        conversations = conversations.annotate(c=Count('users')).filter(c=len(users))
        for user in users:
            conversations = conversations.filter(users=user)
        return conversations.first() or False
    else:
        return False
def process_tags(names, snake_case=False):
    """
    Normalize a list of raw tag names via `process_tag`.

    Names are formatted with snake_case or kebab_case, falsy results are
    dropped, and duplicates are removed (first occurrence wins).
    """
    case_fn = strings.snake_case if snake_case else strings.kebab_case
    candidates = (process_tag(name, case_fn) for name in names)
    return arrays.unique([tag for tag in candidates if tag])
def get_cities_within(self, dist_km, max_cities=30):
    """
    Return a queryset of `PredefinedCity` rows in this city's country whose
    great-circle distance from this city is below `dist_km`, limited to the
    `max_cities` nearest ones. NOTE(review): the returned queryset is not
    ordered by distance — `id__in` does not preserve the sorted order.
    """
    # Haversine distance in km computed by the database (6371 = Earth radius in km)
    haversine_select = {
        'distance': """(6371 * acos( cos( radians(%s) ) * cos( radians( latitude ) ) * cos( radians( longitude ) - radians(%s) ) + sin( radians(%s) ) * sin( radians( latitude ) ) ) )""" % (self.latitude, self.longitude, self.latitude)
    }
    candidates = (PredefinedCity.objects.filter(country=self.country)
                  .exclude(id=self.id)
                  .extra(select=haversine_select)
                  .values('id', 'distance'))
    by_distance = sorted(candidates, key=lambda row: row['distance'])
    nearby_ids = [row['id'] for row in by_distance if float(row['distance']) < dist_km][:max_cities]
    nearby_ids = arrays.unique(nearby_ids)
    return PredefinedCity.objects.filter(id__in=nearby_ids)
def fb_scopes_changed(self, request):
    """
    Get notified about a Facebook user changing Shoutit App scopes.
    This updates the LinkedFacebookAccount record with new scopes.
    ###NOT TO BE USED BY API CLIENTS
    ###POST
    Expects a POST body with entry as list of objects each which has a uid and other attributes.
    https://developers.facebook.com/docs/graph-api/webhooks/v2.5
    """
    # Facebook webhook verification handshake: GET must echo back `hub.challenge`
    hub_challenge = request.query_params.get('hub.challenge', '')
    if request.method == 'GET':
        return Response(hub_challenge)
    entries = request.data.get('entry', [])
    # The payload is untrusted external input: use .get('id') so an entry
    # without an 'id' key doesn't raise KeyError; missing/falsy ids are then
    # removed by filter(None, ...). (Docstring mentions `uid` — the code reads
    # `id`; presumably that matches the current webhook format — TODO confirm.)
    facebook_ids = filter(None, arrays.unique(map(lambda e: e.get('id'), entries)))
    for facebook_id in facebook_ids:
        facebook_controller.update_linked_facebook_account_scopes(facebook_id)
    return Response('OK')
def api_client_names(self):
    """Return the unique API client names of this user's access tokens."""
    client_names = self.accesstoken_set.values_list('client__name', flat=True)
    return arrays.unique(client_names)
def filter_queryset(self, request, index_queryset, view, extra_query_params=None):
    """
    Apply the request's query params as filters on an elasticsearch-dsl
    `Search` for shouts: discover item, profile, exclusions, shout type,
    free-text search, tags, location (country/city/bounding box), category
    with its filters, price range, expiry and sorting.

    :param request: DRF request whose `query_params` drive the filtering
    :param index_queryset: `Search` instance; returned unchanged if not one
    :param view: view object; `get_expired` attr may be set/read on it
    :param extra_query_params: optional dict merged over the query params
    :raises InvalidParameter: for any malformed or unknown param value
    """
    if not isinstance(index_queryset, Search):
        return index_queryset
    # Copy the query dict to be able to modify it as it is immutable, then update it with extra params
    data = request.query_params.copy()
    if isinstance(extra_query_params, dict):
        data.update(extra_query_params)
    # Update data from discover item shouts query if discover is passed
    discover = data.get('discover')
    if discover:
        try:
            discover_item = DiscoverItem.objects.get(id=discover)
        except ValueError:
            raise InvalidParameter('discover', _("Invalid discover id"))
        except DiscoverItem.DoesNotExist:
            msg = _("Discover Item with id '%(discover)s' does not exist") % {'discover': discover}
            raise InvalidParameter('discover', msg)
        else:
            data.update(discover_item.shouts_query)
    # Filter shouts by user id if user username is passed in `profile` query param
    user = data.get('profile') or data.get('user')
    if user:
        # Replace `me` with logged in username
        if user == 'me' and request.user.is_authenticated():
            user = request.user.username
        # Get the user id using username
        try:
            user_id = str(User.objects.values('pk').get(username=user)['pk'])
        except User.DoesNotExist:
            msg = _("Profile with username '%(username)s' does not exist") % {'username': user}
            raise InvalidParameter('profile', msg)
        else:
            index_queryset = index_queryset.filter('term', uid=user_id)
            # When listing user's own shouts show him the expired ones
            if user == request.user.username:
                setattr(view, 'get_expired', True)
    # Exclude shouts using their ids; accept a comma-separated string or a list
    exclude = data.get('exclude')
    if isinstance(exclude, basestring):
        exclude = exclude.split(',')
    if exclude and not isinstance(exclude, list):
        exclude = [exclude]
    if exclude:
        index_queryset = index_queryset.filter(~EQ('terms', _id=map(str, exclude)))
    # Shout type
    shout_type = data.get('shout_type')
    if shout_type:
        if shout_type not in ['all', 'offer', 'request']:
            msg = _("Should be `all`, `request` or `offer`")
            raise InvalidParameter('shout_type', msg)
        if shout_type != 'all':
            index_queryset = index_queryset.filter('term', type=shout_type)
    # Search query
    search = data.get('search')
    if search:
        index_queryset = index_queryset.query('multi_match', query=search,
                                              fields=['title', 'text', 'tags'], fuzziness='AUTO')
    # Tags
    tags = data.get('tags')
    if tags:
        tags = tags.replace(',', ' ').split()
        tag_names = process_tags(tags)
        index_queryset = index_queryset.filter('terms', tags=tag_names)
    # Location: Country, State, City, Latitude, Longitude
    country = data.get('country', '').upper()
    if country and country != 'all':
        index_queryset = index_queryset.filter('term', country=country)
        # todo: add state
    city = data.get('city')
    if city and city != 'all':
        # todo: use other means of finding the surrounding cities like state.
        try:
            pd_city = PredefinedCity.objects.filter(city=city, country=country)[0]
        except IndexError:
            # Unknown city: silently skip the city filter (best-effort)
            pass
        else:
            nearby_cities = pd_city.get_cities_within(settings.NEARBY_CITIES_RADIUS_KM)
            cities = map(lambda nc: nc.city, nearby_cities)
            cities.append(city)
            cities = arrays.unique(cities)
            index_queryset = index_queryset.filter('terms', city=cities)
    # Bounding box: down_left is the south-west corner, up_right the north-east
    down_left_lat = data.get('down_left_lat')
    down_left_lng = data.get('down_left_lng')
    up_right_lat = data.get('up_right_lat')
    up_right_lng = data.get('up_right_lng')
    latlng_key = ''  # remembers which param we were parsing when float() fails
    try:
        if down_left_lat:
            latlng_key = 'down_left_lat'
            down_left_lat = float(down_left_lat)
            up_right_lat = up_right_lat or 90
            if down_left_lat > float(up_right_lat) or not (90 >= down_left_lat >= -90):
                raise InvalidParameter('down_left_lat',
                                       _("Should be between -90 and 90, also not greater than 'up_right_lat'"))
            index_queryset = index_queryset.filter('range', **{'latitude': {'gte': down_left_lat}})
        if down_left_lng:
            latlng_key = 'down_left_lng'
            down_left_lng = float(down_left_lng)
            up_right_lng = up_right_lng or 180
            if down_left_lng > float(up_right_lng) or not (180 >= down_left_lng >= -180):
                raise InvalidParameter('down_left_lng',
                                       _("Should be between -180 and 180, also not greater than 'up_right_lng'"))
            index_queryset = index_queryset.filter('range', **{'longitude': {'gte': down_left_lng}})
        if up_right_lat:
            latlng_key = 'up_right_lat'
            if not (90 >= float(up_right_lat) >= -90):
                raise InvalidParameter('up_right_lat', _("Should be between -90 and 90"))
            index_queryset = index_queryset.filter('range', **{'latitude': {'lte': up_right_lat}})
        if up_right_lng:
            latlng_key = 'up_right_lng'
            if not (180 >= float(up_right_lng) >= -180):
                raise InvalidParameter('up_right_lng', _("Should be between -180 and 180"))
            index_queryset = index_queryset.filter('range', **{'longitude': {'lte': up_right_lng}})
    except ValueError:
        raise InvalidParameter(latlng_key, _("Invalid number"))
    # Category and Filters
    category = data.get('category')
    if category and category != 'all':
        try:
            category = Category.objects.prefetch_related('filters').get(slug=category)
        except Category.DoesNotExist:
            msg = _("Category with slug '%(slug)s' does not exist") % {'slug': category}
            raise InvalidParameter('category', msg)
        else:
            data['category'] = category.slug
            index_queryset = index_queryset.filter('terms', category=[category.name, category.slug])
            # Apply the category's own filters: string filters accept a
            # comma-separated list, numeric filters accept min_/max_ bounds
            cat_filters = category.filters.values_list('slug', 'values_type')
            for cat_f_slug, cat_f_type in cat_filters:
                if cat_f_type == TAG_TYPE_STR:
                    cat_f_param = data.get(cat_f_slug)
                    if cat_f_param:
                        cat_f_params = cat_f_param.split(',')
                        index_queryset = index_queryset.filter(
                            'terms', **{'filters__%s' % cat_f_slug: cat_f_params})
                elif cat_f_type == TAG_TYPE_INT:
                    for m1, m2 in [('min', 'gte'), ('max', 'lte')]:
                        cat_f_param = data.get('%s_%s' % (m1, cat_f_slug))
                        if cat_f_param:
                            index_queryset = index_queryset.filter(
                                'range', **{'filters__%s' % cat_f_slug: {m2: cat_f_param}})
    # Price
    min_price = data.get('min_price')
    if min_price:
        index_queryset = index_queryset.filter('range', **{'price': {'gte': min_price}})
    max_price = data.get('max_price')
    if max_price:
        index_queryset = index_queryset.filter('range', **{'price': {'lte': max_price}})
    # Expired: unless the view asked for expired shouts, keep only shouts that
    # are either recently published with no expiry, or explicitly not expired
    if not getattr(view, 'get_expired', False):
        now = timezone.now()
        min_published = now - timedelta(days=int(settings.MAX_EXPIRY_DAYS))
        # Recently published and no specified expires_at
        recently_published = EQ('range', **{'published_at': {'gte': min_published}})
        no_expiry_still_valid = EQ('bool', filter=[~EQ('exists', field='expires_at'), recently_published])
        # Not expired
        not_expired = EQ('range', **{'expires_at': {'gte': now}})
        expiry_still_valid = EQ('bool', filter=[EQ('exists', field='expires_at'), not_expired])
        index_queryset = index_queryset.filter(no_expiry_still_valid | expiry_still_valid)
    # Sorting. Normalize falsy values (missing param or `?sort=`) to None so
    # an empty `sort` hits the default instead of raising KeyError below.
    sort = data.get('sort') or None
    sort_types = {
        None: ('-published_at',),
        'time': ('-published_at',),
        'price_asc': ('price',),
        'price_desc': ('-price',),
    }
    if sort and sort not in sort_types:
        raise InvalidParameter('sort', _("Invalid sort"))
    # selected_sort = ('-priority',) + sort_types[sort]
    selected_sort = sort_types[sort]
    if search:
        # Rank by relevance first when a text query is present
        selected_sort = ('_score',) + selected_sort
    index_queryset = index_queryset.sort(*selected_sort)
    debug_logger.debug(index_queryset.to_dict())
    index_queryset.search_data = {k: parse_int(v, 10) or v for k, v in data.items()}
    return index_queryset
def filter_queryset(self, request, index_queryset, view, extra_query_params=None):
    """
    Apply the request's query params as filters on an elasticsearch-dsl
    `Search` for shouts: discover item, user, excluded ids, shout type,
    free-text search, tags, location (country/city/bounding box), category
    with its filters, price range and sorting.

    :param request: DRF request whose `query_params` drive the filtering
    :param index_queryset: `Search` instance; returned unchanged if not one
    :param view: view object (unused here beyond being part of the interface)
    :param extra_query_params: optional dict merged over the query params
    :raises ValidationError: for any malformed or unknown param value
    """
    if not isinstance(index_queryset, Search):
        return index_queryset
    # Copy the query dict to be able to modify it as it is immutable, then update it with extra params
    data = request.query_params.copy()
    if isinstance(extra_query_params, dict):
        data.update(extra_query_params)
    # Update data from discover item shouts query if discover is passed
    discover = data.get('discover')
    if discover:
        try:
            discover_item = DiscoverItem.objects.get(id=discover)
        except DiscoverItem.DoesNotExist:
            raise ValidationError({'discover': ["Discover Item with id '%s' does not exist" % discover]})
        else:
            data.update(discover_item.shouts_query)
    # Filter shouts by user id if user username or id are passed in `user` query param
    user = data.get('user')
    if user:
        try:
            user_id = User.objects.get(username=user).pk
        except User.DoesNotExist:
            raise ValidationError({'user': ["User with username '%s' does not exist" % user]})
        else:
            index_queryset = index_queryset.filter('term', uid=user_id)
    # Exclude ids; accept a comma-separated string or a list
    exclude_ids = data.get('exclude_ids')
    if isinstance(exclude_ids, basestring):
        exclude_ids = exclude_ids.split(',')
    if exclude_ids and not isinstance(exclude_ids, list):
        exclude_ids = [exclude_ids]
    if exclude_ids:
        index_queryset = index_queryset.filter(~Q('terms', _id=map(str, exclude_ids)))
    # Shout type
    shout_type = data.get('shout_type')
    if shout_type:
        if shout_type not in ['all', 'offer', 'request']:
            raise ValidationError({'shout_type': ["Should be `all`, `request` or `offer`"]})
        if shout_type != 'all':
            index_queryset = index_queryset.filter('term', type=shout_type)
    # Search query
    search = data.get('search')
    if search:
        index_queryset = index_queryset.query('multi_match', query=search,
                                              fields=['title', 'text', 'tags'], fuzziness='AUTO')
    # Tags
    tags = data.get('tags')
    if tags:
        tags = tags.replace(',', ' ').split()
        tag_names = process_tags(tags)
        index_queryset = index_queryset.filter('terms', tags=tag_names)
    # Location: Country, State, City, Latitude, Longitude
    country = data.get('country', '').upper()
    if country and country != 'all':
        index_queryset = index_queryset.filter('term', country=country)
        # todo: add state
    city = data.get('city')
    if city and city != 'all':
        # todo: use other means of finding the surrounding cities like state.
        try:
            pd_city = PredefinedCity.objects.filter(city=city, country=country)[0]
        except IndexError:
            # Unknown city: silently skip the city filter (best-effort)
            pass
        else:
            nearby_cities = pd_city.get_cities_within(settings.NEARBY_CITIES_RADIUS_KM)
            cities = map(lambda nc: nc.city, nearby_cities)
            cities.append(city)
            cities = arrays.unique(cities)
            index_queryset = index_queryset.filter('terms', city=cities)
    # Bounding box: down_left is the south-west corner, up_right the north-east.
    # Errors are collected and raised together after parsing all four params.
    latlng_errors = OrderedDict()
    down_left_lat = data.get('down_left_lat')
    down_left_lng = data.get('down_left_lng')
    up_right_lat = data.get('up_right_lat')
    up_right_lng = data.get('up_right_lng')
    try:
        if down_left_lat:
            down_left_lat = float(down_left_lat)
            # Default the missing opposite corner (90 / 180) for the comparison
            # only; float(None) would raise TypeError, which the ValueError
            # handler below does not catch.
            if down_left_lat > float(up_right_lat or 90) or not (90 >= down_left_lat >= -90):
                latlng_errors['down_left_lat'] = [
                    "should be between -90 and 90, also not greater than 'up_right_lat'"]
            index_queryset = index_queryset.filter('range', **{'latitude': {'gte': down_left_lat}})
        if down_left_lng:
            down_left_lng = float(down_left_lng)
            if down_left_lng > float(up_right_lng or 180) or not (180 >= down_left_lng >= -180):
                latlng_errors['down_left_lng'] = [
                    "should be between -180 and 180, also not greater than 'up_right_lng'"]
            index_queryset = index_queryset.filter('range', **{'longitude': {'gte': down_left_lng}})
        if up_right_lat:
            if not (90 >= float(up_right_lat) >= -90):
                latlng_errors['up_right_lat'] = ["should be between -90 and 90"]
            index_queryset = index_queryset.filter('range', **{'latitude': {'lte': up_right_lat}})
        if up_right_lng:
            if not (180 >= float(up_right_lng) >= -180):
                latlng_errors['up_right_lng'] = ["should be between -180 and 180"]
            index_queryset = index_queryset.filter('range', **{'longitude': {'lte': up_right_lng}})
    except ValueError:
        latlng_errors['error'] = ["invalid lat or lng parameters"]
    if latlng_errors:
        raise ValidationError(latlng_errors)
    # Category and Filters
    category = data.get('category')
    if category and category != 'all':
        try:
            category = Category.objects.prefetch_related('filters').get(DQ(name=category) | DQ(slug=category))
        except Category.DoesNotExist:
            raise ValidationError({'category': ["Category with name or slug '%s' does not exist" % category]})
        else:
            data['category'] = category.slug
            index_queryset = index_queryset.filter('terms', category=[category.name, category.slug])
            # Apply the category's own filters: string filters take a single
            # value, numeric filters accept min_/max_ bounds
            cat_filters = category.filters.values_list('slug', 'values_type')
            for cat_f_slug, cat_f_type in cat_filters:
                if cat_f_type == TAG_TYPE_STR:
                    cat_f_param = data.get(cat_f_slug)
                    if cat_f_param:
                        index_queryset = index_queryset.filter(
                            'term', **{'filters__%s' % cat_f_slug: cat_f_param})
                elif cat_f_type == TAG_TYPE_INT:
                    for m1, m2 in [('min', 'gte'), ('max', 'lte')]:
                        cat_f_param = data.get('%s_%s' % (m1, cat_f_slug))
                        if cat_f_param:
                            index_queryset = index_queryset.filter(
                                'range', **{'filters__%s' % cat_f_slug: {m2: cat_f_param}})
    # Price
    min_price = data.get('min_price')
    if min_price:
        index_queryset = index_queryset.filter('range', **{'price': {'gte': min_price}})
    max_price = data.get('max_price')
    if max_price:
        index_queryset = index_queryset.filter('range', **{'price': {'lte': max_price}})
    # Sorting. Normalize falsy values (missing param or `?sort=`) to None so
    # an empty `sort` hits the default instead of raising KeyError below.
    sort = data.get('sort') or None
    sort_types = {
        None: ('-published_at',),
        'time': ('-published_at',),
        'price_asc': ('price',),
        'price_desc': ('-price',),
    }
    if sort and sort not in sort_types:
        raise ValidationError({'sort': ["Invalid sort"]})
    # selected_sort = ('-priority',) + sort_types[sort]
    selected_sort = sort_types[sort]
    if search:
        # Rank by relevance first when a text query is present
        selected_sort = ('_score',) + selected_sort
    index_queryset = index_queryset.sort(*selected_sort)
    debug_logger.debug(index_queryset.to_dict())
    index_queryset.search_data = {k: parse_int(v, 10) or v for k, v in data.items()}
    return index_queryset