Code example #1
 async def _channel_watchdog(self):
     """
     Channel Watchdog Implementation.
     Runs as an asyncio coroutine that monitors the channel-ttl keys and the
     channels SET, expiring registered services as needed. It also registers
     newly added channels with the processing listener's pubsub.
     """
     redis_aio_pool = None
     try:
         logger.debug("Starting Channel Watchdog")
         redis_aio_pool = await self._create_pool()
         redis_reg_key_ttl_timedelta = timedelta(
             seconds=self.redis_reg_key_ttl)
         while True:
             # get list of channels
             channels = await redis_aio_pool.smembers(
                 self.redis_channels_key)
             # for each channel, verify when it was added
             for channel in channels:
                 channel_ttl = await redis_aio_pool.get(
                     self.redis_channel_ttl_key_prefix + str(channel))
                 if not isinstance(channel_ttl, str):
                     continue
                 # parse dates
                 try:
                     channel_ttl_int = pydash.parse_int(channel_ttl, 10)
                     channel_ttl_date = datetime.utcfromtimestamp(
                         channel_ttl_int)
                 except (ValueError, TypeError):
                     continue
                 # if expired, remove!
                 if channel_ttl_date + redis_reg_key_ttl_timedelta < datetime.utcnow(
                 ):
                     self.registered_channels.remove(channel)
                     await redis_aio_pool.srem(self.redis_channels_key,
                                               channel)
             # verify list of channels again
             channels = await redis_aio_pool.smembers(
                 self.redis_channels_key)
             # add any remaining channels to the channels-to-be-monitored
             self.registered_channels.update(channels)
             has_valid_pubsub = self.processing_worker_name in self.pubsubs and isinstance(
                 self.pubsubs[self.processing_worker_name],
                 redis.client.PubSub)
             if has_valid_pubsub and len(channels) > 0:
                 # calling the sync subscribe() here is fine because of Python's GIL...
                 self.pubsubs[self.processing_worker_name].subscribe(
                     *channels)
             await asyncio.sleep(1)  # being nice
     except asyncio.CancelledError:
         logger.debug("Terminating Channel Watchdog")
         raise  # re-raise is needed to safely terminate
     except aioredis.errors.RedisError:
         logger.error("Redis Error on Channel Watchdog")
     finally:
         if redis_aio_pool:
             await redis_aio_pool.close()
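The watchdog above is meant to run as a long-lived asyncio task: shutdown happens through task cancellation, which is why the CancelledError branch re-raises. A minimal sketch of starting and stopping it is shown below; the run_registry helper and the registry object are assumptions for illustration, not part of the original code.

import asyncio

async def run_registry(registry):
    # registry is assumed to be an instance of the class that defines
    # _channel_watchdog(); start the watchdog as a background task.
    watchdog_task = asyncio.ensure_future(registry._channel_watchdog())
    try:
        await asyncio.sleep(60)  # stand-in for the service's real work
    finally:
        # Cancelling triggers the CancelledError branch above; the re-raise
        # there lets the coroutine close its pool in the finally block and exit.
        watchdog_task.cancel()
        try:
            await watchdog_task
        except asyncio.CancelledError:
            pass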
Code example #2
 def get_registered_channels(cls, include_ttl=False):
     """
     Get list of registered channels, with registration time if requested.
     :type include_ttl: bool
     :rtype: list[str] | dict[str, datetime]
     """
     registered_channels = cls.r_conn.smembers(cls.redis_channels_key)
     if not include_ttl:
         return list(registered_channels)
     with cls.r_conn.pipeline() as pipe:
         for channel in registered_channels:
             pipe.get(cls.redis_channel_ttl_key_prefix + channel)
         values = pipe.execute()
     final_values = {}
     for channel, redis_val in zip(registered_channels, values):
         try:
             channel_ttl_int = pydash.parse_int(redis_val, 10)
             # parse_int returns None when the value cannot be parsed;
             # utcfromtimestamp then raises TypeError and the channel is skipped
             final_values[channel] = datetime.utcfromtimestamp(channel_ttl_int)
         except (ValueError, TypeError):
             continue
     return final_values
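A short usage sketch for the accessor above, showing its two return shapes: a plain list of channel names by default, and a channel-to-registration-time mapping when include_ttl=True. The ChannelRegistry class name is an assumption for illustration.

# ChannelRegistry is a hypothetical name for the class defining the method above.
channels = ChannelRegistry.get_registered_channels()
print(channels)  # e.g. ['orders', 'payments']

channels_with_ttl = ChannelRegistry.get_registered_channels(include_ttl=True)
for name, registered_at in channels_with_ttl.items():
    print(name, registered_at.isoformat())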
Code example #3
    def filter_queryset(self,
                        request,
                        index_queryset,
                        view,
                        extra_query_params=None):
        if not isinstance(index_queryset, Search):
            return index_queryset

        # Copy the query dict to be able to modify it as it is immutable, then update it with extra params
        data = request.query_params.copy()
        if isinstance(extra_query_params, dict):
            data.update(extra_query_params)

        # Update data from discover item shouts query if discover is passed
        discover = data.get('discover')
        if discover:
            try:
                discover_item = DiscoverItem.objects.get(id=discover)
            except ValueError:
                raise InvalidParameter('discover', _("Invalid discover id"))
            except DiscoverItem.DoesNotExist:
                msg = _(
                    "Discover Item with id '%(discover)s' does not exist") % {
                        'discover': discover
                    }
                raise InvalidParameter('discover', msg)
            else:
                data.update(discover_item.shouts_query)

        # Filter shouts by user id if a username is passed in the `profile` query param
        user = data.get('profile') or data.get('user')
        if user:
            # Replace `me` with logged in username
            if user == 'me' and request.user.is_authenticated():
                user = request.user.username

            # Get the user id using username
            try:
                user_id = str(
                    User.objects.values('pk').get(username=user)['pk'])
            except User.DoesNotExist:
                msg = _(
                    "Profile with username '%(username)s' does not exist") % {
                        'username': user
                    }
                raise InvalidParameter('profile', msg)
            else:
                index_queryset = index_queryset.filter('term', uid=user_id)

            # When listing user's own shouts show him the expired ones
            if user == request.user.username:
                setattr(view, 'get_expired', True)

        # Exclude shouts using their ids
        exclude = data.get('exclude')
        if isinstance(exclude, basestring):
            exclude = exclude.split(',')
        if exclude and not isinstance(exclude, list):
            exclude = [exclude]
        if exclude:
            index_queryset = index_queryset.filter(
                ~EQ('terms', _id=map(str, exclude)))

        # Shout type
        shout_type = data.get('shout_type')
        if shout_type:
            if shout_type not in ['all', 'offer', 'request']:
                msg = _("Should be `all`, `request` or `offer`")
                raise InvalidParameter('shout_type', msg)
            if shout_type != 'all':
                index_queryset = index_queryset.filter('term', type=shout_type)

        # Search query
        search = data.get('search')
        if search:
            index_queryset = index_queryset.query(
                'multi_match',
                query=search,
                fields=['title', 'text', 'tags'],
                fuzziness='AUTO')

        # Tags
        tags = data.get('tags')
        if tags:
            tags = tags.replace(',', ' ').split()
            tag_names = process_tags(tags)
            index_queryset = index_queryset.filter('terms', tags=tag_names)

        # Location: Country, State, City, Latitude, Longitude
        country = data.get('country', '').upper()
        if country and country != 'all':
            index_queryset = index_queryset.filter('term', country=country)
            # todo: add state
            city = data.get('city')
            if city and city != 'all':
                # todo: use other means of finding the surrounding cities like state.
                try:
                    pd_city = PredefinedCity.objects.filter(city=city,
                                                            country=country)[0]
                except IndexError:
                    pass
                else:
                    nearby_cities = pd_city.get_cities_within(
                        settings.NEARBY_CITIES_RADIUS_KM)
                    cities = map(lambda nc: nc.city, nearby_cities)
                    cities.append(city)
                    cities = arrays.unique(cities)
                    index_queryset = index_queryset.filter('terms',
                                                           city=cities)

        down_left_lat = data.get('down_left_lat')
        down_left_lng = data.get('down_left_lng')
        up_right_lat = data.get('up_right_lat')
        up_right_lng = data.get('up_right_lng')
        latlng_key = ''
        try:
            if down_left_lat:
                latlng_key = 'down_left_lat'
                down_left_lat = float(down_left_lat)
                up_right_lat = up_right_lat or 90
                if down_left_lat > float(up_right_lat) or not (
                        90 >= down_left_lat >= -90):
                    raise InvalidParameter(
                        'down_left_lat',
                        _("Should be between -90 and 90, also not greater than 'up_right_lat'"
                          ))
                index_queryset = index_queryset.filter(
                    'range', **{'latitude': {
                        'gte': down_left_lat
                    }})
            if down_left_lng:
                latlng_key = 'down_left_lng'
                down_left_lng = float(down_left_lng)
                up_right_lng = up_right_lng or 180
                if down_left_lng > float(up_right_lng) or not (
                        180 >= down_left_lng >= -180):
                    raise InvalidParameter(
                        'down_left_lng',
                        _("Should be between -180 and 180, also not greater than 'up_right_lng'"
                          ))
                index_queryset = index_queryset.filter(
                    'range', **{'longitude': {
                        'gte': down_left_lng
                    }})
            if up_right_lat:
                latlng_key = 'up_right_lat'
                if not (90 >= float(up_right_lat) >= -90):
                    raise InvalidParameter('up_right_lat',
                                           _("Should be between -90 and 90"))
                index_queryset = index_queryset.filter(
                    'range', **{'latitude': {
                        'lte': up_right_lat
                    }})
            if up_right_lng:
                latlng_key = 'up_right_lng'
                if not (180 >= float(up_right_lng) >= -180):
                    raise InvalidParameter('up_right_lng',
                                           _("Should be between -180 and 180"))
                index_queryset = index_queryset.filter(
                    'range', **{'longitude': {
                        'lte': up_right_lng
                    }})
        except ValueError:
            raise InvalidParameter(latlng_key, _("Invalid number"))

        # Category and Filters
        category = data.get('category')
        if category and category != 'all':
            try:
                category = Category.objects.prefetch_related('filters').get(
                    slug=category)
            except Category.DoesNotExist:
                msg = _("Category with slug '%(slug)s' does not exist") % {
                    'slug': category
                }
                raise InvalidParameter('category', msg)
            else:
                data['category'] = category.slug
                index_queryset = index_queryset.filter(
                    'terms', category=[category.name, category.slug])
                cat_filters = category.filters.values_list(
                    'slug', 'values_type')
                for cat_f_slug, cat_f_type in cat_filters:
                    if cat_f_type == TAG_TYPE_STR:
                        cat_f_param = data.get(cat_f_slug)
                        if cat_f_param:
                            cat_f_params = cat_f_param.split(',')
                            index_queryset = index_queryset.filter(
                                'terms',
                                **{'filters__%s' % cat_f_slug: cat_f_params})
                    elif cat_f_type == TAG_TYPE_INT:
                        for m1, m2 in [('min', 'gte'), ('max', 'lte')]:
                            cat_f_param = data.get('%s_%s' % (m1, cat_f_slug))
                            if cat_f_param:
                                index_queryset = index_queryset.filter(
                                    'range', **{
                                        'filters__%s' % cat_f_slug: {
                                            m2: cat_f_param
                                        }
                                    })

        # Price
        min_price = data.get('min_price')
        if min_price:
            index_queryset = index_queryset.filter(
                'range', **{'price': {
                    'gte': min_price
                }})

        max_price = data.get('max_price')
        if max_price:
            index_queryset = index_queryset.filter(
                'range', **{'price': {
                    'lte': max_price
                }})

        # Expired
        if not getattr(view, 'get_expired', False):
            now = timezone.now()
            min_published = now - timedelta(days=int(settings.MAX_EXPIRY_DAYS))

            # Recently published and no specified expires_at
            recently_published = EQ('range',
                                    **{'published_at': {
                                        'gte': min_published
                                    }})
            no_expiry_still_valid = EQ(
                'bool',
                filter=[~EQ('exists', field='expires_at'), recently_published])

            # Not expired
            not_expired = EQ('range', **{'expires_at': {'gte': now}})
            expiry_still_valid = EQ(
                'bool', filter=[EQ('exists', field='expires_at'), not_expired])

            index_queryset = index_queryset.filter(no_expiry_still_valid
                                                   | expiry_still_valid)

        # Sorting
        sort = data.get('sort')
        sort_types = {
            None: ('-published_at', ),
            'time': ('-published_at', ),
            'price_asc': ('price', ),
            'price_desc': ('-price', ),
        }
        if sort and sort not in sort_types:
            raise InvalidParameter('sort', _("Invalid sort"))
        # selected_sort = ('-priority',) + sort_types[sort]
        selected_sort = sort_types[sort]
        if search:
            selected_sort = ('_score', ) + selected_sort
        index_queryset = index_queryset.sort(*selected_sort)

        debug_logger.debug(index_queryset.to_dict())
        index_queryset.search_data = {
            k: parse_int(v, 10) or v
            for k, v in data.items()
        }
        return index_queryset
Code example #4
File: test_objects.py  Project: dgilland/pydash
def test_parse_int(case, expected):
    assert _.parse_int(*case) == expected
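The examples above rely on pydash.parse_int both for validating Redis values (example #2) and for coercing query parameters with the `parse_int(v, 10) or v` idiom (examples #3 and #5). A minimal standalone sketch of that behaviour: parse_int returns an int on success and None when the value cannot be parsed, so the `or v` fallback keeps the original string for non-numeric parameters.

import pydash

assert pydash.parse_int('42', 10) == 42               # numeric string -> int
assert pydash.parse_int('not-a-number', 10) is None   # unparseable -> None

# The coercion idiom used in the filter_queryset examples. Note that a
# literal '0' also falls back to the string, because the parsed 0 is falsy.
for v in ['7', 'offer', '0']:
    print(pydash.parse_int(v, 10) or v)  # -> 7, 'offer', '0'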
Code example #5
    def filter_queryset(self,
                        request,
                        index_queryset,
                        view,
                        extra_query_params=None):
        if not isinstance(index_queryset, Search):
            return index_queryset

        # Copy the query dict to be able to modify it as it is immutable, then update it with extra params
        data = request.query_params.copy()
        if isinstance(extra_query_params, dict):
            data.update(extra_query_params)

        # Update data from discover item shouts query if discover is passed
        discover = data.get('discover')
        if discover:
            try:
                discover_item = DiscoverItem.objects.get(id=discover)
            except DiscoverItem.DoesNotExist:
                raise ValidationError({
                    'discover':
                    ["Discover Item with id '%s' does not exist" % discover]
                })
            else:
                data.update(discover_item.shouts_query)

        # Filter shouts by user id if a username is passed in the `user` query param
        user = data.get('user')
        if user:
            try:
                user_id = User.objects.get(username=user).pk
            except User.DoesNotExist:
                raise ValidationError({
                    'user': ["User with username '%s' does not exist" % user]
                })
            else:
                index_queryset = index_queryset.filter('term', uid=user_id)

        # Exclude ids
        exclude_ids = data.get('exclude_ids')
        if isinstance(exclude_ids, basestring):
            exclude_ids = exclude_ids.split(',')
        if exclude_ids and not isinstance(exclude_ids, list):
            exclude_ids = [exclude_ids]
        if exclude_ids:
            index_queryset = index_queryset.filter(
                ~Q('terms', _id=map(str, exclude_ids)))

        # Shout type
        shout_type = data.get('shout_type')
        if shout_type:
            if shout_type not in ['all', 'offer', 'request']:
                raise ValidationError(
                    {'shout_type': ["Should be `all`, `request` or `offer`"]})
            if shout_type != 'all':
                index_queryset = index_queryset.filter('term', type=shout_type)

        # Search query
        search = data.get('search')
        if search:
            index_queryset = index_queryset.query(
                'multi_match',
                query=search,
                fields=['title', 'text', 'tags'],
                fuzziness='AUTO')

        # Tags
        tags = data.get('tags')
        if tags:
            tags = tags.replace(',', ' ').split()
            tag_names = process_tags(tags)
            index_queryset = index_queryset.filter('terms', tags=tag_names)

        # Location: Country, State, City, Latitude, Longitude
        country = data.get('country', '').upper()
        if country and country != 'all':
            index_queryset = index_queryset.filter('term', country=country)
            # todo: add state
            city = data.get('city')
            if city and city != 'all':
                # todo: use other means of finding the surrounding cities like state.
                try:
                    pd_city = PredefinedCity.objects.filter(city=city,
                                                            country=country)[0]
                except IndexError:
                    pass
                else:
                    nearby_cities = pd_city.get_cities_within(
                        settings.NEARBY_CITIES_RADIUS_KM)
                    cities = map(lambda nc: nc.city, nearby_cities)
                    cities.append(city)
                    cities = arrays.unique(cities)
                    index_queryset = index_queryset.filter('terms',
                                                           city=cities)

        latlng_errors = OrderedDict()
        down_left_lat = data.get('down_left_lat')
        down_left_lng = data.get('down_left_lng')
        up_right_lat = data.get('up_right_lat')
        up_right_lng = data.get('up_right_lng')
        try:
            if down_left_lat:
                down_left_lat = float(down_left_lat)
                if down_left_lat > float(up_right_lat) or not (
                        90 >= down_left_lat >= -90):
                    latlng_errors['down_left_lat'] = [
                        "should be between -90 and 90, also not greater than 'up_right_lat'"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'latitude': {
                        'gte': down_left_lat
                    }})
            if down_left_lng:
                down_left_lng = float(down_left_lng)
                if down_left_lng > float(up_right_lng) or not (
                        180 >= down_left_lng >= -180):
                    latlng_errors['down_left_lng'] = [
                        "should be between -180 and 180, also not greater than 'up_right_lng'"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'longitude': {
                        'gte': down_left_lng
                    }})
            if up_right_lat:
                if not (90 >= float(up_right_lat) >= -90):
                    latlng_errors['up_right_lat'] = [
                        "should be between -90 and 90"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'latitude': {
                        'lte': up_right_lat
                    }})
            if up_right_lng:
                if not (180 >= float(up_right_lng) >= -180):
                    latlng_errors['up_right_lng'] = [
                        "should be between -180 and 180"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'longitude': {
                        'lte': up_right_lng
                    }})
        except (ValueError, TypeError):
            # TypeError also covers float(None) when a paired bound is missing
            latlng_errors['error'] = ["invalid lat or lng parameters"]
        if latlng_errors:
            raise ValidationError(latlng_errors)

        # Category and Filters
        category = data.get('category')
        if category and category != 'all':
            try:
                category = Category.objects.prefetch_related('filters').get(
                    DQ(name=category) | DQ(slug=category))
            except Category.DoesNotExist:
                raise ValidationError({
                    'category': [
                        "Category with name or slug '%s' does not exist" %
                        category
                    ]
                })
            else:
                data['category'] = category.slug
                index_queryset = index_queryset.filter(
                    'terms', category=[category.name, category.slug])
                cat_filters = category.filters.values_list(
                    'slug', 'values_type')
                for cat_f_slug, cat_f_type in cat_filters:
                    if cat_f_type == TAG_TYPE_STR:
                        cat_f_param = data.get(cat_f_slug)
                        if cat_f_param:
                            index_queryset = index_queryset.filter(
                                'term',
                                **{'filters__%s' % cat_f_slug: cat_f_param})
                    elif cat_f_type == TAG_TYPE_INT:
                        for m1, m2 in [('min', 'gte'), ('max', 'lte')]:
                            cat_f_param = data.get('%s_%s' % (m1, cat_f_slug))
                            if cat_f_param:
                                index_queryset = index_queryset.filter(
                                    'range', **{
                                        'filters__%s' % cat_f_slug: {
                                            m2: cat_f_param
                                        }
                                    })

        # Price
        min_price = data.get('min_price')
        if min_price:
            index_queryset = index_queryset.filter(
                'range', **{'price': {
                    'gte': min_price
                }})

        max_price = data.get('max_price')
        if max_price:
            index_queryset = index_queryset.filter(
                'range', **{'price': {
                    'lte': max_price
                }})

        # Sorting
        sort = data.get('sort')
        sort_types = {
            None: ('-published_at', ),
            'time': ('-published_at', ),
            'price_asc': ('price', ),
            'price_desc': ('-price', ),
        }
        if sort and sort not in sort_types:
            raise ValidationError({'sort': ["Invalid sort"]})
        # selected_sort = ('-priority',) + sort_types[sort]
        selected_sort = sort_types[sort]
        if search:
            selected_sort = ('_score', ) + selected_sort
        index_queryset = index_queryset.sort(*selected_sort)

        debug_logger.debug(index_queryset.to_dict())
        index_queryset.search_data = {
            k: parse_int(v, 10) or v
            for k, v in data.items()
        }
        return index_queryset