Example #1
    def get(self, request, *_args, **_kwargs):
        """
        Typeahead uses the ngram_analyzer as the index_analyzer to generate ngrams of the title during indexing.
        e.g. Data Science -> da, dat, at, ata, data, etc.
        Typeahead uses the lowercase analyzer as the search_analyzer.
        The ngram_analyzer uses the lowercase filter as well, which makes typeahead case insensitive.
        Available analyzers are defined in the index _settings, and field-level analyzers are defined in the index _mapping.
        NGrams are used rather than EdgeNgrams because NGrams allow partial searches across white space:
        e.g. data sci -> data science, but not data analysis or scientific method
        ---
        parameters:
            - name: q
              description: "Search text"
              paramType: query
              required: true
              type: string
            - name: orgs
              description: "Organization short codes"
              paramType: query
              required: false
              type: List of string
        """
        query = request.query_params.get('q')
        if not query:
            raise ValidationError(
                "The 'q' querystring parameter is required for searching.")
        words = query.split()
        org_keys = self.request.GET.getlist('org', None)

        queryset = Person.objects.all()

        if org_keys:
            # We are pulling the people who are part of course runs belonging to the given organizations.
            # The empty order_by() clears the default ordering on People, since
            # we don't care about the order in which they are returned.
            queryset = queryset.filter(
                courses_staffed__course__authoring_organizations__key__in=org_keys
            ).distinct().order_by()

        for word in words:
            # Progressively filter the same queryset - every word must match something
            queryset = queryset.filter(
                DQ(given_name__icontains=word)
                | DQ(family_name__icontains=word))

        # No match? Maybe they gave us a UUID...
        if not queryset:
            try:
                q_uuid = uuid.UUID(query).hex
                queryset = Person.objects.filter(uuid=q_uuid)
            except ValueError:
                pass

        context = {'request': self.request}
        serialized_people = [
            serializers.PersonSerializer(p, context=context).data
            for p in queryset
        ]
        return Response(serialized_people, status=status.HTTP_200_OK)
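
The docstring above refers to an `ngram_analyzer` and a `lowercase` search analyzer that live in the index settings, which are not shown in this snippet. A minimal sketch of what such analyzers might look like with elasticsearch-dsl follows; the tokenizer name, gram sizes, and the `PersonDocument` class are assumptions, not the project's actual settings.

from elasticsearch_dsl import Document, Text, analyzer, tokenizer

# Index-time analyzer: emit ngrams of the title and lowercase them, so partial,
# case-insensitive input such as "data sci" can match "Data Science".
ngram_analyzer = analyzer(
    'ngram_analyzer',
    tokenizer=tokenizer('ngram_tokenizer', type='ngram', min_gram=2, max_gram=10),
    filter=['lowercase'],
)

# Search-time analyzer: only lowercase the query, without generating ngrams.
lowercase_analyzer = analyzer(
    'lowercase_analyzer',
    tokenizer='standard',
    filter=['lowercase'],
)

class PersonDocument(Document):
    # Field-level analyzers are attached in the index _mapping, as the docstring notes.
    title = Text(analyzer=ngram_analyzer, search_analyzer=lowercase_analyzer)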
Example #2
    def filter_queryset(self, request, queryset, view):
        if request.user.id:
            queryset = queryset.filter(
                DQ(owner=request.user) |
                DQ(initiative__activity_manager=request.user) |
                DQ(initiative__owner=request.user) |
                DQ(status__in=self.public_statuses)
            ).exclude(status=ActivityStateMachine.deleted.value)
        else:
            queryset = queryset.filter(status__in=self.public_statuses)

        return super(ActivityFilter, self).filter_queryset(request, queryset, view)
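
`filter_queryset(self, request, queryset, view)` is the DRF filter-backend hook, so `ActivityFilter` is presumably attached to a list view. A hedged sketch of that wiring; the `Activity` model and `ActivitySerializer` names are assumptions.

from rest_framework import generics

class ActivityList(generics.ListAPIView):
    # DRF calls ActivityFilter.filter_queryset(request, queryset, view) for each
    # backend listed in filter_backends before serializing the results.
    queryset = Activity.objects.all()
    serializer_class = ActivitySerializer
    filter_backends = [ActivityFilter]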
Example #3
def get_user(email=None, password=None, token=None, external_id_provider=None, external_id=None):
    """
    Get an instance of `User` matching the provided params.

    1. email
    2. email and password
    3. token
    4. external_id_provider and external_id

    :param token: the token in verification key
    :param email: user's email
    :param password: user's password
    :param external_id_provider: the external identity provider
    :param external_id: the external id
    :rtype: User or None
    """
    from osf.models import OSFUser, Email

    if not any([email, password, token, external_id_provider, external_id]):
        return None

    if password and not email:
        raise AssertionError('If a password is provided, an email must also be provided.')

    qs = OSFUser.objects.filter()

    if email:
        email = email.strip().lower()
        qs = qs.filter(DQ(DQ(username=email) | DQ(id=Subquery(Email.objects.filter(address=email).values('user_id')))))

    if password:
        password = password.strip()
        try:
            user = qs.get()
        except Exception as err:
            logger.error(err)
            user = None
        if user and not user.check_password(password):
            return False
        return user

    if token:
        qs = qs.filter(verification_key=token)

    if external_id_provider and external_id:
        qs = qs.filter(**{'external_identity__{}__{}'.format(external_id_provider, external_id): 'VERIFIED'})

    try:
        user = qs.get()
        return user
    except Exception as err:
        logger.error(err)
        return None
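
A few hedged usage examples for the lookup modes the docstring lists; the emails, token, and external id below are made up.

# 1. by email only
user = get_user(email='alice@example.com')
# 2. by email and password; returns False when the password does not match
user = get_user(email='alice@example.com', password='correct horse battery staple')
# 3. by verification token
user = get_user(token='some-verification-key')
# 4. by external identity, matched against a VERIFIED entry
user = get_user(external_id_provider='ORCID', external_id='0000-0002-1825-0097')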
Example #4
    def get_queryset(self):
        queryset = super().get_queryset()
        group = user_group(self.request.user)
        if group in ["admin", "reviewer"]:
            return queryset

        elif group == "basic":
            return queryset.filter(
                DQ(access=PUBLIC)
                | DQ(creator=self.request.user)
                | DQ(collaborators=self.request.user)
                | DQ(curators=self.request.user)
            ).distinct()

        elif group == "anonymous":
            return queryset.filter(access=PUBLIC)
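
The `.distinct()` call matters here because OR-ing `Q` objects across to-many relations (`collaborators`, `curators`) joins those tables, so the same object can come back once per matching related row. A minimal sketch of the pattern; the `Project` model and `user` variable are hypothetical.

from django.db.models import Q as DQ

# Without distinct(), a project with several matching collaborators appears once per joined row.
matches = Project.objects.filter(
    DQ(creator=user) | DQ(collaborators=user)
)
unique_matches = matches.distinct()  # one row per Project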
Example #5
    def db_resolution_query(self):
        tip_results = {}
        man_results = {}
        custom_results = {}
        for query in self.found_terms:
            # Only queries that matched at least one model and one error are resolved.
            if self.found_terms[query]['models'] and self.found_terms[query]['errors']:
                model_query = DQ()
                error_query = DQ()
                # OR together every matched model number and every matched error name.
                for item in self.found_terms[query]['models']:
                    for model in item:
                        model_query = model_query | DQ(
                            model_id__model_number=model.model_number)
                for item in self.found_terms[query]['errors']:
                    for error in item:
                        error_query = error_query | DQ(
                            repairs_error__error_name=error.error_name)
                # A fix must match both a model and an error (the two filters are ANDed).
                tip_results[query] = TechTipFix.objects.filter(
                    model_query).filter(error_query).distinct()
                man_results[query] = ManualFix.objects.filter(
                    model_query).filter(error_query).distinct()
                custom_results[query] = CustomFixes.objects.filter(
                    model_query).filter(error_query).distinct()
        return tip_results, man_results, custom_results
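
The loops above rely on the fact that an empty `Q()` combined with `|` leaves the other operand unchanged, so conditions can be accumulated into a single OR clause. A small sketch of that pattern on its own; the model numbers are made up.

from django.db.models import Q as DQ

model_query = DQ()                       # empty Q(): OR-ing with it keeps the other condition as-is
for number in ['A100', 'B200']:          # hypothetical model numbers
    model_query = model_query | DQ(model_id__model_number=number)
# Equivalent to DQ(model_id__model_number='A100') | DQ(model_id__model_number='B200')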
Example #6
def recent_public_registrations(n=10):
    from django.db.models import Q as DQ
    Registration = apps.get_model('osf.Registration')

    return Registration.objects.filter(
        is_public=True,
        is_deleted=False,
    ).filter(
        DQ(DQ(embargo__isnull=True) | ~DQ(embargo__state='unapproved'))
        & DQ(DQ(retraction__isnull=True) | ~DQ(retraction__state='approved'))
    ).get_roots().order_by('-registered_date').limit(n)
Example #7
    def filter_queryset(self,
                        request,
                        index_queryset,
                        view,
                        extra_query_params=None):
        if not isinstance(index_queryset, Search):
            return index_queryset

        # Copy the query dict to be able to modify it as it is immutable, then update it with extra params
        data = request.query_params.copy()
        if isinstance(extra_query_params, dict):
            data.update(extra_query_params)

        # Update data from discover item shouts query if discover is passed
        discover = data.get('discover')
        if discover:
            try:
                discover_item = DiscoverItem.objects.get(id=discover)
            except DiscoverItem.DoesNotExist:
                raise ValidationError({
                    'discover':
                    ["Discover Item with id '%s' does not exist" % discover]
                })
            else:
                data.update(discover_item.shouts_query)

        # Filter shouts by user id if user username or id are passed in `user` query param
        user = data.get('user')
        if user:
            try:
                user_id = User.objects.get(username=user).pk
            except User.DoesNotExist:
                raise ValidationError({
                    'user': ["User with username '%s' does not exist" % user]
                })
            else:
                index_queryset = index_queryset.filter('term', uid=user_id)

        # Exclude ids
        exclude_ids = data.get('exclude_ids')
        if isinstance(exclude_ids, basestring):
            exclude_ids = exclude_ids.split(',')
        if exclude_ids and not isinstance(exclude_ids, list):
            exclude_ids = [exclude_ids]
        if exclude_ids:
            index_queryset = index_queryset.filter(
                ~Q('terms', _id=map(str, exclude_ids)))

        # Shout type
        shout_type = data.get('shout_type')
        if shout_type:
            if shout_type not in ['all', 'offer', 'request']:
                raise ValidationError(
                    {'shout_type': ["Should be `all`, `request` or `offer`"]})
            if shout_type != 'all':
                index_queryset = index_queryset.filter('term', type=shout_type)

        # Search query
        search = data.get('search')
        if search:
            index_queryset = index_queryset.query(
                'multi_match',
                query=search,
                fields=['title', 'text', 'tags'],
                fuzziness='AUTO')

        # Tags
        tags = data.get('tags')
        if tags:
            tags = tags.replace(',', ' ').split()
            tag_names = process_tags(tags)
            index_queryset = index_queryset.filter('terms', tags=tag_names)

        # Location: Country, State, City, Latitude, Longitude
        country = data.get('country', '').upper()
        if country and country != 'all':
            index_queryset = index_queryset.filter('term', country=country)
            # todo: add state
            city = data.get('city')
            if city and city != 'all':
                # todo: use other means of finding the surrounding cities like state.
                try:
                    pd_city = PredefinedCity.objects.filter(city=city,
                                                            country=country)[0]
                except IndexError:
                    pass
                else:
                    nearby_cities = pd_city.get_cities_within(
                        settings.NEARBY_CITIES_RADIUS_KM)
                    cities = map(lambda nc: nc.city, nearby_cities)
                    cities.append(city)
                    cities = arrays.unique(cities)
                    index_queryset = index_queryset.filter('terms',
                                                           city=cities)

        latlng_errors = OrderedDict()
        down_left_lat = data.get('down_left_lat')
        down_left_lng = data.get('down_left_lng')
        up_right_lat = data.get('up_right_lat')
        up_right_lng = data.get('up_right_lng')
        try:
            if down_left_lat:
                down_left_lat = float(down_left_lat)
                if (up_right_lat and down_left_lat > float(up_right_lat)) or not (
                        90 >= down_left_lat >= -90):
                    latlng_errors['down_left_lat'] = [
                        "should be between -90 and 90, also not greater than 'up_right_lat'"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'latitude': {
                        'gte': down_left_lat
                    }})
            if down_left_lng:
                down_left_lng = float(down_left_lng)
                if (up_right_lng and down_left_lng > float(up_right_lng)) or not (
                        180 >= down_left_lng >= -180):
                    latlng_errors['down_left_lng'] = [
                        "should be between -180 and 180, also not greater than 'up_right_lng'"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'longitude': {
                        'gte': down_left_lng
                    }})
            if up_right_lat:
                if not (90 >= float(up_right_lat) >= -90):
                    latlng_errors['up_right_lat'] = [
                        "should be between -90 and 90"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'latitude': {
                        'lte': up_right_lat
                    }})
            if up_right_lng:
                if not (180 >= float(up_right_lng) >= -180):
                    latlng_errors['up_right_lng'] = [
                        "should be between -180 and 180"
                    ]
                index_queryset = index_queryset.filter(
                    'range', **{'longitude': {
                        'lte': up_right_lng
                    }})
        except ValueError:
            latlng_errors['error'] = ["invalid lat or lng parameters"]
        if latlng_errors:
            raise ValidationError(latlng_errors)

        # Category and Filters
        category = data.get('category')
        if category and category != 'all':
            try:
                category = Category.objects.prefetch_related('filters').get(
                    DQ(name=category) | DQ(slug=category))
            except Category.DoesNotExist:
                raise ValidationError({
                    'category': [
                        "Category with name or slug '%s' does not exist" %
                        category
                    ]
                })
            else:
                data['category'] = category.slug
                index_queryset = index_queryset.filter(
                    'terms', category=[category.name, category.slug])
                cat_filters = category.filters.values_list(
                    'slug', 'values_type')
                for cat_f_slug, cat_f_type in cat_filters:
                    if cat_f_type == TAG_TYPE_STR:
                        cat_f_param = data.get(cat_f_slug)
                        if cat_f_param:
                            index_queryset = index_queryset.filter(
                                'term',
                                **{'filters__%s' % cat_f_slug: cat_f_param})
                    elif cat_f_type == TAG_TYPE_INT:
                        for m1, m2 in [('min', 'gte'), ('max', 'lte')]:
                            cat_f_param = data.get('%s_%s' % (m1, cat_f_slug))
                            if cat_f_param:
                                index_queryset = index_queryset.filter(
                                    'range', **{
                                        'filters__%s' % cat_f_slug: {
                                            m2: cat_f_param
                                        }
                                    })

        # Price
        min_price = data.get('min_price')
        if min_price:
            index_queryset = index_queryset.filter(
                'range', **{'price': {
                    'gte': min_price
                }})

        max_price = data.get('max_price')
        if max_price:
            index_queryset = index_queryset.filter(
                'range', **{'price': {
                    'lte': max_price
                }})

        # Sorting
        sort = data.get('sort')
        sort_types = {
            None: ('-published_at', ),
            'time': ('-published_at', ),
            'price_asc': ('price', ),
            'price_desc': ('-price', ),
        }
        if sort and sort not in sort_types:
            raise ValidationError({'sort': ["Invalid sort"]})
        # selected_sort = ('-priority',) + sort_types[sort]
        selected_sort = sort_types[sort]
        if search:
            selected_sort = ('_score', ) + selected_sort
        index_queryset = index_queryset.sort(*selected_sort)

        debug_logger.debug(index_queryset.to_dict())
        index_queryset.search_data = {
            k: parse_int(v, 10) or v
            for k, v in data.items()
        }
        return index_queryset
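
For context, `index_queryset` here is an elasticsearch-dsl `Search` object (see the `isinstance` check at the top), so each `.filter(...)`, `.query(...)`, and `.sort(...)` call returns a new search with the extra clause added. A hedged sketch of the same calls in isolation; the index name and field values are assumptions.

from elasticsearch_dsl import Search

s = Search(index='shouts')
s = s.filter('term', type='offer')
s = s.query('multi_match', query='bike', fields=['title', 'text', 'tags'], fuzziness='AUTO')
s = s.filter('range', **{'price': {'gte': 10, 'lte': 100}})
s = s.sort('_score', '-published_at')
print(s.to_dict())  # the request body that would be sent to Elasticsearch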