Example #1
    def search(self):

        sqs = super(TextSearchForm, self).search()

        # Invalid or empty queries fall back to the full queryset, still
        # excluding the "topic"/"subtopic" speaker-position entries.
        if not self.is_valid() or not self.cleaned_data.get('q'):
            return self.searchqueryset.all().exclude(
                speakerposition__exact="subtopic").exclude(
                    speakerposition__exact="topic")

        sqs = sqs.exclude(speakerposition__exact="subtopic").exclude(
            speakerposition__exact="topic")

        # Hand the raw query to Solr's edismax parser, boosting matches in
        # the speechtext field 1.5x over the general text field.
        alt_q = AltParser(
            "edismax",
            self.cleaned_data["q"],
            qf="speechtext^1.5 text",
        )
        sqs = sqs.filter(content=alt_q)

        #sqs = sqs.filter(text__exact=AutoQuery(self.cleaned_data['q']))

        if self.load_all:
            sqs = sqs.load_all()

        return sqs
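A minimal sketch of how a form like this is typically driven, assuming TextSearchForm subclasses haystack.forms.SearchForm (which the super().search() call suggests); the bound data and query text below are illustrative only:

# Hypothetical usage of Example #1's form; assumes TextSearchForm subclasses
# haystack.forms.SearchForm and is importable from wherever Example #1 lives.
from haystack.query import SearchQuerySet

form = TextSearchForm(
    {"q": "budget debate"},           # bound form data (illustrative query)
    searchqueryset=SearchQuerySet(),  # consumed by SearchForm.__init__
    load_all=True,                    # enables the sqs.load_all() branch above
)
results = form.search()               # a SearchQuerySet, evaluated lazily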
Example #2
 def test_build_complex_altparser_query(self):
     self.sq.add_filter(SQ(content=AltParser('dismax', "Don't panic", qf='text')))
     self.sq.add_filter(SQ(pub_date__lte=Exact('2009-02-10 01:59:00')))
     self.sq.add_filter(SQ(author__gt='daniel'))
     self.sq.add_filter(SQ(created__lt=Exact('2009-02-12 12:13:00')))
     self.sq.add_filter(SQ(title__gte='B'))
     self.sq.add_filter(SQ(id__in=[1, 2, 3]))
     self.sq.add_filter(SQ(rating__range=[3, 5]))
     self.assertEqual(self.sq.build_query(), u'((_query_:"{!dismax qf=text}Don\'t panic") AND pub_date:([* TO "2009-02-10 01:59:00"]) AND author:({"daniel" TO *}) AND created:({* TO "2009-02-12 12:13:00"}) AND title:(["B" TO *]) AND id:("1" OR "2" OR "3") AND rating:(["3" TO "5"]))')
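The expected string pins down what AltParser contributes to the final query: a nested query in Solr's local-params syntax. A stdlib-only sketch of that string shape (illustrative, not Haystack's own implementation):

# Stdlib-only sketch of the nested-query string the test above expects;
# it only reconstructs the "{!parser key=value}querystring" shape.
def local_params_query(parser_name, query_string, **kwargs):
    params = " ".join("%s=%s" % (k, v) for k, v in sorted(kwargs.items()))
    return '_query_:"{!%s %s}%s"' % (parser_name, params, query_string)

print(local_params_query("dismax", "Don't panic", qf="text"))
# -> _query_:"{!dismax qf=text}Don't panic"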
Example #3
    def search(self):
        if not self.is_valid():
            return self.no_query_found()

        # filter_or goes here
        sqs = self.searchqueryset.all()

        kwargs = {}

        self.excluded_fields.extend(['q', 'type', 'since', 'until', 'order'])

        if self.cleaned_data['type']:
            all_types = self.cleaned_data['type'].split(' ')
            sqs = sqs.filter_or(type__in=all_types)

        for key in self.fields.keys():
            value = self.cleaned_data[key]
            if value and key not in self.excluded_fields:
                kwargs[key] = self.cleaned_data[key]

        sqs = sqs.filter(**kwargs)

        if self.cleaned_data['q']:
            # Strip accents/diacritics so the query matches ASCII-folded terms.
            q = unicodedata.normalize('NFKD',
                                      self.cleaned_data.get('q')).encode(
                                          'ascii', 'ignore')

            dismax_opts = {
                'q.alt': '*:*',
                'pf': 'title^2.1 author^1.9 description^1.7',
                'mm': '2<70%',

                # Date boosting:
                # http://wiki.apache.org/solr/FunctionQuery#Date_Boosting
                'bf': 'recip(ms(NOW/HOUR,modified),3.16e-11,1,1)^10',
            }

            # AltParser emits Solr-specific syntax, so fall back to AutoQuery
            # when the Whoosh backend is configured.
            whoosh_engine = 'haystack.backends.whoosh_backend.WhooshEngine'
            if settings.HAYSTACK_CONNECTIONS['default']['ENGINE'] != whoosh_engine:
                sqs = sqs.filter(
                    content=AltParser('edismax', q, **dismax_opts))
            else:
                sqs = sqs.filter(content=AutoQuery(q))

        if self.cleaned_data['order']:
            for option, dict_order in settings.ORDERING_DATA.items():
                if self.cleaned_data['order'] == option:
                    if dict_order['fields']:
                        sqs = sqs.order_by(*dict_order['fields'])

        if self.cleaned_data['since']:
            sqs = sqs.filter(modified__gte=self.cleaned_data['since'])
        if self.cleaned_data['until']:
            sqs = sqs.filter(modified__lte=self.cleaned_data['until'])

        return sqs
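The 'bf' entry in dismax_opts is the date-boosting recipe from the linked Solr wiki page: recip(x, m, a, b) evaluates to a / (m*x + b), and 3.16e-11 is roughly 1 / (milliseconds per year), so the boost decays smoothly with document age; the trailing ^10 scales that contribution. A small illustrative calculation (plain arithmetic, not taken from the source):

# Sketch of what 'recip(ms(NOW/HOUR,modified),3.16e-11,1,1)' evaluates to:
# Solr's recip(x, m, a, b) = a / (m * x + b), with x = milliseconds since
# the document was last modified.
def recip(x, m=3.16e-11, a=1.0, b=1.0):
    return a / (m * x + b)

MS_PER_DAY = 24 * 60 * 60 * 1000
for days in (0, 30, 365, 3650):
    print("%4d days old -> boost %.3f" % (days, recip(days * MS_PER_DAY)))
# Roughly: 1.000 today, ~0.92 after a month, ~0.50 after a year, ~0.09 after ten years.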
Example #4
 def get_queryset(self):
     if self.request.POST:
         # Build the AltParser query across the view's configured query_fields.
         parser = AltParser(parser_name=self.parser_name,
                            query_string=self.request.POST["q"],
                            qf=" ".join(self.query_fields),
                            q_alt="*:*",
                            mm=1)
         sqs = SearchQuerySet().filter(content=parser)
         return sqs
     return []
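Example #4 passes its extra options as plain keyword arguments (q_alt, mm). Solr parameter names can also contain dots (q.alt), which cannot be written as keyword arguments directly; Examples #3 and #6 handle this by unpacking a dict. A minimal sketch of that pattern, with placeholder field names and query text:

# Sketch of the dict-unpacking pattern from Examples #3 and #6, which allows
# dotted Solr parameter names such as 'q.alt'. Field names are placeholders.
from haystack.inputs import AltParser
from haystack.query import SearchQuerySet

dismax_opts = {
    'q.alt': '*:*',                     # match-all fallback when q is empty
    'pf': 'title^2.1 description^1.7',  # phrase-field boosts
    'mm': '2<70%',                      # minimum-should-match
}
parser = AltParser('edismax', 'user query here', **dismax_opts)
sqs = SearchQuerySet().filter(content=parser)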
Example #5
 def test_build_complex_altparser_query(self):
     self.sq.add_filter(SQ(content=AltParser("dismax", "Don't panic", qf="text")))
     self.sq.add_filter(SQ(pub_date__lte=Exact("2009-02-10 01:59:00")))
     self.sq.add_filter(SQ(author__gt="daniel"))
     self.sq.add_filter(SQ(created__lt=Exact("2009-02-12 12:13:00")))
     self.sq.add_filter(SQ(title__gte="B"))
     self.sq.add_filter(SQ(id__in=[1, 2, 3]))
     self.sq.add_filter(SQ(rating__range=[3, 5]))
     query = self.sq.build_query()
     self.assertTrue('(_query_:"{!dismax qf=text}Don\'t panic")' in query)
     self.assertTrue('pub_date:([* TO "2009-02-10 01:59:00"])' in query)
     self.assertTrue('author:({"daniel" TO *})' in query)
     self.assertTrue('created:({* TO "2009-02-12 12:13:00"})' in query)
     self.assertTrue('title:(["B" TO *])' in query)
     self.assertTrue('id:("1" OR "2" OR "3")' in query)
     self.assertTrue('rating:(["3" TO "5"])' in query)
Example #6
    def search(self):
        if not self.is_valid():
            return self.no_query_found()

        # filter_or goes here
        sqs = self.searchqueryset.all()
        mimetype = self.cleaned_data['mimetype']
        if mimetype:
            # Collect the concrete mimetypes for every requested group. They are
            # applied straight away only when no size filter is present; otherwise
            # they are merged with the size filters below.
            filter_mimetypes = {'mimetype__in': []}
            for type_, display, mimelist in settings.FILE_TYPE_GROUPINGS:
                if type_ in mimetype:
                    filter_mimetypes['mimetype__in'] += mimelist
                    if not self.cleaned_data['size']:
                        sqs = sqs.filter_or(mimetype__in=mimelist)

        if self.cleaned_data['size']:
            # 524288 = (1024 * 1024) / 2, the ~500 KB threshold
            # 10485760 = (1024 * 1024) * 10, the 10 MB threshold
            filter_sizes = {}
            filter_sizes_exp = {}
            if '<500KB' in self.cleaned_data['size']:
                filter_sizes['size__lt'] = 524288
            if '500KB__10MB' in self.cleaned_data['size']:
                filter_sizes_exp['size__gte'] = 524288
                filter_sizes_exp['size__lte'] = 10485760
            if '>10MB' in self.cleaned_data['size']:
                filter_sizes['size__gt'] = 10485760

            if self.cleaned_data['mimetype']:
                # Combine each size constraint with the collected mimetype filter.
                if filter_sizes_exp:
                    filter_sizes_exp.update(filter_mimetypes)
                    sqs = sqs.filter_or(**filter_sizes_exp)
                for key, value in filter_sizes.items():
                    filter_or = {key: value}
                    filter_or.update(filter_mimetypes)
                    sqs = sqs.filter_or(**filter_or)
            else:
                for key, value in filter_sizes.items():
                    sqs = sqs.filter_or(**{key: value})
                sqs = sqs.filter_or(**filter_sizes_exp)

        if self.cleaned_data['used_by']:
            sqs = sqs.filter_or(used_by__in=self.cleaned_data['used_by'].split())

        if self.cleaned_data['q']:
            # Strip accents/diacritics so the query matches ASCII-folded terms.
            q = unicodedata.normalize(
                'NFKD', self.cleaned_data.get('q')
            ).encode('ascii', 'ignore')

            dismax_opts = {
                'q.alt': '*:*',
                'pf': 'title^2.1 author^1.9 description^1.7',
                'mm': '2<70%',

                # Date boosting: http://wiki.apache.org/solr/FunctionQuery#Date_Boosting
                'bf': 'recip(ms(NOW/HOUR,modified),3.16e-11,1,1)^10',
            }

            sqs = sqs.filter(content=AltParser('edismax', q, **dismax_opts))

        if self.cleaned_data['type']:
            sqs = sqs.filter(type=self.cleaned_data['type'])

        if self.cleaned_data['order']:
            for option, dict_order in settings.ORDERING_DATA.items():
                if self.cleaned_data['order'] == option:
                    if dict_order['fields']:
                        sqs = sqs.order_by(*dict_order['fields'])

        if self.cleaned_data['author']:
            sqs = sqs.filter(
                fullname_and_username__contains=self.cleaned_data['author']
            )

        if self.cleaned_data['modified_by']:
            sqs = sqs.filter(
                fullname_and_username__contains=self.cleaned_data['modified_by']
            )

        if self.cleaned_data['milestone']:
            sqs = sqs.filter(milestone=self.cleaned_data['milestone'])
        if self.cleaned_data['priority']:
            sqs = sqs.filter(priority=self.cleaned_data['priority'])
        if self.cleaned_data['severity']:
            sqs = sqs.filter(severity=self.cleaned_data['severity'])
        if self.cleaned_data['reporter']:
            sqs = sqs.filter(reporter=self.cleaned_data['reporter'])
        if self.cleaned_data['keywords']:
            sqs = sqs.filter(keywords=self.cleaned_data['keywords'])
        if self.cleaned_data['collaborators']:
            sqs = sqs.filter(collaborators=self.cleaned_data['collaborators'])
        if self.cleaned_data['repository_name']:
            sqs = sqs.filter(
                repository_name=self.cleaned_data['repository_name']
            )
        if self.cleaned_data['username']:
            sqs = sqs.filter(username=self.cleaned_data['username'])
        if self.cleaned_data['name']:
            sqs = sqs.filter(name=self.cleaned_data['name'])
        if self.cleaned_data['institution']:
            sqs = sqs.filter(institution=self.cleaned_data['institution'])
        if self.cleaned_data['role']:
            sqs = sqs.filter(role=self.cleaned_data['role'])
        if self.cleaned_data['tag']:
            sqs = sqs.filter(tag=self.cleaned_data['tag'])

        if self.cleaned_data['list']:
            sqs = sqs.filter(tag__in=self.cleaned_data['list'])

        if self.cleaned_data['since']:
            sqs = sqs.filter(modified__gte=self.cleaned_data['since'])
        if self.cleaned_data['until']:
            sqs = sqs.filter(modified__lte=self.cleaned_data['until'])

        if self.cleaned_data['filename']:
            sqs = sqs.filter(filename=self.cleaned_data['filename'])

        return sqs
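A side note on the mm='2<70%' option used in Examples #3 and #6: it is Solr's minimum-should-match spec, commonly read as "with two or fewer optional clauses all must match; with more, at least 70% (rounded down) must". A rough illustrative sketch of that rule, under that reading:

# Illustrative reading of mm='2<70%': all clauses required up to the threshold,
# then a rounded-down percentage of them beyond it. Not Solr code, just arithmetic.
import math

def min_should_match(num_clauses, threshold=2, percent=70):
    if num_clauses <= threshold:
        return num_clauses
    return int(math.floor(num_clauses * percent / 100.0))

for n in (1, 2, 3, 5, 10):
    print("%2d clauses -> %d required" % (n, min_should_match(n)))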