def test_nocachefield_segments():
    # Sorting by a field facet must work across several segments: later
    # commits use merge=False so the index keeps multiple segments, and one
    # document ("echo") is deleted to exercise deletion filtering too.
    schema = fields.Schema(a=fields.ID(stored=True))
    ix = RamStorage().create_index(schema)

    writer = ix.writer()
    for word in ("bravo", "echo", "juliet"):
        writer.add_document(a=u(word))
    writer.commit()

    writer = ix.writer()
    for word in ("kilo", "foxtrot", "charlie"):
        writer.add_document(a=u(word))
    writer.commit(merge=False)

    writer = ix.writer()
    writer.delete_by_term("a", u("echo"))
    for word in ("alfa", "india", "delta"):
        writer.add_document(a=u(word))
    writer.commit(merge=False)

    with ix.searcher() as s:
        q = query.TermRange("a", u("bravo"), u("k"))
        facet = sorting.FieldFacet("a", reverse=True)

        # Plain range query, reverse-sorted by the field.
        r = s.search(q, sortedby=facet)
        assert [hit["a"] for hit in r] == [
            "juliet", "india", "foxtrot", "delta", "charlie", "bravo"
        ]

        # AndNot: range minus the bravo/delta documents.
        mq = query.Or([query.Term("a", u("bravo")),
                       query.Term("a", u("delta"))])
        anq = query.AndNot(q, mq)
        r = s.search(anq, sortedby=facet)
        assert [hit["a"] for hit in r] == ["juliet", "india", "foxtrot",
                                          "charlie"]

        # Same exclusion expressed via the mask= argument.
        mq = query.Or([query.Term("a", u("bravo")),
                       query.Term("a", u("delta"))])
        r = s.search(q, mask=mq, sortedby=facet)
        assert [hit["a"] for hit in r] == ["juliet", "india", "foxtrot",
                                          "charlie"]

        # Positive filter: note "echo" was deleted, so only three remain.
        fq = query.Or([query.Term("a", u("alfa")),
                       query.Term("a", u("charlie")),
                       query.Term("a", u("echo")),
                       query.Term("a", u("india"))])
        r = s.search(query.Every(), filter=fq, sortedby=facet)
        assert [hit["a"] for hit in r] == ["india", "charlie", "alfa"]

        # Negative filter built with Not.
        nq = query.Not(query.Or([query.Term("a", u("alfa")),
                                 query.Term("a", u("india"))]))
        r = s.search(query.Every(), filter=nq, sortedby=facet)
        assert [hit["a"] for hit in r] == [
            "kilo", "juliet", "foxtrot", "delta", "charlie", "bravo"
        ]
def query_pre_process(self, query_parameters, context=None):
    """Inject per-user security filters into the search parameters.

    One sub-query is built per permission rule; rules are split into
    allowed and denied groups and passed to ``update_security_filter``.
    Does nothing when the component is disabled.

    NOTE(review): ``context`` is dereferenced unconditionally below, so
    despite the ``None`` default it must be supplied — confirm callers.
    """
    if not self.enabled:
        return

    allowed, denied = [], []
    permissions = self.get_user_permissions(context.req.authname)
    for product, doc_type, doc_id, perm, is_denied in permissions:
        clauses = []
        # Product scoping: a concrete product matches by term; the global
        # scope matches documents that have no product field at all.
        if product:
            clauses.append(query.Term(IndexFields.PRODUCT, product))
        else:
            clauses.append(query.Not(query.Every(IndexFields.PRODUCT)))
        # '*' wildcards mean "any type" / "any id" — no clause needed.
        if doc_type != '*':
            clauses.append(query.Term(IndexFields.TYPE, doc_type))
        if doc_id != '*':
            clauses.append(query.Term(IndexFields.ID, doc_id))
        clauses.append(query.Term(IndexFields.REQUIRED_PERMISSION, perm))

        spec = query.And(clauses)
        (denied if is_denied else allowed).append(spec)

    self.update_security_filter(query_parameters, allowed, denied)
def test_or_nots1():
    # Regression test for issue #285: an Or() whose children are all
    # Not() queries must still match when combined under And().
    schema = fields.Schema(a=fields.KEYWORD(stored=True),
                           b=fields.KEYWORD(stored=True))
    st = RamStorage()
    ix = st.create_index(schema)

    with ix.writer() as w:
        w.add_document(a=u("alfa"), b=u("charlie"))

    with ix.searcher() as s:
        nots = query.Or([query.Not(query.Term("b", "bravo")),
                         query.Not(query.Term("b", "charlie"))])
        q = query.And([query.Term("a", "alfa"), nots])
        assert len(s.search(q)) == 1
def allowed_documents():
    # TODO: add special case handling for trac_admin and product_owner
    # Yield one And() query per (product, permission) pair the user holds.
    for product, perm in self._get_all_user_permissions(context):
        # Global-scope permissions apply to documents without a product.
        if product:
            scope = query.Term(IndexFields.PRODUCT, product)
        else:
            scope = query.Not(query.Every(IndexFields.PRODUCT))
        yield query.And([
            scope,
            query.Term(IndexFields.REQUIRED_PERMISSION, perm),
        ])
def test_can_parse_meta_keywords_that_resolve_to_meta_keywords(self):
    # "$unresolved" expands to the negation of the resolved/closed statuses.
    expected = query.Not(query.Or([
        query.Term('status', 'resolved'),
        query.Term('status', 'closed'),
    ]))
    self.assertEqual(self.parser.parse("$unresolved"), expected)
def test_multireader_not():
    # "term AND NOT same-term" must match nothing, both in a single-segment
    # index and in a multi-segment one (commits with merge=False).
    schema = fields.Schema(id=fields.STORED, f=fields.TEXT)

    # Single segment.
    ix = RamStorage().create_index(schema)
    w = ix.writer()
    w.add_document(id=0, f=u("alfa bravo chralie"))
    w.add_document(id=1, f=u("bravo chralie delta"))
    w.add_document(id=2, f=u("charlie delta echo"))
    w.add_document(id=3, f=u("delta echo foxtrot"))
    w.add_document(id=4, f=u("echo foxtrot golf"))
    w.commit()
    with ix.searcher() as s:
        q = query.And([query.Term("f", "delta"),
                       query.Not(query.Term("f", "delta"))])
        assert_equal(len(s.search(q)), 0)

    # Multiple segments.
    ix = RamStorage().create_index(schema)
    w = ix.writer()
    w.add_document(id=5, f=u("alfa bravo chralie"))
    w.add_document(id=6, f=u("bravo chralie delta"))
    w.commit(merge=False)
    w = ix.writer()
    w.add_document(id=7, f=u("charlie delta echo"))
    w.add_document(id=8, f=u("delta echo foxtrot"))
    w.commit(merge=False)
    w = ix.writer()
    w.add_document(id=9, f=u("echo foxtrot golf"))
    w.add_document(id=10, f=u("foxtrot golf delta"))
    w.commit(merge=False)
    assert len(ix._segments()) > 1

    with ix.searcher() as s:
        q = query.And([query.Term("f", "delta"),
                       query.Not(query.Term("f", "delta"))])
        assert_equal(len(s.search(q)), 0)
def test_can_parse_complex_query(self):
    # Free text, a type keyword and a status meta keyword combine with And.
    expected = query.And([
        query.Term('content', 'test'),
        query.Term('type', 'ticket'),
        query.Not(query.Or([
            query.Term('status', 'resolved'),
            query.Term('status', 'closed'),
        ])),
    ])
    self.assertEqual(self.parser.parse("content:test $ticket $unresolved"),
                     expected)
def query(self, parser):
    """Return the negation of this group's single sub-token query."""
    # A NOT group wraps exactly one token.
    assert len(self.tokens) == 1
    return query.Not(self.tokens[0].query(parser))
def test_can_parse_NOT_keyword_ticket(self):
    # The explicit NOT operator negates the following meta keyword.
    parsed = self.parser.parse("NOT $ticket")
    self.assertEqual(parsed, query.Not(query.Term('type', 'ticket')))
def make_not(self, q):
    """Wrap *q* in a Not query."""
    return query.Not(q)
def test_not():
    # A Not clause inside an Or narrows the matches down to A and E.
    q = query.Or([
        query.Term("value", u("red")),
        query.Term("name", u("yellow")),
        query.Not(query.Term("name", u("quick"))),
    ])
    _run_query(q, [u("A"), u("E")])
def test_topnot():
    # Top-level Not queries: every document except the matching ones.
    # Consistency fix: the original mixed bare str literals ("red", "C",
    # "E", "yellow") with u()-wrapped ones; wrap them all with u() like the
    # sibling tests so term values and expected ids are uniformly unicode.
    _run_query(query.Not(query.Term("value", u("red"))),
               [u("B"), u("C"), u("E")])
    _run_query(query.Not(query.Term("name", u("yellow"))),
               [u("B"), u("C"), u("D")])