def testIndexObject(self):
     """Indexing a Foo object should emit the canonical add request."""
     # queue a canned add response on a faked connection
     reply = getData('add_response.txt')
     wire = fakehttp(self.mngr.getConnection(), reply)
     # the index call pushes the document data over the wire
     self.proc.index(Foo(id='500', name='python test doc'))
     expected = getData('add_request.txt')
     self.assertEqual(sortFields(str(wire)), expected)
 def testPartialIndexObject(self):
     """Indexing a subset of attributes still sends all schema fields
     until partial updates (SOLR-139) are supported.

     Uses ``assertTrue`` instead of the deprecated ``assert_`` alias
     (removed in Python 3.12).
     """
     foo = Foo(id='500', name='foo', price=42.0)
     # first index all attributes...
     response = getData('add_response.txt')
     output = fakehttp(self.mngr.getConnection(), response)
     self.proc.index(foo)
     self.assertTrue(str(output).find(
         '<field name="price">42.0</field>') > 0, '"price" data not found')
     # then only a subset...
     response = getData('add_response.txt')
     output = fakehttp(self.mngr.getConnection(), response)
     self.proc.index(foo, attributes=['id', 'name'])
     output = str(output)
     self.assertTrue(
         output.find('<field name="name">foo</field>') > 0,
         '"name" data not found'
     )
     # at this point we'd normally check for a partial update:
     #   self.assertEqual(output.find('price'), -1, '"price" data found?')
     #   self.assertEqual(output.find('42'), -1, '"price" data found?')
     # however, until SOLR-139 has been implemented (re)index operations
     # always need to provide data for all attributes in the schema...
     self.assertTrue(
         output.find('<field name="price">42.0</field>') > 0,
         '"price" data not found'
     )
 def test_commit_no_wait(self):
     """A commit with both wait flags disabled sends the expected request.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual`` instead.
     """
     commit_request = getData('commit_request_no_wait.txt')
     commit_response = getData('commit_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, commit_response)
     c.commit(waitFlush=False, waitSearcher=False)
     self.assertEqual(str(output), commit_request)
 def testUnindexObject(self):
     """Unindexing an object issues the expected delete request."""
     # arrange a canned delete response on the faked connection
     reply = getData('delete_response.txt')
     wire = fakehttp(self.mngr.getConnection(), reply)
     # unindexing pushes the delete over the wire
     self.proc.unindex(Foo(id='500', name='python test doc'))
     self.assertEqual(str(wire), getData('delete_request.txt'))
Example #5
0
 def test_optimize(self):
     """Committing with ``optimize=True`` sends an optimize request.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual`` instead.
     """
     commit_request = getData("optimize_request.txt").rstrip(b"\n")
     commit_response = getData("commit_response.txt")
     c = SolrConnection(host="localhost:8983", persistent=True)
     output = fakehttp(c, commit_response)
     c.commit(optimize=True)
     self.assertEqual(str(output), commit_request.decode("utf-8"))
Example #6
0
 def test_commit_no_wait(self):
     """Commit with ``waitFlush``/``waitSearcher`` off sends the no-wait
     request body.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual`` instead.
     """
     commit_request = getData('commit_request_no_wait.txt')
     commit_response = getData('commit_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, commit_response)
     c.commit(waitFlush=False, waitSearcher=False)
     self.assertEqual(str(output), commit_request)
 def testUnindexObject(self):
     """Removing a document produces the canonical delete request."""
     # fake the delete response so no real HTTP traffic happens
     captured = fakehttp(self.mngr.getConnection(),
                         getData('delete_response.txt'))
     self.proc.unindex(Foo(id='500', name='python test doc'))
     expected = getData('delete_request.txt')
     self.assertEqual(str(captured), expected)
 def testCommit(self):
     """Committing sends the expected commit request."""
     # prime the fake connection with a canned commit response
     wire = fakehttp(self.mngr.getConnection(),
                     getData('commit_response.txt'))
     self.proc.commit()
     self.assertEqual(str(wire), getData('commit_request.txt'))
 def testCommit(self):
     """A commit on the processor emits the canonical request body."""
     reply = getData('commit_response.txt')
     # fake response so committing stays offline
     captured = fakehttp(self.mngr.getConnection(), reply)
     self.proc.commit()
     expected = getData('commit_request.txt')
     self.assertEqual(str(captured), expected)
Example #10
0
    def test_add(self):
        """Adding a document sends the expected add request and parses
        the response status/QTime.

        Replaces the deprecated ``failUnlessEqual`` alias (removed in
        Python 3.12) with ``assertEqual`` and drops the dead trailing
        ``res.find('QTime')`` statement whose result was discarded.
        """
        config = getConfig()
        config.atomic_updates = True
        add_request = getData('add_request.txt')
        add_response = getData('add_response.txt')

        c = SolrConnection(host='localhost:8983', persistent=True)

        # fake schema response - caches the schema
        fakehttp(c, getData('schema.xml'))
        c.get_schema()

        output = fakehttp(c, add_response)
        c.add(id='500', name='python test doc')
        res = c.flush()
        self.assertEqual(len(res), 1)   # one request was sent
        res = res[0]
        self.assertEqual(str(output), add_request)
        # Status
        node = res.findall(".//int")[0]
        self.assertEqual(node.attrib['name'], 'status')
        self.assertEqual(node.text, '0')
        # QTime
        node = res.findall(".//int")[1]
        self.assertEqual(node.attrib['name'], 'QTime')
        self.assertEqual(node.text, '4')
Example #11
0
    def test_add(self):
        """Adding a document sends the add request and parses the
        response (bytes fixtures).

        ``failUnlessEqual`` is a deprecated alias removed in Python 3.12
        (use ``assertEqual``); the dead trailing ``res.find("QTime")``
        statement is dropped.
        """
        config = getConfig()
        config.atomic_updates = True
        add_request = getData("add_request.txt").rstrip(b"\n")
        add_response = getData("add_response.txt")

        c = SolrConnection(host="localhost:8983", persistent=True)

        # fake schema response - caches the schema
        fakehttp(c, getData("schema.xml"))
        c.get_schema()

        output = fakehttp(c, add_response)
        c.add(id="500", name="python test doc")
        res = c.flush()
        self.assertEqual(len(res), 1)  # one request was sent
        res = res[0]
        self.assertEqual(str(output), add_request.decode("utf-8"))
        # Status
        node = res.findall(".//int")[0]
        self.assertEqual(node.attrib["name"], "status")
        self.assertEqual(node.text, "0")
        # QTime
        node = res.findall(".//int")[1]
        self.assertEqual(node.attrib["name"], "QTime")
        self.assertEqual(node.text, "4")
 def testIndexObject(self):
     """Indexing a Foo document yields the canonical add request."""
     conn = self.mngr.getConnection()
     # fake add response keeps the test offline
     wire = fakehttp(conn, getData('add_response.txt'))
     doc = Foo(id='500', name='python test doc')
     self.proc.index(doc)  # indexing sends data
     self.assertEqual(sortFields(str(wire)), getData('add_request.txt'))
Example #13
0
    def test_search(self):
        """Search passes its parameters through to Solr unchanged.

        ``failUnless`` is a deprecated alias removed in Python 3.12; use
        ``assertTrue``.  The lambda assignment is replaced by a ``def``
        (PEP 8 E731).
        """
        # XXX: Solr 7 has a new query param 'q.op' which can not be passed to
        # the search method in Python.
        # This is why we have commented out code here.
        search_request = getData("search_request.txt").rstrip(b"\n")
        search_request_py2 = getData("search_request_py2.txt").rstrip(b"\n")
        search_response = getData("search_response.txt")
        c = SolrConnection(host="localhost:8983", persistent=True)
        output = fakehttp(c, search_response)
        parameters = {
            "q": "+id:[* TO *]",
            "fl": "* score",
            "wt": "xml",
            "rows": "10",
            "indent": "on",
            "q.op": "AND",
            "lowercaseOperators": "true",
            "sow": "true",
        }

        res = c.search(**parameters)
        res = fromstring(res.read())

        def normalize(query):
            # sort request params for order-independent comparison
            return sorted(query.split(b"&"))

        self.assertIn(
            normalize(output.get()),
            [normalize(search_request),
             normalize(search_request_py2)],
        )
        self.assertTrue(res.find((".//doc")))
 def testPartialIndexObject(self):
     """Partial indexing still sends all schema fields (SOLR-139).

     ``assert_`` is a deprecated alias removed in Python 3.12; use
     ``assertTrue``.
     """
     foo = Foo(id='500', name='foo', price=42.0)
     # first index all attributes...
     response = getData('add_response.txt')
     output = fakehttp(self.mngr.getConnection(), response)
     self.proc.index(foo)
     self.assertTrue(
         str(output).find('<field name="price">42.0</field>') > 0,
         '"price" data not found')
     # then only a subset...
     response = getData('add_response.txt')
     output = fakehttp(self.mngr.getConnection(), response)
     self.proc.index(foo, attributes=['id', 'name'])
     output = str(output)
     self.assertTrue(
         output.find('<field name="name">foo</field>') > 0,
         '"name" data not found')
     # at this point we'd normally check for a partial update:
     #   self.assertEqual(output.find('price'), -1, '"price" data found?')
     #   self.assertEqual(output.find('42'), -1, '"price" data found?')
     # however, until SOLR-139 has been implemented (re)index operations
     # always need to provide data for all attributes in the schema...
     self.assertTrue(
         output.find('<field name="price">42.0</field>') > 0,
         '"price" data not found')
 def test_optimize(self):
     """Commit with ``optimize=True`` sends the optimize request.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual``.
     """
     commit_request = getData('optimize_request.txt')
     commit_response = getData('commit_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, commit_response)
     c.commit(optimize=True)
     self.assertEqual(str(output), commit_request)
Example #16
0
 def test_optimize(self):
     """An optimize commit emits the canonical optimize request body.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual``.
     """
     commit_request = getData('optimize_request.txt')
     commit_response = getData('commit_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, commit_response)
     c.commit(optimize=True)
     self.assertEqual(str(output), commit_request)
Example #17
0
 def test_commit_no_wait_flush(self):
     """A plain commit sends the canonical commit request.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual``.
     """
     commit_request = getData('commit_request.txt').rstrip('\n')
     commit_response = getData('commit_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, commit_response)
     c.commit()
     self.assertEqual(str(output), commit_request)
Example #18
0
    def test_add(self):
        """Adding a document sends the add request and parses the response.

        ``failUnlessEqual`` is a deprecated alias removed in Python 3.12
        (use ``assertEqual``); the dead trailing ``res.find('QTime')``
        statement is dropped.
        """
        config = getConfig()
        config.atomic_updates = True
        add_request = getData('add_request.txt').rstrip('\n')
        add_response = getData('add_response.txt')

        c = SolrConnection(host='localhost:8983', persistent=True)

        # fake schema response - caches the schema
        fakehttp(c, getData('schema.xml'))
        c.get_schema()

        output = fakehttp(c, add_response)
        c.add(id='500', name='python test doc')
        res = c.flush()
        self.assertEqual(len(res), 1)   # one request was sent
        res = res[0]
        self.assertEqual(str(output), add_request)
        # Status
        node = res.findall(".//int")[0]
        self.assertEqual(node.attrib['name'], 'status')
        self.assertEqual(node.text, '0')
        # QTime
        node = res.findall(".//int")[1]
        self.assertEqual(node.attrib['name'], 'QTime')
        self.assertEqual(node.text, '4')
Example #19
0
 def test_commit_no_wait_searcher(self):
     """Commit with ``waitSearcher=False`` sends the matching request.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual``.
     """
     commit_request = getData("commit_request_no_wait_searcher.txt").rstrip(
         b"\n")
     commit_response = getData("commit_response.txt")
     c = SolrConnection(host="localhost:8983", persistent=True)
     output = fakehttp(c, commit_response)
     c.commit(waitSearcher=False)
     self.assertEqual(str(output), commit_request.decode("utf-8"))
 def testIndexAccessorRaises(self):
     """An attribute accessor that raises must not break indexing."""
     # fake add response
     wire = fakehttp(self.mngr.getConnection(),
                     getData('add_response.txt'))

     def brokenfunc():
         raise ValueError

     # indexing still sends the remaining data
     self.proc.index(Foo(id='500', name='python test doc',
                         text=brokenfunc))
     self.assertEqual(sortFields(str(wire)), getData('add_request.txt'))
 def testCommit(self):
     """Committing emits the canonical commit request (bytes fixture)."""
     # queue a canned commit response
     wire = fakehttp(self.mngr.getConnection(),
                     getData("commit_response.txt"))
     self.proc.commit()  # committing sends data
     expected = getData("commit_request.txt").decode("utf-8").rstrip("\n")
     self.assertEqual(str(wire), expected)
 def testUnindexObject(self):
     """Unindexing emits the canonical delete request (bytes fixture)."""
     # fake response keeps the delete offline
     wire = fakehttp(self.mngr.getConnection(),
                     getData("delete_response.txt"))
     self.proc.unindex(Foo(id="500", name="python test doc"))
     expected = getData("delete_request.txt").decode("utf-8").rstrip("\n")
     self.assertEqual(str(wire), expected)
Example #23
0
 def test_search(self):
     """A basic search issues the expected request and returns docs.

     ``failUnlessEqual``/``failUnless`` are deprecated aliases removed
     in Python 3.12; use ``assertEqual``/``assertTrue``.
     """
     search_request = getData('search_request.txt')
     search_response = getData('search_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, search_response)
     res = c.search(q='+id:[* TO *]', wt='xml', rows='10', indent='on')
     res = fromstring(res.read())
     self.assertEqual(str(output), search_request)
     self.assertTrue(res.find(('.//doc')))
Example #24
0
 def test_search(self):
     """Searching sends the request verbatim and parses the docs.

     ``failUnlessEqual``/``failUnless`` are deprecated aliases removed
     in Python 3.12; use ``assertEqual``/``assertTrue``.
     """
     search_request = getData('search_request.txt')
     search_response = getData('search_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, search_response)
     res = c.search(q='+id:[* TO *]', wt='xml', rows='10', indent='on')
     res = fromstring(res.read())
     self.assertEqual(str(output), search_request)
     self.assertTrue(res.find(('.//doc')))
 def testIndexCatalogAwareObject(self):
     """Check that not only CMFCatalogAware objects are indexed but also
     CatalogAware ones (i.e comments).
     """
     # fake add response keeps indexing offline
     wire = fakehttp(self.mngr.getConnection(),
                     getData('add_response.txt'))
     comment = Comentish(id='500', name='python test doc')
     self.proc.index(comment)  # indexing sends data
     self.assertEqual(sortFields(str(wire)), getData('add_request.txt'))
 def testTwoRequests(self):
     """Indexing over a fresh manager issues schema fetch plus add.

     ``failUnless`` is a deprecated alias removed in Python 3.12; use
     ``assertTrue``.
     """
     mngr = SolrConnectionManager(active=True)
     proc = SolrIndexProcessor(mngr)
     output = fakehttp(mngr.getConnection(), getData('schema.xml'),
                       getData('add_response.txt'))
     proc.index(self.foo)
     mngr.closeConnection()
     self.assertEqual(len(output), 2)
     self.assertTrue(output.get().startswith(self.schema_request))
     self.assertEqual(sortFields(output.get()), getData('add_request.txt'))
 def testIndexObject(self):
     """Indexing sends the canonical add request (bytes fixtures)."""
     # fake add response
     wire = fakehttp(self.mngr.getConnection(),
                     getData("add_response.txt"))
     # indexing sends data
     self.proc.index(Foo(id="500", name="python test doc"))
     self.assertEqual(
         sortFields(str(wire).encode("utf-8")),
         getData("add_request.txt").rstrip(b"\n"),
     )
Example #28
0
 def test_search(self):
     """Search with field list sends the expected request params.

     ``failUnless`` is a deprecated alias removed in Python 3.12 (use
     ``assertTrue``); the lambda assignment becomes a ``def`` (PEP 8
     E731) and the overlong search call is wrapped.
     """
     search_request = getData('search_request.txt')
     search_response = getData('search_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, search_response)
     res = c.search(
         q='+id:[* TO *]', fl='* score', wt='xml', rows='10', indent='on')
     res = fromstring(res.read())

     def normalize(query):
         # sort request params for order-independent comparison
         return sorted(query.split('&'))

     self.assertEqual(normalize(output.get()), normalize(search_request))
     self.assertTrue(res.find(('.//doc')))
 def testTwoRequests(self):
     """A fresh manager first fetches the schema, then sends the add.

     ``failUnless`` is a deprecated alias removed in Python 3.12; use
     ``assertTrue``.
     """
     mngr = SolrConnectionManager(active=True)
     proc = SolrIndexProcessor(mngr)
     output = fakehttp(mngr.getConnection(), getData('schema.xml'),
                       getData('add_response.txt'))
     proc.index(self.foo)
     mngr.closeConnection()
     self.assertEqual(len(output), 2)
     self.assertTrue(output.get().startswith(self.schema_request))
     self.assertEqual(sortFields(output.get()), getData('add_request.txt'))
 def testUnindexingWithUniqueKeyMissing(self):
     """Without a unique key in the schema, unindexing is skipped."""
     fakehttp(self.conn, getData('simple_schema.xml'))  # fake schema response
     self.mngr.getSchema()  # read and cache the schema
     # fake delete response
     wire = fakehttp(self.conn, getData('delete_response.txt'))
     foo = Foo(id='500', name='foo')
     self.proc.unindex(foo)  # unindexing sends data
     # nothing happened...
     self.assertEqual(len(wire), 0)
     self.assertEqual(self.log, [
         'schema is missing unique key, skipping unindexing of %r', foo])
 def testIndexObject(self):
     """Reindexing is queued and only flushed to Solr on commit.

     Drops the dead ``output = []`` assignment (immediately
     overwritten) and replaces the deprecated ``assert_`` alias
     (removed in Python 3.12) with ``assertTrue``.
     """
     connection = self.proc.getConnection()
     responses = getData('add_response.txt'), getData('commit_response.txt')
     output = fakehttp(connection, *responses)           # fake responses
     self.folder.processForm(values={'title': 'Foo'})    # updating sends
     self.assertEqual(self.folder.Title(), 'Foo')
     self.assertEqual(str(output), '', 'reindexed unqueued!')
     commit()                        # indexing happens on commit
     required = '<field name="Title">Foo</field>'
     self.assertTrue(str(output).find(required) > 0, '"title" data not found')
Example #32
0
 def test_add_with_boost_values(self):
     """Boost values are serialized into the add request.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual``.
     """
     add_request = getData('add_request_with_boost_values.txt')
     add_response = getData('add_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, add_response)
     boost = {'': 2, 'id': 0.5, 'name': 5}
     c.add(boost_values=boost, id='500', name='python test doc')
     res = c.flush()
     self.assertEqual(len(res), 1)   # one request was sent
     res = res[0]
     self.assertEqual(str(output), add_request)
 def test_add_with_boost_values(self):
     """Document and field boosts end up in the add request body.

     ``failUnlessEqual`` is a deprecated alias removed in Python 3.12;
     use ``assertEqual``.
     """
     add_request = getData('add_request_with_boost_values.txt')
     add_response = getData('add_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, add_response)
     boost = {'': 2, 'id': 0.5, 'name': 5}
     c.add(boost_values=boost, id='500', name='python test doc')
     res = c.flush()
     self.assertEqual(len(res), 1)   # one request was sent
     res = res[0]
     self.assertEqual(str(output), add_request)
 def test_search(self):
     """Search with field list issues the expected request params.

     ``failUnless`` is a deprecated alias removed in Python 3.12 (use
     ``assertTrue``); the lambda assignment becomes a ``def`` (PEP 8
     E731).
     """
     search_request = getData('search_request.txt')
     search_response = getData('search_response.txt')
     c = SolrConnection(host='localhost:8983', persistent=True)
     output = fakehttp(c, search_response)
     res = c.search(
         q='+id:[* TO *]', fl='* score', wt='xml', rows='10', indent='on')
     res = fromstring(res.read())

     def normalize(query):
         # sort request params for order-independent comparison
         return sorted(query.split('&'))

     self.assertEqual(normalize(output.get()), normalize(search_request))
     self.assertTrue(res.find(('.//doc')))
 def testIndexObject(self):
     """Reindexing is queued and only hits Solr on commit.

     Drops the dead ``output = []`` assignment and replaces the
     deprecated ``assert_`` alias (removed in Python 3.12) with
     ``assertTrue``.
     """
     connection = self.proc.getConnection()
     responses = getData('add_response.txt'), getData('commit_response.txt')
     output = fakehttp(connection, *responses)  # fake responses
     self.folder.processForm(values={'title': 'Foo'})  # updating sends
     self.assertEqual(self.folder.Title(), 'Foo')
     self.assertEqual(str(output), '', 'reindexed unqueued!')
     commit()  # indexing happens on commit
     required = '<field name="Title">Foo</field>'
     self.assertTrue(str(output).find(required) > 0, '"title" data not found')
    def testLocalConnections(self):
        """Connections are thread-local: a worker thread gets its own
        connection while sharing the same index processor.

        ``failUnless`` is a deprecated alias removed in Python 3.12; the
        isinstance checks now use ``assertIsInstance``.
        """
        config = getConfig()
        config.atomic_updates = True
        mngr = SolrConnectionManager(active=True)
        proc = SolrIndexProcessor(mngr)
        mngr.setHost(active=True)
        schema = getData("schema.xml")
        log = []

        def runner():
            # fake schema response on solr connection - caches the schema
            fakehttp(mngr.getConnection(), getData("schema.xml"))
            mngr.getConnection().get_schema()

            fakehttp(mngr.getConnection(), schema)  # fake schema response
            # read and cache the schema
            mngr.getSchema()
            response = getData("add_response.txt")
            # fake add response
            output = fakehttp(mngr.getConnection(), response)
            # indexing sends data
            proc.index(Foo(id="500", name="python test doc"))
            mngr.closeConnection()
            log.append(str(output))
            log.append(proc)
            log.append(mngr.getConnection())

        # after the runner was set up, another thread can be created and
        # started;  its output should contain the proper indexing request,
        # whereas the main thread's connection remain idle;  the latter
        # cannot be checked directly, but the connection object would raise
        # an exception if it was used to send a request without setting up
        # a fake response beforehand...
        thread = Thread(target=runner)
        thread.start()
        thread.join()
        conn = mngr.getConnection()  # get this thread's connection
        fakehttp(conn, schema)  # fake schema response
        mngr.getSchema()  # read and cache the schema
        mngr.closeConnection()
        mngr.setHost(active=False)
        self.assertEqual(len(log), 3)
        self.assertEqual(
            sortFields(log[0].encode("utf-8")), getData("add_request.txt").rstrip(b"\n")
        )
        self.assertIsInstance(log[1], SolrIndexProcessor)
        self.assertIsInstance(log[2], SolrConnection)
        self.assertIsInstance(proc, SolrIndexProcessor)
        self.assertIsInstance(conn, SolrConnection)
        self.assertEqual(log[1], proc)  # processors should be the same...
        self.assertNotEqual(log[2], conn)  # but not the connections
Example #37
0
 def setUp(self):
     """Prepare a fake Solr connection, cached schema and a test folder."""
     canned = (getData('plone_schema.xml'), getData('add_response.txt'),
               getData('add_response.txt'), getData('add_response.txt'),
               getData('commit_response.txt'))
     self.proc = queryUtility(ISolrConnectionManager)
     self.proc.setHost(active=True)
     conn = self.proc.getConnection()
     # fake schema response
     fakehttp(conn, *canned)
     # read and cache the schema
     self.proc.getSchema()
     self.portal = self.layer['portal']
     setRoles(self.portal, TEST_USER_ID, ['Manager'])
     self.portal.invokeFactory('Folder', id='folder')
     self.folder = self.portal.folder
     commit()
    def testLocalConnections(self):
        """Each thread gets its own Solr connection but shares the same
        index processor.

        ``failUnless`` is a deprecated alias removed in Python 3.12; the
        isinstance checks now use ``assertIsInstance``.
        """
        config = getConfig()
        config.atomic_updates = True
        mngr = SolrConnectionManager(active=True)
        proc = SolrIndexProcessor(mngr)
        mngr.setHost(active=True)
        schema = getData('schema.xml')
        log = []

        def runner():
            # fake schema response on solr connection - caches the schema
            fakehttp(mngr.getConnection(), getData('schema.xml'))
            mngr.getConnection().get_schema()

            fakehttp(mngr.getConnection(), schema)      # fake schema response
            # read and cache the schema
            mngr.getSchema()
            response = getData('add_response.txt')
            # fake add response
            output = fakehttp(mngr.getConnection(), response)
            # indexing sends data
            proc.index(Foo(id='500', name='python test doc'))
            mngr.closeConnection()
            log.append(str(output))
            log.append(proc)
            log.append(mngr.getConnection())

        # after the runner was set up, another thread can be created and
        # started;  its output should contain the proper indexing request,
        # whereas the main thread's connection remain idle;  the latter
        # cannot be checked directly, but the connection object would raise
        # an exception if it was used to send a request without setting up
        # a fake response beforehand...
        thread = Thread(target=runner)
        thread.start()
        thread.join()
        conn = mngr.getConnection()         # get this thread's connection
        fakehttp(conn, schema)              # fake schema response
        mngr.getSchema()                    # read and cache the schema
        mngr.closeConnection()
        mngr.setHost(active=False)
        self.assertEqual(len(log), 3)
        self.assertEqual(sortFields(log[0]), getData(
            'add_request.txt').rstrip('\n'))
        self.assertIsInstance(log[1], SolrIndexProcessor)
        self.assertIsInstance(log[2], SolrConnection)
        self.assertIsInstance(proc, SolrIndexProcessor)
        self.assertIsInstance(conn, SolrConnection)
        self.assertEqual(log[1], proc)      # processors should be the same...
        self.assertNotEqual(log[2], conn)   # but not the connections
    def testIndexAccessorRaises(self):
        """A raising accessor must not prevent the add request."""
        # fake add response
        wire = fakehttp(self.mngr.getConnection(),
                        getData("add_response.txt"))

        def brokenfunc():
            raise ValueError

        # indexing sends data despite the broken accessor
        self.proc.index(
            Foo(id="500", name="python test doc", text=brokenfunc)
        )
        self.assertEqual(
            sortFields(str(wire).encode("utf-8")),
            getData("add_request.txt").rstrip(b"\n"),
        )
    def testIndexObject(self):
        """Changes are queued and flushed to Solr only on commit."""
        connection = self.proc.getConnection()
        connection.get_schema()  # cache schema to avoid multiple calls

        # fake responses
        wire = fakehttp(connection,
                        getData('plone_schema.xml'),
                        getData('commit_response.txt'))
        self.folder.title = 'Foo'
        self.portal.invokeFactory('Folder', id='myfolder', title='Foo')
        self.assertEqual(str(wire), '', 'reindexed unqueued!')
        # indexing happens on commit
        commit()
        required = '<field name="Title" update="set">Foo</field>'
        self.assertTrue(str(wire).find(required) > 0,
                        '"title" data not found')
    def testIndexObject(self):
        """Updates are queued and reach Solr only when committed."""
        connection = self.proc.getConnection()
        connection.get_schema()  # cache schema to avoid multiple calls

        canned = (getData('plone_schema.xml'),
                  getData('commit_response.txt'))
        wire = fakehttp(connection, *canned)  # fake responses
        self.folder.title = 'Foo'
        self.portal.invokeFactory('Folder', id='myfolder', title='Foo')
        self.assertEqual(str(wire), '', 'reindexed unqueued!')
        commit()  # indexing happens on commit
        required = '<field name="Title" update="set">Foo</field>'
        self.assertTrue(str(wire).find(required) > 0,
                        '"title" data not found')
 def testIndexingWithUniqueKeyMissing(self):
     """Indexing is skipped when the schema lacks a unique key."""
     # fake schema response, then read and cache the schema
     fakehttp(self.conn, getData("simple_schema.xml"))
     self.mngr.getSchema()
     # fake add response
     wire = fakehttp(self.conn, getData("add_response.txt"))
     foo = Foo(id="500", name="foo")
     self.proc.index(foo)  # indexing sends data
     self.assertEqual(len(wire), 0)  # nothing happened...
     self.assertEqual(
         self.log, ["schema is missing unique key, skipping indexing of %r", foo]
     )
 def setUp(self):
     """Set up fake Solr replies, a cached schema and a test folder."""
     replies = (getData('plone_schema.xml'),
                getData('add_response.txt'),
                getData('add_response.txt'),
                getData('add_response.txt'),
                getData('commit_response.txt'))
     self.proc = queryUtility(ISolrConnectionManager)
     self.proc.setHost(active=True)
     conn = self.proc.getConnection()
     # fake schema response
     fakehttp(conn, *replies)
     # read and cache the schema
     self.proc.getSchema()
     self.portal = self.layer['portal']
     setRoles(self.portal, TEST_USER_ID, ['Manager'])
     self.portal.invokeFactory('Folder', id='folder')
     self.folder = self.portal.folder
     commit()
 def testParseDateFacetSearchResults(self):
     """Date facet counts and header params are parsed from the XML."""
     response = SolrResponse(getData('date_facet_xml_response.txt'))
     results = response.response     # the result set is named 'response'
     self.assertEqual(results.numFound, '42')
     self.assertEqual(results.start, '0')
     self.assertEqual(len(results), 0)
     headers = response.responseHeader
     self.assertEqual(type(headers), type({}))
     self.assertEqual(headers['status'], 0)
     self.assertEqual(headers['QTime'], 5)
     params = headers['params']
     expected_params = {
         'facet.date': 'timestamp',
         'facet.date.start': 'NOW/DAY-5DAYS',
         'facet.date.end': 'NOW/DAY+1DAY',
         'facet.date.gap': '+1DAY',
         'rows': '0',
         'facet': 'true',
         'indent': 'true',
         'q': '*:*',
     }
     for key, value in expected_params.items():
         self.assertEqual(params[key], value)
     counts = response.facet_counts
     self.assertEqual(type(counts), type({}))
     self.assertEqual(counts['facet_queries'], {})
     self.assertEqual(counts['facet_fields'], {})
     timestamps = counts['facet_dates']['timestamp']
     expected_counts = {
         '2007-08-11T00:00:00.000Z': 1,
         '2007-08-12T00:00:00.000Z': 5,
         '2007-08-13T00:00:00.000Z': 3,
         '2007-08-14T00:00:00.000Z': 7,
         '2007-08-15T00:00:00.000Z': 2,
         '2007-08-16T00:00:00.000Z': 16,
     }
     for stamp, count in expected_counts.items():
         self.assertEqual(timestamps[stamp], count)
     self.assertEqual(timestamps['gap'], '+1DAY')
     self.assertEqual(timestamps['end'], DateTime('2007-08-17 GMT'))
 def testParseFacetSearchResults(self):
     """Field facet counts and header params are parsed from the XML."""
     response = SolrResponse(getData('facet_xml_response.txt'))
     results = response.response     # the result set is named 'response'
     self.assertEqual(results.numFound, '1')
     self.assertEqual(results.start, '0')
     self.assertEqual(len(results), 0)
     headers = response.responseHeader
     self.assertEqual(type(headers), type({}))
     self.assertEqual(headers['status'], 0)
     self.assertEqual(headers['QTime'], 1)
     params = headers['params']
     self.assertEqual(params['facet.limit'], '-1')
     self.assertEqual(params['rows'], '0')
     self.assertEqual(params['facet'], 'true')
     self.assertEqual(params['facet.field'], ['cat', 'inStock'])
     self.assertEqual(params['indent'], '10')
     self.assertEqual(params['q'], 'solr')
     counts = response.facet_counts
     self.assertEqual(type(counts), type({}))
     self.assertEqual(counts['facet_queries'], {})
     cat = counts['facet_fields']['cat']
     expected_cat = {'electronics': 0, 'monitor': 0, 'search': 1, 'software': 1}
     for term, count in expected_cat.items():
         self.assertEqual(cat[term], count)
     self.assertEqual(counts['facet_fields']['inStock']['true'], 1)
 def testParseDateFacetSearchResults(self):
     """Date facet data is extracted from the canned XML response."""
     response = SolrResponse(getData("date_facet_xml_response.txt"))
     results = response.response  # the result set is named 'response'
     self.assertEqual(results.numFound, "42")
     self.assertEqual(results.start, "0")
     self.assertEqual(len(results), 0)
     headers = response.responseHeader
     self.assertEqual(type(headers), type({}))
     self.assertEqual(headers["status"], 0)
     self.assertEqual(headers["QTime"], 5)
     params = headers["params"]
     expected_params = {
         "facet.date": "timestamp",
         "facet.date.start": "NOW/DAY-5DAYS",
         "facet.date.end": "NOW/DAY+1DAY",
         "facet.date.gap": "+1DAY",
         "rows": "0",
         "facet": "true",
         "indent": "true",
         "q": "*:*",
     }
     for key, value in expected_params.items():
         self.assertEqual(params[key], value)
     counts = response.facet_counts
     self.assertEqual(type(counts), type({}))
     self.assertEqual(counts["facet_queries"], {})
     self.assertEqual(counts["facet_fields"], {})
     timestamps = counts["facet_dates"]["timestamp"]
     expected_counts = {
         "2007-08-11T00:00:00.000Z": 1,
         "2007-08-12T00:00:00.000Z": 5,
         "2007-08-13T00:00:00.000Z": 3,
         "2007-08-14T00:00:00.000Z": 7,
         "2007-08-15T00:00:00.000Z": 2,
         "2007-08-16T00:00:00.000Z": 16,
     }
     for stamp, count in expected_counts.items():
         self.assertEqual(timestamps[stamp], count)
     self.assertEqual(timestamps["gap"], "+1DAY")
     # compare ISO strings rather than DateTime objects directly
     self.assertEqual(timestamps["end"].ISO8601(), DateTime("2007-08-17 GMT").ISO8601())
 def testParseFacetSearchResults(self):
     """A field-faceted response is parsed into header, params and counts."""
     raw = getData("facet_xml_response.txt")
     parsed = SolrResponse(raw)
     matches = parsed.response  # the result set is named 'response'
     self.assertEqual(matches.numFound, "1")
     self.assertEqual(matches.start, "0")
     self.assertEqual(len(matches), 0)
     header = parsed.responseHeader
     self.assertEqual(type(header), type({}))
     self.assertEqual(header["status"], 0)
     self.assertEqual(header["QTime"], 1)
     params = header["params"]
     self.assertEqual(params["facet.limit"], "-1")
     self.assertEqual(params["rows"], "0")
     self.assertEqual(params["facet"], "true")
     self.assertEqual(params["facet.field"], ["cat", "inStock"])
     self.assertEqual(params["indent"], "10")
     self.assertEqual(params["q"], "solr")
     counts = parsed.facet_counts
     self.assertEqual(type(counts), type({}))
     self.assertEqual(counts["facet_queries"], {})
     cat_counts = counts["facet_fields"]["cat"]
     self.assertEqual(cat_counts["electronics"], 0)
     self.assertEqual(cat_counts["monitor"], 0)
     self.assertEqual(cat_counts["search"], 1)
     self.assertEqual(cat_counts["software"], 1)
     self.assertEqual(counts["facet_fields"]["inStock"]["true"], 1)
 def testParseComplexSearchResults(self):
     """A multi-document response yields typed field values on each result."""
     # NOTE(review): a later method of the same name in this class shadows
     # this definition, so unittest never runs it; consider removing one.
     raw = getData("complex_xml_response.txt")
     parsed = SolrResponse(raw)
     matches = parsed.response  # the result set is named 'response'
     self.assertEqual(matches.numFound, "2")
     self.assertEqual(matches.start, "0")
     self.assertEqual(len(matches), 2)
     doc = matches[0]
     self.assertEqual(doc.cat, ["software", "search"])
     self.assertEqual(len(doc.features), 7)
     feature_types = [type(value).__name__ for value in doc.features]
     self.assertEqual(feature_types, ["str"] * 6 + ["unicode"])
     self.assertEqual(doc.id, "SOLR1000")
     self.assertEqual(doc.inStock, True)
     self.assertEqual(
         doc.incubationdate_dt.ISO8601(),
         DateTime("2006/01/17 GMT").ISO8601(),
     )
     self.assertEqual(doc.manu, "Apache Software Foundation")
     self.assertEqual(doc.popularity, 10)
     self.assertEqual(doc.price, 0.0)
     header = parsed.responseHeader
     self.assertEqual(header["status"], 0)
     self.assertEqual(header["QTime"], 0)
     params = header["params"]
     self.assertEqual(params["indent"], "on")
     self.assertEqual(params["rows"], "10")
     self.assertEqual(params["start"], "0")
     self.assertEqual(params["q"], "id:[* TO *]")
     self.assertEqual(params["version"], "2.2")
 def testParseConfig(self):
     """The parsed schema.xml exposes field definitions and their flags."""
     # NOTE(review): a later method of the same name in this class shadows
     # this definition, so unittest never runs it; consider removing one.
     schema_xml = getData("schema.xml")
     schema = SolrSchema(schema_xml.split("\n\n", 1)[1])
     self.assertEqual(len(schema), 21)  # 21 items defined in schema.xml
     self.assertEqual(schema["defaultSearchField"], "text")
     self.assertEqual(schema["uniqueKey"], "id")
     self.assertEqual(schema["solrQueryParser"].defaultOperator, "OR")
     self.assertEqual(schema["requiredFields"], ["id", "name"])
     id_field = schema["id"]
     self.assertEqual(id_field.type, "string")
     self.assertEqual(id_field.class_, "solr.StrField")
     self.assertEqual(id_field.required, True)
     self.assertEqual(id_field.omitNorms, True)
     self.assertEqual(id_field.multiValued, False)
     cat_field = schema["cat"]
     self.assertEqual(cat_field.class_, "solr.TextField")
     self.assertEqual(cat_field.required, False)
     self.assertEqual(cat_field.multiValued, True)
     self.assertEqual(cat_field.termVectors, True)
     self.assertEqual(schema["sku"].positionIncrementGap, "100")
     self.assertEqual(schema.features.multiValued, True)
     self.assertEqual(schema.weight.class_, "solr.SortableFloatField")
     self.assertEqual(schema.popularity.class_, "solr.SortableIntField")
     self.assertEqual(schema.inStock.class_, "solr.BoolField")
     stamp_field = schema.timestamp
     self.assertEqual(stamp_field.class_, "solr.DateField")
     self.assertEqual(stamp_field.default, "NOW")
     self.assertEqual(stamp_field.multiValued, False)
     self.assertEqual(stamp_field.indexed, True)
     self.assertEqual(schema.word.indexed, False)
     fields = schema.values()
     required = [f for f in fields if getattr(f, "required", False)]
     self.assertEqual(len(required), 2)
     multivalued = [f for f in fields if getattr(f, "multiValued", False)]
     self.assertEqual(len(multivalued), 3)
 def testExtraRequest(self):
     """Fake responses queued consecutively are consumed in request order.

     Basically the same as `testThreeRequests`, except the fake responses
     are added one by one via `fakemore` instead of all up front.
     """
     manager = SolrConnectionManager(active=True)
     processor = SolrIndexProcessor(manager)
     connection = manager.getConnection()
     # first request: schema lookup
     output = fakehttp(connection, getData('schema.xml'))
     # second request: add
     fakemore(connection, getData('add_response.txt'))
     processor.index(self.foo)
     # third request: delete
     fakemore(connection, getData('delete_response.txt'))
     processor.unindex(self.foo)
     manager.closeConnection()
     self.assertEqual(len(output), 3)
     self.failUnless(output.get().startswith(self.schema_request))
     self.assertEqual(sortFields(output.get()), getData('add_request.txt'))
     self.assertEqual(output.get(), getData('delete_request.txt'))
 def testNoIndexingWithoutAllRequiredFields(self):
     """An object missing required schema fields sends nothing to Solr."""
     # fake add response
     output = fakehttp(self.mngr.getConnection(), getData('dummy_response.txt'))
     # indexing an object that only provides 'id' should produce no request
     self.proc.index(Foo(id='500'))
     self.assertEqual(str(output), '')
 def testParseComplexSearchResults(self):
     """A multi-document response yields typed field values on each result."""
     # NOTE(review): this re-definition shadows the earlier method of the
     # same name in this class; only this version actually runs.
     raw = getData('complex_xml_response.txt')
     parsed = SolrResponse(raw)
     matches = parsed.response     # the result set is named 'response'
     self.assertEqual(matches.numFound, '2')
     self.assertEqual(matches.start, '0')
     self.assertEqual(len(matches), 2)
     doc = matches[0]
     self.assertEqual(doc.cat, ['software', 'search'])
     self.assertEqual(len(doc.features), 7)
     feature_types = [type(value).__name__ for value in doc.features]
     self.assertEqual(feature_types, ['str'] * 6 + ['unicode'])
     self.assertEqual(doc.id, 'SOLR1000')
     self.assertEqual(doc.inStock, True)
     self.assertEqual(doc.incubationdate_dt, DateTime('2006/01/17 GMT'))
     self.assertEqual(doc.manu, 'Apache Software Foundation')
     self.assertEqual(doc.popularity, 10)
     self.assertEqual(doc.price, 0.0)
     header = parsed.responseHeader
     self.assertEqual(header['status'], 0)
     self.assertEqual(header['QTime'], 0)
     params = header['params']
     self.assertEqual(params['indent'], 'on')
     self.assertEqual(params['rows'], '10')
     self.assertEqual(params['start'], '0')
     self.assertEqual(params['q'], 'id:[* TO *]')
     self.assertEqual(params['version'], '2.2')
 # Beispiel #53 ("Example #53") — pagination artifact from a code-example
 # scrape, not part of the original test module; kept as a comment so the
 # file stays importable (the bare name would raise NameError).
 # 0
 def testParseConfig(self):
     """The parsed schema.xml exposes field definitions and their flags."""
     # NOTE(review): this re-definition shadows the earlier method of the
     # same name in this class; only this version actually runs.
     schema_xml = getData('schema.xml')
     # skip the HTTP-style header before the blank line, parse the rest
     schema = SolrSchema(schema_xml.split('\n\n', 1)[1])
     self.assertEqual(len(schema), 21)  # 21 items defined in schema.xml
     self.assertEqual(schema['defaultSearchField'], 'text')
     self.assertEqual(schema['uniqueKey'], 'id')
     self.assertEqual(schema['solrQueryParser'].defaultOperator, 'OR')
     self.assertEqual(schema['requiredFields'], ['id', 'name'])
     id_field = schema['id']
     self.assertEqual(id_field.type, 'string')
     self.assertEqual(id_field.class_, 'solr.StrField')
     self.assertEqual(id_field.required, True)
     self.assertEqual(id_field.omitNorms, True)
     self.assertEqual(id_field.multiValued, False)
     cat_field = schema['cat']
     self.assertEqual(cat_field.class_, 'solr.TextField')
     self.assertEqual(cat_field.required, False)
     self.assertEqual(cat_field.multiValued, True)
     self.assertEqual(cat_field.termVectors, True)
     self.assertEqual(schema['sku'].positionIncrementGap, '100')
     self.assertEqual(schema.features.multiValued, True)
     self.assertEqual(schema.weight.class_, 'solr.SortableFloatField')
     self.assertEqual(schema.popularity.class_, 'solr.SortableIntField')
     self.assertEqual(schema.inStock.class_, 'solr.BoolField')
     stamp_field = schema.timestamp
     self.assertEqual(stamp_field.class_, 'solr.DateField')
     self.assertEqual(stamp_field.default, 'NOW')
     self.assertEqual(stamp_field.multiValued, False)
     self.assertEqual(stamp_field.indexed, True)
     self.assertEqual(schema.word.indexed, False)
     fields = schema.values()
     required = [f for f in fields if getattr(f, 'required', False)]
     self.assertEqual(len(required), 2)
     multivalued = [f for f in fields if getattr(f, 'multiValued', False)]
     self.assertEqual(len(multivalued), 3)
 # Beispiel #54 ("Example #54") — pagination artifact from a code-example
 # scrape, not part of the original test module; kept as a comment so the
 # file stays importable (the bare name would raise NameError).
 # 0
 def testDateIndexingWithPythonDateTime(self):
     """Python `datetime` values are serialized into Solr's ISO-8601 UTC form.

     Fix: the original literal `02` for the minute is an octal-style integer —
     a SyntaxError in Python 3 and discouraged by PEP 8 even in Python 2;
     `2` is the identical value in both.
     """
     foo = Foo(id='gerken', name='patrick', cat='nerd',
               timestamp=datetime(1980, 9, 29, 14, 2))
     response = getData('add_response.txt')
     output = fakehttp(self.mngr.getConnection(), response)   # fake add response
     self.proc.index(foo)
     required = '<field name="timestamp">1980-09-29T14:02:00.000Z</field>'
     self.assert_(str(output).find(required) > 0, '"date" data not found')