def test_search_limit(self, session, index, job):
    """Index 15 copies of a job document with staggered creation dates
    and verify that sorting on 'created' orders the hits correctly."""
    base_doc = job.to_document()
    base_time = base_doc['created']
    documents = []
    for day in range(15):
        entry = copy.deepcopy(base_doc)
        entry['id'] = unicode(day)
        # Higher ids are pushed further into the past, one day per step.
        entry['created'] = base_time - timedelta(days=day)
        documents.append(entry)
    index = Index()
    index.add_document_bulk(documents)
    # Ascending sort on 'created' puts the oldest (highest id) first.
    hits = index.search(job.title, sort=('created', 'asc'))
    assert [int(hit['id']) for hit in hits] == range(15)[::-1]
    # Descending sort puts the newest (lowest id) first.
    hits = index.search(job.title, sort=('created', 'desc'))
    assert [int(hit['id']) for hit in hits] == range(15)
def main(should_create, index_all, session): name = settings.SEARCH_INDEX_NAME directory = settings.SEARCH_INDEX_DIRECTORY if should_create: print blue("You've asked to (re)create index '{}'.".format(name)) IndexManager.create(Schema, name, directory) if not IndexManager.exists(name, directory): die('Search index does not exist!') index = Index() start = time.time() kwargs = {} if index_all else {'published': True} jobs = session.query(Job).filter_by(**kwargs).all() index.add_document_bulk([job.to_document() for job in jobs]) duration = time.time() - start print green("{0} documents added okay in {1:.2f} ms.".format(len(jobs), duration))
def main(should_create, index_all, session): name = settings.SEARCH_INDEX_NAME directory = settings.SEARCH_INDEX_DIRECTORY if should_create: print blue("You've asked to (re)create index '{}'.".format(name)) IndexManager.create(Schema, name, directory) if not IndexManager.exists(name, directory): die('Search index does not exist!') index = Index() start = time.time() kwargs = {} if index_all else {'published': True} jobs = session.query(Job).filter_by(**kwargs).all() index.add_document_bulk([job.to_document() for job in jobs]) duration = time.time() - start print green("{0} documents added okay in {1:.2f} ms.".format( len(jobs), duration))