def setup_class(self):
    if not is_search_supported():
        raise SkipTest("Search not supported")
    indexer = TestSearchIndexer()
    model.Session.remove()
    CreateTestData.create_search_test_data()
    # now remove a tag so we can test search with deleted tags
    model.repo.new_revision()
    gils = model.Package.by_name(u'gils')
    # an existing tag used only by gils
    self.tagname = u'registry'
    # we aren't guaranteed it is last ...
    idx = [t.name for t in gils.tags].index(self.tagname)
    del gils.tags[idx]
    model.repo.commit_and_remove()
    indexer.index()
    self.gils = model.Package.by_name(u'gils')
    self.war = model.Package.by_name(u'warandpeace')
    self.russian = model.Tag.by_name(u'russian')
    self.tolstoy = model.Tag.by_name(u'tolstoy')
    self.backend = get_backend(backend='sql')
def test_index(self): search.dispatch_by_operation("Package", {"title": "penguin"}, "new", backend=search.get_backend(backend="sql")) sql = "select search_vector from package_search where package_id='%s'" % self.anna.id vector = model.Session.execute(sql).fetchone()[0] assert "annakarenina" in vector, vector assert not "penguin" in vector, vector
def setup_class(self):
    indexer = TestSearchIndexer()
    init_data = [
        {'name': 'a', 'extras': {'department': 'abc', 'agency': 'ag-a'}},
        {'name': 'b', 'extras': {'department': 'bcd', 'agency': 'ag-b'}},
        {'name': 'c', 'extras': {'department': 'cde abc'}},
        {'name': 'none', 'extras': {'department': ''}},
    ]
    CreateTestData.create_arbitrary(init_data)
    indexer.index()
    self.backend = get_backend(backend='sql')
def setup_class(self):
    indexer = TestSearchIndexer()
    init_data = [
        {'name': u'test1-penguin-canary',
         'tags': u'canary goose squirrel wombat wombat'},
        {'name': u'test2-squirrel-squirrel-canary-goose',
         'tags': u'penguin wombat'},
    ]
    CreateTestData.create_arbitrary(init_data)
    self.pkg_names = [u'test1-penguin-canary',
                      u'test2-squirrel-squirrel-canary-goose']
    indexer.index()
    self.backend = get_backend(backend='sql')
def setup_class(self):
    if not is_search_supported():
        raise SkipTest("Search not supported")
    import gc
    from pylons import config

    # Force a garbage collection to trigger issue #695
    gc.collect()

    config['search_backend'] = 'sql'
    self.backend = search.get_backend()
    plugins.load('synchronous_search')
    CreateTestData.create()
def dispatch_by_operation(entity_type, entity, operation, backend=None):
    """Call the appropriate index method for a given notification."""
    if backend is None:
        from ckan.lib.search import get_backend
        backend = get_backend()
    try:
        index = backend.index_for(entity_type)
        if operation == DomainObjectOperation.new:
            index.insert_dict(entity)
        elif operation == DomainObjectOperation.changed:
            index.update_dict(entity)
        elif operation == DomainObjectOperation.deleted:
            index.remove_dict(entity)
        else:
            log.warn("Unknown operation: %s" % operation)
    except Exception, ex:
        log.exception(ex)
def setup_class(self):
    indexer = TestSearchIndexer()
    init_data = [
        {'name': 'eng',
         'extras': {'geographic_coverage': '100000: England'}},
        {'name': 'eng_ni',
         'extras': {'geographic_coverage': '100100: England, Northern Ireland'}},
        {'name': 'uk',
         'extras': {'geographic_coverage': '111100: United Kingdom (England, Scotland, Wales, Northern Ireland)'}},
        {'name': 'gb',
         'extras': {'geographic_coverage': '111000: Great Britain (England, Scotland, Wales)'}},
        {'name': 'none',
         'extras': {'geographic_coverage': '000000:'}},
    ]
    CreateTestData.create_arbitrary(init_data)
    indexer.index()
    self.backend = get_backend(backend='sql')
def setup_class(self):
    if not is_search_supported():
        raise SkipTest("Search not supported")
    self.ab = 'http://site.com/a/b.txt'
    self.cd = 'http://site.com/c/d.txt'
    self.ef = 'http://site.com/e/f.txt'
    self.pkgs = [
        {'name': 'pkg1',
         'resources': [
             {'url': self.ab,
              'description': 'This is site ab.',
              'format': 'Excel spreadsheet',
              'hash': 'abc-123',
              'alt_url': 'alt1',
              'extras': {'size': '100'}},
             {'url': self.cd,
              'description': 'This is site cd.',
              'format': 'Office spreadsheet',
              'hash': 'qwe-456',
              'alt_url': 'alt2',
              'extras': {'size': '200'}},
         ]},
        {'name': 'pkg2',
         'resources': [
             {'url': self.cd, 'alt_url': 'alt1',
              'description': 'This is site cd.'},
             {'url': self.ef, 'description': 'This is site ef.'},
             {'url': self.ef, 'description': 'This is site gh.'},
             {'url': self.ef, 'description': 'This is site ij.'},
         ]},
    ]
    CreateTestData.create_arbitrary(self.pkgs)
    self.backend = get_backend(backend='sql')
def setup_class(self):
    indexer = TestSearchIndexer()
    CreateTestData.create()
    indexer.index()
    self.backend = get_backend(backend='sql')