def test_uses_library_driver_backend(self):
    """The backend class tracks the library's database driver when no
    search service is configured."""
    self._my_library.config.services.search = None

    # Each driver maps to its matching search backend.
    for driver, backend_cls in (
            ('sqlite', SQLiteSearchBackend),
            ('postgres', PostgreSQLSearchBackend)):
        self._my_library.database.driver = driver
        search = Search(self._my_library)
        self.assertIsInstance(search.backend, backend_cls)
def test_raises_missing_backend_exception_if_config_contains_invalid_backend(
        self):
    """An invalid ``services.search`` setting must raise with a
    'Missing backend' message.

    The original ``try/except`` silently passed when no exception was
    raised at all; ``assertRaises`` makes the absence of the exception
    a test failure.
    """
    with self.assertRaises(Exception) as cm:
        Search(self._my_library)
    self.assertIn('Missing backend', str(cm.exception))
def test_uses_backend_from_config(self, fake_init):
    """An explicit ``services.search`` entry selects the named backend."""
    # Skip real backend initialization so no extra mocking is needed.
    fake_init.return_value = None

    self._my_library.config.services.search = 'whoosh'
    search = Search(self._my_library)
    self.assertIsInstance(search.backend, WhooshSearchBackend)
def test_uses_default_backend_if_library_database_search_is_not_implemented(
        self, fake_init):
    """Drivers with no search implementation fall back to the Whoosh
    backend."""
    # Skip real backend initialization so no extra mocking is needed.
    fake_init.return_value = None
    self._my_library.config.services.search = None

    # mysql has no dedicated search backend, so the default applies.
    with patch.object(self._my_library.database, 'driver', 'mysql'):
        search = Search(self._my_library)
        self.assertIsInstance(search.backend, WhooshSearchBackend)
def test_indexes_library_datasets(self):
    """index_library_datasets() indexes every dataset in the library."""
    datasets = [MagicMock(spec=Dataset) for _ in range(3)]
    self._my_library.datasets = datasets

    fake_backend = MagicMock(spec=SQLiteSearchBackend)
    fake_backend.dataset_index = Mock()
    fake_backend.partition_index = Mock()
    fake_backend.identifier_index = Mock()

    search = Search(self._my_library, backend=fake_backend)
    search.index_library_datasets()

    # One index_one call per dataset.
    self.assertEqual(
        len(fake_backend.dataset_index.index_one.mock_calls),
        len(datasets))
def test_postgres_query(self):
    """Smoke-test building a dataset search query on the postgres
    backend.

    Fix: the Python-2-only ``print x`` statements were a SyntaxError
    under Python 3; the function form ``print(x)`` behaves identically
    on Python 2 for a single argument.
    """
    self._my_library.config.services.search = None

    # switch to postgres.
    self._my_library.database.driver = 'postgres'
    search = Search(self._my_library)
    self.assertIsInstance(search.backend, PostgreSQLSearchBackend)

    dataset_index = search.backend._get_dataset_index()
    q = dataset_index._make_query_from_terms(
        'source healthindicators.gov diabetes asthma')
    # NOTE(review): these look like leftover debug prints — the test
    # asserts nothing about the generated query; consider replacing
    # with real assertions once the expected SQL is pinned down.
    print(str(q[0]))
    print(q[1])
def test_feeds_tick_function_with_indexed_dataset(self):
    """The tick callback is fed a progress message per indexed dataset."""
    # prepare mocks
    fake_backend = MagicMock(spec=SQLiteSearchBackend)
    fake_backend.dataset_index = Mock()
    fake_backend.partition_index = Mock()
    fake_backend.identifier_index = Mock()

    fake_library = MagicMock(spec=Library)
    fake_library.datasets = [MagicMock(spec=Dataset)]
    tick_f = Mock()

    # run
    search = Search(fake_library, backend=fake_backend)
    search.index_library_datasets(tick_f=tick_f)

    # test
    tick_f.assert_called_once_with('datasets: 1 partitions: 0')
def __init__(self, config=None, search=None, echo=None, read_only=False):
    """Create a Library over the configured filesystem and database.

    :param config: run configuration; when None, the default from
        ``get_runconfig()`` is used.
    :param search: optional search backend name; when given, a Search
        service is created immediately.
    :param echo: SQLAlchemy echo flag, passed through to the Database.
    :param read_only: allow optimizations that assume we aren't
        building bundles.
    :raises DatabaseMissingError: if the library database cannot be
        opened.
    """
    from sqlalchemy.exc import OperationalError
    from ambry.orm.exc import DatabaseMissingError

    self._config = config if config else get_runconfig()

    self.logger = logger
    self.read_only = read_only
    self._echo = echo

    # BUG FIX: build the filesystem from the resolved configuration.
    # Previously the raw ``config`` argument was used, so a
    # default-constructed Library (config=None) handed None to
    # LibraryFilesystem instead of the run configuration.
    self._fs = LibraryFilesystem(self._config)

    self._db = Database(self._fs.database_dsn, echo=echo)
    self._account_password = self.config.accounts.password

    self._warehouse = None  # Will be populated in the warehouse property.

    try:
        self._db.open()
    except OperationalError as e:
        raise DatabaseMissingError(
            "Failed to open database '{}': {} ".format(self._db.dsn, e))

    # Number of multiprocessing processors. Default to all of them.
    self.processes = None

    self._search = Search(self, search) if search else None
def search(self):
    """Lazily create and return the library's Search service."""
    existing = self._search
    if not existing:
        existing = Search(self)
        self._search = existing
    return existing