def test_dir_and_basename_query(self):
    """A query containing a '/' matches against the dir-plus-basename form of paths."""
    shard_manager = FakeDBShardManager(
        ["foo/bar.txt", "foo/rebar.txt", "blah/baz.txt"])
    query_cache = QueryCache()
    res = MockQuery("oo/bar").execute(shard_manager, query_cache)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(["foo/bar.txt", "foo/rebar.txt"], res.filenames)
def test_query_that_times_out(self):
    """A query whose directory search exceeds its timeout must report truncation."""
    # Comprehension instead of a manual append loop (idiomatic and faster).
    files = ["f/x%i.txt" % i for i in range(100000)]
    shard_manager = FakeDBShardManager(files)
    query_cache = QueryCache()
    q = MockQuery("x/")
    # Force an (effectively) immediate timeout so truncation is guaranteed
    # regardless of machine speed.
    q._dir_search_timeout = 0.00001
    res = q.execute(shard_manager, query_cache)
    self.assertTrue(res.truncated)
def test_cache_different_maxhits(self):
    """Queries differing only in max_hits must not share a cache entry."""
    shard_manager = FakeDBShardManager()
    query_cache = QueryCache()

    first = MockQuery("a", 10)
    first.execute(shard_manager, query_cache)
    self.assertTrue(first.did_call_execute_nocache)

    # Same search text, different max_hits: expect a cache miss, so the
    # uncached path runs again.
    second = MockQuery("a", 11)
    second.execute(shard_manager, query_cache)
    self.assertTrue(second.did_call_execute_nocache)
def test_exact_filter_plumbing(self):
    """exact_match=True should only return paths whose basename matches exactly."""
    shard_manager = FakeDBShardManager(["foo/bar.txt", "foo/rebar.txt"])
    query_cache = QueryCache()
    # "bar" is only a substring of each basename, so an exact query finds nothing.
    res = MockQuery("bar", 10, exact_match=True).execute(shard_manager, query_cache)
    self.assertTrue(res.is_empty())
    # "bar.txt" matches one full basename exactly.
    res = MockQuery("bar.txt", 10, exact_match=True).execute(shard_manager, query_cache)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(["foo/bar.txt"], res.filenames)
def test_basename_only_query_rank_results(self):
    """A basename-only query returns matching files ranked by BasenameRanker."""
    shard_manager = FakeDBShardManager(
        ["foo/bar.txt", "foo/rebar.txt", "blah/baz.txt"])
    query_cache = QueryCache()
    res = MockQuery("bar").execute_nocache(shard_manager, query_cache)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(set(["foo/bar.txt", "foo/rebar.txt"]),
                     set(res.filenames))
    # Hoist the ranker: one instance suffices for every rank computation.
    ranker = BasenameRanker()
    expected_ranks = [
        ranker.rank_query("bar", os.path.basename(name))
        for name in res.filenames
    ]
    self.assertEqual(expected_ranks, res.ranks)
def step_indexer(self):
    """Advance the background indexer by one step.

    Drives a small state machine held in self._pending_indexer:
      * falsy            -> nothing to do.
      * an integer       -> a (re)index was requested; create a fresh indexer.
      * an indexer       -> either finish up (swap in a new shard manager and
                            query cache) or do a bounded slice of work.
    """
    if not self._pending_indexer:
        return
    # _pending_indexer is an integer if recreation should be triggered.
    if isinstance(self._pending_indexer, int):
        # Refresh the ignore patterns before (re)building, then start a new
        # indexer over the configured directories and note the start time so
        # total indexing duration can be logged on completion.
        self._dir_cache.set_ignores(self.settings.ignores)
        self._pending_indexer = db_indexer.Create(self.settings.dirs, self._dir_cache)
        self._pending_indexer_start_time = time.time()
    if self._pending_indexer.complete:
        elapsed = time.time() - self._pending_indexer_start_time
        logging.debug("Indexing with %s took %s seconds",
                      type(self._pending_indexer), elapsed)
        # Atomically (from callers' point of view) swap in the freshly built
        # shard manager and invalidate all cached query results.
        self._cur_shard_manager = DBShardManager(self._pending_indexer)
        self._cur_query_cache = QueryCache()
        self._pending_indexer = None
    else:
        # Not done yet: do another bounded slice of indexing work so the
        # caller's loop stays responsive.
        self._pending_indexer.index_a_bit_more()
from twisted.internet import protocol, reactor from vertica_wire_handler import VerticaWireHandler from query_cache import QueryCache from constants import HOST, TARGET_PORT, _END_PATTERN, _REQUEST_ORD, _END_JDBC_PATTERN, MAX_RESULT_SIZE query_cache = QueryCache() _MAX_RESULT_SIZE = MAX_RESULT_SIZE if MAX_RESULT_SIZE < 380000 else 380000 class ServerProtocol(protocol.Protocol): def __init__(self): self.buffer = None self.client = None self.sp_data = [None, None] def connectionMade(self): self.client_factory = protocol.ClientFactory() self.client_factory.protocol = ClientProtocol self.client_factory.server = self reactor.connectTCP(HOST, TARGET_PORT, self.client_factory) def dataReceived(self, data): if (self.client != None): if data[0] == _REQUEST_ORD: msg = VerticaWireHandler(data) self.sp_data = [msg, []] if msg.key in query_cache.cache_keys: cached_data = query_cache.cache_access(msg.key) if cached_data:
def test_empty_dir_query(self):
    """A bare '/' query still yields results rather than an empty result set."""
    query_cache = QueryCache()
    shard_manager = FakeDBShardManager(["foo/bar.txt", "foo/rebar.txt"])
    result = MockQuery("/").execute(shard_manager, query_cache)
    self.assertTrue(len(result.filenames) != 0)
def test_empty_query(self):
    """An empty search string short-circuits: the uncached path never runs."""
    shard_manager = FakeDBShardManager()
    query_cache = QueryCache()
    query = MockQuery("", 10)
    query.execute(shard_manager, query_cache)
    self.assertFalse(query.did_call_execute_nocache)
def test_query_with_debug(self):
    """Smoke test: executing a query with debug enabled must not raise."""
    query_cache = QueryCache()
    shard_manager = FakeDBShardManager()
    query = MockQuery("a", 10)
    query.debug = True
    query.execute(shard_manager, query_cache)