def testDelete(self):
  """After Delete, the key is unreadable for internal and external users."""
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('hello', 'secret')
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('hello', 'not secret')
  layered_cache.Delete('hello')
  # Both the internal and the external copy must be gone.
  for user in ('*****@*****.**', '*****@*****.**'):
    self.SetCurrentUser(user)
    self.assertIsNone(layered_cache.Get('hello'))
def testGet_OverRetryLimit(self):
  """A frozen job already past the retry budget is neither rescheduled nor failed."""
  fresh_job = job_module.Job.New((), ())
  fresh_job.task = '123'
  fresh_job.put()
  fresh_job._Schedule = mock.MagicMock()  # pylint: disable=invalid-name
  fresh_job.Fail = mock.MagicMock()  # pylint: disable=invalid-name

  stale_job = job_module.Job.New((), ())
  stale_job.task = '123'
  stale_job.put()
  # Backdate the job so the cron handler considers it frozen.
  stale_job.updated = datetime.datetime.now() - datetime.timedelta(hours=8)
  stale_job.put()
  stale_job._Schedule = mock.MagicMock()  # pylint: disable=invalid-name
  stale_job.Fail = mock.MagicMock()  # pylint: disable=invalid-name

  # Mark the stale job as having already exceeded the maximum retries.
  layered_cache.Set(
      refresh_jobs._JOB_CACHE_KEY % stale_job.job_id,
      {'retries': refresh_jobs._JOB_MAX_RETRIES + 1})

  self.testapp.get('/cron/refresh-jobs')
  self.ExecuteDeferredTasks('default')

  # Neither job should have been touched.
  for job in (fresh_job, stale_job):
    self.assertFalse(job._Schedule.called)
    self.assertFalse(job.Fail.called)
def testDeleteAllExpiredEntities(self):
  """Only entities whose expiry date lies in the past are purged."""
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('expired_str1', 'apple', days_to_keep=-10)
  layered_cache.Set('expired_str2', 'bat', days_to_keep=-1)
  layered_cache.Set('expired_str3', 'cat', days_to_keep=10)
  layered_cache.Set('expired_str4', 'dog', days_to_keep=0)
  layered_cache.Set('expired_str5', 'egg')

  # All five values are readable before the purge.
  before = [
      ('expired_str1', 'apple'),
      ('expired_str2', 'bat'),
      ('expired_str3', 'cat'),
      ('expired_str4', 'dog'),
      ('expired_str5', 'egg'),
  ]
  for key, value in before:
    self.assertEqual(value, layered_cache.Get(key))

  layered_cache.DeleteAllExpiredEntities()

  # The two entries set with a negative days_to_keep are gone; entries with a
  # future expiry, days_to_keep=0, or no expiry at all survive.
  self.assertIsNone(layered_cache.Get('expired_str1'))
  self.assertIsNone(layered_cache.Get('expired_str2'))
  self.assertEqual('cat', layered_cache.Get('expired_str3'))
  self.assertEqual('dog', layered_cache.Get('expired_str4'))
  self.assertEqual('egg', layered_cache.Get('expired_str5'))
def GetSubTests(suite_name, bot_names):
  """Builds the combined sub-test tree for a suite across several bots.

  Each bot may expose different sub-tests; the per-bot trees are merged into
  one dict. Results are cached per bot, since the test-picker select menus
  query this repeatedly. Only non-deprecated tests should be shown.

  Args:
    suite_name: Top level test name.
    bot_names: List of master/bot names in the form "<master>/<platform>".

  Returns:
    A dict mapping test names to dicts to entries which have the keys
    "has_rows" (boolean) and "sub_tests", which is another sub-tests dict.
    This forms a tree structure.
  """
  combined = {}
  for bot_name in bot_names:
    master, bot = bot_name.split('/')
    suite_key = ndb.Key('TestMetadata', '%s/%s/%s' % (master, bot, suite_name))
    cache_key = _ListSubTestCacheKey(suite_key)

    # Some bots may have cached data; reuse it instead of querying again.
    cached = layered_cache.Get(cache_key)
    if cached:
      combined = _MergeSubTestsDict(combined, json.loads(cached))
      continue

    # Fetch active and deprecated descendants concurrently.
    active_future = GetTestDescendantsAsync(
        suite_key, has_rows=True, deprecated=False)
    deprecated_future = GetTestDescendantsAsync(
        suite_key, has_rows=True, deprecated=True)
    ndb.Future.wait_all([active_future, deprecated_future])

    active_paths = _MapTestDescendantsToSubTestPaths(
        active_future.get_result())
    deprecated_paths = _MapTestDescendantsToSubTestPaths(
        deprecated_future.get_result())
    sub_tests = _MergeSubTestsDict(
        _SubTestsDict(active_paths, False),
        _SubTestsDict(deprecated_paths, True))

    # Pickle is actually really slow, json.dumps to bypass that.
    layered_cache.Set(cache_key, json.dumps(sub_tests))
    combined = _MergeSubTestsDict(combined, sub_tests)
  return combined
def _ProcessFrozenJob(job_id):
  """Retries a frozen job, failing it once the retry budget is exhausted."""
  job = job_module.JobFromId(job_id)
  cache_key = _JOB_CACHE_KEY % job_id
  state = layered_cache.Get(cache_key)
  if not state:
    state = {'retries': 0}
  attempts = state.get('retries')

  # Already over the limit: the job was failed on a previous pass; skip it.
  if attempts > _JOB_MAX_RETRIES:
    return

  # Record this attempt before acting on the job.
  state['retries'] += 1
  layered_cache.Set(cache_key, state, days_to_keep=30)

  if attempts == _JOB_MAX_RETRIES:
    # Final attempt used up: mark the job as failed.
    job.Fail(_FAILURE_MESSAGE)
  else:
    job._Schedule()
  job.put()
def _ProcessFrozenJob(job_id):
  """Retries a frozen job, failing it once the retry budget is exhausted."""
  job = job_module.JobFromId(job_id)
  cache_key = _JOB_CACHE_KEY % job_id
  state = layered_cache.Get(cache_key) or {'retries': 0}
  attempts = state.get('retries')

  # Already over the limit: the job was failed on a previous pass; skip it.
  if attempts > _JOB_MAX_RETRIES:
    logging.error('Exceeded maximum retries (%s) for job %s',
                  _JOB_MAX_RETRIES, job_id)
    return

  # Record this attempt before acting on the job.
  state['retries'] += 1
  layered_cache.Set(cache_key, state, days_to_keep=30)

  if attempts == _JOB_MAX_RETRIES:
    # Final attempt used up: mark the job as failed.
    job.Fail(errors.REFRESH_FAILURE)
    job.put()
    logging.error('Failed retry for job %s', job_id)
    return

  logging.info('Restarting job %s', job_id)
  job._Schedule()
  job.put()
def testSetAndGet(self):
  """Set stores an internal-only pickled copy; external users see nothing."""
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('str', 'Hello, World!')
  layered_cache.Set('dict', {'hello': [1, 2, 3]})

  # The string value is pickled into the internal entity only.
  self.assertEqual(
      'Hello, World!',
      cPickle.loads(
          ndb.Key('CachedPickledString', 'internal_only__str').get().value))
  self.assertIsNone(
      ndb.Key('CachedPickledString', 'externally_visible__str').get())
  self.assertEqual('Hello, World!', layered_cache.Get('str'))
  self.SetCurrentUser('*****@*****.**')
  self.assertIsNone(layered_cache.Get('str'))
  self.SetCurrentUser('*****@*****.**')

  # The dict value behaves the same way.
  self.assertEqual(
      {'hello': [1, 2, 3]},
      cPickle.loads(
          ndb.Key('CachedPickledString', 'internal_only__dict').get().value))
  self.assertIsNone(
      ndb.Key('CachedPickledString', 'externally_visible__dict').get())
  self.assertEqual({'hello': [1, 2, 3]}, layered_cache.Get('dict'))
  self.SetCurrentUser('*****@*****.**')
  self.assertIsNone(layered_cache.Get('dict'))
def testExpireTime(self):
  """days_to_keep sets expire_time on the internal entity."""
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('str1', 'Hello, World!', days_to_keep=10)
  internal_key = ndb.Key('CachedPickledString', 'internal_only__str1')
  external_key = ndb.Key('CachedPickledString', 'externally_visible__str1')
  self.assertEqual('Hello, World!', cPickle.loads(internal_key.get().value))
  self.assertIsNone(external_key.get())
  self.assertEqual('Hello, World!', layered_cache.Get('str1'))

  # The expire date should be 10 days after the current date.
  stored_expiry = internal_key.get().expire_time
  expected_expiry = datetime.datetime.now() + datetime.timedelta(days=10)
  self.assertEqual(stored_expiry.date(), expected_expiry.date())

  # When current user is external, the external version is returned by Get.
  self.SetCurrentUser('*****@*****.**')
  self.assertIsNone(layered_cache.Get('str1'))
def testSet_TooBig(self, mock_cached_pickled_string):
  """If the datastore write is too large, Set falls back to stored_object."""
  mock_cached_pickled_string.side_effect = (
      apiproxy_errors.RequestTooLargeError('too big!'))
  layered_cache.Set('foo', 'bar')
  self.assertEqual('bar', stored_object.Get('foo'))
def testGetSubTests_FetchAndCacheBehavior(self):
  """Sub-tests are fetched, cached per bot, and merged across bots."""
  self._AddSampleData()

  # Set the has_rows flag to true on two of the TestMetadata entities.
  for test_path in [
      'Chromium/win7/really/nested/very/deeply/subtest',
      'Chromium/win7/really/nested/very_very'
  ]:
    entity = utils.TestKey(test_path).get()
    entity.has_rows = True
    entity.put()

  # A tree-structured dict of dicts is constructed, and the 'has_rows'
  # flag is set to true for two of these tests. These two tests and
  # their parents are all included in the result.
  response = self.testapp.post(
      '/list_tests', {
          'type': 'sub_tests',
          'suite': 'really',
          'bots': 'Chromium/win7,Chromium/mac'
      })
  self.assertEqual('*', response.headers.get('Access-Control-Allow-Origin'))
  expected = {
      'nested': {
          'has_rows': False,
          'sub_tests': {
              'very': {
                  'has_rows': False,
                  'sub_tests': {
                      'deeply': {
                          'has_rows': False,
                          'sub_tests': {
                              'subtest': {
                                  'has_rows': True,
                                  'sub_tests': {}
                              }
                          }
                      }
                  }
              },
              'very_very': {
                  'has_rows': True,
                  'sub_tests': {}
              }
          }
      }
  }

  # The response should be as expected.
  self.assertEqual(expected, json.loads(response.body))

  # The cache should be set for the win7 bot with the expected response.
  cached_win7 = layered_cache.Get(
      graph_data.LIST_TESTS_SUBTEST_CACHE_KEY % ('Chromium', 'win7', 'really'))
  self.assertEqual(expected, json.loads(cached_win7))

  # Change mac subtests in cache. Should be merged with win7.
  mac_subtests = {
      'mactest': {
          'has_rows': False,
          'sub_tests': {
              'graph': {
                  'has_rows': True,
                  'sub_tests': {}
              }
          }
      }
  }
  layered_cache.Set(
      graph_data.LIST_TESTS_SUBTEST_CACHE_KEY %
      ('Chromium', 'mac', 'really'), json.dumps(mac_subtests))

  response = self.testapp.post(
      '/list_tests', {
          'type': 'sub_tests',
          'suite': 'really',
          'bots': 'Chromium/win7,Chromium/mac'
      })
  self.assertEqual('*', response.headers.get('Access-Control-Allow-Origin'))
  expected.update(mac_subtests)
  self.assertEqual(expected, json.loads(response.body))