def testPost_CacheSet(self):
  """POSTing both a bug ID and a message stores both in the layered cache."""
  self.SetCurrentUser('*****@*****.**')
  params = {'warning_bug': '54321', 'warning_message': 'Stern warning'}
  self.testapp.post('/set_warning_message', params)
  self.assertEqual('Stern warning', layered_cache.Get('warning_message'))
  self.assertEqual('54321', layered_cache.Get('warning_bug'))
def testPost_CacheSetOnlyMessage(self):
  """An empty bug field stores only the message; the bug stays unset."""
  self.SetCurrentUser('*****@*****.**')
  message = 'Random warning'
  self.testapp.post('/set_warning_message',
                    {'warning_bug': '', 'warning_message': message})
  self.assertEqual(message, layered_cache.Get('warning_message'))
  self.assertIsNone(layered_cache.Get('warning_bug'))
def _GetChartValues(self):
  """Collects revision info plus any cached warning message/bug ID."""
  revision_info = namespaced_stored_object.Get(_REVISION_INFO_KEY) or {}
  return {
      'revision_info': revision_info,
      'warning_message': layered_cache.Get('warning_message'),
      'warning_bug': layered_cache.Get('warning_bug'),
  }
def testPost_NotLoggedIn(self):
  """A non-internal user gets an error page and nothing is cached."""
  self.SetCurrentUser('*****@*****.**')
  response = self.testapp.post(
      '/set_warning_message',
      {'warning_bug': '54321', 'warning_message': 'Stern warning'})
  # The error message is shown, and neither cache entry was written.
  self.assertIn('Only logged-in internal users', response)
  self.assertIsNone(layered_cache.Get('warning_message'))
  self.assertIsNone(layered_cache.Get('warning_bug'))
def get(self):
  """Renders the UI for setting the warning message."""
  if self._ShowErrorIfNotLoggedIn():
    return
  template_values = {
      'warning_message': layered_cache.Get('warning_message') or '',
      'warning_bug': layered_cache.Get('warning_bug') or '',
  }
  self.RenderHtml('set_warning_message.html', template_values)
def testDelete(self):
  """Delete removes the value for internal and external viewers alike."""
  # Write a value under each privilege level, then delete the key once.
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('hello', 'secret')
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('hello', 'not secret')
  layered_cache.Delete('hello')
  # Neither viewer should see anything after the delete.
  for user in ('*****@*****.**', '*****@*****.**'):
    self.SetCurrentUser(user)
    self.assertIsNone(layered_cache.Get('hello'))
def RenderHtml(self, template_file, template_values, status=200):
  """Fills in template values for pages that show charts."""
  revision_info = namespaced_stored_object.Get(_REVISION_INFO_KEY) or {}
  chart_values = {
      'revision_info': json.dumps(revision_info),
      'warning_message': layered_cache.Get('warning_message'),
      'warning_bug': layered_cache.Get('warning_bug'),
  }
  template_values.update(chart_values)
  return super(ChartHandler, self).RenderHtml(
      template_file, template_values, status)
def GetSubTests(suite_name, bot_names):
  """Gets the entire tree of subtests for the suite with the given name.

  Each bot may have different sub-tests available, but one combined
  sub-tests dict is returned for all of the bots specified. This is used
  by the test-picker select menus to display what tests are available;
  only tests that are not deprecated should be listed.

  Args:
    suite_name: Top level test name.
    bot_names: List of master/bot names in the form "<master>/<platform>".

  Returns:
    A dict mapping test names to entries with the keys "has_rows"
    (boolean) and "sub_tests" (another sub-tests dict), forming a tree
    that matches the tree structure of the Test entities in the datastore.
  """
  combined = {}
  for bot_name in bot_names:
    master, bot = bot_name.split('/')
    suite_key = ndb.Key('Master', master, 'Bot', bot, 'Test', suite_name)
    cache_key = _ListSubTestCacheKey(suite_key)
    # Some bots may have cached data; fall back to a datastore fetch and
    # populate the cache on a miss.
    sub_tests = layered_cache.Get(cache_key)
    if not sub_tests:
      sub_tests = _SubTestsDict(_FetchSubTestPaths(suite_key))
      layered_cache.Set(cache_key, sub_tests)
    combined = _MergeSubTestsDict(combined, sub_tests)
  return combined
def testGet_NegativeResult_StoresCommitHash(self):
  """A started try job must not leave a commit hash in the cache."""
  job = try_job.TryJob(
      bug_id=12345, rietveld_issue_id=200034, rietveld_patchset_id=1,
      status='started', bot='win_perf')
  job.put()
  self.testapp.get('/update_bug_with_results')
  self.assertIsNone(layered_cache.Get('commit_hash_a121212'))
def testExpireTime(self):
  """Set with days_to_keep stamps an expire_time on the internal entity."""
  self.SetCurrentUser('*****@*****.**')
  layered_cache.Set('str1', 'Hello, World!', days_to_keep=10)
  key_internal = ndb.Key('CachedPickledString', 'internal_only__str1')
  key_external = ndb.Key('CachedPickledString', 'externally_visible__str1')
  internal_entity = key_internal.get()
  self.assertEqual('Hello, World!', cPickle.loads(internal_entity.value))
  self.assertIsNone(key_external.get())
  self.assertEqual('Hello, World!', layered_cache.Get('str1'))
  # The expire date should be 10 days after the current date.
  expected_date = datetime.datetime.now() + datetime.timedelta(days=10)
  self.assertEqual(internal_entity.expire_time.date(), expected_date.date())
  # When current user is external, the external version is returned by Get.
  self.SetCurrentUser('*****@*****.**')
  self.assertIsNone(layered_cache.Get('str1'))
def testGet_DeleteExpiredEntities(self):
  """The /delete_expired_entities handler purges only expired entries."""
  self.SetCurrentUser('*****@*****.**')
  # Negative days_to_keep means already expired; 0 and 10 are still live,
  # and omitting days_to_keep means the entry never expires.
  entries = [('expired_str1', 'apple', -10), ('expired_str2', 'bat', -1),
             ('expired_str3', 'cat', 10), ('expired_str4', 'dog', 0)]
  for key, value, days in entries:
    layered_cache.Set(key, value, days_to_keep=days)
  layered_cache.Set('expired_str5', 'egg')
  # Everything is readable before the handler runs.
  for key, value, _ in entries:
    self.assertEqual(value, layered_cache.Get(key))
  self.assertEqual('egg', layered_cache.Get('expired_str5'))
  self.testapp.get('/delete_expired_entities')
  # Only the entries whose expiry is in the past are gone.
  self.assertIsNone(layered_cache.Get('expired_str1'))
  self.assertIsNone(layered_cache.Get('expired_str2'))
  self.assertEqual('cat', layered_cache.Get('expired_str3'))
  self.assertEqual('dog', layered_cache.Get('expired_str4'))
  self.assertEqual('egg', layered_cache.Get('expired_str5'))
def testDeleteAllExpiredEntities(self):
  """DeleteAllExpiredEntities purges only the entries that have expired."""
  self.SetCurrentUser('*****@*****.**')
  # Negative days_to_keep means already expired; 0 and 10 are still live,
  # and omitting days_to_keep means the entry never expires.
  entries = [('expired_str1', 'apple', -10), ('expired_str2', 'bat', -1),
             ('expired_str3', 'cat', 10), ('expired_str4', 'dog', 0)]
  for key, value, days in entries:
    layered_cache.Set(key, value, days_to_keep=days)
  layered_cache.Set('expired_str5', 'egg')
  # Everything is readable before the purge.
  for key, value, _ in entries:
    self.assertEqual(value, layered_cache.Get(key))
  self.assertEqual('egg', layered_cache.Get('expired_str5'))
  layered_cache.DeleteAllExpiredEntities()
  # Only the entries whose expiry is in the past are gone.
  self.assertIsNone(layered_cache.Get('expired_str1'))
  self.assertIsNone(layered_cache.Get('expired_str2'))
  self.assertEqual('cat', layered_cache.Get('expired_str3'))
  self.assertEqual('dog', layered_cache.Get('expired_str4'))
  self.assertEqual('egg', layered_cache.Get('expired_str5'))
def _PostSucessfulResult(job, bisect_results, issue_tracker):
  """Posts successful bisect results on logger and issue tracker.

  NOTE(review): the name is misspelled ("Sucessful"); renaming would break
  callers elsewhere, so it is left as-is here.

  Args:
    job: A TryJob entity; job.bug_id is the bug to update.
    issue_tracker: Service used to post the comment to the issue tracker.

  Raises:
    BugUpdateFailure: The issue tracker comment could not be added.
  """
  # From the results, get the list of people to CC (if applicable), the bug
  # to merge into (if applicable) and the commit hash cache key, which
  # will be used below.
  authors_to_cc = []
  merge_issue = None
  bug = ndb.Key('Bug', job.bug_id).get()
  commit_cache_key = _GetCommitHashCacheKey(bisect_results['results'])
  if bug and _BisectResultIsPositive(bisect_results['results']):
    # A cached entry for this commit means another bug already owns it;
    # in that case this bug is merged into it and no authors are CC'd.
    merge_issue = layered_cache.Get(commit_cache_key)
    if not merge_issue:
      authors_to_cc = _GetAuthorsToCC(bisect_results['results'])
  comment = _BUG_COMMENT_TEMPLATE % bisect_results
  # Add a friendly message to author of culprit CL.
  owner = None
  if authors_to_cc:
    comment = '%s%s' % (
        _AUTO_ASSIGN_MSG % {'author': authors_to_cc[0]}, comment)
    owner = authors_to_cc[0]
  # Set restrict view label if the bisect results are internal only.
  labels = ['Restrict-View-Google'] if job.internal_only else None
  added_comment = issue_tracker.AddBugComment(
      job.bug_id, comment, cc_list=authors_to_cc, merge_issue=merge_issue,
      labels=labels, owner=owner)
  if not added_comment:
    raise BugUpdateFailure('Failed to update bug %s with comment %s' %
                           (job.bug_id, comment))
  start_try_job.LogBisectResult(job.bug_id, comment)
  logging.info('Updated bug %s with results from %s',
               job.bug_id, job.rietveld_issue_id)
  if merge_issue:
    _MapAnomaliesToMergeIntoBug(merge_issue, job.bug_id)
    # Mark the duplicate bug's Bug entity status as closed so that
    # it doesn't get auto triaged.
    bug.status = bug_data.BUG_STATUS_CLOSED
    bug.put()
  # Cache the commit info and bug ID to datastore when there is no duplicate
  # issue that this issue is getting merged into. This has to be done only
  # after the issue is updated successfully with bisect information.
  if commit_cache_key and not merge_issue:
    layered_cache.Set(commit_cache_key, str(job.bug_id), days_to_keep=30)
    logging.info('Cached bug id %s and commit info %s in the datastore.',
                 job.bug_id, commit_cache_key)
def testBenchFindChangePoints_Basic(self, add_report_to_log_mock): test = self._AddTestData('test', _SAMPLE_SERIES, anomaly.DOWN) # Add untriaged anomalies. self._AddAnomalyForTest(7, None, test.key) # Add confirmed anomalies. self._AddAnomalyForTest(4, 123, test.key) # Add invalid anomalies. self._AddAnomalyForTest(10, -1, test.key) bench_find_anomalies.SetupBaseDataForBench() self.ExecuteDeferredTasks(bench_find_anomalies._TASK_QUEUE_NAME) test_benches = bench_find_anomalies.TestBench.query().fetch() self.assertEqual(1, len(test_benches)) self.assertEqual(_SAMPLE_SERIES, test_benches[0].data_series) self.assertEqual([[1, 2, 3, 4, 5, 6, 7, 8]], test_benches[0].base_anomaly_revs) self.assertEqual([[6, 7, 8, 9, 10, 11, 12]], test_benches[0].invalid_anomaly_revs) self.assertEqual([[1, 2, 3, 4, 5, 6, 7, 8]], test_benches[0].confirmed_anomaly_revs) bench_name = 'find_change_points_default' bench_description = 'A description.' bench_find_anomalies.BenchFindChangePoints(bench_name, bench_description) # Layered cache set. bench_key = '%s.%s' % (bench_name, bench_description) self.assertEqual( {bench_key: True}, layered_cache.Get( bench_find_anomalies._FIND_ANOMALIES_BENCH_CACHE_KEY)) task_queue = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME) test_support.execute_until_empty(task_queue, bench_find_anomalies._TASK_QUEUE_NAME) expected_result_dict = { 'bench_name': bench_name, 'description': bench_description, 'invalid_alerts': '0/1', 'confirmed_alerts': '1/1', 'new_alerts': 0, 'total_alerts': '1/1', 'unconfirmed_alert_links': '', 'extra_alert_links': '', } add_report_to_log_mock.assert_called_once_with(expected_result_dict)
def GetSubTests(suite_name, bot_names):
  """Gets the entire tree of subtests for the suite with the given name.

  Each bot may have different sub-tests available, but one combined
  sub-tests dict is returned for all of the bots specified. This is used
  by the test-picker select menus to display what tests are available;
  only tests that are not deprecated should be listed.

  Args:
    suite_name: Top level test name.
    bot_names: List of master/bot names in the form "<master>/<platform>".

  Returns:
    A dict mapping test names to entries with the keys "has_rows"
    (boolean) and "sub_tests" (another sub-tests dict), forming a tree
    structure.
  """
  # For some bots, there may be cached data; first collect and combine this.
  combined = {}
  for bot_name in bot_names:
    master, bot = bot_name.split('/')
    suite_key = ndb.Key('TestMetadata', '%s/%s/%s' % (master, bot, suite_name))
    cache_key = _ListSubTestCacheKey(suite_key)
    cached = layered_cache.Get(cache_key)
    if cached:
      sub_tests = json.loads(cached)
    else:
      # Kick off both descendant queries concurrently, then wait for both.
      active_future = GetTestDescendantsAsync(
          suite_key, has_rows=True, deprecated=False)
      deprecated_future = GetTestDescendantsAsync(
          suite_key, has_rows=True, deprecated=True)
      ndb.Future.wait_all([active_future, deprecated_future])
      active_paths = _MapTestDescendantsToSubTestPaths(
          active_future.get_result())
      deprecated_paths = _MapTestDescendantsToSubTestPaths(
          deprecated_future.get_result())
      sub_tests = _MergeSubTestsDict(
          _SubTestsDict(active_paths, False),
          _SubTestsDict(deprecated_paths, True))
      # Pickle is actually really slow, json.dumps to bypass that.
      layered_cache.Set(cache_key, json.dumps(sub_tests))
    combined = _MergeSubTestsDict(combined, sub_tests)
  return combined
def testSetAndGet(self):
  """Set writes an internal-only entity; external viewers read nothing."""
  self.SetCurrentUser('*****@*****.**')
  cases = [('str', 'Hello, World!'), ('dict', {'hello': [1, 2, 3]})]
  for key, value in cases:
    layered_cache.Set(key, value)
  for key, value in cases:
    self.SetCurrentUser('*****@*****.**')
    # Only the internal-only entity exists, and it holds the pickled value.
    internal_key = ndb.Key('CachedPickledString', 'internal_only__' + key)
    external_key = ndb.Key('CachedPickledString', 'externally_visible__' + key)
    self.assertEqual(value, cPickle.loads(internal_key.get().value))
    self.assertIsNone(external_key.get())
    self.assertEqual(value, layered_cache.Get(key))
    # An external viewer sees nothing for the same key.
    self.SetCurrentUser('*****@*****.**')
    self.assertIsNone(layered_cache.Get(key))
def BenchFindChangePoints(bench_name, description):
  """Submits a bench job for a bench_name and description.

  Requires an implementation of find_change_points added to
  _EXPERIMENTAL_FUNCTIONS. At least bench_name or description must be
  different for each job.

  Args:
    bench_name: A string bench name which should exist in the keys of
        _EXPERIMENTAL_FUNCTIONS.
    description: A string description of this bench job.

  Raises:
    ValueError: The input was not valid or the job was already submitted.
    Exception: Not enough data available.
  """
  if bench_name not in _EXPERIMENTAL_FUNCTIONS:
    raise ValueError('%s is not a valid find anomalies bench function.' %
                     bench_name)
  bench_key = '%s.%s' % (bench_name, description)
  # "or {}" covers both a cache miss (None) and an empty cached dict.
  submitted_benches = layered_cache.Get(_FIND_ANOMALIES_BENCH_CACHE_KEY) or {}
  if bench_key in submitted_benches:
    # Message fixed: it previously read '"%s. %s" already in submitted.',
    # which was ungrammatical and didn't match the actual bench key format.
    raise ValueError('Bench job for "%s" has already been submitted.' %
                     bench_key)
  submitted_benches[bench_key] = True
  layered_cache.Set(_FIND_ANOMALIES_BENCH_CACHE_KEY, submitted_benches)
  # Check if base bench data exist.
  if not TestBench.query().fetch(keys_only=True, limit=1):
    raise Exception('No base data available to bench against.')
  # Add to taskqueue to run simulation.
  stage = RunExperimentalPipeline(bench_name, description)
  stage.start(queue_name=_TASK_QUEUE_NAME)
def testGetSubTests_FetchAndCacheBehavior(self): self._AddSampleData() # Set the has_rows flag to true on two of the TestMetadata entities. for test_path in [ 'Chromium/win7/really/nested/very/deeply/subtest', 'Chromium/win7/really/nested/very_very' ]: test = utils.TestKey(test_path).get() test.has_rows = True test.put() # A tree-structured dict of dicts is constructed, and the 'has_rows' # flag is set to true for two of these tests. These two tests and # their parents are all included in the result. response = self.testapp.post( '/list_tests', { 'type': 'sub_tests', 'suite': 'really', 'bots': 'Chromium/win7,Chromium/mac' }) self.assertEqual('*', response.headers.get('Access-Control-Allow-Origin')) expected = { 'nested': { 'has_rows': False, 'sub_tests': { 'very': { 'has_rows': False, 'sub_tests': { 'deeply': { 'has_rows': False, 'sub_tests': { 'subtest': { 'has_rows': True, 'sub_tests': {} } } } } }, 'very_very': { 'has_rows': True, 'sub_tests': {} } } } } # The response should be as expected. self.assertEqual(expected, json.loads(response.body)) # The cache should be set for the win7 bot with the expected response. self.assertEqual( expected, json.loads( layered_cache.Get(graph_data.LIST_TESTS_SUBTEST_CACHE_KEY % ('Chromium', 'win7', 'really')))) # Change mac subtests in cache. Should be merged with win7. mac_subtests = { 'mactest': { 'has_rows': False, 'sub_tests': { 'graph': { 'has_rows': True, 'sub_tests': {} } } } } layered_cache.Set( graph_data.LIST_TESTS_SUBTEST_CACHE_KEY % ('Chromium', 'mac', 'really'), json.dumps(mac_subtests)) response = self.testapp.post( '/list_tests', { 'type': 'sub_tests', 'suite': 'really', 'bots': 'Chromium/win7,Chromium/mac' }) self.assertEqual('*', response.headers.get('Access-Control-Allow-Origin')) expected.update(mac_subtests) self.assertEqual(expected, json.loads(response.body))
def testPost_CacheCleared(self):
  """Posting an empty message clears both cached warning values."""
  self.SetCurrentUser('*****@*****.**')
  self.testapp.post('/set_warning_message', {'warning_message': ''})
  # assertIsNone matches the sibling tests (was assertEqual(None, ...)).
  self.assertIsNone(layered_cache.Get('warning_message'))
  self.assertIsNone(layered_cache.Get('warning_bug'))
def testGet_StoredObject(self):
  """Get falls through to a value written via stored_object."""
  key, value = 'foo', 'bar'
  stored_object.Set(key, value)
  self.assertEqual(value, layered_cache.Get(key))