def testUpdateOwnerFromChartjson(self):
    """Chartjson owner updates propagate to both cached owner dicts."""
    # Seed the cache of owners previously reported via chartjson.
    layered_cache.SetExternal(
        test_owner._CHARTJSON_OWNER_CACHE_KEY,
        {
            'ChromiumPerf/speedometer': {'*****@*****.**'},
            'ChromiumPerf/jetstream': {'*****@*****.**'},
        })
    # Seed the master owner cache.
    layered_cache.SetExternal(
        test_owner._MASTER_OWNER_CACHE_KEY,
        {
            'ChromiumPerf/speedometer': {
                '*****@*****.**', '*****@*****.**', '*****@*****.**'},
            'ChromiumPerf/jetstream': {'*****@*****.**'},
            'ChromiumPerf/octane': {'*****@*****.**'},
        })

    # Apply a new chartjson snapshot in which jetstream's owners are
    # cleared (None) and spaceport appears for the first time.
    test_owner.UpdateOwnerFromChartjson({
        'ChromiumPerf/speedometer': {'*****@*****.**'},
        'ChromiumPerf/jetstream': None,
        'ChromiumPerf/spaceport': {'*****@*****.**'},
    })

    self.assertEqual(
        _ANOTHER_SAMPLE_OWNER_DICT,
        layered_cache.GetExternal(test_owner._CHARTJSON_OWNER_CACHE_KEY))
    self.assertEqual(
        _COMBINED_SAMPLE_OWNER_DICT,
        layered_cache.GetExternal(test_owner._MASTER_OWNER_CACHE_KEY))
def testGet_BisectCulpritHasAuthor_DoesNotMergeIntoBugWithMultipleCulprits(
        self, mock_update_bug):
    """Bugs with multiple distinct culprits are not merge targets."""
    # First completed bisect on bug 123 blames the sample culprit CL.
    first_results = copy.deepcopy(_SAMPLE_BISECT_RESULTS_JSON)
    self._AddTryJob(123, 'completed', 'win_perf', results_data=first_results)
    layered_cache.SetExternal('commit_hash_2a1781d64d', '123')

    # Second completed bisect on the same bug blames a different culprit,
    # so bug 123 now has multiple culprits.
    second_results = copy.deepcopy(_SAMPLE_BISECT_RESULTS_JSON)
    second_results['culprit_data']['email'] = '*****@*****.**'
    second_results['culprit_data']['cl'] = 'BBBBBBBB'
    self._AddTryJob(123, 'completed', 'linux_perf',
                    results_data=second_results)
    layered_cache.SetExternal('commit_hash_BBBBBBBB', '123')

    # Bug 456's bisect finds the same culprit as bug 123's first bisect.
    self._AddTryJob(456, 'started', 'win_perf',
                    results_data=_SAMPLE_BISECT_RESULTS_JSON)

    self.testapp.get('/update_bug_with_results')

    # Bug 456 is not merged into 123; it is updated on its own and
    # assigned to the culprit author.
    mock_update_bug.assert_called_once_with(
        mock.ANY, mock.ANY,
        cc_list=['*****@*****.**', '*****@*****.**'],
        merge_issue=None, labels=None, owner='*****@*****.**',
        status='Assigned')
    # The commit cache entry now points at the newly updated bug.
    self.assertEqual(
        layered_cache.GetExternal('commit_hash_2a1781d64d'), '456')
def testAddOwnerFromDict(self):
    """Merging a new owner dict combines it with the cached master dict."""
    layered_cache.SetExternal(
        test_owner._MASTER_OWNER_CACHE_KEY, _SAMPLE_OWNER_DICT)

    test_owner.AddOwnerFromDict(_ANOTHER_SAMPLE_OWNER_DICT)

    cached = layered_cache.GetExternal(test_owner._MASTER_OWNER_CACHE_KEY)
    self.assertEqual(_COMBINED_SAMPLE_OWNER_DICT, cached)
def AddOwnerFromDict(owner_dict):
    """Merges |owner_dict| into the owner dict stored in layered_cache.

    For example, if the cached owner dict is:
      {
        'a/a': {'*****@*****.**'},
        'a/b': {'*****@*****.**'},
      }
    and |owner_dict| is:
      {
        'a/a': {'*****@*****.**'},
        'c/c': {'*****@*****.**'},
      }
    then the cache is updated to:
      {
        'a/a': {'*****@*****.**', '*****@*****.**'},
        'a/b': {'*****@*****.**'},
        'c/c': {'*****@*****.**'},
      }

    Args:
      owner_dict: A dictionary of "Master/Test suite" to set of owners'
          email.
    """
    cached = GetMasterCachedOwner()
    for path, new_owners in owner_dict.iteritems():
        # setdefault both fetches the existing owner set and inserts an
        # empty one for paths not yet cached; update() then unions in the
        # new owners in place.
        cached.setdefault(path, set()).update(new_owners)
    layered_cache.SetExternal(_MASTER_OWNER_CACHE_KEY, cached)
def testRemoveOwnerFromDict(self):
    """Removing every cached owner leaves an empty owner dict."""
    layered_cache.SetExternal(
        test_owner._MASTER_OWNER_CACHE_KEY, _SAMPLE_OWNER_DICT)

    test_owner.RemoveOwnerFromDict(_SAMPLE_OWNER_DICT)

    self.assertEqual(
        {}, layered_cache.GetExternal(test_owner._MASTER_OWNER_CACHE_KEY))
def _PostSuccessfulResult(job, issue_tracker): """Posts successful bisect results on issue tracker.""" # From the results, get the list of people to CC (if applicable), the bug # to merge into (if applicable) and the commit hash cache key, which # will be used below. if job.bug_id < 0: return results_data = job.results_data authors_to_cc = [] commit_cache_key = _GetCommitHashCacheKey(results_data) merge_issue = layered_cache.GetExternal(commit_cache_key) if not merge_issue: authors_to_cc = _GetAuthorsToCC(results_data) comment = bisect_report.GetReport(job) # Add a friendly message to author of culprit CL. owner = None if authors_to_cc: comment = '%s%s%s' % (_GetAutoAssignHeader(results_data), _AUTO_ASSIGN_MSG % { 'author': authors_to_cc[0] }, comment) owner = authors_to_cc[0] # Set restrict view label if the bisect results are internal only. labels = ['Restrict-View-Google'] if job.internal_only else None comment_added = issue_tracker.AddBugComment(job.bug_id, comment, cc_list=authors_to_cc, merge_issue=merge_issue, labels=labels, owner=owner) if not comment_added: raise BugUpdateFailure('Failed to update bug %s with comment %s' % (job.bug_id, comment)) logging.info('Updated bug %s with results from %s', job.bug_id, job.rietveld_issue_id) if merge_issue: _MapAnomaliesToMergeIntoBug(merge_issue, job.bug_id) # Mark the duplicate bug's Bug entity status as closed so that # it doesn't get auto triaged. bug = ndb.Key('Bug', job.bug_id).get() if bug: bug.status = bug_data.BUG_STATUS_CLOSED bug.put() # Cache the commit info and bug ID to datastore when there is no duplicate # issue that this issue is getting merged into. This has to be done only # after the issue is updated successfully with bisect information. if commit_cache_key and not merge_issue: layered_cache.SetExternal(commit_cache_key, str(job.bug_id), days_to_keep=30) logging.info('Cached bug id %s and commit info %s in the datastore.', job.bug_id, commit_cache_key)
def testGet_BisectNoResultsData_NoException(self, mock_logging_error):
    """Completed jobs missing culprit data do not cause logged errors."""
    # Two completed jobs on bug 123, both with culprit_data stripped out.
    for bisect_bot in ('win_perf', 'linux_perf'):
        culpritless = copy.deepcopy(_SAMPLE_BISECT_RESULTS_JSON)
        culpritless['culprit_data'] = None
        self._AddTryJob(123, 'completed', bisect_bot,
                        results_data=culpritless)
    layered_cache.SetExternal('commit_hash_2a1781d64d', '123')
    layered_cache.SetExternal('commit_hash_BBBBBBBB', '123')
    # A third job, still in progress, with full sample results.
    self._AddTryJob(456, 'started', 'win_perf',
                    results_data=_SAMPLE_BISECT_RESULTS_JSON)

    self.testapp.get('/update_bug_with_results')

    self.assertEqual(0, mock_logging_error.call_count)
def _UpdateCacheKeyForIssue(merge_issue_id, commit_cache_key, job):
    """Caches the job's bug id under its commit cache key, unless merging.

    The commit-to-bug mapping is only recorded when this issue is not being
    merged into a duplicate issue; callers invoke this only after the issue
    has been updated successfully with bisect information.

    Args:
      merge_issue_id: Id of the issue being merged into, or a falsy value.
      commit_cache_key: Cache key derived from the culprit commit hash.
      job: A try job entity whose bug_id is cached.
    """
    # Nothing to record when there is no key, or when this bug is being
    # merged into another issue.
    if not commit_cache_key or merge_issue_id:
        return
    layered_cache.SetExternal(commit_cache_key, str(job.bug_id),
                              days_to_keep=30)
    logging.info('Cached bug id %s and commit info %s in the datastore.',
                 job.bug_id, commit_cache_key)
def testRemoveOwner(self):
    """Removing the last owner of each suite empties the cached dict."""
    layered_cache.SetExternal(
        test_owner._MASTER_OWNER_CACHE_KEY, _SAMPLE_OWNER_DICT)

    # Strip every owner from both cached suites, one email at a time.
    test_owner.RemoveOwner('ChromiumPerf/speedometer', '*****@*****.**')
    test_owner.RemoveOwner('ChromiumPerf/speedometer', '*****@*****.**')
    test_owner.RemoveOwner('ChromiumPerf/octane', '*****@*****.**')

    self.assertEqual(
        {}, layered_cache.GetExternal(test_owner._MASTER_OWNER_CACHE_KEY))
def testGetAndSet_External(self):
    """External values round-trip and never populate the internal key."""
    layered_cache.SetExternal('str', 'Hello, World!')
    layered_cache.SetExternal('dict', {'hello': [1, 2, 3]})

    # The string is stored pickled under the externally visible key only.
    stored_str = ndb.Key(
        'CachedPickledString', 'externally_visible__str').get().value
    self.assertEqual('Hello, World!', cPickle.loads(stored_str))
    self.assertEqual(
        None, ndb.Key('CachedPickledString', 'internal_only__str').get())
    self.assertEqual('Hello, World!', layered_cache.GetExternal('str'))

    # Same story for the dict value.
    stored_dict = ndb.Key(
        'CachedPickledString', 'externally_visible__dict').get().value
    self.assertEqual({'hello': [1, 2, 3]}, cPickle.loads(stored_dict))
    self.assertEqual(
        None, ndb.Key('CachedPickledString', 'internal_only__dict').get())
    self.assertEqual({'hello': [1, 2, 3]}, layered_cache.GetExternal('dict'))
def testGet_NonAdmin_OnlyUserInfoEmbeddedOnPage(self):
    """Non-admin users see only the suite names in the embedded owner info."""
    self.SetCurrentUser('*****@*****.**', is_admin=False)
    layered_cache.SetExternal(
        test_owner._MASTER_OWNER_CACHE_KEY, _SAMPLE_OWNER_DICT)

    response = self.testapp.get('/edit_test_owners')

    self.assertEqual(
        [{u'name': u'ChromiumPerf/octane'},
         {u'name': u'ChromiumPerf/speedometer'}],
        self.GetEmbeddedVariable(response, 'OWNER_INFO'))
def AddOwner(test_suite_path, owner_email):
    """Adds an owner for a test suite path.

    The owner is added to the set cached under |test_suite_path| in the
    master owner dict, and the updated dict is written back to
    layered_cache.

    Args:
      test_suite_path: A string of "Master/Test suite".
      owner_email: An email string.
    """
    owner_dict_cache = GetMasterCachedOwner()
    owners = owner_dict_cache.get(test_suite_path, set())
    owners.add(owner_email)
    owner_dict_cache[test_suite_path] = owners
    layered_cache.SetExternal(_MASTER_OWNER_CACHE_KEY, owner_dict_cache)
    # The original ended with a re-read of the cache assigned to a local
    # that was immediately discarded (dead code); it has been removed.
def RemoveOwnerFromDict(owner_dict):
    """Removes test owners in |owner_dict| from the owner dict in layered_cache.

    Each owner set in |owner_dict| is subtracted from the cached owner set
    for the same path; paths left with no owners are deleted from the cache
    entirely.

    Args:
      owner_dict: A dictionary of Master/Test suite to set of owners to be
          removed.
    """
    owner_dict_cache = GetMasterCachedOwner()
    for path, owners in owner_dict.iteritems():
        owners_cache = owner_dict_cache.get(path, set())
        # Set difference keeps only the owners that were not removed.
        owner_dict_cache[path] = owners_cache - owners
        if not owner_dict_cache[path]:
            # Drop the path so empty entries don't linger in the cache.
            del owner_dict_cache[path]
    layered_cache.SetExternal(_MASTER_OWNER_CACHE_KEY, owner_dict_cache)
def testGet_BisectCulpritHasAuthor_MergesBugWithExisting(
        self, mock_update_bug, mock_merge_anomalies):
    """A culprit already mapped to another bug triggers a merge into it."""
    # An earlier bisect already mapped this culprit commit to bug 111222.
    layered_cache.SetExternal('commit_hash_2a1781d64d', 111222)
    self._AddTryJob(12345, 'started', 'win_perf',
                    results_data=_SAMPLE_BISECT_RESULTS_JSON)

    self.testapp.get('/update_bug_with_results')

    # The bug is merged into 111222: nobody is cc'd or assigned.
    mock_update_bug.assert_called_once_with(
        mock.ANY, mock.ANY, cc_list=[], merge_issue='111222', labels=None,
        owner=None, status=None)
    # Should have skipped updating cache.
    self.assertEqual(
        layered_cache.GetExternal('commit_hash_2a1781d64d'), 111222)
    # Anomalies are remapped onto the merge target.
    mock_merge_anomalies.assert_called_once_with('111222', 12345)
def testGet_BisectCulpritHasAuthor_DoesNotMergeDuplicate(
        self, mock_update_bug, mock_merge_anomalies):
    """A bug is not merged when the cached issue is itself a duplicate."""
    layered_cache.SetExternal('commit_hash_2a1781d64d', 111222)
    self._AddTryJob(12345, 'started', 'win_perf',
                    results_data=_SAMPLE_BISECT_RESULTS_JSON)

    self.testapp.get('/update_bug_with_results')

    # The bug is updated on its own and assigned to the culprit author
    # instead of being merged.
    mock_update_bug.assert_called_once_with(
        mock.ANY, mock.ANY,
        cc_list=['*****@*****.**', '*****@*****.**'],
        merge_issue=None, labels=None, owner='*****@*****.**',
        status='Assigned')
    # Should have skipped updating cache.
    self.assertEqual(
        layered_cache.GetExternal('commit_hash_2a1781d64d'), 111222)
    # Should have skipped mapping anomalies.
    self.assertEqual(0, mock_merge_anomalies.call_count)
def RemoveOwner(test_suite_path, owner_email=None):
    """Removes test owners for |test_suite_path|.

    Args:
      test_suite_path: A string of "Master/Test suite".
      owner_email: Optional email string. If not specified, dict entry
          for |test_suite_path| will be deleted.
    """
    owner_dict_cache = GetMasterCachedOwner()
    if test_suite_path in owner_dict_cache:
        if owner_email:
            owners = owner_dict_cache[test_suite_path]
            # Use discard (not remove) so that removing an email that is
            # not actually listed is a harmless no-op rather than a
            # KeyError.
            owners.discard(owner_email)
            if not owners:
                # Drop the path entirely once its last owner is gone.
                del owner_dict_cache[test_suite_path]
        else:
            del owner_dict_cache[test_suite_path]
        layered_cache.SetExternal(_MASTER_OWNER_CACHE_KEY, owner_dict_cache)
def UpdateBisectStats(bot_name, status):
    """Updates bisect run stats by bot name and status.

    Bisect stats are stored in a layered_cache entity of the form below,
    where each tick is one week and count is the sum of failed or completed
    bisect runs in that week:

      {
        'failed': {
          bot_name: [[week_timestamp, count], [week_timestamp, count]],
        },
        'completed': {
          bot_name: [[week_timestamp, count], [week_timestamp, count]],
        }
      }

    Args:
      bot_name: Name of the bisect bot.
      status: Bisect status. Either 'failed' or 'completed'.
    """
    # TODO(chrisphan): Add stats for staled bisect.
    if status not in ('failed', 'completed'):
        return
    series_key = _GetSeriesNameFromBotName(bot_name)
    this_week = _GetLastMondayTimestamp()

    stats = layered_cache.GetExternal(_BISECT_STATS_CACHE_KEY) or {
        'failed': {},
        'completed': {},
    }
    series = stats[status].setdefault(series_key, [])
    if series and series[-1][0] == this_week:
        # Another run in the same week: bump the current tick's count.
        series[-1][1] += 1
    else:
        # First run recorded for this week: start a new tick.
        series.append([this_week, 1])
    layered_cache.SetExternal(_BISECT_STATS_CACHE_KEY, stats)
def UpdateOwnerFromChartjson(owner_dict):
    """Updates test owners with test owner data from chartjson.

    Checks if tests owners have changed by matching |owner_dict| with the
    stored owner dict for chartjson and update the master owner dict
    accordingly.

    Args:
      owner_dict: A dictionary of Master/Test suite to set of owners.
    """
    # Owners that newly appear / disappear relative to the chartjson cache,
    # keyed by test suite path.
    add_owner_dict = {}
    remove_owner_dict = {}
    owner_dict_cache = layered_cache.GetExternal(
        _CHARTJSON_OWNER_CACHE_KEY) or {}
    for path, owners in owner_dict.iteritems():
        # A None value means the suite declares no owners; treat it as an
        # empty set so the diff below removes everything cached.
        owners = owners or set()
        owners_cache = owner_dict_cache.get(path, set())
        if owners_cache:
            # Owners cached but no longer listed must be removed...
            diff = owners_cache - owners
            if diff:
                remove_owner_dict[path] = diff
            # ...and newly listed owners must be added.
            diff = owners - owners_cache
            if diff:
                add_owner_dict[path] = diff
        else:
            # Nothing cached for this path yet: every listed owner is new.
            add_owner_dict[path] = owners
        # Keep the chartjson cache itself in sync: store the new owner set,
        # or drop the path entirely when it no longer has owners.
        if owners:
            owner_dict_cache[path] = owners
        elif path in owner_dict_cache:
            del owner_dict_cache[path]
    # Only write anything back when something actually changed.
    if add_owner_dict or remove_owner_dict:
        layered_cache.SetExternal(_CHARTJSON_OWNER_CACHE_KEY, owner_dict_cache)
        if add_owner_dict:
            AddOwnerFromDict(add_owner_dict)
        if remove_owner_dict:
            RemoveOwnerFromDict(remove_owner_dict)
def _SetOwnersDict(self, owner_dict):
    """Stores |owner_dict| as the master owner dict in layered_cache."""
    layered_cache.SetExternal(test_owner._MASTER_OWNER_CACHE_KEY, owner_dict)
def _PostSuccessfulResult(job, issue_tracker):
    """Posts successful bisect results on the issue tracker.

    Adds a comment with the bisect report to the job's bug. When the
    culprit commit is already associated with another (non-duplicate)
    issue, the bug is merged into it and its anomalies are remapped;
    otherwise the culprit author is cc'd/assigned and the commit-to-bug
    mapping is cached.

    Args:
      job: A try job entity with results_data for a completed bisect.
      issue_tracker: Issue tracker service client used to update the bug.

    Raises:
      BugUpdateFailure: If the comment could not be added to the bug.
    """
    # From the results, get the list of people to CC (if applicable), the bug
    # to merge into (if applicable) and the commit hash cache key, which
    # will be used below.
    if job.bug_id < 0:
        return

    commit_cache_key = _GetCommitHashCacheKey(job.results_data)

    # Check to see if there's already an issue for this commit; if so we can
    # potentially merge the bugs.
    merge_issue = _GetMergeIssue(issue_tracker, commit_cache_key)

    # Check if we can duplicate this issue against an existing issue. We
    # won't duplicate against an issue that itself is already a duplicate
    # though. Could follow the whole chain through but we'll just keep
    # things simple and flat for now.
    merge_issue_id = None
    if merge_issue:
        if merge_issue.get('status') != issue_tracker_service.STATUS_DUPLICATE:
            merge_issue_id = str(merge_issue.get('id'))

    # Only skip cc'ing the authors if we're going to merge this into another
    # issue.
    authors_to_cc = []
    if not merge_issue_id:
        authors_to_cc = _GetAuthorsToCC(job.results_data)

    # Add a friendly message to author of culprit CL.
    owner, comment = _GetCulpritCLOwnerAndComment(job, authors_to_cc)

    # Set restrict view label if the bisect results are internal only.
    labels = ['Restrict-View-Google'] if job.internal_only else None
    comment_added = issue_tracker.AddBugComment(
        job.bug_id, comment, cc_list=authors_to_cc,
        merge_issue=merge_issue_id, labels=labels, owner=owner)
    if not comment_added:
        raise BugUpdateFailure('Failed to update bug %s with comment %s' %
                               (job.bug_id, comment))

    logging.info('Updated bug %s with results from %s',
                 job.bug_id, job.rietveld_issue_id)

    # If the issue we were going to merge into was itself a duplicate, we
    # don't dup against it but we also don't merge existing anomalies to it
    # or cache it.
    # NOTE: the duplicate check is guarded on merge_issue being truthy; the
    # original read merge_issue.get('status') unconditionally, which would
    # raise AttributeError if _GetMergeIssue returned None (no candidate).
    # For a dict-valued merge_issue the guarded form behaves identically.
    if (merge_issue and
            merge_issue.get('status') ==
            issue_tracker_service.STATUS_DUPLICATE):
        return

    if merge_issue_id:
        _MapAnomaliesToMergeIntoBug(merge_issue_id, job.bug_id)
        # Mark the duplicate bug's Bug entity status as closed so that it
        # doesn't get auto triaged.
        bug = ndb.Key('Bug', job.bug_id).get()
        if bug:
            bug.status = bug_data.BUG_STATUS_CLOSED
            bug.put()

    # Cache the commit info and bug ID to datastore when there is no
    # duplicate issue that this issue is getting merged into. This has to be
    # done only after the issue is updated successfully with bisect
    # information.
    if commit_cache_key and not merge_issue_id:
        layered_cache.SetExternal(commit_cache_key, str(job.bug_id),
                                  days_to_keep=30)
        logging.info('Cached bug id %s and commit info %s in the datastore.',
                     job.bug_id, commit_cache_key)
def testGetTestSuitePaths(self):
    """Returns the suite paths owned by the given email, in sorted order."""
    layered_cache.SetExternal(
        test_owner._MASTER_OWNER_CACHE_KEY, _SAMPLE_OWNER_DICT)

    self.assertEqual(
        ['ChromiumPerf/octane', 'ChromiumPerf/speedometer'],
        test_owner.GetTestSuitePaths('*****@*****.**'))
def testGetOwners(self):
    """Returns the combined owners for the given suite paths."""
    layered_cache.SetExternal(
        test_owner._MASTER_OWNER_CACHE_KEY, _SAMPLE_OWNER_DICT)

    self.assertEqual(
        ['*****@*****.**', '*****@*****.**'],
        test_owner.GetOwners(
            ['ChromiumPerf/speedometer', 'ChromiumPerf/octane']))