def test_is_wpt_notify_enabled_error(self):
    """An unusable executive output must be treated as notify-disabled."""
    # 'error' stands in for output that cannot be parsed as DIR_METADATA.
    self.host.executive = MockExecutive(output='error')
    owners_extractor = DirectoryOwnersExtractor(self.host)
    metadata_path = ABS_WPT_BASE + '/foo/DIR_METADATA'
    self.assertFalse(owners_extractor.is_wpt_notify_enabled(metadata_path))
def test_is_wpt_notify_enabled_false(self):
    """DIR_METADATA with wpt.notify == "NO" yields False."""
    dirmd_json = (
        '{"dirs":{"third_party/blink/web_tests/a/b":{"monorail":'
        '{"component":"foo"},"teamEmail":"bar","wpt":{"notify":"NO"}}}}')
    self.host.executive = MockExecutive(output=dirmd_json)
    owners_extractor = DirectoryOwnersExtractor(self.host)
    metadata_path = MOCK_WEB_TESTS + 'a/b/DIR_METADATA'
    self.assertFalse(owners_extractor.is_wpt_notify_enabled(metadata_path))
def test_is_wpt_notify_enabled_true(self):
    """DIR_METADATA with wpt.notify == "YES" yields True."""
    dirmd_json = (
        '{"dirs":{"a/b":{"monorail":'
        '{"component":"foo"},"teamEmail":"bar","wpt":{"notify":"YES"}}}}')
    self.host.executive = MockExecutive(output=dirmd_json)
    owners_extractor = DirectoryOwnersExtractor(self.host)
    metadata_path = MOCK_WEB_TESTS + 'a/b/DIR_METADATA'
    self.assertTrue(owners_extractor.is_wpt_notify_enabled(metadata_path))
def test_is_wpt_notify_enabled_with_dir_metadata_none(self):
    """With unusable dirmd output, the OWNERS '# WPT-NOTIFY:' line decides."""
    self.host.filesystem.files = {
        ABS_WPT_BASE + '/foo/OWNERS':
        '# COMPONENT: Blink>Layout\n# WPT-NOTIFY: true\n'
    }
    # Executive output that cannot be parsed as DIR_METADATA.
    self.host.executive = MockExecutive(output='error')
    owners_extractor = DirectoryOwnersExtractor(self.host)
    owners_path = ABS_WPT_BASE + '/foo/OWNERS'
    self.assertTrue(owners_extractor.is_wpt_notify_enabled(owners_path))
def test_is_wpt_notify_enabled_with_dir_metadata(self):
    """Returns True when dirmd reports notify: YES for the directory."""
    # An OWNERS file with legacy annotations is present as well; the
    # assertion only checks the overall True result.
    self.host.filesystem.files = {
        ABS_WPT_BASE + '/foo/OWNERS':
        '# TEAM: [email protected]\n'
        '# COMPONENT: Blink>Layout\n'
        '# WPT-NOTIFY: true\n'
    }
    dirmd_json = (
        '{"dirs":{"a/b":{"monorail":'
        '{"component":"foo"},"teamEmail":"bar","wpt":{"notify":"YES"}}}}')
    self.host.executive = MockExecutive(output=dirmd_json)
    owners_extractor = DirectoryOwnersExtractor(self.host)
    owners_path = MOCK_WEB_TESTS + 'a/b/OWNERS'
    self.assertTrue(owners_extractor.is_wpt_notify_enabled(owners_path))
class ImportNotifier(object):
    """Files Monorail bugs for new WPT failures introduced by an import CL.

    New failures are collected per owned directory (from changed baselines and
    from new TestExpectations lines), then turned into one Monorail issue per
    directory whose DIR_METADATA enables WPT-NOTIFY.
    """

    def __init__(self, host, chromium_git, local_wpt):
        self.host = host
        self.git = chromium_git
        self.local_wpt = local_wpt
        # Stored as the class, not an instance: _get_monorail_api instantiates
        # it lazily (tests can swap in a fake before any network use).
        self._monorail_api = MonorailAPI
        self.default_port = host.port_factory.get()
        self.finder = PathFinder(host.filesystem)
        self.owners_extractor = DirectoryOwnersExtractor(host)
        # Maps a web_tests-relative directory to the TestFailures found in it.
        self.new_failures_by_directory = defaultdict(list)
        # Per-product Monorail components/labels used by create_bugs_for_product.
        self.components_for_product = {ANDROID_WEBLAYER: ["Internals>WebLayer"]}
        self.labels_for_product = {
            ANDROID_WEBLAYER:
            ["Project-WebLayer-WebPlatformSupport", "WL-WPT-Compat"]
        }

    def main(self,
             wpt_revision_start,
             wpt_revision_end,
             rebaselined_tests,
             test_expectations,
             new_override_expectations,
             issue,
             patchset,
             dry_run=True,
             service_account_key_json=None):
        """Files bug reports for new failures.

        Args:
            wpt_revision_start: The start of the imported WPT revision range
                (exclusive), i.e. the last imported revision.
            wpt_revision_end: The end of the imported WPT revision range
                (inclusive), i.e. the current imported revision.
            rebaselined_tests: A list of test names that have been rebaselined.
            test_expectations: A dictionary mapping names of tests that cannot
                be rebaselined to a list of new test expectation lines.
            new_override_expectations: A dictionary mapping a product (e.g.
                ANDROID_WEBLAYER) to its new expectation lines.
            issue: The issue number of the import CL (a string).
            patchset: The patchset number of the import CL (a string).
            dry_run: If True, no bugs will be actually filed to crbug.com.
            service_account_key_json: The path to a JSON private key of a
                service account for accessing Monorail. If None, try to get an
                access token from luci-auth.

        Note: "test names" are paths of the tests relative to web_tests.
        """
        gerrit_url = SHORT_GERRIT_PREFIX + issue
        gerrit_url_with_ps = gerrit_url + '/' + patchset + '/'
        # Populate new_failures_by_directory from both failure sources.
        changed_test_baselines = self.find_changed_baselines_of_tests(
            rebaselined_tests)
        self.examine_baseline_changes(changed_test_baselines,
                                      gerrit_url_with_ps)
        self.examine_new_test_expectations(test_expectations)
        bugs = self.create_bugs_from_new_failures(wpt_revision_start,
                                                  wpt_revision_end, gerrit_url)
        self.file_bugs(bugs, dry_run, service_account_key_json)
        # Product-specific override expectations get their own bugs.
        for product, expectation_lines in new_override_expectations.items():
            bugs = self.create_bugs_for_product(wpt_revision_start,
                                                wpt_revision_end, gerrit_url,
                                                product, expectation_lines)
            self.file_bugs(bugs, dry_run, service_account_key_json)

    def find_changed_baselines_of_tests(self, rebaselined_tests):
        """Finds the corresponding changed baselines of each test.

        Args:
            rebaselined_tests: A list of test names that have been rebaselined.

        Returns:
            A dictionary mapping test names to paths of their baselines changed
            in this import CL (paths relative to the root of Chromium repo).
        """
        test_baselines = {}
        changed_files = self.git.changed_files()
        for test_name in rebaselined_tests:
            test_without_ext, _ = self.host.filesystem.splitext(test_name)
            changed_baselines = []
            # TODO(robertma): Refactor this into web_tests.port.base.
            baseline_name = test_without_ext + '-expected.txt'
            # Suffix match: catches the baseline in any platform directory.
            for changed_file in changed_files:
                if changed_file.endswith(baseline_name):
                    changed_baselines.append(changed_file)
            if changed_baselines:
                test_baselines[test_name] = changed_baselines
        return test_baselines

    def examine_baseline_changes(self, changed_test_baselines,
                                 gerrit_url_with_ps):
        """Examines all changed baselines to find new failures.

        Args:
            changed_test_baselines: A dictionary mapping test names to paths of
                changed baselines.
            gerrit_url_with_ps: Gerrit URL of this CL with the patchset number.
        """
        for test_name, changed_baselines in changed_test_baselines.items():
            directory = self.find_owned_directory(test_name)
            if not directory:
                _log.warning('Cannot find OWNERS of %s', test_name)
                continue
            for baseline in changed_baselines:
                if self.more_failures_in_baseline(baseline):
                    self.new_failures_by_directory[directory].append(
                        TestFailure(
                            TestFailure.BASELINE_CHANGE,
                            test_name,
                            baseline_path=baseline,
                            gerrit_url_with_ps=gerrit_url_with_ps))

    def more_failures_in_baseline(self, baseline):
        """Determines if a testharness.js baseline file has new failures.

        The file is assumed to have been modified in the current git checkout,
        and so has a diff we can parse.

        We recognize two types of failures: FAIL lines, which are output for a
        specific subtest failing, and harness errors, which indicate an uncaught
        error in the test. Increasing numbers of either are considered new
        failures - this includes going from FAIL to error or vice-versa.
        """
        diff = self.git.run(['diff', '-U0', 'origin/main', '--', baseline])
        delta_failures = 0
        delta_harness_errors = 0
        # Count added (+) minus removed (-) occurrences of each marker.
        for line in diff.splitlines():
            if line.startswith('+FAIL'):
                delta_failures += 1
            if line.startswith('-FAIL'):
                delta_failures -= 1
            if line.startswith('+Harness Error.'):
                delta_harness_errors += 1
            if line.startswith('-Harness Error.'):
                delta_harness_errors -= 1
        return delta_failures > 0 or delta_harness_errors > 0

    def examine_new_test_expectations(self, test_expectations):
        """Examines new test expectations to find new failures.

        Args:
            test_expectations: A dictionary mapping names of tests that cannot
                be rebaselined to a list of new test expectation lines.
        """
        for test_name, expectation_lines in test_expectations.items():
            directory = self.find_owned_directory(test_name)
            if not directory:
                _log.warning('Cannot find OWNERS of %s', test_name)
                continue
            for expectation_line in expectation_lines:
                self.new_failures_by_directory[directory].append(
                    TestFailure(
                        TestFailure.NEW_EXPECTATION,
                        test_name,
                        expectation_line=expectation_line))

    def create_bugs_for_product(self, wpt_revision_start, wpt_revision_end,
                                gerrit_url, product, expectation_lines):
        """Files bug reports for new failures per product

        Args:
            wpt_revision_start: The start of the imported WPT revision range
                (exclusive), i.e. the last imported revision.
            wpt_revision_end: The end of the imported WPT revision range
                (inclusive), i.e. the current imported revision.
            gerrit_url: Gerrit URL of the CL.
            product: the product for which to file bugs for.
            expectation_lines: list of new expectations for this product

        Return:
            A list containing a single MonorailIssue that should be filed.
        """
        bugs = []
        summary = '[WPT] New failures introduced by import {}'.format(
            gerrit_url)
        prologue = ('WPT import {} introduced new failures:\n\n'
                    'List of new failures:\n'.format(gerrit_url))
        failure_list = ''
        for _, failure in expectation_lines.items():
            failure_list += str(failure) + '\n'
        expectations_statement = (
            '\nExpectations have been automatically added for '
            'the failing results to keep the bots green. Please '
            'investigate the new failures and triage as appropriate.\n')
        range_statement = '\nThis import contains upstream changes from {} to {}:\n'.format(
            wpt_revision_start, wpt_revision_end)
        description = (prologue + failure_list + expectations_statement +
                       range_statement)
        # Component/label selection is driven by the per-product tables set up
        # in __init__.
        bug = MonorailIssue.new_chromium_issue(
            summary,
            description,
            cc=[],
            components=self.components_for_product[product],
            labels=self.labels_for_product[product])
        bugs.append(bug)
        return bugs

    def create_bugs_from_new_failures(self, wpt_revision_start,
                                      wpt_revision_end, gerrit_url):
        """Files bug reports for new failures.

        Args:
            wpt_revision_start: The start of the imported WPT revision range
                (exclusive), i.e. the last imported revision.
            wpt_revision_end: The end of the imported WPT revision range
                (inclusive), i.e. the current imported revision.
            gerrit_url: Gerrit URL of the CL.

        Return:
            A list of MonorailIssue objects that should be filed.
        """
        imported_commits = self.local_wpt.commits_in_range(
            wpt_revision_start, wpt_revision_end)
        bugs = []
        for directory, failures in self.new_failures_by_directory.items():
            summary = '[WPT] New failures introduced in {} by import {}'.format(
                directory, gerrit_url)
            full_directory = self.host.filesystem.join(
                self.finder.web_tests_dir(), directory)
            owners_file = self.host.filesystem.join(full_directory, 'OWNERS')
            metadata_file = self.host.filesystem.join(full_directory,
                                                      'DIR_METADATA')
            # A KeyError from the DIR_METADATA lookup is logged and treated
            # the same as notify-disabled.
            is_wpt_notify_enabled = False
            try:
                is_wpt_notify_enabled = self.owners_extractor.is_wpt_notify_enabled(
                    metadata_file)
            except KeyError:
                _log.info('KeyError when parsing %s' % metadata_file)
            if not is_wpt_notify_enabled:
                _log.info("WPT-NOTIFY disabled in %s." % full_directory)
                continue
            owners = self.owners_extractor.extract_owners(owners_file)
            # owners may be empty but not None.
            cc = owners
            component = self.owners_extractor.extract_component(metadata_file)
            # component could be None.
            components = [component] if component else None
            prologue = ('WPT import {} introduced new failures in {}:\n\n'
                        'List of new failures:\n'.format(
                            gerrit_url, directory))
            failure_list = ''
            for failure in failures:
                failure_list += str(failure) + '\n'
            expectations_statement = (
                '\nExpectations or baseline files [0] have been automatically '
                'added for the failing results to keep the bots green. Please '
                'investigate the new failures and triage as appropriate.\n')
            range_statement = '\nThis import contains upstream changes from {} to {}:\n'.format(
                wpt_revision_start, wpt_revision_end)
            commit_list = self.format_commit_list(imported_commits,
                                                  full_directory)
            links_list = '\n[0]: https://chromium.googlesource.com/chromium/src/+/HEAD/docs/testing/web_test_expectations.md\n'
            description = (prologue + failure_list + expectations_statement +
                           range_statement + commit_list + links_list)
            bug = MonorailIssue.new_chromium_issue(summary,
                                                   description,
                                                   cc,
                                                   components,
                                                   labels=['Test-WebTest'])
            _log.info(bug)
            _log.info(
                "WPT-NOTIFY enabled in %s; adding the bug to the pending list."
                % full_directory)
            bugs.append(bug)
        return bugs

    def format_commit_list(self, imported_commits, directory):
        """Formats the list of imported WPT commits.

        Imports affecting the given directory will be highlighted.

        Args:
            imported_commits: A list of (SHA, commit subject) pairs.
            directory: An absolute path of a directory in the Chromium repo, for
                which the list is formatted.

        Returns:
            A multi-line string.
        """
        path_from_wpt = self.host.filesystem.relpath(
            directory, self.finder.path_from_web_tests('external', 'wpt'))
        commit_list = ''
        for sha, subject in imported_commits:
            # subject is a Unicode string and can contain non-ASCII characters.
            line = u'{}: {}'.format(subject, GITHUB_COMMIT_PREFIX + sha)
            if self.local_wpt.is_commit_affecting_directory(
                    sha, path_from_wpt):
                line += ' [affecting this directory]'
            commit_list += line + '\n'
        return commit_list

    def find_owned_directory(self, test_name):
        """Finds the lowest directory that contains the test and has OWNERS.

        Args:
            The name of the test (a path relative to web_tests).

        Returns:
            The path of the found directory relative to web_tests.
        """
        # Always use non-virtual test names when looking up OWNERS.
        if self.default_port.lookup_virtual_test_base(test_name):
            test_name = self.default_port.lookup_virtual_test_base(test_name)
        # find_owners_file takes either a relative path from the *root* of the
        # repository, or an absolute path.
        abs_test_path = self.finder.path_from_web_tests(test_name)
        owners_file = self.owners_extractor.find_owners_file(
            self.host.filesystem.dirname(abs_test_path))
        if not owners_file:
            return None
        owned_directory = self.host.filesystem.dirname(owners_file)
        short_directory = self.host.filesystem.relpath(
            owned_directory, self.finder.web_tests_dir())
        return short_directory

    def file_bugs(self, bugs, dry_run, service_account_key_json=None):
        """Files a list of bugs to Monorail.

        Args:
            bugs: A list of MonorailIssue objects.
            dry_run: A boolean, whether we are in dry run mode.
            service_account_key_json: Optional, see docs for main().
        """
        # TODO(robertma): Better error handling in this method.
        if dry_run:
            _log.info(
                '[dry_run] Would have filed the %d bugs in the pending list.',
                len(bugs))
            return
        _log.info('Filing %d bugs in the pending list to Monorail', len(bugs))
        api = self._get_monorail_api(service_account_key_json)
        for index, bug in enumerate(bugs, start=1):
            response = api.insert_issue(bug)
            _log.info('[%d] Filed bug: %s', index,
                      MonorailIssue.crbug_link(response['id']))

    def _get_monorail_api(self, service_account_key_json):
        # Prefer an explicit service-account key; otherwise fall back to a
        # luci-auth access token.
        if service_account_key_json:
            return self._monorail_api(
                service_account_key_json=service_account_key_json)
        token = LuciAuth(self.host).get_access_token()
        return self._monorail_api(access_token=token)
class ImportNotifier(object):
    """Files Monorail bugs for new WPT failures introduced by an import CL.

    NOTE(review): this appears to be an older, Python-2-era revision of the
    ImportNotifier defined earlier in this file (it uses dict.iteritems(),
    'origin/master', LayoutTests paths and OWNERS-based WPT-NOTIFY parsing) —
    confirm whether both copies are intentionally kept.
    """

    def __init__(self, host, chromium_git, local_wpt):
        self.host = host
        self.git = chromium_git
        self.local_wpt = local_wpt
        self.default_port = host.port_factory.get()
        self.finder = PathFinder(host.filesystem)
        # This revision constructs the extractor from the filesystem, not the
        # host object.
        self.owners_extractor = DirectoryOwnersExtractor(host.filesystem)
        # Maps a LayoutTests-relative directory to the TestFailures found in it.
        self.new_failures_by_directory = defaultdict(list)

    def main(self,
             wpt_revision_start,
             wpt_revision_end,
             rebaselined_tests,
             test_expectations,
             issue,
             patchset,
             dry_run=True,
             service_account_key_json=None):
        """Files bug reports for new failures.

        Args:
            wpt_revision_start: The start of the imported WPT revision range
                (exclusive), i.e. the last imported revision.
            wpt_revision_end: The end of the imported WPT revision range
                (inclusive), i.e. the current imported revision.
            rebaselined_tests: A list of test names that have been rebaselined.
            test_expectations: A dictionary mapping names of tests that cannot
                be rebaselined to a list of new test expectation lines.
            issue: The issue number of the import CL (a string).
            patchset: The patchset number of the import CL (a string).
            dry_run: If True, no bugs will be actually filed to crbug.com.
            service_account_key_json: The path to a JSON private key of a
                service account for accessing Monorail. If None, try to load
                from the default location, i.e. the path stored in the
                environment variable GOOGLE_APPLICATION_CREDENTIALS.

        Note: "test names" are paths of the tests relative to LayoutTests.
        """
        gerrit_url = SHORT_GERRIT_PREFIX + issue
        gerrit_url_with_ps = gerrit_url + '/' + patchset + '/'
        # Populate new_failures_by_directory from both failure sources.
        changed_test_baselines = self.find_changed_baselines_of_tests(
            rebaselined_tests)
        self.examine_baseline_changes(changed_test_baselines,
                                      gerrit_url_with_ps)
        self.examine_new_test_expectations(test_expectations)
        bugs = self.create_bugs_from_new_failures(wpt_revision_start,
                                                  wpt_revision_end, gerrit_url)
        self.file_bugs(bugs, dry_run, service_account_key_json)

    def find_changed_baselines_of_tests(self, rebaselined_tests):
        """Finds the corresponding changed baselines of each test.

        Args:
            rebaselined_tests: A list of test names that have been rebaselined.

        Returns:
            A dictionary mapping test names to paths of their baselines changed
            in this import CL (paths relative to the root of Chromium repo).
        """
        test_baselines = {}
        changed_files = self.git.changed_files()
        for test_name in rebaselined_tests:
            test_without_ext, _ = self.host.filesystem.splitext(test_name)
            changed_baselines = []
            # TODO(robertma): Refactor this into layout_tests.port.base.
            baseline_name = test_without_ext + '-expected.txt'
            # Suffix match: catches the baseline in any platform directory.
            for changed_file in changed_files:
                if changed_file.endswith(baseline_name):
                    changed_baselines.append(changed_file)
            if changed_baselines:
                test_baselines[test_name] = changed_baselines
        return test_baselines

    def examine_baseline_changes(self, changed_test_baselines,
                                 gerrit_url_with_ps):
        """Examines all changed baselines to find new failures.

        Args:
            changed_test_baselines: A dictionary mapping test names to paths of
                changed baselines.
            gerrit_url_with_ps: Gerrit URL of this CL with the patchset number.
        """
        # NOTE(review): iteritems() is Python-2-only.
        for test_name, changed_baselines in changed_test_baselines.iteritems():
            directory = self.find_owned_directory(test_name)
            if not directory:
                _log.warning('Cannot find OWNERS of %s', test_name)
                continue
            for baseline in changed_baselines:
                if self.more_failures_in_baseline(baseline):
                    self.new_failures_by_directory[directory].append(
                        TestFailure(TestFailure.BASELINE_CHANGE,
                                    test_name,
                                    baseline_path=baseline,
                                    gerrit_url_with_ps=gerrit_url_with_ps))

    def more_failures_in_baseline(self, baseline):
        """Returns True if the diff of a baseline adds more FAIL lines than it
        removes (net increase of subtest failures)."""
        diff = self.git.run(['diff', '-U0', 'origin/master', '--', baseline])
        delta_failures = 0
        # Count added (+) minus removed (-) FAIL lines in the diff.
        for line in diff.splitlines():
            if line.startswith('+FAIL'):
                delta_failures += 1
            if line.startswith('-FAIL'):
                delta_failures -= 1
        return delta_failures > 0

    def examine_new_test_expectations(self, test_expectations):
        """Examines new test expectations to find new failures.

        Args:
            test_expectations: A dictionary mapping names of tests that cannot
                be rebaselined to a list of new test expectation lines.
        """
        for test_name, expectation_lines in test_expectations.iteritems():
            directory = self.find_owned_directory(test_name)
            if not directory:
                _log.warning('Cannot find OWNERS of %s', test_name)
                continue
            for expectation_line in expectation_lines:
                self.new_failures_by_directory[directory].append(
                    TestFailure(TestFailure.NEW_EXPECTATION,
                                test_name,
                                expectation_line=expectation_line))

    def create_bugs_from_new_failures(self, wpt_revision_start,
                                      wpt_revision_end, gerrit_url):
        """Files bug reports for new failures.

        Args:
            wpt_revision_start: The start of the imported WPT revision range
                (exclusive), i.e. the last imported revision.
            wpt_revision_end: The end of the imported WPT revision range
                (inclusive), i.e. the current imported revision.
            gerrit_url: Gerrit URL of the CL.

        Return:
            A list of MonorailIssue objects that should be filed.
        """
        imported_commits = self.local_wpt.commits_in_range(
            wpt_revision_start, wpt_revision_end)
        bugs = []
        for directory, failures in self.new_failures_by_directory.iteritems():
            summary = '[WPT] New failures introduced in {} by import {}'.format(
                directory, gerrit_url)
            full_directory = self.host.filesystem.join(
                self.finder.layout_tests_dir(), directory)
            owners_file = self.host.filesystem.join(full_directory, 'OWNERS')
            # This revision reads WPT-NOTIFY from the OWNERS file itself.
            is_wpt_notify_enabled = self.owners_extractor.is_wpt_notify_enabled(
                owners_file)
            owners = self.owners_extractor.extract_owners(owners_file)
            # owners may be empty but not None.
            cc = owners + ['*****@*****.**']
            component = self.owners_extractor.extract_component(owners_file)
            # component could be None.
            components = [component] if component else None
            prologue = ('WPT import {} introduced new failures in {}:\n\n'
                        'List of new failures:\n'.format(
                            gerrit_url, directory))
            failure_list = ''
            for failure in failures:
                failure_list += str(failure) + '\n'
            epilogue = '\nThis import contains upstream changes from {} to {}:\n'.format(
                wpt_revision_start, wpt_revision_end)
            commit_list = self.format_commit_list(imported_commits,
                                                  full_directory)
            description = prologue + failure_list + epilogue + commit_list
            bug = MonorailIssue.new_chromium_issue(summary, description, cc,
                                                   components)
            _log.info(str(bug))
            # Bugs are always constructed and logged; only WPT-NOTIFY
            # directories get theirs queued for filing.
            if is_wpt_notify_enabled:
                _log.info(
                    "WPT-NOTIFY enabled in this directory; adding the bug to the pending list."
                )
                bugs.append(bug)
            else:
                _log.info(
                    "WPT-NOTIFY disabled in this directory; discarding the bug."
                )
        return bugs

    def format_commit_list(self, imported_commits, directory):
        """Formats the list of imported WPT commits.

        Imports affecting the given directory will be highlighted.

        Args:
            imported_commits: A list of (SHA, commit subject) pairs.
            directory: An absolute path of a directory in the Chromium repo, for
                which the list is formatted.

        Returns:
            A multi-line string.
        """
        path_from_wpt = self.host.filesystem.relpath(
            directory, self.finder.path_from_layout_tests('external', 'wpt'))
        commit_list = ''
        for sha, subject in imported_commits:
            # subject is a Unicode string and can contain non-ASCII characters.
            line = u'{}: {}'.format(subject, GITHUB_COMMIT_PREFIX + sha)
            if self.local_wpt.is_commit_affecting_directory(
                    sha, path_from_wpt):
                line += ' [affecting this directory]'
            commit_list += line + '\n'
        return commit_list

    def find_owned_directory(self, test_name):
        """Finds the lowest directory that contains the test and has OWNERS.

        Args:
            The name of the test (a path relative to LayoutTests).

        Returns:
            The path of the found directory relative to LayoutTests.
        """
        # Always use non-virtual test names when looking up OWNERS.
        if self.default_port.lookup_virtual_test_base(test_name):
            test_name = self.default_port.lookup_virtual_test_base(test_name)
        # find_owners_file takes either a relative path from the *root* of the
        # repository, or an absolute path.
        abs_test_path = self.finder.path_from_layout_tests(test_name)
        owners_file = self.owners_extractor.find_owners_file(
            self.host.filesystem.dirname(abs_test_path))
        if not owners_file:
            return None
        owned_directory = self.host.filesystem.dirname(owners_file)
        short_directory = self.host.filesystem.relpath(
            owned_directory, self.finder.layout_tests_dir())
        return short_directory

    def file_bugs(self, bugs, dry_run, service_account_key_json=None):
        """Files a list of bugs to Monorail.

        Args:
            bugs: A list of MonorailIssue objects.
            dry_run: A boolean, whether we are in dry run mode.
            service_account_key_json: Optional, see docs for main().
        """
        # TODO(robertma): Better error handling in this method.
        if dry_run:
            _log.info(
                '[dry_run] Would have filed the %d bugs in the pending list.',
                len(bugs))
            return
        _log.info('Filing %d bugs in the pending list to Monorail', len(bugs))
        api = self._get_monorail_api(service_account_key_json)
        for index, bug in enumerate(bugs, start=1):
            response = api.insert_issue(bug)
            _log.info('[%d] Filed bug: %s', index,
                      MonorailIssue.crbug_link(response['id']))

    def _get_monorail_api(self, service_account_key_json):
        # No luci-auth fallback in this revision: the key path (possibly None)
        # is passed straight through to MonorailAPI.
        return MonorailAPI(service_account_key_json=service_account_key_json)
class DirectoryOwnersExtractorTest(unittest.TestCase):
    """Unit tests for DirectoryOwnersExtractor (OWNERS + DIR_METADATA)."""

    def setUp(self):
        # We always have an OWNERS file at web_tests/external.
        self.host = MockHost()
        self.host.filesystem = MockFileSystem(
            files={MOCK_WEB_TESTS + 'external/OWNERS': '*****@*****.**'})
        self.extractor = DirectoryOwnersExtractor(self.host)

    def _write_files(self, files):
        # Use write_text_file instead of directly assigning to filesystem.files
        # so that intermediary directories are correctly created, too.
        # Fix: dict.iteritems() was removed in Python 3; items() works on both.
        for path, contents in files.items():
            self.host.filesystem.write_text_file(path, contents)

    def test_list_owners_combines_same_owners(self):
        self._write_files({
            ABS_WPT_BASE + '/foo/x.html': '',
            ABS_WPT_BASE + '/foo/OWNERS':
            '[email protected]\[email protected]\n',
            ABS_WPT_BASE + '/bar/x/y.html': '',
            ABS_WPT_BASE + '/bar/OWNERS':
            '[email protected]\[email protected]\n',
        })
        changed_files = [
            REL_WPT_BASE + '/foo/x.html',
            REL_WPT_BASE + '/bar/x/y.html',
        ]
        # Directories sharing the same owner tuple are grouped together.
        self.assertEqual(
            self.extractor.list_owners(changed_files), {
                ('*****@*****.**', '*****@*****.**'):
                ['external/wpt/bar', 'external/wpt/foo']
            })

    def test_list_owners_combines_same_directory(self):
        self._write_files({
            ABS_WPT_BASE + '/baz/x/y.html': '',
            ABS_WPT_BASE + '/baz/x/y/z.html': '',
            ABS_WPT_BASE + '/baz/x/OWNERS': '[email protected]\n',
        })
        changed_files = [
            REL_WPT_BASE + '/baz/x/y.html',
            REL_WPT_BASE + '/baz/x/y/z.html',
        ]
        self.assertEqual(self.extractor.list_owners(changed_files),
                         {('*****@*****.**', ): ['external/wpt/baz/x']})

    def test_list_owners_skips_empty_owners(self):
        self._write_files({
            ABS_WPT_BASE + '/baz/x/y/z.html': '',
            # A comment-only OWNERS file should be skipped in favor of the
            # ancestor's.
            ABS_WPT_BASE + '/baz/x/y/OWNERS': '# Some comments\n',
            ABS_WPT_BASE + '/baz/x/OWNERS': '[email protected]\n',
        })
        changed_files = [
            REL_WPT_BASE + '/baz/x/y/z.html',
        ]
        self.assertEqual(self.extractor.list_owners(changed_files),
                         {('*****@*****.**', ): ['external/wpt/baz/x']})

    def test_list_owners_not_found(self):
        self._write_files({
            # Although web_tests/external/OWNERS exists, it should not be
            # listed.
            ABS_WPT_BASE + '/foo/bar.html': '',
            # Files out of external.
            '/mock-checkout/' + RELATIVE_WEB_TESTS + 'TestExpectations': '',
            '/mock-checkout/' + RELATIVE_WEB_TESTS + 'OWNERS':
            '*****@*****.**',
        })
        changed_files = [
            REL_WPT_BASE + '/foo/bar.html',
            RELATIVE_WEB_TESTS + 'TestExpectations',
        ]
        self.assertEqual(self.extractor.list_owners(changed_files), {})

    def test_find_owners_file_at_current_dir(self):
        self._write_files({ABS_WPT_BASE + '/foo/OWNERS': '*****@*****.**'})
        self.assertEqual(
            self.extractor.find_owners_file(REL_WPT_BASE + '/foo'),
            ABS_WPT_BASE + '/foo/OWNERS')

    def test_find_owners_file_at_ancestor(self):
        self._write_files({
            ABS_WPT_BASE + '/x/OWNERS': '*****@*****.**',
            ABS_WPT_BASE + '/x/y/z.html': '',
        })
        self.assertEqual(
            self.extractor.find_owners_file(REL_WPT_BASE + '/x/y'),
            ABS_WPT_BASE + '/x/OWNERS')

    def test_find_owners_file_stops_at_external_root(self):
        # The search never ascends above web_tests/external.
        self._write_files({
            ABS_WPT_BASE + '/x/y/z.html': '',
        })
        self.assertEqual(
            self.extractor.find_owners_file(REL_WPT_BASE + '/x/y'),
            MOCK_WEB_TESTS + 'external/OWNERS')

    def test_find_owners_file_takes_four_kinds_of_paths(self):
        owners_path = ABS_WPT_BASE + '/foo/OWNERS'
        self._write_files({
            owners_path: '*****@*****.**',
            ABS_WPT_BASE + '/foo/bar.html': '',
        })
        # Absolute paths of directories.
        self.assertEqual(
            self.extractor.find_owners_file(ABS_WPT_BASE + '/foo'),
            owners_path)
        # Relative paths of directories.
        self.assertEqual(
            self.extractor.find_owners_file(REL_WPT_BASE + '/foo'),
            owners_path)
        # Absolute paths of files.
        self.assertEqual(
            self.extractor.find_owners_file(ABS_WPT_BASE + '/foo/bar.html'),
            owners_path)
        # Relative paths of files.
        self.assertEqual(
            self.extractor.find_owners_file(REL_WPT_BASE + '/foo/bar.html'),
            owners_path)

    def test_find_owners_file_out_of_external(self):
        self._write_files({
            '/mock-checkout/' + RELATIVE_WEB_TESTS + 'OWNERS':
            '*****@*****.**',
            '/mock-checkout/' + RELATIVE_WEB_TESTS + 'other/some_file': '',
        })
        self.assertIsNone(
            self.extractor.find_owners_file(RELATIVE_WEB_TESTS[:-1]))
        self.assertIsNone(
            self.extractor.find_owners_file(RELATIVE_WEB_TESTS + 'other'))
        self.assertIsNone(self.extractor.find_owners_file('third_party'))

    def test_extract_owners(self):
        # Comments, globs, and non-email lines are ignored.
        self.host.filesystem.files = {
            ABS_WPT_BASE + '/foo/OWNERS':
            '#This is a comment\n'
            '*\n'
            '[email protected]\n'
            '[email protected]\n'
            'foobar\n'
            '#[email protected]\n'
            '# TEAM: [email protected]\n'
            '# COMPONENT: Blink>Layout\n'
        }
        self.assertEqual(
            self.extractor.extract_owners(ABS_WPT_BASE + '/foo/OWNERS'),
            ['*****@*****.**', '*****@*****.**'])

    def test_extract_component(self):
        self.host.filesystem.files = {
            ABS_WPT_BASE + '/foo/OWNERS':
            '# TEAM: [email protected]\n'
            '# COMPONENT: Blink>Layout\n'
        }
        self.assertEqual(
            self.extractor.extract_component(ABS_WPT_BASE + '/foo/OWNERS'),
            'Blink>Layout')

    def test_is_wpt_notify_enabled_true(self):
        self.host.filesystem.files = {
            ABS_WPT_BASE + '/foo/OWNERS':
            '# COMPONENT: Blink>Layout\n'
            '# WPT-NOTIFY: true\n'
        }
        self.assertTrue(
            self.extractor.is_wpt_notify_enabled(ABS_WPT_BASE +
                                                 '/foo/OWNERS'))

    def test_is_wpt_notify_enabled_false(self):
        self.host.filesystem.files = {
            ABS_WPT_BASE + '/foo/OWNERS':
            '# COMPONENT: Blink>Layout\n'
            '# WPT-NOTIFY: false\n'
        }
        self.assertFalse(
            self.extractor.is_wpt_notify_enabled(ABS_WPT_BASE +
                                                 '/foo/OWNERS'))

    def test_is_wpt_notify_enabled_absence_is_false(self):
        self.host.filesystem.files = {
            ABS_WPT_BASE + '/foo/OWNERS':
            '# TEAM: [email protected]\n'
            '# COMPONENT: Blink>Layout\n'
        }
        self.assertFalse(
            self.extractor.is_wpt_notify_enabled(ABS_WPT_BASE +
                                                 '/foo/OWNERS'))

    def test_is_wpt_notify_enabled_with_dir_metadata(self):
        self.host.filesystem.files = {
            ABS_WPT_BASE + '/foo/OWNERS':
            '# TEAM: [email protected]\n'
            '# COMPONENT: Blink>Layout\n'
            '# WPT-NOTIFY: true\n'
        }
        data = (
            '{"dirs":{"a/b":{"monorail":'
            '{"component":"foo"},"teamEmail":"bar","wpt":{"notify":"YES"}}}}')
        self.host.executive = MockExecutive(output=data)
        extractor = DirectoryOwnersExtractor(self.host)
        self.assertTrue(
            extractor.is_wpt_notify_enabled(MOCK_WEB_TESTS + 'a/b/OWNERS'))

    def test_is_wpt_notify_enabled_with_dir_metadata_none(self):
        # Unusable dirmd output falls back to the OWNERS WPT-NOTIFY line.
        self.host.filesystem.files = {
            ABS_WPT_BASE + '/foo/OWNERS':
            '# COMPONENT: Blink>Layout\n'
            '# WPT-NOTIFY: true\n'
        }
        self.host.executive = MockExecutive(output='error')
        extractor = DirectoryOwnersExtractor(self.host)
        self.assertTrue(
            extractor.is_wpt_notify_enabled(ABS_WPT_BASE + '/foo/OWNERS'))

    def test_extract_component_with_dir_metadata(self):
        data = (
            '{"dirs":{"a/b":{"monorail":'
            '{"component":"foo"},"teamEmail":"bar","wpt":{"notify":"YES"}}}}')
        self.host.executive = MockExecutive(output=data)
        extractor = DirectoryOwnersExtractor(self.host)
        self.assertEqual(
            extractor.extract_component(MOCK_WEB_TESTS + 'a/b/OWNERS'), 'foo')

    def test_read_dir_metadata_success(self):
        data = (
            '{"dirs":{"a/b":{"monorail":'
            '{"component":"foo"},"teamEmail":"bar","wpt":{"notify":"YES"}}}}')
        self.host.executive = MockExecutive(output=data)
        extractor = DirectoryOwnersExtractor(self.host)
        wpt_dir_metadata = extractor._read_dir_metadata(MOCK_WEB_TESTS +
                                                        'a/b/OWNERS')
        # The dirmd invocation is rooted at web_tests and targets the
        # OWNERS file's directory.
        self.assertEqual(self.host.executive.full_calls[0].args, [
            'dirmd', 'compute', '-root', MOCK_WEB_TESTS_WITHOUT_SLASH,
            MOCK_WEB_TESTS + 'a/b'
        ])
        self.assertEqual(wpt_dir_metadata.team_email, 'bar')
        self.assertEqual(wpt_dir_metadata.should_notify, True)
        self.assertEqual(wpt_dir_metadata.component, 'foo')

    def test_read_dir_metadata_none(self):
        self.host.executive = MockExecutive(output='error')
        extractor = DirectoryOwnersExtractor(self.host)
        wpt_dir_metadata = extractor._read_dir_metadata(MOCK_WEB_TESTS +
                                                        'a/b/OWNERS')
        self.assertEqual(self.host.executive.full_calls[0].args, [
            'dirmd', 'compute', '-root', MOCK_WEB_TESTS_WITHOUT_SLASH,
            MOCK_WEB_TESTS + 'a/b'
        ])
        self.assertEqual(wpt_dir_metadata, None)
class DirectoryOwnersExtractorTest(unittest.TestCase): def setUp(self): # We always have an OWNERS file at LayoutTests/external. self.filesystem = MockFileSystem( files={ '/mock-checkout/third_party/WebKit/LayoutTests/external/OWNERS': '*****@*****.**' }) self.extractor = DirectoryOwnersExtractor(self.filesystem) def _write_files(self, files): # Use write_text_file instead of directly assigning to filesystem.files # so that intermediary directories are correctly created, too. for path, contents in files.iteritems(): self.filesystem.write_text_file(path, contents) def test_list_owners_combines_same_owners(self): self._write_files({ ABS_WPT_BASE + '/foo/x.html': '', ABS_WPT_BASE + '/foo/OWNERS': '[email protected]\[email protected]\n', ABS_WPT_BASE + '/bar/x/y.html': '', ABS_WPT_BASE + '/bar/OWNERS': '[email protected]\[email protected]\n', }) changed_files = [ REL_WPT_BASE + '/foo/x.html', REL_WPT_BASE + '/bar/x/y.html', ] self.assertEqual( self.extractor.list_owners(changed_files), { ('*****@*****.**', '*****@*****.**'): ['external/wpt/bar', 'external/wpt/foo'] }) def test_list_owners_combines_same_directory(self): self._write_files({ ABS_WPT_BASE + '/baz/x/y.html': '', ABS_WPT_BASE + '/baz/x/y/z.html': '', ABS_WPT_BASE + '/baz/x/OWNERS': '[email protected]\n', }) changed_files = [ REL_WPT_BASE + '/baz/x/y.html', REL_WPT_BASE + '/baz/x/y/z.html', ] self.assertEqual(self.extractor.list_owners(changed_files), {('*****@*****.**', ): ['external/wpt/baz/x']}) def test_list_owners_skips_empty_owners(self): self._write_files({ ABS_WPT_BASE + '/baz/x/y/z.html': '', ABS_WPT_BASE + '/baz/x/y/OWNERS': '# Some comments\n', ABS_WPT_BASE + '/baz/x/OWNERS': '[email protected]\n', }) changed_files = [ REL_WPT_BASE + '/baz/x/y/z.html', ] self.assertEqual(self.extractor.list_owners(changed_files), {('*****@*****.**', ): ['external/wpt/baz/x']}) def test_list_owners_not_found(self): self._write_files({ # Although LayoutTests/external/OWNERS exists, it should not be listed. 
ABS_WPT_BASE + '/foo/bar.html': '', # Files out of external. '/mock-checkout/third_party/WebKit/LayoutTests/TestExpectations': '', '/mock-checkout/third_party/WebKit/LayoutTests/OWNERS': '*****@*****.**', }) changed_files = [ REL_WPT_BASE + '/foo/bar.html', 'third_party/WebKit/LayoutTests/TestExpectations', ] self.assertEqual(self.extractor.list_owners(changed_files), {}) def test_find_owners_file_at_current_dir(self): self._write_files({ABS_WPT_BASE + '/foo/OWNERS': '*****@*****.**'}) self.assertEqual( self.extractor.find_owners_file(REL_WPT_BASE + '/foo'), ABS_WPT_BASE + '/foo/OWNERS') def test_find_owners_file_at_ancestor(self): self._write_files({ ABS_WPT_BASE + '/x/OWNERS': '*****@*****.**', ABS_WPT_BASE + '/x/y/z.html': '', }) self.assertEqual( self.extractor.find_owners_file(REL_WPT_BASE + '/x/y'), ABS_WPT_BASE + '/x/OWNERS') def test_find_owners_file_stops_at_external_root(self): self._write_files({ ABS_WPT_BASE + '/x/y/z.html': '', }) self.assertEqual( self.extractor.find_owners_file(REL_WPT_BASE + '/x/y'), '/mock-checkout/third_party/WebKit/LayoutTests/external/OWNERS') def test_find_owners_file_takes_four_kinds_of_paths(self): owners_path = ABS_WPT_BASE + '/foo/OWNERS' self._write_files({ owners_path: '*****@*****.**', ABS_WPT_BASE + '/foo/bar.html': '', }) # Absolute paths of directories. self.assertEqual( self.extractor.find_owners_file(ABS_WPT_BASE + '/foo'), owners_path) # Relative paths of directories. self.assertEqual( self.extractor.find_owners_file(REL_WPT_BASE + '/foo'), owners_path) # Absolute paths of files. self.assertEqual( self.extractor.find_owners_file(ABS_WPT_BASE + '/foo/bar.html'), owners_path) # Relative paths of files. 
self.assertEqual( self.extractor.find_owners_file(REL_WPT_BASE + '/foo/bar.html'), owners_path) def test_find_owners_file_out_of_external(self): self._write_files({ '/mock-checkout/third_party/WebKit/LayoutTests/OWNERS': '*****@*****.**', '/mock-checkout/third_party/WebKit/LayoutTests/other/some_file': '', }) self.assertIsNone( self.extractor.find_owners_file('third_party/WebKit/LayoutTests')) self.assertIsNone( self.extractor.find_owners_file( 'third_party/WebKit/LayoutTests/other')) self.assertIsNone(self.extractor.find_owners_file('third_party')) def test_extract_owners(self): self.filesystem.files = { ABS_WPT_BASE + '/foo/OWNERS': '#This is a comment\n' '*\n' '[email protected]\n' '[email protected]\n' 'foobar\n' '#[email protected]\n' '# TEAM: [email protected]\n' '# COMPONENT: Blink>Layout\n' } self.assertEqual( self.extractor.extract_owners(ABS_WPT_BASE + '/foo/OWNERS'), ['*****@*****.**', '*****@*****.**']) def test_extract_component(self): self.filesystem.files = { ABS_WPT_BASE + '/foo/OWNERS': '# TEAM: [email protected]\n' '# COMPONENT: Blink>Layout\n' } self.assertEqual( self.extractor.extract_component(ABS_WPT_BASE + '/foo/OWNERS'), 'Blink>Layout') def test_is_wpt_notify_enabled_true(self): self.filesystem.files = { ABS_WPT_BASE + '/foo/OWNERS': '# COMPONENT: Blink>Layout\n' '# WPT-NOTIFY: true\n' } self.assertTrue( self.extractor.is_wpt_notify_enabled(ABS_WPT_BASE + '/foo/OWNERS')) def test_is_wpt_notify_enabled_false(self): self.filesystem.files = { ABS_WPT_BASE + '/foo/OWNERS': '# COMPONENT: Blink>Layout\n' '# WPT-NOTIFY: false\n' } self.assertFalse( self.extractor.is_wpt_notify_enabled(ABS_WPT_BASE + '/foo/OWNERS')) def test_is_wpt_notify_enabled_absence_is_false(self): self.filesystem.files = { ABS_WPT_BASE + '/foo/OWNERS': '# TEAM: [email protected]\n' '# COMPONENT: Blink>Layout\n' } self.assertFalse( self.extractor.is_wpt_notify_enabled(ABS_WPT_BASE + '/foo/OWNERS'))