Example #1
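Constructor of a W3C test importer: it resolves the LayoutTests, imported/w3c and download directories through WebKitFinder, and preloads resource-files.json and tests-options.json when those files exist.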
    def __init__(self, host, source_directory, options):
        self.host = host
        self.source_directory = source_directory
        self.options = options

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination)
        self.tests_w3c_relative_path = self.filesystem.join('imported', 'w3c')
        self.layout_tests_path = webkit_finder.path_from_webkit_base('LayoutTests')
        self.layout_tests_w3c_path = self.filesystem.join(self.layout_tests_path, self.tests_w3c_relative_path)
        self.tests_download_path = webkit_finder.path_from_webkit_base('WebKitBuild', 'w3c-tests')

        self._test_downloader = None

        self._potential_test_resource_files = []

        self.import_list = []
        self._importing_downloaded_tests = source_directory is None

        self._test_resource_files_json_path = self.filesystem.join(self.layout_tests_w3c_path, "resources", "resource-files.json")
        self._test_resource_files = json.loads(self.filesystem.read_text_file(self._test_resource_files_json_path)) if self.filesystem.exists(self._test_resource_files_json_path) else None

        self._tests_options_json_path = self.filesystem.join(self.layout_tests_path, 'tests-options.json')
        self._tests_options = json.loads(self.filesystem.read_text_file(self._tests_options_json_path)) if self.filesystem.exists(self._tests_options_json_path) else None
        self._slow_tests = []

        if self.options.clean_destination_directory and self._test_resource_files:
            self._test_resource_files["files"] = []
            if self._tests_options:
                self.remove_slow_from_w3c_tests_options()
Example #2
    def __init__(self, host, test_paths, options):
        self.host = host
        self.source_directory = options.source
        self.options = options
        self.test_paths = test_paths if test_paths else []

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination)
        self.tests_w3c_relative_path = self.filesystem.join('imported', 'w3c')
        self.layout_tests_path = webkit_finder.path_from_webkit_base('LayoutTests')
        self.layout_tests_w3c_path = self.filesystem.join(self.layout_tests_path, self.tests_w3c_relative_path)
        self.tests_download_path = webkit_finder.path_from_webkit_base('WebKitBuild', 'w3c-tests')

        self._test_downloader = None

        self._potential_test_resource_files = []

        self.import_list = []
        self._importing_downloaded_tests = self.source_directory is None

        self._test_resource_files_json_path = self.filesystem.join(self.layout_tests_w3c_path, "resources", "resource-files.json")
        self._test_resource_files = json.loads(self.filesystem.read_text_file(self._test_resource_files_json_path)) if self.filesystem.exists(self._test_resource_files_json_path) else None

        self._tests_options_json_path = self.filesystem.join(self.layout_tests_path, 'tests-options.json')
        self._tests_options = json.loads(self.filesystem.read_text_file(self._tests_options_json_path)) if self.filesystem.exists(self._tests_options_json_path) else None
        self._slow_tests = []

        if self.options.clean_destination_directory and self._test_resource_files:
            self._test_resource_files["files"] = []
            if self._tests_options:
                self.remove_slow_from_w3c_tests_options()
Example #3
    def _run_pylint(self, path):
        wkf = WebKitFinder(FileSystem())
        executive = Executive()
        env = os.environ.copy()
        env['PYTHONPATH'] = ('%s%s%s' % (wkf.path_from_webkit_base('Tools', 'Scripts'),
                                         os.pathsep,
                                         wkf.path_from_webkit_base('Tools', 'Scripts', 'webkitpy', 'thirdparty')))
        return executive.run_command([sys.executable, wkf.path_from_depot_tools_base('pylint.py'),
                                      '--output-format=parseable',
                                      '--errors-only',
                                      '--rcfile=' + wkf.path_from_webkit_base('Tools', 'Scripts', 'webkitpy', 'pylintrc'),
                                      path],
                                     env=env,
                                     error_handler=executive.ignore_error)
Example #4
    def _run_pylint(self, path):
        wkf = WebKitFinder(FileSystem())
        executive = Executive()
        env = os.environ.copy()
        env['PYTHONPATH'] = ('%s%s%s%s%s' % (wkf.path_from_webkit_base('Tools', 'Scripts'),
                                             os.pathsep,
                                             wkf.path_from_webkit_base('Source', 'build', 'scripts'),
                                             os.pathsep,
                                             wkf.path_from_webkit_base('Tools', 'Scripts', 'webkitpy', 'thirdparty')))
        return executive.run_command([sys.executable, wkf.path_from_depot_tools_base('pylint.py'),
                                      '--output-format=parseable',
                                      '--errors-only',
                                      '--rcfile=' + wkf.path_from_webkit_base('Tools', 'Scripts', 'webkitpy', 'pylintrc'),
                                      path],
                                     env=env,
                                     error_handler=executive.ignore_error)
Example #5
    def ensure_manifest(host):
        """Checks whether the manifest exists, and then generates it if necessary."""
        finder = WebKitFinder(host.filesystem)
        manifest_path = finder.path_from_webkit_base('LayoutTests', 'external', 'wpt', 'MANIFEST.json')
        base_manifest_path = finder.path_from_webkit_base('LayoutTests', 'external', 'WPT_BASE_MANIFEST.json')

        if not host.filesystem.exists(base_manifest_path):
            _log.error('Manifest base not found at "%s".', base_manifest_path)
            host.filesystem.write_text_file(base_manifest_path, '{}')

        if not host.filesystem.exists(manifest_path):
            _log.debug('Manifest not found, copying from base "%s".', base_manifest_path)
            host.filesystem.copyfile(base_manifest_path, manifest_path)

        wpt_path = finder.path_from_webkit_base('LayoutTests', 'external', 'wpt')
        WPTManifest.generate_manifest(host, wpt_path)
Example #6
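HTTP POST handler that assembles a run-webkit-tests command line from the posted test list, locates Tools/Scripts through WebKitFinder, and streams the child process output back to the client as HTML.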
    def do_POST(self):
        json_raw_data = self.rfile.read(
            int(self.headers.getheader('content-length')))
        json_data = json.loads(json_raw_data)
        test_list = ''
        for each in json_data['tests']:
            test_list += each + ' '
        filesystem = FileSystem()
        webkit_finder = WebKitFinder(filesystem)
        script_dir = webkit_finder.path_from_webkit_base('Tools', 'Scripts')
        executable_path = script_dir + "/run-webkit-tests"
        cmd = "python " + executable_path + " --no-show-results "
        cmd += test_list
        process = subprocess.Popen(cmd,
                                   shell=True,
                                   cwd=script_dir,
                                   env=None,
                                   stdout=subprocess.PIPE,
                                   stderr=STDOUT)
        self.send_response(200)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header("Content-type", "text/html")
        self.end_headers()
        while process.poll() is None:
            html_output = '<br>' + str(process.stdout.readline())
            self.wfile.write(html_output)
            self.wfile.flush()
            time.sleep(0.05)
        process.wait()
Example #7
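Constructor that fetches Bugzilla attachments (for a whole bug or a single attachment), splits them into platform-specific and generic buckets, and uses WebKitFinder to locate the LayoutTests directory.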
    def __init__(self,
                 host,
                 bugzilla_id,
                 is_bug_id=True,
                 is_attachment_platform_specific=False,
                 attachment_fetcher=bugzilla.Bugzilla(),
                 unzip=None):
        self.host = host
        self.filesystem = self.host.filesystem
        self.unzip = unzip if unzip else lambda content: zipfile.ZipFile(
            io.BytesIO(content))
        if is_bug_id:
            self.platform_specific_attachments = {}
            for attachment in attachment_fetcher.fetch_bug(
                    bugzilla_id).attachments():
                bot_type = self._bot_type(attachment)
                if bot_type:
                    self.platform_specific_attachments[bot_type] = attachment
            self.generic_attachment = self.platform_specific_attachments.pop(
                "mac-wk2"
            ) if "mac-wk2" in self.platform_specific_attachments else None
        else:
            attachment = attachment_fetcher.fetch_attachment(bugzilla_id)
            self.platform_specific_attachments = {
                self._bot_type(attachment): attachment
            } if is_attachment_platform_specific else {}
            self.generic_attachment = attachment if not is_attachment_platform_specific else None

        webkit_finder = WebKitFinder(self.filesystem)
        self.layout_test_repository = webkit_finder.path_from_webkit_base(
            "LayoutTests")
Example #8
 def load_test_repositories():
     filesystem = FileSystem()
     webkit_finder = WebKitFinder(filesystem)
     test_repositories_path = webkit_finder.path_from_webkit_base(
         "LayoutTests", "imported", "w3c", "resources", "TestRepositories"
     )
     return json.loads(filesystem.read_text_file(test_repositories_path))
Example #9
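Constructor that builds the skip and import path lists from the TestRepositories data and, unless the import_all option is set, extends them from the ImportExpectations file located through WebKitFinder.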
    def __init__(self, repository_directory, host, options):
        self._options = options
        self._host = host
        self._filesystem = host.filesystem
        self._test_suites = []

        self.repository_directory = repository_directory

        self.test_repositories = self.load_test_repositories()

        self.paths_to_skip = []
        self.paths_to_import = []
        for test_repository in self.test_repositories:
            self.paths_to_skip.extend(
                [self._filesystem.join(test_repository["name"], path) for path in test_repository["paths_to_skip"]]
            )
            self.paths_to_import.extend(
                [self._filesystem.join(test_repository["name"], path) for path in test_repository["paths_to_import"]]
            )

        if not self._options.import_all:
            webkit_finder = WebKitFinder(self._filesystem)
            import_expectations_path = webkit_finder.path_from_webkit_base(
                "LayoutTests", "imported", "w3c", "resources", "ImportExpectations"
            )
            self._init_paths_from_expectations(import_expectations_path)
Example #10
    def _run_pylint(self, path):
        wkf = WebKitFinder(FileSystem())
        executive = Executive()
        return executive.run_command([sys.executable, wkf.path_from_depot_tools_base('pylint.py'),
                                      '--output-format=parseable',
                                      '--errors-only',
                                      '--rcfile=' + wkf.path_from_webkit_base('Tools', 'Scripts', 'webkitpy', 'pylintrc'),
                                      path],
                                     error_handler=executive.ignore_error)
Example #11
    def __init__(self, host, source_directory, options):
        self.host = host
        self.source_directory = source_directory
        self.options = options

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination)
        self.layout_tests_w3c_path = webkit_finder.path_from_webkit_base('LayoutTests', 'imported', 'w3c')
        self.tests_download_path = webkit_finder.path_from_webkit_base('WebKitBuild', 'w3c-tests')

        self._test_downloader = None

        self.import_list = []
        self._importing_downloaded_tests = source_directory is None
Example #12
    def _add_base_manifest_to_mock_filesystem(self, filesystem):
        webkit_finder = WebKitFinder(filesystem)

        external_dir = webkit_finder.path_from_webkit_base(
            'LayoutTests', 'external')
        filesystem.maybe_make_directory(filesystem.join(external_dir, 'wpt'))

        manifest_base_path = filesystem.join(external_dir,
                                             'WPT_BASE_MANIFEST.json')
        filesystem.files[manifest_base_path] = '{}'
Example #13
    def _run_pylint(self, path):
        wkf = WebKitFinder(FileSystem())
        executive = Executive()
        return executive.run_command([
            sys.executable,
            wkf.path_from_depot_tools_base('pylint.py'),
            '--output-format=parseable',
            '--errors-only', '--rcfile=' + wkf.path_from_webkit_base(
                'Tools', 'Scripts', 'webkitpy', 'pylintrc'), path
        ],
                                     error_handler=executive.ignore_error)
Example #14
def main():
    filesystem = FileSystem()
    wkf = WebKitFinder(filesystem)
    tester = Tester(filesystem, wkf)
    tester.add_tree(wkf.path_from_webkit_base('tools'), 'webkitpy')

    tester.skip(('webkitpy.common.checkout.scm.scm_unittest',), 'are really, really, slow', 31818)
    if sys.platform == 'win32':
        tester.skip(('webkitpy.common.checkout', 'webkitpy.common.config', 'webkitpy.tool', 'webkitpy.w3c', 'webkitpy.layout_tests.layout_package.bot_test_expectations'), 'fail horribly on win32', 54526)

    return not tester.run()
Example #15
    def __init__(self, host, source_directory, options):
        self.host = host
        self.source_directory = source_directory
        self.options = options

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination)
        self.tests_w3c_relative_path = self.filesystem.join("imported", "w3c")
        self.layout_tests_w3c_path = webkit_finder.path_from_webkit_base("LayoutTests", self.tests_w3c_relative_path)
        self.tests_download_path = webkit_finder.path_from_webkit_base("WebKitBuild", "w3c-tests")

        self._test_downloader = None

        self._potential_test_resource_files = []

        self.import_list = []
        self._importing_downloaded_tests = source_directory is None
Example #16
    def __init__(self, host, source_directory, options):
        self.host = host
        self.source_directory = source_directory
        self.options = options

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination)

        self.import_list = []
Example #17
def main():
    filesystem = FileSystem()
    wkf = WebKitFinder(filesystem)
    tester = Tester(filesystem, wkf)
    tester.add_tree(wkf.path_from_webkit_base('Tools', 'Scripts'), 'webkitpy')

    tester.skip(('webkitpy.common.checkout.scm.scm_unittest',), 'are really, really, slow', 31818)
    if sys.platform == 'win32':
        tester.skip(('webkitpy.common.checkout', 'webkitpy.common.config', 'webkitpy.tool', 'webkitpy.w3c', 'webkitpy.layout_tests.layout_package.bot_test_expectations'), 'fail horribly on win32', 54526)

    # This only needs to run on Unix, so don't worry about win32 for now.
    appengine_sdk_path = '/usr/local/google_appengine'
    if os.path.exists(appengine_sdk_path):
        if appengine_sdk_path not in sys.path:
            sys.path.append(appengine_sdk_path)
        import dev_appserver
        from google.appengine.dist import use_library
        use_library('django', '1.2')
        dev_appserver.fix_sys_path()
        tester.add_tree(wkf.path_from_webkit_base('Tools', 'TestResultServer'))
    else:
        _log.info('Skipping TestResultServer tests; the Google AppEngine Python SDK is not installed.')

    return not tester.run()
Example #18
    def __init__(self, host, source_directory, repo_dir, options):
        self.host = host
        self.source_directory = source_directory
        self.options = options

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()
        self.repo_dir = repo_dir

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination)

        self.changeset = CHANGESET_NOT_AVAILABLE

        self.import_list = []
Example #19
    def __init__(self, host, source_directory, repo_dir, options):
        self.host = host
        self.source_directory = source_directory
        self.options = options

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()
        self.repo_dir = repo_dir

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination)

        self.changeset = CHANGESET_NOT_AVAILABLE
        self.test_status = TEST_STATUS_UNKNOWN

        self.import_list = []
Example #20
    def __init__(self, host, source_directory, repo_dir, options):
        self.host = host
        self.source_directory = source_directory
        self.options = options

        self.filesystem = self.host.filesystem

        webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = webkit_finder.webkit_base()
        self.repo_dir = self.filesystem.abspath(repo_dir)

        self.destination_directory = webkit_finder.path_from_webkit_base("LayoutTests", options.destination, self.filesystem.basename(self.repo_dir))

        self.changeset = CHANGESET_NOT_AVAILABLE
        self.test_status = TEST_STATUS_UNKNOWN

        self.import_list = []
Example #21
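Invokes the wpt manifest tool bundled under webkitpy/thirdparty (located through WebKitFinder) to regenerate MANIFEST.json for dest_path, logging the tool's output and exiting on failure.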
    def generate_manifest(host, dest_path):
        """Generates MANIFEST.json on the specified directory."""
        executive = host.executive
        finder = WebKitFinder(host.filesystem)
        manifest_exec_path = finder.path_from_webkit_base('Tools', 'Scripts', 'webkitpy', 'thirdparty', 'wpt', 'wpt', 'manifest')

        cmd = ['python', manifest_exec_path, '--work', '--tests-root', dest_path]
        _log.debug('Running command: %s', ' '.join(cmd))
        proc = executive.popen(cmd, stdout=executive.PIPE, stderr=executive.PIPE, stdin=executive.PIPE, cwd=finder.webkit_base())
        out, err = proc.communicate('')
        if proc.returncode:
            _log.info('# ret> %d' % proc.returncode)
            if out:
                _log.info(out)
            if err:
                _log.info(err)
            host.exit(proc.returncode)
        return proc.returncode, out
Example #22
    def __init__(self, repository_directory, host, options):
        self._options = options
        self._host = host
        self._filesystem = host.filesystem
        self._test_suites = []

        self.repository_directory = repository_directory

        self.test_repositories = self.load_test_repositories(self._filesystem)

        self.paths_to_skip = []
        self.paths_to_import = []
        for test_repository in self.test_repositories:
            self.paths_to_skip.extend([self._filesystem.join(test_repository['name'], path) for path in test_repository['paths_to_skip']])
            self.paths_to_import.extend([self._filesystem.join(test_repository['name'], path) for path in test_repository['paths_to_import']])

        if not self._options.import_all:
            webkit_finder = WebKitFinder(self._filesystem)
            import_expectations_path = webkit_finder.path_from_webkit_base('LayoutTests', 'imported', 'w3c', 'resources', 'ImportExpectations')
            self._init_paths_from_expectations(import_expectations_path)
Example #23
    def do_POST(self):
        json_raw_data = self.rfile.read(int(self.headers.getheader('content-length')))
        json_data = json.loads(json_raw_data)
        test_list = ''
        for each in json_data['tests']:
            test_list += each + ' '
        filesystem = FileSystem()
        webkit_finder = WebKitFinder(filesystem)
        script_dir = webkit_finder.path_from_webkit_base('Tools', 'Scripts')
        executable_path = script_dir + "/run-webkit-tests"
        cmd = "python " + executable_path + " --no-show-results "
        cmd += test_list
        process = subprocess.Popen(cmd, shell=True, cwd=script_dir, env=None, stdout=subprocess.PIPE, stderr=STDOUT)
        self.send_response(200)
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header("Content-type", "text/html")
        self.end_headers()
        while process.poll() is None:
            html_output = '<br>' + str(process.stdout.readline())
            self.wfile.write(html_output)
            self.wfile.flush()
            time.sleep(0.05)
        process.wait()
Example #24
def absolute_chromium_wpt_dir(host):
    finder = WebKitFinder(host.filesystem)
    return finder.path_from_webkit_base('LayoutTests', 'external', 'wpt')
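Every example in this list follows the same construction pattern: create a FileSystem, wrap it in a WebKitFinder, and resolve checkout-relative paths from it. A minimal standalone sketch of that pattern is shown below; the import paths are the usual webkitpy.common module locations and are assumed here rather than taken from any one example.

# Minimal sketch of the shared pattern (assumes webkitpy is importable,
# e.g. from a script living under Tools/Scripts).
from webkitpy.common.system.filesystem import FileSystem
from webkitpy.common.webkit_finder import WebKitFinder

filesystem = FileSystem()
finder = WebKitFinder(filesystem)
webkit_root = finder.webkit_base()                              # absolute path to the checkout root
layout_tests_dir = finder.path_from_webkit_base('LayoutTests')  # path joined below the checkout root
scripts_dir = finder.path_from_webkit_base('Tools', 'Scripts')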
Example #25
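Command-line driver that clones the web-platform-tests and csswg-test repositories, re-imports them into LayoutTests/imported with the import-w3c-tests script, and commits the result; every checkout path is resolved through WebKitFinder.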
class DepsUpdater(object):
    def __init__(self, host):
        self.host = host
        self.executive = host.executive
        self.fs = host.filesystem
        self.finder = WebKitFinder(self.fs)
        self.verbose = False
        self.allow_local_blink_commits = False
        self.keep_w3c_repos_around = False

    def main(self, argv=None):
        self.parse_args(argv)

        self.cd('')
        if not self.checkout_is_okay():
            return 1

        self.print_('## noting the current Blink commitish')
        blink_commitish = self.run(['git', 'show-ref', 'HEAD'])[1].split()[0]

        wpt_import_text = self.update('web-platform-tests',
                                      'https://chromium.googlesource.com/external/w3c/web-platform-tests.git')

        css_import_text = self.update('csswg-test',
                                      'https://chromium.googlesource.com/external/w3c/csswg-test.git')

        self.commit_changes_if_needed(blink_commitish, css_import_text, wpt_import_text)

        return 0

    def parse_args(self, argv):
        parser = argparse.ArgumentParser()
        parser.description = __doc__
        parser.add_argument('-v', '--verbose', action='store_true',
                            help='log what we are doing')
        parser.add_argument('--allow-local-blink-commits', action='store_true',
                            help='allow script to run even if we have local blink commits')
        parser.add_argument('--keep-w3c-repos-around', action='store_true',
                            help='leave the w3c repos around that were imported previously.')

        args = parser.parse_args(argv)
        self.allow_local_blink_commits = args.allow_local_blink_commits
        self.keep_w3c_repos_around = args.keep_w3c_repos_around
        self.verbose = args.verbose

    def checkout_is_okay(self):
        if self.run(['git', 'diff', '--quiet', 'HEAD'], exit_on_failure=False)[0]:
            self.print_('## blink checkout is dirty, aborting')
            return False

        local_blink_commits = self.run(['git', 'log', '--oneline', 'origin/master..HEAD'])[1]
        if local_blink_commits and not self.allow_local_blink_commits:
            self.print_('## blink checkout has local commits, aborting')
            return False

        if self.fs.exists(self.path_from_webkit_base('web-platform-tests')):
            self.print_('## web-platform-tests repo exists, aborting')
            return False

        if self.fs.exists(self.path_from_webkit_base('csswg-test')):
            self.print_('## csswg-test repo exists, aborting')
            return False

        return True

    def update(self, repo, url):
        self.print_('## cloning %s' % repo)
        self.cd('')
        self.run(['git', 'clone', url])

        self.print_('## noting the revision we are importing')
        master_commitish = self.run(['git', 'show-ref', 'origin/master'])[1].split()[0]

        self.print_('## cleaning out tests from LayoutTests/imported/%s' % repo)
        dest_repo = self.path_from_webkit_base('LayoutTests', 'imported', repo)
        files_to_delete = self.fs.files_under(dest_repo, file_filter=self.is_not_baseline)
        for subpath in files_to_delete:
            self.remove('LayoutTests', 'imported', subpath)

        self.print_('## importing the tests')
        src_repo = self.path_from_webkit_base(repo)
        import_path = self.path_from_webkit_base('Tools', 'Scripts', 'import-w3c-tests')
        self.run([self.host.executable, import_path, '-d', 'imported', src_repo])

        self.cd('')
        self.run(['git', 'add', '--all', 'LayoutTests/imported/%s' % repo])

        self.print_('## deleting manual tests')
        files_to_delete = self.fs.files_under(dest_repo, file_filter=self.is_manual_test)
        for subpath in files_to_delete:
            self.remove('LayoutTests', 'imported', subpath)

        self.print_('## deleting any orphaned baselines')
        previous_baselines = self.fs.files_under(dest_repo, file_filter=self.is_baseline)
        for subpath in previous_baselines:
            full_path = self.fs.join(dest_repo, subpath)
            if self.fs.glob(full_path.replace('-expected.txt', '*')) == [full_path]:
                self.fs.remove(full_path)

        if not self.keep_w3c_repos_around:
            self.print_('## deleting %s repo' % repo)
            self.cd('')
            self.rmtree(repo)

        return 'imported %s@%s' % (repo, master_commitish)

    def commit_changes_if_needed(self, blink_commitish, css_import_text, wpt_import_text):
        if self.run(['git', 'diff', '--quiet', 'HEAD'], exit_on_failure=False)[0]:
            self.print_('## committing changes')
            commit_msg = ('update-w3c-deps import using blink %s:\n'
                          '\n'
                          '%s\n'
                          '%s\n' % (blink_commitish, css_import_text, wpt_import_text))
            path_to_commit_msg = self.path_from_webkit_base('commit_msg')
            if self.verbose:
                self.print_('cat > %s <<EOF' % path_to_commit_msg)
                self.print_(commit_msg)
                self.print_('EOF')
            self.fs.write_text_file(path_to_commit_msg, commit_msg)
            self.run(['git', 'commit', '-a', '-F', path_to_commit_msg])
            self.remove(path_to_commit_msg)
            self.print_('## Done: changes imported and committed')
        else:
            self.print_('## Done: no changes to import')

    def is_manual_test(self, fs, dirname, basename):
        return basename.endswith('-manual.html') or basename.endswith('-manual.htm')

    def is_baseline(self, fs, dirname, basename):
        return basename.endswith('-expected.txt')

    def is_not_baseline(self, fs, dirname, basename):
        return not self.is_baseline(fs, dirname, basename)

    def run(self, cmd, exit_on_failure=True):
        if self.verbose:
            self.print_(' '.join(cmd))

        proc = self.executive.popen(cmd, stdout=self.executive.PIPE, stderr=self.executive.PIPE)
        out, err = proc.communicate()
        if proc.returncode or self.verbose:
            self.print_('# ret> %d' % proc.returncode)
            if out:
                for line in out.splitlines():
                    self.print_('# out> %s' % line)
            if err:
                for line in err.splitlines():
                    self.print_('# err> %s' % line)
        if exit_on_failure and proc.returncode:
            self.host.exit(proc.returncode)
        return proc.returncode, out

    def cd(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('cd %s' % dest)
        self.fs.chdir(dest)

    def remove(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('rm %s' % dest)
        self.fs.remove(dest)

    def rmtree(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('rm -fr %s' % dest)
        self.fs.rmtree(dest)

    def path_from_webkit_base(self, *comps):
        return self.finder.path_from_webkit_base(*comps)

    def print_(self, msg):
        self.host.print_(msg)
Example #26
    def absolute_chromium_wpt_dir(self):
        finder = WebKitFinder(self.host.filesystem)
        return finder.path_from_webkit_base('LayoutTests', 'imported', 'wpt')
Example #27
    def absolute_chromium_wpt_dir(self):
        finder = WebKitFinder(self.host.filesystem)
        return finder.path_from_webkit_base("LayoutTests", "imported", "wpt")
Example #28
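Full importer that walks the source tree with os.walk, filters files against W3CImportExpectations, converts HTML/XML/CSS tests for WebKit, and copies everything into the destination directory under LayoutTests.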
class TestImporter(object):
    def __init__(self, host, dir_to_import, top_of_repo, options):
        self.host = host
        self.dir_to_import = dir_to_import
        self.top_of_repo = top_of_repo
        self.options = options

        self.filesystem = self.host.filesystem
        self.webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = self.webkit_finder.webkit_base()
        self.layout_tests_dir = self.webkit_finder.path_from_webkit_base("LayoutTests")
        self.destination_directory = self.filesystem.normpath(
            self.filesystem.join(self.layout_tests_dir, options.destination, self.filesystem.basename(self.top_of_repo))
        )
        self.import_in_place = self.dir_to_import == self.destination_directory
        self.dir_above_repo = self.filesystem.dirname(self.top_of_repo)

        self.changeset = CHANGESET_NOT_AVAILABLE

        self.import_list = []

    def do_import(self):
        _log.info("Importing %s into %s", self.dir_to_import, self.destination_directory)
        self.find_importable_tests(self.dir_to_import)
        self.load_changeset()
        self.import_tests()

    def load_changeset(self):
        """Returns the current changeset from mercurial or "Not Available"."""
        try:
            self.changeset = self.host.executive.run_command(["hg", "tip"]).split("changeset:")[1]
        except (OSError, ScriptError):
            self.changeset = CHANGESET_NOT_AVAILABLE

    def find_importable_tests(self, directory):
        # FIXME: use filesystem
        paths_to_skip = self.find_paths_to_skip()

        for root, dirs, files in os.walk(directory):
            cur_dir = root.replace(self.dir_above_repo + "/", "") + "/"
            _log.info("  scanning " + cur_dir + "...")
            total_tests = 0
            reftests = 0
            jstests = 0

            DIRS_TO_SKIP = (".git", ".hg")
            if dirs:
                for d in DIRS_TO_SKIP:
                    if d in dirs:
                        dirs.remove(d)

                for path in paths_to_skip:
                    path_base = path.replace(self.options.destination + "/", "")
                    path_base = path_base.replace(cur_dir, "")
                    path_full = self.filesystem.join(root, path_base)
                    if path_base in dirs:
                        dirs.remove(path_base)
                        if not self.options.dry_run and self.import_in_place:
                            _log.info("  pruning %s" % path_base)
                            self.filesystem.rmtree(path_full)
                        else:
                            _log.info("  skipping %s" % path_base)

            copy_list = []

            for filename in files:
                path_full = self.filesystem.join(root, filename)
                path_base = path_full.replace(directory + "/", "")
                path_base = self.destination_directory.replace(self.layout_tests_dir + "/", "") + "/" + path_base
                if path_base in paths_to_skip:
                    if not self.options.dry_run and self.import_in_place:
                        _log.info("  pruning %s" % path_base)
                        self.filesystem.remove(path_full)
                        continue
                    else:
                        continue
                # FIXME: This block should really be a separate function, but the early-continues make that difficult.

                if filename.startswith(".") or filename.endswith(".pl"):
                    continue  # For some reason the w3c repo contains random perl scripts we don't care about.

                fullpath = os.path.join(root, filename)

                mimetype = mimetypes.guess_type(fullpath)
                if (
                    not "html" in str(mimetype[0])
                    and not "application/xhtml+xml" in str(mimetype[0])
                    and not "application/xml" in str(mimetype[0])
                ):
                    copy_list.append({"src": fullpath, "dest": filename})
                    continue

                if root.endswith("resources"):
                    copy_list.append({"src": fullpath, "dest": filename})
                    continue

                test_parser = TestParser(vars(self.options), filename=fullpath)
                test_info = test_parser.analyze_test()
                if test_info is None:
                    continue

                if "reference" in test_info.keys():
                    reftests += 1
                    total_tests += 1
                    test_basename = os.path.basename(test_info["test"])

                    # Add the ref file, following WebKit style.
                    # FIXME: Ideally we'd support reading the metadata
                    # directly rather than relying  on a naming convention.
                    # Using a naming convention creates duplicate copies of the
                    # reference files.
                    ref_file = os.path.splitext(test_basename)[0] + "-expected"
                    # Make sure to use the extension from the *reference*, not
                    # from the test, because at least flexbox tests use XHTML
                    # references but HTML tests.
                    ref_file += os.path.splitext(test_info["reference"])[1]

                    copy_list.append(
                        {
                            "src": test_info["reference"],
                            "dest": ref_file,
                            "reference_support_info": test_info["reference_support_info"],
                        }
                    )
                    copy_list.append({"src": test_info["test"], "dest": filename})

                elif "jstest" in test_info.keys():
                    jstests += 1
                    total_tests += 1
                    copy_list.append({"src": fullpath, "dest": filename})
                else:
                    total_tests += 1
                    copy_list.append({"src": fullpath, "dest": filename})

            if copy_list:
                # Only add this directory to the list if there's something to import
                self.import_list.append(
                    {
                        "dirname": root,
                        "copy_list": copy_list,
                        "reftests": reftests,
                        "jstests": jstests,
                        "total_tests": total_tests,
                    }
                )

    def find_paths_to_skip(self):
        if self.options.ignore_expectations:
            return set()

        paths_to_skip = set()
        port = self.host.port_factory.get()
        w3c_import_expectations_path = self.webkit_finder.path_from_webkit_base("LayoutTests", "W3CImportExpectations")
        w3c_import_expectations = self.filesystem.read_text_file(w3c_import_expectations_path)
        parser = TestExpectationParser(port, full_test_list=(), is_lint_mode=False)
        expectation_lines = parser.parse(w3c_import_expectations_path, w3c_import_expectations)
        for line in expectation_lines:
            if "SKIP" in line.expectations:
                if line.specifiers:
                    _log.warning("W3CImportExpectations:%s should not have any specifiers" % line.line_numbers)
                    continue
                paths_to_skip.add(line.name)
        return paths_to_skip

    def import_tests(self):
        total_imported_tests = 0
        total_imported_reftests = 0
        total_imported_jstests = 0
        total_prefixed_properties = {}

        for dir_to_copy in self.import_list:
            total_imported_tests += dir_to_copy["total_tests"]
            total_imported_reftests += dir_to_copy["reftests"]
            total_imported_jstests += dir_to_copy["jstests"]

            prefixed_properties = []

            if not dir_to_copy["copy_list"]:
                continue

            orig_path = dir_to_copy["dirname"]

            subpath = os.path.relpath(orig_path, self.top_of_repo)
            new_path = os.path.join(self.destination_directory, subpath)

            if not (os.path.exists(new_path)):
                os.makedirs(new_path)

            copied_files = []

            for file_to_copy in dir_to_copy["copy_list"]:
                # FIXME: Split this block into a separate function.
                orig_filepath = os.path.normpath(file_to_copy["src"])

                if os.path.isdir(orig_filepath):
                    # FIXME: Figure out what is triggering this and what to do about it.
                    _log.error("%s refers to a directory" % orig_filepath)
                    continue

                if not (os.path.exists(orig_filepath)):
                    _log.warning("%s not found. Possible error in the test.", orig_filepath)
                    continue

                new_filepath = os.path.join(new_path, file_to_copy["dest"])
                if "reference_support_info" in file_to_copy.keys() and file_to_copy["reference_support_info"] != {}:
                    reference_support_info = file_to_copy["reference_support_info"]
                else:
                    reference_support_info = None

                if not (os.path.exists(os.path.dirname(new_filepath))):
                    if not self.import_in_place and not self.options.dry_run:
                        os.makedirs(os.path.dirname(new_filepath))

                relpath = os.path.relpath(new_filepath, self.layout_tests_dir)
                if not self.options.overwrite and os.path.exists(new_filepath):
                    _log.info("  skipping %s" % relpath)
                else:
                    # FIXME: Maybe doing a file diff is in order here for existing files?
                    # In other words, there's no sense in overwriting identical files, but
                    # there's no harm in copying the identical thing.
                    _log.info("  %s" % relpath)

                # Only html, xml, or css should be converted
                # FIXME: Eventually, so should js when support is added for this type of conversion
                mimetype = mimetypes.guess_type(orig_filepath)
                if "html" in str(mimetype[0]) or "xml" in str(mimetype[0]) or "css" in str(mimetype[0]):
                    converted_file = convert_for_webkit(
                        new_path, filename=orig_filepath, reference_support_info=reference_support_info
                    )

                    if not converted_file:
                        if not self.import_in_place and not self.options.dry_run:
                            shutil.copyfile(orig_filepath, new_filepath)  # The file was unmodified.
                    else:
                        for prefixed_property in converted_file[0]:
                            total_prefixed_properties.setdefault(prefixed_property, 0)
                            total_prefixed_properties[prefixed_property] += 1

                        prefixed_properties.extend(set(converted_file[0]) - set(prefixed_properties))
                        if not self.options.dry_run:
                            outfile = open(new_filepath, "wb")
                            outfile.write(converted_file[1])
                            outfile.close()
                else:
                    if not self.import_in_place and not self.options.dry_run:
                        shutil.copyfile(orig_filepath, new_filepath)

                copied_files.append(new_filepath.replace(self._webkit_root, ""))

        _log.info("")
        _log.info("Import complete")
        _log.info("")
        _log.info("IMPORTED %d TOTAL TESTS", total_imported_tests)
        _log.info("Imported %d reftests", total_imported_reftests)
        _log.info("Imported %d JS tests", total_imported_jstests)
        _log.info(
            "Imported %d pixel/manual tests", total_imported_tests - total_imported_jstests - total_imported_reftests
        )
        _log.info("")

        if total_prefixed_properties:
            _log.info("Properties needing prefixes (by count):")
            for prefixed_property in sorted(total_prefixed_properties, key=lambda p: total_prefixed_properties[p]):
                _log.info("  %s: %s", prefixed_property, total_prefixed_properties[prefixed_property])

    def setup_destination_directory(self):
        """ Creates a destination directory that mirrors that of the source directory """

        new_subpath = self.dir_to_import[len(self.top_of_repo) :]

        destination_directory = os.path.join(self.destination_directory, new_subpath)

        if not os.path.exists(destination_directory):
            os.makedirs(destination_directory)

        _log.info("Tests will be imported into: %s", destination_directory)
Example #29
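A later revision of the importer that performs the same walk, convert and copy work through host.filesystem instead of os and shutil, and adds checks for missing reference files and over-long paths.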
class TestImporter(object):

    def __init__(self, host, source_repo_path, options):
        self.host = host
        self.source_repo_path = source_repo_path
        self.options = options

        self.filesystem = self.host.filesystem
        self.webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = self.webkit_finder.webkit_base()
        self.layout_tests_dir = self.webkit_finder.path_from_webkit_base('LayoutTests')
        self.destination_directory = self.filesystem.normpath(
            self.filesystem.join(
                self.layout_tests_dir,
                options.destination,
                self.filesystem.basename(self.source_repo_path)))
        self.import_in_place = (self.source_repo_path == self.destination_directory)
        self.dir_above_repo = self.filesystem.dirname(self.source_repo_path)

        self.import_list = []

        # This is just a FYI list of CSS properties that still need to be prefixed,
        # which may be output after importing.
        self._prefixed_properties = {}

    def do_import(self):
        _log.info("Importing %s into %s", self.source_repo_path, self.destination_directory)
        self.find_importable_tests()
        self.import_tests()

    def find_importable_tests(self):
        """Walks through the source directory to find what tests should be imported.

        This function sets self.import_list, which contains information about how many
        tests are being imported, and their source and destination paths.
        """
        paths_to_skip = self.find_paths_to_skip()

        for root, dirs, files in self.filesystem.walk(self.source_repo_path):
            cur_dir = root.replace(self.dir_above_repo + '/', '') + '/'
            _log.info('  scanning ' + cur_dir + '...')
            total_tests = 0
            reftests = 0
            jstests = 0

            # Files in 'tools' are not for browser testing, so we skip them.
            # See: http://testthewebforward.org/docs/test-format-guidelines.html#tools
            DIRS_TO_SKIP = ('.git', 'test-plan', 'tools')

            # We copy all files in 'support', including HTML without metadata.
            # See: http://testthewebforward.org/docs/test-format-guidelines.html#support-files
            DIRS_TO_INCLUDE = ('resources', 'support')

            if dirs:
                for d in DIRS_TO_SKIP:
                    if d in dirs:
                        dirs.remove(d)

                for path in paths_to_skip:
                    path_base = path.replace(self.options.destination + '/', '')
                    path_base = path_base.replace(cur_dir, '')
                    path_full = self.filesystem.join(root, path_base)
                    if path_base in dirs:
                        dirs.remove(path_base)
                        if not self.options.dry_run and self.import_in_place:
                            _log.info("  pruning %s", path_base)
                            self.filesystem.rmtree(path_full)
                        else:
                            _log.info("  skipping %s", path_base)

            copy_list = []

            for filename in files:
                path_full = self.filesystem.join(root, filename)
                path_base = path_full.replace(self.source_repo_path + '/', '')
                path_base = self.destination_directory.replace(self.layout_tests_dir + '/', '') + '/' + path_base
                if path_base in paths_to_skip:
                    if not self.options.dry_run and self.import_in_place:
                        _log.info("  pruning %s", path_base)
                        self.filesystem.remove(path_full)
                        continue
                    else:
                        continue
                # FIXME: This block should really be a separate function, but the early-continues make that difficult.

                if filename.startswith('.') or filename.endswith('.pl'):
                    # The w3c repos may contain perl scripts, which we don't care about.
                    continue
                if filename == 'OWNERS' or filename == 'reftest.list':
                    # These files fail our presubmits.
                    # See http://crbug.com/584660 and http://crbug.com/582838.
                    continue

                fullpath = self.filesystem.join(root, filename)

                mimetype = mimetypes.guess_type(fullpath)
                if ('html' not in str(mimetype[0]) and
                        'application/xhtml+xml' not in str(mimetype[0]) and
                        'application/xml' not in str(mimetype[0])):
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                if self.filesystem.basename(root) in DIRS_TO_INCLUDE:
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                test_parser = TestParser(fullpath, self.host)
                test_info = test_parser.analyze_test()
                if test_info is None:
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                if self.path_too_long(path_full):
                    _log.warning('%s skipped due to long path. '
                                 'Max length from repo base %d chars; see http://crbug.com/609871.',
                                 path_full, MAX_PATH_LENGTH)
                    continue

                if 'reference' in test_info.keys():
                    test_basename = self.filesystem.basename(test_info['test'])
                    # Add the ref file, following WebKit style.
                    # FIXME: Ideally we'd support reading the metadata
                    # directly rather than relying on a naming convention.
                    # Using a naming convention creates duplicate copies of the
                    # reference files (http://crrev.com/268729).
                    ref_file = self.filesystem.splitext(test_basename)[0] + '-expected'
                    # Make sure to use the extension from the *reference*, not
                    # from the test, because at least flexbox tests use XHTML
                    # references but HTML tests.
                    ref_file += self.filesystem.splitext(test_info['reference'])[1]

                    if not self.filesystem.exists(test_info['reference']):
                        _log.warning('%s skipped because ref file %s was not found.',
                                     path_full, ref_file)
                        continue

                    if self.path_too_long(path_full.replace(filename, ref_file)):
                        _log.warning('%s skipped because path of ref file %s would be too long. '
                                     'Max length from repo base %d chars; see http://crbug.com/609871.',
                                     path_full, ref_file, MAX_PATH_LENGTH)
                        continue

                    reftests += 1
                    total_tests += 1
                    copy_list.append({'src': test_info['reference'], 'dest': ref_file,
                                      'reference_support_info': test_info['reference_support_info']})
                    copy_list.append({'src': test_info['test'], 'dest': filename})

                elif 'jstest' in test_info.keys():
                    jstests += 1
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename, 'is_jstest': True})

                elif self.options.all:
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename})

            if copy_list:
                # Only add this directory to the list if there's something to import
                self.import_list.append({'dirname': root, 'copy_list': copy_list,
                                         'reftests': reftests, 'jstests': jstests, 'total_tests': total_tests})

    def find_paths_to_skip(self):
        if self.options.ignore_expectations:
            return set()

        paths_to_skip = set()
        port = self.host.port_factory.get()
        w3c_import_expectations_path = self.webkit_finder.path_from_webkit_base('LayoutTests', 'W3CImportExpectations')
        w3c_import_expectations = self.filesystem.read_text_file(w3c_import_expectations_path)
        parser = TestExpectationParser(port, all_tests=(), is_lint_mode=False)
        expectation_lines = parser.parse(w3c_import_expectations_path, w3c_import_expectations)
        for line in expectation_lines:
            if 'SKIP' in line.expectations:
                if line.specifiers:
                    _log.warning("W3CImportExpectations:%s should not have any specifiers", line.line_numbers)
                    continue
                paths_to_skip.add(line.name)
        return paths_to_skip

    def import_tests(self):
        """Reads |self.import_list|, and converts and copies files to their destination."""
        total_imported_tests = 0
        total_imported_reftests = 0
        total_imported_jstests = 0

        for dir_to_copy in self.import_list:
            total_imported_tests += dir_to_copy['total_tests']
            total_imported_reftests += dir_to_copy['reftests']
            total_imported_jstests += dir_to_copy['jstests']

            if not dir_to_copy['copy_list']:
                continue

            orig_path = dir_to_copy['dirname']

            relative_dir = self.filesystem.relpath(orig_path, self.source_repo_path)
            dest_dir = self.filesystem.join(self.destination_directory, relative_dir)

            if not self.filesystem.exists(dest_dir):
                self.filesystem.maybe_make_directory(dest_dir)

            copied_files = []

            for file_to_copy in dir_to_copy['copy_list']:
                copied_file = self.copy_file(file_to_copy, dest_dir)
                if copied_file:
                    copied_files.append(copied_file)

        _log.info('')
        _log.info('Import complete')
        _log.info('')
        _log.info('IMPORTED %d TOTAL TESTS', total_imported_tests)
        _log.info('Imported %d reftests', total_imported_reftests)
        _log.info('Imported %d JS tests', total_imported_jstests)
        _log.info('Imported %d pixel/manual tests', total_imported_tests - total_imported_jstests - total_imported_reftests)
        _log.info('')

        if self._prefixed_properties:
            _log.info('Properties needing prefixes (by count):')
            for prefixed_property in sorted(self._prefixed_properties, key=lambda p: self._prefixed_properties[p]):
                _log.info('  %s: %s', prefixed_property, self._prefixed_properties[prefixed_property])

    def copy_file(self, file_to_copy, dest_dir):
        """Converts and copies a file, if it should be copied.

        Args:
            file_to_copy: A dict in a file copy list constructed by
                find_importable_tests, which represents one file to copy, including
                the keys:
                    "src": Absolute path to the source location of the file.
                    "destination": File name of the destination file.
                And possibly also the keys "reference_support_info" or "is_jstest".
            dest_dir: Path to the directory where the file should be copied.

        Returns:
            The path to the new file, relative to the Blink root (//third_party/WebKit).
        """
        source_path = self.filesystem.normpath(file_to_copy['src'])
        dest_path = self.filesystem.join(dest_dir, file_to_copy['dest'])

        if self.filesystem.isdir(source_path):
            _log.error('%s refers to a directory', source_path)
            return None

        if not self.filesystem.exists(source_path):
            _log.error('%s not found. Possible error in the test.', source_path)
            return None

        if file_to_copy.get('reference_support_info'):
            reference_support_info = file_to_copy['reference_support_info']
        else:
            reference_support_info = None

        if not self.filesystem.exists(self.filesystem.dirname(dest_path)):
            if not self.import_in_place and not self.options.dry_run:
                self.filesystem.maybe_make_directory(self.filesystem.dirname(dest_path))

        relpath = self.filesystem.relpath(dest_path, self.layout_tests_dir)
        if not self.options.overwrite and self.filesystem.exists(dest_path):
            _log.info('  skipping %s', relpath)
        else:
            # FIXME: Maybe doing a file diff is in order here for existing files?
            # In other words, there's no sense in overwriting identical files, but
            # there's no harm in copying the identical thing.
            _log.info('  %s', relpath)

        if self.should_try_to_convert(file_to_copy, source_path, dest_dir):
            converted_file = convert_for_webkit(
                dest_dir, filename=source_path,
                reference_support_info=reference_support_info,
                host=self.host)
            for prefixed_property in converted_file[0]:
                self._prefixed_properties.setdefault(prefixed_property, 0)
                self._prefixed_properties[prefixed_property] += 1

            if not self.options.dry_run:
                self.filesystem.write_text_file(dest_path, converted_file[1])
        else:
            if not self.import_in_place and not self.options.dry_run:
                self.filesystem.copyfile(source_path, dest_path)
                if self.filesystem.read_binary_file(source_path)[:2] == '#!':
                    self.filesystem.make_executable(dest_path)

        return dest_path.replace(self._webkit_root, '')

    @staticmethod
    def should_try_to_convert(file_to_copy, source_path, dest_dir):
        """Checks whether we should try to modify the file when importing."""
        if file_to_copy.get('is_jstest', False):
            return False

        # Conversion is not necessary for any tests in wpt now; see http://crbug.com/654081.
        # Note, we want to move away from converting files, see http://crbug.com/663773.
        if re.search(r'[/\\]imported[/\\]wpt[/\\]', dest_dir):
            return False

        # Only HTML, XHTML and CSS files should be converted.
        mimetype, _ = mimetypes.guess_type(source_path)
        return mimetype in ('text/html', 'application/xhtml+xml', 'text/css')

    def path_too_long(self, source_path):
        """Checks whether a source path is too long to import.

        Args:
            source_path: Absolute path of the file to be imported.

        Returns:
            True if the path is too long to import, False if it's OK.
        """
        path_from_repo_base = os.path.relpath(source_path, self.source_repo_path)
        return len(path_from_repo_base) > MAX_PATH_LENGTH
Example #30
class TestImporter(object):
    def __init__(self, host, source_repo_path, options):
        self.host = host
        self.source_repo_path = source_repo_path
        self.options = options

        self.filesystem = self.host.filesystem
        self.webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = self.webkit_finder.webkit_base()
        self.layout_tests_dir = self.webkit_finder.path_from_webkit_base("LayoutTests")
        self.destination_directory = self.filesystem.normpath(
            self.filesystem.join(
                self.layout_tests_dir, options.destination, self.filesystem.basename(self.source_repo_path)
            )
        )
        self.import_in_place = self.source_repo_path == self.destination_directory
        self.dir_above_repo = self.filesystem.dirname(self.source_repo_path)

        self.import_list = []

    def do_import(self):
        _log.info("Importing %s into %s", self.source_repo_path, self.destination_directory)
        self.find_importable_tests()
        self.import_tests()

    def find_importable_tests(self):
        """Walks through the source directory to find what tests should be imported.

        This function sets self.import_list, which contains information about how many
        tests are being imported, and their source and destination paths.
        """
        paths_to_skip = self.find_paths_to_skip()

        for root, dirs, files in self.filesystem.walk(self.source_repo_path):
            cur_dir = root.replace(self.dir_above_repo + "/", "") + "/"
            _log.info("  scanning " + cur_dir + "...")
            total_tests = 0
            reftests = 0
            jstests = 0

            # Files in 'tools' are not for browser testing, so we skip them.
            # See: http://testthewebforward.org/docs/test-format-guidelines.html#tools
            DIRS_TO_SKIP = (".git", "test-plan", "tools")

            # We copy all files in 'support', including HTML without metadata.
            # See: http://testthewebforward.org/docs/test-format-guidelines.html#support-files
            DIRS_TO_INCLUDE = ("resources", "support")

            if dirs:
                for d in DIRS_TO_SKIP:
                    if d in dirs:
                        dirs.remove(d)

                for path in paths_to_skip:
                    path_base = path.replace(self.options.destination + "/", "")
                    path_base = path_base.replace(cur_dir, "")
                    path_full = self.filesystem.join(root, path_base)
                    if path_base in dirs:
                        dirs.remove(path_base)
                        if not self.options.dry_run and self.import_in_place:
                            _log.info("  pruning %s", path_base)
                            self.filesystem.rmtree(path_full)
                        else:
                            _log.info("  skipping %s", path_base)

            copy_list = []

            for filename in files:
                path_full = self.filesystem.join(root, filename)
                path_base = path_full.replace(self.source_repo_path + "/", "")
                path_base = self.destination_directory.replace(self.layout_tests_dir + "/", "") + "/" + path_base
                if path_base in paths_to_skip:
                    if not self.options.dry_run and self.import_in_place:
                        _log.info("  pruning %s", path_base)
                        self.filesystem.remove(path_full)
                        continue
                    else:
                        continue
                # FIXME: This block should really be a separate function, but the early-continues make that difficult.

                if filename.startswith(".") or filename.endswith(".pl"):
                    # The w3cs repos may contain perl scripts, which we don't care about.
                    continue
                if filename == "OWNERS" or filename == "reftest.list":
                    # These files fail our presubmits.
                    # See http://crbug.com/584660 and http://crbug.com/582838.
                    continue

                fullpath = self.filesystem.join(root, filename)

                mimetype = mimetypes.guess_type(fullpath)
                if (
                    "html" not in str(mimetype[0])
                    and "application/xhtml+xml" not in str(mimetype[0])
                    and "application/xml" not in str(mimetype[0])
                ):
                    copy_list.append({"src": fullpath, "dest": filename})
                    continue

                if self.filesystem.basename(root) in DIRS_TO_INCLUDE:
                    copy_list.append({"src": fullpath, "dest": filename})
                    continue

                test_parser = TestParser(fullpath, self.host, vars(self.options))
                test_info = test_parser.analyze_test()
                if test_info is None:
                    continue

                if self.path_too_long(path_full):
                    _log.warning(
                        "%s skipped due to long path. "
                        "Max length from repo base %d chars; see http://crbug.com/609871.",
                        path_full,
                        MAX_PATH_LENGTH,
                    )
                    continue

                if "reference" in test_info.keys():
                    test_basename = self.filesystem.basename(test_info["test"])
                    # Add the ref file, following WebKit style.
                    # FIXME: Ideally we'd support reading the metadata
                    # directly rather than relying on a naming convention.
                    # Using a naming convention creates duplicate copies of the
                    # reference files.
                    ref_file = self.filesystem.splitext(test_basename)[0] + "-expected"
                    # Make sure to use the extension from the *reference*, not
                    # from the test, because at least flexbox tests use XHTML
                    # references but HTML tests.
                    ref_file += self.filesystem.splitext(test_info["reference"])[1]

                    if self.path_too_long(path_full.replace(filename, ref_file)):
                        _log.warning(
                            "%s skipped because path of ref file %s would be too long. "
                            "Max length from repo base %d chars; see http://crbug.com/609871.",
                            path_full,
                            ref_file,
                            MAX_PATH_LENGTH,
                        )
                        continue

                    reftests += 1
                    total_tests += 1
                    copy_list.append(
                        {
                            "src": test_info["reference"],
                            "dest": ref_file,
                            "reference_support_info": test_info["reference_support_info"],
                        }
                    )
                    copy_list.append({"src": test_info["test"], "dest": filename})

                elif "jstest" in test_info.keys():
                    jstests += 1
                    total_tests += 1
                    copy_list.append({"src": fullpath, "dest": filename, "is_jstest": True})
                else:
                    total_tests += 1
                    copy_list.append({"src": fullpath, "dest": filename})

            if copy_list:
                # Only add this directory to the list if there's something to import
                self.import_list.append(
                    {
                        "dirname": root,
                        "copy_list": copy_list,
                        "reftests": reftests,
                        "jstests": jstests,
                        "total_tests": total_tests,
                    }
                )

    def find_paths_to_skip(self):
        if self.options.ignore_expectations:
            return set()

        paths_to_skip = set()
        port = self.host.port_factory.get()
        w3c_import_expectations_path = self.webkit_finder.path_from_webkit_base("LayoutTests", "W3CImportExpectations")
        w3c_import_expectations = self.filesystem.read_text_file(w3c_import_expectations_path)
        parser = TestExpectationParser(port, all_tests=(), is_lint_mode=False)
        expectation_lines = parser.parse(w3c_import_expectations_path, w3c_import_expectations)
        for line in expectation_lines:
            if "SKIP" in line.expectations:
                if line.specifiers:
                    _log.warning("W3CImportExpectations:%s should not have any specifiers", line.line_numbers)
                    continue
                paths_to_skip.add(line.name)
        return paths_to_skip

    def import_tests(self):
        """Reads |self.import_list|, and converts and copies files to their destination."""
        total_imported_tests = 0
        total_imported_reftests = 0
        total_imported_jstests = 0
        total_prefixed_properties = {}

        for dir_to_copy in self.import_list:
            total_imported_tests += dir_to_copy["total_tests"]
            total_imported_reftests += dir_to_copy["reftests"]
            total_imported_jstests += dir_to_copy["jstests"]

            prefixed_properties = []

            if not dir_to_copy["copy_list"]:
                continue

            orig_path = dir_to_copy["dirname"]

            subpath = self.filesystem.relpath(orig_path, self.source_repo_path)
            new_path = self.filesystem.join(self.destination_directory, subpath)

            if not self.filesystem.exists(new_path):
                self.filesystem.maybe_make_directory(new_path)

            copied_files = []

            for file_to_copy in dir_to_copy["copy_list"]:
                # FIXME: Split this block into a separate function.
                orig_filepath = self.filesystem.normpath(file_to_copy["src"])

                if self.filesystem.isdir(orig_filepath):
                    # FIXME: Figure out what is triggering this and what to do about it.
                    _log.error("%s refers to a directory", orig_filepath)
                    continue

                if not self.filesystem.exists(orig_filepath):
                    _log.warning("%s not found. Possible error in the test.", orig_filepath)
                    continue

                new_filepath = self.filesystem.join(new_path, file_to_copy["dest"])
                if "reference_support_info" in file_to_copy.keys() and file_to_copy["reference_support_info"] != {}:
                    reference_support_info = file_to_copy["reference_support_info"]
                else:
                    reference_support_info = None

                if not self.filesystem.exists(self.filesystem.dirname(new_filepath)):
                    if not self.import_in_place and not self.options.dry_run:
                        self.filesystem.maybe_make_directory(self.filesystem.dirname(new_filepath))

                relpath = self.filesystem.relpath(new_filepath, self.layout_tests_dir)
                if not self.options.overwrite and self.filesystem.exists(new_filepath):
                    _log.info("  skipping %s", relpath)
                else:
                    # FIXME: Maybe doing a file diff is in order here for existing files?
                    # In other words, there's no sense in overwriting identical files, but
                    # there's no harm in copying the identical thing.
                    _log.info("  %s", relpath)

                # Only HTML, XML, or CSS should be converted.
                # FIXME: Eventually, so should JS when support is added for this type of conversion.
                mimetype = mimetypes.guess_type(orig_filepath)
                if "is_jstest" not in file_to_copy and (
                    "html" in str(mimetype[0]) or "xml" in str(mimetype[0]) or "css" in str(mimetype[0])
                ):
                    converted_file = convert_for_webkit(
                        new_path, filename=orig_filepath, reference_support_info=reference_support_info
                    )

                    if not converted_file:
                        if not self.import_in_place and not self.options.dry_run:
                            self.filesystem.copyfile(orig_filepath, new_filepath)  # The file was unmodified.
                    else:
                        for prefixed_property in converted_file[0]:
                            total_prefixed_properties.setdefault(prefixed_property, 0)
                            total_prefixed_properties[prefixed_property] += 1

                        prefixed_properties.extend(set(converted_file[0]) - set(prefixed_properties))
                        if not self.options.dry_run:
                            outfile = open(new_filepath, "wb")
                            outfile.write(converted_file[1].encode("utf-8"))
                            outfile.close()
                else:
                    if not self.import_in_place and not self.options.dry_run:
                        self.filesystem.copyfile(orig_filepath, new_filepath)
                        if self.filesystem.read_binary_file(orig_filepath)[:2] == "#!":
                            self.filesystem.make_executable(new_filepath)

                copied_files.append(new_filepath.replace(self._webkit_root, ""))

        _log.info("")
        _log.info("Import complete")
        _log.info("")
        _log.info("IMPORTED %d TOTAL TESTS", total_imported_tests)
        _log.info("Imported %d reftests", total_imported_reftests)
        _log.info("Imported %d JS tests", total_imported_jstests)
        _log.info(
            "Imported %d pixel/manual tests", total_imported_tests - total_imported_jstests - total_imported_reftests
        )
        _log.info("")

        if total_prefixed_properties:
            _log.info("Properties needing prefixes (by count):")
            for prefixed_property in sorted(total_prefixed_properties, key=lambda p: total_prefixed_properties[p]):
                _log.info("  %s: %s", prefixed_property, total_prefixed_properties[prefixed_property])

    def path_too_long(self, source_path):
        """Checks whether a source path is too long to import.

        Args:
            source_path: Absolute path of the file to be imported.

        Returns:
            True if the path is too long to import, False if it's OK.
        """
        path_from_repo_base = os.path.relpath(source_path, self.source_repo_path)
        return len(path_from_repo_base) > MAX_PATH_LENGTH
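
The path-length guard shared by these importer variants reduces to measuring the path relative to the repository root. A standalone version is sketched below; MAX_PATH_LENGTH is defined outside these excerpts, so the value 150 is an assumed placeholder and the path is illustrative.

# Standalone sketch of the path_too_long check. MAX_PATH_LENGTH is defined
# outside these excerpts; 150 is an assumed placeholder, and the path is made up.
import os

MAX_PATH_LENGTH = 150

def path_too_long(source_path, repo_base):
    return len(os.path.relpath(source_path, repo_base)) > MAX_PATH_LENGTH

print(path_too_long('/repo/css-flexbox-1/align-content-wrap-002.html', '/repo'))
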
Example #31
class Bisector(object):

    def __init__(self, tests, is_debug):
        self.executive = Executive()
        self.tests = tests
        self.expected_failure = tests[-1]
        self.is_debug = is_debug
        self.webkit_finder = WebKitFinder(FileSystem())

    def bisect(self):
        if self.test_fails_in_isolation():
            self.buckets = [Bucket([self.expected_failure])]
            print '%s fails when run in isolation.' % self.expected_failure
            self.print_result()
            return 0
        if not self.test_fails(self.tests):
            _log.error('%s does not fail' % self.expected_failure)
            return 1
        # Split the list of tests into buckets. Each bucket has at least one test required to cause
        # the expected failure at the end. Split buckets in half until there are only buckets left
        # with one item in them.
        self.buckets = [Bucket(self.tests[:-1]), Bucket([self.expected_failure])]
        while not self.is_done():
            self.print_progress()
            self.split_largest_bucket()
        self.print_result()
        self.verify_non_flaky()
        return 0

    def test_fails_in_isolation(self):
        return self.test_bucket_list_fails([Bucket([self.expected_failure])])

    def verify_non_flaky(self):
        print 'Verifying the failure is not flaky by running 10 times.'
        count_failures = 0
        for i in range(0, 10):
            if self.test_bucket_list_fails(self.buckets):
                count_failures += 1
        print 'Failed %d/10 times' % count_failures

    def print_progress(self):
        count = 0
        for bucket in self.buckets:
            count += len(bucket.tests)
        print '%d tests left, %d buckets' % (count, len(self.buckets))

    def print_result(self):
        tests = []
        for bucket in self.buckets:
            tests += bucket.tests
        extra_args = ' --debug' if self.is_debug else ''
        print 'run-webkit-tests%s --child-processes=1 --order=none %s' % (extra_args, " ".join(tests))

    def is_done(self):
        for bucket in self.buckets:
            if bucket.size() > 1:
                return False
        return True

    def split_largest_bucket(self):
        index = 0
        largest_index = 0
        largest_size = 0
        for bucket in self.buckets:
            if bucket.size() > largest_size:
                largest_index = index
                largest_size = bucket.size()
            index += 1

        bucket_to_split = self.buckets[largest_index]
        halfway_point = int(largest_size / 2)
        first_half = Bucket(bucket_to_split.tests[:halfway_point])
        second_half = Bucket(bucket_to_split.tests[halfway_point:])

        buckets_before = self.buckets[:largest_index]
        buckets_after = self.buckets[largest_index + 1:]

        # Do the second half first; it tends to be faster because the http tests are front-loaded and slow.
        new_buckets = buckets_before + [second_half] + buckets_after
        if self.test_bucket_list_fails(new_buckets):
            self.buckets = new_buckets
            return

        new_buckets = buckets_before + [first_half] + buckets_after
        if self.test_bucket_list_fails(new_buckets):
            self.buckets = new_buckets
            return

        self.buckets = buckets_before + [first_half, second_half] + buckets_after

    def test_bucket_list_fails(self, buckets):
        tests = []
        for bucket in buckets:
            tests += bucket.tests
        return self.test_fails(tests)

    def test_fails(self, tests):
        extra_args = ['--debug'] if self.is_debug else []
        path_to_run_webkit_tests = self.webkit_finder.path_from_webkit_base('tools', 'test_sky')
        output = self.executive.popen([path_to_run_webkit_tests, '--child-processes', '1', '--order', 'none', '--no-retry', '--no-show-results', '--verbose'] + extra_args + tests, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        failure_string = self.expected_failure + ' failed'
        if failure_string in output.stderr.read():
            return True
        return False
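
The Bucket helper this bisector relies on is not included in the example. From its usage here (the constructor takes a list of tests, and the code reads .tests and calls .size()), a minimal stand-in could look like the sketch below; it is inferred from that usage, not the original definition.

# Minimal stand-in for the Bucket helper used by Bisector above, inferred from
# its usage; not the original definition.
class Bucket(object):
    def __init__(self, tests):
        self.tests = tests

    def size(self):
        return len(self.tests)
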
Example #32
class TestImporter(object):

    def __init__(self, host, dir_to_import, top_of_repo, options):
        self.host = host
        self.dir_to_import = dir_to_import
        self.top_of_repo = top_of_repo
        self.options = options

        self.filesystem = self.host.filesystem
        self.webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = self.webkit_finder.webkit_base()
        self.layout_tests_dir = self.webkit_finder.path_from_webkit_base('LayoutTests')
        self.destination_directory = self.filesystem.normpath(self.filesystem.join(self.layout_tests_dir, options.destination,
                                                                                   self.filesystem.basename(self.top_of_repo)))
        self.import_in_place = (self.dir_to_import == self.destination_directory)
        self.dir_above_repo = self.filesystem.dirname(self.top_of_repo)

        self.import_list = []

    def do_import(self):
        _log.info("Importing %s into %s", self.dir_to_import, self.destination_directory)
        self.find_importable_tests(self.dir_to_import)
        self.import_tests()

    def find_importable_tests(self, directory):
        paths_to_skip = self.find_paths_to_skip()

        for root, dirs, files in self.filesystem.walk(directory):
            cur_dir = root.replace(self.dir_above_repo + '/', '') + '/'
            _log.info('  scanning ' + cur_dir + '...')
            total_tests = 0
            reftests = 0
            jstests = 0

            # Files in 'tools' are not for browser testing (e.g., a script for generating test files).
            # http://testthewebforward.org/docs/test-format-guidelines.html#tools
            DIRS_TO_SKIP = ('.git', 'test-plan', 'tools')

            # Need to copy all files in 'support', including HTML without meta data.
            # http://testthewebforward.org/docs/test-format-guidelines.html#support-files
            DIRS_TO_INCLUDE = ('resources', 'support')

            if dirs:
                for d in DIRS_TO_SKIP:
                    if d in dirs:
                        dirs.remove(d)

                for path in paths_to_skip:
                    path_base = path.replace(self.options.destination + '/', '')
                    path_base = path_base.replace(cur_dir, '')
                    path_full = self.filesystem.join(root, path_base)
                    if path_base in dirs:
                        dirs.remove(path_base)
                        if not self.options.dry_run and self.import_in_place:
                            _log.info("  pruning %s" % path_base)
                            self.filesystem.rmtree(path_full)
                        else:
                            _log.info("  skipping %s" % path_base)

            copy_list = []

            for filename in files:
                path_full = self.filesystem.join(root, filename)
                path_base = path_full.replace(directory + '/', '')
                path_base = self.destination_directory.replace(self.layout_tests_dir + '/', '') + '/' + path_base
                if path_base in paths_to_skip:
                    if not self.options.dry_run and self.import_in_place:
                        _log.info("  pruning %s" % path_base)
                        self.filesystem.remove(path_full)
                        continue
                    else:
                        continue
                # FIXME: This block should really be a separate function, but the early-continues make that difficult.

                if filename.startswith('.') or filename.endswith('.pl'):
                    continue  # For some reason the w3c repo contains random perl scripts we don't care about.

                fullpath = self.filesystem.join(root, filename)

                mimetype = mimetypes.guess_type(fullpath)
                if not 'html' in str(mimetype[0]) and not 'application/xhtml+xml' in str(mimetype[0]) and not 'application/xml' in str(mimetype[0]):
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                if self.filesystem.basename(root) in DIRS_TO_INCLUDE:
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                test_parser = TestParser(vars(self.options), filename=fullpath)
                test_info = test_parser.analyze_test()
                if test_info is None:
                    continue

                if 'reference' in test_info.keys():
                    reftests += 1
                    total_tests += 1
                    test_basename = self.filesystem.basename(test_info['test'])

                    # Add the ref file, following WebKit style.
                    # FIXME: Ideally we'd support reading the metadata
                    # directly rather than relying on a naming convention.
                    # Using a naming convention creates duplicate copies of the
                    # reference files.
                    ref_file = self.filesystem.splitext(test_basename)[0] + '-expected'
                    # Make sure to use the extension from the *reference*, not
                    # from the test, because at least flexbox tests use XHTML
                    # references but HTML tests.
                    ref_file += self.filesystem.splitext(test_info['reference'])[1]

                    copy_list.append({'src': test_info['reference'], 'dest': ref_file,
                                      'reference_support_info': test_info['reference_support_info']})
                    copy_list.append({'src': test_info['test'], 'dest': filename})

                elif 'jstest' in test_info.keys():
                    jstests += 1
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename})
                else:
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename})

            if copy_list:
                # Only add this directory to the list if there's something to import
                self.import_list.append({'dirname': root, 'copy_list': copy_list,
                                         'reftests': reftests, 'jstests': jstests, 'total_tests': total_tests})

    def find_paths_to_skip(self):
        if self.options.ignore_expectations:
            return set()

        paths_to_skip = set()
        port = self.host.port_factory.get()
        w3c_import_expectations_path = self.webkit_finder.path_from_webkit_base('LayoutTests', 'W3CImportExpectations')
        w3c_import_expectations = self.filesystem.read_text_file(w3c_import_expectations_path)
        parser = TestExpectationParser(port, all_tests=(), is_lint_mode=False)
        expectation_lines = parser.parse(w3c_import_expectations_path, w3c_import_expectations)
        for line in expectation_lines:
            if 'SKIP' in line.expectations:
                if line.specifiers:
                    _log.warning("W3CImportExpectations:%s should not have any specifiers" % line.line_numbers)
                    continue
                paths_to_skip.add(line.name)
        return paths_to_skip

    def import_tests(self):
        total_imported_tests = 0
        total_imported_reftests = 0
        total_imported_jstests = 0
        total_prefixed_properties = {}

        for dir_to_copy in self.import_list:
            total_imported_tests += dir_to_copy['total_tests']
            total_imported_reftests += dir_to_copy['reftests']
            total_imported_jstests += dir_to_copy['jstests']

            prefixed_properties = []

            if not dir_to_copy['copy_list']:
                continue

            orig_path = dir_to_copy['dirname']

            subpath = self.filesystem.relpath(orig_path, self.top_of_repo)
            new_path = self.filesystem.join(self.destination_directory, subpath)

            if not self.filesystem.exists(new_path):
                self.filesystem.maybe_make_directory(new_path)

            copied_files = []

            for file_to_copy in dir_to_copy['copy_list']:
                # FIXME: Split this block into a separate function.
                orig_filepath = self.filesystem.normpath(file_to_copy['src'])

                if self.filesystem.isdir(orig_filepath):
                    # FIXME: Figure out what is triggering this and what to do about it.
                    _log.error('%s refers to a directory' % orig_filepath)
                    continue

                if not self.filesystem.exists(orig_filepath):
                    _log.warning('%s not found. Possible error in the test.', orig_filepath)
                    continue

                if self.path_too_long(orig_filepath):
                    _log.warning('%s skipped (longer than %d chars), to avoid hitting Windows max path length on builders (http://crbug.com/609871).',
                                 orig_filepath, MAX_PATH_LENGTH)
                    continue

                new_filepath = self.filesystem.join(new_path, file_to_copy['dest'])
                if 'reference_support_info' in file_to_copy.keys() and file_to_copy['reference_support_info'] != {}:
                    reference_support_info = file_to_copy['reference_support_info']
                else:
                    reference_support_info = None

                if not self.filesystem.exists(self.filesystem.dirname(new_filepath)):
                    if not self.import_in_place and not self.options.dry_run:
                        self.filesystem.maybe_make_directory(self.filesystem.dirname(new_filepath))

                relpath = self.filesystem.relpath(new_filepath, self.layout_tests_dir)
                if not self.options.overwrite and self.filesystem.exists(new_filepath):
                    _log.info('  skipping %s' % relpath)
                else:
                    # FIXME: Maybe doing a file diff is in order here for existing files?
                    # In other words, there's no sense in overwriting identical files, but
                    # there's no harm in copying the identical thing.
                    _log.info('  %s' % relpath)

                # Only html, xml, or css should be converted
                # FIXME: Eventually, so should js when support is added for this type of conversion
                mimetype = mimetypes.guess_type(orig_filepath)
                if 'html' in str(mimetype[0]) or 'xml' in str(mimetype[0]) or 'css' in str(mimetype[0]):
                    converted_file = convert_for_webkit(new_path, filename=orig_filepath,
                                                        reference_support_info=reference_support_info)

                    if not converted_file:
                        if not self.import_in_place and not self.options.dry_run:
                            self.filesystem.copyfile(orig_filepath, new_filepath)  # The file was unmodified.
                    else:
                        for prefixed_property in converted_file[0]:
                            total_prefixed_properties.setdefault(prefixed_property, 0)
                            total_prefixed_properties[prefixed_property] += 1

                        prefixed_properties.extend(set(converted_file[0]) - set(prefixed_properties))
                        if not self.options.dry_run:
                            outfile = open(new_filepath, 'wb')
                            outfile.write(converted_file[1].encode('utf-8'))
                            outfile.close()
                else:
                    if not self.import_in_place and not self.options.dry_run:
                        self.filesystem.copyfile(orig_filepath, new_filepath)

                copied_files.append(new_filepath.replace(self._webkit_root, ''))

        _log.info('')
        _log.info('Import complete')
        _log.info('')
        _log.info('IMPORTED %d TOTAL TESTS', total_imported_tests)
        _log.info('Imported %d reftests', total_imported_reftests)
        _log.info('Imported %d JS tests', total_imported_jstests)
        _log.info('Imported %d pixel/manual tests', total_imported_tests - total_imported_jstests - total_imported_reftests)
        _log.info('')

        if total_prefixed_properties:
            _log.info('Properties needing prefixes (by count):')
            for prefixed_property in sorted(total_prefixed_properties, key=lambda p: total_prefixed_properties[p]):
                _log.info('  %s: %s', prefixed_property, total_prefixed_properties[prefixed_property])

    def path_too_long(self, source_path):
        """Checks whether a source path is too long to import.

        Args:
          source_path: Absolute path of the file to be imported.
        """
        path_from_repo_base = os.path.relpath(source_path, self.top_of_repo)
        return len(path_from_repo_base) > MAX_PATH_LENGTH

    def setup_destination_directory(self):
        """ Creates a destination directory that mirrors that of the source directory """

        new_subpath = self.dir_to_import[len(self.top_of_repo):]

        destination_directory = self.filesystem.join(self.destination_directory, new_subpath)

        if not self.filesystem.exists(destination_directory):
            self.filesystem.maybe_make_directory(destination_directory)

        _log.info('Tests will be imported into: %s', destination_directory)
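
The '-expected' naming convention these importers apply to reference files can be shown in isolation. The sketch below derives the ref name the same way the code above does: the test's base name plus '-expected', with the extension taken from the reference file. The paths are illustrative only.

# Sketch of the WebKit-style reference naming used above: keep the test's base
# name, append '-expected', and take the extension from the *reference* file.
import os

def expected_name(test_path, reference_path):
    base, _ = os.path.splitext(os.path.basename(test_path))
    _, ref_ext = os.path.splitext(reference_path)
    return base + '-expected' + ref_ext

# Illustrative paths only; prints 'flex-align-expected.xht'.
print(expected_name('flexbox/flex-align.html', 'flexbox/reference/flex-align-ref.xht'))
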
Example #33
class TestImporter(object):
    def __init__(self, host, dir_to_import, top_of_repo, options):
        self.host = host
        self.dir_to_import = dir_to_import
        self.top_of_repo = top_of_repo
        self.options = options

        self.filesystem = self.host.filesystem
        self.webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = self.webkit_finder.webkit_base()
        self.layout_tests_dir = self.webkit_finder.path_from_webkit_base(
            'LayoutTests')
        self.destination_directory = self.filesystem.normpath(
            self.filesystem.join(self.layout_tests_dir, options.destination,
                                 self.filesystem.basename(self.top_of_repo)))
        self.import_in_place = (
            self.dir_to_import == self.destination_directory)

        self.changeset = CHANGESET_NOT_AVAILABLE
        self.test_status = TEST_STATUS_UNKNOWN

        self.import_list = []

    def do_import(self):
        _log.info("Importing %s into %s", self.dir_to_import,
                  self.destination_directory)
        self.find_importable_tests(self.dir_to_import)
        self.load_changeset()
        self.import_tests()

    def load_changeset(self):
        """Returns the current changeset from mercurial or "Not Available"."""
        try:
            self.changeset = self.host.executive.run_command(
                ['hg', 'tip']).split('changeset:')[1]
        except (OSError, ScriptError):
            self.changeset = CHANGESET_NOT_AVAILABLE

    def find_importable_tests(self, directory):
        # FIXME: use filesystem
        paths_to_skip = self.find_paths_to_skip()

        for root, dirs, files in os.walk(directory):
            cur_dir = root.replace(self.layout_tests_dir + '/', '') + '/'
            _log.info('Scanning ' + cur_dir + '...')
            total_tests = 0
            reftests = 0
            jstests = 0

            # "archive" and "data" dirs are internal csswg things that live in every approved directory.
            # FIXME: skip 'incoming' tests for now, but we should rework the 'test_status' concept and
            # support reading them as well.
            DIRS_TO_SKIP = ('.git', '.hg', 'data', 'archive', 'incoming')
            if dirs:
                for d in DIRS_TO_SKIP:
                    if d in dirs:
                        dirs.remove(d)

                for path in paths_to_skip:
                    path_base = path.replace(cur_dir, '')
                    path_full = self.filesystem.join(root, path_base)
                    if path_base in dirs:
                        dirs.remove(path_base)
                        if not self.options.dry_run and self.import_in_place:
                            _log.info("Pruning %s" % path_full)
                            self.filesystem.rmtree(path_full)

            copy_list = []

            for filename in files:
                path_full = self.filesystem.join(root, filename)
                path_base = path_full.replace(self.layout_tests_dir + '/', '')
                if path_base in paths_to_skip:
                    if not self.options.dry_run and self.import_in_place:
                        _log.info("Pruning %s" % path_base)
                        self.filesystem.remove(path_full)
                        continue
                # FIXME: This block should really be a separate function, but the early-continues make that difficult.

                if filename.startswith('.') or filename.endswith('.pl'):
                    continue  # For some reason the w3c repo contains random perl scripts we don't care about.

                fullpath = os.path.join(root, filename)

                mimetype = mimetypes.guess_type(fullpath)
                if not 'html' in str(mimetype[0]) and not 'xml' in str(
                        mimetype[0]):
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                if root.endswith('resources'):
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                test_parser = TestParser(vars(self.options), filename=fullpath)
                test_info = test_parser.analyze_test()
                if test_info is None:
                    continue

                if 'reference' in test_info.keys():
                    reftests += 1
                    total_tests += 1
                    test_basename = os.path.basename(test_info['test'])

                    # Add the ref file, following WebKit style.
                    # FIXME: Ideally we'd support reading the metadata
                    # directly rather than relying on a naming convention.
                    # Using a naming convention creates duplicate copies of the
                    # reference files.
                    ref_file = os.path.splitext(test_basename)[0] + '-expected'
                    ref_file += os.path.splitext(test_basename)[1]

                    copy_list.append({
                        'src': test_info['reference'],
                        'dest': ref_file
                    })
                    copy_list.append({
                        'src': test_info['test'],
                        'dest': filename
                    })

                    # Update any support files that need to move as well to remain relative to the -expected file.
                    if 'refsupport' in test_info.keys():
                        for support_file in test_info['refsupport']:
                            source_file = os.path.join(
                                os.path.dirname(test_info['reference']),
                                support_file)
                            source_file = os.path.normpath(source_file)

                            # Keep the dest as it was
                            to_copy = {
                                'src': source_file,
                                'dest': support_file
                            }

                            # Only add it once
                            if not (to_copy in copy_list):
                                copy_list.append(to_copy)
                elif 'jstest' in test_info.keys():
                    jstests += 1
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename})
                else:
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename})

            if not total_tests:
                # We can skip the support directory if no tests were found.
                if 'support' in dirs:
                    dirs.remove('support')

            if copy_list:
                # Only add this directory to the list if there's something to import
                self.import_list.append({
                    'dirname': root,
                    'copy_list': copy_list,
                    'reftests': reftests,
                    'jstests': jstests,
                    'total_tests': total_tests
                })

    def find_paths_to_skip(self):
        if self.options.ignore_expectations:
            return set()

        paths_to_skip = set()
        port = self.host.port_factory.get()
        w3c_import_expectations_path = self.webkit_finder.path_from_webkit_base(
            'LayoutTests', 'W3CImportExpectations')
        w3c_import_expectations = self.filesystem.read_text_file(
            w3c_import_expectations_path)
        parser = TestExpectationParser(port,
                                       full_test_list=(),
                                       is_lint_mode=False)
        expectation_lines = parser.parse(w3c_import_expectations_path,
                                         w3c_import_expectations)
        for line in expectation_lines:
            if 'SKIP' in line.expectations:
                if line.specifiers:
                    _log.warning(
                        "W3CImportExpectations:%s should not have any specifiers"
                        % line.line_numbers)
                    continue
                paths_to_skip.add(line.name)
        return paths_to_skip

    def import_tests(self):
        total_imported_tests = 0
        total_imported_reftests = 0
        total_imported_jstests = 0
        total_prefixed_properties = {}

        for dir_to_copy in self.import_list:
            total_imported_tests += dir_to_copy['total_tests']
            total_imported_reftests += dir_to_copy['reftests']
            total_imported_jstests += dir_to_copy['jstests']

            prefixed_properties = []

            if not dir_to_copy['copy_list']:
                continue

            orig_path = dir_to_copy['dirname']

            subpath = os.path.relpath(orig_path, self.top_of_repo)
            new_path = os.path.join(self.destination_directory, subpath)

            if not (os.path.exists(new_path)):
                os.makedirs(new_path)

            copied_files = []

            for file_to_copy in dir_to_copy['copy_list']:
                # FIXME: Split this block into a separate function.
                orig_filepath = os.path.normpath(file_to_copy['src'])

                if os.path.isdir(orig_filepath):
                    # FIXME: Figure out what is triggering this and what to do about it.
                    _log.error('%s refers to a directory' % orig_filepath)
                    continue

                if not (os.path.exists(orig_filepath)):
                    _log.warning('%s not found. Possible error in the test.',
                                 orig_filepath)
                    continue

                new_filepath = os.path.join(new_path, file_to_copy['dest'])

                if not (os.path.exists(os.path.dirname(new_filepath))):
                    if not self.import_in_place and not self.options.dry_run:
                        os.makedirs(os.path.dirname(new_filepath))

                if not self.options.overwrite and os.path.exists(new_filepath):
                    _log.info('Skipping import of existing file ' +
                              new_filepath)
                else:
                    # FIXME: Maybe doing a file diff is in order here for existing files?
                    # In other words, there's no sense in overwriting identical files, but
                    # there's no harm in copying the identical thing.
                    _log.info('Importing: %s', orig_filepath)
                    _log.info('       As: %s', new_filepath)

                # Only html, xml, or css should be converted
                # FIXME: Eventually, so should js when support is added for this type of conversion
                mimetype = mimetypes.guess_type(orig_filepath)
                if 'html' in str(mimetype[0]) or 'xml' in str(
                        mimetype[0]) or 'css' in str(mimetype[0]):
                    converted_file = convert_for_webkit(new_path,
                                                        filename=orig_filepath)

                    if not converted_file:
                        if not self.import_in_place and not self.options.dry_run:
                            shutil.copyfile(
                                orig_filepath,
                                new_filepath)  # The file was unmodified.
                    else:
                        for prefixed_property in converted_file[0]:
                            total_prefixed_properties.setdefault(
                                prefixed_property, 0)
                            total_prefixed_properties[prefixed_property] += 1

                        prefixed_properties.extend(
                            set(converted_file[0]) - set(prefixed_properties))
                        if not self.options.dry_run:
                            outfile = open(new_filepath, 'wb')
                            outfile.write(converted_file[1])
                            outfile.close()
                else:
                    if not self.import_in_place and not self.options.dry_run:
                        shutil.copyfile(orig_filepath, new_filepath)

                copied_files.append(new_filepath.replace(
                    self._webkit_root, ''))

            if not self.options.import_in_place and not self.options.dry_run:
                self.remove_deleted_files(new_path, copied_files)
                self.write_import_log(new_path, copied_files,
                                      prefixed_properties)

        _log.info('Import complete')

        _log.info('IMPORTED %d TOTAL TESTS', total_imported_tests)
        _log.info('Imported %d reftests', total_imported_reftests)
        _log.info('Imported %d JS tests', total_imported_jstests)
        _log.info(
            'Imported %d pixel/manual tests', total_imported_tests -
            total_imported_jstests - total_imported_reftests)
        _log.info('')
        _log.info('Properties needing prefixes (by count):')
        for prefixed_property in sorted(
                total_prefixed_properties,
                key=lambda p: total_prefixed_properties[p]):
            _log.info('  %s: %s', prefixed_property,
                      total_prefixed_properties[prefixed_property])

    def setup_destination_directory(self):
        """ Creates a destination directory that mirrors that of the source approved or submitted directory """

        self.update_test_status()

        start = self.dir_to_import.find(self.test_status)
        new_subpath = self.dir_to_import[len(self.top_of_repo):]

        destination_directory = os.path.join(self.destination_directory,
                                             new_subpath)

        if not os.path.exists(destination_directory):
            os.makedirs(destination_directory)

        _log.info('Tests will be imported into: %s', destination_directory)

    def update_test_status(self):
        """ Sets the test status to either 'approved' or 'submitted' """

        status = TEST_STATUS_UNKNOWN

        directory_parts = self.dir_to_import.split(os.path.sep)
        for test_status in VALID_TEST_STATUSES:
            if test_status in directory_parts:
                status = test_status

        self.test_status = status

    def remove_deleted_files(self, dir_to_import, new_file_list):
        previous_file_list = []

        import_log_file = os.path.join(dir_to_import, 'w3c-import.log')
        if not os.path.exists(import_log_file):
            return

        import_log = open(import_log_file, 'r')
        contents = import_log.readlines()

        if 'List of files:\n' in contents:
            list_index = contents.index('List of files:\n') + 1
            previous_file_list = [
                filename.strip() for filename in contents[list_index:]
            ]

        deleted_files = set(previous_file_list) - set(new_file_list)
        for deleted_file in deleted_files:
            _log.info('Deleting file removed from the W3C repo: %s',
                      deleted_file)
            deleted_file = os.path.join(self._webkit_root, deleted_file)
            os.remove(deleted_file)

        import_log.close()

    def write_import_log(self, dir_to_import, file_list, prop_list):
        now = datetime.datetime.now()

        import_log = open(os.path.join(dir_to_import, 'w3c-import.log'), 'w')
        import_log.write(
            'The tests in this directory were imported from the W3C repository.\n'
        )
        import_log.write(
            'Do NOT modify these tests directly in Webkit. Instead, push changes to the W3C CSS repo:\n\n'
        )
        import_log.write('http://hg.csswg.org/test\n\n')
        import_log.write(
            'Then run the Tools/Scripts/import-w3c-tests in Webkit to reimport\n\n'
        )
        import_log.write('Do NOT modify or remove this file\n\n')
        import_log.write(
            '------------------------------------------------------------------------\n'
        )
        import_log.write('Last Import: ' + now.strftime('%Y-%m-%d %H:%M') +
                         '\n')
        import_log.write('W3C Mercurial changeset: ' + self.changeset + '\n')
        import_log.write('Test status at time of import: ' + self.test_status +
                         '\n')
        import_log.write(
            '------------------------------------------------------------------------\n'
        )
        import_log.write('Properties requiring vendor prefixes:\n')
        if prop_list:
            for prop in prop_list:
                import_log.write(prop + '\n')
        else:
            import_log.write('None\n')
        import_log.write(
            '------------------------------------------------------------------------\n'
        )
        import_log.write('List of files:\n')
        for item in file_list:
            import_log.write(item + '\n')

        import_log.close()
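
Because write_import_log and remove_deleted_files round-trip the imported file list through w3c-import.log, a short in-memory sketch of that round trip may make the log format clearer; the file names below are made up.

# In-memory sketch of the 'List of files:' round trip used by write_import_log
# and remove_deleted_files above; the file names are illustrative only.
log_lines = [
    'List of files:\n',
    'LayoutTests/imported/csswg-test/flexbox/flex-align.html\n',
    'LayoutTests/imported/csswg-test/flexbox/flex-align-expected.xht\n',
]

list_index = log_lines.index('List of files:\n') + 1
previous_files = [line.strip() for line in log_lines[list_index:]]

new_files = ['LayoutTests/imported/csswg-test/flexbox/flex-align.html']
deleted_files = set(previous_files) - set(new_files)
print(sorted(deleted_files))
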
Example #34
class DepsUpdater(object):

    def __init__(self, host):
        self.host = host
        self.executive = host.executive
        self.fs = host.filesystem
        self.finder = WebKitFinder(self.fs)
        self.verbose = False
        self.git_cl = None

    def main(self, argv=None):
        options = self.parse_args(argv)
        self.verbose = options.verbose

        if not self.checkout_is_okay(options.allow_local_commits):
            return 1

        self.git_cl = GitCL(self.host, auth_refresh_token_json=options.auth_refresh_token_json)

        self.print_('## Noting the current Chromium commit.')
        _, show_ref_output = self.run(['git', 'show-ref', 'HEAD'])
        chromium_commitish = show_ref_output.split()[0]

        if options.target == 'wpt':
            import_commitish = self.update(WPT_DEST_NAME, WPT_REPO_URL, options.keep_w3c_repos_around, options.revision)
            self._copy_resources()
        elif options.target == 'css':
            import_commitish = self.update(CSS_DEST_NAME, CSS_REPO_URL, options.keep_w3c_repos_around, options.revision)
        else:
            raise AssertionError("Unsupported target %s" % options.target)

        has_changes = self.commit_changes_if_needed(chromium_commitish, import_commitish)
        if options.auto_update and has_changes:
            commit_successful = self.do_auto_update()
            if not commit_successful:
                return 1
        return 0

    def parse_args(self, argv):
        parser = argparse.ArgumentParser()
        parser.description = __doc__
        parser.add_argument('-v', '--verbose', action='store_true',
                            help='log what we are doing')
        parser.add_argument('--allow-local-commits', action='store_true',
                            help='allow script to run even if we have local commits')
        parser.add_argument('--keep-w3c-repos-around', action='store_true',
                            help='leave the w3c repos around that were imported previously.')
        parser.add_argument('-r', dest='revision', action='store',
                            help='Target revision.')
        parser.add_argument('target', choices=['css', 'wpt'],
                            help='Target repository.  "css" for csswg-test, "wpt" for web-platform-tests.')
        parser.add_argument('--auto-update', action='store_true',
                            help='uploads CL and initiates commit queue.')
        parser.add_argument('--auth-refresh-token-json',
                            help='Rietveld auth refresh JSON token.')
        return parser.parse_args(argv)

    def checkout_is_okay(self, allow_local_commits):
        git_diff_retcode, _ = self.run(['git', 'diff', '--quiet', 'HEAD'], exit_on_failure=False)
        if git_diff_retcode:
            self.print_('## Checkout is dirty; aborting.')
            return False

        local_commits = self.run(['git', 'log', '--oneline', 'origin/master..HEAD'])[1]
        if local_commits and not allow_local_commits:
            self.print_('## Checkout has local commits; aborting. Use --allow-local-commits to allow this.')
            return False

        if self.fs.exists(self.path_from_webkit_base(WPT_DEST_NAME)):
            self.print_('## WebKit/%s exists; aborting.' % WPT_DEST_NAME)
            return False

        if self.fs.exists(self.path_from_webkit_base(CSS_DEST_NAME)):
            self.print_('## WebKit/%s repo exists; aborting.' % CSS_DEST_NAME)
            return False

        return True

    def _copy_resources(self):
        """Copies resources from LayoutTests/resources to wpt and vice versa.

        There are resources from our repository that we use instead of the
        upstream versions. Conversely, there are also some resources that
        are copied in the other direction.

        Specifically:
          - testharnessreport.js contains code needed to integrate our testing
            with testharness.js; we also want our code to be used for tests
            in wpt.
          - TODO(qyearsley, jsbell): Document why the other files are copied,
            or stop copying them if it's unnecessary.

        If this method is changed, the lists of files expected to be identical
        in LayoutTests/PRESUBMIT.py should also be changed.
        """
        # TODO(tkent): resources_to_copy_to_wpt is unnecessary after enabling
        # WPTServe.
        resources_to_copy_to_wpt = [
            ('testharnessreport.js', 'resources'),
            ('WebIDLParser.js', 'resources'),
            ('vendor-prefix.js', 'common'),
        ]
        resources_to_copy_from_wpt = [
            ('idlharness.js', 'resources'),
            ('testharness.js', 'resources'),
        ]
        for filename, wpt_subdir in resources_to_copy_to_wpt:
            source = self.path_from_webkit_base('LayoutTests', 'resources', filename)
            destination = self.path_from_webkit_base('LayoutTests', 'imported', WPT_DEST_NAME, wpt_subdir, filename)
            self.copyfile(source, destination)
            self.run(['git', 'add', destination])
        for filename, wpt_subdir in resources_to_copy_from_wpt:
            source = self.path_from_webkit_base('LayoutTests', 'imported', WPT_DEST_NAME, wpt_subdir, filename)
            destination = self.path_from_webkit_base('LayoutTests', 'resources', filename)
            self.copyfile(source, destination)
            self.run(['git', 'add', destination])

    def _generate_manifest(self, original_repo_path, dest_path):
        """Generate MANIFEST.json for imported tests.

        Run 'manifest' command if it exists in original_repo_path, and
        add generated MANIFEST.json to dest_path.
        """
        manifest_command = self.fs.join(original_repo_path, 'manifest')
        if not self.fs.exists(manifest_command):
            # Do nothing for csswg-test.
            return
        self.print_('## Generating MANIFEST.json')
        self.run([manifest_command, '--tests-root', dest_path])
        self.run(['git', 'add', self.fs.join(dest_path, 'MANIFEST.json')])

    def update(self, dest_dir_name, url, keep_w3c_repos_around, revision):
        """Updates an imported repository.

        Args:
            dest_dir_name: The destination directory name.
            url: URL of the git repository.
            revision: Commit hash or None.

        Returns:
            A string for the commit description "<destination>@<commitish>".
        """
        temp_repo_path = self.path_from_webkit_base(dest_dir_name)
        self.print_('## Cloning %s into %s.' % (url, temp_repo_path))
        self.run(['git', 'clone', url, temp_repo_path])

        if revision is not None:
            self.print_('## Checking out %s' % revision)
            self.run(['git', 'checkout', revision], cwd=temp_repo_path)
        self.run(['git', 'submodule', 'update', '--init', '--recursive'], cwd=temp_repo_path)

        self.print_('## Noting the revision we are importing.')
        _, show_ref_output = self.run(['git', 'show-ref', 'origin/master'], cwd=temp_repo_path)
        master_commitish = show_ref_output.split()[0]

        self.print_('## Cleaning out tests from LayoutTests/imported/%s.' % dest_dir_name)
        dest_path = self.path_from_webkit_base('LayoutTests', 'imported', dest_dir_name)
        files_to_delete = self.fs.files_under(dest_path, file_filter=self.is_not_baseline)
        for subpath in files_to_delete:
            self.remove('LayoutTests', 'imported', subpath)

        self.print_('## Importing the tests.')
        src_repo = self.path_from_webkit_base(dest_dir_name)
        import_path = self.path_from_webkit_base('Tools', 'Scripts', 'import-w3c-tests')
        self.run([self.host.executable, import_path, '-d', 'imported', src_repo])

        self.run(['git', 'add', '--all', 'LayoutTests/imported/%s' % dest_dir_name])

        self.print_('## Deleting any orphaned baselines.')
        previous_baselines = self.fs.files_under(dest_path, file_filter=self.is_baseline)
        for subpath in previous_baselines:
            full_path = self.fs.join(dest_path, subpath)
            if self.fs.glob(full_path.replace('-expected.txt', '*')) == [full_path]:
                self.fs.remove(full_path)

        self._generate_manifest(temp_repo_path, dest_path)
        if not keep_w3c_repos_around:
            self.print_('## Deleting temp repo directory %s.' % temp_repo_path)
            self.rmtree(temp_repo_path)

        self.print_('## Updating TestExpectations for any removed or renamed tests.')
        self.update_all_test_expectations_files(self._list_deleted_tests(), self._list_renamed_tests())

        return '%s@%s' % (dest_dir_name, master_commitish)

    def commit_changes_if_needed(self, chromium_commitish, import_commitish):
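        # 'git diff --quiet' exits with a non-zero status when there are uncommitted
        # changes, so a truthy return code here means there is something to commit.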
        if self.run(['git', 'diff', '--quiet', 'HEAD'], exit_on_failure=False)[0]:
            self.print_('## Committing changes.')
            commit_msg = ('Import %s\n'
                          '\n'
                          'Using update-w3c-deps in Chromium %s.\n'
                          % (import_commitish, chromium_commitish))
            path_to_commit_msg = self.path_from_webkit_base('commit_msg')
            if self.verbose:
                self.print_('cat > %s <<EOF' % path_to_commit_msg)
                self.print_(commit_msg)
                self.print_('EOF')
            self.fs.write_text_file(path_to_commit_msg, commit_msg)
            self.run(['git', 'commit', '-a', '-F', path_to_commit_msg])
            self.remove(path_to_commit_msg)
            self.print_('## Done: changes imported and committed.')
            return True
        else:
            self.print_('## Done: no changes to import.')
            return False

    # Callback for FileSystem.files_under; not all arguments used - pylint: disable=unused-argument
    def is_baseline(self, fs, dirname, basename):
        return basename.endswith('-expected.txt')

    def is_not_baseline(self, fs, dirname, basename):
        return not self.is_baseline(fs, dirname, basename)

    def run(self, cmd, exit_on_failure=True, cwd=None):
        if self.verbose:
            self.print_(' '.join(cmd))

        cwd = cwd or self.finder.webkit_base()
        proc = self.executive.popen(cmd, stdout=self.executive.PIPE, stderr=self.executive.PIPE, cwd=cwd)
        out, err = proc.communicate()
        if proc.returncode or self.verbose:
            self.print_('# ret> %d' % proc.returncode)
            if out:
                for line in out.splitlines():
                    self.print_('# out> %s' % line)
            if err:
                for line in err.splitlines():
                    self.print_('# err> %s' % line)
        if exit_on_failure and proc.returncode:
            self.host.exit(proc.returncode)
        return proc.returncode, out

    def check_run(self, command):
        return_code, out = self.run(command)
        if return_code:
            raise Exception('%s failed with exit code %d.' % (' '.join(command), return_code))
        return out

    def copyfile(self, source, destination):
        if self.verbose:
            self.print_('cp %s %s' % (source, destination))
        self.fs.copyfile(source, destination)

    def remove(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('rm %s' % dest)
        self.fs.remove(dest)

    def rmtree(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('rm -fr %s' % dest)
        self.fs.rmtree(dest)

    def path_from_webkit_base(self, *comps):
        return self.finder.path_from_webkit_base(*comps)

    def print_(self, msg):
        self.host.print_(msg)

    def do_auto_update(self):
        """Attempts to upload a CL, make any required adjustments, and commit.

        This function assumes that the imported repo has already been updated,
        and that change has been committed. There may be newly-failing tests,
        so before being able to commit these new changes, we may need to update
        TestExpectations or download new baselines.

        Returns:
            True if successfully committed, False otherwise.
        """
        self._upload_cl()
        self.print_('## ' + self.git_cl.run(['issue']).strip())

        # First try: if there are failures, update expectations.
        self.print_('## Triggering try jobs.')
        for try_bot in self.host.builders.all_try_builder_names():
            self.git_cl.run(['try', '-b', try_bot])
        try_results = self.git_cl.wait_for_try_jobs()
        if not try_results:
            self.print_('## Timed out waiting for try results.')
            return False
        if try_results and self.git_cl.has_failing_try_results(try_results):
            self.fetch_new_expectations_and_baselines()

        # Second try: if there are failures, then abort.
        self.git_cl.run(['set-commit', '--rietveld'])
        try_results = self.git_cl.wait_for_try_jobs()
        if not try_results:
            self.print_('Timed out waiting for try results.')
            self.git_cl.run(['set-close'])
            return False
        if self.git_cl.has_failing_try_results(try_results):
            self.print_('CQ failed; aborting.')
            self.git_cl.run(['set-close'])
            return False
        self.print_('## Update completed.')
        return True

    def _upload_cl(self):
        self.print_('## Uploading change list.')
        cc_list = self.get_directory_owners_to_cc()
        last_commit_message = self.check_run(['git', 'log', '-1', '--format=%B'])
        commit_message = last_commit_message + '[email protected]'
        self.git_cl.run([
            'upload',
            '-f',
            '--rietveld',
            '-m',
            commit_message,
        ] + ['--cc=' + email for email in cc_list])

    def get_directory_owners_to_cc(self):
        """Returns a list of email addresses to CC for the current import."""
        self.print_('## Gathering directory owners emails to CC.')
        directory_owners_file_path = self.finder.path_from_webkit_base(
            'Tools', 'Scripts', 'webkitpy', 'w3c', 'directory_owners.json')
        with open(directory_owners_file_path) as data_file:
            directory_to_owner = self.parse_directory_owners(json.load(data_file))
        out = self.check_run(['git', 'diff', 'origin/master', '--name-only'])
        changed_files = out.splitlines()
        return self.generate_email_list(changed_files, directory_to_owner)

    @staticmethod
    def parse_directory_owners(decoded_data_file):
        directory_dict = {}
        for dict_set in decoded_data_file:
            if dict_set['notification-email']:
                directory_dict[dict_set['directory']] = dict_set['notification-email']
        return directory_dict

    def generate_email_list(self, changed_files, directory_to_owner):
        """Returns a list of email addresses based on the given file list and
        directory-to-owner mapping.

        Args:
            changed_files: A list of file paths relative to the repository root.
            directory_to_owner: A dict mapping layout test directories to emails.

        Returns:
            A list of the email addresses to be notified for the current import.
        """
        email_addresses = set()
        for file_path in changed_files:
            test_path = self.finder.layout_test_name(file_path)
            if test_path is None:
                continue
            test_dir = self.fs.dirname(test_path)
            if test_dir in directory_to_owner:
                email_addresses.add(directory_to_owner[test_dir])
        return sorted(email_addresses)

    def fetch_new_expectations_and_baselines(self):
        """Adds new expectations and downloads baselines based on try job results, then commits and uploads the change."""
        self.print_('## Adding test expectations lines to LayoutTests/TestExpectations.')
        script_path = self.path_from_webkit_base('Tools', 'Scripts', 'update-w3c-test-expectations')
        self.run([self.host.executable, script_path, '--verbose'])
        message = 'Modify TestExpectations or download new baselines for tests.'
        self.check_run(['git', 'commit', '-a', '-m', message])
        self.git_cl.run(['upload', '-m', message, '--rietveld'])

    def update_all_test_expectations_files(self, deleted_tests, renamed_tests):
        """Updates all test expectations files for tests that have been deleted or renamed."""
        port = self.host.port_factory.get()
        for path, file_contents in port.all_expectations_dict().iteritems():

            parser = TestExpectationParser(port, all_tests=None, is_lint_mode=False)
            expectation_lines = parser.parse(path, file_contents)
            self._update_single_test_expectations_file(path, expectation_lines, deleted_tests, renamed_tests)

    def _update_single_test_expectations_file(self, path, expectation_lines, deleted_tests, renamed_tests):
        """Updates single test expectations file."""
        # FIXME: This won't work for removed or renamed directories with test expectations
        # that are directories rather than individual tests.
        new_lines = []
        changed_lines = []
        for expectation_line in expectation_lines:
            if expectation_line.name in deleted_tests:
                continue
            if expectation_line.name in renamed_tests:
                expectation_line.name = renamed_tests[expectation_line.name]
                # Upon parsing the file, a "path does not exist" warning is expected
                # to be there for tests that have been renamed, and if there are warnings,
                # then the original string is used. If the warnings are reset, then the
                # expectation line is re-serialized when output.
                expectation_line.warnings = []
                changed_lines.append(expectation_line)
            new_lines.append(expectation_line)
        new_file_contents = TestExpectations.list_to_string(new_lines, reconstitute_only_these=changed_lines)
        self.host.filesystem.write_text_file(path, new_file_contents)

    def _list_deleted_tests(self):
        """Returns a list of layout tests that have been deleted."""
        out = self.check_run(['git', 'diff', 'origin/master', '-M100%', '--diff-filter=D', '--name-only'])
        deleted_tests = []
        for line in out.splitlines():
            test = self.finder.layout_test_name(line)
            if test:
                deleted_tests.append(test)
        return deleted_tests

    def _list_renamed_tests(self):
        """Returns a dict mapping source to dest name for layout tests that have been renamed."""
        out = self.check_run(['git', 'diff', 'origin/master', '-M100%', '--diff-filter=R', '--name-status'])
        renamed_tests = {}
        for line in out.splitlines():
            _, source_path, dest_path = line.split()
            source_test = self.finder.layout_test_name(source_path)
            dest_test = self.finder.layout_test_name(dest_path)
            if source_test and dest_test:
                renamed_tests[source_test] = dest_test
        return renamed_tests
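
A minimal sketch of the rename parsing that _list_renamed_tests performs above, assuming typical output of "git diff -M100% --diff-filter=R --name-status"; the sample paths are invented for illustration:

# Each rename line has the form "R<score>\t<old path>\t<new path>", so splitting
# on whitespace yields the rename status plus the two paths. The paths below are
# hypothetical; the real importer additionally maps them to layout test names via
# finder.layout_test_name() and skips non-test files.
sample_output = 'R100\tLayoutTests/imported/wpt/old-name.html\tLayoutTests/imported/wpt/new-name.html\n'

renamed_tests = {}
for line in sample_output.splitlines():
    _, source_path, dest_path = line.split()
    renamed_tests[source_path] = dest_path

print(renamed_tests)  # {'LayoutTests/imported/wpt/old-name.html': 'LayoutTests/imported/wpt/new-name.html'}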
Example #35
0
class DirectoryOwnersExtractor(object):
    def __init__(self, filesystem=None):
        self.filesystem = filesystem or FileSystem()
        self.finder = WebKitFinder(self.filesystem)
        self.owner_map = None

    def read_owner_map(self):
        """Reads the W3CImportExpectations file and returns a map of directories to owners."""
        input_path = self.finder.path_from_webkit_base(
            'LayoutTests', 'W3CImportExpectations')
        input_contents = self.filesystem.read_text_file(input_path)
        self.owner_map = self.lines_to_owner_map(input_contents.splitlines())

    def lines_to_owner_map(self, lines):
        current_owners = []
        owner_map = {}
        for line in lines:
            owners = self.extract_owners(line)
            if owners:
                current_owners = owners
            directory = self.extract_directory(line)
            if current_owners and directory:
                owner_map[directory] = current_owners
        return owner_map

    @staticmethod
    def extract_owners(line):
        """Extracts owner email addresses listed on a line."""
        match = re.match(r'##? Owners?: (?P<addresses>.*)', line)
        if not match or not match.group('addresses'):
            return None
        email_part = match.group('addresses')
        addresses = [email_part] if ',' not in email_part else re.split(
            r',\s*', email_part)
        addresses = [s for s in addresses if re.match(r'\S+@\S+', s)]
        return addresses or None

    @staticmethod
    def extract_directory(line):
        match = re.match(r'# ?(?P<directory>\S+) \[ (Pass|Skip) \]', line)
        if match and match.group('directory'):
            return match.group('directory')
        match = re.match(r'(?P<directory>\S+) \[ Pass \]', line)
        if match and match.group('directory'):
            return match.group('directory')
        return None

    def list_owners(self, changed_files):
        """Looks up the owners for the given set of changed files.

        Args:
            changed_files: A list of file paths relative to the repository root.

        Returns:
            A dict mapping (owner) email addresses to (owned) directories.
        """
        tests = [self.finder.layout_test_name(path) for path in changed_files]
        tests = [t for t in tests if t is not None]
        email_map = {}
        for directory, owners in self.owner_map.iteritems():
            owned_tests = [t for t in tests if t.startswith(directory)]
            if owned_tests:
                for owner in owners:
                    email_map[owner] = directory
        return email_map
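
A hedged usage sketch of DirectoryOwnersExtractor.lines_to_owner_map, based on the regular expressions in extract_owners and extract_directory; the addresses and directory names below are invented:

lines = [
    '## Owners: [email protected], [email protected]',
    '# imported/wpt/some-feature [ Pass ]',
    '# Owner: [email protected]',
    'imported/wpt/other-feature [ Pass ]',
]

extractor = DirectoryOwnersExtractor()
owner_map = extractor.lines_to_owner_map(lines)
# owner_map == {
#     'imported/wpt/some-feature': ['[email protected]', '[email protected]'],
#     'imported/wpt/other-feature': ['[email protected]'],
# }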
Example #36
0
class DepsUpdater(object):
    def __init__(self, host):
        self.host = host
        self.executive = host.executive
        self.fs = host.filesystem
        self.finder = WebKitFinder(self.fs)
        self.verbose = False
        self.allow_local_blink_commits = False
        self.keep_w3c_repos_around = False

    def main(self, argv=None):
        self.parse_args(argv)

        self.cd('')
        if not self.checkout_is_okay():
            return 1

        self.print_('## noting the current Blink commitish')
        blink_commitish = self.run(['git', 'show-ref', 'HEAD'])[1].split()[0]

        wpt_import_text = self.update(
            'web-platform-tests',
            'https://chromium.googlesource.com/external/w3c/web-platform-tests.git'
        )

        for resource in ['testharnessreport.js', 'vendor-prefix.js']:
            source = self.path_from_webkit_base('LayoutTests', 'resources',
                                                resource)
            destination = self.path_from_webkit_base('LayoutTests', 'imported',
                                                     'web-platform-tests',
                                                     'resources', resource)
            self.copyfile(source, destination)
            self.run(['git', 'add', destination])

        css_import_text = self.update(
            'csswg-test',
            'https://chromium.googlesource.com/external/w3c/csswg-test.git')

        self.commit_changes_if_needed(blink_commitish, css_import_text,
                                      wpt_import_text)

        return 0

    def parse_args(self, argv):
        parser = argparse.ArgumentParser()
        parser.description = __doc__
        parser.add_argument('-v',
                            '--verbose',
                            action='store_true',
                            help='log what we are doing')
        parser.add_argument(
            '--allow-local-blink-commits',
            action='store_true',
            help='allow script to run even if we have local blink commits')
        parser.add_argument(
            '--keep-w3c-repos-around',
            action='store_true',
            help='leave the w3c repos around that were imported previously.')

        args = parser.parse_args(argv)
        self.allow_local_blink_commits = args.allow_local_blink_commits
        self.keep_w3c_repos_around = args.keep_w3c_repos_around
        self.verbose = args.verbose

    def checkout_is_okay(self):
        if self.run(['git', 'diff', '--quiet', 'HEAD'],
                    exit_on_failure=False)[0]:
            self.print_('## blink checkout is dirty, aborting')
            return False

        local_blink_commits = self.run(
            ['git', 'log', '--oneline', 'origin/master..HEAD'])[1]
        if local_blink_commits and not self.allow_local_blink_commits:
            self.print_('## blink checkout has local commits, aborting')
            return False

        if self.fs.exists(self.path_from_webkit_base('web-platform-tests')):
            self.print_('## web-platform-tests repo exists, aborting')
            return False

        if self.fs.exists(self.path_from_webkit_base('csswg-test')):
            self.print_('## csswg-test repo exists, aborting')
            return False

        return True

    def update(self, repo, url):
        self.print_('## cloning %s' % repo)
        self.cd('')
        self.run(['git', 'clone', url])
        self.cd(re.compile(r'.*/([^/]+)\.git').match(url).group(1))
        self.run(['git', 'submodule', 'update', '--init', '--recursive'])

        self.print_('## noting the revision we are importing')
        master_commitish = self.run(['git', 'show-ref',
                                     'origin/master'])[1].split()[0]

        self.print_('## cleaning out tests from LayoutTests/imported/%s' %
                    repo)
        dest_repo = self.path_from_webkit_base('LayoutTests', 'imported', repo)
        files_to_delete = self.fs.files_under(dest_repo,
                                              file_filter=self.is_not_baseline)
        for subpath in files_to_delete:
            self.remove('LayoutTests', 'imported', subpath)

        self.print_('## importing the tests')
        src_repo = self.path_from_webkit_base(repo)
        import_path = self.path_from_webkit_base('Tools', 'Scripts',
                                                 'import-w3c-tests')
        self.run(
            [self.host.executable, import_path, '-d', 'imported', src_repo])

        self.cd('')
        self.run(['git', 'add', '--all', 'LayoutTests/imported/%s' % repo])

        self.print_('## deleting manual tests')
        files_to_delete = self.fs.files_under(dest_repo,
                                              file_filter=self.is_manual_test)
        for subpath in files_to_delete:
            self.remove('LayoutTests', 'imported', subpath)

        self.print_('## deleting any orphaned baselines')
        previous_baselines = self.fs.files_under(dest_repo,
                                                 file_filter=self.is_baseline)
        for subpath in previous_baselines:
            full_path = self.fs.join(dest_repo, subpath)
            if self.fs.glob(full_path.replace('-expected.txt',
                                              '*')) == [full_path]:
                self.fs.remove(full_path)

        if not self.keep_w3c_repos_around:
            self.print_('## deleting %s repo' % repo)
            self.cd('')
            self.rmtree(repo)

        return 'imported %s@%s' % (repo, master_commitish)

    def commit_changes_if_needed(self, blink_commitish, css_import_text,
                                 wpt_import_text):
        if self.run(['git', 'diff', '--quiet', 'HEAD'],
                    exit_on_failure=False)[0]:
            self.print_('## committing changes')
            commit_msg = ('update-w3c-deps import using blink %s:\n'
                          '\n'
                          '%s\n'
                          '%s\n' %
                          (blink_commitish, css_import_text, wpt_import_text))
            path_to_commit_msg = self.path_from_webkit_base('commit_msg')
            if self.verbose:
                self.print_('cat > %s <<EOF' % path_to_commit_msg)
                self.print_(commit_msg)
                self.print_('EOF')
            self.fs.write_text_file(path_to_commit_msg, commit_msg)
            self.run(['git', 'commit', '-a', '-F', path_to_commit_msg])
            self.remove(path_to_commit_msg)
            self.print_('## Done: changes imported and committed')
        else:
            self.print_('## Done: no changes to import')

    def is_manual_test(self, fs, dirname, basename):
        return basename.endswith('-manual.html') or basename.endswith(
            '-manual.htm')

    def is_baseline(self, fs, dirname, basename):
        return basename.endswith('-expected.txt')

    def is_not_baseline(self, fs, dirname, basename):
        return not self.is_baseline(fs, dirname, basename)

    def run(self, cmd, exit_on_failure=True):
        if self.verbose:
            self.print_(' '.join(cmd))

        proc = self.executive.popen(cmd,
                                    stdout=self.executive.PIPE,
                                    stderr=self.executive.PIPE)
        out, err = proc.communicate()
        if proc.returncode or self.verbose:
            self.print_('# ret> %d' % proc.returncode)
            if out:
                for line in out.splitlines():
                    self.print_('# out> %s' % line)
            if err:
                for line in err.splitlines():
                    self.print_('# err> %s' % line)
        if exit_on_failure and proc.returncode:
            self.host.exit(proc.returncode)
        return proc.returncode, out

    def cd(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('cd %s' % dest)
        self.fs.chdir(dest)

    def copyfile(self, source, destination):
        if self.verbose:
            self.print_('cp %s %s' % (source, destination))
        self.fs.copyfile(source, destination)

    def remove(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('rm %s' % dest)
        self.fs.remove(dest)

    def rmtree(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_('rm -fr %s' % dest)
        self.fs.rmtree(dest)

    def path_from_webkit_base(self, *comps):
        return self.finder.path_from_webkit_base(*comps)

    def print_(self, msg):
        self.host.print_(msg)
Example #37
0
def absolute_chromium_wpt_dir(self):
    finder = WebKitFinder(self.host.filesystem)
    return finder.path_from_webkit_base('LayoutTests', 'imported', 'wpt')
Example #38
0
def load_test_repositories(filesystem=FileSystem()):
    webkit_finder = WebKitFinder(filesystem)
    test_repositories_path = webkit_finder.path_from_webkit_base('LayoutTests', 'imported', 'w3c', 'resources', 'TestRepositories')
    return json.loads(filesystem.read_text_file(test_repositories_path))
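
A hypothetical consumer of load_test_repositories(); the TestRepositories file is JSON, but the iteration below assumes it holds a list of objects with 'name' and 'url' fields, which is an assumption for illustration only:

for repository in load_test_repositories():
    # Both keys are assumed; check the TestRepositories file for the actual schema.
    print('%s -> %s' % (repository.get('name'), repository.get('url')))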
Example #39
0
class DepsUpdater(object):
    def __init__(self, host):
        self.host = host
        self.executive = host.executive
        self.fs = host.filesystem
        self.finder = WebKitFinder(self.fs)
        self.verbose = False
        self.allow_local_blink_commits = False
        self.keep_w3c_repos_around = False

    def main(self, argv=None):
        self.parse_args(argv)

        self.cd("")
        if not self.checkout_is_okay():
            return 1

        self.print_("## noting the current Blink commitish")
        blink_commitish = self.run(["git", "show-ref", "HEAD"])[1].split()[0]

        wpt_import_text = self.update(
            "web-platform-tests", "https://chromium.googlesource.com/external/w3c/web-platform-tests.git"
        )

        for resource in ["testharnessreport.js", "vendor-prefix.js"]:
            source = self.path_from_webkit_base("LayoutTests", "resources", resource)
            destination = self.path_from_webkit_base(
                "LayoutTests", "imported", "web-platform-tests", "resources", resource
            )
            self.copyfile(source, destination)
            self.run(["git", "add", destination])

        css_import_text = self.update("csswg-test", "https://chromium.googlesource.com/external/w3c/csswg-test.git")

        self.commit_changes_if_needed(blink_commitish, css_import_text, wpt_import_text)

        return 0

    def parse_args(self, argv):
        parser = argparse.ArgumentParser()
        parser.description = __doc__
        parser.add_argument("-v", "--verbose", action="store_true", help="log what we are doing")
        parser.add_argument(
            "--allow-local-blink-commits",
            action="store_true",
            help="allow script to run even if we have local blink commits",
        )
        parser.add_argument(
            "--keep-w3c-repos-around",
            action="store_true",
            help="leave the w3c repos around that were imported previously.",
        )

        args = parser.parse_args(argv)
        self.allow_local_blink_commits = args.allow_local_blink_commits
        self.keep_w3c_repos_around = args.keep_w3c_repos_around
        self.verbose = args.verbose

    def checkout_is_okay(self):
        if self.run(["git", "diff", "--quiet", "HEAD"], exit_on_failure=False)[0]:
            self.print_("## blink checkout is dirty, aborting")
            return False

        local_blink_commits = self.run(["git", "log", "--oneline", "origin/master..HEAD"])[1]
        if local_blink_commits and not self.allow_local_blink_commits:
            self.print_("## blink checkout has local commits, aborting")
            return False

        if self.fs.exists(self.path_from_webkit_base("web-platform-tests")):
            self.print_("## web-platform-tests repo exists, aborting")
            return False

        if self.fs.exists(self.path_from_webkit_base("csswg-test")):
            self.print_("## csswg-test repo exists, aborting")
            return False

        return True

    def update(self, repo, url):
        self.print_("## cloning %s" % repo)
        self.cd("")
        self.run(["git", "clone", url])
        self.cd(re.compile(r".*/([^/]+)\.git").match(url).group(1))
        self.run(["git", "submodule", "update", "--init", "--recursive"])

        self.print_("## noting the revision we are importing")
        master_commitish = self.run(["git", "show-ref", "origin/master"])[1].split()[0]

        self.print_("## cleaning out tests from LayoutTests/imported/%s" % repo)
        dest_repo = self.path_from_webkit_base("LayoutTests", "imported", repo)
        files_to_delete = self.fs.files_under(dest_repo, file_filter=self.is_not_baseline)
        for subpath in files_to_delete:
            self.remove("LayoutTests", "imported", subpath)

        self.print_("## importing the tests")
        src_repo = self.path_from_webkit_base(repo)
        import_path = self.path_from_webkit_base("Tools", "Scripts", "import-w3c-tests")
        self.run([self.host.executable, import_path, "-d", "imported", src_repo])

        self.cd("")
        self.run(["git", "add", "--all", "LayoutTests/imported/%s" % repo])

        self.print_("## deleting manual tests")
        files_to_delete = self.fs.files_under(dest_repo, file_filter=self.is_manual_test)
        for subpath in files_to_delete:
            self.remove("LayoutTests", "imported", subpath)

        self.print_("## deleting any orphaned baselines")
        previous_baselines = self.fs.files_under(dest_repo, file_filter=self.is_baseline)
        for subpath in previous_baselines:
            full_path = self.fs.join(dest_repo, subpath)
            if self.fs.glob(full_path.replace("-expected.txt", "*")) == [full_path]:
                self.fs.remove(full_path)

        if not self.keep_w3c_repos_around:
            self.print_("## deleting %s repo" % repo)
            self.cd("")
            self.rmtree(repo)

        return "imported %s@%s" % (repo, master_commitish)

    def commit_changes_if_needed(self, blink_commitish, css_import_text, wpt_import_text):
        if self.run(["git", "diff", "--quiet", "HEAD"], exit_on_failure=False)[0]:
            self.print_("## committing changes")
            commit_msg = (
                "update-w3c-deps import using blink %s:\n"
                "\n"
                "%s\n"
                "%s\n" % (blink_commitish, css_import_text, wpt_import_text)
            )
            path_to_commit_msg = self.path_from_webkit_base("commit_msg")
            if self.verbose:
                self.print_("cat > %s <<EOF" % path_to_commit_msg)
                self.print_(commit_msg)
                self.print_("EOF")
            self.fs.write_text_file(path_to_commit_msg, commit_msg)
            self.run(["git", "commit", "-a", "-F", path_to_commit_msg])
            self.remove(path_to_commit_msg)
            self.print_("## Done: changes imported and committed")
        else:
            self.print_("## Done: no changes to import")

    def is_manual_test(self, fs, dirname, basename):
        return basename.endswith("-manual.html") or basename.endswith("-manual.htm")

    def is_baseline(self, fs, dirname, basename):
        return basename.endswith("-expected.txt")

    def is_not_baseline(self, fs, dirname, basename):
        return not self.is_baseline(fs, dirname, basename)

    def run(self, cmd, exit_on_failure=True):
        if self.verbose:
            self.print_(" ".join(cmd))

        proc = self.executive.popen(cmd, stdout=self.executive.PIPE, stderr=self.executive.PIPE)
        out, err = proc.communicate()
        if proc.returncode or self.verbose:
            self.print_("# ret> %d" % proc.returncode)
            if out:
                for line in out.splitlines():
                    self.print_("# out> %s" % line)
            if err:
                for line in err.splitlines():
                    self.print_("# err> %s" % line)
        if exit_on_failure and proc.returncode:
            self.host.exit(proc.returncode)
        return proc.returncode, out

    def cd(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_("cd %s" % dest)
        self.fs.chdir(dest)

    def copyfile(self, source, destination):
        if self.verbose:
            self.print_("cp %s %s" % (source, destination))
        self.fs.copyfile(source, destination)

    def remove(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_("rm %s" % dest)
        self.fs.remove(dest)

    def rmtree(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_("rm -fr %s" % dest)
        self.fs.rmtree(dest)

    def path_from_webkit_base(self, *comps):
        return self.finder.path_from_webkit_base(*comps)

    def print_(self, msg):
        self.host.print_(msg)
Example #40
0
class WPTRunner(object):
    def __init__(self,
                 port,
                 host,
                 script_name,
                 options,
                 downloader_class=TestDownloader,
                 create_webdriver_func=create_webdriver,
                 spawn_wpt_func=spawn_wpt):
        self._port = port
        self._host = host
        self._finder = WebKitFinder(self._host.filesystem)

        self._script_name = script_name
        self._options = options

        self._downloader_class = downloader_class
        self._create_webdriver_func = create_webdriver_func
        self._spawn_wpt_func = spawn_wpt_func

    def prepare_wpt_checkout(self):
        if not self._options.wpt_checkout:
            test_downloader = self._downloader_class(
                WPTPaths.checkout_directory(self._finder), self._host,
                self._downloader_class.default_options())
            test_downloader.clone_tests()
            self._options.wpt_checkout = WPTPaths.wpt_checkout_path(
                self._finder)

        if not self._options.wpt_checkout or not self._host.filesystem.exists(
                self._options.wpt_checkout):
            _log.error("Valid web-platform-tests directory required")
            return False
        return True

    def _generate_metadata_directory(self, metadata_path):
        expectations_file = self._finder.path_from_webkit_base(
            "WebPlatformTests", self._port.name(), "TestExpectations.json")
        if not self._host.filesystem.exists(expectations_file):
            _log.error("Port-specific expectation .json file does not exist")
            return False

        with self._host.filesystem.open_text_file_for_reading(
                expectations_file) as fd:
            expectations = json.load(fd)

        self._host.filesystem.rmtree(metadata_path)

        for test_name, test_data in expectations.iteritems():
            ini_file = self._host.filesystem.join(metadata_path,
                                                  test_name + ".ini")
            self._host.filesystem.maybe_make_directory(
                self._host.filesystem.dirname(ini_file))
            with self._host.filesystem.open_text_file_for_writing(
                    ini_file) as fd:
                fd.write("[{}]\n".format(
                    test_data.get("test_name",
                                  self._host.filesystem.basename(test_name))))
                if "disabled" in test_data:
                    fd.write("    disabled: {}\n".format(
                        test_data["disabled"]))
                elif "expected" in test_data:
                    fd.write("    expected: {}\n".format(
                        test_data["expected"]))
                elif "subtests" in test_data:
                    for subtest_name, subtest_data in test_data[
                            "subtests"].iteritems():
                        fd.write("    [{}]\n".format(subtest_name))
                        if "expected" in subtest_data:
                            fd.write("        expected: {}\n".format(
                                subtest_data["expected"]))
                        else:
                            _log.error(
                                "Invalid format of TestExpectations.json")
                            return False
                else:
                    _log.error("Invalid format of TestExpectations.json")
                    return False

        return True

    def run(self, args):
        if not self.prepare_wpt_checkout():
            return False

        # Parse the test expectations JSON and construct corresponding metadata files.
        metadata_path = self._port.wpt_metadata_directory()
        if not self._generate_metadata_directory(metadata_path):
            return False

        include_manifest_path = self._finder.path_from_webkit_base(
            "WebPlatformTests", self._port.name(), "TestManifest.ini")
        if not self._host.filesystem.exists(include_manifest_path):
            _log.error("Port-specific manifest .ini file does not exist")
            return False

        # Construct the WebDriver instance and run WPT tests via the 'webkit' product.
        manifest_path = self._port.wpt_manifest_file()
        webdriver = self._create_webdriver_func(self._port)

        wpt_args = [
            "run", "--webkit-port={}".format(self._port.name()),
            "--processes={}".format(self._options.child_processes),
            "--metadata={}".format(metadata_path),
            "--manifest={}".format(manifest_path),
            "--include-manifest={}".format(include_manifest_path),
            "--webdriver-binary={}".format(webdriver.binary_path()),
            "--binary={}".format(webdriver.browser_path())
        ]
        wpt_args += [
            "--binary-arg={}".format(arg) for arg in webdriver.browser_args()
        ]
        wpt_args += ["webkit"] + args

        self._spawn_wpt_func(self._script_name, self._options.wpt_checkout,
                             wpt_args)
        return True
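
An illustrative sketch of the TestExpectations.json-to-metadata translation performed by _generate_metadata_directory above; the test path and subtest name are invented:

# Given an expectations entry such as
#
#   {
#       "imported/w3c/web-platform-tests/dom/example.html": {
#           "subtests": {
#               "first subtest": {"expected": "FAIL"}
#           }
#       }
#   }
#
# the generator writes <metadata_path>/imported/w3c/web-platform-tests/dom/example.html.ini
# with the contents:
#
#   [example.html]
#       [first subtest]
#           expected: FAIL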
Example #41
0
class TestImporter(object):

    def __init__(self, host, dir_to_import, top_of_repo, options):
        self.host = host
        self.dir_to_import = dir_to_import
        self.top_of_repo = top_of_repo
        self.options = options

        self.filesystem = self.host.filesystem
        self.webkit_finder = WebKitFinder(self.filesystem)
        self._webkit_root = self.webkit_finder.webkit_base()
        self.layout_tests_dir = self.webkit_finder.path_from_webkit_base('LayoutTests')
        self.destination_directory = self.filesystem.normpath(self.filesystem.join(self.layout_tests_dir, options.destination,
                                                                                   self.filesystem.basename(self.top_of_repo)))
        self.import_in_place = (self.dir_to_import == self.destination_directory)

        self.changeset = CHANGESET_NOT_AVAILABLE
        self.test_status = TEST_STATUS_UNKNOWN

        self.import_list = []

    def do_import(self):
        _log.info("Importing %s into %s", self.dir_to_import, self.destination_directory)
        self.find_importable_tests(self.dir_to_import)
        self.load_changeset()
        self.import_tests()

    def load_changeset(self):
        """Returns the current changeset from mercurial or "Not Available"."""
        try:
            self.changeset = self.host.executive.run_command(['hg', 'tip']).split('changeset:')[1]
        except (OSError, ScriptError):
            self.changeset = CHANGESET_NOT_AVAILABLE

    def find_importable_tests(self, directory):
        # FIXME: use filesystem
        paths_to_skip = self.find_paths_to_skip()

        for root, dirs, files in os.walk(directory):
            cur_dir = root.replace(self.layout_tests_dir + '/', '') + '/'
            _log.info('  scanning ' + cur_dir + '...')
            total_tests = 0
            reftests = 0
            jstests = 0

            # "archive" and "data" dirs are internal csswg things that live in every approved directory.
            # FIXME: skip 'incoming' tests for now, but we should rework the 'test_status' concept and
            # support reading them as well.
            DIRS_TO_SKIP = ('.git', '.hg', 'data', 'archive', 'incoming')
            if dirs:
                for d in DIRS_TO_SKIP:
                    if d in dirs:
                        dirs.remove(d)

                for path in paths_to_skip:
                    path_base = path.replace(cur_dir, '')
                    path_full = self.filesystem.join(root, path_base)
                    if path_base in dirs:
                        dirs.remove(path_base)
                        if not self.options.dry_run and self.import_in_place:
                            _log.info("  pruning %s" % path_base)
                            self.filesystem.rmtree(path_full)

            copy_list = []

            for filename in files:
                path_full = self.filesystem.join(root, filename)
                path_base = path_full.replace(self.layout_tests_dir + '/', '')
                if path_base in paths_to_skip:
                    if not self.options.dry_run and self.import_in_place:
                        _log.info("  pruning %s" % path_base)
                        self.filesystem.remove(path_full)
                        continue
                # FIXME: This block should really be a separate function, but the early-continues make that difficult.

                if filename.startswith('.') or filename.endswith('.pl'):
                    continue  # For some reason the w3c repo contains random perl scripts we don't care about.

                fullpath = os.path.join(root, filename)

                mimetype = mimetypes.guess_type(fullpath)
                if 'html' not in str(mimetype[0]) and 'xml' not in str(mimetype[0]):
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                if root.endswith('resources'):
                    copy_list.append({'src': fullpath, 'dest': filename})
                    continue

                test_parser = TestParser(vars(self.options), filename=fullpath)
                test_info = test_parser.analyze_test()
                if test_info is None:
                    continue

                if 'reference' in test_info.keys():
                    reftests += 1
                    total_tests += 1
                    test_basename = os.path.basename(test_info['test'])

                    # Add the ref file, following WebKit style.
                    # FIXME: Ideally we'd support reading the metadata
                    # directly rather than relying  on a naming convention.
                    # Using a naming convention creates duplicate copies of the
                    # reference files.
                    ref_file = os.path.splitext(test_basename)[0] + '-expected'
                    ref_file += os.path.splitext(test_basename)[1]

                    copy_list.append({'src': test_info['reference'], 'dest': ref_file})
                    copy_list.append({'src': test_info['test'], 'dest': filename})

                    # Update any support files that need to move as well to remain relative to the -expected file.
                    if 'refsupport' in test_info.keys():
                        for support_file in test_info['refsupport']:
                            source_file = os.path.join(os.path.dirname(test_info['reference']), support_file)
                            source_file = os.path.normpath(source_file)

                            # Keep the dest as it was
                            to_copy = {'src': source_file, 'dest': support_file}

                            # Only add it once
                            if to_copy not in copy_list:
                                copy_list.append(to_copy)
                elif 'jstest' in test_info.keys():
                    jstests += 1
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename})
                else:
                    total_tests += 1
                    copy_list.append({'src': fullpath, 'dest': filename})

            if not total_tests:
                # We can skip the support directory if no tests were found.
                if 'support' in dirs:
                    dirs.remove('support')

            if copy_list:
                # Only add this directory to the list if there's something to import
                self.import_list.append({'dirname': root, 'copy_list': copy_list,
                    'reftests': reftests, 'jstests': jstests, 'total_tests': total_tests})

    def find_paths_to_skip(self):
        if self.options.ignore_expectations:
            return set()

        paths_to_skip = set()
        port = self.host.port_factory.get()
        w3c_import_expectations_path = self.webkit_finder.path_from_webkit_base('LayoutTests', 'W3CImportExpectations')
        w3c_import_expectations = self.filesystem.read_text_file(w3c_import_expectations_path)
        parser = TestExpectationParser(port, full_test_list=(), is_lint_mode=False)
        expectation_lines = parser.parse(w3c_import_expectations_path, w3c_import_expectations)
        for line in expectation_lines:
            if 'SKIP' in line.expectations:
                if line.specifiers:
                    _log.warning("W3CImportExpectations:%s should not have any specifiers" % line.line_numbers)
                    continue
                paths_to_skip.add(line.name)
        return paths_to_skip

    def import_tests(self):
        total_imported_tests = 0
        total_imported_reftests = 0
        total_imported_jstests = 0
        total_prefixed_properties = {}

        for dir_to_copy in self.import_list:
            total_imported_tests += dir_to_copy['total_tests']
            total_imported_reftests += dir_to_copy['reftests']
            total_imported_jstests += dir_to_copy['jstests']

            prefixed_properties = []

            if not dir_to_copy['copy_list']:
                continue

            orig_path = dir_to_copy['dirname']

            subpath = os.path.relpath(orig_path, self.top_of_repo)
            new_path = os.path.join(self.destination_directory, subpath)

            if not os.path.exists(new_path):
                os.makedirs(new_path)

            copied_files = []

            for file_to_copy in dir_to_copy['copy_list']:
                # FIXME: Split this block into a separate function.
                orig_filepath = os.path.normpath(file_to_copy['src'])

                if os.path.isdir(orig_filepath):
                    # FIXME: Figure out what is triggering this and what to do about it.
                    _log.error('%s refers to a directory' % orig_filepath)
                    continue

                if not os.path.exists(orig_filepath):
                    _log.warning('%s not found. Possible error in the test.', orig_filepath)
                    continue

                new_filepath = os.path.join(new_path, file_to_copy['dest'])

                if not os.path.exists(os.path.dirname(new_filepath)):
                    if not self.import_in_place and not self.options.dry_run:
                        os.makedirs(os.path.dirname(new_filepath))

                if not self.options.overwrite and os.path.exists(new_filepath):
                    _log.info('  skipping import of existing file ' + new_filepath)
                else:
                    # FIXME: Maybe doing a file diff is in order here for existing files?
                    # In other words, there's no sense in overwriting identical files, but
                    # there's no harm in copying the identical thing.
                    _log.info('  importing %s', os.path.relpath(new_filepath, self.layout_tests_dir))

                # Only html, xml, or css should be converted
                # FIXME: Eventually, so should js when support is added for this type of conversion
                mimetype = mimetypes.guess_type(orig_filepath)
                if 'html' in str(mimetype[0]) or 'xml' in str(mimetype[0]) or 'css' in str(mimetype[0]):
                    converted_file = convert_for_webkit(new_path, filename=orig_filepath)

                    if not converted_file:
                        if not self.import_in_place and not self.options.dry_run:
                            shutil.copyfile(orig_filepath, new_filepath)  # The file was unmodified.
                    else:
                        for prefixed_property in converted_file[0]:
                            total_prefixed_properties.setdefault(prefixed_property, 0)
                            total_prefixed_properties[prefixed_property] += 1

                        prefixed_properties.extend(set(converted_file[0]) - set(prefixed_properties))
                        if not self.options.dry_run:
                            outfile = open(new_filepath, 'wb')
                            outfile.write(converted_file[1])
                            outfile.close()
                else:
                    if not self.import_in_place and not self.options.dry_run:
                        shutil.copyfile(orig_filepath, new_filepath)

                copied_files.append(new_filepath.replace(self._webkit_root, ''))

            if not self.import_in_place and not self.options.dry_run:
                self.remove_deleted_files(new_path, copied_files)
                self.write_import_log(new_path, copied_files, prefixed_properties)

        _log.info('')
        _log.info('Import complete')
        _log.info('')
        _log.info('IMPORTED %d TOTAL TESTS', total_imported_tests)
        _log.info('Imported %d reftests', total_imported_reftests)
        _log.info('Imported %d JS tests', total_imported_jstests)
        _log.info('Imported %d pixel/manual tests', total_imported_tests - total_imported_jstests - total_imported_reftests)
        _log.info('')
        _log.info('Properties needing prefixes (by count):')
        for prefixed_property in sorted(total_prefixed_properties, key=lambda p: total_prefixed_properties[p]):
            _log.info('  %s: %s', prefixed_property, total_prefixed_properties[prefixed_property])

    def setup_destination_directory(self):
        """ Creates a destination directory that mirrors that of the source approved or submitted directory """

        self.update_test_status()

        start = self.dir_to_import.find(self.test_status)
        new_subpath = self.dir_to_import[len(self.top_of_repo):]

        destination_directory = os.path.join(self.destination_directory, new_subpath)

        if not os.path.exists(destination_directory):
            os.makedirs(destination_directory)

        _log.info('Tests will be imported into: %s', destination_directory)

    def update_test_status(self):
        """ Sets the test status to either 'approved' or 'submitted' """

        status = TEST_STATUS_UNKNOWN

        directory_parts = self.dir_to_import.split(os.path.sep)
        for test_status in VALID_TEST_STATUSES:
            if test_status in directory_parts:
                status = test_status

        self.test_status = status

    def remove_deleted_files(self, dir_to_import, new_file_list):
        previous_file_list = []

        import_log_file = os.path.join(dir_to_import, 'w3c-import.log')
        if not os.path.exists(import_log_file):
            return

        import_log = open(import_log_file, 'r')
        contents = import_log.readlines()

        if 'List of files:\n' in contents:
            list_index = contents.index('List of files:\n') + 1
            previous_file_list = [filename.strip() for filename in contents[list_index:]]

        deleted_files = set(previous_file_list) - set(new_file_list)
        for deleted_file in deleted_files:
            _log.info('Deleting file removed from the W3C repo: %s', deleted_file)
            deleted_file = os.path.join(self._webkit_root, deleted_file)
            os.remove(deleted_file)

        import_log.close()

    def write_import_log(self, dir_to_import, file_list, prop_list):
        now = datetime.datetime.now()

        import_log = open(os.path.join(dir_to_import, 'w3c-import.log'), 'w')
        import_log.write('The tests in this directory were imported from the W3C repository.\n')
        import_log.write('Do NOT modify these tests directly in WebKit. Instead, push changes to the W3C CSS repo:\n\n')
        import_log.write('http://hg.csswg.org/test\n\n')
        import_log.write('Then run Tools/Scripts/import-w3c-tests in WebKit to reimport\n\n')
        import_log.write('Do NOT modify or remove this file\n\n')
        import_log.write('------------------------------------------------------------------------\n')
        import_log.write('Last Import: ' + now.strftime('%Y-%m-%d %H:%M') + '\n')
        import_log.write('W3C Mercurial changeset: ' + self.changeset + '\n')
        import_log.write('Test status at time of import: ' + self.test_status + '\n')
        import_log.write('------------------------------------------------------------------------\n')
        import_log.write('Properties requiring vendor prefixes:\n')
        if prop_list:
            for prop in prop_list:
                import_log.write(prop + '\n')
        else:
            import_log.write('None\n')
        import_log.write('------------------------------------------------------------------------\n')
        import_log.write('List of files:\n')
        for item in file_list:
            import_log.write(item + '\n')

        import_log.close()
Example #42
0
class DepsUpdater(object):
    def __init__(self, host):
        self.host = host
        self.executive = host.executive
        self.fs = host.filesystem
        self.finder = WebKitFinder(self.fs)
        self.verbose = False
        self.git_cl = None

    def main(self, argv=None):
        options = self.parse_args(argv)
        self.verbose = options.verbose

        if not self.checkout_is_okay(options.allow_local_commits):
            return 1

        self.git_cl = GitCL(self.host, auth_refresh_token_json=options.auth_refresh_token_json)

        self.print_("## Noting the current Chromium commit.")
        _, show_ref_output = self.run(["git", "show-ref", "HEAD"])
        chromium_commitish = show_ref_output.split()[0]

        if options.target == "wpt":
            import_commitish = self.update(WPT_DEST_NAME, WPT_REPO_URL, options.keep_w3c_repos_around, options.revision)
            self._copy_resources()
        elif options.target == "css":
            import_commitish = self.update(CSS_DEST_NAME, CSS_REPO_URL, options.keep_w3c_repos_around, options.revision)
        else:
            raise AssertionError("Unsupported target %s" % options.target)

        has_changes = self.commit_changes_if_needed(chromium_commitish, import_commitish)
        if options.auto_update and has_changes:
            commit_successful = self.do_auto_update()
            if not commit_successful:
                return 1
        return 0

    def parse_args(self, argv):
        parser = argparse.ArgumentParser()
        parser.description = __doc__
        parser.add_argument("-v", "--verbose", action="store_true", help="log what we are doing")
        parser.add_argument(
            "--allow-local-commits", action="store_true", help="allow script to run even if we have local commits"
        )
        parser.add_argument(
            "--keep-w3c-repos-around",
            action="store_true",
            help="leave the w3c repos around that were imported previously.",
        )
        parser.add_argument("-r", dest="revision", action="store", help="Target revision.")
        parser.add_argument(
            "target",
            choices=["css", "wpt"],
            help='Target repository.  "css" for csswg-test, "wpt" for web-platform-tests.',
        )
        parser.add_argument("--auto-update", action="store_true", help="uploads CL and initiates commit queue.")
        parser.add_argument("--auth-refresh-token-json", help="Rietveld auth refresh JSON token.")
        return parser.parse_args(argv)

    def checkout_is_okay(self, allow_local_commits):
        git_diff_retcode, _ = self.run(["git", "diff", "--quiet", "HEAD"], exit_on_failure=False)
        if git_diff_retcode:
            self.print_("## Checkout is dirty; aborting.")
            return False

        local_commits = self.run(["git", "log", "--oneline", "origin/master..HEAD"])[1]
        if local_commits and not allow_local_commits:
            self.print_("## Checkout has local commits; aborting. Use --allow-local-commits to allow this.")
            return False

        if self.fs.exists(self.path_from_webkit_base(WPT_DEST_NAME)):
            self.print_("## WebKit/%s exists; aborting." % WPT_DEST_NAME)
            return False

        if self.fs.exists(self.path_from_webkit_base(CSS_DEST_NAME)):
            self.print_("## WebKit/%s repo exists; aborting." % CSS_DEST_NAME)
            return False

        return True

    def _copy_resources(self):
        """Copies resources from LayoutTests/resources to wpt and vice versa.

        There are resources from our repository that we use instead of the
        upstream versions. Conversely, there are also some resources that
        are copied in the other direction.

        Specifically:
          - testharnessreport.js contains code needed to integrate our testing
            with testharness.js; we also want our code to be used for tests
            in wpt.
          - TODO(qyearsley, jsbell): Document why the other files are copied,
            or stop copying them if it's unnecessary.

        If this method is changed, the lists of files expected to be identical
        in LayoutTests/PRESUBMIT.py should also be changed.
        """
        resources_to_copy_to_wpt = [
            ("testharnessreport.js", "resources"),
            ("WebIDLParser.js", "resources"),
            ("vendor-prefix.js", "common"),
        ]
        resources_to_copy_from_wpt = [("idlharness.js", "resources"), ("testharness.js", "resources")]
        for filename, wpt_subdir in resources_to_copy_to_wpt:
            source = self.path_from_webkit_base("LayoutTests", "resources", filename)
            destination = self.path_from_webkit_base("LayoutTests", "imported", WPT_DEST_NAME, wpt_subdir, filename)
            self.copyfile(source, destination)
            self.run(["git", "add", destination])
        for filename, wpt_subdir in resources_to_copy_from_wpt:
            source = self.path_from_webkit_base("LayoutTests", "imported", WPT_DEST_NAME, wpt_subdir, filename)
            destination = self.path_from_webkit_base("LayoutTests", "resources", filename)
            self.copyfile(source, destination)
            self.run(["git", "add", destination])

    def update(self, dest_dir_name, url, keep_w3c_repos_around, revision):
        """Updates an imported repository.

        Args:
            dest_dir_name: The destination directory name.
            url: URL of the git repository.
            keep_w3c_repos_around: If True, the temporary clone is left on disk.
            revision: Commit hash to check out, or None to import the tip of origin/master.

        Returns:
            A string for the commit description "<destination>@<commitish>".
        """
        temp_repo_path = self.path_from_webkit_base(dest_dir_name)
        self.print_("## Cloning %s into %s." % (url, temp_repo_path))
        self.run(["git", "clone", url, temp_repo_path])

        if revision is not None:
            self.print_("## Checking out %s" % revision)
            self.run(["git", "checkout", revision], cwd=temp_repo_path)
        self.run(["git", "submodule", "update", "--init", "--recursive"], cwd=temp_repo_path)

        self.print_("## Noting the revision we are importing.")
        _, show_ref_output = self.run(["git", "show-ref", "origin/master"], cwd=temp_repo_path)
        master_commitish = show_ref_output.split()[0]

        self.print_("## Cleaning out tests from LayoutTests/imported/%s." % dest_dir_name)
        dest_path = self.path_from_webkit_base("LayoutTests", "imported", dest_dir_name)
        files_to_delete = self.fs.files_under(dest_path, file_filter=self.is_not_baseline)
        for subpath in files_to_delete:
            self.remove("LayoutTests", "imported", subpath)

        self.print_("## Importing the tests.")
        src_repo = self.path_from_webkit_base(dest_dir_name)
        import_path = self.path_from_webkit_base("Tools", "Scripts", "import-w3c-tests")
        self.run([self.host.executable, import_path, "-d", "imported", src_repo])

        self.run(["git", "add", "--all", "LayoutTests/imported/%s" % dest_dir_name])

        self.print_("## Deleting manual tests.")
        files_to_delete = self.fs.files_under(dest_path, file_filter=self.is_manual_test)
        for subpath in files_to_delete:
            self.remove("LayoutTests", "imported", subpath)

        self.print_("## Deleting any orphaned baselines.")
        previous_baselines = self.fs.files_under(dest_path, file_filter=self.is_baseline)
        for subpath in previous_baselines:
            full_path = self.fs.join(dest_path, subpath)
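            # If the only file matching "<test name>*" is the -expected.txt itself,
            # the test it belonged to no longer exists, so the baseline is orphaned.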
            if self.fs.glob(full_path.replace("-expected.txt", "*")) == [full_path]:
                self.fs.remove(full_path)

        if not keep_w3c_repos_around:
            self.print_("## Deleting temp repo directory %s." % temp_repo_path)
            self.rmtree(temp_repo_path)

        self.print_("## Updating TestExpectations for any removed or renamed tests.")
        self.update_all_test_expectations_files(self._list_deleted_tests(), self._list_renamed_tests())

        return "%s@%s" % (dest_dir_name, master_commitish)

    def commit_changes_if_needed(self, chromium_commitish, import_commitish):
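        """Commits the imported changes if the working tree differs from HEAD.

        Returns True if a commit was made, False if there was nothing to commit.
        """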
        if self.run(["git", "diff", "--quiet", "HEAD"], exit_on_failure=False)[0]:
            self.print_("## Committing changes.")
            commit_msg = (
                "Import %s\n" "\n" "Using update-w3c-deps in Chromium %s.\n" % (import_commitish, chromium_commitish)
            )
            path_to_commit_msg = self.path_from_webkit_base("commit_msg")
            if self.verbose:
                self.print_("cat > %s <<EOF" % path_to_commit_msg)
                self.print_(commit_msg)
                self.print_("EOF")
            self.fs.write_text_file(path_to_commit_msg, commit_msg)
            self.run(["git", "commit", "-a", "-F", path_to_commit_msg])
            self.remove(path_to_commit_msg)
            self.print_("## Done: changes imported and committed.")
            return True
        else:
            self.print_("## Done: no changes to import.")
            return False

    def is_manual_test(self, fs, dirname, basename):
        """Returns True if the file should be removed because it's a manual test.

        Tests with "-manual" in the name are not considered manual tests
        if there is a corresponding JS automation file.
        """
        basename_without_extension, _ = self.fs.splitext(basename)
        if not basename_without_extension.endswith("-manual"):
            return False
        dir_from_wpt = fs.relpath(dirname, self.path_from_webkit_base("LayoutTests", "imported", "wpt"))
        automation_dir = self.path_from_webkit_base("LayoutTests", "imported", "wpt_automation", dir_from_wpt)
        if fs.isfile(fs.join(automation_dir, "%s-automation.js" % basename_without_extension)):
            return False
        return True

    # Callback for FileSystem.files_under; not all arguments used - pylint: disable=unused-argument
    def is_baseline(self, fs, dirname, basename):
        return basename.endswith("-expected.txt")

    def is_not_baseline(self, fs, dirname, basename):
        return not self.is_baseline(fs, dirname, basename)

    def run(self, cmd, exit_on_failure=True, cwd=None):
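        """Runs a command from the WebKit base directory (or cwd, if given).

        Echoes the command's return code and output when verbose or when the
        command fails, and exits the host on failure unless exit_on_failure
        is False.

        Returns:
            A (return code, stdout) tuple.
        """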
        if self.verbose:
            self.print_(" ".join(cmd))

        cwd = cwd or self.finder.webkit_base()
        proc = self.executive.popen(cmd, stdout=self.executive.PIPE, stderr=self.executive.PIPE, cwd=cwd)
        out, err = proc.communicate()
        if proc.returncode or self.verbose:
            self.print_("# ret> %d" % proc.returncode)
            if out:
                for line in out.splitlines():
                    self.print_("# out> %s" % line)
            if err:
                for line in err.splitlines():
                    self.print_("# err> %s" % line)
        if exit_on_failure and proc.returncode:
            self.host.exit(proc.returncode)
        return proc.returncode, out

    def check_run(self, command):
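        """Runs a command, raising an exception if it fails, and returns its output."""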
        return_code, out = self.run(command)
        if return_code:
            raise Exception("%s failed with exit code %d." % " ".join(command), return_code)
        return out

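    # Thin wrappers around filesystem/host operations; in verbose mode they echo
    # the equivalent shell command before performing it.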
    def copyfile(self, source, destination):
        if self.verbose:
            self.print_("cp %s %s" % (source, destination))
        self.fs.copyfile(source, destination)

    def remove(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_("rm %s" % dest)
        self.fs.remove(dest)

    def rmtree(self, *comps):
        dest = self.path_from_webkit_base(*comps)
        if self.verbose:
            self.print_("rm -fr %s" % dest)
        self.fs.rmtree(dest)

    def path_from_webkit_base(self, *comps):
        return self.finder.path_from_webkit_base(*comps)

    def print_(self, msg):
        self.host.print_(msg)

    def do_auto_update(self):
        """Attempts to upload a CL, make any required adjustments, and commit.

        This function assumes that the imported repo has already been updated,
        and that change has been committed. There may be newly-failing tests,
        so before being able to commit these new changes, we may need to update
        TestExpectations or download new baselines.

        Returns:
            True if successfully committed, False otherwise.
        """
        self._upload_cl()
        self.print_("## " + self.git_cl.run(["issue"]).strip())

        # First try: if there are failures, update expectations.
        self.print_("## Triggering try jobs.")
        for try_bot in self.host.builders.all_try_builder_names():
            self.git_cl.run(["try", "-b", try_bot])
        try_results = self.git_cl.wait_for_try_jobs()
        if not try_results:
            self.print_("## Timed out waiting for try results.")
            return False
        if try_results and self.git_cl.has_failing_try_results(try_results):
            self.fetch_new_expectations_and_baselines()

        # Second try: if there are failures, then abort.
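        # "git cl set-commit" marks the CL to be landed by the commit queue once it passes.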
        self.git_cl.run(["set-commit", "--rietveld"])
        try_results = self.git_cl.wait_for_try_jobs()
        if not try_results:
            self.print_("Timed out waiting for try results.")
            self.git_cl.run(["set-close"])
            return False
        if self.git_cl.has_failing_try_results(try_results):
            self.print_("CQ failed; aborting.")
            self.git_cl.run(["set-close"])
            return False
        self.print_("## Update completed.")
        return True

    def _upload_cl(self):
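        """Uploads the current commit as a Rietveld CL, CC'ing directory owners."""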
        self.print_("## Uploading change list.")
        cc_list = self.get_directory_owners_to_cc()
        last_commit_message = self.check_run(["git", "log", "-1", "--format=%B"])
        commit_message = last_commit_message + "[email protected]"
        self.git_cl.run(["upload", "-f", "--rietveld", "-m", commit_message] + ["--cc=" + email for email in cc_list])

    def get_directory_owners_to_cc(self):
        """Returns a list of email addresses to CC for the current import."""
        self.print_("## Gathering directory owners emails to CC.")
        directory_owners_file_path = self.finder.path_from_webkit_base(
            "Tools", "Scripts", "webkitpy", "w3c", "directory_owners.json"
        )
        with open(directory_owners_file_path) as data_file:
            directory_to_owner = self.parse_directory_owners(json.load(data_file))
        out = self.check_run(["git", "diff", "origin/master", "--name-only"])
        changed_files = out.splitlines()
        return self.generate_email_list(changed_files, directory_to_owner)

    @staticmethod
    def parse_directory_owners(decoded_data_file):
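        """Builds a directory -> notification-email dict from the decoded
        directory_owners.json entries, skipping entries with no email set."""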
        directory_dict = {}
        for dict_set in decoded_data_file:
            if dict_set["notification-email"]:
                directory_dict[dict_set["directory"]] = dict_set["notification-email"]
        return directory_dict

    def generate_email_list(self, changed_files, directory_to_owner):
        """Returns a list of email addresses based on the given file list and
        directory-to-owner mapping.

        Args:
            changed_files: A list of file paths relative to the repository root.
            directory_to_owner: A dict mapping layout test directories to emails.

        Returns:
            A list of the email addresses to be notified for the current import.
        """
        email_addresses = set()
        for file_path in changed_files:
            test_path = self.finder.layout_test_name(file_path)
            if test_path is None:
                continue
            test_dir = self.fs.dirname(test_path)
            if test_dir in directory_to_owner:
                email_addresses.add(directory_to_owner[test_dir])
        return sorted(email_addresses)

    def fetch_new_expectations_and_baselines(self):
        """Adds new expectations and downloads baselines based on try job results, then commits and uploads the change."""
        self.print_("## Adding test expectations lines to LayoutTests/TestExpectations.")
        script_path = self.path_from_webkit_base("Tools", "Scripts", "update-w3c-test-expectations")
        self.run([self.host.executable, script_path, "--verbose"])
        message = "Modify TestExpectations or download new baselines for tests."
        self.check_run(["git", "commit", "-a", "-m", message])
        self.git_cl.run(["upload", "-m", message, "--rietveld"])

    def update_all_test_expectations_files(self, deleted_tests, renamed_tests):
        """Updates all test expectations files for tests that have been deleted or renamed."""
        port = self.host.port_factory.get()
        for path, file_contents in port.all_expectations_dict().iteritems():
            parser = TestExpectationParser(port, all_tests=None, is_lint_mode=False)
            expectation_lines = parser.parse(path, file_contents)
            self._update_single_test_expectations_file(path, expectation_lines, deleted_tests, renamed_tests)

    def _update_single_test_expectations_file(self, path, expectation_lines, deleted_tests, renamed_tests):
        """Updates single test expectations file."""
        # FIXME: This won't work for removed or renamed directories with test expectations
        # that are directories rather than individual tests.
        new_lines = []
        changed_lines = []
        for expectation_line in expectation_lines:
            if expectation_line.name in deleted_tests:
                continue
            if expectation_line.name in renamed_tests:
                expectation_line.name = renamed_tests[expectation_line.name]
                # Upon parsing the file, a "path does not exist" warning is expected
                # to be there for tests that have been renamed, and if there are warnings,
                # then the original string is used. If the warnings are reset, then the
                # expectation line is re-serialized when output.
                expectation_line.warnings = []
                changed_lines.append(expectation_line)
            new_lines.append(expectation_line)
        new_file_contents = TestExpectations.list_to_string(new_lines, reconstitute_only_these=changed_lines)
        self.host.filesystem.write_text_file(path, new_file_contents)

    def _list_deleted_tests(self):
        """Returns a list of layout tests that have been deleted."""
        out = self.check_run(["git", "diff", "origin/master", "--diff-filter=D", "--name-only"])
        deleted_tests = []
        for line in out.splitlines():
            test = self.finder.layout_test_name(line)
            if test:
                deleted_tests.append(test)
        return deleted_tests

    def _list_renamed_tests(self):
        """Returns a dict mapping source to dest name for layout tests that have been renamed."""
        out = self.check_run(["git", "diff", "origin/master", "--diff-filter=R", "--name-status"])
        renamed_tests = {}
        for line in out.splitlines():
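            # --name-status output for renames looks like "R<score>\t<old path>\t<new path>".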
            _, source_path, dest_path = line.split()
            source_test = self.finder.layout_test_name(source_path)
            dest_test = self.finder.layout_test_name(dest_path)
            if source_test and dest_test:
                renamed_tests[source_test] = dest_test
        return renamed_tests