Example 1
 def test_listdir(self):
     fs = FileSystem()
     with fs.mkdtemp(prefix='filesystem_unittest_') as d:
         self.assertEqual(fs.listdir(d), [])
         new_file = os.path.join(d, 'foo')
         fs.write_text_file(new_file, u'foo')
         self.assertEqual(fs.listdir(d), ['foo'])
         os.remove(new_file)
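The snippet above relies on fs.mkdtemp() working as a context manager, while later examples instead call str(self.filesystem.mkdtemp()) and clean up with rmtree() in tearDown(). A minimal sketch of a wrapper object that supports both styles, assuming it simply delegates to the standard tempfile and shutil modules (TempDir below is illustrative, not webkitpy's actual implementation):

import shutil
import tempfile


class TempDir(object):
    """Illustrative stand-in for the object fs.mkdtemp() appears to return."""

    def __init__(self, **kwargs):
        # e.g. prefix='filesystem_unittest_'
        self._path = tempfile.mkdtemp(**kwargs)

    def __str__(self):
        return self._path  # str(fs.mkdtemp()) yields the directory path

    def __enter__(self):
        return self._path  # 'with fs.mkdtemp(...) as d:' binds d to the path

    def __exit__(self, exc_type, exc_value, traceback):
        shutil.rmtree(self._path, ignore_errors=True)  # clean up on context exit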
Example 2
class TextFileReaderTest(LoggingTestCase):

    class MockProcessor(ProcessorBase):

        """A processor for test purposes.

        This processor simply records the parameters passed to its process()
        method for later checking by the unittest test methods.

        """

        def __init__(self):
            self.processed = []
            """The parameters passed for all calls to the process() method."""

        def should_process(self, file_path):
            return not file_path.endswith('should_not_process.txt')

        def process(self, lines, file_path, test_kwarg=None):
            self.processed.append((lines, file_path, test_kwarg))

    def setUp(self):
        LoggingTestCase.setUp(self)
        # FIXME: This should be a MockFileSystem once TextFileReader is moved entirely on top of FileSystem.
        self.filesystem = FileSystem()
        self._temp_dir = str(self.filesystem.mkdtemp())
        self._processor = TextFileReaderTest.MockProcessor()
        self._file_reader = TextFileReader(self.filesystem, self._processor)

    def tearDown(self):
        LoggingTestCase.tearDown(self)
        self.filesystem.rmtree(self._temp_dir)

    def _create_file(self, rel_path, text):
        """Create a file with given text and return the path to the file."""
        # FIXME: There are better/more secure APIs for creating tmp file paths.
        file_path = self.filesystem.join(self._temp_dir, rel_path)
        self.filesystem.write_text_file(file_path, text)
        return file_path

    def _passed_to_processor(self):
        """Return the parameters passed to MockProcessor.process()."""
        return self._processor.processed

    def _assert_file_reader(self, passed_to_processor, file_count):
        """Assert the state of the file reader."""
        self.assertEqual(passed_to_processor, self._passed_to_processor())
        self.assertEqual(file_count, self._file_reader.file_count)

    def test_process_file__does_not_exist(self):
        try:
            self._file_reader.process_file('does_not_exist.txt')
        except SystemExit, err:
            self.assertEqual(str(err), '1')
        else:
            self.fail('No Exception raised.')
        self._assert_file_reader([], 1)
        self.assertLog(["ERROR: File does not exist: 'does_not_exist.txt'\n"])
Example 5
    def test_write_text_file_unicode_encode_error(self):
        fs = FileSystem()
        text_path = None
        try:
            text_path = tempfile.mktemp(prefix='write_text_unittest_')
            bin_path = tempfile.mktemp(prefix='write_bin_unittest_')
            fs.write_binary_file(bin_path, bytearray(b'\x73\x74\x72\x8b'))
            data_to_write = fs.read_binary_file(bin_path)

            self.assertRaises(UnicodeDecodeError, fs.write_text_file,
                              text_path, data_to_write)
            fs.write_text_file(text_path, data_to_write, 'replace')
            self.assertEqual(u'str\ufffd', fs.read_text_file(text_path))
            fs.write_text_file(text_path, data_to_write, 'ignore')
            self.assertEqual('str', fs.read_text_file(text_path))
        finally:
            if text_path and fs.isfile(text_path):
                os.remove(text_path)
            if bin_path and fs.isfile(bin_path):
                os.remove(bin_path)
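The three behaviours asserted above mirror Python's standard codec error handlers: the bytes b'\x73\x74\x72\x8b' are 'str' followed by a byte that is invalid in UTF-8, so decoding fails under the default 'strict' handler, becomes u'str\ufffd' under 'replace', and becomes u'str' under 'ignore'. A standalone check, independent of the FileSystem wrapper:

data = b'\x73\x74\x72\x8b'

try:
    data.decode('utf-8')  # the default error handler is 'strict'
except UnicodeDecodeError as error:
    print('strict decoding fails: %s' % error)

print(data.decode('utf-8', 'replace') == u'str\ufffd')  # True: bad byte becomes U+FFFD
print(data.decode('utf-8', 'ignore') == u'str')         # True: bad byte is dropped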
class ExportFileTest(unittest.TestCase):
    def setUp(self):
        self._filesystem = FileSystem()
        self._temp_dir = str(self._filesystem.mkdtemp(suffix="exportfiles"))
        self._old_cwd = self._filesystem.getcwd()
        self._filesystem.chdir(self._temp_dir)
        self._filesystem.write_text_file(
            os.path.join(self._temp_dir, "sorted_file.exp.in"),
            _sorted_file_contents)
        self._filesystem.write_text_file(
            os.path.join(self._temp_dir, "non_sorted_file.exp.in"),
            _non_sorted_file_contents)
        self._filesystem.write_text_file(
            os.path.join(self._temp_dir, "parse_error_file.exp.in"),
            _parse_error_file_contents)

    def tearDown(self):
        self._filesystem.rmtree(self._temp_dir)
        self._filesystem.chdir(self._old_cwd)

    def test_sorted(self):
        """ Test sorted file. """

        file_path = os.path.join(self._temp_dir, "sorted_file.exp.in")
        error_handler = MockErrorHandler(handle_style_error)
        error_handler.errors = []
        error_handler.had_error = False
        checker = ExportFileChecker(file_path, error_handler)
        checker.check()
        self.assertFalse(error_handler.had_error)

    def test_non_sorted(self):
        """ Test non sorted file. """

        file_path = os.path.join(self._temp_dir, "non_sorted_file.exp.in")
        error_handler = MockErrorHandler(handle_style_error)
        error_handler.errors = []
        error_handler.had_error = False
        checker = ExportFileChecker(file_path, error_handler)
        checker.check()
        self.assertTrue(error_handler.had_error)
        self.assertEqual(
            error_handler.errors[0],
            (0, 'list/order', 5, file_path +
             " should be sorted, use Tools/Scripts/sort-export-file script"))

    def test_parse_error(self):
        """ Test parse error file. """

        file_path = os.path.join(self._temp_dir, "parse_error_file.exp.in")
        error_handler = MockErrorHandler(handle_style_error)
        error_handler.errors = []
        error_handler.had_error = False
        checker = ExportFileChecker(file_path, error_handler)
        checker.check()
        self.assertTrue(error_handler.had_error)
        self.assertEqual(
            error_handler.errors[0],
            (0, 'list/order', 5, "Parse error during processing " + file_path +
             ", use Tools/Scripts/sort-export-files for details"))
Example 7
    def test_read_and_write_file(self):
        fs = FileSystem()
        text_path = None
        binary_path = None

        unicode_text_string = u'\u016An\u012Dc\u014Dde\u033D'
        hex_equivalent = '\xC5\xAA\x6E\xC4\xAD\x63\xC5\x8D\x64\x65\xCC\xBD'
        try:
            text_path = tempfile.mktemp(prefix='tree_unittest_')
            binary_path = tempfile.mktemp(prefix='tree_unittest_')
            fs.write_text_file(text_path, unicode_text_string)
            contents = fs.read_binary_file(text_path)
            self.assertEqual(contents, hex_equivalent)

            fs.write_binary_file(binary_path, hex_equivalent)
            text_contents = fs.read_text_file(binary_path)
            self.assertEqual(text_contents, unicode_text_string)
        finally:
            if text_path and fs.isfile(text_path):
                os.remove(text_path)
            if binary_path and fs.isfile(binary_path):
                os.remove(binary_path)
Example 9
    def test_read_and_write_file(self):
        fs = FileSystem()
        text_path = None
        binary_path = None

        unicode_text_string = u'\u016An\u012Dc\u014Dde\u033D'
        hex_equivalent = b'\xC5\xAA\x6E\xC4\xAD\x63\xC5\x8D\x64\x65\xCC\xBD'
        malformed_text_hex = b'\x4D\x69\x63\x72\x6F\x73\x6F\x66\x74\xAE\x20\x56\x69\x73\x75\x61\x6C\x20\x53\x74\x75\x64\x69\x6F\xAE\x20\x32\x30\x31\x30\x0D\x0A'
        malformed_ignored_text_hex = b'\x4D\x69\x63\x72\x6F\x73\x6F\x66\x74\x20\x56\x69\x73\x75\x61\x6C\x20\x53\x74\x75\x64\x69\x6F\x20\x32\x30\x31\x30\x0D\x0A'
        try:
            text_path = tempfile.mktemp(prefix='tree_unittest_')
            binary_path = tempfile.mktemp(prefix='tree_unittest_')
            fs.write_text_file(text_path, unicode_text_string)
            contents = fs.read_binary_file(text_path)
            self.assertEqual(contents, hex_equivalent)

            fs.write_binary_file(binary_path, hex_equivalent)
            text_contents = fs.read_text_file(binary_path)
            self.assertEqual(text_contents, unicode_text_string)

            self.assertRaises(ValueError, fs.write_text_file, binary_path,
                              malformed_text_hex)
            fs.write_binary_file(binary_path, malformed_text_hex)
            self.assertRaises(ValueError, fs.read_text_file, binary_path)
            text_contents = fs.read_binary_file(binary_path).decode(
                'utf8', 'ignore')
            self.assertEqual(
                text_contents,
                malformed_ignored_text_hex.decode('utf8', 'ignore'))
            with fs.open_text_file_for_reading(binary_path, 'replace') as file:
                file.readline()

        finally:
            if text_path and fs.isfile(text_path):
                os.remove(text_path)
            if binary_path and fs.isfile(binary_path):
                os.remove(binary_path)
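The hex_equivalent constant is simply the UTF-8 encoding of the unicode string, so the write_text_file/read_binary_file round trip asserted above reduces to plain codec calls. A standalone check of the expected bytes:

unicode_text_string = u'\u016An\u012Dc\u014Dde\u033D'
hex_equivalent = b'\xC5\xAA\x6E\xC4\xAD\x63\xC5\x8D\x64\x65\xCC\xBD'

print(unicode_text_string.encode('utf-8') == hex_equivalent)  # True
print(hex_equivalent.decode('utf-8') == unicode_text_string)  # True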
class ExportFileTest(unittest.TestCase):

    def setUp(self):
        self._filesystem = FileSystem()
        self._temp_dir = str(self._filesystem.mkdtemp(suffix="exportfiles"))
        self._old_cwd = self._filesystem.getcwd()
        self._filesystem.chdir(self._temp_dir)
        self._filesystem.write_text_file(os.path.join(self._temp_dir, "sorted_file.exp.in"), _sorted_file_contents)
        self._filesystem.write_text_file(os.path.join(self._temp_dir, "non_sorted_file.exp.in"), _non_sorted_file_contents)
        self._filesystem.write_text_file(os.path.join(self._temp_dir, "parse_error_file.exp.in"), _parse_error_file_contents)

    def tearDown(self):
        self._filesystem.rmtree(self._temp_dir)
        self._filesystem.chdir(self._old_cwd)

    def test_sorted(self):
        """ Test sorted file. """

        file_path = os.path.join(self._temp_dir, "sorted_file.exp.in")
        error_handler = MockErrorHandler(handle_style_error)
        error_handler.errors = []
        error_handler.had_error = False
        checker = ExportFileChecker(file_path, error_handler)
        checker.check()
        self.assertFalse(error_handler.had_error)

    def test_non_sorted(self):
        """ Test non sorted file. """

        file_path = os.path.join(self._temp_dir, "non_sorted_file.exp.in")
        error_handler = MockErrorHandler(handle_style_error)
        error_handler.errors = []
        error_handler.had_error = False
        checker = ExportFileChecker(file_path, error_handler)
        checker.check()
        self.assertTrue(error_handler.had_error)
        self.assertEqual(error_handler.errors[0], (0, 'list/order', 5, file_path + " should be sorted, use Tools/Scripts/sort-export-file script"))

    def test_parse_error(self):
        """ Test parse error file. """

        file_path = os.path.join(self._temp_dir, "parse_error_file.exp.in")
        error_handler = MockErrorHandler(handle_style_error)
        error_handler.errors = []
        error_handler.had_error = False
        checker = ExportFileChecker(file_path, error_handler)
        checker.check()
        self.assertTrue(error_handler.had_error)
        self.assertEqual(error_handler.errors[0], (0, 'list/order', 5, "Parse error during processing " + file_path + ", use Tools/Scripts/sort-export-files for details"))
Example 11
class CommitMessageForThisCommitTest(unittest.TestCase):
    expected_commit_message = u"""Unreviewed build fix to un-break webkit-patch land.

Tools: 

Move commit_message_for_this_commit from scm to checkout
https://bugs.webkit.org/show_bug.cgi?id=36629

* Scripts/webkitpy/common/checkout/api.py: import scm.CommitMessage

LayoutTests: 

Second part of this complicated change by me, Tor Arne Vestb\u00f8!

* Path/To/Complicated/File: Added.
"""

    def setUp(self):
        # FIXME: This should not need to touch the filesystem, however
        # ChangeLog is difficult to mock at the moment.
        self.filesystem = FileSystem()
        self.temp_dir = str(self.filesystem.mkdtemp(suffix="changelogs"))
        self.old_cwd = self.filesystem.getcwd()
        self.filesystem.chdir(self.temp_dir)

        # Trick commit-log-editor into thinking we're in a Subversion working copy so it won't
        # complain about not being able to figure out what SCM is in use.
        # FIXME: VCSTools.pm is no longer so easily fooled.  It logs because "svn info" doesn't
        # treat a bare .svn directory as part of an svn checkout.
        self.filesystem.maybe_make_directory(".svn")

        self.changelogs = map(self.filesystem.abspath, (self.filesystem.join("Tools", "ChangeLog"), self.filesystem.join("LayoutTests", "ChangeLog")))
        for path, contents in zip(self.changelogs, (_changelog1, _changelog2)):
            self.filesystem.maybe_make_directory(self.filesystem.dirname(path))
            self.filesystem.write_text_file(path, contents)

    def tearDown(self):
        self.filesystem.rmtree(self.temp_dir)
        self.filesystem.chdir(self.old_cwd)

    def test_commit_message_for_this_commit(self):
        executive = Executive()

        def mock_run(*args, **kwargs):
            # Note that we use a real Executive here, not a MockExecutive, so we can test that we're
            # invoking commit-log-editor correctly.
            env = os.environ.copy()
            env['CHANGE_LOG_EMAIL_ADDRESS'] = '*****@*****.**'
            kwargs['env'] = env
            return executive.run_command(*args, **kwargs)

        detector = SCMDetector(self.filesystem, executive)
        real_scm = detector.detect_scm_system(self.old_cwd)

        mock_scm = MockSCM()
        mock_scm.run = mock_run
        mock_scm.script_path = real_scm.script_path

        checkout = Checkout(mock_scm)
        checkout.modified_changelogs = lambda git_commit, changed_files=None: self.changelogs
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertEqual(commit_message.message(), self.expected_commit_message)
Example 12
# ensures that no autoinstalling occurs until a caller imports from
# webkitpy.thirdparty.  This is useful if the caller wants to configure
# logging prior to executing autoinstall code.

# FIXME: If any of these servers is offline, webkit-patch breaks (and maybe
# other scripts do, too). See <http://webkit.org/b/42080>.

# We put auto-installed third-party modules in this directory--
#
#     webkitpy/thirdparty/autoinstalled
fs = FileSystem()
fs.maybe_make_directory(_AUTOINSTALLED_DIR)

init_path = fs.join(_AUTOINSTALLED_DIR, "__init__.py")
if not fs.exists(init_path):
    fs.write_text_file(init_path, "")

readme_path = fs.join(_AUTOINSTALLED_DIR, "README")
if not fs.exists(readme_path):
    fs.write_text_file(readme_path,
        "This directory is auto-generated by WebKit and is "
        "safe to delete.\nIt contains needed third-party Python "
        "packages automatically downloaded from the web.")


class AutoinstallImportHook(object):
    def __init__(self, filesystem=None):
        self._fs = filesystem or FileSystem()

    def find_module(self, fullname, path):
        # This method will run before each import. See http://www.python.org/dev/peps/pep-0302/
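AutoinstallImportHook.find_module is cut off here; the comment points at PEP 302, under which a finder placed on sys.meta_path is consulted before every import and may either claim the module or return None to defer. A generic sketch of that legacy protocol, only to show the shape of the hook (ImportLogger is illustrative and does no auto-installing):

import sys


class ImportLogger(object):
    """Hypothetical PEP 302-style finder that only observes imports."""

    def find_module(self, fullname, path=None):
        if fullname.startswith('webkitpy.thirdparty.autoinstalled'):
            print('import of %s would trigger an auto-install check' % fullname)
        return None  # never claim the import; defer to the normal machinery


sys.meta_path.insert(0, ImportLogger())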
Example 13
class TextFileReaderTest(LoggingTestCase):

    class MockProcessor(ProcessorBase):

        """A processor for test purposes.

        This processor simply records the parameters passed to its process()
        method for later checking by the unittest test methods.
        """

        def __init__(self):
            self.processed = []
            """The parameters passed for all calls to the process() method."""

        def should_process(self, file_path):
            return not file_path.endswith('should_not_process.txt')

        def process(self, lines, file_path, test_kwarg=None):
            self.processed.append((lines, file_path, test_kwarg))

    def setUp(self):
        LoggingTestCase.setUp(self)
        # FIXME: This should be a MockFileSystem once TextFileReader is moved entirely on top of FileSystem.
        self.filesystem = FileSystem()
        self._temp_dir = str(self.filesystem.mkdtemp())
        self._processor = TextFileReaderTest.MockProcessor()
        self._file_reader = TextFileReader(self.filesystem, self._processor)

    def tearDown(self):
        LoggingTestCase.tearDown(self)
        self.filesystem.rmtree(self._temp_dir)

    def _create_file(self, rel_path, text):
        """Create a file with given text and return the path to the file."""
        # FIXME: There are better/more secure APIs for creating tmp file paths.
        file_path = self.filesystem.join(self._temp_dir, rel_path)
        self.filesystem.write_text_file(file_path, text)
        return file_path

    def _passed_to_processor(self):
        """Return the parameters passed to MockProcessor.process()."""
        return self._processor.processed

    def _assert_file_reader(self, passed_to_processor, file_count):
        """Assert the state of the file reader."""
        self.assertEqual(passed_to_processor, self._passed_to_processor())
        self.assertEqual(file_count, self._file_reader.file_count)

    def test_process_file__does_not_exist(self):
        try:
            self._file_reader.process_file('does_not_exist.txt')
        except SystemExit as err:
            self.assertEqual(str(err), '1')
        else:
            self.fail('No Exception raised.')
        self._assert_file_reader([], 1)
        self.assertLog(["ERROR: File does not exist: 'does_not_exist.txt'\n"])

    def test_process_file__is_dir(self):
        temp_dir = self.filesystem.join(self._temp_dir, 'test_dir')
        self.filesystem.maybe_make_directory(temp_dir)

        self._file_reader.process_file(temp_dir)

        # Because the log message below contains exception text, it is
        # possible that the text varies across platforms.  For this reason,
        # we check only the portion of the log message that we control,
        # namely the text at the beginning.
        log_messages = self.logMessages()
        # We remove the message we are looking at to prevent the tearDown()
        # from raising an exception when it asserts that no log messages
        # remain.
        message = log_messages.pop()

        self.assertTrue(message.startswith("WARNING: Could not read file. Skipping: '%s'\n  " % temp_dir))

        self._assert_file_reader([], 1)

    def test_process_file__should_not_process(self):
        file_path = self._create_file('should_not_process.txt', 'contents')

        self._file_reader.process_file(file_path)
        self._assert_file_reader([], 1)

    def test_process_file__multiple_lines(self):
        file_path = self._create_file('foo.txt', 'line one\r\nline two\n')

        self._file_reader.process_file(file_path)
        processed = [(['line one\r', 'line two', ''], file_path, None)]
        self._assert_file_reader(processed, 1)

    def test_process_file__file_stdin(self):
        file_path = self._create_file('-', 'file contents')

        self._file_reader.process_file(file_path=file_path, test_kwarg='foo')
        processed = [(['file contents'], file_path, 'foo')]
        self._assert_file_reader(processed, 1)

    def test_process_file__with_kwarg(self):
        file_path = self._create_file('foo.txt', 'file contents')

        self._file_reader.process_file(file_path=file_path, test_kwarg='foo')
        processed = [(['file contents'], file_path, 'foo')]
        self._assert_file_reader(processed, 1)

    def test_process_paths(self):
        # We test a list of paths that contains both a file and a directory.
        dir = self.filesystem.join(self._temp_dir, 'foo_dir')
        self.filesystem.maybe_make_directory(dir)

        file_path1 = self._create_file('file1.txt', 'foo')

        rel_path = self.filesystem.join('foo_dir', 'file2.txt')
        file_path2 = self._create_file(rel_path, 'bar')

        self._file_reader.process_paths([dir, file_path1])
        processed = [(['bar'], file_path2, None),
                     (['foo'], file_path1, None)]
        self._assert_file_reader(processed, 2)

    def test_count_delete_only_file(self):
        self._file_reader.count_delete_only_file()
        delete_only_file_count = self._file_reader.delete_only_file_count
        self.assertEqual(delete_only_file_count, 1)
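The expected value in test_process_file__multiple_lines above looks odd at first glance: 'line one\r\nline two\n' becomes ['line one\r', 'line two', '']. That is exactly what splitting on bare '\n' produces, since the '\r' survives and the trailing newline yields an empty final element, which is presumably what the reader does internally:

text = 'line one\r\nline two\n'
print(text.split('\n'))  # ['line one\r', 'line two', '']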
class CommitMessageForThisCommitTest(unittest.TestCase):
    def setUp(self):
        # FIXME: This should not need to touch the filesystem, however
        # ChangeLog is difficult to mock at the moment.
        self.filesystem = FileSystem()
        self.temp_dir = str(self.filesystem.mkdtemp(suffix="changelogs"))
        self.old_cwd = self.filesystem.getcwd()
        self.filesystem.chdir(self.temp_dir)
        self.webkit_base = WebKitFinder(self.filesystem).webkit_base()

        # Trick commit-log-editor into thinking we're in a Subversion working copy so it won't
        # complain about not being able to figure out what SCM is in use.
        # FIXME: VCSTools.pm is no longer so easily fooled.  It logs because "svn info" doesn't
        # treat a bare .svn directory as part of an svn checkout.
        self.filesystem.maybe_make_directory(".svn")

    def mock_changelog(self, changelogs):
        for path, contents in zip(self.changelog_paths, changelogs):
            self.filesystem.maybe_make_directory(self.filesystem.dirname(path))
            self.filesystem.write_text_file(path, contents)

    def tearDown(self):
        self.filesystem.rmtree(self.temp_dir)
        self.filesystem.chdir(self.old_cwd)

    def mock_checkout_for_test(self):
        executive = Executive()

        def mock_run(*args, **kwargs):
            # Note that we use a real Executive here, not a MockExecutive, so we can test that we're
            # invoking commit-log-editor correctly.
            env = os.environ.copy()
            env['CHANGE_LOG_EMAIL_ADDRESS'] = '*****@*****.**'
            kwargs['env'] = env
            return executive.run_command(*args, **kwargs)

        detector = SCMDetector(self.filesystem, executive)
        real_scm = detector.detect_scm_system(self.webkit_base)

        mock_scm = MockSCM()
        mock_scm.run = mock_run

        real_checkout = Checkout(real_scm)
        checkout = Checkout(mock_scm)
        checkout.script_path = real_checkout.script_path
        checkout.modified_changelogs = lambda git_commit, changed_files=None: self.changelog_paths

        return checkout

    def test_commit_message_for_unreviewed_changelogs_with_different_messages(
            self):
        expected_commit_message = u"""Unreviewed build fix to un-break webkit-patch land.

Tools:

Move commit_message_for_this_commit from scm to checkout
https://bugs.webkit.org/show_bug.cgi?id=36629

* Scripts/webkitpy/common/checkout/api.py: import scm.CommitMessage

LayoutTests:

Second part of this complicated change by me, Tor Arne Vestb\u00f8!

* Path/To/Complicated/File: Added.
"""

        self.changelog_paths = map(
            self.filesystem.abspath,
            (self.filesystem.join("Tools", "ChangeLog"),
             self.filesystem.join("LayoutTests", "ChangeLog")))

        self.mock_changelog((_changelog1, _changelog2))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(
            git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(),
                                  expected_commit_message)

    def test_commit_message_for_one_reviewed_changelog(self):
        expected_commit_message = u"""SECTORDER_FLAGS should be defined in target's xcconfig file, not Base.xcconfig
<http://webkit.org/b/135006>

Patch by David Kilzer <*****@*****.**> on 2014-07-17
Reviewed by Darin Adler.

* WebKit.xcodeproj/project.pbxproj: Remove references to unused
WebKit.xcconfig file.
"""

        self.changelog_paths = map(
            self.filesystem.abspath,
            [self.filesystem.join("Source/WebKit", "ChangeLog")])

        self.mock_changelog([_changelog3])
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(
            git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(),
                                  expected_commit_message)

    def test_commit_message_for_changelogs_with_same_messages(self):
        expected_commit_message = u"""SECTORDER_FLAGS should be defined in target's xcconfig file, not Base.xcconfig
<http://webkit.org/b/135006>

Patch by David Kilzer <*****@*****.**> on 2014-07-17
Reviewed by Darin Adler.

Source/WebKit:

* WebKit.xcodeproj/project.pbxproj: Remove references to unused
WebKit.xcconfig file.

LayoutTests:

* Path/To/Complicated/File: Added.
"""

        self.changelog_paths = map(
            self.filesystem.abspath,
            (self.filesystem.join("Source/WebKit", "ChangeLog"),
             self.filesystem.join("LayoutTests", "ChangeLog")))

        self.mock_changelog((_changelog3, _changelog4))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(
            git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(),
                                  expected_commit_message)

    def test_commit_message_for_changelogs_with_different_messages(self):
        expected_commit_message = u"""SECTORDER_FLAGS should be defined in target's xcconfig file, not Base.xcconfig
<http://webkit.org/b/135006>

Patch by David Kilzer <*****@*****.**> on 2014-07-17
Reviewed by Darin Adler.

Source/WebKit:

* WebKit.xcodeproj/project.pbxproj: Remove references to unused
WebKit.xcconfig file.

LayoutTests:

Filler change.

* Path/To/Complicated/File: Added.
"""

        self.changelog_paths = map(
            self.filesystem.abspath,
            (self.filesystem.join("Source/WebKit", "ChangeLog"),
             self.filesystem.join("LayoutTests", "ChangeLog")))

        self.mock_changelog((_changelog3, _changelog5))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(
            git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(),
                                  expected_commit_message)

    def test_commit_message_for_one_rollout_changelog(self):
        expected_commit_message = u"""Rolling out r170340 and r170339.

Changeset r170339 broke the Apple Windows Debug and Release builds.

Reverted changesets:

"[Win] Build fix after r134209"
http://trac.webkit.org/changeset/170340

"[Win] Clean up and refactor WinLauncher"
https://bugs.webkit.org/show_bug.cgi?id=134209
http://trac.webkit.org/changeset/170339

Patch by Daniel Bates <*****@*****.**> on 2014-06-23
"""

        self.changelog_paths = map(
            self.filesystem.abspath,
            [self.filesystem.join("Tools", "ChangeLog")])

        self.mock_changelog([_changelog6])
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(
            git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(),
                                  expected_commit_message)

    def test_commit_message_for_rollout_changelogs_with_different_directories(
            self):
        expected_commit_message = u"""Rolling out r170340 and r170339.

Changeset r170339 broke the Apple Windows Debug and Release builds.

Reverted changesets:

"[Win] Build fix after r134209"
http://trac.webkit.org/changeset/170340

"[Win] Clean up and refactor WinLauncher"
https://bugs.webkit.org/show_bug.cgi?id=134209
http://trac.webkit.org/changeset/170339

Patch by Daniel Bates <*****@*****.**> on 2014-06-23
"""

        self.changelog_paths = map(
            self.filesystem.abspath,
            (self.filesystem.join("Tools", "ChangeLog"),
             self.filesystem.join("Source/WebCore", "ChangeLog")))

        self.mock_changelog((_changelog6, _changelog6))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(
            git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(),
                                  expected_commit_message)
Example 15
 def reformat_in_place(self):
     filesystem = FileSystem()
     json_path = filesystem.join(filesystem.dirname(filesystem.path_to_module('webkitpy.common.config')), 'contributors.json')
     filesystem.write_text_file(json_path, self.as_json())
class SCMTestBase(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(SCMTestBase, self).__init__(*args, **kwargs)
        self.scm = None
        self.executive = None
        self.fs = None
        self.original_cwd = None

    def setUp(self):
        self.executive = Executive()
        self.fs = FileSystem()
        self.original_cwd = self.fs.getcwd()

    def tearDown(self):
        self._chdir(self.original_cwd)

    def _join(self, *comps):
        return self.fs.join(*comps)

    def _chdir(self, path):
        self.fs.chdir(path)

    def _mkdir(self, path):
        assert not self.fs.exists(path)
        self.fs.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.fs.mkdtemp(**kwargs))

    def _remove(self, path):
        self.fs.remove(path)

    def _rmtree(self, path):
        self.fs.rmtree(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _run_silent(self, args, **kwargs):
        self.executive.run_and_throw_if_fail(args, quiet=True, **kwargs)

    def _write_text_file(self, path, contents):
        self.fs.write_text_file(path, contents)

    def _write_binary_file(self, path, contents):
        self.fs.write_binary_file(path, contents)

    def _make_diff(self, command, *args):
        # We use this wrapper to disable output decoding. Diffs should be treated as
        # binary files since they may include text files of multiple different encodings.
        return self._run([command, "diff"] + list(args), decode_output=False)

    def _svn_diff(self, *args):
        return self._make_diff("svn", *args)

    def _git_diff(self, *args):
        return self._make_diff("git", *args)

    def _svn_add(self, path):
        self._run(["svn", "add", path])

    def _svn_commit(self, message):
        self._run(["svn", "commit", "--quiet", "--message", message])

    # This is a hot function since it's invoked by unittest before calling each test_ method in SVNTest and
    # GitTest. We create a mock SVN repo once and then perform an SVN checkout from a filesystem copy of
    # it since it's expensive to create the mock repo.
    def _set_up_svn_checkout(self):
        global cached_svn_repo_path
        global original_cwd
        if not cached_svn_repo_path:
            cached_svn_repo_path = self._set_up_svn_repo()
            original_cwd = self.original_cwd

        self.temp_directory = self._mkdtemp(suffix="svn_test")
        self.svn_repo_path = self._join(self.temp_directory, "repo")
        self.svn_repo_url = "file://%s" % self.svn_repo_path
        self.svn_checkout_path = self._join(self.temp_directory, "checkout")
        shutil.copytree(cached_svn_repo_path, self.svn_repo_path)
        self._run(["svn", "checkout", "--quiet", self.svn_repo_url + "/trunk", self.svn_checkout_path])

    def _set_up_svn_repo(self):
        svn_repo_path = self._mkdtemp(suffix="svn_test_repo")
        svn_repo_url = "file://%s" % svn_repo_path  # Not sure this will work on windows
        # git svn complains if we don't pass --pre-1.5-compatible, not sure why:
        # Expected FS format '2'; found format '3' at /usr/local/libexec/git-core//git-svn line 1477
        self._run(["svnadmin", "create", "--pre-1.5-compatible", svn_repo_path])

        # Create a test svn checkout
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(["svn", "checkout", "--quiet", svn_repo_url, svn_checkout_path])

        # Create and check out a trunk dir to match the standard svn configuration and git-svn's expectations
        self._chdir(svn_checkout_path)
        self._mkdir("trunk")
        self._svn_add("trunk")
        # We can add tags and branches as well if we ever need to test those.
        self._svn_commit("add trunk")

        self._rmtree(svn_checkout_path)

        self._set_up_svn_test_commits(svn_repo_url + "/trunk")
        return svn_repo_path

    def _set_up_svn_test_commits(self, svn_repo_url):
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(["svn", "checkout", "--quiet", svn_repo_url, svn_checkout_path])

        # Add some test commits
        self._chdir(svn_checkout_path)

        self._write_text_file("test_file", "test1")
        self._svn_add("test_file")
        self._svn_commit("initial commit")

        self._write_text_file("test_file", "test1test2")
        # This used to be the last commit, but doing so broke
        # GitTest.test_apply_git_patch which uses the inverse diff of the last commit.
        # svn-apply fails to remove directories in Git, see:
        # https://bugs.webkit.org/show_bug.cgi?id=34871
        self._mkdir("test_dir")
        # Slash should always be the right path separator since we use cygwin on Windows.
        test_file3_path = "test_dir/test_file3"
        self._write_text_file(test_file3_path, "third file")
        self._svn_add("test_dir")
        self._svn_commit("second commit")

        self._write_text_file("test_file", "test1test2test3\n")
        self._write_text_file("test_file2", "second file")
        self._svn_add("test_file2")
        self._svn_commit("third commit")

        # This 4th commit is used to make sure that our patch file handling
        # code correctly treats patches as binary and does not attempt to
        # decode them assuming they're utf-8.
        self._write_binary_file("test_file", u"latin1 test: \u00A0\n".encode("latin-1"))
        self._write_binary_file("test_file2", u"utf-8 test: \u00A0\n".encode("utf-8"))
        self._svn_commit("fourth commit")

        # svn does not seem to update after commit as I would expect.
        self._run(["svn", "update"])
        self._rmtree(svn_checkout_path)
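The fourth commit above exists to keep patch handling byte-oriented: U+00A0 encodes to the single byte 0xA0 in latin-1, which is not valid UTF-8, so decoding a diff touching these files as UTF-8 would fail. A quick illustration of the two encodings:

latin1_bytes = u'latin1 test: \u00A0\n'.encode('latin-1')  # ends with b'\xa0\n'
utf8_bytes = u'utf-8 test: \u00A0\n'.encode('utf-8')       # ends with b'\xc2\xa0\n'

try:
    latin1_bytes.decode('utf-8')
except UnicodeDecodeError as error:
    print('decoding the latin-1 bytes as UTF-8 fails: %s' % error)

print(utf8_bytes.decode('utf-8') == u'utf-8 test: \u00A0\n')  # True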

    def _tear_down_svn_checkout(self):
        self._rmtree(self.temp_directory)

    def _shared_test_add_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())

    def _shared_test_delete_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertNotIn("added_dir", self.scm._added_files())

    def _shared_test_delete_recursively_or_not(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file", "more new stuff")
        self.scm.add("added_dir/added_file")
        self.scm.add("added_dir/another_added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.assertIn("added_dir/another_added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertIn("added_dir/another_added_file", self.scm._added_files())

    def _shared_test_exists(self, scm, commit_function):
        self._chdir(scm.checkout_root)
        self.assertFalse(scm.exists("foo.txt"))
        self._write_text_file("foo.txt", "some stuff")
        self.assertFalse(scm.exists("foo.txt"))
        scm.add("foo.txt")
        commit_function("adding foo")
        self.assertTrue(scm.exists("foo.txt"))
        scm.delete("foo.txt")
        commit_function("deleting foo")
        self.assertFalse(scm.exists("foo.txt"))

    def _shared_test_move(self):
        self._write_text_file("added_file", "new stuff")
        self.scm.add("added_file")
        self.scm.move("added_file", "moved_file")
        self.assertIn("moved_file", self.scm._added_files())

    def _shared_test_move_recursive(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file", "more new stuff")
        self.scm.add("added_dir")
        self.scm.move("added_dir", "moved_dir")
        self.assertIn("moved_dir/added_file", self.scm._added_files())
        self.assertIn("moved_dir/another_added_file", self.scm._added_files())
class CommitMessageForThisCommitTest(unittest.TestCase):
    def setUp(self):
        # FIXME: This should not need to touch the filesystem, however
        # ChangeLog is difficult to mock at the moment.
        self.filesystem = FileSystem()
        self.temp_dir = str(self.filesystem.mkdtemp(suffix="changelogs"))
        self.old_cwd = self.filesystem.getcwd()
        self.filesystem.chdir(self.temp_dir)
        self.webkit_base = WebKitFinder(self.filesystem).webkit_base()

        # Trick commit-log-editor into thinking we're in a Subversion working copy so it won't
        # complain about not being able to figure out what SCM is in use.
        # FIXME: VCSTools.pm is no longer so easily fooled.  It logs because "svn info" doesn't
        # treat a bare .svn directory as part of an svn checkout.
        self.filesystem.maybe_make_directory(".svn")

    def mock_changelog(self, changelogs):
        for path, contents in zip(self.changelog_paths, changelogs):
            self.filesystem.maybe_make_directory(self.filesystem.dirname(path))
            self.filesystem.write_text_file(path, contents)

    def tearDown(self):
        self.filesystem.rmtree(self.temp_dir)
        self.filesystem.chdir(self.old_cwd)

    def mock_checkout_for_test(self):
        executive = Executive()

        def mock_run(*args, **kwargs):
            # Note that we use a real Executive here, not a MockExecutive, so we can test that we're
            # invoking commit-log-editor correctly.
            env = os.environ.copy()
            env['CHANGE_LOG_EMAIL_ADDRESS'] = '*****@*****.**'
            kwargs['env'] = env
            return executive.run_command(*args, **kwargs)

        detector = SCMDetector(self.filesystem, executive)
        real_scm = detector.detect_scm_system(self.webkit_base)

        mock_scm = MockSCM()
        mock_scm.run = mock_run

        real_checkout = Checkout(real_scm)
        checkout = Checkout(mock_scm)
        checkout.script_path = real_checkout.script_path
        checkout.modified_changelogs = lambda git_commit, changed_files=None: self.changelog_paths

        return checkout

    def test_commit_message_for_unreviewed_changelogs_with_different_messages(self):
        expected_commit_message = u"""Unreviewed build fix to un-break webkit-patch land.

Tools:

Move commit_message_for_this_commit from scm to checkout
https://bugs.webkit.org/show_bug.cgi?id=36629

* Scripts/webkitpy/common/checkout/api.py: import scm.CommitMessage

LayoutTests:

Second part of this complicated change by me, Tor Arne Vestb\u00f8!

* Path/To/Complicated/File: Added.
"""

        self.changelog_paths = map(self.filesystem.abspath, (self.filesystem.join("Tools", "ChangeLog"), self.filesystem.join("LayoutTests", "ChangeLog")))

        self.mock_changelog((_changelog1, _changelog2))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(), expected_commit_message)

    def test_commit_message_for_one_reviewed_changelog(self):
        expected_commit_message = u"""SECTORDER_FLAGS should be defined in target's xcconfig file, not Base.xcconfig
<http://webkit.org/b/135006>

Patch by David Kilzer <*****@*****.**> on 2014-07-17
Reviewed by Darin Adler.

* WebKit.xcodeproj/project.pbxproj: Remove references to unused
WebKit.xcconfig file.
"""

        self.changelog_paths = map(self.filesystem.abspath, [self.filesystem.join("Source/WebKit", "ChangeLog")])

        self.mock_changelog([_changelog3])
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(), expected_commit_message)

    def test_commit_message_for_changelogs_with_same_messages(self):
        expected_commit_message = u"""SECTORDER_FLAGS should be defined in target's xcconfig file, not Base.xcconfig
<http://webkit.org/b/135006>

Patch by David Kilzer <*****@*****.**> on 2014-07-17
Reviewed by Darin Adler.

Source/WebKit:

* WebKit.xcodeproj/project.pbxproj: Remove references to unused
WebKit.xcconfig file.

LayoutTests:

* Path/To/Complicated/File: Added.
"""

        self.changelog_paths = map(self.filesystem.abspath, (self.filesystem.join("Source/WebKit", "ChangeLog"), self.filesystem.join("LayoutTests", "ChangeLog")))

        self.mock_changelog((_changelog3, _changelog4))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(), expected_commit_message)

    def test_commit_message_for_changelogs_with_different_messages(self):
        expected_commit_message = u"""SECTORDER_FLAGS should be defined in target's xcconfig file, not Base.xcconfig
<http://webkit.org/b/135006>

Patch by David Kilzer <*****@*****.**> on 2014-07-17
Reviewed by Darin Adler.

Source/WebKit:

* WebKit.xcodeproj/project.pbxproj: Remove references to unused
WebKit.xcconfig file.

LayoutTests:

Filler change.

* Path/To/Complicated/File: Added.
"""

        self.changelog_paths = map(self.filesystem.abspath, (self.filesystem.join("Source/WebKit", "ChangeLog"), self.filesystem.join("LayoutTests", "ChangeLog")))

        self.mock_changelog((_changelog3, _changelog5))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(), expected_commit_message)

    def test_commit_message_for_one_rollout_changelog(self):
        expected_commit_message = u"""Rolling out r170340 and r170339.

Changeset r170339 broke the Apple Windows Debug and Release builds.

Reverted changesets:

"[Win] Build fix after r134209"
http://trac.webkit.org/changeset/170340

"[Win] Clean up and refactor WinLauncher"
https://bugs.webkit.org/show_bug.cgi?id=134209
http://trac.webkit.org/changeset/170339

Patch by Daniel Bates <*****@*****.**> on 2014-06-23
"""

        self.changelog_paths = map(self.filesystem.abspath, [self.filesystem.join("Tools", "ChangeLog")])

        self.mock_changelog([_changelog6])
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(), expected_commit_message)

    def test_commit_message_for_rollout_changelogs_with_different_directories(self):
        expected_commit_message = u"""Rolling out r170340 and r170339.

Changeset r170339 broke the Apple Windows Debug and Release builds.

Reverted changesets:

"[Win] Build fix after r134209"
http://trac.webkit.org/changeset/170340

"[Win] Clean up and refactor WinLauncher"
https://bugs.webkit.org/show_bug.cgi?id=134209
http://trac.webkit.org/changeset/170339

Patch by Daniel Bates <*****@*****.**> on 2014-06-23
"""

        self.changelog_paths = map(self.filesystem.abspath, (self.filesystem.join("Tools", "ChangeLog"), self.filesystem.join("Source/WebCore", "ChangeLog")))

        self.mock_changelog((_changelog6, _changelog6))
        checkout = self.mock_checkout_for_test()
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(), expected_commit_message)
Example 18
class GitTestWithRealFilesystemAndExecutive(unittest.TestCase):

    def setUp(self):
        self.executive = Executive()
        self.filesystem = FileSystem()
        self.original_cwd = self.filesystem.getcwd()

        # Set up fresh git repository with one commit.
        self.untracking_checkout_path = self._mkdtemp(suffix='-git_unittest_untracking')
        self._run(['git', 'init', self.untracking_checkout_path])
        self._chdir(self.untracking_checkout_path)
        self._write_text_file('foo_file', 'foo')
        self._run(['git', 'add', 'foo_file'])
        self._run(['git', 'commit', '-am', 'dummy commit'])
        self.untracking_git = Git(cwd=self.untracking_checkout_path, filesystem=self.filesystem, executive=self.executive)

        # Then set up a second git repo that tracks the first one.
        self.tracking_git_checkout_path = self._mkdtemp(suffix='-git_unittest_tracking')
        self._run(['git', 'clone', '--quiet', self.untracking_checkout_path, self.tracking_git_checkout_path])
        self._chdir(self.tracking_git_checkout_path)
        self.tracking_git = Git(cwd=self.tracking_git_checkout_path, filesystem=self.filesystem, executive=self.executive)

    def tearDown(self):
        self._chdir(self.original_cwd)
        self._run(['rm', '-rf', self.tracking_git_checkout_path])
        self._run(['rm', '-rf', self.untracking_checkout_path])

    def _join(self, *comps):
        return self.filesystem.join(*comps)

    def _chdir(self, path):
        self.filesystem.chdir(path)

    def _mkdir(self, path):
        assert not self.filesystem.exists(path)
        self.filesystem.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.filesystem.mkdtemp(**kwargs))

    def _remove(self, path):
        self.filesystem.remove(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _write_text_file(self, path, contents):
        self.filesystem.write_text_file(path, contents)

    def test_add_list(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        print self._run(['ls', 'added_dir'])
        print self._run(['pwd'])
        print self._run(['cat', 'added_dir/added_file'])
        git.add_list(['added_dir/added_file'])
        self.assertIn('added_dir/added_file', git.added_files())

    def test_delete_recursively(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        git.add_list(['added_dir/added_file'])
        self.assertIn('added_dir/added_file', git.added_files())
        git.delete_list(['added_dir/added_file'])
        self.assertNotIn('added_dir', git.added_files())

    def test_delete_recursively_or_not(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file', 'more new stuff')
        git.add_list(['added_dir/added_file', 'added_dir/another_added_file'])
        self.assertIn('added_dir/added_file', git.added_files())
        self.assertIn('added_dir/another_added_file', git.added_files())
        git.delete_list(['added_dir/added_file'])
        self.assertIn('added_dir/another_added_file', git.added_files())

    def test_exists(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._chdir(git.checkout_root)
        self.assertFalse(git.exists('foo.txt'))
        self._write_text_file('foo.txt', 'some stuff')
        self.assertFalse(git.exists('foo.txt'))
        git.add_list(['foo.txt'])
        git.commit_locally_with_message('adding foo')
        self.assertTrue(git.exists('foo.txt'))
        git.delete_list(['foo.txt'])
        git.commit_locally_with_message('deleting foo')
        self.assertFalse(git.exists('foo.txt'))

    def test_move(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._write_text_file('added_file', 'new stuff')
        git.add_list(['added_file'])
        git.move('added_file', 'moved_file')
        self.assertIn('moved_file', git.added_files())

    def test_move_recursive(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file', 'more new stuff')
        git.add_list(['added_dir'])
        git.move('added_dir', 'moved_dir')
        self.assertIn('moved_dir/added_file', git.added_files())
        self.assertIn('moved_dir/another_added_file', git.added_files())

    def test_remote_branch_ref(self):
        # This tests a protected method. pylint: disable=protected-access
        self.assertEqual(self.tracking_git._remote_branch_ref(), 'refs/remotes/origin/master')
        self._chdir(self.untracking_checkout_path)
        self.assertRaises(ScriptError, self.untracking_git._remote_branch_ref)

    def test_create_patch(self):
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        self._write_text_file('test_file_commit1', 'contents')
        self._run(['git', 'add', 'test_file_commit1'])
        git.commit_locally_with_message('message')
        git._patch_order = lambda: ''  # pylint: disable=protected-access
        patch = git.create_patch()
        self.assertNotRegexpMatches(patch, r'Subversion Revision:')

    def test_patches_have_filenames_with_prefixes(self):
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        self._write_text_file('test_file_commit1', 'contents')
        self._run(['git', 'add', 'test_file_commit1'])
        git.commit_locally_with_message('message')

        # Even if diff.noprefix is enabled, create_patch() produces diffs with prefixes.
        self._run(['git', 'config', 'diff.noprefix', 'true'])
        git._patch_order = lambda: ''  # pylint: disable=protected-access
        patch = git.create_patch()
        self.assertRegexpMatches(patch, r'^diff --git a/test_file_commit1 b/test_file_commit1')

    def test_rename_files(self):
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        git.move('foo_file', 'bar_file')
        git.commit_locally_with_message('message')

    def test_commit_position_from_git_log(self):
        # This tests a protected method. pylint: disable=protected-access
        git_log = """
commit 624c3081c0
Author: foobarbaz1 <*****@*****.**>
Date:   Mon Sep 28 19:10:30 2015 -0700

    Test foo bar baz qux 123.

    BUG=000000

    Review URL: https://codereview.chromium.org/999999999

    Cr-Commit-Position: refs/heads/master@{#1234567}
"""
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        self.assertEqual(git._commit_position_from_git_log(git_log), 1234567)

    def test_timestamp_of_revision(self):
        # This tests a protected method. pylint: disable=protected-access
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        position_regex = git._commit_position_regex_for_timestamp()
        git.most_recent_log_matching(position_regex, git.checkout_root)
Example 19
class CommitMessageForThisCommitTest(unittest.TestCase):
    expected_commit_message = u"""Unreviewed build fix to un-break webkit-patch land.

Tools: 

Move commit_message_for_this_commit from scm to checkout
https://bugs.webkit.org/show_bug.cgi?id=36629

* Scripts/webkitpy/common/checkout/api.py: import scm.CommitMessage

LayoutTests: 

Second part of this complicated change by me, Tor Arne Vestb\u00f8!

* Path/To/Complicated/File: Added.
"""

    def setUp(self):
        # FIXME: This should not need to touch the filesystem, however
        # ChangeLog is difficult to mock at current.
        self.filesystem = FileSystem()
        self.temp_dir = str(self.filesystem.mkdtemp(suffix="changelogs"))
        self.old_cwd = self.filesystem.getcwd()
        self.filesystem.chdir(self.temp_dir)

        # Trick commit-log-editor into thinking we're in a Subversion working copy so it won't
        # complain about not being able to figure out what SCM is in use.
        # FIXME: VCSTools.pm is no longer so easily fooled.  It logs because "svn info" doesn't
        # treat a bare .svn directory as being part of an svn checkout.
        self.filesystem.maybe_make_directory(".svn")

        self.changelogs = map(self.filesystem.abspath, (self.filesystem.join("Tools", "ChangeLog"), self.filesystem.join("LayoutTests", "ChangeLog")))
        for path, contents in zip(self.changelogs, (_changelog1, _changelog2)):
            self.filesystem.maybe_make_directory(self.filesystem.dirname(path))
            self.filesystem.write_text_file(path, contents)

    def tearDown(self):
        self.filesystem.rmtree(self.temp_dir)
        self.filesystem.chdir(self.old_cwd)

    def test_commit_message_for_this_commit(self):
        executive = Executive()

        def mock_run(*args, **kwargs):
            # Note that we use a real Executive here, not a MockExecutive, so we can test that we're
            # invoking commit-log-editor correctly.
            env = os.environ.copy()
            env['CHANGE_LOG_EMAIL_ADDRESS'] = '*****@*****.**'
            kwargs['env'] = env
            return executive.run_command(*args, **kwargs)

        detector = SCMDetector(self.filesystem, executive)
        real_scm = detector.detect_scm_system(self.old_cwd)

        mock_scm = MockSCM()
        mock_scm.run = mock_run
        mock_scm.script_path = real_scm.script_path

        checkout = Checkout(mock_scm)
        checkout.modified_changelogs = lambda git_commit, changed_files=None: self.changelogs
        commit_message = checkout.commit_message_for_this_commit(git_commit=None, return_stderr=True)
        # Throw away the first line - a warning about unknown VCS root.
        commit_message.message_lines = commit_message.message_lines[1:]
        self.assertMultiLineEqual(commit_message.message(), self.expected_commit_message)
Example 20
class SCMTestBase(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(SCMTestBase, self).__init__(*args, **kwargs)
        self.scm = None
        self.executive = None
        self.fs = None
        self.original_cwd = None

    def setUp(self):
        self.executive = Executive()
        self.fs = FileSystem()
        self.original_cwd = self.fs.getcwd()

    def tearDown(self):
        self._chdir(self.original_cwd)

    def _join(self, *comps):
        return self.fs.join(*comps)

    def _chdir(self, path):
        self.fs.chdir(path)

    def _mkdir(self, path):
        assert not self.fs.exists(path)
        self.fs.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.fs.mkdtemp(**kwargs))

    def _remove(self, path):
        self.fs.remove(path)

    def _rmtree(self, path):
        self.fs.rmtree(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _run_silent(self, args, **kwargs):
        self.executive.run_command(args, **kwargs)

    def _write_text_file(self, path, contents):
        self.fs.write_text_file(path, contents)

    def _write_binary_file(self, path, contents):
        self.fs.write_binary_file(path, contents)

    def _make_diff(self, command, *args):
        # We use this wrapper to disable output decoding. Diffs should be treated as
        # binary files since they may include text files of multiple different encodings.
        return self._run([command, "diff"] + list(args), decode_output=False)

    def _git_diff(self, *args):
        return self._make_diff("git", *args)

    def _shared_test_add_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())

    def _shared_test_delete_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertNotIn("added_dir", self.scm._added_files())

    def _shared_test_delete_recursively_or_not(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file", "more new stuff")
        self.scm.add("added_dir/added_file")
        self.scm.add("added_dir/another_added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.assertIn("added_dir/another_added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertIn("added_dir/another_added_file", self.scm._added_files())

    def _shared_test_exists(self, scm, commit_function):
        self._chdir(scm.checkout_root)
        self.assertFalse(scm.exists('foo.txt'))
        self._write_text_file('foo.txt', 'some stuff')
        self.assertFalse(scm.exists('foo.txt'))
        scm.add('foo.txt')
        commit_function('adding foo')
        self.assertTrue(scm.exists('foo.txt'))
        scm.delete('foo.txt')
        commit_function('deleting foo')
        self.assertFalse(scm.exists('foo.txt'))

    def _shared_test_move(self):
        self._write_text_file('added_file', 'new stuff')
        self.scm.add('added_file')
        self.scm.move('added_file', 'moved_file')
        self.assertIn('moved_file', self.scm._added_files())

    def _shared_test_move_recursive(self):
        self._mkdir("added_dir")
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file', 'more new stuff')
        self.scm.add('added_dir')
        self.scm.move('added_dir', 'moved_dir')
        self.assertIn('moved_dir/added_file', self.scm._added_files())
        self.assertIn('moved_dir/another_added_file', self.scm._added_files())
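
A minimal sketch (not part of the original test suite) of how a concrete subclass could reuse the shared helpers above. The GitTest name and the _set_up_git_checkout()/_tear_down_git_checkout() helpers that create and clean up self.scm are hypothetical placeholders.

class GitTest(SCMTestBase):
    def setUp(self):
        super(GitTest, self).setUp()
        self._set_up_git_checkout()  # hypothetical: creates a throwaway repo and sets self.scm

    def tearDown(self):
        self._tear_down_git_checkout()  # hypothetical: removes the throwaway repo
        super(GitTest, self).tearDown()

    def test_move(self):
        self._shared_test_move()

    def test_move_recursive(self):
        self._shared_test_move_recursive()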
Example 21
class CMakeGenerator(object):
    def __init__(self, inputFilename, outputFilename):
        self.host = Host()
        self.filesystem = FileSystem()
        self.project = json.loads(
            self.filesystem.read_text_file(inputFilename))

        self.enable_g711 = False
        self.enable_g722 = False
        # Building against current OpenSSL does not really work since the code uses deprecated OpenSSL functions
        self.enable_boringssl = True
        self.enable_vpx = False
        self.enable_libjpeg = False

        self.targets = self.project["targets"]
        self.outputFilename = outputFilename

        self.skip_test_targets = True

        self.starting_lines = [
            "cmake_minimum_required(VERSION 3.5)",
            "set(CMAKE_CXX_STANDARD 11)",
            "enable_language(ASM)",
            "",
            "if (NOT LIBWEBRTC_INPUT_DIR)",
            "    set(LIBWEBRTC_INPUT_DIR ${CMAKE_SOURCE_DIR}/Source)",
            "endif ()",
            "if (NOT LIBWEBRTC_OUTPUT_DIR)",
            "    set(LIBWEBRTC_OUTPUT_DIR ${CMAKE_BINARY_DIR})",
            "endif ()",
            "",
            "file(WRITE ${LIBWEBRTC_OUTPUT_DIR}/dummy.c \"\")",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/third_party/libjpeg_turbo/simd_asm)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/third_party/ffmpeg/ffmpeg_yasm)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/webrtc/sdk)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/third_party/yasm/include)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/audio_coding/neteq)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/logging/rtc_event_log)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/modules/audio_coding/audio_network_adaptor)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/modules/audio_processing)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/sdk)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/tools/event_log_visualizer)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/audio_coding/neteq)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/logging/rtc_event_log)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/modules/audio_coding/audio_network_adaptor)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/modules/audio_coding/audio_network_adaptor)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/modules/audio_processing)",
            "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/tools/event_log_visualizer)",
            "",
        ]

        self.ending_lines = [
            "",
            "set_target_properties(WebrtcBaseGtest_Prod PROPERTIES LINKER_LANGUAGE CXX)",
            "set_target_properties(WebrtcLoggingRtc_Event_Log_Api PROPERTIES LINKER_LANGUAGE CXX)",
        ]
        if self.enable_libjpeg:
            self.ending_lines.append(
                "set_target_properties(Third_PartyLibjpeg_TurboSimd_Asm PROPERTIES LINKER_LANGUAGE CXX)"
            )

        self.initialize_targets()

    def initialize_targets(self):
        # Simplifying generation
        self.targets["//webrtc/sdk:rtc_sdk_framework_objc"]["sources"][:] = []
        # A static_library requires at least one source file
        self.targets["//webrtc/sdk:rtc_sdk_objc"]["sources"] = [
            "//out/dummy.c"
        ]
        # Executable target without any source file
        self.targets["//webrtc:webrtc_tests"]["type"] = "group"
        # Duplicate symbol issue with source_set
        self.targets["//webrtc/api:call_api"]["type"] = "static_library"
        # Simpler for linking WebCore
        self.targets["//third_party/boringssl:boringssl"][
            "type"] = "static_library"
        self.targets["//third_party/boringssl:boringssl"]["outputs"] = [
            "//out/libboringssl.a"
        ]
        # We use a static info plist instead of a dynamic one
        del self.targets["//webrtc/sdk:rtc_sdk_framework_objc_info_plist"]
        self.targets[
            "//webrtc/sdk:rtc_sdk_framework_objc_info_plist_bundle_data"][
                "deps"].remove(
                    "//webrtc/sdk:rtc_sdk_framework_objc_info_plist")

        # Macro to change specific things in LibWebRTC, only used in libjingle_peerconnection currently
        self.targets["//webrtc/api:libjingle_peerconnection"][
            "defines"].append("WEBRTC_WEBKIT_BUILD")

        if not self.enable_g711:
            self.remove_webrtc_g711()
        if not self.enable_g722:
            self.remove_g722()

        if self.enable_boringssl:
            self.ending_lines.append(
                "set_target_properties(Third_PartyBoringsslBoringssl_Asm PROPERTIES LINKER_LANGUAGE CXX)"
            )
        else:
            self.remove_boringssl()

        if self.enable_vpx:
            self.ending_lines.append(
                "set_target_properties(Third_PartyLibvpxLibvpx_Yasm PROPERTIES LINKER_LANGUAGE CXX)"
            )
            self.starting_lines.append(
                "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/third_party/libvpx/libvpx_yasm)"
            )
        else:
            self.remove_libvpx()

        self.remove_openmax_dl()

        if not self.enable_libjpeg:
            self.remove_libjpeg()
            self.remove_yasm()

        self.remove_webrtc_base_sha1()
        self.targets.pop("//build/config/sanitizers:options_sources")

        self.targets["//webrtc/base:rtc_base_approved"]["defines"].append(
            "HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE")

    def _remove_target(self, targetName):
        self.targets.pop(targetName)
        for name, target in self.targets.iteritems():
            if "deps" in target:
                deps = target["deps"]
                if targetName in deps:
                    deps.remove(targetName)

    def remove_webrtc_g711(self):
        self._remove_target("//webrtc/modules/audio_coding:g711_test")
        self._remove_target(
            "//webrtc/modules/audio_coding:neteq_pcmu_quality_test")
        self._remove_target(
            "//webrtc/modules/audio_coding:audio_decoder_unittests")

        self._remove_target("//webrtc/modules/audio_coding:g711")
        self.targets["//webrtc/modules/audio_coding:pcm16b"]["sources"].append(
            "//webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc")
        self.targets["//webrtc/modules/audio_coding:pcm16b"]["source_outputs"][
            "//webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc"] = "obj/webrtc/modules/audio_coding/g711/audio_encoder_pcm.o"
        for name, target in self.targets.iteritems():
            if "include_dirs" in target:
                include_dirs = target["include_dirs"]
                if "//webrtc/modules/audio_coding/codecs/g711/include/" in include_dirs:
                    include_dirs.remove(
                        "//webrtc/modules/audio_coding/codecs/g711/include/")

            if "defines" in target:
                defines = target["defines"]
                if "CODEC_G711" in defines:
                    defines.remove("CODEC_G711")

    def remove_libjpeg(self):
        self.targets.pop("//third_party/libjpeg_turbo:libjpeg")
        self.targets.pop("//third_party:jpeg")
        self.targets.pop("//third_party/libjpeg_turbo:simd")
        self.targets.pop("//third_party/libjpeg_turbo:simd_asm")
        self.targets.pop("//third_party/libjpeg_turbo:simd_asm_action")

        libyuv = self.targets["//third_party/libyuv:libyuv"]
        libyuv["deps"].remove("//third_party:jpeg")
        libyuv["defines"].remove("HAVE_JPEG")
        libyuv["defines"].remove("USE_LIBJPEG_TURBO=1")

        self.targets["//third_party/libyuv:libyuv_unittest"]["defines"].remove(
            "HAVE_JPEG")
        self.targets["//third_party/libyuv:psnr"]["defines"].remove(
            "HAVE_JPEG")

        for name, target in self.targets.iteritems():
            if "include_dirs" in target:
                include_dirs = target["include_dirs"]
                if "//third_party/openmax_dl/" in include_dirs:
                    include_dirs.remove("//third_party/openmax_dl/")

            if "deps" in target:
                deps = target["deps"]
                if "//third_party:jpeg" in deps:
                    deps.remove("//third_party:jpeg")

            if "defines" in target:
                defines = target["defines"]
                if "RTC_USE_OPENMAX_DL" in defines:
                    defines.remove("RTC_USE_OPENMAX_DL")

    def remove_webrtc_base_sha1(self):
        base = self.targets["//webrtc/base:rtc_base"]
        base["source_outputs"].pop("//webrtc/base/sha1.cc")
        base["sources"].remove("//webrtc/base/sha1.cc")

    def remove_yasm(self):
        self.targets.pop("//third_party/yasm:yasm")
        self.targets.pop("//third_party/yasm:compile_gperf")
        self.targets.pop("//third_party/yasm:compile_gperf_for_include")
        self.targets.pop("//third_party/yasm:compile_nasm_macros")
        self.targets.pop("//third_party/yasm:compile_nasm_version")
        self.targets.pop("//third_party/yasm:compile_re2c")
        self.targets.pop("//third_party/yasm:compile_re2c_lc3b")
        self.targets.pop("//third_party/yasm:compile_win64_gas")
        self.targets.pop("//third_party/yasm:compile_win64_nasm")
        self.targets.pop("//third_party/yasm:generate_license")
        self.targets.pop("//third_party/yasm:generate_module")
        self.targets.pop("//third_party/yasm:generate_version")
        self.targets.pop("//third_party/yasm:yasm_utils")
        self.targets.pop("//third_party/yasm:genperf")
        self.targets.pop("//third_party/yasm:genmodule")
        self.targets.pop("//third_party/yasm:re2c")
        self.targets.pop("//third_party/yasm:genstring")
        self.targets.pop("//third_party/yasm:genversion")
        self.targets.pop("//third_party/yasm:genmacro")

    def remove_openmax_dl(self):
        self.targets.pop("//third_party/openmax_dl/dl:dl")
        for name, target in self.targets.iteritems():
            if "include_dirs" in target:
                include_dirs = target["include_dirs"]
                if "//third_party/openmax_dl/" in include_dirs:
                    include_dirs.remove("//third_party/openmax_dl/")

            if "deps" in target:
                deps = target["deps"]
                if "//third_party/openmax_dl/dl:dl" in deps:
                    deps.remove("//third_party/openmax_dl/dl:dl")

            if "defines" in target:
                defines = target["defines"]
                if "RTC_USE_OPENMAX_DL" in defines:
                    defines.remove("RTC_USE_OPENMAX_DL")

        common_audio = self.targets["//webrtc/common_audio:common_audio"]
        common_audio["source_outputs"].pop(
            "//webrtc/common_audio/real_fourier_openmax.cc")
        common_audio["sources"].remove(
            "//webrtc/common_audio/real_fourier_openmax.cc")

    def remove_libvpx(self):
        self.targets = {
            name: target
            for name, target in self.targets.iteritems()
            if not ("libvpx" in name or "vp9" in name or "vp8" in name)
        }
        for name, target in self.targets.iteritems():
            if "include_dirs" in target:
                include_dirs = target["include_dirs"]
                if "//third_party/libvpx/source/libvpx/" in include_dirs:
                    include_dirs.remove("//third_party/libvpx/source/libvpx/")

            if not "deps" in target:
                continue
            target["deps"] = [
                dep for dep in target["deps"]
                if not ("libvpx" in dep or "vp9" in dep or "vp8" in dep)
            ]

        target = self.targets["//webrtc/modules/video_coding:video_coding"]
        target["defines"].append("RTC_DISABLE_VP8")
        target["defines"].append("RTC_DISABLE_VP9")
        target["sources"].append(
            "//webrtc/modules/video_coding/codecs/vp8/vp8_noop.cc")
        target["source_outputs"][
            "//webrtc/modules/video_coding/codecs/vp8/vp8_noop.cc"] = [
                "obj/webrtc/modules/video_coding/webrtc_vp8/vp8_noop.o"
            ]
        target["sources"].append(
            "//webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc")
        target["source_outputs"][
            "//webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc"] = [
                "obj/webrtc/modules/video_coding/webrtc_vp9/vp9_noop.o"
            ]

        target = self.targets["//webrtc/media:rtc_media"]
        target["defines"].append("RTC_DISABLE_VP8")
        target["defines"].append("RTC_DISABLE_VP9")

    def remove_boringssl(self):
        self.targets.pop("//third_party/boringssl:boringssl")
        self.targets.pop("//third_party/boringssl:boringssl_asm")
        for name, target in self.targets.iteritems():
            if "include_dirs" in target:
                include_dirs = target["include_dirs"]
                if "//third_party/boringssl/src/include/" in include_dirs:
                    include_dirs.remove("//third_party/boringssl/src/include/")
                    #include_dirs.append("/usr/local/opt/openssl/include/")

            if not "deps" in target:
                continue
            deps = target["deps"]
            if "//third_party/boringssl:boringssl" in deps:
                deps.remove("//third_party/boringssl:boringssl")
                # Do we need this one?
                target["defines"].append("OPENSSL_NO_SSL_INTERN")
                # Do we need to set -L for access to the libs?
                target["ldflags"].extend(["-lcrypto", "-lssl"])
        self.targets["//webrtc/p2p:stun_prober"]["ldflags"].extend(
            ["-lcrypto", "-lssl"])

    def remove_g722(self):
        self.targets.pop("//webrtc/modules/audio_coding:g722")
        self.targets.pop("//webrtc/modules/audio_coding:g722_test")
        for name, target in self.targets.iteritems():
            if "defines" in target:
                defines = target["defines"]
                if "WEBRTC_CODEC_G722" in defines:
                    defines.remove("WEBRTC_CODEC_G722")
                if "CODEC_G722" in defines:
                    defines.remove("CODEC_G722")

            if "include_dirs" in target:
                include_dirs = target["include_dirs"]
                if "//webrtc/modules/audio_coding/codecs/g722/include/" in include_dirs:
                    include_dirs.remove(
                        "//webrtc/modules/audio_coding/codecs/g722/include/")

            if not "deps" in target:
                continue
            deps = target["deps"]
            if "//webrtc/modules/audio_coding:g722" in deps:
                deps.remove("//webrtc/modules/audio_coding:g722")
            if "//webrtc/modules/audio_coding:g722_test" in target["deps"]:
                deps.remove("//webrtc/modules/audio_coding:g722_test")

    def generate(self):
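        # Emits the fixed preamble, the framework find_library() lookups, one block of CMake
        # commands per GN target, the aggregate LIBWEBRTC target, and the fixed epilogue.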
        lines = self.starting_lines

        lines.extend(self._initialize_frameworks())

        for name, target in self.targets.iteritems():
            lines.append("\n".join(
                self.generate_target(self.sanitize_target_name(name), target)))

        lines.extend(self.generate_libwebrtc_target())
        lines.extend(self.ending_lines)
        self.write_lazily("\n".join(lines))

    def _initialize_frameworks(self):
        lines = []
        frameworks = []
        for name, target in self.targets.iteritems():
            if ('sdk' in name and not "peerconnection" in name):
                continue
            if "libs" in target:
                frameworks.extend(target["libs"])
        frameworks = list(set(frameworks))
        for framework in frameworks:
            framework = framework.replace(".framework", "")
            lines.append("find_library(" + framework.upper() + "_LIBRARY " +
                         framework + ")")

        return lines

    def write_lazily(self, content):
        if (self.filesystem.exists(self.outputFilename)):
            old_content = self.filesystem.read_text_file(self.outputFilename)
            if old_content == content:
                return
        self.filesystem.write_text_file(self.outputFilename, content)

    def sanitize_target_name(self, name):
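        # For example, a GN label such as "//webrtc/api:call_api" splits on '/' and ':' and,
        # with each piece title-cased and joined, becomes "WebrtcApiCall_Api".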
        return "".join([step.title() for step in re.split('/|:', name)])

    def convert_deps(self, names):
        return " ".join([self.sanitize_target_name(name) for name in names])

    def convert_source(self, source):
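        # Maps GN paths onto the CMake variables, e.g. "//out/dummy.c" becomes
        # "${LIBWEBRTC_OUTPUT_DIR}/dummy.c" and "//webrtc/base/checks.cc" becomes
        # "${LIBWEBRTC_INPUT_DIR}/webrtc/base/checks.cc".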
        return source.replace("//out", "${LIBWEBRTC_OUTPUT_DIR}").replace(
            "//", "${LIBWEBRTC_INPUT_DIR}/")

    def convert_input(self, input):
        return input.replace("//out", "${LIBWEBRTC_OUTPUT_DIR}").replace(
            "//", "${LIBWEBRTC_INPUT_DIR}/")

    def convert_inputs(self, inputs):
        return " ".join(inputs).replace("//out",
                                        "${LIBWEBRTC_OUTPUT_DIR}").replace(
                                            "//", "${LIBWEBRTC_INPUT_DIR}/")

    def convert_output(self, output):
        return output.replace("//out", "${LIBWEBRTC_OUTPUT_DIR}")

    def convert_outputs(self, outputs):
        return " ".join(outputs).replace("//out", "${LIBWEBRTC_OUTPUT_DIR}")

    def generate_libwebrtc_target(self):
        skipped_sources = [
            "//webrtc/base/sha1.cc", "//webrtc/base/sha1digest.cc",
            "//webrtc/base/md5.cc", "//webrtc/base/md5digest.cc",
            "//webrtc/base/json.cc"
            "//third_party/jsoncpp/overrides/src/lib_json/json_reader.cpp",
            "//third_party/jsoncpp/overrides/src/lib_json/json_value.cpp",
            "//third_party/jsoncpp/source/src/lib_json/json_writer.cpp"
        ]
        lines = []
        lines.append("# Start of target LIBWEBRTC")
        objects = []
        dependencies = []
        for name, target in self.targets.iteritems():
            if target["testonly"] or name.startswith("//webrtc/examples"):
                continue
            if "source_outputs" in target:
                for source, output in target["source_outputs"].iteritems():
                    if source in skipped_sources:
                        continue
                    if source.endswith(".o"):
                        continue
                    dependencies.append(self.sanitize_target_name(name))
                    if source.endswith(".asm"):
                        objects.append(output[0].replace("_action", ""))
                    elif output[0].endswith(".o"):
                        filename = source.replace("//out/",
                                                  "").replace("//", "Source/")
                        if not filename.endswith(".o"):
                            filename += ".o"
                        objects.append(
                            ("CMakeFiles/" + self.sanitize_target_name(name) +
                             ".dir/" + filename))
        dependencies = list(set(dependencies))

        lines.append(
            "file(WRITE ${LIBWEBRTC_OUTPUT_DIR}/list_libwebrtc_objects \"" +
            "\n".join(objects) + "\")")
        lines.append(
            "add_custom_command(OUTPUT ${LIBWEBRTC_OUTPUT_DIR}/../libwebrtc.a")
        lines.append(
            "    COMMAND libtool -static -o ${LIBWEBRTC_OUTPUT_DIR}/../libwebrtc.a -filelist ${LIBWEBRTC_OUTPUT_DIR}/list_libwebrtc_objects"
        )
        lines.append("    VERBATIM)")

        lines.append("add_custom_target(LIBWEBRTC DEPENDS " +
                     " ".join(dependencies) +
                     " ${LIBWEBRTC_OUTPUT_DIR}/../libwebrtc.a)")
        lines.append("# End of target LIBWEBRTC")
        return lines

    def generate_target(self, name, target):
        if (self.skip_test_targets
                and target["testonly"]) or name.startswith("WebrtcExamples"):
            return []

        lines = ["\n# Start of target " + name]
        if target["type"] == "action":
            lines.extend(self.generate_action_target(name, target))
        elif target["type"] == "action_foreach":
            lines.extend(self.generate_action_foreach_target(name, target))
        elif target["type"] == "copy":
            lines.extend(self.generate_copy_target(name, target))
        elif target["type"] == "executable":
            lines.extend(self.generate_executable_target(name, target))
        elif target["type"] == "shared_library":
            lines.extend(self.generate_shared_library_target(name, target))
        elif target["type"] == "static_library":
            lines.extend(self.generate_static_library_target(name, target))
        elif target["type"] == "create_bundle":
            lines.extend(self.generate_bundle_target(name, target))
        elif target["type"] == "bundle_data":
            lines.extend(self.generate_bundle_data_target(name, target))
        elif target["type"] == "group":
            lines.extend(self.generate_group_target(name, target))
        elif target["type"] == "source_set":
            lines.extend(self.generate_source_set_target(name, target))
        else:
            raise "unsupported target type: " + target["type"]
        lines.append("# End of target " + name)
        return lines

    def convert_arguments(self, arguments):
        value = ""
        is_first = True
        for argument in arguments:
            if not is_first:
                value += " "
            is_first = False

            if (argument.startswith("../")):
                value += "${LIBWEBRTC_INPUT_DIR}/" + argument[3:]
            elif (argument.startswith("gen/")):
                value += "${LIBWEBRTC_OUTPUT_DIR}/" + argument
            elif (argument.startswith("-I../")):
                value += "-I${LIBWEBRTC_INPUT_DIR}/" + argument[5:]
            elif (argument == "-I."):
                value += "-I${LIBWEBRTC_OUTPUT_DIR}"
            elif (argument == "-I.."):
                value += "-I${LIBWEBRTC_INPUT_DIR}"
            elif (argument == "-Igen"):
                value += "-I${LIBWEBRTC_OUTPUT_DIR}/gen"
            else:
                value += argument
        return value

    def _generate_add_dependencies(self, name, target):
        if not "deps" in target:
            return []
        dependencies = self.convert_deps(
            [dep for dep in target["deps"] if self._is_active_dependency(dep)])
        return ["add_dependencies(" + name + " " + dependencies +
                ")"] if len(dependencies) else []

    def _is_active_dependency(self, name):
        return not ((self.skip_test_targets and self.targets[name]["testonly"])
                    or name.startswith("//webrtc/examples"))

    def generate_action_target(self, name, target):
        lines = []
        outputs = self.convert_outputs(target["outputs"])
        deps = self.convert_deps(target["deps"])
        args = self.convert_arguments(target["args"])
        script = "${LIBWEBRTC_INPUT_DIR}/" + target["script"][2:]
        if (script.endswith(".py")):
            script = "python " + script

        lines.append("add_custom_command(OUTPUT " + outputs)
        if deps:
            lines.append("    DEPENDS " + deps)
        lines.append("    COMMAND " + script + " " + args)
        lines.append("    VERBATIM)")

        lines.append("add_custom_target(" + name + " DEPENDS " +
                     self.convert_deps(target["deps"]) + " " +
                     self.convert_outputs(target["outputs"]) + ")")

        return lines

    def generate_action_foreach_target(self, name, target):
        lines = []
        outputs = [self.convert_output(output) for output in target["outputs"]]
        deps = self.convert_deps(target["deps"])
        sources = [self.convert_source(source) for source in target["sources"]]
        script = "${LIBWEBRTC_INPUT_DIR}/" + target["script"][2:]
        if (script.endswith(".py")):
            script = "python " + script

        for output, source in zip(outputs, sources):
            args = self.convert_arguments(target["args"])
            args = args.replace("{{source}}", source).replace(
                "{{source_name_part}}",
                self.filesystem.splitext(self.filesystem.basename(source))[0])
            lines.append("add_custom_command(OUTPUT " + output)
            lines.append("    MAIN_DEPENDENCY " + source)
            lines.append("    COMMAND " + script + " " + args)
            if deps:
                lines.append("    DEPENDS " + deps)
            lines.append("    VERBATIM)")

        lines.append("add_custom_target(" + name + " DEPENDS " +
                     " ".join(outputs) + ")")

        return lines

    def generate_copy_target(self, name, target):
        lines = []
        outputs = self.convert_outputs(target["outputs"])
        sources = [self.convert_source(source) for source in target["sources"]]
        lines.append("list(APPEND " + name + " " + outputs + ")")

        for output, source in zip(target["outputs"], sources):
            lines.append("file(COPY " + source + " DESTINATION " +
                         self.convert_output(output) + ")")
        lines.append("add_custom_target(" + name)
        lines.append("    COMMAND echo \"Generating copy target" + name + "\"")
        lines.append("    VERBATIM)")
        lines.extend(self._generate_add_dependencies(name, target))
        return lines

    def _compute_compile_target_objects(self, name):
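        # Recursively collects $<TARGET_OBJECTS:...> generator expressions for this target and
        # for its source_set dependencies, which are emitted as CMake OBJECT libraries.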
        target = self.targets[name]
        if target["type"] == "source_set" and not "sources" in target:
            return []
        sources = ["$<TARGET_OBJECTS:" + self.sanitize_target_name(name) + ">"]
        for dep in self.targets[name]["deps"]:
            if not self.targets[dep]["type"] == "source_set":
                continue
            sources.extend(self._compute_compile_target_objects(dep))
        return sources

    def _compute_compile_target_sources(self, target):
        sources = [
            self.convert_source(source) for source in target["sources"]
            if not source.endswith(".h")
        ] if "sources" in target else []
        if target["type"] == "source_set":
            return sources

        for dep in target["deps"]:
            if not self.targets[dep]["type"] == "source_set":
                continue
            sources.extend(self._compute_compile_target_objects(dep))

        return sources

    def _generate_compile_target_sources(self, name, target):
        lines = []
        sources = self._compute_compile_target_sources(target)
        if len(sources):
            lines.append("set(" + name + "_SOURCES " + "\n    ".join(sources) +
                         ")")

        return lines

    def _compute_link_flags(self, target):
        if not "ldflags" in target:
            return []

        flags = target["ldflags"]

        self._remove_next_flag = False

        def keep_flag(flag):
            if self._remove_next_flag:
                self._remove_next_flag = False
                return False
            if flag == "-isysroot":
                self._remove_next_flag = True
                return False
            return True

        return filter(keep_flag, flags)

    def _compute_compile_flags(self, target):
        flags = []
        for flag in [
                "asmflags", "cflags", "cflags_c", "cflags_cc", "cflags_objc",
                "cflags_objcc"
        ]:
            if flag in target:
                flags.extend(target[flag])

        self._remove_next_flag = False

        def keep_flag(flag):
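            # Drops -Xclang and -isysroot together with the argument that immediately
            # follows each of them, plus a few warning and -std flags; keeps everything else.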
            if self._remove_next_flag:
                self._remove_next_flag = False
                return False
            if flag == "-Xclang":
                self._remove_next_flag = True
                return False
            if flag == "-isysroot":
                self._remove_next_flag = True
                return False
            if flag == "-Wno-undefined-var-template":
                return False
            if flag == "-Wno-nonportable-include-path":
                return False
            if flag == "-Wno-address-of-packed-member":
                return False
            if flag == "-std=c++11":
                return False
            return True

        cleaned_flags = filter(keep_flag, flags)
        no_duplicate_flags = []
        for flag in cleaned_flags:
            if flag not in no_duplicate_flags:
                no_duplicate_flags.append(flag)
        return no_duplicate_flags

    def compute_include_dirs(self, target):
        dirs = []
        if "include_dirs" in target:
            dirs.extend(target["include_dirs"])
        return dirs

    def _generate_compile_target_options(self, name, target):
        lines = []

        flags = self._compute_compile_flags(target)
        compilation_flags = "\" \"".join(flags)
        lines.append("target_compile_options(" + name + " PRIVATE \"" +
                     compilation_flags + "\")")

        if "defines" in target:
            lines.append("target_compile_definitions(" + name + " PRIVATE " +
                         " ".join(target["defines"]) + ")")

        dirs = list(set(self.compute_include_dirs(target)))
        if len(dirs):
            lines.append("target_include_directories(" + name + " PRIVATE " +
                         self.convert_inputs(dirs) + ")")

        ldflags = self._compute_link_flags(target)
        if ldflags:
            lines.append("set_target_properties(" + name +
                         " PROPERTIES LINK_FLAGS \"" + " ".join(ldflags) +
                         "\")")

        return lines

    def _compute_linked_libraries(self, target):
        libraries = []
        for dep in target["deps"]:
            dep_target = self.targets[dep]
            if dep_target["type"] == "static_library" or dep_target[
                    "type"] == "shared_library":
                libraries.append(self.sanitize_target_name(dep))
            elif dep_target["type"] == "group" or dep_target[
                    "type"] == "source_set":
                libraries.extend(self._compute_linked_libraries(dep_target))
        return libraries

    def _generate_linked_libraries(self, name, target):
        return [("target_link_libraries(" + name + " " + library + ")")
                for library in self._compute_linked_libraries(target)]

    def _handle_frameworks(self, name, target):
        if not "libs" in target:
            return []

        lines = []
        for framework in target["libs"]:
            framework = framework.replace(".framework", "").upper()
            lines.append("target_include_directories(" + name + " PRIVATE ${" +
                         framework + "_INCLUDE_DIR})")
            lines.append("target_link_libraries(" + name + " ${" + framework +
                         "_LIBRARY})")

        return lines

    def _set_output(self, name, target):
        if not "outputs" in target:
            return []

        lines = []
        output = target["outputs"][0]
        if not output.startswith("//out/"):
            raise "Output not in build directory"
        output_dir = "${LIBWEBRTC_OUTPUT_DIR}/" + self.filesystem.dirname(
            output[6:])
        output_name = self.filesystem.basename(output[6:])
        if output_name.startswith("lib") and output_name.endswith(".a"):
            output_name = output_name[3:-2]
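        # e.g. "//out/libboringssl.a" yields output_dir "${LIBWEBRTC_OUTPUT_DIR}/" and
        # OUTPUT_NAME "boringssl".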
        lines.append("set_target_properties(" + name +
                     " PROPERTIES RUNTIME_OUTPUT_DIRECTORY " + output_dir +
                     ")")
        lines.append("set_target_properties(" + name +
                     " PROPERTIES OUTPUT_NAME " + output_name + ")")
        return lines

    def generate_executable_target(self, name, target):
        lines = self._generate_compile_target_sources(name, target)
        if len(lines):
            lines.append("add_executable(" + name + " ${" + name +
                         "_SOURCES})")
        else:
            lines.append("add_executable(" + name + ")")
        lines.extend(self._generate_compile_target_options(name, target))

        lines.extend(self._set_output(name, target))
        lines.extend(self._generate_linked_libraries(name, target))
        lines.extend(self._handle_frameworks(name, target))

        lines.extend(self._generate_add_dependencies(name, target))
        return lines

    def generate_shared_library_target(self, name, target):
        lines = self._generate_compile_target_sources(name, target)
        if len(lines):
            lines.append("add_library(" + name + " SHARED ${" + name +
                         "_SOURCES})")
        else:
            lines.append("add_library(" + name + " SHARED)")
        lines.extend(self._generate_compile_target_options(name, target))

        lines.extend(self._set_output(name, target))
        lines.extend(self._generate_linked_libraries(name, target))
        lines.extend(self._handle_frameworks(name, target))

        lines.extend(self._generate_add_dependencies(name, target))
        return lines

    def generate_static_library_target(self, name, target):
        lines = self._generate_compile_target_sources(name, target)
        lines.append("add_library(" + name + " STATIC" +
                     ((" ${" + name + "_SOURCES}") if len(lines) else "") +
                     ")")
        lines.extend(self._generate_compile_target_options(name, target))

        lines.extend(self._set_output(name, target))
        lines.extend(self._generate_linked_libraries(name, target))
        lines.extend(self._handle_frameworks(name, target))

        return lines

    def generate_bundle_data_target(self, name, target):
        lines = []
        lines.append("add_custom_target(" + name + ")")
        lines.extend(self._generate_add_dependencies(name, target))
        return lines

    def generate_bundle_target(self, name, target):
        # We replace the dynamically generated Info.plist with a static one.
        info_plist = "${LIBWEBRTC_INPUT_DIR}/../WebKit/" + self.filesystem.basename(
            target["bundle_data"]["source_files"][-1])
        lines = self.generate_shared_library_target(name, target)
        lines.append("set_target_properties(" + name + """ PROPERTIES
            FRAMEWORK TRUE
            FRAMEWORK_VERSION C
            MACOSX_FRAMEWORK_INFO_PLIST """ + info_plist + ")")
        return lines

    def generate_group_target(self, name, target):
        lines = []
        lines.append("add_custom_target(" + name + ")")
        lines.extend(self._generate_add_dependencies(name, target))
        return lines

    def generate_source_set_target(self, name, target):
        if not "sources" in target or not len(target["sources"]):
            return []

        lines = self._generate_compile_target_sources(name, target)
        if len(lines):
            lines.append("add_library(" + name + " OBJECT ${" + name +
                         "_SOURCES})")
        else:
            lines.append("add_library(" + name + " OBJECT)")
        lines.extend(self._generate_compile_target_options(name, target))

        return lines
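
A minimal usage sketch (not part of the original module). It assumes the class above is importable and that project.json was produced by GN's JSON IDE writer (for example, "gn gen out/Default --ide=json"); the file paths are illustrative.

if __name__ == "__main__":
    import sys
    # sys.argv[1]: path to GN's project.json; sys.argv[2]: the CMakeLists.txt to (re)write.
    generator = CMakeGenerator(sys.argv[1], sys.argv[2])
    generator.generate()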
Example 22
class SCMTestBase(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(SCMTestBase, self).__init__(*args, **kwargs)
        self.scm = None
        self.executive = None
        self.fs = None
        self.original_cwd = None

    def setUp(self):
        self.executive = Executive()
        self.fs = FileSystem()
        self.original_cwd = self.fs.getcwd()

    def tearDown(self):
        self._chdir(self.original_cwd)

    def _join(self, *comps):
        return self.fs.join(*comps)

    def _chdir(self, path):
        self.fs.chdir(path)

    def _mkdir(self, path):
        assert not self.fs.exists(path)
        self.fs.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.fs.mkdtemp(**kwargs))

    def _remove(self, path):
        self.fs.remove(path)

    def _rmtree(self, path):
        self.fs.rmtree(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _run_silent(self, args, **kwargs):
        self.executive.run_and_throw_if_fail(args, quiet=True, **kwargs)

    def _write_text_file(self, path, contents):
        self.fs.write_text_file(path, contents)

    def _write_binary_file(self, path, contents):
        self.fs.write_binary_file(path, contents)

    def _make_diff(self, command, *args):
        # We use this wrapper to disable output decoding. Diffs should be treated as
        # binary files since they may include text files of multiple different encodings.
        return self._run([command, "diff"] + list(args), decode_output=False)

    def _svn_diff(self, *args):
        return self._make_diff("svn", *args)

    def _git_diff(self, *args):
        return self._make_diff("git", *args)

    def _svn_add(self, path):
        self._run(["svn", "add", path])

    def _svn_commit(self, message):
        self._run(["svn", "commit", "--quiet", "--message", message])

    # This is a hot function since it's invoked by unittest before calling each test_ method in SVNTest and
    # GitTest. We create a mock SVN repo once and then perform an SVN checkout from a filesystem copy of
    # it since it's expensive to create the mock repo.
    def _set_up_svn_checkout(self):
        global cached_svn_repo_path
        global original_cwd
        if not cached_svn_repo_path:
            cached_svn_repo_path = self._set_up_svn_repo()
            original_cwd = self.original_cwd

        self.temp_directory = self._mkdtemp(suffix="svn_test")
        self.svn_repo_path = self._join(self.temp_directory, "repo")
        self.svn_repo_url = "file://%s" % self.svn_repo_path
        self.svn_checkout_path = self._join(self.temp_directory, "checkout")
        shutil.copytree(cached_svn_repo_path, self.svn_repo_path)
        self._run([
            'svn', 'checkout', '--quiet', self.svn_repo_url + "/trunk",
            self.svn_checkout_path
        ])

    def _set_up_svn_repo(self):
        svn_repo_path = self._mkdtemp(suffix="svn_test_repo")
        svn_repo_url = "file://%s" % svn_repo_path  # Not sure this will work on windows
        # git svn complains if we don't pass --pre-1.5-compatible, not sure why:
        # Expected FS format '2'; found format '3' at /usr/local/libexec/git-core//git-svn line 1477
        self._run(
            ['svnadmin', 'create', '--pre-1.5-compatible', svn_repo_path])

        # Create a test svn checkout
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(
            ['svn', 'checkout', '--quiet', svn_repo_url, svn_checkout_path])

        # Create and check out a trunk dir to match the standard svn layout that git-svn expects
        self._chdir(svn_checkout_path)
        self._mkdir('trunk')
        self._svn_add('trunk')
        # We can add tags and branches as well if we ever need to test those.
        self._svn_commit('add trunk')

        self._rmtree(svn_checkout_path)

        self._set_up_svn_test_commits(svn_repo_url + "/trunk")
        return svn_repo_path

    def _set_up_svn_test_commits(self, svn_repo_url):
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(
            ['svn', 'checkout', '--quiet', svn_repo_url, svn_checkout_path])

        # Add some test commits
        self._chdir(svn_checkout_path)

        self._write_text_file("test_file", "test1")
        self._svn_add("test_file")
        self._svn_commit("initial commit")

        self._write_text_file("test_file", "test1test2")
        # This used to be the last commit, but doing so broke
        # GitTest.test_apply_git_patch which uses the inverse diff of the last commit.
        # svn-apply fails to remove directories in Git, see:
        # https://bugs.webkit.org/show_bug.cgi?id=34871
        self._mkdir("test_dir")
        # Slash should always be the right path separator since we use cygwin on Windows.
        test_file3_path = "test_dir/test_file3"
        self._write_text_file(test_file3_path, "third file")
        self._svn_add("test_dir")
        self._svn_commit("second commit")

        self._write_text_file("test_file", "test1test2test3\n")
        self._write_text_file("test_file2", "second file")
        self._svn_add("test_file2")
        self._svn_commit("third commit")

        # This 4th commit is used to make sure that our patch file handling
        # code correctly treats patches as binary and does not attempt to
        # decode them assuming they're utf-8.
        self._write_binary_file("test_file",
                                u"latin1 test: \u00A0\n".encode("latin-1"))
        self._write_binary_file("test_file2",
                                u"utf-8 test: \u00A0\n".encode("utf-8"))
        self._svn_commit("fourth commit")

        # svn does not seem to update after commit as I would expect.
        self._run(['svn', 'update'])
        self._rmtree(svn_checkout_path)

    def _tear_down_svn_checkout(self):
        self._rmtree(self.temp_directory)

    def _shared_test_add_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())

    def _shared_test_delete_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertNotIn("added_dir", self.scm._added_files())

    def _shared_test_delete_recursively_or_not(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file", "more new stuff")
        self.scm.add("added_dir/added_file")
        self.scm.add("added_dir/another_added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.assertIn("added_dir/another_added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertIn("added_dir/another_added_file", self.scm._added_files())

    def _shared_test_exists(self, scm, commit_function):
        self._chdir(scm.checkout_root)
        self.assertFalse(scm.exists('foo.txt'))
        self._write_text_file('foo.txt', 'some stuff')
        self.assertFalse(scm.exists('foo.txt'))
        scm.add('foo.txt')
        commit_function('adding foo')
        self.assertTrue(scm.exists('foo.txt'))
        scm.delete('foo.txt')
        commit_function('deleting foo')
        self.assertFalse(scm.exists('foo.txt'))

    def _shared_test_move(self):
        self._write_text_file('added_file', 'new stuff')
        self.scm.add('added_file')
        self.scm.move('added_file', 'moved_file')
        self.assertIn('moved_file', self.scm._added_files())

    def _shared_test_move_recursive(self):
        self._mkdir("added_dir")
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file', 'more new stuff')
        self.scm.add('added_dir')
        self.scm.move('added_dir', 'moved_dir')
        self.assertIn('moved_dir/added_file', self.scm._added_files())
        self.assertIn('moved_dir/another_added_file', self.scm._added_files())
Example 24
# webkitpy.thirdparty.  This is useful if the caller wants to configure
# logging prior to executing autoinstall code.

# FIXME: If any of these servers is offline, webkit-patch breaks (and maybe
# other scripts do, too). See <http://webkit.org/b/42080>.

# We put auto-installed third-party modules in this directory--
#
#     webkitpy/thirdparty/autoinstalled

fs = FileSystem()
fs.maybe_make_directory(_AUTOINSTALLED_DIR)

init_path = fs.join(_AUTOINSTALLED_DIR, "__init__.py")
if not fs.exists(init_path):
    fs.write_text_file(init_path, "")

readme_path = fs.join(_AUTOINSTALLED_DIR, "README")
if not fs.exists(readme_path):
    fs.write_text_file(readme_path,
        "This directory is auto-generated by WebKit and is "
        "safe to delete.\nIt contains needed third-party Python "
        "packages automatically downloaded from the web.")


class AutoinstallImportHook(object):
    def __init__(self, filesystem=None):
        self._fs = filesystem or FileSystem()

    def _ensure_autoinstalled_dir_is_in_sys_path(self):
        # Some packages require that they are put somewhere under a directory in sys.path.
Example 25
class TextFileReaderTest(LoggingTestCase):
    class MockProcessor(ProcessorBase):
        """A processor for test purposes.

        This processor simply records the parameters passed to its process()
        method for later checking by the unittest test methods.

        """
        def __init__(self):
            self.processed = []
            """The parameters passed for all calls to the process() method."""

        def should_process(self, file_path):
            return not file_path.endswith('should_not_process.txt')

        def process(self, lines, file_path, test_kwarg=None):
            self.processed.append((lines, file_path, test_kwarg))

    def setUp(self):
        LoggingTestCase.setUp(self)
        # FIXME: This should be a MockFileSystem once TextFileReader is moved entirely on top of FileSystem.
        self.filesystem = FileSystem()
        self._temp_dir = str(self.filesystem.mkdtemp())
        self._processor = TextFileReaderTest.MockProcessor()
        self._file_reader = TextFileReader(self.filesystem, self._processor)

    def tearDown(self):
        LoggingTestCase.tearDown(self)
        self.filesystem.rmtree(self._temp_dir)

    def _create_file(self, rel_path, text):
        """Create a file with given text and return the path to the file."""
        # FIXME: There are better/more secure APIs for creating tmp file paths.
        file_path = self.filesystem.join(self._temp_dir, rel_path)
        self.filesystem.write_text_file(file_path, text)
        return file_path

    def _passed_to_processor(self):
        """Return the parameters passed to MockProcessor.process()."""
        return self._processor.processed

    def _assert_file_reader(self, passed_to_processor, file_count):
        """Assert the state of the file reader."""
        self.assertEqual(passed_to_processor, self._passed_to_processor())
        self.assertEqual(file_count, self._file_reader.file_count)

    def test_process_file__does_not_exist(self):
        try:
            self._file_reader.process_file('does_not_exist.txt')
        except SystemExit as err:
            self.assertEqual(str(err), '1')
        else:
            self.fail('No Exception raised.')
        self._assert_file_reader([], 1)
        self.assertLog(["ERROR: File does not exist: 'does_not_exist.txt'\n"])

    def test_process_file__is_dir(self):
        temp_dir = self.filesystem.join(self._temp_dir, 'test_dir')
        self.filesystem.maybe_make_directory(temp_dir)

        self._file_reader.process_file(temp_dir)

        # Because the log message below contains exception text, it is
        # possible that the text varies across platforms.  For this reason,
        # we check only the portion of the log message that we control,
        # namely the text at the beginning.
        log_messages = self.logMessages()
        # We remove the message we are looking at to prevent the tearDown()
        # from raising an exception when it asserts that no log messages
        # remain.
        message = log_messages.pop()

        self.assertTrue(
            message.startswith(
                "WARNING: Could not read file. Skipping: '%s'\n  " % temp_dir))

        self._assert_file_reader([], 1)

    def test_process_file__should_not_process(self):
        file_path = self._create_file('should_not_process.txt', 'contents')

        self._file_reader.process_file(file_path)
        self._assert_file_reader([], 1)

    def test_process_file__multiple_lines(self):
        file_path = self._create_file('foo.txt', 'line one\r\nline two\n')

        self._file_reader.process_file(file_path)
        processed = [(['line one\r', 'line two', ''], file_path, None)]
        self._assert_file_reader(processed, 1)

    def test_process_file__file_stdin(self):
        file_path = self._create_file('-', 'file contents')

        self._file_reader.process_file(file_path=file_path, test_kwarg='foo')
        processed = [(['file contents'], file_path, 'foo')]
        self._assert_file_reader(processed, 1)

    def test_process_file__with_kwarg(self):
        file_path = self._create_file('foo.txt', 'file contents')

        self._file_reader.process_file(file_path=file_path, test_kwarg='foo')
        processed = [(['file contents'], file_path, 'foo')]
        self._assert_file_reader(processed, 1)

    def test_process_paths(self):
        # We test a list of paths that contains both a file and a directory.
        dir_path = self.filesystem.join(self._temp_dir, 'foo_dir')
        self.filesystem.maybe_make_directory(dir_path)

        file_path1 = self._create_file('file1.txt', 'foo')

        rel_path = self.filesystem.join('foo_dir', 'file2.txt')
        file_path2 = self._create_file(rel_path, 'bar')

        self._file_reader.process_paths([dir_path, file_path1])
        processed = [(['bar'], file_path2, None), (['foo'], file_path1, None)]
        self._assert_file_reader(processed, 2)

    def test_count_delete_only_file(self):
        self._file_reader.count_delete_only_file()
        delete_only_file_count = self._file_reader.delete_only_file_count
        self.assertEqual(delete_only_file_count, 1)
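For context, TextFileReader as exercised above only needs a filesystem plus an object that provides should_process() and process(). A minimal, hypothetical usage sketch outside the test harness; LineCountProcessor, the example paths, and the import locations are assumptions, not part of the test:

from webkitpy.common.system.filesystem import FileSystem   # assumed module path
from webkitpy.style.filereader import TextFileReader        # assumed module path


class LineCountProcessor(object):
    """Illustrative processor that counts every line handed to it."""

    def __init__(self):
        self.line_count = 0

    def should_process(self, file_path):
        return file_path.endswith('.txt')

    def process(self, lines, file_path, **kwargs):
        self.line_count += len(lines)


reader = TextFileReader(FileSystem(), LineCountProcessor())
reader.process_paths(['notes_dir', 'notes.txt'])  # directory and file names are illustrative
print(reader.file_count, reader.delete_only_file_count)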