def test_coverage_works(self):
    """Run test-webkitpy -c in a subprocess and verify coverage output appears.

    By design, running test-webkitpy -c creates a .coverage file in the
    tools directory, so any existing file is set aside first and restored
    afterwards (a newly-created one is removed).
    FIXME: This design needs to change since it means we can't actually
    run this method itself under coverage properly.
    """
    fs = FileSystem()
    exe = Executive()
    module_path = fs.path_to_module(self.__module__)
    script_dir = module_path[0:module_path.find('webkitpy') - 1]
    coverage_file = fs.join(script_dir, '.coverage')
    backup_file = None
    if fs.exists(coverage_file):
        backup_file = coverage_file + '.orig'
        fs.move(coverage_file, backup_file)
    try:
        command = [sys.executable,
                   fs.join(script_dir, 'test-webkitpy'),
                   '-c',
                   STUBS_CLASS + '.test_empty']
        child = exe.popen(command, stdout=exe.PIPE, stderr=exe.PIPE)
        out, _ = child.communicate()
        self.assertEqual(child.returncode, 0)
        self.assertIn('Cover', out)
    finally:
        if backup_file:
            fs.move(backup_file, coverage_file)
        elif fs.exists(coverage_file):
            fs.remove(coverage_file)
def test_coverage_works(self):
    """Verify that test-webkitpy's coverage mode (-c) runs and reports coverage.

    Running test-webkitpy -c creates a .coverage file in tools, so a
    pre-existing file is moved out of the way and restored afterwards,
    while a newly-created one is deleted.
    FIXME: This design needs to change since it means we can't actually
    run this method itself under coverage properly.
    """
    filesystem = FileSystem()
    executive = Executive()
    module_path = filesystem.path_to_module(self.__module__)
    script_dir = module_path[0:module_path.find('webkitpy') - 1]
    coverage_file = filesystem.join(script_dir, '.coverage')
    saved_copy = coverage_file + '.orig' if filesystem.exists(coverage_file) else None
    if saved_copy:
        filesystem.move(coverage_file, saved_copy)
    try:
        cmd = [sys.executable, filesystem.join(script_dir, 'test-webkitpy'),
               '-c', STUBS_CLASS + '.test_empty']
        proc = executive.popen(cmd, stdout=executive.PIPE, stderr=executive.PIPE)
        output, _ = proc.communicate()
        self.assertEqual(proc.returncode, 0)
        self.assertIn('Cover', output)
    finally:
        if saved_copy:
            filesystem.move(saved_copy, coverage_file)
        elif filesystem.exists(coverage_file):
            filesystem.remove(coverage_file)
def main(_argv, _stdout, _stderr):
    """Entry point: parse arguments, validate the source directory, and import tests."""
    options, args = parse_args(_argv)
    source_dir = None
    if args:
        source_dir = args[0]
    fs = FileSystem()
    # Only validate the path when one was actually supplied.
    if source_dir and not fs.exists(source_dir):
        sys.exit('Source directory %s not found!' % source_dir)
    configure_logging()
    importer = TestImporter(Host(), source_dir, options)
    importer.do_import()
def main(_argv, _stdout, _stderr):
    """Entry point: parse arguments, validate the source directory, and import tests.

    Fix: guard the positional-argument access. Previously `args[0]` raised
    an uncaught IndexError when no source directory was given on the
    command line; this now mirrors the sibling `main` variant, which treats
    a missing argument as `import_dir = None` and skips the existence check.
    """
    options, args = parse_args(_argv)
    import_dir = args[0] if args else None
    filesystem = FileSystem()
    if import_dir and not filesystem.exists(import_dir):
        sys.exit('Source directory %s not found!' % import_dir)
    configure_logging()
    test_importer = TestImporter(Host(), import_dir, options)
    test_importer.do_import()
def _find_parent_path_matching_callback_condition(cls, path, callback, filesystem=None):
    """Walk up from *path* toward the filesystem root looking for a directory
    that contains cls._stub_repository_json.

    Returns callback(directory) for the first matching ancestor, or None if
    no ancestor contains the marker file.
    """
    fs = filesystem or FileSystem()
    current = fs.abspath(path)
    last_seen = ''
    # dirname() of the root returns the root itself, which ends the walk.
    while current and current != last_seen:
        marker = fs.join(current, cls._stub_repository_json)
        if fs.exists(marker):
            return callback(current)
        last_seen, current = current, fs.dirname(current)
    return None
def test_getcwd(self):
    """The current working directory reported by FileSystem must exist."""
    filesystem = FileSystem()
    cwd = filesystem.getcwd()
    self.assertTrue(filesystem.exists(cwd))
def test_exists__false(self):
    """exists() returns False for a path known to be missing."""
    filesystem = FileSystem()
    missing_path = self._missing_file
    self.assertFalse(filesystem.exists(missing_path))
def test_exists__true(self):
    """exists() returns True for a path known to be present."""
    filesystem = FileSystem()
    present_path = self._this_file
    self.assertTrue(filesystem.exists(present_path))
class SCMTestBase(unittest.TestCase):
    """Shared fixture for SCM (svn/git) tests.

    Provides thin wrappers around FileSystem and Executive so subclasses
    can manipulate checkouts with short helper calls, plus _shared_test_*
    bodies that subclasses invoke against their own `self.scm`.
    """

    def __init__(self, *args, **kwargs):
        super(SCMTestBase, self).__init__(*args, **kwargs)
        # Subclasses are expected to assign self.scm in their own setUp.
        self.scm = None
        self.executive = None
        self.fs = None
        self.original_cwd = None

    def setUp(self):
        self.executive = Executive()
        self.fs = FileSystem()
        self.original_cwd = self.fs.getcwd()

    def tearDown(self):
        # Restore the cwd so one test's chdir can't affect the next.
        self._chdir(self.original_cwd)

    # -- thin filesystem/process wrappers ------------------------------

    def _join(self, *comps):
        return self.fs.join(*comps)

    def _chdir(self, path):
        self.fs.chdir(path)

    def _mkdir(self, path):
        assert not self.fs.exists(path)
        self.fs.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.fs.mkdtemp(**kwargs))

    def _remove(self, path):
        self.fs.remove(path)

    def _rmtree(self, path):
        self.fs.rmtree(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _run_silent(self, args, **kwargs):
        self.executive.run_and_throw_if_fail(args, quiet=True, **kwargs)

    def _write_text_file(self, path, contents):
        self.fs.write_text_file(path, contents)

    def _write_binary_file(self, path, contents):
        self.fs.write_binary_file(path, contents)

    def _make_diff(self, command, *args):
        # We use this wrapper to disable output decoding. diffs should be treated as
        # binary files since they may include text files of multiple different encodings.
        return self._run([command, "diff"] + list(args), decode_output=False)

    def _svn_diff(self, *args):
        return self._make_diff("svn", *args)

    def _git_diff(self, *args):
        return self._make_diff("git", *args)

    def _svn_add(self, path):
        self._run(["svn", "add", path])

    def _svn_commit(self, message):
        self._run(["svn", "commit", "--quiet", "--message", message])

    # This is a hot function since it's invoked by unittest before calling each test_ method in SVNTest and
    # GitTest. We create a mock SVN repo once and then perform an SVN checkout from a filesystem copy of
    # it since it's expensive to create the mock repo.
    def _set_up_svn_checkout(self):
        """Create (once, cached) a mock SVN repo, then check out a fresh copy of it.

        The repo is built only on first call and cached in the module-level
        cached_svn_repo_path; subsequent calls copy the cached repo, which is
        much cheaper than recreating it for every test.
        """
        global cached_svn_repo_path
        global original_cwd
        if not cached_svn_repo_path:
            cached_svn_repo_path = self._set_up_svn_repo()
            original_cwd = self.original_cwd
        self.temp_directory = self._mkdtemp(suffix="svn_test")
        self.svn_repo_path = self._join(self.temp_directory, "repo")
        self.svn_repo_url = "file://%s" % self.svn_repo_path
        self.svn_checkout_path = self._join(self.temp_directory, "checkout")
        shutil.copytree(cached_svn_repo_path, self.svn_repo_path)
        self._run(["svn", "checkout", "--quiet", self.svn_repo_url + "/trunk", self.svn_checkout_path])

    def _set_up_svn_repo(self):
        """Build the mock SVN repository (trunk + test commits); return its path."""
        svn_repo_path = self._mkdtemp(suffix="svn_test_repo")
        svn_repo_url = "file://%s" % svn_repo_path  # Not sure this will work on windows
        # git svn complains if we don't pass --pre-1.5-compatible, not sure why:
        # Expected FS format '2'; found format '3' at /usr/local/libexec/git-core//git-svn line 1477
        self._run(["svnadmin", "create", "--pre-1.5-compatible", svn_repo_path])

        # Create a test svn checkout
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(["svn", "checkout", "--quiet", svn_repo_url, svn_checkout_path])

        # Create and checkout a trunk dir to match the standard svn configuration to match git-svn's expectations
        self._chdir(svn_checkout_path)
        self._mkdir("trunk")
        self._svn_add("trunk")
        # We can add tags and branches as well if we ever need to test those.
        self._svn_commit("add trunk")

        self._rmtree(svn_checkout_path)
        self._set_up_svn_test_commits(svn_repo_url + "/trunk")
        return svn_repo_path

    def _set_up_svn_test_commits(self, svn_repo_url):
        """Populate the repo at *svn_repo_url* with the four commits the tests expect."""
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(["svn", "checkout", "--quiet", svn_repo_url, svn_checkout_path])

        # Add some test commits
        self._chdir(svn_checkout_path)

        self._write_text_file("test_file", "test1")
        self._svn_add("test_file")
        self._svn_commit("initial commit")

        self._write_text_file("test_file", "test1test2")
        # This used to be the last commit, but doing so broke
        # GitTest.test_apply_git_patch which use the inverse diff of the last commit.
        # svn-apply fails to remove directories in Git, see:
        # https://bugs.webkit.org/show_bug.cgi?id=34871
        self._mkdir("test_dir")
        # Slash should always be the right path separator since we use cygwin on Windows.
        test_file3_path = "test_dir/test_file3"
        self._write_text_file(test_file3_path, "third file")
        self._svn_add("test_dir")
        self._svn_commit("second commit")

        self._write_text_file("test_file", "test1test2test3\n")
        self._write_text_file("test_file2", "second file")
        self._svn_add("test_file2")
        self._svn_commit("third commit")

        # This 4th commit is used to make sure that our patch file handling
        # code correctly treats patches as binary and does not attempt to
        # decode them assuming they're utf-8.
        self._write_binary_file("test_file", u"latin1 test: \u00A0\n".encode("latin-1"))
        self._write_binary_file("test_file2", u"utf-8 test: \u00A0\n".encode("utf-8"))
        self._svn_commit("fourth commit")

        # svn does not seem to update after commit as I would expect.
        self._run(["svn", "update"])
        self._rmtree(svn_checkout_path)

    def _tear_down_svn_checkout(self):
        # Removes the per-test checkout; the cached repo itself is left alone.
        self._rmtree(self.temp_directory)

    def _shared_test_add_recursively(self):
        """Adding a file inside a new directory records it as added."""
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())

    def _shared_test_delete_recursively(self):
        """Deleting the only file in an added directory removes the directory too."""
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertNotIn("added_dir", self.scm._added_files())

    def _shared_test_delete_recursively_or_not(self):
        """Deleting one of two files in an added directory keeps the other file."""
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file", "more new stuff")
        self.scm.add("added_dir/added_file")
        self.scm.add("added_dir/another_added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.assertIn("added_dir/another_added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertIn("added_dir/another_added_file", self.scm._added_files())

    def _shared_test_exists(self, scm, commit_function):
        """scm.exists() reflects committed state, not just working-tree state."""
        self._chdir(scm.checkout_root)
        self.assertFalse(scm.exists("foo.txt"))
        self._write_text_file("foo.txt", "some stuff")
        self.assertFalse(scm.exists("foo.txt"))
        scm.add("foo.txt")
        commit_function("adding foo")
        self.assertTrue(scm.exists("foo.txt"))
        scm.delete("foo.txt")
        commit_function("deleting foo")
        self.assertFalse(scm.exists("foo.txt"))

    def _shared_test_move(self):
        """Moving an added file tracks the file under its new name."""
        self._write_text_file("added_file", "new stuff")
        self.scm.add("added_file")
        self.scm.move("added_file", "moved_file")
        self.assertIn("moved_file", self.scm._added_files())

    def _shared_test_move_recursive(self):
        """Moving an added directory tracks all contained files under the new path."""
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file", "more new stuff")
        self.scm.add("added_dir")
        self.scm.move("added_dir", "moved_dir")
        self.assertIn("moved_dir/added_file", self.scm._added_files())
        self.assertIn("moved_dir/another_added_file", self.scm._added_files())
class GitTestWithRealFilesystemAndExecutive(unittest.TestCase):
    """Git SCM tests that run against real git repositories on disk.

    setUp builds two temporary repos: an "untracking" repo with a single
    commit, and a "tracking" clone of it (so remote-branch behavior can be
    exercised). tearDown removes both.
    """

    def setUp(self):
        self.executive = Executive()
        self.filesystem = FileSystem()
        self.original_cwd = self.filesystem.getcwd()

        # Set up fresh git repository with one commit.
        self.untracking_checkout_path = self._mkdtemp(suffix='-git_unittest_untracking')
        self._run(['git', 'init', self.untracking_checkout_path])
        self._chdir(self.untracking_checkout_path)
        self._write_text_file('foo_file', 'foo')
        self._run(['git', 'add', 'foo_file'])
        self._run(['git', 'commit', '-am', 'dummy commit'])
        self.untracking_git = Git(cwd=self.untracking_checkout_path, filesystem=self.filesystem, executive=self.executive)

        # Then set up a second git repo that tracks the first one.
        self.tracking_git_checkout_path = self._mkdtemp(suffix='-git_unittest_tracking')
        self._run(['git', 'clone', '--quiet', self.untracking_checkout_path, self.tracking_git_checkout_path])
        self._chdir(self.tracking_git_checkout_path)
        self.tracking_git = Git(cwd=self.tracking_git_checkout_path, filesystem=self.filesystem, executive=self.executive)

    def tearDown(self):
        # Leave the temp directories before deleting them.
        self._chdir(self.original_cwd)
        self._run(['rm', '-rf', self.tracking_git_checkout_path])
        self._run(['rm', '-rf', self.untracking_checkout_path])

    # -- thin filesystem/process wrappers ------------------------------

    def _join(self, *comps):
        return self.filesystem.join(*comps)

    def _chdir(self, path):
        self.filesystem.chdir(path)

    def _mkdir(self, path):
        assert not self.filesystem.exists(path)
        self.filesystem.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.filesystem.mkdtemp(**kwargs))

    def _remove(self, path):
        self.filesystem.remove(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _write_text_file(self, path, contents):
        self.filesystem.write_text_file(path, contents)

    def test_add_list(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        # Debug output left in deliberately (helps diagnose flaky bot failures).
        print self._run(['ls', 'added_dir'])
        print self._run(['pwd'])
        print self._run(['cat', 'added_dir/added_file'])
        git.add_list(['added_dir/added_file'])
        self.assertIn('added_dir/added_file', git.added_files())

    def test_delete_recursively(self):
        # Deleting the only file in an added directory removes the directory too.
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        git.add_list(['added_dir/added_file'])
        self.assertIn('added_dir/added_file', git.added_files())
        git.delete_list(['added_dir/added_file'])
        self.assertNotIn('added_dir', git.added_files())

    def test_delete_recursively_or_not(self):
        # Deleting one of two files in an added directory keeps the other file.
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file', 'more new stuff')
        git.add_list(['added_dir/added_file', 'added_dir/another_added_file'])
        self.assertIn('added_dir/added_file', git.added_files())
        self.assertIn('added_dir/another_added_file', git.added_files())
        git.delete_list(['added_dir/added_file'])
        self.assertIn('added_dir/another_added_file', git.added_files())

    def test_exists(self):
        # exists() reflects committed state, not just working-tree state.
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._chdir(git.checkout_root)
        self.assertFalse(git.exists('foo.txt'))
        self._write_text_file('foo.txt', 'some stuff')
        self.assertFalse(git.exists('foo.txt'))
        git.add_list(['foo.txt'])
        git.commit_locally_with_message('adding foo')
        self.assertTrue(git.exists('foo.txt'))
        git.delete_list(['foo.txt'])
        git.commit_locally_with_message('deleting foo')
        self.assertFalse(git.exists('foo.txt'))

    def test_move(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._write_text_file('added_file', 'new stuff')
        git.add_list(['added_file'])
        git.move('added_file', 'moved_file')
        self.assertIn('moved_file', git.added_files())

    def test_move_recursive(self):
        self._chdir(self.untracking_checkout_path)
        git = self.untracking_git
        self._mkdir('added_dir')
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file', 'more new stuff')
        git.add_list(['added_dir'])
        git.move('added_dir', 'moved_dir')
        self.assertIn('moved_dir/added_file', git.added_files())
        self.assertIn('moved_dir/another_added_file', git.added_files())

    def test_remote_branch_ref(self):
        # This tests a protected method. pylint: disable=protected-access
        self.assertEqual(self.tracking_git._remote_branch_ref(), 'refs/remotes/origin/master')
        self._chdir(self.untracking_checkout_path)
        # The untracking repo has no remote, so this must raise.
        self.assertRaises(ScriptError, self.untracking_git._remote_branch_ref)

    def test_create_patch(self):
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        self._write_text_file('test_file_commit1', 'contents')
        self._run(['git', 'add', 'test_file_commit1'])
        git.commit_locally_with_message('message')
        git._patch_order = lambda: ''  # pylint: disable=protected-access
        patch = git.create_patch()
        self.assertNotRegexpMatches(patch, r'Subversion Revision:')

    def test_patches_have_filenames_with_prefixes(self):
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        self._write_text_file('test_file_commit1', 'contents')
        self._run(['git', 'add', 'test_file_commit1'])
        git.commit_locally_with_message('message')

        # Even if diff.noprefix is enabled, create_patch() produces diffs with prefixes.
        self._run(['git', 'config', 'diff.noprefix', 'true'])
        git._patch_order = lambda: ''  # pylint: disable=protected-access
        patch = git.create_patch()
        self.assertRegexpMatches(patch, r'^diff --git a/test_file_commit1 b/test_file_commit1')

    def test_rename_files(self):
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        git.move('foo_file', 'bar_file')
        git.commit_locally_with_message('message')

    def test_commit_position_from_git_log(self):
        # This tests a protected method. pylint: disable=protected-access
        # NOTE(review): the original collapsed source lost the exact line
        # layout of this literal; reconstructed to a plausible git-log shape.
        git_log = """
commit 624c3081c0
Author: foobarbaz1 <*****@*****.**>
Date:   Mon Sep 28 19:10:30 2015 -0700

    Test foo bar baz qux 123.

    BUG=000000

    Review URL: https://codereview.chromium.org/999999999

    Cr-Commit-Position: refs/heads/master@{#1234567}
"""
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        self.assertEqual(git._commit_position_from_git_log(git_log), 1234567)

    def test_timestamp_of_revision(self):
        # This tests a protected method. pylint: disable=protected-access
        self._chdir(self.tracking_git_checkout_path)
        git = self.tracking_git
        position_regex = git._commit_position_regex_for_timestamp()
        git.most_recent_log_matching(position_regex, git.checkout_root)
def test_getcwd(self):
    """The reported current working directory must be an existing path."""
    fs = FileSystem()
    current_dir = fs.getcwd()
    directory_exists = fs.exists(current_dir)
    self.assertTrue(directory_exists)
def test_exists__false(self):
    """A known-missing path must make exists() return False."""
    fs = FileSystem()
    result = fs.exists(self._missing_file)
    self.assertFalse(result)
class SCMTestBase(unittest.TestCase):
    """Shared fixture for SCM tests (git-only variant).

    Provides thin wrappers around FileSystem and Executive plus
    _shared_test_* bodies that subclasses invoke against their own
    `self.scm`.
    """

    def __init__(self, *args, **kwargs):
        super(SCMTestBase, self).__init__(*args, **kwargs)
        # Subclasses are expected to assign self.scm in their own setUp.
        self.scm = None
        self.executive = None
        self.fs = None
        self.original_cwd = None

    def setUp(self):
        self.executive = Executive()
        self.fs = FileSystem()
        self.original_cwd = self.fs.getcwd()

    def tearDown(self):
        # Restore the cwd so one test's chdir can't affect the next.
        self._chdir(self.original_cwd)

    # -- thin filesystem/process wrappers ------------------------------

    def _join(self, *comps):
        return self.fs.join(*comps)

    def _chdir(self, path):
        self.fs.chdir(path)

    def _mkdir(self, path):
        assert not self.fs.exists(path)
        self.fs.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.fs.mkdtemp(**kwargs))

    def _remove(self, path):
        self.fs.remove(path)

    def _rmtree(self, path):
        self.fs.rmtree(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _run_silent(self, args, **kwargs):
        self.executive.run_command(args, **kwargs)

    def _write_text_file(self, path, contents):
        self.fs.write_text_file(path, contents)

    def _write_binary_file(self, path, contents):
        self.fs.write_binary_file(path, contents)

    def _make_diff(self, command, *args):
        # We use this wrapper to disable output decoding. diffs should be treated as
        # binary files since they may include text files of multiple different encodings.
        return self._run([command, "diff"] + list(args), decode_output=False)

    def _git_diff(self, *args):
        return self._make_diff("git", *args)

    def _shared_test_add_recursively(self):
        """Adding a file inside a new directory records it as added."""
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())

    def _shared_test_delete_recursively(self):
        """Deleting the only file in an added directory removes the directory too."""
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertNotIn("added_dir", self.scm._added_files())

    def _shared_test_delete_recursively_or_not(self):
        """Deleting one of two files in an added directory keeps the other file."""
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file", "more new stuff")
        self.scm.add("added_dir/added_file")
        self.scm.add("added_dir/another_added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.assertIn("added_dir/another_added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertIn("added_dir/another_added_file", self.scm._added_files())

    def _shared_test_exists(self, scm, commit_function):
        """scm.exists() reflects committed state, not just working-tree state."""
        self._chdir(scm.checkout_root)
        self.assertFalse(scm.exists('foo.txt'))
        self._write_text_file('foo.txt', 'some stuff')
        self.assertFalse(scm.exists('foo.txt'))
        scm.add('foo.txt')
        commit_function('adding foo')
        self.assertTrue(scm.exists('foo.txt'))
        scm.delete('foo.txt')
        commit_function('deleting foo')
        self.assertFalse(scm.exists('foo.txt'))

    def _shared_test_move(self):
        """Moving an added file tracks the file under its new name."""
        self._write_text_file('added_file', 'new stuff')
        self.scm.add('added_file')
        self.scm.move('added_file', 'moved_file')
        self.assertIn('moved_file', self.scm._added_files())

    def _shared_test_move_recursive(self):
        """Moving an added directory tracks all contained files under the new path."""
        self._mkdir("added_dir")
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file', 'more new stuff')
        self.scm.add('added_dir')
        self.scm.move('added_dir', 'moved_dir')
        self.assertIn('moved_dir/added_file', self.scm._added_files())
        self.assertIn('moved_dir/another_added_file', self.scm._added_files())
# Putting the autoinstall code into webkitpy/thirdparty/__init__.py # ensures that no autoinstalling occurs until a caller imports from # webkitpy.thirdparty. This is useful if the caller wants to configure # logging prior to executing autoinstall code. # FIXME: If any of these servers is offline, webkit-patch breaks (and maybe # other scripts do, too). See <http://webkit.org/b/42080>. # We put auto-installed third-party modules in this directory-- # # webkitpy/thirdparty/autoinstalled fs = FileSystem() fs.maybe_make_directory(_AUTOINSTALLED_DIR) init_path = fs.join(_AUTOINSTALLED_DIR, "__init__.py") if not fs.exists(init_path): fs.write_text_file(init_path, "") readme_path = fs.join(_AUTOINSTALLED_DIR, "README") if not fs.exists(readme_path): fs.write_text_file(readme_path, "This directory is auto-generated by WebKit and is " "safe to delete.\nIt contains needed third-party Python " "packages automatically downloaded from the web.") class AutoinstallImportHook(object): def __init__(self, filesystem=None): self._fs = filesystem or FileSystem() def find_module(self, fullname, path):
class CMakeGenerator(object): def __init__(self, inputFilename, outputFilename): self.host = Host() self.filesystem = FileSystem() self.project = json.loads( self.filesystem.read_text_file(inputFilename)) self.enable_g711 = False self.enable_g722 = False # Current Openssl cannot really compile since they use deprecated openssl functions self.enable_boringssl = True self.enable_vpx = False self.enable_libjpeg = False self.targets = self.project["targets"] self.outputFilename = outputFilename self.skip_test_targets = True self.starting_lines = [ "cmake_minimum_required(VERSION 3.5)", "set(CMAKE_CXX_STANDARD 11)", "enable_language(ASM)", "", "if (NOT LIBWEBRTC_INPUT_DIR)", " set(LIBWEBRTC_INPUT_DIR ${CMAKE_SOURCE_DIR}/Source)", "endif ()", "if (NOT LIBWEBRTC_OUTPUT_DIR)", " set(LIBWEBRTC_OUTPUT_DIR ${CMAKE_BINARY_DIR})", "endif ()", "", "file(WRITE ${LIBWEBRTC_OUTPUT_DIR}/dummy.c \"\")", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/third_party/libjpeg_turbo/simd_asm)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/third_party/ffmpeg/ffmpeg_yasm)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/webrtc/sdk)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/third_party/yasm/include)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/audio_coding/neteq)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/logging/rtc_event_log)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/modules/audio_coding/audio_network_adaptor)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/modules/audio_processing)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/sdk)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/gen/webrtc/tools/event_log_visualizer)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/audio_coding/neteq)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/logging/rtc_event_log)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/modules/audio_coding/audio_network_adaptor)", "file(MAKE_DIRECTORY 
${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/modules/audio_coding/audio_network_adaptor)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/modules/audio_processing)", "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/pyproto/webrtc/tools/event_log_visualizer)", "", ] self.ending_lines = [ "", "set_target_properties(WebrtcBaseGtest_Prod PROPERTIES LINKER_LANGUAGE CXX)", "set_target_properties(WebrtcLoggingRtc_Event_Log_Api PROPERTIES LINKER_LANGUAGE CXX)", ] if self.enable_libjpeg: self.ending_lines.append( "set_target_properties(Third_PartyLibjpeg_TurboSimd_Asm PROPERTIES LINKER_LANGUAGE CXX)" ) self.initialize_targets() def initialize_targets(self): # Simplifying generation self.targets["//webrtc/sdk:rtc_sdk_framework_objc"]["sources"][:] = [] # Static_library requires as least one source file self.targets["//webrtc/sdk:rtc_sdk_objc"]["sources"] = [ "//out/dummy.c" ] # Executable target without any source file self.targets["//webrtc:webrtc_tests"]["type"] = "group" # Duplicate symbol issue with source_set self.targets["//webrtc/api:call_api"]["type"] = "static_library" # Simpler for linking WebCore self.targets["//third_party/boringssl:boringssl"][ "type"] = "static_library" self.targets["//third_party/boringssl:boringssl"]["outputs"] = [ "//out/libboringssl.a" ] # We use a static info plist instead of a dynamic one del self.targets["//webrtc/sdk:rtc_sdk_framework_objc_info_plist"] self.targets[ "//webrtc/sdk:rtc_sdk_framework_objc_info_plist_bundle_data"][ "deps"].remove( "//webrtc/sdk:rtc_sdk_framework_objc_info_plist") # Macro to change specific things in LibWebRTC, only used in libjingle_peerconnection currently self.targets["//webrtc/api:libjingle_peerconnection"][ "defines"].append("WEBRTC_WEBKIT_BUILD") if not self.enable_g711: self.remove_webrtc_g711() if not self.enable_g722: self.remove_g722() if self.enable_boringssl: self.ending_lines.append( "set_target_properties(Third_PartyBoringsslBoringssl_Asm PROPERTIES LINKER_LANGUAGE CXX)" ) else: 
self.remove_boringssl() if self.enable_vpx: self.ending_lines.append( "set_target_properties(Third_PartyLibvpxLibvpx_Yasm PROPERTIES LINKER_LANGUAGE CXX)" ) self.starting_lines.append( "file(MAKE_DIRECTORY ${LIBWEBRTC_OUTPUT_DIR}/obj/third_party/libvpx/libvpx_yasm)" ) else: self.remove_libvpx() self.remove_openmax_dl() if not self.enable_libjpeg: self.remove_libjpeg() self.remove_yasm() self.remove_webrtc_base_sha1() self.targets.pop("//build/config/sanitizers:options_sources") self.targets["//webrtc/base:rtc_base_approved"]["defines"].append( "HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE") def _remove_target(self, targetName): self.targets.pop(targetName) for name, target in self.targets.iteritems(): if "deps" in target: deps = target["deps"] if targetName in deps: deps.remove(targetName) def remove_webrtc_g711(self): self._remove_target("//webrtc/modules/audio_coding:g711_test") self._remove_target( "//webrtc/modules/audio_coding:neteq_pcmu_quality_test") self._remove_target( "//webrtc/modules/audio_coding:audio_decoder_unittests") self._remove_target("//webrtc/modules/audio_coding:g711") self.targets["//webrtc/modules/audio_coding:pcm16b"]["sources"].append( "//webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc") self.targets["//webrtc/modules/audio_coding:pcm16b"]["source_outputs"][ "//webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc"] = "obj/webrtc/modules/audio_coding/g711/audio_encoder_pcm.o" for name, target in self.targets.iteritems(): if "include_dirs" in target: include_dirs = target["include_dirs"] if "//webrtc/modules/audio_coding/codecs/g711/include/" in include_dirs: include_dirs.remove( "//webrtc/modules/audio_coding/codecs/g711/include/") if "defines" in target: defines = target["defines"] if "CODEC_G711" in defines: defines.remove("CODEC_G711") def remove_libjpeg(self): self.targets.pop("//third_party/libjpeg_turbo:libjpeg") self.targets.pop("//third_party:jpeg") self.targets.pop("//third_party/libjpeg_turbo:simd") 
self.targets.pop("//third_party/libjpeg_turbo:simd_asm") self.targets.pop("//third_party/libjpeg_turbo:simd_asm_action") libyuv = self.targets["//third_party/libyuv:libyuv"] libyuv["deps"].remove("//third_party:jpeg") libyuv["defines"].remove("HAVE_JPEG") libyuv["defines"].remove("USE_LIBJPEG_TURBO=1") self.targets["//third_party/libyuv:libyuv_unittest"]["defines"].remove( "HAVE_JPEG") self.targets["//third_party/libyuv:psnr"]["defines"].remove( "HAVE_JPEG") for name, target in self.targets.iteritems(): if "include_dirs" in target: include_dirs = target["include_dirs"] if "//third_party/openmax_dl/" in include_dirs: include_dirs.remove("//third_party/openmax_dl/") if "deps" in target: deps = target["deps"] if "//third_party:jpeg" in deps: deps.remove("//third_party:jpeg") if "defines" in target: defines = target["defines"] if "RTC_USE_OPENMAX_DL" in defines: defines.remove("RTC_USE_OPENMAX_DL") def remove_webrtc_base_sha1(self): base = self.targets["//webrtc/base:rtc_base"] base["source_outputs"].pop("//webrtc/base/sha1.cc") base["sources"].remove("//webrtc/base/sha1.cc") def remove_yasm(self): self.targets.pop("//third_party/yasm:yasm") self.targets.pop("//third_party/yasm:compile_gperf") self.targets.pop("//third_party/yasm:compile_gperf_for_include") self.targets.pop("//third_party/yasm:compile_nasm_macros") self.targets.pop("//third_party/yasm:compile_nasm_version") self.targets.pop("//third_party/yasm:compile_re2c") self.targets.pop("//third_party/yasm:compile_re2c_lc3b") self.targets.pop("//third_party/yasm:compile_win64_gas") self.targets.pop("//third_party/yasm:compile_win64_nasm") self.targets.pop("//third_party/yasm:generate_license") self.targets.pop("//third_party/yasm:generate_module") self.targets.pop("//third_party/yasm:generate_version") self.targets.pop("//third_party/yasm:yasm_utils") self.targets.pop("//third_party/yasm:genperf") self.targets.pop("//third_party/yasm:genmodule") self.targets.pop("//third_party/yasm:re2c") 
self.targets.pop("//third_party/yasm:genstring") self.targets.pop("//third_party/yasm:genversion") self.targets.pop("//third_party/yasm:genmacro") def remove_openmax_dl(self): self.targets.pop("//third_party/openmax_dl/dl:dl") for name, target in self.targets.iteritems(): if "include_dirs" in target: include_dirs = target["include_dirs"] if "//third_party/openmax_dl/" in include_dirs: include_dirs.remove("//third_party/openmax_dl/") if "deps" in target: deps = target["deps"] if "//third_party/openmax_dl/dl:dl" in deps: deps.remove("//third_party/openmax_dl/dl:dl") if "defines" in target: defines = target["defines"] if "RTC_USE_OPENMAX_DL" in defines: defines.remove("RTC_USE_OPENMAX_DL") common_audio = self.targets["//webrtc/common_audio:common_audio"] common_audio["source_outputs"].pop( "//webrtc/common_audio/real_fourier_openmax.cc") common_audio["sources"].remove( "//webrtc/common_audio/real_fourier_openmax.cc") def remove_libvpx(self): self.targets = { name: target for name, target in self.targets.iteritems() if not ("libvpx" in name or "vp9" in name or "vp8" in name) } for name, target in self.targets.iteritems(): if "include_dirs" in target: include_dirs = target["include_dirs"] if "//third_party/libvpx/source/libvpx/" in include_dirs: include_dirs.remove("//third_party/libvpx/source/libvpx/") if not "deps" in target: continue target["deps"] = [ dep for dep in target["deps"] if not ("libvpx" in dep or "vp9" in dep or "vp8" in dep) ] target = self.targets["//webrtc/modules/video_coding:video_coding"] target["defines"].append("RTC_DISABLE_VP8") target["defines"].append("RTC_DISABLE_VP9") target["sources"].append( "//webrtc/modules/video_coding/codecs/vp8/vp8_noop.cc") target["source_outputs"][ "//webrtc/modules/video_coding/codecs/vp8/vp8_noop.cc"] = [ "obj/webrtc/modules/video_coding/webrtc_vp8/vp8_noop.o" ] target["sources"].append( "//webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc") target["source_outputs"][ 
"//webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc"] = [ "obj/webrtc/modules/video_coding/webrtc_vp9/vp9_noop.o" ] target = self.targets["//webrtc/media:rtc_media"] target["defines"].append("RTC_DISABLE_VP8") target["defines"].append("RTC_DISABLE_VP9") def remove_boringssl(self): self.targets.pop("//third_party/boringssl:boringssl") self.targets.pop("//third_party/boringssl:boringssl_asm") for name, target in self.targets.iteritems(): if "include_dirs" in target: include_dirs = target["include_dirs"] if "//third_party/boringssl/src/include/" in include_dirs: include_dirs.remove("//third_party/boringssl/src/include/") #include_dirs.append("/usr/local/opt/openssl/include/") if not "deps" in target: continue deps = target["deps"] if "//third_party/boringssl:boringssl" in deps: deps.remove("//third_party/boringssl:boringssl") # Do we need this one? target["defines"].append("OPENSSL_NO_SSL_INTERN") # Do we need to set -L for access to the libs? target["ldflags"].extend(["-lcrypto", "-lssl"]) self.targets["//webrtc/p2p:stun_prober"]["ldflags"].extend( ["-lcrypto", "-lssl"]) def remove_g722(self): self.targets.pop("//webrtc/modules/audio_coding:g722") self.targets.pop("//webrtc/modules/audio_coding:g722_test") for name, target in self.targets.iteritems(): if "defines" in target: defines = target["defines"] if "WEBRTC_CODEC_G722" in defines: defines.remove("WEBRTC_CODEC_G722") if "CODEC_G722" in defines: defines.remove("CODEC_G722") if "include_dirs" in target: include_dirs = target["include_dirs"] if "//webrtc/modules/audio_coding/codecs/g722/include/" in include_dirs: include_dirs.remove( "//webrtc/modules/audio_coding/codecs/g722/include/") if not "deps" in target: continue deps = target["deps"] if "//webrtc/modules/audio_coding:g722" in deps: deps.remove("//webrtc/modules/audio_coding:g722") if "//webrtc/modules/audio_coding:g722_test" in target["deps"]: deps.remove("//webrtc/modules/audio_coding:g722_test") def generate(self): lines = self.starting_lines 
lines.extend(self._initialize_frameworks()) for name, target in self.targets.iteritems(): lines.append("\n".join( self.generate_target(self.sanitize_target_name(name), target))) lines.extend(self.generate_libwebrtc_target()) lines.extend(self.ending_lines) self.write_lazily("\n".join(lines)) def _initialize_frameworks(self): lines = [] frameworks = [] for name, target in self.targets.iteritems(): if ('sdk' in name and not "peerconnection" in name): continue if "libs" in target: frameworks.extend(target["libs"]) frameworks = list(set(frameworks)) for framework in frameworks: framework = framework.replace(".framework", "") lines.append("find_library(" + framework.upper() + "_LIBRARY " + framework + ")") return lines def write_lazily(self, content): if (self.filesystem.exists(self.outputFilename)): old_content = self.filesystem.read_text_file(self.outputFilename) if old_content == content: return self.filesystem.write_text_file(self.outputFilename, content) def sanitize_target_name(self, name): return "".join([step.title() for step in re.split('/|:', name)]) def convert_deps(self, names): return " ".join([self.sanitize_target_name(name) for name in names]) def convert_source(self, source): return source.replace("//out", "${LIBWEBRTC_OUTPUT_DIR}").replace( "//", "${LIBWEBRTC_INPUT_DIR}/") def convert_input(self, input): return input.replace("//out", "${LIBWEBRTC_OUTPUT_DIR}").replace( "//", "${LIBWEBRTC_INPUT_DIR}/") def convert_inputs(self, inputs): return " ".join(inputs).replace("//out", "${LIBWEBRTC_OUTPUT_DIR}").replace( "//", "${LIBWEBRTC_INPUT_DIR}/") def convert_output(self, output): return output.replace("//out", "${LIBWEBRTC_OUTPUT_DIR}") def convert_outputs(self, outputs): return " ".join(outputs).replace("//out", "${LIBWEBRTC_OUTPUT_DIR}") def generate_libwebrtc_target(self): skipped_sources = [ "//webrtc/base/sha1.cc", "//webrtc/base/sha1digest.cc", "//webrtc/base/md5.cc", "//webrtc/base/md5digest.cc", "//webrtc/base/json.cc" 
"//third_party/jsoncpp/overrides/src/lib_json/json_reader.cpp", "//third_party/jsoncpp/overrides/src/lib_json/json_value.cpp", "//third_party/jsoncpp/source/src/lib_json/json_writer.cpp" ] lines = [] lines.append("# Start of target LIBWEBRTC") objects = [] dependencies = [] for name, target in self.targets.iteritems(): if target["testonly"] or name.startswith("//webrtc/examples"): continue if "source_outputs" in target: for source, output in target["source_outputs"].iteritems(): if source in skipped_sources: continue if source.endswith(".o"): continue dependencies.append(self.sanitize_target_name(name)) if source.endswith(".asm"): objects.append(output[0].replace("_action", "")) elif output[0].endswith(".o"): filename = source.replace("//out/", "").replace("//", "Source/") if not filename.endswith(".o"): filename += ".o" objects.append( ("CMakeFiles/" + self.sanitize_target_name(name) + ".dir/" + filename)) dependencies = list(set(dependencies)) lines.append( "file(WRITE ${LIBWEBRTC_OUTPUT_DIR}/list_libwebrtc_objects \"" + "\n".join(objects) + "\")") lines.append( "add_custom_command(OUTPUT ${LIBWEBRTC_OUTPUT_DIR}/../libwebrtc.a") lines.append( " COMMAND libtool -static -o ${LIBWEBRTC_OUTPUT_DIR}/../libwebrtc.a -filelist ${LIBWEBRTC_OUTPUT_DIR}/list_libwebrtc_objects" ) lines.append(" VERBATIM)") lines.append("add_custom_target(LIBWEBRTC DEPENDS " + " ".join(dependencies) + " ${LIBWEBRTC_OUTPUT_DIR}/../libwebrtc.a)") lines.append("# End of target LIBWEBRTC") return lines def generate_target(self, name, target): if (self.skip_test_targets and target["testonly"]) or name.startswith("WebrtcExamples"): return [] lines = ["\n# Start of target " + name] if target["type"] == "action": lines.extend(self.generate_action_target(name, target)) elif target["type"] == "action_foreach": lines.extend(self.generate_action_foreach_target(name, target)) elif target["type"] == "copy": lines.extend(self.generate_copy_target(name, target)) elif target["type"] == "executable": 
lines.extend(self.generate_executable_target(name, target)) elif target["type"] == "shared_library": lines.extend(self.generate_shared_library_target(name, target)) elif target["type"] == "static_library": lines.extend(self.generate_static_library_target(name, target)) elif target["type"] == "create_bundle": lines.extend(self.generate_bundle_target(name, target)) elif target["type"] == "bundle_data": lines.extend(self.generate_bundle_data_target(name, target)) elif target["type"] == "group": lines.extend(self.generate_group_target(name, target)) elif target["type"] == "source_set": lines.extend(self.generate_source_set_target(name, target)) else: raise "unsupported target type: " + target["type"] lines.append("# End of target " + name) return lines def convert_arguments(self, arguments): value = "" is_first = True for argument in arguments: if not is_first: value += " " is_first = False if (argument.startswith("../")): value += "${LIBWEBRTC_INPUT_DIR}/" + argument[3:] elif (argument.startswith("gen/")): value += "${LIBWEBRTC_OUTPUT_DIR}/" + argument elif (argument.startswith("-I../")): value += "-I${LIBWEBRTC_INPUT_DIR}/" + argument[5:] elif (argument == "-I."): value += "-I${LIBWEBRTC_OUTPUT_DIR}" elif (argument == "-I.."): value += "-I${LIBWEBRTC_INPUT_DIR}" elif (argument == "-Igen"): value += "-I${LIBWEBRTC_OUTPUT_DIR}/gen" else: value += argument return value def _generate_add_dependencies(self, name, target): if not "deps" in target: return [] dependencies = self.convert_deps( [dep for dep in target["deps"] if self._is_active_dependency(dep)]) return ["add_dependencies(" + name + " " + dependencies + ")"] if len(dependencies) else [] def _is_active_dependency(self, name): return not ((self.skip_test_targets and self.targets[name]["testonly"]) or name.startswith("//webrtc/examples")) def generate_action_target(self, name, target): lines = [] outputs = self.convert_outputs(target["outputs"]) deps = self.convert_deps(target["deps"]) args = 
self.convert_arguments(target["args"]) script = "${LIBWEBRTC_INPUT_DIR}/" + target["script"][2:] if (script.endswith(".py")): script = "python " + script lines.append("add_custom_command(OUTPUT " + outputs) if deps: lines.append(" DEPENDS " + deps) lines.append(" COMMAND " + script + " " + args) lines.append(" VERBATIM)") lines.append("add_custom_target(" + name + " DEPENDS " + self.convert_deps(target["deps"]) + " " + self.convert_outputs(target["outputs"]) + ")") return lines def generate_action_foreach_target(self, name, target): lines = [] outputs = [self.convert_output(output) for output in target["outputs"]] deps = self.convert_deps(target["deps"]) sources = [self.convert_source(source) for source in target["sources"]] script = "${LIBWEBRTC_INPUT_DIR}/" + target["script"][2:] if (script.endswith(".py")): script = "python " + script for output, source in zip(outputs, sources): args = self.convert_arguments(target["args"]) args = args.replace("{{source}}", source).replace( "{{source_name_part}}", self.filesystem.splitext(self.filesystem.basename(source))[0]) lines.append("add_custom_command(OUTPUT " + output) lines.append(" MAIN_DEPENDENCY " + source) lines.append(" COMMAND " + script + " " + args) if deps: lines.append(" DEPENDS " + deps) lines.append(" VERBATIM)") lines.append("add_custom_target(" + name + " DEPENDS " + " ".join(outputs) + ")") return lines def generate_copy_target(self, name, target): lines = [] outputs = self.convert_outputs(target["outputs"]) sources = [self.convert_source(source) for source in target["sources"]] lines.append("list(APPEND " + name + " " + outputs + ")") for output, source in zip(target["outputs"], sources): lines.append("file(COPY " + source + " DESTINATION " + self.convert_output(output) + ")") lines.append("add_custom_target(" + name) lines.append(" COMMAND echo \"Generating copy target" + name + "\"") lines.append(" VERBATIM)") lines.extend(self._generate_add_dependencies(name, target)) return lines def 
_compute_compile_target_objects(self, name): target = self.targets[name] if target["type"] == "source_set" and not "sources" in target: return [] sources = ["$<TARGET_OBJECTS:" + self.sanitize_target_name(name) + ">"] for dep in self.targets[name]["deps"]: if not self.targets[dep]["type"] == "source_set": continue sources.extend(self._compute_compile_target_objects(dep)) return sources def _compute_compile_target_sources(self, target): sources = [ self.convert_source(source) for source in target["sources"] if not source.endswith(".h") ] if "sources" in target else [] if target["type"] == "source_set": return sources for dep in target["deps"]: if not self.targets[dep]["type"] == "source_set": continue sources.extend(self._compute_compile_target_objects(dep)) return sources def _generate_compile_target_sources(self, name, target): lines = [] sources = self._compute_compile_target_sources(target) if len(sources): lines.append("set(" + name + "_SOURCES " + "\n ".join(sources) + ")") return lines def _compute_link_flags(self, target): if not "ldflags" in target: return [] flags = target["ldflags"] self._remove_next_flag = False def keep_flag(flag): if self._remove_next_flag: self._remove_next_flag = False return False if flag == "-isysroot": self._remove_next_flag = True return False return True return filter(keep_flag, flags) def _compute_compile_flags(self, target): flags = [] for flag in [ "asmflags", "cflags", "cflags_c", "cflags_cc", "cflags_objc", "cflags_objcc" ]: if flag in target: flags.extend(target[flag]) self._remove_next_flag = False def keep_flag(flag): if self._remove_next_flag: self._remove_next_flag = False return False if flag == "-Xclang": self._remove_next_flag = True return False if flag == "-isysroot": self._remove_next_flag = True return False if flag == "-Wno-undefined-var-template": return False if flag == "-Wno-nonportable-include-path": return False if flag == "-Wno-address-of-packed-member": return False if flag == "-std=c++11": return False 
return True cleaned_flags = filter(keep_flag, flags) no_duplicate_flags = [] [ no_duplicate_flags.append(flag) for flag in cleaned_flags if not no_duplicate_flags.count(flag) ] return no_duplicate_flags def compute_include_dirs(self, target): dirs = [] if "include_dirs" in target: dirs.extend(target["include_dirs"]) return dirs def _generate_compile_target_options(self, name, target): lines = [] flags = self._compute_compile_flags(target) compilation_flags = "\" \"".join(flags) lines.append("target_compile_options(" + name + " PRIVATE \"" + compilation_flags + "\")") if "defines" in target: lines.append("target_compile_definitions(" + name + " PRIVATE " + " ".join(target["defines"]) + ")") dirs = list(set(self.compute_include_dirs(target))) if len(dirs): lines.append("target_include_directories(" + name + " PRIVATE " + self.convert_inputs(dirs) + ")") ldflags = self._compute_link_flags(target) if ldflags: lines.append("set_target_properties(" + name + " PROPERTIES LINK_FLAGS \"" + " ".join(ldflags) + "\")") return lines def _compute_linked_libraries(self, target): libraries = [] for dep in target["deps"]: dep_target = self.targets[dep] if dep_target["type"] == "static_library" or dep_target[ "type"] == "shared_library": libraries.append(self.sanitize_target_name(dep)) elif dep_target["type"] == "group" or dep_target[ "type"] == "source_set": libraries.extend(self._compute_linked_libraries(dep_target)) return libraries def _generate_linked_libraries(self, name, target): return [("target_link_libraries(" + name + " " + library + ")") for library in self._compute_linked_libraries(target)] def _handle_frameworks(self, name, target): if not "libs" in target: return [] lines = [] for framework in target["libs"]: framework = framework.replace(".framework", "").upper() lines.append("target_include_directories(" + name + " PRIVATE ${" + framework + "_INCLUDE_DIR})") lines.append("target_link_libraries(" + name + " ${" + framework + "_LIBRARY})") return lines def 
_set_output(self, name, target): if not "outputs" in target: return [] lines = [] output = target["outputs"][0] if not output.startswith("//out/"): raise "Output not in build directory" output_dir = "${LIBWEBRTC_OUTPUT_DIR}/" + self.filesystem.dirname( output[6:]) output_name = self.filesystem.basename(output[6:]) if output_name.startswith("lib") and output_name.endswith(".a"): output_name = output_name[3:-2] lines.append("set_target_properties(" + name + " PROPERTIES RUNTIME_OUTPUT_DIRECTORY " + output_dir + ")") lines.append("set_target_properties(" + name + " PROPERTIES OUTPUT_NAME " + output_name + ")") return lines def generate_executable_target(self, name, target): lines = self._generate_compile_target_sources(name, target) if len(lines): lines.append("add_executable(" + name + " ${" + name + "_SOURCES})") else: lines.append("add_executable(" + name + ")") lines.extend(self._generate_compile_target_options(name, target)) lines.extend(self._set_output(name, target)) lines.extend(self._generate_linked_libraries(name, target)) lines.extend(self._handle_frameworks(name, target)) lines.extend(self._generate_add_dependencies(name, target)) return lines def generate_shared_library_target(self, name, target): lines = self._generate_compile_target_sources(name, target) if len(lines): lines.append("add_library(" + name + " SHARED ${" + name + "_SOURCES})") else: lines.append("add_library(" + name + " SHARED)") lines.extend(self._generate_compile_target_options(name, target)) lines.extend(self._set_output(name, target)) lines.extend(self._generate_linked_libraries(name, target)) lines.extend(self._handle_frameworks(name, target)) lines.extend(self._generate_add_dependencies(name, target)) return lines def generate_static_library_target(self, name, target): lines = self._generate_compile_target_sources(name, target) lines.append("add_library(" + name + " STATIC" + ((" ${" + name + "_SOURCES}") if len(lines) else "") + ")") 
lines.extend(self._generate_compile_target_options(name, target)) lines.extend(self._set_output(name, target)) lines.extend(self._generate_linked_libraries(name, target)) lines.extend(self._handle_frameworks(name, target)) return lines def generate_bundle_data_target(self, name, target): lines = [] lines.append("add_custom_target(" + name + ")") lines.extend(self._generate_add_dependencies(name, target)) return lines def generate_bundle_target(self, name, target): # We replace dynamically Info.plist with a static one. info_plist = "${LIBWEBRTC_INPUT_DIR}/../WebKit/" + self.filesystem.basename( target["bundle_data"]["source_files"][-1]) lines = self.generate_shared_library_target(name, target) lines.append("set_target_properties(" + name + """ PROPERTIES FRAMEWORK TRUE FRAMEWORK_VERSION C MACOSX_FRAMEWORK_INFO_PLIST """ + info_plist + ")") return lines def generate_group_target(self, name, target): lines = [] lines.append("add_custom_target(" + name + ")") lines.extend(self._generate_add_dependencies(name, target)) return lines def generate_source_set_target(self, name, target): if not "sources" in target or not len(target["sources"]): return [] lines = self._generate_compile_target_sources(name, target) if len(lines): lines.append("add_library(" + name + " OBJECT ${" + name + "_SOURCES})") else: lines.append("add_library(" + name + " OBJECT)") lines.extend(self._generate_compile_target_options(name, target)) return lines
class SCMTestBase(unittest.TestCase):
    """Shared fixture and helpers for SCM tests.

    Wraps an Executive and a FileSystem so subclasses run commands and touch
    disk through one place, and provides the _shared_test_* bodies that the
    SVN and Git test classes run against their respective self.scm objects.
    """

    def __init__(self, *args, **kwargs):
        super(SCMTestBase, self).__init__(*args, **kwargs)
        # All four are filled in by setUp() / the concrete subclass.
        self.scm = None
        self.executive = None
        self.fs = None
        self.original_cwd = None

    def setUp(self):
        self.executive = Executive()
        self.fs = FileSystem()
        self.original_cwd = self.fs.getcwd()

    def tearDown(self):
        # Tests chdir into temporary checkouts; always restore the cwd.
        self._chdir(self.original_cwd)

    # -- thin wrappers over FileSystem / Executive ------------------------

    def _join(self, *comps):
        return self.fs.join(*comps)

    def _chdir(self, path):
        self.fs.chdir(path)

    def _mkdir(self, path):
        assert not self.fs.exists(path)
        self.fs.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.fs.mkdtemp(**kwargs))

    def _remove(self, path):
        self.fs.remove(path)

    def _rmtree(self, path):
        self.fs.rmtree(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _run_silent(self, args, **kwargs):
        self.executive.run_and_throw_if_fail(args, quiet=True, **kwargs)

    def _write_text_file(self, path, contents):
        self.fs.write_text_file(path, contents)

    def _write_binary_file(self, path, contents):
        self.fs.write_binary_file(path, contents)

    def _make_diff(self, command, *args):
        # We use this wrapper to disable output decoding. diffs should be
        # treated as binary files since they may include text files of
        # multiple different encodings.
        return self._run([command, "diff"] + list(args), decode_output=False)

    def _svn_diff(self, *args):
        return self._make_diff("svn", *args)

    def _git_diff(self, *args):
        return self._make_diff("git", *args)

    def _svn_add(self, path):
        self._run(["svn", "add", path])

    def _svn_commit(self, message):
        self._run(["svn", "commit", "--quiet", "--message", message])

    # This is a hot function since it's invoked by unittest before calling
    # each test_ method in SVNTest and GitTest. We create a mock SVN repo
    # once and then perform an SVN checkout from a filesystem copy of it
    # since it's expensive to create the mock repo.
    def _set_up_svn_checkout(self):
        global cached_svn_repo_path
        global original_cwd
        if not cached_svn_repo_path:
            # First call in this process: build the template repo once and
            # remember the cwd so cleanup code elsewhere can return to it.
            cached_svn_repo_path = self._set_up_svn_repo()
            original_cwd = self.original_cwd
        self.temp_directory = self._mkdtemp(suffix="svn_test")
        self.svn_repo_path = self._join(self.temp_directory, "repo")
        self.svn_repo_url = "file://%s" % self.svn_repo_path
        self.svn_checkout_path = self._join(self.temp_directory, "checkout")
        # Cheap per-test copy of the cached template repo.
        shutil.copytree(cached_svn_repo_path, self.svn_repo_path)
        self._run([
            'svn', 'checkout', '--quiet', self.svn_repo_url + "/trunk",
            self.svn_checkout_path
        ])

    def _set_up_svn_repo(self):
        """Create the template SVN repo (with a trunk and test commits);
        returns its path."""
        svn_repo_path = self._mkdtemp(suffix="svn_test_repo")
        svn_repo_url = "file://%s" % svn_repo_path  # Not sure this will work on windows
        # git svn complains if we don't pass --pre-1.5-compatible, not sure why:
        # Expected FS format '2'; found format '3' at /usr/local/libexec/git-core//git-svn line 1477
        self._run(
            ['svnadmin', 'create', '--pre-1.5-compatible', svn_repo_path])

        # Create a test svn checkout
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(
            ['svn', 'checkout', '--quiet', svn_repo_url, svn_checkout_path])

        # Create and checkout a trunk dir to match the standard svn
        # configuration to match git-svn's expectations
        self._chdir(svn_checkout_path)
        self._mkdir('trunk')
        self._svn_add('trunk')
        # We can add tags and branches as well if we ever need to test those.
        self._svn_commit('add trunk')

        self._rmtree(svn_checkout_path)

        self._set_up_svn_test_commits(svn_repo_url + "/trunk")
        return svn_repo_path

    def _set_up_svn_test_commits(self, svn_repo_url):
        """Populate the repo with four commits the tests rely on."""
        svn_checkout_path = self._mkdtemp(suffix="svn_test_checkout")
        self._run(
            ['svn', 'checkout', '--quiet', svn_repo_url, svn_checkout_path])

        # Add some test commits
        self._chdir(svn_checkout_path)

        self._write_text_file("test_file", "test1")
        self._svn_add("test_file")
        self._svn_commit("initial commit")

        self._write_text_file("test_file", "test1test2")
        # This used to be the last commit, but doing so broke
        # GitTest.test_apply_git_patch which use the inverse diff of the last commit.
        # svn-apply fails to remove directories in Git, see:
        # https://bugs.webkit.org/show_bug.cgi?id=34871
        self._mkdir("test_dir")
        # Slash should always be the right path separator since we use
        # cygwin on Windows.
        test_file3_path = "test_dir/test_file3"
        self._write_text_file(test_file3_path, "third file")
        self._svn_add("test_dir")
        self._svn_commit("second commit")

        self._write_text_file("test_file", "test1test2test3\n")
        self._write_text_file("test_file2", "second file")
        self._svn_add("test_file2")
        self._svn_commit("third commit")

        # This 4th commit is used to make sure that our patch file handling
        # code correctly treats patches as binary and does not attempt to
        # decode them assuming they're utf-8.
        self._write_binary_file("test_file",
                                u"latin1 test: \u00A0\n".encode("latin-1"))
        self._write_binary_file("test_file2",
                                u"utf-8 test: \u00A0\n".encode("utf-8"))
        self._svn_commit("fourth commit")

        # svn does not seem to update after commit as I would expect.
        self._run(['svn', 'update'])
        self._rmtree(svn_checkout_path)

    def _tear_down_svn_checkout(self):
        self._rmtree(self.temp_directory)

    # -- shared test bodies (run against self.scm by subclasses) ----------

    def _shared_test_add_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())

    def _shared_test_delete_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertNotIn("added_dir", self.scm._added_files())

    def _shared_test_delete_recursively_or_not(self):
        # Deleting one of two added files must keep the sibling tracked.
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file",
                              "more new stuff")
        self.scm.add("added_dir/added_file")
        self.scm.add("added_dir/another_added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.assertIn("added_dir/another_added_file",
                      self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertIn("added_dir/another_added_file",
                      self.scm._added_files())

    def _shared_test_exists(self, scm, commit_function):
        self._chdir(scm.checkout_root)
        self.assertFalse(scm.exists('foo.txt'))
        self._write_text_file('foo.txt', 'some stuff')
        # Not yet added, so exists() is still False.
        self.assertFalse(scm.exists('foo.txt'))
        scm.add('foo.txt')
        commit_function('adding foo')
        self.assertTrue(scm.exists('foo.txt'))
        scm.delete('foo.txt')
        commit_function('deleting foo')
        self.assertFalse(scm.exists('foo.txt'))

    def _shared_test_move(self):
        self._write_text_file('added_file', 'new stuff')
        self.scm.add('added_file')
        self.scm.move('added_file', 'moved_file')
        self.assertIn('moved_file', self.scm._added_files())

    def _shared_test_move_recursive(self):
        self._mkdir("added_dir")
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file',
                              'more new stuff')
        self.scm.add('added_dir')
        self.scm.move('added_dir', 'moved_dir')
        self.assertIn('moved_dir/added_file', self.scm._added_files())
        self.assertIn('moved_dir/another_added_file',
                      self.scm._added_files())
class SCMTestBase(unittest.TestCase):
    """Shared fixture and helpers for SCM tests (git-only variant).

    A trimmed version of the SVN-aware base class: same Executive/FileSystem
    wrappers and _shared_test_* bodies, without any svn repo setup helpers.
    """

    def __init__(self, *args, **kwargs):
        super(SCMTestBase, self).__init__(*args, **kwargs)
        # All four are filled in by setUp() / the concrete subclass.
        self.scm = None
        self.executive = None
        self.fs = None
        self.original_cwd = None

    def setUp(self):
        self.executive = Executive()
        self.fs = FileSystem()
        self.original_cwd = self.fs.getcwd()

    def tearDown(self):
        # Tests chdir into temporary checkouts; always restore the cwd.
        self._chdir(self.original_cwd)

    # -- thin wrappers over FileSystem / Executive ------------------------

    def _join(self, *comps):
        return self.fs.join(*comps)

    def _chdir(self, path):
        self.fs.chdir(path)

    def _mkdir(self, path):
        assert not self.fs.exists(path)
        self.fs.maybe_make_directory(path)

    def _mkdtemp(self, **kwargs):
        return str(self.fs.mkdtemp(**kwargs))

    def _remove(self, path):
        self.fs.remove(path)

    def _rmtree(self, path):
        self.fs.rmtree(path)

    def _run(self, *args, **kwargs):
        return self.executive.run_command(*args, **kwargs)

    def _run_silent(self, args, **kwargs):
        self.executive.run_command(args, **kwargs)

    def _write_text_file(self, path, contents):
        self.fs.write_text_file(path, contents)

    def _write_binary_file(self, path, contents):
        self.fs.write_binary_file(path, contents)

    def _make_diff(self, command, *args):
        # We use this wrapper to disable output decoding. diffs should be
        # treated as binary files since they may include text files of
        # multiple different encodings.
        return self._run([command, "diff"] + list(args), decode_output=False)

    def _git_diff(self, *args):
        return self._make_diff("git", *args)

    # -- shared test bodies (run against self.scm by subclasses) ----------

    def _shared_test_add_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())

    def _shared_test_delete_recursively(self):
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self.scm.add("added_dir/added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertNotIn("added_dir", self.scm._added_files())

    def _shared_test_delete_recursively_or_not(self):
        # Deleting one of two added files must keep the sibling tracked.
        self._mkdir("added_dir")
        self._write_text_file("added_dir/added_file", "new stuff")
        self._write_text_file("added_dir/another_added_file",
                              "more new stuff")
        self.scm.add("added_dir/added_file")
        self.scm.add("added_dir/another_added_file")
        self.assertIn("added_dir/added_file", self.scm._added_files())
        self.assertIn("added_dir/another_added_file",
                      self.scm._added_files())
        self.scm.delete("added_dir/added_file")
        self.assertIn("added_dir/another_added_file",
                      self.scm._added_files())

    def _shared_test_exists(self, scm, commit_function):
        self._chdir(scm.checkout_root)
        self.assertFalse(scm.exists('foo.txt'))
        self._write_text_file('foo.txt', 'some stuff')
        # Not yet added, so exists() is still False.
        self.assertFalse(scm.exists('foo.txt'))
        scm.add('foo.txt')
        commit_function('adding foo')
        self.assertTrue(scm.exists('foo.txt'))
        scm.delete('foo.txt')
        commit_function('deleting foo')
        self.assertFalse(scm.exists('foo.txt'))

    def _shared_test_move(self):
        self._write_text_file('added_file', 'new stuff')
        self.scm.add('added_file')
        self.scm.move('added_file', 'moved_file')
        self.assertIn('moved_file', self.scm._added_files())

    def _shared_test_move_recursive(self):
        self._mkdir("added_dir")
        self._write_text_file('added_dir/added_file', 'new stuff')
        self._write_text_file('added_dir/another_added_file',
                              'more new stuff')
        self.scm.add('added_dir')
        self.scm.move('added_dir', 'moved_dir')
        self.assertIn('moved_dir/added_file', self.scm._added_files())
        self.assertIn('moved_dir/another_added_file',
                      self.scm._added_files())
# ensures that no autoinstalling occurs until a caller imports from # webkitpy.thirdparty. This is useful if the caller wants to configure # logging prior to executing autoinstall code. # FIXME: If any of these servers is offline, webkit-patch breaks (and maybe # other scripts do, too). See <http://webkit.org/b/42080>. # We put auto-installed third-party modules in this directory-- # # webkitpy/thirdparty/autoinstalled fs = FileSystem() fs.maybe_make_directory(_AUTOINSTALLED_DIR) init_path = fs.join(_AUTOINSTALLED_DIR, "__init__.py") if not fs.exists(init_path): fs.write_text_file(init_path, "") readme_path = fs.join(_AUTOINSTALLED_DIR, "README") if not fs.exists(readme_path): fs.write_text_file(readme_path, "This directory is auto-generated by WebKit and is " "safe to delete.\nIt contains needed third-party Python " "packages automatically downloaded from the web.") class AutoinstallImportHook(object): def __init__(self, filesystem=None): self._fs = filesystem or FileSystem() def _ensure_autoinstalled_dir_is_in_sys_path(self):
def test_exists__true(self): fs = FileSystem() self.assertTrue(fs.exists(self._this_file))