def _synchronize_docs(self):
    """Stage the Sphinx documentation trees into <output_dir>/staging.

    Builds an InstallManifest of symlinks for conf.py and every file in
    each registered source tree, copies them into the staging directory,
    then renders index.rst from the index template, filling in the tree
    and Python package toctree entries.
    """
    m = InstallManifest()
    m.add_symlink(self._conf_py_path, 'conf.py')
    for dest, source in sorted(self._trees.items()):
        source_dir = os.path.join(self._topsrcdir, source)
        for root, dirs, files in os.walk(source_dir):
            for f in files:
                source_path = os.path.join(root, f)
                # Path relative to the tree root; +1 skips the separator.
                rel_source = source_path[len(source_dir) + 1:]
                m.add_symlink(source_path, os.path.join(dest, rel_source))
    stage_dir = os.path.join(self._output_dir, 'staging')
    copier = FileCopier()
    m.populate_registry(copier)
    copier.copy(stage_dir)
    with open(self._index_path, 'rb') as fh:
        data = fh.read()
    # toctree entries, one per registered tree / vendored Python package.
    indexes = ['%s/index' % p for p in sorted(self._trees.keys())]
    indexes = '\n '.join(indexes)
    packages = [os.path.basename(p) for p in self._python_package_dirs]
    packages = ['python/%s' % p for p in packages]
    packages = '\n '.join(sorted(packages))
    # NOTE(review): `data` was read in binary mode; bytes.format does not
    # exist on Python 3, so this assumes a Python 2 interpreter — confirm.
    data = data.format(indexes=indexes, python_packages=packages)
    with open(os.path.join(stage_dir, 'index.rst'), 'wb') as fh:
        fh.write(data)
def unpack(source):
    """Flatten a jar chrome or omnijar packaged directory in place.

    The unpacked contents are registered into a FileCopier and written
    back over `source` unconditionally (no mtime-based skipping).
    """
    registry = FileCopier()
    unpack_to_registry(source, registry)
    registry.copy(source, skip_if_older=False)
def test_file_copier(self):
    """End-to-end FileCopier check: an initial copy materializes every
    registered path and reports it in updated_files; after removing a
    subtree from the registry, a second copy prunes those files and any
    now-empty directories and reports them in removed_files."""
    copier = FileCopier()
    copier.add('foo/bar', GeneratedFile('foobar'))
    copier.add('foo/qux', GeneratedFile('fooqux'))
    copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
    copier.add('bar', GeneratedFile('bar'))
    copier.add('qux/foo', GeneratedFile('quxfoo'))
    copier.add('qux/bar', GeneratedFile(''))
    result = copier.copy(self.tmpdir)
    # Everything registered — and nothing else — is on disk.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     set(['foo/deep/nested/directory', 'qux']))
    self.assertEqual(result.updated_files,
                     set(self.tmppath(p) for p in self.all_files(self.tmpdir)))
    self.assertEqual(result.existing_files, set())
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories, set())
    copier.remove('foo')
    copier.add('test', GeneratedFile('test'))
    result = copier.copy(self.tmpdir)
    # Second copy drops the 'foo' subtree and reports the pruned files.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
    self.assertEqual(result.removed_files,
                     set(self.tmppath(p) for p in
                         ('foo/bar', 'foo/qux', 'foo/deep/nested/directory/file')))
def _synchronize_docs(self):
    """Link the Sphinx documentation trees into the docs directory.

    Builds an InstallManifest of symlinks for conf.py and every file in
    each registered source tree, copies them into self._docs_dir, then
    renders index.rst from the index template with the tree and Python
    package toctree entries filled in.
    """
    m = InstallManifest()
    m.add_symlink(self._conf_py_path, "conf.py")
    for dest, source in sorted(self._trees.items()):
        source_dir = os.path.join(self._topsrcdir, source)
        for root, dirs, files in os.walk(source_dir):
            for f in files:
                source_path = os.path.join(root, f)
                # Path relative to the tree root; +1 skips the separator.
                rel_source = source_path[len(source_dir) + 1 :]
                m.add_symlink(source_path, os.path.join(dest, rel_source))
    copier = FileCopier()
    m.populate_registry(copier)
    copier.copy(self._docs_dir)
    with open(self._index_path, "rb") as fh:
        data = fh.read()
    indexes = ["%s/index" % p for p in sorted(self._trees.keys())]
    indexes = "\n ".join(indexes)
    packages = [os.path.basename(p) for p in self._python_package_dirs]
    packages = ["python/%s" % p for p in packages]
    packages = "\n ".join(sorted(packages))
    # NOTE(review): `data` was read in binary mode; bytes.format does not
    # exist on Python 3, so this assumes a Python 2 interpreter — confirm.
    data = data.format(indexes=indexes, python_packages=packages)
    with open(os.path.join(self._docs_dir, "index.rst"), "wb") as fh:
        fh.write(data)
def unpack(source):
    """Convert a jar chrome or omnijar packaged directory into flat form.

    All files are rewritten over `source` regardless of timestamps.
    """
    flat = FileCopier()
    unpack_to_registry(source, flat)
    flat.copy(source, skip_if_older=False)
def test_file_copier(self):
    """End-to-end FileCopier check: an initial copy materializes every
    registered path and reports it in updated_files; after removing a
    subtree from the registry, a second copy prunes those files and any
    now-empty directories and reports them in removed_files."""
    copier = FileCopier()
    copier.add("foo/bar", GeneratedFile(b"foobar"))
    copier.add("foo/qux", GeneratedFile(b"fooqux"))
    copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
    copier.add("bar", GeneratedFile(b"bar"))
    copier.add("qux/foo", GeneratedFile(b"quxfoo"))
    copier.add("qux/bar", GeneratedFile(b""))
    result = copier.copy(self.tmpdir)
    # Everything registered — and nothing else — is on disk.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     set(["foo/deep/nested/directory", "qux"]))
    self.assertEqual(
        result.updated_files,
        set(self.tmppath(p) for p in self.all_files(self.tmpdir)),
    )
    self.assertEqual(result.existing_files, set())
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories, set())
    copier.remove("foo")
    copier.add("test", GeneratedFile(b"test"))
    result = copier.copy(self.tmpdir)
    # Second copy drops the 'foo' subtree and reports the pruned files.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
    self.assertEqual(
        result.removed_files,
        set(
            self.tmppath(p)
            for p in ("foo/bar", "foo/qux", "foo/deep/nested/directory/file")),
    )
def main():
    """Command-line entry point: merge `dir2` into `dir1`, producing
    Universal (multi-arch) binaries for executables and libraries.

    Writes the merged result over `dir1` in place.
    """
    parser = argparse.ArgumentParser(
        description="Merge two directories, creating Universal binaries for "
        "executables and libraries they contain.")
    parser.add_argument("dir1", help="Directory")
    parser.add_argument("dir2", help="Directory to merge")
    options = parser.parse_args()
    # This runs outside a configured build, so fake the substs that the
    # file-handling machinery reads; the lipo tool comes from the
    # environment. (presumably required by ExecutableFile handling —
    # confirm against mozbuild.)
    buildconfig.substs["OS_ARCH"] = "Darwin"
    buildconfig.substs["LIPO"] = os.environ.get("LIPO")
    dir1_finder = FileFinder(options.dir1, find_executables=True, find_dotfiles=True)
    dir2_finder = FileFinder(options.dir2, find_executables=True, find_dotfiles=True)
    finder = UnifiedTestFinder(dir1_finder, dir2_finder)
    copier = FileCopier()
    # Accumulate per-file errors and report them all at once.
    with errors.accumulate():
        for p, f in finder:
            copier.add(p, f)
    copier.copy(options.dir1, skip_if_older=False)
def unpack(source, omnijar_name=None):
    """Turn a jar chrome or omnijar packaged directory into a flat package.

    `omnijar_name`, when given, overrides the omnijar file name to look for.
    The flattened contents are written back over `source` unconditionally.
    """
    registry = FileCopier()
    unpack_to_registry(source, registry, omnijar_name)
    registry.copy(source, skip_if_older=False)
def _synchronize_docs(self):
    """Stage the Sphinx documentation trees into <output_dir>/staging.

    Symlinks conf.py and every file of each registered source tree into
    the staging directory via an InstallManifest, then renders index.rst
    from the index template with the toctree entries filled in.
    """
    m = InstallManifest()
    m.add_symlink(self._conf_py_path, 'conf.py')
    for dest, source in sorted(self._trees.items()):
        source_dir = os.path.join(self._topsrcdir, source)
        for root, dirs, files in os.walk(source_dir):
            for f in files:
                source_path = os.path.join(root, f)
                # Relative to the tree root; +1 skips the path separator.
                rel_source = source_path[len(source_dir) + 1:]
                m.add_symlink(source_path, os.path.join(dest, rel_source))
    stage_dir = os.path.join(self._output_dir, 'staging')
    copier = FileCopier()
    m.populate_registry(copier)
    copier.copy(stage_dir)
    with open(self._index_path, 'rb') as fh:
        data = fh.read()
    indexes = ['%s/index' % p for p in sorted(self._trees.keys())]
    indexes = '\n '.join(indexes)
    packages = [os.path.basename(p) for p in self._python_package_dirs]
    packages = ['python/%s' % p for p in packages]
    packages = '\n '.join(sorted(packages))
    # NOTE(review): `data` is bytes; bytes.format does not exist on
    # Python 3 — this assumes Python 2. Confirm target interpreter.
    data = data.format(indexes=indexes, python_packages=packages)
    with open(os.path.join(stage_dir, 'index.rst'), 'wb') as fh:
        fh.write(data)
def test_file_copier(self):
    """End-to-end FileCopier check: an initial copy materializes every
    registered path; a second copy, after removing a subtree from the
    registry, prunes its files and now-empty directories."""
    copier = FileCopier()
    copier.add("foo/bar", GeneratedFile("foobar"))
    copier.add("foo/qux", GeneratedFile("fooqux"))
    copier.add("foo/deep/nested/directory/file", GeneratedFile("fooz"))
    copier.add("bar", GeneratedFile("bar"))
    copier.add("qux/foo", GeneratedFile("quxfoo"))
    copier.add("qux/bar", GeneratedFile(""))
    result = copier.copy(self.tmpdir)
    # Everything registered — and nothing else — is on disk.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     set(["foo/deep/nested/directory", "qux"]))
    self.assertEqual(result.updated_files,
                     set(self.tmppath(p) for p in self.all_files(self.tmpdir)))
    self.assertEqual(result.existing_files, set())
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories, set())
    copier.remove("foo")
    copier.add("test", GeneratedFile("test"))
    result = copier.copy(self.tmpdir)
    # Second copy drops the 'foo' subtree and reports the pruned files.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
    self.assertEqual(
        result.removed_files,
        set(self.tmppath(p) for p in
            ("foo/bar", "foo/qux", "foo/deep/nested/directory/file"))
    )
def test_file_copier(self):
    """End-to-end FileCopier check: initial copy materializes every
    registered path; after `remove('foo')` a second copy prunes the
    subtree's files and empty directories and reports them."""
    copier = FileCopier()
    copier.add('foo/bar', GeneratedFile('foobar'))
    copier.add('foo/qux', GeneratedFile('fooqux'))
    copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
    copier.add('bar', GeneratedFile('bar'))
    copier.add('qux/foo', GeneratedFile('quxfoo'))
    copier.add('qux/bar', GeneratedFile(''))
    result = copier.copy(self.tmpdir)
    # Everything registered — and nothing else — is on disk.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     set(['foo/deep/nested/directory', 'qux']))
    self.assertEqual(
        result.updated_files,
        set(self.tmppath(p) for p in self.all_files(self.tmpdir)))
    self.assertEqual(result.existing_files, set())
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories, set())
    copier.remove('foo')
    copier.add('test', GeneratedFile('test'))
    result = copier.copy(self.tmpdir)
    # Second copy drops the 'foo' subtree and reports the pruned files.
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
    self.assertEqual(
        result.removed_files,
        set(
            self.tmppath(p)
            for p in ('foo/bar', 'foo/qux', 'foo/deep/nested/directory/file')))
def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir.

    This is invoked by test runners to avoid installing tens of thousands
    of test files when only a few tests need to be run.

    :param topsrcdir: source directory root.
    :param topobjdir: object directory root.
    :param tests_root: path of the tests directory relative to topobjdir.
    :param test_objs: test metadata dicts; falsy means "install everything".
    """
    if test_objs:
        manifest = _make_install_manifest(topsrcdir, topobjdir, test_objs)
    else:
        # If we don't actually have a list of tests to install we install
        # test and support files wholesale.
        manifest = InstallManifest(
            mozpath.join(topobjdir, '_build_manifests', 'install',
                         '_test_files'))
    harness_files_manifest = mozpath.join(topobjdir, '_build_manifests',
                                          'install', tests_root)
    if os.path.isfile(harness_files_manifest):
        # If the backend has generated an install manifest for test harness
        # files they are treated as a monolith and installed each time we
        # run tests. Fortunately there are not very many.
        manifest |= InstallManifest(harness_files_manifest)
    copier = FileCopier()
    manifest.populate_registry(copier)
    # Don't delete objdir files that this manifest doesn't know about.
    copier.copy(mozpath.join(topobjdir, tests_root), remove_unaccounted=False)
def strip(dir):
    """Strip all strippable executables and libraries under `dir` in place.

    The FileFinder gives us ExecutableFile instances for files that can be
    stripped, and copying ExecutableFiles defaults to stripping them when
    buildconfig.substs['PKG_STRIP'] is set.
    """
    finder = FileFinder(dir, find_executables=True)
    registry = FileCopier()
    for relpath, entry in finder:
        registry.add(relpath, entry)
    registry.copy(dir)
def strip(dir):
    """Strip all strippable executables and libraries under `dir` in place.

    The FileFinder gives us ExecutableFile instances for files that can be
    stripped, and copying ExecutableFiles defaults to stripping them unless
    buildconfig.substs['PKG_SKIP_STRIP'] is set.
    """
    registry = FileCopier()
    for relpath, entry in FileFinder(dir):
        registry.add(relpath, entry)
    registry.copy(dir)
def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir.

    This is invoked by test runners to avoid installing tens of thousands
    of test files when only a few tests need to be run.
    """
    # Map each test flavor to its (root, prefix, install) install info.
    flavor_info = {flavor: (root, prefix, install)
                   for (flavor, root, prefix, install) in
                   TEST_MANIFESTS.values()}
    objdir_dest = mozpath.join(topobjdir, tests_root)

    converter = SupportFilesConverter()
    install_info = TestInstallInfo()
    for o in test_objs:
        flavor = o['flavor']
        if flavor not in flavor_info:
            # This is a test flavor that isn't installed by the build system.
            continue
        root, prefix, install = flavor_info[flavor]
        if not install:
            # This flavor isn't installed to the objdir.
            continue

        manifest_path = o['manifest']
        manifest_dir = mozpath.dirname(manifest_path)

        # Destination directory mirrors the manifest's srcdir-relative path.
        out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
        file_relpath = o['file_relpath']
        source = mozpath.join(topsrcdir, file_relpath)
        dest = mozpath.join(root, prefix, file_relpath)
        if 'install-to-subdir' in o:
            out_dir = mozpath.join(out_dir, o['install-to-subdir'])
            manifest_relpath = mozpath.relpath(source,
                                               mozpath.dirname(manifest_path))
            dest = mozpath.join(out_dir, manifest_relpath)

        install_info.installs.append((source, dest))
        install_info |= converter.convert_support_files(o, root, manifest_dir,
                                                        out_dir)

    manifest = InstallManifest()

    for source, dest in set(install_info.installs):
        # Entries tracked as external installs are handled elsewhere.
        if dest in install_info.external_installs:
            continue
        manifest.add_symlink(source, dest)
    for base, pattern, dest in install_info.pattern_installs:
        manifest.add_pattern_symlink(base, pattern, dest)

    _resolve_installs(install_info.deferred_installs, topobjdir, manifest)

    # Harness files are treated as a monolith and installed each time we run tests.
    # Fortunately there are not very many.
    manifest |= InstallManifest(mozpath.join(topobjdir, '_build_manifests',
                                             'install', tests_root))
    copier = FileCopier()
    manifest.populate_registry(copier)
    # Don't delete objdir files this manifest doesn't know about.
    copier.copy(objdir_dest, remove_unaccounted=False)
def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
    """Installs the requested test files to the objdir.

    This is invoked by test runners to avoid installing tens of thousands
    of test files when only a few tests need to be run.
    """
    # Map each test flavor to its (root, prefix, install) install info.
    flavor_info = {
        flavor: (root, prefix, install)
        for (flavor, root, prefix, install) in TEST_MANIFESTS.values()
    }
    objdir_dest = mozpath.join(topobjdir, tests_root)

    converter = SupportFilesConverter()
    install_info = TestInstallInfo()
    for o in test_objs:
        flavor = o['flavor']
        if flavor not in flavor_info:
            # This is a test flavor that isn't installed by the build system.
            continue
        root, prefix, install = flavor_info[flavor]
        if not install:
            # This flavor isn't installed to the objdir.
            continue

        manifest_path = o['manifest']
        manifest_dir = mozpath.dirname(manifest_path)

        # Destination directory mirrors the manifest's srcdir-relative path.
        out_dir = mozpath.join(root, prefix, manifest_dir[len(topsrcdir) + 1:])
        file_relpath = o['file_relpath']
        source = mozpath.join(topsrcdir, file_relpath)
        dest = mozpath.join(root, prefix, file_relpath)
        if 'install-to-subdir' in o:
            out_dir = mozpath.join(out_dir, o['install-to-subdir'])
            manifest_relpath = mozpath.relpath(source,
                                               mozpath.dirname(manifest_path))
            dest = mozpath.join(out_dir, manifest_relpath)

        install_info.installs.append((source, dest))
        install_info |= converter.convert_support_files(
            o, root, manifest_dir, out_dir)

    manifest = InstallManifest()

    for source, dest in set(install_info.installs):
        # Entries tracked as external installs are handled elsewhere.
        if dest in install_info.external_installs:
            continue
        manifest.add_symlink(source, dest)
    for base, pattern, dest in install_info.pattern_installs:
        manifest.add_pattern_symlink(base, pattern, dest)

    _resolve_installs(install_info.deferred_installs, topobjdir, manifest)

    # Harness files are treated as a monolith and installed each time we run tests.
    # Fortunately there are not very many.
    manifest |= InstallManifest(
        mozpath.join(topobjdir, '_build_manifests', 'install', tests_root))
    copier = FileCopier()
    manifest.populate_registry(copier)
    # Don't delete objdir files this manifest doesn't know about.
    copier.copy(objdir_dest, remove_unaccounted=False)
def unpack(source):
    """Repackage a jar chrome or omnijar directory as a flat package.

    Startup-cache entries are dropped rather than re-packaged; everything
    else is written back over `source` unconditionally.
    """
    registry = FileCopier()
    packager = SimplePackager(FlatFormatter(registry))
    for path, entry in UnpackFinder(source).find('*'):
        # Skip anything that lives under a startup cache directory.
        if mozpack.path.split(path)[0] in STARTUP_CACHE_PATHS:
            continue
        packager.add(path, entry)
    packager.close()
    registry.copy(source, skip_if_older=False)
def unpack(source):
    '''
    Transform a jar chrome or omnijar packaged directory into a flat package.
    '''
    copier = FileCopier()
    finder = UnpackFinder(source)
    packager = SimplePackager(FlatFormatter(copier))
    for p, f in finder.find('*'):
        # Startup cache entries are not carried over to the flat package.
        if mozpack.path.split(p)[0] not in STARTUP_CACHE_PATHS:
            packager.add(p, f)
    packager.close()
    # Rewrite in place, regardless of timestamps.
    copier.copy(source, skip_if_older=False)
def main():
    """Command-line entry point: merge two single-arch builds of a
    Gecko-based application into one Universal build, written over `app1`.

    Picks the output package format (flat/jar/omni) to match app1's.
    """
    parser = argparse.ArgumentParser(
        description=
        "Merge two builds of a Gecko-based application into a Universal build")
    parser.add_argument("app1", help="Directory containing the application")
    parser.add_argument("app2",
                        help="Directory containing the application to merge")
    parser.add_argument(
        "--non-resource",
        nargs="+",
        metavar="PATTERN",
        default=[],
        help="Extra files not to be considered as resources",
    )
    options = parser.parse_args()
    # This runs outside a configured build, so fake the substs the
    # packaging machinery reads; lipo comes from the environment.
    buildconfig.substs["OS_ARCH"] = "Darwin"
    buildconfig.substs["LIPO"] = os.environ.get("LIPO")
    app1_finder = UnpackFinder(FileFinder(options.app1, find_executables=True))
    app2_finder = UnpackFinder(FileFinder(options.app2, find_executables=True))
    app_finder = UnifiedBuildFinderWasmHack(app1_finder, app2_finder)
    copier = FileCopier()
    compress = min(app1_finder.compressed, JAR_DEFLATED)
    # Mirror app1's packaging kind in the merged output.
    if app1_finder.kind == "flat":
        formatter = FlatFormatter(copier)
    elif app1_finder.kind == "jar":
        formatter = JarFormatter(copier, compress=compress)
    elif app1_finder.kind == "omni":
        formatter = OmniJarFormatter(
            copier,
            app1_finder.omnijar,
            compress=compress,
            non_resources=options.non_resource,
        )
    # Accumulate per-file errors and report them all at once.
    with errors.accumulate():
        packager = SimplePackager(formatter)
        for p, f in app_finder:
            packager.add(p, f)
        packager.close()
        # Transplant jar preloading information.
        for path, log in six.iteritems(app1_finder.jarlogs):
            assert isinstance(copier[path], Jarrer)
            copier[path].preload(log)
    copier.copy(options.app1, skip_if_older=False)
def repack(source, l10n, extra_l10n={}, non_resources=[], non_chrome=set()):
    '''
    Replace localized data from the `source` directory with localized data
    from `l10n` and `extra_l10n`.

    The `source` argument points to a directory containing a packaged
    application (in omnijar, jar or flat form).
    The `l10n` argument points to a directory containing the main localized
    data (usually in the form of a language pack addon) to use to replace
    in the packaged application.
    The `extra_l10n` argument contains a dict associating relative paths in
    the source to separate directories containing localized data for them.
    This can be used to point at different language pack addons for different
    parts of the package application.
    The `non_resources` argument gives a list of relative paths in the source
    that should not be added in an omnijar in case the packaged application
    is in that format.
    The `non_chrome` argument gives a list of file/directory patterns for
    localized files that are not listed in a chrome.manifest.
    '''
    # NOTE: the mutable default arguments are never mutated here, so the
    # usual shared-default pitfall does not apply.
    app_finder = UnpackFinder(source)
    l10n_finder = UnpackFinder(l10n)
    if extra_l10n:
        # Compose the main l10n finder with per-path extra finders.
        finders = {
            '': l10n_finder,
        }
        # NOTE(review): dict.iteritems is Python 2 only; .items() works on
        # both — confirm the target interpreter.
        for base, path in extra_l10n.iteritems():
            finders[base] = UnpackFinder(path)
        l10n_finder = ComposedFinder(finders)
    copier = FileCopier()
    compress = min(app_finder.compressed, JAR_DEFLATED)
    # Repack in the same format the source application uses.
    if app_finder.kind == 'flat':
        formatter = FlatFormatter(copier)
    elif app_finder.kind == 'jar':
        formatter = JarFormatter(copier,
                                 optimize=app_finder.optimizedjars,
                                 compress=compress)
    elif app_finder.kind == 'omni':
        formatter = OmniJarFormatter(copier,
                                     app_finder.omnijar,
                                     optimize=app_finder.optimizedjars,
                                     compress=compress,
                                     non_resources=non_resources)
    # Accumulate per-file errors and report them all at once.
    with errors.accumulate():
        _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
    copier.copy(source, skip_if_older=False)
    generate_precomplete(source)
def test_remove_unaccounted_file_registry(self):
    """Test FileCopier.copy(remove_unaccounted=FileRegistry())

    Passing a registry (rather than True/False) restricts removal to
    paths that registry knows about; pre-existing unrelated files are
    left alone.
    """
    dest = self.tmppath("dest")
    copier = FileCopier()
    copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
    copier.add("foo/bar/qux", GeneratedFile(b"foobarqux"))
    copier.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
    copier.add("foo/toto/tata", GeneratedFile(b"footototata"))
    # Seed the destination with files the copier knows nothing about.
    os.makedirs(os.path.join(dest, "bar"))
    with open(os.path.join(dest, "bar", "bar"), "w") as fh:
        fh.write("barbar")
    os.makedirs(os.path.join(dest, "foo", "toto"))
    with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh:
        fh.write("foototototo")
    result = copier.copy(dest, remove_unaccounted=False)
    # Unaccounted files survive a copy with remove_unaccounted=False.
    self.assertEqual(self.all_files(dest),
                     set(copier.paths()) | {"foo/toto/toto", "bar/bar"})
    self.assertEqual(self.all_dirs(dest),
                     {"foo/bar", "foo/hoge", "foo/toto", "bar"})
    copier2 = FileCopier()
    copier2.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
    # We expect only files copied from the first copier to be removed,
    # not the extra file that was there beforehand.
    result = copier2.copy(dest, remove_unaccounted=copier)
    self.assertEqual(self.all_files(dest),
                     set(copier2.paths()) | {"foo/toto/toto", "bar/bar"})
    self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"})
    self.assertEqual(result.updated_files,
                     {self.tmppath("dest/foo/hoge/fuga")})
    self.assertEqual(result.existing_files, set())
    self.assertEqual(
        result.removed_files,
        {
            self.tmppath(p)
            for p in ("dest/foo/bar/baz", "dest/foo/bar/qux",
                      "dest/foo/toto/tata")
        },
    )
    self.assertEqual(result.removed_directories,
                     {self.tmppath("dest/foo/bar")})
def test_optional_exists_creates_unneeded_directory(self):
    """Demonstrate that a directory not strictly required, but specified
    as the path to an optional file, will be unnecessarily created.

    This behaviour is wrong; fixing it is tracked by Bug 972432; and
    this test exists to guard against unexpected changes in behaviour.
    """
    dest = self.tmppath("dest")

    copier = FileCopier()
    # An optional file that exists nowhere.
    copier.add("foo/bar", ExistingFile(required=False))

    result = copier.copy(dest)

    # The parent directory is created even though nothing was copied
    # into it.
    st = os.lstat(self.tmppath("dest/foo"))
    self.assertFalse(stat.S_ISLNK(st.st_mode))
    self.assertTrue(stat.S_ISDIR(st.st_mode))

    # What's worse, we have no record that dest was created.
    # (assertEquals is a deprecated unittest alias; use assertEqual.)
    self.assertEqual(len(result.updated_files), 0)

    # But we do have an erroneous record of an optional file
    # existing when it does not.
    self.assertIn(self.tmppath("dest/foo/bar"), result.existing_files)
def test_symlink_directory(self):
    """Directory symlinks in destination are deleted."""
    if not self.symlink_supported:
        # Platform cannot create symlinks; nothing to test.
        return
    dest = self.tmppath('dest')
    copier = FileCopier()
    copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
    os.makedirs(self.tmppath('dest/foo'))
    # Plant a symlinked directory where 'foo/bar' must become a real one.
    dummy = self.tmppath('dummy')
    os.mkdir(dummy)
    link = self.tmppath('dest/foo/bar')
    os.symlink(dummy, link)
    result = copier.copy(dest)
    # The symlink was replaced by an actual directory.
    st = os.lstat(link)
    self.assertFalse(stat.S_ISLNK(st.st_mode))
    self.assertTrue(stat.S_ISDIR(st.st_mode))
    self.assertEqual(self.all_files(dest), set(copier.paths()))
    self.assertEqual(result.removed_directories, set())
    self.assertEqual(len(result.updated_files), 1)
def test_symlink_directory_replaced(self):
    """Directory symlinks in destination are replaced if they need to be
    real directories."""
    if not self.symlink_supported:
        # Platform cannot create symlinks; nothing to test.
        return
    dest = self.tmppath('dest')
    copier = FileCopier()
    copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
    os.makedirs(self.tmppath('dest/foo'))
    # Plant a symlinked directory where 'foo/bar' must become a real one.
    dummy = self.tmppath('dummy')
    os.mkdir(dummy)
    link = self.tmppath('dest/foo/bar')
    os.symlink(dummy, link)
    result = copier.copy(dest)
    # The symlink was replaced by an actual directory.
    st = os.lstat(link)
    self.assertFalse(stat.S_ISLNK(st.st_mode))
    self.assertTrue(stat.S_ISDIR(st.st_mode))
    self.assertEqual(self.all_files(dest), set(copier.paths()))
    self.assertEqual(result.removed_directories, set())
    self.assertEqual(len(result.updated_files), 1)
def test_symlink_directory_replaced(self):
    """Directory symlinks in destination are replaced if they need to be
    real directories."""
    if not self.symlink_supported:
        # Platform cannot create symlinks; nothing to test.
        return
    dest = self.tmppath("dest")
    copier = FileCopier()
    copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
    os.makedirs(self.tmppath("dest/foo"))
    # Plant a symlinked directory where 'foo/bar' must become a real one.
    dummy = self.tmppath("dummy")
    os.mkdir(dummy)
    link = self.tmppath("dest/foo/bar")
    os.symlink(dummy, link)
    result = copier.copy(dest)
    # The symlink was replaced by an actual directory.
    st = os.lstat(link)
    self.assertFalse(stat.S_ISLNK(st.st_mode))
    self.assertTrue(stat.S_ISDIR(st.st_mode))
    self.assertEqual(self.all_files(dest), set(copier.paths()))
    self.assertEqual(result.removed_directories, set())
    self.assertEqual(len(result.updated_files), 1)
def test_optional_exists_creates_unneeded_directory(self):
    """Demonstrate that a directory not strictly required, but specified
    as the path to an optional file, will be unnecessarily created.

    This behaviour is wrong; fixing it is tracked by Bug 972432; and
    this test exists to guard against unexpected changes in behaviour.
    """
    dest = self.tmppath('dest')

    copier = FileCopier()
    # An optional file that exists nowhere.
    copier.add('foo/bar', ExistingFile(required=False))

    result = copier.copy(dest)

    # The parent directory is created even though nothing was copied
    # into it.
    st = os.lstat(self.tmppath('dest/foo'))
    self.assertFalse(stat.S_ISLNK(st.st_mode))
    self.assertTrue(stat.S_ISDIR(st.st_mode))

    # What's worse, we have no record that dest was created.
    # (assertEquals is a deprecated unittest alias; use assertEqual.)
    self.assertEqual(len(result.updated_files), 0)

    # But we do have an erroneous record of an optional file
    # existing when it does not.
    self.assertIn(self.tmppath('dest/foo/bar'), result.existing_files)
def repack(source, l10n, extra_l10n={}, non_resources=[], non_chrome=set()):
    '''
    Replace localized data from the `source` directory with localized data
    from `l10n` and `extra_l10n`.

    The `source` argument points to a directory containing a packaged
    application (in omnijar, jar or flat form).
    The `l10n` argument points to a directory containing the main localized
    data (usually in the form of a language pack addon) to use to replace
    in the packaged application.
    The `extra_l10n` argument contains a dict associating relative paths in
    the source to separate directories containing localized data for them.
    This can be used to point at different language pack addons for different
    parts of the package application.
    The `non_resources` argument gives a list of relative paths in the source
    that should not be added in an omnijar in case the packaged application
    is in that format.
    The `non_chrome` argument gives a list of file/directory patterns for
    localized files that are not listed in a chrome.manifest.
    '''
    # NOTE: the mutable default arguments are never mutated here, so the
    # usual shared-default pitfall does not apply.
    app_finder = UnpackFinder(source)
    l10n_finder = UnpackFinder(l10n)
    if extra_l10n:
        # Compose the main l10n finder with per-path extra finders.
        finders = {
            '': l10n_finder,
        }
        # NOTE(review): dict.iteritems is Python 2 only; .items() works on
        # both — confirm the target interpreter.
        for base, path in extra_l10n.iteritems():
            finders[base] = UnpackFinder(path)
        l10n_finder = ComposedFinder(finders)
    copier = FileCopier()
    compress = min(app_finder.compressed, JAR_DEFLATED)
    # Repack in the same format the source application uses.
    if app_finder.kind == 'flat':
        formatter = FlatFormatter(copier)
    elif app_finder.kind == 'jar':
        formatter = JarFormatter(copier,
                                 optimize=app_finder.optimizedjars,
                                 compress=compress)
    elif app_finder.kind == 'omni':
        formatter = OmniJarFormatter(copier,
                                     app_finder.omnijar,
                                     optimize=app_finder.optimizedjars,
                                     compress=compress,
                                     non_resources=non_resources)
    # Accumulate per-file errors and report them all at once.
    with errors.accumulate():
        _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
    copier.copy(source, skip_if_older=False)
    generate_precomplete(source)
def process_manifest(destdir, paths, remove_unaccounted=True):
    """Apply the union of the given install manifests to `destdir`.

    Returns the copy result describing what was updated and removed.
    """
    combined = InstallManifest()
    for manifest_path in paths:
        combined |= InstallManifest(path=manifest_path)
    registry = FileCopier()
    combined.populate_registry(registry)
    return registry.copy(destdir, remove_unaccounted=remove_unaccounted)
def process_manifest(destdir, paths, remove_unaccounted=True):
    """Merge every install manifest in `paths` and copy it into `destdir`.

    Returns the copy result describing what was updated and removed.
    """
    merged = InstallManifest()
    for manifest_file in paths:
        merged |= InstallManifest(path=manifest_file)
    file_copier = FileCopier()
    merged.populate_registry(file_copier)
    result = file_copier.copy(destdir, remove_unaccounted=remove_unaccounted)
    return result
def _synchronize_docs(self):
    """Stage the Sphinx documentation trees into self.staging_dir.

    Links conf.py and every file of each registered source tree into the
    staging directory via an InstallManifest, then renders index.rst from
    the index template.  Only trees not nested under another tree's index
    appear in the top-level toctree.
    """
    m = InstallManifest()
    m.add_link(self.conf_py_path, 'conf.py')
    for dest, source in sorted(self.trees.items()):
        source_dir = os.path.join(self.topsrcdir, source)
        for root, dirs, files in os.walk(source_dir):
            for f in files:
                source_path = os.path.join(root, f)
                # Relative to the tree root; +1 skips the path separator.
                rel_source = source_path[len(source_dir) + 1:]
                m.add_link(source_path, os.path.join(dest, rel_source))
    copier = FileCopier()
    m.populate_registry(copier)
    copier.copy(self.staging_dir)
    with open(self.index_path, 'rb') as fh:
        data = fh.read()

    def is_toplevel(key):
        """Whether the tree is nested under the toplevel index, or is
        nested under another tree's index.
        """
        for k in self.trees:
            if k == key:
                continue
            if key.startswith(k):
                return False
        return True

    toplevel_trees = {
        k: v for k, v in self.trees.items() if is_toplevel(k)
    }
    indexes = ['%s/index' % p for p in sorted(toplevel_trees.keys())]
    indexes = '\n '.join(indexes)
    packages = [os.path.basename(p) for p in self.python_package_dirs]
    packages = ['python/%s' % p for p in packages]
    packages = '\n '.join(sorted(packages))
    # NOTE(review): `data` is bytes; bytes.format does not exist on
    # Python 3 — this assumes Python 2. Confirm target interpreter.
    data = data.format(indexes=indexes, python_packages=packages)
    with open(os.path.join(self.staging_dir, 'index.rst'), 'wb') as fh:
        fh.write(data)
def process_manifest(destdir, *paths):
    """Union the named install manifests and copy their contents into
    `destdir`, returning the copy result."""
    union = InstallManifest()
    for manifest_file in paths:
        union |= InstallManifest(path=manifest_file)
    registry = FileCopier()
    union.populate_registry(registry)
    return registry.copy(destdir)
def repack(source, l10n, non_resources=[], non_chrome=set()):
    """Replace localized data in the `source` package with the localized
    data found under `l10n`, preserving the source's packaging format.

    `non_resources` lists relative paths to keep out of an omnijar;
    `non_chrome` lists patterns for localized files not in a
    chrome.manifest.  (The mutable defaults are never mutated here.)
    """
    app_finder = UnpackFinder(source)
    l10n_finder = UnpackFinder(l10n)
    registry = FileCopier()
    kind = app_finder.kind
    # Pick a formatter matching the source application's packaging kind.
    if kind == 'flat':
        formatter = FlatFormatter(registry)
    elif kind == 'jar':
        formatter = JarFormatter(registry, optimize=app_finder.optimizedjars)
    elif kind == 'omni':
        formatter = OmniJarFormatter(registry, app_finder.omnijar,
                                     optimize=app_finder.optimizedjars,
                                     non_resources=non_resources)
    # Accumulate per-file errors and report them all at once.
    with errors.accumulate():
        _repack(app_finder, l10n_finder, registry, formatter, non_chrome)
    registry.copy(source, skip_if_older=False)
    generate_precomplete(source)
def test_remove_unaccounted_file_registry(self):
    """Test FileCopier.copy(remove_unaccounted=FileRegistry())

    Passing a registry (rather than True/False) restricts removal to
    paths that registry knows about; pre-existing unrelated files are
    left alone.
    """
    dest = self.tmppath('dest')
    copier = FileCopier()
    copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
    copier.add('foo/bar/qux', GeneratedFile('foobarqux'))
    copier.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
    copier.add('foo/toto/tata', GeneratedFile('footototata'))
    # Seed the destination with files the copier knows nothing about.
    os.makedirs(os.path.join(dest, 'bar'))
    with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh:
        fh.write('barbar')
    os.makedirs(os.path.join(dest, 'foo', 'toto'))
    with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh:
        fh.write('foototototo')
    result = copier.copy(dest, remove_unaccounted=False)
    # Unaccounted files survive a copy with remove_unaccounted=False.
    self.assertEqual(self.all_files(dest),
                     set(copier.paths()) | {'foo/toto/toto', 'bar/bar'})
    self.assertEqual(self.all_dirs(dest),
                     {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'})
    copier2 = FileCopier()
    copier2.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
    # We expect only files copied from the first copier to be removed,
    # not the extra file that was there beforehand.
    result = copier2.copy(dest, remove_unaccounted=copier)
    self.assertEqual(self.all_files(dest),
                     set(copier2.paths()) | {'foo/toto/toto', 'bar/bar'})
    self.assertEqual(self.all_dirs(dest), {'foo/hoge', 'foo/toto', 'bar'})
    self.assertEqual(result.updated_files,
                     {self.tmppath('dest/foo/hoge/fuga')})
    self.assertEqual(result.existing_files, set())
    self.assertEqual(
        result.removed_files,
        {
            self.tmppath(p)
            for p in ('dest/foo/bar/baz', 'dest/foo/bar/qux',
                      'dest/foo/toto/tata')
        })
    self.assertEqual(result.removed_directories,
                     {self.tmppath('dest/foo/bar')})
def test_copier_application(self):
    """Apply the test InstallManifest through a FileCopier and verify
    each entry type: symlinked, copied, preprocessed, existing-required,
    existing-optional and generated-content destinations, plus removal
    of an unaccounted pre-existing file."""
    dest = self.tmppath("dest")
    os.mkdir(dest)
    # Pre-existing file the manifest does not account for.
    to_delete = self.tmppath("dest/to_delete")
    with open(to_delete, "a"):
        pass
    # Source files for the symlink/copy/preprocess manifest entries.
    with open(self.tmppath("s_source"), "wt") as fh:
        fh.write("symlink!")
    with open(self.tmppath("c_source"), "wt") as fh:
        fh.write("copy!")
    with open(self.tmppath("p_source"), "wt") as fh:
        fh.write("#define FOO 1\npreprocess!")
    # Destinations that must already exist for the "existing" entries.
    with open(self.tmppath("dest/e_dest"), "a"):
        pass
    with open(self.tmppath("dest/o_dest"), "a"):
        pass
    m = self._get_test_manifest()
    c = FileCopier()
    m.populate_registry(c)
    result = c.copy(dest)
    self.assertTrue(os.path.exists(self.tmppath("dest/s_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/c_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/e_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/o_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/content")))
    self.assertFalse(os.path.exists(to_delete))
    # Contents came through intact (preprocessing stripped the #define).
    with open(self.tmppath("dest/s_dest"), "rt") as fh:
        self.assertEqual(fh.read(), "symlink!")
    with open(self.tmppath("dest/c_dest"), "rt") as fh:
        self.assertEqual(fh.read(), "copy!")
    with open(self.tmppath("dest/p_dest"), "rt") as fh:
        self.assertEqual(fh.read(), "preprocess!")
    self.assertEqual(
        result.updated_files,
        set(
            self.tmppath(p)
            for p in ("dest/s_dest", "dest/c_dest", "dest/p_dest",
                      "dest/content")),
    )
    self.assertEqual(
        result.existing_files,
        set([self.tmppath("dest/e_dest"), self.tmppath("dest/o_dest")]),
    )
    self.assertEqual(result.removed_files, {to_delete})
    self.assertEqual(result.removed_directories, set())
def setup_benchmarks(self):
    """Make sure benchmarks are linked to the proper location in the objdir.

    Benchmarks can either live in-tree or in an external repository. In
    the latter case also clone/update the repository if necessary.
    """
    print("Updating external benchmarks from {}".format(
        BENCHMARK_REPOSITORY))

    # Set up the external repo: clone on first use, otherwise fast-forward
    # master, then pin to the expected revision.
    external_repo_path = os.path.join(get_state_dir()[0],
                                      'performance-tests')
    if not os.path.isdir(external_repo_path):
        subprocess.check_call(
            ['git', 'clone', BENCHMARK_REPOSITORY, external_repo_path])
    else:
        subprocess.check_call(['git', 'checkout', 'master'],
                              cwd=external_repo_path)
        subprocess.check_call(['git', 'pull'], cwd=external_repo_path)
    subprocess.check_call(['git', 'checkout', BENCHMARK_REVISION],
                          cwd=external_repo_path)

    # Link benchmarks to the objdir
    benchmark_paths = (
        os.path.join(external_repo_path, 'benchmarks'),
        os.path.join(self.topsrcdir, 'third_party', 'webkit',
                     'PerformanceTests'),
    )
    manifest = InstallManifest()
    for benchmark_path in benchmark_paths:
        for path in os.listdir(benchmark_path):
            abspath = os.path.join(benchmark_path, path)
            # Only link benchmark directories; skip files and dot-entries.
            if not os.path.isdir(abspath) or path.startswith('.'):
                continue
            manifest.add_link(abspath, path)
    copier = FileCopier()
    manifest.populate_registry(copier)
    copier.copy(
        os.path.join(self.topobjdir, 'testing', 'raptor', 'benchmarks'))
def test_copier_application(self):
    """Apply the test manifest via FileCopier and verify every result set.

    Covers symlink/copy/preprocess outputs, preserved 'exists'/'optional'
    files, and removal of files not accounted for by the manifest.
    """
    dest = self.tmppath('dest')
    os.mkdir(dest)
    # Pre-existing unaccounted file: must be removed by copy().
    to_delete = self.tmppath('dest/to_delete')
    with open(to_delete, 'a'):
        pass
    # Sources for the symlink / copy / preprocess manifest entries.
    with open(self.tmppath('s_source'), 'wt') as fh:
        fh.write('symlink!')
    with open(self.tmppath('c_source'), 'wt') as fh:
        fh.write('copy!')
    with open(self.tmppath('p_source'), 'wt') as fh:
        fh.write('#define FOO 1\npreprocess!')
    # Destination files declared as existing/optional; must survive.
    with open(self.tmppath('dest/e_dest'), 'a'):
        pass
    with open(self.tmppath('dest/o_dest'), 'a'):
        pass
    m = self._get_test_manifest()
    c = FileCopier()
    m.populate_registry(c)
    result = c.copy(dest)
    self.assertTrue(os.path.exists(self.tmppath('dest/s_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/c_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/e_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/o_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/content')))
    self.assertFalse(os.path.exists(to_delete))
    with open(self.tmppath('dest/s_dest'), 'rt') as fh:
        self.assertEqual(fh.read(), 'symlink!')
    with open(self.tmppath('dest/c_dest'), 'rt') as fh:
        self.assertEqual(fh.read(), 'copy!')
    # Preprocessing drops the '#define' line.
    with open(self.tmppath('dest/p_dest'), 'rt') as fh:
        self.assertEqual(fh.read(), 'preprocess!')
    self.assertEqual(
        result.updated_files,
        set(
            self.tmppath(p)
            for p in ('dest/s_dest', 'dest/c_dest', 'dest/p_dest',
                      'dest/content')))
    self.assertEqual(
        result.existing_files,
        set([self.tmppath('dest/e_dest'), self.tmppath('dest/o_dest')]))
    self.assertEqual(result.removed_files, {to_delete})
    self.assertEqual(result.removed_directories, set())
def test_file_copier(self):
    """Copying a registry mirrors it on disk; a second copy after removals
    prunes the dropped files and any directories left empty."""
    copier = FileCopier()
    initial_contents = {
        'foo/bar': 'foobar',
        'foo/qux': 'fooqux',
        'foo/deep/nested/directory/file': 'fooz',
        'bar': 'bar',
        'qux/foo': 'quxfoo',
        'qux/bar': '',
    }
    for dest_path, content in initial_contents.items():
        copier.add(dest_path, GeneratedFile(content))

    copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     {'foo/deep/nested/directory', 'qux'})

    # Drop the whole foo/ subtree, register a new file, and re-apply.
    copier.remove('foo')
    copier.add('test', GeneratedFile('test'))
    copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), {'qux'})
def test_file_copier(self):
    """FileCopier materializes registered paths, and re-copying after a
    subtree removal deletes the stale files and empty directories."""
    copier = FileCopier()
    for path, data in (('foo/bar', 'foobar'),
                       ('foo/qux', 'fooqux'),
                       ('foo/deep/nested/directory/file', 'fooz'),
                       ('bar', 'bar'),
                       ('qux/foo', 'quxfoo'),
                       ('qux/bar', '')):
        copier.add(path, GeneratedFile(data))

    copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     {'foo/deep/nested/directory', 'qux'})

    # Remove a subtree and add a replacement, then copy again.
    copier.remove('foo')
    copier.add('test', GeneratedFile('test'))
    copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), {'qux'})
def test_remove_unaccounted_file_registry(self):
    """Test FileCopier.copy(remove_unaccounted=FileRegistry()).

    Passing a FileRegistry as remove_unaccounted must remove only files a
    previous copy installed, never files that were in the destination
    beforehand. (Fix: dropped stray ';' statement terminators.)
    """
    dest = self.tmppath('dest')

    copier = FileCopier()
    copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))
    copier.add('foo/bar/qux', GeneratedFile('foobarqux'))
    copier.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))
    copier.add('foo/toto/tata', GeneratedFile('footototata'))

    # Seed the destination with files the copier knows nothing about.
    os.makedirs(os.path.join(dest, 'bar'))
    with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh:
        fh.write('barbar')
    os.makedirs(os.path.join(dest, 'foo', 'toto'))
    with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh:
        fh.write('foototototo')

    result = copier.copy(dest, remove_unaccounted=False)

    self.assertEqual(self.all_files(dest),
                     set(copier.paths()) | {'foo/toto/toto', 'bar/bar'})
    self.assertEqual(self.all_dirs(dest),
                     {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'})

    copier2 = FileCopier()
    copier2.add('foo/hoge/fuga', GeneratedFile('foohogefuga'))

    # We expect only files copied from the first copier to be removed,
    # not the extra file that was there beforehand.
    result = copier2.copy(dest, remove_unaccounted=copier)

    self.assertEqual(self.all_files(dest),
                     set(copier2.paths()) | {'foo/toto/toto', 'bar/bar'})
    self.assertEqual(self.all_dirs(dest), {'foo/hoge', 'foo/toto', 'bar'})
    self.assertEqual(result.updated_files,
                     {self.tmppath('dest/foo/hoge/fuga')})
    self.assertEqual(result.existing_files, set())
    self.assertEqual(result.removed_files,
                     {self.tmppath(p) for p in ('dest/foo/bar/baz',
                                                'dest/foo/bar/qux',
                                                'dest/foo/toto/tata')})
    self.assertEqual(result.removed_directories,
                     {self.tmppath('dest/foo/bar')})
def main():
    """Merge the crashreporter symbols from dir2 into dir1, in place."""
    parser = argparse.ArgumentParser(
        description="Merge two crashreporter symbols directories."
    )
    parser.add_argument("dir1", help="Directory")
    parser.add_argument("dir2", help="Directory to merge")
    args = parser.parse_args()

    # Unified view over the two symbol trees.
    unified = UnifiedSymbolsFinder(FileFinder(args.dir1),
                                   FileFinder(args.dir2))

    copier = FileCopier()
    # Accumulate per-file errors instead of aborting on the first one.
    with errors.accumulate():
        for path, entry in unified:
            copier.add(path, entry)

    # Write the merged result back into dir1, even over newer files.
    copier.copy(args.dir1, skip_if_older=False)
def install_test_files(topsrcdir, topobjdir, tests_root):
    """Installs the requested test files to the objdir.

    This is invoked by test runners to avoid installing tens of thousands
    of test files when only a few tests need to be run.
    """
    install_manifest_dir = mozpath.join(topobjdir, "_build_manifests",
                                        "install")
    manifest = InstallManifest(
        mozpath.join(install_manifest_dir, "_test_files"))

    harness_files_manifest = mozpath.join(install_manifest_dir, tests_root)
    if os.path.isfile(harness_files_manifest):
        # If the backend has generated an install manifest for test harness
        # files they are treated as a monolith and installed each time we
        # run tests. Fortunately there are not very many.
        manifest |= InstallManifest(harness_files_manifest)

    copier = FileCopier()
    manifest.populate_registry(copier)
    copier.copy(mozpath.join(topobjdir, tests_root),
                remove_unaccounted=False)
def test_remove_unaccounted_file_registry(self):
    """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""
    dest = self.tmppath("dest")

    copier = FileCopier()
    copier.add("foo/bar/baz", GeneratedFile("foobarbaz"))
    copier.add("foo/bar/qux", GeneratedFile("foobarqux"))
    copier.add("foo/hoge/fuga", GeneratedFile("foohogefuga"))
    copier.add("foo/toto/tata", GeneratedFile("footototata"))

    # Seed the destination with files the copier does not know about.
    os.makedirs(os.path.join(dest, "bar"))
    with open(os.path.join(dest, "bar", "bar"), "w") as fh:
        fh.write("barbar")
    os.makedirs(os.path.join(dest, "foo", "toto"))
    with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh:
        fh.write("foototototo")

    # First pass keeps the pre-existing files (remove_unaccounted=False).
    result = copier.copy(dest, remove_unaccounted=False)

    self.assertEqual(self.all_files(dest),
                     set(copier.paths()) | {"foo/toto/toto", "bar/bar"})
    self.assertEqual(self.all_dirs(dest),
                     {"foo/bar", "foo/hoge", "foo/toto", "bar"})

    copier2 = FileCopier()
    copier2.add("foo/hoge/fuga", GeneratedFile("foohogefuga"))

    # We expect only files copied from the first copier to be removed,
    # not the extra file that was there beforehand.
    result = copier2.copy(dest, remove_unaccounted=copier)

    self.assertEqual(self.all_files(dest),
                     set(copier2.paths()) | {"foo/toto/toto", "bar/bar"})
    self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"})
    self.assertEqual(result.updated_files,
                     {self.tmppath("dest/foo/hoge/fuga")})
    self.assertEqual(result.existing_files, set())
    self.assertEqual(
        result.removed_files,
        {self.tmppath(p)
         for p in ("dest/foo/bar/baz", "dest/foo/bar/qux",
                   "dest/foo/toto/tata")},
    )
    self.assertEqual(result.removed_directories,
                     {self.tmppath("dest/foo/bar")})
def test_copier_application(self):
    """Apply a full test manifest and check outputs plus all result sets."""
    dest = self.tmppath('dest')
    os.mkdir(dest)
    # Unaccounted pre-existing file: copy() must remove it.
    to_delete = self.tmppath('dest/to_delete')
    with open(to_delete, 'a'):
        pass
    # Sources for the symlink / copy / preprocess entries.
    with open(self.tmppath('s_source'), 'wt') as fh:
        fh.write('symlink!')
    with open(self.tmppath('c_source'), 'wt') as fh:
        fh.write('copy!')
    with open(self.tmppath('p_source'), 'wt') as fh:
        fh.write('#define FOO 1\npreprocess!')
    # Destination files marked existing/optional; must be preserved.
    with open(self.tmppath('dest/e_dest'), 'a'):
        pass
    with open(self.tmppath('dest/o_dest'), 'a'):
        pass
    m = self._get_test_manifest()
    c = FileCopier()
    m.populate_registry(c)
    result = c.copy(dest)
    self.assertTrue(os.path.exists(self.tmppath('dest/s_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/c_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/p_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/e_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/o_dest')))
    self.assertTrue(os.path.exists(self.tmppath('dest/content')))
    self.assertFalse(os.path.exists(to_delete))
    with open(self.tmppath('dest/s_dest'), 'rt') as fh:
        self.assertEqual(fh.read(), 'symlink!')
    with open(self.tmppath('dest/c_dest'), 'rt') as fh:
        self.assertEqual(fh.read(), 'copy!')
    # The '#define' line is consumed by preprocessing.
    with open(self.tmppath('dest/p_dest'), 'rt') as fh:
        self.assertEqual(fh.read(), 'preprocess!')
    self.assertEqual(result.updated_files, set(self.tmppath(p) for p in (
        'dest/s_dest', 'dest/c_dest', 'dest/p_dest', 'dest/content')))
    self.assertEqual(result.existing_files,
                     set([self.tmppath('dest/e_dest'),
                          self.tmppath('dest/o_dest')]))
    self.assertEqual(result.removed_files, {to_delete})
    self.assertEqual(result.removed_directories, set())
def process_manifest(destdir, paths, remove_unaccounted=True,
                     remove_all_directory_symlinks=True,
                     remove_empty_directories=True):
    """Union the install manifests at *paths* and apply them to *destdir*.

    Returns the result object produced by FileCopier.copy().
    """
    combined = InstallManifest()
    for manifest_path in paths:
        combined |= InstallManifest(path=manifest_path)

    copier = FileCopier()
    combined.populate_registry(copier)
    return copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories)
def test_copier_application(self):
    """Apply the test manifest (variant without a 'content' entry) and
    verify the produced files and all result sets."""
    dest = self.tmppath("dest")
    os.mkdir(dest)
    # Unaccounted pre-existing file: copy() must delete it.
    to_delete = self.tmppath("dest/to_delete")
    with open(to_delete, "a"):
        pass
    # Sources for the symlink / copy / preprocess manifest entries.
    with open(self.tmppath("s_source"), "wt") as fh:
        fh.write("symlink!")
    with open(self.tmppath("c_source"), "wt") as fh:
        fh.write("copy!")
    with open(self.tmppath("p_source"), "wt") as fh:
        fh.write("#define FOO 1\npreprocess!")
    # Destination files marked existing/optional; must survive the copy.
    with open(self.tmppath("dest/e_dest"), "a"):
        pass
    with open(self.tmppath("dest/o_dest"), "a"):
        pass
    m = self._get_test_manifest()
    c = FileCopier()
    m.populate_registry(c)
    result = c.copy(dest)
    self.assertTrue(os.path.exists(self.tmppath("dest/s_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/c_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/e_dest")))
    self.assertTrue(os.path.exists(self.tmppath("dest/o_dest")))
    self.assertFalse(os.path.exists(to_delete))
    with open(self.tmppath("dest/s_dest"), "rt") as fh:
        self.assertEqual(fh.read(), "symlink!")
    with open(self.tmppath("dest/c_dest"), "rt") as fh:
        self.assertEqual(fh.read(), "copy!")
    # Preprocessing removes the '#define' directive line.
    with open(self.tmppath("dest/p_dest"), "rt") as fh:
        self.assertEqual(fh.read(), "preprocess!")
    self.assertEqual(
        result.updated_files,
        set(self.tmppath(p)
            for p in ("dest/s_dest", "dest/c_dest", "dest/p_dest"))
    )
    self.assertEqual(result.existing_files,
                     set([self.tmppath("dest/e_dest"),
                          self.tmppath("dest/o_dest")]))
    self.assertEqual(result.removed_files, {to_delete})
    self.assertEqual(result.removed_directories, set())
def test_no_remove(self):
    """With remove_unaccounted=False, stray files survive the copy, but
    empty directories are still pruned."""
    copier = FileCopier()
    copier.add('foo', GeneratedFile('foo'))

    # One unaccounted file and one empty directory in the destination.
    with open(self.tmppath('bar'), 'a'):
        pass
    os.mkdir(self.tmppath('emptydir'))

    result = copier.copy(self.tmpdir, remove_unaccounted=False)

    self.assertEqual(self.all_files(self.tmpdir), {'foo', 'bar'})
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories,
                     {self.tmppath('emptydir')})
def test_no_remove(self):
    """remove_unaccounted=False keeps stray files; empty directories are
    still removed and reported."""
    copier = FileCopier()
    copier.add('foo', GeneratedFile('foo'))
    # Unaccounted file that must survive the copy.
    with open(self.tmppath('bar'), 'a'):
        pass
    # Empty directory that is still pruned despite remove_unaccounted=False.
    os.mkdir(self.tmppath('emptydir'))
    result = copier.copy(self.tmpdir, remove_unaccounted=False)
    self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar']))
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories,
                     set([self.tmppath('emptydir')]))
def process_manifest(destdir, paths, track=None,
                     remove_unaccounted=True,
                     remove_all_directory_symlinks=True,
                     remove_empty_directories=True,
                     defines={}):
    """Apply the union of the install manifests at *paths* to *destdir*.

    When *track* is given, it names a tracking manifest from a previous run:
    only files installed by that run are candidates for removal. Returns the
    FileCopier.copy() result.

    NOTE(review): `defines={}` is a mutable default argument; it appears to
    be read-only here (only passed through as defines_override), but confirm
    populate_registry never mutates it.
    """
    if track:
        if os.path.exists(track):
            # We use the same format as install manifests for the tracking
            # data.
            manifest = InstallManifest(path=track)
            remove_unaccounted = FileRegistry()
            dummy_file = BaseFile()

            finder = FileFinder(destdir, find_executables=False,
                                find_dotfiles=True)
            for dest in manifest._dests:
                # Glob destinations are expanded against what is actually on
                # disk; literal destinations are registered directly.
                if '*' in dest:
                    for p, f in finder.find(dest):
                        remove_unaccounted.add(p, dummy_file)
                else:
                    remove_unaccounted.add(dest, dummy_file)
        else:
            # If tracking is enabled and there is no file, we don't want to
            # be removing anything.
            remove_unaccounted = False
            remove_empty_directories = False
            remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier, defines_override=defines)
    result = copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories)
    if track:
        # Persist what was installed so the next run can prune precisely.
        manifest.write(path=track)
    return result
def process_manifest(destdir, paths, remove_unaccounted=True,
                     remove_all_directory_symlinks=True,
                     remove_empty_directories=True):
    """Union the install manifests at *paths* and apply them to *destdir*.

    Returns the result object from FileCopier.copy(); the three keyword
    flags are forwarded to it unchanged.
    """
    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier)
    return copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories)
def process_manifest(
    destdir,
    paths,
    track=None,
    remove_unaccounted=True,
    remove_all_directory_symlinks=True,
    remove_empty_directories=True,
    defines=None,
):
    """Apply the union of the install manifests at *paths* to *destdir*.

    When *track* names an existing tracking manifest from a previous run,
    removal is restricted to files that run installed; when *track* is set
    but missing, nothing is removed. Returns the FileCopier.copy() result.

    Fix: `defines` previously used a mutable default argument (`{}`);
    normalize None to a fresh dict instead (behavior unchanged).
    """
    if defines is None:
        defines = {}

    if track:
        if os.path.exists(track):
            # We use the same format as install manifests for the tracking
            # data.
            manifest = InstallManifest(path=track)
            remove_unaccounted = FileRegistry()
            dummy_file = BaseFile()

            finder = FileFinder(destdir, find_executables=False,
                                find_dotfiles=True)
            for dest in manifest._dests:
                for p, f in finder.find(dest):
                    remove_unaccounted.add(p, dummy_file)
        else:
            # If tracking is enabled and there is no file, we don't want to
            # be removing anything.
            remove_unaccounted = False
            remove_empty_directories = False
            remove_all_directory_symlinks = False

    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    manifest.populate_registry(copier, defines_override=defines)
    result = copier.copy(
        destdir,
        remove_unaccounted=remove_unaccounted,
        remove_all_directory_symlinks=remove_all_directory_symlinks,
        remove_empty_directories=remove_empty_directories,
    )
    if track:
        # Persist what was installed so the next run can prune precisely.
        manifest.write(path=track)
    return result
def test_no_remove_empty_directories(self):
    """With both remove_unaccounted and remove_empty_directories disabled,
    stray files and empty directories are all left untouched."""
    copier = FileCopier()
    copier.add("foo", GeneratedFile("foo"))

    # Unaccounted file, an empty directory, and a populated directory.
    with open(self.tmppath("bar"), "a"):
        pass
    os.mkdir(self.tmppath("emptydir"))
    populated_dir = self.tmppath("populateddir")
    os.mkdir(populated_dir)
    with open(self.tmppath("populateddir/foo"), "a"):
        pass

    result = copier.copy(self.tmpdir, remove_unaccounted=False,
                         remove_empty_directories=False)

    self.assertEqual(self.all_files(self.tmpdir),
                     {"foo", "bar", "populateddir/foo"})
    self.assertEqual(self.all_dirs(self.tmpdir),
                     {"emptydir", "populateddir"})
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories, set())
def test_permissions(self):
    """Ensure files without write permission can be deleted.

    Fix: the octal literals were written in Python-2-only form (``0400``),
    which is a SyntaxError under Python 3; use the ``0o400`` form.
    """
    with open(self.tmppath('dummy'), 'a'):
        pass

    p = self.tmppath('no_perms')
    with open(p, 'a'):
        pass

    # Make file and directory unwritable. Reminder: making a directory
    # unwritable prevents modifications (including deletes) from the list
    # of files in that directory.
    os.chmod(p, 0o400)
    os.chmod(self.tmpdir, 0o400)

    copier = FileCopier()
    copier.add('dummy', GeneratedFile('content'))
    result = copier.copy(self.tmpdir)
    self.assertEqual(result.removed_files_count, 1)
    self.assertFalse(os.path.exists(p))
def main():
    """Package a build: read the package manifest, format the chrome data
    (flat/jar/omni), optionally unify two builds, sign shared libraries,
    set up jar preloading and the startup cache, then copy everything to
    the destination directory."""
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--unify', default='',
                        help='Base directory of another build to unify with')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()
    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    # Pick the formatter matching the requested chrome packaging format.
    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    # Normalize RESPATH to a relative path (strip any leading slashes).
    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if args.unify:
        def is_native(path):
            path = os.path.abspath(path)
            return platform.machine() in mozpath.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify],
                                         key=is_native, reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = buildconfig.substs['LIBXUL_DIST']

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
        )
        if args.js_binary:
            # Verify minified JS by AST comparison against the original.
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'js-compare-ast.js')
            ]
        if args.unify:
            finder = UnifiedBuildFinder(FileFinder(args.source),
                                        FileFinder(args.unify),
                                        **finder_args)
        else:
            finder = FileFinder(args.source, **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            # No manifest: package everything under bin/.
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            # Preprocess the removed-files source and add the result to the
            # package under <respath>/removed-files.
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac:
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase,
                                    buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    # Fill startup cache
    if isinstance(formatter, OmniJarFormatter) and launcher.can_launch() \
            and buildconfig.substs['MOZ_DISABLE_STARTUPCACHE'] != '1':
        gre_path = None

        def get_bases():
            # Yield each package base, preferring the bin/-prefixed layout
            # when it exists on disk.
            for b in sink.packager.get_bases(addons=False):
                for p in (mozpath.join('bin', b), b):
                    if os.path.exists(os.path.join(args.source, p)):
                        yield p
                        break

        for base in sorted(get_bases()):
            # First (sorted) base doubles as the GRE path.
            if not gre_path:
                gre_path = base
            base_path = sink.normalize_path(base)
            if base_path in formatter.omnijars:
                precompile_cache(formatter.omnijars[base_path],
                                 args.source, gre_path, base)

    copier.copy(args.destination)
def test_preprocessor_dependencies(self):
    """Preprocessed manifest entries must be re-generated when the source
    OR any #included file changes.

    NOTE: mtimes are manipulated explicitly throughout; the exact order of
    writes and os.utime() calls is what makes each stage trigger (or not
    trigger) a re-preprocess.
    """
    manifest = self.tmppath('m')
    deps = self.tmppath('m.pp')
    dest = self.tmppath('dest')
    source = self.tmppath('p_source')
    destfile = self.tmppath('dest/p_dest')
    include = self.tmppath('p_incl')

    os.mkdir(dest)

    # Backdate the source and include files so freshly written outputs are
    # considered newer than them.
    with open(source, 'wt') as fh:
        fh.write('#define SRC\nSOURCE\n')
    time = os.path.getmtime(source) - 3
    os.utime(source, (time, time))

    with open(include, 'wt') as fh:
        fh.write('INCLUDE\n')
    time = os.path.getmtime(source) - 3
    os.utime(include, (time, time))

    # Create and write a manifest with the preprocessed file.
    m = InstallManifest()
    m.add_preprocess(source, 'p_dest', deps, '#', {'FOO':'BAR', 'BAZ':'QUX'})
    m.write(path=manifest)
    time = os.path.getmtime(source) - 5
    os.utime(manifest, (time, time))

    # Now read the manifest back in, and apply it. This should write out
    # our preprocessed file.
    m = InstallManifest(path=manifest)
    c = FileCopier()
    m.populate_registry(c)
    self.assertTrue(c.copy(dest))

    with open(destfile, 'rt') as fh:
        self.assertEqual(fh.read(), 'SOURCE\n')

    # Next, modify the source to #INCLUDE another file.
    with open(source, 'wt') as fh:
        fh.write('SOURCE\n#include p_incl\n')
    time = os.path.getmtime(source) - 1
    os.utime(destfile, (time, time))

    # Apply the manifest, and confirm that it also reads the newly included
    # file.
    m = InstallManifest(path=manifest)
    c = FileCopier()
    m.populate_registry(c)
    c.copy(dest)

    with open(destfile, 'rt') as fh:
        self.assertEqual(fh.read(), 'SOURCE\nINCLUDE\n')

    # Set the time on the source file back, so it won't be picked up as
    # modified in the next test.
    time = os.path.getmtime(source) - 1
    os.utime(source, (time, time))

    # Now, modify the include file (but not the original source).
    with open(include, 'wt') as fh:
        fh.write('INCLUDE MODIFIED\n')
    time = os.path.getmtime(include) - 1
    os.utime(destfile, (time, time))

    # Apply the manifest, and confirm that the change to the include file
    # is detected. That should cause the preprocessor to run again.
    m = InstallManifest(path=manifest)
    c = FileCopier()
    m.populate_registry(c)
    c.copy(dest)

    with open(destfile, 'rt') as fh:
        self.assertEqual(fh.read(), 'SOURCE\nINCLUDE MODIFIED\n')

    # ORing an InstallManifest should copy file dependencies
    m = InstallManifest()
    m |= InstallManifest(path=manifest)
    c = FileCopier()
    m.populate_registry(c)
    e = c._files['p_dest']
    self.assertEqual(e.extra_depends, [manifest])
def _process_android_eclipse_project_data(self, data, srcdir, objdir):
    """Generate an Android Eclipse project directory from templates.

    Writes the project's install manifest, builds the IDE_* define set from
    *data*, preprocesses every template file with those defines, and copies
    the result into the per-project output directory.
    """
    # This can't be relative to the environment's topsrcdir,
    # because during testing topsrcdir is faked.
    template_directory = os.path.abspath(
        mozpath.join(os.path.dirname(__file__), 'templates',
                     'android_eclipse'))

    project_directory = mozpath.join(self.environment.topobjdir,
                                     'android_eclipse', data.name)
    manifest_path = mozpath.join(self.environment.topobjdir,
                                 'android_eclipse',
                                 '%s.manifest' % data.name)

    manifest = self._manifest_for_project(srcdir, data)
    ensureParentDir(manifest_path)
    manifest.write(path=manifest_path)

    # Serialize each classpath entry, referenced project and extra jar to
    # an XML string; sorted for deterministic output.
    classpathentries = []
    for cpe in sorted(data._classpathentries, key=lambda x: x.path):
        e = self._Element_for_classpathentry(cpe)
        classpathentries.append(ET.tostring(e))

    for name in sorted(data.referenced_projects):
        e = self._Element_for_referenced_project(name)
        classpathentries.append(ET.tostring(e))

    for name in sorted(data.extra_jars):
        e = self._Element_for_extra_jar(mozpath.join(srcdir, name))
        classpathentries.append(ET.tostring(e))

    # Defines substituted into the preprocessed template files.
    defines = {}
    defines['IDE_OBJDIR'] = objdir
    defines['IDE_TOPOBJDIR'] = self.environment.topobjdir
    defines['IDE_SRCDIR'] = srcdir
    defines['IDE_TOPSRCDIR'] = self.environment.topsrcdir
    defines['IDE_PROJECT_NAME'] = data.name
    defines['IDE_PACKAGE_NAME'] = data.package_name
    defines['IDE_PROJECT_DIRECTORY'] = project_directory
    defines['IDE_RELSRCDIR'] = mozpath.relpath(srcdir,
                                               self.environment.topsrcdir)
    defines['IDE_CLASSPATH_ENTRIES'] = '\n'.join(
        '\t' + cpe for cpe in classpathentries)
    defines['IDE_RECURSIVE_MAKE_TARGETS'] = ' '.join(
        sorted(data.recursive_make_targets))
    # Like android.library=true
    defines['IDE_PROJECT_LIBRARY_SETTING'] = \
        'android.library=true' if data.is_library else ''
    # Like android.library.reference.1=FennecBrandingResources
    defines['IDE_PROJECT_LIBRARY_REFERENCES'] = '\n'.join(
        'android.library.reference.%s=%s' % (i + 1, ref)
        for i, ref in enumerate(sorted(data.included_projects)))

    if data.filtered_resources:
        filteredResources = self._Element_for_filtered_resources(
            data.filtered_resources)
        defines['IDE_PROJECT_FILTERED_RESOURCES'] = \
            pretty_print(filteredResources).strip()
    else:
        defines['IDE_PROJECT_FILTERED_RESOURCES'] = ''

    copier = FileCopier()
    finder = FileFinder(template_directory)
    # Both '**' and '.**' are needed to pick up dotfile templates.
    for input_filename, f in itertools.chain(finder.find('**'),
                                             finder.find('.**')):
        if input_filename == 'AndroidManifest.xml' and not data.is_library:
            # Main projects supply their own manifests.
            continue
        copier.add(input_filename, PreprocessedFile(
            mozpath.join(finder.base, input_filename),
            depfile_path=None,
            marker='#',
            defines=defines,
            extra_depends={mozpath.join(finder.base, input_filename)}))

    # When we re-create the build backend, we kill everything that was there.
    if os.path.isdir(project_directory):
        self.summary.updated_count += 1
    else:
        self.summary.created_count += 1

    copier.copy(project_directory, skip_if_older=False,
                remove_unaccounted=True)
def main():
    """Package a build: read the package manifest, format the chrome data
    (flat/jar/omni, with optional jar compression), sign shared libraries,
    set up jar preloading, copy everything to the destination, and write
    the 'precomplete' file used by the updater."""
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--disable-compression', action='store_false',
                        dest='compress', default=True,
                        help='Disable jar compression')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()
    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    # Pick the formatter matching the requested chrome packaging format.
    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, compress=args.compress,
                                 optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     compress=args.compress,
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    # Normalize RESPATH to a relative path (strip any leading slashes).
    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = mozpath.join(buildconfig.topobjdir, 'dist')

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
        )
        if args.js_binary:
            # Verify minified JS by AST comparison against the original.
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'js-compare-ast.js')
            ]
        finder = FileFinder(args.source, find_executables=True,
                            **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            # No manifest: package everything under bin/.
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            # Preprocess the removed-files source and ship it in the
            # package under <respath>/removed-files.
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac and buildconfig.substs.get('COMPILE_ENVIRONMENT'):
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase,
                                    buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination,
                                                       respath)))
def main():
    """Package a staged build into a destination directory.

    Parses the command line, selects a chrome packaging format
    (omni, jar or flat), feeds the staged source files through a
    manifest sink, optionally signs libraries and precompiles the
    startup cache, then copies everything to ``args.destination`` and
    generates a 'precomplete' file there.

    Side effects: writes the packaged tree and 'precomplete' file to
    the destination directory; may adjust ``launcher.tooldir``.
    """
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--unify', default='',
                        help='Base directory of another build to unify with')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        # NOTE(review): if errors.fatal does not raise (e.g. under
        # --ignore-errors), 'formatter' stays unbound below — confirm
        # fatal always aborts in this configuration.
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    # Normalize BINPATH to a relative path without leading slashes.
    binpath = ''
    if 'BINPATH' in defines:
        binpath = SimpleManifestSink.normalize_path(defines['BINPATH'])
    while binpath.startswith('/'):
        binpath = binpath[1:]

    if args.unify:
        def is_native(path):
            # A path is considered "native" when the build machine's
            # architecture name appears in one of its components.
            path = os.path.abspath(path)
            return platform.machine() in mozpack.path.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify],
                                         key=is_native, reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = buildconfig.substs['LIBXUL_DIST']

    with errors.accumulate():
        if args.unify:
            finder = UnifiedBuildFinder(FileFinder(args.source),
                                        FileFinder(args.unify),
                                        minify=args.minify)
        else:
            finder = FileFinder(args.source, minify=args.minify)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            # Use a context manager so the removed-files source handle is
            # closed deterministically (the original leaked it to the GC).
            with open(args.removals) as removals_fh:
                lines = [l.lstrip() for l in removals_fh.readlines()]
            removals_in = StringIO(''.join(lines))
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpack.path.join(binpath, 'removed-files'), removals)

        # shlibsign libraries
        if launcher.can_launch():
            for lib in SIGN_LIBS:
                libbase = mozpack.path.join(binpath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase,
                                    buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

        # Setup preloading
        if args.jarlog and os.path.exists(args.jarlog):
            from mozpack.mozjar import JarLog
            log = JarLog(args.jarlog)
            for p, f in copier:
                if not isinstance(f, Jarrer):
                    continue
                key = JarLog.canonicalize(os.path.join(args.destination, p))
                if key in log:
                    f.preload(log[key])

        # Fill startup cache on Windows and Linux only
        # (this currently causes build failure on BSD, so skip on that
        # platform)
        if sys.platform == 'win32' or sys.platform.startswith('linux'):
            if isinstance(formatter, OmniJarFormatter) and \
                    launcher.can_launch():
                if buildconfig.substs['LIBXUL_SDK']:
                    gre_path = mozpack.path.join(
                        buildconfig.substs['LIBXUL_DIST'], 'bin')
                else:
                    gre_path = None
                # For each chrome base, prefer the copy under bin/ when it
                # exists in the source directory.
                for base in sorted([[p for p in [mozpack.path.join('bin', b),
                                                 b]
                                     if os.path.exists(
                                         os.path.join(args.source, p))][0]
                                    for b in sink.packager.get_bases()]):
                    if not gre_path:
                        gre_path = base
                    base_path = sink.normalize_path(base)
                    if base_path in formatter.omnijars:
                        precompile_cache(formatter.omnijars[base_path],
                                         args.source, gre_path, base)

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination,
                                                       binpath)))
def test_remove_unaccounted_directory_symlinks(self):
    """Verify handling of stray directory symlinks in the destination.

    A directory symlink in the destination that is not in the way of
    copied content is kept or deleted depending on remove_unaccounted
    and remove_all_directory_symlinks.
    """
    if not self.symlink_supported:
        return

    dest = self.tmppath('dest')

    copier = FileCopier()
    copier.add('foo/bar/baz', GeneratedFile('foobarbaz'))

    os.makedirs(self.tmppath('dest/foo'))
    target = self.tmppath('dummy')
    os.mkdir(target)

    os.mkdir(self.tmppath('dest/zot'))
    sym = self.tmppath('dest/zot/zap')
    os.symlink(target, sym)

    # Case 1: remove_unaccounted off, remove_empty_directories on.
    # The symlink survives, and so does the directory containing it.
    res = copier.copy(dest,
                      remove_unaccounted=False,
                      remove_empty_directories=True,
                      remove_all_directory_symlinks=False)

    info = os.lstat(sym)
    self.assertTrue(stat.S_ISLNK(info.st_mode))
    self.assertFalse(stat.S_ISDIR(info.st_mode))

    self.assertEqual(self.all_files(dest), set(copier.paths()))
    self.assertEqual(self.all_dirs(dest), {'foo/bar'})

    self.assertEqual(res.removed_directories, set())
    self.assertEqual(len(res.updated_files), 1)

    # Case 2: remove_unaccounted on, remove_empty_directories off.
    # Only the symlink itself is removed; its parent directory stays.
    res = copier.copy(dest,
                      remove_unaccounted=True,
                      remove_empty_directories=False,
                      remove_all_directory_symlinks=False)

    info = os.lstat(self.tmppath('dest/zot'))
    self.assertFalse(stat.S_ISLNK(info.st_mode))
    self.assertTrue(stat.S_ISDIR(info.st_mode))

    self.assertEqual(res.removed_files, {sym})
    self.assertEqual(res.removed_directories, set())

    self.assertEqual(self.all_files(dest), set(copier.paths()))
    self.assertEqual(self.all_dirs(dest), {'foo/bar', 'zot'})

    # Case 3: both flags on. The symlink and its now-empty containing
    # directory are both removed.
    sym = self.tmppath('dest/zot/zap')
    os.symlink(target, sym)

    res = copier.copy(dest,
                      remove_unaccounted=True,
                      remove_empty_directories=True,
                      remove_all_directory_symlinks=False)

    self.assertEqual(res.removed_files, {sym})
    self.assertEqual(res.removed_directories, {self.tmppath('dest/zot')})

    self.assertEqual(self.all_files(dest), set(copier.paths()))
    self.assertEqual(self.all_dirs(dest), {'foo/bar'})