def test_minified_verify_failure(self):
    # When the verification command fails (exit status 1), minification
    # must be abandoned and the original content served unchanged.
    orig_f = GeneratedFile('\n'.join(self.orig_lines))
    min_f = MinifiedJavaScript(orig_f,
                               verify_command=self._verify_command('1'))
    mini_lines = min_f.open().readlines()
    self.assertEqual(mini_lines, orig_f.open().readlines())
def test_generated_file_open(self):
    '''
    Test whether GeneratedFile.open returns an appropriately reset file
    object.
    '''
    data = ''.join(samples)
    generated = GeneratedFile(data)
    # Each open() call must yield an independent handle positioned at the
    # start of the content, regardless of earlier partial reads.
    self.assertEqual(data[:42], generated.open().read(42))
    self.assertEqual(data, generated.open().read())
def test_generated_file(self):
    '''
    Check that GeneratedFile.copy yields the proper content in the
    destination file in all situations that trigger different code paths
    (see TestFile.test_file)
    '''
    dest = self.tmppath('dest')

    for content in samples:
        f = GeneratedFile(content)
        f.copy(dest)
        # Use a context manager so the destination handle is closed
        # promptly instead of leaking (avoids ResourceWarning on py3).
        with open(dest, 'rb') as fh:
            self.assertEqual(content, fh.read())
def test_minified_verify_failure(self):
    # A failing verification command must leave the original JS intact
    # and report warnings through the errors channel.
    orig_f = GeneratedFile('\n'.join(self.orig_lines))
    # Temporarily capture error output so the emitted warnings can be
    # inspected; restored to stderr below.
    errors.out = StringIO()
    min_f = MinifiedJavaScript(orig_f,
                               verify_command=self._verify_command('1'))
    mini_lines = min_f.open().readlines()
    output = errors.out.getvalue()
    errors.out = sys.stderr
    self.assertEqual(
        output,
        'Warning: JS minification verification failed for <unknown>:\n'
        'Warning: Error message\n')
    self.assertEqual(mini_lines, orig_f.open().readlines())
def test_minified_verify_success(self):
    '''A verification command exiting 0 lets the minified output through.'''
    original = GeneratedFile('\n'.join(self.orig_lines))
    minified = MinifiedJavaScript(
        original, verify_command=self._verify_command('0'))
    lines = minified.open().readlines()
    # Minification produced output, and it is shorter than the input.
    self.assertTrue(lines)
    self.assertTrue(len(lines) < len(self.orig_lines))
def test_remove_unaccounted_file_registry(self):
    """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""
    dest = self.tmppath('dest')

    copier = FileCopier()
    copier.add('foo/bar/baz', GeneratedFile(b'foobarbaz'))
    copier.add('foo/bar/qux', GeneratedFile(b'foobarqux'))
    copier.add('foo/hoge/fuga', GeneratedFile(b'foohogefuga'))
    copier.add('foo/toto/tata', GeneratedFile(b'footototata'))

    # Pre-populate the destination with files the copier knows nothing
    # about, both outside and inside directories it will use.
    os.makedirs(os.path.join(dest, 'bar'))
    with open(os.path.join(dest, 'bar', 'bar'), 'w') as fh:
        fh.write('barbar')
    os.makedirs(os.path.join(dest, 'foo', 'toto'))
    with open(os.path.join(dest, 'foo', 'toto', 'toto'), 'w') as fh:
        fh.write('foototototo')

    result = copier.copy(dest, remove_unaccounted=False)

    self.assertEqual(self.all_files(dest),
                     set(copier.paths()) | {'foo/toto/toto', 'bar/bar'})
    self.assertEqual(self.all_dirs(dest),
                     {'foo/bar', 'foo/hoge', 'foo/toto', 'bar'})

    copier2 = FileCopier()
    copier2.add('foo/hoge/fuga', GeneratedFile(b'foohogefuga'))

    # We expect only files copied from the first copier to be removed,
    # not the extra file that was there beforehand.
    result = copier2.copy(dest, remove_unaccounted=copier)

    self.assertEqual(self.all_files(dest),
                     set(copier2.paths()) | {'foo/toto/toto', 'bar/bar'})
    self.assertEqual(self.all_dirs(dest), {'foo/hoge', 'foo/toto', 'bar'})
    self.assertEqual(result.updated_files,
                     {self.tmppath('dest/foo/hoge/fuga')})
    self.assertEqual(result.existing_files, set())
    self.assertEqual(
        result.removed_files, {
            self.tmppath(p)
            for p in ('dest/foo/bar/baz', 'dest/foo/bar/qux',
                      'dest/foo/toto/tata')
        })
    self.assertEqual(result.removed_directories,
                     {self.tmppath('dest/foo/bar')})
def unify_file(self, path, file1, file2):
    '''
    Unify files taking Mozilla application special cases into account.
    Otherwise defer to UnifiedFinder.unify_file.
    '''
    basename = mozpath.basename(path)
    if basename == 'buildconfig.html':
        content1 = file1.open().readlines()
        content2 = file2.open().readlines()
        # Copy everything from the first file up to the end of its <body>,
        # insert a <hr> between the two files and copy the second file's
        # content beginning after its leading <h1>.
        return GeneratedFile(''.join(
            content1[:content1.index('</body>\n')] +
            ['<hr> </hr>\n'] +
            content2[content2.index('<h1>about:buildconfig</h1>\n') + 1:]
        ))
    elif basename == 'install.rdf':
        # install.rdf files often have em:targetPlatform (either as
        # attribute or as tag) that will differ between platforms. The
        # unified install.rdf should contain both em:targetPlatforms if
        # they exist, or strip them if only one file has a target platform.
        content1, content2 = (
            FIND_TARGET_PLATFORM_ATTR.sub(lambda m: \
                m.group('tag') +
                m.group('attrs') +
                m.group('otherattrs') +
                '<%stargetPlatform>%s</%stargetPlatform>' % \
                (m.group('ns') or "", m.group('platform'),
                 m.group('ns') or ""),
                f.open().read()
            ) for f in (file1, file2)
        )
        platform2 = FIND_TARGET_PLATFORM.search(content2)
        return GeneratedFile(
            FIND_TARGET_PLATFORM.sub(
                lambda m: m.group(0) + platform2.group(0)
                if platform2 else '',
                content1))
    elif path.endswith('.xpi'):
        # Nested XPI archives: unify their contents; if that produces no
        # new errors, either side is acceptable, so return the first.
        finder1 = JarFinder(os.path.join(self._finder1.base, path),
                            JarReader(fileobj=file1.open()))
        finder2 = JarFinder(os.path.join(self._finder2.base, path),
                            JarReader(fileobj=file2.open()))
        unifier = UnifiedFinder(finder1, finder2, sorted=self._sorted)
        err = errors.count
        all(unifier.find(''))
        if err == errors.count:
            return file1
        return None
    return UnifiedFinder.unify_file(self, path, file1, file2)
def test_file_copier(self):
    # Populate a copier with nested and top-level generated files, copy,
    # and verify both the destination layout and the result's reported
    # file sets.
    copier = FileCopier()
    copier.add('foo/bar', GeneratedFile('foobar'))
    copier.add('foo/qux', GeneratedFile('fooqux'))
    copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
    copier.add('bar', GeneratedFile('bar'))
    copier.add('qux/foo', GeneratedFile('quxfoo'))
    copier.add('qux/bar', GeneratedFile(''))
    result = copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     set(['foo/deep/nested/directory', 'qux']))
    self.assertEqual(
        result.updated_files,
        set(self.tmppath(p) for p in self.all_files(self.tmpdir)))
    self.assertEqual(result.existing_files, set())
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories, set())

    # Dropping the 'foo' subtree from the registry must remove its copies
    # (and now-empty directories) on the next copy.
    copier.remove('foo')
    copier.add('test', GeneratedFile('test'))
    result = copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
    self.assertEqual(
        result.removed_files,
        set(
            self.tmppath(p)
            for p in ('foo/bar', 'foo/qux',
                      'foo/deep/nested/directory/file')))
def test_file_copier(self):
    # Populate a copier with nested and top-level generated files, copy,
    # and verify both the destination layout and the result's reported
    # file sets.
    copier = FileCopier()
    copier.add("foo/bar", GeneratedFile(b"foobar"))
    copier.add("foo/qux", GeneratedFile(b"fooqux"))
    copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
    copier.add("bar", GeneratedFile(b"bar"))
    copier.add("qux/foo", GeneratedFile(b"quxfoo"))
    copier.add("qux/bar", GeneratedFile(b""))
    result = copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     set(["foo/deep/nested/directory", "qux"]))
    self.assertEqual(
        result.updated_files,
        set(self.tmppath(p) for p in self.all_files(self.tmpdir)),
    )
    self.assertEqual(result.existing_files, set())
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories, set())

    # Dropping the 'foo' subtree from the registry must remove its copies
    # on the next copy, and report them as removed.
    copier.remove("foo")
    copier.add("test", GeneratedFile(b"test"))
    result = copier.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
    self.assertEqual(
        result.removed_files,
        set(
            self.tmppath(p)
            for p in ("foo/bar", "foo/qux",
                      "foo/deep/nested/directory/file")),
    )
def get(self, path): f = super(PackagerFileFinder, self).get(path) # Normalize Info.plist files, and remove the MozillaDeveloper*Path # entries which are only needed on unpackaged builds. if mozpath.basename(path) == "Info.plist": info = plistlib.load(f.open(), dict_type=OrderedDict) info.pop("MozillaDeveloperObjPath", None) info.pop("MozillaDeveloperRepoPath", None) return GeneratedFile(plistlib.dumps(info, sort_keys=False)) return f
def precompile_cache(registry, source_path, gre_path, app_path):
    '''
    Create startup cache for the given application directory, using the
    given GRE path.
    - registry is a FileRegistry-like instance where to add the startup
      cache.
    - source_path is the base path of the package.
    - gre_path is the GRE path, relative to source_path.
    - app_path is the application path, relative to source_path.
    Startup cache for all resources under resource://app/ are generated,
    except when gre_path == app_path, in which case it's under
    resource://gre/.
    '''
    from tempfile import mkstemp
    source_path = os.path.abspath(source_path)
    if app_path != gre_path:
        resource = 'app'
    else:
        resource = 'gre'
    app_path = os.path.join(source_path, app_path)
    gre_path = os.path.join(source_path, gre_path)

    # Reserve a temporary .zip path, then delete it so xpcshell creates
    # the cache file itself.
    fd, cache = mkstemp('.zip')
    os.close(fd)
    os.remove(cache)

    try:
        extra_env = {'MOZ_STARTUP_CACHE': cache}
        # Keep sanitizer builds from failing the xpcshell run with
        # reports unrelated to cache generation.
        if buildconfig.substs.get('MOZ_TSAN'):
            extra_env['TSAN_OPTIONS'] = 'report_bugs=0'
        if buildconfig.substs.get('MOZ_ASAN'):
            extra_env['ASAN_OPTIONS'] = 'detect_leaks=0'
        if launcher.launch([
                'xpcshell', '-g', gre_path, '-a', app_path, '-f',
                os.path.join(os.path.dirname(__file__),
                             'precompile_cache.js'), '-e',
                'precompile_startupcache("resource://%s/");' % resource
        ],
                           extra_linker_path=gre_path,
                           extra_env=extra_env):
            errors.fatal('Error while running startup cache precompilation')
            return
        from mozpack.mozjar import JarReader
        jar = JarReader(cache)
        resource = '/resource/%s/' % resource
        for f in jar:
            if resource in f.filename:
                # Only add cache entries whose underlying resource is
                # actually part of the package.
                path = f.filename[f.filename.index(resource) +
                                  len(resource):]
                if registry.contains(path):
                    registry.add(f.filename, GeneratedFile(f.read()))
        jar.close()
    finally:
        # Always clean up the temporary cache file.
        if os.path.exists(cache):
            os.remove(cache)
def stream_context_tar(topsrcdir, context_dir, out_file, image_name, args):
    """Like create_context_tar, but streams the tar file to the
    `out_file` file object."""
    archive_files = {}
    content = []

    context_dir = os.path.join(topsrcdir, context_dir)

    # Collect every file inside the context directory itself, keyed by
    # its path relative to the context directory.
    for root, dirs, files in os.walk(context_dir):
        for f in files:
            source_path = os.path.join(root, f)
            archive_path = source_path[len(context_dir) + 1:]
            archive_files[archive_path] = source_path

    # Parse Dockerfile for special syntax of extra files to include.
    with open(os.path.join(context_dir, "Dockerfile"), "r") as fh:
        for line in fh:
            content.append(line)

            if not line.startswith("# %include"):
                continue

            p = line[len("# %include "):].strip()
            if os.path.isabs(p):
                raise Exception("extra include path cannot be absolute: %s" %
                                p)

            fs_path = os.path.normpath(os.path.join(topsrcdir, p))
            # Check for filesystem traversal exploits.
            if not fs_path.startswith(topsrcdir):
                raise Exception("extra include path outside topsrcdir: %s" %
                                p)

            if not os.path.exists(fs_path):
                raise Exception("extra include path does not exist: %s" % p)

            if os.path.isdir(fs_path):
                # Include the whole directory tree under topsrcdir/<p>/.
                for root, dirs, files in os.walk(fs_path):
                    for f in files:
                        source_path = os.path.join(root, f)
                        rel = source_path[len(fs_path) + 1:]
                        archive_path = os.path.join("topsrcdir", p, rel)
                        archive_files[archive_path] = source_path
            else:
                archive_path = os.path.join("topsrcdir", p)
                archive_files[archive_path] = fs_path

    archive_files["Dockerfile"] = GeneratedFile(
        "".join(content).encode("utf-8"))

    # Hash the stream while writing so the digest can be returned.
    writer = HashingWriter(out_file)
    create_tar_gz_from_files(writer, archive_files, f"{image_name}.tar")
    return writer.hexdigest()
def add(self, path, file):
    # Record dictionaries as they are added so built_in_addons.json can
    # later be patched with the full list.
    if path.endswith('.dic'):
        base, relpath = self._get_base(path)
        if relpath.startswith('dictionaries/'):
            root, ext = mozpath.splitext(mozpath.basename(path))
            self._dictionaries[root] = path
    elif path.endswith('/built_in_addons.json'):
        data = json.loads(six.ensure_text(file.open().read()))
        data['dictionaries'] = self._dictionaries
        # The GeneratedFile content is only really generated after
        # all calls to formatter.add.
        file = GeneratedFile(lambda: json.dumps(data))
    super(L10NRepackFormatterMixin, self).add(path, file)
def unify_file(self, path, file1, file2):
    """
    Unify files taking Mozilla application special cases into account.
    Otherwise defer to UnifiedFinder.unify_file.
    """
    basename = mozpath.basename(path)
    if file1 and file2 and basename == "buildconfig.html":
        content1 = file1.open().readlines()
        content2 = file2.open().readlines()
        # Copy everything from the first file up to the end of its <div>,
        # insert a <hr> between the two files and copy the second file's
        # content beginning after its leading <h1>.
        return GeneratedFile(b"".join(
            content1[:content1.index(b"      </div>\n")] +
            [b"      <hr> </hr>\n"] +
            content2[content2.index(b"      <h1>Build Configuration</h1>\n"
                                    ) + 1:]))
    elif file1 and file2 and basename == "install.rdf":
        # install.rdf files often have em:targetPlatform (either as
        # attribute or as tag) that will differ between platforms. The
        # unified install.rdf should contain both em:targetPlatforms if
        # they exist, or strip them if only one file has a target platform.
        content1, content2 = (FIND_TARGET_PLATFORM_ATTR.sub(
            lambda m: m.group("tag") + m.group("attrs") + m.group(
                "otherattrs") + "<%stargetPlatform>%s</%stargetPlatform>" %
            (m.group("ns") or "", m.group("platform"), m.group("ns") or ""
             ),
            f.open().read().decode("utf-8"),
        ) for f in (file1, file2))
        platform2 = FIND_TARGET_PLATFORM.search(content2)
        return GeneratedFile(
            FIND_TARGET_PLATFORM.sub(
                lambda m: m.group(0) + platform2.group(0)
                if platform2 else "",
                content1,
            ))
    return UnifiedFinder.unify_file(self, path, file1, file2)
def _create_files(self, root):
    """Create 20 test files: 10 written on disk under `root` and 10 as
    GeneratedFile instances.

    Returns a dict mapping bytes filenames to either the on-disk path
    or the GeneratedFile.
    """
    files = {}
    for i in range(10):
        p = os.path.join(root, b'file%02d' % i)
        with open(p, 'wb') as fh:
            fh.write(b'file%02d' % i)

        # Need to set permissions or umask may influence testing.
        os.chmod(p, MODE_STANDARD)
        files[b'file%02d' % i] = p

    for i in range(10):
        # Use bytes content, consistent with the on-disk files above
        # (the original passed a str here while everything else in this
        # helper uses bytes).
        files[b'file%02d' % (i + 10)] = GeneratedFile(b'file%02d' %
                                                      (i + 10))

    return files
def package_coverage_data(root, output_file):
    """Package all *.gcno files under `root` into a jar at `output_file`,
    together with a JSON map of files linked via the dist/include
    install manifest."""
    finder = FileFinder(root)
    jarrer = Jarrer()
    for p, f in finder.find("**/*.gcno"):
        jarrer.add(p, f)

    dist_include_manifest = mozpath.join(buildconfig.topobjdir,
                                         "_build_manifests", "install",
                                         "dist_include")
    linked_files = describe_install_manifest(dist_include_manifest,
                                             "dist/include")
    # sort_keys=True keeps the generated JSON deterministic.
    mapping_file = GeneratedFile(json.dumps(linked_files, sort_keys=True))
    jarrer.add("linked-files-map.json", mapping_file)
    jarrer.copy(output_file)
def test_minified_properties(self):
    """MinifiedProperties must strip comment lines while keeping the
    remaining content, both when read via open() and via copy()."""
    propLines = [
        "# Comments are removed",
        "foo = bar",
        "",
        "# Another comment",
    ]
    prop = GeneratedFile("\n".join(propLines))
    self.assertEqual(
        MinifiedProperties(prop).open().readlines(), [b"foo = bar\n", b"\n"]
    )

    # Use context managers so the file handles are closed deterministically
    # instead of leaking (the original left both handles open).
    with open(self.tmppath("prop"), "w") as fh:
        fh.write("\n".join(propLines))
    MinifiedProperties(File(self.tmppath("prop"))).copy(self.tmppath("prop2"))
    with open(self.tmppath("prop2")) as fh:
        self.assertEqual(fh.readlines(), ["foo = bar\n", "\n"])
def _add(self, interface):
    # Replace the uuid of the given interface, in the file that declares
    # it, with a freshly generated one; then do the same recursively for
    # every interface deriving from it.
    from mozpack.files import GeneratedFile
    from uuid import uuid4
    path = interface.filename
    if not self.registry.contains(path):
        self.registry.add(path, GeneratedFile(open(path).read()))
    content = self.registry[path].content
    # Splice the new uuid over the old one at its recorded position.
    content = content[:interface.uuid_pos] + str(uuid4()) + \
        content[interface.uuid_pos + len(interface.uuid):]
    self.registry[path].content = content

    # Recurse through all the interfaces deriving from this one
    for derived in self.interfaces.get_by_base(interface.name):
        self._add(derived)
def precompile_cache(formatter, source_path, gre_path, app_path):
    '''
    Create startup cache for the given application directory, using the
    given GRE path.
    - formatter is a Formatter instance where to add the startup cache.
    - source_path is the base path of the package.
    - gre_path is the GRE path, relative to source_path.
    - app_path is the application path, relative to source_path.
    Startup cache for all resources under resource://app/ are generated,
    except when gre_path == app_path, in which case it's under
    resource://gre/.
    '''
    from tempfile import mkstemp
    source_path = os.path.abspath(source_path)
    if app_path != gre_path:
        resource = 'app'
    else:
        resource = 'gre'
    app_path = os.path.join(source_path, app_path)
    gre_path = os.path.join(source_path, gre_path)

    # Reserve a temporary .zip path, then delete it so xpcshell creates
    # the cache file itself.
    fd, cache = mkstemp('.zip')
    if os.name == 'os2':
        # NS_NewLocalFile is strict about slashes
        cache = cache.replace('/', '\\')
    os.close(fd)
    os.remove(cache)

    try:
        if launcher.launch(['xpcshell', '-g', gre_path,
                            '-a', app_path,
                            '-f', os.path.join(os.path.dirname(__file__),
                                               'precompile_cache.js'),
                            '-e',
                            'precompile_startupcache("resource://%s/");'
                            % resource],
                           extra_linker_path=gre_path,
                           extra_env={'MOZ_STARTUP_CACHE': cache}):
            errors.fatal('Error while running startup cache precompilation')
            return
        from mozpack.mozjar import JarReader
        jar = JarReader(cache)
        resource = '/resource/%s/' % resource
        for f in jar:
            if resource in f.filename:
                # Only add cache entries whose underlying resource is
                # actually part of the package.
                path = f.filename[f.filename.index(resource) +
                                  len(resource):]
                if formatter.contains(path):
                    formatter.add(f.filename, GeneratedFile(f.read()))
        jar.close()
    finally:
        # Always clean up the temporary cache file.
        if os.path.exists(cache):
            os.remove(cache)
def test_simple_manifest_parser(self):
    # The sink should follow the root chrome.manifest to the
    # sub-manifest, emit its entries, then add the matched files.
    formatter = MockFormatter()
    foobar = GeneratedFile('foobar')
    foobaz = GeneratedFile('foobaz')
    fooqux = GeneratedFile('fooqux')
    finder = MockFinder({
        'bin/foo/bar': foobar,
        'bin/foo/baz': foobaz,
        'bin/foo/qux': fooqux,
        'bin/foo/chrome.manifest': GeneratedFile('resource foo foo/'),
        'bin/chrome.manifest':
        GeneratedFile('manifest foo/chrome.manifest'),
    })
    parser = SimpleManifestSink(finder, formatter)
    parser.add('section0', 'bin/foo/b*')
    parser.add('section1', 'bin/foo/qux')
    parser.add('section1', 'bin/foo/chrome.manifest')
    # Patterns matching nothing are errors.
    self.assertRaises(ErrorMessage, parser.add, 'section1', 'bin/bar')

    # Nothing reaches the formatter until close().
    self.assertEqual(formatter.log, [])
    parser.close()
    self.assertEqual(formatter.log, [
        (('foo/chrome.manifest', 1),
         'add_manifest', ManifestResource('foo', 'foo', 'foo/')),
        (None, 'add', 'foo/bar', foobar),
        (None, 'add', 'foo/baz', foobaz),
        (None, 'add', 'foo/qux', fooqux),
    ])

    self.assertEqual(finder.log, [
        'bin/foo/b*',
        'bin/foo/qux',
        'bin/foo/chrome.manifest',
        'bin/bar',
        'bin/chrome.manifest'
    ])
def test_no_remove(self):
    '''copy(remove_unaccounted=False) keeps unaccounted files but still
    prunes empty directories.'''
    fc = FileCopier()
    fc.add('foo', GeneratedFile('foo'))

    # An unaccounted file and an empty directory in the destination.
    with open(self.tmppath('bar'), 'a'):
        pass
    os.mkdir(self.tmppath('emptydir'))

    res = fc.copy(self.tmpdir, remove_unaccounted=False)

    self.assertEqual(self.all_files(self.tmpdir), set(['foo', 'bar']))
    self.assertEqual(res.removed_files, set())
    self.assertEqual(res.removed_directories,
                     set([self.tmppath('emptydir')]))
def test_minified_properties(self):
    # MinifiedProperties drops comment lines but keeps blank lines and
    # property definitions, both via open() and via copy().
    propLines = [
        '# Comments are removed',
        'foo = bar',
        '',
        '# Another comment',
    ]
    prop = GeneratedFile('\n'.join(propLines))
    self.assertEqual(MinifiedProperties(prop).open().readlines(),
                     ['foo = bar\n', '\n'])
    open(self.tmppath('prop'), 'wb').write('\n'.join(propLines))
    MinifiedProperties(File(self.tmppath('prop'))) \
        .copy(self.tmppath('prop2'))
    self.assertEqual(open(self.tmppath('prop2')).readlines(),
                     ['foo = bar\n', '\n'])
def do_test_file_registry(self, registry):
    # Exercise the basic registry contract: add, duplicate/conflict
    # errors, removal (including wildcards), iteration and len.
    self.registry = registry

    self.registry.add("foo", GeneratedFile(b"foo"))
    bar = GeneratedFile(b"bar")
    self.registry.add("bar", bar)
    self.assertEqual(self.registry.paths(), ["foo", "bar"])
    self.assertEqual(self.registry["bar"], bar)

    # Re-adding an existing path, removing an absent one, or adding a
    # path nested under an existing file all raise.
    self.assertRaises(ErrorMessage, self.registry.add, "foo",
                      GeneratedFile(b"foo2"))
    self.assertRaises(ErrorMessage, self.registry.remove, "qux")
    self.assertRaises(ErrorMessage, self.registry.add, "foo/bar",
                      GeneratedFile(b"foobar"))
    self.assertRaises(ErrorMessage, self.registry.add, "foo/bar/baz",
                      GeneratedFile(b"foobar"))

    self.assertEqual(self.registry.paths(), ["foo", "bar"])

    self.registry.remove("foo")
    self.assertEqual(self.registry.paths(), ["bar"])
    self.registry.remove("bar")
    self.assertEqual(self.registry.paths(), [])

    # Delegate wildcard-matching coverage to the shared match test.
    self.prepare_match_test()
    self.do_match_test()
    self.assertTrue(self.checked)
    self.assertEqual(
        self.registry.paths(),
        [
            "bar",
            "foo/bar",
            "foo/baz",
            "foo/qux/1",
            "foo/qux/bar",
            "foo/qux/2/test",
            "foo/qux/2/test2",
        ],
    )

    self.registry.remove("foo/qux")
    self.assertEqual(self.registry.paths(), ["bar", "foo/bar", "foo/baz"])
    self.registry.add("foo/qux", GeneratedFile(b"fooqux"))
    self.assertEqual(self.registry.paths(),
                     ["bar", "foo/bar", "foo/baz", "foo/qux"])
    self.registry.remove("foo/b*")
    self.assertEqual(self.registry.paths(), ["bar", "foo/qux"])

    self.assertEqual([f for f, c in self.registry], ["bar", "foo/qux"])
    self.assertEqual(len(self.registry), 2)

    # "Hidden" files are still registrable and findable.
    self.add("foo/.foo")
    self.assertTrue(self.registry.contains("foo/.foo"))
def is_resource(base, path):
    # Feed a single empty file into a fresh OmniJarFormatter and report
    # whether it ended up packed inside omni.foo (a resource) or left
    # outside (not a resource).
    full_path = mozpath.join(base, path)
    registry = FileRegistry()
    formatter = OmniJarFormatter(registry, 'omni.foo', non_resources=[
        'defaults/messenger/mailViews.dat',
        'defaults/foo/*',
        '*/dummy',
    ])
    formatter.add_base('')
    formatter.add_base('app')
    formatter.add(full_path, GeneratedFile(''))
    if formatter.copier.contains(full_path):
        return False
    omni_path = mozpath.join(base, 'omni.foo')
    self.assertTrue(formatter.copier.contains(omni_path))
    self.assertTrue(formatter.copier[omni_path].contains(path))
    return True
def test_file_registry(self):
    # Exercise the basic registry contract: add, duplicate/conflict
    # errors, removal (including wildcards), iteration and len.
    self.registry = FileRegistry()

    self.registry.add('foo', GeneratedFile('foo'))
    bar = GeneratedFile('bar')
    self.registry.add('bar', bar)
    self.assertEqual(self.registry.paths(), ['foo', 'bar'])
    self.assertEqual(self.registry['bar'], bar)

    # Re-adding an existing path, removing an absent one, or adding a
    # path nested under an existing file all raise.
    self.assertRaises(ErrorMessage, self.registry.add, 'foo',
                      GeneratedFile('foo2'))
    self.assertRaises(ErrorMessage, self.registry.remove, 'qux')
    self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar',
                      GeneratedFile('foobar'))
    self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar/baz',
                      GeneratedFile('foobar'))

    self.assertEqual(self.registry.paths(), ['foo', 'bar'])

    self.registry.remove('foo')
    self.assertEqual(self.registry.paths(), ['bar'])
    self.registry.remove('bar')
    self.assertEqual(self.registry.paths(), [])

    # Delegate wildcard-matching coverage to the shared match test.
    self.prepare_match_test()
    self.do_match_test()
    self.assertTrue(self.checked)
    self.assertEqual(self.registry.paths(), [
        'bar',
        'foo/bar',
        'foo/baz',
        'foo/qux/1',
        'foo/qux/bar',
        'foo/qux/2/test',
        'foo/qux/2/test2',
    ])

    self.registry.remove('foo/qux')
    self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz'])
    self.registry.add('foo/qux', GeneratedFile('fooqux'))
    self.assertEqual(self.registry.paths(),
                     ['bar', 'foo/bar', 'foo/baz', 'foo/qux'])
    self.registry.remove('foo/b*')
    self.assertEqual(self.registry.paths(), ['bar', 'foo/qux'])

    self.assertEqual([f for f, c in self.registry], ['bar', 'foo/qux'])
    self.assertEqual(len(self.registry), 2)

    # "Hidden" files are still registrable and findable.
    self.add('foo/.foo')
    self.assertTrue(self.registry.contains('foo/.foo'))
def package_coverage_data(root, output_file):
    """Package all *.gcno files under `root` into a jar at `output_file`,
    together with a JSON map of files linked via the dist/include
    install manifest."""
    # XXX JarWriter doesn't support unicode strings, see bug 1056859
    if isinstance(root, unicode):
        root = root.encode('utf-8')

    finder = FileFinder(root)
    jarrer = Jarrer()
    for p, f in finder.find("**/*.gcno"):
        jarrer.add(p, f)

    dist_include_manifest = mozpath.join(buildconfig.topobjdir,
                                         '_build_manifests',
                                         'install',
                                         'dist_include')
    linked_files = describe_install_manifest(dist_include_manifest,
                                             'dist/include')
    # sort_keys=True keeps the generated JSON deterministic.
    mapping_file = GeneratedFile(json.dumps(linked_files, sort_keys=True))
    jarrer.add('linked-files-map.json', mapping_file)
    jarrer.copy(output_file)
def test_generated_file_no_write(self):
    '''
    Test various conditions where GeneratedFile.copy is expected not to
    write in the destination file.
    '''
    dest = self.tmppath('dest')

    # Initial copy
    f = GeneratedFile('test')
    f.copy(dest)

    # Ensure subsequent copies won't trigger writes
    f.copy(DestNoWrite(dest))
    self.assertEqual('test', open(dest, 'rb').read())

    # When using a new instance with the same content, no copy should occur
    f = GeneratedFile('test')
    f.copy(DestNoWrite(dest))
    self.assertEqual('test', open(dest, 'rb').read())

    # Double check that under conditions where a copy occurs, we would get
    # an exception.
    f = GeneratedFile('fooo')
    self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
def test_permissions(self):
    """Ensure files without write permission can be deleted."""
    with open(self.tmppath("dummy"), "a"):
        pass

    p = self.tmppath("no_perms")
    with open(p, "a"):
        pass

    # Make file and directory unwritable. Reminder: making a directory
    # unwritable prevents modifications (including deletes) from the list
    # of files in that directory.
    os.chmod(p, 0o400)
    os.chmod(self.tmpdir, 0o400)

    copier = FileCopier()
    copier.add("dummy", GeneratedFile(b"content"))
    result = copier.copy(self.tmpdir)
    # The unaccounted read-only file must still have been removed.
    self.assertEqual(result.removed_files_count, 1)
    self.assertFalse(os.path.exists(p))
def is_resource(base, path):
    # Feed a single empty file into a fresh OmniJarFormatter and report
    # whether it ended up packed inside omni.foo (a resource) or left
    # outside (not a resource).
    joined = mozpath.join(base, path)
    registry = FileRegistry()
    formatter = OmniJarFormatter(
        registry,
        "omni.foo",
        non_resources=[
            "defaults/messenger/mailViews.dat",
            "defaults/foo/*",
            "*/dummy",
        ],
    )
    formatter.add_base("")
    formatter.add_base("app")
    formatter.add(joined, GeneratedFile(b""))
    if formatter.copier.contains(joined):
        return False
    omni = mozpath.join(base, "omni.foo")
    self.assertTrue(formatter.copier.contains(omni))
    self.assertTrue(formatter.copier[omni].contains(path))
    return True
def test_file_copier(self):
    '''Copy a mix of nested and top-level files, then drop a subtree and
    verify stale copies are removed on the next copy.'''
    fc = FileCopier()
    for path, data in [
        ('foo/bar', 'foobar'),
        ('foo/qux', 'fooqux'),
        ('foo/deep/nested/directory/file', 'fooz'),
        ('bar', 'bar'),
        ('qux/foo', 'quxfoo'),
        ('qux/bar', ''),
    ]:
        fc.add(path, GeneratedFile(data))
    fc.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(fc.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir),
                     set(['foo/deep/nested/directory', 'qux']))

    # Drop the whole 'foo' subtree and add a new file; the destination
    # must track the registry.
    fc.remove('foo')
    fc.add('test', GeneratedFile('test'))
    fc.copy(self.tmpdir)
    self.assertEqual(self.all_files(self.tmpdir), set(fc.paths()))
    self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
def test_jarrer(self):
    # Build a jar, then exercise remove/replace/no-op copy cycles and
    # preloading, checking the jar contents after each copy.
    copier = Jarrer()
    copier.add("foo/bar", GeneratedFile(b"foobar"))
    copier.add("foo/qux", GeneratedFile(b"fooqux"))
    copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
    copier.add("bar", GeneratedFile(b"bar"))
    copier.add("qux/foo", GeneratedFile(b"quxfoo"))
    copier.add("qux/bar", GeneratedFile(b""))

    dest = MockDest()
    copier.copy(dest)
    self.check_jar(dest, copier)

    copier.remove("foo")
    copier.add("test", GeneratedFile(b"test"))
    copier.copy(dest)
    self.check_jar(dest, copier)

    copier.remove("test")
    copier.add("test", GeneratedFile(b"replaced-content"))
    copier.copy(dest)
    self.check_jar(dest, copier)

    # Copying again with no changes must still produce a valid jar.
    copier.copy(dest)
    self.check_jar(dest, copier)

    preloaded = ["qux/bar", "bar"]
    copier.preload(preloaded)
    copier.copy(dest)

    dest.seek(0)
    jar = JarReader(fileobj=dest)
    # Preloaded entries come first, in the requested order, followed by
    # the remaining paths.
    self.assertEqual(
        [f.filename for f in jar],
        preloaded + [p for p in copier.paths() if p not in preloaded],
    )
    self.assertEqual(jar.last_preloaded, preloaded[-1])
def test_no_remove(self):
    # copy(remove_unaccounted=False) must keep unaccounted files and
    # populated directories, but still prune empty directories.
    copier = FileCopier()
    copier.add("foo", GeneratedFile(b"foo"))

    with open(self.tmppath("bar"), "a"):
        pass

    os.mkdir(self.tmppath("emptydir"))
    d = self.tmppath("populateddir")
    os.mkdir(d)

    with open(self.tmppath("populateddir/foo"), "a"):
        pass

    result = copier.copy(self.tmpdir, remove_unaccounted=False)

    self.assertEqual(self.all_files(self.tmpdir),
                     set(["foo", "bar", "populateddir/foo"]))
    self.assertEqual(self.all_dirs(self.tmpdir), set(["populateddir"]))
    self.assertEqual(result.removed_files, set())
    self.assertEqual(result.removed_directories,
                     set([self.tmppath("emptydir")]))
def test_simple_manifest_parser(self):
    # Files are routed through components; a component with a destdir
    # gets its files remapped under that directory.
    formatter = MockFormatter()
    foobar = GeneratedFile('foobar')
    foobaz = GeneratedFile('foobaz')
    fooqux = GeneratedFile('fooqux')
    foozot = GeneratedFile('foozot')
    finder = MockFinder({
        'bin/foo/bar': foobar,
        'bin/foo/baz': foobaz,
        'bin/foo/qux': fooqux,
        'bin/foo/zot': foozot,
        'bin/foo/chrome.manifest': GeneratedFile('resource foo foo/'),
        'bin/chrome.manifest':
        GeneratedFile('manifest foo/chrome.manifest'),
    })
    parser = SimpleManifestSink(finder, formatter)
    component0 = Component('component0')
    component1 = Component('component1')
    component2 = Component('component2', destdir='destdir')
    parser.add(component0, 'bin/foo/b*')
    parser.add(component1, 'bin/foo/qux')
    parser.add(component1, 'bin/foo/chrome.manifest')
    parser.add(component2, 'bin/foo/zot')
    # Patterns matching nothing are errors.
    self.assertRaises(ErrorMessage, parser.add, 'component1', 'bin/bar')

    # Nothing reaches the formatter until close().
    self.assertEqual(formatter.log, [])
    parser.close()
    self.assertEqual(formatter.log, [
        (None, 'add_base', '', False),
        (('foo/chrome.manifest', 1),
         'add_manifest', ManifestResource('foo', 'foo', 'foo/')),
        (None, 'add', 'foo/bar', foobar),
        (None, 'add', 'foo/baz', foobaz),
        (None, 'add', 'foo/qux', fooqux),
        (None, 'add', 'destdir/foo/zot', foozot),
    ])

    self.assertEqual(finder.log, [
        'bin/foo/b*',
        'bin/foo/qux',
        'bin/foo/chrome.manifest',
        'bin/foo/zot',
        'bin/bar',
        'bin/chrome.manifest'
    ])
def __init__(self, copier):
    # Keep a reference to the copier; start with empty content, which is
    # filled in later from the copier's registered files.
    self.copier = copier
    GeneratedFile.__init__(self, '')
def test_generated_file_function(self):
    """
    Test GeneratedFile behavior with functions.
    """
    dest = self.tmppath("dest")
    data = {
        "num_calls": 0,
    }

    def content():
        # Count invocations so laziness and caching can be asserted.
        data["num_calls"] += 1
        return b"content"

    f = GeneratedFile(content)
    # The content function must not run until the content is needed.
    self.assertEqual(data["num_calls"], 0)
    f.copy(dest)
    self.assertEqual(data["num_calls"], 1)
    self.assertEqual(b"content", open(dest, "rb").read())
    self.assertEqual(b"content", f.open().read())
    self.assertEqual(b"content", f.read())
    self.assertEqual(len(b"content"), f.size())
    # Repeated reads must reuse the cached result, not call again.
    self.assertEqual(data["num_calls"], 1)

    # Assigning raw bytes bypasses the function entirely.
    f.content = b"modified"
    f.copy(dest)
    self.assertEqual(data["num_calls"], 1)
    self.assertEqual(b"modified", open(dest, "rb").read())
    self.assertEqual(b"modified", f.open().read())
    self.assertEqual(b"modified", f.read())
    self.assertEqual(len(b"modified"), f.size())

    # Assigning a function again restores lazy evaluation.
    f.content = content
    self.assertEqual(data["num_calls"], 1)
    self.assertEqual(b"content", f.read())
    self.assertEqual(data["num_calls"], 2)
def test_generated_file_no_write(self):
    """
    Verify GeneratedFile.copy skips writing when the destination already
    holds identical content.
    """
    dest = self.tmppath("dest")

    def read_dest():
        # Current on-disk content of the destination.
        with open(dest, "rb") as fh:
            return fh.read()

    # First copy populates the destination.
    gf = GeneratedFile(b"test")
    gf.copy(dest)

    # A second copy of identical content must not write again.
    gf.copy(DestNoWrite(dest))
    self.assertEqual(b"test", read_dest())

    # Same content from a brand-new instance: still no write.
    gf = GeneratedFile(b"test")
    gf.copy(DestNoWrite(dest))
    self.assertEqual(b"test", read_dest())

    # Different content would trigger a write, which DestNoWrite rejects.
    gf = GeneratedFile(b"fooo")
    self.assertRaises(RuntimeError, gf.copy, DestNoWrite(dest))
def __init__(self, path, content):
    # A GeneratedFile that additionally remembers the path it is
    # associated with.
    GeneratedFile.__init__(self, content)
    self.path = path