def test_diff_not_default(self):
    """Diffs are not produced by default."""
    faw = FileAvoidWrite('doesnotexist')
    faw.write('dummy')
    faw.close()
    self.assertIsNone(faw.diff)
def test_diff_not_default(self):
    """Diffs are not produced by default."""
    with MockedOpen({'file': 'old'}):
        faw = FileAvoidWrite('file')
        faw.write('dummy')
        faw.close()
        self.assertIsNone(faw.diff)
def test_write_unicode(tmp_path):
    # Unicode grinning face :D
    binary_emoji = b"\xf0\x9f\x98\x80"

    file = tmp_path / "file.dat"
    faw = FileAvoidWrite(str(file))
    faw.write(binary_emoji)
    faw.close()
def test_diff_not_default(self):
    """Diffs are not produced by default."""
    with MockedOpen({"file": "old"}):
        faw = FileAvoidWrite("file")
        faw.write("dummy")
        faw.close()
        self.assertIsNone(faw.diff)
def copy(self, dest, skip_if_older=True):
    '''
    Invokes the preprocessor to create the destination file.
    '''
    if isinstance(dest, six.string_types):
        dest = Dest(dest)
    else:
        assert isinstance(dest, Dest)

    # We have to account for the case where the destination exists and is a
    # symlink to something. Since we know the preprocessor is certainly not
    # going to create a symlink, we can just remove the existing one. If the
    # destination is not a symlink, we leave it alone, since we're going to
    # overwrite its contents anyway.
    # If symlinks aren't supported at all, we can skip this step.
    # See comment in AbsoluteSymlinkFile about Windows.
    if hasattr(os, 'symlink') and platform.system() != 'Windows':
        if os.path.islink(dest.path):
            os.remove(dest.path)

    pp_deps = set(self.extra_depends)

    # If a dependency file was specified, and it exists, add any
    # dependencies from that file to our list.
    if self.depfile and os.path.exists(self.depfile):
        target = mozpath.normpath(dest.name)
        with _open(self.depfile, 'rt') as fileobj:
            for rule in makeutil.read_dep_makefile(fileobj):
                if target in rule.targets():
                    pp_deps.update(rule.dependencies())

    skip = False
    if dest.exists() and skip_if_older:
        # If a dependency file was specified, and it doesn't exist,
        # assume that the preprocessor needs to be rerun. That will
        # regenerate the dependency file.
        if self.depfile and not os.path.exists(self.depfile):
            skip = False
        else:
            skip = not BaseFile.any_newer(dest.path, pp_deps)

    if skip:
        return False

    deps_out = None
    if self.depfile:
        deps_out = FileAvoidWrite(self.depfile)
    pp = Preprocessor(defines=self.defines, marker=self.marker)
    pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

    with _open(self.path, 'rU') as input:
        pp.processFile(input=input, output=dest, depfile=deps_out)

    dest.close()
    if self.depfile:
        deps_out.close()

    return True
def copy(self, dest, skip_if_older=True):
    '''
    Invokes the preprocessor to create the destination file.
    '''
    if isinstance(dest, basestring):
        dest = Dest(dest)
    else:
        assert isinstance(dest, Dest)

    # We have to account for the case where the destination exists and is a
    # symlink to something. Since we know the preprocessor is certainly not
    # going to create a symlink, we can just remove the existing one. If the
    # destination is not a symlink, we leave it alone, since we're going to
    # overwrite its contents anyway.
    # If symlinks aren't supported at all, we can skip this step.
    if hasattr(os, 'symlink'):
        if os.path.islink(dest.path):
            os.remove(dest.path)

    pp_deps = set(self.extra_depends)

    # If a dependency file was specified, and it exists, add any
    # dependencies from that file to our list.
    if self.depfile and os.path.exists(self.depfile):
        target = mozpath.normpath(dest.name)
        with open(self.depfile, 'rb') as fileobj:
            for rule in makeutil.read_dep_makefile(fileobj):
                if target in rule.targets():
                    pp_deps.update(rule.dependencies())

    skip = False
    if dest.exists() and skip_if_older:
        # If a dependency file was specified, and it doesn't exist,
        # assume that the preprocessor needs to be rerun. That will
        # regenerate the dependency file.
        if self.depfile and not os.path.exists(self.depfile):
            skip = False
        else:
            skip = not BaseFile.any_newer(dest.path, pp_deps)

    if skip:
        return False

    deps_out = None
    if self.depfile:
        deps_out = FileAvoidWrite(self.depfile)
    pp = Preprocessor(defines=self.defines, marker=self.marker)
    pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

    with open(self.path, 'rU') as input:
        pp.processFile(input=input, output=dest, depfile=deps_out)

    dest.close()
    if self.depfile:
        deps_out.close()

    return True
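Both revisions of copy() above gate the skip decision on a make-style dependency file. A minimal standalone sketch of that depfile check, assuming the mozbuild makeutil API exactly as used above (the file names in the comment are invented):

# A make-style depfile maps the generated target to its inputs, e.g. (made-up names):
#   out.h: in.h.in defs.inc
# copy() collects the dependencies of whichever rule targets the destination,
# then lets BaseFile.any_newer() decide whether regeneration can be skipped.
import mozpack.path as mozpath
from mozbuild import makeutil

def deps_for_target(depfile_path, dest_name):
    deps = set()
    with open(depfile_path, 'rt') as fileobj:
        for rule in makeutil.read_dep_makefile(fileobj):
            if mozpath.normpath(dest_name) in rule.targets():
                deps.update(rule.dependencies())
    return deps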
def test_diff_update(self):
    """Diffs are produced on file update."""
    with MockedOpen({"file": "old"}):
        faw = FileAvoidWrite("file", capture_diff=True)
        faw.write("new")
        faw.close()

        diff = "\n".join(faw.diff)
        self.assertIn("-old", diff)
        self.assertIn("+new", diff)
def test_diff_update(self):
    """Diffs are produced on file update."""
    with MockedOpen({'file': 'old'}):
        faw = FileAvoidWrite('file', capture_diff=True)
        faw.write('new')
        faw.close()

        diff = '\n'.join(faw.diff)
        self.assertIn('-old', diff)
        self.assertIn('+new', diff)
def test_diff_update(tmp_path):
    file = tmp_path / "diffable.txt"
    file.write_text("old")

    faw = FileAvoidWrite(str(file), capture_diff=True)
    faw.write("new")
    faw.close()

    diff = "\n".join(faw.diff)
    assert "-old" in diff
    assert "+new" in diff
def test_diff_create(self):
    """Diffs are produced when files are created."""
    tmpdir = tempfile.mkdtemp()
    try:
        path = os.path.join(tmpdir, 'file')
        faw = FileAvoidWrite(path, capture_diff=True)
        faw.write('new')
        faw.close()

        diff = '\n'.join(faw.diff)
        self.assertIn('+new', diff)
    finally:
        shutil.rmtree(tmpdir)
def test_store_new_contents(tmp_path):
    file = tmp_path / "file.txt"

    faw = FileAvoidWrite(str(file))
    faw.write("content")

    assert faw.close() == (False, True)
    assert file.read_text() == "content"
def test_change_binary_file_contents(tmp_path):
    file = tmp_path / "file.dat"
    file.write_bytes(b"\0")

    faw = FileAvoidWrite(str(file), readmode="rb")
    faw.write(b"\0\0\0")

    assert faw.close() == (True, True)
    assert file.read_bytes() == b"\0\0\0"
def test_overwrite_contents(tmp_path):
    file = tmp_path / "file.txt"
    file.write_text("abc")

    faw = FileAvoidWrite(str(file))
    faw.write("bazqux")

    assert faw.close() == (True, True)
    assert file.read_text() == "bazqux"
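The tests above pin down the (existed, updated) pair that close() returns. A minimal usage sketch of that contract; the path and messages are illustrative, not from the source:

faw = FileAvoidWrite('out.txt')  # 'out.txt' is a hypothetical path
faw.write('content')
existed, updated = faw.close()
if not existed:
    print('out.txt created')
elif updated:
    print('out.txt rewritten: contents differed')
else:
    print('out.txt left untouched: contents already matched')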
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, "artifact", {"filename": filename}, "Installing from {filename}")

    # Do we need to post-process?
    processed_filename = filename + PROCESSED_SUFFIX

    if self._skip_cache and os.path.exists(processed_filename):
        self.log(
            logging.DEBUG,
            "artifact",
            {"path": processed_filename},
            "Skipping cache: removing cached processed artifact {path}",
        )
        os.remove(processed_filename)

    if not os.path.exists(processed_filename):
        self.log(logging.INFO, "artifact", {"filename": filename}, "Processing contents of {filename}")
        self.log(
            logging.INFO,
            "artifact",
            {"processed_filename": processed_filename},
            "Writing processed {processed_filename}",
        )
        self._artifact_job.process_artifact(filename, processed_filename)

    self.log(
        logging.INFO,
        "artifact",
        {"processed_filename": processed_filename},
        "Installing from processed {processed_filename}",
    )

    # Copy all .so files, avoiding modification where possible.
    ensureParentDir(mozpath.join(distdir, ".dummy"))

    with zipfile.ZipFile(processed_filename) as zf:
        for info in zf.infolist():
            if info.filename.endswith(".ini"):
                continue
            n = mozpath.join(distdir, info.filename)
            fh = FileAvoidWrite(n, mode="rb")
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(
                logging.INFO,
                "artifact",
                {"updating": "Updating" if file_updated else "Not updating", "filename": n},
                "{updating} {filename}",
            )
            if not file_existed or file_updated:
                # Libraries and binaries may need to be marked executable,
                # depending on platform.
                perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
                perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                os.chmod(n, perms)
    return 0
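The chmod logic above relies on zipfile storing the archived Unix mode in the high 16 bits of ZipInfo.external_attr (per the linked answer). A standalone sketch of that decoding, with a made-up archive name:

import stat
import zipfile

with zipfile.ZipFile('artifact.zip') as zf:  # 'artifact.zip' is a hypothetical name
    for info in zf.infolist():
        unix_mode = info.external_attr >> 16  # high 16 bits hold the Unix st_mode
        if unix_mode & stat.S_IXUSR:
            print(info.filename, 'was marked executable in the archive')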
def test_no_write_happens_if_file_contents_same(tmp_path):
    file = tmp_path / "file.txt"
    file.write_text("content")
    original_write_time = file.stat().st_mtime

    faw = FileAvoidWrite(str(file))
    faw.write("content")

    assert faw.close() == (True, False)
    assert file.stat().st_mtime == original_write_time
def _maybe_write_file(self, path, content, result):
    fh = FileAvoidWrite(path)
    fh.write(content)
    existed, updated = fh.close()

    if not existed:
        result[0].add(path)
    elif updated:
        result[1].add(path)
    else:
        result[2].add(path)
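_maybe_write_file indexes into a three-way result; a hedged sketch of the caller-side convention this implies (the set names are assumptions, not from the source):

created, updated, unchanged = set(), set(), set()
result = (created, updated, unchanged)
# After calling self._maybe_write_file(path, content, result) for each file,
# the three sets partition the visited paths by what FileAvoidWrite reported.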
def test_file_avoid_write(self):
    with MockedOpen({"file": "content"}):
        # Overwriting an existing file replaces its content
        faw = FileAvoidWrite("file")
        faw.write("bazqux")
        self.assertEqual(faw.close(), (True, True))
        self.assertEqual(open("file", "r").read(), "bazqux")

        # Creating a new file (obviously) stores its content
        faw = FileAvoidWrite("file2")
        faw.write("content")
        self.assertEqual(faw.close(), (False, True))
        self.assertEqual(open("file2").read(), "content")

    with MockedOpen({"file": "content"}):
        with FileAvoidWrite("file") as file:
            file.write("foobar")

        self.assertEqual(open("file", "r").read(), "foobar")

    class MyMockedOpen(MockedOpen):
        """MockedOpen extension to raise an exception if something
        attempts to write in an opened file.
        """

        def __call__(self, name, mode):
            if "w" in mode:
                raise Exception("Unexpected open with write mode")
            return MockedOpen.__call__(self, name, mode)

    with MyMockedOpen({"file": "content"}):
        # Validate that MyMockedOpen works as intended
        file = FileAvoidWrite("file")
        file.write("foobar")
        self.assertRaises(Exception, file.close)

        # Check that no write actually happens when writing the
        # same content as what already is in the file
        faw = FileAvoidWrite("file")
        faw.write("content")
        self.assertEqual(faw.close(), (True, False))
def test_file_avoid_write(self):
    with MockedOpen({'file': 'content'}):
        # Overwriting an existing file replaces its content
        faw = FileAvoidWrite('file')
        faw.write('bazqux')
        self.assertEqual(faw.close(), (True, True))
        self.assertEqual(open('file', 'r').read(), 'bazqux')

        # Creating a new file (obviously) stores its content
        faw = FileAvoidWrite('file2')
        faw.write('content')
        self.assertEqual(faw.close(), (False, True))
        self.assertEqual(open('file2').read(), 'content')

    with MockedOpen({'file': 'content'}):
        with FileAvoidWrite('file') as file:
            file.write('foobar')

        self.assertEqual(open('file', 'r').read(), 'foobar')

    class MyMockedOpen(MockedOpen):
        '''MockedOpen extension to raise an exception if something
        attempts to write in an opened file.
        '''
        def __call__(self, name, mode):
            if 'w' in mode:
                raise Exception, 'Unexpected open with write mode'
            return MockedOpen.__call__(self, name, mode)

    with MyMockedOpen({'file': 'content'}):
        # Validate that MyMockedOpen works as intended
        file = FileAvoidWrite('file')
        file.write('foobar')
        self.assertRaises(Exception, file.close)

        # Check that no write actually happens when writing the
        # same content as what already is in the file
        faw = FileAvoidWrite('file')
        faw.write('content')
        self.assertEqual(faw.close(), (True, False))
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False, action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False, action='store_true',
                        help='be silent')
    parser.add_argument('--srcdir', metavar='SRCDIR', action='append', required=True,
                        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT', help='output')
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    properties = merge_properties('region.properties', reversed(opts.srcdir))

    default = properties.get('browser.search.defaultenginename')
    engines = properties.get_list('browser.search.order')

    if opts.verbose:
        writer = codecs.getwriter('utf-8')(sys.stdout)
        print('Read {len} engines: {engines}'.format(len=len(engines), engines=engines),
              file=writer)
        print("Default engine is '{default}'.".format(default=default), file=writer)

    browsersearch = {}
    browsersearch['default'] = default
    browsersearch['engines'] = engines

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(browsersearch, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, 'artifact',
             {'filename': filename},
             'Installing from {filename}')

    # Do we need to post-process?
    processed_filename = filename + PROCESSED_SUFFIX

    if self._skip_cache and os.path.exists(processed_filename):
        self.log(logging.DEBUG, 'artifact',
                 {'path': processed_filename},
                 'Skipping cache: removing cached processed artifact {path}')
        os.remove(processed_filename)

    if not os.path.exists(processed_filename):
        self.log(logging.INFO, 'artifact',
                 {'filename': filename},
                 'Processing contents of {filename}')
        self.log(logging.INFO, 'artifact',
                 {'processed_filename': processed_filename},
                 'Writing processed {processed_filename}')
        self._artifact_job.process_artifact(filename, processed_filename)

    self._artifact_cache._persist_limit.register_file(processed_filename)

    self.log(logging.INFO, 'artifact',
             {'processed_filename': processed_filename},
             'Installing from processed {processed_filename}')

    # Copy all .so files, avoiding modification where possible.
    ensureParentDir(mozpath.join(distdir, '.dummy'))

    with zipfile.ZipFile(processed_filename) as zf:
        for info in zf.infolist():
            if info.filename.endswith('.ini'):
                continue
            n = mozpath.join(distdir, info.filename)
            fh = FileAvoidWrite(n, mode='rb')
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(logging.INFO, 'artifact',
                     {'updating': 'Updating' if file_updated else 'Not updating',
                      'filename': n},
                     '{updating} {filename}')
            if not file_existed or file_updated:
                # Libraries and binaries may need to be marked executable,
                # depending on platform.
                perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
                perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                os.chmod(n, perms)
    return 0
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, 'artifact',
             {'filename': filename},
             'Installing from {filename}')

    # Do we need to post-process?
    processed_filename = filename + PROCESSED_SUFFIX

    if self._skip_cache and os.path.exists(processed_filename):
        self.log(logging.INFO, 'artifact',
                 {'path': processed_filename},
                 'Skipping cache: removing cached processed artifact {path}')
        os.remove(processed_filename)

    if not os.path.exists(processed_filename):
        self.log(logging.INFO, 'artifact',
                 {'filename': filename},
                 'Processing contents of {filename}')
        self.log(logging.INFO, 'artifact',
                 {'processed_filename': processed_filename},
                 'Writing processed {processed_filename}')
        self._artifact_job.process_artifact(filename, processed_filename)

    self._artifact_cache._persist_limit.register_file(processed_filename)

    self.log(logging.INFO, 'artifact',
             {'processed_filename': processed_filename},
             'Installing from processed {processed_filename}')

    # Copy all .so files, avoiding modification where possible.
    ensureParentDir(mozpath.join(distdir, '.dummy'))

    with zipfile.ZipFile(processed_filename) as zf:
        for info in zf.infolist():
            if info.filename.endswith('.ini'):
                continue
            n = mozpath.join(distdir, info.filename)
            fh = FileAvoidWrite(n, mode='rb')
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(logging.INFO, 'artifact',
                     {'updating': 'Updating' if file_updated else 'Not updating',
                      'filename': n},
                     '{updating} {filename}')
            if not file_existed or file_updated:
                # Libraries and binaries may need to be marked executable,
                # depending on platform.
                perms = info.external_attr >> 16  # See http://stackoverflow.com/a/434689.
                perms |= stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # u+w, a+r.
                os.chmod(n, perms)
    return 0
def install_from_file(self, filename, distdir):
    self.log(logging.INFO, 'artifact',
             {'filename': filename},
             'Installing from {filename}')

    # Copy all .so files to dist/bin, avoiding modification where possible.
    ensureParentDir(os.path.join(distdir, 'bin', '.dummy'))

    with zipfile.ZipFile(filename) as zf:
        for info in zf.infolist():
            if not info.filename.endswith('.so'):
                continue
            n = os.path.join(distdir, 'bin', os.path.basename(info.filename))
            fh = FileAvoidWrite(n, mode='r')
            shutil.copyfileobj(zf.open(info), fh)
            file_existed, file_updated = fh.close()
            self.log(logging.INFO, 'artifact',
                     {'updating': 'Updating' if file_updated else 'Not updating',
                      'filename': n},
                     '{updating} {filename}')
    return 0
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", "-v", default=False, action="store_true", help="be verbose")
    parser.add_argument("--silent", "-s", default=False, action="store_true", help="be silent")
    parser.add_argument(
        "--srcdir",
        metavar="SRCDIR",
        action="append",
        required=True,
        help="directories to read inputs from, in order of priority",
    )
    parser.add_argument("output", metavar="OUTPUT", help="output")
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    properties = merge_properties("region.properties", reversed(opts.srcdir))

    # Default, not region-specific.
    default = properties.get("browser.search.defaultenginename")
    engines = properties.get_list("browser.search.order")

    writer = codecs.getwriter("utf-8")(sys.stdout)
    if opts.verbose:
        print("Read {len} engines: {engines}".format(len=len(engines), engines=engines),
              file=writer)
        print("Default engine is '{default}'.".format(default=default), file=writer)

    browsersearch = {}
    browsersearch["default"] = default
    browsersearch["engines"] = engines

    # This gets defaults, yes; but it also gets the list of regions known.
    regions = properties.get_dict("browser.search.defaultenginename")

    browsersearch["regions"] = {}
    for region in regions.keys():
        region_default = regions[region]
        region_engines = properties.get_list("browser.search.order.{region}".format(region=region))
        if opts.verbose:
            print(
                "Region '{region}': Read {len} engines: {region_engines}".format(
                    len=len(region_engines), region=region, region_engines=region_engines
                ),
                file=writer,
            )
            print(
                "Region '{region}': Default engine is '{region_default}'.".format(
                    region=region, region_default=region_default
                ),
                file=writer,
            )
        browsersearch["regions"][region] = {"default": region_default, "engines": region_engines}

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(browsersearch, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print("{output} updated".format(output=output))
        else:
            print("{output} already up-to-date".format(output=output))

    return 0
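For reference, the JSON these region.properties scripts emit has the shape below; the engine and region values are invented examples, not from the source:

browsersearch_example = {
    "default": "ExampleEngine",
    "engines": ["ExampleEngine", "OtherEngine"],
    # Only the revision above that reads browser.search.order.<region> adds this key:
    "regions": {
        "US": {"default": "ExampleEngine", "engines": ["ExampleEngine"]},
    },
}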
def test_diff_not_created_by_default(tmp_path):
    file = tmp_path / "file.txt"
    faw = FileAvoidWrite(str(file))
    faw.write("dummy")
    faw.close()
    assert faw.diff is None
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False, action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False, action='store_true',
                        help='be silent')
    parser.add_argument('--android-package-name', metavar='NAME', required=True,
                        help='Android package name')
    parser.add_argument('--resources', metavar='RESOURCES', default=None,
                        help='optional Android resource directory to find drawables in')
    parser.add_argument('--srcdir', metavar='SRCDIR', action='append', required=True,
                        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT', help='output')
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    properties = merge_properties('region.properties', reversed(opts.srcdir))
    names = properties.get_list('browser.suggestedsites.list')
    if opts.verbose:
        print('Reading {len} suggested sites: {names}'.format(len=len(names), names=names))

    # Keep these two in sync.
    image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
    drawables_template = 'drawable*/suggestedsites_{name}.*'

    # Load properties corresponding to each site name and define their
    # respective image URL.
    sites = []
    for name in names:
        site = properties.get_dict('browser.suggestedsites.{name}'.format(name=name),
                                   required_keys=('title', 'url', 'bgcolor'))
        site['imageurl'] = image_url_template.format(name=name)
        sites.append(site)

        # Now check for existence of an appropriately named drawable. If none
        # exists, throw. This stops a locale discovering, at runtime, that the
        # corresponding drawable was not added to en-US.
        if not opts.resources:
            continue
        resources = os.path.abspath(opts.resources)
        finder = FileFinder(resources)
        matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
        if not matches:
            raise Exception("Could not find drawable in '{resources}' for '{name}'"
                            .format(resources=resources, name=name))
        else:
            if opts.verbose:
                print("Found {len} drawables in '{resources}' for '{name}': {matches}"
                      .format(len=len(matches), resources=resources, name=name, matches=matches))

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(sites, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False, action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False, action='store_true',
                        help='be silent')
    parser.add_argument('--android-package-name', metavar='NAME', required=True,
                        help='Android package name')
    parser.add_argument('--resources', metavar='RESOURCES', default=None,
                        help='optional Android resource directory to find drawables in')
    parser.add_argument('--srcdir', metavar='SRCDIR', action='append', required=True,
                        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT', help='output')
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to override keys.
    properties = merge_properties('region.properties', reversed(opts.srcdir))

    # Keep these two in sync.
    image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
    drawables_template = 'drawable*/suggestedsites_{name}.*'

    # Load properties corresponding to each site name and define their
    # respective image URL.
    sites = []

    def add_names(names, defaults={}):
        for name in names:
            site = copy.deepcopy(defaults)
            site.update(properties.get_dict('browser.suggestedsites.{name}'.format(name=name),
                                            required_keys=('title', 'url', 'bgcolor')))
            site['imageurl'] = image_url_template.format(name=name)
            sites.append(site)

            # Now check for existence of an appropriately named drawable. If none
            # exists, throw. This stops a locale discovering, at runtime, that the
            # corresponding drawable was not added to en-US.
            if not opts.resources:
                continue
            resources = os.path.abspath(opts.resources)
            finder = FileFinder(resources)
            matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
            if not matches:
                raise Exception("Could not find drawable in '{resources}' for '{name}'"
                                .format(resources=resources, name=name))
            else:
                if opts.verbose:
                    print("Found {len} drawables in '{resources}' for '{name}': {matches}"
                          .format(len=len(matches), resources=resources, name=name,
                                  matches=matches))

    # We want the lists to be ordered for reproducibility. Each list has a
    # "default" JSON list item which will be extended by the properties read.
    lists = [
        ('browser.suggestedsites.list', {}),
        ('browser.suggestedsites.restricted.list', {'restricted': True}),
    ]
    if opts.verbose:
        print('Reading {len} suggested site lists: {lists}'.format(
            len=len(lists), lists=[list_name for list_name, _ in lists]))

    for (list_name, list_item_defaults) in lists:
        names = properties.get_list(list_name)
        if opts.verbose:
            print('Reading {len} suggested sites from {list}: {names}'.format(
                len=len(names), list=list_name, names=names))
        add_names(names, list_item_defaults)

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(sites, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0
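Each emitted suggested-site entry thus combines the list-level defaults, the three required properties, and the computed image URL. An illustrative item (all values invented):

site_example = {
    "title": "Example Site",        # required key from browser.suggestedsites.<name>
    "url": "https://example.com/",  # required key
    "bgcolor": "#FFFFFF",           # required key
    "imageurl": "android.resource://org.example.app/drawable/suggestedsites_example",
    "restricted": True,  # default merged in only for browser.suggestedsites.restricted.list
}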
def main(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False, action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False, action='store_true',
                        help='be silent')
    parser.add_argument('--android-package-name', metavar='NAME', required=True,
                        help='Android package name')
    parser.add_argument('--resources', metavar='RESOURCES', default=None,
                        help='optional Android resource directory to find drawables in')
    parser.add_argument('--srcdir', metavar='SRCDIR', action='append', required=True,
                        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT', help='output')
    opts = parser.parse_args(args)

    def resolve_filename(filename):
        for srcdir in opts.srcdir:
            path = mozpath.join(srcdir, filename)
            if os.path.exists(path):
                return path
        return None

    # The list.txt file has one site name per line.
    names = [s.strip() for s in open(resolve_filename('list.txt'), 'rt').readlines()]
    if opts.verbose:
        print('Reading {len} suggested sites: {names}'.format(len=len(names), names=names))

    # Keep these two in sync.
    image_url_template = 'android.resource://%s/drawable/suggestedsites_{name}' % opts.android_package_name
    drawables_template = 'drawable*/suggestedsites_{name}.*'

    # Load json files corresponding to each site name and define their
    # respective image URL.
    sites = []
    for name in names:
        filename = resolve_filename(name + '.json')
        if opts.verbose:
            print("Reading '{name}' from {filename}"
                  .format(name=name, filename=filename))

        site = json.load(open(filename, 'rt'))
        site['imageurl'] = image_url_template.format(name=name)
        sites.append(site)

        # Now check for existence of an appropriately named drawable. If none
        # exists, throw. This stops a locale discovering, at runtime, that the
        # corresponding drawable was not added to en-US.
        if not opts.resources:
            continue
        resources = os.path.abspath(opts.resources)
        finder = FileFinder(resources)
        matches = [p for p, _ in finder.find(drawables_template.format(name=name))]
        if not matches:
            raise Exception("Could not find drawable in '{resources}' for '{name}'"
                            .format(resources=resources, name=name))
        else:
            if opts.verbose:
                print("Found {len} drawables in '{resources}' for '{name}': {matches}"
                      .format(len=len(matches), resources=resources, name=name, matches=matches))

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(sites, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0