def main():
    default_input = "[tools]/third_party/ohchr/ohchr_all.html"
    default_output = "[tools]/third_party/ohchr/attributions.tsv"

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--src",
        help="input ohchr html file (default %s)" % default_input,
        default=default_input,
        metavar="file",
        dest="htmlfile",
    )
    parser.add_argument(
        "--dst",
        help="output tsv file (default %s)" % default_output,
        default=default_output,
        metavar="file",
        dest="outfile",
    )
    args = parser.parse_args()

    htmlfile = tool_utils.resolve_path(args.htmlfile)
    outfile = tool_utils.resolve_path(args.outfile)

    parse_ohchr_html_file(htmlfile, outfile)
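Every example in this collection funnels paths through tool_utils.resolve_path from nototools. As a rough mental model only (inferred from these call sites, not the library's actual implementation), it expands a leading bracketed repo tag such as [tools], [fonts], or [emoji] into a configured absolute directory, makes other paths absolute, and returns a falsy value when it cannot resolve the input. A minimal sketch under those assumptions, with a hypothetical _TAG_TO_ROOT table standing in for the real configuration:

import os

# Hypothetical tag table; the real mapping comes from the nototools configuration.
_TAG_TO_ROOT = {
    'tools': '/path/to/nototools',
    'fonts': '/path/to/noto-fonts',
    'emoji': '/path/to/noto-emoji',
}

def resolve_path_sketch(path_str):
    """Illustrative stand-in for tool_utils.resolve_path, not the real code."""
    if path_str.startswith('['):
        tag, _, rest = path_str[1:].partition(']')
        root = _TAG_TO_ROOT.get(tag)
        if not root:
            return ''  # callers in these examples treat a falsy result as "could not resolve"
        return os.path.normpath(os.path.join(root, rest.lstrip('/')))
    return os.path.abspath(path_str)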
def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('-b', '--base_root', help='root of directory tree, base for comparison '
                      '(default [fonts])', metavar='dir', default='[fonts]')
  parser.add_argument('-t', '--target_root', help='root of directory tree, target for comparison',
                      metavar='dir', required=True)
  parser.add_argument('--name', help='only examine files whose subpath+names contain this regex')
  parser.add_argument('--compare_size', help='include size in comparisons',
                      action='store_true')
  parser.add_argument('--removed',  help='list files not in target', action='store_true')
  parser.add_argument('--added', help='list files not in base', action='store_true')
  parser.add_argument('--identical', help='list files that are identical in base and target',
                      action='store_true')
  parser.add_argument('--nopaths', help='do not print root paths', action='store_false',
                      default=True, dest='show_paths')
  args = parser.parse_args()

  args.base_root = tool_utils.resolve_path(args.base_root)
  args.target_root = tool_utils.resolve_path(args.target_root)

  if not os.path.isdir(args.base_root):
    print('base_root %s does not exist or is not a directory' % args.base_root)
    return

  if not os.path.isdir(args.target_root):
    print('target_root %s does not exist or is not a directory' % args.target_root)
    return

  comparefn = tuple_compare if args.compare_size else tuple_compare_no_size

  compare_summary(args.base_root, args.target_root, args.name, comparefn,
                  args.added, args.removed, args.identical, args.show_paths)
def main(argv):
  DEFAULT_EMOJI_DIR = '[emoji]/svg'
  DEFAULT_FLAGS_DIR = '[emoji]/third_party/region-flags/svg'

  parser = argparse.ArgumentParser(
      description='Collect svg files into target directory with prefix.')
  parser.add_argument(
      'dst_dir', help='Directory to hold copied files.', metavar='dir')
  parser.add_argument(
      '--clean', '-c', help='Replace target directory', action='store_true')
  parser.add_argument(
      '--flags_dir', '-f', metavar='dir', help='directory containing flag svg, '
      'default %s' % DEFAULT_FLAGS_DIR, default=DEFAULT_FLAGS_DIR)
  parser.add_argument(
      '--emoji_dir', '-e', metavar='dir',
      help='directory containing emoji svg, default %s' % DEFAULT_EMOJI_DIR,
      default=DEFAULT_EMOJI_DIR)
  parser.add_argument(
      '-l', '--loglevel', help='log level name/value', default='warning')
  args = parser.parse_args(argv)

  tool_utils.setup_logging(args.loglevel)

  args.flags_dir = tool_utils.resolve_path(args.flags_dir)
  args.emoji_dir = tool_utils.resolve_path(args.emoji_dir)
  build_svg_dir(
      args.dst_dir, clean=args.clean, emoji_dir=args.emoji_dir,
      flags_dir=args.flags_dir)
Example #4
 def get_readme_key_for_filepath(self, filepath):
   abs_filepath = tool_utils.resolve_path(filepath)
   for key in self.get_readme_keys()[:-1]:
     key_path = tool_utils.resolve_path('[%s]/' % key)
     if abs_filepath.startswith(key_path):
       return key
   raise Exception('no key for path %s' % abs_filepath)
Example #5
def ttcfile_build_from_namesfile(
    output_ttc_path, file_dir, namesfile_name=None, tool_path=_BUILD_TOOL_PATH
):
    """Read names of files from namesfile and pass them to build_ttc to build
  a .ttc file.  The names file will default to one named after output_ttc and
  located in file_dir."""

    output_ttc_path = tool_utils.resolve_path(output_ttc_path)
    if not namesfile_name:
        namesfile_name = ttc_namesfile_name(output_ttc_path)

    namesfile_path = path.join(file_dir, namesfile_name)
    if not path.isfile(namesfile_path):
        raise ValueError("could not find names file %s" % namesfile_path)

    filenames = tool_utils.read_lines(namesfile_path)
    with tool_utils.temp_chdir(file_dir):
        # resolve filenames relative to file_dir
        fontpath_list = [tool_utils.resolve_path(n) for n in filenames]
    missing = [n for n in fontpath_list if not path.isfile(n)]
    if missing:
        raise ValueError(
            "%d files were missing:\n  %s" % (len(missing), "\n  ".join(missing))
        )
    ttcfile_build(output_ttc_path, fontpath_list)
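For orientation, a hypothetical invocation of the function above; the .ttc name and directory are illustrative assumptions, not taken from the source.

# Build fonts/NotoSerifCJK.ttc from the member fonts listed in the names file
# that ttc_namesfile_name derives for it (paths here are made up for illustration).
ttcfile_build_from_namesfile('fonts/NotoSerifCJK.ttc', 'fonts')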
Example #6
def main():
  parser = argparse.ArgumentParser()
  parser.add_argument('-b', '--base_root', help='root of directory tree, base for comparison '
                      '(default [fonts])', metavar='dir', default='[fonts]')
  parser.add_argument('-t', '--target_root', help='root of directory tree, target for comparison',
                      metavar='dir', required=True)
  parser.add_argument('--name', help='only examine files whose subpath+names contain this regex')
  parser.add_argument('--compare_size', help='include size in comparisons',
                      action='store_true')
  parser.add_argument('--removed',  help='list files not in target', action='store_true')
  parser.add_argument('--added', help='list files not in base', action='store_true')
  parser.add_argument('--identical', help='list files that are identical in base and target',
                      action='store_true')
  parser.add_argument('--nopaths', help='do not print root paths', action='store_false',
                      default=True, dest='show_paths')
  args = parser.parse_args()

  args.base_root = tool_utils.resolve_path(args.base_root)
  args.target_root = tool_utils.resolve_path(args.target_root)

  if not os.path.isdir(args.base_root):
    print('base_root %s does not exist or is not a directory' % args.base_root)
    return

  if not os.path.isdir(args.target_root):
    print('target_root %s does not exist or is not a directory' % args.target_root)
    return

  comparefn = tuple_compare if args.compare_size else tuple_compare_no_size

  compare_summary(args.base_root, args.target_root, args.name, comparefn,
                  args.added, args.removed, args.identical, args.show_paths)
Example #7
def _get_version_info(fonts):
  """If fonts are all from noto-fonts, use information from the current
  state of the repo to build a version string.  Otherwise return None."""

  prefix = tool_utils.resolve_path('[fonts]')
  if not all(tool_utils.resolve_path(f).startswith(prefix) for f in fonts):
    return None
  return _get_fonts_repo_version_info()
Example #8
def _collect_paths(dirs, files):
    paths = []
    if dirs:
        for d in dirs:
            d = tool_utils.resolve_path(d)
            paths.extend(n for n in glob.glob(path.join(d, '*')))
    if files:
        paths.extend(tool_utils.resolve_path(f) for f in files)
    return paths
Example #9
def _collect_paths(dirs, files):
  paths = []
  if dirs:
    for d in dirs:
      d = tool_utils.resolve_path(d)
      paths.extend(n for n in glob.glob(path.join(d, '*')))
  if files:
    paths.extend(tool_utils.resolve_path(f) for f in files)
  return paths
Example #10
def patch_fonts(srcdir, dstdir):
    """Remove dstdir and repopulate with patched contents of srcdir (and
  its 'cjk' subdirectory if it exists)."""

    srcdir = tool_utils.resolve_path(srcdir)
    dstdir = tool_utils.resolve_path(dstdir)

    tool_utils.ensure_dir_exists(dstdir, clean=True)

    patch_hyphen(srcdir, dstdir)
    patch_cjk_ttcs(path.join(srcdir, 'cjk'), path.join(dstdir, 'cjk'))
    subset_symbols(srcdir, dstdir)
Example #11
def _collect_paths(dirs, files):
    paths = []
    if dirs:
        for d in dirs:
            d = tool_utils.resolve_path(d)
            paths.extend(n for n in glob.glob(path.join(d, '*')))
    if files:
        for fname in files:
            if fname[0] == '@':
                paths.extend(_read_filename_list(fname[1:]))
            else:
                paths.append(tool_utils.resolve_path(fname))
    return paths
Example #12
def patch_fonts(srcdir, dstdir):
  """Remove dstdir and repopulate with patched contents of srcdir (and
  its 'cjk' subdirectory if it exists)."""

  srcdir = tool_utils.resolve_path(srcdir)
  dstdir = tool_utils.resolve_path(dstdir)

  tool_utils.ensure_dir_exists(dstdir, clean=True)

  patch_hyphen(srcdir, dstdir)
  patch_cjk_ttcs(path.join(srcdir, 'cjk'), path.join(dstdir, 'cjk'))
  subset_symbols(srcdir, dstdir)
  patch_post_table(srcdir, dstdir)
Example #13
def _collect_paths(dirs, files):
  paths = []
  if dirs:
    for d in dirs:
      d = tool_utils.resolve_path(d)
      paths.extend(n for n in glob.glob(path.join(d, '*')))
  if files:
    for fname in files:
      if fname[0] == '@':
        paths.extend(_read_filename_list(fname[1:]))
      else:
        paths.append(tool_utils.resolve_path(fname))
  return paths
Example #14
def ttcfile_extract(input_ttc_path, output_dir, tool_path=_EXTRACT_TOOL_PATH):
  """Extract .ttf/.otf fonts from a .ttc file, and return a list of the names of
  the extracted fonts."""

  otc2otf = tool_utils.resolve_path(tool_path)
  if not otc2otf:
    raise ValueError('can not resolve %s' % tool_path)

  input_ttc_path = tool_utils.resolve_path(input_ttc_path)
  output_dir = tool_utils.ensure_dir_exists(output_dir)
  with tool_utils.temp_chdir(output_dir):
    # capture and discard standard output, the tool is noisy
    subprocess.check_output([otc2otf, input_ttc_path])
  return ttcfile_filenames(input_ttc_path)
def main():
  default_input = '[tools]/third_party/ohchr/ohchr_all.html'
  default_output = '[tools]/third_party/ohchr/attributions.tsv'

  parser = argparse.ArgumentParser()
  parser.add_argument('--src', help='input ohchr html file (default %s)' % default_input,
                      default=default_input, metavar='file', dest='htmlfile')
  parser.add_argument('--dst', help='output tsv file (default %s)' % default_output,
                      default=default_output, metavar='file', dest='outfile')
  args = parser.parse_args()

  htmlfile = tool_utils.resolve_path(args.htmlfile)
  outfile = tool_utils.resolve_path(args.outfile)

  parse_ohchr_html_file(htmlfile, outfile)
Example #17
def get_noto_fonts(paths=NOTO_FONT_PATHS):
    """Scan paths for fonts, and create a NotoFont for each one, returning a list
    of these.  'paths' defaults to the standard noto font paths, using notoconfig."""

    font_dirs = list(filter(None, [tool_utils.resolve_path(p) for p in paths]))
    print("Getting fonts from: %s" % font_dirs)

    all_fonts = []
    for font_dir in font_dirs:
        if not os.path.exists(font_dir):
            continue
        for filename in os.listdir(font_dir):
            if not _EXT_REGEX.match(filename):
                continue

            filepath = path.join(font_dir, filename)
            font = get_noto_font(filepath)
            if not font:
                sys.stderr.write("bad font filename in %s: '%s'.\n" %
                                 (font_dir, filename))
                continue

            all_fonts.append(font)

    return all_fonts
Example #18
def autofix_fonts(
    font_names, dstdir, release_dir, version, version_info, autohint, dry_run):
  dstdir = tool_utils.ensure_dir_exists(dstdir)

  font_names.sort()
  print('Processing\n  %s' % '\n  '.join(font_names))
  print('Dest dir: %s' % dstdir)

  if release_dir is None:
    reldir = None
  else:
    reldir = tool_utils.resolve_path(release_dir)
    if not path.isdir(reldir):
      raise Exception('release dir "%s" does not exist' % reldir)

  if version_info is None or version_info == '[fonts]':
    if version_info is None:
      version_info = _get_version_info(font_names)
    else:
      version_info = _get_fonts_repo_version_info()

    if not version_info:
      raise Exception('could not compute version info from fonts')
    print('Computed version_info: %s' % version_info)
  else:
    _check_version_info(version_info)

  _check_version(version)
  _check_autohint(autohint)

  if dry_run:
    print('*** dry run %s***' % ('(autohint) ' if autohint else ''))
  for f in font_names:
    fix_font(f, dstdir, reldir, version, version_info, autohint, dry_run)
Example #19
def main():
  default_cmap = '[tools]/nototools/data/noto_cmap_phase3.xml'
  default_namepats = ['cps_%s.txt']

  epilog = """If a namepat contains the string "%s" then the script id will
  be substituted for it. If one namepat is provided it is used for all scripts,
  otherwise there should be as many namepats as there are scripts."""
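  # Worked example (assumed from the epilog above): with --scripts Arab Deva and
  # the default namepat 'cps_%s.txt', the output files would be cps_Arab.txt and
  # cps_Deva.txt.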

  parser = argparse.ArgumentParser(epilog=epilog)
  parser.add_argument(
      '-c', '--cmap_file', help='cmap data file to use (default %s)' %
      default_cmap, default=default_cmap, metavar='file')
  parser.add_argument(
      '-d', '--dest_dir', help='directory for output, (defaults to current '
      'directory)', metavar='dir', default='.')
  parser.add_argument(
      '-s', '--scripts', help='script ids of data to output', nargs='+',
      metavar='id', required=True)
  parser.add_argument(
      '-n', '--namepats', help='name patterns used to generate output '
      'filenames (default "cps_%%s.txt")',
      default=default_namepats, metavar='npat', nargs='+')
  args = parser.parse_args()

  cmap_filepath = tool_utils.resolve_path(args.cmap_file)
  cmapdata = cmap_data.read_cmap_data_file(cmap_filepath)
  generate(cmapdata, args.dest_dir, args.scripts, args.namepats)
Example #20
def main():
  default_coverage_file = '[tools]/nototools/data/noto_cmap_phase3.xml'

  parser = argparse.ArgumentParser()
  parser.add_argument(
      '-o', '--output_file', help='name of xml file to output', metavar='file')
  parser.add_argument(
      '-d', '--dirs', help='directories containing font files', metavar='dir',
      nargs='+')
  parser.add_argument(
      '-f', '--files', help='font files', metavar='file', nargs='+')
  parser.add_argument(
      '-n', '--name', help='short name of this collection, used in reports',
      metavar='name', required=True)
  parser.add_argument(
      '-c', '--cmap_data', help='cmap data file (default %s)' %
      default_coverage_file, const=default_coverage_file, nargs='?',
      metavar='file')
  args = parser.parse_args()

  cmap_path = None
  if args.dirs or args.files:
    paths = tool_utils.collect_paths(args.dirs, args.files)
    cps, paths = get_cps_from_files(paths)
  elif args.cmap_data:
    cmap_path = tool_utils.resolve_path(args.cmap_data)
    cps = get_cps_from_cmap_data_file(cmap_path)
    paths = None
  else:
    print('Please specify font files, directories, or a cmap data file.')
    return
  coverage = create(args.name, cps, paths=paths, cmap_data=cmap_path)
  write(coverage, args.output_file)
def main():
  default_coverage_file = '[tools]/nototools/data/noto_cmap_phase3.xml'

  parser = argparse.ArgumentParser()
  parser.add_argument(
      '-o', '--output_file', help='name of xml file to output', metavar='file')
  parser.add_argument(
      '-d', '--dirs', help='directories containing font files', metavar='dir',
      nargs='+')
  parser.add_argument(
      '-f', '--files', help='font files', metavar='file', nargs='+')
  parser.add_argument(
      '-n', '--name', help='short name of this collection, used in reports',
      metavar='name', required=True)
  parser.add_argument(
      '-c', '--cmap_data', help='cmap data file (default %s)' %
      default_coverage_file, const=default_coverage_file, nargs='?',
      metavar='file')
  args = parser.parse_args()

  cmap_path = None
  if args.dirs or args.files:
    paths = tool_utils.collect_paths(args.dirs, args.files)
    cps, paths = get_cps_from_files(paths)
  elif args.cmap_data:
    cmap_path = tool_utils.resolve_path(args.cmap_data)
    cps = get_cps_from_cmap_data_file(cmap_path)
    paths = None
  else:
    print('Please specify font files, directories, or a cmap data file.')
    return
  coverage = create(args.name, cps, paths=paths, cmap_data=cmap_path)
  write(coverage, args.output_file)
Example #22
def generate(root, font_str, font_sizes, text, lang, out_file):
    root = tool_utils.resolve_path(root)
    if not path.isdir(root):
        raise Exception("%s is not a directory" % root)

    font_names = _get_font_list(root, font_str)
    if not font_names:
        raise Exception('no fonts matching "%s" in %s' % (font_str, root))

    print(
        "found %d fonts under %s:\n  %s"
        % (len(font_names), root, "\n  ".join(sorted(font_names)))
    )

    if not font_sizes:
        font_sizes = [10, 11, 12, 13, 14, 15, 16, 17, 18, 20, 22, 24, 28, 32]

    if not text:
        text = _get_sample_text(root, font_names, lang)

    if out_file:
        out_file = path.abspath(out_file)
        file_dir = tool_utils.ensure_dir_exists(path.dirname(out_file))
        if path.exists(out_file):
            print("file %s already exists, overwriting" % out_file)
        font_dir = tool_utils.ensure_dir_exists(path.join(file_dir, "fonts"))
        for font_name in font_names:
            src = path.join(root, font_name)
            dst = tool_utils.ensure_dir_exists(
                path.dirname(path.join(font_dir, font_name))
            )
            shutil.copy2(src, dst)

    _write_html(root, font_names, font_sizes, text, out_file)
def check_familynames(namefile):
  namefile = tool_utils.resolve_path(namefile)
  passed = [True]
  def fn(name, styles):
    name_passed = check_familyname(name, styles)
    passed[0] &= name_passed
  _for_all_familynames(namefile, fn)
  return passed[0]
Example #24
def check_familynames(namefile):
  namefile = tool_utils.resolve_path(namefile)
  passed = [True]
  def fn(name, styles):
    name_passed = check_familyname(name, styles)
    passed[0] &= name_passed
  _for_all_familynames(namefile, fn)
  return passed[0]
Example #25
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-b",
        "--base_root",
        help="root of directory tree, base for comparison " "(default [fonts])",
        metavar="dir",
        default="[fonts]",
    )
    parser.add_argument(
        "-t", "--target_root", help="root of directory tree, target for comparison", metavar="dir", required=True
    )
    parser.add_argument("--name", help="only examine files whose subpath+names contain this regex")
    parser.add_argument("--compare_size", help="include size in comparisons", action="store_true")
    parser.add_argument("--removed", help="list files not in target", action="store_true")
    parser.add_argument("--added", help="list files not in base", action="store_true")
    parser.add_argument("--identical", help="list files that are identical in base and target", action="store_true")
    parser.add_argument(
        "--nopaths", help="do not print root paths", action="store_false", default=True, dest="show_paths"
    )
    args = parser.parse_args()

    args.base_root = tool_utils.resolve_path(args.base_root)
    args.target_root = tool_utils.resolve_path(args.target_root)

    if not os.path.isdir(args.base_root):
        print("base_root %s does not exist or is not a directory" % args.base_root)
        return

    if not os.path.isdir(args.target_root):
        print("target_root %s does not exist or is not a directory" % args.target_root)
        return

    comparefn = tuple_compare if args.compare_size else tuple_compare_no_size

    compare_summary(
        args.base_root,
        args.target_root,
        args.name,
        comparefn,
        args.added,
        args.removed,
        args.identical,
        args.show_paths,
    )
def _get_version_info(fonts):
  """If fonts are all from noto-fonts, use information from the current
  state of the repo to build a version string.  Otherwise return None."""

  # add '/' to distinguish between noto-fonts/ and noto-fonts-alpha/
  for repo_tag in ['[fonts]', '[fonts_alpha]', '[source]']:
    prefix = tool_utils.resolve_path(repo_tag) + '/'
    print('trying prefix "%s"' % prefix)
    if all(tool_utils.resolve_path(f).startswith(prefix) for f in fonts):
      return _get_fonts_repo_version_info(repo_tag)
    # else report the first failure
    for f in fonts:
      if not tool_utils.resolve_path(f).startswith(prefix):
        print('# failed at "%s"' % tool_utils.resolve_path(f))
        break

  print('no prefix succeeded')
  return None
Example #27
def family_to_name_info_for_phase(phase):
  """Phase is an int, either 2 or 3."""
  result = _PHASE_TO_NAME_INFO_CACHE.get(phase, None)
  if not result and phase in _PHASE_TO_FILENAME:
    tooldir = tool_utils.resolve_path('[tools]/nototools')
    result = read_family_name_info_file(
        path.join(tooldir, _PHASE_TO_FILENAME[phase]))
    _PHASE_TO_NAME_INFO_CACHE[phase] = result
  return result
def display_missing(cmap_file):
  print('Checking data in %s' % cmap_file)
  filename = tool_utils.resolve_path(cmap_file)
  cps = _covered_cps(filename)
  defined_cps = unicode_data.defined_characters(version=9.0)
  omitted = cmap_block_coverage._OMITTED
  expected_cps = defined_cps - omitted
  missing_cps = expected_cps - cps
  show_cps_by_block(missing_cps)
Example #29
def build_ttc(output_path, file_list,
              tool_path='[afdko]/FDK/Tools/SharedData/FDKScripts/otf2otc.py'):
  """Use AFDKO to build a .ttc file from a list of input files."""
  otf2otc = tool_utils.resolve_path(tool_path)
  if not otf2otc:
    raise ValueError('can not resolve %s' % tool_path)

  # capture and discard standard output, the tool is noisy
  subprocess.check_output(['python', otf2otc, '-o', output_path] + file_list)
Example #30
def _get_version_info(fonts):
  """If fonts are all from noto-fonts, use information from the current
  state of the repo to build a version string.  Otherwise return None."""

  # add '/' to distinguish between noto-fonts/ and noto-fonts-alpha/
  for repo_tag in ['[fonts]', '[fonts_alpha]', '[source]']:
    prefix = tool_utils.resolve_path(repo_tag) + '/'
    print('trying prefix "%s"' % prefix)
    if all(tool_utils.resolve_path(f).startswith(prefix) for f in fonts):
      return _get_fonts_repo_version_info(repo_tag)
    # else report the first failure
    for f in fonts:
      if not tool_utils.resolve_path(f).startswith(prefix):
        print('# failed at "%s"' % tool_utils.resolve_path(f))
        break

  print('no prefix succeeded')
  return None
Example #31
def display_missing(cmap_file):
    print('Checking data in %s' % cmap_file)
    filename = tool_utils.resolve_path(cmap_file)
    cps = _covered_cps(filename)
    defined_cps = unicode_data.defined_characters(version=9.0)
    omitted = cmap_block_coverage._OMITTED
    expected_cps = defined_cps - omitted
    missing_cps = expected_cps - cps
    show_cps_by_block(missing_cps)
Example #32
def ttcfile_build(output_ttc_path, fontpath_list, tool_path=_BUILD_TOOL_PATH):
  """Build a .ttc from a list of font files."""
  otf2otc = tool_utils.resolve_path(tool_path)
  if not otf2otc:
    raise ValueError('can not resolve %s' % tool_path)

  tool_utils.ensure_dir_exists(path.dirname(output_ttc_path))
  # capture and discard standard output, the tool is noisy
  subprocess.check_output([otf2otc, '-o', output_ttc_path] + fontpath_list)
Example #33
def family_to_name_info_for_phase(phase):
    """Phase is an int, either 2 or 3."""
    result = _PHASE_TO_NAME_INFO_CACHE.get(phase, None)
    if not result and phase in _PHASE_TO_FILENAME:
        tooldir = tool_utils.resolve_path('[tools]/nototools')
        result = read_family_name_info_file(
            path.join(tooldir, _PHASE_TO_FILENAME[phase]))
        _PHASE_TO_NAME_INFO_CACHE[phase] = result
    return result
Example #34
def build_ttc(output_path,
              file_list,
              tool_path='[afdko]/FDK/Tools/SharedData/FDKScripts/otf2otc.py'):
    """Use AFDKO to build a .ttc file from a list of input files."""
    otf2otc = tool_utils.resolve_path(tool_path)
    if not otf2otc:
        raise ValueError('can not resolve %s' % tool_path)

    # capture and discard standard output, the tool is noisy
    subprocess.check_output(['python', otf2otc, '-o', output_path] + file_list)
def autofix_fonts(
    font_names, src_root, dst_dir, release_dir, version, version_info, autohint,
    dry_run):
  dst_dir = tool_utils.resolve_path(dst_dir)
  dst_dir = tool_utils.ensure_dir_exists(dst_dir)

  font_names = sorted(_expand_font_names(font_names))
  print('Processing %d fonts\n  %s' % (
      len(font_names), '\n  '.join(font_names[:5]) + '...'))

  src_root = tool_utils.resolve_path(src_root)
  print('Src root: %s' % src_root)
  print('Dest dir: %s' % dst_dir)

  if release_dir is None:
    rel_dir = None
  else:
    rel_dir = tool_utils.resolve_path(release_dir)
    if not path.isdir(rel_dir):
      raise Exception('release dir "%s" does not exist' % rel_dir)

  if (version_info is None or version_info == '[fonts]' or
      version_info == '[fonts_alpha]'):
    if version_info is None:
      version_info = _get_version_info(font_names)
    else:
      version_info = _get_fonts_repo_version_info()

    if not version_info:
      raise Exception('could not compute version info from fonts')
    print('Computed version_info: %s' % version_info)
  else:
    _check_version_info(version_info)

  _check_version(version)
  _check_autohint(autohint)

  if dry_run:
    print('*** dry run %s***' % ('(autohint) ' if autohint else ''))
  for f in font_names:
    f = path.join(src_root, f)
    fix_font(f, dst_dir, rel_dir, version, version_info, autohint, dry_run)
Example #36
def autofix_fonts(
    font_names, src_root, dst_dir, release_dir, version, version_info, autohint,
    dry_run):
  dst_dir = tool_utils.resolve_path(dst_dir)
  dst_dir = tool_utils.ensure_dir_exists(dst_dir)

  font_names = sorted(_expand_font_names(font_names))
  print('Processing %d fonts\n  %s' % (
      len(font_names), '\n  '.join(font_names[:5]) + '...'))

  src_root = tool_utils.resolve_path(src_root)
  print('Src root: %s' % src_root)
  print('Dest dir: %s' % dst_dir)

  if release_dir is None:
    rel_dir = None
  else:
    rel_dir = tool_utils.resolve_path(release_dir)
    if not path.isdir(rel_dir):
      raise Exception('release dir "%s" does not exist' % rel_dir)

  if (version_info is None or version_info == '[fonts]' or
      version_info == '[fonts_alpha]'):
    if version_info is None:
      version_info = _get_version_info(font_names)
    else:
      version_info = _get_fonts_repo_version_info()

    if not version_info:
      raise Exception('could not compute version info from fonts')
    print('Computed version_info: %s' % version_info)
  else:
    _check_version_info(version_info)

  _check_version(version)
  _check_autohint(autohint)

  if dry_run:
    print('*** dry run %s***' % ('(autohint) ' if autohint else ''))
  for f in font_names:
    f = path.join(src_root, f)
    fix_font(f, dst_dir, rel_dir, version, version_info, autohint, dry_run)
Example #37
def match_files(src_dir, names):
    matched_files = set()
    src_dir = tool_utils.resolve_path(src_dir)
    print("# root: %s" % src_dir)
    name_re = _build_regex(names)
    for root, dirs, files in os.walk(src_dir):
        effective_root = root[len(src_dir) + 1 :]
        for f in files:
            if name_re.match(f):
                matched_files.add(path.join(effective_root, f))
    return sorted(matched_files)
def generate_report(title, input_dir, compare_data, output_path):
    """The html file is output_path.  The image data goes in a folder
    with the same name as output_path without the extension.  .css
    and .js files are written as siblings of the html file."""
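    # For example: with output_path 'report/compare.html', the page is written to
    # report/compare.html, its images to report/compare/, and the shared .js/.css
    # files alongside it in report/.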

    if compare_data is None:
        compare_data = glyph_image_compare.read_compare_data(
            path.join(input_dir, "compare_data.txt"))

    output_path = path.abspath(output_path)
    root = path.dirname(output_path)
    image_dir = path.splitext(path.basename(output_path))[0]

    # Do not clean this directory, so we can write multiple html files
    # to it.
    tool_utils.ensure_dir_exists(root)

    # Copy supporting js/css files, they are always the same.
    filedir = tool_utils.resolve_path("[tools]/nototools/glyph_image")
    for name in ["glyph_image_compare.js", "glyph_image_compare.css"]:
        shutil.copy2(path.join(filedir, name), path.join(root, name))

    # Clean subdir for this html, then copy image files to it
    full_image_dir = tool_utils.ensure_dir_exists(path.join(root, image_dir),
                                                  True)
    for name in [t[0] + ".png" for t in compare_data.pair_data.pair_data]:
        shutil.copy2(path.join(input_dir, name),
                     path.join(full_image_dir, name))

    bname = compare_data.base_fdata.name
    tname = compare_data.target_fdata.name
    name = bname if bname == tname else bname + " / " + tname

    image_data, cp_data = generate_image_data(compare_data)

    ftable = generate_font_table(compare_data)

    header_height = max(250, compare_data.pair_data.max_frame.h + 20)

    # generate html
    with open(output_path, "w") as f:
        f.write(
            Template(_TEMPLATE).substitute(
                title=title,
                ftable=ftable,
                header_height=header_height,
                image_dir=image_dir,
                image_data=image_data,
                cp_data=cp_data,
                name=name,
            ))
    print("wrote %s" % output_path)
Example #39
def main(argv):
    DEFAULT_EMOJI_DIR = '[emoji]/svg'
    DEFAULT_FLAGS_DIR = '[emoji]/third_party/region-flags/svg'

    parser = argparse.ArgumentParser(
        description='Collect svg files into target directory with prefix.')
    parser.add_argument('dst_dir',
                        help='Directory to hold copied files.',
                        metavar='dir')
    parser.add_argument('--clean',
                        '-c',
                        help='Replace target directory',
                        action='store_true')
    parser.add_argument('--flags_dir',
                        '-f',
                        metavar='dir',
                        help='directory containing flag svg, '
                        'default %s' % DEFAULT_FLAGS_DIR,
                        default=DEFAULT_FLAGS_DIR)
    parser.add_argument('--emoji_dir',
                        '-e',
                        metavar='dir',
                        help='directory containing emoji svg, default %s' %
                        DEFAULT_EMOJI_DIR,
                        default=DEFAULT_EMOJI_DIR)
    parser.add_argument('-l',
                        '--loglevel',
                        help='log level name/value',
                        default='warning')
    args = parser.parse_args(argv)

    tool_utils.setup_logging(args.loglevel)

    args.flags_dir = tool_utils.resolve_path(args.flags_dir)
    args.emoji_dir = tool_utils.resolve_path(args.emoji_dir)
    build_svg_dir(args.dst_dir,
                  clean=args.clean,
                  emoji_dir=args.emoji_dir,
                  flags_dir=args.flags_dir)
def create_thumbnails_and_aliases(src_dir, dst_dir, crop, dst_prefix):
  """Creates thumbnails in dst_dir based on sources in src_dir, using
  dst_prefix. Assumes the source prefix is 'emoji_u' and the common suffix
  is '.png'."""

  src_dir = tool_utils.resolve_path(src_dir)
  if not path.isdir(src_dir):
    raise ValueError('"%s" is not a directory' % src_dir)

  dst_dir = tool_utils.ensure_dir_exists(tool_utils.resolve_path(dst_dir))

  src_prefix = 'emoji_u'
  suffix = '.png'

  inv_aliases = get_inv_aliases()

  for src_file in os.listdir(src_dir):
    try:
      seq = unicode_data.strip_emoji_vs(
          filename_to_sequence(src_file, src_prefix, suffix))
    except ValueError as ve:
      logger.warning('Error (%s), skipping' % ve)
      continue

    src_path = path.join(src_dir, src_file)

    dst_file = sequence_to_filename(seq, dst_prefix, suffix)
    dst_path = path.join(dst_dir, dst_file)

    create_thumbnail(src_path, dst_path, crop)
    logger.info('wrote thumbnail%s: %s' % (
        ' with crop' if crop else '', dst_file))

    for alias_seq in inv_aliases.get(seq, ()):
      alias_file = sequence_to_filename(alias_seq, dst_prefix, suffix)
      alias_path = path.join(dst_dir, alias_file)
      shutil.copy2(dst_path, alias_path)
      logger.info('wrote alias: %s' % alias_file)
Example #41
def create_thumbnails_and_aliases(src_dir, dst_dir, crop, dst_prefix):
    """Creates thumbnails in dst_dir based on sources in src_dir, using
  dst_prefix. Assumes the source prefix is 'emoji_u' and the common suffix
  is '.png'."""

    src_dir = tool_utils.resolve_path(src_dir)
    if not path.isdir(src_dir):
        raise ValueError('"%s" is not a directory' % src_dir)

    dst_dir = tool_utils.ensure_dir_exists(tool_utils.resolve_path(dst_dir))

    src_prefix = 'emoji_u'
    suffix = '.png'

    inv_aliases = get_inv_aliases()

    for src_file in os.listdir(src_dir):
        try:
            seq = unicode_data.strip_emoji_vs(
                filename_to_sequence(src_file, src_prefix, suffix))
        except ValueError as ve:
            logger.warning('Error (%s), skipping' % ve)
            continue

        src_path = path.join(src_dir, src_file)

        dst_file = sequence_to_filename(seq, dst_prefix, suffix)
        dst_path = path.join(dst_dir, dst_file)

        create_thumbnail(src_path, dst_path, crop)
        logger.info('wrote thumbnail%s: %s' %
                    (' with crop' if crop else '', dst_file))

        for alias_seq in inv_aliases.get(seq, ()):
            alias_file = sequence_to_filename(alias_seq, dst_prefix, suffix)
            alias_path = path.join(dst_dir, alias_file)
            shutil.copy2(dst_path, alias_path)
            logger.info('wrote alias: %s' % alias_file)
Example #42
def main():
    default_coverage_file = "[tools]/nototools/data/noto_cmap_phase3.xml"

    parser = argparse.ArgumentParser()
    parser.add_argument("-o",
                        "--output_file",
                        help="name of xml file to output",
                        metavar="file")
    parser.add_argument(
        "-d",
        "--dirs",
        help="directories containing font files",
        metavar="dir",
        nargs="+",
    )
    parser.add_argument("-f",
                        "--files",
                        help="font files",
                        metavar="file",
                        nargs="+")
    parser.add_argument(
        "-n",
        "--name",
        help="short name of this collection, used in reports",
        metavar="name",
        required=True,
    )
    parser.add_argument(
        "-c",
        "--cmap_data",
        help="cmap data file (default %s)" % default_coverage_file,
        const=default_coverage_file,
        nargs="?",
        metavar="file",
    )
    args = parser.parse_args()

    cmap_path = None
    if args.dirs or args.files:
        paths = tool_utils.collect_paths(args.dirs, args.files)
        cps, paths = get_cps_from_files(paths)
    elif args.cmap_data:
        cmap_path = tool_utils.resolve_path(args.cmap_data)
        cps = get_cps_from_cmap_data_file(cmap_path)
        paths = None
    else:
        print("Please specify font files, directories, or a cmap data file.")
        return
    coverage = create(args.name, cps, paths=paths, cmap_data=cmap_path)
    write(coverage, args.output_file)
Example #43
def generate_filenames(namefile, outfile, sort=False):
  namelist = []
  def fn(name, styles):
    namelist.extend(generate_family_filenames(name, styles))
  namefile = tool_utils.resolve_path(namefile)
  _for_all_familynames(namefile, fn)
  if sort:
    namelist.sort()
  allnames = '\n'.join(namelist)
  if outfile:
    with open(outfile, 'w') as f:
      f.write(allnames)
      f.write('\n')
  else:
    print(allnames)
Example #44
def ttcfile_build_from_namesfile(
    output_ttc_path, file_dir, namesfile_name=None, tool_path=_BUILD_TOOL_PATH):
  """Read names of files from namesfile and pass them to build_ttc to build
  a .ttc file.  The names file will default to one named after output_ttc and
  located in file_dir."""

  output_ttc_path = tool_utils.resolve_path(output_ttc_path)
  if not namesfile_name:
    namesfile_name = ttc_namesfile_name(output_ttc_path)

  namesfile_path = path.join(file_dir, namesfile_name)
  if not path.isfile(namesfile_path):
    raise ValueError('could not find names file %s' % namesfile_path)

  filenames = tool_utils.read_lines(namesfile_path)
  with tool_utils.temp_chdir(file_dir):
    # resolve filenames relative to file_dir
    fontpath_list = [tool_utils.resolve_path(n) for n in filenames]
  missing = [n for n in fontpath_list if not path.isfile(n)]
  if missing:
    raise ValueError(
        '%d files were missing:\n  %s' % (
            len(missing), '\n  '.join(missing)))
  ttcfile_build(output_ttc_path, fontpath_list)
def generate_filenames(namefile, outfile, sort=False):
  namelist = []
  def fn(name, styles):
    namelist.extend(generate_family_filenames(name, styles))
  namefile = tool_utils.resolve_path(namefile)
  _for_all_familynames(namefile, fn)
  if sort:
    namelist.sort()
  allnames = '\n'.join(namelist)
  if outfile:
    with open(outfile, 'w') as f:
      f.write(allnames)
      f.write('\n')
  else:
    print(allnames)
Example #46
def _get_repo_version_str(beta):
  """See above for description of this string."""
  if beta is not None:
    date_str = datetime.date.today().strftime('%Y%m%d')
    return 'GOOG;noto-emoji:%s;BETA %s' % (date_str, beta)

  p = tool_utils.resolve_path('[emoji]')
  commit, date, _ = tool_utils.git_head_commit(p)
  if not tool_utils.git_check_remote_commit(p, commit):
    raise Exception('emoji not on upstream master branch')
  date_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})')
  m = date_re.match(date)
  if not m:
    raise Exception('could not match "%s" with "%s"' % (date, date_re.pattern))
  ymd = ''.join(m.groups())
  return 'GOOG;noto-emoji:%s:%s' % (ymd, commit[:12])
Example #47
def _get_repo_version_str(beta):
  """See above for description of this string."""
  if beta is not None:
    date_str = datetime.date.today().strftime('%Y%m%d')
    return 'GOOG;noto-emoji:%s;BETA %s' % (date_str, beta)

  p = tool_utils.resolve_path('[emoji]')
  commit, date, _ = tool_utils.git_head_commit(p)
  if not tool_utils.git_check_remote_commit(p, commit):
    raise Exception('emoji not on upstream master branch')
  date_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})')
  m = date_re.match(date)
  if not m:
    raise Exception('could not match "%s" with "%s"' % (date, date_re.pattern))
  ymd = ''.join(m.groups())
  return 'GOOG;noto-emoji:%s:%s' % (ymd, commit[:12])
Example #48
def _get_fonts_repo_version_info():
  prefix = tool_utils.resolve_path('[fonts]')

  commit, date, commit_msg = tool_utils.git_head_commit(prefix)

  # check that commit is on the upstream master
  if not tool_utils.git_check_remote_commit(prefix, commit):
    raise Exception(
        'commit %s (%s) not on upstream master branch' % (
            commit[:12], commit_msg.splitlines()[0].strip()))

  date_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})')
  m = date_re.match(date)
  if not m:
    raise Exception('could not match "%s" with "%s"' % (date, date_re.pattern))
  ymd = ''.join(m.groups())
  return 'GOOG;noto-fonts:%s:%s' % (ymd, commit[:12])
Example #49
def main():
    default_cmap = "[tools]/nototools/data/noto_cmap_phase3.xml"
    default_namepats = ["cps_%s.txt"]

    epilog = """If a namepat contains the string "%s" then the script id will
  be substituted for it. If one namepat is provided it is used for all scripts,
  otherwise there should be as many namepats as there are scripts."""

    parser = argparse.ArgumentParser(epilog=epilog)
    parser.add_argument(
        "-c",
        "--cmap_file",
        help="cmap data file to use (default %s)" % default_cmap,
        default=default_cmap,
        metavar="file",
    )
    parser.add_argument(
        "-d",
        "--dest_dir",
        help="directory for output, (defaults to current " "directory)",
        metavar="dir",
        default=".",
    )
    parser.add_argument(
        "-s",
        "--scripts",
        help="script ids of data to output",
        nargs="+",
        metavar="id",
        required=True,
    )
    parser.add_argument(
        "-n",
        "--namepats",
        help="name patterns used to generate output "
        'filenames (default "cps_%%s.txt")',
        default=default_namepats,
        metavar="npat",
        nargs="+",
    )
    args = parser.parse_args()

    cmap_filepath = tool_utils.resolve_path(args.cmap_file)
    cmapdata = cmap_data.read_cmap_data_file(cmap_filepath)
    generate(cmapdata, args.dest_dir, args.scripts, args.namepats)
Example #50
def get_repo_info(skip_checks):
  """Looks at the three noto fonts repos (fonts, cjk, emoji) and
  gets information about the current state of each.  Returns
  a mapping from 'fonts', 'cjk', and 'emoji' to the corresponding
  info.

  If skip_checks is not set, checks that the repos are in a good
  state (at a known annotated tag and there are no pending commits),
  otherwise an exception is raised."""

  repo_info = {}
  errors = []
  for repo_name in 'fonts cjk emoji'.split():
    msg_lines = []
    repo = tool_utils.resolve_path('[%s]' % repo_name)
    repo_head_commit = tool_utils.git_head_commit(repo)
    repo_branch = tool_utils.git_get_branch(repo)
    msg_lines.append('Repo: noto-%s' % repo_name)
    if skip_checks:
      msg_lines.append('Branch: %s' % repo_branch)
      msg_lines.append(
          'Commit: %s\nSubject: %s' % (repo_head_commit[0], repo_head_commit[2]))
    else:
      if not tool_utils.git_is_clean(repo):
        errors.append('repo noto-%s is not clean' % repo_name)
        continue
      repo_tag = None
      for tag in tool_utils.git_tags(repo):
        if tag[0] == repo_head_commit[0]: # matching commits
          repo_tag = tag
          break
      if not repo_tag:
        errors.append('noto-%s is not at a release tag' % repo_name)
        continue
      tag_commit, tag_name, tag_date = tag
      tag_info = tool_utils.git_tag_info(repo, tag_name)
      msg_lines.append(
          'Tag: %s\nDate: %s\nCommit:%s\n\n%s' % (
              tag_name, tag_date, tag_commit, tag_info))
    repo_info[repo_name] = '\n'.join(msg_lines)
  if errors:
    for _, v in sorted(repo_info.items()):
      print(v)
    raise Exception('Some repos are not clean\n' + '\n'.join(errors))
  return repo_info
Example #51
def _get_fonts_repo_version_info(repo_tag):
  prefix = tool_utils.resolve_path(repo_tag)

  commit, date, commit_msg = tool_utils.git_head_commit(prefix)

  # check that commit is on the upstream master
  if not tool_utils.git_check_remote_commit(prefix, commit):
    raise Exception(
        'commit %s (%s) not on upstream master branch' % (
            commit[:12], commit_msg.splitlines()[0].strip()))

  date_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})')
  m = date_re.match(date)
  if not m:
    raise Exception('could not match "%s" with "%s"' % (date, date_re.pattern))
  ymd = ''.join(m.groups())

  # hack tag to get the formal repo name.  strip enclosing brackets...
  repo_name = 'noto-' + repo_tag[1:-1].replace('_', '-')
  return 'GOOG;%s:%s:%s' % (repo_name, ymd, commit[:12])
Example #53
def get_noto_fonts(paths=NOTO_FONT_PATHS):
  """Scan paths for fonts, and create a NotoFont for each one, returning a list
  of these.  'paths' defaults to the standard noto font paths, using notoconfig."""

  font_dirs = list(filter(None, [tool_utils.resolve_path(p) for p in paths]))
  print('Getting fonts from: %s' % font_dirs)

  all_fonts = []
  for font_dir in font_dirs:
    for filename in os.listdir(font_dir):
      if not _EXT_REGEX.match(filename):
        continue

      filepath = path.join(font_dir, filename)
      font = get_noto_font(filepath)
      if not font:
        raise ValueError('bad font filename in %s: \'%s\'.' %
                         (font_dir, filename))

      all_fonts.append(font)

  return all_fonts
def _get_script_to_samples():
  script_to_samples = collections.defaultdict(list)

  sample_dir = tool_utils.resolve_path('[tools]/sample_texts')
  for f in sorted(os.listdir(sample_dir)):
    base, ext = path.splitext(f)
    if ext != '.txt' or '_' not in base:
      print('skipping', f)
      continue
    bcp, sample_type = base.split('_')
    try:
      lang, script, region, variant = cldr_data.loc_tag_to_lsrv(bcp)
    except:
      print('bcp %s did not parse as lsrv' % bcp)
      continue
    if script == 'Latn':
      continue
    script_to_samples[script].append((bcp, sample_type))

  for script, samples in sorted(script_to_samples.items()):
    pref = {}
    for bcp, sample_type in samples:
      if bcp not in pref or sample_type == 'udhr':
        pref[bcp] = sample_type

    full_samples = []
    for bcp, sample_type in sorted(pref.items()):
      filename = '%s_%s.txt' % (bcp, sample_type)
      filepath = path.join(sample_dir, filename)
      with codecs.open(filepath, 'r', 'utf-8') as f:
        sample_text = f.read()
      full_samples.append((bcp, sample_type, sample_text))

    script_to_samples[script] = full_samples

  return script_to_samples
Example #55
def main():
  CMDS = ['dump', 'write', 'test', 'info']
  HELP = """
  dump  - read the family info file, and display the names to generate
          for some fonts.
  write - collect all the names of the provided fonts, and write a family name
          info file if one was provided (via -i or -p), else write to stdout.
  test  - collect all the names of the provided fonts, show the family name
          info file that would be generated, and show the names to generate
          for those fonts.
  info  - collect the preferred names of the provided fonts, and display them.
  """

  parser = argparse.ArgumentParser(
      epilog=HELP, formatter_class=argparse.RawDescriptionHelpFormatter)
  parser.add_argument(
      '-i', '--info_file', metavar='fname',
      help='name of xml family info file, overrides name based on phase')
  parser.add_argument(
      '-p', '--phase', metavar='phase', type=int,
      help='determine info file name by phase (2 or 3)')
  parser.add_argument(
      '-d', '--dirs', metavar='dir', help='font directories to examine '
      '(use "[noto]" for noto fonts/cjk/emoji font dirs)', nargs='+')
  parser.add_argument(
      '-f', '--files', metavar='fname', help='fonts to examine, prefix with '
      '\'@\' to read list from file', nargs='+')
  parser.add_argument(
      'cmd', metavar='cmd', help='operation to perform (%s)' % ', '.join(CMDS),
      choices=CMDS)
  args = parser.parse_args()

  if args.dirs:
    for i in range(len(args.dirs)):
      if args.dirs[i] == '[noto]':
        args.dirs[i] = None
        args.dirs.extend(noto_fonts.NOTO_FONT_PATHS)
        args.dirs = list(filter(None, args.dirs))
        break

  paths = _collect_paths(args.dirs, args.files)
  fonts = _get_noto_fonts(paths)
  if not fonts:
    print('Please specify at least one directory or file')
    return

  if not args.info_file:
    if args.phase:
      args.info_file = _PHASE_TO_FILENAME[args.phase]
      print('using name info file: "%s"' % args.info_file)

  if args.cmd == 'dump':
    if not args.info_file:
      print('must specify an info file to dump')
      return
    info_file = tool_utils.resolve_path(args.info_file)
    if not path.exists(info_file):
      print('"%s" does not exist.' % args.info_file)
      return
    _dump(fonts, info_file, args.phase)
  elif args.cmd == 'write':
    if not args.phase:
      print('Must specify phase when generating info.')
      return
    _write(fonts, args.info_file, args.phase)
  elif args.cmd == 'test':
    _test(fonts, args.phase)
  elif args.cmd == 'info':
    _info(fonts)
Example #56
def read_family_name_info_file(filename):
  """Returns a map from preferred family name to FontNameInfo."""
  filename = tool_utils.resolve_path(filename)
  return _read_tree(ET.parse(filename).getroot())
Example #57
def write_family_name_info_file(family_to_name_info, filename, pretty=False):
  filename = tool_utils.resolve_path(filename)
  _build_tree(family_to_name_info, pretty).write(
      filename, encoding='utf8', xml_declaration=True)
Example #58
"""Sync the noto repos to the given tags.

This helps prepare for generating website data.  We have the option
of requiring that the noto-fonts, noto-emoji, and noto-cjk repos are at
tagged releases.  This tool lets you specify release names, ensures the
release names are valid, and checks out those releases.  Main exits with
error code 100 if there is a failure."""

import argparse
import sys

from nototools import tool_utils

_REPOS = 'fonts emoji cjk'.split()
_REPO_PATHS = [tool_utils.resolve_path('[%s]' % r) for r in _REPOS]

def noto_check_clean():
  errors = []
  for r, p in zip(_REPOS, _REPO_PATHS):
    if not tool_utils.git_is_clean(p):
      errors.append(r)

  if errors:
    print('%s %s not clean' % (
        ' '.join(errors), 'is' if len(errors) == 1 else 'are'), file=sys.stderr)
    return False
  return True


def noto_checkout_master(dry_run=False):