Example #1
    def testMixedStrTypes(self):
        temp_dir = compat.as_bytes(test.get_temp_dir())

        for sub_dir in ['str', b'bytes', u'unicode']:
            base_dir = os.path.join((temp_dir if isinstance(sub_dir, bytes)
                                     else temp_dir.decode()), sub_dir)
            self.assertFalse(gfile.Exists(base_dir))
            gfile.MakeDirs(os.path.join(compat.as_str_any(base_dir), "42"))
            gc.get_paths(base_dir, _create_parser(base_dir))
Example #2
  def testMixedStrTypes(self):
    temp_dir = compat.as_bytes(test.get_temp_dir())

    for sub_dir in ["str", b"bytes", u"unicode"]:
      base_dir = os.path.join(
          (temp_dir
           if isinstance(sub_dir, bytes) else temp_dir.decode()), sub_dir)
      self.assertFalse(gfile.Exists(base_dir))
      gfile.MakeDirs(os.path.join(compat.as_str_any(base_dir), "42"))
      gc.get_paths(base_dir, _create_parser(base_dir))
Example #3
def garbage_collect_exports(export_dir_base, exports_to_keep):
  """Deletes older exports, retaining only a given number of the most recent.

  Export subdirectories are assumed to be named with monotonically increasing
  integers; the most recent are taken to be those with the largest values.

  Args:
    export_dir_base: the base directory under which each export is in a
      versioned subdirectory.
    exports_to_keep: the number of recent exports to retain.
  """
  if exports_to_keep is None:
    return

  keep_filter = gc.largest_export_versions(exports_to_keep)
  delete_filter = gc.negation(keep_filter)

  # Export dir must not end with / or it will break the re match below.
  if export_dir_base.endswith('/'):
    export_dir_base = export_dir_base[:-1]

  # create a simple parser that pulls the export_version from the directory.
  def parser(path):
    match = re.match('^' + export_dir_base + '/(\\d{13})$', path.path)
    if not match:
      return None
    return path._replace(export_version=int(match.group(1)))

  for p in delete_filter(gc.get_paths(export_dir_base, parser=parser)):
    gfile.DeleteRecursively(p.path)
Example #4
def garbage_collect_exports(export_dir_base, exports_to_keep):
    """Deletes older exports, retaining only a given number of the most recent.

  Export subdirectories are assumed to be named with monotonically increasing
  integers; the most recent are taken to be those with the largest values.

  Args:
    export_dir_base: the base directory under which each export is in a
      versioned subdirectory.
    exports_to_keep: the number of recent exports to retain.
  """
    if exports_to_keep is None:
        return

    keep_filter = gc.largest_export_versions(exports_to_keep)
    delete_filter = gc.negation(keep_filter)

    # Export dir must not end with / or it will break the re match below.
    if export_dir_base.endswith('/'):
        export_dir_base = export_dir_base[:-1]

    # create a simple parser that pulls the export_version from the directory.
    def parser(path):
        match = re.match('^' + export_dir_base + '/(\\d{13})$', path.path)
        if not match:
            return None
        return path._replace(export_version=int(match.group(1)))

    for p in delete_filter(gc.get_paths(export_dir_base, parser=parser)):
        gfile.DeleteRecursively(p.path)
Example #5
def garbage_collect_exports(export_dir_base, exports_to_keep):
    """Deletes older exports, retaining only a given number of the most recent.

  Export subdirectories are assumed to be named with monotonically increasing
  integers; the most recent are taken to be those with the largest values.

  Args:
    export_dir_base: the base directory under which each export is in a
      versioned subdirectory.
    exports_to_keep: the number of recent exports to retain.
  """
    if exports_to_keep is None:
        return

    keep_filter = gc.largest_export_versions(exports_to_keep)
    delete_filter = gc.negation(keep_filter)

    # create a simple parser that pulls the export_version from the directory.
    def parser(path):
        filename = os.path.basename(path.path)
        if not (len(filename) == 10 and filename.isdigit()):
            return None
        return path._replace(export_version=int(filename))

    for p in delete_filter(gc.get_paths(export_dir_base, parser=parser)):
        gfile.DeleteRecursively(p.path)
Example #6
def garbage_collect_exports(export_dir_base, exports_to_keep):
  """Deletes older exports, retaining only a given number of the most recent.

  Export subdirectories are assumed to be named with monotonically increasing
  integers; the most recent are taken to be those with the largest values.

  Args:
    export_dir_base: the base directory under which each export is in a
      versioned subdirectory.
    exports_to_keep: the number of recent exports to retain.
  """
  if exports_to_keep is None:
    return

  keep_filter = gc.largest_export_versions(exports_to_keep)
  delete_filter = gc.negation(keep_filter)

  # create a simple parser that pulls the export_version from the directory.
  def parser(path):
    filename = os.path.basename(path.path)
    if not (len(filename) == 10 and filename.isdigit()):
      return None
    return path._replace(export_version=int(filename))

  for p in delete_filter(gc.get_paths(export_dir_base, parser=parser)):
    gfile.DeleteRecursively(p.path)
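
The cleanup routine above takes a base directory and a retention count. Below is a minimal, hypothetical usage sketch; the path is made up, and the TensorFlow imports (gc, gfile, os) are assumed to be in scope as in the snippets above.

# Hypothetical usage sketch of garbage_collect_exports; the path below is
# made up, not taken from the examples.
export_dir_base = "/tmp/my_model/export"  # hypothetical export root

# Scan the timestamped subdirectories and recursively delete all but the
# five most recent exports.
garbage_collect_exports(export_dir_base, exports_to_keep=5)

# Passing None is a no-op: the function returns immediately and keeps
# every export.
garbage_collect_exports(export_dir_base, exports_to_keep=None)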
Example #7
    def testPathsWithParse(self):
        base_dir = os.path.join(test.get_temp_dir(), "paths_parse")
        self.assertFalse(gfile.Exists(base_dir))
        for p in xrange(3):
            gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
        # add a base_directory to ignore
        gfile.MakeDirs(os.path.join(base_dir, "ignore"))

        self.assertEqual(gc.get_paths(base_dir, _create_parser(base_dir)), [
            gc.Path(os.path.join(base_dir, "0"), 0),
            gc.Path(os.path.join(base_dir, "1"), 1),
            gc.Path(os.path.join(base_dir, "2"), 2)
        ])
Example #8
  def testPathsWithParse(self):
    base_dir = os.path.join(test.get_temp_dir(), "paths_parse")
    self.assertFalse(gfile.Exists(base_dir))
    for p in xrange(3):
      gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
    # add a base_directory to ignore
    gfile.MakeDirs(os.path.join(base_dir, "ignore"))

    self.assertEqual(
        gc.get_paths(base_dir, _create_parser(base_dir)), [
            gc.Path(os.path.join(base_dir, "0"), 0),
            gc.Path(os.path.join(base_dir, "1"), 1),
            gc.Path(os.path.join(base_dir, "2"), 2)
        ])
Example #9
def get_most_recent_export(export_dir_base):
    """Locate the most recent SavedModel export in a directory of many exports.

  This method assumes that SavedModel subdirectories are named as a timestamp
  (seconds from epoch), as produced by get_timestamped_export_dir().

  Args:
    export_dir_base: A base directory containing multiple timestamped
                     directories.

  Returns:
    A gc.Path, which is just a namedtuple of (path, export_version).
  """
    select_filter = gc.largest_export_versions(1)
    results = select_filter(gc.get_paths(export_dir_base, parser=_export_version_parser))
    return next(iter(results or []), None)
Example #10
def get_most_recent_export(export_dir_base):
  """Locate the most recent SavedModel export in a directory of many exports.

  This method assumes that SavedModel subdirectories are named as a timestamp
  (seconds from epoch), as produced by get_timestamped_export_dir().

  Args:
    export_dir_base: A base directory containing multiple timestamped
                     directories.

  Returns:
    A gc.Path, which is just a namedtuple of (path, export_version).
  """
  select_filter = gc.largest_export_versions(1)
  results = select_filter(
      gc.get_paths(export_dir_base, parser=_export_version_parser))
  return next(iter(results or []), None)
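
A small usage sketch of get_most_recent_export follows; the path is hypothetical, and the function relies on _export_version_parser being defined, as in the snippets above.

# Hypothetical usage sketch of get_most_recent_export; the path is made up.
latest = get_most_recent_export("/tmp/my_model/export")
if latest is None:
  print("no exports found yet")
else:
  # gc.Path is a namedtuple of (path, export_version).
  print("newest export:", latest.path, "version:", latest.export_version)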
Example #11
def garbage_collect_exports(export_dir_base, exports_to_keep):
    """Deletes older exports, retaining only a given number of the most recent.

  Export subdirectories are assumed to be named with monotonically increasing
  integers; the most recent are taken to be those with the largest values.

  Args:
    export_dir_base: the base directory under which each export is in a
      versioned subdirectory.
    exports_to_keep: the number of recent exports to retain.
  """
    if exports_to_keep is None:
        return

    keep_filter = gc.largest_export_versions(exports_to_keep)
    delete_filter = gc.negation(keep_filter)
    for p in delete_filter(gc.get_paths(export_dir_base, parser=_export_version_parser)):
        gfile.DeleteRecursively(p.path)
Example #12
def garbage_collect_exports(export_dir_base, exports_to_keep):
  """Deletes older exports, retaining only a given number of the most recent.

  Export subdirectories are assumed to be named with monotonically increasing
  integers; the most recent are taken to be those with the largest values.

  Args:
    export_dir_base: the base directory under which each export is in a
      versioned subdirectory.
    exports_to_keep: the number of recent exports to retain.
  """
  if exports_to_keep is None:
    return

  keep_filter = gc.largest_export_versions(exports_to_keep)
  delete_filter = gc.negation(keep_filter)
  for p in delete_filter(gc.get_paths(export_dir_base,
                                      parser=_export_version_parser)):
    gfile.DeleteRecursively(p.path)
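
The snippets above pass a helper named _export_version_parser to gc.get_paths, but its definition is not included in this section. The sketch below is an assumption, inferred from the docstrings (subdirectories named as seconds-from-epoch timestamps) and from the inline filename-based parser shown earlier; the actual helper may differ.

# Hypothetical sketch of the _export_version_parser helper referenced above,
# modeled on the inline filename-based parser shown earlier; not necessarily
# the verbatim library implementation.
import os


def _export_version_parser(path):
  # Keep only subdirectories whose basename is a 10-digit number
  # (a seconds-from-epoch timestamp); anything else is ignored.
  filename = os.path.basename(path.path)
  if not (len(filename) == 10 and filename.isdigit()):
    return None
  return path._replace(export_version=int(filename))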
Example #13
    def testPathsWithParse(self):
        base_dir = os.path.join(tf.test.get_temp_dir(), "paths_parse")
        self.assertFalse(gfile.Exists(base_dir))
        for p in xrange(3):
            gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
        # add a base_directory to ignore
        gfile.MakeDirs(os.path.join(base_dir, "ignore"))

        # create a simple parser that pulls the export_version from the directory.
        def parser(path):
            match = re.match("^" + base_dir + "/(\\d+)$", path.path)
            if not match:
                return None
            return path._replace(export_version=int(match.group(1)))

        self.assertEqual(gc.get_paths(base_dir, parser=parser), [
            gc.Path(os.path.join(base_dir, "0"), 0),
            gc.Path(os.path.join(base_dir, "1"), 1),
            gc.Path(os.path.join(base_dir, "2"), 2)
        ])
Example #14
  def testPathsWithParse(self):
    base_dir = os.path.join(test.get_temp_dir(), "paths_parse")
    self.assertFalse(gfile.Exists(base_dir))
    for p in xrange(3):
      gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
    # add a base_directory to ignore
    gfile.MakeDirs(os.path.join(base_dir, "ignore"))

    # create a simple parser that pulls the export_version from the directory.
    def parser(path):
      match = re.match("^" + base_dir + "/(\\d+)$", path.path)
      if not match:
        return None
      return path._replace(export_version=int(match.group(1)))

    self.assertEqual(
        gc.get_paths(
            base_dir, parser=parser), [
                gc.Path(os.path.join(base_dir, "0"), 0),
                gc.Path(os.path.join(base_dir, "1"), 1),
                gc.Path(os.path.join(base_dir, "2"), 2)
            ])
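
The test snippets in Examples #1, #2, #7, and #8 call a _create_parser helper that is not defined in this section. A plausible sketch, modeled on the inline regex parser in Examples #13 and #14, is given below; it is an assumption, not the verbatim test helper.

# Hypothetical sketch of the _create_parser test helper used above, modeled
# on the inline parser in Examples #13 and #14; the real helper may differ.
import re


def _create_parser(base_dir):

  def parser(path):
    # Accept immediate subdirectories of base_dir whose name is an integer
    # and record that integer as the export version.
    match = re.match("^" + base_dir + "/(\\d+)$", path.path)
    if not match:
      return None
    return path._replace(export_version=int(match.group(1)))

  return parser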