Пример #1
0
def main(unused_argv):
  """Evaluates a trained checkpoint and saves eval statistics.

  Loads the model from FLAGS.checkpoint, wraps it in an (optionally
  ensembled) evaluator, and evaluates a data fold and/or sample npy files,
  writing statistics under FLAGS.eval_logdir/eval_stats.

  Args:
    unused_argv: Unused command-line arguments (required by the app runner).

  Raises:
    ValueError: If FLAGS.checkpoint or FLAGS.eval_logdir is unset, or if
      neither FLAGS.fold nor FLAGS.sample_npy_path is given.
  """
  # An empty string and None are both falsy, so one truthiness check
  # replaces the redundant `is None or not ...` pair.
  if not FLAGS.checkpoint:
    raise ValueError(
        'Need to provide a path to checkpoint directory.')
  wmodel = lib_graph.load_checkpoint(FLAGS.checkpoint)
  if FLAGS.eval_logdir is None:
    raise ValueError(
        'Set flag eval_logdir to specify a path for saving eval statistics.')
  # No `else` needed: the raise above already exits this path.
  eval_logdir = os.path.join(FLAGS.eval_logdir, 'eval_stats')
  tf.gfile.MakeDirs(eval_logdir)

  evaluator = lib_evaluation.BaseEvaluator.make(
      FLAGS.unit, wmodel=wmodel, chronological=FLAGS.chronological)
  evaluator = lib_evaluation.EnsemblingEvaluator(evaluator, FLAGS.ensemble_size)

  # NOTE(review): the message says "but not both", yet the code below
  # evaluates both targets when both flags are given — confirm intent.
  if not FLAGS.sample_npy_path and FLAGS.fold is None:
    raise ValueError(
        'Either --fold must be specified, or paths of npy files to load must '
        'be given, but not both.')
  if FLAGS.fold is not None:
    evaluate_fold(FLAGS.fold, evaluator, wmodel.hparams, eval_logdir)
  if FLAGS.sample_npy_path is not None:
    evaluate_paths([FLAGS.sample_npy_path], evaluator, wmodel.hparams,
                   eval_logdir)
  print('Done')
Пример #2
0
def main(unused_argv):
  """Exports a trained checkpoint as a SavedModel.

  Args:
    unused_argv: Unused command-line arguments (required by the app runner).

  Raises:
    ValueError: If FLAGS.checkpoint or FLAGS.destination is unset or empty.
  """
  # `not x` already covers None (None is falsy), so the explicit
  # `is None or` in the original condition was redundant.
  if not FLAGS.checkpoint:
    raise ValueError(
        'Need to provide a path to checkpoint directory.')
  if not FLAGS.destination:
    raise ValueError(
        'Need to provide a destination directory for the SavedModel.')
  model = lib_graph.load_checkpoint(FLAGS.checkpoint)
  export_saved_model(model, FLAGS.destination)
  tf.logging.info('Exported SavedModel to %s.', FLAGS.destination)
Пример #3
0
def export(checkpoint, destination, use_tf_sampling):
    """Restores the model at `checkpoint` and exports it to `destination`.

    When `use_tf_sampling` is true the checkpoint is wrapped in a
    CoconetSampleGraph with a live session; otherwise the plain graph is
    loaded via lib_graph.
    """
    if use_tf_sampling:
        model = lib_tfsampling.CoconetSampleGraph(checkpoint)
        model.instantiate_sess_and_restore_checkpoint()
    else:
        model = lib_graph.load_checkpoint(checkpoint)
    tf.logging.info('Loaded graph.')
    serving_tags = [tf.saved_model.tag_constants.SERVING]
    lib_saved_model.export_saved_model(model, destination, serving_tags,
                                       use_tf_sampling)
Пример #4
0
def export(checkpoint, destination, use_tf_sampling):
  """Loads a checkpointed model and writes it out as a SavedModel.

  A sampling graph (with its session restored) is used when
  `use_tf_sampling` is set; otherwise the checkpoint is loaded directly.
  """
  if use_tf_sampling:
    loaded = lib_tfsampling.CoconetSampleGraph(checkpoint)
    loaded.instantiate_sess_and_restore_checkpoint()
  else:
    loaded = lib_graph.load_checkpoint(checkpoint)
  tf.logging.info('Loaded graph.')
  tags = [tf.saved_model.tag_constants.SERVING]
  lib_saved_model.export_saved_model(loaded, destination, tags,
                                     use_tf_sampling)
Пример #5
0
def main(unused_argv):
    """Exports a checkpoint as a SavedModel, optionally with TF sampling.

    Args:
      unused_argv: Unused command-line arguments (required by the app runner).

    Raises:
      ValueError: If FLAGS.checkpoint or FLAGS.destination is unset or empty.
    """
    # Truthiness alone suffices: None and '' are both falsy, so the
    # original `is None or not ...` double-check was redundant.
    if not FLAGS.checkpoint:
        raise ValueError('Need to provide a path to checkpoint directory.')
    if not FLAGS.destination:
        raise ValueError(
            'Need to provide a destination directory for the SavedModel.')
    # Both branches assign `model`, so no None pre-initialization is needed.
    if FLAGS.use_tf_sampling:
        model = lib_tfsampling.CoconetSampleGraph(FLAGS.checkpoint)
        model.instantiate_sess_and_restore_checkpoint()
    else:
        model = lib_graph.load_checkpoint(FLAGS.checkpoint)
    tf.logging.info('Loaded graph.')
    lib_saved_model.export_saved_model(model, FLAGS.destination,
                                       [tf.saved_model.tag_constants.SERVING],
                                       FLAGS.use_tf_sampling)
    tf.logging.info('Exported SavedModel to %s.', FLAGS.destination)
Пример #6
0
def main(ckpt, evaldir, unit, chronological, ensemble_size, sample_path,
         folder, index, data_dir):
    """Evaluates a checkpoint and writes eval statistics under `evaldir`.

    If `ckpt` is empty, the single checkpoint subdirectory of `evaldir`
    (excluding EVAL_SUBDIR) is used instead.

    Args:
      ckpt: Path to the checkpoint directory, or falsy to auto-detect.
      evaldir: Directory for eval statistics (and checkpoint auto-detection).
      unit: Evaluation unit passed to BaseEvaluator.make.
      chronological: Whether evaluation is chronological.
      ensemble_size: Size of the evaluation ensemble.
      sample_path: Path to an npy file of samples to evaluate, or None.
      folder: Data fold to evaluate, or None.
      index: Forwarded to evaluate_fold.
      data_dir: Forwarded to evaluate_fold.

    Raises:
      ValueError: If `evaldir` is None, if no unique checkpoint directory
        can be determined, or if neither `folder` nor `sample_path` is given.
    """
    # BUGFIX: validate evaldir up front. Previously this check came after
    # tf.gfile.ListDirectory(evaldir) below, which would already have
    # crashed on None before the helpful error could be raised.
    if evaldir is None:
        raise ValueError(
            'Set flag eval_logdir to specify a path for saving eval statistics.'
        )
    checkpoint_dir = ckpt
    if not checkpoint_dir:
        # If a checkpoint directory is not specified, see if there is only one
        # subdir in this folder and use that.
        possible_checkpoint_dirs = tf.gfile.ListDirectory(evaldir)
        possible_checkpoint_dirs = [
            i for i in possible_checkpoint_dirs
            if tf.gfile.IsDirectory(os.path.join(evaldir, i))
        ]
        if EVAL_SUBDIR in possible_checkpoint_dirs:
            possible_checkpoint_dirs.remove(EVAL_SUBDIR)
        if len(possible_checkpoint_dirs) == 1:
            checkpoint_dir = os.path.join(evaldir, possible_checkpoint_dirs[0])
            tf.logging.info('Using checkpoint dir: %s', checkpoint_dir)
        else:
            raise ValueError(
                'Need to provide a path to checkpoint directory or use an '
                'eval_logdir with only 1 checkpoint subdirectory.')
    wmodel = lib_graph.load_checkpoint(checkpoint_dir)
    eval_logdir = os.path.join(evaldir, EVAL_SUBDIR)
    tf.gfile.MakeDirs(eval_logdir)

    evaluator = lib_evaluation.BaseEvaluator.make(unit,
                                                  wmodel=wmodel,
                                                  chronological=chronological)
    evaluator = lib_evaluation.EnsemblingEvaluator(evaluator, ensemble_size)

    # NOTE(review): message says "but not both", yet both targets are
    # evaluated when both are provided — confirm intent.
    if not sample_path and folder is None:
        raise ValueError(
            'Either --fold must be specified, or paths of npy files to load must '
            'be given, but not both.')
    if folder is not None:
        evaluate_fold(folder, evaluator, wmodel.hparams, eval_logdir,
                      checkpoint_dir, index, unit, ensemble_size,
                      chronological, data_dir)
    if sample_path is not None:
        evaluate_paths([sample_path], evaluator, wmodel.hparams, eval_logdir,
                       unit, ensemble_size, chronological)
    tf.logging.info('Done')
Пример #7
0
def main(unused_argv):
  """Evaluates a checkpoint and saves statistics under FLAGS.eval_logdir.

  If FLAGS.checkpoint is unset, the single checkpoint subdirectory of
  FLAGS.eval_logdir (excluding EVAL_SUBDIR) is auto-detected.

  Args:
    unused_argv: Unused command-line arguments (required by the app runner).

  Raises:
    ValueError: If FLAGS.eval_logdir is unset, if no unique checkpoint
      directory can be determined, or if neither FLAGS.fold nor
      FLAGS.sample_npy_path is given.
  """
  # BUGFIX: validate eval_logdir up front. Previously the None check came
  # after tf.gfile.ListDirectory(FLAGS.eval_logdir), which would already
  # have crashed on None before the helpful error could be raised.
  if FLAGS.eval_logdir is None:
    raise ValueError(
        'Set flag eval_logdir to specify a path for saving eval statistics.')
  checkpoint_dir = FLAGS.checkpoint
  if not checkpoint_dir:
    # If a checkpoint directory is not specified, see if there is only one
    # subdir in this folder and use that.
    possible_checkpoint_dirs = tf.gfile.ListDirectory(FLAGS.eval_logdir)
    possible_checkpoint_dirs = [
        i for i in possible_checkpoint_dirs if
        tf.gfile.IsDirectory(os.path.join(FLAGS.eval_logdir, i))]
    if EVAL_SUBDIR in possible_checkpoint_dirs:
      possible_checkpoint_dirs.remove(EVAL_SUBDIR)
    if len(possible_checkpoint_dirs) == 1:
      checkpoint_dir = os.path.join(
          FLAGS.eval_logdir, possible_checkpoint_dirs[0])
      tf.logging.info('Using checkpoint dir: %s', checkpoint_dir)
    else:
      raise ValueError(
          'Need to provide a path to checkpoint directory or use an '
          'eval_logdir with only 1 checkpoint subdirectory.')
  wmodel = lib_graph.load_checkpoint(checkpoint_dir)
  eval_logdir = os.path.join(FLAGS.eval_logdir, EVAL_SUBDIR)
  tf.gfile.MakeDirs(eval_logdir)

  evaluator = lib_evaluation.BaseEvaluator.make(
      FLAGS.unit, wmodel=wmodel, chronological=FLAGS.chronological)
  evaluator = lib_evaluation.EnsemblingEvaluator(evaluator, FLAGS.ensemble_size)

  # NOTE(review): message says "but not both", yet both targets are
  # evaluated when both flags are provided — confirm intent.
  if not FLAGS.sample_npy_path and FLAGS.fold is None:
    raise ValueError(
        'Either --fold must be specified, or paths of npy files to load must '
        'be given, but not both.')
  if FLAGS.fold is not None:
    evaluate_fold(
        FLAGS.fold, evaluator, wmodel.hparams, eval_logdir, checkpoint_dir)
  if FLAGS.sample_npy_path is not None:
    evaluate_paths([FLAGS.sample_npy_path], evaluator, wmodel.hparams,
                   eval_logdir)
  tf.logging.info('Done')
Пример #8
0
def instantiate_model(checkpoint, instantiate_sess=True):
  """Loads and returns the model stored at `checkpoint`.

  Args:
    checkpoint: Path to the checkpoint directory.
    instantiate_sess: Forwarded to lib_graph.load_checkpoint.

  Returns:
    The loaded model wrapper.
  """
  return lib_graph.load_checkpoint(
      checkpoint, instantiate_sess=instantiate_sess)
Пример #9
0
def instantiate_model(checkpoint, instantiate_sess=True):
    """Restores the model at `checkpoint`, optionally with a live session.

    Args:
      checkpoint: Path to the checkpoint directory.
      instantiate_sess: Forwarded to lib_graph.load_checkpoint.

    Returns:
      The loaded model wrapper.
    """
    return lib_graph.load_checkpoint(checkpoint,
                                     instantiate_sess=instantiate_sess)
Пример #10
0
def instantiate_model(checkpoint):
  """Returns the model loaded from the `checkpoint` directory."""
  return lib_graph.load_checkpoint(checkpoint)