示例#1
0
def list_png_in_dir(dirpath):
  """Lists full paths of the '.png' files in `dirpath`, sorted numerically.

  Files whose names start with 'IB' are excluded. Names are first sorted
  alphabetically and then stably re-sorted by the integer formed from all
  digits in the name, so the numeric key dominates and alphabetical order
  breaks ties; names with no digits sort as -1.

  Args:
    dirpath: Directory to scan.

  Returns:
    A sorted list of full paths to the matching PNG files.
  """
  filelist = [
      f for f in gfile.listdir(dirpath)
      if f.endswith('.png') and not f.startswith('IB')
  ]
  # First sort alphabetically, then (stable sort) by the embedded number.
  filelist.sort()
  filelist.sort(key=lambda f: int(''.join(filter(str.isdigit, f)) or -1))
  return [os.path.join(dirpath, f) for f in filelist]
def main(_):
  """Runs inference on every clip folder under the LR dir, then metrics."""
  if not FLAGS.checkpoint_path:
    return
  for folder_name in gfile.listdir(FLAGS.input_lr_dir):
    # Each sub-folder is one clip; its name doubles as the output prefix.
    inference(
        os.path.join(FLAGS.input_lr_dir, folder_name),
        FLAGS.input_hr_dir,
        FLAGS.input_dir_len,
        FLAGS.num_resblock,
        FLAGS.vsr_scale,
        FLAGS.checkpoint_path,
        FLAGS.output_dir,
        folder_name,
        FLAGS.output_name,
        FLAGS.output_ext)
  compute_metrics(FLAGS)
示例#3
0
def inference_data_loader(
    input_lr_dir,
    input_hr_dir=None,
    input_dir_len=-1,
):
    """Inference pipeline data loader.

    Reads PNG frames from `input_lr_dir` when it exists; otherwise falls
    back to `input_hr_dir` and flags the frames for 4x down-sampling. The
    frames at indices 5..1 are mirrored onto the front of the sequence.

    Args:
        input_lr_dir: Directory containing low-resolution frames.
        input_hr_dir: Optional fallback directory with high-resolution
            frames, used when the LR directory is missing.
        input_dir_len: When positive, only the first `input_dir_len`
            frames are loaded.

    Returns:
        A `Data` namedtuple with `paths_LR` (frame paths) and `inputs`
        (float32 RGB images scaled to [0, 1]).

    Raises:
        ValueError: If neither input directory exists.
    """
    if (input_lr_dir is None) or (not gfile.exists(input_lr_dir)):
        if (input_hr_dir is None) or (not gfile.exists(input_hr_dir)):
            raise ValueError('Input directory not found')
        filedir = input_hr_dir
        down_sp = True
    else:
        filedir = input_lr_dir
        down_sp = False

    names = [f for f in gfile.listdir(filedir) if f.endswith('.png')]
    # First sort according to abc, then (stable) sort according to 123.
    names.sort()
    names.sort(key=lambda f: int(''.join(filter(str.isdigit, f)) or -1))
    if input_dir_len > 0:
        names = names[:input_dir_len]

    image_list_lr = [os.path.join(filedir, name) for name in names]

    def preprocess_test(name):
        """Decodes one PNG to float32 RGB in [0, 1], optionally 4x down."""
        with tf.gfile.Open(name, 'rb') as fid:
            raw = np.asarray(bytearray(fid.read()), dtype=np.uint8)
        img = cv2.imdecode(raw, cv2.IMREAD_COLOR).astype(np.float32)
        img = img[:, :, ::-1]  # BGR -> RGB
        if down_sp:
            blurred = cv2.GaussianBlur(img, (0, 0), sigmaX=1.5)
            img = blurred[::4, ::4, ::]
        return img / 255.0

    image_lr = [preprocess_test(path) for path in image_list_lr]
    # Mirror frames 5..1 onto the front of both lists as warm-up context.
    image_list_lr = image_list_lr[5:0:-1] + image_list_lr
    image_lr = image_lr[5:0:-1] + image_lr

    Data = collections.namedtuple('Data', 'paths_LR, inputs')
    return Data(paths_LR=image_list_lr, inputs=image_lr)
示例#4
0
  def _next_trajectory_dir(self):
    """Assigns a new output dir for a trajectory under self._output_dir.

    Directory names are consecutive integers starting from zero. New directory
    index is assigned as the maximum of past indices plus one. Directories that
    are not integers are ignored.

    Returns:
      A path of the new directory.
    """
    trajectory_dirs = gfile.listdir(self._output_dir)

    def int_or_none(s):
      # int() raises ValueError for non-numeric strings (e.g. 'checkpoints')
      # and TypeError for non-string inputs; both mean "not a trajectory
      # directory". The original caught only TypeError, so any non-integer
      # directory name crashed instead of being ignored.
      try:
        return int(s)
      except (ValueError, TypeError):
        return None

    past_trajectory_ids = [
        trajectory_id for trajectory_id in map(int_or_none, trajectory_dirs)
        if trajectory_id is not None]
    # -1 seeds the max so an empty directory yields index 0.
    next_trajectory_id = max([-1] + past_trajectory_ids) + 1

    return os.path.join(self._output_dir, str(next_trajectory_id))
def compute_metrics(input_flags):
  """Computes PSNR/SSIM between inference outputs and target frames.

  Iterates over the evaluation clips (Vid4 or REDS4), compares each output
  frame against its target frame (skipping `cutfr` frames at each clip
  boundary), prints per-frame and per-folder statistics, and accumulates
  everything into `<output_dir>/metrics.csv` with summary rows appended at
  the end.

  Args:
    input_flags: Parsed flags; uses `is_vid4_eval`, `output_dir`, and
      `targets`.
  """
  if input_flags.is_vid4_eval:
    result_list_all = ['calendar', 'city', 'foliage', 'walk']
  else:
    # Reds4 datasets.
    result_list_all = ['000', '011', '015', '020']
  result_list_all.sort()

  result_list_all = [
      os.path.join(input_flags.output_dir, clip) for clip in result_list_all
  ]

  print('all eval path:')
  print(result_list_all)
  # Keep only the clips that inference actually produced.
  result_list = [d for d in result_list_all if gfile.isdir(d)]
  target_list = sorted(
      os.path.join(input_flags.targets, d)
      for d in gfile.listdir(input_flags.targets))
  folder_n = len(result_list)

  cutfr = 2  # Frames skipped at each end of a clip.

  keys = ['PSNR', 'SSIM']
  sum_dict = dict.fromkeys(['FrameAvg_' + _ for _ in keys], 0)
  len_dict = dict.fromkeys(keys, 0)
  # One per-folder mean is appended per folder; see the append below.
  avg_dict = {'Avg_' + _: [] for _ in keys}
  folder_dict = dict.fromkeys(['FolderAvg_' + _ for _ in keys], 0)

  for folder_i in range(folder_n):
    print(folder_i)
    result = metrics.list_png_in_dir(result_list[folder_i])
    target = metrics.list_png_in_dir(target_list[folder_i])
    image_no = len(target)

    list_dict = {key_i: [] for key_i in keys}

    for i in range(cutfr, image_no - cutfr):

      with tf.gfile.Open(result[i], 'rb') as fid:
        raw_im = np.asarray(bytearray(fid.read()), dtype=np.uint8)
        output_img = cv2.imdecode(raw_im, cv2.IMREAD_COLOR).astype(
            np.float32)[:, :, ::-1]
      with tf.gfile.Open(target[i], 'rb') as fid:
        raw_im = np.asarray(bytearray(fid.read()), dtype=np.uint8)
        target_img = cv2.imdecode(raw_im, cv2.IMREAD_COLOR).astype(
            np.float32)[:, :, ::-1]

      msg = 'frame %d, tar %s, out %s, ' % (i, str(
          target_img.shape), str(output_img.shape))
      if (target_img.shape[0] < output_img.shape[0]) or (
          target_img.shape[1] <
          output_img.shape[1]):  # target is not dividable by 4
        output_img = output_img[:target_img.shape[0], :target_img.shape[1]]

      target_img, _, _ = metrics.crop_8x8(target_img)
      output_img, _, _ = metrics.crop_8x8(output_img)

      # Vid4 is conventionally evaluated on the Y channel only.
      y_channel = input_flags.is_vid4_eval

      if 'PSNR' in keys:  # psnr
        list_dict['PSNR'].append(
            metrics.psnr(target_img, output_img, y_channel))
        msg += 'psnr %02.2f' % (list_dict['PSNR'][-1])

      if 'SSIM' in keys:  # ssim
        list_dict['SSIM'].append(
            metrics.ssim(target_img, output_img, y_channel))
        msg += ', ssim %02.2f' % (list_dict['SSIM'][-1])

      print(msg)

    # Truncate on the first folder, append afterwards, so one CSV holds
    # every folder's metrics.
    mode = 'w' if folder_i == 0 else 'a'

    pd_dict = {}
    for cur_num_data in keys:
      num_data = cur_num_data + '_%02d' % folder_i
      cur_list = np.float32(list_dict[cur_num_data])
      pd_dict[num_data] = pd.Series(cur_list)

      num_data_sum = cur_list.sum()
      num_data_len = cur_list.shape[0]

      num_data_mean = num_data_sum / num_data_len
      print('%s, max %02.4f, min %02.4f, avg %02.4f' %
            (num_data, cur_list.max(), cur_list.min(), num_data_mean))

      # Bug fix: the original assigned a one-element list for folder 0 and
      # then did `list += scalar` for later folders, which raises TypeError;
      # appending keeps exactly one mean per folder.
      avg_dict['Avg_' + cur_num_data].append(num_data_mean)

      sum_dict['FrameAvg_' + cur_num_data] += num_data_sum
      len_dict[cur_num_data] += num_data_len
      folder_dict['FolderAvg_' + cur_num_data] += num_data_mean

    csv_filepath = os.path.join(input_flags.output_dir, 'metrics.csv')
    # Bug fix: the file was always opened with 'w', so each folder's write
    # truncated the previous one; honor the `mode` computed above.
    with tf.gfile.Open(csv_filepath, mode) as csvf:
      pd.DataFrame(pd_dict).to_csv(csvf, mode=mode)

  for num_data in keys:
    sum_dict['FrameAvg_' + num_data] = pd.Series(
        [sum_dict['FrameAvg_' + num_data] / len_dict[num_data]])
    folder_dict['FolderAvg_' + num_data] = pd.Series(
        [folder_dict['FolderAvg_' + num_data] / folder_n])
    avg_dict['Avg_' + num_data] = pd.Series(
        np.float32(avg_dict['Avg_' + num_data]))
    print('%s, total frame %d, total avg %02.4f, folder avg %02.4f' %
          (num_data, len_dict[num_data], sum_dict['FrameAvg_' + num_data][0],
           folder_dict['FolderAvg_' + num_data][0]))
  csv_filepath = os.path.join(input_flags.output_dir, 'metrics.csv')
  # Bug fix: opening with 'w' here discarded all per-folder rows written in
  # the loop; append the summary rows after them instead.
  with tf.gfile.Open(csv_filepath, 'a') as csvf:
    pd.DataFrame(avg_dict).to_csv(csvf, mode='a')
    pd.DataFrame(folder_dict).to_csv(csvf, mode='a')
    pd.DataFrame(sum_dict).to_csv(csvf, mode='a')
  print('Finished.')