Code Example #1
def main(args):
  global executor
  executor = concurrent.futures.ThreadPoolExecutor(16)

  # Make dataset directories
  for action in ACTIONS:
    os.makedirs(os.path.join(args.output_dir, action), exist_ok=True)

  # Load match file
  match_file = os.path.join(args.data_root_dir, args.date,
                            'match_{}_{}.pkl'.format(args.thermal_sensor, args.depth_sensor))
  utils.check_exists(match_file)
  with open(match_file, 'rb') as f:
    tasks = pickle.load(f)

  thermal_video_dir = os.path.join(args.data_root_dir, args.date, 'thermal',
                                   args.thermal_sensor, 'videos')
  annotation_dir = os.path.join(args.data_root_dir, args.date, 'results',
                                '{}_{}'.format(args.thermal_sensor, args.depth_sensor))
  task_ids = sorted([int(f.split('.')[0]) for f in os.listdir(annotation_dir)])

  # Read and process each task.
  videos = {}
  for task_id in task_ids:
    print("Task: {}".format(task_id))
    clips = utils.read_csv(os.path.join(annotation_dir, '{}.csv'.format(task_id)))
    task = tasks[task_id]
    thermal_time_str = task[0][0]
    # Raw thermal data
    frames = get_thermal_frames(videos, thermal_video_dir, thermal_time_str)
    process_annotations(args, task, clips, frames)

  print("Waiting for jobs to finish...")
  executor.shutdown(wait=True)
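
get_thermal_frames is referenced above but not shown in these examples. A minimal sketch of what it could look like, assuming it decodes the whole clip once with OpenCV and caches the frame list in the `videos` dict keyed by time string (the "<time_str>.000000.mov" naming is taken from extract_frames in Code Example #5):

import os
import cv2

def get_thermal_frames(videos, thermal_video_dir, thermal_time_str):
  """Return every frame of the thermal clip for `thermal_time_str`, caching decoded videos."""
  if thermal_time_str not in videos:
    file_path = os.path.join(thermal_video_dir, thermal_time_str + ".000000.mov")
    utils.check_exists(file_path)  # same project helper used in the examples above
    cap = cv2.VideoCapture(file_path)
    frames = []
    while True:
      ret, frame = cap.read()
      if not ret:
        break
      frames.append(frame)
    cap.release()
    videos[thermal_time_str] = frames  # frames are looked up later by frame_index
  return videos[thermal_time_str]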
Code Example #2
def write_frames(task, depth_data_dir, thermal_frames, depth_output_dir, thermal_output_dir):
  for index, (_, frame_index, thermal_t, depth_t) in enumerate(task, 1):
    # Depth
    depth_file = os.path.join(depth_data_dir, 'd-{}.jpg'.format(depth_t))
    utils.check_exists(depth_file)
    img = cv2.imread(depth_file, cv2.IMREAD_UNCHANGED)
    # Inpainting
    img = imgproc.inpaint(img, threshold=5)
    new_file = os.path.join(depth_output_dir, 'D-{:08d}.jpg'.format(index))
    cv2.imwrite(new_file, img)
    # Thermal
    new_file = os.path.join(thermal_output_dir, 'T-{:08d}.png'.format(index))
    cv2.imwrite(new_file, thermal_frames[frame_index])
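
imgproc.inpaint is project code not shown here. One plausible implementation, assuming the depth holes are the pixels below `threshold` and the depth images are 8-bit single-channel, using OpenCV's inpainting:

import cv2
import numpy as np

def inpaint(img, threshold=5):
  """Fill missing depth values (pixels below `threshold`) via Telea inpainting."""
  mask = (img < threshold).astype(np.uint8)             # 1 where depth is missing
  return cv2.inpaint(img, mask, 3, cv2.INPAINT_TELEA)   # 3-pixel inpainting radius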
Code Example #3
def process_depth(args):
  """
  Copy depth images stored under /data/onlok/ into /data/onlok_processed/,
  renaming each file so that its name contains the frame's timestamp.
  """
  jobs = []
  sensor_name = "10.0.1." + args.depth_sensor
  # Check if there is a data.txt file
  utils.check_exists(os.path.join(args.depth_data_root_dir, args.date, "data.txt"))
  # Output directory
  output_dir = os.path.join(args.output_dir, args.date, "depth", sensor_name)
  if args.overwrite and os.path.exists(output_dir):
    os.system('rm -r {}'.format(output_dir))
  os.makedirs(output_dir, exist_ok=True) # Ex. /data/onlok_processed/17-10-11/depth/10.0.1.188

  # Task file
  task_file = os.path.join(output_dir, "tasks.txt")
  if not args.overwrite and os.path.exists(task_file):
    print("Depth images from {} already processed".format(args.date))
    return

  print('Reading timestamps...')
  all_timestamps = get_all_timestamps(args.depth_data_root_dir, args.date, sensor_name)
  print(sorted(all_timestamps.keys()))

  total_tasks = []
  for hour in range(24):
    if hour in all_timestamps:
      print("Hour: {}".format(hour))
      timestamps = all_timestamps[hour]
      data_dir = os.path.join(args.depth_data_root_dir, args.date, "{:02d}".format(hour),
                              sensor_name, "d")
      # Make sure number of files and length of timestamps match
      files = glob.glob(os.path.join(data_dir, "*.jpg"))
      assert len(files) == len(timestamps), "{}, {}".format(len(files), len(timestamps))
      if not args.get_tasks_only:
        # Copy the files into the output directory (runs in the thread pool)
        job = executor.submit(move_depth_images, data_dir, output_dir, timestamps, hour)
        jobs.append(job)
      # Get tasks
      tasks = get_tasks(timestamps)
      total_tasks.extend(tasks)

  total_count = 0
  for job in jobs:
    total_count += job.result()
  print('Total number of frames: {}'.format(total_count))

  # Write tasks
  utils.write_tasks_to_file(total_tasks, task_file)
  print("Number of tasks: {}".format(len(total_tasks)))
Code Example #4
def move_depth_images(data_dir, output_dir, timestamps, hour):
  """
  Copy the depth images for the specified hour to the output directory,
  renaming each file to its timestamp.
  """
  count = 0
  for num, t in timestamps:  # num: per-hour frame index, t: timestamp in milliseconds
    filename = "d-{:06d}.jpg".format(num)
    file_path = os.path.join(data_dir, filename)
    utils.check_exists(file_path)
    output_filename = "d-{}.jpg".format(t)
    output_file_path = os.path.join(output_dir, output_filename)
    # Copy the file (the original is left in place)
    os.system('cp {} {}'.format(file_path, output_file_path))
    count += 1
  return count
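
The shell `cp` call above works, but it spawns a process per frame and breaks on paths containing spaces; the standard-library call below is a drop-in alternative:

import shutil

# Same effect as os.system('cp {} {}'.format(file_path, output_file_path)),
# without spawning a shell for every file.
shutil.copy2(file_path, output_file_path)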
Code Example #5
def extract_frames(tasks, data_dir):
  """
  Extract and save the thermal frames in tasks.
  """
  tasks_in_video = {}
  for task in tasks:
    time_str = task[0][0]
    if time_str not in tasks_in_video:
      tasks_in_video[time_str] = list(task)
    else:
      tasks_in_video[time_str].extend(list(task))

  # Read video and write frames
  video_dir = os.path.join(data_dir, "videos")
  for time_str in sorted(tasks_in_video.keys()):
    filename = time_str + ".000000.mov"
    file_path = os.path.join(video_dir, filename)
    utils.check_exists(file_path)
    task = tasks_in_video[time_str]
    executor.submit(extract_and_write_frames, file_path, task, data_dir)
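
extract_and_write_frames is submitted to the executor but not shown. A sketch of what it could do, assuming each task tuple has the (time_str, frame_index, thermal_t, depth_t) layout seen in Code Example #2 and that frames are dumped as PNGs under a hypothetical frames/ subdirectory (the output naming here is an assumption):

import os
import cv2

def extract_and_write_frames(file_path, task, data_dir):
  """Decode the video at `file_path` and save every frame referenced by `task`."""
  wanted = {frame_index: thermal_t for _, frame_index, thermal_t, _ in task}
  frames_dir = os.path.join(data_dir, 'frames')  # hypothetical output location
  os.makedirs(frames_dir, exist_ok=True)
  cap = cv2.VideoCapture(file_path)
  index = 0
  while True:
    ret, frame = cap.read()
    if not ret:
      break
    if index in wanted:
      cv2.imwrite(os.path.join(frames_dir, 't-{}.png'.format(wanted[index])), frame)
    index += 1
  cap.release()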
Code Example #6
def match_thermal_and_depth(args):
  # Output as a pickle file
  output_filename = "match_{}_{}.pkl".format(args.thermal_sensor, args.depth_sensor)
  output_file_path = os.path.join(args.output_dir, args.date, output_filename)
  if not args.overwrite and os.path.exists(output_file_path):
    print("Match file {} on {} already exists.".format(output_filename, args.date))
    return
  if args.overwrite and os.path.exists(output_file_path):
    os.remove(output_file_path)

  sensor_name = "10.0.1." + args.depth_sensor
  depth_task_file = os.path.join(args.output_dir, args.date, "depth",
                                 sensor_name, "tasks.txt")
  thermal_task_file = os.path.join(args.output_dir, args.date, "thermal",
                                   args.thermal_sensor, "tasks.pkl")
  utils.check_exists(depth_task_file)
  utils.check_exists(thermal_task_file)
  depth_tasks = utils.read_tasks_from_file(depth_task_file) # Array
  with open(thermal_task_file, 'rb') as f:
    thermal_tasks = pickle.load(f) # Dict

  # Find tasks
  tasks = find_match(thermal_tasks, depth_tasks, args.time_diff)

#  output_dir = os.path.join(args.output_dir, args.date, "thermal", args.thermal_sensor)
#  extract_frames(tasks, output_dir)

#  print("Extracting frames...")
#  executor.shutdown(wait=True)

  # Sanity check
  prev_t = 0
  for task in tasks:
    for _, _, t, _ in task:
      assert t > prev_t
      prev_t = t

  print("Number of matches (tasks): {}".format(len(tasks)))
  print("Writing matches to {}".format(output_file_path))
  utils.write_pickle(tasks, output_file_path)
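
find_match is not shown in these examples. The core idea, pairing each thermal frame with the nearest depth timestamp no more than args.time_diff away, can be sketched over flat, sorted millisecond timestamp lists; the real function additionally carries the (time_str, frame_index) information and groups matches into tasks, which is not visible here:

import bisect

def match_timestamps(thermal_ts, depth_ts, time_diff):
  """Pair each thermal timestamp with the nearest depth timestamp within `time_diff` ms."""
  matches = []
  for t in thermal_ts:
    i = bisect.bisect_left(depth_ts, t)
    candidates = depth_ts[max(i - 1, 0):i + 1]  # nearest neighbours on either side
    if not candidates:
      continue
    best = min(candidates, key=lambda d: abs(d - t))
    if abs(best - t) <= time_diff:
      matches.append((t, best))
  return matches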