Example #1
    def test_load_metadata(self):
        # Arrange
        files_c = file_service.walk_to_path(Path(config.TRAIN_PARENT_PATH_C),
                                            filename_endswith="metadata.json")
        files_d = file_service.walk_to_path(Path(config.TRAIN_PARENT_PATH_D),
                                            filename_endswith="metadata.json")
        files = files_c + files_d

        # Act: load only the first metadata file as a smoke check.
        for f in files:
            batch_data: BatchData = batch_data_loader_service.load_batch_from_path(f)
            break

        # Assert
        assert len(files) > 0
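Every example on this page calls file_service.walk_to_path, whose definition is not shown. A minimal sketch of what it presumably does, assuming it recursively collects files under a parent path and returns a list of Path objects filtered by suffix (the body below is an illustration, not the project's actual implementation):

from pathlib import Path
from typing import List, Optional


def walk_to_path(parent_path: Path, filename_endswith: Optional[str] = None) -> List[Path]:
    # Recursively collect every file under parent_path, keeping only those
    # whose names end with filename_endswith when a suffix is given.
    matches: List[Path] = []
    for p in Path(parent_path).rglob('*'):
        if p.is_file() and (filename_endswith is None or p.name.endswith(filename_endswith)):
            matches.append(p)
    return matches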
Example #2
def get_video_paths(parent_folder_paths: List[str]):
  video_paths = []
  for par_path in parent_folder_paths:
    files = file_service.walk_to_path(par_path, filename_endswith=".mp4")
    video_paths.extend(files)

  return video_paths
Example #3
def get_all_dataframes(output_par_path):
  pickles = file_service.walk_to_path(output_par_path, filename_endswith='.pkl')
  pickles_filtered = [p for p in pickles if file_service.does_file_have_bytes(p)]

  logger.info(f'About to collect all {len(pickles_filtered)} pickle paths in list ...')
  all_df = [pd.read_pickle(str(p)) for p in pickles_filtered]

  return all_df
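Example #3 also filters through file_service.does_file_have_bytes, which is likewise not shown on this page. A minimal sketch, assuming it simply checks that the file exists and is non-empty:

from pathlib import Path


def does_file_have_bytes(file_path: Path) -> bool:
    # A pickle is considered usable only if it exists and has a non-zero size.
    return file_path.exists() and file_path.stat().st_size > 0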
Example #4
    @staticmethod
    def load_history(max_pickles=None) -> pd.DataFrame:
        logger.info('About to get persisted fake data ...')
        pickle_parent_path: Path = Path(config.SSIM_RND_DIFFS_OUTPUT_PATH,
                                        'data')
        pickles = file_service.walk_to_path(pickle_parent_path,
                                            filename_endswith='.pkl')

        if max_pickles is not None and len(pickles) > max_pickles:
            pickles = pickles[:max_pickles]

        df_all = []
        for p in pickles:
            df = pd.read_pickle(str(p))
            df_all.append(df)

        return pd.concat(df_all)
Example #5
    def __init__(self,
                 output_par_path: Path,
                 max_output_size_mb: int = 1,
                 max_pickles=None):
        self.parent_path = output_par_path
        self.max_output_size_mb = max_output_size_mb

        file_paths = file_service.walk_to_path(output_par_path,
                                               filename_endswith=".pkl")
        if max_pickles is not None and len(file_paths) > max_pickles:
            file_paths = file_paths[:max_pickles]

        # Map each video filename to the frame indexes already persisted
        # in the loaded pickles.
        self.path_map = {}

        for f in file_paths:
            logger.info(f'Loading pickle {f.name} ...')
            df = pd.read_pickle(f)
            path_set = set(df['path'].tolist())
            for p in path_set:
                frame_index_list = df[df['path'] == p]['frame_index'].tolist()
                self.path_map[Path(p).name] = frame_index_list

        self.intialize_new_dataframe()
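The path_map built above keys each video filename to the frame indexes already persisted in the loaded pickles. A hypothetical standalone helper (the name and signature are illustrative, not from the original code) showing how such a map would typically be queried to skip frames that are already done:

from pathlib import Path
from typing import Dict, List


def is_frame_done(path_map: Dict[str, List[int]], vid_path: Path, frame_index: int) -> bool:
    # A frame counts as done if its index was already recorded for this
    # video filename in one of the loaded pickles.
    return frame_index in path_map.get(vid_path.name, [])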
Example #6
    def test_get_video_frame_face(self):
        # Arrange
        filename = 'dnrpknwija.mp4'
        files = file_service.walk_to_path(Path(config.TRAIN_PARENT_PATH_D),
                                          filename_endswith=filename)
        assert (len(files) == 1)

        vid_path = files[0]
        logger.info(f'vid: {vid_path}')

        assert (vid_path.exists())

        image, _, _ = video_service.get_single_image_from_vid(vid_path, 0)

        # l1: 408,706; r1: 652,950 - swatch
        # l2: 397,812; r2: 560,976 - face
        red = (255, 0, 0)
        green = (0, 255, 0)

        l1 = (408, 706)
        r1 = (652, 950)

        l2 = (397, 812)
        r2 = (560, 976)

        image_rect_1 = cv2.rectangle(image,
                                     pt1=l1,
                                     pt2=r1,
                                     color=red,
                                     thickness=3)
        # cv2.rectangle draws in place, so both rectangles land on the same image.
        image_rect_2 = cv2.rectangle(image,
                                     pt1=l2,
                                     pt2=r2,
                                     color=green,
                                     thickness=3)

        image_service.show_image(image_rect_2, 'Original')
Example #7
def get_metadata_json_files(which_drive: str):
  if which_drive.lower() == 'c':
    return file_service.walk_to_path(Path(config.TRAIN_PARENT_PATH_C), filename_endswith="metadata.json")
  else:
    return file_service.walk_to_path(Path(config.TRAIN_PARENT_PATH_D), filename_endswith="metadata.json")
Example #8
def _erase_history():
  # Delete any previously persisted pickles so a run starts from a clean slate.
  if config.SSIM_REALS_DATA_OUTPUT_PATH.exists():
    files = file_service.walk_to_path(config.SSIM_REALS_DATA_OUTPUT_PATH, filename_endswith='.pkl')
    for f in files:
      f.unlink()
Example #9
    def __init__(self, output_parent_path: Path):
        files = file_service.walk_to_path(output_parent_path,
                                          filename_endswith=".pkl")
        self.files = files