def test_lct_algorithms(self):
    # Visual test: for each src face sample, applies linear_color_transfer
    # against every dst sample over a sweep of eps values and displays the
    # results side-by-side in an OpenCV window (blocks on key press).
    src_samples = SampleLoader.load(SampleType.FACE, './test_src', None)
    dst_samples = SampleLoader.load(SampleType.FACE, './test_dst', None)
    for src_sample in src_samples:
        src_img = src_sample.load_bgr()
        src_mask = src_sample.load_mask()
        # Toggle to see masks
        show_masks = True
        grid = []
        for ct_sample in dst_samples:
            print(src_sample.filename, ct_sample.filename)
            ct_img = ct_sample.load_bgr()
            ct_mask = ct_sample.load_mask()
            results = []
            # eps sweep: n = 1,3,5,7,9  ->  eps = 1e-1, 1e-3, 1e-5, 1e-7, 1e-9
            for mode in ['sym']:
                for eps in [10**-n for n in range(1, 10, 2)]:
                    results.append(linear_color_transfer(src_img, ct_img, mode=mode, eps=eps))
            if show_masks:
                results = [src_mask * im for im in results]
                # NOTE(review): in-place masking of src_img repeats on every
                # ct_sample iteration; only harmless if the mask is strictly
                # 0/1 valued — TODO confirm mask dtype/range.
                src_img *= src_mask
                ct_img *= ct_mask
            results = np.concatenate((src_img, ct_img, *results), axis=1)
            grid.append(results)
        cv2.namedWindow('test output', cv2.WINDOW_NORMAL)
        cv2.imshow('test output', np.concatenate(grid, axis=0))
        cv2.waitKey(0)
        cv2.destroyAllWindows()
def __init__(self, samples_path, debug=False, batch_size=1,
             random_ct_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, add_sample_idx=False,
             generators_count=4, **kwargs):
    """Face-sample generator feeding workers pickled sample lists + index hosts.

    :param samples_path: directory of aligned face samples.
    :param random_ct_samples_path: optional directory of color-transfer target
        samples; when given, a second index host is created for it.
    :param output_sample_types: per-output processing spec passed to batch_func;
        defaults to an empty list.
    :param add_sample_idx: forwarded flag stored for batch_func.
    :param generators_count: number of subprocess workers (forced to 1 in debug).
    :raises ValueError: if no samples are found under samples_path.
    """
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    # BUGFIX: previously `output_sample_types=[]` — a mutable default argument
    # shared across all instances. Use a fresh list per instance instead.
    self.output_sample_types = output_sample_types if output_sample_types is not None else []
    self.add_sample_idx = add_sample_idx

    # Debug mode runs on this thread, so a single generator suffices.
    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    samples = SampleLoader.load(SampleType.FACE, self.samples_path)
    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    index_host = mplib.IndexHost(self.samples_len)

    if random_ct_samples_path is not None:
        ct_samples = SampleLoader.load(SampleType.FACE, random_ct_samples_path)
        ct_index_host = mplib.IndexHost(len(ct_samples))
    else:
        ct_samples = None
        ct_index_host = None

    # Pickle once so subprocess workers receive a cheap byte blob instead of
    # a large object graph.
    pickled_samples = pickle.dumps(samples, 4)
    ct_pickled_samples = pickle.dumps(ct_samples, 4) if ct_samples is not None else None

    if self.debug:
        self.generators = [
            ThisThreadGenerator(
                self.batch_func,
                (pickled_samples, index_host.create_cli(), ct_pickled_samples,
                 ct_index_host.create_cli() if ct_index_host is not None else None))
        ]
    else:
        self.generators = [
            SubprocessGenerator(
                self.batch_func,
                (pickled_samples, index_host.create_cli(), ct_pickled_samples,
                 ct_index_host.create_cli() if ct_index_host is not None else None),
                start_now=True)
            for i in range(self.generators_count)
        ]

    self.generator_counter = -1
def __init__(self, samples_path, debug, batch_size, sort_by_yaw=False,
             sort_by_yaw_target_samples_path=None, random_ct_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, add_sample_idx=False,
             generators_count=2, generators_random_seed=None, **kwargs):
    """Face-sample generator with optional yaw-sorted loading and optional
    color-transfer sample pool.

    :param sort_by_yaw: load samples yaw-sorted when True.
    :param sort_by_yaw_target_samples_path: when set, yaw-sort against this
        target set (takes precedence over sort_by_yaw).
    :param output_sample_types: per-output processing spec; defaults to [].
    :param generators_random_seed: optional per-generator seed list; its length
        must equal generators_count.
    :raises ValueError: on seed-list length mismatch.
    """
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []
    self.add_sample_idx = add_sample_idx

    # Target-path yaw sorting takes precedence over plain yaw sorting.
    if sort_by_yaw_target_samples_path is not None:
        self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
    elif sort_by_yaw:
        self.sample_type = SampleType.FACE_YAW_SORTED
    else:
        self.sample_type = SampleType.FACE

    if generators_random_seed is not None and len(generators_random_seed) != generators_count:
        raise ValueError("len(generators_random_seed) != generators_count")
    self.generators_random_seed = generators_random_seed

    samples = SampleLoader.load(self.sample_type, self.samples_path,
                                sort_by_yaw_target_samples_path)
    ct_samples = SampleLoader.load(
        SampleType.FACE, random_ct_samples_path
    ) if random_ct_samples_path is not None else None
    self.random_ct_sample_chance = 100

    if self.debug:
        self.generators_count = 1
        self.generators = [
            iter_utils.ThisThreadGenerator(self.batch_func, (0, samples, ct_samples))
        ]
    else:
        # Stripe samples across workers: worker i gets samples[i::count].
        self.generators_count = min(generators_count, len(samples))
        self.generators = [
            iter_utils.SubprocessGenerator(
                self.batch_func, (i, samples[i::self.generators_count], ct_samples))
            for i in range(self.generators_count)
        ]

    self.generator_counter = -1
def __init__(self, samples_path, debug=False, batch_size=1,
             random_ct_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, add_sample_idx=False,
             generators_count=4, rnd_seed=None, **kwargs):
    """Face-sample generator whose workers are seeded deterministically.

    :param rnd_seed: base RNG seed; picked at random when None. Worker i
        receives rnd_seed + i so augmentation streams are decorrelated.
    :param output_sample_types: per-output processing spec; defaults to [].
    :raises ValueError: if no samples are found under samples_path.
    """
    super().__init__(debug, batch_size)
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []
    self.add_sample_idx = add_sample_idx

    if rnd_seed is None:
        rnd_seed = np.random.randint(0x80000000)

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    samples = SampleLoader.load(SampleType.FACE, samples_path)
    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    if random_ct_samples_path is not None:
        ct_samples = SampleLoader.load(SampleType.FACE, random_ct_samples_path)
    else:
        ct_samples = None

    # Pickle once; workers deserialize their own copy.
    pickled_samples = pickle.dumps(samples, 4)
    ct_pickled_samples = pickle.dumps(ct_samples, 4) if ct_samples is not None else None

    if self.debug:
        self.generators = [
            ThisThreadGenerator(self.batch_func,
                                (pickled_samples, ct_pickled_samples, rnd_seed))
        ]
    else:
        self.generators = [
            SubprocessGenerator(self.batch_func,
                                (pickled_samples, ct_pickled_samples, rnd_seed + i),
                                start_now=False)
            for i in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
def __init__(self, samples_path, debug=False, batch_size=1,
             random_ct_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, add_sample_idx=False,
             generators_count=4, raise_on_no_data=True, **kwargs):
    """Face-sample generator that can tolerate an empty dataset.

    :param raise_on_no_data: when False and no samples are found, the
        instance is left with initialized=False instead of raising.
    :param output_sample_types: per-output processing spec; defaults to [].
    :raises ValueError: if no samples found and raise_on_no_data is True.
    """
    super().__init__(debug, batch_size)
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []
    self.add_sample_idx = add_sample_idx

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    # Removed leftover debug prints of samples_path / directory listing /
    # raw sample list that polluted stdout on every construction.
    samples = SampleLoader.load(SampleType.FACE, samples_path)
    self.samples_len = len(samples)

    self.initialized = False
    if self.samples_len == 0:
        if raise_on_no_data:
            raise ValueError('No training data provided.')
        else:
            return

    index_host = mplib.IndexHost(self.samples_len)

    if random_ct_samples_path is not None:
        ct_samples = SampleLoader.load(SampleType.FACE, random_ct_samples_path)
        ct_index_host = mplib.IndexHost(len(ct_samples))
    else:
        ct_samples = None
        ct_index_host = None

    if self.debug:
        self.generators = [
            ThisThreadGenerator(
                self.batch_func,
                (samples, index_host.create_cli(), ct_samples,
                 ct_index_host.create_cli() if ct_index_host is not None else None))
        ]
    else:
        self.generators = [
            SubprocessGenerator(
                self.batch_func,
                (samples, index_host.create_cli(), ct_samples,
                 ct_index_host.create_cli() if ct_index_host is not None else None),
                start_now=False)
            for i in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__(self, samples_path, debug, batch_size, temporal_image_count,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, generators_count=2, **kwargs):
    """Generator over temporally-sorted face samples.

    :param temporal_image_count: number of consecutive frames per sample,
        stored for batch_func.
    :param output_sample_types: per-output processing spec; defaults to [].
    """
    super().__init__(samples_path, debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []

    self.samples = SampleLoader.load(SampleType.FACE_TEMPORAL_SORTED,
                                     self.samples_path)

    if self.debug:
        self.generators_count = 1
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, 0)]
    else:
        # Never spawn more workers than there are samples.
        self.generators_count = min(generators_count, len(self.samples))
        self.generators = [
            iter_utils.SubprocessGenerator(self.batch_func, i)
            for i in range(self.generators_count)
        ]

    self.generator_counter = -1
def __init__(self, samples_path, debug, batch_size,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, raise_on_no_data=True, **kwargs):
    """Generator over plain IMAGE samples with a single worker.

    :param raise_on_no_data: when False and no samples exist, leaves the
        instance with initialized=False instead of raising.
    :param output_sample_types: per-output processing spec; defaults to [].
    :raises ValueError: if no samples found and raise_on_no_data is True.
    """
    super().__init__(debug, batch_size)
    self.initialized = False
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []

    samples = SampleLoader.load(SampleType.IMAGE, samples_path)
    if len(samples) == 0:
        if raise_on_no_data:
            raise ValueError('No training data provided.')
        return

    # Single generator: in-thread for debug, one subprocess otherwise.
    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, samples)]
    else:
        self.generators = [SubprocessGenerator(self.batch_func, samples)]

    self.generator_counter = -1
    self.initialized = True
def __init__(self, samples_path, debug, batch_size, temporal_image_count=3,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, generators_count=2, **kwargs):
    """Generator over temporally-sorted face samples using a shared index host.

    :param temporal_image_count: frames per temporal window.
    :param output_sample_types: per-output processing spec; defaults to [].
    :raises ValueError: if no samples are found under samples_path.
    """
    super().__init__(debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = generators_count

    samples = SampleLoader.load(SampleType.FACE_TEMPORAL_SORTED, samples_path)
    samples_len = len(samples)
    if samples_len == 0:
        raise ValueError('No training data provided.')

    mult_max = 1
    # Highest valid window start so that a temporal_image_count-frame window
    # (with stride up to mult_max) stays inside the sample list.
    l = samples_len - ((self.temporal_image_count) * mult_max - (mult_max - 1))
    index_host = mplib.IndexHost(l + 1)

    pickled_samples = pickle.dumps(samples, 4)
    if self.debug:
        self.generators = [
            ThisThreadGenerator(self.batch_func,
                                (pickled_samples, index_host.create_cli(),))
        ]
    else:
        self.generators = [
            SubprocessGenerator(self.batch_func,
                                (pickled_samples, index_host.create_cli(),))
            for i in range(self.generators_count)
        ]

    self.generator_counter = -1
def __init__(self, paths, debug=False, batch_size=1, resolution=256,
             face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """Generator over segmented (or xseg-labeled) face samples from several paths.

    :param paths: iterable of sample directories; None entries are skipped.
    :raises Exception: if no segmented or xseg-labeled faces are found.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    # BUGFIX: the original used sum([...]) over plain lists with the default
    # integer start value, which raises TypeError (0 + list). Flatten
    # explicitly instead.
    samples = []
    for path in paths:
        if path is not None:
            samples += SampleLoader.load(SampleType.FACE, path)

    seg_sample_idxs = SegmentedSampleFilterSubprocessor(samples).run()

    if len(seg_sample_idxs) == 0:
        # Fall back to counting xseg masks before giving up.
        seg_sample_idxs = SegmentedSampleFilterSubprocessor(samples, count_xseg_mask=True).run()
        if len(seg_sample_idxs) == 0:
            raise Exception("No segmented faces found.")
        else:
            io.log_info(f"Using {len(seg_sample_idxs)} xseg labeled samples.")
    else:
        io.log_info(f"Using {len(seg_sample_idxs)} segmented samples.")

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    if self.debug:
        self.generators = [
            ThisThreadGenerator(
                self.batch_func,
                (samples, seg_sample_idxs, resolution, face_type, data_format))
        ]
    else:
        self.generators = [
            SubprocessGenerator(
                self.batch_func,
                (samples, seg_sample_idxs, resolution, face_type, data_format),
                start_now=False)
            for i in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__(self, samples_path, debug=False, batch_size=1,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, person_id_mode=1,
             generators_count=2, generators_random_seed=None, **kwargs):
    """Face-sample generator with person-id aware loading.

    :param person_id_mode: mode 1 flattens per-person sample lists into one
        shuffled list that is striped across workers; other modes hand every
        worker the full (unflattened) sample structure.
    :param output_sample_types: per-output processing spec; defaults to [].
    :raises ValueError: on seed-list length mismatch or empty dataset.
    """
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []
    self.person_id_mode = person_id_mode

    if generators_random_seed is not None and len(generators_random_seed) != generators_count:
        raise ValueError("len(generators_random_seed) != generators_count")
    self.generators_random_seed = generators_random_seed

    samples = SampleLoader.load(SampleType.FACE, self.samples_path,
                                person_id_mode=True)

    if person_id_mode == 1:
        # Flatten the per-person lists into a single sample list.
        new_samples = []
        for s in samples:
            new_samples += s
        samples = new_samples
    np.random.shuffle(samples)

    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    if self.debug:
        self.generators_count = 1
        self.generators = [
            iter_utils.ThisThreadGenerator(self.batch_func, (0, samples))
        ]
    else:
        self.generators_count = min(generators_count, self.samples_len)
        if person_id_mode == 1:
            # Stripe the flattened list: worker i gets samples[i::count].
            self.generators = [
                iter_utils.SubprocessGenerator(
                    self.batch_func, (i, samples[i::self.generators_count]))
                for i in range(self.generators_count)
            ]
        else:
            self.generators = [
                iter_utils.SubprocessGenerator(self.batch_func, (i, samples))
                for i in range(self.generators_count)
            ]

    self.generator_counter = -1
def __init__(self, samples_path, debug, batch_size, sort_by_yaw=False,
             sort_by_yaw_target_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, add_sample_idx=False,
             add_pitch=False, add_yaw=False, generators_count=2,
             generators_random_seed=None, **kwargs):
    """Face-sample generator with optional yaw sorting and pitch/yaw outputs.

    :param add_pitch: when True, batch_func also emits pitch values.
    :param add_yaw: when True, batch_func also emits yaw values.
    :param output_sample_types: per-output processing spec; defaults to [].
    :raises ValueError: on seed-list length mismatch.
    """
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []
    self.add_sample_idx = add_sample_idx
    self.add_pitch = add_pitch
    self.add_yaw = add_yaw

    if sort_by_yaw_target_samples_path is not None:
        self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
    elif sort_by_yaw:
        self.sample_type = SampleType.FACE_YAW_SORTED
    else:
        self.sample_type = SampleType.FACE

    self.samples = SampleLoader.load(self.sample_type, self.samples_path,
                                     sort_by_yaw_target_samples_path)

    if generators_random_seed is not None and len(generators_random_seed) != generators_count:
        raise ValueError("len(generators_random_seed) != generators_count")
    self.generators_random_seed = generators_random_seed

    if self.debug:
        self.generators_count = 1
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, 0)]
    else:
        self.generators_count = min(generators_count, len(self.samples))
        self.generators = [
            iter_utils.SubprocessGenerator(self.batch_func, i)
            for i in range(self.generators_count)
        ]

    # One feedback queue per generator. NOTE(review): created unconditionally
    # so the debug path also has generators_sq — presumably only the
    # subprocess path consumes it; verify against batch_func.
    self.generators_sq = [
        multiprocessing.Queue() for _ in range(self.generators_count)
    ]

    self.generator_counter = -1
def __init__(self, samples_path, debug, batch_size, temporal_image_count,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, **kwargs):
    """Generator over IMAGE samples for temporal training (single worker).

    :param temporal_image_count: frames per temporal window, stored for
        batch_func.
    :param output_sample_types: per-output processing spec; defaults to [].
    """
    super().__init__(samples_path, debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []

    self.samples = SampleLoader.load(SampleType.IMAGE, self.samples_path)
    self.generator_samples = [self.samples]

    # Single generator: in-thread for debug, one subprocess otherwise.
    if self.debug:
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, 0)]
    else:
        self.generators = [iter_utils.SubprocessGenerator(self.batch_func, 0)]

    self.generator_counter = -1
def __init__(self, root_path, debug=False, batch_size=1, resolution=256,
             face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """Generator pairing face samples with obstruction overlay images.

    Expects root_path/'aligned' (face samples, searched recursively) and
    root_path/'obstructions' (.png overlays, searched recursively).

    :raises ValueError: if the aligned directory does not exist.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    faces_dir = root_path / 'aligned'
    if not faces_dir.exists():
        raise ValueError(f'Unable to find {faces_dir}')

    overlay_paths = pathex.get_image_paths(root_path / 'obstructions',
                                           image_extensions=['.png'],
                                           subdirs=True)

    face_samples = SampleLoader.load(SampleType.FACE, faces_dir, subdirs=True)
    self.samples_len = len(face_samples)

    # Pre-pickle once so every worker receives a cheap byte blob.
    worker_args = (pickle.dumps(face_samples, 4), overlay_paths,
                   resolution, face_type, data_format)

    self.generators_count = 1 if self.debug else max(1, generators_count)

    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, worker_args)]
    else:
        self.generators = [
            SubprocessGenerator(self.batch_func, worker_args, start_now=False)
            for _ in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__(self, paths, debug=False, batch_size=1, resolution=256,
             face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """Generator over face samples that carry segmentation polygons.

    Samples from all given paths are pooled; only those with at least one
    seg_ie_polys point are kept.

    :raises Exception: if no segmented faces are found.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    pooled = []
    for p in paths:
        pooled += SampleLoader.load(SampleType.FACE, p)

    # Keep only samples that actually have segmentation polygon points.
    seg_samples = [s for s in pooled if s.seg_ie_polys.get_pts_count() != 0]
    seg_samples_len = len(seg_samples)
    if seg_samples_len == 0:
        raise Exception(f"No segmented faces found.")
    io.log_info(f"Using {seg_samples_len} segmented samples.")

    # Pre-pickle once so every worker receives a cheap byte blob.
    worker_args = (pickle.dumps(seg_samples, 4), resolution, face_type,
                   data_format)

    self.generators_count = 1 if self.debug else max(1, generators_count)

    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, worker_args)]
    else:
        self.generators = [
            SubprocessGenerator(self.batch_func, worker_args, start_now=False)
            for _ in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def test_algorithms(self):
    """Visual check: draw landmarks on each sample and show the image next
    to its hull-masked version, rows stacked vertically in one window."""
    samples = SampleLoader.load(SampleType.FACE,
                                '../../imagelib/test/test_dst', None)

    rows = []
    for sample in samples:
        image = sample.load_bgr()
        hull_mask = sample.load_image_hull_mask()
        # Landmarks are drawn in place on the loaded image.
        draw_landmarks(image, sample.landmarks)
        rows.append(np.concatenate((image, hull_mask * image), axis=1))

    cv2.namedWindow('test output', cv2.WINDOW_NORMAL)
    for row in rows:
        print(np.shape(row))
    cv2.imshow('test output', np.concatenate(rows, axis=0))
    cv2.waitKey(0)
    cv2.destroyAllWindows()
def test_plot_landmarks_algorithms(self):
    """Visual check: plot each face's landmark points as red circles and
    show the image next to its hull-masked version."""
    src_samples = SampleLoader.load(SampleType.FACE,
                                    '../../imagelib/test/test_src', None)
    grid = []
    for src_sample in src_samples:
        src_img = src_sample.load_bgr()
        src_mask = src_sample.load_image_hull_mask()
        src_landmarks = src_sample.landmarks
        print('landmarks:', src_landmarks)
        for landmark in src_landmarks:
            # BUGFIX: np.int was deprecated in NumPy 1.20 and removed in
            # 1.24; the builtin int is the documented replacement.
            landmark = np.array(landmark, dtype=int)
            cv2.circle(src_img, tuple(landmark), 3, (0, 0, 255))
        results = np.concatenate((src_img, src_mask * src_img), axis=1)
        grid.append(results)
    cv2.namedWindow('test output', cv2.WINDOW_NORMAL)
    for g in grid:
        print(np.shape(g))
    cv2.imshow('test output', np.concatenate(grid, axis=0))
    cv2.waitKey(0)
    cv2.destroyAllWindows()
def __init__(self, samples_path, debug, batch_size, resolution,
             sample_process_options=SampleProcessor.Options(), **kwargs):
    """Generator over FACE_WITH_NEAREST_AS_TARGET samples, split across at
    most two subprocess workers by even/odd interleave."""
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    self.resolution = resolution

    self.samples = SampleLoader.load(SampleType.FACE_WITH_NEAREST_AS_TARGET,
                                     self.samples_path, self.samples_path)

    if self.debug:
        # Single in-thread generator over the whole sample list.
        self.generator_samples = [self.samples]
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, 0)]
    elif len(self.samples) > 1:
        # Two workers, each fed every other sample.
        self.generator_samples = [self.samples[0::2], self.samples[1::2]]
        self.generators = [
            iter_utils.SubprocessGenerator(self.batch_func, 0),
            iter_utils.SubprocessGenerator(self.batch_func, 1),
        ]
    else:
        self.generator_samples = [self.samples]
        self.generators = [iter_utils.SubprocessGenerator(self.batch_func, 0)]

    self.generator_counter = -1
def test_compare_hull_mask_with_mesh_mask(self):
    """Visual check: for each sample show [image | hull-masked | mesh-masked]
    and tile the rows from get_sample_grid into one output image."""
    samples = SampleLoader.load(SampleType.FACE,
                                '../../imagelib/test/test_dst', None)

    def _render(sample):
        # One cell: the original next to its two masked variants.
        img = sample.load_bgr()
        hull = sample.load_image_hull_mask()
        mesh = sample.load_image_mesh_mask()
        return np.concatenate((img, hull * img, mesh * img), axis=1)

    rows = [
        np.concatenate([_render(s) for s in row], axis=1)
        for row in self.get_sample_grid(samples)
    ]
    output_grid = np.concatenate(rows, axis=0)

    cv2.namedWindow('test output', cv2.WINDOW_NORMAL)
    cv2.imshow('test output', output_grid)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
def test_algorithms(self):
    # Visual test: for each src face sample, renders a labeled comparison
    # grid of linear color transfer (LCT) and Reinhard color transfer (RCT)
    # variants — with/without clipping (-c), preserve_paper (-p), and masks
    # (m prefix) — against every dst sample, shown in an OpenCV window.
    src_samples = SampleLoader.load(SampleType.FACE, './test_src', None)
    dst_samples = SampleLoader.load(SampleType.FACE, './test_dst', None)
    for src_sample in src_samples:
        src_img = src_sample.load_bgr()
        src_mask = src_sample.load_mask()
        # Toggle to see masks
        show_masks = False
        grid = []
        labels = []
        fontFace = cv2.FONT_HERSHEY_SIMPLEX
        fontScale = 1.5
        thickness = 4
        src_h, src_w, src_c = np.shape(src_img)
        # Label strip is one third of the image height.
        src_h //= 3
        # NOTE(review): 'mRTC' looks like a typo for 'mRCT' (masked RCT) —
        # left as-is since these are display strings.
        for text in ['src', 'dst', 'LCT', 'RCT', 'RCT-c', 'RCT-p', 'RCT-pc', 'mRTC', 'mRTC-c', 'mRTC-p', 'mRTC-pc']:
            label = np.zeros((src_h, src_w, src_c))
            size, baseline = cv2.getTextSize(text, fontFace, fontScale, thickness)
            w, h = size
            # Center the text horizontally and vertically in the strip.
            label = cv2.putText(label, text, ((src_w - w)//2, (src_h - h + baseline * 2)//2), fontFace, fontScale, (1, 1, 1), thickness=thickness)
            labels.append(label)
        labels = np.concatenate(labels, axis=1)
        grid.append(labels)
        for ct_sample in dst_samples:
            print(src_sample.filename, ct_sample.filename)
            ct_img = ct_sample.load_bgr()
            ct_mask = ct_sample.load_mask()
            lct_img = linear_color_transfer(src_img, ct_img)
            rct_img = reinhard_color_transfer(src_img, ct_img)
            rct_img_clip = reinhard_color_transfer(src_img, ct_img, clip=True)
            rct_img_paper = reinhard_color_transfer(src_img, ct_img, preserve_paper=True)
            rct_img_paper_clip = reinhard_color_transfer(src_img, ct_img, clip=True, preserve_paper=True)
            masked_rct_img = reinhard_color_transfer(src_img, ct_img, source_mask=src_mask, target_mask=ct_mask)
            masked_rct_img_clip = reinhard_color_transfer(src_img, ct_img, clip=True, source_mask=src_mask, target_mask=ct_mask)
            masked_rct_img_paper = reinhard_color_transfer(src_img, ct_img, preserve_paper=True, source_mask=src_mask, target_mask=ct_mask)
            masked_rct_img_paper_clip = reinhard_color_transfer(src_img, ct_img, clip=True, preserve_paper=True, source_mask=src_mask, target_mask=ct_mask)
            # Column order must match the label strip built above.
            results = [lct_img, rct_img, rct_img_clip, rct_img_paper, rct_img_paper_clip, masked_rct_img, masked_rct_img_clip, masked_rct_img_paper, masked_rct_img_paper_clip]
            if show_masks:
                results = [src_mask * im for im in results]
                # NOTE(review): in-place masking of src_img repeats on every
                # ct_sample iteration; only harmless if the mask is strictly
                # 0/1 valued — TODO confirm mask dtype/range.
                src_img *= src_mask
                ct_img *= ct_mask
            results = np.concatenate((src_img, ct_img, *results), axis=1)
            grid.append(results)
        cv2.namedWindow('test output', cv2.WINDOW_NORMAL)
        for g in grid:
            print(np.shape(g))
        cv2.imshow('test output', np.concatenate(grid, axis=0))
        cv2.waitKey(0)
        cv2.destroyAllWindows()
def __init__(self, root_path, debug=False, batch_size=1, resolution=256,
             face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """Generator over AvatarOperatorDataset chains of face samples.

    Dataset layout: root_path/AvatarOperatorDataset/<chain>/<numeric
    subchain>/<images>. Builds kf_idxs tuples linking neighbouring
    subchains (forward and backward) so batch_func can sample across
    subchain boundaries.

    :raises ValueError: if the dataset directory is missing.
    :raises Exception: on non-numeric subchain names or subchains with
        fewer than 3 faces.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    dataset_path = root_path / 'AvatarOperatorDataset'
    if not dataset_path.exists():
        raise ValueError(f'Unable to find {dataset_path}')

    chains_dir_names = pathex.get_all_dir_names(dataset_path)

    samples = SampleLoader.load(SampleType.FACE, dataset_path, subdirs=True)
    sample_idx_by_path = {
        sample.filename: i
        for i, sample in enumerate(samples)
    }

    kf_idxs = []
    for chain_dir_name in chains_dir_names:
        chain_root_path = dataset_path / chain_dir_name

        subchain_dir_names = pathex.get_all_dir_names(chain_root_path)
        try:
            subchain_dir_names.sort(key=int)
        except ValueError:
            # BUGFIX: was a bare `except:`; only int() conversion can fail here.
            raise Exception(
                f'{chain_root_path} must contain only numerical name of directories'
            )

        chain_samples = []
        for subchain_dir_name in subchain_dir_names:
            subchain_root = chain_root_path / subchain_dir_name
            subchain_samples = [
                sample_idx_by_path[image_path]
                for image_path in pathex.get_image_paths(subchain_root)
                if image_path in sample_idx_by_path
            ]
            if len(subchain_samples) < 3:
                # BUGFIX: fixed message typo "th echain" -> "the chain".
                raise Exception(
                    f'subchain {subchain_dir_name} must contain at least 3 faces. If you delete this subchain, then the chain will be corrupted.'
                )
            chain_samples += [subchain_samples]

        chain_samples_len = len(chain_samples)
        # Forward links: first frame of the next subchain paired with the
        # last frame of the current one.
        for i in range(chain_samples_len - 1):
            kf_idxs += [(chain_samples[i + 1][0], chain_samples[i][-1],
                         chain_samples[i][:-1])]
        # Backward links: last frame of the previous subchain paired with
        # the first frame of the current one.
        for i in range(1, chain_samples_len):
            kf_idxs += [(chain_samples[i - 1][-1], chain_samples[i][0],
                         chain_samples[i][1:])]

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    if self.debug:
        self.generators = [
            ThisThreadGenerator(
                self.batch_func,
                (samples, kf_idxs, resolution, face_type, data_format))
        ]
    else:
        self.generators = [
            SubprocessGenerator(
                self.batch_func,
                (samples, kf_idxs, resolution, face_type, data_format),
                start_now=False)
            for i in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__(self, samples_path, debug=False, batch_size=1,
             random_ct_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=None, uniform_yaw_distribution=False,
             generators_count=4, raise_on_no_data=True, **kwargs):
    """Face-sample generator with optional uniform-yaw sampling.

    :param uniform_yaw_distribution: when True, samples are binned into 128
        yaw buckets and indices are drawn via a 2D index host so each yaw
        range is sampled uniformly.
    :param random_ct_samples_path: optional directory of color-transfer
        target samples.
    :param output_sample_types: per-output processing spec; defaults to [].
    :param raise_on_no_data: when False and no samples exist, leaves the
        instance with initialized=False instead of raising.
    :raises ValueError: if no samples found and raise_on_no_data is True.
    """
    super().__init__(debug, batch_size)
    self.initialized = False
    self.sample_process_options = sample_process_options
    # BUGFIX: avoid shared mutable default argument (`output_sample_types=[]`).
    self.output_sample_types = output_sample_types if output_sample_types is not None else []

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    samples = SampleLoader.load(SampleType.FACE, samples_path)
    self.samples_len = len(samples)

    if self.samples_len == 0:
        if raise_on_no_data:
            raise ValueError('No training data provided.')
        else:
            return

    if uniform_yaw_distribution:
        samples_pyr = [(idx, sample.get_pitch_yaw_roll())
                       for idx, sample in enumerate(samples)]

        grads = 128
        # instead of math.pi / 2, using -1.2,+1.2 because actually maximum
        # yaw for 2DFAN landmarks are -1.2+1.2
        grads_space = np.linspace(-1.2, 1.2, grads)

        yaws_sample_list = [None] * grads
        for g in io.progress_bar_generator(range(grads), "Sort by yaw"):
            yaw = grads_space[g]
            next_yaw = grads_space[g + 1] if g < grads - 1 else yaw

            yaw_samples = []
            for idx, pyr in samples_pyr:
                s_yaw = -pyr[1]
                # First bin takes everything below its upper edge, the last
                # bin everything at/above its lower edge.
                if (g == 0 and s_yaw < next_yaw) or \
                   (g < grads - 1 and s_yaw >= yaw and s_yaw < next_yaw) or \
                   (g == grads - 1 and s_yaw >= yaw):
                    yaw_samples += [idx]
            if len(yaw_samples) > 0:
                yaws_sample_list[g] = yaw_samples

        # Drop empty yaw buckets.
        yaws_sample_list = [y for y in yaws_sample_list if y is not None]

        index_host = mplib.Index2DHost(yaws_sample_list)
    else:
        index_host = mplib.IndexHost(self.samples_len)

    if random_ct_samples_path is not None:
        ct_samples = SampleLoader.load(SampleType.FACE, random_ct_samples_path)
        ct_index_host = mplib.IndexHost(len(ct_samples))
    else:
        ct_samples = None
        ct_index_host = None

    if self.debug:
        self.generators = [
            ThisThreadGenerator(
                self.batch_func,
                (samples, index_host.create_cli(), ct_samples,
                 ct_index_host.create_cli() if ct_index_host is not None else None))
        ]
    else:
        self.generators = [
            SubprocessGenerator(
                self.batch_func,
                (samples, index_host.create_cli(), ct_samples,
                 ct_index_host.create_cli() if ct_index_host is not None else None),
                start_now=False)
            for i in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True