def __init__ (self, samples_path, debug, batch_size, temporal_image_count=3, sample_process_options=SampleProcessor.Options(), output_sample_types=None, generators_count=2, **kwargs):
    """
    Generator of temporally-sorted face samples.

    Loads FACE_TEMPORAL_SORTED samples from samples_path and spawns worker
    generators that receive the pickled sample list plus an index client.

    samples_path         -- directory of training samples
    debug                -- when truthy, run a single in-thread generator
    batch_size           -- forwarded to the base generator
    temporal_image_count -- number of consecutive frames per sample window
    output_sample_types  -- list of requested output types (default: empty list;
                            was a shared mutable default `[]`, now normalized)
    generators_count     -- worker subprocess count (ignored in debug mode)

    Raises ValueError when no samples are found.
    """
    super().__init__(debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types if output_sample_types is not None else []

    if self.debug:
        self.generators_count = 1
    else:
        # Fix: clamp like every sibling generator does; a caller passing 0 or a
        # negative value would otherwise silently create no generators at all.
        self.generators_count = max(1, generators_count)

    samples = SampleLoader.load (SampleType.FACE_TEMPORAL_SORTED, samples_path)
    samples_len = len(samples)
    if samples_len == 0:
        raise ValueError('No training data provided.')

    # Highest valid window start index: samples_len - temporal_image_count
    # (mult_max is a fixed multiplier slot, currently 1).
    mult_max = 1
    l = samples_len - ( (self.temporal_image_count)*mult_max - (mult_max-1) )
    index_host = mplib.IndexHost(l+1)

    # Samples are pickled once and shipped to each worker.
    pickled_samples = pickle.dumps(samples, 4)
    if self.debug:
        self.generators = [ThisThreadGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(),) )]
    else:
        # NOTE(review): unlike the other generators in this file, these workers
        # are started immediately (no start_now=False / start_in_parallel).
        self.generators = [SubprocessGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(),) ) for i in range(self.generators_count) ]

    self.generator_counter = -1
def __init__(self, samples_path, debug=False, batch_size=1, random_ct_samples_path=None, sample_process_options=SampleProcessor.Options(), output_sample_types=[], add_sample_idx=False, generators_count=4, **kwargs):
    """
    Face sample generator backed by SampleHost, with an optional second
    sample set for random color transfer (ct).

    Workers receive pickled samples plus per-worker index clients and are
    started immediately (start_now=True). Raises ValueError when the main
    sample set is empty.
    """
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types
    self.add_sample_idx = add_sample_idx

    self.generators_count = 1 if self.debug else max(1, generators_count)

    samples = SampleHost.load(SampleType.FACE, self.samples_path)
    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    index_host = mplib.IndexHost(self.samples_len)

    ct_samples = None
    ct_index_host = None
    if random_ct_samples_path is not None:
        ct_samples = SampleHost.load(SampleType.FACE, random_ct_samples_path)
        ct_index_host = mplib.IndexHost(len(ct_samples))

    pickled_samples = pickle.dumps(samples, 4)
    ct_pickled_samples = None if ct_samples is None else pickle.dumps(ct_samples, 4)

    if self.debug:
        self.generators = [
            ThisThreadGenerator(self.batch_func,
                                (pickled_samples,
                                 index_host.create_cli(),
                                 ct_pickled_samples,
                                 ct_index_host.create_cli() if ct_index_host is not None else None))
        ]
    else:
        # Each worker gets its own index client, hence create_cli() per worker.
        self.generators = [
            SubprocessGenerator(self.batch_func,
                                (pickled_samples,
                                 index_host.create_cli(),
                                 ct_pickled_samples,
                                 ct_index_host.create_cli() if ct_index_host is not None else None),
                                start_now=True)
            for _ in range(self.generators_count)
        ]

    self.generator_counter = -1
def __init__ (self, paths, debug=False, batch_size=1, resolution=256, face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """
    Generator of segmented (XSeg) face samples gathered from several paths.

    Falls back to xseg-mask-labeled samples when no polygon-segmented samples
    are found; raises when neither exists. Workers are created paused and
    started together via start_in_parallel.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    # Fix: the original used sum([...]) with no start value, which raises
    # TypeError on `0 + list`. Accumulate explicitly instead (same pattern as
    # the other multi-path generator in this file).
    samples = []
    for path in paths:
        if path is not None:
            samples += SampleLoader.load (SampleType.FACE, path)

    seg_sample_idxs = SegmentedSampleFilterSubprocessor(samples).run()

    if len(seg_sample_idxs) == 0:
        # No polygon segmentation; retry counting applied xseg masks.
        seg_sample_idxs = SegmentedSampleFilterSubprocessor(samples, count_xseg_mask=True).run()
        if len(seg_sample_idxs) == 0:
            raise Exception(f"No segmented faces found.")
        else:
            io.log_info(f"Using {len(seg_sample_idxs)} xseg labeled samples.")
    else:
        io.log_info(f"Using {len(seg_sample_idxs)} segmented samples.")

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    if self.debug:
        self.generators = [ThisThreadGenerator ( self.batch_func, (samples, seg_sample_idxs, resolution, face_type, data_format) )]
    else:
        self.generators = [SubprocessGenerator ( self.batch_func, (samples, seg_sample_idxs, resolution, face_type, data_format), start_now=False )
                           for i in range(self.generators_count) ]
        SubprocessGenerator.start_in_parallel( self.generators )

    self.generator_counter = -1
    self.initialized = True
def __init__(self, samples_path, debug, batch_size, sample_process_options=SampleProcessor.Options(), output_sample_types=[], raise_on_no_data=True, **kwargs):
    """
    Generator of whole-image samples.

    When the sample set is empty: raises ValueError if raise_on_no_data,
    otherwise returns early leaving self.initialized False so callers can
    detect the failed setup.
    """
    super().__init__(debug, batch_size)
    self.initialized = False
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types

    samples = SampleLoader.load(SampleType.IMAGE, samples_path)
    if not samples:
        if raise_on_no_data:
            raise ValueError('No training data provided.')
        return

    # Exactly one worker either way: in-thread when debugging, one subprocess otherwise.
    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, samples)]
    else:
        self.generators = [SubprocessGenerator(self.batch_func, samples)]

    self.generator_counter = -1
    self.initialized = True
def __init__(self, samples_path, debug=False, batch_size=1, random_ct_samples_path=None, sample_process_options=SampleProcessor.Options(), output_sample_types=[], add_sample_idx=False, generators_count=4, raise_on_no_data=True, **kwargs):
    """
    Face sample generator with optional random-color-transfer sample set.

    Samples are passed unpickled to the workers. When the main sample set is
    empty: raises ValueError if raise_on_no_data, otherwise returns early with
    self.initialized False.
    """
    super().__init__(debug, batch_size)
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types
    self.add_sample_idx = add_sample_idx

    self.generators_count = 1 if self.debug else max(1, generators_count)

    samples = SampleLoader.load(SampleType.FACE, samples_path)
    self.samples_len = len(samples)
    self.initialized = False

    if self.samples_len == 0:
        if raise_on_no_data:
            raise ValueError('No training data provided.')
        return

    index_host = mplib.IndexHost(self.samples_len)

    if random_ct_samples_path is None:
        ct_samples, ct_index_host = None, None
    else:
        ct_samples = SampleLoader.load(SampleType.FACE, random_ct_samples_path)
        ct_index_host = mplib.IndexHost(len(ct_samples))

    if self.debug:
        self.generators = [
            ThisThreadGenerator(self.batch_func,
                                (samples, index_host.create_cli(), ct_samples,
                                 ct_index_host.create_cli() if ct_index_host is not None else None))
        ]
    else:
        # One index client per worker; workers start paused and are launched together.
        self.generators = [
            SubprocessGenerator(self.batch_func,
                                (samples, index_host.create_cli(), ct_samples,
                                 ct_index_host.create_cli() if ct_index_host is not None else None),
                                start_now=False)
            for _ in range(self.generators_count)
        ]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__(self, samples_path, debug=False, batch_size=1, random_ct_samples_path=None, sample_process_options=SampleProcessor.Options(), output_sample_types=[], add_sample_idx=False, generators_count=4, rnd_seed=None, **kwargs):
    """
    Face sample generator with deterministic per-worker seeding.

    rnd_seed seeds worker i with rnd_seed + i so each subprocess draws a
    distinct but reproducible stream; a random seed is chosen when None.
    Raises ValueError when no samples are found.
    """
    super().__init__(debug, batch_size)
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types
    self.add_sample_idx = add_sample_idx

    if rnd_seed is None:
        rnd_seed = np.random.randint(0x80000000)

    self.generators_count = 1 if self.debug else max(1, generators_count)

    samples = SampleLoader.load(SampleType.FACE, samples_path)
    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    ct_samples = SampleLoader.load(SampleType.FACE, random_ct_samples_path) if random_ct_samples_path is not None else None

    # Pickle once; every worker receives the same serialized payloads.
    pickled_samples = pickle.dumps(samples, 4)
    ct_pickled_samples = None if ct_samples is None else pickle.dumps( ct_samples, 4)

    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, (pickled_samples, ct_pickled_samples, rnd_seed))]
    else:
        self.generators = [SubprocessGenerator(self.batch_func, (pickled_samples, ct_pickled_samples, rnd_seed + i), start_now=False)
                           for i in range(self.generators_count)]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
def __init__(self, root_path, debug=False, batch_size=1, resolution=256, face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """
    Generator of aligned face samples plus obstruction overlay images.

    Expects root_path/'aligned' (required, recursive) and root_path/'obstructions'
    (.png files, recursive). Raises ValueError when the aligned directory is missing.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    aligned_path = root_path / 'aligned'
    if not aligned_path.exists():
        raise ValueError(f'Unable to find {aligned_path}')

    obstructions_path = root_path / 'obstructions'
    obstructions_images_paths = pathex.get_image_paths(obstructions_path, image_extensions=['.png'], subdirs=True)

    samples = SampleLoader.load(SampleType.FACE, aligned_path, subdirs=True)
    self.samples_len = len(samples)
    pickled_samples = pickle.dumps(samples, 4)

    self.generators_count = 1 if self.debug else max(1, generators_count)

    # Identical argument tuple for every worker (no per-worker clients needed).
    worker_args = (pickled_samples, obstructions_images_paths, resolution, face_type, data_format)
    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, worker_args)]
    else:
        self.generators = [SubprocessGenerator(self.batch_func, worker_args, start_now=False)
                           for _ in range(self.generators_count)]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__(self, paths, debug=False, batch_size=1, resolution=256, face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """
    Generator of polygon-segmented face samples collected from several paths.

    Keeps only samples whose seg_ie_polys contain at least one point;
    raises when none qualify.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    loaded = [sample for path in paths for sample in SampleLoader.load(SampleType.FACE, path)]

    seg_samples = [sample for sample in loaded if sample.seg_ie_polys.get_pts_count() != 0]
    n_segmented = len(seg_samples)
    if n_segmented == 0:
        raise Exception(f"No segmented faces found.")
    io.log_info(f"Using {n_segmented} segmented samples.")

    pickled_samples = pickle.dumps(seg_samples, 4)

    self.generators_count = 1 if self.debug else max(1, generators_count)

    # Same argument tuple for every worker.
    worker_args = (pickled_samples, resolution, face_type, data_format)
    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, worker_args)]
    else:
        self.generators = [SubprocessGenerator(self.batch_func, worker_args, start_now=False)
                           for _ in range(self.generators_count)]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__ (self, samples_path, debug=False, batch_size=1, random_ct_samples_path=None, sample_process_options=SampleProcessor.Options(), output_sample_types=[], uniform_yaw_distribution=False, generators_count=4, raise_on_no_data=True, **kwargs):
    """
    Face sample generator with optional uniform-yaw sampling.

    When uniform_yaw_distribution is set, samples are binned into 128 yaw
    buckets and indexes are drawn via a 2D index host so every yaw range is
    sampled with equal probability regardless of how many faces fall in it.

    samples_path            -- directory of FACE samples
    random_ct_samples_path  -- optional second set used for color transfer
    raise_on_no_data        -- raise ValueError on empty set, else return
                               early with self.initialized left False

    NOTE(review): mutable default `output_sample_types=[]` is shared across
    calls; harmless here since it is only stored, never mutated.
    """
    super().__init__(debug, batch_size)
    self.initialized = False
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    samples = SampleLoader.load (SampleType.FACE, samples_path)
    self.samples_len = len(samples)

    if self.samples_len == 0:
        if raise_on_no_data:
            raise ValueError('No training data provided.')
        else:
            # Leave self.initialized False so the caller can detect the failure.
            return

    if uniform_yaw_distribution:
        # (sample index, pitch-yaw-roll) pairs for every sample.
        samples_pyr = [ ( idx, sample.get_pitch_yaw_roll() ) for idx, sample in enumerate(samples) ]

        grads = 128
        #instead of math.pi / 2, using -1.2,+1.2 because actually maximum yaw for 2DFAN landmarks are -1.2+1.2
        grads_space = np.linspace (-1.2, 1.2,grads)

        # yaws_sample_list[g] collects sample indexes whose yaw falls in bucket g.
        yaws_sample_list = [None]*grads
        for g in io.progress_bar_generator ( range(grads), "Sort by yaw"):
            yaw = grads_space[g]
            # For the last bucket there is no upper edge; reuse yaw as a sentinel.
            next_yaw = grads_space[g+1] if g < grads-1 else yaw

            yaw_samples = []
            for idx, pyr in samples_pyr:
                # Yaw sign is flipped before binning — presumably to match the
                # landmark convention used elsewhere; TODO confirm.
                s_yaw = -pyr[1]
                # First bucket absorbs everything below its upper edge, the last
                # bucket everything at or above its lower edge; middle buckets
                # use the half-open interval [yaw, next_yaw).
                if (g == 0 and s_yaw < next_yaw) or \
                   (g < grads-1 and s_yaw >= yaw and s_yaw < next_yaw) or \
                   (g == grads-1 and s_yaw >= yaw):
                    yaw_samples += [ idx ]
            if len(yaw_samples) > 0:
                yaws_sample_list[g] = yaw_samples

        # Drop empty buckets so the 2D host only chooses among populated ones.
        yaws_sample_list = [ y for y in yaws_sample_list if y is not None ]

        index_host = mplib.Index2DHost( yaws_sample_list )
    else:
        # Plain uniform sampling over all samples.
        index_host = mplib.IndexHost(self.samples_len)

    if random_ct_samples_path is not None:
        ct_samples = SampleLoader.load (SampleType.FACE, random_ct_samples_path)
        ct_index_host = mplib.IndexHost( len(ct_samples) )
    else:
        ct_samples = None
        ct_index_host = None

    if self.debug:
        self.generators = [ThisThreadGenerator ( self.batch_func, (samples, index_host.create_cli(), ct_samples, ct_index_host.create_cli() if ct_index_host is not None else None) )]
    else:
        # One index client per worker; workers start paused, then all at once.
        self.generators = [SubprocessGenerator ( self.batch_func, (samples, index_host.create_cli(), ct_samples, ct_index_host.create_cli() if ct_index_host is not None else None), start_now=False ) \
                           for i in range(self.generators_count) ]

        SubprocessGenerator.start_in_parallel( self.generators )

    self.generator_counter = -1
    self.initialized = True
def __init__(self, root_path, debug=False, batch_size=1, resolution=256, generators_count=4, data_format="NHWC", **kwargs):
    """
    Generator over the CelebAMask-HQ dataset.

    Expects root_path/'CelebAMask-HQ' containing 'CelebA-HQ-img' (source
    images named <id>.ext) and 'CelebAMask-HQ-mask-anno' (mask images named
    <id>_<mask_type>.ext). Builds a per-file-id map of mask type to relative
    mask path, logging an error for mask ids with no source image.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    dataset_path = root_path / 'CelebAMask-HQ'
    if not dataset_path.exists():
        raise ValueError(f'Unable to find {dataset_path}')

    images_path = dataset_path / 'CelebA-HQ-img'
    if not images_path.exists():
        raise ValueError(f'Unable to find {images_path}')

    masks_path = dataset_path / 'CelebAMask-HQ-mask-anno'
    if not masks_path.exists():
        raise ValueError(f'Unable to find {masks_path}')

    self.generators_count = 1 if self.debug else max(1, generators_count)

    source_images_paths = pathex.get_image_paths(images_path, return_Path_class=True)
    mask_images_paths = pathex.get_image_paths(masks_path, subdirs=True, return_Path_class=True)

    if len(source_images_paths) == 0 or len(mask_images_paths) == 0:
        raise ValueError('No training data provided.')

    # file id -> { MaskType -> mask path relative to masks_path }
    mask_file_id_hash = {}
    for mask_filepath in io.progress_bar_generator(mask_images_paths, "Loading"):
        id_str, mask_type = mask_filepath.stem.split('_', 1)
        file_id = int(id_str)
        mask_file_id_hash.setdefault(file_id, {})[MaskType_from_name[mask_type]] = str(mask_filepath.relative_to(masks_path))

    source_file_id_set = {int(source_path.stem) for source_path in source_images_paths}

    # Report mask ids that have no corresponding source image.
    for k in mask_file_id_hash.keys():
        if k not in source_file_id_set:
            io.log_err(f"Corrupted dataset: {k} not in {images_path}")

    worker_args = (images_path, masks_path, mask_file_id_hash, data_format)
    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, worker_args)]
    else:
        self.generators = [SubprocessGenerator(self.batch_func, worker_args, start_now=False)
                           for _ in range(self.generators_count)]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True
def __init__(self, root_path, debug=False, batch_size=1, resolution=256, face_type=None, generators_count=4, data_format="NHWC", **kwargs):
    """
    Generator over the AvatarOperatorDataset chain/subchain layout.

    Expects root_path/'AvatarOperatorDataset'/<chain>/<numeric subchain>/...
    with at least 3 face images per subchain. Builds kf_idxs — keyframe
    triplets linking the last frame of each subchain to the first frame of
    its neighbor in both directions.

    Raises ValueError when the dataset directory is missing; Exception for a
    non-numeric subchain directory name or an undersized subchain.
    """
    super().__init__(debug, batch_size)
    self.initialized = False

    dataset_path = root_path / 'AvatarOperatorDataset'
    if not dataset_path.exists():
        raise ValueError(f'Unable to find {dataset_path}')

    chains_dir_names = pathex.get_all_dir_names(dataset_path)

    samples = SampleLoader.load(SampleType.FACE, dataset_path, subdirs=True)
    # NOTE(review): keys are sample.filename, looked up below with values from
    # pathex.get_image_paths — assumes both produce the same path form; verify.
    sample_idx_by_path = { sample.filename: i for i, sample in enumerate(samples) }

    kf_idxs = []

    for chain_dir_name in chains_dir_names:
        chain_root_path = dataset_path / chain_dir_name

        subchain_dir_names = pathex.get_all_dir_names(chain_root_path)
        try:
            # Numeric sort so subchain '10' follows '9', not '1'.
            subchain_dir_names.sort(key=int)
        except ValueError:
            # Fix: was a bare `except:` that also swallowed unrelated errors
            # (e.g. KeyboardInterrupt); int() raises ValueError on bad names.
            raise Exception(f'{chain_root_path} must contain only numerical name of directories')

        chain_samples = []

        for subchain_dir_name in subchain_dir_names:
            subchain_root = chain_root_path / subchain_dir_name
            subchain_samples = [ sample_idx_by_path[image_path]
                                 for image_path in pathex.get_image_paths(subchain_root)
                                 if image_path in sample_idx_by_path ]
            if len(subchain_samples) < 3:
                # Fix: corrected typo "th echain" in the user-facing message.
                raise Exception(f'subchain {subchain_dir_name} must contain at least 3 faces. If you delete this subchain, then the chain will be corrupted.')

            chain_samples += [subchain_samples]

        # Link neighboring subchains in both directions:
        # (keyframe of neighbor, adjacent frame of this subchain, remaining frames)
        chain_samples_len = len(chain_samples)
        for i in range(chain_samples_len - 1):
            kf_idxs += [(chain_samples[i + 1][0], chain_samples[i][-1], chain_samples[i][:-1])]
        for i in range(1, chain_samples_len):
            kf_idxs += [(chain_samples[i - 1][-1], chain_samples[i][0], chain_samples[i][1:])]

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func, (samples, kf_idxs, resolution, face_type, data_format))]
    else:
        self.generators = [SubprocessGenerator(self.batch_func, (samples, kf_idxs, resolution, face_type, data_format), start_now=False)
                           for i in range(self.generators_count)]
        SubprocessGenerator.start_in_parallel(self.generators)

    self.generator_counter = -1
    self.initialized = True