def __init__(self, samples_path, debug=False, batch_size=1,
             random_ct_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             add_sample_idx=False,
             generators_count=4,
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types
    self.add_sample_idx = add_sample_idx

    # Debug mode runs a single in-thread generator; otherwise use the requested worker count.
    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = max(1, generators_count)

    samples = SampleHost.load(SampleType.FACE, self.samples_path)
    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    index_host = mplib.IndexHost(self.samples_len)

    # Optional second sample set (random_ct_samples_path) gets its own index host.
    if random_ct_samples_path is not None:
        ct_samples = SampleHost.load(SampleType.FACE, random_ct_samples_path)
        ct_index_host = mplib.IndexHost(len(ct_samples))
    else:
        ct_samples = None
        ct_index_host = None

    # Pickle the sample lists once so the same bytes can be handed to every worker generator.
    pickled_samples = pickle.dumps(samples, 4)
    ct_pickled_samples = pickle.dumps(ct_samples, 4) if ct_samples is not None else None

    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func,
                           (pickled_samples, index_host.create_cli(),
                            ct_pickled_samples,
                            ct_index_host.create_cli() if ct_index_host is not None else None))]
    else:
        self.generators = [SubprocessGenerator(self.batch_func,
                           (pickled_samples, index_host.create_cli(),
                            ct_pickled_samples,
                            ct_index_host.create_cli() if ct_index_host is not None else None),
                           start_now=True)
                           for i in range(self.generators_count)]

    self.generator_counter = -1
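# --- Illustrative sketch, not part of the original code ---
# The constructor above serializes the sample list once (pickle protocol 4) and hands the
# same bytes to every worker generator. The standalone toy below demonstrates that
# "pickle once, unpickle per worker" pattern with plain multiprocessing; the worker
# function and the toy sample list are hypothetical and only illustrate the idea.
import multiprocessing
import pickle


def _show_sample_count(pickled_samples):
    # Each worker unpickles its own private copy of the shared, read-only sample list.
    samples = pickle.loads(pickled_samples)
    print(f"worker received {len(samples)} samples")


if __name__ == "__main__":
    toy_samples = [{"filename": f"face_{i:04d}.jpg"} for i in range(100)]
    payload = pickle.dumps(toy_samples, 4)  # serialize once in the parent process
    workers = [multiprocessing.Process(target=_show_sample_count, args=(payload,))
               for _ in range(2)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()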
def __init__(self, samples_path, debug=False, batch_size=1,
             random_ct_samples_path=None,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             add_sample_idx=False,
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types
    self.add_sample_idx = add_sample_idx

    samples_host = SampleHost.mp_host(SampleType.FACE, self.samples_path)
    self.samples_len = len(samples_host)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    index_host = mp_utils.IndexHost(self.samples_len)

    if random_ct_samples_path is not None:
        ct_samples_host = SampleHost.mp_host(SampleType.FACE, random_ct_samples_path)
        ct_index_host = mp_utils.IndexHost(len(ct_samples_host))
    else:
        ct_samples_host = None
        ct_index_host = None

    if self.debug:
        self.generators_count = 1
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func,
                           (samples_host.create_cli(), index_host.create_cli(),
                            ct_samples_host.create_cli() if ct_index_host is not None else None,
                            ct_index_host.create_cli() if ct_index_host is not None else None))]
    else:
        self.generators_count = np.clip(multiprocessing.cpu_count(), 2, 4)
        self.generators = [iter_utils.SubprocessGenerator(self.batch_func,
                           (samples_host.create_cli(), index_host.create_cli(),
                            ct_samples_host.create_cli() if ct_index_host is not None else None,
                            ct_index_host.create_cli() if ct_index_host is not None else None),
                           start_now=True)
                           for i in range(self.generators_count)]

    self.generator_counter = -1
def __init__(self, samples_path, debug=False, batch_size=1,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             person_id_mode=1,
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types
    self.person_id_mode = person_id_mode

    samples_host = SampleHost.mp_host(SampleType.FACE, self.samples_path)
    samples = samples_host.get_list()
    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    # Group sample indices by person, producing one index list per unique person name.
    unique_person_names = { sample.person_name for sample in samples }
    persons_name_idxs = { person_name : [] for person_name in unique_person_names }
    for i, sample in enumerate(samples):
        persons_name_idxs[sample.person_name].append(i)
    indexes2D = [ persons_name_idxs[person_name] for person_name in unique_person_names ]
    index2d_host = mp_utils.Index2DHost(indexes2D)

    if self.debug:
        self.generators_count = 1
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func,
                           (samples_host.create_cli(), index2d_host.create_cli(),))]
    else:
        self.generators_count = np.clip(multiprocessing.cpu_count(), 2, 4)
        self.generators = [iter_utils.SubprocessGenerator(self.batch_func,
                           (samples_host.create_cli(), index2d_host.create_cli(),),
                           start_now=True)
                           for i in range(self.generators_count)]

    self.generator_counter = -1
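# --- Illustrative sketch, not part of the original code ---
# The constructor above groups sample indices by person name and hands the resulting
# list-of-lists to mp_utils.Index2DHost (presumably so indices can be drawn per person;
# that host is not shown here). The standalone toy below rebuilds the same
# persons_name_idxs / indexes2D structures from hypothetical sample objects.
from types import SimpleNamespace

toy_samples = [SimpleNamespace(person_name=name)
               for name in ["alice", "bob", "alice", "carol", "bob", "alice"]]

unique_person_names = {s.person_name for s in toy_samples}
persons_name_idxs = {name: [] for name in unique_person_names}
for i, s in enumerate(toy_samples):
    persons_name_idxs[s.person_name].append(i)
indexes2D = [persons_name_idxs[name] for name in unique_person_names]

# indexes2D holds one list of sample indices per person, e.g. [[0, 2, 5], [1, 4], [3]]
# (outer order follows set iteration order, so it varies between runs).
print(indexes2D)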
def __init__(self, samples_path, debug, batch_size, temporal_image_count,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             generators_count=2,
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types

    self.samples = SampleHost.load(SampleType.FACE_TEMPORAL_SORTED, self.samples_path)

    if self.debug:
        self.generators_count = 1
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, 0)]
    else:
        self.generators_count = min(generators_count, len(self.samples))
        self.generators = [iter_utils.SubprocessGenerator(self.batch_func, i)
                           for i in range(self.generators_count)]

    self.generator_counter = -1
def __init__(self, samples_path, debug, batch_size, temporal_image_count=3,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             generators_count=2,
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = generators_count

    samples = SampleHost.load(SampleType.FACE_TEMPORAL_SORTED, self.samples_path)
    samples_len = len(samples)
    if samples_len == 0:
        raise ValueError('No training data provided.')

    # With mult_max = 1 this reduces to l + 1 = samples_len - temporal_image_count + 1,
    # i.e. the number of windows of temporal_image_count consecutive samples.
    mult_max = 1
    l = samples_len - (self.temporal_image_count * mult_max - (mult_max - 1))
    index_host = mplib.IndexHost(l + 1)

    pickled_samples = pickle.dumps(samples, 4)

    if self.debug:
        self.generators = [ThisThreadGenerator(self.batch_func,
                           (pickled_samples, index_host.create_cli(),))]
    else:
        self.generators = [SubprocessGenerator(self.batch_func,
                           (pickled_samples, index_host.create_cli(),),
                           start_now=True)
                           for i in range(self.generators_count)]

    self.generator_counter = -1
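# --- Illustrative sketch, not part of the original code ---
# With mult_max = 1 the expression above reduces to
#     l + 1 = samples_len - temporal_image_count + 1,
# which is the number of start positions for a window of temporal_image_count consecutive,
# temporally sorted samples. Treating each index as such a start position is an assumption
# here, since batch_func is not shown. A quick standalone check of the arithmetic:
samples_len = 10
temporal_image_count = 3
mult_max = 1

l = samples_len - (temporal_image_count * mult_max - (mult_max - 1))
windows = [list(range(start, start + temporal_image_count)) for start in range(l + 1)]

assert len(windows) == samples_len - temporal_image_count + 1  # 8 windows for this toy case
assert windows[-1][-1] == samples_len - 1                      # last window ends on the last sample
print(windows)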
def __init__(self, samples_path, debug, batch_size, temporal_image_count,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types

    self.samples = SampleHost.load(SampleType.IMAGE, self.samples_path)
    self.generator_samples = [self.samples]

    self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, 0)] if self.debug else \
                      [iter_utils.SubprocessGenerator(self.batch_func, 0)]

    self.generator_counter = -1
def __init__(self, samples_path, debug, batch_size, temporal_image_count,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             generators_count=2,
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.temporal_image_count = temporal_image_count
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types

    if self.debug:
        self.generators_count = 1
    else:
        self.generators_count = generators_count

    samples = SampleHost.load(SampleType.FACE_TEMPORAL_SORTED, self.samples_path)
    samples_len = len(samples)
    if samples_len == 0:
        raise ValueError('No training data provided.')

    pickled_samples = pickle.dumps(samples, 4)

    if self.debug:
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, (0, pickled_samples))]
    else:
        self.generators = [iter_utils.SubprocessGenerator(self.batch_func, (i, pickled_samples))
                           for i in range(self.generators_count)]

    self.generator_counter = -1
def get_person_id_max_count(samples_path):
    return SampleHost.get_person_id_max_count(samples_path)
def __init__(self, samples_path, debug=False, batch_size=1,
             sample_process_options=SampleProcessor.Options(),
             output_sample_types=[],
             person_id_mode=1,
             use_caching=False,
             generators_count=2,
             generators_random_seed=None,
             **kwargs):
    super().__init__(samples_path, debug, batch_size)
    self.sample_process_options = sample_process_options
    self.output_sample_types = output_sample_types
    self.person_id_mode = person_id_mode

    if generators_random_seed is not None and len(generators_random_seed) != generators_count:
        raise ValueError("len(generators_random_seed) != generators_count")
    self.generators_random_seed = generators_random_seed

    samples = SampleHost.load(SampleType.FACE, self.samples_path, person_id_mode=True, use_caching=use_caching)

    # Work on shallow copies so the loaded sample containers are not mutated in place.
    samples = copy.copy(samples)
    for i in range(len(samples)):
        samples[i] = copy.copy(samples[i])

    if person_id_mode == 1:
        #np.random.shuffle(samples)
        #
        #new_samples = []
        #while len(samples) > 0:
        #    for i in range( len(samples)-1, -1, -1):
        #        sample = samples[i]
        #
        #        if len(sample) > 0:
        #            new_samples.append(sample.pop(0))
        #
        #        if len(sample) == 0:
        #            samples.pop(i)
        #            i -= 1
        #samples = new_samples

        # Flatten the per-person lists into one list, then shuffle it.
        new_samples = []
        for s in samples:
            new_samples += s
        samples = new_samples
        np.random.shuffle(samples)

    self.samples_len = len(samples)
    if self.samples_len == 0:
        raise ValueError('No training data provided.')

    if self.debug:
        self.generators_count = 1
        self.generators = [iter_utils.ThisThreadGenerator(self.batch_func, (0, samples))]
    else:
        self.generators_count = min(generators_count, self.samples_len)
        if person_id_mode == 1:
            # Shard the flattened sample list across workers with stride slicing.
            self.generators = [iter_utils.SubprocessGenerator(self.batch_func, (i, samples[i::self.generators_count]))
                               for i in range(self.generators_count)]
        else:
            self.generators = [iter_utils.SubprocessGenerator(self.batch_func, (i, samples))
                               for i in range(self.generators_count)]

    self.generator_counter = -1
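# --- Illustrative sketch, not part of the original code ---
# In person_id_mode == 1 the constructor above shards the flattened, shuffled sample list
# across its subprocess generators with stride slicing: generator i receives
# samples[i::generators_count]. The toy below shows that split on plain integers.
toy_samples = list(range(10))
generators_count = 3

shards = [toy_samples[i::generators_count] for i in range(generators_count)]
# shards == [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]] -- disjoint, and together they cover every sample
assert sorted(x for shard in shards for x in shard) == toy_samples
print(shards)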