    def batch_func(self, param):
        samples, index2d_host = param
        bs = self.batch_size
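        # Each iteration draws bs person indices from the 2D index host, one
        # sample index per selected person, and yields the processed outputs
        # plus a per-item person-id array.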

        while True:
            person_idxs = index2d_host.get_1D(bs)            
            samples_idxs = index2d_host.get_2D(person_idxs, 1)
            
            batches = None
            for n_batch in range(bs):
                person_id = person_idxs[n_batch]
                sample_idx = samples_idxs[n_batch][0]

                sample = samples[ sample_idx ]
                try:
                    x, = SampleProcessor.process ([sample], self.sample_process_options, self.output_sample_types, self.debug)
                except Exception:
                    raise Exception ("Exception occurred in sample %s. Error: %s" % (sample.filename, traceback.format_exc() ) )
  
                if batches is None:
                    batches = [ [] for _ in range(len(x)) ]
                    
                    batches += [ [] ]
                    i_person_id = len(batches)-1

                for i in range(len(x)):
                    batches[i].append ( x[i] )

                batches[i_person_id].append ( np.array([person_id]) )
            
            yield [ np.array(batch) for batch in batches]
Example #2
    def __init__(self,
                 samples_path,
                 debug,
                 batch_size,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 raise_on_no_data=True,
                 **kwargs):
        super().__init__(debug, batch_size)
        self.initialized = False
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types

        samples = SampleLoader.load(SampleType.IMAGE, samples_path)

        if len(samples) == 0:
            if raise_on_no_data:
                raise ValueError('No training data provided.')
            return

        self.generators = [ThisThreadGenerator ( self.batch_func, samples )] if self.debug else \
                          [SubprocessGenerator ( self.batch_func, samples )]
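        # In debug mode the batch generator runs in this thread; otherwise the
        # work is handed to a subprocess generator.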

        self.generator_counter = -1
        self.initialized = True
Example #3
    def batch_func(self, samples):
        samples_len = len(samples)
        

        idxs = [ *range(samples_len) ]
        shuffle_idxs = []

        while True:

            batches = None
            for n_batch in range(self.batch_size):

                if len(shuffle_idxs) == 0:
                    shuffle_idxs = idxs.copy()
                    np.random.shuffle (shuffle_idxs)

                idx = shuffle_idxs.pop()
                sample = samples[idx]
                
                x, = SampleProcessor.process ([sample], self.sample_process_options, self.output_sample_types, self.debug)

                if batches is None:
                    batches = [ [] for _ in range(len(x)) ]

                for i in range(len(x)):
                    batches[i].append ( x[i] )

            yield [ np.array(batch) for batch in batches]
Example #4
    def __init__(self,
                 samples_path,
                 debug=False,
                 batch_size=1,
                 random_ct_samples_path=None,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 add_sample_idx=False,
                 generators_count=4,
                 **kwargs):

        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.add_sample_idx = add_sample_idx

        if self.debug:
            self.generators_count = 1
        else:
            self.generators_count = max(1, generators_count)

        samples = SampleHost.load(SampleType.FACE, self.samples_path)
        self.samples_len = len(samples)

        if self.samples_len == 0:
            raise ValueError('No training data provided.')

        index_host = mplib.IndexHost(self.samples_len)

        if random_ct_samples_path is not None:
            ct_samples = SampleHost.load(SampleType.FACE,
                                         random_ct_samples_path)
            ct_index_host = mplib.IndexHost(len(ct_samples))
        else:
            ct_samples = None
            ct_index_host = None

        pickled_samples = pickle.dumps(samples, 4)
        ct_pickled_samples = pickle.dumps(
            ct_samples, 4) if ct_samples is not None else None
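        # The sample lists are serialized once and handed to each generator as
        # a single pickled payload.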

        if self.debug:
            self.generators = [
                ThisThreadGenerator(
                    self.batch_func,
                    (pickled_samples, index_host.create_cli(),
                     ct_pickled_samples, ct_index_host.create_cli()
                     if ct_index_host is not None else None))
            ]
        else:
            self.generators = [
                SubprocessGenerator(
                    self.batch_func,
                    (pickled_samples, index_host.create_cli(),
                     ct_pickled_samples, ct_index_host.create_cli()
                     if ct_index_host is not None else None),
                    start_now=True) for i in range(self.generators_count)
            ]

        self.generator_counter = -1
    def __init__(self,
                 samples_path,
                 debug=False,
                 batch_size=1,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 person_id_mode=1,
                 generators_count=2,
                 generators_random_seed=None,
                 **kwargs):

        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.person_id_mode = person_id_mode

        if generators_random_seed is not None and len(
                generators_random_seed) != generators_count:
            raise ValueError("len(generators_random_seed) != generators_count")
        self.generators_random_seed = generators_random_seed

        samples = SampleLoader.load(SampleType.FACE,
                                    self.samples_path,
                                    person_id_mode=True)

        if person_id_mode == 1:
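            # person_id_mode 1: flatten the per-person sample lists into a
            # single shuffled pool.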
            new_samples = []
            for s in samples:
                new_samples += s
            samples = new_samples
            np.random.shuffle(samples)

        self.samples_len = len(samples)

        if self.samples_len == 0:
            raise ValueError('No training data provided.')

        if self.debug:
            self.generators_count = 1
            self.generators = [
                iter_utils.ThisThreadGenerator(self.batch_func, (0, samples))
            ]
        else:
            self.generators_count = min(generators_count, self.samples_len)

            if person_id_mode == 1:
                self.generators = [
                    iter_utils.SubprocessGenerator(
                        self.batch_func,
                        (i, samples[i::self.generators_count]))
                    for i in range(self.generators_count)
                ]
            else:
                self.generators = [
                    iter_utils.SubprocessGenerator(self.batch_func,
                                                   (i, samples))
                    for i in range(self.generators_count)
                ]

        self.generator_counter = -1
    def __init__ (self, samples_path, debug, batch_size,
                        temporal_image_count=3,
                        sample_process_options=SampleProcessor.Options(),
                        output_sample_types=[],
                        generators_count=2,
                        **kwargs):
        super().__init__(debug, batch_size)

        self.temporal_image_count = temporal_image_count
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types

        if self.debug:
            self.generators_count = 1
        else:
            self.generators_count = generators_count

        samples = SampleLoader.load (SampleType.FACE_TEMPORAL_SORTED, samples_path)
        samples_len = len(samples)
        if samples_len == 0:
            raise ValueError('No training data provided.')

        mult_max = 1
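        # With mult_max fixed at 1, the largest valid start index leaves room
        # for temporal_image_count consecutive frames.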
        l = samples_len - ( (self.temporal_image_count)*mult_max - (mult_max-1)  )
        index_host = mplib.IndexHost(l+1)

        pickled_samples = pickle.dumps(samples, 4)
        if self.debug:
            self.generators = [ThisThreadGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(),) )]
        else:
            self.generators = [SubprocessGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(),) ) for i in range(self.generators_count) ]

        self.generator_counter = -1
    def batch_func(self, param):
        mult_max = 1
        bs = self.batch_size
        pickled_samples, index_host = param
        samples = pickle.loads(pickled_samples)
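        # Each batch item is built from temporal_image_count consecutive
        # frames, starting at an index supplied by the index host.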

        while True:
            batches = None

            indexes = index_host.multi_get(bs)

            for n_batch in range(self.batch_size):
                idx = indexes[n_batch]

                temporal_samples = []
                mult = np.random.randint(mult_max)+1
                for i in range( self.temporal_image_count ):
                    sample = samples[ idx+i*mult ]
                    try:
                        temporal_samples += SampleProcessor.process ([sample], self.sample_process_options, self.output_sample_types, self.debug)[0]
                    except Exception:
                        raise Exception ("Exception occurred in sample %s. Error: %s" % (sample.filename, traceback.format_exc() ) )

                if batches is None:
                    batches = [ [] for _ in range(len(temporal_samples)) ]

                for i in range(len(temporal_samples)):
                    batches[i].append ( temporal_samples[i] )

            yield [ np.array(batch) for batch in batches]
    def __init__ (self, samples_path, debug=False, batch_size=1, 
                        sample_process_options=SampleProcessor.Options(), 
                        output_sample_types=[], 
                        person_id_mode=1,
                        **kwargs):
                        
        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.person_id_mode = person_id_mode


        samples_host = SampleHost.mp_host (SampleType.FACE, self.samples_path)
        samples = samples_host.get_list()
        self.samples_len = len(samples)

        if self.samples_len == 0:
            raise ValueError('No training data provided.')

        unique_person_names = { sample.person_name for sample in samples } 
        persons_name_idxs = { person_name : [] for person_name in unique_person_names }        
        for i,sample in enumerate(samples):            
            persons_name_idxs[sample.person_name].append (i)  
        indexes2D = [ persons_name_idxs[person_name] for person_name in unique_person_names ]
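        # Group sample indices by person name; Index2DHost serves these
        # per-person index lists to batch_func.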
        index2d_host = mp_utils.Index2DHost(indexes2D)
        
        if self.debug:
            self.generators_count = 1
            self.generators = [iter_utils.ThisThreadGenerator ( self.batch_func, (samples_host.create_cli(), index2d_host.create_cli(),) )]
        else:
            self.generators_count = np.clip(multiprocessing.cpu_count(), 2, 4)
            self.generators = [iter_utils.SubprocessGenerator ( self.batch_func, (samples_host.create_cli(), index2d_host.create_cli(),), start_now=True ) for i in range(self.generators_count) ]

        self.generator_counter = -1
    def __init__(self,
                 samples_path,
                 debug,
                 batch_size,
                 temporal_image_count,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 generators_count=2,
                 **kwargs):
        super().__init__(samples_path, debug, batch_size)

        self.temporal_image_count = temporal_image_count
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types

        self.samples = SampleLoader.load(SampleType.FACE_TEMPORAL_SORTED,
                                         self.samples_path)

        if self.debug:
            self.generators_count = 1
            self.generators = [
                iter_utils.ThisThreadGenerator(self.batch_func, 0)
            ]
        else:
            self.generators_count = min(generators_count, len(self.samples))
            self.generators = [
                iter_utils.SubprocessGenerator(self.batch_func, i)
                for i in range(self.generators_count)
            ]

        self.generator_counter = -1
    def batch_func(self, param ):
        samples, index_host, ct_samples, ct_index_host = param
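        # When ct_samples are provided, each batch item also draws a ct
        # (color transfer) sample and passes it to SampleProcessor.process.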
 
        bs = self.batch_size
        while True:
            batches = None

            indexes = index_host.multi_get(bs)
            ct_indexes = ct_index_host.multi_get(bs) if ct_samples is not None else None

            for n_batch in range(bs):
                sample_idx = indexes[n_batch]
                sample = samples[sample_idx]

                ct_sample = None
                if ct_samples is not None:
                    ct_sample = ct_samples[ct_indexes[n_batch]]

                try:
                    x, = SampleProcessor.process ([sample], self.sample_process_options, self.output_sample_types, self.debug, ct_sample=ct_sample)
                except Exception:
                    raise Exception ("Exception occurred in sample %s. Error: %s" % (sample.filename, traceback.format_exc() ) )

                if batches is None:
                    batches = [ [] for _ in range(len(x)) ]

                for i in range(len(x)):
                    batches[i].append ( x[i] )

            yield [ np.array(batch) for batch in batches]
Example #11
    def __init__(self,
                 samples_path,
                 debug=False,
                 batch_size=1,
                 random_ct_samples_path=None,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 add_sample_idx=False,
                 generators_count=4,
                 raise_on_no_data=True,
                 **kwargs):

        super().__init__(debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.add_sample_idx = add_sample_idx

        if self.debug:
            self.generators_count = 1
        else:
            self.generators_count = max(1, generators_count)

        samples = SampleLoader.load(SampleType.FACE, samples_path)
        self.samples_len = len(samples)

        self.initialized = False
        if self.samples_len == 0:
            if raise_on_no_data:
                raise ValueError('No training data provided.')
            else:
                return

        index_host = mplib.IndexHost(self.samples_len)

        if random_ct_samples_path is not None:
            ct_samples = SampleLoader.load(SampleType.FACE,
                                           random_ct_samples_path)
            ct_index_host = mplib.IndexHost(len(ct_samples))
        else:
            ct_samples = None
            ct_index_host = None

        if self.debug:
            self.generators = [
                ThisThreadGenerator(self.batch_func,
                                    (samples, index_host.create_cli(),
                                     ct_samples, ct_index_host.create_cli()
                                     if ct_index_host is not None else None))
            ]
        else:
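            # Create the subprocess generators with start_now=False, then
            # launch them together with start_in_parallel.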
            self.generators = [SubprocessGenerator ( self.batch_func, (samples, index_host.create_cli(), ct_samples, ct_index_host.create_cli() if ct_index_host is not None else None), start_now=False ) \
                               for i in range(self.generators_count) ]

            SubprocessGenerator.start_in_parallel(self.generators)

        self.generator_counter = -1

        self.initialized = True
Example #12
    def __init__(self,
                 samples_path,
                 debug,
                 batch_size,
                 sort_by_yaw=False,
                 sort_by_yaw_target_samples_path=None,
                 random_ct_samples_path=None,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 add_sample_idx=False,
                 generators_count=2,
                 generators_random_seed=None,
                 **kwargs):
        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.add_sample_idx = add_sample_idx

        if sort_by_yaw_target_samples_path is not None:
            self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
        elif sort_by_yaw:
            self.sample_type = SampleType.FACE_YAW_SORTED
        else:
            self.sample_type = SampleType.FACE

        if generators_random_seed is not None and len(
                generators_random_seed) != generators_count:
            raise ValueError("len(generators_random_seed) != generators_count")

        self.generators_random_seed = generators_random_seed

        samples = SampleLoader.load(self.sample_type, self.samples_path,
                                    sort_by_yaw_target_samples_path)

        ct_samples = SampleLoader.load(
            SampleType.FACE, random_ct_samples_path
        ) if random_ct_samples_path is not None else None
        self.random_ct_sample_chance = 100

        if self.debug:
            self.generators_count = 1
            self.generators = [
                iter_utils.ThisThreadGenerator(self.batch_func,
                                               (0, samples, ct_samples))
            ]
        else:
            self.generators_count = min(generators_count, len(samples))
            self.generators = [
                iter_utils.SubprocessGenerator(
                    self.batch_func,
                    (i, samples[i::self.generators_count], ct_samples))
                for i in range(self.generators_count)
            ]

        self.generator_counter = -1
    def __init__(self,
                 samples_path,
                 debug=False,
                 batch_size=1,
                 random_ct_samples_path=None,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 add_sample_idx=False,
                 generators_count=4,
                 rnd_seed=None,
                 **kwargs):

        super().__init__(debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.add_sample_idx = add_sample_idx

        if rnd_seed is None:
            rnd_seed = np.random.randint(0x80000000)
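            # Each subprocess generator below receives rnd_seed + i, so the
            # workers shuffle differently but reproducibly.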

        if self.debug:
            self.generators_count = 1
        else:
            self.generators_count = max(1, generators_count)

        samples = SampleLoader.load(SampleType.FACE, samples_path)
        self.samples_len = len(samples)

        if self.samples_len == 0:
            raise ValueError('No training data provided.')

        if random_ct_samples_path is not None:
            ct_samples = SampleLoader.load(SampleType.FACE,
                                           random_ct_samples_path)
        else:
            ct_samples = None

        pickled_samples = pickle.dumps(samples, 4)
        ct_pickled_samples = pickle.dumps(
            ct_samples, 4) if ct_samples is not None else None

        if self.debug:
            self.generators = [
                ThisThreadGenerator(
                    self.batch_func,
                    (pickled_samples, ct_pickled_samples, rnd_seed))
            ]
        else:
            self.generators = [SubprocessGenerator ( self.batch_func, (pickled_samples, ct_pickled_samples, rnd_seed+i), start_now=False ) \
                               for i in range(self.generators_count) ]

            SubprocessGenerator.start_in_parallel(self.generators)

        self.generator_counter = -1
    def __init__(self,
                 samples_path,
                 debug,
                 batch_size,
                 sort_by_yaw=False,
                 sort_by_yaw_target_samples_path=None,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 add_sample_idx=False,
                 add_pitch=False,
                 add_yaw=False,
                 generators_count=2,
                 generators_random_seed=None,
                 **kwargs):
        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.add_sample_idx = add_sample_idx
        self.add_pitch = add_pitch
        self.add_yaw = add_yaw

        if sort_by_yaw_target_samples_path is not None:
            self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
        elif sort_by_yaw:
            self.sample_type = SampleType.FACE_YAW_SORTED
        else:
            self.sample_type = SampleType.FACE

        self.samples = SampleLoader.load(self.sample_type, self.samples_path,
                                         sort_by_yaw_target_samples_path)

        if generators_random_seed is not None and len(
                generators_random_seed) != generators_count:
            raise ValueError("len(generators_random_seed) != generators_count")

        self.generators_random_seed = generators_random_seed

        if self.debug:
            self.generators_count = 1
            self.generators = [
                iter_utils.ThisThreadGenerator(self.batch_func, 0)
            ]
        else:
            self.generators_count = min(generators_count, len(self.samples))
            self.generators = [
                iter_utils.SubprocessGenerator(self.batch_func, i)
                for i in range(self.generators_count)
            ]

        self.generators_sq = [
            multiprocessing.Queue() for _ in range(self.generators_count)
        ]

        self.generator_counter = -1
    def __init__(self,
                 samples_path,
                 debug=False,
                 batch_size=1,
                 random_ct_samples_path=None,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 add_sample_idx=False,
                 **kwargs):

        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.add_sample_idx = add_sample_idx

        samples_host = SampleHost.mp_host(SampleType.FACE, self.samples_path)
        self.samples_len = len(samples_host)

        if self.samples_len == 0:
            raise ValueError('No training data provided.')

        index_host = mp_utils.IndexHost(self.samples_len)

        if random_ct_samples_path is not None:
            ct_samples_host = SampleHost.mp_host(SampleType.FACE,
                                                 random_ct_samples_path)
            ct_index_host = mp_utils.IndexHost(len(ct_samples_host))
        else:
            ct_samples_host = None
            ct_index_host = None

        if self.debug:
            self.generators_count = 1
            self.generators = [
                iter_utils.ThisThreadGenerator(
                    self.batch_func,
                    (samples_host.create_cli(), index_host.create_cli(),
                     ct_samples_host.create_cli() if ct_index_host is not None
                     else None, ct_index_host.create_cli()
                     if ct_index_host is not None else None))
            ]
        else:
            self.generators_count = np.clip(multiprocessing.cpu_count(), 2, 4)
            self.generators = [
                iter_utils.SubprocessGenerator(
                    self.batch_func,
                    (samples_host.create_cli(), index_host.create_cli(),
                     ct_samples_host.create_cli() if ct_index_host is not None
                     else None, ct_index_host.create_cli()
                     if ct_index_host is not None else None),
                    start_now=True) for i in range(self.generators_count)
            ]

        self.generator_counter = -1
    def batch_func(self, generator_id):
        samples = self.samples
        samples_len = len(samples)
        if samples_len == 0:
            raise ValueError('No training data provided.')

        mult_max = 1
        l = samples_len - ((self.temporal_image_count) * mult_max -
                           (mult_max - 1))

        samples_idxs = [*range(l + 1)][generator_id::self.generators_count]
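        # Each generator works on an interleaved slice of the valid start
        # indices so the workers cover different samples.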

        if len(samples_idxs) - self.temporal_image_count < 0:
            raise ValueError('Not enough samples to fit temporal line.')

        shuffle_idxs = []

        while True:

            batches = None
            for _ in range(self.batch_size):

                if not shuffle_idxs:
                    shuffle_idxs = samples_idxs.copy()
                    np.random.shuffle(shuffle_idxs)

                idx = shuffle_idxs.pop()

                temporal_samples = []
                mult = np.random.randint(mult_max) + 1
                for i in range(self.temporal_image_count):
                    sample = samples[idx + i * mult]
                    try:
                        temporal_samples += SampleProcessor.process(
                            sample, self.sample_process_options,
                            self.output_sample_types, self.debug)
                    except Exception:
                        raise Exception(
                            "Exception occurred in sample %s. Error: %s" %
                            (sample.filename, traceback.format_exc()))

                if batches is None:
                    batches = [[] for _ in range(len(temporal_samples))]

                for i in range(len(temporal_samples)):
                    batches[i].append(temporal_samples[i])

            yield [np.array(batch) for batch in batches]
Example #17
    def batch_func(self, generator_id):
        samples = self.generator_samples[generator_id]
        samples_len = len(samples)
        if samples_len == 0:
            raise ValueError('No training data provided.')

        if samples_len - self.temporal_image_count < 0:
            raise ValueError('Not enough samples to fit temporal line.')

        shuffle_idxs = []
        samples_sub_len = samples_len - self.temporal_image_count + 1
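        # Only start indices that leave room for temporal_image_count
        # consecutive frames are valid.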

        while True:

            batches = None
            for n_batch in range(self.batch_size):

                if len(shuffle_idxs) == 0:
                    shuffle_idxs = [*range(samples_sub_len)]
                    np.random.shuffle(shuffle_idxs)

                idx = shuffle_idxs.pop()

                temporal_samples = []

                for i in range(self.temporal_image_count):
                    sample = samples[idx + i]
                    try:
                        temporal_samples += SampleProcessor.process(
                            sample, self.sample_process_options,
                            self.output_sample_types, self.debug)
                    except Exception:
                        raise Exception(
                            "Exception occurred in sample %s. Error: %s" %
                            (sample.filename, traceback.format_exc()))

                if batches is None:
                    batches = [[] for _ in range(len(temporal_samples))]

                for i in range(len(temporal_samples)):
                    batches[i].append(temporal_samples[i])

            yield [np.array(batch) for batch in batches]
Example #18
    def __init__(self,
                 samples_path,
                 debug,
                 batch_size,
                 temporal_image_count,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 **kwargs):
        super().__init__(debug, batch_size)

        self.temporal_image_count = temporal_image_count
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types

        self.samples = SampleLoader.load(SampleType.IMAGE, samples_path)

        self.generator_samples = [self.samples]
        self.generators = [iter_utils.ThisThreadGenerator ( self.batch_func, 0 )] if self.debug else \
                          [iter_utils.SubprocessGenerator ( self.batch_func, 0 )]

        self.generator_counter = -1
Example #19
    def __init__(self,
                 samples_path,
                 debug,
                 batch_size,
                 temporal_image_count,
                 sample_process_options=SampleProcessor.Options(),
                 output_sample_types=[],
                 generators_count=2,
                 **kwargs):
        super().__init__(samples_path, debug, batch_size)

        self.temporal_image_count = temporal_image_count
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types

        if self.debug:
            self.generators_count = 1
        else:
            self.generators_count = generators_count

        samples = SampleHost.load(SampleType.FACE_TEMPORAL_SORTED,
                                  self.samples_path)
        samples_len = len(samples)
        if samples_len == 0:
            raise ValueError('No training data provided.')

        pickled_samples = pickle.dumps(samples, 4)
        if self.debug:
            self.generators = [
                iter_utils.ThisThreadGenerator(self.batch_func,
                                               (0, pickled_samples))
            ]
        else:
            self.generators = [
                iter_utils.SubprocessGenerator(self.batch_func,
                                               (i, pickled_samples))
                for i in range(self.generators_count)
            ]

        self.generator_counter = -1
    def __init__(self,
                 samples_path,
                 debug,
                 batch_size,
                 resolution,
                 sample_process_options=SampleProcessor.Options(),
                 **kwargs):
        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.resolution = resolution

        self.samples = SampleLoader.load(
            SampleType.FACE_WITH_NEAREST_AS_TARGET, self.samples_path,
            self.samples_path)

        if self.debug:
            self.generator_samples = [self.samples]
            self.generators = [
                iter_utils.ThisThreadGenerator(self.batch_func, 0)
            ]
        else:
            if len(self.samples) > 1:
                self.generator_samples = [
                    self.samples[0::2], self.samples[1::2]
                ]
                self.generators = [
                    iter_utils.SubprocessGenerator(self.batch_func, 0),
                    iter_utils.SubprocessGenerator(self.batch_func, 1)
                ]
            else:
                self.generator_samples = [self.samples]
                self.generators = [
                    iter_utils.SubprocessGenerator(self.batch_func, 0)
                ]

        self.generator_counter = -1
    def batch_func(self, param):
        pickled_samples, ct_pickled_samples, rnd_seed = param

        rnd_state = np.random.RandomState(rnd_seed)
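        # A per-generator RandomState keeps shuffling reproducible; each worker
        # was constructed with a different rnd_seed.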

        samples = pickle.loads(pickled_samples)
        idxs = [*range(len(samples))]
        shuffle_idxs = []

        if ct_pickled_samples is not None:
            ct_samples = pickle.loads(ct_pickled_samples)
            ct_idxs = [*range(len(ct_samples))]
            ct_shuffle_idxs = []
        else:
            ct_samples = None

        bs = self.batch_size
        while True:
            batches = None

            for n_batch in range(bs):

                if len(shuffle_idxs) == 0:
                    shuffle_idxs = idxs.copy()
                    rnd_state.shuffle(shuffle_idxs)

                sample_idx = shuffle_idxs.pop()
                sample = samples[sample_idx]

                ct_sample = None
                if ct_samples is not None:
                    if len(ct_shuffle_idxs) == 0:
                        ct_shuffle_idxs = ct_idxs.copy()
                        rnd_state.shuffle(ct_shuffle_idxs)
                    ct_sample_idx = ct_shuffle_idxs.pop()
                    ct_sample = ct_samples[ct_sample_idx]

                try:
                    x, = SampleProcessor.process([sample],
                                                 self.sample_process_options,
                                                 self.output_sample_types,
                                                 self.debug,
                                                 ct_sample=ct_sample,
                                                 rnd_state=rnd_state)
                except Exception:
                    raise Exception(
                        "Exception occurred in sample %s. Error: %s" %
                        (sample.filename, traceback.format_exc()))

                if batches is None:
                    batches = [[] for _ in range(len(x))]
                    if self.add_sample_idx:
                        batches += [[]]
                        i_sample_idx = len(batches) - 1

                for i in range(len(x)):
                    batches[i].append(x[i])

                if self.add_sample_idx:
                    batches[i_sample_idx].append(sample_idx)

            yield [np.array(batch) for batch in batches]
    def batch_func(self, param):
        generator_id, samples, ct_samples = param

        if self.generators_random_seed is not None:
            np.random.seed(self.generators_random_seed[generator_id])

        samples_len = len(samples)
        samples_idxs = [*range(samples_len)]

        ct_samples_len = len(ct_samples) if ct_samples is not None else 0
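        # When ct_samples are available, one is picked at random per batch item
        # and passed to SampleProcessor.process as ct_sample.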

        if len(samples_idxs) == 0:
            raise ValueError('No training data provided.')

        if self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
            if all(samples[idx] is None for idx in samples_idxs):
                raise ValueError(
                    'Not enough training data. Gather more faces!')

        if self.sample_type == SampleType.FACE:
            shuffle_idxs = []
        elif self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
            shuffle_idxs = []
            shuffle_idxs_2D = [[] for _ in range(samples_len)]

        while True:
            batches = None
            for n_batch in range(self.batch_size):
                while True:
                    sample = None

                    if self.sample_type == SampleType.FACE:
                        if len(shuffle_idxs) == 0:
                            shuffle_idxs = samples_idxs.copy()
                            np.random.shuffle(shuffle_idxs)

                        idx = shuffle_idxs.pop()
                        sample = samples[idx]

                    elif self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
                        if len(shuffle_idxs) == 0:
                            shuffle_idxs = samples_idxs.copy()
                            np.random.shuffle(shuffle_idxs)

                        idx = shuffle_idxs.pop()
                        if samples[idx] is not None:
                            if len(shuffle_idxs_2D[idx]) == 0:
                                a = shuffle_idxs_2D[idx] = [
                                    *range(len(samples[idx]))
                                ]
                                np.random.shuffle(a)

                            idx2 = shuffle_idxs_2D[idx].pop()
                            sample = samples[idx][idx2]

                            idx = (idx << 16) | (idx2 & 0xFFFF)
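                            # idx now packs the yaw bucket in the high bits and
                            # the within-bucket index in the low 16 bits.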

                    if sample is not None:
                        try:
                            x = SampleProcessor.process(
                                sample,
                                self.sample_process_options,
                                self.output_sample_types,
                                self.debug,
                                ct_sample=ct_samples[np.random.randint(
                                    ct_samples_len)]
                                if ct_samples is not None else None)
                        except Exception:
                            raise Exception(
                                "Exception occurred in sample %s. Error: %s" %
                                (sample.filename, traceback.format_exc()))

                        if not isinstance(x, (tuple, list)):
                            raise Exception(
                                'SampleProcessor.process must return a tuple or list')

                        if batches is None:
                            batches = [[] for _ in range(len(x))]
                            if self.add_sample_idx:
                                batches += [[]]
                                i_sample_idx = len(batches) - 1

                        for i in range(len(x)):
                            batches[i].append(x[i])

                        if self.add_sample_idx:
                            batches[i_sample_idx].append(idx)

                        break
            yield [np.array(batch) for batch in batches]
    def batch_func(self, generator_id):
        gen_sq = self.generators_sq[generator_id]
        if self.generators_random_seed is not None:
            np.random.seed(self.generators_random_seed[generator_id])

        samples = self.samples
        samples_len = len(samples)
        samples_idxs = [*range(samples_len)
                        ][generator_id::self.generators_count]
        repeat_samples_idxs = []
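        # Indices received through the generators_sq queue are replayed before
        # new random indices are drawn.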

        if len(samples_idxs) == 0:
            raise ValueError('No training data provided.')

        if self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
            if all(samples[idx] is None for idx in samples_idxs):
                raise ValueError(
                    'Not enough training data. Gather more faces!')

        if self.sample_type == SampleType.FACE:
            shuffle_idxs = []
        elif self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
            shuffle_idxs = []
            shuffle_idxs_2D = [[] for _ in range(samples_len)]

        while True:
            while not gen_sq.empty():
                idxs = gen_sq.get()
                for idx in idxs:
                    if idx in samples_idxs:
                        repeat_samples_idxs.append(idx)

            batches = None
            for n_batch in range(self.batch_size):
                while True:
                    sample = None

                    if len(repeat_samples_idxs) > 0:
                        idx = repeat_samples_idxs.pop()
                        if self.sample_type == SampleType.FACE:
                            sample = samples[idx]
                        elif self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
                            sample = samples[(idx >> 16) & 0xFFFF][idx
                                                                   & 0xFFFF]
                    else:
                        if self.sample_type == SampleType.FACE:
                            if len(shuffle_idxs) == 0:
                                shuffle_idxs = samples_idxs.copy()
                                np.random.shuffle(shuffle_idxs)

                            idx = shuffle_idxs.pop()
                            sample = samples[idx]

                        elif self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
                            if len(shuffle_idxs) == 0:
                                shuffle_idxs = samples_idxs.copy()
                                np.random.shuffle(shuffle_idxs)

                            idx = shuffle_idxs.pop()
                            if samples[idx] is not None:
                                if len(shuffle_idxs_2D[idx]) == 0:
                                    a = shuffle_idxs_2D[idx] = [
                                        *range(len(samples[idx]))
                                    ]
                                    np.random.shuffle(a)

                                idx2 = shuffle_idxs_2D[idx].pop()
                                sample = samples[idx][idx2]

                                idx = (idx << 16) | (idx2 & 0xFFFF)
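                                # idx packs the yaw bucket in the high bits and
                                # the within-bucket index in the low 16 bits,
                                # matching the unpacking of repeat_samples_idxs.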

                    if sample is not None:
                        try:
                            x = SampleProcessor.process(
                                sample, self.sample_process_options,
                                self.output_sample_types, self.debug)
                        except Exception:
                            raise Exception(
                                "Exception occurred in sample %s. Error: %s" %
                                (sample.filename, traceback.format_exc()))

                        if not isinstance(x, (tuple, list)):
                            raise Exception(
                                'SampleProcessor.process must return a tuple or list')

                        if batches is None:
                            batches = [[] for _ in range(len(x))]
                            if self.add_sample_idx:
                                batches += [[]]
                                i_sample_idx = len(batches) - 1
                            if self.add_pitch:
                                batches += [[]]
                                i_pitch = len(batches) - 1
                            if self.add_yaw:
                                batches += [[]]
                                i_yaw = len(batches) - 1

                        for i in range(len(x)):
                            batches[i].append(x[i])

                        if self.add_sample_idx:
                            batches[i_sample_idx].append(idx)

                        if self.add_pitch or self.add_yaw:
                            pitch, yaw = LandmarksProcessor.estimate_pitch_yaw(
                                sample.landmarks)

                        if self.add_pitch:
                            batches[i_pitch].append([pitch])

                        if self.add_yaw:
                            batches[i_yaw].append([yaw])

                        break
            yield [np.array(batch) for batch in batches]
    def batch_func(self, generator_id):
        samples = self.generator_samples[generator_id]
        data_len = len(samples)
        if data_len == 0:
            raise ValueError('No training data provided.')

        shuffle_idxs = []

        output_sample_types = [
            [f.WARPED_TRANSFORMED | face_type | f.MODE_BGR, self.resolution],
            [f.TRANSFORMED | face_type | f.MODE_BGR, self.resolution],
            [
                f.TRANSFORMED | face_type | f.MODE_M | f.FACE_MASK_FULL,
                self.resolution
            ]
        ]

        while True:

            batches = None
            for n_batch in range(self.batch_size):
                while True:

                    if len(shuffle_idxs) == 0:
                        shuffle_idxs = [*range(data_len)]
                        np.random.shuffle(shuffle_idxs)

                    idx = shuffle_idxs.pop()
                    sample = samples[idx]
                    nearest_sample = sample.nearest_target_list[
                        np.random.randint(1, len(sample.nearest_target_list))]

                    try:
                        x = SampleProcessor.process(
                            sample, self.sample_process_options,
                            output_sample_types, self.debug)
                    except Exception:
                        raise Exception(
                            "Exception occurred in sample %s. Error: %s" %
                            (sample.filename, traceback.format_exc()))

                    try:
                        x2 = SampleProcessor.process(
                            nearest_sample, self.sample_process_options,
                            output_sample_types, self.debug)
                    except Exception:
                        raise Exception(
                            "Exception occurred in sample %s. Error: %s" %
                            (nearest_sample.filename, traceback.format_exc()))

                    if batches is None:
                        batches = [[] for _ in range(6)]

                    batches[0].append(x[0])
                    batches[1].append(x[1])
                    batches[2].append(x[2])
                    batches[3].append(x2[0])
                    batches[4].append(x2[1])
                    batches[5].append(x2[2])

                    res = sample.shape[0]

                    s_landmarks = sample.landmarks.copy()
                    d_landmarks = nearest_sample.landmarks.copy()

                    idxs = list(range(len(s_landmarks)))
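                    # Drop landmarks that lie within 5 px of the image border
                    # or within 5 px of another landmark before morphing.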

                    for i in idxs[:]:
                        s_l = s_landmarks[i]
                        d_l = d_landmarks[i]
                        if s_l[0] < 5 or s_l[1] < 5 or s_l[0] >= res-5 or s_l[1] >= res-5 or \
                           d_l[0] < 5 or d_l[1] < 5 or d_l[0] >= res-5 or d_l[1] >= res-5:
                            idxs.remove(i)

                    for landmarks in [s_landmarks, d_landmarks]:
                        for i in idxs[:]:
                            s_l = landmarks[i]
                            for j in idxs[:]:
                                if i == j:
                                    continue
                                s_l_2 = landmarks[j]
                                diff_l = np.abs(s_l - s_l_2)
                                if np.sqrt(diff_l.dot(diff_l)) < 5:
                                    idxs.remove(i)
                                    break

                    s_landmarks = s_landmarks[idxs]
                    d_landmarks = d_landmarks[idxs]
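                    # Append fixed anchor points along the image border so
                    # morph_by_points keeps the edges in place.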

                    s_landmarks = np.concatenate([
                        s_landmarks,
                        [[0, 0], [res // 2, 0], [res - 1, 0], [0, res // 2],
                         [res - 1, res // 2], [0, res - 1],
                         [res // 2, res - 1], [res - 1, res - 1]]
                    ])
                    d_landmarks = np.concatenate([
                        d_landmarks,
                        [[0, 0], [res // 2, 0], [res - 1, 0], [0, res // 2],
                         [res - 1, res // 2], [0, res - 1],
                         [res // 2, res - 1], [res - 1, res - 1]]
                    ])

                    x_len = len(x)
                    for i in range(x_len):

                        x[i] = imagelib.morph_by_points(
                            x[i], s_landmarks, d_landmarks)
                        x2[i] = imagelib.morph_by_points(
                            x2[i], d_landmarks, s_landmarks)

                        batches[i].append(x[i])
                        batches[i + x_len].append(x2[i])

                    break

            yield [np.array(batch) for batch in batches]
    def __init__ (self, samples_path, debug=False, batch_size=1,
                        random_ct_samples_path=None,
                        sample_process_options=SampleProcessor.Options(),
                        output_sample_types=[],
                        uniform_yaw_distribution=False,
                        generators_count=4,
                        raise_on_no_data=True,                        
                        **kwargs):

        super().__init__(debug, batch_size)
        self.initialized = False
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        
        if self.debug:
            self.generators_count = 1
        else:
            self.generators_count = max(1, generators_count)

        samples = SampleLoader.load (SampleType.FACE, samples_path)
        self.samples_len = len(samples)
        
        if self.samples_len == 0:
            if raise_on_no_data:
                raise ValueError('No training data provided.')
            else:
                return
                
        if uniform_yaw_distribution:
            samples_pyr = [ ( idx, sample.get_pitch_yaw_roll() ) for idx, sample in enumerate(samples) ]
            
            grads = 128
            # Instead of math.pi / 2, use -1.2..+1.2 because the actual maximum yaw for 2DFAN landmarks is about -1.2..+1.2.
            grads_space = np.linspace (-1.2, 1.2,grads)

            yaws_sample_list = [None]*grads
            for g in io.progress_bar_generator ( range(grads), "Sort by yaw"):
                yaw = grads_space[g]
                next_yaw = grads_space[g+1] if g < grads-1 else yaw

                yaw_samples = []
                for idx, pyr in samples_pyr:
                    s_yaw = -pyr[1]
                    if (g == 0          and s_yaw < next_yaw) or \
                    (g < grads-1     and s_yaw >= yaw and s_yaw < next_yaw) or \
                    (g == grads-1    and s_yaw >= yaw):
                        yaw_samples += [ idx ]
                if len(yaw_samples) > 0:
                    yaws_sample_list[g] = yaw_samples
            
            yaws_sample_list = [ y for y in yaws_sample_list if y is not None ]
            
            index_host = mplib.Index2DHost( yaws_sample_list )
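            # Indexing buckets of similar yaw through Index2DHost makes the
            # yaw distribution of drawn samples more uniform.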
        else:
            index_host = mplib.IndexHost(self.samples_len)

        if random_ct_samples_path is not None:
            ct_samples = SampleLoader.load (SampleType.FACE, random_ct_samples_path)
            ct_index_host = mplib.IndexHost( len(ct_samples) )
        else:
            ct_samples = None
            ct_index_host = None

        if self.debug:
            self.generators = [ThisThreadGenerator ( self.batch_func, (samples, index_host.create_cli(), ct_samples, ct_index_host.create_cli() if ct_index_host is not None else None) )]
        else:
            self.generators = [SubprocessGenerator ( self.batch_func, (samples, index_host.create_cli(), ct_samples, ct_index_host.create_cli() if ct_index_host is not None else None), start_now=False ) \
                               for i in range(self.generators_count) ]
                               
            SubprocessGenerator.start_in_parallel( self.generators )

        self.generator_counter = -1
        
        self.initialized = True
    def __init__ (self, samples_path, debug=False, batch_size=1, 
                        sample_process_options=SampleProcessor.Options(), 
                        output_sample_types=[], 
                        person_id_mode=1,
                        use_caching=False,
                        generators_count=2, 
                        generators_random_seed=None,
                        **kwargs):
                        
        super().__init__(samples_path, debug, batch_size)
        self.sample_process_options = sample_process_options
        self.output_sample_types = output_sample_types
        self.person_id_mode = person_id_mode

        if generators_random_seed is not None and len(generators_random_seed) != generators_count:
            raise ValueError("len(generators_random_seed) != generators_count")
        self.generators_random_seed = generators_random_seed
        
        samples = SampleHost.load (SampleType.FACE, self.samples_path, person_id_mode=True, use_caching=use_caching)
        samples = copy.copy(samples)
        for i in range(len(samples)):
            samples[i] = copy.copy(samples[i])
        
        if person_id_mode==1:
            #np.random.shuffle(samples)
            #
            #new_samples = []
            #while len(samples) > 0:
            #    for i in range( len(samples)-1, -1, -1):
            #        sample = samples[i]
            #        
            #        if len(sample) > 0:
            #            new_samples.append(sample.pop(0))
            #            
            #        if len(sample) == 0:
            #            samples.pop(i)
            #            i -= 1
            #samples = new_samples            
            new_samples = []
            for s in samples:    
                new_samples += s
            samples = new_samples
            np.random.shuffle(samples)
            
        self.samples_len = len(samples)
        
        if self.samples_len == 0:
            raise ValueError('No training data provided.')        

        if self.debug:
            self.generators_count = 1
            self.generators = [iter_utils.ThisThreadGenerator ( self.batch_func, (0, samples) )]
        else:
            self.generators_count = min ( generators_count, self.samples_len )
            
            if person_id_mode==1:
                self.generators = [iter_utils.SubprocessGenerator ( self.batch_func, (i, samples[i::self.generators_count]) ) for i in range(self.generators_count) ]
            else:
                self.generators = [iter_utils.SubprocessGenerator ( self.batch_func, (i, samples) ) for i in range(self.generators_count) ]

        self.generator_counter = -1
    def batch_func(self, param):
        generator_id, samples = param

        if self.generators_random_seed is not None:
            np.random.seed(self.generators_random_seed[generator_id])

        if self.person_id_mode == 1:
            samples_len = len(samples)
            samples_idxs = [*range(samples_len)]
            shuffle_idxs = []
        elif self.person_id_mode == 2:
            persons_count = len(samples)

            person_idxs = []
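            # person_id_mode 2: enumerate every unordered pair of persons; each
            # batch later draws one sample from each person of the current pair.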
            for j in range(persons_count):
                for i in range(j + 1, persons_count):
                    person_idxs += [[i, j]]

            shuffle_person_idxs = []

            samples_idxs = [None] * persons_count
            shuffle_idxs = [None] * persons_count

            for i in range(persons_count):
                samples_idxs[i] = [*range(len(samples[i]))]
                shuffle_idxs[i] = []

        while True:

            if self.person_id_mode == 2:
                if len(shuffle_person_idxs) == 0:
                    shuffle_person_idxs = person_idxs.copy()
                    np.random.shuffle(shuffle_person_idxs)
                person_ids = shuffle_person_idxs.pop()

            batches = None
            for n_batch in range(self.batch_size):

                if self.person_id_mode == 1:
                    if len(shuffle_idxs) == 0:
                        shuffle_idxs = samples_idxs.copy()
                        #np.random.shuffle(shuffle_idxs)

                    idx = shuffle_idxs.pop()
                    sample = samples[idx]

                    try:
                        x = SampleProcessor.process(
                            sample, self.sample_process_options,
                            self.output_sample_types, self.debug)
                    except Exception:
                        raise Exception(
                            "Exception occurred in sample %s. Error: %s" %
                            (sample.filename, traceback.format_exc()))

                    if not isinstance(x, (tuple, list)):
                        raise Exception(
                            'SampleProcessor.process must return a tuple or list')

                    if batches is None:
                        batches = [[] for _ in range(len(x))]

                        batches += [[]]
                        i_person_id = len(batches) - 1

                    for i in range(len(x)):
                        batches[i].append(x[i])

                    batches[i_person_id].append(np.array([sample.person_id]))

                else:
                    person_id1, person_id2 = person_ids

                    if len(shuffle_idxs[person_id1]) == 0:
                        shuffle_idxs[person_id1] = samples_idxs[
                            person_id1].copy()
                        np.random.shuffle(shuffle_idxs[person_id1])

                    idx = shuffle_idxs[person_id1].pop()
                    sample1 = samples[person_id1][idx]

                    if len(shuffle_idxs[person_id2]) == 0:
                        shuffle_idxs[person_id2] = samples_idxs[
                            person_id2].copy()
                        np.random.shuffle(shuffle_idxs[person_id2])

                    idx = shuffle_idxs[person_id2].pop()
                    sample2 = samples[person_id2][idx]

                    if sample1 is not None and sample2 is not None:
                        try:
                            x1 = SampleProcessor.process(
                                sample1, self.sample_process_options,
                                self.output_sample_types, self.debug)
                        except Exception:
                            raise Exception(
                                "Exception occurred in sample %s. Error: %s" %
                                (sample1.filename, traceback.format_exc()))

                        try:
                            x2 = SampleProcessor.process(
                                sample2, self.sample_process_options,
                                self.output_sample_types, self.debug)
                        except Exception:
                            raise Exception(
                                "Exception occurred in sample %s. Error: %s" %
                                (sample2.filename, traceback.format_exc()))

                        x1_len = len(x1)
                        if batches is None:
                            batches = [[] for _ in range(x1_len)]
                            batches += [[]]
                            i_person_id1 = len(batches) - 1

                            batches += [[] for _ in range(len(x2))]
                            batches += [[]]
                            i_person_id2 = len(batches) - 1

                        for i in range(x1_len):
                            batches[i].append(x1[i])

                        for i in range(len(x2)):
                            batches[x1_len + 1 + i].append(x2[i])

                        batches[i_person_id1].append(
                            np.array([sample1.person_id]))

                        batches[i_person_id2].append(
                            np.array([sample2.person_id]))

            yield [np.array(batch) for batch in batches]