Code Example #1
File: net_work.py Project: ikitozen/DSFD-tensorflow
    def make_data(self, ds, is_training=True):

        if is_training:
            # decode and augment samples in 10 worker threads
            ds = MultiThreadMapData(ds, 10, self.train_map_func, buffer_size=200, strict=True)
        else:
            ds = MultiThreadMapData(ds, 5, self.val_map_func, buffer_size=200, strict=True)
        # batch across all GPUs: num_gpu * batch_size samples per step
        ds = BatchData(ds, cfg.TRAIN.num_gpu * cfg.TRAIN.batch_size, remainder=True, use_list=False)
        # keep up to 100 batches prefetched by 2 extra processes
        ds = MultiProcessPrefetchData(ds, 100, 2)
        ds.reset_state()
        ds = ds.get_data()

        ###########
        # ds = data_set.shuffle(buffer_size=512)  # shuffle before loading images
        # ds = ds.repeat(cfg.TRAIN.epoch)
        # if is_training:
        #     ds = ds.map(self.train_map_func, num_parallel_calls=multiprocessing.cpu_count())  # decouple the heavy map_fn
        # else:
        #     ds = ds.map(self.val_map_func, num_parallel_calls=multiprocessing.cpu_count())  # decouple the heavy map_fn
        # ds = ds.batch(
        #     cfg.TRAIN.num_gpu * cfg.TRAIN.batch_size)  # TODO: consider using tf.contrib.map_and_batch
        #
        # ds = ds.prefetch(5 * cfg.TRAIN.num_gpu)
        # iterator = ds.make_one_shot_iterator()
        # one_element = iterator.get_next()
        # images, labels = one_element
        return ds
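The commented-out block above keeps the tf.data version of the same input pipeline. Below is a minimal standalone sketch of that path; it is not code from the project, it assumes TensorFlow 1.x, and the dataset contents, map function, and batch size are invented for illustration.

import multiprocessing

import numpy as np
import tensorflow as tf


def _map_func(idx):
    # stand-in for the heavy train_map_func / val_map_func
    image = tf.zeros((112, 112, 3), tf.float32)
    label = tf.cast(idx, tf.int32)
    return image, label


data_set = tf.data.Dataset.from_tensor_slices(np.arange(1024))
ds = data_set.shuffle(buffer_size=512)          # shuffle before the heavy map
ds = ds.repeat()
ds = ds.map(_map_func, num_parallel_calls=multiprocessing.cpu_count())
ds = ds.batch(8)
ds = ds.prefetch(5)
iterator = ds.make_one_shot_iterator()
images, labels = iterator.get_next()

with tf.Session() as sess:
    image_batch, label_batch = sess.run([images, labels])
    print(image_batch.shape, label_batch.shape)  # (8, 112, 112, 3) (8,)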
Code Example #2
    def build_iter(self):

        ds = DataFromGenerator(self.generator)
        ds = BatchData(ds, self.batch_size)
        # prefetch batches in self.process_num background processes
        ds = MultiProcessPrefetchData(ds, self.prefetch_size, self.process_num)
        ds.reset_state()
        ds = ds.get_data()
        return ds
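For reference, the DataFromGenerator → BatchData → MultiProcessPrefetchData chain above can be run standalone. The sketch below is not taken from any of the listed projects; it assumes an older tensorpack release whose dataflow API still exposes get_data() (as these snippets do) and a platform that forks processes, and the sample shapes and sizes are invented.

import numpy as np
from tensorpack.dataflow import BatchData, DataFromGenerator, MultiProcessPrefetchData


def fake_samples():
    # stand-in for self.generator: yields (image, label) pairs forever
    while True:
        yield np.zeros((112, 112, 3), np.float32), np.zeros((68, 2), np.float32)


if __name__ == '__main__':
    ds = DataFromGenerator(fake_samples)
    ds = BatchData(ds, 8)                     # batch_size = 8
    ds = MultiProcessPrefetchData(ds, 16, 2)  # prefetch_size = 16, process_num = 2
    ds.reset_state()
    gen = ds.get_data()
    images, labels = next(gen)
    print(images.shape, labels.shape)         # (8, 112, 112, 3) (8, 68, 2)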
Code Example #3
File: dataietr.py Project: zhaoyk1986/face_landmark
    def build_iter(self):

        # note: map_func is built here but never applied in this snippet;
        # the mapping is presumably done inside self.generator instead
        map_func = partial(self._map_func, is_training=self.training_flag)
        ds = DataFromGenerator(self.generator)
        ds = BatchData(ds, self.num_gpu * self.batch_size)
        ds = MultiProcessPrefetchData(ds, self.prefetch_size, self.process_num)
        ds.reset_state()
        ds = ds.get_data()
        return ds
Code Example #4
    def build_iter(self, samples):

        map_func = partial(self._map_func, is_training=self.training_flag)
        ds = DataFromList(samples, shuffle=True)

        # apply the map function in self.thread_num worker threads before batching
        ds = MultiThreadMapData(ds, self.thread_num, map_func, buffer_size=self.buffer_size)

        ds = BatchData(ds, self.num_gpu * self.batch_size)
        ds = MultiProcessPrefetchData(ds, self.prefetch_size, self.process_num)
        ds.reset_state()
        ds = ds.get_data()
        return ds
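The DataFromList + MultiThreadMapData variant in example #4 can likewise be exercised on its own. The sketch below is an illustration, not project code: the sample list, the map function, and all sizes are invented, and the MultiProcessPrefetchData stage is left out for brevity.

import numpy as np
from tensorpack.dataflow import BatchData, DataFromList, MultiThreadMapData


def map_func(dp):
    # dp is one element of the sample list: (path, label)
    path, label = dp
    image = np.zeros((224, 224, 3), np.float32)  # stand-in for decoding/augmenting `path`
    return image, label


samples = [('img_%04d.jpg' % i, i % 2) for i in range(64)]
ds = DataFromList(samples, shuffle=True)
ds = MultiThreadMapData(ds, 4, map_func, buffer_size=32, strict=True)
ds = BatchData(ds, 16)
ds.reset_state()
images, labels = next(ds.get_data())
print(images.shape, labels.shape)                # (16, 224, 224, 3) (16,)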
Code Example #5
    def build_iter(self):

        ds = DataFromGenerator(self.generator)

        if cfg.DATA.mutiscale and self.training_flag:
            # multi-scale training: the batcher draws the input size from cfg.DATA.scales
            ds = MutiScaleBatcher(ds,
                                  self.num_gpu * self.batch_size,
                                  scale_range=cfg.DATA.scales,
                                  input_size=(cfg.DATA.hin, cfg.DATA.win))
        else:
            ds = MutiScaleBatcher(ds,
                                  self.num_gpu * self.batch_size,
                                  input_size=(cfg.DATA.hin, cfg.DATA.win))

        ds = MultiProcessPrefetchData(ds, self.prefetch_size, self.process_num)
        ds.reset_state()
        ds = ds.get_data()
        return ds