def prepare_dataset(self):
    mode_settings = importlib.import_module(self.settings_type)
    train_args = mode_settings.get_args('train')
    # load dataset
    train_mini_batch_loader = DatasetPreProcessor(train_args)
    test_mini_batch_loader = DatasetPreProcessor(mode_settings.get_args('test'))
    print("---set mini_batch----------")

    if train_args.importance_sampling:
        print("importance----------")
        train_it = ImportantSerialIterator(
            train_mini_batch_loader,
            train_args.training_params.batch_size,
            shuffle=train_args.shuffle,
            p=np.loadtxt(train_args.weights_file_path))
    else:
        if train_args.training_params.iter_type == 'multi':
            iterator = chainer.iterators.MultiprocessIterator
        else:
            iterator = chainer.iterators.SerialIterator
        train_it = iterator(
            train_mini_batch_loader,
            train_args.training_params.batch_size,
            shuffle=train_args.shuffle)

    # Validation always uses a SerialIterator; `iterator` would be unbound
    # here whenever the importance-sampling branch ran above.
    val_batch_size = 1
    val_it = chainer.iterators.SerialIterator(
        test_mini_batch_loader,
        val_batch_size, repeat=False, shuffle=False)
    return train_it, val_it, len(train_mini_batch_loader)
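ImportantSerialIterator is project-specific rather than part of Chainer. Below is a minimal sketch of what it plausibly does, assuming it draws each mini-batch by weighted sampling over the per-example probabilities p loaded above; every detail here is an assumption, not the project's actual implementation.

import numpy as np
import chainer

class ImportantSerialIterator(chainer.dataset.Iterator):
    """Draws mini-batches by weighted sampling over example indices (sketch)."""

    def __init__(self, dataset, batch_size, shuffle=True, p=None):
        self.dataset = dataset
        self.batch_size = batch_size
        # `shuffle` is accepted for interface parity; sampling already
        # randomizes the order.
        p = np.asarray(p, dtype=np.float64)
        self.p = p / p.sum()  # normalize the weights into a distribution
        self.epoch = 0
        self.is_new_epoch = False
        self._drawn = 0

    def __next__(self):
        # Sample indices with probability proportional to the weights.
        indices = np.random.choice(
            len(self.dataset), size=self.batch_size, p=self.p)
        self._drawn += self.batch_size
        self.is_new_epoch = self._drawn >= len(self.dataset)
        if self.is_new_epoch:
            self.epoch += 1
            self._drawn -= len(self.dataset)
        return [self.dataset[i] for i in indices]

    @property
    def epoch_detail(self):
        return self.epoch + self._drawn / len(self.dataset)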
Example #2
def prepare_dataset():
    # load dataset
    test_mini_batch_loader = DatasetPreProcessor('test')
    val_it = chainer.iterators.SerialIterator(
        test_mini_batch_loader,
        1, repeat=False, shuffle=False)
    return val_it, len(test_mini_batch_loader), test_mini_batch_loader.pairs
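Because the iterator above is created with repeat=False, iterating it yields each test example exactly once. A typical consumption loop might look like the following; the evaluation step is left as a comment since the model is not part of this snippet:

from chainer.dataset import concat_examples

val_it, n_test, pairs = prepare_dataset()
for batch in val_it:  # one pass only, since repeat=False
    x, t = concat_examples(batch)  # stack the size-1 mini-batch into arrays
    # run the evaluation model on (x, t) and accumulate metrics here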
Example #3
def prepare_dataset(data_set):
    train_args = get_args('train')
    # load dataset
    if data_set == 'mnist':
        train_mini_batch_loader, test_mini_batch_loader = \
            chainer.datasets.get_mnist()
    else:
        train_mini_batch_loader = DatasetPreProcessor(train_args)
        test_mini_batch_loader = DatasetPreProcessor(get_args('test'))
    print("---set mini_batch----------")

    if train_args.importance_sampling:
        print("importance----------")
        train_it = ImportantSerialIterator(
            train_mini_batch_loader,
            train_args.training_params.batch_size,
            shuffle=train_args.shuffle,
            p=np.loadtxt(train_args.weights_file_path))
    else:
        if train_args.training_params.iter_type == 'multi':
            iterator = chainer.iterators.MultiprocessIterator
        else:
            iterator = chainer.iterators.SerialIterator
        train_it = iterator(
            train_mini_batch_loader,
            train_args.training_params.batch_size,
            shuffle=train_args.shuffle)

    # As in Example #1, `iterator` is unbound on the importance-sampling
    # path, so build the validation iterator explicitly.
    val_it = chainer.iterators.SerialIterator(
        test_mini_batch_loader,
        1, repeat=False, shuffle=False)
    return train_it, val_it, len(train_mini_batch_loader)
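For the 'mnist' branch, chainer.datasets.get_mnist() returns a plain (train, test) pair of TupleDatasets whose items are (image, label) tuples, which is why the same iterator construction works for both branches:

import chainer

train, test = chainer.datasets.get_mnist()  # downloaded and cached on first call
x, t = train[0]
print(x.shape, x.dtype, int(t))  # (784,) float32 and an integer class label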
Example #4
def prepare_dataset():
    # load dataset
    train_mini_batch_loader = \
        DatasetPreProcessor(chainer.global_config.user_train_args)
    train_it = chainer.iterators.SerialIterator(
        train_mini_batch_loader,
        chainer.global_config.user_train_args.training_params.batch_size)
    return train_it, len(train_mini_batch_loader)
Example #5

def prepare_dataset(self):
    config = chainer.config  # bare `config` below reads the global config
    # load dataset
    if config.data_set == 'mnist':
        train_mini_batch_loader, test_mini_batch_loader = \
            chainer.datasets.get_mnist()
    else:
        train_mini_batch_loader = DatasetPreProcessor('train')
        test_mini_batch_loader = DatasetPreProcessor('test')

    if config.training_params.iter_type == 'multi':
        iterator = chainer.iterators.MultiprocessIterator
    else:
        iterator = chainer.iterators.SerialIterator
    train_it = iterator(
        train_mini_batch_loader,
        config.training_params.batch_size,
        shuffle=config.train)

    val_batch_size = 1
    val_it = iterator(
        test_mini_batch_loader,
        val_batch_size, repeat=False, shuffle=False)
    return train_it, val_it, len(train_mini_batch_loader)
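The iter_type switch only selects the iterator class; both share the positional (dataset, batch_size) signature. MultiprocessIterator additionally accepts worker-pool options such as n_processes and requires the dataset to be picklable. A self-contained sketch of the 'multi' configuration with a toy dataset (the worker count is chosen purely for illustration):

import numpy as np
import chainer

# A picklable toy dataset; MultiprocessIterator ships the dataset to its
# worker processes.
dataset = chainer.datasets.TupleDataset(
    np.random.rand(100, 3).astype(np.float32), np.arange(100))

train_it = chainer.iterators.MultiprocessIterator(
    dataset, batch_size=10, shuffle=True, n_processes=4)
batch = next(train_it)  # a list of 10 (x, t) tuples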
Example #6
def test(model):
    # load dataset
    test_mini_batch_loader = \
        DatasetPreProcessor(chainer.global_config.user_test_args)
    test_data_size = len(test_mini_batch_loader)

    print("------test data size")
    print(test_data_size)

    # The label-to-color table is constant, so read it once, not per image.
    color_data = pd.read_csv(chainer.global_config.user_test_args.label_path)

    sum_accuracy = 0
    sum_iou = 0
    for idx in range(test_data_size):
        raw_x, raw_t = test_mini_batch_loader.load_data([idx])

        x = chainer.Variable(raw_x)
        t = chainer.Variable(raw_t)

        model(x, t)

        print("accuracy:", model.accuracy)
        sum_accuracy += model.accuracy
        sum_iou += np.array(model.iou)

        # convert the predicted labels back into a color image
        result_labels = model.h.data.argmax(axis=1)
        restoration(
            result_labels,
            color_data,
            chainer.global_config.user_test_args.output_path,
            idx,
            chainer.global_config.user_test_args.n_class)

    n_class = chainer.global_config.user_test_args.n_class
    print("test mean accuracy {}".format(sum_accuracy / test_data_size))
    print("test mean IoU {}, meanIU {}".format(
        sum_iou / test_data_size, np.sum(sum_iou / test_data_size) / n_class))
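model.iou is computed inside the model; for reference, per-class IoU over predicted and ground-truth label maps reduces to intersection and union counts per class. A standalone sketch (the function name is illustrative, not from the original source):

import numpy as np

def per_class_iou(pred, gt, n_class):
    """Intersection-over-union per class; classes absent from both stay NaN."""
    ious = np.full(n_class, np.nan)
    for c in range(n_class):
        inter = np.logical_and(pred == c, gt == c).sum()
        union = np.logical_or(pred == c, gt == c).sum()
        if union > 0:
            ious[c] = inter / union
    return ious  # mean IoU is np.nanmean(ious)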
Example #7
    def visualize_attention(self, raw_image, preprocessed_image, gt):
        h, w, _ = raw_image.shape

        mask, target_class = \
            self.compute_attention_mask(preprocessed_image, gt)
        mask = mask[0].transpose(1, 2, 0).astype(np.uint8)

        # cv2.resize only accepts numpy arrays, so copy the mask back to
        # the host if it is a cupy (GPU) array.
        xp = cuda.get_array_module(mask)
        if xp is not np:
            mask = cuda.to_cpu(mask)
        mask = cv2.resize(mask, (w, h))
        return raw_image * mask, target_class
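
# Note: cuda.get_array_module returns the numpy module for host arrays and
# cupy for device arrays, so the branch above makes the copy a no-op on CPU.
# The same pattern as a standalone helper (the name is assumed, not part of
# the original source):
def ensure_host(array):
    """Return a numpy array, copying it off the GPU if needed."""
    xp = cuda.get_array_module(array)
    return array if xp is np else cuda.to_cpu(array)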


if __name__ == '__main__':
    args = get_args()
    mini_batch_loader = DatasetPreProcessor(args)
    visualizer = AttentionVisualizer(args)

    _, model_eval = EasyTrainer.prepare_model(args)
    args.net = model_eval
    for idx, (image_path, label) in enumerate(mini_batch_loader.pairs):
        raw_image = cv2.imread(image_path)
        preprocessed_image, _ = mini_batch_loader.get_example(idx)

        attention_view, target_class = \
            visualizer.visualize_attention(raw_image, preprocessed_image, label)

        name, extension = os.path.splitext(os.path.basename(image_path))
        image_fname = '{}_{}_{}{}'.format(name, label, target_class, extension)
        output_path = os.path.join(args.output_path, image_fname)
        cv2.imwrite(output_path, attention_view)  # save the masked view