Example no. 1
0
def test_zips_no_continent_data_correctly():
    """Check that every group of paths in ``geo.A_paths`` is internally
    consistent: all files in a group come from the same subfolder and
    share the same series number."""
    opt = NullOptions()
    opt.dataroot = os.path.expanduser('~/data/new_geo_data')
    opt.phase = ''

    opt.resize_or_crop = 'resize_and_crop'

    opt.loadSize = 256
    opt.fineSize = 256
    opt.which_direction = 'AtoB'

    opt.input_nc = 1
    opt.output_nc = 1
    # FIX: the original read `opt.no_flip = True,` — the trailing comma
    # made this the tuple (True,) rather than the boolean True.
    opt.no_flip = True
    opt.div_threshold = 1000
    opt.inpaint_single_class = False

    opt.continent_data = True

    geo = GeoDataset()
    geo.initialize(opt)

    def get_subfolder(path):
        # Name of the immediate parent directory of `path`.
        return os.path.basename(os.path.dirname(path))

    for paths in geo.A_paths:
        subfolders = [get_subfolder(path) for path in paths]
        s_nos = [get_series_number(path) for path in paths]

        # Every entry must match the first entry's subfolder and series no.
        assert all(subfolder == subfolders[0] for subfolder in subfolders[1:])
        assert all(s_no == s_nos[0] for s_no in s_nos[1:])
Example no. 2
0
def new_dataset():
    """Build and initialize a GeoDataset over the bundled
    ``test_data/with_continents`` fixture directory, and return it."""
    opt = NullOptions()
    opt.dataroot = 'test_data/with_continents'
    opt.phase = ''

    opt.resize_or_crop = 'resize_and_crop'

    opt.loadSize = 256
    opt.fineSize = 256
    opt.which_direction = 'AtoB'

    opt.input_nc = 1
    opt.output_nc = 1
    # FIX: the original read `opt.no_flip = True,` — the trailing comma
    # made this the tuple (True,) rather than the boolean True.
    opt.no_flip = True
    opt.div_threshold = 1000
    opt.inpaint_single_class = False

    opt.continent_data = True

    geo = GeoDataset()
    geo.initialize(opt)

    return geo
Example no. 3
0
def dataset(pytestconfig):
    """Construct a GeoDataset over the dataroot supplied on the pytest
    command line (``--dataroot``), in the 'test' phase, without
    continent data, and return the initialized dataset."""
    opt = NullOptions()
    opt.dataroot = os.path.expanduser(pytestconfig.option.dataroot)
    opt.phase = 'test'

    # Remaining option values, applied in one pass.
    option_values = {
        'resize_or_crop': 'resize_and_crop',
        'loadSize': 256,
        'fineSize': 256,
        'which_direction': 'AtoB',
        'input_nc': 1,
        'output_nc': 1,
        'no_flip': True,
        'div_threshold': 1000,
        'inpaint_single_class': False,
        'continent_data': False,
    }
    for attr, value in option_values.items():
        setattr(opt, attr, value)

    geo = GeoDataset()
    geo.initialize(opt)
    return geo
Example no. 4
0
def test_default_continent_map_is_blank():
    """When ``continent_data`` is requested but the dataroot has no
    continent files, the returned continent map must be all zeros."""
    opt = NullOptions()
    opt.dataroot = 'test_data/no_continents'
    opt.phase = ''

    opt.resize_or_crop = 'resize_and_crop'

    opt.loadSize = 256
    opt.fineSize = 256
    opt.which_direction = 'AtoB'

    opt.input_nc = 1
    opt.output_nc = 1
    # FIX: the original read `opt.no_flip = True,` — the trailing comma
    # made this the tuple (True,) rather than the boolean True.
    opt.no_flip = True
    opt.div_threshold = 1000
    opt.inpaint_single_class = False

    opt.continent_data = True

    geo = GeoDataset()
    geo.initialize(opt)

    data = geo[0]
    # A blank map sums to exactly zero.
    assert torch.sum(data['continents']) == 0
Example no. 5
0
def CreateDataset(opt):
    """Factory: build, announce, and initialize the dataset selected by
    ``opt.dataset_mode``.

    Imports are deferred into each branch so that only the chosen
    dataset module is ever loaded. Raises ValueError for an
    unrecognized mode.
    """
    mode = opt.dataset_mode
    if mode == 'aligned':
        # Data stored as one image concatenated along horizontal axis
        from data.aligned_dataset import AlignedDataset
        created = AlignedDataset()
    elif mode == 'unaligned':
        # Data stored in different directories
        from data.unaligned_dataset import UnalignedDataset
        created = UnalignedDataset()
    elif mode == 'geo':
        from data.geo_dataset import GeoDataset
        created = GeoDataset()
    elif mode == 'single':
        from data.single_dataset import SingleDataset
        created = SingleDataset()
    else:
        raise ValueError("Dataset [%s] not recognized." % mode)

    print("dataset [%s] was created" % (created.name()))
    created.initialize(opt)
    return created
    # tensorboard 出力
    board_train = SummaryWriter(
        log_dir=os.path.join(args.tensorboard_dir, args.exper_name))
    board_valid = SummaryWriter(
        log_dir=os.path.join(args.tensorboard_dir, args.exper_name + "_valid"))
    board_eval = SummaryWriter(
        log_dir=os.path.join(args.tensorboard_dir, args.exper_name + "_eval"))

    #================================
    # データセットの読み込み
    #================================
    # 学習用データセットとテスト用データセットの設定
    ds_train = GeoDataset(args,
                          args.dataset_train_dir,
                          image_height=args.image_height,
                          image_width=args.image_width,
                          data_augument=args.data_augument,
                          geometric_model=args.geometric_model,
                          debug=args.debug)

    index = np.arange(len(ds_train))
    train_index, valid_index = train_test_split(index,
                                                test_size=args.val_rate,
                                                random_state=args.seed)
    if (args.debug):
        print("train_index.shape : ", train_index.shape)
        print("valid_index.shape : ", valid_index.shape)
        print("train_index[0:10] : ", train_index[0:10])
        print("valid_index[0:10] : ", valid_index[0:10])

    dloader_train = torch.utils.data.DataLoader(Subset(ds_train, train_index),
Example no. 7
0
def temp_dataset(dataset, folder_nums=[1, 2, 3, 4]):
    """Copy .dat fixtures from ``dataset``'s dataroot into a fresh temp
    directory tree (four numbered subfolders plus the parent), then
    build a new GeoDataset over that tree and return ``(geo, opt)``.

    NOTE(review): ``folder_nums`` is a mutable default argument; it is
    only read here, so this is harmless, but a tuple would be safer.
    """
    dataroot = dataset.opt.dataroot

    # Create a temporary directory to test
    temp_data_parent = tempfile.mkdtemp(dir='/tmp')

    # Zero-padded two-digit subfolder names, e.g. '01'..'04'.
    folder_labels = ['{:02}'.format(num) for num in folder_nums]

    temp_data_dir_1 = os.path.join(temp_data_parent, folder_labels[0])
    temp_data_dir_2 = os.path.join(temp_data_parent, folder_labels[1])
    temp_data_dir_3 = os.path.join(temp_data_parent, folder_labels[2])
    temp_data_dir_4 = os.path.join(temp_data_parent, folder_labels[3])

    os.mkdir(temp_data_dir_1)
    os.mkdir(temp_data_dir_2)
    os.mkdir(temp_data_dir_3)
    os.mkdir(temp_data_dir_4)

    # Populate each subfolder with one series' files from the source
    # dataroot's test/ directory.
    [
        shutil.copy(file, temp_data_dir_1)
        for file in glob.glob(dataroot + '/test/serie100001_project_*.dat')
    ]
    [
        shutil.copy(file, temp_data_dir_2)
        for file in glob.glob(dataroot + '/test/serie100002_project_*.dat')
    ]
    [
        shutil.copy(file, temp_data_dir_3)
        for file in glob.glob(dataroot + '/test/serie100003_project_*.dat')
    ]
    # NOTE(review): serie100004 is copied into dir_3 here AND into dir_4
    # below — this looks like a copy-paste slip, but the file-count
    # assertions further down depend on the resulting layout; confirm
    # intent before "fixing".
    [
        shutil.copy(file, temp_data_dir_3)
        for file in glob.glob(dataroot + '/test/serie100004_project_*.dat')
    ]
    [
        shutil.copy(file, temp_data_dir_4)
        for file in glob.glob(dataroot + '/test/serie100004_project_*.dat')
    ]
    [
        shutil.copy(file, temp_data_parent)
        for file in glob.glob(dataroot + '/test/serie100004_project_*.dat')
    ]

    # Write a <tag>_norm.dat file in every folder; presumably these hold
    # the value range GeoDataset uses for normalisation — confirm
    # against GeoDataset's loader.
    for folder in [
            temp_data_parent, temp_data_dir_1, temp_data_dir_2,
            temp_data_dir_3, temp_data_dir_4
    ]:
        for tag in ['DIV', 'Vx', 'Vy']:
            with open(os.path.join(folder, tag + '_norm.dat'), 'w') as file:
                file.write('-10000 10000')

    # Check they're in the target directory
    # (indexing [0] raises IndexError if any folder ended up empty,
    # acting as a sanity check).
    glob.glob(os.path.join(temp_data_parent, '*.dat'))[0]
    glob.glob(os.path.join(temp_data_dir_1, '*.dat'))[0]
    glob.glob(os.path.join(temp_data_dir_2, '*.dat'))[0]
    glob.glob(os.path.join(temp_data_dir_3, '*.dat'))[0]
    glob.glob(os.path.join(temp_data_dir_4, '*.dat'))[0]

    # Now build a second dataset using this dummy directory
    opt = NullOptions()
    opt.dataroot = temp_data_parent
    opt.phase = ''

    opt.resize_or_crop = 'resize_and_crop'

    opt.loadSize = 256
    opt.fineSize = 256
    opt.which_direction = 'AtoB'

    opt.input_nc = 1
    opt.output_nc = 1
    opt.no_flip = True
    opt.div_threshold = 1000
    opt.inpaint_single_class = False

    opt.continent_data = False

    geo = GeoDataset()
    geo.initialize(opt)

    # Expect 6 files per channel in the parent directory view.
    div_files, vx_files, vy_files, _ = geo.get_dat_files(temp_data_parent)

    assert (len(div_files) == 6)
    assert (len(vx_files) == 6)
    assert (len(vy_files) == 6)

    # NOTE(review): second initialize() call — redundant unless
    # initialize() is not idempotent; confirm before removing.
    geo.initialize(opt)

    return geo, opt