Example #1
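The snippets on this page are module-level functions from SPG-style dataset loaders and omit their imports; a minimal, assumed header (depending on the project layout, `spg` may instead be imported as `from learning import spg`) would be:

import functools
import os

import torchnet as tnt

import spg  # superpoint-graph helpers: spg_reader, spg_to_igraph, scaler01, loader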
def get_datasets(args, test_seed_offset=0):
    """ Gets training and test datasets. """

    # Load superpoints graphs
    testlist, trainlist, validlist = [], [], []
    valid_names = ['0001_00000.h5', '0001_00085.h5', '0001_00170.h5', '0001_00230.h5', '0001_00325.h5', '0001_00420.h5',
                   '0002_00000.h5', '0002_00111.h5', '0002_00223.h5', '0018_00030.h5', '0018_00184.h5', '0018_00338.h5',
                   '0020_00080.h5', '0020_00262.h5', '0020_00444.h5', '0020_00542.h5', '0020_00692.h5', '0020_00800.h5']

    for n in range(1, 7):
        if n != args.cvfold:
            path = '{}/superpoint_graphs/0{:d}/'.format(args.VKITTI_PATH, n)
            for fname in sorted(os.listdir(path)):
                if fname.endswith(".h5") and not (args.use_val_set
                                                  and fname in valid_names):
                    #training set
                    trainlist.append(spg.spg_reader(args, path + fname, True))
                if fname.endswith(".h5") and (args.use_val_set
                                              and fname in valid_names):
                    #validation set
                    validlist.append(spg.spg_reader(args, path + fname, True))
    path = '{}/superpoint_graphs/0{:d}/'.format(args.VKITTI_PATH, args.cvfold)
    #evaluation set
    for fname in sorted(os.listdir(path)):
        if fname.endswith(".h5"):
            testlist.append(spg.spg_reader(args, path + fname, True))

    # Normalize edge features
    if args.spg_attribs01:
        trainlist, testlist, validlist, scaler = spg.scaler01(
            trainlist, testlist, validlist=validlist)

    return tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in trainlist],
                                    functools.partial(spg.loader, train=True, args=args, db_path=args.VKITTI_PATH)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in testlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.VKITTI_PATH, test_seed_offset=test_seed_offset)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in validlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.VKITTI_PATH, test_seed_offset=test_seed_offset)), \
            scaler
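A minimal, illustrative way to call the function above, assuming an argparse-style `args` namespace; the attribute names (`VKITTI_PATH`, `cvfold`, `use_val_set`, `spg_attribs01`) come from the snippet, while the values and the `SimpleNamespace` stand-in are hypothetical:

from types import SimpleNamespace

# Hypothetical configuration; real runs typically build `args` with argparse.
args = SimpleNamespace(
    VKITTI_PATH='/data/vkitti',  # root containing superpoint_graphs/01 ... 06
    cvfold=1,                    # fold 01 is held out as the evaluation set
    use_val_set=True,            # reserve the listed valid_names for validation
    spg_attribs01=True,          # normalize edge features
)

train_ds, test_ds, valid_ds, scaler = get_datasets(args)
print(len(train_ds), len(test_ds), len(valid_ds))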
Example #2
def get_datasets(args, test_seed_offset=0):
    """build training and testing set"""
    
    #for a simple train/test organization
    trainset = ['train/' + f for f in os.listdir(args.CUSTOM_SET_PATH + '/superpoint_graphs/train')]
    testset  = ['test/' + f for f in os.listdir(args.CUSTOM_SET_PATH + '/superpoint_graphs/test')]
    
    # Load superpoints graphs
    testlist, trainlist = [], []
    # names from os.listdir already carry the .h5 extension
    for n in trainset:
        trainlist.append(spg.spg_reader(args, args.CUSTOM_SET_PATH + '/superpoint_graphs/' + n, True))
    for n in testset:
        testlist.append(spg.spg_reader(args, args.CUSTOM_SET_PATH + '/superpoint_graphs/' + n, True))

    # Normalize edge features
    if args.spg_attribs01:
        trainlist, testlist, validlist, scaler = spg.scaler01(trainlist, testlist)

    return tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in trainlist],
                                    functools.partial(spg.loader, train=True, args=args, db_path=args.CUSTOM_SET_PATH)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in testlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.CUSTOM_SET_PATH, test_seed_offset=test_seed_offset)) ,\
            scaler
Example #3
def get_datasets(args, test_seed_offset=0):
    """build training and testing set"""

    #for a simple train/test organization
    # trainset = ['trainval/' + f for f in os.listdir(args.AERIAL7_PATH + '/superpoint_graphs/train')]
    # testset  = ['test/' + f for f in os.listdir(args.AERIAL7_PATH + '/superpoint_graphs/test')]

    # #Load superpoints graphs
    # testlist, trainlist = [], []
    # for n in trainset:
    #     trainlist.append(spg.spg_reader(args, args.AERIAL7_PATH + '/superpoint_graphs/' + n, True))
    # for n in testset:
    #     testlist.append(spg.spg_reader(args, args.AERIAL7_PATH + '/superpoint_graphs/' + n, True))

    testlist, trainlist = [], []
    for n in range(1, 7):
        if n != args.cvfold:
            path = '{}/superpoint_graphs/Area_{:d}/'.format(
                args.AERIAL7_PATH, n)
            for fname in sorted(os.listdir(path)):
                if fname.endswith(".h5"):
                    trainlist.append(spg.spg_reader(args, path + fname, True))
    path = '{}/superpoint_graphs/Area_{:d}/'.format(args.AERIAL7_PATH,
                                                    args.cvfold)
    for fname in sorted(os.listdir(path)):
        if fname.endswith(".h5"):
            testlist.append(spg.spg_reader(args, path + fname, True))

    # Normalize edge features
    if args.spg_attribs01:
        trainlist, testlist = spg.scaler01(trainlist, testlist)

    return tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in trainlist],
                                    functools.partial(spg.loader, train=True, args=args, db_path=args.AERIAL7_PATH)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in testlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.AERIAL7_PATH, test_seed_offset=test_seed_offset))
Example #4
def get_datasets(args, test_seed_offset=0):
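    """ Gets training and test datasets. """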

    train_names = [
        'bildstein_station1', 'bildstein_station5', 'domfountain_station1',
        'domfountain_station3', 'neugasse_station1', 'sg27_station1',
        'sg27_station2', 'sg27_station5', 'sg27_station9', 'sg28_station4',
        'untermaederbrunnen_station1'
    ]
    valid_names = [
        'bildstein_station3', 'domfountain_station2', 'sg27_station4',
        'untermaederbrunnen_station3'
    ]

    if args.db_train_name == 'train':
        trainset = ['train/' + f for f in train_names]
    elif args.db_train_name == 'trainval':
        trainset = ['train/' + f for f in train_names + valid_names]

    validset = []
    testset = []
    if args.use_val_set:
        validset = ['train/' + f for f in valid_names]
    if args.db_test_name == 'testred':
        testset = [
            'test_reduced/' + os.path.splitext(f)[0]
            for f in os.listdir(args.SEMA3D_PATH +
                                '/superpoint_graphs/test_reduced')
        ]
    elif args.db_test_name == 'testfull':
        testset = [
            'test_full/' + os.path.splitext(f)[0]
            for f in os.listdir(args.SEMA3D_PATH +
                                '/superpoint_graphs/test_full')
        ]

    # Load superpoints graphs
    testlist, trainlist, validlist = [], [], []
    for n in trainset:
        trainlist.append(
            spg.spg_reader(
                args, args.SEMA3D_PATH + '/superpoint_graphs/' + n + '.h5',
                True))
    for n in validset:
        validlist.append(
            spg.spg_reader(
                args, args.SEMA3D_PATH + '/superpoint_graphs/' + n + '.h5',
                True))
    for n in testset:
        testlist.append(
            spg.spg_reader(
                args, args.SEMA3D_PATH + '/superpoint_graphs/' + n + '.h5',
                True))

    # Normalize edge features
    if args.spg_attribs01:
        trainlist, testlist, validlist, scaler = spg.scaler01(
            trainlist, testlist, validlist=validlist)

    return tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in trainlist],
                                    functools.partial(spg.loader, train=True, args=args, db_path=args.SEMA3D_PATH)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in testlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.SEMA3D_PATH, test_seed_offset=test_seed_offset)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in validlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.SEMA3D_PATH, test_seed_offset=test_seed_offset)),\
            scaler
def get_datasets(args, test_seed_offset=0):
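    """ Gets training and test datasets. """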
    train_names = [
        '6755_66525.h5', '6955_66645.h5', '6875_66585.h5', '6820_66550.h5',
        '6950_66640.h5', '6955_66650.h5', '7055_66455.h5', '7020_66300.h5',
        '7050_66335.h5', '6825_66560.h5', '7015_66550.h5', '7055_66350.h5',
        '7030_66525.h5', '6985_66650.h5', '7005_66575.h5', '6750_66520.h5',
        '6770_66530.h5', '6990_66595.h5', '6760_66530.h5', '6805_66540.h5',
        '7000_66580.h5', '6980_66650.h5', '7060_66385.h5', '7055_66410.h5',
        '7010_66565.h5', '7015_66555.h5', '6810_66545.h5', '7040_66495.h5',
        '6870_66580.h5', '7005_66565.h5', '7045_66485.h5', '7055_66445.h5',
        '7025_66530.h5', '6835_66565.h5', '7050_66340.h5', '7030_66315.h5',
        '7035_66325.h5', '6985_66605.h5', '7020_66305.h5', '7040_66505.h5',
        '6875_66580.h5', '6965_66630.h5', '7015_66560.h5', '7045_66500.h5',
        '7055_66340.h5', '7060_66420.h5', '6995_66575.h5', '7060_66400.h5',
        '6800_66540.h5', '7015_66545.h5', '7040_66325.h5', '7020_66310.h5',
        '7050_66330.h5', '7055_66420.h5', '6900_66605.h5', '6890_66600.h5',
        '6830_66560.h5', '6790_66540.h5', '6840_66565.h5', '6915_66610.h5',
        '7020_66540.h5', '7000_66570.h5', '6855_66570.h5', '7045_66335.h5',
        '6785_66535.h5', '6870_66585.h5', '6950_66645.h5', '6845_66570.h5',
        '6980_66610.h5', '6990_66650.h5', '6905_66610.h5', '6860_66575.h5',
        '7060_66360.h5', '6940_66635.h5', '6765_66530.h5', '6935_66635.h5',
        '6780_66530.h5', '6975_66650.h5', '7055_66345.h5', '7035_66520.h5',
        '6970_66620.h5', '7005_66570.h5', '6905_66605.h5', '6935_66625.h5',
        '7020_66550.h5', '7060_66395.h5', '6930_66625.h5', '7050_66450.h5',
        '7025_66305.h5', '7050_66475.h5', '6900_66600.h5', '6850_66565.h5',
        '6865_66575.h5', '7045_66495.h5', '6990_66590.h5', '6795_66540.h5',
        '7035_66320.h5', '7055_66460.h5', '6975_66615.h5', '6910_66610.h5',
        '7060_66380.h5', '7055_66335.h5', '6845_66565.h5', '6965_66635.h5',
        '6775_66530.h5', '6750_66525.h5', '6815_66545.h5', '6975_66620.h5'
    ]  # excluded: '6875_66590.h5', '6960_66635.h5'
    valid_names = [
        '6835_66560.h5', '6960_66640.h5', '6880_66595.h5', '6860_66580.h5',
        '6955_66640.h5', '6795_66535.h5'
    ]
    if args.db_train_name == 'train':
        trainset = ['train/' + f for f in train_names]
    elif args.db_train_name == 'trainval':
        trainset = ['train/' + f for f in train_names + valid_names]

    validset = []
    testset = []
    if args.use_val_set:
        validset = ['train/' + f for f in valid_names]
    if args.db_test_name == 'testred':
        testset = [
            'test_reduced/' + os.path.splitext(f)[0]
            for f in os.listdir(args.SEMA3D_PATH +
                                '/superpoint_graphs/test_reduced')
        ]
    elif args.db_test_name == 'testfull':
        testset = [
            'test_full/' + os.path.splitext(f)[0]
            for f in os.listdir(args.SEMA3D_PATH +
                                '/superpoint_graphs/test_full')
        ]

    # Load superpoints graphs
    testlist, trainlist, validlist = [], [], []
    for n in trainset:
        trainlist.append(
            spg.spg_reader(args, args.SEMA3D_PATH + '/superpoint_graphs/' + n,
                           True))
    for n in validset:
        validlist.append(
            spg.spg_reader(args, args.SEMA3D_PATH + '/superpoint_graphs/' + n,
                           True))
    for n in testset:
        testlist.append(
            spg.spg_reader(
                args, args.SEMA3D_PATH + '/superpoint_graphs/' + n + '.h5',
                True))

    # Normalize edge features
    if args.spg_attribs01:
        trainlist, testlist, validlist, scaler = spg.scaler01(
            trainlist, testlist, validlist=validlist)

    return tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in trainlist],
                                    functools.partial(spg.loader, train=True, args=args, db_path=args.SEMA3D_PATH)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in testlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.SEMA3D_PATH, test_seed_offset=test_seed_offset)), \
           tnt.dataset.ListDataset([spg.spg_to_igraph(*tlist) for tlist in validlist],
                                    functools.partial(spg.loader, train=False, args=args, db_path=args.SEMA3D_PATH, test_seed_offset=test_seed_offset)),\
            scaler