Example #1
def pack_all():
    # Split FlyingChairs into its official train/validation sets, then pack the
    # image pairs and flow fields into binary blocks of up to 64 samples each.
    trainSet, validationSet = trainval.read(
        '//msralab/ProjectData/ehealth02/v-dinliu/Flow2D/Data/FlyingChairs_release/FlyingChairs_train_val.txt'
    )

    data_dir = '//msralab/ProjectData/ehealth02/v-dinliu/Flow2D/Data/FlyingChairs_release/data/'
    n = 64  # samples per block
    for name, Set in [('train', trainSet), ('val', validationSet)]:
        bn = 0  # block number
        for i in range(0, len(Set), n):
            subset = Set[i:i + n]
            trainImg1 = [ppm.load(data_dir + '%05d_img1.ppm' % idx) for idx in subset]
            trainImg2 = [ppm.load(data_dir + '%05d_img2.ppm' % idx) for idx in subset]
            trainFlow = [flo.load(data_dir + '%05d_flow.flo' % idx) for idx in subset]
            prefix = r'\\msralab\ProjectData\ScratchSSD\Users\v-dinliu\data\FlyingChairsBlock'
            pack_data(
                os.path.join(prefix, '{}{}_{}.bin'.format(name, bn, len(subset))),
                trainImg1, trainImg2, trainFlow)
            bn += 1
            print('{}/{}'.format(i, len(Set)))
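The pack_data helper called above is project-specific and its block layout is not shown here. As a rough sketch only, assuming a block is simply the three lists of arrays serialized together (pack_data_sketch and unpack_data_sketch are illustrative names, not the project's API):

import pickle

def pack_data_sketch(path, img1_list, img2_list, flow_list):
    # Serialize the three lists of per-sample arrays into one block file.
    with open(path, 'wb') as f:
        pickle.dump((img1_list, img2_list, flow_list), f,
                    protocol=pickle.HIGHEST_PROTOCOL)

def unpack_data_sketch(path):
    # Read one block back into the three lists of per-sample arrays.
    with open(path, 'rb') as f:
        return pickle.load(f)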
Example #2
	# Load one shard of FlyingThings3D and print the in-memory footprint of
	# each piece with pympler's asizeof (images as uint8, flow as float16).
	from pympler.asizeof import asizeof
	trainImg1 = [cv2.imread(file).astype('uint8') for file in things3d_dataset['image_0'][:samples:args.shard]]
	print(asizeof(trainImg1[0]))
	print(asizeof(trainImg1))
	trainImg2 = [cv2.imread(file).astype('uint8') for file in things3d_dataset['image_1'][:samples:args.shard]]
	print(asizeof(trainImg2[0]))
	print(asizeof(trainImg2))
	trainFlow = [things3d.load(file).astype('float16') for file in things3d_dataset['flow'][:samples:args.shard]]
	print(asizeof(trainFlow[0]))
	print(asizeof(trainFlow))
	trainSize = len(trainFlow)
	training_datasets = [(trainImg1, trainImg2, trainFlow)] * batch_size
	print(asizeof(training_datasets))

	# validation- chairs
	_, validationSet = trainval.read(chairs_split_file)
	validationSet = validationSet[:samples]
	validationImg1 = [ppm.load(os.path.join(chairs_path, '%05d_img1.ppm' % i)) for i in validationSet]
	validationImg2 = [ppm.load(os.path.join(chairs_path, '%05d_img2.ppm' % i)) for i in validationSet]
	validationFlow = [flo.load(os.path.join(chairs_path, '%05d_flow.flo' % i)) for i in validationSet]
	validationSize = len(validationFlow)
	validation_datasets['chairs'] = (validationImg1, validationImg2, validationFlow)

	'''
	# validation- sintel
	sintel_dataset = sintel.list_data()
	divs = ('training',) if not getattr(config.network, 'class').get() == 'MaskFlownet' else ('training2',)
	for div in divs:
		for k, dataset in sintel_dataset[div].items():
			img1, img2, flow, mask = [[sintel.load(p) for p in data] for data in zip(*dataset)]
			validationSize += len(flow)
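The FlyingChairs snippets above and below rely on a project helper flo.load that is not shown. A minimal reader for the Middlebury .flo format that FlyingChairs uses could look like the following (read_flo is an illustrative name, not the project's API):

import numpy as np

def read_flo(path):
    # Middlebury .flo layout: float32 magic 202021.25, then int32 width and
    # height, then width*height*2 float32 values (u, v interleaved, row-major).
    with open(path, 'rb') as f:
        magic = np.fromfile(f, np.float32, count=1)[0]
        assert magic == 202021.25, 'invalid .flo file: bad magic number'
        width = int(np.fromfile(f, np.int32, count=1)[0])
        height = int(np.fromfile(f, np.int32, count=1)[0])
        data = np.fromfile(f, np.float32, count=2 * width * height)
    return data.reshape(height, width, 2)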
Example #3
File: main.py Project: ldf921/flownext
# load training set and validation set
if args.debug or args.fake_data:
    # synthesize random 384x512 image pairs and flow fields so the pipeline
    # can be exercised without reading the real datasets
    trainSet = np.arange(0, 128)
    validationSet = np.arange(0, 128)
    trainImg1 = np.random.normal(size=(128, 384, 512, 3))
    trainImg2 = np.random.normal(size=(128, 384, 512, 3))
    trainFlow = np.random.normal(size=(128, 384, 512, 2))
    validationImg1 = np.random.normal(size=(128, 384, 512, 3))
    validationImg2 = np.random.normal(size=(128, 384, 512, 3))
    validationFlow = np.random.normal(size=(128, 384, 512, 2))
    trainSize = validationSize = 128
elif args.net_data:
    # fetch samples through DatasetClient (socket-based data loading)
    print('socket data ...')
    trainSet, validationSet = trainval.read(
        '//msralab/ProjectData/ehealth02/v-dinliu/Flow2D/Data/FlyingChairs_release/FlyingChairs_train_val.txt'
    )
    client = DatasetClient()
    trainSize = len(trainSet)
    validationSize = len(validationSet)

    validationImg1, validationImg2, validationFlow = zip(
        *[fetch_data(i) for i in validationSet])
elif train_cfg.dataset.value == 'sintel':
    print('loading sintel dataset ...')
    subsets = train_cfg.subsets.value
    print(subsets[0], subsets[1])
    trainImg1 = []
    trainImg2 = []
    trainFlow = []
    sintel_dataset = sintel.list_data(sintel.sintel_path)