1.0 * pw / sum(PARTIALS_WEIGHTS) for pw in PARTIALS_WEIGHTS ] if not USE_PARTIALS: partials = partials[0:1] PARTIALS_WEIGHTS = [1.0] qsf.evaluateAllResults(result_files=files['result'], absolute_disparity=ABSOLUTE_DISPARITY, cluster_radius=CLUSTER_RADIUS, labels=SLICE_LABELS, logpath=LOGPATH) image_data = qsf.initImageData(files=files, max_imgs=MAX_IMGS_IN_MEM, cluster_radius=CLUSTER_RADIUS, tile_layers=TILE_LAYERS, tile_side=TILE_SIDE, width=IMG_WIDTH, replace_nans=True) corr2d_len, target_disparity_len, _ = qsf.get_lengths(CLUSTER_RADIUS, TILE_LAYERS, TILE_SIDE) train_next, dataset_train, datasets_test = qsf.initTrainTestData( files=files, cluster_radius=CLUSTER_RADIUS, buffer_size=TRAIN_BUFFER_SIZE * BATCH_SIZE, # number of clusters per train test_titles=TEST_TITLES) corr2d_train_placeholder = tf.placeholder( dataset_train.dtype,
#PB_TAGS = ["model_pb"]
# --- Results-directory setup -------------------------------------------------
# Copy the run's config file next to its results so every result set records
# which configuration produced it.
print ("Copying config files to results directory:\n ('%s' -> '%s')"%(conf_file,dirs['result']))
try:
    os.makedirs(dirs['result'])
except OSError:
    # Directory already exists — fine. Any other OSError (e.g. permissions)
    # will surface on the copy below anyway. (Was a bare `except:`, which
    # also swallowed KeyboardInterrupt/SystemExit.)
    pass
shutil.copy2(conf_file, dirs['result'])
LOGPATH = os.path.join(dirs['result'], LOGFILE)

# Load image/tile data once. cluster_radius is forced to 0 here (the original
# CLUSTER_RADIUS is deliberately commented out) — NOTE(review): presumably
# inference reads single tiles rather than tile clusters; confirm against
# qsf.initImageData. keep_gt=True so the same output files can be generated.
image_data = qsf.initImageData(  # just use image_data[0]
    files=files,
    max_imgs=MAX_IMGS_IN_MEM,
    cluster_radius=0,  # CLUSTER_RADIUS,
    tile_layers=TILE_LAYERS,
    tile_side=TILE_SIDE,
    width=IMG_WIDTH,
    replace_nans=True,
    infer=True,
    keep_gt=True)  # to generate same output files

cluster_radius = CLUSTER_RADIUS
ROOT_PATH = './attic/infer_qcds_graph'+SUFFIX+"/"  # for tensorboard

# Make sure the directory for the inference output exists.
try:
    os.makedirs(os.path.dirname(files['inference']))
    print ("Created directory ",os.path.dirname(files['inference']))
except OSError:
    # Already exists — nothing to do. (Narrowed from a bare `except:`.)
    pass