def test():
    """Run segmentation inference over the horse test list and save label PNGs.

    Loads a CRF-RNN-augmented FCN model with fixed weights, then for every
    image name in ``lst/horsecoarse_test.txt`` reads ``<input_dir>/<name>.jpg``,
    predicts per-pixel class probabilities, converts them to a label image and
    writes it to ``<output_dir>``.  No return value; output is written to disk.
    """
    # Set input and output dirs
    input_dir = "/storage/cfmata/deeplab/crf_rnn/crfasrnn_keras/data/horse_fine/images_orig/"
    output_dir = "/storage/cfmata/deeplab/crf_rnn/crfasrnn_keras/image_results/horse_fine/fcn/"
    input_size = 224
    num_crf_iter = 10
    saved_model_path = 'results/horse_fine/horse_fine_weights.500-0.53'
    #model = get_crfrnn_model_def()
    model = load_model_gby('fcn_RESNET50_8s', input_size, 22, num_crf_iter)
    model.load_weights(saved_model_path)
    # Fix: use a context manager so the list file is closed (the original
    # open(...).readlines() leaked the handle), and strip only the trailing
    # newline -- the original f[:-1] dropped the last character of the final
    # entry whenever the file did not end with a newline.
    with open("lst/horsecoarse_test.txt") as list_file:
        im_list = [line.rstrip('\n') for line in list_file]
    for img in im_list:
        img_data, img_h, img_w = util.get_preprocessed_image(input_dir + img + ".jpg")
        # predict() returns a batch; take the single item's (H, W, C) probabilities.
        probs = model.predict(img_data, verbose=False, batch_size=1)[0, :, :, :]
        segmentation = util.get_label_image(probs, img_h, img_w)
        print(output_dir + img)
        # NOTE(review): img has no extension here (".jpg" is appended above),
        # so img[:-4] chops 4 characters off the bare name -- looks like a
        # leftover from when the list contained "name.jpg" entries. Kept
        # as-is to preserve existing output filenames; confirm intent.
        segmentation.save(output_dir + img[:-4] + ".png")
# # print("batch sizes train ", batch_sizes_train)
# print("batch sizes val ", batch_sizes_val)
# print("batch sizes total ", batch_sizes_total)
# pdb.set_trace()
# with tf.device('/cpu:0'):
#     input_image, output_image = data_augmentation(input_image, output_image)
# ===============
# LOAD model:
# ===============
# for training:
num_crf_iterations = 5
# Build the model for training; finetune_path and batch_size come from CLI args /
# earlier script state. (Trailing batch-size list arguments are disabled.)
model = load_model_gby(args.model, INPUT_SIZE, nb_classes, num_crf_iterations,
                       args.finetune_path, batch_size)  # ,batch_sizes_train, batch_sizes_val, batch_sizes_total)
# if resuming training: restore previous weights when a valid checkpoint path was given
if (args.weights is not None) and (os.path.exists(args.weights)):
    print("loading weights %s.." % args.weights)
    model.load_weights(args.weights)
model.summary()
# Fix: corrected the 'trining' typo in the log message (the parallel training
# fragment elsewhere in this project already prints 'training model %s..').
print('training model %s..' % model.name)
# ===============
# LOAD sp segment:
# ===============
# if model.sp_flag:
#     segments_train = load_segmentations(ds.segments_dir, ds.train_list, INPUT_SIZE)
# ===============
# LOAD model:
# ===============
model_name = args.model
model_path_name = args.weights
base_img_name = []  # collects the basenames of the image(s) being predicted
print('====================================================================================')
print(model_path_name)
print('====================================================================================')
finetune_path = ''
#pdb.set_trace()
# n_classes / num_crf_iterations come from earlier script state (not visible here).
model = load_model_gby(model_name, INPUT_SIZE, n_classes, num_crf_iterations, finetune_path)
#loading weights:
model.load_weights(model_path_name)
# Computing prediction:
# ------------------------------
print('computing prediction..')
# Fix: compare against None with 'is None' (identity test) rather than '==None'.
if args.folderpath is None:
    # Single-image mode: load and preprocess one image from --imagepath.
    img_org = cv2.imread(args.imagepath)
    x = load_image(img_org, INPUT_SIZE)
    base_img_name.append(os.path.splitext(os.path.basename(args.imagepath))[0])
else:
    # Folder mode: batch of images written to an 'out/' subfolder.
    # NOTE(review): this branch continues beyond the visible chunk.
    X = []
    outdirName = args.folderpath + 'out/'
# Load the dataset selected on the command line, resized/cropped to INPUT_SIZE.
ds = load_dataset(args.dataset, INPUT_SIZE)
# Sanity-print the train/test tensor shapes before building the model.
print(ds.X_train.shape, ds.y_train.shape)
print(ds.X_test.shape, ds.y_test.shape)
# Number of segmentation classes is taken from the dataset object.
nb_classes = ds.nb_classes
# pdb.set_trace()
# with tf.device('/cpu:0'):
#     input_image, output_image = data_augmentation(input_image, output_image)
# ===============
# LOAD model:
# ===============
# for training:
num_crf_iterations = 5
model = load_model_gby(args.model, INPUT_SIZE, nb_classes, num_crf_iterations)
# if resuming training: restore weights only when a checkpoint path was given and exists
if (args.weights is not None) and (os.path.exists(args.weights)):
    print("loading weights %s.." % args.weights)
    model.load_weights(args.weights)
model.summary()
print('training model %s..' % model.name)
# ===============
# LOAD sp segment:
# ===============
# Superpixel variants additionally need precomputed segmentations for the train list.
# NOTE(review): sp_flag appears to be a custom attribute set by load_model_gby -- confirm.
if model.sp_flag:
    segments_train = load_segmentations(ds.segments_dir, ds.train_list, INPUT_SIZE)