def process_image_sw(image_fname, w_size, s):
    """Extract sliding-window patches and their coordinates from one image.

    Loads and normalizes the image at ``image_fname``, derives a binary
    (foreground) map from it, then collects window patches of size ``w_size``
    taken at stride ``s`` together with the coordinates of each window.

    Returns a ``(patches, patch_coords)`` tuple, as produced by the ``sw``
    helper module.
    """
    img = sw.normalize_image(image_fname)
    mask = sw.get_binary_map(img)
    patches = sw.get_sliding_window_patches(img, mask, w_size, s)
    patch_coords = sw.get_sliding_window_patches_coord(img, mask, w_size, s)
    return patches, patch_coords
# NOTE(review): this span is extraction-mangled (multiple statements collapsed
# onto one physical line) and is an incomplete chunk of a larger script: the
# `elif patches_type == 'components':` below has its matching `if` outside this
# view, and the final `for fname in ...:` header is cut off before its body.
# Left byte-identical; only observations are added here.
#
# What the visible code does, read left to right:
#   1. For every file in `image_path`: extract sliding-window patches and their
#      coordinates via process_image_sw, classify the patches with `net.predict`
#      (patches converted with img_as_ubyte first), pair each patch coordinate
#      with output[i][1] (presumably the positive-class probability -- confirm
#      against the net's output layout), record the per-document mean of those
#      scores in doc_probs, and plot the per-patch estimates over the
#      normalized image.
#   2. Print the mean probability across all documents.
#   3. In the 'components' branch: build a caffe.Classifier from hard-coded
#      deploy/caffemodel/mean-file paths with 28x28 input dims, then begin
#      another loop over image_path (body not visible here).
#
# NOTE(review): `print 'Process %s' % fname` etc. is Python 2 statement syntax,
# and `map(img_as_ubyte, patches)` is relied on to yield a list -- this file is
# Python 2 code; porting to Python 3 would require print() and list(map(...)).
# NOTE(review): the model/mean paths are absolute and machine-specific
# (/home/andrew/...) -- consider making them configurable; TODO confirm with owner.
doc_probs = [] for fname in os.listdir(image_path): print 'Process %s' % fname image_name = os.path.join(image_path, fname) patches, patches_coord = process_image_sw(image_name, window_size, stride) output = net.predict(map(img_as_ubyte, patches)) bbox_list = [(patches_coord[i], output[i][1]) for i in range(len(output))] doc_probs.append(np.mean([output[i][1] for i in range(len(output))])) matplotlib.rcParams['figure.figsize'] = (10.0, 18.0) viz.plot_image_estimated_al_maqrizi_probability2(sw.normalize_image(image_name), bbox_list) print 'Al-Maqrizi document probability: %f' % np.mean(doc_probs) elif patches_type == 'components': patch_size = 28 model = '/home/andrew/Projects/al-maqrizi/nets/components_bin/deploy.prototxt' pretrained = '/home/andrew/Projects/al-maqrizi/nets/components_bin/snapshot_iter_70100.caffemodel' mean_file = '/home/andrew/Projects/al-maqrizi/nets/components_bin/mean.npy' net = caffe.Classifier(model, pretrained, image_dims=(patch_size, patch_size), mean=np.load(mean_file).mean(1).mean(1)) doc_probs = [] for fname in os.listdir(image_path):