def admin_storage():
    """Render the admin storage-overview page.

    Reports sample/annotation counts plus, for each monitored directory,
    total and available disk space and (except for train_data, which may
    be huge) the recursively computed folder size.
    """
    def _path_entry(name, path):
        # One row of the storage table for a single monitored directory.
        stats = os.statvfs(path)
        # Folder-size scan skipped for train_data — shown as '?' instead.
        used = '?' if name == 'train_data' else \
            bytes_humanfriendly(get_recursive_folder_size(path))
        return {
            'name': name,
            'path': path,
            'disk_total': bytes_humanfriendly(stats.f_frsize * stats.f_blocks),
            'disk_avail': bytes_humanfriendly(stats.f_frsize * stats.f_bavail),
            'used': used,
        }

    monitored = (
        ('server_heatmap', config.get_server_heatmap_path()),
        ('server_image', config.get_server_image_path()),
        ('cnn', config.get_cnn_path()),
        ('caffe', config.get_caffe_path()),
        ('plot', config.get_plot_path()),
        ('train_data', config.get_train_data_path()),
    )
    path_data = [_path_entry(name, path) for name, path in monitored]
    return render_template('admin_storage.html',
                           num_images=db.get_sample_count(),
                           num_human_annotations=db.get_human_annotation_count(),
                           path_data=path_data,
                           error=pop_last_error())
def db2patches(output_path, train_label=None, sample_limit=None): # All human annotations in DB converted to a training set n_angles = 8 angles = np.linspace(0, 360, num=n_angles, endpoint=False) extract_size = (256, 256) # wdt, hgt annotated_samples = db.get_human_annotated_samples(train_label=train_label) n_annotated_samples = len(annotated_samples) if sample_limit is not None and n_annotated_samples > sample_limit: print 'Reducing from %d to sample limit %d...' % (n_annotated_samples, sample_limit) annotated_samples = np.random.choice(annotated_samples, sample_limit, replace=False) print 'Extracting patches from %d images to %s...' % ( len(annotated_samples), output_path) n = 0 for s in tqdm(annotated_samples): img_filename = os.path.join(config.get_server_image_path(), s['filename']) img_name = s['filename'].replace('.', '_') for annotation in db.get_human_annotations(s['_id']): allpos = [dbpos2extpos(p) for p in annotation['positions']] n += extract_positions(img_filename, allpos, output_path, img_name, angles, extract_size, s['_id']) print '%d patches extracted.' % n
def add_image_measures(): for s in db.samples.find({'processed': True}): if not s.get('imq_entropy'): image_filename = s['filename'] print 'Processing %s...' % image_filename image_filename_full = os.path.join(config.get_server_image_path(), image_filename) image_measures = get_image_measures(image_filename_full) db.set_image_measures(s['_id'], image_measures)
def make_unique_server_image_filename(filename): basename, ext = os.path.splitext(filename) filename = basename + ext i = 1 while True: full_fn = os.path.join(config.get_server_image_path(), filename) if not os.path.isfile(full_fn): break filename = '%s-%03d%s' % (basename, i, ext) i += 1 if i > 1: print 'File renamed to be unique:', filename return full_fn
def compute_stomata_positions_for_sample(sample_id, plot=False):
    # Recompute stomata positions from each stored machine annotation's
    # heatmap image for the given sample.
    # NOTE(review): the return value of compute_stomata_positions() is
    # discarded each iteration — presumably it persists/updates positions
    # as a side effect; confirm before relying on this function's output.
    machine_annotations = db.get_machine_annotations(sample_id)
    sample = db.get_sample_by_id(sample_id)
    image_filename = os.path.join(config.get_server_image_path(),
                                  sample['filename'])
    for machine_annotation in machine_annotations:
        heatmap_image_filename = os.path.join(
            config.get_server_heatmap_path(),
            machine_annotation['heatmap_image_filename'])
        # heatmap_filename is only consumed by the commented-out
        # plot_heatmap() call below; kept for easy re-enabling.
        heatmap_filename = os.path.join(config.get_server_heatmap_path(),
                                        machine_annotation['heatmap_filename'])
        #plot_heatmap(image_filename, heatmap_filename, heatmap_image_filename)
        print heatmap_image_filename
        heatmap_image = imread(heatmap_image_filename)
        positions = compute_stomata_positions(machine_annotation,
                                              heatmap_image, plot=plot)
def delete_sample(sample_id, delete_files=False, do_access_dataset=True): sample = samples.find_one_and_delete({'_id': sample_id}) if sample is None: return False # Also delete files. if delete_files: image_filename = sample['filename'] image_filename_base = os.path.splitext(image_filename)[0] image_filename_full = os.path.join(config.get_server_image_path(), image_filename) heatmap_filename = os.path.join(config.get_server_heatmap_path(), 'alexnetftc_5000', image_filename_base + '_heatmap.jpg') heatmap_data_filename = os.path.join(config.get_server_heatmap_path(), 'alexnetftc_5000', image_filename_base + '_heatmap.npz') for fn in image_filename_full, heatmap_filename, heatmap_data_filename: try: os.remove(fn) print 'Deleted', fn except OSError: print 'Error deleting', fn # Mark dataset as accessed if do_access_dataset: access_dataset(sample['dataset_id']) return True
def import_karl_labels(): dataset_id = get_karl_dataset_id() train_path = os.path.join(config.get_data_path(), 'Pb_stomata_09_03_16_Archive') positions = load_positions( os.path.join(train_path, 'VT_stomata_xy_trial_10_15_16.txt')) for fn, pos in positions.iteritems(): pos_db = [pos2db(p) for p in pos] fnj = fn + '.jpg' fn_full = os.path.join(train_path, fnj) im = Image.open(fn_full) filename = os.path.basename(fnj) fn_target = os.path.join(config.get_server_image_path(), filename) shutil.copyfile(fn_full, fn_target) sample = db.add_sample(os.path.basename(fn_target), size=im.size, dataset_id=dataset_id) sample_id = sample['_id'] db.set_human_annotation(sample_id, None, pos_db, margin=32) print 'http://0.0.0.0:9000/info/%s' % str(sample_id) print train_path print positions
def static_images(path):
    """Serve an uploaded image file from the server image directory."""
    image_root = config.get_server_image_path()
    return send_from_directory(image_root, path)