def get_size(dataset): s = 0 for run in dataset: db1 = database.ImageLMDB(run + "/image") db2 = database.ImageLMDB(run + "/poke") print run, db1.i, db2.i s += db1.i return s
def get_runs(runs, stop=True): while True: random.shuffle(runs) for r in runs: db_images = database.ImageLMDB(r + "/image") db_pokes = database.SensorLMDB(r + "/poke") print "loading from", r, "size", db_pokes.i, yield db_images.images(), db_pokes.readings() if stop: break
def get_data(dataset, shuffle=True):
    """Generate cropped training samples for the named dataset.

    dataset: name (appended to DATASET_LOCATION to locate the LMDBs).
    shuffle: NOTE(review) — unused in this body; presumably a leftover or
        handled elsewhere. Confirm before removing.

    Yields (img_before, img_after, seg_before_resized, seg_after_resized,
    cropped_poke) tuples, looping over the databases forever (outer
    `while True`).
    """
    location = DATASET_LOCATION + dataset
    db_image_before = database.ImageLMDB(location + "/image_before", convert_from_ros=False)
    db_image_after = database.ImageLMDB(location + "/image_after", convert_from_ros=False)
    db_poke = database.SensorLMDB(location + "/poke")
    while True:
        # Fresh iterators each epoch; izip stops at the shortest DB.
        iterators = [
            db_image_before.images(),
            db_image_after.images(),
            db_poke.readings()
        ]
        for image_before, image_after, poke in itertools.izip(*iterators):
            n_before = normalize(image_before)
            n_after = normalize(image_after)
            before_segment = get_rope_segmentation(n_before)
            after_segment = get_rope_segmentation(n_after)
            imgs = [n_before, n_after, before_segment, after_segment]
            # First crop is used only as a feasibility gate; its result is
            # discarded and re-drawn inside the loop below.
            # NOTE(review): poke[:4, 0, 0] implies poke is at least 3-D with
            # 4+ leading entries — confirm against SensorLMDB's layout.
            c_data = rc.random_crop(imgs, poke[:4, 0, 0])
            if c_data:
                counter = 0
                # Draw up to 64 accepted crops per image pair.
                # NOTE(review): counter only advances on an accepted crop, so
                # if no crop ever passes the diff threshold this loop does not
                # terminate — confirm rc.random_crop's failure behavior.
                while counter < 64:
                    c_data = rc.random_crop(imgs, poke[:4, 0, 0])
                    if c_data:
                        cropped_images, cropped_poke = c_data
                        img_before, img_after, s_before, s_after = cropped_images
                        s_b = resize_segmentation(s_before)
                        s_a = resize_segmentation(s_after)
                        # Per-pixel mask: True where the summed absolute
                        # channel difference (axis 2) exceeds 100.
                        im = np.sum(
                            abs(
                                img_before.astype(float) -
                                img_after.astype(float)), 2) > 100
                        diff = np.sum(
                            im
                        )  # rough measure of number of pixels different between the two images
                        if diff > 1000:
                            # Only yield crops where the scene visibly changed.
                            yield img_before, img_after, s_b, s_a, cropped_poke
                            counter += 1