"""Smoke test for ``subspace_shape``.

Learns a two-sample linear shape model from a pair of parabolas, then runs
inference against a synthetic binary edge image containing a parabolic arc.
"""
import numpy as np

from skimage.feature import canny
from vision.measurements import subspace_shape
from vision.segmentation.segment import crop_by_saliency, saliency_dragonfly
from vision.tests import get_test_image

# Training shape 1: 11 points sampled from y = x**2 for x in [-5, 5].
src = np.zeros((11, 2))
src[:, 0] = np.arange(-5, 6)
src[:, 1] = np.power(src[:, 0], 2)

# Training shape 2: the same parabola shifted right by 3 and flattened
# vertically by a factor of 12.  NOTE(review): the y-values intentionally
# reuse src[:, 0] — since dst[:, 0] == src[:, 0] + 3, this is equivalent
# to ((dst[:, 0] - 3) ** 2) / 12.
dst = np.zeros((11, 2))
dst[:, 0] = np.arange(-5, 6) + 3
dst[:, 1] = np.power(src[:, 0], 2) / 12

# Fit the shape subspace: mean shape `mu`, basis `phi`, residual
# variance `sigma2` (names follow subspace_shape.learn's return order).
mu, phi, sigma2 = subspace_shape.learn((src, dst))

# Synthetic test image: an upside-down parabolic arc of True pixels.
# `np.bool` and `np.int` were removed in NumPy 1.24 — use the builtin
# `bool` / `int` instead (identical behavior on all NumPy versions).
image = np.zeros((100, 100), dtype=bool)
j = np.arange(20, 80)
i = np.power(j - 50, 2) / 10
image[90 - i.astype(int), j] = True

# Fit the learned model to the synthetic edge image.
subspace_shape.infer(image, mu, phi, sigma2)
# NOTE(review): this chunk appears to start mid-loop — `i`, `wing`, `minor`,
# `major`, `tform`, `wings`, `edges`, `edge_lengths`, `shape_model`,
# `initial_scale`, `initial_rotation`, `initial_translation` and
# `wings_image` are all bound outside the visible span.  The loop nesting
# below is reconstructed from a flattened one-line source; confirm against
# the original file's indentation.

# Record this wing's centroid as its initial translation estimate.
initial_translation[i, :] = wing.centroid

# Corners of an axis-aligned (minor x major) bounding box centred on the
# origin, listed counter-clockwise.
coords = np.array([[-(minor / 2), -(major / 2)],
                   [-(minor / 2), (major / 2)],
                   [(minor / 2), (major / 2)],
                   [(minor / 2), -(major / 2)]])

# Rotate/transform the box and move it onto the wing centroid, then draw
# its perimeter on the visualization image in blue.
rotated_coords = tform(coords) + wing.centroid
box_coords = polygon_perimeter(rotated_coords[:, 0], rotated_coords[:, 1])
set_color(wings_image, box_coords, [0, 0, 1])

# Coarse-to-fine update schedule: start with slice(13, -2), then widen the
# updated coefficient range one step at a time (start = 12, 11, ..., 0).
slices = [slice(13, -2)] + [slice(start, None) for start in range(13)[::-1]]

for wing_index in range(len(wings)):
    # `infer` is a generator; it is primed with send(None) and then fed the
    # current update slice on each send(), yielding the current fit.
    inference = subspace_shape.infer(edges,
                                     edge_lengths,
                                     *shape_model,
                                     update_slice=slices[0],
                                     scale_estimate=initial_scale[wing_index],
                                     rotation=initial_rotation[wing_index],
                                     translation=initial_translation[wing_index, [1, 0]])
    inference.send(None)
    for i, s in enumerate(slices):
        # Fixed iteration budget per slice; h and psi from the last
        # iteration are used for the similarity score below.
        for iteration in range(100):
            fitted_shape, closest_edge_points, h, psi = inference.send(s)
            # if iteration % 50 == 0:
            #     output_image = visualize_result(wings_image, edges, fitted_shape, closest_edge_points)
            #     write_image('wing_{}_slice_{}_iteration_{}.png'.format(wing_index, i, iteration), output_image)
    # Report how well the final fit matches the detected edges.
    print(subspace_shape.similarity(edges, *shape_model, h, psi))
# NOTE(review): chunk of a larger script — `minor`, `major`, `tform`,
# `wing`, `edges`, `edge_lengths`, `shape_model`, `initial_scale`,
# `initial_rotation`, `initial_translation`, `wings_image` and
# `visualize_result` are defined outside the visible span, and the final
# statement looks truncated (output_image is assigned but not used here).
# Loop nesting is reconstructed from a flattened one-line source.

# Corners of an axis-aligned (minor x major) bounding box centred on the
# origin, listed counter-clockwise.
coords = np.array([[-(minor / 2), -(major / 2)],
                   [-(minor / 2), (major / 2)],
                   [(minor / 2), (major / 2)],
                   [(minor / 2), -(major / 2)]])

# Rotate/transform the box, move it onto the wing centroid, and draw its
# perimeter on the visualization image in blue.
rotated_coords = tform(coords) + wing.centroid
box_coords = polygon_perimeter(rotated_coords[:, 0], rotated_coords[:, 1])
set_color(wings_image, box_coords, [0, 0, 1])
# write_image('distance_box.png', wings_image)

# Coarse-to-fine update schedule: slice(13, -2) first, then widen the
# updated coefficient range one step at a time (start = 12, 11, ..., 0).
slices = [slice(13, -2)] + [slice(start, None) for start in range(13)[::-1]]
# slices = [slice(None)]

# `infer` is a generator: prime with send(None), then feed it the current
# update slice each iteration and receive the current fit back.
inference = subspace_shape.infer(edges,
                                 edge_lengths,
                                 *shape_model,
                                 update_slice=slices[0],
                                 scale_estimate=initial_scale[0],
                                 rotation=initial_rotation[0],
                                 translation=initial_translation[0, [1, 0]])

# Previous iterate, used to print per-iteration mean squared displacement
# as a convergence diagnostic.
fitted_shape_old = np.zeros_like(shape_model[0].reshape(-1, 2))

inference.send(None)
for i, s in enumerate(slices):
    # Fixed iteration budget per slice.
    for iteration in range(100):
        fitted_shape, closest_edge_points = inference.send(s)
        # Mean squared per-point movement since the last iteration.
        print((np.power(fitted_shape - fitted_shape_old, 2).sum(axis=1).mean()))
        fitted_shape_old = fitted_shape
        if iteration % 50 == 0:
            output_image = visualize_result(wings_image, edges, fitted_shape, closest_edge_points)