import cv2
import numpy as np
import torch

# Assumed imports: preprocess_input is taken from the InceptionV3 application
# module; downsample, threshold, process_rgb_sketch, process_contours,
# visualization and OUTLINE_IMPORTANCE are project-local helpers.
from tensorflow.keras.applications.inception_v3 import preprocess_input


def predict_shape(sketch, iv3_model, mapping_model, gan_model):
    # replicate the sketch to three channels and transpose to the expected layout
    sketch = np.tile(sketch, (1, 1, 3)).T
    sketch = preprocess_input(sketch)

    # extract bottleneck features and map them to a GAN latent vector
    features = iv3_model.predict(sketch, batch_size=1, verbose=1)
    latent_vec = mapping_model.predict(features, batch_size=1)[0]
    latent_vec = torch.Tensor(latent_vec)
    latent_vec = latent_vec.view(1, -1, 1, 1, 1)

    # generate a 64x64x64 voxel grid, pool it down and binarize it
    fake = gan_model(latent_vec)
    np_fake = fake.detach().numpy()
    voxels = np.reshape(np_fake, (64, 64, 64))
    voxels = downsample(voxels, 4, method='max')
    return threshold(voxels)
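# `downsample` and `threshold` above are project-local utilities whose
# implementation is not part of this listing. A minimal sketch of what they
# might look like, assuming block-wise pooling of the voxel grid and a binary
# occupancy cutoff (the default cutoff of 0.5 is an assumption):

def downsample(voxels, factor, method='max'):
    """Shrink a cubic voxel grid by `factor` using block-wise max/mean pooling."""
    n = voxels.shape[0] // factor
    blocks = voxels[:n * factor, :n * factor, :n * factor].reshape(
        n, factor, n, factor, n, factor)
    if method == 'max':
        return blocks.max(axis=(1, 3, 5))
    return blocks.mean(axis=(1, 3, 5))


def threshold(voxels, cutoff=0.5):
    """Binarize a voxel grid: a cell is occupied where its value exceeds `cutoff`."""
    return (voxels > cutoff).astype(np.uint8)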
def predict_nearest_shape(bgr_sketch, NNmodel, gan_model, latent_vectors,
                          n_neighbors=5,
                          colors=[[255, 255, 255], [0, 255, 0],
                                  [255, 0, 0], [0, 0, 255]]):
    # path to precomputed GAN sample data (not used further in this function)
    gan_features_path = r"data/samples_screenshot_BT.npy"

    # process the BGR sketch into per-colour contours
    contours = process_rgb_sketch(bgr_sketch, colors=colors, precision=0.00001,
                                  smoothing=10, resolution=200,
                                  diff_threshold=30, bspline_degree=1,
                                  n_contours=1)
    contours = process_contours(contours[:, :, 0, :])
    # weight the outline contour more heavily than the feature curves
    contours[0, :, :] *= OUTLINE_IMPORTANCE
    x = np.reshape(
        contours,
        (1, contours.shape[0] * contours.shape[1] * contours.shape[2]))

    # predict nearest neighbors
    distances, indices = NNmodel.kneighbors(x)
    closest = indices[0][0]

    # generate model from the closest training sample's latent vector
    latent_vec = latent_vectors[closest]
    latent_vec = torch.Tensor(latent_vec)
    latent_vec = latent_vec.view(1, -1, 1, 1, 1)
    fake = gan_model(latent_vec)
    np_fake = fake.detach().numpy()
    voxels = np.reshape(np_fake, (64, 64, 64))
    voxels = downsample(voxels, 2, method='mean')
    return threshold(voxels, 0.4)
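# `predict_nearest_shape` expects NNmodel to be a nearest-neighbour index fitted
# on flattened contour descriptors, one row per training sketch, aligned with
# `latent_vectors`. A minimal sketch of how such an index could be built with
# scikit-learn (the feature file names and array layout below are assumptions):

from sklearn.neighbors import NearestNeighbors


def build_nn_index(contour_features, n_neighbors=5):
    """Fit a k-NN index over an (n_samples, n_features) array of contour descriptors."""
    nn_model = NearestNeighbors(n_neighbors=n_neighbors)
    nn_model.fit(contour_features)
    return nn_model


# Hypothetical usage:
# contour_features = np.load("data/contour_features.npy")  # (N, D)
# latent_vectors = np.load("data/latent_vectors.npy")      # (N, latent_dim)
# nn_model = build_nn_index(contour_features)
# voxels = predict_nearest_shape(bgr_sketch, nn_model, gan_model, latent_vectors)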
x = np.reshape(
    contours,
    (1, contours.shape[0] * contours.shape[1] * contours.shape[2]))

# predict nearest neighbors
nneighbors = 1
distances, indices = nn_model.kneighbors(x)
closest = indices[0][:nneighbors]
# replace the raw distances with fixed, exponentially decaying weights
distances = np.array([(1 / np.exp((x + 1)**2))
                      for x in range(nneighbors)]).reshape(1, -1)
print(closest, distances[0])

# generate model: inverse-distance weighted average of the neighbours' latent vectors
latent_vecs = latent_vectors[closest]
latent_vec = np.sum(np.multiply(latent_vecs, (1 / distances).T),
                    axis=0) / np.sum(1 / distances)
latent_vec = torch.Tensor(latent_vec)
latent_vec = latent_vec.view(1, -1, 1, 1, 1)
fake = gan_model(latent_vec)
np_fake = fake.detach().numpy()
voxels = np.reshape(np_fake, (64, 64, 64))
voxels = downsample(voxels, 2, method='mean')
visualization(voxels, 0.3, title=None, uniform_size=0,
              use_colormap=False, angle=0.3)
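# `visualization` is another project-local helper. A minimal sketch of a voxel
# viewer with matplotlib that mirrors the call signature used above; only the
# grid and the occupancy cutoff are used here, the remaining arguments are
# accepted for compatibility but ignored in this sketch:

import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401  (registers the 3d projection)


def visualization(voxels, cutoff, title=None, uniform_size=0,
                  use_colormap=False, angle=0.3):
    """Render the occupied cells (value > cutoff) of a voxel grid in 3D."""
    occupied = voxels > cutoff
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.voxels(occupied)
    if title:
        ax.set_title(title)
    plt.show()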
# close the drawing window; `image` holds the sketch captured from it
cv2.destroyWindow('image')

# get sketch bottleneck features
iv3_input = (139, 139, 3)
images = np.empty((1, iv3_input[0], iv3_input[1], iv3_input[2]))
resized = cv2.resize(image.copy(), (iv3_input[0], iv3_input[1]))
resized = preprocess_input(resized)
images[0] = resized.copy()

# extract bottleneck features
features = iv3_model.predict(images, batch_size=1, verbose=1)

# predict latent vector
latent_vec = mapping_model.predict(features, batch_size=1)[0]
latent_vec = torch.Tensor(latent_vec)
latent_vec = latent_vec.view(1, -1, 1, 1, 1)

# generate shape
gan_model.eval()
fake = gan_model(latent_vec)
np_fake = fake.detach().numpy()
voxels = np.reshape(np_fake, (64, 64, 64))
voxels = downsample(voxels, 4, method='max')
visualization(voxels, 0.5, title=None, uniform_size=1,
              use_colormap=False, angle=0.3)
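# The snippets above assume `iv3_model` is an InceptionV3 bottleneck-feature
# extractor fed with 139x139x3 sketches, and `mapping_model` is a trained
# regressor from those features to GAN latent vectors. A minimal sketch of the
# extractor with Keras; the global-average pooling and ImageNet weights are
# assumptions:

from tensorflow.keras.applications.inception_v3 import InceptionV3


def build_iv3_feature_extractor(input_shape=(139, 139, 3)):
    """InceptionV3 without its classification head, emitting pooled bottleneck features."""
    return InceptionV3(include_top=False, weights='imagenet',
                       pooling='avg', input_shape=input_shape)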