# --- Training tail: manual SGD step on the current bias parameter ---
# NOTE(review): this fragment clearly belongs inside loops over `bias` and
# epoch index `i` whose headers are outside this view — confirm indentation
# against the enclosing training loop.
bias_weight = bias_list[bias]._get_weight()  # hoist the repeated lookup
bias_weight.data = bias_weight.data - learning_rate * bias_weight.grad.data
bias_weight.grad.data.zero_()  # reset accumulated gradient for the next step

if i % 2 == 0:
    print("Total Loss : ", total_loss.sum().data)

# --- Testing: render a grid of decoder outputs over the 2-D latent space ---
n = 20
x_limit = np.linspace(-2, 2, n)
y_limit = np.linspace(-2, 2, n)
# One big canvas of n x n tiles, each tile a 28x28 decoded image.
empty_image = np.empty((28 * n, 28 * n))

for i, zi in enumerate(x_limit):
    for j, pi in enumerate(y_limit):
        # Decoder appears to expect a batch, so replicate the latent point.
        generated_latent_layer = np.array([[zi, pi]] * batch_size)
        Decoder_Noisy = Decoder(generated_latent_layer)
        generated_image = Decoder_Noisy.decode(weight_list, bias_list)
        generated_image = generated_image.detach().numpy()
        # Flip the row index so increasing zi runs bottom-to-top on the canvas.
        empty_image[(n - i - 1) * 28:(n - i) * 28, j * 28:(j + 1) * 28] = (
            generated_image[0].reshape(28, 28)
        )

plt.figure(figsize=(8, 10))
plt.imshow(empty_image, origin="upper", cmap="gray")
# BUG FIX: plt.grid('False') passed a truthy string, which *enabled* the
# grid; pass the boolean False to actually turn it off.
plt.grid(False)
plt.show()