def main():
    """Compare training vs. test E_RMS as the polynomial degree grows from 0 to 9.

    Fits each degree on 10 noisy sin(2*pi*x) samples and evaluates on 100
    held-out samples, then plots both error curves on one figure.
    """
    true_curve = lambda x: np.sin(2 * np.pi * x)
    x_train, y_train = generate_random_data(x_min=0, x_max=1, y_function=true_curve, size=10)
    x_test, y_test = generate_random_data(x_min=0, x_max=1, y_function=true_curve, size=100)
    degrees = list(range(10))
    # One (train, test) loss pair per degree; unzip into two parallel series.
    loss_pairs = [
        compute_loss(x_train, y_train, x_test, y_test, order=degree)
        for degree in degrees
    ]
    train_errors, test_errors = zip(*loss_pairs)
    plt.plot(degrees, train_errors, 'o-', mfc='none', mec='b', ms=10, c='b', label='Training')
    plt.plot(degrees, test_errors, 'o-', mfc='none', mec='r', ms=10, c='r', label='Test')
    plt.xlabel('Degree')
    plt.ylabel('$E_{RMS}$')
    plt.legend()
    plt.show()
def main():
    """Sweep the L2 penalty for a fixed degree-9 fit and plot train/test E_RMS.

    Trains on 10 noisy sin(2*pi*x) samples, evaluates on 100 held-out
    samples, for 1000 lambda values in [1e-5, 5e-3].
    """
    target_function = lambda x: np.sin(2 * np.pi * x)
    train_size = 10
    test_size = 100
    x_train, y_train = generate_random_data(x_min=0, x_max=1, y_function=target_function, size=train_size)
    x_test, y_test = generate_random_data(x_min=0, x_max=1, y_function=target_function, size=test_size)
    lamda_values = np.linspace(1e-5, 5e-3, 1000)
    losses_train = []
    losses_test = []
    for lamda_value in lamda_values:
        loss_train, loss_test = compute_loss(x_train, y_train, x_test, y_test,
                                             order=9, lamda_value=lamda_value)
        losses_train.append(loss_train)
        losses_test.append(loss_test)
    # pyplot accepts array-likes directly; no need to wrap in list().
    plt.plot(lamda_values, losses_train, '-', mfc='none', mec='b', ms=10, c='b', label='Training')
    plt.plot(lamda_values, losses_test, '-', mfc='none', mec='r', ms=10, c='r', label='Test')
    # Bug fix: the x-axis is the regularization strength, not the polynomial
    # degree — the old 'Degree' label was copied from the degree-sweep plot.
    plt.xlabel(r'$\lambda$')
    plt.ylabel('$E_{RMS}$')
    plt.legend()
    plt.show()
def plot_with_number_data(ax, number_data, order):
    """Draw a degree-`order` polynomial fit of `number_data` noisy samples on `ax`.

    Shows the sampled points (blue circles), the fitted curve (red), the true
    sin(2*pi*x) curve (green), and an "N = ..." annotation. Seeds NumPy's RNG
    so every panel draws from the same random stream.
    """
    np.random.seed(1234)
    true_curve = lambda x: np.sin(2 * np.pi * x)
    xs, ys = generate_random_data(x_min=0, x_max=1, y_function=true_curve, size=number_data)
    model = PolynomialRegression(xs, ys, order=order)
    ax.scatter(xs, ys, facecolor='none', edgecolors='b', s=50)
    plot_curve(ax, model.predict, color='r')
    plot_curve(ax, true_curve, color='g')
    ax.text(0.8, 0.8, 'N = {}'.format(number_data))
def main():
    """Print a table of fitted polynomial coefficients for degrees 0, 1, 6 and 9.

    Columns are labeled 'M=<degree>'; shorter coefficient vectors are padded
    with blanks so the table prints cleanly.
    """
    true_curve = lambda x: np.sin(2 * np.pi * x)
    x_train, y_train = generate_random_data(x_min=0, x_max=1, y_function=true_curve, size=10)
    # One column of coefficients per polynomial degree.
    coef_table = {
        'M={}'.format(degree): compute_coefficient(x_train, y_train, order=degree)
        for degree in (0, 1, 6, 9)
    }
    frame = pd.DataFrame(coef_table).fillna(' ')
    print(frame)
def main():
    """Show side-by-side degree-9 fits under two L2 penalties (1e-3 and 1).

    Each panel plots the 10 noisy training points, the true sin(2*pi*x) curve
    (green), and the regularized fit (red), annotated with its lambda value.
    """
    target_function = lambda x: np.sin(2 * np.pi * x)
    x_random, y_random = generate_random_data(x_min=0, x_max=1, y_function=target_function, size=10)
    fig, axs = plt.subplots(1, 2, figsize=(9, 3))
    # Both panels use degree 9; only the penalty strength differs.
    lamda_values = [1e-3, 1]
    for ax, lamda_value in zip(axs, lamda_values):
        polynomial_resolve = PolynomialRegression(x_random, y_random, order=9,
                                                  use_l2=True, lamda_value=lamda_value)
        ax.scatter(x_random, y_random, facecolor='none', edgecolors='b', s=50, label='training')
        # Raw strings: '\p' / '\l' in plain literals are invalid escape
        # sequences (SyntaxWarning on modern Python); bytes are unchanged.
        plot_curve(ax=ax, function_plot=target_function, color='g', label=r'$sin(2 \pi x)$')
        # Pass the bound method directly — the old per-iteration lambda (and
        # the unused y_predict/y_target computations) were dead weight.
        plot_curve(ax=ax, function_plot=polynomial_resolve.predict, color='r', label='fitting')
        ax.annotate(r"$\lambda$={}".format(lamda_value), xy=(0.6, 1))
    plt.legend(bbox_to_anchor=(1.05, 0.29), loc=2, borderaxespad=0)
    plt.show()
def main():
    """Illustrate under-, good and over-fitting with degree 1, 3 and 9 polynomials.

    Each panel plots the 10 noisy training points, the true sin(2*pi*x) curve
    (green), and the unregularized fit (red), with a descriptive title.
    """
    target_function = lambda x: np.sin(2 * np.pi * x)
    x_random, y_random = generate_random_data(x_min=0, x_max=1, y_function=target_function, size=10)
    fig, axs = plt.subplots(1, 3, figsize=(9, 3))
    orders = [1, 3, 9]
    titles = ["Underfitting", "Good fit", "Overfitting"]
    for ax, order, title in zip(axs, orders, titles):
        polynomial_resolve = PolynomialRegression(x_random, y_random, order=order)
        ax.scatter(x_random, y_random, facecolor='none', edgecolors='b', s=50, label='training')
        # Raw string: '\p' in a plain literal is an invalid escape sequence
        # (SyntaxWarning on modern Python); rendered text is unchanged.
        plot_curve(ax=ax, function_plot=target_function, color='g', label=r'$sin(2 \pi x)$')
        # Pass the bound method directly — the old per-iteration lambda (and
        # the unused y_predict/y_target computations) were dead weight.
        plot_curve(ax=ax, function_plot=polynomial_resolve.predict, color='r', label='fitting')
        ax.set_title(title)
    plt.legend(bbox_to_anchor=(1.05, 0.29), loc=2, borderaxespad=0)
    plt.show()
def handle(self, *args, **options):
    """Seed the database with one author and 20 articles liked by that author.

    IDs continue from any existing self.author / self.article (AttributeError
    means none exist yet, so numbering starts at 1). Prints a success message
    used by the command line and by tests.
    """
    # Create author model
    try:
        author_id = 1 + self.author.id
    except AttributeError:
        author_id = 1
    username = '******' + str(author_id)
    author = User.objects.create(
        id=author_id,
        # Bug fix: '******'.format(username) had no placeholder, so the
        # computed username was discarded and the literal '******' stored.
        username=username,
        email='{0}@gmail.com'.format(username),
    )
    author.set_password('password')
    author.save()
    # Create article models
    try:
        article_id = 1 + self.article.id
    except AttributeError:
        article_id = 1
    for unit in range(20):
        article = Article.objects.create(
            title='{0}'.format('news' + str(article_id)),
            body=generate_random_data(length=6, type='string'),
            author=author,
        )
        article.liked_by.add(author)
        article_id += 1
    # Success message for result visualisation in command line and for tests
    self.stdout.write(self.style.SUCCESS('Successfully created 20 posts.'))
from simple_neural_network import SimpleNeuralNetwork
import utils

if __name__ == "__main__":
    # Sample a toy dataset and save a picture of it before training.
    X, Y = utils.generate_random_data()
    utils.plot_data(X, Y, "output/data.png")

    # 1 input -> two hidden layers of 10 units -> 1 output.
    net = SimpleNeuralNetwork(1, [10, 10], 1)
    learning_rate = 0.01

    # Plain gradient descent: propagate, then step every layer's W and b.
    for step in range(10000):
        loss, predictions, dW, db = net.propagate(X, Y)
        for layer in range(len(net.W)):
            net.W[layer] -= learning_rate * dW[layer]
            net.b[layer] -= learning_rate * db[layer]
        if step % 1000 == 0:
            print("loss:", loss)

    # Final forward pass; the last activation cache holds the predictions.
    loss, Z_cache, A_cache = net.propagate_forward(X, Y)
    utils.plot_results(X, Y, A_cache[-1], "output/results.png")