Example #1
def demo_sweep_intersections(n, out):
    for i in range(n):
        try:
            print("{}{}{}{}".format(
                Terminal.bold,
                Terminal.blue,
                i,
                Terminal.end,
            ))
            seed(i)
            xs = random_segments(15)
            (segments, points) = sweep_intersections(xs)
            ax = init_plot()
            plot_points(ax, points)
            plot_segments(ax, segments)
            export("{}/sweep_intersections_{}.png".format(out, i))
            (brute_segments, brute_points) = brute_sweep_intersections(xs)
            ax = init_plot()
            plot_points(ax, brute_points)
            plot_segments(ax, brute_segments)
            export("{}/brute_sweep_intersections_{}.png".format(out, i))
            equal = sorted(points) == sorted(brute_points)
            print("same points : {}{}{}{}\n\n".format(
                Terminal.bold,
                Terminal.green if equal else Terminal.red,
                equal,
                Terminal.end,
            ))
        except Exception:
            # Skip any seed that raises (e.g. degenerate segments) and
            # continue with the next one instead of aborting the demo.
            pass
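For reference, the brute_sweep_intersections baseline that the sweep-line output is compared against is not shown here. A minimal sketch of what such a quadratic check might look like, assuming a pairwise point_of_intersection helper like the one used in Example #6:

def brute_intersections_sketch(segments):
    # Hypothetical O(n^2) baseline: test every pair of segments and collect
    # the crossing points (point_of_intersection is assumed to return None
    # when two segments do not intersect).
    points = []
    for i, a in enumerate(segments):
        for b in segments[i + 1:]:
            point = point_of_intersection(a, b)
            if point is not None:
                points.append(point)
    return (segments, points)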
Example #2
import numpy as np
from sklearn.neural_network import BernoulliRBM
from sklearn.utils import gen_even_slices


# plot, percent_error and reformat_data are project-local helpers.
def run(train_data, test_data):
    batch_size = 10
    n_samples = np.array(train_data).shape[0]
    n_batches = int(np.ceil(float(n_samples) / batch_size))
    batch_slices = list(
        gen_even_slices(n_batches * batch_size, n_batches, n_samples))

    nodes = [50, 75, 100, 150]

    for item in nodes:
        errors = []
        model = BernoulliRBM(n_components=item,
                             learning_rate=0.1,
                             batch_size=10,
                             n_iter=1,
                             random_state=None,
                             verbose=1)
        for _ in range(20):
            for batch_slice in batch_slices:
                model.partial_fit(train_data[batch_slice])
            errors.append(percent_error(model.gibbs(test_data), test_data))
        plot.plot_points(errors)
        plot.plot_heatmap(reformat_data(test_data[0]))
        plot.plot_heatmap(reformat_data(model.gibbs(test_data)[0]))

        if item == 50 or item == 100:
            plot.plot_heatmap(model.components_)  # learned weights, shape (item, 784)
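run() only needs the two data sets; the 784-column reshape above suggests flattened 28x28 images with values in [0, 1]. A minimal driver sketch under that assumption, using synthetic binary vectors (shapes and sizes here are illustrative, not from the project):

import numpy as np

rng = np.random.default_rng(0)
# Hypothetical stand-in data: binary vectors of length 784 in place of
# real flattened 28x28 images.
train_data = (rng.random((1000, 784)) > 0.5).astype(np.float64)
test_data = (rng.random((200, 784)) > 0.5).astype(np.float64)
run(train_data, test_data)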
Example #3
def train_selected_model(activation,
                         learning_rate,
                         momentum,
                         n_points,
                         n_epochs,
                         batch_size,
                         plot_points=False):
    train_data, test_data = data.generate_data(n_points)

    model = train.build_model(activation)
    optimizer = optim.SGD(model.parameters(),
                          lr=learning_rate,
                          momentum=momentum)
    criterion = framework.MSELoss()

    t0 = time.perf_counter()
    history = train.train_model(model, optimizer, criterion, train_data,
                                test_data, n_epochs, batch_size)
    t1 = time.perf_counter()

    result = {
        'train_loss': train.compute_loss(model, criterion, train_data,
                                         batch_size),
        'test_loss': train.compute_loss(model, criterion, test_data,
                                        batch_size),
        'train_err': train.compute_error(model, train_data, batch_size) * 100,
        'test_err': train.compute_error(model, test_data, batch_size) * 100,
        'time': t1 - t0
    }

    if plot_points:
        plot.plot_points(test_data, train_data, model, plot_points)

    return history, result
Example #4
def main():
    # Mutation occurs in genes only.
    plot.canvas().ion()
    plot.canvas().show()

    X = init(population, a=A, b=B)  # random initial population
    generation = 0
    try:
        while True:
            print(f'Generation {generation}')
            plot.canvas().clf()
            plot.plot_function(line)
            plot.plot_function(f)
            plot.plot_points(X, [0 for i in X])
            plot.plot_points(X, [f(i) for i in X])
            plot.canvas().pause(0.5)
            # Crossover produces the next generation; ageing culls part of it.
            X = thrive(X, birth_rate=0.4)
            X = ageing(X, death_rate=0.3)
            generation += 1
    except Exception as e:
        print(e)
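init, thrive and ageing are defined elsewhere in the project. As an assumption only, init might simply draw a random scalar population from the interval [a, b], along these lines:

import random

def init(population, a=0.0, b=1.0):
    # Hypothetical initializer: `population` scalar genes drawn uniformly
    # from the interval [a, b].
    return [random.uniform(a, b) for _ in range(population)]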
Example #5
def demo_convex_hull(n, out):
    for i in range(n):
        seed(i)
        points = random_points(25)
        ax = init_plot()
        plot_points(ax, points)
        plot_segments(ax, convex_hull(points))
        export("{}/convex_hull_{}.png".format(out, i))
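convex_hull itself is not part of this snippet. A minimal sketch of the standard monotone-chain construction, assuming points are (x, y) tuples and the hull is returned as a list of segments so it can be passed to plot_segments:

def convex_hull_sketch(points):
    # Hypothetical monotone-chain hull: builds lower and upper chains and
    # returns the hull edges as ((x1, y1), (x2, y2)) segment pairs.
    def cross(o, a, b):
        return (a[0] - o[0]) * (b[1] - o[1]) - (a[1] - o[1]) * (b[0] - o[0])

    pts = sorted(set(points))
    if len(pts) < 3:
        return []
    lower, upper = [], []
    for p in pts:
        while len(lower) >= 2 and cross(lower[-2], lower[-1], p) <= 0:
            lower.pop()
        lower.append(p)
    for p in reversed(pts):
        while len(upper) >= 2 and cross(upper[-2], upper[-1], p) <= 0:
            upper.pop()
        upper.append(p)
    hull = lower[:-1] + upper[:-1]
    return [(hull[i], hull[(i + 1) % len(hull)]) for i in range(len(hull))]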
Example #6
def demo_point_of_intersection(n, out):
    for i in range(n):
        seed(i)
        segments = random_segments(2)
        point = point_of_intersection(*segments)
        ax = init_plot()
        if point is not None:
            plot_points(ax, [point])
        plot_segments(ax, segments)
        export("{}/point_of_intersection_{}.png".format(out, i))
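point_of_intersection is likewise project code; a sketch of one common way to write it, assuming segments are pairs of (x, y) tuples and that None signals no proper intersection:

def point_of_intersection_sketch(s1, s2):
    # Hypothetical pairwise test: solve p + t*r = q + u*s for t and u and
    # accept the point only if both parameters lie in [0, 1].
    (x1, y1), (x2, y2) = s1
    (x3, y3), (x4, y4) = s2
    rx, ry = x2 - x1, y2 - y1
    sx, sy = x4 - x3, y4 - y3
    denom = rx * sy - ry * sx
    if denom == 0:
        return None  # parallel or collinear
    t = ((x3 - x1) * sy - (y3 - y1) * sx) / denom
    u = ((x3 - x1) * ry - (y3 - y1) * rx) / denom
    if 0 <= t <= 1 and 0 <= u <= 1:
        return (x1 + t * rx, y1 + t * ry)
    return None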
Example #7
def noise():
    data_array = pict_data()
    X0 = data_array[:3]
    W = hopfield.weights(X0)
    for x in X0:
        error = []
        # Flip 0%, 10%, ..., 100% of the units, recall, and record the error.
        for i in range(0, 101, 10):
            choices = np.random.choice(len(x), size=int(i * len(x) / 100), replace=False)
            x_noise = add_noise(x, choices)
            error.append(calculate_error(x, hopfield.recall_until_stable(W, x_noise)))
        print(error)
        plot.plot_points(error)
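pict_data, add_noise and calculate_error are project helpers. Under the usual Hopfield convention of bipolar (+1/-1) patterns, the two noise helpers might look roughly like this (an assumption, not the project's code):

import numpy as np

def add_noise(x, choices):
    # Hypothetical helper: flip the sign of the selected units of a
    # bipolar (+1/-1) pattern.
    x_noise = np.array(x, copy=True)
    x_noise[choices] *= -1
    return x_noise

def calculate_error(x, x_recalled):
    # Hypothetical helper: fraction of units that differ after recall.
    return float(np.mean(np.asarray(x) != np.asarray(x_recalled)))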
Example #8
def train_selected_model(activation: ty.Union[framework.Tanh, framework.ReLU],
                         learning_rate: float,
                         momentum: float,
                         n_points: int,
                         n_epochs: int,
                         batch_size: int,
                         track_history: bool = False,
                         plot_points: bool = False):
    """
    Train a miniproject model with a given activation using SGD and MSE loss.

    :param activation: activation function
    :param learning_rate: SGD learning rate
    :param momentum: SGD momentum
    :param n_points: number of points in training and test data
    :param n_epochs: number of epochs
    :param batch_size: batch size
    :param track_history: track training and test error and loss by epoch
    :param plot_points: generate plots visualizing model predictions on the training and test data
    :returns: (history dictionary, final results)
    """
    train_data, test_data = data.generate_data(n_points)

    model = train.build_model(activation)
    optimizer = framework.SGD(model, lr=learning_rate, momentum=momentum)
    criterion = framework.MSELoss(model)

    t0 = time.perf_counter()
    history = train.train_model(model, optimizer, criterion, train_data,
                                test_data, n_epochs, batch_size, track_history)
    t1 = time.perf_counter()

    result = {
        'train_loss': train.compute_loss(model, criterion, train_data,
                                         batch_size),
        'test_loss': train.compute_loss(model, criterion, test_data,
                                        batch_size),
        'train_err': train.compute_error(model, train_data, batch_size) * 100,
        'test_err': train.compute_error(model, test_data, batch_size) * 100,
        'time': t1 - t0
    }

    if plot_points:
        plot.plot_points(test_data, train_data, model, plot_points)

    return history, result
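A usage sketch for the function above; the activation instance and all hyperparameter values are illustrative assumptions, not values taken from the project:

# Hypothetical call: framework.Tanh() is assumed to be constructible with
# no arguments, and every hyperparameter below is a placeholder.
history, result = train_selected_model(activation=framework.Tanh(),
                                       learning_rate=0.01,
                                       momentum=0.9,
                                       n_points=1000,
                                       n_epochs=25,
                                       batch_size=50,
                                       track_history=True)
print(result)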
Example #9
for item in nodes:
    encoding_dim = item

    # this is our input placeholder
    input_img = Input(shape=(784, ))
    encoded = Dense(encoding_dim, activation='relu')(input_img)
    decoded = Dense(784, activation='sigmoid')(encoded)
    autoencoder = Model(input_img, decoded)

    autoencoder.compile(optimizer='SGD',
                        loss='binary_crossentropy',
                        metrics=['binary_accuracy'])

    history = History()

    autoencoder.fit(train_data,
                    train_data,
                    epochs=20,
                    batch_size=10,
                    shuffle=True,
                    verbose=0,
                    callbacks=[history],
                    validation_data=(test_data, test_data))

    errors = [1 - x for x in history.history['val_binary_accuracy']]

    plot.plot_points(errors)
    plot.plot_heatmap(reformat_data(test_data[0]))
    plot.plot_heatmap(reformat_data(autoencoder.predict(test_data)[0]))
    if item == 50 or item == 100:
        plot.plot_heatmap(autoencoder.layers[1].get_weights()[0].T)  # encoder weights
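This last snippet is a fragment: nodes, train_data, test_data and the Keras classes all come from outside it, and plot/reformat_data are project helpers. One plausible setup, assuming flattened MNIST digits scaled to [0, 1] and the same hidden sizes as in Example #2:

import numpy as np
from tensorflow.keras.callbacks import History
from tensorflow.keras.datasets import mnist
from tensorflow.keras.layers import Dense, Input
from tensorflow.keras.models import Model

nodes = [50, 75, 100, 150]  # hidden sizes, mirroring Example #2 (assumption)

(x_train, _), (x_test, _) = mnist.load_data()
train_data = x_train.reshape(-1, 784).astype(np.float32) / 255.0
test_data = x_test.reshape(-1, 784).astype(np.float32) / 255.0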