Example #1
0
def init_database(conn):
    """Initialize the database with somewhat randomized data.

    Prompts the user for a schema name, then creates that schema, its
    tables, and generated rows, all on the supplied connection.

    Arguments:
        - conn - postgres connection object
    """
    schema_name = input("Input a schema name to be created: ")
    create_schema(conn, schema_name)
    create_tables(conn, schema_name)
    generate_data(conn, schema_name)
Example #2
0
def train(path,BATCH_SIZE,IMG_SIZE,n_classes,save_path):
    """Fine-tune a NASNetLarge backbone on the images found under ``path``.

    Builds a batch generator, attaches a transfer-learning head with
    ``n_classes`` outputs, trains the model, and saves it to ``save_path``.
    """
    # Batches are produced by the project helper using NASNet preprocessing.
    data_gen = helpers.generate_data(BATCH_SIZE, IMG_SIZE, preprocess_input, path)

    # ImageNet weights with the original classification head stripped off.
    backbone = NASNetLarge(
        input_shape=(IMG_SIZE, IMG_SIZE, 3),
        include_top=False,
        weights='imagenet',
    )

    model = helpers.transfer_learning(backbone, n_classes)
    helpers.fit_model(model, data_gen, save_path)
Example #3
0
def squared_sin_data_set():
    """Return a PreloadSource over sin(x)**2 sampled on [0, 3.14] in 0.1 steps."""
    def squared_sin(x):
        return math.sin(x) ** 2

    samples = helpers.generate_data(f=squared_sin,
                                    start_value=0,
                                    end_value=3.14,
                                    step_value=0.1)
    return PreloadSource(samples)
def update_data(attrname, old, new):
    """Widget callback: resample both stimulus clouds and refresh the plots.

    Reads the current slider values (per-dimension means, variances, and the
    shared covariance), draws new data for each stimulus, pushes it into the
    plot data sources, then updates the axes and the metrics table.
    """
    c = covariance.value
    var1 = n1_variance.value
    var2 = n2_variance.value
    mean1 = [n1_u1.value, n2_u1.value]
    mean2 = [n1_u2.value, n2_u2.value]

    # Both stimuli share a single covariance matrix.
    cov = np.array([[var1, c], [c, var2]])

    # Redraw stimulus 1.
    data1 = help.generate_data(mean1, cov)
    source_s1.data = dict(x=data1[:, 0], y=data1[:, 1])

    # Redraw stimulus 2 (same covariance, different mean).
    data2 = help.generate_data(mean2, cov)
    source_s2.data = dict(x=data2[:, 0], y=data2[:, 1])

    update_axes(data1, data2)

    update_table(data1, data2)
    def test_gives_correct_output_for_unseen_data(self):
        """A briefly trained net should approximate x**2 at an unseen point."""
        net = NetFactory.create_neural_net(sizes=[1, 10, 1])
        cost = QuadraticCost(neural_net=net)
        trainer = GradientDescent(neural_net=net, cost_function=cost)

        def square(x):
            return x**2

        # Training samples from [-0.6, -0.4] in steps of 0.005.
        dataset = helpers.generate_data(f=square,
                                        start_value=-0.6,
                                        end_value=-0.4,
                                        step_value=0.005)

        trainer.train(data_src=PreloadSource(dataset), nepochs=10)

        # Probe a point that lies strictly between training samples.
        x0 = -0.5000125
        y0 = square(x0)

        prediction = net.feed(np.array([x0], float))
        self.assertAlmostEqual(prediction[0], y0, places=1)
Example #6
0
from helpers import generate_data, progress

# ---LOAD MODELS---
# Embedding network used to turn face images into feature vectors.
face_model = load_model('./models/facenet_keras.h5')

# ---GENERATING DATA---
data_folder = './data/'

# Paths to training images
image_paths = [data_folder + name for name in os.listdir(data_folder)]

print("Generating Dataset...")

# One entry per image; entries are indexed as (features, label) pairs below.
dataset = [generate_data(face_model, img_path) for img_path in image_paths]

X = np.array([pair[0] for pair in dataset])
y = np.array([pair[1] for pair in dataset])

# ---TRAINING---
print("Dataset generated. Start training...")

# Map the raw labels onto integer class ids.
encoder = LabelEncoder()
y = encoder.fit_transform(y)

# Linear SVM with probability estimates enabled.
classify_model = SVC(kernel='linear', probability=True)
classify_model.fit(X, y)
Example #7
0
import numpy as np
import matplotlib.pyplot as plt
from helpers import generate_data, LinearRegression, calculate_rmse

# data range and number of points
l_bound = 0
r_bound = 100
n = 1000
data = generate_data(l_bound, r_bound, n)

# Fit a simple linear regression to the (x, y) sample.
linreg = LinearRegression()
linreg.fit(data)

# Regression line evaluated on an evenly spaced grid over the data range.
xx = np.linspace(l_bound, r_bound, n)
yy = np.array(linreg.b[0] + linreg.b[1] * xx)

# Hold-out check: a fresh sample one tenth the size, from the same range.
check_data = generate_data(l_bound, r_bound, n // 10)
pred_x = [[xi] for xi in check_data[:, 0]]
actual_y = check_data[:, 1]
pred_y = linreg.predict(pred_x)

rmse = calculate_rmse(actual_y, pred_y)
print('rmse:', rmse)

# Blue: fitted line; cyan: training points; red: hold-out; magenta: predictions.
plt.figure(1)
plt.plot(xx, yy.T, color='tab:blue')
plt.scatter(data[:, 0], data[:, 1], color='c')
plt.scatter(check_data[:, 0], check_data[:, 1], color='r')
plt.scatter(pred_x, pred_y, color='m')
from bokeh.layouts import column, grid, row
from bokeh.models import Button
from bokeh.palettes import RdYlBu3
from bokeh.plotting import figure, curdoc
from bokeh.models import Button, ColumnDataSource, ColorBar
from bokeh.models.glyphs import Line
from bokeh.models.widgets import Slider, TextInput, DataTable, TableColumn
import helpers as help
import numpy as np

# generate initial data set: two 2-D stimuli, identity covariance
u1 = [2, 2]
u2 = [4, 4]
cov = np.array([[1, 0], [0, 1]])
x = help.generate_data(u1, cov)
y = help.generate_data(u2, cov)

# decoding axes and dprime values for this starting point, rounded for display
values = np.round(help.get_table_values(x, y), 3)
metrics = [
    'dprime_pop_LDA',
    'dprime_pop_NULL',
    'dprime_ind',
    'ratio_LDA',
    'ratio_NULL',
    'cos(NULL, Noise)',
    'cos(LDA, Noise)',
    'rsc',
]

# table widget listing one row per metric
decoding_metrics = dict(metrics=metrics, values=values)
source = ColumnDataSource(decoding_metrics)
columns = [
    TableColumn(field="metrics", title="Metric"),
    TableColumn(field="values", title="Value"),
]
data_table = DataTable(source=source, columns=columns, width=400, height=280)
# set the data source for two stimuli/signals that are trying to be discriminated
Example #9
0
def main():
    """Run the kernel-regression exercise: fit polynomial and RBF kernels,
    plot predictions with bootstrap percentile bands, then (parts d/e)
    repeat with k-fold-tuned kernels and compare bootstrap values on
    fresh data.

    Relies on module-level names: ``h`` (data/plot helpers), ``c``
    (constants/labels), ``Kernel``, ``get_bootstrap_values``, and the
    ``to_run`` dict of section switches.
    """

    # 30 samples plus the underlying true function they were drawn from.
    X, Y, true_f = h.generate_data(30)

    # individual elements over entire matrix
    # Kernel functions: polynomial (1 + xz)^d and RBF exp(-gamma (x - z)^2).
    kf_poly = lambda x, z, d: (1 + x * z)**d
    kf_rbf = lambda x, z, gamma: np.exp(-gamma * ((x - z)**2))

    if to_run["abc"]:
        """
    part (a)
    """

        # Fit one Kernel wrapper per kernel function.
        k_poly = Kernel(X, Y, kf_poly)
        k_rbf = Kernel(X, Y, kf_rbf)
        """
    part (b)
    """

        # True curve evaluated on the plotting grid, plus each fit's predictions.
        true_data = [true_f(x_val) for x_val in c.x_list]
        poly_pred_data = k_poly.get_fhat_data(X, Y)
        rbf_pred_data = k_rbf.get_fhat_data(X, Y)

        poly_list = [true_data, poly_pred_data]
        rbf_list = [true_data, rbf_pred_data]

        h.plot_multiple("poly", "a3_bi", X, Y, poly_list, c.pred_labels,
                        c.a3b_ylimits)
        h.plot_multiple("rbf", "a3_bii", X, Y, rbf_list, c.pred_labels,
                        c.a3b_ylimits)
        """
    part (c)
    """

        # 5th/95th bootstrap percentiles; c.B is presumably the number of
        # bootstrap replicates — TODO confirm against the constants module.
        poly_5, poly_95 = k_poly.bootstrap(c.B)
        rbf_5, rbf_95 = k_rbf.bootstrap(c.B)

        poly_list = [true_data, poly_pred_data, poly_5, poly_95]
        rbf_list = [true_data, rbf_pred_data, rbf_5, rbf_95]

        h.plot_multiple("poly", "a3_ci", X, Y, poly_list, c.pct_labels,
                        c.a3b_ylimits)
        h.plot_multiple("rbf", "a3_cii", X, Y, rbf_list, c.pct_labels,
                        c.a3b_ylimits)

    if to_run["de"]:
        """
    part (d)
    """

        # repeated a
        # Larger sample (300) and kfold=True for hyperparameter selection.
        X, Y, true_f = h.generate_data(300)

        k_poly = Kernel(X, Y, kf_poly, kfold=True)
        k_rbf = Kernel(X, Y, kf_rbf, kfold=True)

        # repeated b
        true_data = [true_f(x_val) for x_val in c.x_list]
        poly_pred_data = k_poly.get_fhat_data(X, Y)
        rbf_pred_data = k_rbf.get_fhat_data(X, Y)

        poly_list = [true_data, poly_pred_data]
        rbf_list = [true_data, rbf_pred_data]

        h.plot_multiple("poly", "a3_d.bi", X, Y, poly_list, c.pred_labels,
                        c.a3b_ylimits)
        h.plot_multiple("rbf", "a3_d.bii", X, Y, rbf_list, c.pred_labels,
                        c.a3b_ylimits)

        # repeated c
        poly_5, poly_95 = k_poly.bootstrap(c.B)
        rbf_5, rbf_95 = k_rbf.bootstrap(c.B)

        poly_list = [true_data, poly_pred_data, poly_5, poly_95]
        rbf_list = [true_data, rbf_pred_data, rbf_5, rbf_95]

        h.plot_multiple("poly", "a3_d.ci", X, Y, poly_list, c.pct_labels,
                        c.a3b_ylimits)
        h.plot_multiple("rbf", "a3_d.cii", X, Y, rbf_list, c.pct_labels,
                        c.a3b_ylimits)
        """
    part (e)
    """

        # Fresh data of size c.m, predicted with the hyperparameters
        # (hp, lamb) chosen for each kernel in part (d).
        X, Y, true_f = h.generate_data(c.m)

        poly_pred = k_poly.kernel_rr(X, Y, k_poly.hp, k_poly.lamb)
        rbf_pred = k_rbf.kernel_rr(X, Y, k_rbf.hp, k_rbf.lamb)

        bs_5, bs_95 = get_bootstrap_values(X, Y, poly_pred, rbf_pred)

        print(bs_5)
        print(bs_95)