ada = Adadelta(lr=0.1, rho=0.95, epsilon=1e-08)
    model.compile(loss='categorical_crossentropy',
                  optimizer=ada,
                  metrics=['accuracy'])
    model.summary()
    return model


# Training-script constants: 48x48 grayscale images, 7 classes.
img_rows, img_cols = 48, 48   # input image height / width
batch_size = 128
nb_classes = 7                # number of output classes
nb_epoch = 1200
img_channels = 1              # grayscale

# NOTE(review): dataprocessing.load_data is project-local; assumed to return
# (train images, train labels, val images, val labels) -- confirm against module.
Train_x, Train_y, Val_x, Val_y = dataprocessing.load_data()

# Reshape directly to (samples, channels, rows, cols).  The original code
# first reshaped to a redundant 3-D (samples, rows, cols) view and then
# immediately reshaped again to 4-D; a single reshape is equivalent.  Using
# img_channels instead of a hard-coded 1 keeps the constant authoritative.
Train_x = numpy.asarray(Train_x).reshape(-1, img_channels, img_rows, img_cols)
Val_x = numpy.asarray(Val_x).reshape(-1, img_channels, img_rows, img_cols)

Train_x = Train_x.astype('float32')
Val_x = Val_x.astype('float32')

# One-hot encode the integer class labels.
Train_y = np_utils.to_categorical(Train_y, nb_classes)
Val_y = np_utils.to_categorical(Val_y, nb_classes)
# --- Exemplo n.º 2 ---
import linear_regression as rg
import dataprocessing as dp
import numpy as np
import matplotlib.pyplot as plt

# Loading training examples
X = dp.load_data('linearX.csv')
y = dp.load_data('linearY.csv')

X = X.reshape((-1, 1))  # ensure X is a column vector of shape (m, 1)

# Normalizing the training examples.
# NOTE(review): 'meu' looks like a typo for 'mu' (mean); name kept so any
# later code referencing it still works.
X, meu, sigma = dp.normalize(X)

# Initialising parameters for gradient descent
m, n = X.shape
init_theta = np.zeros(n + 1)  # one parameter per feature plus intercept
epsilon = 1e-10               # convergence threshold
eta = [0.001, 0.005, 0.009, 0.013, 0.017]  # learning rates to compare
color = ['red', 'blue', 'green', 'yellow', 'magenta']

plt.ion()
for i in range(len(eta)):
    # Executing gradient descent at this learning rate and plotting the
    # cost-vs-iteration curve.
    theta, iterations, theta_history, cost_history = rg.linear_reg(
        X, y, init_theta, eta[i], epsilon)
    plt.plot(list(range(0, iterations + 1)),
             cost_history,
             color=color[i],
             label='eta = ' + str(eta[i]))
# FIX: labels were assigned above but never displayed -- without a legend()
# call matplotlib does not render them.
plt.legend()
# --- Exemplo n.º 3 ---
import logistic_regression as log_rg
import dataprocessing as dp
import numpy as np

# Loading training examples: features and integer class labels.
X = dp.load_data('logisticX.csv')
y = dp.load_data('logisticY.csv', dtype='int')

# Normalizing the training examples.
# NOTE(review): 'meu' is presumably a typo for 'mu' (mean) -- confirm against
# dp.normalize, which appears to return (data, mean, std).
X, meu, sigma = dp.normalize(X)

m, n = X.shape
init_theta = np.zeros(n + 1)  # one parameter per feature plus the intercept
epsilon = 1e-10               # convergence threshold for Newton's method

# Fit logistic regression via Newton's method.
theta, iterations = log_rg.newton(X, y, init_theta, epsilon)

# Plotting the data and hypothesis
dp.plot_classification_data(X, y, ['Negative', 'Positive'])

print('No. of iterations = ', iterations)
print('Theta = ', theta)

# Pause so the user can inspect the scatter plot before the boundary is drawn.
input('Press Enter to draw hypothesis')

# Plotting decision boundary: sample the first feature's range (padded by 1 on
# each side) and let the project helper draw the linear boundary from theta.
x_test = np.linspace(np.amin(X[:, 0]) - 1, np.amax(X[:, 0]) + 1, num=500)
dp.plot_linear_decision_boundary(x_test, theta)

input('Press Enter to close')
dp.plot_close()
      
    model.add(Activation('softmax'))

    ada = Adadelta(lr=0.1, rho=0.95, epsilon=1e-08)
    model.compile(loss='categorical_crossentropy',
                  optimizer=ada,
                  metrics=['accuracy'])
    model.summary()
    return model
# Training-script constants: 48x48 grayscale images, 7 classes.
img_rows, img_cols = 48, 48   # input image height / width
batch_size = 128
nb_classes = 7                # number of output classes
nb_epoch = 1200
img_channels = 1              # grayscale

# NOTE(review): dataprocessing.load_data is project-local; assumed to return
# (train images, train labels, val images, val labels) -- confirm against module.
Train_x, Train_y, Val_x, Val_y = dataprocessing.load_data()

# Reshape directly to (samples, channels, rows, cols).  The original code
# first reshaped to a redundant 3-D (samples, rows, cols) view and then
# immediately reshaped again to 4-D; a single reshape is equivalent.  Using
# img_channels instead of a hard-coded 1 keeps the constant authoritative.
Train_x = numpy.asarray(Train_x).reshape(-1, img_channels, img_rows, img_cols)
Val_x = numpy.asarray(Val_x).reshape(-1, img_channels, img_rows, img_cols)

Train_x = Train_x.astype('float32')
Val_x = Val_x.astype('float32')

# One-hot encode the training labels.
Train_y = np_utils.to_categorical(Train_y, nb_classes)