Beispiel #1
0
def gmm(K, X, iterations):
    """Fit a Gaussian Mixture Model with K components to data X via EM.

    Runs `iterations` rounds of E/M steps, records the loss after each
    round, plots the fitted component contours over the data and the loss
    curve (saved to 'GMM_cost.png'), and returns the final parameters.

    Args:
        K: number of mixture components.
        X: dataset of 2-D points, indexable as X[i][0], X[i][1].
        iterations: number of EM iterations to run.

    Returns:
        (mu, sigma, pi): final component means, covariances, and weights.
    """
    mu, sigma, pi = init(K, X)
    cost = []
    for run in range(iterations):
        gamma = e_step(X, mu, pi, sigma, K)
        pi, mu, sigma = m_step(X, gamma, sigma, mu, pi, K)
        cost.append(loss_function(X, pi, mu, sigma, gamma, K))

    # Split the 2-D points into x/y coordinate lists for plotting
    # (replaces the index-based range(len(X)) loop).
    x = [point[0] for point in X]
    y = [point[1] for point in X]

    # One contour per fitted Gaussian component.
    for i in range(K):
        U.plot_contour(mu[i], sigma[i], x, y, plt)

    # Loss curve over EM iterations; the x-axis is simply the iteration
    # index, so there is no need to accumulate it inside the loop.
    plt.figure()
    plt.plot(range(iterations), cost, color='green')
    plt.savefig('GMM_cost.png')
    plt.close()

    return mu, sigma, pi
Beispiel #2
0
def ex2():
    """Train a neural network with mini-batch SGD and report accuracies.

    Generates a synthetic dataset, trains for EPOCH epochs (reshuffling the
    training set each epoch), shows train/validation accuracy on the
    progress bar, prints the final test accuracy, and plots the learned
    decision boundary.
    """
    x, y, x_train, x_val, x_test, y_train, y_val, y_test = generate_dateset()

    nn = NeuralNetwork(NN_SHAPE, LEARNING_RATE)
    pbar = tqdm(range(EPOCH))
    for e in pbar:
        # Reshuffle every epoch so mini-batches differ between epochs.
        shuffle(x_train, y_train)
        nn.train_sgd(x_train, y_train, batch_size=BATCH_SIZE)

        # compute train and validation accuracy
        # Fixed: "accurracy" spelling in the local name, and the progress
        # bar previously mislabelled the validation score as "Test".
        train_accuracy = nn.evaluate(x_train, y_train)
        val_accuracy = nn.evaluate(x_val, y_val)
        pbar.set_description(
            f"Epoch {e:03}/{EPOCH} - Train {train_accuracy:.3f}% - Val {val_accuracy:.3f}% "
        )

    # compute test accuracy
    test_accuracy = nn.evaluate(x_test, y_test)
    print(f"Test {test_accuracy:.3f}%")

    # plot NN borders
    plot_contour(nn, x, y)
Beispiel #3
0
def ex2():
    """Train a neural network with per-sample (online) updates.

    Same flow as the mini-batch variant: generate the dataset, train for
    EPOCH epochs, track train/validation accuracy on the progress bar,
    print the final test accuracy, and plot the decision boundary.

    Fixed: the two TODO placeholders left train/val/test accuracies
    undefined, so the f-strings below raised NameError at runtime; they
    are now computed with nn.evaluate, matching the mini-batch version.
    """
    x, y, x_train, x_val, x_test, y_train, y_val, y_test = generate_dateset()

    nn = NeuralNetwork(NN_SHAPE, LEARNING_RATE)
    pbar = tqdm(range(EPOCH))
    for e in pbar:
        shuffle(x_train, y_train)
        # Online training: one gradient update per sample.
        for _x, _y in zip(x_train, y_train):
            nn.train(_x, _y)

        # compute train and validation accuracy
        train_accurracy = nn.evaluate(x_train, y_train)
        val_accuracy = nn.evaluate(x_val, y_val)
        pbar.set_description(
            f"Epoch {e:03}/{EPOCH} - Train {train_accurracy:.3f}% - Test {val_accuracy:.3f}% "
        )

    # compute test accuracy
    test_accuracy = nn.evaluate(x_test, y_test)
    print(f"Test {test_accuracy:.3f}%")

    # plot NN borders
    plot_contour(nn, x, y)
  self.t += self.alphas[i] * self.support_vector_labels[i] * kernel_matrix[ind[i], 0]
self.t -= self.support_vector_labels[0]
"""
# OR
"""
self.t = 0
for n in range(len(self.alphas)):
  self.t += np.sum(self.alphas * self.support_vector_labels * kernel_matrix[ind[n],idx])
  self.t -= self.support_vector_labels[n]
self.t /= len(self.alphas)
"""

from utils import create_dataset, plot_contour

# Fixed seed so the generated dataset (and hence both model fits below)
# are reproducible.
np.random.seed(1)
X, y = create_dataset(N=50)

from sklearn.svm import SVC

# Custom RBF-kernel SVM: fit on the full dataset, print training accuracy,
# and plot the learned decision boundary.
# NOTE(review): `accuracy_score` and `SupportVectorMachine` are assumed to
# be imported/defined earlier in this file — not visible in this chunk.
model = SupportVectorMachine(kernel_name='rbf', power=2, coef=2, gamma=1)
model.fit(X, y)
y_pred = model.predict(X)
print('Acc:', accuracy_score(np.array(y_pred), np.array(y)))
plot_contour(X, y, model)

# scikit-learn reference SVC with matching hyperparameters, run on the same
# data for a side-by-side comparison with the custom implementation above.
model = SVC(kernel='rbf', degree=2, coef0=2, gamma=1)
model.fit(X, y)
y_pred = model.predict(X)
print('Acc:', accuracy_score(np.array(y_pred), np.array(y)))
plot_contour(X, y, model)
Beispiel #5
0
        for i in range(X.shape[0]):
            y_predict[i] = np.sum(
                self.alphas[sv]
                * self.y[sv, np.newaxis]
                * self.kernel(X[i], self.X[sv])[:, np.newaxis]
            )

        return np.sign(y_predict + self.b)

    def get_parameters(self, alphas):
        """Select the support vectors and derive the primal parameters w, b.

        Args:
            alphas: dual coefficients from the QP solution, one per
                training sample.

        Returns:
            Boolean mask over the training samples marking which ones are
            support vectors.
        """
        # Alphas numerically at zero are non-support vectors; alphas at C
        # sit on/inside the margin — keep only the strictly interior ones.
        threshold = 1e-5

        sv = ((alphas > threshold) * (alphas < self.C)).flatten()
        # Primal weight vector: w = sum_i alpha_i * y_i * x_i over the
        # support vectors.
        self.w = np.dot(self.X[sv].T, alphas[sv] * self.y[sv, np.newaxis])
        # Bias averaged over support vectors via the KKT conditions.
        # NOTE(review): `self.K[sv, sv]` fancy-indexes only the *diagonal*
        # kernel entries K(x_i, x_i), not the full sub-Gram matrix
        # K[sv][:, sv] — confirm this matches the intended bias formula.
        self.b = np.mean(
            self.y[sv, np.newaxis]
            - self.alphas[sv] * self.y[sv, np.newaxis] * self.K[sv, sv][:, np.newaxis]
        )
        return sv


if __name__ == "__main__":
    # Fixed seed -> reproducible toy dataset.
    np.random.seed(1)
    X, y = create_dataset(N=50)

    # Fit a Gaussian-kernel SVM on the data and visualize its decision
    # boundary together with the points.
    clf = SVM(kernel=gaussian)
    clf.fit(X, y)
    y_pred = clf.predict(X)
    plot_contour(X, y, clf)

    # Fraction of training points classified correctly.
    print(f"Accuracy: {sum(y==y_pred)/y.shape[0]}")
Beispiel #6
0
            # print cost sometimes
            if it % 2500 == 0:
                print(f'At iteration {it} we have a cost of {cost}')

            # back prop
            grads = self.back_prop(cache, parameters, y)

            #update parameters
            parameters = self.update_parameters(parameters, grads)

        return parameters


if __name__ == '__main__':
    # Build a 3-class toy dataset; labels must be integer class ids.
    X, y = create_dataset(300, K=3)
    y = y.astype(int)

    # Fit the network on the full dataset.
    NN = NeuralNetwork(X, y)
    trained_parameters = NN.main(X, y)

    # Unpack the learned weights and biases, layer by layer.
    W1, b1 = trained_parameters['W1'], trained_parameters['b1']
    W2, b2 = trained_parameters['W2'], trained_parameters['b2']

    # Plot the decision boundary (for nice visualization)
    plot_contour(X, y, NN, trained_parameters)
coords_A = np.loadtxt(
    '/home/giacomol/Desktop/Research/windLoading/windTunnel/PoliMi/coords_A0')
coords_B = np.loadtxt(
    '/home/giacomol/Desktop/Research/windLoading/windTunnel/PoliMi/coords_B0')

# labels
#cp_rms_LES = np.sqrt(np.loadtxt('data/' + split + '/LES_mesh/pPrime2Mean_' + angle + 'deg.raw')[:,3])/(0.5*7.7**2)
#cp_rms_RANS = np.sqrt(np.loadtxt('data/' + split + '/RANS_mesh/pPrime2Mean_' + angle + 'deg.raw'))/(0.5*7.7**2)

# rotate coordinates
ang = -int(angle) * np.pi / 180
rotation_matrix = np.array([[np.cos(ang), 0, np.sin(ang)], [0, 1, 0],
                            [-np.sin(ang), 0, np.cos(ang)]])

coords_RANS_rotated = coords_RANS.dot(rotation_matrix)
'''
#plot_contour(coords_LES_rotated, cp_rms_LES, [0, 0.3], '')
plot_contour(coords_RANS, cp_rms_RANS, [0, 0.3], '')

# select probes:
index = (coords_LES_rotated[:,2] > 0.299) * (coords_LES_rotated[:,0] > 0.75) * (coords_LES_rotated[:,1] > 1.75)
#index = np.random.choice(np.nonzero(index)[0], size=1000)

Y_values = np.unique(coords_LES_rotated[index,1])
Y_values = Y_values[0::10]

# make contour plot
plt.rcParams.update({'font.size': 18})   
x = coords_LES_rotated[index,0]+0.5
y = coords_LES_rotated[index,1]
z = cp_rms_LES[index]