Code Example #1
    def bt_show_image(self):
        self.textedit.append("Make prediction ...")

        # load the image, resize it to the network's 32x32 input, and normalize
        img = load_img(self.image_path).resize((32, 32))
        x = img_to_array(img)
        x = normalize(x.reshape((1,) + x.shape))
        # take the class with the highest predicted probability
        result = np.argmax(self.cnn.single_predict(x))

        self.textedit.append("The food is: {}".format(self.food_list[result]))
        # self.show_index = np.random.randint(3, 1000)
        # for i in range(3):
        self.lb_target2.setPixmap(
            QPixmap("./{}/2.jpg".format(result)).scaled(100, 100))
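Each of these examples calls a project-local normalize helper whose definition is not shown. As a point of reference, here is a minimal sketch, assuming simple min-max scaling to [0, 1] (the actual helpers in these projects may differ, e.g. z-score or per-channel scaling):

import numpy as np

def normalize(x):
    # hypothetical stand-in for the projects' own helpers: scale the whole
    # array to the [0, 1] range
    x = np.asarray(x, dtype=np.float32)
    span = x.max() - x.min()
    return (x - x.min()) / span if span > 0 else np.zeros_like(x)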
Code Example #2
import numpy as np
from load import loadLasso, normalize
import matplotlib.pyplot as plt
from equations_regression import *

np.random.seed(40)

# load lasso data and normalize:
X, y = loadLasso(version="train")
Xv, yv = loadLasso(version="test")
X = normalize(X)
Xv = normalize(Xv)

# add bias term (column of ones):
x = np.ones((X.shape[0], X.shape[1] + 1))
x[:, 1:] = X
xv = np.ones((Xv.shape[0], Xv.shape[1] + 1))
xv[:, 1:] = Xv

# store data dimensions:
p = np.shape(x)[0]  # samples -> 50
n = np.shape(x)[1]  # dimensions -> 101

# set regularization term:
alpha = np.logspace(0.3, 0.6, 100)

# initialize for cross fold validation:
cost_crossfold_ridge_test = np.zeros(len(alpha))

counter = 0
# cross validation:
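The listing is cut off right before the loop. A plausible continuation, sketched with a plain NumPy closed-form ridge solution and a fixed 5-fold split (an assumption; the project's own helpers from equations_regression are not shown here):

k = 5
fold = p // k
for decay in alpha:
    cost = 0.0
    for f in range(k):
        # hold out one fold for testing, train on the rest
        test_idx = np.arange(f * fold, (f + 1) * fold)
        train_idx = np.setdiff1d(np.arange(p), test_idx)
        # closed-form ridge: w = (X^T X + decay * I)^(-1) X^T y
        A = x[train_idx].T @ x[train_idx] + decay * np.eye(n)
        w = np.linalg.solve(A, x[train_idx].T @ y[train_idx])
        resid = x[test_idx] @ w - y[test_idx]
        cost += np.mean(resid ** 2)
    cost_crossfold_ridge_test[counter] = cost / k
    counter += 1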
Code Example #3
from imports import *
from load import load_data, mapValues, getgen, normalize
from schedule import getcallbacks, totalepochs
from model import getmodel

Xall, Yall, Xtest, Ytest = load_data()
Xall, Xtest = Xall.astype(np.float32), Xtest.astype(np.float32)
Nall, Ntest = Xall.shape[0], Xtest.shape[0]

print(
    "Training data before split: {}\nTest data: {}\nTraining labels before split: {}\nTest labels: {}"
    .format(Xall.shape, Xtest.shape, Yall.shape, Ytest.shape))
Xall, Xtest = mapValues(Xall, 0, 255, save=True), mapValues(Xtest, 0, 255)
Xall, Xtest = normalize(Xall), normalize(Xtest)

print("Intensities after scaling: min={}, max={}, mean={}, std={}".format(
    np.min(Xall.flatten()), np.max(Xall.flatten()), np.mean(Xall.flatten()),
    np.std(Xall.flatten())))

Xtrain, Xval, Ytrain, Yval = train_test_split(Xall,
                                              Yall,
                                              test_size=0.1,
                                              random_state=SEED)
print("Train data: {}, Validation data: {}".format(Xtrain.shape, Xval.shape))
model = getmodel()
if DO_TRAIN:
    history = model.fit_generator(generator=getgen().flow(Xtrain,
                                                          Ytrain,
                                                          batch_size=BS),
                                  steps_per_epoch=int(Xtrain.shape[0] / BS),
                                  epochs=totalepochs(SCHEDULE),
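                                  # the listing is truncated here; the remaining
                                  # arguments below are an assumed completion,
                                  # not the project's verbatim code
                                  validation_data=(Xval, Yval),
                                  callbacks=getcallbacks(SCHEDULE),
                                  verbose=1)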
Code Example #4
File: main.py  Project: iverasp/ntnu
def get_ai_moves(game):
    # convert the board to a normalized NumPy array and let the trained
    # network choose the next move
    board = get_board_weird(game)
    board = numpy.asarray(board)
    board = normalize(board)
    #board = append_snake(board)
    return ann.predict_move(board)
Code Example #5
import numpy as np
from load import normalize  # assumed: the project-local helper, as in Example #2


def correlated_inputs():
    n = 3  # dimensions
    p = 1000  # samples
    #w = [2.0, 3.0, 0.0]
    w = [-2.0, 3.0, 0.0]
    x = np.zeros((p, n))
    x[:, 0:2] = np.random.randn(p, 2)
    x[:, 2] = (2.0 / 3.0) * x[:, 0] + (2.0 / 3.0) * x[:, 1] + (
        1.0 / 3.0) * np.random.randn(p)
    y = np.matmul(w, np.transpose(x)) + np.random.randn(p)
    return x, y


# load data:
X, y = correlated_inputs()
x = normalize(X)

# store data dimensions:
p = np.shape(x)[0]  # samples -> 1000
n = np.shape(x)[1]  # dimensions -> 3

# set regularization term:
alpha = np.logspace(-5, 2, 100)

# initialize for cross fold validation:
weights_crossfold = np.zeros((len(alpha), n))

counter = 0
# cross validation:
for decay in alpha:
    if counter == 0:
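        # the listing is cut off here; the continuation below is a sketch (an
        # assumption) using scikit-learn's Lasso in place of the project's own
        # solver from equations_regression
        from sklearn.linear_model import Lasso
    # fit the lasso model for this regularization strength and record the weights
    w_hat = Lasso(alpha=decay, max_iter=10000).fit(x, y).coef_
    weights_crossfold[counter] = w_hat
    counter += 1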