Esempio n. 1
0
def test_keras_regressor():
    """Smoke-test the sklearn-style KerasRegressor wrapper on a tiny MLP.

    Builds a two-layer network, fits it on the module-level regression
    split (X_train_reg / y_train_reg) and scores it on the test split.
    """
    model = Sequential()
    model.add(Dense(input_dim, input_shape=(input_dim,)))
    model.add(Activation('relu'))
    model.add(Dense(1))
    # BUG FIX: the original applied 'softmax' to a single output unit,
    # which always yields 1.0 and makes regression impossible.  A linear
    # activation is the correct choice for a scalar regression head.
    model.add(Activation('linear'))

    sklearn_regressor = KerasRegressor(model, optimizer=optim, loss=loss,
                                       train_batch_size=batch_size,
                                       test_batch_size=batch_size,
                                       nb_epoch=nb_epoch)
    sklearn_regressor.fit(X_train_reg, y_train_reg)
    sklearn_regressor.score(X_test_reg, y_test_reg)
Esempio n. 2
0
def _read_loto_result():
    """Load the Loto-6 draw history CSV and fit a KerasRegressor per digit.

    For each winning-digit column the date/number history is split into
    train/test sets, standardized, and fitted/scored with ``reg_model``.
    """
    _df_loto = pd.read_csv('C:\\Users\\hal\\Downloads\\loto6.csv',
                           sep=",",
                           encoding='shift_jis')
    #print(_df_loto)
    # BUG FIX: the original called train_test_split(X, Y) here -- before X
    # and Y were ever defined -- and then executed an unconditional
    # `return`, leaving everything below as unreachable dead code.  Both
    # debug leftovers have been removed so the loop actually runs.
    for a_i in range(1, 2):
        a_num_str = '第' + str(a_i) + '数字'
        # Extract the draw date plus the a_i-th winning-number column.
        _df_loto_sub = _df_loto[['日付', a_num_str]]
        print(_df_loto_sub)
        #plt.plot(_df_loto_sub)
        #plt.show()

        ########################################
        # Drop rows with missing data
        ########################################
        #_df_loto_sub = _df_loto_sub.dropna()

        ########################################
        # One-hot encoding
        ########################################

        ########################################
        # Training
        ########################################
        X = _df_loto_sub['日付']
        Y = _df_loto_sub[a_num_str]
        x_train, x_test, y_train, y_test = model_selection.train_test_split(
            X, Y, test_size=0.2)
        # Standardize using statistics from the training split only.
        scaler = StandardScaler()
        scaler.fit(x_train)
        x_train = scaler.transform(x_train)
        x_test = scaler.transform(x_test)

        ########################################
        # Keras model
        ########################################
        # Build the sklearn-compatible regressor.
        model = KerasRegressor(build_fn=reg_model,
                               epochs=200,
                               batch_size=16,
                               verbose=0)
        # Fit on the training split.
        model.fit(x_train, y_train)

        # Score (reference value only; result is discarded).
        model.score(x_test, y_test)
Esempio n. 3
0
def runANN(inputs, hp):
    """Train one Keras regressor per (l1, l2) hyper-parameter pair.

    ``inputs`` may carry a pickled dataset; otherwise one is built fresh.
    Returns a dict with a host/elapsed-time message and per-run metrics.
    """
    # NOTE(review): pickle.loads on externally supplied bytes is unsafe if
    # `inputs` can come from an untrusted source -- confirm the caller.
    data = pickle.loads(inputs) if inputs else build_dataset()

    started = datetime.datetime.now()

    results = []
    for units1, units2 in hp:
        model = KerasRegressor(build_fn=create_baseline_model,
                               l1_units=units1,
                               l2_units=units2,
                               verbose=0)

        model.fit(data["x_train"],
                  data["y_train"],
                  verbose=0,
                  validation_data=(data["x_val"], data["y_val"]),
                  epochs=300)

        score = model.score(data["x_val"], data["y_val"])
        preds = model.predict(data["x_test"])

        # Flatten predictions before comparing against the test targets.
        flat_preds = preds.reshape(len(preds))
        mse = mean_squared_error(data["y_test"], flat_preds)

        results.append({
            'score': score,
            'mse': mse,
            'hp': [units1, units2],
        })

    elapsed = datetime.datetime.now() - started
    host = socket.gethostname()

    return {
        "msg": "Run ANN! [" + host + "] > elapsed time: " + str(elapsed),
        "data": results,
    }
def test_regression_model(build_fn, modle_name, num_repeats=10):
    """Repeatedly fit a KerasRegressor and report the mean test MSE.

    Note: sklearn reports MSE as a *negative* score, so values closer to
    zero are better -- see
    https://github.com/scikit-learn/scikit-learn/issues/2439.
    For regression with KerasRegressor, see also
    https://stackoverflow.com/questions/44132652/keras-how-to-perform-a-prediction-using-kerasregressor

    Returns the mean score over ``num_repeats`` independent fits.
    """
    seed(MJTCP_SEED)

    results = np.zeros(num_repeats)
    acc = np.zeros(num_repeats)          # kept for the disabled accuracy check
    results_cv = np.zeros(num_repeats)   # kept for the disabled CV check

    for repeat in range(num_repeats):
        print('Repeat #', repeat + 1, '/', num_repeats)
        regressor = KerasRegressor(build_fn=build_fn, epochs=100,
                                   batch_size=10, verbose=0)
        regressor.fit(x_train, y_train)
        results[repeat] = regressor.score(x_test, y_test)
        print('   MSE (no cross-validation) = ', results[repeat])
        # Optional cross-validated variant (disabled):
        # kfold = KFold(n_splits=10, random_state=seed)
        # results_cv[repeat] = cross_val_score(regressor, x_train, y_train, cv=kfold)

    print("\n%s: Mean of MSE from %i repeats: %.2f (stdev = %.2f)" % (
        modle_name, num_repeats, results.mean(), results.std()))

    return results.mean()
Esempio n. 5
0
# Standardize features: fit the scaler on the training split only, then
# apply that same transform to validation and test to avoid data leakage.
scaler = StandardScaler()
x_train = scaler.fit_transform(x_train)
x_valid = scaler.transform(x_valid)
x_test = scaler.transform(x_test)

# Keep three test rows aside for a quick prediction demo further below.
x_new = x_test[:3]


def build_model(n_hidden=1, n_neurons=30, learning_rate=3e-3, input_shape=(8,)):
    """Build and compile a configurable MLP regressor.

    Args:
        n_hidden: number of hidden ReLU layers.
        n_neurons: units per hidden layer.
        learning_rate: SGD learning rate.
        input_shape: shape of one input sample (default: 8 features).

    Returns:
        A compiled Keras Sequential model with MSE loss.
    """
    # FIX: the default was the mutable list [8]; a tuple is equivalent for
    # Keras and avoids the shared-mutable-default pitfall.
    model = Sequential()
    model.add(InputLayer(input_shape=input_shape))
    for layer in range(n_hidden):
        model.add(Dense(n_neurons, activation='relu'))
    model.add(Dense(1))
    optimizer = SGD(lr=learning_rate)
    model.compile(loss='mse', optimizer=optimizer)
    return model


# Wrap the Keras builder in a sklearn-compatible regressor.
keras_reg = KerasRegressor(build_model)

# Train with early stopping on the validation loss (patience = 10 epochs).
keras_reg.fit(x_train,y_train,epochs=100,
                validation_data=(x_valid,y_valid),
                callbacks=[EarlyStopping(patience=10)])


# score() follows the sklearn convention (negative of the model's loss).
mse_test = keras_reg.score(x_test,y_test)
y_pred = keras_reg.predict(x_new)

print(y_pred)
Esempio n. 6
0
def run_regressor(model=LSTM2,
                  sequence_length=SEQ_LENGTH,
                  data=None,
                  data_file='df_dh.csv',
                  isload_model=True,
                  testonly=False):
    """Train an LSTM regressor, score it on the test split, and dump
    predictions / ground truth to CSV files.

    Args:
        model: builder that returns a compiled Keras model given X_train.
        sequence_length: window length forwarded to get_data().
        data: optional pre-made (X_train, y_train, X_test, y_test, X_val,
            Y_val) tuple; loaded from ``data_file`` when None.
        data_file: CSV used both to load data and to name output files.
        isload_model, testonly: currently unused -- the reload/test-only
            paths are commented out below.

    Returns:
        (predicted_df, y_test_df) DataFrames in the original value scale.
    """
    epochs = 20000
    path_to_dataset = data_file  # NOTE(review): assigned but never used

    global mses  # accumulates MSE across repeated invocations

    if data is None:

        X_train, y_train, X_test, y_test, X_val, Y_val = get_data(
            sequence_length=sequence_length,
            stateful=STATEFUL,
            path_to_dataset=data_file)
    else:
        X_train, y_train, X_test, y_test, X_val, Y_val = data

    # Stateful LSTMs need the sample count to be a multiple of batch_size.
    if STATEFUL:
        X_test = X_test[:int(X_test.shape[0] / batch_size) * batch_size]
        y_test = y_test[:int(y_test.shape[0] / batch_size) * batch_size]

    # Default-argument lambda binds X_train now, so build_fn needs no args.
    estimator = KerasRegressor(build_fn=lambda x=X_train: model(x))

    # if testonly == True:
    #     # predicted = model.predict(X_test, verbose=1,batch_size=batch_size)
    #     prediction = estimator.predict(X_test)

    #     stat_metrics(X_test, y_test, prediction)
    #     draw_scatter(predicted_arr[0], y_test, X_test, X_train, y_train, data_file)
    #     return

    # NOTE(review): early_stopping is created but never added to `callbacks`
    # below, so training always runs the full 20000 epochs -- confirm intent.
    early_stopping = EarlyStopping(monitor='val_loss', verbose=1, patience=40)
    checkpoint = ModelCheckpoint("./lstm.h5",
                                 monitor='val_loss',
                                 verbose=1,
                                 save_best_only=True,
                                 save_weights_only=True)
    ################
    hist = estimator.fit(X_train,
                         y_train,
                         validation_data=(X_val, Y_val),
                         callbacks=[checkpoint],
                         epochs=epochs,
                         batch_size=batch_size,
                         verbose=1)

    # prediction = estimator.predict(X_test)
    score = mean_squared_error(y_test, estimator.predict(X_test))
    estimator_score = estimator.score(X_test, y_test)
    print(score)

    mses.append(score)

    prediction = estimator.predict(X_test)
    print(prediction)
    print(X_test)
    print("##############################################")
    # predicted_arr = prediction.T.tolist()
    # print(predicted_arr)
    global scaler
    # Undo the scaling so the CSV output is in original units.
    prediction_, y_test_, y_train_ = inverse_xy_transform(
        scaler, prediction, y_test, y_train)
    predicted_df = pd.DataFrame(prediction_)
    y_test_df = pd.DataFrame(y_test_)
    # X_test_df = pd.DataFrame(X_test) #columns
    predicted_df.to_csv(DATAPATH + str(prefix) + data_file + str(batch_size) +
                        str(sequence_length) + "predicted_df.csv")
    y_test_df.to_csv(DATAPATH + str(prefix) + data_file + str(batch_size) +
                     str(sequence_length) + "y_test_df.csv")
    # X_test_df.to_csv(DATAPATH+data_file+"X_test_df.csv")
    draw_scatter(prediction, y_test, X_test, X_train, y_train, data_file)
    his_figures(hist)

    draw_line(prediction, y_test, X_test, X_train, y_train, data_file)
    return predicted_df, y_test_df
Esempio n. 7
0
def EvaluateModel(params):
    """Build, fit, and score a housing-price Keras model for one config.

    Args:
        params: indexable config -- params[0] is the model type, params[1..8]
            are preprocessing switches for GetTrainTestData, and params[3]
            (LogSalesPrice) is reused when merging predictions.

    Returns:
        (modelScore, errMsg) tuple; errMsg is '' on success.

    Side effects: sets the globals MOD_TYPE / INPUT_SHAPE consumed by
    BuildModel, and writes the prediction CSV to disk.
    """
    global MOD_TYPE
    global INPUT_SHAPE

    # Get the Train/Test data... depending on the various parameters,
    XTrain, YTrain, XTestIDs, XTestValues = HousingDataUtils.GetTrainTestData(
        CatEncoding=params[1],
        ScalingType=params[2],
        LogSalesPrice=params[3],
        DropLilVarFeats=params[4],
        DropLilCorrFeats=params[5],
        MakeNewFeatures=params[6],
        DropUsedFeatures=params[7],
        DropOutliers=params[8])

    #XTrain_split, XValid, YTrain_split, YValid = train_test_split (XTrain, YTrain, random_state=42)

    # Make the model...
    print('\n>>> Building and Fitting Model...')

    # BuildModel reads these globals, so they must be set before fitting.
    MOD_TYPE = params[0]
    INPUT_SHAPE = XTrain.shape[1:]

    cb = tf.keras.callbacks.EarlyStopping(monitor='loss', patience=PATIENCE)
    model = KerasRegressor(build_fn=BuildModel,
                           validation_split=0.2,
                           epochs=MAX_EPOCHS,
                           verbose=2,
                           callbacks=[cb])

    history = model.fit(XTrain, YTrain)

    # get the model score... or loss and mean square error
    # NOTE(review): scored on the training data, not a held-out split.
    modelScore = model.score(XTrain, YTrain)
    print('>>> modelScore = ', modelScore)
    print()

    # feature importance...
    print('>>> Finding feature importance...')
    featImportanceResults = HousingDataUtils.FindFeatureImportance(
        model, XTrain, YTrain, 3, 100)

    for i in range(len(featImportanceResults)):
        print(
            f'{featImportanceResults[i][0]:25} {featImportanceResults[i][1]:14.7f} {featImportanceResults[i][2]:14.7f}'
        )

    # Predict the model
    print('Predicting the model...')
    predYValues = model.predict(XTestValues).tolist()

    # output...
    outputFile = MakeOutputFileName(params)
    print('>>>Writing output to...', outputFile)
    results, errMsg = HousingDataUtils.MergeResult(XTestIDs, predYValues,
                                                   params[3], YTrain.mean())
    results.to_csv(outputFile, index=False)

    if errMsg != '':
        print('>>> Errors')
        print('   ', errMsg)

    return (modelScore, errMsg)
# Train/evaluate every candidate model and plot predicted vs. actual.
# `models` is presumably a list of (name, build_fn) pairs -- TODO confirm.
for i in range(len(models)):
    # Create a Regressor (input is model)
    regressor = KerasRegressor(
        build_fn=models[i][1], batch_size=32,
        epochs=20)  # 1 epoch: all examples have been processed 1 time

    # Fit the model to the training data
    history = regressor.fit(x=X_train,
                            y=Y_train,
                            verbose=1,
                            epochs=20,
                            batch_size=32)

    # Predict the data for test data
    y_pred = regressor.predict(X_test)
    # sklearn convention: score() is the negative of the model's loss.
    score = regressor.score(X_test, Y_test)
    mse = mean_squared_error(Y_test, y_pred)
    # mean squared error: measure for distance between measurement and prediction
    r2 = r2_score(Y_test, y_pred)
    # measures 'goodness of fit' (is model complete, 1= optimal)

    print()
    print(models[i][0] + ": train/test: ")
    print(" - X_test has shape:  " + str(X_test.shape))
    print(" - y_pred has shape:  " + str(y_pred.shape))
    print(" - score: " + str(score))
    print(" - mse:  " + str(mse))
    print(" - r^2:    " + str(r2))
    print(" - y_pred:  " + str(y_pred))
    # One scatter subplot per model in a 2-row grid.
    plt.subplot(2, len(models), i + 1)
    plt.scatter(Y_test, y_pred)
Esempio n. 9
0
def run_regressor(model=LSTM2,
                  data=None,
                  data_file='df_dh.csv',
                  isload_model=True,
                  testonly=False):
    """Train an LSTM regressor on sequence data, report the test MSE, and
    draw scatter / training-history figures.

    Args:
        model: builder that returns a compiled Keras model given X_train.
        data: optional pre-made (X_train, y_train, X_test, y_test, X_val,
            Y_val) tuple; loaded from ``data_file`` when None.
        data_file: CSV used to load the dataset.
        isload_model, testonly: currently unused -- the test-only path is
            commented out below.
    """
    epochs = 8000
    path_to_dataset = data_file  # NOTE(review): assigned but never used
    sequence_length = SEQ_LENGTH

    if data is None:

        X_train, y_train, X_test, y_test, X_val, Y_val = get_data(
            sequence_length=sequence_length,
            stateful=STATEFUL,
            path_to_dataset=data_file)
    else:
        X_train, y_train, X_test, y_test, X_val, Y_val = data

    # Stateful LSTMs need the sample count to be a multiple of batch_size.
    if STATEFUL:
        X_test = X_test[:int(X_test.shape[0] / batch_size) * batch_size]
        y_test = y_test[:int(y_test.shape[0] / batch_size) * batch_size]

    # Default-argument lambda binds X_train now, so build_fn needs no args.
    estimator = KerasRegressor(build_fn=lambda x=X_train: model(x))

    # if testonly == True:
    #     # predicted = model.predict(X_test, verbose=1,batch_size=batch_size)
    #     prediction = estimator.predict(X_test)

    #     stat_metrics(X_test, y_test, prediction)
    #     draw_scatter(predicted_arr[0], y_test, X_test, X_train, y_train, data_file)
    #     return

    # NOTE(review): early_stopping is created but never passed to fit(), so
    # training always runs the full 8000 epochs -- confirm intent.
    early_stopping = EarlyStopping(monitor='val_loss', verbose=1, patience=20)
    checkpoint = ModelCheckpoint("./lstm.h5",
                                 monitor='val_loss',
                                 verbose=1,
                                 save_best_only=True,
                                 save_weights_only=True)
    ################
    hist = estimator.fit(X_train,
                         y_train,
                         validation_data=(X_val, Y_val),
                         callbacks=[checkpoint],
                         epochs=epochs,
                         batch_size=batch_size,
                         verbose=1)

    # prediction = estimator.predict(X_test)
    score = mean_squared_error(y_test, estimator.predict(X_test))
    estimator_score = estimator.score(X_test, y_test)
    print(score)

    prediction = estimator.predict(X_test)
    # invert predictions
    # NOTE(review): inverse_transform on the X arrays only works if the
    # scaler was fitted with matching feature dimensions -- verify; the
    # *_trans results are also never used below.
    prediction_trans = scaler.inverse_transform(prediction)
    X_test_trans = scaler.inverse_transform(X_test)
    y_test_trans = scaler.inverse_transform(y_test)
    X_train_trans = scaler.inverse_transform(X_train)
    y_train_trans = scaler.inverse_transform(y_train)

    print(prediction)
    print(X_test)
    print("##############################################")
    # predicted_arr = prediction.T.tolist()
    # print(predicted_arr)
    draw_scatter(prediction, y_test, X_test, X_train, y_train, data_file)
    his_figures(hist)
Esempio n. 10
0
# Wrap the Keras model builder as a sklearn-compatible regressor and fit it.
from keras.wrappers.scikit_learn import KerasRegressor
model = KerasRegressor(build_fn=make_model, epochs=epochs, batch_size=batch_size, verbose=True, callbacks=keras_callbacks)
model.fit(x_train, y_train)

'''
history = model.fit(x_train, y_train,
    batch_size=batch_size,
    epochs=epochs,
    shuffle=True,
    verbose=2,#0, # Change it to 2, if wished to observe execution
    #validation_data=(arr_x_valid, arr_y_valid),
    callbacks=keras_callbacks)
'''
# Spot-check the first 20 test predictions against the ground truth.
y_pred = model.predict(x_test[:20,])

print (y_pred)
print (y_test[:20])
# score() follows the sklearn convention (negative of the model's loss).
print('KERAS score is %f (traning)' % model.score(x_train, y_train))
print('KERAS score is %f (test)' % model.score(x_test, y_test)) # ??%

'''
from sklearn.metrics import mean_squared_error
score = mean_squared_error(y_test, model.predict(x_test))
print(score)
from sklearn.metrics import mean_absolute_error
score = mean_absolute_error(y_test, model.predict(x_test))
print(score)
'''

Esempio n. 11
0
def createmodel():
    """Build and compile a small MLP for scalar regression (MSE loss).

    Returns:
        A compiled Keras Sequential model: 8 -> 13 -> 1 with ReLU hiddens.
    """
    model = Sequential()
    model.add(Dense(8, input_dim=8, init='normal', activation='relu'))
    model.add(Dense(13, init='normal', activation='relu'))
    model.add(Dense(1))
    # BUG FIX: 'accuracy' is a classification metric and is meaningless for
    # a continuous regression target, so it is no longer requested.
    model.compile(loss='mean_squared_error',
                  optimizer='adam')
    return model


# Log training to TensorBoard under logslo1/1.
tensorboard = TensorBoard(log_dir="logslo1/1",
                          histogram_freq=0,
                          write_graph=True,
                          write_images=True)

estimator = KerasRegressor(build_fn=createmodel)
# fit() returns the Keras History object for this training run.
est = estimator.fit(X_train,
                    Y_train,
                    epochs=20,
                    batch_size=160,
                    callbacks=[tensorboard])
# sklearn convention: score() is the negative loss on the test split.
evaluation = estimator.score(X_test, Y_test)
print(evaluation)
plt.plot(est.history['loss'])
# plt.plot(history.history['test_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
Esempio n. 12
0
def main():
    """Fit one KerasRegressor per personality column, persist each fitted
    model, and write the per-column training scores as one CSV row."""
    analysis_score = []
    with open(path3, "w", newline="") as f:
        writer = csv.writer(f)
        for col in [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]:
            # Target vector: one personality column of the training sheet.
            training_personality = np.loadtxt(path1,
                                              delimiter=",",
                                              skiprows=1,
                                              max_rows=12,
                                              usecols=(col),
                                              encoding="utf-8")

            # Features: the five preference columns of the same sheet.
            training_preference = np.loadtxt(path1,
                                             delimiter=",",
                                             skiprows=1,
                                             max_rows=12,
                                             usecols=(1, 2, 3, 4, 5),
                                             encoding="utf-8")

            # Single held-out sample read into a (1, 5) array.
            test_preference = np.empty((1, 5))
            test_preference[0] = np.loadtxt(path2,
                                            delimiter=",",
                                            skiprows=0,
                                            max_rows=1,
                                            usecols=(1, 2, 3, 4, 5),
                                            encoding="utf-8")
            np.reshape(test_preference, (1, 5))  # no-op, kept from original

            # Seed from the row count (constant here) for reproducibility.
            seed = len(training_personality)
            np.random.seed(seed)
            model = KerasRegressor(build_fn=reg_model,
                                   epochs=100,
                                   batch_size=10,
                                   verbose=0)
            model.fit(training_preference, training_personality)
            #kfold = KFold(n_splits=10, random_state=seed)

            # Training-set score (0.5+ is the rough target per the label).
            print("score(0.5以上が目安)")
            print(model.score(training_preference, training_personality))

            # Prediction for the single held-out sample.
            print(model.predict(test_preference))
            print(model.score(training_preference, training_personality))
            analysis_score.append(
                model.score(training_preference, training_personality))

            # Persist the fitted model for this column.
            path4 = "C:\\Users\\bubbl\\OneDrive\\Desktop\\facial_expression_data\\deeplearning_clf_data{}.pkl".format(
                col)
            joblib.dump(model, path4)

        writer.writerow(analysis_score)

    print(analysis_score)
Esempio n. 13
0
    model.add(Dense(256,input_shape=(10,),activation='relu'))
    model.add(Dense(128,activation='relu'))
    model.add(Dense(4))
    model.compile(loss='mae',optimizer='adam',metrics=['mae'])
    return model

from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.metrics import mean_absolute_error as mae
# Wrap the builder so the model can be used with sklearn utilities.
model=KerasRegressor(build_fn=create_model,epochs=100,verbose=1,batch_size=3)

#4. Evaluate and predict
model.fit(x_train,y_train,epochs=100,batch_size=3)

# NOTE(review): cross_val_score refits clones of the wrapper internally,
# so the fit() above is not what is being cross-validated here.
results=cross_val_score(model,x_train,y_train,cv=kfold,n_jobs=1)

print(test.shape)
y_predict=model.predict(x_test)
print(y_predict.shape)

mae_result=mae(y_predict,y_test) # evaluate how well the training went

# sklearn convention: score() is the negative of the model's loss.
score=model.score(x_test,y_test)
print("r2:",score)
print("result:",results)
print("mae:",mae_result)


"""
mae: 2.58
r2: -2.58
"""
Esempio n. 14
0
            # Rebuild the test tensor from feature 0 only, then append the
            # chosen feature columns one at a time along the feature axis.
            test_new = test_X[:, :, 0].reshape((test_X.shape[0], time_step, 1))
            for wey in chosen[s]:
                #APPENDING COLUMNS ACCORDING TO THE CHOSEN FEATURES LENGTH FOR TEST SET
                test_new = numpy.concatenate(
                    (test_new, test_X[:, :, wey].reshape(
                        (test_X.shape[0], time_step, 1))),
                    axis=2)

            # FITTING DATASET
            # shuffle=False preserves the temporal order of the sequences.
            history = kears_estimator.fit(train_new,
                                          train_y,
                                          epochs=num_epochs,
                                          batch_size=batch_size,
                                          verbose=0,
                                          shuffle=False)
            # sklearn convention: score() is the negative of the loss.
            score = kears_estimator.score(test_new, test_y)

            # # MEASURING TRAIN
            # yhat_train = kears_estimator.predict(train_new)
            # train_X1 = train_X.reshape((train_X.shape[0], features))
            # # invert scaling for actual / appending same colums from train to not have problem with inverse scale
            # train_y1 = train_y.reshape((len(train_y), 1))
            # inv_y1 = concatenate((train_y1, train_X1[:, -(n_features-1):]), axis=1)
            # inv_y1 = scaler.inverse_transform(inv_y1)
            # inv_y1 = inv_y1[:,0]
            # # invert scaling for forecast/ appending same colums from train to not have problem with inverse scale
            # inv_yhat1 = concatenate((yhat_train.reshape(train_X.shape[0],1), train_X1[:, -(n_features-1):]), axis=1)
            # inv_yhat1 = scaler.inverse_transform(inv_yhat1)
            # inv_yhat1 = inv_yhat1[:,0]
            # # calculate RMSE
            # rmse_t = sqrt(mean_squared_error(inv_y1, inv_yhat1))
Esempio n. 15
0
# evaluate model

start = time.time()


# Wrap the Keras builder so it can be used like a sklearn estimator.
model = KerasRegressor(build_fn=baseline_model, epochs=100, batch_size=5, verbose=0)

model.fit(X_train, y_train)

#predictions: test data
y_pred = model.predict(X_test)

# BUG FIX: the header printed 'Random Forest report' (copy-paste leftover)
# although the model being evaluated is the Keras regressor.
print('\nKeras regressor report')
#Scores (sklearn convention: score() is the negative of the model's loss)
print('Train score')
print(model.score(X_train, y_train))
print('Test score')
print(model.score(X_test, y_test))
print('-------------------------------------------------------')

# MAE
print('Mean absolute error')
print(mean_absolute_error(y_test, y_pred))
print('-------------------------------------------------------')

# MSE
print('Mean squared error')
print(mean_squared_error(y_test, y_pred))
print('-------------------------------------------------------')

# R-squared
Esempio n. 16
0
    model.add(Dense(1, kernel_initializer='normal'))
    # Compile model
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model


# fix random seed for reproducibility
seed = 7
np.random.seed(seed)
# evaluate model with standardized dataset
estimator = KerasRegressor(build_fn=baseline_model,
                           epochs=200, batch_size=5, verbose = 0)


estimator.fit(X_train, y_train)
# sklearn convention: score() is the negative loss on the held-out split.
scr = estimator.score(X_test, y_test)
print(scr)
y_pred = estimator.predict(X_test)

# Report the plain test-set MSE as well.
score = mean_squared_error(y_test, estimator.predict(X_test))
print("Deep NN: ", score)
#r2 = r2_score(y_test, y_pred)
#print(r2)
# 0.3445108367171906
# 0.3612083730671892 //200 epochs, no activation
# 0.36304349263413993 //200 epochs , activation = relu
# 0.36586558665547375 // another dense layer
# print(y_pred)
#kfold = KFold(n_splits=10, random_state=seed)
#results = cross_val_score(estimator, X, Y, cv=kfold)