Пример #1
0
 def loadModel(self,basemodel):
     """Load a previously saved model named *basemodel*.

     Tries the supported storage formats in order:
       1. ``basemodel.zip``  -- a Talos deploy archive.
       2. ``basemodel.json`` + ``basemodel.h5`` -- Keras architecture
          plus a separate weights file.

     Raises:
         RuntimeError: if neither format is present on disk.
     """
     self.basemodel = basemodel
     zip_path = basemodel + '.zip'
     json_path = basemodel + '.json'
     weights_path = basemodel + '.h5'
     if os.path.exists(zip_path):
         logging.info("Loading model from zip file %s.zip"%basemodel)
         restored = talos.Restore(zip_path, custom_objects=self.custom_objects)
         self.model = restored.model
     elif os.path.exists(json_path) and os.path.exists(weights_path):
         with open(json_path, 'r') as f:
             architecture_json = f.read()
         self.model = tf.keras.models.model_from_json(architecture_json,
                                                      custom_objects=self.custom_objects)
         self.model.load_weights(weights_path)
     else:
         raise RuntimeError('Could not find the suitable format for %s'%basemodel)
Пример #2
0
 def talos_model(self, force=False):
     """Return an iterative Talos model.

     If a deployed model archive already exists on disk it is restored
     and returned; otherwise a fresh hyper-parameter scan is run, the
     result deployed to disk, and the scan object returned.

     :param force: whether to force generation of a fresh model even if
         a saved archive exists
     :return: the iterative Talos model (Restore or Scan object)
     """
     model_path = self.MODULE_PATH + "/data/talos/fakedata.zip"
     # Reuse the saved archive unless the caller explicitly opted out.
     if Path(model_path).is_file() and force is False:
         return ta.Restore(model_path)
     scan = ta.Scan(x=self.X,
                    y=self.Y,
                    model=self.fake_news_model,
                    grid_downsample=.01,
                    params=self.p,
                    dataset_name='fakenews',
                    experiment_no='1')
     # Deploy() appends ".zip" itself, so strip it from the target name.
     ta.Deploy(scan, str(model_path).replace(".zip", ""))
     return scan
Пример #3
0
import pprint
import os, sys
import time as tt
from sklearn.metrics import f1_score
import numpy as np
import talos as ta
from keras.optimizers import SGD

sys.path.insert(1, '../Helpers/')
import Config
from Model import ExternalValidate, cross_validate_keras, reshape_data_1d, reset_weights
import DataRna as Data

# Time the whole cross-validation run.
start_time = tt.time()

# Restore the deployed Talos scan archive for the configured treatment.
scan_object = ta.Restore('cnn_1d_tumor_type_t_' + Config.treat + '.zip')
print(scan_object.params)
# Hand-picked training settings used for the final cross-validation
# (values chosen from the Talos scan results above).
talos_best_params = {
    'num_epochs': 500,
    'batch_size': 32,
    'learning_rate': 0.001,
    'isNadam': False
}

# Use the full data set for cross-validation.
# assumes reshape_data_1d adds the channel axis the 1-D CNN expects -- TODO confirm
train_x_all = Data.all_data
train_y_all = Data.all_labels
train_x_all = reshape_data_1d(train_x_all)

# 10-fold cross-validation of the restored model with the chosen settings.
cross_validate_keras(scan_object.model, talos_best_params, 10, train_x_all,
                     train_y_all)
Пример #4
0
    my_test_fold.append(-1)
# Mark the validation rows (label 0) appended after the training rows.
for k in range(j, j + len(x_valid)):
    my_test_fold.append(0)
# Merge train + validation so PredefinedSplit can separate them again.
x_train = np.concatenate((x_train, x_valid), axis=0)
y_train = np.concatenate((y_train, y_valid), axis=0)
ps = PredefinedSplit(test_fold=my_test_fold)
# Support-vector regression baseline with fixed (pre-tuned) hyper-parameters.
svr = svm.SVR(C=1000, epsilon=0.01, gamma=0.001, kernel='rbf')
svr.fit(X=x_train, y=y_train.ravel())
# Bounds of the original scale (NOTE: shadows the builtins min/max).
min = data.mm[0]
max = data.mm[1]

# Undo the min-max normalisation so predictions are on the original scale.
y_svr_notrend = svr.predict(x_test) * (max - min) + min

# Data from FFNN trained with Google Trends
# NOTE(review): the original comment said "without"; the variable names
# suggest the two labels were swapped -- confirm against the archives.
with CustomObjectScope({'GlorotUniform': glorot_uniform()}):
    WithTrend = ta.Restore('Talos_Results/ANN_V2_stock_window_20_deploy.zip')
df = create_finance(returns=False, plot_corr=False, Trends=True)
data = prepare_data(df, 20, 10, 10, returns=False, normalize_cheat=False)
data.create_windows()
x_test = data.x_test.reshape(len(data.x_test), -1)
y_test = data.y_test.reshape(-1, 1)
min = data.mm[0]
max = data.mm[1]
y_ffnn_trend = WithTrend.model.predict(x_test) * (max - min) + min
y_test = y_test * (max - min) + min

# Data from FFNN trained without Google Trends (see swap note above).
with CustomObjectScope({'GlorotUniform': glorot_uniform()}):
    WithoutTrend = ta.Restore(
        'Talos_Results/ANN_WithoutTrends_stock_window_20_deploy.zip')
Пример #5
0
def test_rest(scan_object):
    """Smoke-test Deploy/Restore and assorted ``talos.utils`` helpers.

    Deploys *scan_object* under a randomised name, restores it, then
    fits three tiny Keras networks to exercise the custom metrics,
    callbacks and both data generators, followed by the GPU utilities
    and the parameter-space test helper.  Checks "does not crash" only.
    """

    print('\n >>> start testing the rest... \n')

    import talos

    import random

    # Random suffix keeps repeated runs from clobbering each other.
    deploy_filename = 'test' + str(random.randint(1, 20000000000))

    print('\n ...Deploy()... \n')
    talos.Deploy(scan_object, deploy_filename, 'val_acc')

    print('\n ...Restore()... \n')
    restored = talos.Restore(deploy_filename + '.zip')

    # Small slice of the template dataset keeps the fits fast.
    x, y = talos.templates.datasets.breast_cancer()
    x, y = x[:50], y[:50]

    x_train, y_train, x_val, y_val = talos.utils.val_split(x, y, .2)
    x = talos.utils.rescale_meanzero(x)

    callbacks = [
        talos.utils.early_stopper(10),
        talos.utils.ExperimentLogCallback('test', {})
    ]

    # Every custom metric talos ships, attached to one compile call below.
    tm = talos.utils.metrics
    metrics = [
        tm.f1score, tm.fbeta, tm.mae, tm.mape, tm.matthews, tm.mse,
        tm.msle, tm.precision, tm.recall, tm.rmae, tm.rmse, tm.rmsle
    ]

    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import Dense

    def _two_layer():
        # Minimal dense network shared by the three sub-tests below.
        net = Sequential()
        net.add(Dense(10, input_dim=x.shape[1]))
        net.add(Dense(1))
        return net

    print('\n ...callbacks and metrics... \n')

    net1 = _two_layer()
    net1.compile('adam', 'logcosh', metrics=metrics)
    net1.fit(x, y, callbacks=callbacks)

    print('\n ...generator... \n')

    net2 = _two_layer()
    net2.compile('adam', 'logcosh')
    net2.fit_generator(talos.utils.generator(x, y, 10), 5)

    print('\n ...SequenceGenerator... \n')

    net3 = _two_layer()
    net3.compile('adam', 'logcosh')
    net3.fit_generator(talos.utils.SequenceGenerator(x, y, 10))

    print('\n ...gpu_utils... \n')

    talos.utils.gpu_utils.force_cpu()
    talos.utils.gpu_utils.parallel_gpu_jobs()

    print('\n ...gpu_utils... \n')

    from talos.utils.test_utils import create_param_space
    create_param_space(restored.results, 5)

    print('finished testing the rest \n')
Пример #6
0
    # Split arrays prepared by `data`; targets reshaped to column vectors.
    x_train = data.x_train
    y_train = data.y_train.reshape(-1, 1)
    x_valid = data.x_valid
    y_valid = data.y_valid.reshape(-1, 1)
    x_test = data.x_test
    y_test = data.y_test.reshape(-1, 1)

    #    file = 'ANN_WithoutTrends_stock_window_' + str(i)
    # Deploy-archive base name for the LSTM tuned on window size `i`.
    file = 'Talos_Results/LSTM_window_large_b' + str(i)

    print('<><><><><><><><><><><><><><><><><><><><><><><><><>')
    print("Opening Talos on window size: {}".format(i))
    dpl = file + '_deploy.zip'
    print(dpl)
    # GlorotUniform must be in scope for Keras to deserialize the model.
    with CustomObjectScope({'GlorotUniform': glorot_uniform()}):
        LSTM = ta.Restore(dpl)
    #y_pred = ANN.model.predict(x_test)
    #f = '/Users/mariusjessen/UiO19/MachineLearning/HW/FYS-STK-4155-Project3/my_dir/'+file + '.png'
    #plot_model(ANN.model, to_file=f,show_shapes=True)

    # Invert the scaling transform so errors are on the original scale.
    y_pred = data.inv.inverse_transform(LSTM.model.predict(x_test))
    y_test = data.inv.inverse_transform(y_test)

    #y_test, y_pred = y_test[:20], y_pred[:20]

    # Report standard regression error metrics for this window size.
    print('RMSE: {}'.format(np.sqrt(mse(y_test, y_pred))))
    print('MAPE: {}'.format(mean_absolute_percentage_error(y_test, y_pred)))
    print('AMAPE: {}'.format(AMAPE(y_pred, y_test)))
    print('MAE: {}'.format(MAE(y_pred, y_test)))
    print('PCT: {}'.format(PCT(y_test, y_pred)))
# Kernel density estimate of the validation-accuracy distribution.
r.plot_kde('val_acc')

# a simple histogram
r.plot_hist(bins=50)

# heatmap correlation
r.plot_corr()

# a four dimensional bar grid
r.plot_bars('batch_size', 'val_acc', 'first_neuron', 'lr')

# Evaluate the scan object `h` with 10-fold cross-validation.
e = ta.Evaluate(h)
e.evaluate(x, y, folds=10, average='macro')

# Package the experiment into a deployable 'iris.zip' archive.
ta.Deploy(h, 'iris')

# Round-trip: restore the deployed experiment from disk.
iris = ta.Restore('iris.zip')

# make predictions with the model
iris.model.predict(x)

# get the meta-data for the experiment
print(iris.details)
# get the hyperparameter space boundary
print(iris.params)
# sample of x and y data
print(iris.x)
print(iris.y)
# the results dataframe
print(iris.results)

# Hand-picked training settings used for the final retraining runs.
talos_best_params = {
    'num_epochs': 500,
    'batch_size': 32,
    'learning_rate': 0.001,
    'isNadam': False
}
# NOTE(review): the first assignment is dead -- tag is immediately
# overwritten; kept for the run history recorded in the trailing comments.
tag = '_6'  ##791 e500_b32_r0.001_Adam
tag = '_7'  ##241 e500_b32_r0.001_Adam

#scan_object = ta.Restore('resnet_tumor_type_t_ResNet_DEG_Feature_1000_smote_orderChromo_Best.zip')
#res_history = train_keras(scan_object.model, talos_best_params, 'resnet', train_data_resnet, train_labels_resnet, test_data_resnet, test_labels_resnet)
#savePickle(res_history,'resnet_history'+tag)

# Retrain the restored 1-D CNN and persist its training history.
scan_object = ta.Restore(
    'cnn_1d_tumor_type_t_CNN1d_DEG_Feature_10_smote_OrderChromo.zip')
cnn1d_history = train_keras(scan_object.model, talos_best_params, 'cnn1d',
                            train_data, train_labels, test_data, test_labels)
savePickle(cnn1d_history, 'cnn1d_history' + tag)

# Same retraining for the InceptionNet-1d variant.
scan_object = ta.Restore(
    'InceptionNet1d_tumor_type_t_InceptionNet1d_diverse_DEG_Feature_10_deep_orderChromo_scan.zip'
)
inception_history = train_keras(scan_object.model, talos_best_params,
                                'inception', train_data, train_labels,
                                test_data, test_labels)
savePickle(inception_history, 'inception_history' + tag)

#Terminal can't display
#SCALE_FACTOR = 1.25
#
Пример #9
0
    # Scan
    scan_object = test_scan_object()

    # Reporting
    test_reporting_object(scan_object)

    # Unix-epoch timestamp doubles as a unique deploy file name.
    start_time = str(time.strftime("%s"))

    # Prediction helpers on the finished scan.
    p = ta.Predict(scan_object)
    p.predict(scan_object.x)
    p.predict_classes(scan_object.x)

    # Exercise the remaining top-level commands, including a
    # Deploy -> Restore round-trip of the archive just written.
    ta.Autom8(scan_object, scan_object.x, scan_object.y)
    ta.Evaluate(scan_object)
    ta.Deploy(scan_object, start_time)
    ta.Restore(start_time + '.zip')

    test_random_methods()

    # Data generator plus forcing CPU-only execution.
    fit_generator = generator(scan_object.x, scan_object.y, 20)
    force_cpu()

    # Dataset-specific smoke tests.
    TestCancer().test_scan_cancer_metric_reduction()
    TestCancer().test_scan_cancer_loss_reduction()
    TestCancer().test_linear_method()
    TestCancer().test_reverse_method()
    TestIris().test_scan_iris_explicit_validation_set()
    TestIris().test_scan_iris_explicit_validation_set_force_fail()
    TestIris().test_scan_iris_1()
    TestIris().test_scan_iris_2()
    TestReporting()
Пример #10
0
def test_rest(scan_object):
    """Smoke-test talos Deploy/Restore and assorted ``talos.utils`` helpers.

    Deploys *scan_object* under a fixed name, restores it, then fits
    three tiny Keras networks to exercise the custom metrics, callbacks
    and both data generators, and finally the GPU utilities.  Everything
    is exercised for "does not crash" only; no return value.
    """

    print('\n >>> start testing the rest... \n')

    import talos
    import random

    print('\n ...Deploy()... \n')
    talos.Deploy(scan_object, 'testing_deploy', 'val_acc')

    print('\n ...Restore()... \n')
    talos.Restore('testing_deploy' + '.zip')

    # Small slice of the template dataset keeps the fits fast.
    x, y = talos.templates.datasets.breast_cancer()
    x = x[:50]
    y = y[:50]

    callbacks = [
        talos.utils.early_stopper(10),
        talos.utils.ExperimentLogCallback('test', {})
    ]

    # Every custom metric talos ships, attached to one compile call below.
    metrics = [
        talos.utils.metrics.f1score, talos.utils.metrics.fbeta,
        talos.utils.metrics.mae, talos.utils.metrics.mape,
        talos.utils.metrics.matthews, talos.utils.metrics.mse,
        talos.utils.metrics.msle, talos.utils.metrics.precision,
        talos.utils.metrics.recall, talos.utils.metrics.rmae,
        talos.utils.metrics.rmse, talos.utils.metrics.rmsle
    ]

    from keras.models import Sequential
    from keras.layers import Dense

    print('\n ...callbacks and metrics... \n')

    model1 = Sequential()
    model1.add(Dense(10, input_dim=x.shape[1]))
    model1.add(Dense(1))
    model1.compile('adam', 'logcosh', metrics=metrics)
    model1.fit(x, y, callbacks=callbacks)

    print('\n ...generator... \n')

    model2 = Sequential()
    model2.add(Dense(10, input_dim=x.shape[1]))
    model2.add(Dense(1))
    model2.compile('adam', 'logcosh')
    model2.fit_generator(talos.utils.generator(x, y, 10), 5)

    print('\n ...SequenceGenerator... \n')

    model3 = Sequential()
    model3.add(Dense(10, input_dim=x.shape[1]))
    model3.add(Dense(1))
    model3.compile('adam', 'logcosh')
    model3.fit_generator(talos.utils.SequenceGenerator(x, y, 10))

    print('\n ...gpu_utils... \n')

    talos.utils.gpu_utils.force_cpu()
    talos.utils.gpu_utils.parallel_gpu_jobs()

    # Fix: the original message was misspelled 'finised'; now matches the
    # sibling test_rest implementation.
    print('finished testing the rest \n')
Пример #11
0
    # Flatten each window into a feature vector; targets become columns.
    x_train = data.x_train.reshape(len(data.x_train), -1)
    y_train = data.y_train.reshape(-1, 1)
    x_valid = data.x_valid.reshape(len(data.x_valid), -1)
    y_valid = data.y_valid.reshape(-1, 1)
    x_test = data.x_test.reshape(len(data.x_test), -1)
    y_test = data.y_test.reshape(-1, 1)

    #    file = 'ANN_WithoutTrends_stock_window_' + str(i)
    # Deploy-archive base name for the ANN tuned on window size `i`.
    file = 'ANN_WithoutTrends_stock_window_' + str(i)

    print('<><><><><><><><><><><><><><><><><><><><><><><><><>')
    print("Opening Talos on window size: {}".format(i))
    dpl = file + '_deploy.zip'

    # GlorotUniform must be in scope for Keras to deserialize the model.
    with CustomObjectScope({'GlorotUniform': glorot_uniform()}):
        ANN = ta.Restore(dpl)
    #y_pred = ANN.model.predict(x_test)
    f = '/Users/mariusjessen/UiO19/MachineLearning/HW/FYS-STK-4155-Project3/my_dir/' + file + '.png'
    #plot_model(ANN.model, to_file=f,show_shapes=True)
    # Bounds of the original scale (NOTE: shadows the builtins min/max).
    min = data.mm[0]
    max = data.mm[1]

    # Undo min-max normalisation so errors are on the original scale.
    y_pred = ANN.model.predict(x_test) * (max - min) + min
    y_test = y_test * (max - min) + min

    # Report standard regression error metrics for this window size.
    print('RMSE: {}'.format(np.sqrt(mse(y_test, y_pred))))
    print('MAPE: {}'.format(mean_absolute_percentage_error(y_test, y_pred)))
    print('AMAPE: {}'.format(AMAPE(y_pred, y_test)))
    print('MAE: {}'.format(MAE(y_pred, y_test)))
    print('PCT: {}'.format(PCT(y_test, y_pred)))
Пример #12
0
    # Two prepared data sets: `data` (with trends) and `data2` (without).
    # assumes that mapping -- TODO confirm against how data/data2 are built
    x_test2 = data2.x_test
    x_test = data.x_test
    y_test = data2.y_test.reshape(-1, 1)

    windows = [15, 20, 30]
    #get_best_Talos(windows)

    # GlorotUniform must be in scope for Keras to deserialize the models.
    with CustomObjectScope({'GlorotUniform': glorot_uniform()}):

        #LSTM = ta.Restore('LSTM_window20_deploy.zip');
        #LSTM2 = ta.Restore('LSTM_window_W_O_Trend20_deploy.zip');

        #LSTM = ta.Restore('LSTM_window_W_O_Trend15_deploy.zip');
        #LSTM = ta.Restore('LSTM_window_large_b220_deploy.zip');
        #LSTM2 = ta.Restore('LSTM_window_large_b2_WO_Trend220_deploy.zip');
        LSTM = ta.Restore('LSTM_window_large_repl20_deploy.zip')
        LSTM2 = ta.Restore('LSTM_window_large_repl_W_O20_deploy.zip')

    # Predict with each restored model on its matching test set.
    y_pred = LSTM.model.predict(x_test)
    y_pred2 = LSTM2.model.predict(x_test2)

    #plot_model(LSTM.model, to_file='LSTM_20_model.png')
    #SVG(model_to_dot(LSTM.model).create(prog='dot', format='svg'))

    print(y_pred)

    # Reshape everything to column vectors for metric computations.
    y_true, y_pred = np.array(data.y_test).reshape(
        -1, 1), np.array(y_pred).reshape(-1, 1)
    y_pred2 = np.array(y_pred2).reshape(-1, 1)

    #y_pred = [x * (data.mm[1] - data.mm[0]) + data.mm[0] for x in y_pred.reshape(-1)]
Пример #13
0
from sklearn.metrics import f1_score
import numpy as np
import pandas as pd
import talos as ta
import pickle
from keras.optimizers import SGD

sys.path.insert(1, '../Helpers/')
import Config
import Model
from Model import ExternalValidate, cross_validate_keras, reshape_data_1d, reset_weights
import DataRna as Data

# Time the whole run.
start_time = tt.time()

# Restore the deployed Talos archive for the configured treatment setting.
scan_object = ta.Restore('InceptionNet1d_tumor_type_t_'+Config.treat+'.zip')
print(scan_object.params)
# Hand-picked training settings used for the final runs.
talos_best_params = {'num_epochs': 500, 'batch_size':32, 'learning_rate':0.001, 'isNadam':False}

scan_object.model.summary()

# 10-fold cross-validation on the full data set; expand_dims adds the
# trailing channel axis the 1-D network expects.
train_x_all = Data.all_data 
train_y_all = Data.all_labels
train_x_all = np.expand_dims(train_x_all, axis=2)
cross_validate_keras(scan_object.model, talos_best_params, 10, train_x_all, train_y_all)

####retrain with all data
keras_model = scan_object.model
reset_weights(keras_model)

# NOTE(review): 'isNadam' is passed as SGD's `nesterov` flag -- confirm
# that mapping is intended.
sgd = SGD(lr=talos_best_params['learning_rate'], nesterov=talos_best_params['isNadam'])###Nadams