Example no. 1
def train(data):
    X = np.asarray(data.drop(['ETA'], axis=1))
    y = np.asarray(data["ETA"])
    scaler = MinMaxScaler()
    X = scaler.fit_transform(X)
    with open("han_bike_scalers.pkl", "wb") as outfile:
        pkl.dump(scaler, outfile)
        upload_to_bucket('model/han_bike_scalers.pkl', 'han_bike_scalers.pkl',
                         'aha-ds-ml-pipeline')
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.2,
                                                        random_state=0)
    model = KerasRegressor(build_fn=baseline_model,
                           epochs=2,
                           batch_size=3,
                           verbose=1)
    history = model.fit(X_train,
                        y_train,
                        validation_data=(X_test, y_test),
                        callbacks=[telegram_callback])
    #==============================================================================
    # Predict & Evaluation
    #==============================================================================
    prediction = model.predict(X_test)
    score = mean_absolute_error(y_test, prediction)
    if score < 5:
        model.model.save('han_bike_models.h5')
        upload_to_bucket('model/han_bike_models.h5', 'han_bike_models.h5',
                         'aha-ds-ml-pipeline')
    return model
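
Example 1 calls baseline_model but never defines it. A minimal sketch of what such a build_fn could look like (hypothetical, with a placeholder feature count; the real value depends on the ETA dataset):

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

N_FEATURES = 10  # placeholder: number of columns left after dropping 'ETA'

def baseline_model():
    # build_fn is called by KerasRegressor with no arguments,
    # so the input width has to be known here
    model = Sequential([
        Dense(64, activation='relu', input_dim=N_FEATURES),
        Dense(32, activation='relu'),
        Dense(1),  # single linear output for the ETA regression target
    ])
    model.compile(loss='mean_absolute_error', optimizer='adam')
    return model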
Example no. 2
def create_scikit_keras_regressor(X, y):
    # create simple (dummy) Keras DNN model for regression
    batch_size = 500
    epochs = 10
    model_func = create_scikit_keras_model_func(X.shape[1])
    model = KerasRegressor(build_fn=model_func,
                           epochs=epochs,  # 'nb_epoch' was the deprecated Keras 1 spelling
                           batch_size=batch_size,
                           verbose=1)
    model.fit(X, y)
    return model
Example no. 3
def _base_learner(name_model, data_name):
    """Creates base-model object.

  Args:
    name_model: name of the base-model.
    data_name: name of the dataset
  Raises:
    Exception: Not Implemented Error.
  Returns:
    model object.
  """
    if name_model == 'LinearRegression':
        return linear_model.LinearRegression()
    elif name_model == 'Lasso':
        return linear_model.Lasso()
    elif name_model == 'RandomForest':
        return ensemble.RandomForestRegressor()
    elif name_model == 'ElasticNet':
        return linear_model.ElasticNet()
    elif name_model == 'XGBoost':
        return ensemble.GradientBoostingRegressor()
    elif name_model == 'NN_regression':
        if data_name == 'ACIC':
            return KerasRegressor(build_fn=create_nn_regression_acic,
                                  verbose=0)
        elif data_name == 'IHDP':
            return KerasRegressor(build_fn=create_nn_regression_ihdp,
                                  verbose=0)
        else:
            return KerasRegressor(build_fn=create_nn_regression, verbose=0)
    elif name_model == 'MeanDiff':
        return _MeanDiff()
    else:
        if data_name == 'ukb':
            model_config = {}
            model_config['weights'] = 'imagenet'
            model_config['input_shape'] = (587, 587, 3)
            if name_model == 'resnet50':
                model_config['name_base_model'] = 'resnet50'
                return image_model_construction(model_config)
            elif name_model == 'inceptionv3':
                model_config['name_base_model'] = 'inceptionv3'
                return image_model_construction(model_config)
            else:
                model_config['name_base_model'] = 'image_regression'
                return image_model_construction(model_config)

        raise NotImplementedError(
            'Estimator not supported:{}'.format(name_model))
Example no. 4
    def do(self):
        regressor = KerasRegressor(build_fn=self.model)

        grid = GridSearchCV(regressor, param_grid=self.parameters,
                            cv=2)  # i need to solve this cv issue
        # my_callbacks = EarlyStopping(monitor='mse', patience=10, verbose=2)
        return grid
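
self.parameters is not shown in this example. With the legacy wrapper, grid keys may target fit-level settings or build_fn keyword arguments; a hypothetical shape, assuming build_fn takes no extra arguments:

parameters = {
    'epochs': [10, 50],       # consumed by the wrapper's fit
    'batch_size': [16, 32],   # likewise routed to Keras fit
}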
Example no. 5
def fitness(filter1, filter2, filter3, learning_rate):
    n_input = 51 # Number of gold nanocluster classes 
    n_classes = 751 # Total wavelengths in UV-VIS pattern
    EPOCHS = 300
    BATCH_SIZE = 10
    
    # wrap model with KerasRegressor in order to include epochs and batch size
    model = KerasRegressor(build_fn=lambda: cnn_model(n_input, n_classes, filter1,
                                                      filter2, filter3, learning_rate),
                           epochs=EPOCHS,
                           batch_size=BATCH_SIZE,
                           verbose=False)
    
    print("")
    print("Current hyperparams:")
    print(f"filter1: {filter1}")
    print(f"filter2: {filter2}")
    print(f"filter3: {filter3}")
    print(f"learning_rate: {learning_rate}")
    
    # 5 sets of train validation splits 
    kfold = KFold(n_splits = 5, shuffle = True, random_state = 42)
    results = cross_val_score(model, X_train, Y_train, 
                             cv = kfold, 
                             scoring = 'neg_mean_absolute_error',
                             verbose = 1)
    print(results)
    mean_neg_MAE = results.mean()

    K2.clear_session()
    return -mean_neg_MAE  # negate: cross_val_score returned negative MAE, so this is the MAE to minimize
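
The fitness function returns a positive mean MAE, the shape a minimizing optimizer expects. The driver is not shown in the source; a minimal sketch using scikit-optimize's gp_minimize, with purely illustrative bounds:

from skopt import gp_minimize
from skopt.space import Integer, Real
from skopt.utils import use_named_args

# Hypothetical search space; the real bounds are not given in the source.
dimensions = [
    Integer(8, 64, name='filter1'),
    Integer(8, 64, name='filter2'),
    Integer(8, 64, name='filter3'),
    Real(1e-4, 1e-2, prior='log-uniform', name='learning_rate'),
]

@use_named_args(dimensions)
def objective(**params):
    return fitness(**params)  # fitness already returns positive MAE

result = gp_minimize(objective, dimensions, n_calls=20, random_state=42)
print('Best MAE:', result.fun)
print('Best hyperparameters:', result.x)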
Example no. 6
    def set_pipeline(self):
        """defines the pipeline as a class attribute"""
        # preprocessing pipeline
        preproc_pipe = Pipeline([('text_preprocessor', TextProcessor())])

        # instantiate pipeline with sklearn wrapper for keras model
        # (to be able to save it into a .joblib format)
        self.pipeline = Pipeline([('preproc', preproc_pipe),
                                  ('nn_model', KerasRegressor(build_fn=initialize_model))])
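
A hypothetical usage sketch of the intent stated in the comment: because the Keras model is wrapped, the whole pipeline can be persisted like any scikit-learn estimator. trainer, X_train and y_train are placeholder names:

import joblib

trainer.set_pipeline()                  # 'trainer' is a hypothetical instance of the class above
trainer.pipeline.fit(X_train, y_train)  # placeholder training data
joblib.dump(trainer.pipeline, 'nn_pipeline.joblib')
restored = joblib.load('nn_pipeline.joblib')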
Example no. 7
    def grid_search_cv(self):
        """ Performs grid search cross validation on the train data
		"""
        logger.info(
            "\tBeginning grid_search_cv to discover optimal hyperparameters")
        model = KerasRegressor(build_fn=self.compile_model,
                               verbose=self.verbose)
        grid = GridSearchCV(estimator=model,
                            param_grid=self.exploreParams,
                            n_jobs=-1,
                            cv=2)  # return_train_score=True,
        grid_result = grid.fit(self.train_X, self.train_Y)

        bestParameters = grid_result.best_params_
        bestModel = grid_result.best_estimator_.model
        bestModel.summary()
        bestModel.save(self.modelpath, overwrite=True)
        logger.info(
            "\tgrid_search_cv finished, flushing model to disk and saving best parameters to config..."
        )
        logger.info("\tBest parameters: {}".format(bestParameters))

        # Save best found parameters to config file
        config_path = os.path.join(args.configpath, args.configoptname)
        with open(config_path) as infile:
            configOpt = yaml.load(infile, Loader=yaml.FullLoader)
        configOpt['Optimal LSTM Parameters'] = bestParameters
        with open(config_path, 'w') as outfile:
            yaml.dump(configOpt, outfile, default_flow_style=False)
        self.history = bestModel.history.history
        return bestModel
Example no. 8
def kfold_cross_val(df, target_col, bucket_mod, data_path, verbose, n_jobs):
    # evaluate using 10-fold cross validation
    X, y = prep.split_Xy(df, target_col)
    # run estimator
    if target_col == "mem_bin":
        # Y = y.reshape(-1, 1)
        encoder = LabelEncoder()
        y = encoder.fit_transform(y)
        # y_enc = keras.utils.to_categorical(y)
        estimator = KerasClassifier(build_fn=train.memory_classifier,
                                    epochs=60,
                                    batch_size=32,
                                    verbose=verbose)
        kfold = StratifiedKFold(n_splits=10, shuffle=True)
    elif target_col == "memory":
        estimator = KerasRegressor(build_fn=train.memory_regressor,
                                   epochs=60,
                                   batch_size=32,
                                   verbose=verbose)
        kfold = KFold(n_splits=10, shuffle=True)
    elif target_col == "wallclock":
        estimator = KerasRegressor(build_fn=train.wallclock_regressor,
                                   epochs=150,
                                   batch_size=64,
                                   verbose=verbose)
        kfold = KFold(n_splits=10, shuffle=True)
    print("\nStarting KFOLD Cross-Validation...")
    start = time.time()
    results = cross_val_score(estimator, X, y, cv=kfold, n_jobs=n_jobs)
    end = time.time()
    duration = io.proc_time(start, end)
    if target_col == "mem_bin":
        score = np.mean(results)
    else:
        score = np.sqrt(np.abs(np.mean(results)))
    print(f"\nKFOLD scores: {results}\n")
    print(f"\nMean Score: {score}\n")
    print("\nProcess took ", duration)
    kfold_dict = {
        "kfold": {
            "results": list(results),
            "score": score,
            "time": duration
        }
    }
    keys = io.save_to_pickle(kfold_dict, target_col=target_col)
    io.s3_upload(keys, bucket_mod, f"{data_path}/results")
Example no. 9
    def initialize_network(self, network_function):
        """
        Initialize a KerasRegressor object that makes a Keras model compatible with the scikit-learn library.
        :param network_function: any function that returns a compiled Keras model, taking hyperparameters as input
        :return: None
        """

        self._model = KerasRegressor(build_fn=network_function)
        self._is_model_initialized = True

        return
Example no. 10
    def best_model(self):
        model = KerasRegressor(build_fn=self.DNN, verbose=1)
        if self.search_method == 'grid':
            search = GridSearchCV(model, self.hyper, cv=5)
            best_search = search.fit(self.data, self.label, verbose=2)
            best_param = search.best_params_
            return best_search, best_param
        if self.search_method == 'random':
            search = RandomizedSearchCV(model, self.hyper, cv=5)
            best_search = search.fit(self.data, self.label, verbose=2)
            best_param = search.best_params_
            return best_search, best_param
Example no. 11
def best_model_pca8(data, label, search_method):
    model = KerasRegressor(build_fn=DNN_pca8, verbose=1)
    if search_method == 'grid':
        search = GridSearchCV(model, hyper, cv=5)
        best_search = search.fit(data, label, verbose=2)
        best_param = search.best_params_
        return best_search, best_param
    if search_method == 'random':
        search = RandomizedSearchCV(model, hyper, cv=5)
        best_search = search.fit(data, label, verbose=2)
        best_param = search.best_params_
        return best_search, best_param
Example no. 12
def train_model_cnn(epochs, batch, train_data, val_data, train_target,
                    val_target):
    """train KerasRegressor model to predict PHI with 2D CNN"""

    n_dim = train_data.shape

    np.random.seed(7)
    estimator = KerasRegressor(build_fn=createModel.model_cnn,
                               dim=n_dim,
                               epochs=epochs,
                               batch_size=batch,
                               verbose=2)

    cross_validation(estimator, train_data, train_target, val_data, val_target)
Example no. 13
def do_grid_search():
    batch_size = [1, 10, 32, 64]
    epochs = [10, 50, 100]
    model = KerasRegressor(build_fn=design_model)
    param_grid = dict(batch_size=batch_size, epochs=epochs)
    grid = GridSearchCV(estimator=model,
                        param_grid=param_grid,
                        scoring=make_scorer(mean_squared_error,
                                            greater_is_better=False),
                        return_train_score=True)
    grid_result = grid.fit(x_train, y_train, verbose=0)
    print(grid_result)
    print("Best: %f using %s" %
          (grid_result.best_score_, grid_result.best_params_))
Example no. 14
File: main.py Project: krutt/ML
def create_regressor(perceptron_no: int, epoch_no: int = 500):
    """
    Creates and returns regressor based on a MLP with given number of perceptrons
    and trained given number of times.

    :param perceptron_no: number of perceptrons
    :param epoch_no: number of epochs to train ANN MLP
    :return: KerasRegressor object configured to spec
    """
    nn = KerasRegressor(build_fn=create_nn,
                        perceptron_no=perceptron_no,
                        epochs=epoch_no,
                        batch_size=100,
                        verbose=0)

    return nn
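
create_nn itself is not shown. The legacy wrapper forwards extra constructor keywords such as perceptron_no to build_fn, so a compatible sketch (the input width is a placeholder) could be:

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

INPUT_DIM = 8  # placeholder: feature count of the training data

def create_nn(perceptron_no: int):
    # perceptron_no arrives via KerasRegressor's sk_params forwarding
    model = Sequential([
        Dense(perceptron_no, activation='relu', input_dim=INPUT_DIM),
        Dense(1),
    ])
    model.compile(loss='mse', optimizer='adam')
    return model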
Example no. 15
    def nnTrain(self, X, Y):
        # use neural net for training
        seed = 7
        np.random.seed(seed)
        estimators = []
        estimators.append((RapidNN.SCALER, StandardScaler()))
        estimators.append((RapidNN.REGRESSER,
                           KerasRegressor(build_fn=self.initNNModel,
                                          epochs=30,
                                          batch_size=10,
                                          verbose=0)))
        pipeline = Pipeline(estimators)
        kfold = KFold(n_splits=10, shuffle=True, random_state=seed)  # note: defined but not used below
        pipeline.fit(X, Y)
        # scores = pipeline.evaluate(X, Y)
        # print("%s: %.2f%%" % (model.metrics_names[1], scores[1] * 100))
        return pipeline
Example no. 16
def wrap_model():
    """
    Build and wrap keras-TF model for use with scikit-learn API.
    ======================================

    Input:
        None

    Output:
        wrapped_model (KerasRegressor) - Wrapped model for use with scikit-learn API.
    """

    # Build and wrap model.
    wrapped_model = KerasRegressor(build_multilayer_perceptron)

    # Return model
    return wrapped_model
Example no. 17
def DNNRegressor(inputDim):
    '''
    Parameters
    inputDim: int, input dimension, i.e. # of features

    Returns
    model: wrapped DNN model
    '''
    def make_model(layers=[64, 64],
                   optimizer='adam',
                   activation='relu',
                   inputDim=10):
        '''
        Parameters
        layers: list; its length is the # of hidden layers and each element is the # of neurons in that layer
        optimizer: str, tensorflow optimizer
        activation: str, tensorflow activation
        inputDim: int, input dimension, i.e. # of features

        Returns
        model: DNN model
        '''

        model = Sequential()

        for i, neurons in enumerate(layers):
            if not i:
                model.add(
                    Dense(neurons, activation=activation, input_dim=inputDim))
            else:
                model.add(Dense(neurons, activation=activation))
        model.add(Dense(1))

        model.compile(loss='mse', optimizer=optimizer, metrics=['mae'])

        return model

    model = KerasRegressor(build_fn=make_model, inputDim=inputDim, verbose=0)

    return model
Example no. 18
def get_opt_params_by_randomized_search_cv(X_train, y_train):
    # pass in fixed parameters n_input and n_class
    model_keras = KerasRegressor(
        build_fn = build_CNN_base,
        epochs=150, 
        batch_size=12, 
        #callbacks = callbacks,
        verbose=0
    )
    
    # parameter ranges to use in randomized search
    n_filters_opts  = [1, 2, 4, 8, 32, 64]
    n_kernel_opts = list(range(2, 12))
    l2_penalty_opts = [0.0, 0.01, 0.25, 0.50]

    keras_param_options = {
        'n_filters': n_filters_opts,
        'n_kernel': n_kernel_opts,
        'l2_penalty': l2_penalty_opts
    }
    
    # Create a RandomizedSearchCV instance, and then fit.
    rs_keras = RandomizedSearchCV( 
        model_keras, 
        param_distributions = keras_param_options,
        scoring = 'neg_mean_absolute_error',
        n_iter = 10, 
        cv = 5,
        n_jobs = -1,
        verbose = 1
    )
    rs_keras.fit(X_train, y_train)

    print('Best score obtained: {0}'.format(rs_keras.best_score_))
    print('Parameters:')
    for param, value in rs_keras.best_params_.items():
        print('\t{}: {}'.format(param, value))
        
    return rs_keras.best_params_
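
build_CNN_base is not shown in the source. Whatever its body, its keyword arguments must match the keys of keras_param_options, since RandomizedSearchCV forwards the sampled values to build_fn. A minimal compatible sketch with a placeholder input shape:

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv1D, Dense, Flatten
from tensorflow.keras.regularizers import l2

TIMESTEPS = 100  # placeholder: sequence length of X_train

def build_CNN_base(n_filters=8, n_kernel=3, l2_penalty=0.0):
    # parameter names deliberately mirror keras_param_options above
    model = Sequential([
        Conv1D(n_filters, n_kernel, activation='relu',
               kernel_regularizer=l2(l2_penalty),
               input_shape=(TIMESTEPS, 1)),
        Flatten(),
        Dense(1),
    ])
    model.compile(loss='mean_absolute_error', optimizer='adam')
    return model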
Example no. 19
def run_experiment(writer, name, generate_data):
    np.random.seed(SEED)

    data = DataHolder(generate_data(TRAIN_SIZE), generate_data(TEST_SIZE))

    if DUMP_FILES:
        data.dump(name)

    # Define model types to use
    models = [
        svr_grid(),
        RandomForestRegressor(n_estimators=100),
        GradientBoostingRegressor(n_estimators=100,
                                  learning_rate=0.1,
                                  max_depth=10,
                                  random_state=0,
                                  verbose=VERBOSE),
        KerasRegressor(build_fn=neural_network_regression, data=data)
    ]

    for model in models:
        eval_data(writer, name, model, data)
Example no. 20
def make_regressor_pipeline(X: pd.DataFrame) -> Pipeline:
    """
    Make the regressor pipeline with the required preprocessor steps and estimator in the end.

    Parameters
    ----------
    X: pd.DataFrame
        X containing all the required features for training

    Returns
    -------
    regressor_pipeline: Pipeline
        Regressor pipeline with preprocessor and estimator
    """
    numerics = ["int16", "int32", "int64", "float16", "float32", "float64"]
    num_features = list(
        X.dropna(axis=1, how="all").select_dtypes(include=numerics).columns)

    # This example model only uses numeric features and drops the rest
    num_transformer = Pipeline(steps=[("imputer", SimpleImputer(
        strategy="mean")), ("standardize", StandardScaler())])
    preprocessor = ColumnTransformer(transformers=[("num", num_transformer,
                                                    num_features)])

    # create model
    estimator = KerasRegressor(
        build_fn=create_regression_model,
        num_features=len(num_features),
        epochs=200,
        batch_size=8,
        verbose=1,
    )

    # pipeline with preprocessor and estimator bundled
    regressor_pipeline = Pipeline(
        steps=[("preprocessor", preprocessor), ("estimator", estimator)])
    return regressor_pipeline
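
create_regression_model is assumed by this pipeline; the wrapper forwards num_features to it, which is how the network's input width tracks the preprocessor's output. A minimal compatible sketch:

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

def create_regression_model(num_features):
    # num_features arrives via the KerasRegressor(..., num_features=...) kwarg
    model = Sequential([
        Dense(32, activation='relu', input_dim=num_features),
        Dense(1),
    ])
    model.compile(loss='mse', optimizer='adam')
    return model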
Example no. 21
def train_net(X, Y, dil, drop, poolsize, kernel, ep, ba, k, validate, window_size, stride, freq_factor, files):
    cur_model = partial(depthwise_model, shape_X=X.shape, shape_Y=Y.shape, drp=drop, krnl=kernel, dilate=dil, mpool=poolsize)
    model = cur_model()

    if not validate:
        data_path = r'C:\Users\hbkm9\Documents\Projects\CYB\Balint\CYB104\Data'
        X0, Y0, _ = data_proc(data_path, norm_emg,
                            window_size=window_size,  task='Validation', stride=stride, freq_factor=freq_factor)
        #Y0 = normalize(np.array(Y0), axis=1)
        #Y0 = np.expand_dims(Y0[:, 2], 1)
        X0 = np.expand_dims(X0, 1)

        mc = ModelCheckpoint('../Models/best_model.h5', monitor='val_loss', mode='min', verbose=1, save_best_only=True)
        es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=8)
        history = model.fit(X, Y, batch_size=ba, epochs=ep, verbose=1, callbacks=[es, mc], validation_data=(X0, Y0))

        dir_path, ends = incr_dir(r'/Models', 'model_')
        model.save(dir_path+'\\model_' + str(max(ends) + 1) + '.h5')
        import pickle
        with open(dir_path+'\\history_' + str(max(ends) + 1) + r'.pickle', 'wb') as handle:
            pickle.dump(history.history, handle, protocol=pickle.HIGHEST_PROTOCOL)
        file_names = "\n".join(files)
        str_sum = model_summary_to_string(model) + '\n'
        str_sum += kw_summary(dil=dil, drop=drop, poolsize=poolsize, kernel=kernel,
                              epochs=ep, batch=ba, window_size=window_size, stride=stride)
        str_sum += '\n\n' + file_names
        print_to_file(str_sum, dir_path + '\\summary_'+ str(max(ends) + 1) + r'.txt')
        return

    estimator = KerasRegressor(build_fn=cur_model, epochs=ep, batch_size=ba, verbose=2)
    kfold = KFold(n_splits=k)
    scores = cross_val_score(estimator, X, Y, cv=kfold, scoring='neg_mean_squared_error', n_jobs=3)
    return scores, model
Example no. 22
def deep_mlp_model(build_fn, param_dict):
    return KerasRegressor(build_fn=build_fn,
                          batch_size=param_dict.get(BATCH_SIZE),
                          epochs=param_dict.get(EPOCHS),
                          verbose=False)
Example no. 23
        Dense(input_layer_neurons,
              activation='relu',
              input_shape=(input_layer_neurons, )))
    if second_layer_boolean:
        model.add(Dense(neurons_on_first_layer // 2, activation='relu'))  # integer division: Dense units must be int
    model.add(Dense(neurons_on_second_layer, activation='relu'))
    model.add(Dense(neurons_on_chokehold_layer, activation='relu'))
    if second_layer_boolean:
        model.add(Dense(neurons_on_first_layer // 2, activation='relu'))
    model.add(Dense(neurons_on_first_layer, activation='relu'))
    model.add(Dense(input_layer_neurons, activation=None))
    model.compile(loss='mean_squared_error', optimizer='adam', metrics=metrics)
    return model


model = KerasRegressor(build_fn=create_model, verbose=2)
input_layer_neurons = [X_train.shape[1]]
batch_size = [500000]  #, 10000]
epochs = [200]
learn_rate = [0.0001]  #, 0.001]
dropout_rate = [0.0]
weight_constraint = [0]
neurons_on_first_layer = [32]  #, 64]
neurons_on_chokehold_layer = [4]  #, 8]
second_layer_boolean = [False]  #, True] # Is there another layer?
param_grid = dict(batch_size=batch_size,
                  epochs=epochs,
                  dropout_rate=dropout_rate,
                  learn_rate=learn_rate,
                  weight_constraint=weight_constraint,
                  input_layer_neurons=input_layer_neurons,
Example no. 24
    def createModel(self):
        X = self.df[list(self.predictor_list.get(0, tk.END))].to_numpy()
        y = self.df[self.target_list.get(0)].to_numpy().reshape(-1)

        layers = self.no_optimization_choice_var.get()

        learning_rate = self.hyperparameters[4].get()
        momentum = self.hyperparameters[5].get()

        optimizers = {
            "Adam": Adam(learning_rate=learning_rate),
            "SGD": SGD(learning_rate=learning_rate, momentum=momentum),
            "RMSprop": RMSprop(learning_rate=learning_rate, momentum=momentum)
        }

        def base_model():
            model = Sequential()

            for i in range(layers):
                neuron_number = self.neuron_numbers_var[i].get()
                activation = self.activation_var[i].get()
                if i == 0:
                    model.add(
                        Dense(neuron_number,
                              activation=activation,
                              input_dim=X.shape[1]))
                else:
                    model.add(Dense(neuron_number, activation=activation))

            model.add(Dense(1, activation="relu"))
            model.compile(optimizer=optimizers[self.hyperparameters[2].get()],
                          loss=self.hyperparameters[3].get())
            return model

        do_forecast = self.do_forecast_option.get()
        val_option = self.validation_option.get()

        if val_option == 0 or val_option == 1:
            model = base_model()
        elif val_option == 2 or val_option == 3:
            model = KerasRegressor(build_fn=base_model,
                                   epochs=self.hyperparameters[0].get(),
                                   batch_size=self.hyperparameters[1].get())

        if val_option == 0:
            model.fit(X,
                      y,
                      epochs=self.hyperparameters[0].get(),
                      batch_size=self.hyperparameters[1].get())
            if do_forecast == 0:
                pred = model.predict(X).reshape(-1)
                losses = loss(y, pred)[:-1]
                self.y_test = y
                self.pred = pred
                for i, j in enumerate(losses):
                    self.test_metrics_vars[i].set(j)
            self.model = model

        elif val_option == 1:
            X_train, X_test, y_train, y_test = train_test_split(
                X, y, train_size=self.random_percent_var.get() / 100)
            model.fit(X_train,
                      y_train,
                      epochs=self.hyperparameters[0].get(),
                      batch_size=self.hyperparameters[1].get())
            if do_forecast == 0:
                pred = model.predict(X_test).reshape(-1)
                losses = loss(y_test, pred)[:-1]
                self.y_test = y_test.reshape(-1)
                self.pred = pred
                for i, j in enumerate(losses):
                    self.test_metrics_vars[i].set(j)
            self.model = model

        elif val_option == 2:
            cvs = cross_validate(model,
                                 X,
                                 y,
                                 cv=self.cross_val_var.get(),
                                 scoring=skloss)
            for i, j in enumerate(list(cvs.values())[2:]):
                self.test_metrics_vars[i].set(j.mean())

        elif val_option == 3:
            cvs = cross_validate(model,
                                 X,
                                 y,
                                 cv=X.shape[0] - 1,
                                 scoring=skloss)
            for i, j in enumerate(list(cvs.values())[2:]):
                self.test_metrics_vars[i].set(j.mean())
Example no. 25
              input_dim=87))
    regressor.add(
        Dense(units=66, kernel_initializer='normal', activation='relu'))
    regressor.add(
        Dense(units=66, kernel_initializer='normal', activation='relu'))
    regressor.add(
        Dense(units=66, kernel_initializer='normal', activation='relu'))
    regressor.add(
        Dense(units=66, kernel_initializer='normal', activation='relu'))
    regressor.add(
        Dense(units=1, kernel_initializer='normal', activation='relu'))
    regressor.compile(optimizer='sgd', loss='mean_squared_error')
    return regressor


regressor = KerasRegressor(build_fn=build_regressor, batch_size=10, epochs=100)
regressor.fit(X_train, y_train)
preds = regressor.predict(X_test)
rmse = np.sqrt(mean_squared_error(y_test, preds))
print(rmse)

#XGBoost
import xgboost as xgb

data_dmatrix = xgb.DMatrix(data=X, label=y)
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=0.2,
                                                    random_state=123)
xg_reg = xgb.XGBRegressor(objective='reg:squarederror',  # 'reg:linear' is a deprecated alias
                          colsample_bytree=0.3,
Example no. 26
# training loop
for train_index, valid_index in kfold.split(x_train1):

    # print(train_index, len(train_index))    #2777702
    # print(valid_index, len(valid_index))    #694426

    x_train = x_train1[train_index]
    x_valid = x_train1[valid_index]
    y_train = y_train1[train_index]
    y_valid = y_train1[valid_index]

    x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], 1)
    x_valid = x_valid.reshape(x_valid.shape[0], x_valid.shape[1], 1)

    #2. build the model
    model2 = KerasRegressor(build_fn=build_model, verbose=1)  #, epochs = 2)
    search = GridSearchCV(model2, hyperparameters, cv=kfold)

    start_time = timeit.default_timer()
    #3. train
    modelpath = f'./data/hdf5/1_conv1d_kfold_{num}.hdf5'
    er, mo, lr = callbacks(modelpath)
    search.fit(x_train,
               y_train,
               verbose=1,
               epochs=200,
               validation_data=(x_valid, y_valid),
               callbacks=[er, lr, mo])

    finish_time = timeit.default_timer()
    time = round(finish_time - start_time, 2)
Example no. 27
    dropout = [0.1, 0.2, 0.3, 0.4]
    activation = ['relu', 'selu', 'elu', 'swish']
    batches = [10, 20, 30, 40, 50]
    optimizer = ['adam', 'nadam', 'rmsprop', 'adadelta']
    return {
        "node": node,
        "drop": dropout,
        "optimizer": optimizer,
        "batch_size": batches,
        "activation": activation
    }


parameters = param()

model = KerasRegressor(build_fn=dnn_model, verbose=1)

search = RandomizedSearchCV(model, parameters, cv=3)
# search = GridSearchCV(model, parameters, cv=3)

filepath = '../data/modelcheckpoint/k62_iris_{epoch:02d}-{val_loss:.4f}.hdf5'
es = EarlyStopping(monitor='val_loss', patience=10, mode='auto')
cp = ModelCheckpoint(filepath=filepath,
                     monitor='val_loss',
                     save_best_only=True,
                     mode='auto')
lr = ReduceLROnPlateau(monitor='val_loss', patience=3, factor=0.5, mode='auto')
search.fit(x_train,
           y_train,
           validation_split=0.2,
           verbose=1,
Example no. 28
        'activation1': activations,
        'activation2': activations,
        'activation3': activations,
        'a': nodes,
        'b': nodes,
        'c': nodes
    }


hyperparameters = create_hyperparameters()
model2 = build_model

from tensorflow.keras.wrappers.scikit_learn import KerasClassifier, KerasRegressor

# epochs, validation_split, callbacks, etc. can be set here
model2 = KerasRegressor(build_fn=build_model,
                        verbose=1)  #,epochs= 3,validation_split = 0.2)

search = GridSearchCV(model2, hyperparameters, cv=3)

lr = ReduceLROnPlateau(monitor='val_loss', patience=25, factor=0.5, verbose=1)
modelpath = '../Data/modelCheckPoint/k62_boston_{epoch:02d}-{val_loss:.4f}.hdf5'
mc = ModelCheckpoint(filepath=modelpath,
                     monitor='val_loss',
                     save_best_only=True,
                     mode='auto')
es = EarlyStopping(monitor='val_loss', patience=50, mode='auto')

# epochs, validation_split, callbacks also work in fit() (fit takes precedence)
search.fit(x_train,
           y_train,
           verbose=1,
Example no. 29
    # hidden layers
    for hidden_layer_number in range(1, number_of_hidden_layers):
        model.add(Dense(number_of_neurons, activation='relu'))

    # output layer
    model.add(Dense(1))

    model.compile(optimizer='adam', loss='mse')

    return model


# In[13]:

tuned_model = KerasRegressor(build_fn=build_model)

# In[16]:

from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split

tuned_model = KerasRegressor(build_fn=build_model)

# possible values of parameters - we want to find the best set of them

params = {
    'number_of_hidden_layers': [2, 3, 4, 5],
    'number_of_neurons': [5, 15, 25]
}
Example no. 30
    model.add(Dense(3500, input_dim=X_train.shape[1], activation='relu'))
    model.add(Dense(3500, activation='relu'))
    model.add(Dense(3500, activation='relu'))
    model.add(Dense(3500, activation='relu'))
    model.add(Dense(3500, activation='relu'))
    model.add(Dense(3500, activation='relu'))
    model.add(Dense(3500, activation='relu'))
    model.add(Dense(len(y_train[0]), activation='linear'))

    # compile the keras model
    model.compile(loss='mse', optimizer='adam')
    return model


estimator = KerasRegressor(build_fn=baseline_model,
                           epochs=20,  # 'nb_epoch' was the deprecated Keras 1 spelling
                           batch_size=1000,
                           verbose=1)

# fit the keras model on the dataset
estimator.fit(X_train, y_train, epochs=20, batch_size=1000, verbose=1)

# make predictions with the model
predictions = estimator.predict(X_test)

top_20_count = 0
correct_count = 0
pred_list = np.zeros((len(predictions), 2))
for i in range(len(predictions)):
    pred_list[i][0] = i
    pred_list_temp = np.zeros((len(y_test), 2))