Ejemplo n.º 1
0
def baseline_model():
    """Build, train and save a small RBF regression network.

    Relies on module-level ``Xtrain``/``ytrain`` (6-feature training data)
    being defined before the call.

    Returns:
        The trained, compiled Keras ``Sequential`` model.  The model is
        also saved to ``model-DNNGP.h5`` as a side effect.
    """
    model = Sequential()

    # RBF input layer: 30 units, centers sampled at random from the
    # training data.
    rbflayer = RBFLayer(30,
                        initializer=InitCentersRandom(Xtrain),
                        betas=1.0,
                        input_shape=(6, ))
    model.add(rbflayer)

    # Fully-connected head tapering 50 -> 25 -> 12 -> 1 (linear output
    # for regression).
    model.add(
        Dense(50, input_dim=30, kernel_initializer='normal',
              activation='relu'))
    model.add(Dense(25, kernel_initializer='normal', activation='relu'))
    model.add(Dense(12, kernel_initializer='normal', activation='relu'))
    model.add(Dense(1, kernel_initializer='normal'))

    model.compile(loss='mean_squared_error',
                  optimizer='adam',
                  metrics=["mean_squared_error"])

    # Training
    model.fit(Xtrain, ytrain, batch_size=10, epochs=10, verbose=0)

    # NOTE(review): the original evaluated on Xtest/ytest and discarded
    # the result; that dead evaluation has been removed.
    model.save("model-DNNGP.h5")
    return model
Ejemplo n.º 2
0
def train(X_train, y_train, epochs = 50):
    """Fit a single-feature RBF regressor.

    Both arrays are reshaped to column vectors ``(n_samples, 1)`` before
    training.  The network is a 500-unit RBF layer (centers sampled from
    the training data), a 20% dropout layer, and one sigmoid output unit,
    optimized with Adam on mean squared error.

    Returns:
        The fitted Keras ``Sequential`` model.
    """
    n_samples = X_train.shape[0]
    X_train = np.reshape(X_train, (n_samples, 1))
    y_train = np.reshape(y_train, (n_samples, 1))

    # RBF layer + dropout regularization, then a single sigmoid output.
    regressor = Sequential()
    regressor.add(
        RBFLayer(500,
                 initializer=InitCentersRandom(X_train),
                 betas=2.0,
                 input_shape=(1,)))
    regressor.add(Dropout(.2))
    regressor.add(
        Dense(units=1, kernel_initializer='uniform', activation='sigmoid'))

    regressor.compile(optimizer='adam', loss='mean_squared_error')
    regressor.fit(X_train, y_train, batch_size=32, epochs=epochs)

    return regressor
def RBFNN(X_train, X_test, y_train, y_test):
    """Train a minimal RBF network and run it on the test data.

    Returns:
        (result1, y_pred): the prediction for the single row selected by
        ``getRow(X_test, y_test)``, and the predictions for all of
        ``X_test``.
    """
    n_features = len(X_train[0])

    net = Sequential()
    net.add(
        RBFLayer(10,
                 initializer=InitCentersRandom(X_train),
                 betas=2.0,
                 input_shape=(n_features, )))
    net.add(Dense(1))
    net.compile(loss='mean_squared_error', optimizer=RMSprop())

    net.fit(X_train, y_train, batch_size=50, epochs=2000, verbose=0)

    row = getRow(X_test, y_test)
    result1 = net.predict(row)
    y_pred = net.predict(X_test)
    return result1, y_pred
Ejemplo n.º 4
0
def create_network(train_set, trainingConfig):
    """Build an RBF classifier over 784-dimensional inputs (10 classes).

    The first layer is an RBF layer whose centers come either from
    k-means (``trainingConfig.use_kmeans``) or from random training
    samples; its width equals the number of centers in both cases.
    Optional Dropout+Dense hidden layers follow, then a 10-way output.

    Returns:
        (model, rbflayer): the compiled Keras model and its RBF layer.
    """
    model = Sequential()

    # Create the RBF layer's center initializer; the layer's output size
    # always equals the number of centers.
    if trainingConfig.use_kmeans:
        initializer = InitCentersKMeans(train_set, trainingConfig.k_num)
        layer_exit_num = trainingConfig.k_num
    else:
        initializer = InitCentersRandom(train_set,
                                        trainingConfig.random_samples_num)
        layer_exit_num = trainingConfig.random_samples_num

    rbflayer = RBFLayer(layer_exit_num,
                        initializer,
                        betas=trainingConfig.betas,
                        input_shape=(784, ))

    # First layer is the RBF layer
    model.add(rbflayer)

    # Hidden layers: Dense combined with Dropout.  Their width matches
    # the RBF layer's output, so reuse layer_exit_num instead of
    # re-deriving it from the config (the original duplicated the
    # use_kmeans branch here verbatim).  range() handles the zero case,
    # so no explicit guard is needed.
    for _ in range(trainingConfig.hidden_layers_num):
        model.add(Dropout(trainingConfig.dropoutRate))
        model.add(
            Dense(units=layer_exit_num,
                  activation=trainingConfig.hidden_layer_act_func))

    # Last classification layer; output dim is 10 (10 possible classes).
    model.add(Dense(units=10, activation=trainingConfig.last_layer_act_func))

    model.summary()

    model.compile(
        loss='categorical_crossentropy',
        optimizer=RMSprop(),  # Used for multiclass problems
        metrics=['accuracy'
                 ])  # Accuracy because the problem solved is classification

    return (model, rbflayer)
# Debug output: inspect the transformed training data before building the net.
print("X_train after transform \n", X_train)
#print("reshaped shape 1 Xtrain ", X_train.shape, "X_trrain 941 element ", X_train[940])
print("y_train with size ", y_train.shape)

############ Building the RBF ############
# Initialising the RBF network
regressor = Sequential()

# Adding the input layer (the RBF layer) and drop-out regularization.
# NOTE (translated): instead of X_train[0], X_lookback could also be passed
# to InitCentersRandom.
# betas = 2.0
regressor.add(
    RBFLayer(units,
             input_dim=lookback,
             initializer=InitCentersRandom(X_train[0]),
             betas=1.0,
             input_shape=(1, lookback)))
regressor.add(Dropout(.2))

# Adding the 2nd hidden layer (alternatives kept for reference, disabled)
#regressor.add(LSTM(10, input_shape=(1, lookback)))
#regressor.add( RBFLayer(50, initializer=InitCentersRandom(X_all_2nd_layer), betas=2.0, input_shape=(1, units)))
#regressor.add(Dropout(.2))
#regressor.add(Dense(units=50, kernel_initializer='uniform', activation='relu'))
#regressor.add(Dropout(.2))

# Adding the output layer: one linear unit per forecast step.
regressor.add(
    Dense(units=step_size, kernel_initializer='uniform', activation='linear'))
Ejemplo n.º 6
0

def load_data():
    """Load the sample dataset from ``data/data.txt``.

    Returns:
        (X, y): the feature matrix (every column except the last) and the
        target column, kept 2-D with shape ``(n_samples, 1)``.
    """
    table = np.loadtxt("data/data.txt")
    features = table[:, :-1]
    target = table[:, -1:]
    return features, target

if __name__ == "__main__":

    # Fit a tiny RBF regressor on the sample 1-D dataset, then predict
    # back on the training inputs.
    X, y = load_data()

    model = Sequential()
    rbflayer = RBFLayer(
        10,
        initializer=InitCentersRandom(X),
        betas=2.0,
        input_shape=(1,))
    model.add(rbflayer)
    model.add(Dense(1))

    model.compile(loss='mean_squared_error', optimizer=RMSprop())

    model.fit(X, y, batch_size=50, epochs=2000, verbose=1)

    y_pred = model.predict(X)
Ejemplo n.º 7
0
for j in range(10):
    slices = KFold(n_splits=K_FOLD, shuffle=True)
    oData = Data(len(oDataSet.labelsNames), 31, samples=50)
    oData.random_training_test_by_percent(
        np.unique(classes, return_counts=True)[1], 0.8)
    grid_result = np.zeros((len(GRID_NEURON), len(GRID_B), K_FOLD))
    for g1, g_param in enumerate(GRID_NEURON):
        for g2, g2_param in enumerate(GRID_B):
            k_slice = 0
            for train, test in slices.split(oData.Training_indexes):
                K.clear_session()

                model = Sequential()
                rbflayer = RBFLayer(
                    g_param,
                    initializer=InitCentersRandom(
                        oDataSet.attributes[oData.Training_indexes[train]]),
                    betas=g2_param,
                    input_shape=(base.shape[1], ))
                model.add(rbflayer)
                model.add(
                    Dense(len(oDataSet.labelsNames), activation='sigmoid'))
                model.compile(loss='categorical_crossentropy',
                              optimizer=_OPTIMIZER)
                model.fit(oDataSet.attributes[oData.Training_indexes[train]],
                          binarizer(
                              oDataSet.labels[oData.Training_indexes[train]]),
                          batch_size=50,
                          epochs=epochs,
                          verbose=0)

                y_pred = model.predict(
Ejemplo n.º 8
0
    outputlayer = Dense(1,
                        kernel_initializer=InitFromFile("weights.npy"),
                        use_bias=False)
    print("output layer created")

    model2 = Sequential()
    model2.add(rbflayer)
    model2.add(outputlayer)

    res2 = model2.predict(X).squeeze()
    print(f"MSE: {MSE(y, res2):.4f}")
    print("Same responses: ", all(res == res2))


if __name__ == "__main__":

    X, y = load_data()

    # Exercise the RBF network once per supported center initializer:
    # random training samples, k-means centroids, and centers restored
    # from a previous run's "centers.npy".
    for initializer in (InitCentersRandom(X),
                        InitCentersKMeans(X),
                        InitFromFile("centers.npy")):
        test(X, y, initializer)

    # Finally, check the InitFromFile initializer end to end.
    test_init_from_file(X, y)
Ejemplo n.º 9
0
Archivo: rbf.py Proyecto: scorrea92/LSC
    y_train = scaler.transform(y_train)
    y_val = scaler.transform(y_val)

    return x_train, y_train, x_val, y_val, test, scaler, y_val_nostandard, y_train_nostandard


# Get Data: paths to the Cajamar Salesforce predictive-modelling splits.
path_train = '../dataset_cajamar/Dataset_Salesforce_Predictive_Modelling_TRAIN.txt'
path_test = '../dataset_cajamar/Dataset_Salesforce_Predictive_Modelling_TEST.txt'

# data() is defined above (partially out of view); it returns scaled train/val
# splits plus the scaler and the un-standardized targets.
x_train, y_train, x_val, y_val, test, scaler, y_val_nostandard, y_train_nostandard = data(
    path_train, path_test)

# RBF input layer over the 76 input features, centers sampled from x_train.
model = Sequential()
rbflayer = RBFLayer(10,
                    initializer=InitCentersRandom(x_train),
                    betas=2.0,
                    input_shape=(76, ))
model.add(rbflayer)

# Dense head with batch norm, Gaussian noise and ReLU.
model.add(Dense(512))
model.add(BN())
model.add(GN(0.3))
model.add(Activation('relu'))

# Single-unit regression output (ReLU keeps predictions non-negative).
model.add(Dense(1))
model.add(Activation('relu'))

# MAPE loss, MSE tracked as an additional metric.
model.compile(loss='mape', optimizer=RMSprop(), metrics=['mse'])

model.fit(x_train,
Ejemplo n.º 10
0
# Reshape the test set to (n_samples, 1, n_features) to match the model input.
X_test = np.reshape(X_test, (X_test.shape[0], 1, X_test.shape[1]))

print("AFTER RESHAPE train_sz: ", X_train.shape, "\ntest_sz: ", X_test.shape, "\nall_sz: ", X_all.shape)


# Debug output: inspect the transformed training data before building the net.
print("X_train after transform \n", X_train)
print("reshaped shape 1 Xtrain ", X_train.shape, "X_trrain 941 element ", X_train[940])
print("y_train with size ", y_train.shape)

############ Building the RBF ############
# Initialising the RBF network
regressor = Sequential()

# Adding the input layer (60-unit RBF layer) and drop-out regularization.
# Centers are sampled from the first training sample (X_train[0]).
regressor.add(
    RBFLayer(60, input_dim=lookback, initializer=InitCentersRandom(X_train[0]), betas=2.0, input_shape=(1, lookback)))
regressor.add(Dropout(.2))

# Adding the output layer: one linear regression unit.
regressor.add(Dense(units=1, kernel_initializer='uniform', activation='linear'))

# Compiling the RBF with Adam on mean squared error.
regressor.compile(optimizer='adam', loss='mean_squared_error')
regressor.summary()
# Fitting the RBF to the training set; shuffle=False preserves time order.
regressor.fit(X_train, y_train, batch_size=1, epochs=5, shuffle=False)

############ Save & load Trained Model ############
# Save the trained model to disk in HDF5 format.
regressor.save('TICKER-RBF.h5')