Example #1
0
def load_data():
    """Load the example dataset from ``data/data.txt``.

    Returns:
        tuple: ``(X, y)`` where ``X`` contains every column but the last
        and ``y`` is the last column kept 2-D with shape ``(n, 1)`` (the
        ``-1:`` slice preserves the trailing axis for Keras targets).
    """
    raw = np.loadtxt("data/data.txt")
    features = raw[:, :-1]
    targets = raw[:, -1:]
    return features, targets


if __name__ == "__main__":

    X, y = load_data()

    # One-dimensional RBF regression: a 10-unit RBF layer whose centers are
    # sampled from the data, followed by a single linear output unit.
    rbf_hidden = RBFLayer(10,
                          initializer=InitCentersRandom(X),
                          betas=2.0,
                          input_shape=(1,))

    net = Sequential()
    net.add(rbf_hidden)
    net.add(Dense(1))

    net.compile(loss='mean_squared_error',
                optimizer=RMSprop())

    net.fit(X, y,
            batch_size=50,
            epochs=2000,
            verbose=1)

    y_pred = net.predict(X)

    # Inspect the learned centers and betas of the RBF layer.
    print(rbf_hidden.get_weights())
Example #2
0
    )
    # Label this run (realization j + 1) in the experiment tracker.
    experiment.set_name("REALIZACAO_{:02d}".format(j + 1))

    # K-fold cross-validation over the training indexes, grid-searching the
    # number of RBF neurons (GRID_NEURON) and the beta value (GRID_B).
    slices = KFold(n_splits=K_FOLD, shuffle=True)
    oData = Data(len(oDataSet.labelsNames), 31, samples=50)
    # Split 80/20 using per-class sample counts -- TODO confirm semantics
    # of random_training_test_by_percent (project helper, not visible here).
    oData.random_training_test_by_percent(
        np.unique(classes, return_counts=True)[1], 0.8)
    # grid_result[g1, g2, k] holds one score per grid cell and fold; it is
    # filled later, outside this fragment.
    grid_result = np.zeros((len(GRID_NEURON), len(GRID_B), K_FOLD))
    for g1, g_param in enumerate(GRID_NEURON):
        for g2, g2_param in enumerate(GRID_B):
            k_slice = 0
            for train, test in slices.split(oData.Training_indexes):
                # Fresh RBF network per (neurons, beta, fold): centers are
                # sampled from this fold's training subset.
                model = Sequential()
                rbflayer = RBFLayer(
                    g_param,
                    initializer=InitCentersRandom(
                        oDataSet.attributes[oData.Training_indexes[train]]),
                    betas=g2_param,
                    input_shape=(base.shape[1], ))
                model.add(rbflayer)
                model.add(Dense(len(lb.classes_), activation='sigmoid'))
                model.compile(loss='categorical_crossentropy',
                              optimizer=_OPTIMIZER)
                # Targets are one-hot encoded via the LabelBinarizer `lb`.
                model.fit(oDataSet.attributes[oData.Training_indexes[train]],
                          lb.transform(
                              oDataSet.labels[oData.Training_indexes[train]]),
                          batch_size=1,
                          epochs=epochs,
                          verbose=0)

                y_pred = model.predict(
                    oDataSet.attributes[oData.Training_indexes[test]]).argmax(
Example #3
0
    # Standardize the targets with the scaler fitted earlier in this
    # function (NOTE(review): the start of the function is not visible).
    y_train = scaler.transform(y_train)
    y_val = scaler.transform(y_val)

    return x_train, y_train, x_val, y_val, test, scaler, y_val_nostandard, y_train_nostandard


# Get Data
# Paths to the Cajamar Salesforce predictive-modelling dataset splits.
path_train = '../dataset_cajamar/Dataset_Salesforce_Predictive_Modelling_TRAIN.txt'
path_test = '../dataset_cajamar/Dataset_Salesforce_Predictive_Modelling_TEST.txt'

# Load and preprocess train/validation/test splits (see data() above).
x_train, y_train, x_val, y_val, test, scaler, y_val_nostandard, y_train_nostandard = data(
    path_train, path_test)

# RBF front-end: 10 units with centers sampled from the training set over
# 76 input features, followed by a dense ReLU block and one ReLU output.
model = Sequential()
rbflayer = RBFLayer(10,
                    initializer=InitCentersRandom(x_train),
                    betas=2.0,
                    input_shape=(76, ))
model.add(rbflayer)

model.add(Dense(512))
# BN/GN are presumably BatchNormalization and GaussianNoise -- TODO confirm
# the import aliases at the top of the file.
model.add(BN())
model.add(GN(0.3))
model.add(Activation('relu'))

model.add(Dense(1))
model.add(Activation('relu'))

# MAPE loss with MSE reported as an auxiliary metric.
model.compile(loss='mape', optimizer=RMSprop(), metrics=['mse'])

# NOTE(review): this fit() call is truncated in this fragment.
model.fit(x_train,
          y_train,
# Add a timestep axis: the RBF layer below is declared with
# input_shape=(1, lookback), i.e. (samples, timesteps=1, features).
X_test = np.reshape(X_test, (X_test.shape[0], 1, X_test.shape[1]))

print("AFTER RESHAPE train_sz: ", X_train.shape, "\ntest_sz: ", X_test.shape, "\nall_sz: ", X_all.shape)


print("X_train after transform \n", X_train)
print("reshaped shape 1 Xtrain ", X_train.shape, "X_trrain 941 element ", X_train[940])
print("y_train with size ", y_train.shape)

############ Building the RBF ############
# Initialising the RBF
regressor = Sequential()

# Adding the input layer and the first layer and Drop out Regularization
# NOTE(review): centers are initialized from X_train[0] (a single sample),
# not the full training set -- confirm this is intended.
regressor.add(
    RBFLayer(60, input_dim=lookback, initializer=InitCentersRandom(X_train[0]), betas=2.0, input_shape=(1, lookback)))
regressor.add(Dropout(.2))

# Adding the output layer
regressor.add(Dense(units=1, kernel_initializer='uniform', activation='linear'))

# Compiling the RBF
regressor.compile(optimizer='adam', loss='mean_squared_error')
regressor.summary()
# Fitting the RBF to the Training set
# batch_size=1 with shuffle=False preserves the time ordering of samples.
regressor.fit(X_train, y_train, batch_size=1, epochs=5, shuffle=False)

############ Save & load Trained Model ############
# Save Trained Model
regressor.save('TICKER-RBF.h5')
def add_rbf_layer(model, betas, X_train, Y_train, X_test, Y_test):
    """Create a new model as a copy of ``model`` plus an RBF network head.

    The existing layers are extended with a 300-unit ``RBFLayer`` and a
    bias-free softmax ``Dense(10)`` head, trained on ``(X_train, Y_train)``
    and evaluated on ``(X_test, Y_test)``.  The 30 RBF units that most
    often appear among the 50 largest outgoing dense weights per class are
    then used to seed a second, 30-unit copy, which is trained and
    evaluated the same way.  The first (300-unit) model is returned.

    Fix vs. original: removed three leftover ``pdb.set_trace()`` debugger
    breakpoints (they halted every run) and dead commented-out code.

    Args:
        model: trained Keras ``Sequential`` model whose layers are reused
            (shared, not deep-copied) by both new models.
        betas: initial beta value(s) for the RBF layers.
        X_train, Y_train: training inputs and one-hot targets (10 classes).
        X_test, Y_test: held-out data for the accuracy report.

    Returns:
        The extended 300-unit-RBF Keras model.
    """
    from collections import Counter

    # TF1-style session so layer variables (betas/centers) can be read and
    # assigned directly through sess.run().
    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())

    # Share the original layers between the two new containers.
    newmodel = Sequential()
    copymodel = Sequential()
    for layer in model.layers:
        newmodel.add(layer)
        copymodel.add(layer)

    rbflayer = RBFLayer(300, betas=betas)

    newmodel.add(rbflayer)
    newmodel.add(Dense(10, use_bias=False, name="dense_rbf"))
    newmodel.add(Activation('softmax', name="Activation_rbf"))

    newmodel.compile(loss='categorical_crossentropy',
                     optimizer=RMSprop(),
                     metrics=['acc'])

    newmodel.summary()
    rbf = newmodel.get_layer(index=-3)  # the RBF layer just added

    newmodel.fit(X_train, Y_train, batch_size=128, epochs=3, verbose=1)

    print("Betas and centers after training:")
    new_betas = sess.run(rbf.betas)
    new_centers = sess.run(rbf.centers)
    print(new_betas)
    print(new_centers)

    # Rank RBF units by how often each lands among the 50 strongest
    # outgoing weights of each of the 10 classes.
    drbf = newmodel.get_layer(name="dense_rbf")
    trained_weights = sess.run(drbf.weights)[0].T  # rows: classes
    important_weights = Counter()
    for i in range(10):
        for unit in np.argpartition(trained_weights[i], -50)[-50:]:
            important_weights[unit] += 1

    # Gather betas and centers of the 30 most frequently important units.
    top_30_units = important_weights.most_common(30)
    tb = np.array(new_betas[top_30_units[0][0]])
    tc = np.array([new_centers[top_30_units[0][0]]])
    for unit, _count in top_30_units[1:]:
        tb = np.append(tb, new_betas[unit])
        tc = np.append(tc, [new_centers[unit]], axis=0)

    Y_pred = newmodel.predict(X_test)
    # NOTE(review): predict() yields class probabilities while
    # accuracy_score normally expects label vectors -- confirm the
    # Y_pred/Y_test encoding matches.
    print("Test Accuracy: ", accuracy_score(Y_pred, Y_test))

    # Build the reduced model and seed its RBF layer with the chosen units.
    rbflayer = RBFLayer(30, betas=betas)
    copymodel.add(rbflayer)
    copymodel.add(Dense(10, use_bias=False, name="dense_rbf"))
    copymodel.add(Activation('softmax', name="Activation_rbf"))

    sess.run(copymodel.layers[-3].betas.assign(tb))
    sess.run(copymodel.layers[-3].centers.assign(tc))

    copymodel.compile(loss='categorical_crossentropy',
                      optimizer=RMSprop(),
                      metrics=['acc'])

    copymodel.summary()
    copymodel.fit(X_train, Y_train, batch_size=128, epochs=3, verbose=1)
    Y_pred = copymodel.predict(X_test)
    print(accuracy_score(Y_pred, Y_test))

    return newmodel
Example #6
0
    
    # NOTE(review): interior of a larger function -- its signature (and the
    # meaning of comodity_numb) is not visible in this fragment.
    # Load the commodity series; drop the last comodity_numb rows, which
    # presumably lack a lagged target -- TODO confirm.
    scaler, data_bm = load_data(comodity_numb)
    data_bm_lag = data_bm.iloc[:0-comodity_numb,:]

    # First comodity_numb columns are features, the remainder are targets.
    X = data_bm_lag.iloc[:, :comodity_numb]
    y = data_bm_lag.iloc[:, comodity_numb:]

    X_train, X_test, y_train, y_test = split(X, y)

    # get time
    for i in range(2,3):
        start = time.time()

        # 2-unit RBF layer with KMeans-initialized centers; the single
        # linear output matches the target array width.
        model = Sequential()
        rbflayer = RBFLayer(2,
                            InitCentersKMeans(X_train),
                            input_shape=(1,))

        model.add(rbflayer)
        model.add(Dense(1)) # sesuai jumlah array target

        model.compile(loss='mean_squared_error',
                      optimizer=RMSprop(),
                      metrics=['accuracy', 'mse', 'mae'])

        history = model.fit(X_train, y_train,
                            batch_size=50,
                            epochs=200,
                            verbose=1)

        y_pred = model.predict(X_test)
Example #7
0
    # NOTE(review): tail of a load_data() whose start is outside this
    # fragment. Standardize the features column-wise.
    mean, std = np.mean(X, axis=0, keepdims=True), np.std(X,
                                                          axis=0,
                                                          keepdims=True)
    X = (X - mean) / std
    # Last column holds labels in {-1, +1}; map them to {0, 1}.
    y = data[:, -1]
    y = (y + 1) / 2
    return X, y


if __name__ == "__main__":

    X, y = load_data()

    # Binary classifier: 20 RBF units over the standardized features,
    # followed by a single bias-free sigmoid output.
    model = Sequential()
    rbflayer = RBFLayer(20,
                        initializer=InitCentersRandom(X),
                        betas=1.0,
                        input_shape=(X.shape[1], ))
    model.add(rbflayer)
    model.add(Dense(1, activation='sigmoid', use_bias=False))

    model.compile(loss='binary_crossentropy', optimizer=RMSprop(lr=0.001))

    model.fit(X, y, batch_size=50, epochs=2000, verbose=1)
    # model.save("some_fency_file_name.h5")

    y_pred = model.predict(X)

    # print(rbflayer.get_weights())

    # Flatten (n, 1) predictions to (n,) for the evaluation that follows
    # (continues past this fragment).
    total_num = X.shape[0]
    y_pred = np.squeeze(y_pred)