Example No. 1
def direct(dataset, testnum, featurenum):
    dataset = dataset[np.newaxis, :]
    x_train_one, x_test, y_train_one, y_test = generate_data(
        dataset, testnum, featurenum)
    # Direct multi-step strategy: a second model is trained on the same
    # inputs with targets shifted one step, so it predicts two steps ahead.
    x_train_two = x_train_one[0:x_train_one.shape[0] - 1, :]
    y_train_two = y_train_one[1:y_train_one.shape[0]]
    # Both models see the even-indexed test rows; model one targets the next
    # value, model two the value after that.
    x_test_two = x_test[::2, :]
    y_test_two = y_test[1::2]
    x_test_one = x_test[::2, :]
    y_test_one = y_test[::2]
    min_max_scaler1 = MinMaxScaler()
    x_train_one = min_max_scaler1.fit_transform(x_train_one)
    x_test_one = min_max_scaler1.transform(x_test_one)
    min_max_scaler2 = MinMaxScaler()
    x_train_two = min_max_scaler2.fit_transform(x_train_two)
    x_test_two = min_max_scaler2.transform(x_test_two)
    dbn1 = dbn.DBN(x_train=x_train_one,
                   y_train=y_train_one,
                   x_test=x_test_one,
                   y_test=y_test_one,
                   hidden_layer=[250],
                   learning_rate_rbm=0.0005,
                   batch_size_rbm=150,
                   n_epochs_rbm=200,
                   verbose_rbm=1,
                   random_seed_rbm=500,
                   activation_function_nn='tanh',
                   learning_rate_nn=0.005,
                   batch_size_nn=200,
                   n_epochs_nn=300,
                   verbose_nn=1,
                   decay_rate=0)
    dbn1.pretraining()
    dbn1.finetuning()
    dataset_pred_one = dbn1.result[:, 0]
    dbn2 = dbn.DBN(x_train=x_train_two,
                   y_train=y_train_two,
                   x_test=x_test_two,
                   y_test=y_test_two,
                   hidden_layer=[250],
                   learning_rate_rbm=0.0005,
                   batch_size_rbm=150,
                   n_epochs_rbm=200,
                   verbose_rbm=1,
                   random_seed_rbm=500,
                   activation_function_nn='tanh',
                   learning_rate_nn=0.005,
                   batch_size_nn=200,
                   n_epochs_nn=300,
                   verbose_nn=1,
                   decay_rate=0)
    dbn2.pretraining()
    dbn2.finetuning()
    dataset_pred_two = dbn2.result[:, 0]
    # Interleave the one-step and two-step forecasts so the returned list
    # follows the original time order of the test window.
    dataset_pred = []
    for i in range(len(dataset_pred_two)):
        dataset_pred.append(dataset_pred_one[i])
        dataset_pred.append(dataset_pred_two[i])
    return dataset_pred
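
A minimal usage sketch for direct() follows. The series and the testnum/featurenum values are illustrative, and it assumes generate_data yields testnum test rows (with testnum even) as in the rest of this project:

import numpy as np

series = np.sin(np.linspace(0, 50, 1000)) + 0.1 * np.random.randn(1000)
pred = direct(series, testnum=100, featurenum=10)
# pred interleaves the one-step and two-step forecasts for the test window
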
Example No. 2
    def test_storage(self):
        path = "association_test_2"
        store.create_path(path)
        self.assertTrue(os.path.isdir('/'.join([store.data_dir, path])))

        path = "association_test_3"
        store.move_to(path)
        self.assertEqual(os.getcwd(), '/'.join([store.data_dir, path]))

        store.move_to_root()
        self.assertEqual(os.getcwd(), store.root_dir)

        store.move_to(path)
        rbm = RBM.RBM()
        store.store_object(rbm)

        rbm2 = store.retrieve_object(str(rbm))

        self.assertEqual(str(rbm), str(rbm2))

        dbn = DBN.DBN()
        store.store_object(dbn)

        dbn2 = store.retrieve_object(str(dbn))
        self.assertEqual(str(dbn), str(dbn2))
        store.move_to_root()
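
Outside the test case, the same store helpers can be used directly. A rough sketch reusing only the calls exercised above (the path name is illustrative):

rbm = RBM.RBM()
store.create_path("demo_run")                 # creates <data_dir>/demo_run
store.move_to("demo_run")                     # change into that directory
store.store_object(rbm)                       # persist the model there
rbm_copy = store.retrieve_object(str(rbm))    # load it back by its string key
store.move_to_root()                          # return to the project root
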
Example No. 3
def multioutput(dataset, testnum, featurenum, nstep):
    dataset = dataset[np.newaxis, :]
    x_train, x_test, y_train, y_test = generate_data_nstep(
        dataset, testnum, featurenum, nstep)
    min_max_scaler = MinMaxScaler()
    x_train = min_max_scaler.fit_transform(x_train)
    x_test = min_max_scaler.transform(x_test)
    dbn1 = dbn.DBN(x_train=x_train,
                   y_train=y_train,
                   x_test=x_test,
                   y_test=y_test,
                   hidden_layer=[250],
                   learning_rate_rbm=0.005,
                   batch_size_rbm=150,
                   n_epochs_rbm=200,
                   verbose_rbm=1,
                   random_seed_rbm=500,
                   activation_function_nn='tanh',
                   learning_rate_nn=0.005,
                   batch_size_nn=300,
                   n_epochs_nn=300,
                   verbose_nn=1,
                   decay_rate=0)
    dbn1.pretraining()
    dbn1.finetuning()
    # Multi-output strategy: the network predicts all nstep horizons at once,
    # so its output is reshaped into one forecast sequence of length testnum.
    dataset_pred = dbn1.result.reshape(1, testnum)
    return dataset_pred[0, :]
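
A rough call for multioutput(), with illustrative values; testnum is chosen divisible by nstep so the reshape to (1, testnum) is exact:

series = np.random.rand(1500)
pred = multioutput(series, testnum=100, featurenum=10, nstep=5)
# pred is a flat array of 100 forecasts, produced 5 horizons at a time
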
Example No. 4
def predict_with_dwt(dataset, testnum, featurenum):
    # The single-level DWT roughly halves the series, so only testnum / 2
    # coefficient samples correspond to the original test window.
    ca, cd = dwt.dwt(dataset)
    ca_matrix = ca[np.newaxis, :]
    print('DWT finish.')
    x_train, x_test, y_train, y_test = generate_data(ca_matrix,
                                                     int(testnum / 2),
                                                     featurenum)
    min_max_scaler = MinMaxScaler()
    x_train = min_max_scaler.fit_transform(x_train)
    x_test = min_max_scaler.transform(x_test)
    dbn1 = dbn.DBN(x_train=x_train,
                   y_train=y_train,
                   x_test=x_test,
                   y_test=y_test,
                   hidden_layer=[250],
                   learning_rate_rbm=0.0005,
                   batch_size_rbm=150,
                   n_epochs_rbm=200,
                   verbose_rbm=1,
                   random_seed_rbm=500,
                   activation_function_nn='tanh',
                   learning_rate_nn=0.005,
                   batch_size_nn=150,
                   n_epochs_nn=1500,
                   verbose_nn=1,
                   decay_rate=0)
    dbn1.pretraining()
    dbn1.finetuning()
    ca_pred = dbn1.result[:, 0]
    print('Lowpass coefficient estimation finish.')
    mu, sigma_2, cd_pred = generateData(cd[0:len(cd) - int(testnum / 2)],
                                        outputnum=int(testnum / 2))
    print('Highpass coefficient estimation finish.')
    dataset_pred = dwt.idwt(ca_pred, cd_pred)
    print('IDWT finish.')
    dataset_test = dataset[len(dataset) - testnum:len(dataset)]
    ca_test, cd_test = dwt.dwt(dataset_test)
    plt.figure(figsize=(12, 9), dpi=100)
    plt.subplot(3, 1, 1)
    plt.plot(ca_test)
    plt.plot(ca_pred)
    plt.legend(['lowpass_real', 'lowpass_prediction'], loc='upper right')
    plt.title('lowpass coefficient prediction result', fontsize=16)
    plt.subplot(3, 1, 2)
    plt.plot(cd_test)
    plt.plot(cd_pred)
    plt.legend(['highpass_real', 'highpass_prediction'], loc='upper right')
    plt.title('highpass coefficient prediction result', fontsize=16)
    plt.subplot(3, 1, 3)
    mse = mean_squared_error(dataset_pred, dataset_test)
    plt.plot(dataset_test)
    plt.plot(dataset_pred)
    plt.legend(['dataset_real', 'dataset_prediction'], loc='upper right')
    plt.title('sequence prediction result', fontsize=16)
    plt.xlabel('MSE = %f' % mse)
    plt.draw()
    #plt.show()
    return dataset_pred, mse
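
predict_with_dwt() forecasts the approximation coefficients with the DBN and the detail coefficients with the separate generateData routine, then reconstructs the series by inverse DWT. A rough call, with illustrative values, a hypothetical data file, and testnum kept even so the coefficient split is exact:

series = np.loadtxt('my_series.txt')   # hypothetical 1-D data file
pred, mse = predict_with_dwt(series, testnum=200, featurenum=12)
plt.show()                             # display the three comparison subplots
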
Example No. 5
        def svm_create(xs, ys):

            if self.dbn is None:
                layers = [xs.shape[1], 600, 600]
                epochs = [2000, 2000]
                learning_rate = [0.5, 0.5]

                net = dbn.DBN(layers, learning_rate)
                net.train(xs, epochs)

                # Store for other instances
                Codes_ClassifyUsingSVM_withDBN.sharedDbn = net
                self.dbn = net

            if self.kernel == 'linear':
                svm_cls = svm.LinearSVC(C=self.c, dual=True)
            else:
                svm_cls = svm.SVC(kernel=self.kernel, C=self.c)

            data = self.dbn.get_activations(xs, 3)
            svm_cls.fit(data, ys)
            return svm_cls
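
svm_create caches one trained DBN on the class and then fits an SVM on the network's layer-3 activations. A standalone sketch of the same pattern, reusing only the calls visible above (the DBN object, its get_activations API, and the data are assumed):

from sklearn import svm

def svm_on_dbn_features(net, xs, ys, kernel='linear', c=1.0):
    # Project the inputs through the trained DBN and fit an SVM on the
    # resulting layer-3 activations.
    feats = net.get_activations(xs, 3)
    if kernel == 'linear':
        clf = svm.LinearSVC(C=c, dual=True)
    else:
        clf = svm.SVC(kernel=kernel, C=c)
    clf.fit(feats, ys)
    return clf
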
Example No. 6
def recursive(dataset, testnum, featurenum):
    dataset = dataset[np.newaxis, :]
    x_train, x_test, y_train, y_test = generate_data(dataset, testnum,
                                                     featurenum)
    x_test_one = x_test[::2, :]
    y_test_one = y_test[::2]
    y_test_two = y_test[1::2]
    min_max_scaler = MinMaxScaler()
    x_train = min_max_scaler.fit_transform(x_train)
    x_test_one = min_max_scaler.transform(x_test_one)
    dataset_pred = []
    dbn1 = dbn.DBN(x_train=x_train,
                   y_train=y_train,
                   x_test=x_test_one,
                   y_test=y_test_one,
                   hidden_layer=[250],
                   learning_rate_rbm=0.0005,
                   batch_size_rbm=150,
                   n_epochs_rbm=200,
                   verbose_rbm=1,
                   random_seed_rbm=500,
                   activation_function_nn='tanh',
                   learning_rate_nn=0.005,
                   batch_size_nn=200,
                   n_epochs_nn=300,
                   verbose_nn=1,
                   decay_rate=0)
    dbn1.pretraining()
    dbn1.finetuning()
    dataset_pred_one = dbn1.result[:, 0]
    # Recursive strategy: drop the oldest feature column, append the one-step
    # forecasts as the newest feature, and reuse the same model for step two.
    x_test_two = np.delete(x_test[::2, :], 0, axis=1)
    x_test_two = np.hstack(
        (x_test_two, dbn1.result.reshape(len(dbn1.result), 1)))
    x_test_two = min_max_scaler.transform(x_test_two)
    dataset_pred_two = dbn1.predict(x_test_two)
    for i in range(len(dataset_pred_one)):
        dataset_pred.append(dataset_pred_one[i])
        dataset_pred.append(dataset_pred_two[i])
    return dataset_pred
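
In contrast to direct(), recursive() trains a single one-step model and feeds its own predictions back as the newest feature for the second step. Usage mirrors the other strategies (illustrative values; testnum even):

series = np.sin(np.linspace(0, 60, 1200))
pred = recursive(series, testnum=100, featurenum=10)
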
Example No. 7
def predict_without_dwt(dataset, testnum, featurenum):
    dataset = dataset[np.newaxis, :]
    x_train, x_test, y_train, y_test = generate_data(dataset, testnum,
                                                     featurenum)
    min_max_scaler = MinMaxScaler()
    x_train = min_max_scaler.fit_transform(x_train)
    x_test = min_max_scaler.transform(x_test)
    dbn1 = dbn.DBN(x_train=x_train,
                   y_train=y_train,
                   x_test=x_test,
                   y_test=y_test,
                   hidden_layer=[250],
                   learning_rate_rbm=0.0005,
                   batch_size_rbm=150,
                   n_epochs_rbm=200,
                   verbose_rbm=1,
                   random_seed_rbm=500,
                   activation_function_nn='tanh',
                   learning_rate_nn=0.005,
                   batch_size_nn=150,
                   n_epochs_nn=1500,
                   verbose_nn=1,
                   decay_rate=0)
    dbn1.pretraining()
    dbn1.finetuning()
    dataset_pred = dbn1.result[:, 0]
    dataset_test = dataset[0, dataset.shape[1] - testnum:dataset.shape[1]]
    mse = mean_squared_error(dataset_pred, dataset_test)
    plt.figure(figsize=(12, 9), dpi=100)
    plt.plot(dataset_test)
    plt.plot(dataset_pred)
    plt.legend(['dataset_real', 'dataset_prediction'], loc='upper right')
    plt.title('sequence prediction result', fontsize=16)
    plt.xlabel('MSE = %f' % mse)
    plt.draw()
    #plt.show()
    return dataset_pred, mse
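
predict_without_dwt() is the baseline: the same DBN regressor applied to the raw series, returning the forecasts and their MSE. A rough side-by-side with the DWT variant (illustrative values, hypothetical data file):

series = np.loadtxt('my_series.txt')   # hypothetical 1-D data file
pred_plain, mse_plain = predict_without_dwt(series, testnum=200, featurenum=12)
pred_dwt, mse_dwt = predict_with_dwt(series, testnum=200, featurenum=12)
print('MSE without DWT: %f, with DWT: %f' % (mse_plain, mse_dwt))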