Example No. 1
 def deep_belief_network_prediction(
     self,
     learning_rate,
     training_iterations,
     testing_iterations=10,
     hidden_layer_sizes_array=[10, 10],
 ):
     # Train and evaluate the DBN several times; keep the best accuracy.
     accuracy_list = []
     for _ in range(testing_iterations):
         self.prepare_training_data_from_csv_data(self.csv_data)
         classifier = SupervisedDBNClassification(
             hidden_layers_structure=hidden_layer_sizes_array,
             learning_rate_rbm=learning_rate / 2,
             learning_rate=learning_rate,
             n_epochs_rbm=int(training_iterations / 10),
             n_iter_backprop=training_iterations,
             batch_size=256,
             activation_function="relu",
             dropout_p=0.2,
         )
         classifier.fit(self.x_data_training, self.y_data_training)
         y_data_prediction = classifier.predict(self.x_data_testing)
         classifier_accuracy = accuracy_score(self.y_data_testing, y_data_prediction)
         accuracy_list.append(classifier_accuracy)
     return max(accuracy_list)
Example No. 2
def foo():  # all good
    import os
    import joblib
    from sklearn.datasets import load_breast_cancer
    from dbn import SupervisedDBNClassification

    dir_name = 'experiment2'

    work_path = os.getcwd()  # current working directory
    path = os.path.join(work_path, dir_name)
    print("The current working directory is %s" % work_path)

    if not os.path.exists(path):
        os.mkdir(path)
    else:
        print('Directory already exists')

    savedir = path
    filename = os.path.join(savedir, 'model.joblib')

    X, Y = load_breast_cancer(return_X_y=True)

    # from sklearn.gaussian_process import GaussianProcessClassifier
    # to_persist = GaussianProcessClassifier()

    # from lightning.classification import AdaGradClassifier
    # to_persist = AdaGradClassifier()

    to_persist = SupervisedDBNClassification()

    # Fit on the first 400 samples and persist the trained model.
    to_persist.fit(X[:400], Y[:400])

    print(filename)
    joblib.dump(to_persist, filename)

    # Load the model back from file and score it on the held-out samples.
    clf = joblib.load(filename)
    print(clf.score(X[400:], Y[400:]))
Example No. 3
from dbn import SupervisedDBNClassification


def create_model():
    classifier = SupervisedDBNClassification(
        hidden_layers_structure=[256, 256],
        learning_rate_rbm=0.05,
        learning_rate=0.1,
        n_epochs_rbm=10,
        n_iter_backprop=100,
        batch_size=32,
        activation_function='relu',
        dropout_p=0.2,
        verbose=False)
    return classifier
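A minimal usage sketch for create_model(), assuming the scikit-learn digits dataset and the accuracy_score helper that the other examples on this page rely on; the scaling and split values below are illustrative, not part of the original snippet.

import numpy as np
from sklearn.datasets import load_digits
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

# Hypothetical driver: load digits, scale to [0, 1], then train and score the DBN.
digits = load_digits()
X = (digits.data / 16).astype(np.float32)
X_train, X_test, Y_train, Y_test = train_test_split(
    X, digits.target, test_size=0.2, random_state=0)

model = create_model()
model.fit(X_train, Y_train)
Y_pred = model.predict(X_test)
print('Accuracy: %f' % accuracy_score(Y_test, Y_pred))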
Example No. 4
 def deep_belief_network_prediction(
     self,
     learning_rate,
     training_iterations,
     testing_iterations=10,
     hidden_layer_sizes_array=[10, 10],
 ):
     self.prepare_training_data_from_csv_data(self.csv_data)
     classifier = SupervisedDBNClassification(
         hidden_layers_structure=hidden_layer_sizes_array,
         learning_rate_rbm=learning_rate / 2,
         learning_rate=learning_rate,
         n_epochs_rbm=int(training_iterations / 10),
         n_iter_backprop=training_iterations,
         batch_size=256,
         activation_function="relu",
         dropout_p=0.2,
     )
     classifier.fit(self.x_data_training, self.y_data_training)
Example No. 5
import sys
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from dbn import SupervisedDBNClassification, SupervisedDBNRegression
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
import numpy as np
import tensorflow as tf
from PIL import Image

path_url = sys.argv[1]
path1 = 'ImagesPredict//' + path_url
image = Image.open(path1).convert('L').resize((100, 100))
img = np.asarray(image)
img = img.astype('float32')
img /= 255.0
path = 'dbntrained_255_24_hum_94.pkl'
classifier = SupervisedDBNClassification.load(path)
# Flatten the single 100x100 image into a (1, 100*100) feature row.
x = img.reshape(1, 100 * 100)
Y_pred = classifier.predict(x)
# print(Y_pred[0])  # 0 -> normal, 1 -> pneumonia
if Y_pred[0] == 0:
    print("No sign of pneumonia.")
elif Y_pred[0] == 1:
    print("Warning: pneumonia detected!")
else:
    print("Please provide a valid image!")

Example No. 6
import numpy as np
import winsound
import csv
np.random.seed(1337)  # for reproducibility
from sklearn.metrics import accuracy_score
from dbn import SupervisedDBNClassification

classifier = SupervisedDBNClassification(
    hidden_layers_structure=[500, 500, 2000],
    learning_rate_rbm=0.1,
    learning_rate=0.1,
    n_epochs_rbm=20,
    n_iter_backprop=200,
    batch_size=32,
    activation_function='relu',
    dropout_p=0.2)


def load_dataset(filename):
    print('Loading data from ' + filename + ' file...')
    import scipy.io
    mat = scipy.io.loadmat(filename)
    classnames = [item for sublist in mat['classnames'] for item in sublist]
    train_data = mat['train_data']
    test_data = mat['test_data']
    train_labels = [
        item for sublist in mat['train_labels'] for item in sublist
    ]
    test_labels = [item for sublist in mat['test_labels'] for item in sublist]
    return classnames, train_data, train_labels, test_data, test_labels
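A hedged sketch of wiring load_dataset() to the classifier configured above; the .mat filename is a placeholder, and the accuracy report simply mirrors the pattern used in the other examples on this page.

# Hypothetical driver code; 'dataset.mat' is a placeholder filename.
classnames, train_data, train_labels, test_data, test_labels = \
    load_dataset('dataset.mat')

classifier.fit(train_data, np.array(train_labels))
Y_pred = classifier.predict(test_data)
print('Accuracy: %f' % accuracy_score(test_labels, Y_pred))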
Example No. 7
# Data scaling
X = (X / 16).astype(np.float32)

# Splitting data
X_train, X_test, Y_train, Y_test = train_test_split(X,
                                                    Y,
                                                    test_size=0.2,
                                                    random_state=0)

# Training
classifier = SupervisedDBNClassification(
    hidden_layers_structure=[1000, 1000, 1000],
    learning_rate_rbm=0.05,
    learning_rate=0.1,
    n_epochs_rbm=15,
    n_iter_backprop=50,
    batch_size=32,
    activation_function='relu',
    dropout_p=0.2)
classifier.fit(X_train, Y_train)

# Save the model
classifier.save('model.pkl')

# Restore it
classifier = SupervisedDBNClassification.load('model.pkl')

# Test
Y_pred = classifier.predict(X_test)
print('Done.\nAccuracy: %f' % accuracy_score(Y_test, Y_pred))
Example No. 8
import numpy as np
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

from dbn import SupervisedDBNClassification


# Loading dataset
digits = load_digits()
X, Y = digits.data, digits.target

# Data scaling
X = (X / 16).astype(np.float32)

# Splitting data
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=0)

# Training
classifier = SupervisedDBNClassification(hidden_layers_structure=[256, 256],
                                         learning_rate_rbm=0.1,
                                         learning_rate=0.1,
                                         n_epochs_rbm=10,
                                         n_iter_backprop=100,
                                         l2_regularization=0.0,
                                         batch_size=32,
                                         activation_function='relu',
                                         dropout_p=0.2)
classifier.fit(X_train, Y_train)

# Test
Y_pred = classifier.predict(X_test)
print('Done.\nAccuracy: %f' % accuracy_score(Y_test, Y_pred))
Example No. 9
#     for row in reader:
#         a.append(row)
#         rownum += 1

#     ifile.close()
#     return a

# encoded_out = readcsv('encoded_output.csv')
'''
Step 4: Deep Belief Network
'''
classifier = SupervisedDBNClassification(hidden_layers_structure=[20, 10],
                                         learning_rate_rbm=0.05,
                                         learning_rate=0.1,
                                         n_epochs_rbm=10,
                                         n_iter_backprop=100,
                                         batch_size=32,
                                         activation_function='relu',
                                         dropout_p=0.2)

# from dbn.models import UnsupervisedDBN

# classifier = SupervisedDBN(hidden_layers_structure=[32, 16, 32, 64],
#                         batch_size=10,
#                         learning_rate_rbm=0.06,
#                         n_epochs_rbm=2,
#                         activation_function='sigmoid')

X_train, X_test, Y_train, Y_test = train_test_split(encoded_out,
                                                    Y,
                                                    train_size=0.7)
Example No. 10
import numpy as np
from sklearn.datasets import load_wine
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from dbn import SupervisedDBNClassification

np.random.seed(1000)

if __name__ == '__main__':
    # Load and normalize the dataset
    wine = load_wine()

    ss = StandardScaler()
    X = ss.fit_transform(wine['data'])
    Y = wine['target']

    # Create train and test sets
    X_train, X_test, Y_train, Y_test = \
        train_test_split(X, Y,
                         test_size=0.25,
                         random_state=1000)

    # Train the model
    classifier = SupervisedDBNClassification(hidden_layers_structure=[16, 8],
                                             learning_rate_rbm=0.001,
                                             learning_rate=0.01,
                                             n_epochs_rbm=20,
                                             n_iter_backprop=100,
                                             batch_size=16,
                                             activation_function='relu',
                                             dropout_p=0.1)

    classifier.fit(X_train, Y_train)

    Y_pred = classifier.predict(X_test)
    print(classification_report(Y_test, Y_pred))
Example No. 11
X, Y = digits.data, digits.target

# Data scaling
X = (X / 16).astype(np.float32)

# Splitting data
X_train, X_test, Y_train, Y_test = train_test_split(X,
                                                    Y,
                                                    test_size=0.2,
                                                    random_state=0)

# Training
classifier = SupervisedDBNClassification(hidden_layers_structure=[256, 256],
                                         learning_rate_rbm=0.05,
                                         learning_rate=0.1,
                                         n_epochs_rbm=10,
                                         n_iter_backprop=100,
                                         batch_size=32,
                                         activation_function='relu',
                                         dropout_p=0.2)
classifier.fit(X_train, Y_train)

# Test
Y_pred = classifier.predict(X_test)
print('Done.\nAccuracy: %f' % accuracy_score(Y_test, Y_pred))


from sklearn.metrics import confusion_matrix
import numpy as np
import matplotlib.pyplot as plt
from sklearn import manifold, datasets
Example No. 12
# X_scaled_train = preprocessing.scale(X_train)
min_max_scaler = preprocessing.MinMaxScaler()
X_scaled_train = min_max_scaler.fit_transform(X_train)
y_train = train_set[1:, -1]
X_test = test_set[:5000, 1:-1]
# X_scaled_test = preprocessing.scale(X_test)
# Scale the test set with the scaler fitted on the training data.
X_scaled_test = min_max_scaler.transform(X_test)
y_test = test_set[:5000, -1]

# Training
clf = SupervisedDBNClassification(
    hidden_layers_structure=[1024, 512],
    learning_rate_rbm=0.05,
    learning_rate=0.1,
    n_epochs_rbm=3,
    n_iter_backprop=10,
    batch_size=128,
    activation_function='sigmoid',  # relu->error
    dropout_p=0.2)
# Train on the min-max-scaled features.
clf.fit(X_scaled_train, y_train)

# Save the model
clf.save('model.pkl')

# Restore it
classifier = SupervisedDBNClassification.load('model.pkl')

# Test
y_pred = classifier.predict(X_scaled_test)
print('Done.\nAccuracy: %f' % accuracy_score(y_test, y_pred))