Example #1
from keras.models import Sequential
from keras.layers import Dense

# Initialize the network

classifier = Sequential()
classifier.add(
    Dense(units=15,
          kernel_initializer='uniform',
          activation='relu',
          input_dim=29))
classifier.add(Dense(units=15, kernel_initializer='uniform',
                     activation='relu'))
classifier.add(
    Dense(units=1, kernel_initializer='uniform', activation='sigmoid'))
classifier.compile(optimizer='adam',
                   loss='binary_crossentropy',
                   metrics=['accuracy'])

# Fit the network to the training set

classifier.fit(X_train, y_train, batch_size=32, epochs=100)

# Predict the test set results

y_pred = classifier.predict(X_test)
y_pred = (y_pred > 0.5)  # threshold the predicted probabilities into class labels

score = classifier.evaluate(X_test, y_test)
print(score)  # [loss, accuracy]

# Create the confusion matrix
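# A minimal sketch of the confusion-matrix step referred to above, assuming the
# y_test and thresholded y_pred arrays from the previous block are available.
from sklearn.metrics import confusion_matrix

cm = confusion_matrix(y_test, y_pred.astype(int).ravel())
print(cm)
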
from numpy import loadtxt

dataset = loadtxt('labeled.csv', delimiter=',')

# split into input (X) and output (y) variables
X = dataset[:, 0:16]
y = dataset[:, 16]

# Define the Keras model
# No activation is specified, so the Dense layers default to linear activation
model = Sequential()
model.add(Dense(8, input_dim=16))
model.add(Dense(4))
model.add(Dense(1))

# compile the keras model
model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

# fit the keras model on the dataset
# model.fit(X, y, epochs=100, batch_size=25)
model.fit(X, y, epochs=100, batch_size=32)

# evaluate the keras model
_, accuracy = model.evaluate(X, y)
print('Accuracy: %.2f%%' % (accuracy * 100))
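
# As in the first example, class labels can be recovered by thresholding the raw
# outputs at 0.5 (note the final Dense layer here is linear, so the outputs are
# not bounded to [0, 1]).
predictions = (model.predict(X) > 0.5).astype(int)
print(predictions[:10].ravel())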

# Single Hidden Layer ANN with Holdout

from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.neural_network import MLPClassifier
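
# The code for this section is not shown; a minimal sketch, assuming the X and y
# arrays from labeled.csv above are reused, might look like this:
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=42)

mlp = MLPClassifier(hidden_layer_sizes=(8,), max_iter=500, random_state=42)
mlp.fit(X_train, y_train)

y_pred = mlp.predict(X_test)
print(accuracy_score(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
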
Example #3
from sklearn.preprocessing import MinMaxScaler

# Scale features to [0, 1] using statistics fitted on the training set only
scaler = MinMaxScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)

from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import Adam

model = Sequential([Dense(128, activation='relu'),
                    Dropout(0.5),
                    Dense(128, activation='relu'),
                    Dropout(0.5),
                    Dense(11, activation='softmax')])

model.compile(loss='sparse_categorical_crossentropy',
              optimizer=Adam(),
              metrics=['mean_squared_error', 'accuracy'])
history = model.fit(
    X_train,
    Y_train,
    batch_size=32,
    epochs=30,
    validation_split=0.2,
    verbose=1)

probabilities = model.predict(testDF)
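
# The softmax layer outputs one probability per class; np.argmax recovers the
# predicted class index for each row (this assumes testDF has been scaled with
# the same MinMaxScaler as X_train).
import numpy as np

predicted_classes = np.argmax(probabilities, axis=1)
print(predicted_classes[:10])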