Example #1
from keras.layers import Dense, Dropout
from keras.layers import Embedding
from keras.layers import LSTM
from keras.models import Sequential
from keras.preprocessing import sequence

from preprocess import Data  # Data helper class, as imported in Example #2

# load the corpus, encode it as bag-of-words and drop the '0'/'000' classes
dataset = Data(path="data/",
               stem=False,
               simply=True,
               stop_word=False,
               delete_class=['0', '000'],
               codif='bagofwords',
               max_features=None)
# 80/20 train/test split
X_train, y_train, X_test, y_test = dataset.train_test_split(0.8)

# non-encoded samples (not used further in this example)
x, y = dataset.get_non_coded()

# create the model
# n_features and max_review_length are not defined in the original snippet;
# they are assumed here to come from the encoded feature matrix
n_features = X_train.shape[1]         # vocabulary size of the bag-of-words encoding
max_review_length = X_train.shape[1]  # length of each encoded input vector
embedding_vector_length = 2
model = Sequential()
model.add(
    Embedding(n_features,
              embedding_vector_length,
              input_length=max_review_length))
model.add(LSTM(2))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
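
# Example #1 stops after compile(); the lines below are a minimal sketch of
# training and evaluation, assuming the split arrays are NumPy-compatible
# (epochs and batch_size are placeholder values, not part of the original example).
model.fit(X_train, y_train,
          validation_data=(X_test, y_test),
          epochs=3,
          batch_size=64)
scores = model.evaluate(X_test, y_test, verbose=0)
print("Test accuracy: %.2f%%" % (scores[1] * 100))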
Example #2
"""

from toy_simpleNN import ToySimpleNN
from preprocess import Data

# training hyperparameters; only 'activation', 'optimizer' and 'train_step'
# are consumed in this snippet
hyperparam = {
    'activation': 'tanh',
    'regularization': 'l2',
    'batch_size': 20,
    'learning_rate': 1e-4,
    'valid_rate': 0.1,
    'optimizer': 'SGD',
    'train_step': 1000
}

data_set = Data()
# load the training data from a VASP OUTCAR output file
# (absolute path kept from the original example)
data_set.load_OUTCAR("C:/Users/Seungwoo Hwang/Desktop/toy-simpleNN/OUTCAR")
#data_set = Data("OUTCAR")

# split into train/test arrays (not passed to the model directly;
# the model receives the whole data_set via load_data below)
x_train, y_train, x_test, y_test = data_set.train_test_split()

# build the toy network with the chosen activation function and number of
# training steps, then attach the optimizer and the data
model = ToySimpleNN(act=hyperparam['activation'],
                    train_step=hyperparam['train_step'])
model.set_optimizer(optimizer=hyperparam['optimizer'])
model.load_data(data_set)

# run training, then evaluation
model.train()

model.test()