# User latent-factor embedding: maps each user id to an n_latent_factors-dim
# vector, with a small L2 penalty on the embedding weights.
# NOTE(review): n_users, n_latent_factors, user_input, movie_embedding and
# movie_input are defined earlier in the file (not visible in this excerpt).
user_embedding = Embedding(n_users,
                           n_latent_factors,
                           embeddings_regularizer=regularizers.l2(0.00001),
                           name='user_embedding')(user_input)
"""- 벡터화(Flatten)"""  # (markdown-cell remnant: "Vectorization (Flatten)")

# Item latent vector
movie_vec = Flatten()(movie_embedding)
# User latent vector
user_vec = Flatten()(user_embedding)
"""- 모델링(Modeling)"""  # (markdown-cell remnant: "Modeling")

# Predicted rating = dot product of the two latent vectors
# (classic matrix-factorization recommender).
r_hat = dot([movie_vec, user_vec], axes=-1)
model = Model([user_input, movie_input], r_hat)
# NOTE(review): 'accuracy' is not a meaningful metric for MSE regression, but
# it is kept because later code reads hist.history['accuracy'] and
# hist.history['val_accuracy'].
model.compile(optimizer='adam',
              loss='mean_squared_error',
              metrics=['accuracy'])
"""- 모델 훈련(Train Model)"""  # (markdown-cell remnant: "Train Model")

# Commented out IPython magic to ensure Python compatibility.
# Train on (userId, movieId) -> rating pairs, holding out 10% for validation.
hist = model.fit([train_df.userId, train_df.movieId],
                 train_df.rating,
                 validation_split=0.1,
                 batch_size=128,
                 epochs=50,
                 verbose=1)
print(hist.history.keys())
print('train loss: ', hist.history['loss'][-1])
print('train acc: ', hist.history['accuracy'][-1])
# Fixed label: the original printed 'val acc' on both of the next two lines,
# although the first one reports the validation *loss*.
print('val loss: ', hist.history['val_loss'][-1])
print('val acc: ', hist.history['val_accuracy'][-1])
# Exemple #2 (scraped snippet separator; stray vote-count "0" removed so the file parses)
# Baseline linear SVM (hinge loss) trained with stochastic gradient descent.
# NOTE(review): X_train/y_train/X_test/y_test come from earlier in the file.
from sklearn.linear_model import SGDClassifier
clf = SGDClassifier(loss="hinge", penalty="l2", max_iter=50)
clf.fit(X_train, y_train)
# Mean accuracy on the held-out set — the value is discarded (only useful in a
# notebook where the cell echoes it).
clf.score(X_test, y_test)

# In[ ]:

# Baseline MLP: one 10-unit ReLU hidden layer feeding a single sigmoid output.
from keras.models import Sequential
from keras import layers

input_dim = X_train.shape[1]  # Number of features

# Same two-layer network, passing the layers to the Sequential constructor
# instead of calling .add() repeatedly.
model = Sequential([
    layers.Dense(10, input_dim=input_dim, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])
model.summary()

# Train for 100 epochs, validating on the test split after each epoch.
history = model.fit(X_train, y_train,
                    batch_size=10,
                    epochs=100,
                    verbose=False,
                    validation_data=(X_test, y_test))

# Final accuracy on each split.
loss, accuracy = model.evaluate(X_train, y_train, verbose=False)
print("Training Accuracy: {:.4f}".format(accuracy))
loss, accuracy = model.evaluate(X_test, y_test, verbose=False)
print("Testing Accuracy:  {:.4f}".format(accuracy))

# In[ ]:
# Regression MLP: 5 input features -> 4 continuous outputs.
model = tf.keras.models.Sequential([
  # Fixed: shape must be a tuple — the original `shape=(5)` is just the int 5.
  tf.keras.layers.Input(shape=(5,)),
  tf.keras.layers.Dense(12800, activation="relu"),
  tf.keras.layers.Dropout(0.2),
  tf.keras.layers.Dense(1600, activation="relu"),
  tf.keras.layers.Dropout(0.2),
  tf.keras.layers.Dense(80, activation="relu"),
  tf.keras.layers.Dense(80, activation="relu"),
  tf.keras.layers.Dense(80, activation="relu"),
  tf.keras.layers.Dense(80, activation="relu"),
  tf.keras.layers.Dense(80, activation="relu"),
  tf.keras.layers.Dense(80, activation="relu"),
  tf.keras.layers.Dense(80, activation="relu"),
  tf.keras.layers.Dense(4)  # linear output layer for regression
])

model.compile(optimizer="adam",
              loss="mse",
              metrics=['mae', 'mse'])

model.fit(x_train, y_train, epochs=50)

# evaluate() returns [loss, mae, mse] for this compile config. The original
# unpacked it as `mse, mae, mse`, silently clobbering the first target; name
# the loss explicitly (here loss == mse since loss="mse").
loss, mae, mse = model.evaluate(x_test, y_test, verbose=2)

print('\nTest mean absolute error:', mae)

# Exemple #4 (scraped snippet separator; stray vote-count "0" removed so the file parses)
# Tail of a CNN classifier — the Sequential model and its convolutional layers
# are defined above this excerpt (not visible here).
model.add(BatchNormalization(axis=-1))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
#
model.add(Flatten())
#
## Fully connected layer
model.add(Dense(512))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(6))  # 6 output classes

model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=['accuracy'])

# NOTE(review): steps_per_epoch=1 / validation_steps=1 consumes only one batch
# per epoch — presumably X_train/x_val are batch generators; confirm, since
# with plain arrays this would train on a tiny fraction of the data.
model.fit(X_train,label_train, steps_per_epoch=1,
          validation_data=(x_val,y_val), validation_steps=1, epochs=5, verbose=2)

# NOTE(review): predict_generator is deprecated in modern Keras (use
# model.predict); the return value is discarded here.
model.predict_generator(test_batch,verbose=2)

# Filenames held by the training batch generator — expression value is
# discarded (only useful interactively in a notebook).
train_batch.filenames