# Example #1
          input_dim=X_train.shape[1],
          activation='relu'))
# Hidden layers: three fully-connected ReLU layers of width 256.
model.add(Dense(256, kernel_initializer='normal', activation='relu'))
model.add(Dense(256, kernel_initializer='normal', activation='relu'))
model.add(Dense(256, kernel_initializer='normal', activation='relu'))

# Output layer: a single linear unit for scalar regression.
model.add(Dense(1, kernel_initializer='normal', activation='linear'))

# Compile with MAE loss and the Adam optimizer; track MAE as a metric.
model.compile(loss='mean_absolute_error',
              optimizer='adam',
              metrics=['mean_absolute_error'])

print(model.summary())

# Train the model; 30% of the training data is held out for validation.
result = model.fit(X_train,
                   y_train,
                   validation_split=0.3,
                   batch_size=10,
                   epochs=100)

# Model evaluation on the test set.
prediction = model.predict(X_test)

# Residual distribution. FIX: sns.distplot was deprecated and then removed
# from seaborn; histplot with a KDE overlay is the modern replacement.
sns.histplot(y_test.values.reshape(-1, 1) - prediction, kde=True)
plt.show()

# Predicted vs. actual scatter. FIX: the original never called plt.show()
# here, so this figure was silently clobbered by the next plotting call.
plt.scatter(y_test, prediction)
plt.show()
model = Sequential()

# Input layer: width 128, sized to the number of input features.
model.add(Dense(128, kernel_initializer='normal', input_dim=X_train.shape[1], activation='relu'))

# Hidden layers: three fully-connected ReLU layers of width 256.
model.add(Dense(256, kernel_initializer='normal', activation='relu'))
model.add(Dense(256, kernel_initializer='normal', activation='relu'))
model.add(Dense(256, kernel_initializer='normal', activation='relu'))

# Output layer: a single linear unit for scalar regression.
model.add(Dense(1, kernel_initializer='normal', activation='linear'))

# Compile with MAE loss and the Adam optimizer; track MAE as a metric.
model.compile(loss='mean_absolute_error', optimizer='adam', metrics=['mean_absolute_error'])
model.summary()

# Fit the ANN to the training set; 33% held out for validation.
# BUG FIX: `nb_epoch` is the Keras 1.x keyword and was removed in Keras 2 —
# on a modern Keras install it raises TypeError. The correct keyword is `epochs`.
model_history = model.fit(X_train, y_train, validation_split=0.33, batch_size=10, epochs=100)

#### Model Evaluation
prediction = model.predict(X_test)

# Residual distribution. FIX: sns.distplot was deprecated and then removed
# from seaborn; histplot with a KDE overlay is the modern replacement.
sns.histplot(y_test.values.reshape(-1, 1) - prediction, kde=True)
plt.show()

plt.scatter(y_test, prediction)
plt.show()

# Regression evaluation metric (sklearn.metrics is presumed imported as
# `metrics` earlier in the file — verify against the file header).
print('MAE:', metrics.mean_absolute_error(y_test, prediction))