Example #1
from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import EarlyStopping
from data import load_titanic_data


def validation_dataset(predictors, target):
    # Save the number of columns in predictors: n_cols
    n_cols = predictors.shape[1]
    input_shape = (n_cols,)

    # Specify the model
    model = Sequential()
    model.add(Dense(100, activation='relu', input_shape=input_shape))
    model.add(Dense(100, activation='relu'))
    model.add(Dense(2, activation='softmax'))

    # Compile the model
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy', metrics=['accuracy'])

    # Fit the model
    hist = model.fit(predictors, target, validation_split=0.3)


def early_stopping(predictors, target):
    # Save the number of columns in predictors: n_cols
    n_cols = predictors.shape[1]
    input_shape = (n_cols,)

    # Specify the model
    model = Sequential()
    model.add(Dense(100, activation='relu', input_shape=input_shape))
    model.add(Dense(100, activation='relu'))
    model.add(Dense(2, activation='softmax'))

    # Compile the model
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy', metrics=['accuracy'])

    # Define early_stopping_monitor
    early_stopping_monitor = EarlyStopping(patience=2)

    # Fit the model
    model.fit(predictors, target, validation_split=0.3, epochs=30,
              callbacks=[early_stopping_monitor])


predictors, target = load_titanic_data()
#validation_dataset(predictors, target)
early_stopping(predictors, target)
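Both functions fit the same network, but neither returns the Keras History object that model.fit() produces. A minimal sketch of how the two runs could be compared, assuming each function is modified to return that History object (an assumption; the example above does not return it):

import matplotlib.pyplot as plt

# Hypothetical comparison, assuming validation_dataset() and early_stopping()
# are changed to return the History object from model.fit()
hist_plain = validation_dataset(predictors, target)
hist_early = early_stopping(predictors, target)

# History.history holds per-epoch metrics; 'val_loss' is present because
# validation_split=0.3 was passed to model.fit()
plt.plot(hist_plain.history['val_loss'], label='no early stopping')
plt.plot(hist_early.history['val_loss'], label='EarlyStopping(patience=2)')
plt.xlabel('epoch')
plt.ylabel('validation loss')
plt.legend()
plt.show()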
Example #2
from sklearn.neural_network import MLPClassifier
from sklearn.cluster import KMeans
from sklearn.mixture import GaussianMixture
from sklearn.model_selection import cross_val_score
from data import load_tennis_data, load_titanic_data
import matplotlib.pyplot as plt
import numpy.random as random
import numpy as np
from sklearn.metrics import silhouette_samples, silhouette_score
from scipy.stats import kurtosis

NEGINF = -float("inf")

# IMPORT DATA

X_train, y_train, X_test, y_test = load_titanic_data()
tit_X_train, tit_y_train, tit_X_test, tit_y_test = load_titanic_data()

tit_df = load_titanic_data(form="original df")
tit_features, tit_labels = load_titanic_data(form="df")

tit_cols = list(tit_features.columns)
print(tit_cols)

# RCA

wss_km = []
log_like_em = []
sil_km = []
sil_em = []
rca_models = []
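The example breaks off after initializing these lists. A minimal sketch of the kind of loop that could fill them, assuming "RCA" refers to random projections via sklearn's GaussianRandomProjection and that K-means and EM are re-run on each projected dataset (both are assumptions; the original loop is not shown):

from sklearn.random_projection import GaussianRandomProjection

k = 2  # hypothetical fixed cluster count for this sketch
for n in range(1, len(tit_cols) + 1):
    # Project the Titanic features onto n random components
    rca = GaussianRandomProjection(n_components=n, random_state=0)
    X_rp = rca.fit_transform(tit_features)
    rca_models.append(rca)

    # K-means: record within-cluster sum of squares and silhouette
    km = KMeans(n_clusters=k, random_state=0).fit(X_rp)
    wss_km.append(km.inertia_)
    sil_km.append(silhouette_score(X_rp, km.labels_))

    # EM (Gaussian mixture): record average log-likelihood and silhouette
    em = GaussianMixture(n_components=k, random_state=0).fit(X_rp)
    log_like_em.append(em.score(X_rp))
    sil_em.append(silhouette_score(X_rp, em.predict(X_rp)))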
Example #3
from sklearn.cluster import KMeans
from sklearn.mixture import GaussianMixture
from sklearn.model_selection import cross_val_score
from data import load_tennis_data, load_titanic_data
import matplotlib.pyplot as plt
import numpy.random as random
import numpy as np
from sklearn.metrics import silhouette_samples, silhouette_score
from scipy.stats import kurtosis

NEGINF = -float("inf")

# IMPORT DATA

ten_X_train, ten_y_train, ten_X_test, ten_y_test = load_tennis_data()
tit_X_train, tit_y_train, tit_X_test, tit_y_test = load_titanic_data()

ten_features, ten_labels = load_tennis_data(form="df")
tit_features, tit_labels = load_titanic_data(form="df")

tit_cols = list(tit_features.columns)
ten_cols = list(ten_features.columns)

# NO DIM REDUC CLUSTERING

tit_sil_em = []
ten_sil_em = []

ks = range(2, 20)
for k in ks:
    em = GaussianMixture(n_components=k)