Example #1
import numpy

# Model definition (model_mlp is a helper defined elsewhere in the project)
model = model_mlp(input_shape=(sz * d + 2, ),
                  hidden_layers=n_units_hidden_layers,
                  n_classes=n_classes,
                  activation="relu")
model.compile(loss="categorical_crossentropy",
              optimizer="adam",
              metrics=["accuracy"])

# Sanity check: print weight shapes and the total parameter count
print("Weights:", [w.shape for w in model.get_weights()])
print("Total number of parameters:", model.count_params())

# Fit
basename = "output/models_baseline/mlp."
for n_units in n_units_hidden_layers:
    basename += "%d-" % n_units
basename = basename[:-1]
fname_weights = model_fit_and_save(model,
                                   basename,
                                   X=numpy.hstack((X, X_coord)),
                                   y=y,
                                   patience_early_stopping=100,
                                   save_acc=True,
                                   validation_split=0.1)
model.load_weights(fname_weights)

# Go!
print_eval(model, numpy.hstack((X_test, X_coord_test)), y_test)
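The model_mlp builder used above is a project helper whose definition is not shown here. As a rough, hypothetical sketch (the real signature and architecture may differ), it presumably stacks fully-connected layers over the flattened input and ends with a softmax:

# Hypothetical sketch of a model_mlp-style builder, not the project's actual code
from keras.layers import Input, Dense
from keras.models import Model

def model_mlp_sketch(input_shape, hidden_layers, n_classes, activation="relu"):
    inputs = Input(shape=input_shape)
    x = inputs
    for n_units in hidden_layers:
        x = Dense(units=n_units, activation=activation)(x)
    preds = Dense(units=n_classes, activation="softmax")(x)
    return Model(inputs=inputs, outputs=preds)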
Example #2
n_units_hidden_layers_ensemble = [64, 32]

# Model definition
ensemble_model = model_mlp(input_shape=(ensemble_features.shape[1], ), hidden_layers=n_units_hidden_layers_ensemble,
                           n_classes=n_classes)
ensemble_model.compile(loss="categorical_crossentropy", optimizer="rmsprop", metrics=["accuracy"])

# Sanity check: print weight shapes and the total parameter count
print("Weights:", [w.shape for w in ensemble_model.get_weights()])
print("Total number of parameters:", ensemble_model.count_params())

# Fit
basename = "output/models_ensemble/mlp_rnn_rff."
for units in n_units_hidden_layers_ensemble:
    basename += "%d-" % units
basename = basename[:-1] + "."
short_rnn = fname_model_rnn.split("/")[-1]
short_rnn = short_rnn[:short_rnn.rfind(".weights")]
short_mlp = fname_model_mlp.split("/")[-1]
short_mlp = short_mlp[:short_mlp.rfind(".weights")]
short_rff = fname_model_rff.split("/")[-1]
short_rff = short_rff[:short_rff.rfind(".weights")]
basename += short_rnn + "." + short_mlp + "." + short_rff

fname_weights = model_fit_and_save(ensemble_model, basename, X=ensemble_features, y=y, patience_early_stopping=100,
                                   save_acc=True, validation_split=0.1)
ensemble_model.load_weights(fname_weights)

# Go!
print_eval(ensemble_model, ensemble_features_test, y_test)
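model_fit_and_save is likewise defined elsewhere. Judging from its arguments (patience_early_stopping, save_acc, validation_split) and from the fact that callers reload the returned weight file, it plausibly wraps model.fit with early stopping and best-only checkpointing; everything below (names, epoch budget, metric key) is an assumption:

# Hypothetical sketch of a model_fit_and_save-style helper (assumed behaviour)
from keras.callbacks import EarlyStopping, ModelCheckpoint

def model_fit_and_save_sketch(model, basename, X, y, patience_early_stopping=100,
                              save_acc=True, validation_split=0.1):
    # Metric key depends on the Keras version ("val_acc" in older releases)
    monitor = "val_acc" if save_acc else "val_loss"
    fname_weights = basename + ".weights.hdf5"
    callbacks = [EarlyStopping(monitor=monitor, patience=patience_early_stopping),
                 ModelCheckpoint(fname_weights, monitor=monitor,
                                 save_best_only=True, save_weights_only=True)]
    model.fit(X, y, epochs=1000, validation_split=validation_split,
              callbacks=callbacks, verbose=2)
    return fname_weights  # callers then load_weights() on the best checkpoint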
Example #3
from keras.layers import Input, Conv1D, SimpleRNN, Dense
from keras.models import Model

n_classes = 9

convsz = 8
n_filters_conv = 10
rnn_dim = 512

# Load training data
X, X_coord, y = load_tiselac(training_set=True, shuffle=True, random_state=0)

# Model definition
# sz (series length) and d (features per time step) come from the surrounding project
input_layer = Input(shape=(sz, d))
# "valid" padding: the convolution outputs sz - convsz + 1 time steps
conv_layer = Conv1D(filters=n_filters_conv, kernel_size=convsz)(input_layer)
rnn_layer = SimpleRNN(units=rnn_dim)(conv_layer)
preds = Dense(units=n_classes, activation="softmax")(rnn_layer)
model = Model(inputs=input_layer, outputs=preds)
model.compile(loss="categorical_crossentropy",
              optimizer="rmsprop",
              metrics=["accuracy"])

# Sanity check: print weight shapes and the total parameter count
print("Weights:", [w.shape for w in model.get_weights()])
print("Total number of parameters:", model.count_params())

# Fit
basename = "output/models_baseline/crnn.%d-%d-%d" % (convsz, n_filters_conv,
                                                     rnn_dim)
model_fit_and_save(model, basename, X=X, y=y)

# Go!
print_eval(model, X, y)
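print_eval is another helper from the project; it presumably reports classification scores for the given set. A minimal stand-in (metric choice assumed, y assumed one-hot encoded):

# Hypothetical stand-in for print_eval; the real helper may report other metrics
import numpy
from sklearn.metrics import accuracy_score, f1_score

def print_eval_sketch(model, X, y):
    y_pred = model.predict(X).argmax(axis=1)  # class indices from softmax outputs
    y_true = numpy.asarray(y).argmax(axis=1)  # assumes one-hot targets
    print("Accuracy: %.4f" % accuracy_score(y_true, y_pred))
    print("Macro F1: %.4f" % f1_score(y_true, y_pred, average="macro"))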
Example #4
model = model_rnn(input_shape=(sz, d),
                  hidden_layers=n_units_hidden_layers,
                  rnn_layer_dim=dim_rnn,
                  input_shape_side_info=(2, ),
                  n_classes=n_classes,
                  use_lstm=True)
model.compile(loss="categorical_crossentropy",
              optimizer="adam",
              metrics=["accuracy"])

# Sanity check: print weight shapes and the total parameter count
print("Weights:", [w.shape for w in model.get_weights()])
print("Total number of parameters:", model.count_params())

# Fit
basename = "output/models_baseline/rnn.%d." % dim_rnn
for n_units in n_units_hidden_layers:
    basename += "%d-" % n_units
basename = basename[:-1]
fname_weights = model_fit_and_save(model,
                                   basename,
                                   X=[X, X_coord],
                                   y=y,
                                   patience_early_stopping=100,
                                   save_acc=True,
                                   validation_split=0.1)
model.load_weights(fname_weights)

# Go!
print_eval(model, [X_test, X_coord_test], y_test)
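From this call one can guess that model_rnn builds a recurrent branch over the (sz, d) sequences, merges it with the 2-dimensional side-information input (the pixel coordinates), and stacks dense layers on top. A sketch under those assumptions only:

# Hypothetical sketch of a model_rnn-style builder with side information
from keras.layers import Input, Dense, LSTM, SimpleRNN, concatenate
from keras.models import Model

def model_rnn_sketch(input_shape, hidden_layers, rnn_layer_dim,
                     input_shape_side_info, n_classes, use_lstm=False):
    seq_input = Input(shape=input_shape)             # (sz, d) time series
    side_input = Input(shape=input_shape_side_info)  # e.g. pixel coordinates
    rnn_cls = LSTM if use_lstm else SimpleRNN
    x = rnn_cls(units=rnn_layer_dim)(seq_input)
    x = concatenate([x, side_input])
    for n_units in hidden_layers:
        x = Dense(units=n_units, activation="relu")(x)
    preds = Dense(units=n_classes, activation="softmax")(x)
    return Model(inputs=[seq_input, side_input], outputs=preds)

The two-element input list matches the X=[X, X_coord] call above.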
Example #5
sz = 23
n_classes = 9
# d (the number of features per time step) is assumed to be defined upstream

rff_dim = 256
feature_sizes = [4, 8, 12, 16]

# Load training data
X, X_coord, y = load_tiselac(training_set=True, shuffle=True, random_state=0)
feats_8_12_16 = ecml17_tiselac_data_preparation(X, d=d, feature_sizes=tuple(feature_sizes), use_time=True)

# Prepare model
# Map each local-feature dimension (d * f_sz, plus 1 for the time index) to
# the number of sliding windows of that size (sz - f_sz + 1)
dict_dims = {(d * f_sz + 1): sz - f_sz + 1 for f_sz in feature_sizes}
model = model_mk_rff(input_dimensions=dict_dims, embedding_dim=rff_dim, n_classes=n_classes, side_info_dim=2)
model.compile(loss="categorical_crossentropy", optimizer="rmsprop", metrics=["accuracy"])

# Check that the embedding weights are shared across feature sets, not repeated once per set
print("Weights:", [w.shape for w in model.get_weights()])
print("Total number of parameters:", model.count_params())

# Fit
basename = "output/models_rff/"
for sz in feature_sizes:
    basename += "%d-" % sz
basename = basename[:-1] + ".%d" % rff_dim
list_callbacks = model_fit_and_save(model, basename, X=feats_8_12_16 + [X_coord], y=y)

# Go!
print("Best model performance (full training set")
model.load_weights(list_callbacks[0].last_saved_model)
print_eval(model, feats_8_12_16 + [X_coord], y)
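model_mk_rff appears to build a match-kernel model on random Fourier features (RFF): an RFF embedding of the form z(x) = sqrt(2/D) * cos(Wx + b) approximates a shift-invariant kernel, and sharing one embedding across all feature sets is exactly what the parameter-count check above verifies. The sketch below illustrates that shared-embedding idea only; the actual builder (initialisation of W and b, pooling, exact wiring) may differ:

# Hypothetical sketch of the shared-RFF-embedding idea behind model_mk_rff
from keras import backend as K
from keras.layers import Input, Dense, GlobalAveragePooling1D, concatenate
from keras.models import Model

def model_mk_rff_sketch(input_dimensions, embedding_dim, n_classes, side_info_dim):
    # One shared layer: the same weights embed every local feature,
    # whichever set it comes from (cosine non-linearity, as in RFF)
    shared_embedding = Dense(units=embedding_dim, activation=K.cos)
    inputs, pooled = [], []
    for feat_dim, n_windows in input_dimensions.items():
        set_input = Input(shape=(n_windows, feat_dim))
        embedded = shared_embedding(set_input)  # Dense acts on the last axis
        pooled.append(GlobalAveragePooling1D()(embedded))  # average over windows
        inputs.append(set_input)
    side_input = Input(shape=(side_info_dim, ))
    features = concatenate(pooled + [side_input])
    preds = Dense(units=n_classes, activation="softmax")(features)
    return Model(inputs=inputs + [side_input], outputs=preds)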