Example #1
from tqdm import tqdm
from tensorflow.keras.callbacks import EarlyStopping

# gen_random_mlp and MLP are assumed to be project-specific helpers defined elsewhere.


def mc_mlp_val(x_train,
               y_train,
               size_input,
               hidden_layers,
               nodes_hidden_layers,
               size_output=2,
               runs=1000,
               n_samples=5):
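    """Monte Carlo search over random MLP configurations.

    Trains each sampled configuration n_samples times and returns the
    configuration with the highest mean validation accuracy.
    """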
    max_acc = 0.0
    max_conf = []

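    # Stop training once validation loss stops improving, keeping the best weights.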
    es = EarlyStopping(monitor='val_loss',
                       mode='min',
                       verbose=0,
                       patience=0,
                       restore_best_weights=True)

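    # Cache results per configuration so duplicate random draws are not retrained.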
    mlp_cache = {}
    for _ in tqdm(range(runs)):
        # generate random mlp
        conf = gen_random_mlp(size_input, hidden_layers, nodes_hidden_layers,
                              size_output)

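        # only evaluate configurations that have not been tried before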
        if conf not in mlp_cache:
            acc = 0.0
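            # average validation accuracy over n_samples independent trainings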
            for _ in range(n_samples):
                model = MLP(conf, use_bias_input=False)

                model.compile(optimizer='adam',
                              loss='sparse_categorical_crossentropy',
                              metrics=['accuracy'])

                history = model.fit(x_train,
                                    y_train,
                                    epochs=50,
                                    batch_size=300,
                                    validation_split=0.2,
                                    callbacks=[es],
                                    verbose=0)
                acc += history.history['val_accuracy'][-1]
            acc /= n_samples

            mlp_cache[conf] = acc

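            # keep the best configuration found so far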
            if acc > max_acc:
                max_acc = acc
                max_conf = conf

    return max_conf, max_acc
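
A minimal usage sketch is shown below. The array shapes, sizes, and run counts are illustrative, and gen_random_mlp and MLP are assumed to be available exactly as they are used inside mc_mlp_val:

# Illustrative call to mc_mlp_val (assumes gen_random_mlp and MLP are importable).
import numpy as np

x_train = np.random.rand(3000, 10).astype('float32')  # 3000 samples, 10 features
y_train = np.random.randint(0, 2, size=3000)          # integer class labels for 2 classes

best_conf, best_acc = mc_mlp_val(x_train,
                                 y_train,
                                 size_input=10,
                                 hidden_layers=3,
                                 nodes_hidden_layers=32,
                                 runs=20,       # fewer runs for a quick smoke test
                                 n_samples=2)
print(best_conf, best_acc)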