Example #1
        # Log the hyper-parameter combination being evaluated
        print(
            "lr{};epochs{};dropout{};dense_layers{};dense_layer_units{};batch_size{}"
            .format(*params))
        if params in final_train_perfs and params in final_val_perfs:
            print("Skipping: already have results")
            continue
        save_path = os.path.join(
            base_save_dir,
            "lr{};epochs{};dropout{};dense_layers{};dense_layer_units{};batch_size{}"
            .format(*params))
        if not os.path.exists(save_path):
            os.makedirs(save_path)

        # Per-metric accumulators, averaged over the ensemble members
        av_train_perf = {"acc": 0, "prec": 0, "rec": 0, "f1": 0}
        av_val_perf = {"acc": 0, "prec": 0, "rec": 0, "f1": 0}
        # Build and train each ensemble member independently
        for i in range(args.ensemble_size):
            print("Building model")
            model = ShallowNet(Xs["train"].shape[1], dropout, dense_layers,
                               dense_layer_units, args.weights)
            model.compile(optimizer=Adam(lr=lr), loss="binary_crossentropy")
            print("Model built")

            # Train with a per-epoch learning-rate schedule
            # (legacy Keras API: X/y keywords, nb_epoch, show_accuracy)
            history = model.fit(
                X=Xs["train"],
                y=ys["train"],
                batch_size=batch_size,
                nb_epoch=epochs,
                verbose=1,
                validation_data=(Xs["val"], ys["val"]),
                shuffle=True,
                show_accuracy=True,
                callbacks=[
                    LearningRateScheduler(lambda e: lr_schedule(epochs, lr, e))
                ])
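
The LearningRateScheduler callback above wraps an lr_schedule helper that is defined elsewhere in the project and is not shown in this snippet. As a hypothetical sketch only, a simple step-decay schedule with the same call signature (total epochs, base learning rate, current epoch index) could look like this:

def lr_schedule(total_epochs, base_lr, epoch):
    # Hypothetical step decay (the project's actual schedule is not shown):
    # keep the base rate for the first half of training, halve it for the
    # third quarter, and halve it again for the final quarter of epochs.
    if epoch < total_epochs // 2:
        return base_lr
    if epoch < (3 * total_epochs) // 4:
        return base_lr * 0.5
    return base_lr * 0.25

Whatever the real schedule does, it must return a plain float, which is what LearningRateScheduler expects from its schedule function.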