Example #1
    turn = 0
    # Exhaustive grid search over every hyperparameter combination.
    for lr, epochs, dropout, dense_layers, dense_layer_units, batch_size \
            in itertools.product(args.lr, args.epochs, args.dropout,
                                 args.dense_layers, args.dense_layer_units,
                                 args.batch_size):
        params = lr, epochs, dropout, dense_layers, dense_layer_units, batch_size
        print(
            "LR: {}, EPOCHS: {}, DROPOUT: {}, DENSE LAYERS: {}, DENSE_LAYER_UNITS: {}, BATCH_SIZE: {}"
            .format(*params))
        if params in final_train_perfs and params in final_val_perfs:
            print("Skipping: already have results")
            continue
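        # Bookkeeping assumed by the skip-check above (the assignment itself
        # is not part of this excerpt): a finished setting is presumably
        # recorded under its full hyperparameter tuple, e.g.
        #   final_train_perfs[params] = av_train_perf
        #   final_val_perfs[params] = av_val_perf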
        save_path = os.path.join(
            base_save_dir,
            "lr{};epochs{};dropout{};dense_layers{};dense_layer_units{};batch_size{}"
            .format(*params))
        if not os.path.exists(save_path):
            os.makedirs(save_path)

        av_train_perf = {"acc": 0, "prec": 0, "rec": 0, "f1": 0}
        av_val_perf = {"acc": 0, "prec": 0, "rec": 0, "f1": 0}
        for i in range(args.ensemble_size):
            print("Building model")
            model = ShallowNet(Xs["train"].shape[1], dropout, dense_layers,
                               dense_layer_units, args.weights)
            model.compile(optimizer=Adam(lr=lr), loss="binary_crossentropy")
            print("Model built")

            history = model.fit(
                X=Xs["train"],
                y=ys["train"],
                batch_size=batch_size,
                nb_epoch=epochs,
                verbose=1,
                validation_data=(Xs["val"], ys["val"]),
                shuffle=True,
                show_accuracy=True,
                callbacks=[
                    LearningRateScheduler(lambda e: lr_schedule(epochs, lr, e))
                ])
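
The lr_schedule helper passed to LearningRateScheduler is not part of this
excerpt. Below is a minimal sketch of a compatible schedule, assuming the
(total_epochs, base_lr, epoch) signature implied by the lambda above and a
simple step decay; the decay points are an assumption, not the original
implementation.

    def lr_schedule(total_epochs, base_lr, epoch):
        # Hypothetical step decay: keep the base rate for the first half of
        # training, halve it for the third quarter, halve it again after that.
        if epoch < total_epochs // 2:
            return base_lr
        if epoch < 3 * total_epochs // 4:
            return base_lr / 2.0
        return base_lr / 4.0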

Example #2

    base_save_dir = os.path.join(args.save_path, date)
    os.makedirs(base_save_dir)

    final_train_perfs = {}
    final_val_perfs = {}
    for lr, epochs, dropout, dense_layers, dense_layer_units, batch_size \
            in itertools.product(args.lr, args.epochs, args.dropout,
                                 args.dense_layers, args.dense_layer_units,
                                 args.batch_size):
        params = lr, epochs, dropout, dense_layers, dense_layer_units, batch_size
        print(
            "LR: {}, EPOCHS: {}, DROPOUT: {}, DENSE LAYERS: {}, DENSE_LAYER_UNITS: {}, BATCH_SIZE: {}"
            .format(*params))
        save_path = os.path.join(
            base_save_dir,
            "lr{};epochs{};dropout{};dense_layers{};dense_layer_units{};batch_size{}"
            .format(*params))
        os.makedirs(save_path)

        train_preds = np.zeros((Xs["train"].shape[0], args.ensemble_size))
        val_preds = np.zeros((Xs["val"].shape[0], args.ensemble_size))
    
        print("Building model")
        model = ShallowNet(Xs["train"].shape[1], dropout, dense_layers,
                           dense_layer_units, args.weights)
        model.compile(optimizer=Adam(lr=lr), loss="binary_crossentropy")
        print("Model built")

        history = model.fit(
            X=Xs["train"],
            y=ys["train"],
            batch_size=batch_size,
            nb_epoch=epochs,
            verbose=1,
            validation_data=(Xs["val"], ys["val"]),
            shuffle=True,
            show_accuracy=True,
        )

        model.layers.pop()
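
train_preds and val_preds are allocated with one column per ensemble member
but never filled within this excerpt, and model.layers.pop() drops the
network's final layer, presumably so later code can read penultimate
activations or rebuild the classifier head. Below is a hedged sketch of the
per-member bookkeeping the two arrays suggest; member_index is a hypothetical
loop variable, not from the original.

    # Assumed continuation: each trained member writes one column of
    # probabilities; averaging across columns gives the ensemble prediction.
    train_preds[:, member_index] = model.predict(Xs["train"]).ravel()
    val_preds[:, member_index] = model.predict(Xs["val"]).ravel()

    # Threshold the averaged scores for a binary ensemble decision.
    ensemble_val = val_preds.mean(axis=1) > 0.5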