Example #1
    train_datagen = ImageDataGenerator(
        preprocessing_function=preprocessing_function,
        vertical_flip=args.v_flip)

    val_datagen = ImageDataGenerator(
        preprocessing_function=preprocessing_function)

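    # flow_from_directory treats every sub-folder of TRAIN_DIR as one class and
    # yields batches of (images, one-hot labels) for categorical training.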
    train_generator = train_datagen.flow_from_directory(TRAIN_DIR,
                                                        target_size=(HEIGHT,
                                                                     WIDTH),
                                                        batch_size=BATCH_SIZE)

    validation_generator = val_datagen.flow_from_directory(
        VAL_DIR, target_size=(HEIGHT, WIDTH), batch_size=BATCH_SIZE)

    # Save the list of classes for prediction mode later
    class_list = utils.get_subfolders(TRAIN_DIR)
    utils.save_class_list(class_list, model_name=args.model, dataset_name="")

    finetune_model = utils.build_finetune_model(base_model,
                                                dropout=args.dropout,
                                                fc_layers=FC_LAYERS,
                                                num_classes=len(class_list))

    if args.continue_training:
        finetune_model.load_weights("./checkpoints/" + args.model +
                                    "_model_weights.h5")
        print("load success!")

    adam = Adam(lr=0.00001)
    finetune_model.compile(adam,
                           loss='categorical_crossentropy',
                           metrics=['accuracy'])
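The excerpt stops right after compile(). A minimal sketch of how training could continue, assuming a hypothetical NUM_EPOCHS constant, the checkpoint path used above, and that ModelCheckpoint has been imported from keras.callbacks:

    checkpoint = ModelCheckpoint("./checkpoints/" + args.model + "_model_weights.h5",
                                 monitor='val_acc',
                                 save_best_only=True,
                                 save_weights_only=True)
    finetune_model.fit_generator(train_generator,
                                 epochs=NUM_EPOCHS,
                                 steps_per_epoch=train_generator.samples // BATCH_SIZE,
                                 validation_data=validation_generator,
                                 validation_steps=validation_generator.samples // BATCH_SIZE,
                                 callbacks=[checkpoint])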
Example #2

    ### STOP EDITING HERE

    # Save the list of classes for prediction mode later
    class_list = utils.get_subfolders(TRAIN_DIR)
    utils.save_class_list(
        OUT_DIR,
        class_list,
        model_name=args.model,
        dataset_name=os.path.basename(args.dataset),
    )

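    # eval() turns the optimizer name string (e.g. "Adam") into the corresponding
    # Keras optimizer class, which is then instantiated with the requested learning rate.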
    optim = eval(args.optimizer)(lr=args.lr)
    if args.continue_training is not None:
        finetune_model = load_model(args.continue_training)
        if args.transfer_strategy == "finetune":
            utils.set_trainable(finetune_model, True)
    else:
        finetune_model = utils.build_finetune_model(
            base_model,
            dropout=args.dropout,
            fc_layers=FC_LAYERS,
            num_classes=len(class_list),
            as_fixed_feature_extractor=True)
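The excerpt is cut off inside the build_finetune_model call. Assuming the same compile pattern as the other examples, the freshly built optim would then be applied along these lines:

    finetune_model.compile(optim,
                           loss='categorical_crossentropy',
                           metrics=['accuracy'])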
Example #3
        USE_BOTTLENECKS = False

    # Create directories if needed
    if not os.path.isdir("checkpoints"):
        os.makedirs("checkpoints")
    if not os.path.isdir("bottlenecks"):
        os.makedirs("bottlenecks")
    if not os.path.isdir("models"):
        os.makedirs("models")

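    # Bottleneck features are the frozen base model's activations, computed once and
    # cached on disk so that only the small classifier head has to be trained.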
    if USE_BOTTLENECKS and not args.skip_bottleneck_check:
        save_bottleneck_features()

    class_list = utils.get_subfolders(TRAIN_DIR)
    utils.save_class_list(class_list,
                          model_name=args.model,
                          dataset_name=DATASET_NAME)
    num_classes = len(class_list)

    if num_classes > 2:
        class_mode = 'categorical'
    else:
        class_mode = 'binary'

    print("Class Mode -->", class_mode,
          "(" + str(num_classes) + " categories)")

    if USE_BOTTLENECKS:
        train_generator, validation_generator = prepare_bottleneck_data_generators(
            val_split=args.val_split, batch_size=BATCH_SIZE)
    else:
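        # The excerpt is cut off here. A plausible sketch of the non-bottleneck branch,
        # reusing args.val_split through ImageDataGenerator's validation_split
        # (HEIGHT, WIDTH and preprocessing_function are assumed globals, as in the
        # other examples):
        datagen = ImageDataGenerator(preprocessing_function=preprocessing_function,
                                     validation_split=args.val_split)
        train_generator = datagen.flow_from_directory(TRAIN_DIR,
                                                      target_size=(HEIGHT, WIDTH),
                                                      batch_size=BATCH_SIZE,
                                                      class_mode=class_mode,
                                                      subset='training')
        validation_generator = datagen.flow_from_directory(TRAIN_DIR,
                                                           target_size=(HEIGHT, WIDTH),
                                                           batch_size=BATCH_SIZE,
                                                           class_mode=class_mode,
                                                           subset='validation')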
Example #4
    val_datagen = ImageDataGenerator(
        preprocessing_function=preprocessing_function)

    train_generator = train_datagen.flow_from_directory(
        BASE_IMG_DIR + TRAIN_DIR,
        target_size=(HEIGHT, WIDTH),
        batch_size=BATCH_SIZE)

    validation_generator = val_datagen.flow_from_directory(
        BASE_IMG_DIR + VAL_DIR,
        target_size=(HEIGHT, WIDTH),
        batch_size=BATCH_SIZE)

    # Save the list of classes for prediction mode later
    class_list = utils.get_subfolders(BASE_IMG_DIR + TRAIN_DIR)
    utils.save_class_list(class_list, model_name=args.model)

    finetune_model = utils.build_finetune_model(base_model,
                                                dropout=args.dropout,
                                                fc_layers=FC_LAYERS,
                                                num_classes=len(class_list))

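    # Optionally resume from weights saved by an earlier run of the same model.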
    if args.continue_training:
        finetune_model.load_weights("./checkpoints/" + args.model +
                                    "_model_weights.h5")

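    # Very small learning rate so fine-tuning nudges rather than overwrites the
    # pretrained weights.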
    adam = Adam(lr=0.00001)
    finetune_model.compile(adam,
                           loss='categorical_crossentropy',
                           metrics=['accuracy'])