Example #1
    print(f"Dataset: Train: {len(x_train)} - Valid: {len(x_valid)} - Test: {len(x_test)}")

    """ Hyperparameters """
    shape = (2048, 1024, 3)
    num_classes = 30
    lr = 1e-4
    batch_size = 8
    epochs = 10

    """ Model """
    model = build_Unet(shape, num_classes)
    model.compile(loss="categorical_crossentropy", optimizer=tf.keras.optimizers.Adam(lr), metrics=["accuracy"])
    model.summary()

    """ data """
    train_dataset = tf_dataset(x_train, y_train, batch=batch_size)
    valid_dataset = tf_dataset(x_valid, y_valid, batch=batch_size)

    train_steps = len(x_train) // batch_size
    valid_steps = len(x_valid) // batch_size

    callbacks = [
        ModelCheckpoint("model.h5",verbose=1,  save_best_only=True),
        ReduceLROnPlateau(monitor="val_loss",patience=1,factor=0.1, verbose=1,min_lr=1e-6),
        EarlyStopping(monitor="val_loss", patience=5,verbose=1)
    ]
    start = datetime.now()
    model.fit(train_dataset,
              steps_per_epoch=train_steps,
              validation_steps=valid_steps,
              validation_data=valid_dataset,
              epochs=epochs,
              callbacks=callbacks)
    print(f"Training time: {datetime.now() - start}")
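All five examples build their input pipelines with a tf_dataset helper that is never shown in the snippets. Below is a minimal sketch of what it presumably does, assuming OpenCV decoding and a fixed 256x256 binary-mask setup; only the helper's name and signature come from the snippets, while the target size, normalization, and mask handling are assumptions (Example #1's variant would instead resize to its (2048, 1024) shape and one-hot its 30 classes).

import cv2
import numpy as np
import tensorflow as tf

def read_image_mask(image_path, mask_path, size=(256, 256)):
    # Paths arrive as byte strings from tf.numpy_function, hence .decode().
    image = cv2.resize(cv2.imread(image_path.decode(), cv2.IMREAD_COLOR), size) / 255.0
    mask = cv2.resize(cv2.imread(mask_path.decode(), cv2.IMREAD_GRAYSCALE), size) / 255.0
    return image.astype(np.float32), np.expand_dims(mask, -1).astype(np.float32)

def tf_dataset(x, y, batch=8):
    def parse(ix, iy):
        image, mask = tf.numpy_function(read_image_mask, [ix, iy], [tf.float32, tf.float32])
        image.set_shape([256, 256, 3])  # restore the static shapes lost by numpy_function
        mask.set_shape([256, 256, 1])
        return image, mask

    dataset = tf.data.Dataset.from_tensor_slices((x, y))
    dataset = dataset.map(parse, num_parallel_calls=tf.data.AUTOTUNE)
    return dataset.batch(batch).prefetch(tf.data.AUTOTUNE)

When fit is driven with steps_per_epoch over several epochs, as in Example #1, appending .repeat() to the returned dataset keeps the pipeline from running dry.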
Example #2
        all_images = [
            image, true_mask, pred_mask  # assumed names; the head of evaluate_normal is truncated in this snippet
        ]
        mask = np.concatenate(all_images, axis=1)
        cv2.imwrite(f"results/{name}", mask)


if __name__ == "__main__":
    print("")
    ## Seeding
    np.random.seed(42)
    tf.random.set_seed(42)

    ## Creating folders
    create_dir("results/")

    ## Hyperparameters
    batch_size = 32

    test_path = "../new_data/test/"
    test_x = sorted(glob(os.path.join(test_path, "image", "*.jpg")))
    test_y = sorted(glob(os.path.join(test_path, "mask", "*.jpg")))
    test_dataset = tf_dataset(test_x, test_y, batch=batch_size)

    test_steps = (len(test_x) // batch_size)
    if len(test_x) % batch_size != 0:
        test_steps += 1
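The two lines above are a manual ceiling division; math.ceil(len(test_x) / batch_size) would give the same step count, ensuring the final partial batch is still evaluated.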

    model = load_model("files/model.h5")

    model.evaluate(test_dataset, steps=test_steps)
    evaluate_normal(model, test_x, test_y)
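create_dir is another helper whose definition never appears in these snippets; it is presumably just a guarded os.makedirs, along these lines:

import os

def create_dir(path):
    """Create the directory if it does not already exist."""
    if not os.path.exists(path):
        os.makedirs(path)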
Example #3
from tensorflow.keras import backend as K

def f1_score(y_true, y_pred):
    # Head reconstructed from the standard Keras-backend F1 metric; the original
    # snippet is truncated and shows only the recall and f1_val lines below.
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    precision = true_positives / (K.sum(K.round(K.clip(y_pred, 0, 1))) + K.epsilon())
    recall = true_positives / (possible_positives + K.epsilon())
    f1_val = 2*(precision*recall)/(precision+recall+K.epsilon())
    return f1_val


if __name__ == "__main__":
    ## Dataset
    path = "new/"
    (train_x, train_y), (valid_x, valid_y), (test_x, test_y) = load_data(path)

    ## Hyperparameters
    batch = 32
    lr = 1e-4
    epochs = 20

    train_dataset = tf_dataset(train_x, train_y, batch=batch)
    valid_dataset = tf_dataset(valid_x, valid_y, batch=batch)

    model = build_model()

    opt = tf.keras.optimizers.Adam(lr)
    metrics = ["acc", tf.keras.metrics.Recall(), tf.keras.metrics.Precision(), f1_score]
    model.compile(loss="binary_crossentropy", optimizer=opt, metrics=metrics)
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir="./logs")

    callbacks = [
        ModelCheckpoint("files/model.h5"),
        ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=4),
        CSVLogger("files/data.csv"),
        tensorboard_callback,
        EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=False)
    ]

    # Assumed continuation: the snippet is cut off after the callback list.
    model.fit(train_dataset,
              validation_data=valid_dataset,
              epochs=epochs,
              callbacks=callbacks)
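Since the TensorBoard callback logs to ./logs, the run can be monitored live by launching tensorboard --logdir ./logs in a separate shell.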
Example #4
import matplotlib.pyplot as plt
if __name__ == "__main__":
    """ Hyperparamaters """
    dataset_path = "C:/Users/VISWKADE/Documents/Uniklinik_a/Uniklinik_a/1PIC/*"
    input_shape = (256, 256, 3)
    batch_size = 12
    epochs = 100
    lr = 1e-4
    model_path = "unet.h5"
    csv_path = "data.csv"
    """ Load the dataset """
    (train_x, train_y), (test_x, test_y) = load_dataset(dataset_path)
    print(f"Train: {len(train_x)} - {len(train_y)}")
    print(f"Test: {len(test_x)} - {len(test_y)}")

    train_dataset = tf_dataset(train_x, train_y, batch=batch_size)
    test_dataset = tf_dataset(test_x, test_y, batch=batch_size)
    """ Model """
    model = build_unet(input_shape)
    model.compile(loss="binary_crossentropy",
                  optimizer=tf.keras.optimizers.Adam(lr),
                  metrics=[
                      tf.keras.metrics.MeanIoU(num_classes=2),
                      tf.keras.metrics.Recall(),
                      tf.keras.metrics.Precision()
                  ])

    # model.summary()
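One caveat worth flagging here: tf.keras.metrics.MeanIoU compares integer class ids, while a sigmoid output emits probabilities, so feeding it raw predictions distorts the score. A small wrapper that rounds predictions first (the class name is invented for illustration) keeps the metric meaningful:

import tensorflow as tf

class ThresholdedMeanIoU(tf.keras.metrics.MeanIoU):
    """MeanIoU that thresholds sigmoid outputs at 0.5 before comparing."""

    def update_state(self, y_true, y_pred, sample_weight=None):
        return super().update_state(y_true, tf.round(y_pred), sample_weight)

Swapping ThresholdedMeanIoU(num_classes=2) into the metrics list above then measures IoU on hard masks rather than on raw probabilities.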

    callbacks = [
        ModelCheckpoint(model_path, monitor="val_loss", verbose=1),
        CSVLogger(csv_path)  # assumed continuation: the snippet is cut off mid-list and csv_path is otherwise unused
    ]

    model.fit(train_dataset,
              validation_data=test_dataset,
              epochs=epochs,
              callbacks=callbacks)
Example #5
    batch_size = args['batch_size']
    output_dir = args['output_dir']

    ROOT_DIR = os.path.abspath(os.curdir)
    OUTPUT_DIR = os.path.join(ROOT_DIR, output_dir,
                              datetime.now().strftime("%d-%m-%YT%H-%M-%S"))
    PREDICTIONS_DIR = os.path.join(OUTPUT_DIR, 'predictions')

    if not os.path.exists(OUTPUT_DIR):
        os.makedirs(OUTPUT_DIR)
    if not os.path.exists(PREDICTIONS_DIR):
        os.makedirs(PREDICTIONS_DIR)

    (train_x, train_y), (valid_x, valid_y), (test_x, test_y) = load_data()

    train_dataset = tf_dataset(train_x, train_y, batch=batch_size)
    valid_dataset = tf_dataset(valid_x, valid_y, batch=batch_size)
    test_dataset = tf_dataset(test_x, test_y, batch=batch_size)
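The strategy object used below is created before this excerpt starts; on a multi-GPU machine it would typically be a MirroredStrategy (this exact construction is an assumption, not part of the snippet):

import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()
print(f"Number of replicas in sync: {strategy.num_replicas_in_sync}")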

    with strategy.scope():
        model = build_model()
        opt = tf.keras.optimizers.Adam(learning_rate)
        metrics = [
            'acc',
            tf.keras.metrics.Recall(),
            tf.keras.metrics.Precision(), iou, dice_coefficient
        ]
        model.compile(loss="binary_crossentropy",
                      optimizer=opt,
                      metrics=metrics)
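As with the other helpers, the iou and dice_coefficient metrics passed to compile are not shown in the snippet. Their usual soft (differentiable) definitions for binary masks, assuming predictions in [0, 1] and a small smoothing constant to guard against empty masks, look like this:

from tensorflow.keras import backend as K

SMOOTH = 1e-15

def dice_coefficient(y_true, y_pred):
    y_true, y_pred = K.flatten(y_true), K.flatten(y_pred)
    intersection = K.sum(y_true * y_pred)
    return (2.0 * intersection + SMOOTH) / (K.sum(y_true) + K.sum(y_pred) + SMOOTH)

def iou(y_true, y_pred):
    y_true, y_pred = K.flatten(y_true), K.flatten(y_pred)
    intersection = K.sum(y_true * y_pred)
    union = K.sum(y_true) + K.sum(y_pred) - intersection
    return (intersection + SMOOTH) / (union + SMOOTH)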