Esempio n. 1
0
# Training data generator: scale pixels to [0, 1] and draw random
# CROP_HEIGHT x CROP_WIDTH crops from each source image.
train_crop = (cst.CROP_HEIGHT, cst.CROP_WIDTH)
train_datagen = DataGenerator(rescale=1. / 255, random_crop_size=train_crop)
train_generator = train_datagen.flow_from_directory(
    directory_small=cst.TRAIN_SMALL_SIZE,
    directory_medium=cst.TRAIN_MEDIUM_SIZE,
    directory_large=cst.TRAIN_LARGE_SIZE,
    batch_size=BATCH_SIZE,
)

# Validation data generator: same rescaling and random-crop settings as
# training, but seeded so validation batches are reproducible across runs.
val_datagen = DataGenerator(
    rescale=1. / 255,
    random_crop_size=(cst.CROP_HEIGHT, cst.CROP_WIDTH),
)
val_generator = val_datagen.flow_from_directory(
    directory_small=cst.VALID_SMALL_SIZE,
    directory_medium=cst.VALID_MEDIUM_SIZE,
    directory_large=cst.VALID_LARGE_SIZE,
    batch_size=BATCH_SIZE,
    seed=1,
)

# Build the partial-convolution U-Net; checkpoints go to WEIGHT_PATH and
# training metrics are streamed to TensorBoard under TFLOG_PATH.
model = PConvUnet(weight_filepath=cst.WEIGHT_PATH)

# Both phases sweep ~8000 samples per epoch regardless of batch size.
steps = 8000 // BATCH_SIZE
model.fit(
    train_generator,
    steps_per_epoch=steps,
    validation_data=val_generator,
    validation_steps=steps,
    epochs=300,
    plot_callback=None,
    callbacks=[TensorBoard(log_dir=cst.TFLOG_PATH, write_graph=False)],
)
Esempio n. 2
0
        plt.close()


# Instantiate the model and resume from the most recent checkpoint.
# (The redundant bare `PConvUnet()` that was immediately overwritten by the
# weight-filepath instance has been removed — it only wasted a full model
# construction.)
model = PConvUnet(weight_filepath='data/logs/')
latest_weights = get_latest_weights_file()
print(latest_weights)
model.load(latest_weights)

# Initial training phase: 50 epochs with periodic plot snapshots and
# TensorBoard logging.
tb_logger = TensorBoard(log_dir='../data/logs/initial_training',
                        write_graph=False)
model.fit(
    train_generator,
    steps_per_epoch=100,
    validation_data=val_generator,
    validation_steps=10,
    epochs=50,
    plot_callback=plot_callback,
    callbacks=[tb_logger],
)

# Fine-tuning restart: rebuild the model, then restore the newest
# checkpoint with batch-norm layers frozen and a reduced learning rate.
model = PConvUnet(weight_filepath='data/logs/')
latest_weights = get_latest_weights_file()
model.load(latest_weights, train_bn=False, lr=0.00005)

# Run training for certain amount of epochs
model.fit(train_generator,
          steps_per_epoch=100,
          validation_data=val_generator,
          validation_steps=10,
Esempio n. 3
0
    pred_img = model.predict([masked, mask])

    # Clear current output and display test images
    for i in range(len(ori)):
        imsave('result/{}_orginal.png'.format(i), ori[i,:,:,:])
        imsave('result/{}_masked.png'.format(i), masked[i,:,:,:])
        imsave('result/{}_pred.png'.format(i), pred_img[i,:,:,:])


# Ensure the checkpoint directory exists. os.makedirs(..., exist_ok=True)
# also creates the intermediate 'result' directory (os.mkdir would raise
# FileNotFoundError if it were missing) and avoids the check-then-create
# race of the exists()+mkdir() pattern.
os.makedirs('result/logs', exist_ok=True)

# Short training run with checkpoints written under result/logs/.
model = PConvUnet(weight_filepath='result/logs/')
model.fit(
    generator,
    steps_per_epoch=1000,
    epochs=1,
    plot_callback=plot_callback,
)

# Load image. cv2.imread returns None (it does not raise) when the path is
# missing or unreadable; fail fast with a clear error instead of a cryptic
# TypeError at the division below.
org = cv2.imread('./data/building.jpg')
if org is None:
    raise FileNotFoundError("Could not read image './data/building.jpg'")
org = cv2.cvtColor(org, cv2.COLOR_BGR2RGB)
org = org / 255  # scale uint8 pixels to [0, 1] floats
shape = org.shape
print(f"Shape of image is: {shape}")

# Load mask: random binary mask matching the image's height/width
org_mask = random_mask(shape[0], shape[1])

# Image + mask: deep-copy so the original image stays untouched when the
# mask is applied.
masked_org = deepcopy(org)
    width_shift_range=0.2,
    height_shift_range=0.2,
    rescale=1. / 255,
    horizontal_flip=True
)
# Training batches: 256x256 images drawn from TRAIN_DIR.
train_generator = train_datagen.flow_from_directory(
    TRAIN_DIR,
    target_size=(256, 256),
    batch_size=BATCH_SIZE,
)

# Validation batches: rescale only, seeded for reproducible evaluation.
val_datagen = DataGenerator(rescale=1. / 255)
val_generator = val_datagen.flow_from_directory(
    VAL_DIR,
    target_size=(256, 256),
    batch_size=BATCH_SIZE,
    seed=1,
)

# Instantiate the model; checkpoints are written under the mounted prefix.
model_dir = "{}/PConv-Keras/data/model/".format(path_prefix)
model = PConvUnet(weight_filepath=model_dir)

# Run training for certain amount of epochs
model.fit(
    train_generator,
    steps_per_epoch=10,
    validation_data=val_generator,
    validation_steps=100,
    epochs=5,
    plot_callback=None,
    callbacks=[
        TensorBoard(
            log_dir="{}/PConv-Keras/data/model/initial_training".format(path_prefix),
            write_graph=False,
        )
    ],
)