Example #1
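# NOTE: this example references an `args` namespace that is never defined in the
# snippet. The block below is an assumed argparse setup, with flag names inferred
# from the args.* attributes used further down; types and defaults are guesses.
import argparse

parser = argparse.ArgumentParser(description="Run a trained 2D U-Net on a test set")
parser.add_argument("--verbose", action="store_true",
                    help="show TensorFlow startup messages")
parser.add_argument("--size", type=int, default=256,
                    help="side length of the (square) input images")
parser.add_argument("--ablated", action="store_true",
                    help="use the ablated variant of the model")
parser.add_argument("--weights", type=str, required=True,
                    help="path to the pretrained model weights (.h5)")
parser.add_argument("--dir", type=str, default="test",
                    help="directory containing the test images")
parser.add_argument("--tests", type=int, default=30,
                    help="number of test images to process")
args = parser.parse_args()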
# If the --verbose argument is not supplied, suppress all of the TensorFlow startup messages.
if not args.verbose:
    import os
    os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
    import tensorflow as tf
    tf.logging.set_verbosity(tf.logging.ERROR)

import models
import data
from datetime import datetime
from keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping
from keras.optimizers import Adam

# Compile the model
model = models.unet2D(size=args.size, ablated=args.ablated)
model.compile(optimizer=Adam(lr=0.0001),
              loss="binary_crossentropy",
              metrics=["accuracy"])
model.load_weights(args.weights)

# Process the images in the test set and save the results to the test/ directory.
test_gen = data.test_generator(args.dir,
                               num_image=args.tests,
                               target_size=(args.size, args.size))
results = model.predict_generator(test_gen, args.tests, verbose=1)
# Use the custom save_result() method defined in data.py to save the results as
# greyscale .png images.
print("Saving results in ./test/")
data.save_result("test", results)
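# For reference, a minimal sketch of what a save_result-style helper could look
# like. It assumes `results` is a float array of shape (N, H, W, 1) with values
# in [0, 1]; the actual data.save_result() defined in data.py may differ.
import os
import numpy as np
from PIL import Image

def save_result_sketch(save_path, results):
    """Write each predicted mask as an 8-bit greyscale .png."""
    os.makedirs(save_path, exist_ok=True)
    for i, item in enumerate(results):
        img = (np.squeeze(item) * 255).astype(np.uint8)   # (H, W) uint8 mask
        Image.fromarray(img, mode="L").save(os.path.join(save_path, f"{i}_predict.png"))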
Example #2
# Data and model checkpoint directories
import argparse
import os
import pickle

parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', type=str, default='',
                    help='data directory containing input.txt with training examples')
# --final_model is used below but was never declared in the original snippet.
parser.add_argument('--final_model', type=str, default='.',
                    help='directory for the trained model and its history')

args = parser.parse_args()
files = os.listdir(args.data_dir)
print(files)

##
# TODO: save the model weights and training history to S3
# (a hedged upload sketch follows at the end of this example).
##
# Save final model out to opt.
##

# Override the directory listing with a fixed subset of example IDs.
files = ['ID_0a336e630', 'ID_0ba79c0ef', 'ID_0bc7199c6']
#path = '../Example Bucket'


# `dw` (data wrangling) and `md` (model definitions) are project-local modules;
# their import statements are not shown in the original snippet.
train_gen = dw.DataGenerator(folder=args.data_dir, batch_size=1, file_list=files, shuffle=False)
test_gen = dw.DataGenerator(folder=args.data_dir, batch_size=1, file_list=files, shuffle=False)

model = md.unet2D(input_size=(512, 512, 4))
history = md.train_model(model, train_gen, test_gen, name="model", checkpoint_dir=args.final_model, epochs=3)

print(args.final_model)

#model.save(args.final_model + '/trainedmodel.h5') # saving the model
with open(args.final_model + '/trainHistoryOld', 'wb') as handle: # saving the history of the model
    pickle.dump(history.history, handle)
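# As noted in the TODO above, the weights and history still need to be pushed to
# S3. A minimal sketch with boto3 is given below; the bucket name and key prefix
# are placeholders, and the snippet assumes AWS credentials are already configured
# in the environment.
import boto3

s3 = boto3.client("s3")
bucket = "example-bucket"          # placeholder bucket name
prefix = "unet2d-runs/latest"      # placeholder key prefix

# Upload the serialized training history written above.
s3.upload_file(args.final_model + '/trainHistoryOld', bucket, prefix + '/trainHistoryOld')

# If the model itself is saved (see the commented-out model.save() above),
# it can be uploaded the same way:
# model.save(args.final_model + '/trainedmodel.h5')
# s3.upload_file(args.final_model + '/trainedmodel.h5', bucket, prefix + '/trainedmodel.h5')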