def get_estimator(batch_size=32, epochs=25, model_dir=None):
    """Build a FastEstimator Estimator for U-Net bird segmentation on CUB200.

    Args:
        batch_size (int): Batch size used by the data pipeline.
        epochs (int): Number of training epochs.
        model_dir (str, optional): Directory where the best model checkpoint
            is saved. Defaults to a fresh temporary directory per call.

    Returns:
        fe.Estimator: Configured estimator, ready for training.
    """
    # BUG FIX: the original default `model_dir=tempfile.mkdtemp()` was
    # evaluated once at import time, so every call shared one directory and a
    # temp dir was created even when the caller supplied a path. Create lazily.
    if model_dir is None:
        model_dir = tempfile.mkdtemp()
    # Load CUB200 dataset; csv_path indexes the samples, path is the data root.
    csv_path, path = cub200.load_data()
    # Preprocess images and segmentation masks once into TFRecords.
    writer = RecordWriter(
        save_dir=os.path.join(path, "FEdata"),
        train_data=csv_path,
        validation_data=0.2,  # hold out 20% of the training data for eval
        ops=[
            ImageReader(inputs='image', parent_path=path),
            Resize(target_size=(128, 128), keep_ratio=True, outputs='image'),
            MatReader(inputs='annotation', parent_path=path),
            SelectDictKey(),
            Resize((128, 128), keep_ratio=True),
            # Masks are single-channel; make the channel axis explicit.
            Reshape(shape=(128, 128, 1), outputs="annotation")
        ])
    # Data pipeline: scale image pixels to [0, 1].
    pipeline = fe.Pipeline(batch_size=batch_size,
                           data=writer,
                           ops=Minmax(inputs='image', outputs='image'))
    # Network: U-Net predicting a mask, trained with binary cross-entropy.
    model = FEModel(model_def=UNet, model_name="unet_cub", optimizer=tf.optimizers.Adam())
    network = fe.Network(ops=[
        ModelOp(inputs='image', model=model, outputs='mask_pred'),
        BinaryCrossentropy(y_true='annotation', y_pred='mask_pred')
    ])
    # Estimator: track Dice score and keep the best checkpoint.
    traces = [
        Dice(true_key="annotation", pred_key='mask_pred'),
        ModelSaver(model_name="unet_cub", save_dir=model_dir, save_best=True)
    ]
    estimator = fe.Estimator(network=network,
                             pipeline=pipeline,
                             traces=traces,
                             epochs=epochs,
                             log_steps=50)
    return estimator
def get_estimator(batch_size=8, epochs=25, steps_per_epoch=None, validation_steps=None, model_dir=None):
    """Build an Estimator for joint classification + segmentation on CUB200.

    A ResUnet50 backbone predicts both a class label and a segmentation mask;
    the two losses are combined through a learned uncertainty-weighting model.

    Args:
        batch_size (int): Batch size used by the data pipeline.
        epochs (int): Number of training epochs.
        steps_per_epoch (int, optional): Limit on training steps per epoch.
        validation_steps (int, optional): Limit on validation steps per epoch.
        model_dir (str, optional): Directory where the best model checkpoint
            is saved. Defaults to a fresh temporary directory per call.

    Returns:
        fe.Estimator: Configured estimator, ready for training.
    """
    # BUG FIX: the original default `model_dir=tempfile.mkdtemp()` was
    # evaluated once at import time, so every call shared one directory and a
    # temp dir was created even when the caller supplied a path. Create lazily.
    if model_dir is None:
        model_dir = tempfile.mkdtemp()
    # Load CUB200 dataset; csv_path indexes the samples, path is the data root.
    csv_path, path = cub200.load_data()
    # Preprocess images and segmentation masks once into TFRecords.
    writer = RecordWriter(
        save_dir=os.path.join(path, "tfrecords"),
        train_data=csv_path,
        validation_data=0.2,  # hold out 20% of the training data for eval
        ops=[
            ImageReader(inputs='image', parent_path=path),
            Resize(target_size=(512, 512), keep_ratio=True, outputs='image'),
            MatReader(inputs='annotation', parent_path=path),
            SelectDictKey(),
            Resize((512, 512), keep_ratio=True),
            # Masks are single-channel; make the channel axis explicit.
            Reshape(shape=(512, 512, 1), outputs="annotation")
        ])
    # step 1, pipeline: identical augmentation is applied to image and mask
    # (train mode only) so they stay spatially aligned.
    pipeline = fe.Pipeline(
        batch_size=batch_size,
        data=writer,
        ops=[
            Augmentation2D(inputs=("image", "annotation"),
                           outputs=("image", "annotation"),
                           mode="train",
                           rotation_range=15.0,
                           zoom_range=[0.8, 1.2],
                           flip_left_right=True),
            Rescale(inputs='image', outputs='image')
        ])
    # step 2, network: both models share one optimizer and minimize the
    # uncertainty-weighted total loss.
    opt = tf.optimizers.Adam(learning_rate=0.0001)
    resunet50 = fe.build(model_def=ResUnet50, model_name="resunet50", optimizer=opt, loss_name="total_loss")
    uncertainty = fe.build(model_def=UncertaintyLoss, model_name="uncertainty", optimizer=opt, loss_name="total_loss")
    network = fe.Network(ops=[
        ModelOp(inputs='image', model=resunet50, outputs=["label_pred", "mask_pred"]),
        SparseCategoricalCrossentropy(inputs=["label", "label_pred"], outputs="cls_loss"),
        BinaryCrossentropy(inputs=["annotation", "mask_pred"], outputs="seg_loss"),
        # Learned weighting of the two task losses (Kendall-style uncertainty).
        ModelOp(inputs=("cls_loss", "seg_loss"), model=uncertainty, outputs="total_loss"),
        Loss(inputs="total_loss", outputs="total_loss")
    ])
    # step 3, estimator: track Dice + Accuracy, keep the best checkpoint,
    # and cycle the learning rate.
    traces = [
        Dice(true_key="annotation", pred_key='mask_pred'),
        Accuracy(true_key="label", pred_key="label_pred"),
        ModelSaver(model_name="resunet50", save_dir=model_dir, save_best=True),
        LRController(model_name="resunet50", lr_schedule=CyclicLRSchedule())
    ]
    estimator = fe.Estimator(network=network,
                             pipeline=pipeline,
                             traces=traces,
                             epochs=epochs,
                             steps_per_epoch=steps_per_epoch,
                             validation_steps=validation_steps)
    return estimator
import tempfile import tensorflow as tf from fastestimator.architecture.unet import unet from fastestimator.dataset import cub200 from fastestimator.estimator.estimator import Estimator from fastestimator.estimator.trace import Dice from fastestimator.pipeline.dynamic.preprocess import AbstractPreprocessing, ImageReader, MatReader, Resize from fastestimator.pipeline.pipeline import Pipeline from fastestimator.pipeline.static.preprocess import Minmax, Reshape DATA_SAVE_PATH = os.path.join(tempfile.gettempdir(), 'CUB200') # Download CUB200 dataset. csv_path, path = cub200.load_data(path=DATA_SAVE_PATH) class Network: """Load U-Net and define train and eval ops. """ def __init__(self): self.model = unet("image", "annotation") self.optimizer = tf.optimizers.Adam(learning_rate=0.0001) self.loss = tf.losses.BinaryCrossentropy() def train_op(self, batch): """Training loop. Args: batch (`Tensor`): Batch data for training.