# Example 1
def get_estimator(batch_size=4,
                  epochs=1000,
                  steps_per_epoch=1000,
                  validation_steps=None,
                  model_dir=None,
                  imagenet_path=None):
    """Build an SRResNet super-resolution training estimator.

    Args:
        batch_size: number of examples per pipeline batch.
        epochs: number of training epochs.
        steps_per_epoch: training steps per epoch.
        validation_steps: unused; kept for interface compatibility.
        model_dir: unused — the model is saved under the dataset path (kept
            for interface compatibility). Defaults to None instead of
            ``tempfile.mkdtemp()``, which was evaluated once at definition
            time and created a temp directory as an import-time side effect.
        imagenet_path: folder path of ImageNet dataset, containing train and
            val subdirs.

    Returns:
        A configured ``fe.Estimator`` ready for training.

    Raises:
        AssertionError: if ``imagenet_path`` is not provided.
    """
    assert imagenet_path is not None, 'Pass valid folder path of Imagenet dataset'
    # Ensure ImageNet dataset is downloaded. Pass the folder containing train
    # and val subdirectories. Currently the script doesn't download the data.
    train_csv, val_csv, path = srgan.load_data(path_imgnet=imagenet_path)

    # Serialize low-res/high-res image pairs into GZIP-compressed TFRecords.
    writer = fe.RecordWriter(
        save_dir=os.path.join(path, "sr_tfrecords"),
        train_data=train_csv,
        validation_data=val_csv,
        ops=[ImageReader(inputs="lowres", outputs="lowres"), ImageReader(inputs="highres", outputs="highres")],
        compression="GZIP",
        write_feature=['lowres', 'highres'])

    pipeline = fe.Pipeline(
        max_shuffle_buffer_mb=3000,
        batch_size=batch_size,
        data=writer,
        ops=[
            LowresRescale(inputs='lowres', outputs='lowres'),
            Rescale(inputs='highres', outputs='highres'), ])

    # prepare model: SRResNet generator trained with per-pixel MSE only.
    model = fe.build(model_def=lambda: get_generator(input_shape=(24, 24, 3)),
                     model_name="srresnet_gen",
                     optimizer=tf.optimizers.Adam(learning_rate=0.0001),
                     loss_name="mse_loss")
    network = fe.Network(ops=[
        ModelOp(inputs='lowres', model=model, outputs='superres'),
        PixelMeanSquaredError(inputs=('superres', 'highres'), outputs="mse_loss")
    ])

    # NOTE(review): the original code always overwrote any caller-supplied
    # model_dir with the dataset path; that behavior is preserved here
    # (os.path.join(path) with a single argument was a no-op).
    model_dir = path
    estimator = fe.Estimator(network=network,
                             pipeline=pipeline,
                             steps_per_epoch=steps_per_epoch,
                             epochs=epochs,
                             traces=[
                                 ModelSaver(model_name="srresnet_gen", save_dir=model_dir, save_best=True), ])
    return estimator
# Example 2
def get_estimator(batch_size=4,
                  epochs=200,
                  steps_per_epoch=1000,
                  validation_steps=None,
                  model_dir=None,
                  imagenet_path=None,
                  srresnet_model_path=None):
    """Build an SRGAN training estimator (generator + discriminator).

    Args:
        batch_size: number of examples per pipeline batch.
        epochs: number of training epochs.
        steps_per_epoch: training steps per epoch.
        validation_steps: unused; kept for interface compatibility.
        model_dir: unused — models are saved under the dataset path (kept
            for interface compatibility). Defaults to None instead of
            ``tempfile.mkdtemp()``, which was evaluated once at definition
            time and created a temp directory as an import-time side effect.
        imagenet_path: folder path of ImageNet dataset, containing train and
            val subdirs.
        srresnet_model_path: srresnet model weights; the srgan generator gets
            initialized with these weights.

    Returns:
        A configured ``fe.Estimator`` ready for training.

    Raises:
        AssertionError: if ``imagenet_path`` or ``srresnet_model_path`` is
            not provided.
    """
    assert imagenet_path is not None, 'Pass valid folder path of Imagenet dataset'
    assert srresnet_model_path is not None, 'srresnet model is needed to initialize srgan generator model'
    # Ensure ImageNet dataset is downloaded. Pass the folder containing train
    # and val subdirectories. Currently the script doesn't download the data.
    train_csv, val_csv, path = srgan.load_data(path_imgnet=imagenet_path)

    # Serialize low-res/high-res image pairs into GZIP-compressed TFRecords.
    writer = fe.RecordWriter(save_dir=os.path.join(path, "sr_tfrecords"),
                             train_data=train_csv,
                             validation_data=val_csv,
                             ops=[
                                 ImageReader(inputs="lowres",
                                             outputs="lowres"),
                                 ImageReader(inputs="highres",
                                             outputs="highres")
                             ],
                             compression="GZIP",
                             write_feature=['lowres', 'highres'])

    pipeline = fe.Pipeline(max_shuffle_buffer_mb=3000,
                           batch_size=batch_size,
                           data=writer,
                           ops=[
                               LowresRescale(inputs='lowres',
                                             outputs='lowres'),
                               Rescale(inputs='highres', outputs='highres'),
                           ])

    # prepare model: generator warm-started from pretrained SRResNet weights,
    # discriminator built from scratch.
    model_gen = fe.build(model_def=srresnet_model_path,
                         model_name="srgan_gen",
                         optimizer=tf.optimizers.Adam(learning_rate=0.0001),
                         loss_name="mse_adv_loss",
                         custom_objects={'SubPixelConv2D': SubPixelConv2D})
    model_desc = fe.build(
        model_def=lambda: get_discriminator(input_shape=(96, 96, 3)),
        model_name="srgan_desc",
        optimizer=tf.optimizers.Adam(learning_rate=0.0001),
        loss_name="desc_loss")

    # Adversarial setup: discriminator scores both generated and real
    # high-res images; generator loss combines VGG-content MSE + adversarial.
    network = fe.Network(ops=[
        ModelOp(inputs='lowres', model=model_gen, outputs='superres'),
        ModelOp(inputs='superres', model=model_desc, outputs='pred_fake'),
        ModelOp(inputs='highres', model=model_desc, outputs='pred_true'),
        DLoss(inputs=("pred_true", "pred_fake"),
              outputs=("desc_loss", "real_loss", "fake_loss")),
        GLoss(inputs=('superres', 'highres', 'pred_fake'),
              outputs=("mse_adv_loss", "mse_loss", "adv_loss"),
              vgg_content=True)
    ])

    # NOTE(review): the original code always overwrote any caller-supplied
    # model_dir with the dataset path; that behavior is preserved here
    # (os.path.join(path) with a single argument was a no-op).
    model_dir = path
    estimator = fe.Estimator(
        network=network,
        pipeline=pipeline,
        steps_per_epoch=steps_per_epoch,
        epochs=epochs,
        traces=[
            ModelSaver(model_name="srgan_gen",
                       save_dir=model_dir,
                       save_best=True),
            ModelSaver(model_name="srgan_desc",
                       save_dir=model_dir,
                       save_best=True),
            LRController(model_name="srgan_gen",
                         lr_schedule=MyLRSchedule(schedule_mode='step')),
            LRController(model_name="srgan_desc",
                         lr_schedule=MyLRSchedule(schedule_mode='step'))
        ])

    return estimator
# Example 3
def get_estimator(batch_size=1,
                  epochs=500,
                  steps_per_epoch=128,
                  model_dir=None,
                  path_brats=None):
    """Build a BraTs 2018 3D-UNet (Isensee) segmentation estimator.

    Args:
        batch_size: number of examples per pipeline batch.
        epochs: number of training epochs.
        steps_per_epoch: training steps per epoch (also used as log interval).
        model_dir: unused — the model is saved under ``path_brats`` (kept
            for interface compatibility). Defaults to None instead of
            ``tempfile.mkdtemp()``, which was evaluated once at definition
            time and created a temp directory as an import-time side effect.
        path_brats: folder path of BraTs 2018 dataset, containing a data
            subdir in turn having LGG and HGG data. When None, defaults to
            ``$HOME/fastestimator_data/BraTs``. Expected folder structure:
            path_brats/
            |----------data/
                       |----LGG/
                       |----HGG/

    Returns:
        A configured ``fe.Estimator`` ready for training.

    Raises:
        AssertionError: if no path is given and ``$HOME`` is unset.
    """
    # Resolve the default lazily: the original default expression
    # os.path.join(os.getenv('HOME'), ...) ran at definition time and raised
    # TypeError whenever $HOME was unset in the environment.
    if path_brats is None:
        home = os.getenv('HOME')
        assert home is not None, 'Pass valid folder path of BraTs 2018 dataset'
        path_brats = os.path.join(home, 'fastestimator_data', 'BraTs')
    # Ensure BraTs 2018 dataset is downloaded. Pass the folder containing
    # train and val subdirectories. Currently the script doesn't download it.
    train_csv, val_csv, path_brats = brats.load_data(path_brats=path_brats,
                                                     resized_img_shape=(128,
                                                                        128,
                                                                        128),
                                                     bias_correction=False)
    # Serialize NIfTI volumes and segmentation masks to TFRecords.
    writer = fe.RecordWriter(save_dir=os.path.join(path_brats, "tfrecords"),
                             train_data=train_csv,
                             validation_data=val_csv,
                             ops=[
                                 NIBImageReader(inputs="mod_img",
                                                outputs="mod_img"),
                                 NIBImageReader(inputs="seg_mask",
                                                outputs="seg_mask")
                             ],
                             compression="GZIP",
                             write_feature=['mod_img', 'seg_mask'])

    # 3D augmentation is applied in train mode only.
    pipeline = fe.Pipeline(data=writer,
                           batch_size=batch_size,
                           ops=[
                               SplitMaskLabelwise(inputs="seg_mask",
                                                  outputs="seg_mask"),
                               Augmentation3D(inputs=("mod_img", "seg_mask"),
                                              outputs=("mod_img", "seg_mask"),
                                              mode='train')
                           ])

    model_unet3d_is = fe.build(
        model_def=lambda: UNet3D_Isensee(input_shape=(4, 64, 64, 64)),
        model_name="brats_unet3d_is",
        optimizer=tf.optimizers.Adam(learning_rate=5e-4),
        loss_name="wdice_loss")

    network = fe.Network(ops=[
        ModelOp(
            inputs="mod_img", model=model_unet3d_is, outputs="pred_seg_mask"),
        WeightedDiceLoss(inputs=("seg_mask", "pred_seg_mask"),
                         outputs=("wdice_loss"))
    ])
    # NOTE(review): the original code always overwrote any caller-supplied
    # model_dir with the dataset path; that behavior is preserved here.
    model_dir = path_brats
    estimator = fe.Estimator(network=network,
                             pipeline=pipeline,
                             steps_per_epoch=steps_per_epoch,
                             epochs=epochs,
                             traces=[
                                 ModelSaver(model_name="brats_unet3d_is",
                                            save_dir=model_dir,
                                            save_best=True),
                                 LRController(model_name="brats_unet3d_is",
                                              reduce_patience=10,
                                              reduce_factor=0.5,
                                              reduce_on_eval=True,
                                              min_lr=1e-07)
                             ],
                             log_steps=steps_per_epoch)

    return estimator
def get_estimator(data_path=None, model_dir=None, batch_size=2):
    """Build a RetinaNet object-detection estimator on COCO-style data.

    Args:
        data_path: folder path of the dataset passed to ``load_data``.
        model_dir: directory where the best model is saved. Defaults to a
            fresh ``tempfile.mkdtemp()`` per call — the original default was
            evaluated once at definition time, so every call (and import)
            shared a single temp directory created as a side effect.
        batch_size: number of examples per pipeline batch.

    Returns:
        A configured ``fe.Estimator`` ready for training.
    """
    # Resolve the temp-dir default lazily instead of at definition time.
    if model_dir is None:
        model_dir = tempfile.mkdtemp()
    # prepare dataset
    train_csv, val_csv, path = load_data(path=data_path)
    # Preprocess images + bboxes (resize, flip) and generate anchor targets,
    # then serialize everything to GZIP-compressed TFRecords.
    writer = fe.RecordWriter(
        save_dir=os.path.join(path, "retinanet_coco_1024"),
        train_data=train_csv,
        validation_data=val_csv,
        ops=[
            ImageReader(inputs="image", parent_path=path, outputs="image"),
            String2List(inputs=["x1", "y1", "width", "height", "obj_label"],
                        outputs=["x1", "y1", "width", "height", "obj_label"]),
            ResizeImageAndBbox(
                target_size=(1024, 1024),
                keep_ratio=True,
                inputs=["image", "x1", "y1", "width", "height"],
                outputs=["image", "x1", "y1", "width", "height"]),
            FlipImageAndBbox(inputs=[
                "image", "x1", "y1", "width", "height", "obj_label", "id"
            ],
                             outputs=[
                                 "image", "x1", "y1", "width", "height",
                                 "obj_label", "id"
                             ]),
            GenerateTarget(inputs=("obj_label", "x1", "y1", "width", "height"),
                           outputs=("cls_gt", "x1_gt", "y1_gt", "w_gt",
                                    "h_gt"))
        ],
        expand_dims=True,
        compression="GZIP",
        write_feature=[
            "image", "id", "cls_gt", "x1_gt", "y1_gt", "w_gt", "h_gt",
            "obj_label", "x1", "y1", "width", "height"
        ])
    # prepare pipeline: pad the variable-length box features to a fixed
    # length so they can be batched.
    pipeline = fe.Pipeline(batch_size=batch_size,
                           data=writer,
                           ops=[
                               Rescale(inputs="image", outputs="image"),
                               Pad(padded_shape=[2051],
                                   inputs=[
                                       "x1_gt", "y1_gt", "w_gt", "h_gt",
                                       "obj_label", "x1", "y1", "width",
                                       "height"
                                   ],
                                   outputs=[
                                       "x1_gt", "y1_gt", "w_gt", "h_gt",
                                       "obj_label", "x1", "y1", "width",
                                       "height"
                                   ])
                           ])
    # prepare network
    model = fe.build(model_def=lambda: RetinaNet(input_shape=(1024, 1024, 3),
                                                 num_classes=90),
                     model_name="retinanet",
                     optimizer=tf.optimizers.SGD(momentum=0.9),
                     loss_name="total_loss")
    network = fe.Network(ops=[
        ModelOp(inputs="image", model=model, outputs=["cls_pred", "loc_pred"]),
        RetinaLoss(inputs=("cls_gt", "x1_gt", "y1_gt", "w_gt", "h_gt",
                           "cls_pred", "loc_pred"),
                   outputs=("total_loss", "focal_loss", "l1_loss")),
        # Box decoding is only needed for evaluation-time mAP computation.
        PredictBox(inputs=[
            "cls_pred", "loc_pred", "obj_label", "x1", "y1", "width", "height"
        ],
                   outputs=("pred", "gt"),
                   mode="eval",
                   input_shape=(1024, 1024, 3))
    ])
    # prepare estimator: save the checkpoint that maximizes mAP.
    estimator = fe.Estimator(
        network=network,
        pipeline=pipeline,
        epochs=7,
        traces=[
            MeanAvgPrecision(90, (1024, 1024, 3),
                             'pred',
                             'gt',
                             output_name=("mAP", "AP50", "AP75")),
            ModelSaver(model_name="retinanet",
                       save_dir=model_dir,
                       save_best='mAP',
                       save_best_mode='max'),
            LRController(model_name="retinanet",
                         lr_schedule=MyLRSchedule(schedule_mode="step"))
        ])
    return estimator
# Example 5
def get_estimator(batch_size=128, epochs=100):
    """Build a DANN domain-adaptation estimator (MNIST source, USPS target).

    A shared feature extractor feeds a label predictor (source domain only)
    and a domain predictor whose gradient-reversal weight ``alpha`` is
    scheduled by the ``GRLWeightController`` trace.
    """
    usps_train_csv, _, usps_parent_dir = usps.load_data()
    mnist_train_csv, _, mnist_parent_dir = mnist.load_data()

    # Rewrite the CSV headers so source and target features carry distinct
    # keys downstream.
    for csv_file, header in ((mnist_train_csv, ['source_img', 'source_label']),
                             (usps_train_csv, ['target_img', 'target_label'])):
        frame = pd.read_csv(csv_file)
        frame.columns = header
        frame.to_csv(csv_file, index=False)

    # One op list per dataset in the (target, source) tuple order.
    tfrecord_dir = os.path.join(os.path.dirname(mnist_parent_dir), 'dann',
                                'tfr')
    writer = fe.RecordWriter(
        save_dir=tfrecord_dir,
        train_data=(usps_train_csv, mnist_train_csv),
        ops=(
            [
                ImageReader(inputs="target_img",
                            outputs="target_img",
                            parent_path=usps_parent_dir,
                            grey_scale=True)
            ],  # first tuple element
            [
                ImageReader(inputs="source_img",
                            outputs="source_img",
                            parent_path=mnist_parent_dir,
                            grey_scale=True)
            ]))  # second tuple element

    # Resize both domains to 28x28, then scale pixels to [0, 1].
    preprocess_ops = [
        Resize(inputs=key, outputs=key, size=(28, 28))
        for key in ("target_img", "source_img")
    ]
    preprocess_ops += [
        Minmax(inputs=key, outputs=key)
        for key in ("target_img", "source_img")
    ]
    pipeline = fe.Pipeline(batch_size=batch_size,
                           data=writer,
                           ops=preprocess_ops)

    # Gradient-reversal weight, updated by the GRLWeightController trace.
    alpha = tf.Variable(0.0, dtype=tf.float32, trainable=False)
    img_shape = (28, 28, 1)
    feat_dim = 7 * 7 * 48

    def _compile(model_def, model_name):
        # All three sub-models share the joint loss and optimizer settings.
        return fe.build(model_def=model_def,
                        model_name=model_name,
                        loss_name="fe_loss",
                        optimizer=tf.keras.optimizers.Adam(1e-4))

    feature_extractor = _compile(lambda: build_feature_extractor(img_shape),
                                 "feature_extractor")
    label_predictor = _compile(lambda: build_label_predictor(feat_dim),
                               "label_predictor")
    domain_predictor = _compile(lambda: build_domain_predictor(feat_dim,
                                                               alpha),
                                "domain_predictor")

    network = fe.Network(ops=[
        ModelOp(
            inputs="source_img", outputs="src_feat", model=feature_extractor),
        ModelOp(
            inputs="target_img", outputs="tgt_feat", model=feature_extractor),
        ModelOp(
            inputs="src_feat", outputs="src_c_logit", model=label_predictor),
        ModelOp(
            inputs="src_feat", outputs="src_d_logit", model=domain_predictor),
        ModelOp(
            inputs="tgt_feat", outputs="tgt_d_logit", model=domain_predictor),
        FELoss(inputs=("src_c_logit", "source_label", "src_d_logit",
                       "tgt_d_logit"),
               outputs="fe_loss")
    ])

    return fe.Estimator(pipeline=pipeline,
                        network=network,
                        traces=[GRLWeightController(alpha=alpha)],
                        epochs=epochs)