Ejemplo n.º 1
0
 def test_single_input(self):
     """A single image through CoarseDropout should yield a list holding one correctly-shaped image."""
     op = CoarseDropout(inputs='x', outputs='x')
     result = op.forward(data=self.single_input, state={})
     with self.subTest('Check output type'):
         self.assertEqual(type(result), list)
     with self.subTest('Check output image shape'):
         self.assertEqual(result[0].shape, self.single_output_shape)
Ejemplo n.º 2
0
 def test_multi_input(self):
     """Two images through CoarseDropout should yield a 2-element list of correctly-shaped outputs."""
     op = CoarseDropout(inputs='x', outputs='x')
     result = op.forward(data=self.multi_input, state={})
     with self.subTest('Check output type'):
         self.assertEqual(type(result), list)
     with self.subTest('Check output list length'):
         self.assertEqual(len(result), 2)
     for img in result:
         with self.subTest('Check output mask shape'):
             self.assertEqual(img.shape, self.multi_output_shape)
Ejemplo n.º 3
0
def get_estimator(epochs=50,
                  batch_size=128,
                  max_train_steps_per_epoch=None,
                  max_eval_steps_per_epoch=None,
                  save_dir=None):
    """Build a ciFAIR-100 estimator that trains with SuperLoss to cope with corrupted labels.

    Args:
        epochs: Number of epochs to train for.
        batch_size: Batch size used by the pipeline in every mode.
        max_train_steps_per_epoch: Optional cap on training steps per epoch (None = full epoch).
        max_eval_steps_per_epoch: Optional cap on evaluation steps per epoch (None = full epoch).
        save_dir: Directory for the best model and confidence plots. A fresh temporary
            directory is created per call when None.

    Returns:
        An `fe.Estimator` ready for `.fit()`.
    """
    # Evaluate the temp-dir fallback here rather than as a default argument:
    # `save_dir=tempfile.mkdtemp()` in the signature would run only once at import
    # time, so every call would silently share (and overwrite) the same directory.
    if save_dir is None:
        save_dir = tempfile.mkdtemp()

    # step 1
    train_data, eval_data = cifair100.load_data()

    # Add label noise to simulate real-world labeling problems
    corrupt_dataset(train_data)

    # NOTE(review): `split` presumably moves the selected indices out of eval_data
    # into a new test split -- confirm against the dataset API.
    test_data = eval_data.split(range(len(eval_data) // 2))
    pipeline = fe.Pipeline(
        train_data=train_data,
        eval_data=eval_data,
        test_data=test_data,
        batch_size=batch_size,
        ops=[
            Normalize(inputs="x", outputs="x", mean=(0.4914, 0.4822, 0.4465), std=(0.2471, 0.2435, 0.2616)),
            PadIfNeeded(min_height=40, min_width=40, image_in="x", image_out="x", mode="train"),
            RandomCrop(32, 32, image_in="x", image_out="x", mode="train"),
            Sometimes(HorizontalFlip(image_in="x", image_out="x", mode="train")),
            CoarseDropout(inputs="x", outputs="x", mode="train", max_holes=1),
            ChannelTranspose(inputs="x", outputs="x")
        ])

    # step 2
    model = fe.build(model_fn=big_lenet, optimizer_fn='adam')
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        # SuperLoss wraps the base loss and also emits a per-sample confidence score.
        SuperLoss(CrossEntropy(inputs=("y_pred", "y"), outputs="ce"), output_confidence="confidence"),
        UpdateOp(model=model, loss_name="ce")
    ])

    # step 3
    traces = [
        MCC(true_key="y", pred_key="y_pred"),
        BestModelSaver(model=model, save_dir=save_dir, metric="mcc", save_best_mode="max", load_best_final=True),
        # Track the confidence signal separately for clean ("Normal") vs "Corrupted" samples.
        LabelTracker(metric="confidence",
                     label="data_labels",
                     label_mapping={
                         "Normal": 0, "Corrupted": 1
                     },
                     mode="train",
                     outputs="label_confidence"),
        ImageSaver(inputs="label_confidence", save_dir=save_dir, mode="train"),
    ]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=epochs,
                             traces=traces,
                             max_train_steps_per_epoch=max_train_steps_per_epoch,
                             max_eval_steps_per_epoch=max_eval_steps_per_epoch)
    return estimator
def get_estimator(epochs=24, batch_size=128, lr_epochs=100, max_train_steps_per_epoch=None,
                  save_dir=None):
    """Build a CIFAR estimator that first searches for the maximum LR, then trains with a super-convergence schedule.

    Args:
        epochs: Number of epochs for the final training run.
        batch_size: Batch size used by the pipeline.
        lr_epochs: Number of epochs used by the LR-range search.
        max_train_steps_per_epoch: Optional cap on training steps per epoch (None = full epoch).
        save_dir: Directory for the best model. A fresh temporary directory is created
            per call when None.

    Returns:
        An `fe.Estimator` ready for `.fit()`.
    """
    # Create the temp dir per call: a mkdtemp() default argument is evaluated only
    # once at import time and would be shared by every invocation.
    if save_dir is None:
        save_dir = tempfile.mkdtemp()

    # step 1: prepare dataset
    train_data, test_data = load_data()
    pipeline = fe.Pipeline(
        train_data=train_data,
        eval_data=test_data,
        batch_size=batch_size,
        ops=[
            Normalize(inputs="x", outputs="x", mean=(0.4914, 0.4822, 0.4465), std=(0.2471, 0.2435, 0.2616)),
            PadIfNeeded(min_height=40, min_width=40, image_in="x", image_out="x", mode="train"),
            RandomCrop(32, 32, image_in="x", image_out="x", mode="train"),
            Sometimes(HorizontalFlip(image_in="x", image_out="x", mode="train")),
            CoarseDropout(inputs="x", outputs="x", mode="train", max_holes=1),
            ChannelTranspose(inputs="x", outputs="x"),
            Onehot(inputs="y", outputs="y", mode="train", num_classes=10, label_smoothing=0.2)
        ])

    # step 2: prepare network
    model = fe.build(model_fn=ResNet9, optimizer_fn="sgd")
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
        UpdateOp(model=model, loss_name="ce")
    ])

    # get the max learning rate via the range test, then derive the floor of the schedule
    lr_max = search_max_lr(pipeline=pipeline, model=model, network=network, epochs=lr_epochs)
    lr_min = lr_max / 40
    print(f"The maximum LR: {lr_max}, and minimum LR: {lr_min}")
    # Schedule breakpoints in optimizer steps: peak at 45% of training, anneal to the end.
    mid_step = int(epochs * 0.45 * len(train_data) / batch_size)
    end_step = int(epochs * len(train_data) / batch_size)

    # reinitialize the model so the final run starts from scratch (the LR search trained it)
    model = fe.build(model_fn=ResNet9, optimizer_fn="sgd")
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
        UpdateOp(model=model, loss_name="ce")
    ])

    # step 3: prepare estimator
    traces = [
        Accuracy(true_key="y", pred_key="y_pred"),
        BestModelSaver(model=model, save_dir=save_dir, metric="accuracy", save_best_mode="max"),
        LRScheduler(model=model, lr_fn=lambda step: super_schedule(step, lr_max, lr_min, mid_step, end_step))
    ]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=epochs,
                             traces=traces,
                             max_train_steps_per_epoch=max_train_steps_per_epoch)
    return estimator
Ejemplo n.º 5
0
def finetune(weights_path,
             batch_size,
             epochs,
             model_dir=None,
             train_steps_per_epoch=None,
             eval_steps_per_epoch=None):
    """Fine-tune a pretrained vision transformer on ciFAIR-10 and save the best model.

    Args:
        weights_path: Path to pretrained ViT weights loaded by `vision_transformer`.
        batch_size: Batch size used by the pipeline.
        epochs: Number of fine-tuning epochs.
        model_dir: Directory for the best model. A fresh temporary directory is created
            per call when None.
        train_steps_per_epoch: Optional cap on training steps per epoch.
        eval_steps_per_epoch: Optional cap on evaluation steps per epoch.
    """
    # Create the temp dir per call: a mkdtemp() default argument is evaluated only
    # once at import time and would be shared by every invocation.
    if model_dir is None:
        model_dir = tempfile.mkdtemp()

    train_data, eval_data = cifair10.load_data()
    pipeline = fe.Pipeline(
        train_data=train_data,
        eval_data=eval_data,
        batch_size=batch_size,
        ops=[
            Normalize(inputs="x", outputs="x", mean=(0.4914, 0.4822, 0.4465), std=(0.2471, 0.2435, 0.2616)),
            PadIfNeeded(min_height=40, min_width=40, image_in="x", image_out="x", mode="train"),
            RandomCrop(32, 32, image_in="x", image_out="x", mode="train"),
            Sometimes(HorizontalFlip(image_in="x", image_out="x", mode="train")),
            CoarseDropout(inputs="x", outputs="x", mode="train", max_holes=1)
        ])
    # The model_fn builds two sub-models; only the second gets an optimizer, so only it trains.
    _, model = fe.build(
        model_fn=lambda: vision_transformer(num_class=10,
                                            weights_path=weights_path,
                                            image_size=(32, 32, 3),
                                            patch_size=4,
                                            num_layers=6,
                                            em_dim=256,
                                            num_heads=8,
                                            dff=512),
        optimizer_fn=[None, lambda: tf.optimizers.SGD(0.01, momentum=0.9)])
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce", from_logits=True),
        UpdateOp(model=model, loss_name="ce")
    ])
    traces = [
        Accuracy(true_key="y", pred_key="y_pred"),
        BestModelSaver(model=model, save_dir=model_dir, metric="accuracy", save_best_mode="max")
    ]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=epochs,
                             traces=traces,
                             train_steps_per_epoch=train_steps_per_epoch,
                             eval_steps_per_epoch=eval_steps_per_epoch)
    estimator.fit(warmup=False)
Ejemplo n.º 6
0
def get_estimator(epochs=24, batch_size=512, max_train_steps_per_epoch=None, save_dir=None):
    """Build a fast CIFAR training estimator with label smoothing and a custom LR schedule.

    Args:
        epochs: Number of training epochs.
        batch_size: Batch size used by the pipeline.
        max_train_steps_per_epoch: Optional cap on training steps per epoch (None = full epoch).
        save_dir: Directory for the best model. A fresh temporary directory is created
            per call when None.

    Returns:
        An `fe.Estimator` ready for `.fit()`.
    """
    # Create the temp dir per call: a mkdtemp() default argument is evaluated only
    # once at import time and would be shared by every invocation.
    if save_dir is None:
        save_dir = tempfile.mkdtemp()

    # step 1: prepare dataset
    train_data, test_data = load_data()
    pipeline = fe.Pipeline(
        train_data=train_data,
        test_data=test_data,
        batch_size=batch_size,
        ops=[
            Normalize(inputs="x", outputs="x", mean=(0.4914, 0.4822, 0.4465), std=(0.2471, 0.2435, 0.2616)),
            PadIfNeeded(min_height=40, min_width=40, image_in="x", image_out="x", mode="train"),
            RandomCrop(32, 32, image_in="x", image_out="x", mode="train"),
            Sometimes(HorizontalFlip(image_in="x", image_out="x", mode="train")),
            CoarseDropout(inputs="x", outputs="x", mode="train", max_holes=1),
            ChannelTranspose(inputs="x", outputs="x"),
            Onehot(inputs="y", outputs="y", mode="train", num_classes=10, label_smoothing=0.2)
        ])

    # step 2: prepare network
    model = fe.build(model_fn=FastCifar, optimizer_fn="adam")
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce"),
        UpdateOp(model=model, loss_name="ce")
    ])

    # step 3 prepare estimator
    traces = [
        Accuracy(true_key="y", pred_key="y_pred"),
        BestModelSaver(model=model, save_dir=save_dir, metric="accuracy", save_best_mode="max"),
        LRScheduler(model=model, lr_fn=lr_schedule)
    ]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=epochs,
                             traces=traces,
                             max_train_steps_per_epoch=max_train_steps_per_epoch)
    return estimator
Ejemplo n.º 7
0
def get_estimator(epochs=150,
                  batch_size=32,
                  save_dir=None,
                  train_steps_per_epoch=None,
                  eval_steps_per_epoch=None):
    """Build a PyramidNet-272 CIFAR estimator that scales batch size with the number of devices.

    Args:
        epochs: Number of training epochs.
        batch_size: Per-device batch size; the pipeline multiplies by the device count.
        save_dir: Directory for the best model. A fresh temporary directory is created
            per call when None.
        train_steps_per_epoch: Optional cap on training steps per epoch.
        eval_steps_per_epoch: Optional cap on evaluation steps per epoch.

    Returns:
        An `fe.Estimator` ready for `.fit()`.
    """
    # Create the temp dir per call: a mkdtemp() default argument is evaluated only
    # once at import time and would be shared by every invocation.
    if save_dir is None:
        save_dir = tempfile.mkdtemp()

    # step 1: prepare dataset
    train_data, eval_data = load_data()
    pipeline = fe.Pipeline(train_data=train_data,
                           eval_data=eval_data,
                           # global batch = per-device batch * number of devices
                           batch_size=batch_size * get_num_devices(),
                           ops=[
                               Normalize(inputs="x",
                                         outputs="x",
                                         mean=(0.4914, 0.4822, 0.4465),
                                         std=(0.2471, 0.2435, 0.2616)),
                               PadIfNeeded(min_height=40,
                                           min_width=40,
                                           image_in="x",
                                           image_out="x",
                                           mode="train"),
                               RandomCrop(32,
                                          32,
                                          image_in="x",
                                          image_out="x",
                                          mode="train"),
                               Sometimes(
                                   HorizontalFlip(image_in="x",
                                                  image_out="x",
                                                  mode="train")),
                               CoarseDropout(inputs="x",
                                             outputs="x",
                                             mode="train",
                                             max_holes=1)
                           ])

    # step 2: prepare network
    model = fe.build(
        model_fn=lambda: pyramidnet_cifar(inputs_shape=(32, 32, 3),
                                          depth=272,
                                          alpha=200,
                                          num_classes=10,
                                          bottleneck=True),
        # SGDW = SGD with decoupled weight decay (tensorflow-addons)
        optimizer_fn=lambda: tfa.optimizers.SGDW(
            weight_decay=0.0001, lr=0.1, momentum=0.9))

    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce", from_logits=True),
        UpdateOp(model=model, loss_name="ce")
    ])

    # step 3 prepare estimator
    traces = [
        Accuracy(true_key="y", pred_key="y_pred"),
        LRScheduler(model=model, lr_fn=lr_schedule),
        BestModelSaver(model=model,
                       save_dir=save_dir,
                       metric="accuracy",
                       save_best_mode="max")
    ]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=epochs,
                             traces=traces,
                             train_steps_per_epoch=train_steps_per_epoch,
                             eval_steps_per_epoch=eval_steps_per_epoch)
    return estimator
Ejemplo n.º 8
0
def finetune(pretrained_model,
             batch_size,
             epochs,
             model_dir=None,
             train_steps_per_epoch=None,
             eval_steps_per_epoch=None):
    """Fine-tune a ViT on ciFAIR-10, initializing its encoder from a pretrained model.

    Args:
        pretrained_model: Model whose `vit_encoder` weights seed the new model's encoder.
        batch_size: Batch size used by the pipeline.
        epochs: Number of fine-tuning epochs.
        model_dir: Directory for the best model. A fresh temporary directory is created
            per call when None.
        train_steps_per_epoch: Optional cap on training steps per epoch.
        eval_steps_per_epoch: Optional cap on evaluation steps per epoch.
    """
    # Create the temp dir per call: a mkdtemp() default argument is evaluated only
    # once at import time and would be shared by every invocation.
    if model_dir is None:
        model_dir = tempfile.mkdtemp()

    train_data, eval_data = cifair10.load_data()
    pipeline = fe.Pipeline(train_data=train_data,
                           eval_data=eval_data,
                           batch_size=batch_size,
                           ops=[
                               Normalize(inputs="x",
                                         outputs="x",
                                         mean=(0.4914, 0.4822, 0.4465),
                                         std=(0.2471, 0.2435, 0.2616)),
                               PadIfNeeded(min_height=40,
                                           min_width=40,
                                           image_in="x",
                                           image_out="x",
                                           mode="train"),
                               RandomCrop(32,
                                          32,
                                          image_in="x",
                                          image_out="x",
                                          mode="train"),
                               Sometimes(
                                   HorizontalFlip(image_in="x",
                                                  image_out="x",
                                                  mode="train")),
                               CoarseDropout(inputs="x",
                                             outputs="x",
                                             mode="train",
                                             max_holes=1),
                               ChannelTranspose(inputs="x", outputs="x")
                           ])
    # NOTE(review): num_classes=100 while the data is ciFAIR-10 -- with integer labels the
    # extra logits are unused by CrossEntropy, but this looks like a copy from the
    # pretraining head; confirm whether it should be 10.
    model = fe.build(model_fn=lambda: ViTModel(num_classes=100,
                                               image_size=32,
                                               patch_size=4,
                                               num_layers=6,
                                               num_channels=3,
                                               em_dim=256,
                                               num_heads=8,
                                               ff_dim=512),
                     optimizer_fn=lambda x: torch.optim.SGD(
                         x, lr=0.01, momentum=0.9, weight_decay=1e-4))
    # load the encoder's weight ("module" is present when the model is wrapped for multi-GPU)
    if hasattr(model, "module"):
        model.module.vit_encoder.load_state_dict(
            pretrained_model.module.vit_encoder.state_dict())
    else:
        model.vit_encoder.load_state_dict(
            pretrained_model.vit_encoder.state_dict())
    network = fe.Network(ops=[
        ModelOp(model=model, inputs="x", outputs="y_pred"),
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce", from_logits=True),
        UpdateOp(model=model, loss_name="ce")
    ])
    traces = [
        Accuracy(true_key="y", pred_key="y_pred"),
        BestModelSaver(model=model,
                       save_dir=model_dir,
                       metric="accuracy",
                       save_best_mode="max")
    ]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=epochs,
                             traces=traces,
                             train_steps_per_epoch=train_steps_per_epoch,
                             eval_steps_per_epoch=eval_steps_per_epoch)
    estimator.fit(warmup=False)
Ejemplo n.º 9
0
                                     mean=(0.4914, 0.4822, 0.4465),
                                     std=(0.2471, 0.2435, 0.2616)),
                           PadIfNeeded(min_height=40,
                                       min_width=40,
                                       image_in="x",
                                       image_out="x",
                                       mode="train"),
                           RandomCrop(32,
                                      32,
                                      image_in="x",
                                      image_out="x",
                                      mode="train"),
                           Sometimes(
                               HorizontalFlip(image_in="x",
                                              image_out="x",
                                              mode="train")),
                           CoarseDropout(inputs="x",
                                         outputs="x",
                                         mode="train",
                                         max_holes=1),
                           Onehot(inputs="y",
                                  outputs="y",
                                  mode="train",
                                  num_classes=10,
                                  label_smoothing=0.2)
                       ])
# Smoke-check the pipeline defined above by pulling one batch per mode and
# printing the image tensor shapes.
# NOTE(review): relies on the `pipeline` object built in the (truncated) snippet
# above this point.
data = pipeline.get_results(mode="train")
print(data["x"].shape)
data2 = pipeline.get_results(mode="test")
print(data2["x"].shape)
def get_estimator(epochs=12, batch_size=512, save_dir=None):
    """Build a curriculum estimator: pretrain on ciFAIR-100, then switch to ciFAIR-10.

    Epochs 1-10 train the shared backbone plus a 100-class head on ciFAIR-100;
    from epoch 11 the backbone is frozen (its UpdateOp is scheduled to None) and
    only a new 10-class head trains on ciFAIR-10.

    Args:
        epochs: Total number of training epochs.
        batch_size: Batch size used by the pipeline.
        save_dir: Reserved for saving models; a fresh temporary directory is created
            per call when None.

    Returns:
        An `fe.Estimator` ready for `.fit()`.
    """
    # Create the temp dir per call: a mkdtemp() default argument is evaluated only
    # once at import time and would be shared by every invocation.
    if save_dir is None:
        save_dir = tempfile.mkdtemp()

    # epoch 1-10, train on cifair100,  epoch 11-end: train on cifar10
    cifair10_train, cifair10_test = cifair10.load_data()
    cifair100_train, _ = cifair100.load_data()
    train_ds = EpochScheduler({1: cifair100_train, 11: cifair10_train})
    pipeline = fe.Pipeline(train_data=train_ds,
                           test_data=cifair10_test,
                           batch_size=batch_size,
                           ops=[
                               Normalize(inputs="x",
                                         outputs="x",
                                         mean=(0.4914, 0.4822, 0.4465),
                                         std=(0.2471, 0.2435, 0.2616)),
                               PadIfNeeded(min_height=40,
                                           min_width=40,
                                           image_in="x",
                                           image_out="x",
                                           mode="train"),
                               RandomCrop(32,
                                          32,
                                          image_in="x",
                                          image_out="x",
                                          mode="train"),
                               Sometimes(
                                   HorizontalFlip(image_in="x",
                                                  image_out="x",
                                                  mode="train")),
                               CoarseDropout(inputs="x",
                                             outputs="x",
                                             mode="train",
                                             max_holes=1),
                               ChannelTranspose(inputs="x", outputs="x")
                           ])

    # step 2: prepare network
    backbone = fe.build(model_fn=lambda: Backbone(input_size=(3, 32, 32)),
                        optimizer_fn="adam")
    cls_head_cifar100 = fe.build(model_fn=lambda: Classifier(classes=100),
                                 optimizer_fn="adam")
    cls_head_cifar10 = fe.build(model_fn=lambda: Classifier(classes=10),
                                optimizer_fn="adam")
    # if you want to save the final cifar10 model, you can build a model then provide it to ModelSaver
    # final_model_cifar10 = fe.build(model_fn=lambda: MyModel(backbone, cls_head_cifar10), optimizer_fn=None)

    # epoch 1-10: train backbone and cls_head_cifar100, epoch 11-end: train cls_head_cifar10 only
    ModelOp_cls_head = EpochScheduler({
        1:
        ModelOp(model=cls_head_cifar100, inputs="feature", outputs="y_pred"),
        11:
        ModelOp(model=cls_head_cifar10, inputs="feature", outputs="y_pred"),
    })
    # Scheduling None from epoch 11 removes the backbone update, freezing it.
    UpdateOp_backbone = EpochScheduler({
        1:
        UpdateOp(model=backbone, loss_name="ce"),
        11:
        None
    })
    UpdateOp_cls_head = EpochScheduler({
        1:
        UpdateOp(model=cls_head_cifar100, loss_name="ce"),
        11:
        UpdateOp(model=cls_head_cifar10, loss_name="ce")
    })
    network = fe.Network(ops=[
        ModelOp(model=backbone, inputs="x",
                outputs="feature"), ModelOp_cls_head,
        CrossEntropy(inputs=("y_pred", "y"), outputs="ce", from_logits=True),
        UpdateOp_backbone, UpdateOp_cls_head
    ])
    traces = [Accuracy(true_key="y", pred_key="y_pred")]
    estimator = fe.Estimator(pipeline=pipeline,
                             network=network,
                             epochs=epochs,
                             traces=traces)
    return estimator