Exemplo n.º 1
0
             num_classes=20,
             feat_channels=64,
             stacked_convs=2,
             norm='bn')
# Build the detection model with a fixed input signature (batch, H, W, RGB).
model.build((None, HEIGHT, WIDTH, 3))

# load_checkpoint("./drive/MyDrive/models/ImageNet-86/ckpt", model=backbone)

# Detection loss: CIoU box regression + focal-loss classification, with a
# centerness branch enabled (suggests an FCOS-style head — confirm upstream).
criterion = DetectionLoss(box_loss_fn=iou_loss(mode='ciou', offset=True),
                          cls_loss_fn=focal_loss(alpha=0.25, gamma=2.0),
                          centerness=True)
base_lr = 0.0025
epochs = 60
# Cosine decay to 0 over 60 epochs with a 5-epoch warmup starting at base_lr.
# NOTE(review): `mul` and `steps_per_epoch` are defined elsewhere in the file —
# presumably a batch-size multiplier and the dataset length; verify there.
lr_schedule = CosineLR(base_lr * mul,
                       steps_per_epoch,
                       epochs,
                       min_lr=0,
                       warmup_min_lr=base_lr,
                       warmup_epoch=5)
optimizer = SGD(lr_schedule, momentum=0.9, nesterov=True, weight_decay=1e-4)

# Training only tracks the running mean loss; evaluation re-applies the full
# detection criterion through MeanMetricWrapper.
train_metrics = {
    'loss': Mean(),
}
eval_metrics = {
    'loss': MeanMetricWrapper(criterion),
}


def output_transform(output):
    bbox_preds, cls_scores, centerness = get(
        ['bbox_pred', 'cls_score', 'centerness'], output)
Exemplo n.º 2
0
                          reduce=[('sep_conv_5x5', 1), ('max_pool_3x3', 0),
                                  ('sep_conv_5x5', 1), ('sep_conv_5x5', 2),
                                  ('sep_conv_3x3', 0), ('sep_conv_3x3', 3),
                                  ('sep_conv_3x3', 1), ('sep_conv_3x3', 2)],
                          reduce_concat=[2, 3, 4, 5])

# Probability for drop-path regularization inside the NASNet cells.
drop_path = 0.3
# NOTE(review): positional arguments inferred from values — 16 initial
# channels, 8 cells, auxiliary head enabled, 10 classes, PC-DARTS CIFAR
# genotype; confirm against the NASNet constructor signature.
model = NASNet(16, 8, True, drop_path, 10, PC_DARTS_cifar)
model.build((None, 32, 32, 3))
model.summary()

# Auxiliary classifier loss is mixed into the total at weight 0.4.
criterion = CrossEntropy(auxiliary_weight=0.4)

base_lr = 0.025
epochs = 600
# NOTE(review): `mul` and `steps_per_epoch` come from earlier in the file.
lr_schedule = CosineLR(base_lr * mul, steps_per_epoch, epochs=epochs, min_lr=0)
optimizer = SGD(lr_schedule, momentum=0.9, weight_decay=3e-4, nesterov=True)

train_metrics = {
    'loss': Mean(),
    'acc': CategoricalAccuracy(),
}
eval_metrics = {
    # from_logits=True assumes the model emits raw logits — TODO confirm.
    'loss': CategoricalCrossentropy(from_logits=True),
    'acc': CategoricalAccuracy(),
}

learner = SuperLearner(model,
                       criterion,
                       optimizer,
                       grad_clip_norm=5.0,
Exemplo n.º 3
0
def objective(trial: optuna.Trial):
    """Optuna objective: train LeNet-5 on a 1% MNIST subset for 20 epochs
    and return the final evaluation accuracy.

    Searched hyper-parameters: base learning rate, weight decay, and
    (optionally) an EMA decay rate.
    """
    base_lr = trial.suggest_float("base_lr", 0.001, 0.05, step=0.001)
    # suggest_float(..., log=True) replaces the deprecated suggest_loguniform.
    weight_decay = trial.suggest_float("weight_decay", 1e-5, 1e-3, log=True)
    ema = trial.suggest_categorical("ema", ["true", "false"])
    # NOTE(review): the EMA callback below is commented out, so ema_decay is
    # sampled but currently unused — re-enable the callback or drop the param.
    ema_decay = trial.suggest_float("ema_decay", 0.99, 0.9999,
                                    log=True) if ema == 'true' else None

    @curry
    def transform(image, label, training):
        # Pad 28x28 MNIST digits to 32x32 and normalize with MNIST statistics.
        image = pad(image, 2)
        image, label = to_tensor(image, label)
        image = normalize(image, [0.1307], [0.3081])

        label = tf.one_hot(label, 10)

        return image, label

    batch_size = 128
    eval_batch_size = 256
    # sub_ratio=0.01 keeps only 1% of MNIST to make each trial cheap.
    ds_train, ds_test, steps_per_epoch, test_steps = make_mnist_dataset(
        batch_size, eval_batch_size, transform, sub_ratio=0.01)

    model = LeNet5()
    model.build((None, 32, 32, 1))

    criterion = CrossEntropy()

    epochs = 20

    # Fixed: variable was misspelled "lr_shcedule".
    lr_schedule = CosineLR(base_lr, steps_per_epoch, epochs=epochs, min_lr=0)
    optimizer = SGD(lr_schedule,
                    momentum=0.9,
                    nesterov=True,
                    weight_decay=weight_decay)

    train_metrics = {
        'loss': Mean(),
        'acc': CategoricalAccuracy(),
    }
    eval_metrics = {
        'loss': CategoricalCrossentropy(from_logits=True),
        'acc': CategoricalAccuracy(),
    }

    learner = SuperLearner(model,
                           criterion,
                           optimizer,
                           train_metrics=train_metrics,
                           eval_metrics=eval_metrics,
                           work_dir="./MNIST",  # plain string: no placeholders
                           multiple_steps=True)

    # Report intermediate accuracy so Optuna can prune unpromising trials.
    callbacks = [OptunaReportIntermediateResult('acc', trial)]
    # if ema == 'true':
    #     callbacks.append(EMA(ema_decay))

    learner.fit(ds_train,
                epochs,
                ds_test,
                val_freq=2,
                steps_per_epoch=steps_per_epoch,
                val_steps=test_steps,
                callbacks=callbacks)

    # The last recorded eval accuracy is the objective value to maximize.
    return learner.metric_history.get_metric('acc', "eval")[-1]
Exemplo n.º 4
0
    'bn': {
        'affine': False,
        'track_running_stats': False,
    },
})

# Select the 'tiny' candidate-operation set for the architecture search space.
set_primitives('tiny')

# NOTE(review): Network(4, 5) positional args — presumably channels/layers or
# similar search-network dimensions; confirm against the Network constructor.
model = Network(4, 5)
model.build((None, 32, 32, 3))

criterion = CrossEntropy()

base_lr = 0.025
epochs = 240
lr_schedule = CosineLR(base_lr, steps_per_epoch, epochs=epochs, min_lr=1e-3)
# Two optimizers, DARTS-style: SGD for model weights, AdamW for the
# architecture parameters.
optimizer_model = SGD(lr_schedule, momentum=0.9, weight_decay=3e-4)
optimizer_arch = AdamW(learning_rate=3e-4, beta_1=0.5, weight_decay=1e-3)

train_metrics = {
    'loss': Mean(),
    'acc': CategoricalAccuracy(),
}
eval_metrics = {
    # from_logits=True assumes the model emits raw logits — TODO confirm.
    'loss': CategoricalCrossentropy(from_logits=True),
    'acc': CategoricalAccuracy(),
}

learner = DARTSLearner(model,
                       criterion,
                       optimizer_arch,
Exemplo n.º 5
0
        x = self.stem(x)
        x = self.normal1(x, hardwts, index)
        x = self.reduce1(x)
        x = self.normal2(x, hardwts, index)
        x = self.reduce2(x)
        x = self.normal3(x, hardwts, index)
        x = self.avg_pool(x)
        x = self.fc(x)
        return x


model = ConvNet()
# Single-channel 32x32 input (presumably padded MNIST — confirm with caller).
model.build((None, 32, 32, 1))

epochs = 200
lr_schedule = CosineLR(0.05, steps_per_epoch, epochs, min_lr=0)
# Weight optimizer; the architecture parameter 'alpha_normal' is excluded
# from weight decay so regularization does not bias the search weights.
optimizer_model = SGD(lr_schedule,
                      momentum=0.9,
                      weight_decay=1e-4,
                      nesterov=True,
                      exclude_from_weight_decay=['alpha_normal'])
# Separate Adam optimizer for architecture parameters.
optimizer_arch = Adam(1e-3)

# Sparse accuracy: labels are integer class ids, not one-hot.
train_loss = tf.keras.metrics.Mean()
train_acc = tf.keras.metrics.SparseCategoricalAccuracy()

@tf.function(jit_compile=True)
def train_step(batch):
    x, y = batch
    with tf.GradientTape() as tape:
Exemplo n.º 6
0
def objective(trial: optuna.Trial):
    """Optuna objective: train a WideResNet-16-8 on CIFAR-100 for 50 epochs
    and return the final evaluation accuracy.

    Searched hyper-parameters: cutout probability, mixup alpha, label
    smoothing, base learning rate, weight decay, and (optionally) EMA decay.
    """
    cutout_prob = trial.suggest_float("cutout_prob", 0, 1.0, step=0.1)
    mixup_alpha = trial.suggest_float("mixup_alpha", 0, 0.5, step=0.1)
    # suggest_float replaces the deprecated suggest_uniform/suggest_loguniform.
    label_smoothing = trial.suggest_float("label_smoothing", 0, 0.2)
    base_lr = trial.suggest_float("base_lr", 0.01, 0.2, step=0.01)
    weight_decay = trial.suggest_float("weight_decay", 1e-5, 1e-3, log=True)
    ema = trial.suggest_categorical("ema", ["true", "false"])
    ema_decay = trial.suggest_float("ema_decay", 0.995, 0.9999,
                                    log=True) if ema == 'true' else None

    @curry
    def transform(image, label, training):
        # Train-time augmentation: padded random crop, horizontal flip,
        # AutoAugment CIFAR-10 policy.
        if training:
            image = random_crop(image, (32, 32), (4, 4))
            image = tf.image.random_flip_left_right(image)
            image = autoaugment(image, "CIFAR10")

        image, label = to_tensor(image, label)
        image = normalize(image, [0.491, 0.482, 0.447], [0.247, 0.243, 0.262])

        # Cutout is applied after normalization, with searched probability.
        if training:
            image = random_apply(cutout(length=16), cutout_prob, image)

        label = tf.one_hot(label, 100)

        return image, label

    def zip_transform(data1, data2):
        # Mixup blends pairs of examples with the searched alpha.
        return mixup(data1, data2, alpha=mixup_alpha)

    batch_size = 128
    eval_batch_size = 2048

    ds_train, ds_test, steps_per_epoch, test_steps = make_cifar100_dataset(
        batch_size, eval_batch_size, transform, zip_transform)

    # Mixed-precision training; datasets are sharded across devices.
    setup_runtime(fp16=True)
    ds_train, ds_test = distribute_datasets(ds_train, ds_test)

    model = ResNet(depth=16, k=8, num_classes=100)
    model.build((None, 32, 32, 3))
    model.summary()

    criterion = CrossEntropy(label_smoothing=label_smoothing)

    epochs = 50
    lr_schedule = CosineLR(base_lr, steps_per_epoch, epochs=epochs, min_lr=0)
    optimizer = SGD(lr_schedule,
                    momentum=0.9,
                    weight_decay=weight_decay,
                    nesterov=True)
    train_metrics = {
        'loss': Mean(),
        'acc': CategoricalAccuracy(),
    }
    eval_metrics = {
        'loss': CategoricalCrossentropy(from_logits=True),
        'acc': CategoricalAccuracy(),
    }

    learner = SuperLearner(model,
                           criterion,
                           optimizer,
                           train_metrics=train_metrics,
                           eval_metrics=eval_metrics,
                           work_dir="./CIFAR100-NNI",  # plain string: no placeholders
                           multiple_steps=True)

    # Report intermediate accuracy so Optuna can prune unpromising trials.
    callbacks = [OptunaReportIntermediateResult('acc', trial)]
    if ema == 'true':
        callbacks.append(EMA(ema_decay))

    learner.fit(ds_train,
                epochs,
                ds_test,
                val_freq=1,
                steps_per_epoch=steps_per_epoch,
                val_steps=test_steps,
                callbacks=callbacks)

    # The last recorded eval accuracy is the objective value to maximize.
    return learner.metric_history.get_metric('acc', "eval")[-1]
Exemplo n.º 7
0
# Visual sanity check for the hanser learning-rate schedules: sample each
# schedule at every step of a 100-epoch run and plot one of the curves.
from hanser.train.lr_schedule import FlatCosineLR, CosineLR, CosinePowerAnnealingLR, MultiStepLR, ExponentialDecay, OneCycleLR

import matplotlib.pyplot as plt

steps_per_epoch = 20
epochs = 100
total_steps = steps_per_epoch * epochs


def sample_schedule(schedule):
    """Evaluate *schedule* at every step and return the lr values as floats."""
    return [schedule(i).numpy() for i in range(total_steps)]


# All schedules share a 5-epoch warmup starting from lr 0.01.
lr_scheduler1 = FlatCosineLR(0.1, steps_per_epoch, epochs, 75, 0.0001, 5, 0.01)
xs1 = sample_schedule(lr_scheduler1)

lr_scheduler2 = CosineLR(0.1, steps_per_epoch, epochs, 0.0001, 5, 0.01)
xs2 = sample_schedule(lr_scheduler2)

lr_scheduler3 = CosinePowerAnnealingLR(0.1, steps_per_epoch, epochs, 10, 0.0001, 5, 0.01)
xs3 = sample_schedule(lr_scheduler3)

lr_scheduler4 = MultiStepLR(0.1, steps_per_epoch, [30, 60, 90], 0.2, 5, 0.01)
xs4 = sample_schedule(lr_scheduler4)

lr_scheduler5 = ExponentialDecay(0.1, steps_per_epoch, 2.4, 0.97, False, 5, 0.01)
xs5 = sample_schedule(lr_scheduler5)

lr_scheduler6 = OneCycleLR(0.4, steps_per_epoch, epochs, 0.3, div_factor=10, warmup_epoch=5, warmup_min_lr=0.01)
xs6 = sample_schedule(lr_scheduler6)

# Only the OneCycleLR curve is plotted; swap in xs1..xs5 to inspect others.
plt.plot(xs6)
Exemplo n.º 8
0
# Train LeNet-5 on a 10% MNIST subset using externally supplied
# hyper-parameters (`params` is defined earlier in the file — presumably
# from an NNI/tuner search; confirm upstream).
print('Hyper-parameters: %s' % params)

batch_size = 128
eval_batch_size = 256
ds_train, ds_test, steps_per_epoch, test_steps = make_mnist_dataset(
    batch_size, eval_batch_size, transform, sub_ratio=0.1)

model = LeNet5()
model.build((None, 32, 32, 1))

criterion = CrossEntropy()

epochs = 20

base_lr = params["learning_rate"]
# Fixed: variable was misspelled "lr_shcedule".
lr_schedule = CosineLR(base_lr, steps_per_epoch, epochs=epochs, min_lr=0)
optimizer = SGD(lr_schedule,
                momentum=0.9,
                nesterov=True,
                weight_decay=params["weight_decay"])

train_metrics = {
    'loss': Mean(),
    'acc': CategoricalAccuracy(),
}
eval_metrics = {
    # from_logits=True assumes the model emits raw logits — TODO confirm.
    'loss': CategoricalCrossentropy(from_logits=True),
    'acc': CategoricalAccuracy(),
}

learner = SuperLearner(model,