Code Example #1
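This snippet trains a Bernoulli VAE on binarized, flattened MNIST (pixels binarized on the fly by `BernoulliSampler`), minimizing the SGVB objective and reporting an importance-sampling estimate of the NLL. The listing begins at `main`; a preamble roughly like the one below is assumed, with the `Config` and `VAE` classes defined earlier in the same file (not shown here):

# Assumed preamble, not part of the original listing:
from functools import partial        # used by `loop.run_after_every` below

import mltk
import tensorkit
import tensorkit as tk
from tensorkit import tensor as T
from tensorkit.examples import utils  # assumed home of the `utils` helpers
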
def main(exp: mltk.Experiment[Config]):
    # prepare the data
    train_stream, _, test_stream = utils.get_mnist_streams(
        batch_size=exp.config.batch_size,
        test_batch_size=exp.config.test_batch_size,
        flatten=True,
        x_range=(0., 1.),
        use_y=False,
        mapper=utils.BernoulliSampler().as_mapper(),
    )

    tensorkit.utils.misc.print_experiment_summary(exp,
                                                  train_data=train_stream,
                                                  test_data=test_stream)

    # build the network
    vae: VAE = VAE(train_stream.data_shapes[0][0], exp.config)
    params, param_names = tensorkit.utils.misc.get_params_and_names(vae)
    tensorkit.utils.misc.print_parameters_summary(params, param_names)
    print('')
    mltk.print_with_time('Network constructed.')

    # initialize the network with first few batches of train data
    [init_x] = train_stream.get_arrays(max_batch=exp.config.init_batch_count)
    vae.initialize(init_x)
    mltk.print_with_time('Network initialized')

    # define the train and evaluate functions
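    # (`sgvb` is the reparameterization-based SGVB estimator, returned here as
    # the training loss; `is_loglikelihood` in `eval_step` estimates log p(x)
    # by importance sampling over `n_z` latent samples and is negated into NLL.)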
    def train_step(x):
        chain = vae.get_chain(x)
        loss = chain.vi.training.sgvb(reduction='mean')
        return {'loss': loss}

    def eval_step(x, n_z=exp.config.test_n_z):
        with tk.layers.scoped_eval_mode(vae), T.no_grad():
            chain = vae.get_chain(x, n_z=n_z)
            loss = chain.vi.training.sgvb(reduction='mean')
            nll = -chain.vi.evaluation.is_loglikelihood(reduction='mean')
        return {'elbo': loss, 'nll': nll}

    def plot_samples(epoch=None):
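        # decode 100 samples from the prior into Bernoulli logits and render
        # them as a 10x10 grid of uint8 images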
        epoch = epoch or loop.epoch
        with tk.layers.scoped_eval_mode(vae), T.no_grad():
            logits = vae.p(n_z=100)['x'].distribution.logits
            images = T.reshape(
                T.cast(T.clip(T.nn.sigmoid(logits) * 255., 0., 255.),
                       dtype=T.uint8),
                [-1, 28, 28],
            )
        utils.save_images_collection(
            images=T.to_numpy(images),
            filename=exp.abspath(f'plotting/{epoch}.png'),
            grid_size=(10, 10),
        )

    # build the optimizer and the train loop
    loop = mltk.TrainLoop(max_epoch=exp.config.max_epoch)
    loop.add_callback(mltk.callbacks.StopOnNaN())
    optimizer = tk.optim.Adam(tk.layers.iter_parameters(vae))
    lr_scheduler = tk.optim.lr_scheduler.AnnealingLR(
        optimizer=optimizer,
        initial_lr=exp.config.initial_lr,
        ratio=exp.config.lr_anneal_ratio,
        epochs=exp.config.lr_anneal_epochs)
    lr_scheduler.bind(loop)
    loop.run_after_every(
        lambda: loop.test().run(partial(eval_step, n_z=10), test_stream),
        epochs=10)
    loop.run_after_every(plot_samples, epochs=10)

    # train the model
    tk.layers.set_train_mode(vae, True)
    utils.fit_model(loop=loop,
                    optimizer=optimizer,
                    fn=train_step,
                    stream=train_stream)

    # do the final test
    results = mltk.TestLoop().run(eval_step, test_stream)
    plot_samples('final')
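
Like the other listings on this page, the snippet omits its entry point. A launcher in mltk's usual style would look roughly like this (a sketch, assuming the standard `mltk.Experiment` context-manager pattern):

# Hypothetical launcher, not part of the original listing:
if __name__ == '__main__':
    with mltk.Experiment(Config) as exp:
        main(exp)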
Code Example #2
File: resnet.py    Project: lizeyan/tensorkit
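This variant trains a small ResNet classifier on MNIST scaled to `x_range=(-1., 1.)`; a validation stream is requested but discarded. The `Config` class is not part of the listing; judging from the attributes the code reads, it would look roughly like the sketch below (the field values are illustrative guesses, not the project's actual defaults):

# Hypothetical Config sketch, inferred from the attributes used in `main`:
class Config(mltk.Config):
    batch_size: int = 64
    test_batch_size: int = 256
    init_batch_count: int = 10
    max_epoch: int = 100
    initial_lr: float = 1e-3
    lr_anneal_ratio: float = 0.5
    lr_anneal_epochs: int = 20
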
def main(exp: mltk.Experiment[Config]):
    # prepare the data
    train_stream, _, test_stream = utils.get_mnist_streams(
        batch_size=exp.config.batch_size,
        test_batch_size=exp.config.test_batch_size,
        val_batch_size=exp.config.test_batch_size,
        x_range=(-1., 1.),
    )

    tensorkit.utils.misc.print_experiment_summary(exp,
                                                  train_data=train_stream,
                                                  test_data=test_stream)

    # build the network
    net: T.Module = tk.layers.SequentialBuilder(train_stream.data_shapes[0]). \
        set_args('res_block2d',
                 kernel_size=3,
                 activation=tk.layers.LeakyReLU,
                 normalizer=tk.layers.BatchNorm2d,
                 dropout=0.5,
                 data_init=tk.init.StdDataInit()). \
        res_block2d(16). \
        res_block2d(32, stride=2). \
        res_block2d(32). \
        res_block2d(64, stride=2). \
        res_block2d(64). \
        global_avg_pool2d(). \
        linear(10). \
        log_softmax(). \
        build()
    params, param_names = tensorkit.utils.misc.get_params_and_names(net)
    tensorkit.utils.misc.print_parameters_summary(params, param_names)
    print('')
    mltk.print_with_time('Network constructed.')

    # initialize the network with first few batches of train data
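    # (`StdDataInit` makes initialization data-dependent: parameter values are
    # derived from the first batch that flows through the network, hence one
    # forward pass before JIT compilation and a second one to trigger tracing.)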
    init_x, _ = train_stream.get_arrays(max_batch=exp.config.init_batch_count)
    init_x = T.as_tensor(init_x)
    _ = net(init_x)  # trigger initialization
    net = tk.layers.jit_compile(net)
    _ = net(init_x)  # trigger JIT
    mltk.print_with_time('Network initialized')

    # the train, test and validate functions
    def train_step(x, y):
        logits = net(x)
        loss = T.nn.cross_entropy_with_logits(logits, y, reduction='mean')
        return {'loss': loss}

    def eval_step(x, y):
        with tk.layers.scoped_eval_mode(net), T.no_grad():
            logits = net(x)
            acc = utils.calculate_acc(logits, y)
        return {'acc': acc}

    # build the optimizer and the train loop
    loop = mltk.TrainLoop(max_epoch=exp.config.max_epoch)
    optimizer = tk.optim.Adam(tk.layers.iter_parameters(net))
    lr_scheduler = tk.optim.lr_scheduler.AnnealingLR(
        optimizer=optimizer,
        initial_lr=exp.config.initial_lr,
        ratio=exp.config.lr_anneal_ratio,
        epochs=exp.config.lr_anneal_epochs)
    lr_scheduler.bind(loop)

    # run test after every 10 epochs
    loop.run_after_every(
        lambda: loop.test().run(eval_step, test_stream),
        epochs=10,
    )

    # train the model
    tk.layers.set_train_mode(net, True)
    utils.fit_model(loop=loop,
                    optimizer=optimizer,
                    fn=train_step,
                    stream=train_stream)

    # do the final test
    results = mltk.TestLoop().run(eval_step, test_stream)
Code Example #3
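This variant swaps the ResNet for a plain dense network on flattened 784-dimensional inputs and carves out 20% of the training data as a validation set (`val_portion=0.2`), so that early stopping can track `val_acc` and the final test runs with the best parameters rather than the last epoch's.
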
def main(exp: mltk.Experiment[Config]):
    # prepare the data
    train_stream, val_stream, test_stream = utils.get_mnist_streams(
        batch_size=exp.config.batch_size,
        test_batch_size=exp.config.test_batch_size,
        val_batch_size=exp.config.test_batch_size,
        val_portion=0.2,
        flatten=True,
        x_range=(-1., 1.),
    )

    tensorkit.utils.misc.print_experiment_summary(exp,
                                                  train_data=train_stream,
                                                  val_data=val_stream,
                                                  test_data=test_stream)

    # build the network
    net: T.Module = tk.layers.SequentialBuilder(784). \
        set_args('dense',
                 activation=tk.layers.LeakyReLU,
                 data_init=tk.init.StdDataInit()). \
        dense(500). \
        dense(500). \
        linear(10). \
        log_softmax(). \
        build()
    params, param_names = tensorkit.utils.misc.get_params_and_names(net)
    tensorkit.utils.misc.print_parameters_summary(params, param_names)
    print('')
    mltk.print_with_time('Network constructed.')

    # initialize the network
    init_x, _ = train_stream.get_arrays(max_batch=exp.config.init_batch_count)
    init_x = T.as_tensor(init_x)
    _ = net(init_x)  # trigger initialization
    net = tk.layers.jit_compile(net)
    _ = net(init_x)  # trigger JIT
    mltk.print_with_time('Network initialized')

    # define the train and evaluate functions
    def train_step(x, y):
        logits = net(x)
        loss = T.nn.cross_entropy_with_logits(logits, y, reduction='mean')
        return {'loss': loss}

    def eval_step(x, y):
        with tk.layers.scoped_eval_mode(net), T.no_grad():
            logits = net(x)
            acc = utils.calculate_acc(logits, y)
        return {'acc': acc}

    # build the optimizer and the train loop
    loop = mltk.TrainLoop(max_epoch=exp.config.max_epoch)
    optimizer = tk.optim.Adam(tk.layers.iter_parameters(net))
    lr_scheduler = tk.optim.lr_scheduler.AnnealingLR(
        optimizer=optimizer,
        initial_lr=exp.config.initial_lr,
        ratio=exp.config.lr_anneal_ratio,
        epochs=exp.config.lr_anneal_epochs)
    lr_scheduler.bind(loop)

    # add a callback to do early-stopping on the network parameters
    # according to the validation metric.
    loop.add_callback(
        mltk.callbacks.EarlyStopping(
            checkpoint=tk.train.Checkpoint(net=net),
            root_dir=exp.abspath('./checkpoint/early-stopping'),
            # note: for metrics produced under `loop.validation()`, the
            # prefix "val_" is automatically prepended, so the `acc` metric
            # returned by `eval_step` becomes "val_acc" here.
            metric_name='val_acc',
            smaller_is_better=False,
        ))
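    # (when the loop ends, the EarlyStopping callback restores the checkpoint
    # with the best `val_acc`, which is what lets the final test at the bottom
    # run with the best parameters.)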

    # run validation after every 10 epochs
    if val_stream is not None:
        loop.run_after_every(
            lambda: loop.validation().run(eval_step, val_stream),
            epochs=10,
        )

    # run test after every 10 epochs
    loop.run_after_every(
        lambda: loop.test().run(eval_step, test_stream),
        epochs=10,
    )

    # train the model
    tk.layers.set_train_mode(net, True)
    utils.fit_model(loop=loop,
                    optimizer=optimizer,
                    fn=train_step,
                    stream=train_stream)

    # do the final test with the best network parameters (according to validation)
    results = mltk.TestLoop().run(eval_step, test_stream)