Example #1
def main():
    """
  Testing the convolutional example on the mnist dataset.
  """

    dataset = RNNMNIST(BATCH_SIZE)
    print(dataset.get_train().y.shape)

    in_shape = (None, N_STEPS, N_INPUT)

    inputs = Value(type=tf.float32, shape=in_shape, cls=None)
    targets = Value(type=tf.int32, shape=(None), cls=10)

    fc_hidden = [500, 150]
    rnn_config = RNNHidden(rnn_weights=RNN_HIDDEN,
                           depth=1,
                           fc_weights=fc_hidden)
    config = Config(inputs, targets, rnn_config, LEARNING_RATE)

    network = ConvNetworkBuilder(config)
    hidden = SimpleRNNBuilder()
    _ = network.build_network(hidden)

    train_config = TrainerConfig(epochs=EPOCHS,
                                 display_after=DISPLAY_STEP,
                                 keep_prob=KEEP_PROB,
                                 checkpoint_path=None,
                                 summary_path=None)

    trainer = Trainer(network, train_config)
    trainer.train(dataset)
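
The example relies on several module-level constants that are not shown in the snippet. The values below are only plausible placeholders for context, assuming the common convention of feeding each 28x28 MNIST image to the RNN as 28 time steps of 28 pixels; the original module defines its own values. Examples #2 and #5 reuse the same EPOCHS, DISPLAY_STEP, KEEP_PROB, LEARNING_RATE and BATCH_SIZE names.

# Assumed values for illustration only; not the original module's definitions.
BATCH_SIZE = 128
N_STEPS = 28           # one time step per image row
N_INPUT = 28           # one pixel column per step
RNN_HIDDEN = 128       # width of the recurrent layer
LEARNING_RATE = 0.001
EPOCHS = 10
DISPLAY_STEP = 100     # log progress every DISPLAY_STEP batches
KEEP_PROB = 0.5        # dropout keep probability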
Example #2
def main():
    """
  Testing the convolutional example on the mnist dataset.
  """

    dataset = ConvMNIST(64)
    print(dataset.get_train().x.shape)

    inputs = Value(type=tf.float32, shape=(None, 28, 28, 1), cls=None)
    targets = Value(type=tf.int64, shape=(None), cls=10)
    learning_rate = 0.0001

    fc_hidden = [1024, 500]
    c_h = [(3, 3, 1, 32), (3, 3, 32, 64)]
    conv_hidden = ConvHidden(conv_weights=c_h, fc_weights=fc_hidden)

    config = Config(inputs, targets, conv_hidden, learning_rate)

    network = ConvNetworkBuilder(config)
    hidden = FFConvHiddenBuilder()
    _ = network.build_network(hidden)

    train_config = TrainerConfig(epochs=EPOCHS,
                                 display_after=DISPLAY_STEP,
                                 keep_prob=KEEP_PROB,
                                 checkpoint_path=None,
                                 summary_path=None)

    trainer = Trainer(network, train_config)
    trainer.train(dataset)
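
The tuples in c_h follow TensorFlow's convolution filter shape convention: (filter_height, filter_width, in_channels, out_channels). A minimal TF 1.x sketch of what those two shapes mean, independent of ConvNetworkBuilder's internals (the builder itself may wire things differently):

import tensorflow as tf  # TF 1.x API, matching the Session/ConfigProto usage in Example #4

x = tf.placeholder(tf.float32, shape=(None, 28, 28, 1))

# (3, 3, 1, 32): 3x3 filters over a 1-channel input -> 32 feature maps
w1 = tf.Variable(tf.truncated_normal((3, 3, 1, 32), stddev=0.1))
h1 = tf.nn.conv2d(x, w1, strides=[1, 1, 1, 1], padding='SAME')   # (None, 28, 28, 32)

# (3, 3, 32, 64): 3x3 filters over 32 channels -> 64 feature maps
w2 = tf.Variable(tf.truncated_normal((3, 3, 32, 64), stddev=0.1))
h2 = tf.nn.conv2d(h1, w2, strides=[1, 1, 1, 1], padding='SAME')  # (None, 28, 28, 64)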
Example #3
def main():
    opt = TrainOptions().parse()

    # create dataloaders for each phase
    dataloaders = create_dataloader(opt)

    print("type of subset: ", type(dataloaders[0]))

    # Create model
    model = create_model(opt)
    model.setup(opt)  # regular setup: load and print networks; create schedulers
    visualizer = Visualizer(opt)  # create a visualizer that displays/saves images and plots

    # initialize trainer
    trainer = Trainer(dataloaders, model, visualizer, opt)
    trainer.train()
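
The Trainer here wraps the usual options/model/visualizer trio. Purely as an illustration of what its train() loop typically looks like in this style of project, a rough sketch; the method names (set_input, optimize_parameters, get_current_losses, update_learning_rate) and the opt fields (n_epochs, print_freq) follow the common pix2pix/CycleGAN BaseModel convention and are assumptions, not this project's confirmed API:

class Trainer:
    def __init__(self, dataloaders, model, visualizer, opt):
        self.train_loader = dataloaders[0]  # phase-specific loaders, as created above
        self.model = model
        self.visualizer = visualizer
        self.opt = opt

    def train(self):
        for epoch in range(self.opt.n_epochs):
            for i, data in enumerate(self.train_loader):
                self.model.set_input(data)        # unpack one batch into the model
                self.model.optimize_parameters()  # forward, backward, optimizer step
                if i % self.opt.print_freq == 0:
                    losses = self.model.get_current_losses()
                    self.visualizer.print_current_losses(epoch, i, losses, 0.0, 0.0)
            self.model.update_learning_rate()     # step the LR schedulers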
Example #4
def main():
    args = get_args()
    m_config = process_config(args.config)

    config = tf.ConfigProto(log_device_placement=False)
    config.gpu_options.allow_growth = True

    with tf.Session(config=config) as sess:
        # create_dirs([config.summary_dir, config.checkpoint_dir])
        data_loader = SiameseDataLoader(config=m_config)
        model = ConvNet(data_loader=data_loader, config=m_config)
        logger = Logger(sess=sess, config=m_config)

        trainer = Trainer(sess=sess,
                          model=model,
                          config=m_config,
                          logger=logger,
                          data_loader=data_loader)

        trainer.train()
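
get_args and process_config are not shown. A self-contained sketch of what such helpers often look like (hypothetical: the real project may use different argument names and a different config format):

import argparse
import json
from types import SimpleNamespace

def get_args():
    parser = argparse.ArgumentParser(description='Train the Siamese ConvNet')
    parser.add_argument('-c', '--config', required=True,
                        help='path to a JSON experiment config')
    return parser.parse_args()

def process_config(config_path):
    # Expose the config keys as attributes (m_config.batch_size, ...).
    with open(config_path) as f:
        return SimpleNamespace(**json.load(f))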
Example #5
def main():
    """
    Testing the feedforward framework on the MNIST dataset.
    """
    dataset = MNIST(BATCH_SIZE)

    inputs = Value(type=tf.float32, shape=(None, 784), cls=None)
    targets = Value(type=tf.int64, shape=(None), cls=10)
    fc_hidden = FCHidden(weights=[300, 150])

    config = Config(inputs, targets, fc_hidden, LEARNING_RATE)

    network_builder = FFNetworkBuilder(config)
    hidden_builder = FFHiddenBuilder()
    _ = network_builder.build_network(hidden_builder)

    train_config = TrainerConfig(epochs=EPOCHS,
                                 display_after=DISPLAY_STEP,
                                 keep_prob=KEEP_PROB,
                                 checkpoint_path=None,
                                 summary_path=None)
    trainer = Trainer(network_builder, train_config)
    trainer.train(dataset)
Example #6
# NOTE: the original snippet is truncated here; the train split is assumed to
# mirror the test split below (the csv_file and transform names are guesses).
trainset = PixWiseDataset(root_dir=cfg['dataset']['root'],
                          csv_file=cfg['dataset']['train_set'],
                          map_size=cfg['model']['map_size'],
                          transform=train_transform,
                          smoothing=cfg['model']['smoothing'])

testset = PixWiseDataset(root_dir=cfg['dataset']['root'],
                         csv_file=cfg['dataset']['test_set'],
                         map_size=cfg['model']['map_size'],
                         transform=test_transform,
                         smoothing=cfg['model']['smoothing'])

trainloader = torch.utils.data.DataLoader(
    dataset=trainset,
    batch_size=cfg['train']['batch_size'],
    shuffle=True,
    num_workers=0)

testloader = torch.utils.data.DataLoader(
    dataset=testset,
    batch_size=cfg['test']['batch_size'],
    shuffle=True,
    num_workers=0)

trainer = Trainer(cfg=cfg,
                  network=network,
                  optimizer=optimizer,
                  loss=loss,
                  lr_scheduler=None,
                  device=device,
                  trainloader=trainloader,
                  testloader=testloader,
                  writer=writer)

trainer.train()
writer.close()
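
This snippet is cut off at the top: cfg, the transforms, network, optimizer, loss, device and writer are all created before it starts. For orientation only, a hypothetical cfg layout covering the keys referenced above (values are placeholders, not the project's defaults):

import yaml

cfg = yaml.safe_load("""
dataset:
  root: ./data
  train_set: train.csv
  test_set: test.csv
model:
  map_size: 14
  smoothing: true
train:
  batch_size: 32
test:
  batch_size: 32
""")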