Code Example #1
def test_auto_batch_epoch():
    check = {
        "1": (1, 200),
        "10": (10, 200),
        "100": (16, 160),
        "1000": (32, 64),
        "10000": (64, 25),
        "100000": (128, 20),
        "1000000": (256, 20),
        "10000000": (512, 20),
    }

    observe = {}
    # for n_data in [10, int(1e3), int(1e6)]:
    for n_data, (batch_size, epochs) in check.items():
        n_data = int(n_data)
        c = configure.Train(
            learning_rate=None,
            epochs=None,
            batch_size=None,
            loss_func="mse",
            ar_sparsity=None,
            optimizer="SGD",
        )
        c.set_auto_batch_epoch(n_data=n_data)
        observe["{}".format(n_data)] = (c.batch_size, c.epochs)
        log.debug("[config] n_data: {}, batch: {}, epoch: {}".format(
            n_data, c.batch_size, c.epochs))
        log.debug("[should] n_data: {}, batch: {}, epoch: {}".format(
            n_data, batch_size, epochs))
        assert c.batch_size == batch_size
        assert c.epochs == epochs
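
The snippets in this section omit their module-level setup. The following is a minimal sketch of the scaffolding they appear to rely on, assuming they come from a NeuralProphet-style test suite; the import path and logger name are assumptions and do not appear in the examples themselves.

# Assumed scaffolding for the snippets in this section (hypothetical names).
import logging

import numpy as np  # only needed for the n2b lambda in Code Example #4
from neuralprophet import configure  # assumption: the module exposing configure.Train

log = logging.getLogger("NP.test")  # assumption: the examples only call log.debug(...)
logging.basicConfig(level=logging.DEBUG)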
Code Example #2
def test_auto_batch_epoch(self):
    check = {
        "1": (1, 500),
        "10": (10, 500),
        "100": (16, 320),
        "1000": (32, 181),
        "10000": (64, 102),
        "100000": (128, 57),
        "1000000": (256, 50),
        "10000000": (256, 50),
    }
    for n_data in [10, int(1e3), int(1e6)]:
        c = configure.Train(
            learning_rate=None,
            epochs=None,
            batch_size=None,
            loss_func="mse",
            ar_sparsity=None,
            train_speed=0,
            optimizer="SGD",
        )
        c.set_auto_batch_epoch(n_data)
        log.debug("n_data: {}, batch: {}, epoch: {}".format(n_data, c.batch_size, c.epochs))
        batch, epoch = check["{}".format(n_data)]
        assert c.batch_size == batch
        assert c.epochs == epoch
Code Example #3
def test_auto_batch_epoch(self):
    check = {
        "1": (1, 1000),
        "10": (2, 1000),
        "100": (8, 320),
        "1000": (32, 64),
        "10000": (128, 12),
        "100000": (128, 5),
    }
    for n_data in [1, 10, int(1e2), int(1e3), int(1e4), int(1e5)]:
        c = configure.Train(learning_rate=None,
                            epochs=None,
                            batch_size=None,
                            loss_func="mse",
                            ar_sparsity=None,
                            train_speed=0)
        c.set_auto_batch_epoch(n_data)
        log.debug("n_data: {}, batch: {}, epoch: {}".format(
            n_data, c.batch_size, c.epochs))
        batch, epoch = check["{}".format(n_data)]
        assert c.batch_size == batch
        assert c.epochs == epoch
Code Example #4
def test_auto_batch_epoch(self):
    n2b = lambda x: int(400 / (1 + np.log(x / 100)))
    check = {
        "3": (3, 400),
        "10": (10, 400),
        "30": (16, 400),
        "100": (16, 400),
        "300": (16, 190),
        "1000": (32, 121),
        "10000": (64, 71),
        "100000": (128, 50),
        "1000000": (256, 40),
        "10000000": (256, 40),
    }
    for n_data in [
            3, 10, 30,
            int(1e2),
            int(1e3),
            int(1e4),
            int(1e5),
            int(1e6),
            int(1e7)
    ]:
        c = configure.Train(
            learning_rate=None,
            epochs=None,
            batch_size=None,
            loss_func="mse",
            ar_sparsity=None,
            train_speed=0,
            optimizer="SGD",
        )
        c.set_auto_batch_epoch(n_data)
        log.debug("n_data: {}, batch: {}, epoch: {}".format(
            n_data, c.batch_size, c.epochs))
        batch, epoch = check["{}".format(n_data)]
        assert c.batch_size == batch
        assert c.epochs == epoch
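
The n2b lambda in Code Example #4 is defined but never used in the test body, yet it reproduces the expected epoch counts for the mid-range entries of that example's check table. The sketch below spot-checks this; the clamping of epochs to [40, 400] is inferred from the table values, not confirmed against set_auto_batch_epoch's actual implementation.

import numpy as np


def expected_epochs(n_data: int) -> int:
    # 400 / (1 + ln(n_data / 100)), clamped to [40, 400]; the clamp is an
    # inference from the table in Code Example #4, not a documented rule.
    raw = 400 / (1 + np.log(n_data / 100)) if n_data > 100 else 400
    return int(min(400, max(40, raw)))


# Spot-check against the epoch column of Code Example #4's check table.
for n_data, (_, epochs) in {
    "100": (16, 400), "300": (16, 190), "1000": (32, 121),
    "10000": (64, 71), "100000": (128, 50),
    "1000000": (256, 40), "10000000": (256, 40),
}.items():
    assert expected_epochs(int(n_data)) == epochs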