def setUp(self):
    """Run a pipelined, replicated training job on generated ImageNet data
    and cache the parsed validation/training CSV logs on the test instance."""
    cli = {
        '--generated-data': '',
        '--dataset': 'ImageNet',
        '--model-size': 50,
        '--micro-batch-size': 8,
        '--available-memory-proportion': 0.1,
        '--iterations': 10,
        '--BN-span': 2,
        '--internal-exchange-optimisation-target': 'memory',
        '--pipeline': '',
        '--gradient-accumulation-count': 2,
        '--pipeline-schedule': 'Sequential',
        '--enable-recomputation': '',
        '--pipeline-splits': 'b1/0/relu',
        '--eight-bit': '',
        '--replicas': 2,
        '--enable-half-partials': '',
        '--disable-variable-offloading': '',
        '--batch-norm': '',
        '--normalise-input': '',
    }
    stdout = run_train(self, **cli)
    self.validation = get_csv(stdout, 'validation.csv')
    self.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Train for 10 epochs on CIFAR-10 with a stepped LR decay and cache the
    parsed validation/training CSV logs on the class."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--epochs': 10,
        '--warmup-epochs': 0,
        '--learning-rate-decay': '0.1',
        '--learning-rate-schedule': '0.5,0.75,0.875',
    }
    stdout = run_train(**cli)
    cls.validation = get_csv(stdout, 'validation.csv')
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run a short generated-ImageNet job (model-size 50) and cache the
    parsed validation/training CSV logs on the class."""
    cli = {
        '--generated-data': '',
        '--dataset': 'ImageNet',
        '--model-size': 50,
        '--batch-size': 1,
        '--available-memory-proportion': 0.1,
        '--iterations': 10,
        '--batches-per-step': 10,
    }
    stdout = run_train(**cli)
    cls.validation = get_csv(stdout, 'validation.csv')
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Train a replicated resnet on CIFAR-10 with a stepped LR schedule and
    cache the parsed validation/training CSV logs on the class."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--model': 'resnet',
        '--lr-schedule': 'stepped',
        '--learning-rate-decay': 0.5,
        '--learning-rate-schedule': '0.5,0.9',
        '--epochs': 20,
        '--replicas': 2,
    }
    stdout = run_train(**cli)
    cls.validation = get_csv(stdout, 'validation.csv')
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Train squeezenet on CIFAR-10 with a polynomial-decay LR schedule and
    cache the parsed validation/training CSV logs on the class."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--model': 'squeezenet',
        '--epochs': 10,
        '--use-bypass': '',
        '--poly-lr-initial-lr': 0.1,
        '--poly-lr-end-lr': 0.0001,
        '--lr-schedule': 'polynomial_decay_lr',
    }
    stdout = run_train(**cli)
    cls.validation = get_csv(stdout, 'validation.csv')
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Train on CIFAR-10 with cosine LR, label smoothing and mixed precision,
    then cache the parsed validation/training CSV logs on the class."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--epochs': 50,
        '--batch-size': 48,
        '--warmup-epochs': 2,
        '--lr-schedule': 'cosine',
        '--label-smoothing': '0.05',
        '--base-learning-rate': -5,
        '--precision': '16.32',
    }
    stdout = run_train(**cli)
    cls.validation = get_csv(stdout, 'validation.csv')
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run a short CIFAR-10 job with a custom name suffix and log directory,
    capture the log directory announced on stdout, and cache the parsed
    validation/training CSV logs on the class.

    The log directory is taken from the first stdout line containing the
    'Saving to ' marker; ``cls.logdir`` stays ``None`` if no such line exists.
    """
    out = run_train(**{
        '--data-dir': cifar10_data_dir,
        '--name-suffix': 'penguin',
        '--log-dir': 'logs/walrus',
        '--iterations': 10,
        '--batches-per-step': 10,
    })
    cls.logdir = None
    marker = 'Saving to '
    for line in out.split('\n'):
        idx = line.find(marker)
        if idx != -1:
            # Slice relative to where the marker actually occurs instead of
            # the previous hard-coded line[11:], which both assumed the marker
            # started at column 0 and skipped one character past the 10-char
            # marker. NOTE(review): verify the emitted line format — if the
            # old [11:] was deliberately skipping a quote/space, adjust here.
            cls.logdir = line[idx + len(marker):]
            break
    cls.validation = get_csv(out, 'validation.csv')
    cls.training = get_csv(out, 'training.csv')
def setUp(self):
    """Train on CIFAR-10 with cosine LR, label smoothing and mixed precision,
    then cache the parsed validation/training CSV logs on the instance."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--epochs': 50,
        '--micro-batch-size': 48,
        '--warmup-epochs': 2,
        '--lr-schedule': 'cosine',
        '--label-smoothing': '0.05',
        '--base-learning-rate-exponent': -5,
        '--precision': '16.32',
    }
    stdout = run_train(self, **cli)
    self.validation = get_csv(stdout, 'validation.csv')
    self.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Train resnext-29 on CIFAR-10 with a stepped LR decay and cache the
    parsed validation/training CSV logs on the instance."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--epochs': 10,
        '--model': 'resnext',
        '--model-size': 29,
        '--micro-batch-size': 8,
        '--warmup-epochs': 0,
        '--learning-rate-decay': '0.1',
        '--learning-rate-schedule': '0.5,0.75,0.875',
    }
    stdout = run_train(self, **cli)
    self.validation = get_csv(stdout, 'validation.csv')
    self.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Train replicated resnext-29 on CIFAR-10 with a stepped LR schedule and
    cache the parsed validation/training CSV logs on the instance."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--model': 'resnext',
        '--lr-schedule': 'stepped',
        '--model-size': 29,
        '--micro-batch-size': 4,
        '--learning-rate-decay': 0.5,
        '--learning-rate-schedule': '0.5,0.9',
        '--epochs': 20,
        '--replicas': 2,
    }
    stdout = run_train(self, **cli)
    self.validation = get_csv(stdout, 'validation.csv')
    self.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Train squeezenet on CIFAR-10 with absolute polynomial-decay learning
    rates and cache the parsed validation/training CSV logs on the class."""
    cli = {
        '--data-dir': cifar10_data_dir,
        '--epochs': 400,
        '--model': 'squeezenet',
        '--use-bypass': '',
        '--lr-schedule': 'polynomial_decay_lr',
        '--label-smoothing': '0.05',
        '--abs-learning-rate': 0.1,
        '--abs-end-learning-rate': 0.0001,
        '--warmup-epochs': 0,
        '--precision': '16.32',
    }
    stdout = run_train(**cli)
    cls.validation = get_csv(stdout, 'validation.csv')
    cls.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Train from the 'mk2_resnet8_test' config on CIFAR-10 and cache the
    parsed training CSV log on the instance."""
    cli = {
        '--config': 'mk2_resnet8_test',
        '--data-dir': cifar10_data_dir,
    }
    stdout = run_train(self, **cli)
    self.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Train a seeded FP32 run on generated CIFAR-10 data (stochastic rounding
    off) and cache the parsed validation/training CSV logs on the class."""
    cli = {
        '--dataset': 'cifar-10',
        '--epochs': 10,
        '--model-size': 14,
        '--batch-norm': '',
        '--pipeline-num-parallel': 8,
        '--generated-data': '',
        '--batch-size': 16,
        '--base-learning-rate': -4,
        '--precision': '32.32',
        '--seed': 1234,
        '--warmup-epochs': 0,
        '--no-stochastic-rounding': '',
        '--batches-per-step': 100,
    }
    stdout = run_train(**cli)
    cls.validation = get_csv(stdout, 'validation.csv')
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Train from the 'resnet50_1_ipu_training' config, overriding it for a
    small CIFAR-10 run, and cache the parsed training CSV log on the class."""
    cli = {
        '--config': 'resnet50_1_ipu_training',
        '--model-size': 8,
        '--epochs': 10,
        '--data-dir': cifar10_data_dir,
        '--dataset': 'cifar-10',
        '--gradient-accumulation-count': 1,
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Train seeded FP32 resnext-29 on generated CIFAR-10 data and cache the
    parsed validation/training CSV logs on the instance."""
    cli = {
        '--dataset': 'cifar-10',
        '--model': 'resnext',
        '--epochs': 2,
        '--model-size': 29,
        '--micro-batch-size': 4,
        '--batch-norm': '',
        '--pipeline-num-parallel': 8,
        '--generated-data': '',
        '--base-learning-rate-exponent': -4,
        '--precision': '32.32',
        '--seed': 1234,
        '--warmup-epochs': 0,
        '--no-stochastic-rounding': '',
        '--batches-per-step': 100,
    }
    stdout = run_train(self, **cli)
    self.validation = get_csv(stdout, 'validation.csv')
    self.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only EfficientNet-cifar on synthetic CIFAR-10 data and
    cache the parsed training CSV log on the class."""
    cli = {
        '--iterations': 100,
        '--batches-per-step': 10,
        '--dataset': 'cifar-10',
        '--synthetic-data': '',
        '--model': 'EfficientNet',
        '--model-size': 'cifar',
        '--batch-size': 10,
        '--no-validation': '',
        '--xla-recompute': '',
        '--group-dim': 16,
        '--expand-ratio': 4,
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only resnext-14 on synthetic ImageNet data across two
    shards and cache the parsed training CSV log on the class."""
    cli = {
        '--synthetic-data': '',
        '--dataset': 'ImageNet',
        '--model': 'resnext',
        '--model-size': 14,
        '--shards': 2,
        '--batch-size': 2,
        '--iterations': '5000',
        '--no-validation': '',
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined efficientnet on synthetic ImageNet data,
    keeping raw stdout and the parsed training CSV log on the class."""
    cli = {
        '--synthetic-data': '',
        '--dataset': 'ImageNet',
        '--model': 'efficientnet',
        '--shards': 2,
        '--pipeline-depth': 256,
        '--batch-size': 2,
        '--no-validation': '',
        '--xla-recompute': '',
        '--available-memory-proportion': 0.2,
        '--iterations': 10,
        '--pipeline-splits': 'block3b',
    }
    stdout = run_train(**cli)
    cls.out = stdout
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined model-size-50 on generated imagenet data
    and cache the parsed training CSV log on the class."""
    cli = {
        '--iterations': 10,
        '--batches-per-step': 10,
        '--dataset': 'imagenet',
        '--generated-data': '',
        '--model-size': 50,
        '--shards': 2,
        '--pipeline': '',
        '--gradient-accumulation-count': 256,
        '--batch-size': 2,
        '--no-validation': '',
        '--xla-recompute': '',
        '--available-memory-proportion': 0.1,
        '--pipeline-splits': 'b3/1/relu',
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined resnext-14 on synthetic imagenet data,
    cache the parsed training CSV log on the class, and echo it."""
    cli = {
        '--iterations': 1000,
        '--dataset': 'imagenet',
        '--model': 'resnext',
        '--model-size': 14,
        '--synthetic-data': '',
        '--shards': 2,
        '--pipeline-depth': 128,
        '--batch-size': 1,
        '--no-validation': '',
        '--pipeline-splits': 'b2/0/relu',
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')
    print(cls.training)
def setUp(self):
    """Run training-only EfficientNet-cifar on generated CIFAR-10 data with
    recomputation and cache the parsed training CSV log on the instance."""
    cli = {
        '--iterations': 100,
        '--batches-per-step': 10,
        '--dataset': 'cifar-10',
        '--generated-data': '',
        '--model': 'EfficientNet',
        '--model-size': 'cifar',
        '--micro-batch-size': 10,
        '--no-validation': '',
        '--enable-recomputation': '',
        '--group-dim': 16,
        '--expand-ratio': 4,
    }
    stdout = run_train(self, **cli)
    self.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined EfficientNet-B0 (Grouped schedule) on
    synthetic imagenet data and cache the parsed training CSV log."""
    cli = {
        '--iterations': 10,
        '--batches-per-step': 10,
        '--dataset': 'imagenet',
        '--synthetic-data': '',
        '--model': 'EfficientNet',
        '--model-size': 'B0',
        '--shards': 2,
        '--pipeline-depth': 128,
        '--batch-size': 4,
        '--no-validation': '',
        '--xla-recompute': '',
        '--pipeline-schedule': 'Grouped',
        '--group-dim': 16,
        '--expand-ratio': 4,
        '--pipeline-splits': 'block3b',
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined efficientnet-B1 over four shards on
    generated ImageNet data, keeping raw stdout and the parsed training CSV
    log on the class."""
    cli = {
        '--generated-data': '',
        '--dataset': 'ImageNet',
        '--model': 'efficientnet',
        '--model-size': 'B1',
        '--shards': 4,
        '--pipeline': '',
        '--gradient-accumulation-count': 128,
        '--batch-size': 4,
        '--no-validation': '',
        '--xla-recompute': '',
        '--available-memory-proportion': 0.2,
        '--pipeline-schedule': 'Grouped',
        '--iterations': 10,
        '--pipeline-splits': 'block2a/c',
        # NOTE(review): the two entries below are NOT flags — they look like
        # extra pipeline-split names smuggled in via dict ordering so they end
        # up after --pipeline-splits on the command line. Confirm against
        # run_train's argument assembly before "fixing" them.
        'block4a': 'block5c',
    }
    stdout = run_train(**cli)
    cls.out = stdout
    cls.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined resnext-14 on generated imagenet data,
    keeping raw stdout and the parsed training CSV log on the class."""
    cli = {
        '--iterations': 500,
        '--dataset': 'imagenet',
        '--model': 'resnext',
        '--model-size': 14,
        '--generated-data': '',
        '--shards': 2,
        '--pipeline': '',
        '--gradient-accumulation-count': 128,
        '--batch-size': 1,
        '--no-validation': '',
        '--pipeline-splits': 'b2/0/relu',
    }
    stdout = run_train(**cli)
    cls.out = stdout
    cls.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Run training-only pipelined resnext-14 on generated imagenet data with
    fused preprocessing, keeping raw stdout and the parsed training CSV log."""
    cli = {
        '--iterations': 500,
        '--dataset': 'imagenet',
        '--model': 'resnext',
        '--model-size': 14,
        '--generated-data': '',
        '--shards': 2,
        '--pipeline': '',
        '--gradient-accumulation-count': 128,
        '--micro-batch-size': 1,
        '--no-validation': '',
        '--pipeline-splits': 'b2/0/relu',
        '--fused-preprocessing': '',
    }
    stdout = run_train(self, **cli)
    self.out = stdout
    self.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Run training-only pipelined efficientnet on generated ImageNet data
    with recomputation and fused preprocessing, keeping raw stdout and the
    parsed training CSV log on the instance."""
    cli = {
        '--generated-data': '',
        '--dataset': 'ImageNet',
        '--model': 'efficientnet',
        '--shards': 2,
        '--pipeline': '',
        '--gradient-accumulation-count': 256,
        '--micro-batch-size': 2,
        '--no-validation': '',
        '--enable-recomputation': '',
        '--available-memory-proportion': 0.2,
        '--iterations': 10,
        '--pipeline-splits': 'block3b',
        '--fused-preprocessing': '',
    }
    stdout = run_train(self, **cli)
    self.out = stdout
    self.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined + replicated model-size-50 on synthetic
    imagenet data and cache the parsed training CSV log on the class."""
    cli = {
        '--iterations': 10,
        '--batches-per-step': 10,
        '--dataset': 'imagenet',
        '--synthetic-data': '',
        '--model-size': 50,
        '--shards': 2,
        '--replicas': 2,
        '--pipeline-depth': 128,
        '--pipeline-schedule': 'Grouped',
        '--batch-size': 2,
        '--no-validation': '',
        '--xla-recompute': '',
        '--available-memory-proportion': 0.1,
        '--pipeline-splits': 'b3/0/relu',
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Run training-only pipelined + replicated model-size-50 on generated
    imagenet data and cache the parsed training CSV log on the instance."""
    cli = {
        '--iterations': 10,
        '--batches-per-step': 10,
        '--dataset': 'imagenet',
        '--generated-data': '',
        '--model-size': 50,
        '--shards': 2,
        '--replicas': 2,
        '--pipeline': '',
        '--gradient-accumulation-count': 128,
        '--pipeline-schedule': 'Grouped',
        '--micro-batch-size': 2,
        '--no-validation': '',
        '--enable-recomputation': '',
        '--available-memory-proportion': 0.1,
        '--pipeline-splits': 'b3/0/relu',
    }
    stdout = run_train(self, **cli)
    self.training = get_csv(stdout, 'training.csv')
def setUp(self):
    """Run training-only pipelined EfficientNet-B0 (Grouped schedule) on
    generated imagenet data and cache the parsed training CSV log."""
    cli = {
        '--iterations': 10,
        '--batches-per-step': 10,
        '--dataset': 'imagenet',
        '--generated-data': '',
        '--model': 'EfficientNet',
        '--model-size': 'B0',
        '--shards': 2,
        '--pipeline': '',
        '--gradient-accumulation-count': 128,
        '--micro-batch-size': 4,
        '--no-validation': '',
        '--enable-recomputation': '',
        '--pipeline-schedule': 'Grouped',
        '--group-dim': 16,
        '--expand-ratio': 4,
        '--pipeline-splits': 'block3b',
    }
    stdout = run_train(self, **cli)
    self.training = get_csv(stdout, 'training.csv')
def setUpClass(cls):
    """Run training-only pipelined + replicated EfficientNet-B0 (ReLU variant)
    on synthetic imagenet data and cache the parsed training CSV log."""
    cli = {
        '--iterations': 10,
        '--batches-per-step': 10,
        '--dataset': 'imagenet',
        '--synthetic-data': '',
        '--model': 'EfficientNet',
        '--model-size': 'B0',
        '--shards': 2,
        '--replicas': 2,
        '--pipeline': '',
        '--gradient-accumulation-count': 128,
        '--pipeline-schedule': 'Grouped',
        '--batch-size': 2,
        '--no-validation': '',
        '--xla-recompute': '',
        '--group-dim': 16,
        '--expand-ratio': 4,
        '--use-relu': '',
        '--available-memory-proportion': 0.2,
        '--pipeline-splits': 'block3b',
    }
    stdout = run_train(**cli)
    cls.training = get_csv(stdout, 'training.csv')