def test_data_dir(self):
    prev_data_dir = data_dir()
    system = platform.system()
    if system != 'Windows':
        self.assertEqual(data_dir(), op.join(op.expanduser('~'), '.mxnet'))
        os.environ['MXNET_HOME'] = '/tmp/mxnet_data'
        self.assertEqual(data_dir(), '/tmp/mxnet_data')
        del os.environ['MXNET_HOME']
        self.assertEqual(data_dir(), prev_data_dir)
def test_data_dir():
    prev_data_dir = data_dir()
    system = platform.system()
    # Test that data_dir() returns the proper default value when MXNET_HOME is not set
    with environment('MXNET_HOME', None):
        if system == 'Windows':
            assert_equal(data_dir(), op.join(os.environ.get('APPDATA'), 'mxnet'))
        else:
            assert_equal(data_dir(), op.join(op.expanduser('~'), '.mxnet'))
    # Test that data_dir() responds to an explicit setting of MXNET_HOME
    with environment('MXNET_HOME', '/tmp/mxnet_data'):
        assert_equal(data_dir(), '/tmp/mxnet_data')
    # Test that this test has not disturbed the MXNET_HOME value existing before the test
    assert_equal(data_dir(), prev_data_dir)
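# A minimal sketch of the `environment` helper used in the test above,
# assuming it is a context manager that temporarily sets (or, for None,
# unsets) one environment variable and restores the prior state on exit.
# The real helper is imported by the test module; this version is illustrative.
import os
from contextlib import contextmanager

@contextmanager
def environment(name, value):
    prev = os.environ.get(name)          # remember the current state
    try:
        if value is None:
            os.environ.pop(name, None)   # unset the variable for the block
        else:
            os.environ[name] = value     # apply the override
        yield
    finally:
        if prev is None:
            os.environ.pop(name, None)   # restore "unset"
        else:
            os.environ[name] = prev      # restore the previous value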
def constructor(num_layers, pretrained=False, ctx=cpu(),
                root=os.path.join(base.data_dir(), 'models'),
                overwrite_init_features=None, overwrite_growth_rate=None,
                overwrite_downsample=None, overwrite_reduction=None,
                flex_block_config=None, **kwargs):
    init_features = default_init_features if overwrite_init_features is None else overwrite_init_features
    growth_rate = default_growth_rate if overwrite_growth_rate is None else overwrite_growth_rate
    # spec maps num_layers to (block_config, reduction_factor, downsample),
    # e.g. '22': ([4, 5, 4, 4], [160 / 320, 224 / 480, 256 / 480], DOWNSAMPLE_STRUCT)
    block_config, reduction_factor, downsample = spec[num_layers]
    reduction = [1 / x for x in reduction_factor]
    if num_layers is None:
        block_config = flex_block_config
    if overwrite_downsample is not None:
        downsample = overwrite_downsample
    num_transition_blocks = len(block_config) - 1
    if overwrite_reduction is not None:
        reduction = [float(x) for x in overwrite_reduction.split(",")]
        assert len(reduction) == num_transition_blocks, \
            "need %d values for --reduction" % num_transition_blocks
    net = net_constructor(init_features, growth_rate, block_config, reduction,
                          default_bn_size, downsample, **kwargs)
    if pretrained:
        raise ValueError("No pretrained model with automatic downloading exists, yet.")
        # from ..model_store import get_model_file
        # net.load_parameters(get_model_file('densenet%d'%(num_layers), root=root), ctx=ctx)
    return net
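# Hypothetical usage of the flexible constructor above, assuming a spec entry
# like the '22' one quoted in the comment: block_config [4, 5, 4, 4] yields
# three transition blocks, so --reduction takes exactly three values.
net = constructor('22', overwrite_reduction="0.5,0.5,0.5")  # one value per transition block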
def get_squeezenet(version, pretrained=False, ctx=cpu(),
                   root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""SqueezeNet model from the `"SqueezeNet: AlexNet-level accuracy with 50x fewer parameters
    and <0.5MB model size" <https://arxiv.org/abs/1602.07360>`_ paper.
    SqueezeNet 1.1 model from the `official SqueezeNet repo
    <https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>`_.
    SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters
    than SqueezeNet 1.0, without sacrificing accuracy.

    Parameters
    ----------
    version : str
        Version of squeezenet. Options are '1.0', '1.1'.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    net = SqueezeNet(version, **kwargs)
    if pretrained:
        from mxnet.gluon.model_zoo.model_store import get_model_file
        net.load_parameters(get_model_file('squeezenet%s' % version, root=root), ctx=ctx)
    return net
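# A short usage sketch (illustrative, not from the source): build SqueezeNet
# 1.1 with downloaded weights and score a dummy 224x224 ImageNet-style batch.
import mxnet as mx
net = get_squeezenet('1.1', pretrained=True)
scores = net(mx.nd.zeros((1, 3, 224, 224)))  # shape (1, num_classes)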
def get_resnet_enas(version, num_layers, pretrained=False, ctx=cpu(),
                    root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""ResNet V1 model from `"Deep Residual Learning for Image Recognition"
    <http://arxiv.org/abs/1512.03385>`_ paper.
    ResNet V2 model from `"Identity Mappings in Deep Residual Networks"
    <https://arxiv.org/abs/1603.05027>`_ paper.

    Parameters
    ----------
    version : int
        Version of ResNet. Options are 1, 2.
    num_layers : int
        Numbers of layers. Options are 18, 34, 50, 101, 152.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    assert num_layers in resnet_spec, \
        "Invalid number of layers: %d. Options are %s" % (
            num_layers, str(resnet_spec.keys()))
    block_type, layers, channels = resnet_spec[num_layers]
    assert version >= 1 and version <= 2, \
        "Invalid resnet version: %d. Options are 1 and 2." % version
    resnet_class = resnet_net_versions[version - 1]
    block_class = resnet_block_versions[version - 1][block_type]
    net = resnet_class(block_class, layers, channels, **kwargs)
    if pretrained:
        raise ValueError("No pretrained model exists, yet.")
        # from ..model_store import get_model_file
        # net.load_parameters(get_model_file('resnet%d_v%d'%(num_layers, version),
        #                                    root=root), ctx=ctx)
    return net
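# Usage sketch (illustrative): version picks the V1/V2 network and block
# implementations via resnet_net_versions / resnet_block_versions, while
# num_layers indexes resnet_spec as documented above.
net = get_resnet_enas(1, 18)   # ResNet-18 V1
net = get_resnet_enas(2, 50)   # ResNet-50 V2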
def get_vgg(num_layers, pretrained=False, ctx=cpu(),
            root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""VGG model from the `"Very Deep Convolutional Networks for Large-Scale
    Image Recognition" <https://arxiv.org/abs/1409.1556>`_ paper.

    Parameters
    ----------
    num_layers : int
        Number of layers for the variant of VGG. Options are 11, 13, 16, 19.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    layers, filters = vgg_spec[num_layers]
    net = VGG(layers, filters, **kwargs)
    if pretrained:
        raise ValueError("No pretrained model exists, yet.")
        # from ..model_store import get_model_file
        # batch_norm_suffix = '_bn' if kwargs.get('batch_norm') else ''
        # net.load_parameters(get_model_file('vgg%d%s'%(num_layers, batch_norm_suffix),
        #                                    root=root), ctx=ctx)
    return net
def get_binet(num_layers, pretrained=False, ctx=cpu(),
              root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""From `"Bi-Real Net: Enhancing the Performance of 1-bit CNNs"
    <https://arxiv.org/abs/1808.00278>`_ paper.

    Parameters
    ----------
    num_layers : int
        Numbers of layers. Options are 18, 34, 50, 101, 152.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    assert num_layers in resnet_spec, \
        "Invalid number of layers: %d. Options are %s" % (
            num_layers, str(resnet_spec.keys()))
    block_type, layers, channels = resnet_spec[num_layers]
    block_class = resnet_block_versions[0][block_type]
    net = Binet(block_class, layers, channels, **kwargs)
    if pretrained:
        raise ValueError("No pretrained model exists, yet.")
        # from ..model_store import get_model_file
        # net.load_parameters(get_model_file('resnet%d_v%d'%(num_layers, version),
        #                                    root=root), ctx=ctx)
    return net
def __init__(self, logger, root=os.path.join(base.data_dir(), 'datasets', 'cifar100'),
             fine_label=False, train=True, transform=None, c_way=5, k_shot=5,
             fix_class=None, base_class=0):
    self.name = 'NC_CIFAR100'
    self._train = train
    self._archive_file = ('cifar-100-binary.tar.gz', 'a0bb982c76b83111308126cc779a992fa506b90b')
    self._train_data = [('train.bin', 'e207cd2e05b73b1393c74c7f5e7bea451d63e08e')]
    self._test_data = [('test.bin', '8fb6623e830365ff53cf14adec797474f5478006')]
    self._fine_label = fine_label
    self._namespace = 'cifar100'
    self._c_way = c_way
    self._k_shot = k_shot
    self._fix_class = fix_class
    self._base_class = base_class
    self._logger = logger
    super(NC_CIFAR100, self).__init__(root, transform)  # pylint: disable=bad-super-call
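# Usage sketch (illustrative): a 5-way 5-shot split over CIFAR-100 fine
# labels; `logger` can be any logging.Logger instance.
import logging
train_set = NC_CIFAR100(logging.getLogger('nc_cifar100'),
                        fine_label=True, train=True, c_way=5, k_shot=5)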
def get_mobilenet_v2(multiplier, pretrained=False, ctx=cpu(),
                     root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""MobileNetV2 model from the `"Inverted Residuals and Linear Bottlenecks:
    Mobile Networks for Classification, Detection and Segmentation"
    <https://arxiv.org/abs/1801.04381>`_ paper.

    Parameters
    ----------
    multiplier : float
        The width multiplier for controlling the model size. Only multipliers
        that are no less than 0.25 are supported. The actual number of channels
        is equal to the original channel size multiplied by this multiplier.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    net = MobileNetV2(multiplier, **kwargs)
    if pretrained:
        from mxnet.gluon.model_zoo.model_store import get_model_file
        version_suffix = '{0:.2f}'.format(multiplier)
        if version_suffix in ('1.00', '0.50'):
            version_suffix = version_suffix[:-1]
        net.load_parameters(
            get_model_file('mobilenetv2_%s' % version_suffix, root=root), ctx=ctx)
    return net
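# Usage sketch: the pretrained file name embeds the multiplier, with '1.00'
# and '0.50' shortened to '1.0' and '0.5' (e.g. 'mobilenetv2_0.5').
net = get_mobilenet_v2(0.5, pretrained=True)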
def get_mobilenet_v2(multiplier, pretrained=False, ctx=cpu(),
                     root=os.path.join(base.data_dir(), 'models'), **kwargs):
    # Stub variant: `pretrained`, `ctx` and `root` are accepted for interface
    # compatibility but ignored; the network is returned uninitialized.
    net = MobileNetV2(multiplier, **kwargs)
    return net
def get_mobilenet(multiplier, pretrained=False, ctx=cpu(),
                  root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""MobileNet model from the `"MobileNets: Efficient Convolutional Neural
    Networks for Mobile Vision Applications"
    <https://arxiv.org/abs/1704.04861>`_ paper.

    Parameters
    ----------
    multiplier : float
        The width multiplier for controlling the model size. Only multipliers
        that are no less than 0.25 are supported. The actual number of channels
        is equal to the original channel size multiplied by this multiplier.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    net = MobileNet(multiplier, **kwargs)
    if pretrained:
        raise ValueError("No pretrained model exists, yet.")
        # from ..model_store import get_model_file
        # version_suffix = '{0:.2f}'.format(multiplier)
        # if version_suffix in ('1.00', '0.50'):
        #     version_suffix = version_suffix[:-1]
        # net.load_parameters(
        #     get_model_file('mobilenet%s' % version_suffix, root=root), ctx=ctx)
    return net
def __init__(self, root=os.path.join(base.data_dir(), 'datasets', 'core50'),
             split_id=0, train=True, transform=None):
    self.name = 'CORE50Split'
    self._train = train
    self._split_id = split_id
    self.globals = globals()
    super(CORE50Split, self).__init__(root, transform)
def __init__(self, root=os.path.join(base.data_dir(), 'datasets', 'cifar10'),
             train=True, transform=None):
    self._train = train
    self._archive_file = ('cifar-10-binary.tar.gz', 'fab780a1e191a7eda0f345501ccd62d20f7ed891')
    self._train_data = [('data_batch_1.bin', 'aadd24acce27caa71bf4b10992e9e7b2d74c2540'),
                        ('data_batch_2.bin', 'c0ba65cce70568cd57b4e03e9ac8d2a5367c1795'),
                        ('data_batch_3.bin', '1dd00a74ab1d17a6e7d73e185b69dbf31242f295'),
                        ('data_batch_4.bin', 'aab85764eb3584312d3c7f65fd2fd016e36a258e'),
                        ('data_batch_5.bin', '26e2849e66a845b7f1e4614ae70f4889ae604628')]
    self._test_data = [('test_batch.bin', '67eb016db431130d61cd03c7ad570b013799c88c')]
    self._namespace = 'cifar10'
    super(CIFAR10, self).__init__(root, transform)
def __init__(self, dataset, sample_inds,
             root=os.path.join(base.data_dir(), 'datasets', 'sample'), transform=None):
    super(SampleDataset, self).__init__(root=root, transform=transform)
    self.name = dataset.name
    self._dataset = dataset
    self._sample_inds = sample_inds
    self._data = self._dataset._data[self._sample_inds]
    self._label = self._dataset._label[self._sample_inds]
def get_densenet(num_layers, pretrained=False, ctx=cpu(), bits=None, bits_a=None,
                 opt_init_features=None, opt_growth_rate=None, opt_reduction=None,
                 opt_block_config=None, root=os.path.join(base.data_dir(), 'models'),
                 **kwargs):
    r"""Densenet-BC model from the `"Densely Connected Convolutional Networks"
    <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.

    Parameters
    ----------
    num_layers : int
        Number of layers for the variant of densenet. Options are 121, 161, 169, 201.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    init_features, growth_rate, bn_size, reduction, block_config = densenet_spec[num_layers]
    if num_layers == -1:
        block_config = opt_block_config
    num_transition_blocks = len(block_config) - 1
    if opt_init_features is not None:
        init_features = opt_init_features
    if opt_growth_rate is not None:
        growth_rate = opt_growth_rate
    if opt_reduction is not None:
        split = [float(x) for x in opt_reduction.split(",")]
        if len(split) == 1:
            split *= num_transition_blocks
        reduction = split
        assert len(reduction) == num_transition_blocks, \
            "need one value or one per transition block for --reduction"
    else:
        reduction = [reduction] * num_transition_blocks
    net = DenseNet(init_features, growth_rate, block_config, reduction, bn_size, **kwargs)
    if pretrained:
        raise ValueError("No pretrained model exists, yet.")
        # from ..model_store import get_model_file
        # net.load_parameters(get_model_file('densenet%d'%(num_layers), root=root), ctx=ctx)
    return net
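# Usage sketch: a single --reduction value is broadcast to every transition
# block, while a comma-separated list sets each transition individually
# (DenseNet-121 has four dense blocks, hence three transitions).
net = get_densenet(121, opt_reduction="0.5")          # same reduction everywhere
net = get_densenet(121, opt_reduction="0.5,0.4,0.3")  # one value per transition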
def __init__(self, d0, d1, root=os.path.join(base.data_dir(), 'datasets', 'merge'),
             transform=None):
    super(MergeDataset, self).__init__(root=root, transform=transform)
    if d1 is None and d0 is not None:
        self._data, self._label = d0._data, d0._label
    elif d0 is None and d1 is not None:
        self._data, self._label = d1._data, d1._label
    elif d0 is not None and d1 is not None:
        self._data = nd.concat(d0._data, d1._data, dim=0)
        self._label = np.concatenate([d0._label, d1._label])
    else:
        self._data, self._label = None, None
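# Usage sketch (hypothetical names): MergeDataset concatenates two datasets
# along the sample axis; either side may be None, in which case the other is
# taken over unchanged.
merged = MergeDataset(train_a, train_b)  # data via nd.concat, labels via np.concatenate
subset = MergeDataset(train_a, None)     # falls back to train_a's data and labels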
def alexnet(pretrained=False, ctx=cpu(),
            root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""AlexNet model from the `"One weird trick..."
    <https://arxiv.org/abs/1404.5997>`_ paper.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    net = AlexNet(**kwargs)
    if pretrained:
        from mxnet.gluon.model_zoo.model_store import get_model_file
        net.load_parameters(get_model_file('alexnet', root=root), ctx=ctx)
    return net
def eco_full(pretrained=False, ctx=gpu(),
             root=os.path.join(base.data_dir(), '/path/to/json'), **kwargs):
    r"""Build ECO_Full network

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default GPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    # Note: because the second component of the default root is absolute,
    # os.path.join discards the data_dir() prefix and the default collapses
    # to the placeholder '/path/to/json', contradicting the docstring.
    net = Eco(**kwargs)
    if pretrained:
        from mxnet.gluon.model_zoo.model_store import get_model_file
        net.load_parameters(get_model_file('eco_full_kinetics', root=root), ctx=ctx)
    return net
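# Demonstration of the pitfall noted above: on POSIX, os.path.join drops all
# components preceding an absolute one, so the data_dir() prefix is ignored.
import os.path
assert os.path.join('/home/user/.mxnet', '/path/to/json') == '/path/to/json'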
def __init__(self, root=os.path.join(base.data_dir(), 'datasets', 'cifar100'),
             split_id=0, train=True, transform=None):
    self.name = 'CIFAR100Split'
    self._train = train
    self._split_id = split_id
    self._archive_file = ('cifar-100-binary.tar.gz', 'a0bb982c76b83111308126cc779a992fa506b90b')
    self._train_data = [('train.bin', 'e207cd2e05b73b1393c74c7f5e7bea451d63e08e')]
    self._test_data = [('test.bin', '8fb6623e830365ff53cf14adec797474f5478006')]
    self._fine_label = True
    self._namespace = 'cifar100'
    self._brightness_jitter = [0, -0.1, 0.1, -0.2, 0.2]
    self._saturation_jitter = [0, -0.1, 0.1, -0.2, 0.2]
    super(CIFAR100Split, self).__init__(root, transform)  # pylint: disable=bad-super-call
def get_densenet(num_layers, pretrained=False, ctx=cpu(),
                 root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""Densenet-BC model from the `"Densely Connected Convolutional Networks"
    <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.

    Parameters
    ----------
    num_layers : int
        Number of layers for the variant of densenet. Options are 121, 161, 169, 201.
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    num_init_features, growth_rate, block_config = densenet_spec[num_layers]
    net = DenseNet(num_init_features, growth_rate, block_config, **kwargs)
    if pretrained:
        from mxnet.gluon.model_zoo.model_store import get_model_file
        net.load_parameters(get_model_file('densenet%d' % (num_layers), root=root), ctx=ctx)
    return net
def inception_v3(pretrained=False, ctx=cpu(),
                 root=os.path.join(base.data_dir(), 'models'), **kwargs):
    r"""Inception v3 model from `"Rethinking the Inception Architecture for
    Computer Vision" <http://arxiv.org/abs/1512.00567>`_ paper.

    Parameters
    ----------
    pretrained : bool, default False
        Whether to load the pretrained weights for model.
    ctx : Context, default CPU
        The context in which to load the pretrained weights.
    root : str, default $MXNET_HOME/models
        Location for keeping the model parameters.
    """
    net = Inception3(**kwargs)
    if pretrained:
        from mxnet.gluon.model_zoo.model_store import get_model_file
        net.load_parameters(get_model_file('inceptionv3', root=root), ctx=ctx)
    return net
def __init__(self, root=os.path.join(data_dir(), 'datasets', 'cifar10')):
    self._test_data = [('test_batch.bin', '67eb016db431130d61cd03c7ad570b013799c88c')]
    self._namespace = 'cifar10'
    super(CIFAR10_test, self).__init__(root, None)