Example #1
def register_slot_builder(builder,
                          _reg_id=None,
                          args_fmt=None,
                          kwargs_fmt=None):
    """Register a builder that converts slot to module."""
    _reg_id = _reg_id or builder.__qualname__
    register(get_slot_builder(builder, args_fmt, kwargs_fmt), _reg_id)
    return builder
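
A minimal sketch of the default-id fallback above (`_reg_id or builder.__qualname__`), using a plain dict in place of the real modnas registry; get_slot_builder and register themselves are assumed to come from the surrounding module and are not reimplemented here.

# Illustration only: a plain dict stands in for the real registry.
_sketch_registry = {}

def _sketch_register(obj, reg_id=None):
    reg_id = reg_id or obj.__qualname__   # same fallback as register_slot_builder
    _sketch_registry[reg_id] = obj

def my_builder(slot):
    return slot

_sketch_register(my_builder)               # stored under 'my_builder'
_sketch_register(my_builder, 'MyBuilder')  # stored under the explicit id
assert set(_sketch_registry) == {'my_builder', 'MyBuilder'}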
Example #2

def resnext101_32x8d(resnet_cls, **kwargs):
    """Construct a ResNeXt-50 32x8d model."""
    return resnet_cls(block=Bottleneck,
                      layers=[3, 4, 23, 3],
                      groups=32,
                      width_per_group=8,
                      **kwargs)


def resnet(resnet_cls, bottleneck=False, **kwargs):
    """Construct a ResNet model."""
    block = Bottleneck if bottleneck else BasicBlock
    return resnet_cls(block=block, **kwargs)


for net_cls in [CIFARResNet, ImageNetResNet]:
    name = 'CIFAR-' if net_cls == CIFARResNet else 'ImageNet-'
    register(partial(resnet10, net_cls), name + 'ResNet-10')
    register(partial(resnet18, net_cls), name + 'ResNet-18')
    register(partial(resnet32, net_cls), name + 'ResNet-32')
    register(partial(resnet34, net_cls), name + 'ResNet-34')
    register(partial(resnet50, net_cls), name + 'ResNet-50')
    register(partial(resnet56, net_cls), name + 'ResNet-56')
    register(partial(resnet101, net_cls), name + 'ResNet-101')
    register(partial(resnet152, net_cls), name + 'ResNet-152')
    register(partial(resnext50_32x4d, net_cls), name + 'ResNeXt-50')
    register(partial(resnext101_32x8d, net_cls), name + 'ResNeXt-101')
    register(partial(resnet, net_cls), name + 'ResNet')
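
Usage sketch for the registrations above: each registry entry is a functools.partial that fixes the network class, so building a model just forwards the remaining keyword arguments to the underlying constructor. The num_classes keyword below is an assumption for illustration, not taken from the code above.

from functools import partial

# 'CIFAR-ResNet-50' maps to partial(resnet50, CIFARResNet); calling the
# partial is equivalent to resnet50(CIFARResNet, num_classes=10).
builder = partial(resnet50, CIFARResNet)
model = builder(num_classes=10)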
Example #3
    [6, 80, 4, 2],
    [6, 96, 4, 1],
    [6, 192, 4, 2],
    [6, 320, 1, 1]
]


def mobilenetv2(cfgs=None, cifar=False, **kwargs):
    """Return MobileNetV2 model."""
    if cfgs is None:
        cfgs = _mbv2_ori_cfgs
    if cifar:
        # Copy before editing so the shared module-level config lists
        # are not mutated across builder calls.
        cfgs = [list(c) for c in cfgs]
        cfgs[0][3] = 1
        cfgs[6][3] = 1
    return MobileNetV2(cfgs=cfgs, **kwargs)


for cifar in [True, False]:
    img = 'CIFAR' if cifar else 'ImageNet'
    register(partial(mobilenetv2, cifar=cifar), '{}_MobileNetV2'.format(img))
    register(partial(mobilenetv2, cfgs=_mbv2_gpu_cfgs, cifar=cifar), '{}_MobileNetV2_GPU'.format(img))


kernel_sizes = [3, 5, 7, 9]
expand_ratios = [1, 3, 6, 9]
for k in kernel_sizes:
    for e in expand_ratios:
        p = get_same_padding(k)
        builder = partial(MobileInvertedConv, expansion=e, kernel_size=k, padding=p)
        register_slot_builder(builder, 'MB{}E{}'.format(k, e), 'i1o1s2')
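
The exact behaviour of get_same_padding is not shown here; for the odd kernel sizes used above it is assumed to follow the usual k // 2 rule, which preserves spatial size at stride 1. A hedged stand-in:

def _same_padding_sketch(kernel_size):
    # Assumed rule: pad an odd kernel by k // 2 so stride-1 convs keep spatial size.
    return kernel_size // 2

assert [_same_padding_sketch(k) for k in (3, 5, 7, 9)] == [1, 2, 3, 4]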
Example #4
class HardSigmoid(nn.Module):
    """Hard Sigmoid activation function."""
    def __init__(self, inplace=True):
        super(HardSigmoid, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        """Return module output."""
        return F.relu6(x + 3., inplace=self.inplace) / 6.


class HardSwish(nn.Module):
    """Hard Swish activation function."""
    def __init__(self, inplace=True):
        super(HardSwish, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        """Return module output."""
        return x * F.relu6(x + 3., inplace=self.inplace) / 6.


class Swish(nn.Module):
    """Swish activation function."""
    def forward(self, x):
        """Return module output."""
        return x * x.sigmoid()  # Tensor.sigmoid avoids the deprecated F.sigmoid


register(HardSigmoid)
register(HardSwish)
register(Swish)
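
A quick sanity-check sketch, assuming PyTorch >= 1.6 (where nn.Hardsigmoid and nn.Hardswish are built in) and the module's existing torch imports: the hand-rolled activations above should match the library versions.

import torch

x = torch.linspace(-5., 5., steps=11)
assert torch.allclose(HardSigmoid(inplace=False)(x), torch.nn.Hardsigmoid()(x))
assert torch.allclose(HardSwish(inplace=False)(x), torch.nn.Hardswish()(x))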
Example #5
"""Torch activation functions."""
import torch.nn
from modnas.registry.arch_space import register

modules = [
    'ELU',
    'Hardshrink',
    'Hardtanh',
    'LeakyReLU',
    'LogSigmoid',
    'PReLU',
    'ReLU',
    'ReLU6',
    'RReLU',
    'SELU',
    'CELU',
    'Sigmoid',
    'Softplus',
    'Softshrink',
    'Softsign',
    'Tanh',
    'Tanhshrink',
    'Threshold',
]

for name in modules:
    attr = getattr(torch.nn, name, None)
    if attr is not None:
        register(attr)
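
Because of the getattr(..., None) guard, the list can safely be extended with activations that only exist in newer PyTorch releases; absent names are simply skipped. The names below are illustrative additions, not part of the original list.

for name in ['SiLU', 'Hardswish', 'Mish']:
    attr = getattr(torch.nn, name, None)
    if attr is not None:
        register(attr)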
Example #6
def build_from_config(darts_cls, **kwargs):
    """Return architecture built from config."""
    n_nodes = 4
    if 'nodes' in kwargs:
        n_nodes = kwargs.pop('nodes')
    darts_kwargs = {
        'cell_cls': DAGLayer,
        'cell_kwargs': {
            'chn_in': None,
            'chn_out': None,
            'stride': None,
            'n_nodes': n_nodes,
            'allocator': 'ReplicateAllocator',
            'merger_state': 'SumMerger',
            'merger_out': 'ConcatMerger',
            'enumerator': 'CombinationEnumerator',
            'preproc': None,
            'edge_cls': Slot,
            'edge_kwargs': {
                '_chn_in': None,
                '_chn_out': None,
                '_stride': None,
            },
        },
    }
    darts_kwargs.update(kwargs)
    return darts_cls(**darts_kwargs)


register(build_from_config, 'DARTS')
register(partial(build_from_config, ImageNetDARTSLikeNet), 'ImageNet-DARTS')
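
A minimal, self-contained sketch of the same pop/update pattern: 'nodes' is consumed for the nested cell config, while any other keyword overrides a top-level default via dict.update. Names here are placeholders, not the classes used above.

def _build_sketch(**kwargs):
    n_nodes = kwargs.pop('nodes', 4)
    cfg = {'cell_cls': 'DAGLayer', 'cell_kwargs': {'n_nodes': n_nodes}}
    cfg.update(kwargs)
    return cfg

assert _build_sketch(nodes=6)['cell_kwargs']['n_nodes'] == 6
assert _build_sketch(cell_cls='Other')['cell_cls'] == 'Other'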
Example #7
# -*- coding:utf-8 -*-

# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Torch activation functions."""

import torch.nn as nn
from modnas.registry.arch_space import register

# torch activations
register(nn.ELU)
register(nn.Hardshrink)
register(nn.Hardtanh)
register(nn.LeakyReLU)
register(nn.LogSigmoid)
# register(torch.nn.MultiheadAttention)
register(nn.PReLU)
register(nn.ReLU)
register(nn.ReLU6)
register(nn.RReLU)
register(nn.SELU)
register(nn.CELU)
register(nn.Sigmoid)
register(nn.Softplus)
register(nn.Softshrink)
register(nn.Softsign)