    def __init__(self, *args, axis=1, activation='relu'):
        """Concatenate the outputs of several branch layers along `axis`.

        Parameters
        ----------
        *args : Layer, list or dict
            Branch definitions; a list is wrapped in a Sequential, a dict
            maps branch names to layers.
        axis : int
            Axis along which the branch outputs are concatenated.
        activation : str
            Activation applied after concatenation.
        """
        super(ConcateBlock, self).__init__()
        self.activation = get_activation(activation)
        self.axis = axis
        self.has_identity = False
        for i, arg in enumerate(args):
            if isinstance(arg, (Layer, list, dict)):
                if isinstance(arg, list):
                    # A list of layers defines one sequential branch.
                    arg = Sequential(*arg)
                if isinstance(arg, dict) and len(args) > 1:
                    raise ValueError('more than one dict argument is not supported.')
                elif isinstance(arg, dict):
                    for k, v in arg.items():
                        if isinstance(v, Identity):
                            self.has_identity = True
                            self.add_module('Identity', v)
                        else:
                            self.add_module(k, v)
                elif isinstance(arg, Identity):
                    self.has_identity = True
                    self.add_module('Identity', arg)
                else:
                    self.add_module('branch{0}'.format(i + 1), arg)
        # Guarantee a second branch: fall back to an identity branch.
        if len(self._modules) == 1 and not self.has_identity:
            self.add_module('Identity', Identity())
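
# A minimal usage sketch (hypothetical filter counts; assumes the layer classes
# referenced above are available in this module). The two branch outputs are
# concatenated along the channel axis and passed through ReLU:
concat = ConcateBlock(
    Conv2d_Block((1, 1), num_filters=32, strides=1, auto_pad=True,
                 normalization='batch', activation='relu'),
    Conv2d_Block((3, 3), num_filters=32, strides=1, auto_pad=True,
                 normalization='batch', activation='relu'),
    axis=1, activation='relu')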
Example 2
def basic_block(num_filters=64, base_width=64, strides=1, expansion=4,
                conv_shortcut=False, use_bias=False, name=None):
    # Use a projection shortcut when the spatial size or channel count changes.
    shortcut = Identity()
    if strides > 1 or conv_shortcut:
        shortcut = Conv2d_Block((1, 1), num_filters=num_filters, strides=strides,
                                auto_pad=True, padding_mode='zero', normalization='batch',
                                activation=None, use_bias=use_bias, name=name + '_downsample')

    return ShortCut2d(
        Sequential(
            Conv2d_Block((3, 3), num_filters=num_filters, strides=strides, auto_pad=True,
                         padding_mode='zero', normalization='batch', activation='relu',
                         use_bias=use_bias, name=name + '_0_conv'),
            Conv2d_Block((3, 3), num_filters=num_filters, strides=1, auto_pad=True,
                         padding_mode='zero', normalization='batch', activation=None,
                         use_bias=use_bias, name=name + '_1_conv')),
        shortcut, activation='relu', name=name)
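
# A minimal usage sketch (hypothetical stage sizes and names): stack two basic
# blocks, downsampling at the start of the stage via the projection shortcut.
stage2 = Sequential(
    basic_block(num_filters=128, strides=2, conv_shortcut=True, name='block2_1'),
    basic_block(num_filters=128, strides=1, name='block2_2'))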
Example 3
def bottleneck(num_filters=64,
               strides=1,
               expansion=4,
               conv_shortcut=True,
               use_bias=False,
               name=None):
    # width = int(num_filters * (base_width / 64.)) * 1  # groups
    shortcut = Identity()
    shortcut_name = '0'
    if conv_shortcut:
        # Projection shortcut to match the expanded channel count.
        shortcut = Conv2d_Block((1, 1),
                                num_filters=num_filters * expansion,
                                strides=strides,
                                auto_pad=True,
                                padding_mode='zero',
                                normalization='batch',
                                activation=None,
                                use_bias=use_bias)

    return ShortCut2d(
        {
            shortcut_name: shortcut,
            '1': Sequential(
                Conv2d_Block((1, 1), num_filters=num_filters, strides=strides,
                             auto_pad=True, padding_mode='zero',
                             normalization='batch', activation='relu',
                             use_bias=use_bias),
                Conv2d_Block((3, 3), num_filters=num_filters, strides=1,
                             auto_pad=True, padding_mode='zero',
                             normalization='batch', activation='relu',
                             use_bias=use_bias, name=name),
                Conv2d_Block((1, 1), num_filters=num_filters * expansion,
                             strides=1, auto_pad=True, padding_mode='zero',
                             normalization='batch', activation=None,
                             use_bias=use_bias, name=name)),
        },
        activation='relu',
        name=name)
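
# A minimal usage sketch (hypothetical sizes and names): the first block of a
# ResNet-50-style stage projects the shortcut; the remaining blocks keep an
# identity shortcut because their input already has num_filters * expansion channels.
stage3 = Sequential(
    bottleneck(num_filters=128, strides=2, conv_shortcut=True, name='block3_1'),
    bottleneck(num_filters=128, strides=1, conv_shortcut=False, name='block3_2'),
    bottleneck(num_filters=128, strides=1, conv_shortcut=False, name='block3_3'))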
Example 4
def inverted_residual(in_filters,
                      num_filters=64,
                      strides=1,
                      expansion=4,
                      name=''):
    mid_filters = int(round(in_filters * expansion))
    layers = []
    # Pointwise expansion (skipped when the expansion ratio is 1).
    if expansion != 1:
        layers.append(
            Conv2d_Block((1, 1),
                         num_filters=mid_filters,
                         strides=1,
                         auto_pad=True,
                         padding_mode='zero',
                         normalization='batch',
                         activation='relu6',
                         name=name + '_{0}_conv'.format(len(layers))))

    # Depthwise 3x3 convolution; this stage carries the stride.
    layers.append(
        DepthwiseConv2d_Block((3, 3),
                              depth_multiplier=1,
                              strides=strides,
                              auto_pad=True,
                              padding_mode='zero',
                              normalization='batch',
                              activation='relu6',
                              name=name + '_{0}_conv'.format(len(layers))))
    # Linear pointwise projection back down to num_filters.
    layers.append(
        Conv2d_Block((1, 1),
                     num_filters=num_filters,
                     strides=1,
                     auto_pad=False,
                     padding_mode='zero',
                     normalization='batch',
                     activation=None,
                     name=name + '_{0}_conv'.format(len(layers))))
    # Residual connection only when input and output shapes match.
    if strides == 1 and in_filters == num_filters:
        return ShortCut2d(Sequential(*layers), Identity(), activation=None)
    else:
        return Sequential(*layers)
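
# A minimal usage sketch (hypothetical channel counts; MobileNetV2 typically
# uses expansion=6). The second block gets the residual connection because
# strides == 1 and in_filters == num_filters:
features = Sequential(
    inverted_residual(in_filters=16, num_filters=24, strides=2, expansion=6, name='ir1'),
    inverted_residual(in_filters=24, num_filters=24, strides=1, expansion=6, name='ir2'))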
Example 5

    def __init__(self,
                 *args,
                 axis=-1,
                 branch_from=None,
                 activation=None,
                 mode='add',
                 name=None,
                 keep_output=False,
                 **kwargs):
        """

        Args:
            *args: branch layer definitions.
            axis (int): axis used when merging branch outputs.
            branch_from (str): name of an upstream layer to branch from.
            activation: activation applied after the merge.
            mode (str): merge mode, e.g. 'add' or 'concate'.
            name (str): layer name.
            keep_output (bool): whether to keep the layer output accessible.
        """
        super(ShortCut2d, self).__init__(name=name)
        self.activation = get_activation(activation)
        self.has_identity = False
        self.mode = mode
        self.axis = axis
        self.branch_from = branch_from
        self.branch_from_uuid = None
        self.keep_output = keep_output

        for i, arg in enumerate(args):
            if isinstance(arg, (Layer, tf.Tensor, list, dict)):
                if isinstance(arg, list):
                    # A list of layers defines one sequential branch.
                    arg = Sequential(*arg)
                if isinstance(arg, (dict, OrderedDict)) and len(args) > 1:
                    raise ValueError(
                        'more than one dict argument is not supported.')
                elif isinstance(arg, OrderedDict):
                    # An OrderedDict preserves the caller's branch order and names.
                    for k, v in arg.items():
                        if isinstance(v, Identity):
                            self.has_identity = True
                            self.add_module('Identity', v)
                        else:
                            self.add_module(k, v)
                elif isinstance(arg, dict):
                    # A plain dict is traversed in sorted-key order for determinism.
                    for k in sorted(arg.keys()):
                        v = arg[k]
                        if isinstance(v, Identity):
                            self.has_identity = True
                            self.add_module('Identity', v)
                        else:
                            self.add_module(str(k), v)
                elif isinstance(arg, Identity):
                    self.has_identity = True
                    self.add_module('Identity', arg)
                elif isinstance(arg, Layer):
                    # Prefer the layer's own name when it was explicitly set.
                    if len(arg.name) > 0 and arg.name != arg._name:
                        self.add_module(arg.name, arg)
                    else:
                        self.add_module('branch{0}'.format(i + 1), arg)
                else:
                    raise ValueError('{0} is not supported.'.format(
                        arg.__class__.__name__))
        # Guarantee a residual identity branch when only one branch was given.
        if (len(self._modules) == 1 and not self.has_identity
                and self.branch_from is None):
            self.has_identity = True
            self.add_module('Identity', Identity())
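
# A minimal usage sketch (hypothetical shapes; assumes the layer classes above).
# The two branch outputs are merged according to `mode` ('add' by default) and
# the optional activation is applied afterwards:
residual = ShortCut2d(
    Sequential(
        Conv2d_Block((3, 3), num_filters=64, strides=1, auto_pad=True,
                     normalization='batch', activation='relu'),
        Conv2d_Block((3, 3), num_filters=64, strides=1, auto_pad=True,
                     normalization='batch', activation=None)),
    Identity(),
    activation='relu')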
Example 6
    def __init__(self,
                 *args,
                 axis=-1,
                 branch_from=None,
                 activation=None,
                 mode='add',
                 name=None,
                 keep_output=False,
                 **kwargs):
        """

        Args:
            *args ():
            axis ():
            branch_from ():
            activation ():
            mode (str):  'add' 'dot' 'concate'
            name (str):
            keep_output (bool):
            **kwargs ():

        """
        super(ShortCut, self).__init__(name=name, keep_output=keep_output)
        valid_mode = ['add', 'subtract', 'concate', 'dot', 'maxout']
        if mode in valid_mode:
            self.mode = mode
        else:
            raise ValueError(
                '{0} is not a valid mode; please use one of {1}.'.format(
                    mode, valid_mode))
        self.activation = get_activation(activation)
        self.has_identity = False

        self.axis = axis
        self.branch_from = branch_from
        self.branch_from_uuid = None

        self.keep_output = keep_output

        for i, arg in enumerate(args):
            if isinstance(arg, (Layer, Tensor, list, dict)):
                if isinstance(arg, list):
                    # A list of layers defines one sequential branch.
                    arg = Sequential(*arg)
                if isinstance(arg, (dict, OrderedDict)) and len(args) > 1:
                    raise ValueError(
                        'more than one dict argument is not supported.')
                elif isinstance(arg, OrderedDict):
                    # An OrderedDict preserves the caller's branch order and names.
                    for k, v in arg.items():
                        if isinstance(v, Identity):
                            self.has_identity = True
                            self.add_module('Identity', v)
                        else:
                            self.add_module(k, v)
                elif isinstance(arg, dict):
                    # A plain dict is traversed in sorted-key order for determinism.
                    for k in sorted(arg.keys()):
                        v = arg[k]
                        if isinstance(v, Identity):
                            self.has_identity = True
                            self.add_module('Identity', v)
                        else:
                            self.add_module(str(k), v)
                elif isinstance(arg, Identity):
                    self.has_identity = True
                    self.add_module('Identity', arg)
                elif isinstance(arg, Layer):
                    # Prefer the layer's own name when it was explicitly set.
                    if len(arg.name) > 0 and arg.name != arg.default_name:
                        self.add_module(arg.name, arg)
                    else:
                        self.add_module('branch{0}'.format(i + 1), arg)
                else:
                    raise ValueError('{0} is not supported.'.format(
                        arg.__class__.__name__))
        # Guarantee a residual identity branch when only one branch was given;
        # 'concate' mode is excluded because concatenating with the input
        # would change the channel count.
        if (len(self._modules) == 1 and not self.has_identity
                and self.branch_from is None and mode != 'concate'):
            self.has_identity = True
            self.add_module('Identity', Identity())
        self.to(self.device)
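
# A minimal usage sketch (hypothetical branches): in 'concate' mode the branch
# outputs are joined along `axis` instead of being summed, so no identity
# branch is added automatically.
fused = ShortCut(
    Conv2d_Block((1, 1), num_filters=32, strides=1, auto_pad=True,
                 normalization='batch', activation='relu'),
    Conv2d_Block((3, 3), num_filters=32, strides=1, auto_pad=True,
                 normalization='batch', activation='relu'),
    mode='concate', axis=-1)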
Example 7
def efficient_block(expand_ratio=1,
                    filters_in=32,
                    filters_out=16,
                    kernel_size=3,
                    strides=1,
                    zero_pad=0,
                    se_ratio=0,
                    drop_rate=0.2,
                    is_shortcut=True,
                    name='',
                    **kwargs):
    expand_ratio = kwargs.get('expand_ratio', expand_ratio)
    filters_in = kwargs.get('filters_in', filters_in)
    filters_out = kwargs.get('filters_out', filters_out)
    kernel_size = kwargs.get('kernel_size', kernel_size)
    # The skip connection is only valid when input/output shapes are preserved.
    is_shortcut = filters_in == filters_out and strides == 1 and kwargs.get(
        'id_skip', is_shortcut)
    filters = filters_in * expand_ratio
    if expand_ratio == 1 and strides == 1:
        # No expansion phase: depthwise conv -> optional squeeze-excite -> projection.
        bottleneck = Sequential(
            DepthwiseConv2d_Block((kernel_size, kernel_size),
                                  depth_multiplier=1,
                                  strides=strides,
                                  auto_pad=True,
                                  padding_mode='zero',
                                  normalization='batch',
                                  activation='swish',
                                  name=name + 'dwconv'),
            SqueezeExcite(se_filters=builtins.max(1, int(filters_in *
                                                         se_ratio)),
                          num_filters=filters_in,
                          use_bias=True) if 0 < se_ratio <= 1 else Identity(),
            Conv2d_Block((1, 1),
                         num_filters=filters_out,
                         strides=1,
                         auto_pad=True,
                         normalization='batch',
                         activation=None,
                         name=name + 'se'),
            Dropout(dropout_rate=drop_rate)
            if is_shortcut and drop_rate > 0 else Identity())

        if is_shortcut:
            return ShortCut2d(Identity(), bottleneck)
        else:
            return bottleneck

    else:
        # Full MBConv: 1x1 expansion -> depthwise conv -> optional squeeze-excite -> projection.
        bottleneck = Sequential(
            Conv2d_Block((1, 1),
                         num_filters=filters,
                         strides=1,
                         auto_pad=True,
                         normalization='batch',
                         activation='swish',
                         name=name + 'expand_bn'),
            DepthwiseConv2d_Block((kernel_size, kernel_size),
                                  depth_multiplier=1,
                                  strides=strides,
                                  auto_pad=True,
                                  padding_mode='zero',
                                  normalization='batch',
                                  activation='swish',
                                  name=name + 'dwconv'),
            SqueezeExcite(se_filters=builtins.max(1, int(filters_in *
                                                         se_ratio)),
                          num_filters=filters,
                          use_bias=True) if 0 < se_ratio <= 1 else Identity(),
            Conv2d_Block((1, 1),
                         num_filters=filters_out,
                         strides=1,
                         auto_pad=True,
                         normalization='batch',
                         activation=None,
                         name=name + 'se'),
            Dropout(dropout_rate=drop_rate)
            if is_shortcut and drop_rate > 0 else Identity())
        if is_shortcut:
            return ShortCut2d(Identity(), bottleneck)
        else:
            return bottleneck
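
# A minimal usage sketch (parameters loosely follow the first two stages of an
# EfficientNet-B0-style network; names and drop rates are illustrative):
block1 = efficient_block(expand_ratio=1, filters_in=32, filters_out=16,
                         kernel_size=3, strides=1, se_ratio=0.25,
                         drop_rate=0.0, name='block1a_')
block2 = efficient_block(expand_ratio=6, filters_in=16, filters_out=24,
                         kernel_size=3, strides=2, se_ratio=0.25,
                         drop_rate=0.025, name='block2a_')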