Example #1
0
    def __init__(self, name, nb_act_maps, ratio=16, act_type="relu"):
        """
        Gather-Excite-Plus module: gathers spatial context with three strided
        depthwise convolutions, then emits per-channel sigmoid excitement weights.

        :param name: Prefix for all blocks
        :param nb_act_maps: Number of activation maps (channels) to re-weight
        :param ratio: Reduction ratio for the hidden dense layer
        :param act_type: Activation type to use
        """
        super(_GatherExcitePlus, self).__init__(prefix=name)

        self.nb_act_maps = nb_act_maps
        self.body = HybridSequential(prefix="")

        hidden_units = nb_act_maps // ratio
        with self.name_scope():
            # gather step: three identical strided depthwise (grouped) convolutions
            for _ in range(3):
                self.body.add(Conv2D(nb_act_maps, kernel_size=3, padding=1,
                                     groups=nb_act_maps, strides=2, use_bias=False))
                self.body.add(get_act(act_type))
                self.body.add(BatchNorm())

            # excite step: bottleneck MLP ending in a sigmoid gate
            self.body.add(Dense(hidden_units))
            self.body.add(get_act(act_type))
            self.body.add(Dense(nb_act_maps))
            self.body.add(get_act("sigmoid"))
Example #2
0
    def __init__(self, name, channels=1, fc0=256, bn_mom=0.9, act_type="relu"):
        """
        Definition of the value head. Same as the AlphaZero authors' version but
        with the order of Batch-Norm and ReLU swapped.

        :param name: name prefix for all blocks
        :param channels: Number of channels for 1st conv operation in branch 0
        :param fc0: Number of units in Dense/Fully-Connected layer
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        """

        super(_ValueHeadRise, self).__init__(prefix=name + "_")

        self.body = HybridSequential(prefix="")

        with self.name_scope():
            # 1x1 conv reduces the feature maps to `channels` planes
            self.body.add(
                Conv2D(channels=channels, kernel_size=(1, 1), use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
            self.body.add(Flatten())
            self.body.add(Dense(units=fc0))
            self.body.add(get_act(act_type))
            # scalar value output squashed to [-1, 1]
            self.body.add(Dense(units=1))
            self.body.add(get_act("tanh"))
    def __init__(self, channels, bn_mom, act_type, unit_name, use_se=True, res_scale_fac=0.2):
        """
        Residual block with two 3x3 conv/BN stages and an optional
        squeeze-excitation module.

        :param channels: Number of channels used in the conv-operations
        :param bn_mom: Batch normalization momentum
        :param act_type: Activation function to use
        :param unit_name: Unit name of the residual block (only used for description (string))
        :param use_se: Whether to append a squeeze-excitation module
        :param res_scale_fac: Scalar applied to the residual activation maps
        """
        super(_RiseResidualBlock, self).__init__(unit_name)
        self.act_type = act_type
        self.unit_name = unit_name
        self.res_scale_fac = res_scale_fac

        self.use_se = use_se

        # branch 0: conv -> BN -> act -> conv -> BN
        self.body = HybridSequential()
        self.body.add(Conv2D(channels=channels, kernel_size=(3, 3), padding=(1, 1), use_bias=False,
                             prefix='%s_conv0' % unit_name))
        self.body.add(BatchNorm(momentum=bn_mom, prefix='%s_bn0' % self.unit_name))
        self.body.add(get_act(act_type, prefix='%s_%s0' % (unit_name, act_type)))

        self.body.add(Conv2D(channels=channels, kernel_size=(3, 3), padding=(1, 1), use_bias=False,
                             prefix='%s_conv1' % unit_name))
        self.body.add(BatchNorm(momentum=bn_mom, prefix='%s_bn1' % self.unit_name))

        self.act0 = get_act(act_type, prefix='%s_%s1' % (unit_name, act_type))

        # se0 is always defined so later code can safely test it;
        # idiomatic truthiness check replaces the former "use_se is True"
        self.se0 = None
        if use_se:
            self.se0 = _SqueezeExcitation('%s_se0' % unit_name, channels, 16, act_type)
Example #4
0
    def __init__(self, name, channels=1, fc0=256, bn_mom=0.9, act_type="relu"):
        """
        Value head as proposed by the AlphaZero authors:
        1x1 conv -> BN -> act -> flatten -> dense -> act -> dense(1) -> tanh.

        :param name: name prefix for all blocks
        :param channels: Number of channels for 1st conv operation in branch 0
        :param fc0: Number of units in Dense/Fully-Connected layer
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        """

        super(_ValueHeadAlphaZero, self).__init__(prefix=name + "_")

        self.body = HybridSequential(prefix="")

        with self.name_scope():
            # assemble the head as a flat list, then register every layer
            head_layers = [
                Conv2D(channels=channels, kernel_size=(1, 1), use_bias=False),
                BatchNorm(momentum=bn_mom),
                get_act(act_type),
                Flatten(),
                Dense(units=fc0),
                get_act(act_type),
                Dense(units=1),
                get_act("tanh"),
            ]
            for layer in head_layers:
                self.body.add(layer)
Example #5
0
    def __init__(
        self,
        name,
        in_ch,
        ch_0_0=192,
        ch_1_0=128,
        ch_1_1=224,
        ch_1_2=256,
        ch=2144,
        bn_mom=0.9,
        act_type="relu",
        res_scale_fac=0.2,
        use_se=True,
        shortcut=True,
    ):
        """
        Definition of the InceptionResnetC block

        :param name: name prefix for all blocks
        :param in_ch: Number of input channels entering the block
        :param ch_0_0: Number of channels for 1st conv operation in branch 0
        :param ch_1_0: Number of channels for 1st conv operation in branch 1
        :param ch_1_1: Number of channels for 2nd conv operation in branch 1
        :param ch_1_2: Number of channels for 3rd conv operation in branch 1
        :param ch: Number of channels for conv operation after concatenating branches (no act is applied here)
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        :param res_scale_fac: Constant multiply scalar which is applied to the residual activations maps
        :param use_se: Whether squeeze-excitation is applied (handled by the base class)
        :param shortcut: Whether the shortcut connection is enabled (handled by the base class)
        """
        super(_InceptionResnetC, self).__init__(name, ch, res_scale_fac, act_type, bn_mom, use_se, shortcut)
        self.res_scale_fac = res_scale_fac
        self.block_name = name
        self.body = HybridSequential(prefix="")
        self.branches = HybridConcurrent(axis=1, prefix="")  # entry point for all branches
        # branch 0 of block type C
        self.b_0 = HybridSequential()
        self.b_0.add(Conv2D(channels=ch_0_0, kernel_size=(1, 1), prefix="%s_0_conv0" % name, in_channels=in_ch))
        self.b_0.add(get_act(act_type, prefix="%s_0_%s0" % (name, act_type)))
        # branch 1 of block type C
        self.b_1 = HybridSequential()
        self.b_1.add(Conv2D(channels=ch_1_0, kernel_size=(1, 1), prefix="%s_1_conv0" % name, in_channels=in_ch))
        # NOTE(review): the first two activation prefixes below previously read
        # "_2_" (copy-paste) although they belong to branch 1; renamed to "_1_"
        # for consistency. This changes parameter naming, so previously exported
        # weights keyed on the old names would need remapping.
        self.b_1.add(get_act(act_type, prefix="%s_1_%s0" % (name, act_type)))
        self.b_1.add(
            Conv2D(channels=ch_1_1, kernel_size=(1, 3), padding=(0, 1), prefix="%s_1_conv1" % name, in_channels=ch_1_0)
        )
        self.b_1.add(get_act(act_type, prefix="%s_1_%s1" % (name, act_type)))
        self.b_1.add(
            Conv2D(channels=ch_1_2, kernel_size=(3, 1), padding=(1, 0), prefix="%s_1_conv2" % name, in_channels=ch_1_1)
        )
        self.b_1.add(get_act(act_type, prefix="%s_1_%s2" % (name, act_type)))
        # concatenate all branches and add them to the body
        self.branches.add(self.b_0)
        self.branches.add(self.b_1)
        self.body.add(self.branches)
        # apply a single CNN layer without activation function
        self.body.add(
            Conv2D(
                channels=ch, kernel_size=(1, 1), prefix="%s_conv0" % name, in_channels=ch_0_0 + ch_1_2, use_bias=False
            )
        )
Example #6
0
    def __init__(self,
                 name,
                 in_channels,
                 groups=2,
                 se_type="cSE",
                 use_residual=True,
                 act_type="relu",
                 **kwargs):
        """
        Shuffle block: two 3x3 conv/BN stages on the right half of the channels,
        an optional squeeze-excitation module, and a final channel shuffle.

        :param name: name prefix for all blocks
        :param in_channels: number of input channels (split in half internally)
        :param groups: number of groups for the channel shuffle
        :param se_type: squeeze-excitation type, one of [None, "cSE", "sSE", "scSE"]
        :param use_residual: whether a residual connection (and its activation) is used
        :param act_type: activation type to use
        """
        super(_ShuffleBlock, self).__init__(prefix=name + "_")

        self.in_channels = in_channels
        self.nb_right_channels = in_channels // 2

        self.groups = groups

        self.body = HybridSequential(prefix="")
        self.use_residual = use_residual

        with self.name_scope():
            self.body.add(
                Conv2D(channels=self.nb_right_channels,
                       kernel_size=3,
                       strides=1,
                       padding=1,
                       groups=1,
                       use_bias=False))
            self.body.add(BatchNorm())
            self.body.add(get_act(act_type))
            self.body.add(
                Conv2D(channels=self.nb_right_channels,
                       kernel_size=3,
                       strides=1,
                       padding=1,
                       groups=1,
                       use_bias=False))
            self.body.add(BatchNorm())
            if se_type:
                if se_type == "cSE":
                    # apply channel-wise squeeze excitation
                    self.body.add(
                        _ChannelSqueezeExcitation("se0",
                                                  self.nb_right_channels, 16,
                                                  act_type))
                elif se_type == "sSE":
                    self.body.add(_SpatialSqueezeExcitation("se0"))
                elif se_type == "scSE":
                    self.body.add(
                        _SpatialChannelSqueezeExcitation(
                            "se0", self.nb_right_channels, 2, act_type))
                else:
                    # fixed: message previously lacked the closing "]" bracket
                    raise Exception(
                        'Unsupported Squeeze Excitation Module: Choose either [None, "cSE", "sSE", "scSE"]'
                    )

        if self.use_residual:
            self.act = get_act(act_type)
        # NOTE(review): "shufller" is a typo, but it is a public attribute name
        # that other code may reference -- not renamed here.
        self.shufller = _ShuffleChannelsBlock(groups)
Example #7
0
    def __init__(self,
                 name,
                 channels,
                 bn_mom=0.9,
                 act_type="relu",
                 use_se=False):
        """
        Definition of the stem proposed by the alpha zero authors

        :param name: name prefix for all blocks
        :param channels: Number of channels for the final conv operation
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        :param use_se: accepted for interface compatibility; not used in this stem
        """

        super(_StemRise, self).__init__(prefix=name + "_")

        self.body = HybridSequential(prefix="")

        with self.name_scope():
            # five identical conv -> BN -> act stages; the last maps to `channels`
            for stage_channels in (64, 64, 128, 128, channels):
                self.body.add(
                    Conv2D(channels=stage_channels,
                           kernel_size=(3, 3),
                           padding=(1, 1),
                           use_bias=False))
                self.body.add(BatchNorm(momentum=bn_mom))
                self.body.add(get_act(act_type))
Example #8
0
    def __init__(self,
                 name,
                 channels,
                 bn_mom=0.9,
                 act_type="relu",
                 se_type=None):
        """
        Definition of the stem proposed by the alpha zero authors

        :param name: name prefix for all blocks
        :param channels: Number of channels for the final conv operation
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        :param se_type: SqueezeExcitation type, one of [None, "cSE", "sSE", "scSE"]
        """

        super(_StemRise, self).__init__(prefix=name + "_")

        self.body = HybridSequential(prefix="")

        with self.name_scope():
            # add all layers to the stem
            self.body.add(
                Conv2D(channels=channels // 2,
                       kernel_size=(3, 3),
                       padding=(1, 1),
                       use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
            if se_type:
                if se_type == "cSE":
                    # apply channel-wise squeeze excitation
                    self.body.add(
                        _ChannelSqueezeExcitation("se0", channels // 2, 16,
                                                  act_type))
                elif se_type == "sSE":
                    self.body.add(_SpatialSqueezeExcitation("se0"))
                elif se_type == "scSE":
                    self.body.add(
                        _SpatialChannelSqueezeExcitation(
                            "se0", channels // 2, 2, act_type))
                else:
                    # fixed: message previously lacked the closing "]" bracket
                    raise Exception(
                        'Unsupported Squeeze Excitation Module: Choose either [None, "cSE", "sSE", "scSE"]'
                    )
            self.body.add(
                Conv2D(channels=channels,
                       kernel_size=(3, 3),
                       padding=(1, 1),
                       use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
Example #9
0
    def __init__(self,
                 name,
                 channels=1,
                 fc0=256,
                 bn_mom=0.9,
                 act_type="relu",
                 se_type=None):
        """
        Definition of the value head proposed by the alpha zero authors

        :param name: name prefix for all blocks
        :param channels: Number of channels for 1st conv operation in branch 0
        :param fc0: Number of units in Dense/Fully-Connected layer
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        :param se_type: SqueezeExcitation type, one of [None, "cSE", "sSE", "csSE"] for no
         squeeze excitation, channel-wise, spatial, or channel-spatial squeeze excitation
        """

        super(_ValueHeadAlphaZero, self).__init__(prefix=name + "_")

        self.body = HybridSequential(prefix="")

        with self.name_scope():
            self.body.add(
                Conv2D(channels=channels, kernel_size=(1, 1), use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))

            if se_type:
                if se_type == "cSE":
                    # apply channel-wise squeeze excitation
                    self.body.add(
                        _ChannelSqueezeExcitation("se0", channels, 16,
                                                  act_type))
                elif se_type == "sSE":
                    self.body.add(_SpatialSqueezeExcitation("se0"))
                elif se_type == "csSE":
                    self.body.add(
                        _SpatialChannelSqueezeExcitation(
                            "se0", channels, 1, act_type))
                else:
                    # fixed: message previously lacked the closing "]" bracket
                    raise Exception(
                        'Unsupported Squeeze Excitation Module: Choose either [None, "cSE", "sSE", "csSE"]'
                    )

            self.body.add(get_act(act_type))
            self.body.add(Flatten())
            self.body.add(Dense(units=fc0))
            self.body.add(get_act(act_type))
            # scalar value output squashed to [-1, 1]
            self.body.add(Dense(units=1))
            self.body.add(get_act("tanh"))
Example #10
0
    def __init__(self, name, nb_act_maps, ratio=16, act_type='relu'):
        """
        Channel squeeze-excitation: average pooling followed by a bottleneck
        MLP that emits per-channel sigmoid weights.

        :param name: prefix for all blocks
        :param nb_act_maps: number of activation maps (channels) to re-weight
        :param ratio: reduction ratio for the hidden dense layer
        :param act_type: activation type for the hidden layer
        """
        super(_SqueezeExcitation, self).__init__(prefix=name)

        self.nb_act_maps = nb_act_maps
        self.body = HybridSequential(prefix='')

        hidden_units = nb_act_maps // ratio
        with self.name_scope():
            # squeeze: pool the spatial plane (assumes 8x8 input -- TODO confirm)
            self.body.add(AvgPool2D(pool_size=8))
            # excite: bottleneck MLP with a sigmoid gate
            self.body.add(Dense(hidden_units))
            self.body.add(get_act(act_type))
            self.body.add(Dense(nb_act_maps))
            self.body.add(get_act('sigmoid'))
Example #11
0
    def __init__(self, unit_name, channels, bn_mom, act_type, se_type="scSE"):
        """
        Pre-activation residual body: optional excitation module followed by
        BN -> act -> conv -> BN -> act -> conv.

        :param unit_name: Unit name of the residual block (only used for description (string))
        :param channels: Number of channels used in the conv-operations
        :param bn_mom: Batch normalization momentum
        :param act_type: Activation function to use
        :param se_type: excitation type, one of [None, "cSE", "sSE", "scSE", "GE+"]
        """
        super(ResidualBlockX, self).__init__(unit_name + "_")
        self.act_type = act_type
        self.unit_name = unit_name
        self.body = HybridSequential(prefix="")
        self.channels = channels

        with self.name_scope():
            if se_type:
                if se_type == "cSE":
                    # apply channel-wise squeeze excitation
                    self.body.add(
                        _ChannelSqueezeExcitation("se0", channels, 2,
                                                  act_type))
                elif se_type == "sSE":
                    self.body.add(_SpatialSqueezeExcitation("se0"))
                elif se_type == "scSE":
                    self.body.add(
                        _SpatialChannelSqueezeExcitation(
                            "se0", channels, 2, act_type))
                elif se_type == "GE+":
                    self.body.add(
                        _GatherExcitePlus("ge0", channels, 2, act_type))
                else:
                    # fixed: message previously lacked the closing quote and "]"
                    raise Exception(
                        'Unsupported Squeeze Excitation Module: Choose either '
                        '[None, "cSE", "sSE", "scSE", "GE+"]')
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(self.act_type))
            self.body.add(
                Conv2D(channels=channels,
                       kernel_size=3,
                       padding=1,
                       groups=1,
                       use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(self.act_type))
            self.body.add(
                Conv2D(channels=channels,
                       kernel_size=3,
                       padding=1,
                       groups=1,
                       use_bias=False))
    def __init__(self,
                 name,
                 channels=2,
                 n_labels=4992,
                 bn_mom=0.9,
                 act_type="relu",
                 select_policy_from_plane=False):
        """
        Definition of the policy head proposed by the alpha zero authors
        (the original docstring said "value head", which was a copy-paste error).

        :param name: name prefix for all blocks
        :param channels: Number of channels for 1st conv operation in branch 0
        :param n_labels: Number of policy output labels (only used for the dense variant)
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        :param select_policy_from_plane: If True, the policy is read directly from
         the flattened convolution planes instead of a dense projection
        """

        super(_PolicyHeadAlphaZero, self).__init__(prefix=name + "_")

        self.body = HybridSequential(prefix="")
        self.select_policy_from_plane = select_policy_from_plane

        with self.name_scope():
            if self.select_policy_from_plane:
                # plane variant: two 3x3 convs, policy taken from flattened planes
                self.body.add(
                    Conv2D(channels=256,
                           padding=1,
                           kernel_size=(3, 3),
                           use_bias=False))
                self.body.add(BatchNorm(momentum=bn_mom))
                self.body.add(get_act(act_type))
                self.body.add(
                    Conv2D(channels=channels,
                           padding=1,
                           kernel_size=(3, 3),
                           use_bias=False))
                self.body.add(Flatten())
            else:
                # dense variant: 1x1 conv then a dense layer over n_labels
                self.body.add(
                    Conv2D(channels=channels,
                           kernel_size=(1, 1),
                           use_bias=False))
                self.body.add(BatchNorm(momentum=bn_mom))
                self.body.add(get_act(act_type))

                self.body.add(Flatten())
                self.body.add(Dense(units=n_labels))
Example #13
0
    def __init__(self, name, in_ch, ch, res_scale_fac, act_type, bn_mom, use_se, shortcut, pool_type):
        """
        RISE block type B: three parallel branches (pooled + factorized convs,
        factorized convs, 1x1 conv) concatenated along the channel axis.

        :param name: name prefix for all blocks
        :param in_ch: number of input channels
        :param ch: number of output channels (forwarded to the base class)
        :param res_scale_fac: scalar applied to the residual activation maps (base class)
        :param act_type: activation type to use
        :param bn_mom: batch normalization momentum parameter (base class)
        :param use_se: whether the base class applies squeeze-excitation
        :param shortcut: whether the base class enables the shortcut connection
        :param pool_type: pooling type used in branch 0
        """
        super(_RiseBlockB, self).__init__(name, ch, res_scale_fac, act_type, bn_mom, use_se, shortcut)

        self.body = HybridSequential(prefix='')

        # entry point for all branches
        self.branches = HybridConcurrent(axis=1, prefix='')

        # per-branch channel configuration
        ch_0_0 = 32
        ch_0_1 = 96
        ch_0_2 = 96

        ch_1_0 = 32
        ch_1_1 = 96
        ch_1_2 = 96

        ch_2_0 = 192

        with self.name_scope():
            # branch 0: pool -> 1x1 conv -> factorized 3x1/1x3 convs -> upsample
            # NOTE(review): paddings (0,1) for the (3,1) kernel and (1,0) for the
            # (1,3) kernel look transposed, but the two offsets cancel so the
            # final spatial size is preserved -- confirm the asymmetry is intended.
            self.b_0 = HybridSequential()
            self.b_0.add(get_pool(pool_type, pool_size=(2, 2), strides=(2, 2)))
            self.b_0.add(Conv2D(channels=ch_0_0, kernel_size=(1, 1), in_channels=in_ch))
            self.b_0.add(get_act(act_type))
            self.b_0.add(
                Conv2D(channels=ch_0_1, kernel_size=(3, 1), padding=(0, 1), in_channels=ch_0_0, use_bias=False))
            self.b_0.add(
                Conv2D(channels=ch_0_2, kernel_size=(1, 3), padding=(1, 0), in_channels=ch_0_1, use_bias=False))
            self.b_0.add(_UpsampleBlock('upsample0', scale=2))

            # branch 1: same factorized convs, without pooling/upsampling
            self.b_1 = HybridSequential()
            self.b_1.add(Conv2D(channels=ch_1_0, kernel_size=(1, 1), in_channels=in_ch))
            self.b_1.add(get_act(act_type))
            self.b_1.add(
                Conv2D(channels=ch_1_1, kernel_size=(3, 1), padding=(0, 1), in_channels=ch_1_0, use_bias=False))
            self.b_1.add(
                Conv2D(channels=ch_1_2, kernel_size=(1, 3), padding=(1, 0), in_channels=ch_1_1, use_bias=False))

            # branch 2: plain 1x1 projection
            self.b_2 = HybridSequential()
            self.b_2.add(Conv2D(channels=ch_2_0, kernel_size=(1, 1), in_channels=in_ch, use_bias=False))

            # concatenate all branches and add them to the body
            self.branches.add(self.b_0)
            self.branches.add(self.b_1)
            self.branches.add(self.b_2)
            self.body.add(self.branches)
Example #14
0
    def __init__(self, name):
        """
        Spatial squeeze-excitation: a 1x1 convolution producing a single-channel
        sigmoid mask that re-weights every spatial position.

        :param name: prefix for the block
        """
        super(_SpatialSqueezeExcitation, self).__init__(prefix=name)

        self.body = HybridSequential(prefix="")

        with self.name_scope():
            # squeeze the channels down to one map, then gate with a sigmoid
            squeeze_conv = Conv2D(1, kernel_size=1, padding=0, use_bias=True)
            self.body.add(squeeze_conv)
            self.body.add(get_act("sigmoid"))
Example #15
0
    def __init__(self,
                 unit_name,
                 cardinality,
                 channels,
                 bn_mom,
                 act_type,
                 res_scale_fac,
                 use_se=True):
        """
        ResNeXt-style residual body: plain 3x3 conv followed by a grouped 3x3
        conv and an optional squeeze-excitation module.

        :param unit_name: Unit name of the residual block (only used for description (string))
        :param cardinality: Number of groups for the grouped convolution
        :param channels: Number of channels used in the conv-operations
        :param bn_mom: Batch normalization momentum
        :param act_type: Activation function to use
        :param res_scale_fac: Scalar applied to the residual activation maps
        :param use_se: Whether to append a squeeze-excitation module
        """
        super(ResidualBlockX, self).__init__(unit_name + "_")
        self.act_type = act_type
        self.unit_name = unit_name
        self.res_scale_fac = res_scale_fac
        self.body = HybridSequential(prefix="")

        with self.name_scope():
            self.body.add(
                Conv2D(channels=channels,
                       kernel_size=3,
                       padding=1,
                       use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(self.act_type))

            self.body.add(
                Conv2D(channels=channels,
                       kernel_size=3,
                       padding=1,
                       groups=cardinality,
                       use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))

            if use_se:  # idiomatic truthiness check instead of "is True"
                # apply squeeze excitation
                self.body.add(_SqueezeExcitation("se0", channels, 16,
                                                 act_type))

            self.act0 = get_act(act_type)
Example #16
0
    def __init__(self, name, ch, res_scale_fac=0.2, act_type="relu", bn_mom=0.9, use_se=True, shortcut=True):
        """
        Shared state for Inception-ResNet blocks: BN, activation and an optional
        channel squeeze-excitation module.

        :param name: name prefix for all blocks
        :param ch: number of channels entering the block
        :param res_scale_fac: scalar applied to the residual activation maps
        :param act_type: activation type to use
        :param bn_mom: batch normalization momentum parameter
        :param use_se: whether to attach a channel squeeze-excitation module
        :param shortcut: whether the shortcut connection is enabled
        """
        super(_InceptionResnetBlock, self).__init__(prefix=name)
        self.shortcut = shortcut
        self.body = HybridSequential(prefix="")
        self.block_name = name
        self.res_scale_fac = res_scale_fac
        self.use_se = use_se
        # fixed: removed the dead "self.bn0 = self.act0 = self.se0 = None" line
        # whose bn0/act0 values were immediately overwritten below
        self.bn0 = BatchNorm(momentum=bn_mom, prefix="%s_bn0" % name, in_channels=ch)
        self.act0 = get_act(act_type, prefix="%s_%s0" % (name, act_type))

        # se0 stays None when squeeze-excitation is disabled
        self.se0 = _ChannelSqueezeExcitation("%s_se0" % name, ch, 16, act_type) if use_se else None
Example #17
0
    def __init__(self, name, ch, res_scale_fac=0.2, act_type='relu', bn_mom=0.9, use_se=True, shortcut=True):
        """
        Shared state for Inception-ResNet blocks: BN, activation and an optional
        squeeze-excitation module.

        :param name: name prefix for all blocks
        :param ch: number of channels entering the block
        :param res_scale_fac: scalar applied to the residual activation maps
        :param act_type: activation type to use
        :param bn_mom: batch normalization momentum parameter
        :param use_se: whether to attach a squeeze-excitation module
        :param shortcut: whether the shortcut connection is enabled
        """
        super(_InceptionResnetBlock, self).__init__(prefix=name)

        self.shortcut = shortcut
        self.body = None  # populated by the concrete subclass
        self.block_name = name
        self.res_scale_fac = res_scale_fac
        self.use_se = use_se

        # fixed: removed the dead None pre-assignments of bn0/act0 that were
        # immediately overwritten; replaced "use_se is True" with truthiness
        self.bn0 = BatchNorm(momentum=bn_mom, prefix='%s_bn0' % name, in_channels=ch)
        self.act0 = get_act(act_type, prefix='%s_%s0' % (name, act_type))

        # se0 stays None when squeeze-excitation is disabled
        self.se0 = _SqueezeExcitation('%s_se0' % name, ch, 16, act_type) if use_se else None
Example #18
0
    def __init__(
        self,
        name,
        in_ch,
        ch_0_0=32,
        ch_1_0=32,
        ch_1_1=32,
        ch_2_0=32,
        ch_2_1=48,
        ch_2_2=64,
        ch=384,
        bn_mom=0.9,
        act_type="relu",
        res_scale_fac=0.2,
        use_se=True,
        shortcut=True,
    ):  # NOTE: pylint "too many local variables (16/15)" is accepted here
        """
        Definition of the InceptionResnetA block

        :param name: name prefix for all blocks
        :param in_ch: Number of input channels entering the block
        :param ch_0_0: Number of channels for 1st conv operation in branch 0
        :param ch_1_0: Number of channels for 1st conv operation in branch 1
        :param ch_1_1: Number of channels for 2nd conv operation in branch 1
        :param ch_2_0: Number of channels for 1st conv operation in branch 2
        :param ch_2_1: Number of channels for 2nd conv operation in branch 2
        :param ch_2_2: Number of channels for 3rd conv operation in branch 2
        :param ch: Number of channels for conv operation after concatenating branches (no act is applied here)
        :param bn_mom: Batch normalization momentum parameter
        :param act_type: Activation type to use
        :param res_scale_fac: Constant multiply scalar which is applied to the residual activations maps
        :param use_se: Whether squeeze-excitation is applied (handled by the base class)
        :param shortcut: Decide weather to enable a shortcut connection
        """

        super(_InceptionResnetA,
              self).__init__(name, ch, res_scale_fac, act_type, bn_mom, use_se,
                             shortcut)
        self.body = HybridSequential(prefix="")
        self.branches = HybridConcurrent(
            axis=1, prefix="")  # entry point for all branches
        # branch 0 of block type A: single 1x1 conv
        self.b_0 = HybridSequential()
        self.b_0.add(
            Conv2D(channels=ch_0_0,
                   kernel_size=(1, 1),
                   prefix="%s_0_conv0" % name,
                   in_channels=in_ch))
        self.b_0.add(get_act(act_type, prefix="%s_0_%s0" % (name, act_type)))
        # branch 1 of block type A: 1x1 conv followed by a 3x3 conv
        self.b_1 = HybridSequential()
        self.b_1.add(
            Conv2D(channels=ch_1_0,
                   kernel_size=(1, 1),
                   prefix="%s_1_conv0" % name,
                   in_channels=in_ch))
        self.b_1.add(get_act(act_type, prefix="%s_1_%s0" % (name, act_type)))
        self.b_1.add(
            Conv2D(channels=ch_1_1,
                   kernel_size=(3, 3),
                   padding=(1, 1),
                   prefix="%s_1_conv1" % name,
                   in_channels=ch_1_0))
        self.b_1.add(get_act(act_type, prefix="%s_1_%s1" % (name, act_type)))
        # branch 2 of block type A: 1x1 conv followed by two 3x3 convs
        self.b_2 = HybridSequential()
        self.b_2.add(
            Conv2D(channels=ch_2_0,
                   kernel_size=(1, 1),
                   prefix="%s_2_conv0" % name,
                   in_channels=in_ch))
        self.b_2.add(get_act(act_type, prefix="%s_2_%s0" % (name, act_type)))
        self.b_2.add(
            Conv2D(channels=ch_2_1,
                   kernel_size=(3, 3),
                   padding=(1, 1),
                   prefix="%s_2_conv1" % name,
                   in_channels=ch_2_0))
        self.b_2.add(get_act(act_type, prefix="%s_2_%s1" % (name, act_type)))
        self.b_2.add(
            Conv2D(channels=ch_2_2,
                   kernel_size=(3, 3),
                   padding=(1, 1),
                   prefix="%s_2_conv2" % name,
                   in_channels=ch_2_1))
        self.b_2.add(get_act(act_type, prefix="%s_2_%s2" % (name, act_type)))
        # concatenate all branches and add them to the body
        self.branches.add(self.b_0)
        self.branches.add(self.b_1)
        self.branches.add(self.b_2)
        self.body.add(self.branches)
        # apply a single CNN layer without activation function
        self.body.add(
            Conv2D(
                channels=ch,
                kernel_size=(1, 1),
                prefix="%s_conv0" % name,
                in_channels=ch_0_0 + ch_1_1 + ch_2_2,
                use_bias=False,
            ))
Example #19
0
    def __init__(self, name, in_ch, ch, res_scale_fac, act_type, bn_mom,
                 use_se, shortcut, pool_type):
        """
        Inception-style block: three parallel branches are evaluated on the
        input, concatenated along the channel axis and projected back to
        ``ch`` channels with a final 1x1 convolution.

        Branch 0: POOL 2x2 -> Conv 1x1 (ch_0_0) -> Conv 3x3 (ch_0_1) -> upsample x2
        Branch 1: POOL 4x4 -> Conv 1x1 (ch_1_0) -> Conv 3x3 (ch_1_1) -> upsample x4
        Branch 2: Conv 1x1 (ch_2_0) -> Conv 3x3 (ch_2_1) -> Conv 3x3 (ch_2_2)

        The down-sampling branches restore the original spatial resolution via
        nearest-neighbour upsampling so that all branches can be concatenated.

        :param name: Name prefix for all blocks
        :param in_ch: Number of input channels
        :param ch: Number of output channels of the final 1x1 projection
        :param res_scale_fac: Residual scaling factor (forwarded to the base class)
        :param act_type: Activation type to use
        :param bn_mom: Batch normalization momentum (forwarded to the base class)
        :param use_se: True, if squeeze excitation shall be used (forwarded to the base class)
        :param shortcut: True, if a shortcut connection shall be used (forwarded to the base class)
        :param pool_type: Pooling type used by the down-sampling branches
        """

        super(_RiseBlockA, self).__init__(name, ch, res_scale_fac, act_type,
                                          bn_mom, use_se, shortcut)

        self.body = HybridSequential(prefix="")

        # entry point for all branches; concatenates results along the channel axis
        self.branches = HybridConcurrent(axis=1, prefix="")

        # per-branch channel definitions
        ch_0_0 = 42
        ch_0_1 = 64

        ch_1_0 = 32
        ch_1_1 = 32

        ch_2_0 = 64
        ch_2_1 = 96
        ch_2_2 = 128

        with self.name_scope():
            # branch 0: down-sample by 2, convolve, restore the spatial size
            self.b_0 = HybridSequential()
            self.b_0.add(get_pool(pool_type, pool_size=(2, 2), strides=(2, 2)))
            self.b_0.add(
                Conv2D(channels=ch_0_0, kernel_size=(1, 1), in_channels=in_ch))
            self.b_0.add(get_act(act_type))
            self.b_0.add(
                Conv2D(channels=ch_0_1,
                       kernel_size=(3, 3),
                       padding=(1, 1),
                       in_channels=ch_0_0,
                       use_bias=True))
            self.b_0.add(get_act(act_type))
            self.b_0.add(_UpsampleBlock("upsample0", scale=2))

            # branch 1: down-sample by 4, convolve, restore the spatial size
            self.b_1 = HybridSequential()
            self.b_1.add(get_pool(pool_type, pool_size=(4, 4), strides=(4, 4)))
            self.b_1.add(
                Conv2D(channels=ch_1_0,
                       kernel_size=(1, 1),
                       in_channels=in_ch,
                       use_bias=True))
            self.b_1.add(get_act(act_type))
            self.b_1.add(
                Conv2D(channels=ch_1_1,
                       kernel_size=(3, 3),
                       padding=(1, 1),
                       in_channels=ch_1_0,
                       use_bias=True))
            self.b_1.add(get_act(act_type))
            # fixed: use a distinct prefix ("upsample1") instead of reusing
            # branch 0's "upsample0" name
            self.b_1.add(_UpsampleBlock("upsample1", scale=4))

            # branch 2: full-resolution 1x1 -> 3x3 -> 3x3 convolution chain
            self.b_2 = HybridSequential()
            self.b_2.add(
                Conv2D(channels=ch_2_0, kernel_size=(1, 1), in_channels=in_ch))
            self.b_2.add(get_act(act_type))
            self.b_2.add(
                Conv2D(channels=ch_2_1,
                       kernel_size=(3, 3),
                       padding=(1, 1),
                       in_channels=ch_2_0,
                       use_bias=True))
            self.b_2.add(get_act(act_type))
            self.b_2.add(
                Conv2D(channels=ch_2_2,
                       kernel_size=(3, 3),
                       padding=(1, 1),
                       in_channels=ch_2_1,
                       use_bias=True))
            self.b_2.add(get_act(act_type))

            # concatenate all branches and add them to the body
            self.branches.add(self.b_0)
            self.branches.add(self.b_1)
            self.branches.add(self.b_2)
            self.body.add(self.branches)
            # project the concatenated channels back down to ch with a single
            # 1x1 convolution without an activation function
            self.body.add(
                Conv2D(
                    channels=ch,
                    kernel_size=(1, 1),
                    prefix="%s_conv0" % name,
                    in_channels=ch_0_1 + ch_1_1 + ch_2_2,
                    use_bias=False,
                ))
    def __init__(self,
                 name,
                 nb_in_channels,
                 groups=2,
                 se_type="cSE",
                 use_residual=True,
                 act_type="relu",
                 **kwargs):
        """
        Shuffle-net style bottleneck: two 1x1 convolutions (the second one
        grouped) on the right half of the channels, optionally followed by a
        squeeze-excitation module and a channel shuffle.

        :param name: Layer name
        :param nb_in_channels: Number of input channels
        :param groups: Number of groups to use for channel shuffling
        :param se_type: Squeeze excitation type, one of [None, "cSE", "sSE", "scSE"]
        :param use_residual: True, if a residual connection shall be used
        :param act_type: Type for the activation function
        :param kwargs: Additional keyword arguments (ignored)
        """

        super(_ShuffleBlockNeck, self).__init__(prefix=name + "_")

        self.in_channels = nb_in_channels
        # only the right half of the channels is processed by the body
        self.nb_right_channels = nb_in_channels // 2

        self.groups = groups

        self.body = HybridSequential(prefix="")
        self.use_residual = use_residual
        with self.name_scope():
            self.body.add(
                Conv2D(channels=self.nb_right_channels,
                       kernel_size=1,
                       strides=1,
                       padding=0,
                       use_bias=False))
            self.body.add(BatchNorm())
            self.body.add(get_act(act_type))
            self.body.add(
                Conv2D(channels=self.nb_right_channels,
                       kernel_size=1,
                       strides=1,
                       padding=0,
                       # NOTE(review): hard-coded groups=3 ignores the `groups`
                       # parameter and requires nb_right_channels % 3 == 0 --
                       # confirm this is intentional
                       groups=3,
                       use_bias=False))
            self.body.add(BatchNorm())
            self.body.add(get_act(act_type))
            if se_type:
                if se_type == "cSE":
                    # channel-wise squeeze excitation
                    self.body.add(
                        _ChannelSqueezeExcitation("se0",
                                                  self.nb_right_channels, 2,
                                                  act_type))
                elif se_type == "sSE":
                    # spatial squeeze excitation
                    self.body.add(_SpatialSqueezeExcitation("se0"))
                elif se_type == "scSE":
                    # combined spatial-channel squeeze excitation
                    self.body.add(
                        _SpatialChannelSqueezeExcitation(
                            "se0", self.nb_right_channels, 2, act_type))
                else:
                    # fixed: close the bracket in the error message
                    raise Exception(
                        'Unsupported Squeeze Excitation Module: Choose either [None, "cSE", "sSE", "scSE"]'
                    )

        if self.use_residual:
            self.act = get_act(act_type)
        # attribute name keeps the original (misspelled) spelling because other
        # methods of this class may reference it
        self.shufller = _ShuffleChannelsBlock(groups)
Exemple #21
0
    def __init__(
        self,
        unit_name,
        cardinality,
        channels,
        bn_mom=0.9,
        act_type="relu",
        use_se=True,
        res_scale_fac=0.2,
        dim_match=True,
    ):
        """
        ResNeXt-style pre-activation bottleneck: 1x1 reduction, grouped 3x3
        convolution and a 1x1 expansion, optionally followed by squeeze
        excitation. When the input and output dimensions differ, an expander
        branch projects the input with a 1x1 convolution.

        :param unit_name: Unit name of the residual block (only used for description (string))
        :param cardinality: Number of groups for the grouped 3x3 convolution
        :param channels: Number of channels used in the conv-operations
        :param bn_mom: Batch normalization momentum
        :param act_type: Activation function to use
        :param use_se: True, if squeeze excitation shall be applied
        :param res_scale_fac: Scaling factor for the residual branch
        :param dim_match: True, if input and output dimensions already match
        """
        super(_ResidualBlockXBottleneck, self).__init__(prefix=unit_name + "_")
        self.unit_name = unit_name
        self.res_scale_fac = res_scale_fac
        self.use_se = use_se
        self.dim_match = dim_match
        self.body = HybridSequential(prefix="")

        # number of channels inside the bottleneck (half of the output width)
        bottleneck_ch = int(channels // 2)

        with self.name_scope():
            # pre-activation entry
            self.body.add(get_act(act_type))
            # 1x1 reduction to the bottleneck width
            self.body.add(
                Conv2D(bottleneck_ch, kernel_size=1, padding=0,
                       use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
            # grouped 3x3 convolution (cardinality groups)
            self.body.add(
                Conv2D(bottleneck_ch, kernel_size=3, padding=1,
                       groups=cardinality, use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
            # 1x1 expansion back to the full channel count
            self.body.add(
                Conv2D(channels=channels, kernel_size=1, use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))

            if use_se is True:
                # apply squeeze excitation with reduction ratio 16
                self.body.add(
                    _SqueezeExcitation("se0", channels, 16, act_type))

            self.act0 = get_act(act_type, prefix="%s1" % act_type)

            if self.dim_match is False:
                # project the input to the output dimensionality
                self.expander = HybridSequential(prefix="")
                self.expander.add(
                    Conv2D(channels=channels, kernel_size=1, use_bias=False,
                           prefix="expander_conv"))
                self.expander.add(BatchNorm())
Exemple #22
0
    def __init__(self,
                 unit_name,
                 channels,
                 bn_mom=0.9,
                 act_type="relu",
                 se_type="scSE",
                 dim_match=True):
        """
        Pre-activation residual block: an optional squeeze-excitation module on
        the input, followed by a 1x1 -> grouped 3x3 -> 1x1 convolution body.
        When input and output dimensions differ, an expander branch projects
        the input with a 1x1 convolution.

        :param unit_name: Unit name of the residual block (only used for description (string))
        :param channels: Number of channels used in the conv-operations
        :param bn_mom: Batch normalization momentum
        :param act_type: Activation function to use
        :param se_type: Squeeze excitation type, one of [None, "cSE", "sSE", "scSE", "GE+"]
        :param dim_match: True, if input and output dimensions already match
        """
        super(_ResidualBlockXBottleneck, self).__init__(prefix=unit_name + "_")
        self.unit_name = unit_name
        # NOTE: stores the se_type string under the historical attribute name
        # to stay backward compatible with code reading self.use_se
        self.use_se = se_type
        self.dim_match = dim_match
        self.body = HybridSequential(prefix="")

        with self.name_scope():

            if se_type:
                if se_type == "cSE":
                    # channel-wise squeeze excitation
                    self.body.add(
                        _ChannelSqueezeExcitation("se0", channels, 2,
                                                  act_type))
                elif se_type == "sSE":
                    # spatial squeeze excitation
                    self.body.add(_SpatialSqueezeExcitation("se0"))
                elif se_type == "scSE":
                    # combined spatial-channel squeeze excitation
                    self.body.add(
                        _SpatialChannelSqueezeExcitation(
                            "se0", channels, 2, act_type))
                elif se_type == "GE+":
                    # gather-excite variant
                    self.body.add(
                        _GatherExcitePlus("ge0", channels, 2, act_type))
                else:
                    # fixed: message now lists all supported options (including
                    # "GE+") and closes the bracket
                    raise Exception(
                        'Unsupported Squeeze Excitation Module: Choose either [None, "cSE", "sSE", "scSE", "GE+"]'
                    )

            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
            # 1x1 convolution
            self.body.add(
                Conv2D(channels, kernel_size=1, padding=0, use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
            # grouped 3x3 convolution (2 groups)
            self.body.add(
                Conv2D(channels,
                       kernel_size=3,
                       padding=1,
                       groups=2,
                       use_bias=False))
            self.body.add(BatchNorm(momentum=bn_mom))
            self.body.add(get_act(act_type))
            # final 1x1 projection without activation
            self.body.add(
                Conv2D(channels=channels,
                       kernel_size=1,
                       padding=0,
                       use_bias=False))

            if not self.dim_match:
                # project the input to the output dimensionality
                self.expander = HybridSequential(prefix="")
                self.expander.add(
                    Conv2D(channels=channels,
                           kernel_size=1,
                           use_bias=False,
                           prefix="expander_conv"))
                self.expander.add(BatchNorm())