Example #1
    def __init__(self, in_planes, planes=None, out_planes=None, stride=1,
                 activ='relu', use_batch_norm=True, upsample_block=False):
        super(ResBasicBlock, self).__init__()

        if planes is None:
            planes = in_planes // self.expansion

        if out_planes is None:
            out_planes = planes * self.expansion

        norm = nn.BatchNorm2d if use_batch_norm else nn.InstanceNorm2d

        self.residual = None
        if stride != 1 or in_planes != out_planes:
            self.residual = nn.Sequential(
                (upsample3x3 if upsample_block else conv1x1)(in_planes, out_planes, stride),
                norm(out_planes)
            )

        conv = upsample3x3 if upsample_block else conv3x3

        self.activ = activation_by_name(activ)

        self.conv1 = conv(in_planes, planes, 1 if upsample_block else stride)
        self.bn1 = norm(planes)
        self.conv2 = conv(planes, out_planes, stride if upsample_block else 1)
        self.bn2 = norm(out_planes)
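The snippets on this page rely on several helpers that are not shown. Below is a minimal sketch of plausible definitions, assuming torchvision-style convolution wrappers, transposed convolutions for the upsample variants, and a simple name-to-module lookup; every body here is an assumption, not the original implementation.

import torch.nn as nn

def conv1x1(in_planes, out_planes, stride=1):
    # 1x1 convolution (projection shortcut), torchvision ResNet convention
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)

def conv3x3(in_planes, out_planes, stride=1):
    # 3x3 convolution with padding
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)

def conv7x7(in_planes, out_planes, stride=1):
    # 7x7 stem convolution
    return nn.Conv2d(in_planes, out_planes, kernel_size=7, stride=stride, padding=3, bias=False)

def upsample3x3(in_planes, out_planes, stride=1):
    # transposed 3x3 convolution; doubles the spatial size when stride=2
    return nn.ConvTranspose2d(in_planes, out_planes, kernel_size=3, stride=stride,
                              padding=1, output_padding=stride - 1, bias=False)

def upsample7x7(in_planes, out_planes, stride=1):
    # transposed 7x7 convolution for the final upsampling stage
    return nn.ConvTranspose2d(in_planes, out_planes, kernel_size=7, stride=stride,
                              padding=3, output_padding=stride - 1, bias=False)

def activation_by_name(name):
    # map an activation name such as 'relu' or 'lrelu' to a module instance
    return {'relu': nn.ReLU(inplace=True), 'lrelu': nn.LeakyReLU(0.2, inplace=True)}[name]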
Example #2
    def __init__(self, layer_dims, activ='relu', last_active=False, dropout_rate=0):
        super(MLP, self).__init__()

        layers = []
        in_dim, layer_dims = layer_dims[0], layer_dims[1:]
        for dim in layer_dims[:-1]:
            layers += [
                nn.Linear(in_dim, dim),
                activation_by_name(activ),
                nn.Dropout(dropout_rate)
            ]
            in_dim = dim

        layers += [nn.Linear(in_dim, layer_dims[-1])]
        if last_active:
            layers += [activation_by_name(activ)]
        self.layer = nn.Sequential(*layers)
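A hypothetical usage of this constructor, assuming the full MLP class: layer_dims lists the input dimension followed by the hidden and output dimensions, and self.layer holds the resulting nn.Sequential, so it can be applied directly.

import torch

# 784 -> 256 -> 64 -> 10, with ReLU and dropout between hidden layers
mlp = MLP([784, 256, 64, 10], activ='relu', dropout_rate=0.1)
x = torch.randn(32, 784)
out = mlp.layer(x)      # self.layer built in __init__; out.shape == (32, 10)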
Example #3
    def __init__(self, block, layer_sizes, activ='relu', use_batch_norm=True):
        super(Encoder, self).__init__()

        norm = nn.BatchNorm2d if use_batch_norm else nn.InstanceNorm2d

        in_planes = 64
        self.conv1 = conv7x7(1, in_planes, stride=2)
        self.bn1 = norm(in_planes)
        self.activ1 = activation_by_name(activ)
        self.conv2 = conv3x3(in_planes, in_planes, stride=2)
        self.bn2 = norm(in_planes)
        self.activ2 = activation_by_name(activ)

        layers = [self._make_layer(block, in_planes, in_planes, 1, layer_sizes[0], activ, use_batch_norm)]
        for layer_size in layer_sizes[1:]:
            layers += [self._make_layer(block, in_planes, in_planes * 2, 2, layer_size, activ, use_batch_norm)]
            in_planes = in_planes * 2

        self.layer = nn.Sequential(*layers)
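A hypothetical forward pass, assuming ResBasicBlock from Example #1, a ResNet-18-like configuration, a 128x128 single-channel input (conv7x7(1, ...) implies one input channel), and that _make_layer stacks layer_size blocks with the given stride applied in the first block; the shape comments hold only under these assumptions.

import torch

encoder = Encoder(ResBasicBlock, layer_sizes=[2, 2, 2, 2], activ='relu')
x = torch.randn(8, 1, 128, 128)
h = encoder.activ1(encoder.bn1(encoder.conv1(x)))   # stem: (8, 64, 64, 64)
h = encoder.activ2(encoder.bn2(encoder.conv2(h)))   # (8, 64, 32, 32)
h = encoder.layer(h)                                # four stages: (8, 512, 4, 4)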
Example #4
    def __init__(self, in_planes, out_planes=None, stride=1,
                 activ='relu', use_batch_norm=True, upsample_block=False):
        super(SimpleBlock, self).__init__()

        if out_planes is None:
            out_planes = in_planes * self.expansion

        conv = upsample3x3 if upsample_block else conv3x3

        norm = nn.BatchNorm2d if use_batch_norm else nn.InstanceNorm2d

        self.conv1 = conv(in_planes, out_planes, stride)
        self.bn1 = norm(out_planes)
        self.activ1 = activation_by_name(activ)
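A hypothetical use of this block on its own, passing out_planes explicitly so the class-level expansion attribute (not shown in the snippet) is not needed; the forward pass is spelled out because only __init__ is listed here.

import torch

block = SimpleBlock(64, out_planes=128, stride=2)   # plain conv-norm-activation, downsampling
x = torch.randn(4, 64, 32, 32)
y = block.activ1(block.bn1(block.conv1(x)))         # (4, 128, 16, 16) under these assumptions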
Example #5
    def __init__(self, image_size, block, layer_sizes, in_planes=512, start_planes=512, activ='lrelu'):
        super(Decoder, self).__init__()

        self.image_size = image_size

        layers = [self._make_layer(block, in_planes, start_planes // 2, 2, layer_sizes[0], activ)]
        in_planes = start_planes // 2
        for layer_size in layer_sizes[1:-1]:
            layers += [self._make_layer(block, in_planes, in_planes // 2, 2, layer_size, activ)]
            in_planes = in_planes // 2
        layers += [self._make_layer(block, in_planes, in_planes, 1, layer_sizes[-1], activ)]
        self.layer = nn.Sequential(*layers)

        self.conv1 = upsample3x3(in_planes, out_planes=64, stride=2)
        self.bn1 = nn.BatchNorm2d(64)
        self.activ1 = activation_by_name(activ)
        self.conv2 = upsample7x7(in_planes=64, out_planes=1, stride=2)
        self.activ2 = nn.Tanh()
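A hypothetical instantiation mirroring the Encoder above, assuming a block built with upsample_block=True inside _make_layer: with start_planes=512 and four stages the channel count is halved per upsampling stage (512 -> 256 -> 128 -> 64), after which upsample3x3 and upsample7x7 take a 4x4 latent map up to a single-channel 128x128 image squashed to [-1, 1] by the final Tanh. The shape comments hold only under these assumptions.

import torch

decoder = Decoder(image_size=128, block=SimpleBlock, layer_sizes=[2, 2, 2, 2],
                  in_planes=512, start_planes=512, activ='lrelu')
z = torch.randn(8, 512, 4, 4)                        # e.g. the Encoder output from Example #3
h = decoder.layer(z)                                 # (8, 64, 32, 32)
h = decoder.activ1(decoder.bn1(decoder.conv1(h)))    # (8, 64, 64, 64)
img = decoder.activ2(decoder.conv2(h))               # (8, 1, 128, 128), values in [-1, 1]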