Example #1
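All of the snippets below are __init__ methods of PyTorch nn.Module subclasses. They assume roughly the following imports; RMCCell, MultiHeadSelfAttention, and LSTMCellLayerNorm are project-specific modules that are not defined here:

    import torch.nn as nn
    from torch.nn import (Conv2d, Linear, LSTMCell, BatchNorm1d,
                          BatchNorm2d, GroupNorm, Identity, init)
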
    def __init__(self, in_shape, normalize):
        super().__init__()
        bias = not normalize  # conv bias is redundant when a norm layer follows
        self._nb_output_channel = 3200
        self.conv1 = Conv2d(in_shape[0], 32, 7, stride=2, padding=1, bias=bias)
        self.conv2 = Conv2d(32, 64, 3, stride=2, padding=1, bias=bias)
        self.conv3 = Conv2d(64, 64, 3, stride=2, padding=1, bias=bias)
        self.conv4 = Conv2d(64, 128, 3, stride=2, padding=1, bias=bias)

        if normalize:
            self.bn1 = BatchNorm2d(32)
            self.bn2 = BatchNorm2d(64)
            self.bn3 = BatchNorm2d(64)
            self.bn4 = BatchNorm2d(128)
        else:
            self.bn1 = Identity()
            self.bn2 = Identity()
            self.bn3 = Identity()
            self.bn4 = Identity()

        # rescale the default init for ReLU activations (gain = sqrt(2))
        relu_gain = init.calculate_gain('relu')
        self.conv1.weight.data.mul_(relu_gain)
        self.conv2.weight.data.mul_(relu_gain)
        self.conv3.weight.data.mul_(relu_gain)
        self.conv4.weight.data.mul_(relu_gain)
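
With 84x84 inputs (an assumption, e.g. stacked Atari frames), the four stride-2 convolutions reduce the spatial size to 5x5, so the flattened output is 128 * 5 * 5 = 3200, matching _nb_output_channel. A minimal sketch of the matching forward pass (hypothetical; the original forward() is not shown):

    import torch.nn.functional as F

    def forward(self, x):
        x = F.relu(self.bn1(self.conv1(x)))
        x = F.relu(self.bn2(self.conv2(x)))
        x = F.relu(self.bn3(self.conv3(x)))
        x = F.relu(self.bn4(self.conv4(x)))
        return x.view(x.size(0), -1)  # (batch, 3200) for 84x84 inputs
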
Example #2
    def __init__(self, nb_in_chan, output_shape_dict, normalize):
        self.embedding_size = 512
        super(RMC, self).__init__(self.embedding_size, output_shape_dict)
        bias = not normalize
        self.conv1 = Conv2d(
            nb_in_chan, 32, kernel_size=3, stride=2, padding=1, bias=bias
        )
        self.conv2 = Conv2d(
            32, 32, kernel_size=3, stride=2, padding=1, bias=bias
        )
        self.conv3 = Conv2d(
            32, 32, kernel_size=3, stride=2, padding=1, bias=bias
        )
        self.attention = RMCCell(100, 100, 34)
        self.conv4 = Conv2d(
            34, 8, kernel_size=3, stride=1, padding=1, bias=bias
        )
        # BATCH x 8 x 10 x 10
        self.linear = Linear(800, 512, bias=bias)

        if normalize:
            self.bn1 = BatchNorm2d(32)
            self.bn2 = BatchNorm2d(32)
            self.bn3 = BatchNorm2d(32)
            self.bn4 = BatchNorm2d(8)
            self.bn_linear = BatchNorm1d(512)
        else:
            self.bn1 = Identity()
            self.bn2 = Identity()
            self.bn3 = Identity()
            self.bn4 = Identity()
            self.bn_linear = Identity()
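
The numbers are consistent with 80x80 inputs (an assumption): three stride-2 convolutions give 10x10 = 100 positions for RMCCell(100, 100, 34), and conv4's 8 output channels at 10x10 yield the 800 features fed to the final linear layer. A quick sanity check:

    def conv_out(size, kernel, stride, padding):
        # standard conv output-size formula
        return (size + 2 * padding - kernel) // stride + 1

    size = 80
    for _ in range(3):
        size = conv_out(size, 3, 2, 1)
    print(size * size)      # 100
    print(8 * size * size)  # 800
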
Example #3
    def __init__(self, in_shape, id, normalize):
        super().__init__(in_shape, id)
        bias = not normalize
        self._in_shape = in_shape
        self._out_shape = None
        self.conv1 = Conv2d(in_shape[0], 32, 7, stride=2, padding=1, bias=bias)
        self.conv2 = Conv2d(32, 32, 3, stride=2, padding=1, bias=bias)
        self.conv3 = Conv2d(32, 32, 3, stride=2, padding=1, bias=bias)
        self.conv4 = Conv2d(32, 32, 3, stride=2, padding=1, bias=bias)

        if normalize == "bn":
            self.bn1 = BatchNorm2d(32)
            self.bn2 = BatchNorm2d(32)
            self.bn3 = BatchNorm2d(32)
            self.bn4 = BatchNorm2d(32)
        elif normalize == "gn":
            self.bn1 = GroupNorm(8, 32)
            self.bn2 = GroupNorm(8, 32)
            self.bn3 = GroupNorm(8, 32)
            self.bn4 = GroupNorm(8, 32)
        else:
            self.bn1 = Identity()
            self.bn2 = Identity()
            self.bn3 = Identity()
            self.bn4 = Identity()

        relu_gain = init.calculate_gain("relu")
        self.conv1.weight.data.mul_(relu_gain)
        self.conv2.weight.data.mul_(relu_gain)
        self.conv3.weight.data.mul_(relu_gain)
        self.conv4.weight.data.mul_(relu_gain)
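
In this example normalize is a string ("bn", "gn", or anything falsy for no normalization) rather than a boolean; bias = not normalize still works because any non-empty string is truthy. The repeated branches could be collapsed into a small helper, sketched here as a hypothetical refactoring:

    def make_norm(normalize, nb_channel, nb_group=8):
        # hypothetical helper, not part of the original source
        if normalize == "bn":
            return BatchNorm2d(nb_channel)
        elif normalize == "gn":
            return GroupNorm(nb_group, nb_channel)
        return Identity()
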
Example #4
    def __init__(self, in_shape, nb_head, normalize):
        self._nb_output_channel = 800
        super().__init__()
        self.normalize = normalize
        bias = not normalize
        self.conv1 = Conv2d(in_shape[0],
                            32,
                            kernel_size=3,
                            stride=2,
                            padding=1,
                            bias=bias)
        self.conv2 = Conv2d(32,
                            32,
                            kernel_size=3,
                            stride=2,
                            padding=1,
                            bias=bias)

        self.attention = MultiHeadSelfAttention(20 * 20, 34, 34, nb_head)
        self.mlp = Linear(34, 34)

        self.conv3 = Conv2d(34,
                            32,
                            kernel_size=3,
                            stride=2,
                            padding=1,
                            bias=bias)
        self.conv4 = Conv2d(32,
                            32,
                            kernel_size=3,
                            stride=2,
                            padding=1,
                            bias=bias)

        if normalize:
            self.bn1 = BatchNorm2d(32)
            self.bn2 = BatchNorm2d(32)
            self.bn3 = BatchNorm2d(32)
            self.bn4 = BatchNorm2d(32)
        else:
            self.bn1 = Identity()
            self.bn2 = Identity()
            self.bn3 = Identity()
            self.bn4 = Identity()

        relu_gain = init.calculate_gain('relu')
        self.conv1.weight.data.mul_(relu_gain)
        self.conv2.weight.data.mul_(relu_gain)
        self.conv3.weight.data.mul_(relu_gain)
        self.conv4.weight.data.mul_(relu_gain)
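
MultiHeadSelfAttention(20 * 20, 34, 34, nb_head) suggests 20x20 feature maps (e.g. 80x80 inputs after two stride-2 convs) flattened to 400 positions, with the 34 channels presumably being the 32 conv features plus a 2-channel coordinate encoding; conv3 and conv4 then reduce 20x20 to 5x5, and 32 * 5 * 5 = 800 matches _nb_output_channel. A plausible wiring of the attention step, purely as a sketch (the original forward() is not shown):

    def apply_attention(self, x):
        # x: (B, 34, 20, 20) after the assumed coordinate-channel step
        b, c, h, w = x.shape
        flat = x.view(b, c, h * w).transpose(1, 2)  # (B, 400, 34)
        flat = self.mlp(self.attention(flat))       # attend over positions
        return flat.transpose(1, 2).view(b, c, h, w)
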
Example #5
    def __init__(self, input_shape, id, normalize, nb_hidden, nb_layer):
        super().__init__(input_shape, id)
        self._nb_hidden = nb_hidden

        nb_input_channel = input_shape[0]

        bias = not normalize
        self.linears = nn.ModuleList([
            nn.Linear(
                nb_input_channel if i == 0 else nb_hidden,
                nb_hidden,
                bias=bias
            )
            for i in range(nb_layer)
        ])
        if normalize == 'bn':
            self.norms = nn.ModuleList([
                nn.BatchNorm1d(nb_hidden) for _ in range(nb_layer)
            ])
        elif normalize == 'gn':
            if nb_hidden % 16 != 0:
                raise Exception('linear_nb_hidden must be divisible by 16 for Group Norm')
            self.norms = nn.ModuleList([
                nn.GroupNorm(nb_hidden // 16, nb_hidden) for _ in range(nb_layer)
            ])
        else:
            self.norms = nn.ModuleList([
                Identity() for _ in range(nb_layer)
            ])
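
A minimal sketch of the matching forward pass, assuming each linear layer is followed by its norm and a ReLU (hypothetical; the original forward() is not shown):

    import torch.nn.functional as F

    def forward(self, x):
        for linear, norm in zip(self.linears, self.norms):
            x = F.relu(norm(linear(x)))
        return x
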
Example #6
    def __init__(self, in_shape, normalize):
        super().__init__()
        bias = not normalize
        self._nb_output_channel = 2592
        self.conv1 = Conv2d(in_shape[0], 16, 8, stride=4, padding=0, bias=bias)
        self.conv2 = Conv2d(16, 32, 4, stride=2, padding=0, bias=bias)

        if normalize:
            self.bn1 = BatchNorm2d(16)
            self.bn2 = BatchNorm2d(32)
        else:
            self.bn1 = Identity()
            self.bn2 = Identity()

        relu_gain = init.calculate_gain('relu')
        self.conv1.weight.data.mul_(relu_gain)
        self.conv2.weight.data.mul_(relu_gain)
Example #7
    def __init__(self, in_shape, normalize):
        super().__init__()
        bias = not normalize
        self.conv1 = Conv2d(in_shape[0], 64, 7, stride=2, padding=1,
                            bias=bias)  # 40x40
        relu_gain = init.calculate_gain('relu')
        self.conv1.weight.data.mul_(relu_gain)

        if normalize:
            self.bn1 = BatchNorm2d(64)
        else:
            self.bn1 = Identity()
Example #8
    def __init__(self, in_shape, normalize):
        super().__init__()
        bias = not normalize
        self._nb_output_channel = 3136
        self.conv1 = Conv2d(in_shape[0], 32, 8, stride=4, padding=0, bias=bias)
        self.conv2 = Conv2d(32, 64, 4, stride=2, padding=0, bias=bias)
        self.conv3 = Conv2d(64, 64, 3, stride=1, padding=0, bias=bias)

        if normalize:
            self.bn1 = BatchNorm2d(32)
            self.bn2 = BatchNorm2d(64)
            self.bn3 = BatchNorm2d(64)
        else:
            self.bn1 = Identity()
            self.bn2 = Identity()
            self.bn3 = Identity()

        relu_gain = init.calculate_gain("relu")
        self.conv1.weight.data.mul_(relu_gain)
        self.conv2.weight.data.mul_(relu_gain)
        self.conv3.weight.data.mul_(relu_gain)
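
This is the familiar three-conv Atari torso; with 84x84 inputs (an assumption) the output is 64 x 7 x 7 = 3136 features, matching _nb_output_channel. One way to verify the figure empirically:

    import torch
    from torch.nn import Conv2d

    x = torch.zeros(1, 4, 84, 84)  # 4 stacked frames, assumed
    for conv in (Conv2d(4, 32, 8, stride=4),
                 Conv2d(32, 64, 4, stride=2),
                 Conv2d(64, 64, 3, stride=1)):
        x = conv(x)
    print(x.flatten(1).shape)  # torch.Size([1, 3136])
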
Example #9
    def __init__(self, nb_input_channel, _, normalize):
        super().__init__()
        self._nb_output_channel = 256
        bias = not normalize

        self.linear = Linear(nb_input_channel,
                             self._nb_output_channel,
                             bias=bias)
        if normalize:
            self.bn_linear = BatchNorm1d(self._nb_output_channel)
        else:
            self.bn_linear = Identity()
Example #10
    def __init__(self, nb_input_channel, nb_out_channel, normalize):
        super().__init__()
        self._nb_output_channel = 256
        self.linear = Linear(2592, self._nb_output_channel)

        if normalize:
            self.lstm = LSTMCellLayerNorm(
                self._nb_output_channel,
                self._nb_output_channel)  # hack for experiment
            self.bn_linear = BatchNorm1d(self._nb_output_channel)
        else:
            self.bn_linear = Identity()
            self.lstm = LSTMCell(self._nb_output_channel,
                                 self._nb_output_channel)
            self.lstm.bias_ih.data.fill_(0)
            self.lstm.bias_hh.data.fill_(0)
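
Note that nb_input_channel and nb_out_channel go unused: the linear layer's input width is hardcoded to 2592, which matches the two-conv torso of Example #6, and LSTMCellLayerNorm is a project-specific cell. A plausible single-step usage, purely as a sketch (the original forward() is not shown):

    import torch.nn.functional as F

    def forward(self, x, hx):
        # x: (B, 2592) conv features; hx: previous (h, c) recurrent state
        x = F.relu(self.bn_linear(self.linear(x)))
        h, c = self.lstm(x, hx)
        return h, (h, c)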