Example 1
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = F.relu(self.bn2(self.conv2(out)))
     out = self.bn3(self.conv3(out))
     out += self.shortcut(x)
     out = F.relu(out)
     return out
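This forward assumes convolution, batch-norm, and shortcut layers created in the module's __init__. A minimal sketch of such a definition is shown below; the expansion factor of 4 and the projection-shortcut condition are illustrative assumptions, not taken from the original source.

 import oneflow.nn as nn

 class Bottleneck(nn.Module):
     expansion = 4  # illustrative assumption

     def __init__(self, in_planes, planes, stride=1):
         super().__init__()
         self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
         self.bn1 = nn.BatchNorm2d(planes)
         self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                                stride=stride, padding=1, bias=False)
         self.bn2 = nn.BatchNorm2d(planes)
         self.conv3 = nn.Conv2d(planes, self.expansion * planes,
                                kernel_size=1, bias=False)
         self.bn3 = nn.BatchNorm2d(self.expansion * planes)
         # Identity shortcut, replaced by a 1x1 projection when the spatial
         # size or channel count changes.
         self.shortcut = nn.Sequential()
         if stride != 1 or in_planes != self.expansion * planes:
             self.shortcut = nn.Sequential(
                 nn.Conv2d(in_planes, self.expansion * planes,
                           kernel_size=1, stride=stride, bias=False),
                 nn.BatchNorm2d(self.expansion * planes),
             )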
Example 2
 def forward(self, x):
     out = F.relu(self.bn1(x))
     shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x
     out = self.conv1(out)
     out = self.conv2(F.relu(self.bn2(out)))
     out += shortcut
     return out
Example 3
 def forward(self, x):
     x1, x2 = self.split(x)
     out = F.relu(self.bn1(self.conv1(x2)))
     out = self.bn2(self.conv2(out))
     out = F.relu(self.bn3(self.conv3(out)))
     out = oneflow.cat([x1, out], 1)
     out = self.shuffle(out)
     return out
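The split and shuffle modules used above are not shown; a common ShuffleNetV2-style implementation is sketched below. The 50/50 split ratio and the two shuffle groups are assumptions for illustration.

 import oneflow.nn as nn

 class SplitBlock(nn.Module):
     def __init__(self, ratio=0.5):
         super().__init__()
         self.ratio = ratio

     def forward(self, x):
         # Split the channel dimension into a pass-through part and a
         # transformed part.
         c = int(x.size(1) * self.ratio)
         return x[:, :c, :, :], x[:, c:, :, :]

 class ShuffleBlock(nn.Module):
     def __init__(self, groups=2):
         super().__init__()
         self.groups = groups

     def forward(self, x):
         # Channel shuffle: [N, C, H, W] -> [N, g, C/g, H, W] -> swap the
         # group and channel axes -> flatten back to [N, C, H, W].
         n, c, h, w = x.size()
         g = self.groups
         return x.view(n, g, c // g, h, w).transpose(1, 2).reshape(n, c, h, w)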
Example 4
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = self.layers(out)
     out = F.relu(self.bn2(self.conv2(out)))
     # NOTE: change pooling kernel_size 7 -> 4 for CIFAR10
     out = F.avg_pool2d(out, 4)
     out = out.view(out.size(0), -1)
     out = self.linear(out)
     return out
Example 5
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = self.shuffle1(out)
     out = F.relu(self.bn2(self.conv2(out)))
     out = self.bn3(self.conv3(out))
     res = self.shortcut(x)
     out = F.relu(oneflow.cat([out, res],
                              1)) if self.stride == 2 else F.relu(out + res)
     return out
Example 6
 def forward(self, x):
     out = F.relu(self.conv1(x))
     out = F.max_pool2d(out, 2)
     out = F.relu(self.conv2(out))
     out = F.max_pool2d(out, 2)
     out = out.view(out.size(0), -1)
     out = F.relu(self.fc1(out))
     out = F.relu(self.fc2(out))
     out = self.fc3(out)
     return out
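This is a plain LeNet-style classifier. A self-contained sketch of the layer definitions it assumes, sized for 32x32 RGB input, is given below; the channel widths and class count are illustrative assumptions.

 import oneflow as flow
 import oneflow.nn as nn

 class LeNet(nn.Module):
     def __init__(self, num_classes=10):
         super().__init__()
         self.conv1 = nn.Conv2d(3, 6, kernel_size=5)    # 32x32 -> 28x28
         self.conv2 = nn.Conv2d(6, 16, kernel_size=5)   # 14x14 -> 10x10
         self.fc1 = nn.Linear(16 * 5 * 5, 120)          # 5x5 after two 2x2 poolings
         self.fc2 = nn.Linear(120, 84)
         self.fc3 = nn.Linear(84, num_classes)

 # Paired with the forward above, LeNet()(flow.randn(4, 3, 32, 32))
 # would return a [4, 10] tensor of class logits.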
Example 7
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = F.relu(self.bn2(self.conv2(out)))
     out = self.bn3(self.conv3(out))
     x = self.shortcut(x)
     d = self.out_planes
     out = flow.cat([
         x[:, :d, :, :] + out[:, :d, :, :], x[:, d:, :, :], out[:, d:, :, :]
     ], 1)
     out = F.relu(out)
     return out
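Here self.out_planes splits the output into a residual part (the first d channels, which are added) and a dense part (the remaining channels, which are concatenated), as in a dual-path network. A sketch of an __init__ consistent with this forward follows; the layer widths, the group count, and the first_layer flag are illustrative assumptions.

 import oneflow.nn as nn

 class DualPathBlock(nn.Module):
     def __init__(self, last_planes, in_planes, out_planes, dense_depth,
                  stride, first_layer):
         super().__init__()
         self.out_planes = out_planes
         self.conv1 = nn.Conv2d(last_planes, in_planes, kernel_size=1, bias=False)
         self.bn1 = nn.BatchNorm2d(in_planes)
         self.conv2 = nn.Conv2d(in_planes, in_planes, kernel_size=3, stride=stride,
                                padding=1, groups=32, bias=False)
         self.bn2 = nn.BatchNorm2d(in_planes)
         self.conv3 = nn.Conv2d(in_planes, out_planes + dense_depth,
                                kernel_size=1, bias=False)
         self.bn3 = nn.BatchNorm2d(out_planes + dense_depth)
         # The shortcut must also produce out_planes + dense_depth channels
         # so the slicing in forward lines up.
         self.shortcut = nn.Sequential()
         if first_layer:
             self.shortcut = nn.Sequential(
                 nn.Conv2d(last_planes, out_planes + dense_depth,
                           kernel_size=1, stride=stride, bias=False),
                 nn.BatchNorm2d(out_planes + dense_depth),
             )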
Example 8
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     # out = F.max_pool2d(out, 3, stride=2, padding=1)
     out = self.layer1(out)
     out = self.layer2(out)
     out = self.layer3(out)
     out = F.relu(self.bn2(self.conv2(out)))
     out = F.avg_pool2d(out, 4)
     out = out.view(out.size(0), -1)
     out = self.linear(out)
     return out
Example 9
 def forward(self, x):
     # left
     out1 = self.bn1(self.conv1(x))
     out1 = F.relu(self.bn2(self.conv2(out1)))
     # right
     out2 = F.relu(self.bn3(self.conv3(x)))
     out2 = self.bn4(self.conv4(out2))
     out2 = F.relu(self.bn5(self.conv5(out2)))
     # concat
     out = oneflow.cat([out1, out2], 1)
     out = self.shuffle(out)
     return out
Example 10
    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))

        # Squeeze
        w = F.avg_pool2d(out, out.size(2))
        w = F.relu(self.fc1(w))
        w = F.sigmoid(self.fc2(w))
        # Excitation
        out = out * w  # broadcast the [N, C, 1, 1] gate over H and W

        out += self.shortcut(x)
        out = F.relu(out)
        return out
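In the squeeze step, the global average pool produces a [N, C, 1, 1] tensor, so fc1 and fc2 here are most naturally 1x1 convolutions rather than linear layers; the excitation out * w then broadcasts the per-channel gate over the spatial dimensions. A sketch using a hypothetical helper is given below; the name make_se_layers and the reduction ratio of 16 are illustrative assumptions.

 import oneflow.nn as nn

 def make_se_layers(planes, reduction=16):
     # Squeeze-and-excitation gate as two 1x1 convolutions: reduce, then
     # restore the channel count before the sigmoid.
     fc1 = nn.Conv2d(planes, planes // reduction, kernel_size=1)
     fc2 = nn.Conv2d(planes // reduction, planes, kernel_size=1)
     return fc1, fc2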
Example 11
 def forward(self, x):
     # Left branch
     y1 = self.sep_conv1(x)
     y2 = self.sep_conv2(x)
     # Right branch
     y3 = F.max_pool2d(x, kernel_size=3, stride=self.stride, padding=1)
     if self.stride == 2:
         y3 = self.bn1(self.conv1(y3))
     y4 = self.sep_conv3(x)
     # Concat & reduce channels
     b1 = F.relu(y1 + y2)
     b2 = F.relu(y3 + y4)
     y = oneflow.cat([b1, b2], 1)
     return F.relu(self.bn2(self.conv2(y)))
Example 12
    def forward(self, x):
        out = F.relu(self.bn1(x))
        shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x
        out = self.conv1(out)
        out = self.conv2(F.relu(self.bn2(out)))

        # Squeeze
        w = F.avg_pool2d(out, out.size(2))
        w = F.relu(self.fc1(w))
        w = F.sigmoid(self.fc2(w))
        # Excitation
        out = out * w

        out += shortcut
        return out
Example 13
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = self.layers(out)
     out = F.avg_pool2d(out, 2)
     out = out.view(out.size(0), -1)
     out = self.linear(out)
     return out
Example 14
    def forward(self, x, mask):
        """Forward computation.

        Args:
            x (FloatTensor): `[B, C_i, T, F]`
            mask (IntTensor): `[B, 1, T]`
        Returns:
            out (FloatTensor): `[B, C_o, T', F']`
            out_mask (IntTensor): `[B, 1, T]`

        """
        residual = x

        out = self.conv_layer(x)
        out = F.relu(out)

        if self.batch_norm:
            out = self.norm(out)

        out = self.dropout(out)

        if self.residual and out.size() == residual.size():
            out += residual

        mask = self.return_output_mask(mask, out.size(2))

        return out, mask
Example 15
 def forward(self, x: flow.Tensor) -> flow.Tensor:
     features = self.features(x)
     out = F.relu(features, inplace=True)
     out = F.adaptive_avg_pool2d(out, (1, 1))
     out = flow.flatten(out, 1)
     out = self.classifier(out)
     return out
Example 16
 def forward(self, x):
     if x.dim() >= 3:
         raise RuntimeError(
             "{} accepts 1/2D tensor as input, but got {:d}".format(
                 self.__class__.__name__, x.dim()
             )
         )
     # when inference, only one utt
     if x.dim() == 1:
         x = flow.unsqueeze(x, 0)
     # n x 1 x S => n x N x T
     w = F.relu(self.encoder_1d(x))
     # n x B x T
     y = self.proj(self.ln(w))
     # n x B x T
     y = self.repeats(y)
     # n x 2N x T
     e = flow.chunk(self.mask(y), self.num_spks, 1)
     # n x N x T
     if self.non_linear_type == "softmax":
         m = self.non_linear(flow.stack(e, dim=0), dim=0)
     else:
         m = self.non_linear(flow.stack(e, dim=0))
     # spks x [n x N x T]
     s = [w * m[n] for n in range(self.num_spks)]
     # spks x n x S
     return [self.decoder_1d(x, squeeze=True) for x in s]
Example 17
 def forward(self, x):
     residual = x
     output = x.transpose(1, 2)
     output = self.w_2(F.relu(self.w_1(output)))
     output = output.transpose(1, 2)
     output = self.dropout(output)
     output = self.layer_norm(output + residual)
     return output
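The transposes move the feature dimension into the channel axis, which indicates that w_1 and w_2 operate position-wise along the sequence, typically as kernel-size-1 Conv1d layers. A sketch of a definition consistent with this forward follows; the dimension names are illustrative assumptions.

 import oneflow.nn as nn

 class PositionwiseFeedForward(nn.Module):
     def __init__(self, d_model, d_hidden, dropout=0.1):
         super().__init__()
         # Kernel-size-1 convolutions apply the same two-layer MLP at every
         # time step.
         self.w_1 = nn.Conv1d(d_model, d_hidden, kernel_size=1)
         self.w_2 = nn.Conv1d(d_hidden, d_model, kernel_size=1)
         self.layer_norm = nn.LayerNorm(d_model)
         self.dropout = nn.Dropout(dropout)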
Example 18
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = self.layer1(out)
     out = self.layer2(out)
     out = self.layer3(out)
     out = self.layer4(out)
     out = F.adaptive_avg_pool2d(out, (1, 1))
     out = out.view(out.size(0), -1)
     out = self.linear(out)
     return out
Example 19
    def forward(self, x):
        out = self.conv1(x)
        print(out.shape)
        out = self.trans1(self.dense1(out))
        print(out.shape)
        out = self.trans2(self.dense2(out))
        out = self.trans3(self.dense3(out))
        out = self.dense4(out)
        out = F.avg_pool2d(F.relu(self.bn(out)), 4)
        out = out.view(out.size(0), -1)

        out = self.linear(out)
        return out
Example 20
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = F.relu(self.bn2(self.conv2(out)))
     out = self.bn3(self.conv3(out))
     out = out + self.shortcut(x) if self.stride == 1 else out
     return out
Example 21
 def forward(self, x):
     out = self.conv1(F.relu(self.bn1(x)))
     out = self.conv2(F.relu(self.bn2(out)))
     out = flow.cat([out, x], 1)
     return out
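Because the block returns cat([out, x], 1), each layer appends a fixed number of new channels (the growth rate) to its input, which is the DenseNet growth pattern. A sketch of a bottleneck definition consistent with this forward follows; the 4x inner width is an illustrative assumption.

 import oneflow.nn as nn

 class DenseBottleneck(nn.Module):
     def __init__(self, in_planes, growth_rate):
         super().__init__()
         self.bn1 = nn.BatchNorm2d(in_planes)
         self.conv1 = nn.Conv2d(in_planes, 4 * growth_rate,
                                kernel_size=1, bias=False)
         self.bn2 = nn.BatchNorm2d(4 * growth_rate)
         self.conv2 = nn.Conv2d(4 * growth_rate, growth_rate,
                                kernel_size=3, padding=1, bias=False)

 # After the concatenation in forward, the output has
 # in_planes + growth_rate channels.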
Example 22
 def forward(self, x):
     out = self.conv(F.relu(self.bn(x)))
     print(out.shape)
     out = F.avg_pool2d(out, 2)
     print(out.shape)
     return out
Example 23
 def forward(self, x):
     residual = x
     output = self.w_2(F.relu(self.w_1(x)))
     output = self.dropout(output)
     output = self.layer_norm(output + residual)
     return output
Example 24
 def forward(self, xs):
     x = flow.cat(xs, 1)
     out = F.relu(self.bn(self.conv(x)))
     return out
Example 25
 def forward(self, x):
     out = F.relu(self.bn1(self.conv1(x)))
     out = F.relu(self.bn2(self.conv2(out)))
     return out
Example 26
 def forward(self, x):
     y1 = self.sep_conv1(x)
     y2 = F.max_pool2d(x, kernel_size=3, stride=self.stride, padding=1)
     if self.stride == 2:
         y2 = self.bn1(self.conv1(y2))
     return F.relu(y1 + y2)
Example 27
 def forward(self, x):
     out = F.adaptive_avg_pool2d(x, (1, 1))
     out = F.relu(self.se1(out))
     out = self.se2(out).sigmoid()
     out = x * out
     return out
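This is a standalone squeeze-and-excitation gate: a global average pool summarizes each channel, two 1x1 convolutions produce a sigmoid gate, and the input is rescaled channel-wise by broadcasting. A self-contained sketch pairing that forward with an assumed definition follows; the channel count and reduction ratio are illustrative assumptions.

 import oneflow as flow
 import oneflow.nn as nn
 import oneflow.nn.functional as F

 class SE(nn.Module):
     def __init__(self, channels, reduction=16):
         super().__init__()
         self.se1 = nn.Conv2d(channels, channels // reduction, kernel_size=1)
         self.se2 = nn.Conv2d(channels // reduction, channels, kernel_size=1)

     def forward(self, x):
         out = F.adaptive_avg_pool2d(x, (1, 1))  # squeeze: [N, C, 1, 1]
         out = F.relu(self.se1(out))
         out = self.se2(out).sigmoid()           # per-channel gate in (0, 1)
         return x * out                          # excitation via broadcasting

 # Usage: SE(64)(flow.randn(2, 64, 8, 8)) keeps the input shape, with each
 # channel scaled by its learned gate.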