Example No. 1
    def forward(self, x):
        # Stem: conv -> batch norm -> swish activation, then the main blocks
        out = swish(self.bn1(self.conv1(x)))
        out = self.layers(out)
        # Global average pooling down to a (N, C) feature vector
        out = F.adaptive_avg_pool2d(out, 1)
        out = out.view(out.size(0), -1)
        dropout_rate = self.cfg['dropout_rate']
        # F.dropout defaults to training=True, so it is guarded explicitly here
        if self.training and dropout_rate > 0:
            out = F.dropout(out, p=dropout_rate)
        out = self.linear(out)
        return out
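The guard on self.training above is needed because F.dropout, unlike the nn.Dropout module, applies dropout unconditionally unless told otherwise. A minimal standalone check (not part of the example above) illustrates the default:

    import torch
    import torch.nn.functional as F

    x = torch.ones(4, 8)
    # Default is training=True: roughly half the entries are zeroed and the
    # survivors are rescaled by 1 / (1 - p) = 2.
    print(F.dropout(x, p=0.5))
    # With training=False the input is returned unchanged.
    print(F.dropout(x, p=0.5, training=False))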
Example No. 2
    def attn_forward(self, x, mask, pos, dropout=0.0):
        # Pre-norm multi-head self-attention with a residual connection
        residual = x
        x = self.mha_norm(x)
        if self.relative_positional:
            slf_attn_out, slf_attn_weights = self.mha(x, mask.unsqueeze(1),
                                                      pos)
        else:
            slf_attn_out, slf_attn_weights = self.mha(x, mask.unsqueeze(1))
        # Pass training=self.training so dropout is disabled in eval mode
        # (F.dropout defaults to training=True otherwise)
        slf_attn_out = residual + F.dropout(slf_attn_out, p=dropout,
                                            training=self.training)
        return slf_attn_out, slf_attn_weights
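The same pattern can be written with the nn.Dropout module, which follows the parent module's train()/eval() state automatically instead of needing an explicit training= argument. The sketch below is illustrative only; its class and parameter names (PreNormAttnBlock, d_model, nhead) are not taken from the example above:

    import torch.nn as nn

    class PreNormAttnBlock(nn.Module):
        def __init__(self, d_model, nhead, dropout=0.1):
            super().__init__()
            self.norm = nn.LayerNorm(d_model)
            self.mha = nn.MultiheadAttention(d_model, nhead, batch_first=True)
            self.dropout = nn.Dropout(dropout)  # tracks train/eval mode itself

        def forward(self, x, key_padding_mask=None):
            # Pre-norm self-attention with a residual connection, mirroring
            # the functional version above
            residual = x
            x = self.norm(x)
            attn_out, attn_weights = self.mha(
                x, x, x, key_padding_mask=key_padding_mask)
            return residual + self.dropout(attn_out), attn_weights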
Example No. 3
    def forward(self, input) -> flow.Tensor:  # noqa: F811
        # Accept either a single tensor or the list of preceding feature maps
        if isinstance(input, flow.Tensor):
            prev_features = [input]
        else:
            prev_features = input

        # 1x1 bottleneck convolution over the concatenated inputs
        bottleneck_output = self.bn_function(prev_features)

        # norm -> relu -> 3x3 conv producing the new features
        new_features = self.conv2(self.relu2(self.norm2(bottleneck_output)))
        if self.drop_rate > 0:
            new_features = F.dropout(new_features,
                                     p=self.drop_rate,
                                     training=self.training)
        return new_features
Example No. 4
    def conv_augment_forward(self, x, mask, dropout=0.0):
        # Pre-norm convolution block with a residual connection;
        # training=self.training keeps dropout active only during training
        residual = x
        x = self.conv_norm(x)
        return residual + F.dropout(self.conv(x, mask), p=dropout,
                                    training=self.training)
Example No. 5
    def pos_ffn_forward(self, x, dropout=0.0):
        # Pre-norm position-wise feed-forward block, scaled before the
        # residual add; training=self.training disables dropout at eval time
        residual = x
        x = self.post_ffn_norm(x)
        return residual + self.ffn_scale * F.dropout(self.post_ffn(x),
                                                     p=dropout,
                                                     training=self.training)
Example No. 6
    def forward(self, x, mask):
        # Run the parent forward pass, then project and apply dropout
        x, mask = super().forward(x, mask)
        return F.dropout(self.linear(x), p=self.dropout,
                         training=self.training), mask
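A quick way to confirm that dropout in blocks like these is train-only is to compare repeated forward passes in eval and train mode. This check is illustrative and reuses the hypothetical PreNormAttnBlock sketched earlier:

    import torch

    block = PreNormAttnBlock(d_model=16, nhead=2)
    x = torch.randn(2, 5, 16)

    block.eval()
    with torch.no_grad():
        # Dropout is inactive, so repeated calls give identical outputs
        assert torch.allclose(block(x)[0], block(x)[0])

    block.train()
    with torch.no_grad():
        # Dropout is active, so repeated calls generally differ
        print(torch.allclose(block(x)[0], block(x)[0]))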