Example 1
    def forward(self, x, init_states=None):
        """Assumes x is of shape (sequence, batch, feature)."""
        seq_sz, bs, _ = x.size()
        hidden_seq = []
        if init_states is None:
            h_t, c_t = (
                flow.zeros((bs, self.hidden_size)).to("cuda"),  # hard-coded GPU; .to(x.device) is more portable
                flow.zeros((bs, self.hidden_size)).to("cuda"),
            )
        else:
            h_t, c_t = init_states

        HS = self.hidden_size
        for t in range(seq_sz):
            x_t = x[t, :, :].reshape(x.shape[1], x.shape[2])
            gates = flow.matmul(x_t, self.W) + flow.matmul(h_t,
                                                           self.U) + self.bias
            i_t, f_t, g_t, o_t = (
                flow.sigmoid(gates[:, :HS]),
                flow.sigmoid(gates[:, HS:HS * 2]),
                flow.tanh(gates[:, HS * 2:HS * 3]),
                flow.sigmoid(gates[:, HS * 3:]),
            )
            c_t = f_t * c_t + i_t * g_t
            h_t = o_t * flow.tanh(c_t)
            hidden_seq.append(h_t.unsqueeze(0))
        hidden_seq = flow.cat(hidden_seq, dim=0)
        return hidden_seq, (h_t, c_t)
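For reference, a minimal sketch of the module state this forward assumes (hypothetical class and initializer; the parameter shapes are inferred from the matmuls and gate slicing above):

import oneflow as flow
import oneflow.nn as nn

class NaiveLSTM(nn.Module):
    # Hypothetical wrapper: W maps input features to the four stacked gates
    # (i, f, g, o); U maps the hidden state to the same four gates.
    def __init__(self, input_size, hidden_size):
        super().__init__()
        self.hidden_size = hidden_size
        self.W = nn.Parameter(flow.randn(input_size, hidden_size * 4) * 0.1)
        self.U = nn.Parameter(flow.randn(hidden_size, hidden_size * 4) * 0.1)
        self.bias = nn.Parameter(flow.zeros(hidden_size * 4))
    # forward(self, x, init_states=None) as in the example above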
Example 2
 def forward(self, x, init_states=None):
     """Assumes x is of shape (batch, sequence, feature)"""
     bs, seq_sz, _ = x.size()
     hidden_seq = []
     if init_states is None:
         h_t, c_t = (
             flow.zeros((bs, self.hidden_size)).to(x.device),
             flow.zeros((bs, self.hidden_size)).to(x.device),
         )
     else:
         h_t, c_t = init_states
     HS = self.hidden_size
     for t in range(seq_sz):
         x_t = x[:, t, :].reshape(x.shape[0], x.shape[2])
         gates = flow.matmul(x_t, self.W) + flow.matmul(h_t, self.U) + self.bias
         i_t, f_t, g_t, o_t = (
             flow.sigmoid(gates[:, :HS]),
             flow.sigmoid(gates[:, HS : HS * 2]),
             flow.tanh(gates[:, HS * 2 : HS * 3]),
             flow.sigmoid(gates[:, HS * 3 :]),
         )
         c_t = f_t * c_t + i_t * g_t
         h_t = o_t * flow.tanh(c_t)
         hidden_seq.append(h_t.unsqueeze(1))
     hidden_seq = flow.cat(hidden_seq, dim=1)
     return hidden_seq, (h_t, c_t)
Example 3
    def forward(self, x, init_states=None):
        """Assumes x is of shape (batch, sequence, feature)"""
        seq_sz, bs, _ = x.size()
        hidden_seq = []
        if init_states is None:
            h_t, c_t = (
                flow.zeros((bs, self.hidden_size)).to("cuda"),
                flow.zeros((bs, self.hidden_size)).to("cuda"),
            )
        else:
            h_t, c_t = init_states

        HS = self.hidden_size
        for t in range(seq_sz):
            x_t = x[t, :, :].reshape(x.shape[1], x.shape[2])
            # batch the computations into a single matrix multiplication
            # NOTE(Xu Zhiqiu): flow does not support view now, use reshape instead
            gates = flow.matmul(x_t, self.W) + flow.matmul(h_t,
                                                           self.U) + self.bias
            i_t, f_t, g_t, o_t = (
                flow.sigmoid(gates[:, :HS]),
                flow.sigmoid(gates[:, HS:HS * 2]),
                flow.tanh(gates[:, HS * 2:HS * 3]),
                flow.sigmoid(gates[:, HS * 3:]),
            )
            c_t = f_t * c_t + i_t * g_t
            h_t = o_t * flow.tanh(c_t)
            hidden_seq.append(h_t.unsqueeze(0))
        hidden_seq = flow.cat(hidden_seq, dim=0)
        return hidden_seq, (h_t, c_t)
Example 4
    def forward(self, x, hidden=None):
        batch_size, seq_len, _ = x.size()
        H_S = self.hidden_size
        hidden_seq = []

        if hidden is None:
            h_t = flow.zeros((batch_size, self.hidden_size))  # created on CPU; use .to(x.device) for GPU inputs
        else:
            h_t = hidden

        for t in range(seq_len):
            x_t = x[:, t, :]
            gates_1 = flow.matmul(x_t, self.inp_W) + self.inp_b
            gates_2 = flow.matmul(h_t, self.hid_W) + self.hid_b

            r_gate = flow.sigmoid(gates_1[:, :H_S] + gates_2[:, :H_S])
            z_gate = flow.sigmoid(gates_1[:, H_S:H_S * 2] +
                                  gates_2[:, H_S:H_S * 2])
            h_t_ = flow.tanh(gates_1[:, H_S * 2:H_S * 3] +
                             r_gate * gates_2[:, H_S * 2:H_S * 3])
            h_t = (1 - z_gate) * h_t_ + z_gate * h_t

            hidden_seq.append(h_t.unsqueeze(1))

        hidden_seq = flow.cat(hidden_seq, dim=1)
        return hidden_seq, h_t
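The loop above is the standard GRU update: reset gate r, update gate z, and a candidate state (h_t_ in the code), with the new hidden state interpolated between the candidate and the previous state. A minimal sketch of the parameters this forward assumes (hypothetical initializer matching the names in the code):

import oneflow as flow
import oneflow.nn as nn

class NaiveGRU(nn.Module):
    # Hypothetical: each weight packs the reset, update, and candidate gates.
    def __init__(self, input_size, hidden_size):
        super().__init__()
        self.hidden_size = hidden_size
        self.inp_W = nn.Parameter(flow.randn(input_size, hidden_size * 3) * 0.1)
        self.inp_b = nn.Parameter(flow.zeros(hidden_size * 3))
        self.hid_W = nn.Parameter(flow.randn(hidden_size, hidden_size * 3) * 0.1)
        self.hid_b = nn.Parameter(flow.zeros(hidden_size * 3))
    # forward(self, x, hidden=None) as in the example above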
Example 5
 def forward(self, inputs: Tensor):
     x = self.adaptive_avg_pool2d(inputs)
     x = self.down(x)
     x = self.relu(x)
     x = self.up(x)
     x = flow.sigmoid(x)
     return inputs * x
Example 6
    def forward(self, input):
        input = input.unsqueeze(1)
        conv1 = self.conv1(input) * flow.sigmoid(self.conv1_gates(input))

        # DownSample
        downsample1 = self.downSample1(conv1)
        downsample2 = self.downSample2(downsample1)

        # 2D -> 1D
        # reshape
        reshape2dto1d = downsample2.view(downsample2.size(0), 2304, 1, -1)
        reshape2dto1d = reshape2dto1d.squeeze(2)
        conv2dto1d_layer = self.conv2dto1dLayer(reshape2dto1d)

        residual_layer_1 = self.residualLayer1(conv2dto1d_layer)
        residual_layer_2 = self.residualLayer2(residual_layer_1)
        residual_layer_3 = self.residualLayer3(residual_layer_2)
        residual_layer_4 = self.residualLayer4(residual_layer_3)
        residual_layer_5 = self.residualLayer5(residual_layer_4)
        residual_layer_6 = self.residualLayer6(residual_layer_5)

        # 1D -> 2D
        conv1dto2d_layer = self.conv1dto2dLayer(residual_layer_6)
        # reshape
        reshape1dto2d = conv1dto2d_layer.unsqueeze(2)
        reshape1dto2d = reshape1dto2d.view(reshape1dto2d.size(0), 256, 9, -1)

        # UpSample
        upsample_layer_1 = self.upSample1(reshape1dto2d)
        upsample_layer_2 = self.upSample2(upsample_layer_1)

        output = self.lastConvLayer(upsample_layer_2)
        output = output.squeeze(1)
        return output
Example 7
    def forward(self, input):
        h1_norm = self.conv1d_layer(input)
        h1_gates_norm = self.conv_layer_gates(input)

        # GLU
        h1_glu = h1_norm * flow.sigmoid(h1_gates_norm)

        h2_norm = self.conv1d_out_layer(h1_glu)
        return input + h2_norm
Example 8
 def forward(self, x):
     # x has shape [batch_size, num_features, frames]
     # discriminator requires shape [batchSize, 1, num_features, frames]
     x = x.unsqueeze(1)
     conv_layer_1 = self.convLayer1(x)
     downsample1 = self.downSample1(conv_layer_1)
     downsample2 = self.downSample2(downsample1)
     downsample3 = self.downSample3(downsample2)
     output = flow.sigmoid(self.outputConvLayer(downsample3))
     return output
Example 9
    def forward(self, x):
        x1 = self.c1(x)
        x1 = self.n1(x1)

        x2 = self.c2(x)
        x2 = self.n2(x2)

        x3 = x1 * flow.sigmoid(x2)

        return x3
Example 10
 def __call__(self, outputs, targets):
     loss = (1 - self.jaccard_weight) * self.nll_loss(outputs, targets)
     if self.jaccard_weight:
         eps = 1e-15
         # binary foreground mask, built via numpy and moved back to the GPU
         jaccard_target = flow.Tensor(
             (targets.numpy() == 1)).to(flow.device("cuda"))
         jaccard_output = flow.sigmoid(outputs)
         intersection = (jaccard_output * jaccard_target).sum()
         union = jaccard_output.sum() + jaccard_target.sum()
         loss -= self.jaccard_weight * flow.log(
             (intersection + eps) / (union - intersection + eps))
     return loss
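The subtracted term is a soft (differentiable) Jaccard/IoU log-penalty computed on sigmoid probabilities. A plausible constructor for this loss (hypothetical; BCEWithLogitsLoss is assumed for nll_loss, since outputs are raw logits):

import oneflow.nn as nn

class LossBinary:
    # Hypothetical initializer for the __call__ above.
    def __init__(self, jaccard_weight=0.3):
        self.nll_loss = nn.BCEWithLogitsLoss()  # assumed: consumes raw logits
        self.jaccard_weight = jaccard_weight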
Example 11
    def forward(self, input):
        # input has shape [batch_size, num_features, time]
        # discriminator requires shape [batchSize, 1, num_features, time]
        input = input.unsqueeze(1)
        conv_layer_1 = self.convLayer1(input)

        downsample1 = self.downSample1(conv_layer_1)
        downsample2 = self.downSample2(downsample1)
        downsample3 = self.downSample3(downsample2)

        output = flow.sigmoid(self.outputConvLayer(downsample3))
        return output
Example 12
    def test_sigmoid(test_case):
        m = flow.nn.Sigmoid()
        input_arr = np.random.randn(2, 3, 4, 5)
        x = flow.Tensor(input_arr)

        y = m(x)
        y2 = flow.sigmoid(x)
        y3 = x.sigmoid()
        output = numpy_sigmoid(input_arr)

        test_case.assertTrue(np.allclose(y.numpy(), output, rtol=1e-05))
        test_case.assertTrue(np.allclose(y2.numpy(), output, rtol=1e-05))
        test_case.assertTrue(np.allclose(y3.numpy(), output, rtol=1e-05))
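The test checks three equivalent call styles against a NumPy reference. numpy_sigmoid is not shown in the snippet; a straightforward definition it presumably relies on:

import numpy as np

def numpy_sigmoid(x):
    # Elementwise logistic function, used as the ground truth.
    return 1.0 / (1.0 + np.exp(-x))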
Example 13
    def forward(self, x, mask):
        # Conv2d
        x = flow.stack((x * mask, mask), dim=1)
        conv1 = self.conv1(x) * flow.sigmoid(self.conv1_gates(x))  # GLU

        # Downsampling
        downsample1 = self.downSample1(conv1)
        downsample2 = self.downSample2(downsample1)

        # Reshape
        reshape2dto1d = downsample2.view(
            downsample2.size(0), self.flattened_channels, 1, -1
        )
        reshape2dto1d = reshape2dto1d.squeeze(2)

        # 2D -> 1D
        conv2dto1d_layer = self.conv2dto1dLayer(reshape2dto1d)
        conv2dto1d_layer = self.conv2dto1dLayer_tfan(conv2dto1d_layer)

        # Residual Blocks
        residual_layer_1 = self.residualLayer1(conv2dto1d_layer)
        residual_layer_2 = self.residualLayer2(residual_layer_1)
        residual_layer_3 = self.residualLayer3(residual_layer_2)
        residual_layer_4 = self.residualLayer4(residual_layer_3)
        residual_layer_5 = self.residualLayer5(residual_layer_4)
        residual_layer_6 = self.residualLayer6(residual_layer_5)

        # 1D -> 2D
        conv1dto2d_layer = self.conv1dto2dLayer(residual_layer_6)
        conv1dto2d_layer = self.conv1dto2dLayer_tfan(conv1dto2d_layer)

        # Reshape
        reshape1dto2d = conv1dto2d_layer.unsqueeze(2)
        reshape1dto2d = reshape1dto2d.view(reshape1dto2d.size(0), 256, 20, -1)

        # UpSampling
        upsample_layer_1 = self.upSample1(reshape1dto2d)
        upsample_layer_2 = self.upSample2(upsample_layer_1)

        # Conv2d
        output = self.lastConvLayer(upsample_layer_2)
        output = output.squeeze(1)
        return output
Example 14
    def forward(self, x, mask):
        """
        Args:
            x: [batch_size, time, channels]
            mask: [batch_size, time]
        """
        mask = mask.unsqueeze(2).repeat([1, 1, x.size(-1)])

        x = self.pointwise_conv1(x)
        x = F.glu(x)
        x = flow.masked_fill(x, mask == 0, 0.0)

        x = x.transpose(1, 2)
        x = self.depthwise_conv(x)
        x = self.batch_norm(x)
        x = x * flow.sigmoid(x)
        x = x.transpose(1, 2)

        x = self.pointwise_conv2(x)
        x = flow.masked_fill(x, mask == 0, 0.0)

        return x
Example 15
File: rnn.py Project: zzk0/oneflow
    def forward(self, input, h_0=None):
        if not self.batch_first:
            input = self.permute_tensor(input)
        D = 2 if self.bidirectional else 1
        num_layers = self.num_layers
        batch_size, seq_len, _ = input.size()

        if h_0 is None:
            real_hidden_size = (
                self.proj_size if self.proj_size > 0 else self.hidden_size
            )
            h_t = flow.zeros(
                (D * num_layers, batch_size, real_hidden_size),
                dtype=input.dtype,
                device=input.device,
            )
            c_t = flow.zeros(
                (D * num_layers, batch_size, self.hidden_size),
                dtype=input.dtype,
                device=input.device,
            )
        else:
            h_t, c_t = h_0

        if self.bidirectional:
            if h_0 is None:
                h_t_f = h_t[:num_layers, :, :]
                h_t_b = h_t[num_layers:, :, :]
                c_t_f = c_t[:num_layers, :, :]
                c_t_b = c_t[num_layers:, :, :]
            else:
                h_t_f = flow.cat(
                    [
                        h_t[l, :, :].unsqueeze(0)
                        for l in range(h_t.size(0))
                        if l % 2 == 0
                    ],
                    dim=0,
                )
                h_t_b = flow.cat(
                    [
                        h_t[l, :, :].unsqueeze(0)
                        for l in range(h_t.size(0))
                        if l % 2 != 0
                    ],
                    dim=0,
                )
                c_t_f = flow.cat(
                    [
                        c_t[l, :, :].unsqueeze(0)
                        for l in range(c_t.size(0))
                        if l % 2 == 0
                    ],
                    dim=0,
                )
                c_t_b = flow.cat(
                    [
                        c_t[l, :, :].unsqueeze(0)
                        for l in range(c_t.size(0))
                        if l % 2 != 0
                    ],
                    dim=0,
                )
        else:
            h_t_f = h_t
            c_t_f = c_t

        layer_hidden = []
        layer_cell = []

        for layer in range(self.num_layers):

            hidden_seq_f = []
            if self.bidirectional:
                hidden_seq_b = []

            hid_t_f = h_t_f[layer, :, :]
            h_c_t_f = c_t_f[layer, :, :]
            if self.bidirectional:
                hid_t_b = h_t_b[layer, :, :]
                h_c_t_b = c_t_b[layer, :, :]

            for t in range(seq_len):
                if layer == 0:
                    x_t_f = input[:, t, :]
                    if self.bidirectional:
                        x_t_b = input[:, seq_len - 1 - t, :]
                else:
                    x_t_f = hidden_seq[:, t, :]
                    if self.bidirectional:
                        x_t_b = hidden_seq[:, seq_len - 1 - t, :]

                # TODO: Modify after adding the stride attribute
                # gi_f = flow.matmul(
                #     x_t_f,
                #     getattr(self, "weight_ih_l{}{}".format(layer, "")).permute(1, 0),
                # )
                # gh_f = flow.matmul(
                #     hid_t_f,
                #     getattr(self, "weight_hh_l{}{}".format(layer, "")).permute(1, 0),
                # )

                gi_f = flow.matmul(
                    x_t_f, getattr(self, "weight_ih_l{}{}".format(layer, "")),
                )
                gh_f = flow.matmul(
                    hid_t_f, getattr(self, "weight_hh_l{}{}".format(layer, "")),
                )
                if self.bias:
                    gi_f += getattr(self, "bias_ih_l{}{}".format(layer, ""))
                    gh_f += getattr(self, "bias_hh_l{}{}".format(layer, ""))
                gates_f = gi_f + gh_f
                ingate_f, forgetgate_f, cellgate_f, outgate_f = gates_f.chunk(4, dim=1)
                ingate_f = flow.sigmoid(ingate_f)
                forgetgate_f = flow.sigmoid(forgetgate_f)
                cellgate_f = flow.tanh(cellgate_f)
                outgate_f = flow.sigmoid(outgate_f)
                h_c_t_f = (forgetgate_f * h_c_t_f) + (ingate_f * cellgate_f)
                hid_t_f = outgate_f * flow.tanh(h_c_t_f)
                if self.proj_size > 0:

                    # TODO:Modify after adding the stride attribute
                    # hid_t_f = flow.matmul(
                    #     hid_t_f,
                    #     getattr(self, "weight_hr_l{}{}".format(layer, "")).permute(
                    #         1, 0
                    #     ),
                    # )

                    hid_t_f = flow.matmul(
                        hid_t_f, getattr(self, "weight_hr_l{}{}".format(layer, ""))
                    )
                hidden_seq_f.append(hid_t_f.unsqueeze(1))

                if self.bidirectional:

                    # TODO:Modify after adding the stride attribute
                    # gi_b = flow.matmul(
                    #     x_t_b,
                    #     getattr(
                    #         self, "weight_ih_l{}{}".format(layer, "_reverse")
                    #     ).permute(1, 0),
                    # )
                    # gh_b = flow.matmul(
                    #     hid_t_b,
                    #     getattr(
                    #         self, "weight_hh_l{}{}".format(layer, "_reverse")
                    #     ).permute(1, 0),
                    # )

                    gi_b = flow.matmul(
                        x_t_b,
                        getattr(self, "weight_ih_l{}{}".format(layer, "_reverse")),
                    )
                    gh_b = flow.matmul(
                        hid_t_b,
                        getattr(self, "weight_hh_l{}{}".format(layer, "_reverse")),
                    )

                    if self.bias:
                        gi_b += getattr(self, "bias_ih_l{}{}".format(layer, "_reverse"))
                        gh_b += getattr(self, "bias_hh_l{}{}".format(layer, "_reverse"))
                    gates_b = gi_b + gh_b
                    ingate_b, forgetgate_b, cellgate_b, outgate_b = gates_b.chunk(
                        4, dim=1
                    )
                    ingate_b = flow.sigmoid(ingate_b)
                    forgetgate_b = flow.sigmoid(forgetgate_b)
                    cellgate_b = flow.tanh(cellgate_b)
                    outgate_b = flow.sigmoid(outgate_b)
                    h_c_t_b = (forgetgate_b * h_c_t_b) + (ingate_b * cellgate_b)
                    hid_t_b = outgate_b * flow.tanh(h_c_t_b)
                    if self.proj_size > 0:

                        # TODO:Modify after adding the stride attribute
                        # hid_t_b = flow.matmul(
                        #     hid_t_b,
                        #     getattr(
                        #         self, "weight_hr_l{}{}".format(layer, "_reverse")
                        #     ).permute(1, 0),
                        # )

                        hid_t_b = flow.matmul(
                            hid_t_b,
                            getattr(self, "weight_hr_l{}{}".format(layer, "_reverse")),
                        )
                    hidden_seq_b.insert(0, hid_t_b.unsqueeze(1))

            hidden_seq_f = flow.cat(hidden_seq_f, dim=1)
            if self.bidirectional:
                hidden_seq_b = flow.cat(hidden_seq_b, dim=1)

            if self.dropout != 0 and layer != self.num_layers - 1:
                hidden_seq_f = self.drop(hidden_seq_f)
                if self.bidirectional:
                    hidden_seq_b = self.drop(hidden_seq_b)

            if self.bidirectional:
                hidden_seq = flow.cat([hidden_seq_f, hidden_seq_b], dim=2)
            else:
                hidden_seq = hidden_seq_f

            if self.bidirectional:
                h_t = flow.cat([hid_t_f.unsqueeze(0), hid_t_b.unsqueeze(0)], dim=0)
                c_t = flow.cat([h_c_t_f.unsqueeze(0), h_c_t_b.unsqueeze(0)], dim=0)
            else:
                h_t = hid_t_f.unsqueeze(0)
                c_t = h_c_t_f.unsqueeze(0)

            layer_hidden.append(h_t)
            layer_cell.append(c_t)

        h_t = flow.cat(layer_hidden, dim=0)
        c_t = flow.cat(layer_cell, dim=0)

        if not self.batch_first:
            hidden_seq = self.permute_tensor(hidden_seq)

        return hidden_seq, (h_t, c_t)
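This is essentially a full LSTM layer loop: multi-layer, with optional bidirection and projection. A hedged usage sketch, assuming this forward backs flow.nn.LSTM:

import oneflow as flow

lstm = flow.nn.LSTM(input_size=10, hidden_size=20, num_layers=2, batch_first=True)
x = flow.randn(4, 7, 10)       # (batch, seq, feature)
out, (h_n, c_n) = lstm(x)      # out: (4, 7, 20); h_n, c_n: (2, 4, 20)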
Example 16
File: rnn.py Project: zzk0/oneflow
    def forward(self, input, h_0=None):
        if not self.batch_first:
            input = self.permute_tensor(input)
        D = 2 if self.bidirectional else 1
        num_layers = self.num_layers
        batch_size, seq_len, _ = input.size()

        if h_0 is None:
            h_t = flow.zeros(
                (D * num_layers, batch_size, self.hidden_size),
                dtype=input.dtype,
                device=input.device,
            )
        else:
            h_t = h_0

        if self.bidirectional:
            if h_0 is None:
                h_t_f = h_t[:num_layers, :, :]
                h_t_b = h_t[num_layers:, :, :]
            else:
                h_t_f = flow.cat(
                    [
                        h_t[l, :, :].unsqueeze(0)
                        for l in range(h_t.size(0))
                        if l % 2 == 0
                    ],
                    dim=0,
                )
                h_t_b = flow.cat(
                    [
                        h_t[l, :, :].unsqueeze(0)
                        for l in range(h_t.size(0))
                        if l % 2 != 0
                    ],
                    dim=0,
                )
        else:
            h_t_f = h_t

        layer_hidden = []

        for layer in range(self.num_layers):
            hidden_seq_f = []
            if self.bidirectional:
                hidden_seq_b = []

            hid_t_f = h_t_f[layer, :, :]
            if self.bidirectional:
                hid_t_b = h_t_b[layer, :, :]

            for t in range(seq_len):
                if layer == 0:
                    x_t_f = input[:, t, :]
                    if self.bidirectional:
                        x_t_b = input[:, seq_len - 1 - t, :]
                else:
                    x_t_f = hidden_seq[:, t, :]
                    if self.bidirectional:
                        x_t_b = hidden_seq[:, seq_len - 1 - t, :]

                # TODO: Modify after adding the stride attribute
                # gi_f = flow.matmul(
                #     x_t_f,
                #     getattr(self, "weight_ih_l{}{}".format(layer, "")).permute(1, 0),
                # )
                # gh_f = flow.matmul(
                #     hid_t_f,
                #     getattr(self, "weight_hh_l{}{}".format(layer, "")).permute(1, 0),
                # )

                gi_f = flow.matmul(
                    x_t_f, getattr(self, "weight_ih_l{}{}".format(layer, "")),
                )
                gh_f = flow.matmul(
                    hid_t_f, getattr(self, "weight_hh_l{}{}".format(layer, "")),
                )
                if self.bias:
                    gi_f += getattr(self, "bias_ih_l{}{}".format(layer, ""))
                    gh_f += getattr(self, "bias_hh_l{}{}".format(layer, ""))

                i_r_f, i_i_f, i_n_f = gi_f.chunk(3, dim=1)
                h_r_f, h_i_f, h_n_f = gh_f.chunk(3, dim=1)

                resetgate_f = flow.sigmoid(i_r_f + h_r_f)
                inputgate_f = flow.sigmoid(i_i_f + h_i_f)
                newgate_f = flow.tanh(i_n_f + resetgate_f * h_n_f)

                hid_t_f = newgate_f + inputgate_f * (hid_t_f - newgate_f)

                hidden_seq_f.append(hid_t_f.unsqueeze(1))

                if self.bidirectional:

                    # TODO:Modify after adding the stride attribute
                    # gi_b = flow.matmul(
                    #     x_t_b,
                    #     getattr(
                    #         self, "weight_ih_l{}{}".format(layer, "_reverse")
                    #     ).permute(1, 0),
                    # )
                    # gh_b = flow.matmul(
                    #     hid_t_b,
                    #     getattr(
                    #         self, "weight_hh_l{}{}".format(layer, "_reverse")
                    #     ).permute(1, 0),
                    # )

                    gi_b = flow.matmul(
                        x_t_b,
                        getattr(self, "weight_ih_l{}{}".format(layer, "_reverse")),
                    )
                    gh_b = flow.matmul(
                        hid_t_b,
                        getattr(self, "weight_hh_l{}{}".format(layer, "_reverse")),
                    )
                    if self.bias:
                        gi_b += getattr(self, "bias_ih_l{}{}".format(layer, "_reverse"))
                        gh_b += getattr(self, "bias_hh_l{}{}".format(layer, "_reverse"))

                    i_r_b, i_i_b, i_n_b = gi_b.chunk(3, dim=1)
                    h_r_b, h_i_b, h_n_b = gh_b.chunk(3, dim=1)

                    resetgate_b = flow.sigmoid(i_r_b + h_r_b)
                    inputgate_b = flow.sigmoid(i_i_b + h_i_b)
                    newgate_b = flow.tanh(i_n_b + resetgate_b * h_n_b)

                    hid_t_b = newgate_b + inputgate_b * (hid_t_b - newgate_b)

                    hidden_seq_b.insert(0, hid_t_b.unsqueeze(1))

            hidden_seq_f = flow.cat(hidden_seq_f, dim=1)
            if self.bidirectional:
                hidden_seq_b = flow.cat(hidden_seq_b, dim=1)

            if self.dropout != 0 and layer != self.num_layers - 1:
                hidden_seq_f = self.drop(hidden_seq_f)
                if self.bidirectional:
                    hidden_seq_b = self.drop(hidden_seq_b)

            if self.bidirectional:
                hidden_seq = flow.cat([hidden_seq_f, hidden_seq_b], dim=2)
            else:
                hidden_seq = hidden_seq_f

            if self.bidirectional:
                h_t = flow.cat([hid_t_f.unsqueeze(0), hid_t_b.unsqueeze(0)], dim=0)
            else:
                h_t = hid_t_f.unsqueeze(0)

            layer_hidden.append(h_t)

        h_t = flow.cat(layer_hidden, dim=0)

        if not self.batch_first:
            hidden_seq = self.permute_tensor(hidden_seq)

        return hidden_seq, h_t
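The same layer structure specialized to GRU gates (reset/update/new), with no cell state. A hedged usage sketch, assuming this forward backs flow.nn.GRU:

import oneflow as flow

gru = flow.nn.GRU(input_size=10, hidden_size=20, num_layers=2, batch_first=True)
x = flow.randn(4, 7, 10)    # (batch, seq, feature)
out, h_n = gru(x)           # out: (4, 7, 20); h_n: (2, 4, 20)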
Example 17
 def forward(self, x):
     return flow.sigmoid(x)
Example 18
 def forward(self, x):
     b = x.shape[0]
     x1 = self.model(x).reshape(b, -1)
     y = flow.sigmoid(self.fc(x1))
     return y.flatten()
Example 19
 def forward(self, input):
     return self.convLayer(input) * flow.sigmoid(
         self.convLayer_gates(input))
Example 20
 def forward(self, input):
     return input * flow.sigmoid(input)
Example 21
 def forward(self, x):
     return self.convLayer(x) * flow.sigmoid(self.convLayer_gates(x))
Example 22
def swish(x: flow.Tensor) -> flow.Tensor:
    return x * flow.sigmoid(x)
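Swish (also known as SiLU) gates the input by its own sigmoid; it is smooth and non-monotonic. A quick sanity check of the function above:

import oneflow as flow

x = flow.randn(2, 3)
y = swish(x)  # same shape as x; swish(0) == 0, and swish(x) approaches x for large positive x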
Example 23
import logging
import oneflow as flow
import oneflow.nn as nn
import oneflow.nn.functional as F

logger = logging.getLogger(__name__)

_ACTIVATION = {
    "relu": F.relu,
    "gelu": F.gelu,
    "glu": F.glu,
    "tanh": lambda x: flow.tanh(x),
    "swish": lambda x: x * flow.sigmoid(x),
}


class PositionwiseFeedForward(nn.Module):
    """Positionwise feed forward
    """
    def __init__(self, d_model, d_ff, dropout, activation="relu"):
        super(PositionwiseFeedForward, self).__init__()
        self.activation = activation

        assert activation in ["relu", "gelu", "glu", "tanh", "swish"]

        self.w_1 = nn.Linear(d_model,
                             d_ff * 2 if activation == "glu" else d_ff)
        self.w_2 = nn.Linear(d_ff, d_model)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x):
        # Body truncated in the original snippet; a plausible reconstruction from
        # __init__ above (w_1 doubles the width for "glu", which F.glu then halves):
        return self.w_2(self.dropout(_ACTIVATION[self.activation](self.w_1(x))))
Example 24
def _sigmoid(self):
    # Bound as a Tensor method, so `x.sigmoid()` dispatches here.
    return flow.sigmoid(self)
Example 25
 def forward(self, x):
     h1_norm = self.conv1d_layer(x)
     h1_gates_norm = self.conv_layer_gates(x)
     h1_glu = h1_norm * flow.sigmoid(h1_gates_norm)  # GLU
     h2_norm = self.conv1d_out_layer(h1_glu)
     return x + h2_norm