Example #1
def functional(self, place):
    # Build F.conv1d in a static-graph program, feeding input, weight and
    # (optionally) bias as data layers, then execute and fetch the result.
    main = fluid.Program()
    start = fluid.Program()
    with fluid.unique_name.guard():
        with fluid.program_guard(main, start):
            input_shape = (-1, self.num_channels,
                           -1) if not self.channel_last else (
                               -1, -1, self.num_channels)
            x_var = fluid.data("input", input_shape, dtype=self.dtype)
            w_var = fluid.data("weight",
                               self.weight_shape,
                               dtype=self.dtype)
            b_var = fluid.data("bias", (self.num_filters, ),
                               dtype=self.dtype)
            y_var = F.conv1d(x_var,
                             w_var,
                             b_var if not self.no_bias else None,
                             padding=self.padding,
                             stride=self.stride,
                             dilation=self.dilation,
                             groups=self.groups,
                             data_format=self.data_format)
    feed_dict = {"input": self.input, "weight": self.weight}
    if self.bias is not None:
        feed_dict["bias"] = self.bias
    exe = fluid.Executor(place)
    exe.run(start)  # run the startup program first
    y_np, = exe.run(main, feed=feed_dict, fetch_list=[y_var])
    return y_np
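
For readers who want to try the same static-graph pattern outside of the test class, here is a minimal self-contained sketch. The shapes (batch 2, 4 input channels, 8 filters of width 3), the CPU place, and the paddle.enable_static() call are my own assumptions, not part of the original test.

import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.nn.functional as F

paddle.enable_static()  # assume paddle 2.x, where dygraph is the default

main = fluid.Program()
start = fluid.Program()
with fluid.unique_name.guard():
    with fluid.program_guard(main, start):
        x_var = fluid.data("input", (-1, 4, -1), dtype="float32")  # NCL layout
        w_var = fluid.data("weight", (8, 4, 3), dtype="float32")   # [out_c, in_c, k]
        y_var = F.conv1d(x_var, w_var, None, padding=1)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(start)
x = np.random.randn(2, 4, 16).astype("float32")
w = np.random.randn(8, 4, 3).astype("float32")
y, = exe.run(main, feed={"input": x, "weight": w}, fetch_list=[y_var])
print(y.shape)  # (2, 8, 16): length preserved because padding=1 with kernel size 3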
Example #2
def dygraph_case(self):
    # The same convolution, but in dygraph (imperative) mode.
    with dg.guard():
        x = dg.to_variable(self.input, dtype=paddle.float32)
        w = dg.to_variable(self.filter, dtype=paddle.float32)
        b = None if self.bias is None else dg.to_variable(
            self.bias, dtype=paddle.float32)
        y = F.conv1d(x,
                     w,
                     b,
                     padding=self.padding,
                     stride=self.stride,
                     dilation=self.dilation,
                     groups=self.groups,
                     data_format=self.data_format)
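
And a minimal self-contained dygraph call of F.conv1d; the shapes below are illustrative choices of mine, assuming paddle 2.x where dygraph is the default mode.

import paddle
import paddle.nn.functional as F

x = paddle.randn((2, 4, 16))   # [batch, in_channels, length]
w = paddle.randn((8, 4, 3))    # [out_channels, in_channels/groups, kernel_size]
b = paddle.randn((8,))
y = F.conv1d(x, w, b, padding=1, stride=1, dilation=1, groups=1, data_format="NCL")
print(y.shape)  # [2, 8, 16]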
The same conclusion also holds for 1x1x1 3D convolution; only the mnemonic changes,
from 13, 03, 02 to 14, 04, 03 (i.e., the count of '1's plus 1).
'''

print('================= elementwise multiply in 1D IN vs. 1x1 1D conv ==================')
N = 256
C = 128
H = 80

x = paddle.randn((N, C, H))
w = paddle.randn((N, C, 1))  # IN (instance norm) multiplies by the reciprocal of the std; w plays that role here.

y = x * w  # [N, C, H]   also analogous to objectness * class probability in YOLOv3.

x_in = L.reshape(x, (1, N * C, H))  # IN: fold N and C into a single new channel dimension
w_r = L.reshape(w, (N * C, 1, 1))
y2 = F.conv1d(x_in, w_r, None, groups=N * C)  # [1, N*C, H]
y2 = L.reshape(y2, (N, C, H))

y = y.numpy()
y2 = y2.numpy()
d = np.sum((y - y2)**2)
print(d)  # should be ~0 (up to float rounding error)
'''
Summary:
The same conclusion also holds for 1x1x1 3D convolution; only the mnemonic changes,
from 13, 03, 02 to 14, 04, 03 (i.e., the count of '1's plus 1).
'''
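
To check the 1x1x1 3D claim concretely, here is a minimal sketch of the same fold-N-into-channels trick with F.conv3d. The shapes are my own choice, and paddle.reshape is used instead of L.reshape so the snippet is self-contained; this is a sketch, not part of the original script.

import numpy as np
import paddle
import paddle.nn.functional as F

N, C, D, H, W = 4, 8, 6, 10, 10
x = paddle.randn((N, C, D, H, W))
w = paddle.randn((N, C, 1, 1, 1))  # one scale per (sample, channel), like 1/std in IN

y = x * w  # plain broadcast elementwise multiply

x_in = paddle.reshape(x, (1, N * C, D, H, W))  # fold N into the channel dimension
w_r = paddle.reshape(w, (N * C, 1, 1, 1, 1))   # one 1x1x1 filter per folded channel
y2 = F.conv3d(x_in, w_r, None, groups=N * C)   # [1, N*C, D, H, W]
y2 = paddle.reshape(y2, (N, C, D, H, W))

d = np.sum((y.numpy() - y2.numpy()) ** 2)
print(d)  # should be ~0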