Code Example #1
    def test_shape_kernel3(self):
        for padding in ['WRAP', 'SAME']:
            layer = tl.LocallyConnected1d(6, 3, padding=padding)
            x = np.array([[0, 1], [2, 3], [4, 5]])
            layer.init(shapes.signature(x))
            y = layer(x)
            self.assertEqual(y.shape, (3, 6))

        for padding in ['VALID']:
            layer = tl.LocallyConnected1d(6, 3, padding=padding)
            x = np.array([[0, 1], [2, 3], [4, 5]])
            layer.init(shapes.signature(x))
            y = layer(x)
            self.assertEqual(y.shape, (1, 6))
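
The test method above is excerpted from a larger test class, so its imports and class definition are not shown. The following is a minimal, self-contained sketch (the imports are an assumption about the surrounding test file, not part of the listing) that reproduces the same shape behavior outside a test harness:

import numpy as np
from trax import layers as tl
from trax import shapes

x = np.array([[0, 1], [2, 3], [4, 5]])  # shape (3, 2): length 3, 2 channels

for padding in ['WRAP', 'SAME', 'VALID']:
    # 6 output filters, kernel size 3.
    layer = tl.LocallyConnected1d(6, 3, padding=padding)
    layer.init(shapes.signature(x))
    y = layer(x)
    # 'WRAP' and 'SAME' preserve the length, giving shape (3, 6);
    # 'VALID' shrinks it to length - kernel_size + 1 = 1, giving (1, 6).
    print(padding, y.shape)
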
Code Example #2
# Assumed imports (not shown in the original listing): `tl` is `trax.layers`
# and `init` is `trax.layers.initializers`.
from trax import layers as tl
from trax.layers import initializers as init


def LocallyConnectedDense(  # pylint: disable=invalid-name
        n_modules,
        n_units,
        kernel_size=1,
        kernel_initializer=init.GlorotUniformInitializer(),
        bias_initializer=init.RandomNormalInitializer(1e-6),
        use_bias=True):
    """Layer using LocallyConnected1d for approximation of Dense layer.

  The layer splits the last axis of a tensor into `n_modules`, then runs
  LocallyConnected1d (grouped convolution) on all those modules, and
  concatenates their results. It is essentially a locally-sensitive
  approximation of Dense layer, with number of parameters smaller by the factor
  of `n_modules / kernel_size`.

  Args:
    n_modules: Indicates how many modules (pixels) should be input and output
        split into for processing.
    n_units: how many outputs (filters) should each module generate.
    kernel_size: The size of the kernel to be used.
    kernel_initializer: Function that creates a matrix of (random) initial
        connection weights `W` for the layer.
    bias_initializer: Function that creates a vector of (random) initial
        bias weights `b` for the layer.
    use_bias: If `True`, compute an affine map `y = Wx + b`; else compute
        a linear map `y = Wx`.

  Returns:
      LocallyConnectedDense base.Layer.
  """
    if n_modules == 1:
        return tl.Dense(n_units,
                        kernel_initializer=kernel_initializer,
                        bias_initializer=bias_initializer,
                        use_bias=use_bias)
    return tl.Serial(
        tl.SplitLastAxis(n_modules),
        tl.LocallyConnected1d(n_units,
                              kernel_size,
                              kernel_initializer=kernel_initializer,
                              bias_initializer=bias_initializer,
                              use_bias=use_bias,
                              padding='WRAP'),
        tl.MergeLastTwoAxes())
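
A minimal usage sketch for the factory above, under the assumption that the `LocallyConnectedDense` defined in this example is in scope and that `tl` resolves to `trax.layers`; the concrete sizes (32 features, 4 modules, 8 units) are illustrative only:

import numpy as np
from trax import shapes

# Split a 32-feature input into 4 modules of 8 features each; each module
# produces 8 outputs, so the merged output again has 4 * 8 = 32 features.
layer = LocallyConnectedDense(n_modules=4, n_units=8)
x = np.zeros(32, dtype=np.float32)
layer.init(shapes.signature(x))
y = layer(x)
print(y.shape)  # (32,)

For these sizes, a plain `tl.Dense(32)` would hold a 32 x 32 weight matrix (1024 weights), while the locally connected version holds 4 blocks of 8 x 8 weights (256 in total), i.e. fewer by the factor `n_modules / kernel_size = 4 / 1` stated in the docstring.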