Exemplo n.º 1
0
    def build(self, input_shape):
        """Create the trainable accumulator weights for this layer.

        Args:
            input_shape: Shape of the input Tensor.
        """
        # Resolve AUTO to a concrete permutation; otherwise take the configured one.
        if self.dimension_permutation == DimensionPermutation.AUTO:
            inferred = infer_dimension_permutation(input_shape)
        else:
            inferred = self.dimension_permutation
        self._inferred_dimension_permutation = inferred

        if inferred == DimensionPermutation.BATCH_FIRST:
            # Collapse every non-batch axis into a single node axis.
            num_nodes_in = np.prod(input_shape[1:])
            self._num_nodes_in = num_nodes_in
        else:
            num_scopes_in, num_decomps_in, _, num_nodes_in = input_shape
            self._num_nodes_in = num_nodes_in
            if num_scopes_in != 1 or num_decomps_in != 1:
                raise ValueError("Number of scopes and decomps must both be 1")

        initializer = self.accumulator_initializer
        constraint = self.linear_accumulator_constraint
        if self.logspace_accumulators:
            # Log-space accumulators: wrap the initializer and drop the linear constraint.
            initializer = logspace_wrapper_initializer(initializer)
            constraint = None

        self.accumulators = self.add_weight(
            name='weights',
            shape=(num_nodes_in,),
            initializer=initializer,
            regularizer=self.accumulator_regularizer,
            constraint=constraint)
Exemplo n.º 2
0
    def build(self, input_shape: Tuple[Optional[int], ...]) -> None:
        """
        Build the internal components for this leaf layer.

        Args:
            input_shape: Shape of the input Tensor.
        """
        # Only the channel axis matters for the weight shape; the two scope
        # axes are broadcast over (size-1 dims in weights_shape below).
        _, _, _, num_channels_in = input_shape

        weights_shape = (1, 1, num_channels_in, self.num_sums)

        initializer = self.accumulator_initializer
        # Fixed local-variable typo: 'contraint' -> 'constraint'.
        accumulator_constraint = self.linear_accumulator_constraint
        if self.logspace_accumulators:
            # Wrap the initializer so initial values live in log space and
            # switch to the matching log-space constraint.
            initializer = logspace_wrapper_initializer(initializer)
            accumulator_constraint = self.logspace_accumulator_constraint

        self._accumulators = self.add_weight(
            name="sum_weights",
            shape=weights_shape,
            initializer=initializer,
            regularizer=self.accumulator_regularizer,
            constraint=accumulator_constraint,
        )
        super(DenseSum, self).build(input_shape)
Exemplo n.º 3
0
    def build(self, input_shape):
        """Create the trainable sum-weight accumulators for this layer.

        Args:
            input_shape: 4-tuple unpacked as
                (num_scopes, num_decomps, ?, num_nodes_in) — third axis
                meaning not visible here; presumably batch, verify at caller.
        """
        # Create a trainable weight variable for this layer.
        self._num_scopes, self._num_decomps, _, num_nodes_in = input_shape

        weights_shape = (self._num_scopes, self._num_decomps, num_nodes_in, self.num_sums)

        initializer = self.accumulator_initializer
        accumulator_constraint = self.linear_accumulator_constraint
        if self.logspace_accumulators:
            # Wrap the local `initializer` (same value, consistent with the
            # sibling build() implementations) and drop the linear constraint
            # since the weights live in log space.
            initializer = logspace_wrapper_initializer(initializer)
            accumulator_constraint = None

        self._accumulators = self.add_weight(
            name='sum_weights', shape=weights_shape, initializer=initializer,
            regularizer=self.accumulator_regularizer, constraint=accumulator_constraint
        )
        super(DenseSum, self).build(input_shape)
Exemplo n.º 4
0
    def build(self, input_shape):
        """Create the trainable sum-weight accumulators for this layer.

        Args:
            input_shape: Shape of the input Tensor; only the last (channel)
                axis is used for the weight shape.
        """
        # The vertical/horizontal scope axes are broadcast over (size-1 dims
        # in weights_shape), so their sizes are intentionally discarded.
        _, _, _, num_channels_in = input_shape

        weights_shape = (1, 1, num_channels_in, self.num_sums)

        initializer = self.accumulator_initializer
        # Fixed local-variable typo: 'contraint' -> 'constraint'.
        accumulator_constraint = self.linear_accumulator_constraint
        if self.logspace_accumulators:
            # Log-space accumulators: wrap the initializer and drop the
            # linear-space constraint.
            initializer = logspace_wrapper_initializer(initializer)
            accumulator_constraint = None

        self.accumulators = self.add_weight(
            name='sum_weights', shape=weights_shape, initializer=initializer,
            regularizer=self.accumulator_regularizer, constraint=accumulator_constraint
        )
        super(Conv2DSum, self).build(input_shape)
Exemplo n.º 5
0
    def build(self, input_shape):
        """Create the accumulator weight vector for this layer.

        Args:
            input_shape: 4-tuple whose scope and decomp axes must both be 1.

        Raises:
            ValueError: If the scope or decomp axis is not 1.
        """
        _, scopes_in, decomps_in, nodes_in = input_shape
        self._num_nodes_in = nodes_in

        if not (scopes_in == 1 and decomps_in == 1):
            raise ValueError("Number of scopes and decomps must both be 1")

        if self.logspace_accumulators:
            # Log-space accumulators: wrapped initializer, no linear constraint.
            initializer = logspace_wrapper_initializer(self.accumulator_initializer)
            constraint = None
        else:
            initializer = self.accumulator_initializer
            constraint = self.linear_accumulator_constraint

        self.accumulators = self.add_weight(
            name='weights',
            shape=(nodes_in,),
            initializer=initializer,
            regularizer=self.accumulator_regularizer,
            constraint=constraint)
Exemplo n.º 6
0
    def build(self, input_shape: Tuple[Optional[int], ...]) -> None:
        """
        Build the internal components for this layer.

        Args:
            input_shape: Shape of the input Tensor, unpacked as
                (?, num_scopes, num_decomps, num_nodes_in).
        """
        # Create a trainable weight variable for this layer.
        _, self._num_scopes, self._num_decomps, self._num_nodes_in = input_shape

        weights_shape = (
            self._num_scopes,
            self._num_decomps,
            self._num_nodes_in,
            self.num_sums,
        )

        initializer = self.accumulator_initializer
        accumulator_constraint = self.linear_accumulator_constraint
        if self.logspace_accumulators:
            # Wrap the local `initializer` (same value as the attribute,
            # consistent with the linear-space path) and use the log-space
            # constraint instead of the linear one.
            initializer = logspace_wrapper_initializer(initializer)
            accumulator_constraint = self.logspace_accumulator_constraint

        self._accumulators = self.add_weight(
            name="sum_weights",
            shape=weights_shape,
            initializer=initializer,
            regularizer=self.accumulator_regularizer,
            constraint=accumulator_constraint,
        )
        # Project the freshly initialized weights onto the constraint set so
        # the first forward pass already sees constrained accumulators.
        if accumulator_constraint is not None:
            self._accumulators.assign(
                accumulator_constraint(self._accumulators))
        # Constraints that normalize on their own make an explicit forward
        # normalization redundant.
        self._forward_normalize = not isinstance(
            accumulator_constraint,
            (GreaterEqualEpsilonNormalized, LogNormalized))
        super(DenseSum, self).build(input_shape)