def _parameter_properties(cls, dtype, num_classes=None):
    """Returns a dict mapping parameter names to their properties."""
    # Zero-arg factory: Softplus bijector bounded below by eps(dtype).
    softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))
    return dict(
        total_count=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                parameter_properties.BIJECTOR_NOT_IMPLEMENTED)),
        concentration1=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=softplus_low),
        concentration0=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=softplus_low))
# Example 2
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     # Zero-arg factory: Softplus bijector bounded below by eps(dtype).
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))
     return dict(
         distribution=parameter_properties.BatchedComponentProperties(),
         shift=parameter_properties.ParameterProperties(),
         scale=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low),
         tailweight=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low))
# Example 3
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))
     return dict(
         loc=parameter_properties.ParameterProperties(),
         scale=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low),
         low=parameter_properties.ParameterProperties(),
         # TODO(b/169874884): Support decoupled parameterization.
         high=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=(
                 parameter_properties.BIJECTOR_NOT_IMPLEMENTED)))
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))
     return dict(
         temperature=parameter_properties.ParameterProperties(
             shape_fn=lambda sample_shape: sample_shape[:-1],
             default_constraining_bijector_fn=softplus_low),
         logits=parameter_properties.ParameterProperties(event_ndims=1),
         probs=parameter_properties.ParameterProperties(
             event_ndims=1,
             default_constraining_bijector_fn=(
                 softmax_centered_bijector.SoftmaxCentered),
             is_preferred=False))
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     not_implemented = parameter_properties.BIJECTOR_NOT_IMPLEMENTED
     return dict(
         loc=parameter_properties.ParameterProperties(event_ndims=1),
         atol=parameter_properties.ParameterProperties(
             event_ndims=1,
             default_constraining_bijector_fn=not_implemented,
             is_preferred=False),
         rtol=parameter_properties.ParameterProperties(
             event_ndims=1,
             default_constraining_bijector_fn=not_implemented,
             is_preferred=False))
# Example 6
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     from tensorflow_probability.python.bijectors import softplus  # pylint:disable=g-import-not-at-top
     softplus_low = lambda: softplus.Softplus(low=dtype_util.eps(dtype))
     return dict(
         base_kernel=parameter_properties.BatchedComponentProperties(),
         fixed_inputs=parameter_properties.ParameterProperties(
             # Event rank follows the base kernel's feature rank.
             event_ndims=lambda self: self.base_kernel.feature_ndims + 1),
         fixed_inputs_mask=parameter_properties.ParameterProperties(
             event_ndims=1),
         diag_shift=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low),
         _precomputed_divisor_matrix_cholesky=(
             parameter_properties.ParameterProperties(event_ndims=2)))
# Example 7
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     from tensorflow_probability.python.bijectors import softplus  # pylint:disable=g-import-not-at-top
     softplus_low = lambda: softplus.Softplus(low=dtype_util.eps(dtype))

     def positive():
         # A fresh ParameterProperties constrained to be positive.
         return parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low)

     return dict(
         bias_variance=positive(),
         exponent=positive(),
         slope_variance=positive(),
         shift=parameter_properties.ParameterProperties())
# Example 8
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))

     def rate_properties():
         # Rates are positive but the log-rate parameterization is preferred.
         return parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low,
             is_preferred=False)

     return dict(
         rate1=rate_properties(),
         rate2=rate_properties(),
         log_rate1=parameter_properties.ParameterProperties(),
         log_rate2=parameter_properties.ParameterProperties())
# Example 9
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))
     return dict(
         loc=parameter_properties.ParameterProperties(event_ndims=1),
         scale_diag=parameter_properties.ParameterProperties(
             event_ndims=1,
             default_constraining_bijector_fn=softplus_low),
         scale_identity_multiplier=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low,
             is_preferred=False))
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))

     def positive():
         # A fresh ParameterProperties constrained to be positive.
         return parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low)

     return dict(
         concentration=positive(),
         scale=positive(),
         upper_bound=positive())
# Example 11
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     # Both parameters append `num_classes` as the rightmost dimension.
     class_shape_fn = lambda sample_shape: ps.concat(
         [sample_shape, [num_classes]], axis=0)
     return dict(
         logits=parameter_properties.ParameterProperties(
             event_ndims=1,
             shape_fn=class_shape_fn),
         probs=parameter_properties.ParameterProperties(
             event_ndims=1,
             shape_fn=class_shape_fn,
             default_constraining_bijector_fn=(
                 softmax_centered_bijector.SoftmaxCentered),
             is_preferred=False))
# Example 12
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     from tensorflow_probability.python.bijectors import ascending  # pylint:disable=g-import-not-at-top
     from tensorflow_probability.python.bijectors import softplus  # pylint:disable=g-import-not-at-top
     return dict(
         kernels=parameter_properties.BatchedComponentProperties(
             event_ndims=lambda self: [0 for _ in self.kernels]),
         locs=parameter_properties.ParameterProperties(
             event_ndims=1,
             # The class itself is a zero-arg bijector factory.
             default_constraining_bijector_fn=ascending.Ascending),
         slopes=parameter_properties.ParameterProperties(
             event_ndims=1,
             default_constraining_bijector_fn=(
                 lambda: softplus.Softplus(low=dtype_util.eps(dtype)))))
# Example 13
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     from tensorflow_probability.python.bijectors import softplus  # pylint:disable=g-import-not-at-top
     softplus_low = lambda: softplus.Softplus(low=dtype_util.eps(dtype))

     def positive():
         # A fresh ParameterProperties constrained above eps(dtype).
         return parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low)

     return dict(
         amplitude=positive(),
         length_scale=positive(),
         # NOTE: unlike the others, no epsilon lower bound here.
         inverse_length_scale=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus.Softplus),
         scale_mixture_rate=positive())
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     # Square matrix: repeat the trailing dimension of the sample shape.
     square_shape_fn = lambda sample_shape: ps.concat(
         [sample_shape, sample_shape[-1:]], axis=0)
     return dict(
         loc=parameter_properties.ParameterProperties(event_ndims=1),
         covariance_matrix=parameter_properties.ParameterProperties(
             event_ndims=2,
             shape_fn=square_shape_fn,
             default_constraining_bijector_fn=(
                 # Unconstrained vector -> lower-triangular factor with
                 # shifted diagonal -> L @ L^T.
                 lambda: chain_bijector.Chain([
                     cholesky_outer_product_bijector.CholeskyOuterProduct(),
                     fill_scale_tril_bijector.FillScaleTriL(
                         diag_shift=dtype_util.eps(dtype)),
                 ]))))
# Example 15
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     # df is constrained to lie above 2 (in the parameter's dtype).
     df_bijector_fn = lambda: softplus_bijector.Softplus(
         low=dtype_util.as_numpy_dtype(dtype)(2.))
     return dict(
         df=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=df_bijector_fn),
         index_points=parameter_properties.ParameterProperties(
             event_ndims=lambda self: self.kernel.feature_ndims + 1,
             shape_fn=parameter_properties.SHAPE_FN_NOT_IMPLEMENTED),
         kernel=parameter_properties.BatchedComponentProperties(),
         observation_noise_variance=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=(
                 lambda: softplus_bijector.Softplus(
                     low=dtype_util.eps(dtype))),
             shape_fn=parameter_properties.SHAPE_FN_NOT_IMPLEMENTED))
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))

     def positive():
         # A fresh ParameterProperties constrained to be positive.
         return parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=softplus_low)

     return dict(
         concentration=positive(),
         mixing_concentration=positive(),
         mixing_rate=positive())
# Example 17
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     batched = parameter_properties.BatchedComponentProperties
     return dict(
         loc=parameter_properties.ParameterProperties(event_ndims=1),
         precision_factor=batched(),
         precision=batched(),
         nonzeros=batched(event_ndims=1))
# Example 18
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     return dict(
         df=parameter_properties.ParameterProperties(
             # Drop the two rightmost dimensions of the sample shape.
             shape_fn=lambda sample_shape: sample_shape[:-2],
             default_constraining_bijector_fn=(
                 parameter_properties.BIJECTOR_NOT_IMPLEMENTED)),
         scale=parameter_properties.BatchedComponentProperties())
# Example 19
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     # Concentration is constrained to lie strictly above 1.
     concentration_bijector_fn = lambda: softplus_bijector.Softplus(
         low=tf.convert_to_tensor(1. + dtype_util.eps(dtype), dtype=dtype))
     return dict(
         concentration=parameter_properties.ParameterProperties(
             shape_fn=lambda sample_shape: sample_shape[:-2],
             default_constraining_bijector_fn=concentration_bijector_fn))
# Example 20
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))
     return dict(
         scores=parameter_properties.ParameterProperties(
             event_ndims=1,
             default_constraining_bijector_fn=softplus_low))
# Example 21
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     # Power is constrained to lie strictly above 1; the tensor conversion
     # stays inside the lambda so it happens only when the bijector is built.
     power_bijector_fn = lambda: softplus_bijector.Softplus(
         low=tf.convert_to_tensor(1. + dtype_util.eps(dtype), dtype=dtype))
     return dict(
         power=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=power_bijector_fn))
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     from tensorflow_probability.python.bijectors import softplus  # pylint:disable=g-import-not-at-top
     softplus_low = lambda: softplus.Softplus(low=dtype_util.eps(dtype))
     return dict(
         kernel=parameter_properties.BatchedComponentProperties(),
         scale_diag=parameter_properties.ParameterProperties(
             # Event rank follows the kernel's feature rank.
             event_ndims=lambda self: self.kernel.feature_ndims,
             default_constraining_bijector_fn=softplus_low))
# Example 23
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     softplus_low = lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))
     return dict(
         loc=parameter_properties.ParameterProperties(event_ndims=1),
         scale_diag=parameter_properties.ParameterProperties(
             event_ndims=1,
             default_constraining_bijector_fn=softplus_low),
         scale_perturb_factor=parameter_properties.ParameterProperties(
             event_ndims=2,
             shape_fn=parameter_properties.SHAPE_FN_NOT_IMPLEMENTED,
             is_preferred=False),
         scale_perturb_diag=parameter_properties.ParameterProperties(
             event_ndims=1,
             is_preferred=False,
             default_constraining_bijector_fn=softplus_low))
# Example 24
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     scale_diag_properties = parameter_properties.ParameterProperties(
         event_ndims=1,
         default_constraining_bijector_fn=(
             lambda: softplus.Softplus(low=dtype_util.eps(dtype))))
     return {'scale_diag': scale_diag_properties}
# Example 25
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     # Square matrix: repeat the trailing dimension of the sample shape.
     square_shape_fn = lambda sample_shape: ps.concat(
         [sample_shape, sample_shape[-1:]], axis=0)
     return dict(
         scale_tril=parameter_properties.ParameterProperties(
             event_ndims=2,
             shape_fn=square_shape_fn,
             default_constraining_bijector_fn=(
                 fill_triangular_bijector.FillTriangular)))
# Example 26
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     from tensorflow_probability.python.bijectors import softplus as softplus_bijector  # pylint:disable=g-import-not-at-top
     return dict(
         amplitudes=parameter_properties.ParameterProperties(
             event_ndims=1,
             # FIX: `default_constraining_bijector_fn` must be a zero-arg
             # callable that builds the bijector; the original passed an
             # already-constructed Softplus instance, unlike every other
             # `_parameter_properties` in this file, which wraps the
             # construction in a lambda.
             default_constraining_bijector_fn=(
                 lambda: softplus_bijector.Softplus(
                     low=dtype_util.eps(dtype)))),
         kernel=parameter_properties.BatchedComponentProperties(event_ndims=1))
# Example 27
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     not_implemented = parameter_properties.BIJECTOR_NOT_IMPLEMENTED
     return dict(
         total_count=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=not_implemented,
             # The method `_sample_bates` currently constructs intermediate
             # samples with a shape that depends on `total_count`, so, although
             # `total_count` is not *inherently* a shape parameter, we annotate
             # it as one in the current implementation (making it the rare case
             # of a shape parameter that also has batch semantics). This could
             # be removed if a different sampling method (eg, rejection
             # sampling) were used.
             specifies_shape=True),
         low=parameter_properties.ParameterProperties(),
         # TODO(b/169874884): Support decoupled parameterization.
         high=parameter_properties.ParameterProperties(
             default_constraining_bijector_fn=not_implemented))
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     shape_not_implemented = parameter_properties.SHAPE_FN_NOT_IMPLEMENTED

     def bin_properties():
         # A fresh vector-valued property with no shape fn or bijector.
         return parameter_properties.ParameterProperties(
             event_ndims=1,
             shape_fn=shape_not_implemented,
             default_constraining_bijector_fn=(
                 parameter_properties.BIJECTOR_NOT_IMPLEMENTED))

     return dict(
         bin_widths=bin_properties(),
         bin_heights=bin_properties(),
         knot_slopes=parameter_properties.ParameterProperties(
             event_ndims=1,
             shape_fn=shape_not_implemented,
             default_constraining_bijector_fn=(
                 lambda: softplus_bijector.Softplus(
                     low=dtype_util.eps(dtype)))),
         range_min=parameter_properties.ParameterProperties(
             shape_fn=shape_not_implemented))
 def _parameter_properties(cls, dtype):
     """Returns a dict mapping parameter names to their properties."""
     not_implemented = parameter_properties.BIJECTOR_NOT_IMPLEMENTED
     return dict(
         rightmost_transposed_ndims=parameter_properties.ParameterProperties(
             # The shape fn always yields the empty (scalar) shape.
             shape_fn=lambda sample_shape: [],
             default_constraining_bijector_fn=not_implemented,
             is_preferred=False),
         perm=parameter_properties.ParameterProperties(
             event_ndims=1,
             shape_fn=parameter_properties.SHAPE_FN_NOT_IMPLEMENTED,
             default_constraining_bijector_fn=not_implemented))
# Example 30
 def _parameter_properties(cls, dtype, num_classes=None):
     """Returns a dict mapping parameter names to their properties."""
     return dict(
         index_points=parameter_properties.ParameterProperties(
             # Event rank follows the kernel's feature rank.
             event_ndims=lambda self: self.kernel.feature_ndims + 1,
             shape_fn=parameter_properties.SHAPE_FN_NOT_IMPLEMENTED),
         kernel=parameter_properties.BatchedComponentProperties(),
         observation_noise_variance=parameter_properties.ParameterProperties(
             event_ndims=0,
             shape_fn=lambda sample_shape: sample_shape[:-1],
             default_constraining_bijector_fn=(
                 lambda: softplus_bijector.Softplus(
                     low=dtype_util.eps(dtype)))))