def _parameter_properties(cls, dtype, num_classes=None):
  """Per-parameter property annotations for `cutpoints` and `loc`.

  Args:
    dtype: Unused here; present for signature compatibility with sibling
      implementations that build dtype-dependent bijectors.
    num_classes: Unused here; the `cutpoints` shape_fn is intentionally
      left unimplemented.

  Returns:
    A `dict` mapping parameter names to
    `parameter_properties.ParameterProperties`.
  """
  # The default constraining bijector for `cutpoints` is the inverse of
  # `Ordered`, provided as a zero-argument factory.
  make_ascending = lambda: invert_bijector.Invert(ordered_bijector.Ordered())
  return {
      'cutpoints': parameter_properties.ParameterProperties(
          event_ndims=1,
          shape_fn=parameter_properties.SHAPE_FN_NOT_IMPLEMENTED,
          default_constraining_bijector_fn=make_ascending),
      'loc': parameter_properties.ParameterProperties(),
  }
def _parameter_properties(cls, dtype, num_classes=None):
  # pylint: disable=g-long-lambda
  """Per-parameter property annotations for this distribution.

  Args:
    dtype: Dtype used to size the `Softplus` lower bound
      (`dtype_util.eps(dtype)`) for the `rtol`/`atol` constraining
      bijectors.
    num_classes: Optional integer used by the `shape_fn`s to size the
      `outcomes` vector and the trailing dimension of `logits`/`probs`.

  Returns:
    A `dict` mapping parameter names to
    `parameter_properties.ParameterProperties`.
  """
  return dict(
      outcomes=parameter_properties.ParameterProperties(
          event_ndims=None,
          shape_fn=lambda sample_shape: [num_classes],
          # FIX: wrap in a zero-arg lambda so a *factory* is supplied rather
          # than an already-constructed bijector instance.  This matches the
          # `rtol`/`atol` entries below and the sibling
          # `_parameter_properties` implementations in this file, all of
          # which pass callables that build the bijector on demand.
          default_constraining_bijector_fn=(
              lambda: invert_bijector.Invert(ordered_bijector.Ordered()))),
      logits=parameter_properties.ParameterProperties(
          event_ndims=1,
          shape_fn=lambda sample_shape: ps.concat(
              [sample_shape, [num_classes]], axis=0)),
      probs=parameter_properties.ParameterProperties(
          event_ndims=1,
          shape_fn=lambda sample_shape: ps.concat(
              [sample_shape, [num_classes]], axis=0),
          # `SoftmaxCentered` is a class, i.e. already a zero-arg factory.
          default_constraining_bijector_fn=(
              softmax_centered_bijector.SoftmaxCentered),
          is_preferred=False),
      rtol=parameter_properties.ParameterProperties(
          event_ndims=None,  # TODO(b/187469130): standardize batch semantics.
          # Constrain to strictly positive values via Softplus bounded away
          # from zero by the dtype's machine epsilon.
          default_constraining_bijector_fn=(
              lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))),
          is_preferred=False),
      atol=parameter_properties.ParameterProperties(
          event_ndims=None,  # TODO(b/187469130): standardize batch semantics.
          default_constraining_bijector_fn=(
              lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))),
          is_preferred=False))
def _parameter_properties(cls, dtype, num_classes=None):
  # pylint: disable=g-long-lambda
  """Per-parameter property annotations for `cutpoints` and `loc`.

  Args:
    dtype: Unused here; present for signature compatibility with sibling
      implementations that build dtype-dependent bijectors.
    num_classes: Optional integer appended to `sample_shape` by the
      `cutpoints` shape_fn.

  Returns:
    A `dict` mapping parameter names to
    `parameter_properties.ParameterProperties`.
  """
  # NOTE(review): the shape_fn appends [num_classes]; confirm against the
  # distribution's definition whether `cutpoints` should instead have
  # num_classes - 1 entries.
  make_ascending = lambda: invert_bijector.Invert(ordered_bijector.Ordered())
  cutpoints_shape = lambda sample_shape: ps.concat(
      [sample_shape, [num_classes]], axis=0)
  return {
      'cutpoints': parameter_properties.ParameterProperties(
          event_ndims=1,
          shape_fn=cutpoints_shape,
          default_constraining_bijector_fn=make_ascending),
      'loc': parameter_properties.ParameterProperties(),
  }