Example #1
 def new(params,
         event_shape=(),
         alpha_activation=tf.nn.softplus,
         beta_activation=tf.nn.softplus,
         clip_for_stable=True,
         validate_args=False,
         name="BetaLayer"):
     r"""Create the distribution instance from a `params` vector."""
     params = tf.convert_to_tensor(value=params, name='params')
     alpha_activation = parse_activation(alpha_activation, 'tf')
     beta_activation = parse_activation(beta_activation, 'tf')
     event_shape = dist_util.expand_to_vector(
         tf.convert_to_tensor(value=event_shape,
                              name='event_shape',
                              dtype=tf.int32),
         tensor_name='event_shape',
     )
     output_shape = tf.concat(
         [tf.shape(input=params)[:-1], event_shape],
         axis=0,
     )
     # split params into alpha (concentration1) and beta (concentration0)
     concentration1, concentration0 = tf.split(params, 2, axis=-1)
     concentration1 = alpha_activation(concentration1)
     concentration0 = beta_activation(concentration0)
     if clip_for_stable:
         concentration0 = tf.clip_by_value(concentration0, 1e-3, 1e3)
         concentration1 = tf.clip_by_value(concentration1, 1e-3, 1e3)
     return tfd.Independent(
         tfd.Beta(concentration1=tf.reshape(concentration1, output_shape),
                  concentration0=tf.reshape(concentration0, output_shape),
                  validate_args=validate_args),
         reinterpreted_batch_ndims=tf.size(input=event_shape),
         name=name,
     )
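A minimal, self-contained sketch of the same parameterization (illustrative shapes; `parse_activation` and `dist_util` are helpers from the surrounding library, so plain TensorFlow Probability calls stand in for them):

 import tensorflow as tf
 import tensorflow_probability as tfp

 tfd = tfp.distributions

 # the last axis of params holds the alpha and beta halves: (batch, 2 * event_size)
 params = tf.random.normal([32, 2 * 10])
 alpha, beta = tf.split(params, 2, axis=-1)
 # softplus keeps both concentrations positive; the clip mirrors clip_for_stable
 alpha = tf.clip_by_value(tf.nn.softplus(alpha), 1e-3, 1e3)
 beta = tf.clip_by_value(tf.nn.softplus(beta), 1e-3, 1e3)
 dist = tfd.Independent(
     tfd.Beta(concentration1=alpha, concentration0=beta),
     reinterpreted_batch_ndims=1)
 print(dist.batch_shape, dist.event_shape)  # (32,) (10,)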
Example #2
 def __init__(self,
              event_shape=(),
              mean_activation='softplus',
              disp_activation='softplus1',
              dispersion='full',
              inflation='full',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
     disp = _dispersion(dispersion,
                        event_shape,
                        is_logits=True,
                        name="dispersion")
     rate = _dispersion(inflation,
                        event_shape,
                        is_logits=True,
                        name="inflation")
     super(ZINegativeBinomialDispLayer, self).__init__(
         lambda t: type(self).new(
             t,
             event_shape,
             mean_activation=parse_activation(mean_activation, self),
             disp_activation=parse_activation(disp_activation, self),
             disp=disp,
             rate=rate,
             validate_args=validate_args,
         ), convert_to_tensor_fn, **kwargs)
     self.disp = disp
     self.rate = rate
Example #3
 def __init__(self,
              event_shape=(),
              loc_activation='linear',
              scale_activation='softplus1',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
   super(LogNormalLayer, self).__init__(
       lambda t: type(self).new(
           t, event_shape, parse_activation(loc_activation, self),
           parse_activation(scale_activation, self), validate_args),
       convert_to_tensor_fn, **kwargs)
Example #4
 def __init__(self,
              event_shape=(),
              convert_to_tensor_fn=tfd.Distribution.sample,
              concentration_activation='softplus1',
              rate_activation='softplus1',
              validate_args=False,
              **kwargs):
   super(GammaLayer, self).__init__(
       lambda t: type(self).new(
           t, event_shape, parse_activation(concentration_activation, self),
           parse_activation(rate_activation, self), validate_args),
       convert_to_tensor_fn, **kwargs)
Example #5
 def __init__(self,
              event_shape,
              covariance='diag',
              loc_activation='identity',
              scale_activation='softplus1',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
   super(MultivariateNormalLayer, self).__init__(
       lambda t: type(self).new(
           t, event_shape, covariance, parse_activation(loc_activation, self),
           parse_activation(scale_activation, self), validate_args),
       convert_to_tensor_fn, **kwargs)
Example #6
 def __init__(self,
              event_shape=(),
              mean_activation='softplus',
              disp_activation='softplus1',
              dispersion='full',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
   self.dispersion = dispersion
   super(ZINegativeBinomialDispLayer, self).__init__(
       lambda t: type(self).new(
           t, event_shape, parse_activation(mean_activation, self),
           parse_activation(disp_activation, self), dispersion, validate_args),
       convert_to_tensor_fn, **kwargs)
Example #7
 def __init__(self,
              rank,
              filters,
              kernel_size,
              strides=1,
              padding='valid',
              data_format=None,
              dilation_rate=1,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              name=None,
              **kwargs):
     super(Conv, self).__init__()
     self.rank = rank
     self.filters = filters
     self.kernel_size = conv_utils.normalize_tuple(kernel_size, rank,
                                                   'kernel_size')
     self.strides = conv_utils.normalize_tuple(strides, rank, 'strides')
     self.padding = conv_utils.normalize_padding(padding)
     if self.padding == 'causal' and not isinstance(self, Conv1D):
         raise ValueError('Causal padding is only supported for `Conv1D` '
                          'and `SeparableConv1D`.')
     self.data_format = conv_utils.normalize_data_format(data_format)
     self.dilation_rate = conv_utils.normalize_tuple(
         dilation_rate, rank, 'dilation_rate')
     self.activation = parse_activation(activation, self)
     self.use_bias = use_bias
     self.kernel_initializer = parse_initializer(kernel_initializer, self)
     self.bias_initializer = parse_initializer(bias_initializer, self)
Example #8
 def new(params,
         event_shape=(),
         count_activation=tf.nn.softplus,
         validate_args=False,
         name='BinomialLayer'):
     r"""Create the distribution instance from a `params` vector."""
     count_activation = parse_activation(count_activation, 'tf')
     params = tf.convert_to_tensor(value=params, name='params')
     event_shape = dist_util.expand_to_vector(
         tf.convert_to_tensor(value=event_shape,
                              name='event_shape',
                              dtype=tf.int32),
         tensor_name='event_shape',
     )
     output_shape = tf.concat((tf.shape(params)[:-1], event_shape), axis=0)
     total_count, logits = tf.split(params, 2, axis=-1)
     total_count = tf.reshape(total_count, output_shape)
     logits = tf.reshape(logits, output_shape)
     return tfd.Independent(
         tfd.Binomial(total_count=count_activation(total_count),
                      logits=logits,
                      validate_args=validate_args),
         reinterpreted_batch_ndims=tf.size(event_shape),
         name=name,
     )
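The same split convention, sketched with plain TFP calls (shapes are illustrative, and `tf.nn.softplus` stands in for the resolved `count_activation`):

 import tensorflow as tf
 import tensorflow_probability as tfp

 tfd = tfp.distributions

 # first half of the last axis -> total_count, second half -> raw logits
 params = tf.random.normal([16, 2 * 5])
 total_count, logits = tf.split(params, 2, axis=-1)
 dist = tfd.Independent(
     tfd.Binomial(total_count=tf.nn.softplus(total_count), logits=logits),
     reinterpreted_batch_ndims=1)
 print(dist.mean().shape)  # (16, 5)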
Example #9
 def __init__(self,
              event_shape=(),
              mean_activation='softplus',
              disp_activation='softplus1',
              dispersion='full',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
   dispersion = str(dispersion).lower()
   self.dispersion = dispersion
    assert dispersion in ('full', 'single', 'share'), \
      "Only three dispersion values are supported: 'full', 'single' and " + \
        "'share', but given: %s" % dispersion
   super(NegativeBinomialDispLayer, self).__init__(
       lambda t: type(self).new(
           t, event_shape, parse_activation(mean_activation, self),
           parse_activation(disp_activation, self), dispersion, validate_args),
       convert_to_tensor_fn, **kwargs)
Example #10
 def __init__(self,
              event_shape=(),
              n_components=2,
              covariance='none',
              loc_activation='linear',
              scale_activation='softplus1',
              convert_to_tensor_fn=tfp.distributions.Distribution.sample,
              validate_args=False,
              **kwargs):
   super().__init__(
       lambda params: MixtureGaussianLayer.new(
           params, event_shape, n_components, covariance,
           parse_activation(loc_activation, self),
           parse_activation(scale_activation, self), validate_args),
       convert_to_tensor_fn, **kwargs)
   self.event_shape = event_shape
   self.n_components = n_components
   self.covariance = str(covariance).strip().lower()
Example #11
 def __init__(self,
              event_shape=(),
              activation='linear',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
   super(ZIPoissonLayer, self).__init__(
       lambda t: type(self).new(
           t, event_shape, parse_activation(activation, self), validate_args),
       convert_to_tensor_fn, **kwargs)
Example #12
 def new(params,
         event_shape=(),
         count_activation=tf.nn.softplus,
         alpha_activation=tf.nn.softplus,
         clip_for_stable=True,
         validate_args=False,
         name='DirichletMultinomial'):
   r"""Create the distribution instance from a `params` vector."""
   params = tf.convert_to_tensor(value=params, name='params')
   count_activation = parse_activation(count_activation, 'tf')
   alpha_activation = parse_activation(alpha_activation, 'tf')
   total_count = count_activation(params[..., 0])
   concentration = alpha_activation(params[..., 1:])
   if clip_for_stable:
     concentration = tf.clip_by_value(concentration, 1e-3, 1e3)
   return tfd.DirichletMultinomial(total_count=total_count,
                                   concentration=concentration,
                                   validate_args=validate_args,
                                   name=name)
Example #13
 def __init__(self,
              event_shape=(),
              count_activation='exp',
              dispersion='full',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
   super(ZINegativeBinomialLayer, self).__init__(
       lambda t: type(self).new(t, event_shape,
                                parse_activation(count_activation, self),
                                dispersion, validate_args),
       convert_to_tensor_fn, **kwargs)
Example #14
 def __init__(self,
              event_shape=(),
              n_components=2,
              mean_activation='softplus1',
              disp_activation=None,
              dispersion='full',
              alternative=False,
              zero_inflated=False,
              convert_to_tensor_fn=tfp.distributions.Distribution.sample,
              validate_args=False,
              **kwargs):
   if disp_activation is None:
     disp_activation = 'softplus1' if alternative else 'linear'
   super().__init__(
       lambda params: MixtureNegativeBinomialLayer.new(
           params, event_shape, n_components,
           parse_activation(mean_activation, self),
           parse_activation(disp_activation, self), dispersion, alternative,
           zero_inflated, validate_args), convert_to_tensor_fn, **kwargs)
   self.event_shape = event_shape
   self.n_components = n_components
   self.zero_inflated = zero_inflated
Example #15
 def __init__(self,
              units,
              activation=None,
              use_bias=True,
              kernel_initializer='glorot_uniform',
              bias_initializer='zeros',
              **kwargs):
     super(Dense, self).__init__()
     self.units = int(units)
     self.activation = parse_activation(activation, self)
     self.use_bias = use_bias
     self.kernel_initializer = parse_initializer(kernel_initializer, self)
     self.bias_initializer = parse_initializer(bias_initializer, self)
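`parse_activation` and `parse_initializer` are library helpers; a plausible stand-in (an assumption about their behavior, not the library's actual implementation) is the pair of Keras getters that resolve the same string specs:

 import tensorflow as tf

 activation = tf.keras.activations.get('linear')            # str -> callable
 initializer = tf.keras.initializers.get('glorot_uniform')  # str -> Initializer
 kernel = tf.Variable(initializer(shape=(3, 4)))
 outputs = activation(tf.random.normal([2, 3]) @ kernel)    # shape (2, 4)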
Example #16
 def __init__(
     self,
     event_shape: List[int] = (),
     n_components: int = 2,
     low: int = 0,
     bits: int = 8,
     loc_activation: Union[str, Callable[..., tf.Tensor]] = 'identity',
     scale_activation: Union[str, Callable[..., tf.Tensor]] = 'softplus',
     convert_to_tensor_fn: Callable[
         ..., tf.Tensor] = tfp.distributions.Distribution.sample,
     validate_args: bool = False,
     **kwargs,
 ):
     super().__init__(
         lambda params: MixtureQLogisticLayer.new(
             params,
             event_shape=event_shape,
             n_components=n_components,
             low=low,
             bits=bits,
             loc_activation=parse_activation(loc_activation, self),
             scale_activation=parse_activation(scale_activation, self),
             validate_args=validate_args), convert_to_tensor_fn, **kwargs)
Example #17
 def new(params,
         event_shape=(),
         count_activation=tf.nn.softplus,
         validate_args=False,
         name='MultinomialLayer'):
     r"""Create the distribution instance from a `params` vector."""
     params = tf.convert_to_tensor(value=params, name='params')
     count_activation = parse_activation(count_activation, 'tf')
     total_count = count_activation(params[..., 0])
     logits = params[..., 1:]
     return tfd.Multinomial(total_count=total_count,
                            logits=logits,
                            validate_args=validate_args,
                            name=name)
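The same parameter layout, sketched with plain TFP (illustrative shapes): the first entry of the last axis becomes the activated count, the rest are class logits.

 import tensorflow as tf
 import tensorflow_probability as tfp

 tfd = tfp.distributions

 params = tf.random.normal([8, 1 + 3])  # 1 count column + 3 class logits
 dist = tfd.Multinomial(total_count=tf.nn.softplus(params[..., 0]),
                        logits=params[..., 1:])
 print(dist.mean().shape)  # (8, 3)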
Example #18
 def new(params,
         event_shape=(),
         concentration_activation=softplus1,
         concentration_clip=True,
         validate_args=False,
         name="DirichletLayer"):
   r"""Create the distribution instance from a `params` vector."""
   params = tf.convert_to_tensor(value=params, name='params')
    concentration_activation = parse_activation(concentration_activation, 'tf')
    params = concentration_activation(params)
    if concentration_clip:
      # clip the Dirichlet concentration to the numerically stable KL region
      params = tf.clip_by_value(params, 1e-3, 1e3)
   return tfd.Dirichlet(concentration=params,
                        validate_args=validate_args,
                        name=name)
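A self-contained version of the activation-plus-clip step, with plain `tf.nn.softplus` standing in for the library-specific `softplus1` helper (an assumption, since `softplus1` is not defined here):

 import tensorflow as tf
 import tensorflow_probability as tfp

 tfd = tfp.distributions

 params = tf.random.normal([4, 6])
 concentration = tf.clip_by_value(tf.nn.softplus(params), 1e-3, 1e3)
 dist = tfd.Dirichlet(concentration=concentration)
 print(dist.event_shape)  # (6,)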
Example #19
 def __init__(self,
              event_shape=(),
              count_activation='exp',
              dispersion='full',
              convert_to_tensor_fn=tfd.Distribution.sample,
              validate_args=False,
              **kwargs):
     disp = _dispersion(dispersion,
                        event_shape,
                        is_logits=True,
                        name="dispersion")
     super(NegativeBinomialLayer, self).__init__(
         lambda t: type(self).new(
             t,
             event_shape,
             count_activation=parse_activation(count_activation, self),
             validate_args=validate_args,
             disp=disp,
         ), convert_to_tensor_fn, **kwargs)
     self.disp = disp
Example #20
 def __init__(self,
              event_shape=(),
              n_components=2,
              tie_mixtures=False,
              tie_mean=False,
              dispersion='full',
              inflation='full',
              mean_activation='softplus1',
              disp_activation=None,
              alternative=False,
              zero_inflated=False,
              convert_to_tensor_fn=tfp.distributions.Distribution.sample,
              validate_args=False,
              **kwargs):
     if not tie_mixtures:
         if tie_mean and dispersion != 'full':
             raise ValueError(
                 "Mixture distribution has no support for tie_mixtures=False "
                 "and both mean and dispersion are tied")
     if zero_inflated:
         if inflation == 'full' and tie_mean and dispersion != 'full':
             raise ValueError(
                 "ZeroInflated distribution has no support for "
                 "batch-wise inflation rate but tied mean and "
                 "dispersion (this is broadcasting issue).")
      logits, mean = None, None  # disp and rate are always assigned below
     shape = tf.concat(
         [[n_components], tf.nest.flatten(event_shape)], axis=0)
     if tie_mixtures:
         logits = tf.Variable([0.] * n_components,
                              trainable=True,
                              dtype=keras.backend.floatx(),
                              name="mixture_logits")
     if tie_mean:
         mean = tf.Variable(tf.random.normal(shape),
                            trainable=True,
                            dtype=keras.backend.floatx(),
                            name="components_mean")
     disp = _dispersion(dispersion,
                        event_shape,
                        is_logits=not alternative,
                        name='dispersion',
                        n_components=n_components)
     rate = _dispersion(inflation,
                        event_shape,
                        is_logits=True,
                        name='inflation',
                        n_components=n_components)
     if disp_activation is None:
         disp_activation = 'softplus1' if alternative else 'linear'
     super().__init__(
         lambda params: MixtureNegativeBinomialLayer.new(
             params,
             event_shape,
             n_components=n_components,
             mean_activation=parse_activation(mean_activation, self),
             disp_activation=parse_activation(disp_activation, self),
             alternative=alternative,
             zero_inflated=zero_inflated,
             validate_args=validate_args,
             logits=logits,
             mean=mean,
             disp=disp,
             rate=rate), convert_to_tensor_fn, **kwargs)
     self.logits = logits
     self.mean = mean
     self.disp = disp
     self.rate = rate
     self.event_shape = event_shape
     self.n_components = n_components
     self.zero_inflated = zero_inflated
Example #21
 def __init__(self,
              event_shape=(),
              n_components=2,
              covariance='none',
              tie_mixtures=False,
              tie_loc=False,
              tie_scale=False,
              loc_activation='linear',
              scale_activation='softplus1',
              convert_to_tensor_fn=tfp.distributions.Distribution.sample,
              validate_args=False,
              **kwargs):
     event_size = tf.convert_to_tensor(value=tf.reduce_prod(event_shape),
                                       name='event_size',
                                       dtype_hint=tf.int32)
     event_size = dist_util.prefer_static_value(event_size)
      if covariance != 'none':  # 'diag' and 'tril' imply a multivariate Gaussian
         event_shape = event_size
     if not tie_mixtures:
         if tie_loc and tie_scale:
             raise ValueError(
                 "Mixture distribution has no support for tie_mixtures=False "
                 "and both loc and scale are tied")
     logits, loc, scale = None, None, None
     if tie_mixtures:
         logits = tf.Variable([0.] * n_components,
                              trainable=True,
                              dtype=keras.backend.floatx(),
                              name="mixture_logits")
     if tie_loc:
         if covariance == 'none':
             shape = tf.concat([[n_components],
                                tf.nest.flatten(event_shape)],
                               axis=0)
         else:
             shape = (n_components, event_size)
         loc = tf.Variable(
             tf.random.normal(shape),
             trainable=True,
             dtype=keras.backend.floatx(),
             name="components_loc",
         )
     if tie_scale:
         if covariance == 'none':
             shape = tf.concat([[n_components],
                                tf.nest.flatten(event_shape)],
                               axis=0)
         elif covariance == 'diag':
             shape = (n_components, event_size)
         else:
             shape = (n_components, event_size * (event_size + 1) // 2)
         scale = tf.Variable(
             tf.random.normal(shape),
             trainable=True,
             dtype=keras.backend.floatx(),
             name="components_scale",
         )
     super().__init__(
         lambda params: MixtureGaussianLayer.new(
             params,
             event_shape,
             n_components=n_components,
             covariance=covariance,
             loc_activation=parse_activation(loc_activation, self),
             scale_activation=parse_activation(scale_activation, self),
             validate_args=validate_args,
             logits=logits,
             loc=loc,
             scale=scale), convert_to_tensor_fn, **kwargs)
     self.logits = logits
     self.loc = loc
     self.scale = scale
     self.event_shape = event_shape
     self.n_components = n_components
     self.covariance = str(covariance).strip().lower()
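A sketch of what the tied variables feed into: one trainable `mixture_logits` shared across all inputs, combined with per-component location and scale in a `MixtureSameFamily` (illustrative; `MixtureGaussianLayer.new` itself is not shown here, so plain TFP is used):

 import tensorflow as tf
 import tensorflow_probability as tfp

 tfd = tfp.distributions

 n_components, event_size = 2, 3
 # tied mixture logits, as created by tie_mixtures=True above
 logits = tf.Variable(tf.zeros([n_components]), name='mixture_logits')
 loc = tf.random.normal([n_components, event_size])
 scale = tf.nn.softplus(tf.random.normal([n_components, event_size]))
 dist = tfd.MixtureSameFamily(
     mixture_distribution=tfd.Categorical(logits=logits),
     components_distribution=tfd.Independent(
         tfd.Normal(loc, scale), reinterpreted_batch_ndims=1))
 print(dist.event_shape)  # (3,)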