Example no. 1
 def __init__(self,
              n,
              p=None,
              logit_p=None,
              name="Binomial",
              learnable=False):
     self._type = "Binomial"
     if p is not None and logit_p is None:
         ranges = {
             "n": geometric_ranges.UnboundedRange(),
             "p": geometric_ranges.Interval(0., 1.)
         }
         super().__init__(name,
                          n=n,
                          p=p,
                          learnable=learnable,
                          ranges=ranges)
         self.distribution = distributions.BinomialDistribution()
     elif logit_p is not None and p is None:
         ranges = {
             "n": geometric_ranges.UnboundedRange(),
             "logit_p": geometric_ranges.UnboundedRange()
         }
         super().__init__(name,
                          n=n,
                          logit_p=logit_p,
                          learnable=learnable,
                          ranges=ranges)
         self.distribution = distributions.BinomialDistribution()
     else:
         raise ValueError("Either p or " +
                          "logit_p needs to be provided as input")
Example no. 2
 def __init__(self, df, loc, scale, name, learnable=False, is_observed=False):
     self._type = "StudentT"
     ranges = {"df": geometric_ranges.UnboundedRange(),
               "loc": geometric_ranges.UnboundedRange(),
               "scale": geometric_ranges.RightHalfLine(0.)}
     super().__init__(name, df=df, loc=loc, scale=scale, learnable=learnable, ranges=ranges, is_observed=is_observed)
     self.distribution = distributions.StudentTDistribution()
Example no. 3
 def __init__(self,
              total_count,
              probs=None,
              logits=None,
              name="Binomial",
              learnable=False,
              is_observed=False):
     self._type = "Binomial"
     if probs is not None and logits is None:
         ranges = {
             "total_count": geometric_ranges.UnboundedRange(),
             "probs": geometric_ranges.Interval(0., 1.)
         }
         super().__init__(name,
                          total_count=total_count,
                          probs=probs,
                          learnable=learnable,
                          ranges=ranges,
                          is_observed=is_observed)
         self.distribution = distributions.BinomialDistribution()
     elif logits is not None and probs is None:
         ranges = {
             "total_count": geometric_ranges.UnboundedRange(),
             "logits": geometric_ranges.UnboundedRange()
         }
         super().__init__(name,
                          total_count=total_count,
                          logits=logits,
                          learnable=learnable,
                           ranges=ranges,
                           is_observed=is_observed)
         self.distribution = distributions.BinomialDistribution()
     else:
         raise ValueError("Either probs or " +
                          "logits needs to be provided as input")
Example no. 4
 def __init__(self,
              dataset,
              name,
              is_observed=True,
              batch_size=(),
              indices=()):
     self._type = "Empirical"
     ranges = {
         "dataset": geometric_ranges.UnboundedRange(),
         "batch_size": geometric_ranges.UnboundedRange(),
         "indices": geometric_ranges.UnboundedRange()
     }
     super().__init__(name,
                      dataset=dataset,
                      indices=indices,
                      learnable=False,
                      ranges=ranges,
                      is_observed=is_observed)
     self.distribution = distributions.EmpiricalDistribution()
     if batch_size:
         self.distribution.batch_size = batch_size
         self.batch_size = batch_size
     elif indices:
         self.distribution.batch_size = len(indices)
          self.batch_size = len(indices)
     else:
         raise ValueError(
             "Either the indices or the batch size has to be given as input"
         )
Example no. 5
 def __init__(self, low, high, name, learnable=False, has_bias=False, is_observed=False, is_policy=False, is_reward=False):
     self._type = "Uniform"
     ranges = {"low": geometric_ranges.UnboundedRange(),
               "high": geometric_ranges.UnboundedRange()}
     super().__init__(name, low=low, high=high, learnable=learnable,
                      has_bias=has_bias, ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
     self.distribution = distributions.UniformDistribution()
Example no. 6
    def __init__(self, loc, covariance_matrix=None, precision_matrix=None,
                 scale_tril=None, name="Multivariate Normal", learnable=False, has_bias=False,
                 is_observed=False, is_policy=False, is_reward=False):
        self._type = "Multivariate Normal"
        if scale_tril is not None and covariance_matrix is None and precision_matrix is None:
            ranges = {"loc": geometric_ranges.UnboundedRange(),
                      "scale_tril": geometric_ranges.UnboundedRange()}
            super().__init__(name, loc=loc, scale_tril=scale_tril, learnable=learnable,
                             has_bias=has_bias, ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
            self.distribution = distributions.MultivariateNormalDistribution()

        elif scale_tril is None and covariance_matrix is not None and precision_matrix is None:
            ranges = {"loc": geometric_ranges.UnboundedRange(),
                      "covariance_matrix": geometric_ranges.PositiveDefiniteMatrix()}
            super().__init__(name, loc=loc, covariance_matrix=covariance_matrix, learnable=learnable,
                             has_bias=has_bias, ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
            self.distribution = distributions.MultivariateNormalDistribution()

        elif scale_tril is None and covariance_matrix is None and precision_matrix is not None:
            ranges = {"loc": geometric_ranges.UnboundedRange(),
                      "precision_matrix": geometric_ranges.PositiveDefiniteMatrix()}
            super().__init__(name, loc=loc, precision_matrix=precision_matrix, learnable=learnable,
                             has_bias=has_bias, ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
            self.distribution = distributions.MultivariateNormalDistribution()

        else:
            raise ValueError("Either covariance_matrix or precision_matrix or"+
                             "scale_tril needs to be provided as input")
Example no. 7
    def __init__(self,
                 loc,
                 covariance_matrix=None,
                 precision_matrix=None,
                 cholesky_factor=None,
                 name="Multivariate Normal",
                 learnable=False):
        self._type = "Multivariate Normal"
        if cholesky_factor is not None and covariance_matrix is None and precision_matrix is None:
            ranges = {
                "loc": geometric_ranges.UnboundedRange(),
                "cholesky_factor": geometric_ranges.UnboundedRange()
            }
            super().__init__(name,
                             loc=loc,
                             cholesky_factor=cholesky_factor,
                             learnable=learnable,
                             ranges=ranges)
            self.distribution = distributions.MultivariateNormalDistribution()

        elif cholesky_factor is None and covariance_matrix is not None and precision_matrix is None:
            ranges = {
                "loc": geometric_ranges.UnboundedRange(),
                "covariance_matrix": geometric_ranges.PositiveDefiniteMatrix()
            }
            super().__init__(name,
                             loc=loc,
                             covariance_matrix=covariance_matrix,
                             learnable=learnable,
                             ranges=ranges)
            self.distribution = distributions.MultivariateNormalDistribution()

        elif cholesky_factor is None and covariance_matrix is None and precision_matrix is not None:
            ranges = {
                "loc": geometric_ranges.UnboundedRange(),
                "precision_matrix": geometric_ranges.UnboundedRange()
            }
            super().__init__(name,
                             loc=loc,
                             precision_matrix=precision_matrix,
                             learnable=learnable,
                             ranges=ranges)
            self.distribution = distributions.MultivariateNormalDistribution()

        else:
            raise ValueError(
                "Either covariance_matrix or precision_matrix or" +
                "cholesky_factor needs to be provided as input")
Example no. 8
 def __init__(self, loc, scale, name, learnable=False, has_bias=False, is_observed=False, is_policy=False, is_reward=False):
     self._type = "Log Normal"
     ranges = {"loc": geometric_ranges.UnboundedRange(),
               "scale": geometric_ranges.RightHalfLine(0.)}
     super().__init__(name, loc=loc, scale=scale, learnable=learnable,
                      has_bias=has_bias, ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
     self.distribution = distributions.LogNormalDistribution()
Example no. 9
 def __init__(self,
              probs=None,
              logits=None,
              name="Geometric",
              learnable=False,
              has_bias=False,
              is_observed=False):
     self._type = "Geometric"
     if probs is not None and logits is None:
         ranges = {"probs": geometric_ranges.Interval(0., 1.)}
         super().__init__(name,
                          probs=probs,
                          learnable=learnable,
                          has_bias=has_bias,
                          ranges=ranges,
                          is_observed=is_observed)
         self.distribution = distributions.GeometricDistribution()
     elif logits is not None and probs is None:
         ranges = {"logits": geometric_ranges.UnboundedRange()}
         super().__init__(name,
                          logits=logits,
                          learnable=learnable,
                          has_bias=has_bias,
                           ranges=ranges,
                           is_observed=is_observed)
         self.distribution = distributions.GeometricDistribution()
     else:
         raise ValueError("Either probs or " +
                          "logits needs to be provided as input")
Example no. 10
 def __init__(self,
              probs=None,
              logits=None,
              name="Categorical",
              learnable=False,
              is_observed=False):
     self._type = "Categorical"
     if probs is not None and logits is None:
         ranges = {"p": geometric_ranges.Simplex()}
         super().__init__(name,
                          probs=probs,
                          learnable=learnable,
                          ranges=ranges,
                          is_observed=is_observed)
         self.distribution = distributions.CategoricalDistribution()
     elif logits is not None and probs is None:
         ranges = {"logits": geometric_ranges.UnboundedRange()}
         super().__init__(name,
                          logits=logits,
                          learnable=learnable,
                          ranges=ranges,
                          is_observed=is_observed)
         self.distribution = distributions.CategoricalDistribution()
     else:
         raise ValueError("Either probs or " +
                          "logits needs to be provided as input")
Example no. 11
 def __init__(self, total_count, probs=None, logits=None, name="NegativeBinomial",
              learnable=False, has_bias=False, is_observed=False, is_policy=False, is_reward=False):
     self._type = "NegativeBinomial"
     if probs is not None and logits is None:
         ranges = {"total_count": geometric_ranges.UnboundedRange(), #TODO: It should become natural number in the future
                   "probs": geometric_ranges.Interval(0., 1.)}
         super().__init__(name, total_count=total_count, probs=probs, learnable=learnable,
                          has_bias=has_bias, ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
          self.distribution = distributions.NegativeBinomialDistribution()
     elif logits is not None and probs is None:
         ranges = {"total_count": geometric_ranges.UnboundedRange(),
                   "logits": geometric_ranges.UnboundedRange()}
         super().__init__(name, total_count=total_count, logits=logits, learnable=learnable,
                           has_bias=has_bias, ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
         self.distribution = distributions.NegativeBinomialDistribution()
     else:
         raise ValueError("Either probs or " +
                          "logits needs to be provided as input")
Example no. 12
 def __init__(self, mu, sigma, name, learnable=False):
     self._type = "Logit Normal"
     ranges = {
         "mu": geometric_ranges.UnboundedRange(),
         "sigma": geometric_ranges.RightHalfLine(0.)
     }
     super().__init__(name,
                      mu=mu,
                      sigma=sigma,
                      learnable=learnable,
                      ranges=ranges)
     self.distribution = distributions.LogitNormalDistribution()
Example no. 13
 def __init__(self, loc, scale, name, learnable=False):
     self._type = "Logit Normal"
     ranges = {
         "loc": geometric_ranges.UnboundedRange(),
         "scale": geometric_ranges.RightHalfLine(0.)
     }
     super().__init__(name,
                      loc=loc,
                      scale=scale,
                      learnable=learnable,
                      ranges=ranges)
     self.distribution = distributions.LogitNormalDistribution()
Example no. 14
 def __init__(self, value, name, log_determinant=None, learnable=False, has_bias=False, is_observed=False, variable_range=geometric_ranges.UnboundedRange(),
              is_policy=False, is_reward=False):
     self._type = "Deterministic node"
     if not isinstance(log_determinant, PartialLink):
         if log_determinant is None:
             log_determinant = torch.tensor(np.zeros((1, 1))).float().to(device)
          log_determinant = var2link(log_determinant)
     ranges = {"value": variable_range,
               "log_determinant": geometric_ranges.UnboundedRange()}
     super().__init__(name, value=value, log_determinant=log_determinant, learnable=learnable, has_bias=has_bias,
                      ranges=ranges, is_observed=is_observed, is_policy=is_policy, is_reward=is_reward)
     self.distribution = distributions.DeterministicDistribution()
Example no. 15
 def __init__(self,
              value,
              name,
              learnable=False,
              is_observed=False,
              variable_range=geometric_ranges.UnboundedRange()):
     self._type = "Deterministic node"
     ranges = {"value": variable_range}
     super().__init__(name,
                      value=value,
                      learnable=learnable,
                      ranges=ranges,
                      is_observed=is_observed)
     self.distribution = distributions.DeterministicDistribution()
Example no. 16
 def __init__(self,
              mu,
              cov=None,
              chol_cov=None,
              diag_cov=None,
              name="Multivariate Normal",
              learnable=False):
     self._type = "Multivariate Normal"
     if chol_cov is not None and diag_cov is None:
         ranges = {
             "mu": geometric_ranges.UnboundedRange(),
             "chol_cov": geometric_ranges.UnboundedRange()
         }
         super().__init__(name,
                          mu=mu,
                          chol_cov=chol_cov,
                          learnable=learnable,
                          ranges=ranges)
         self.distribution = distributions.CholeskyMultivariateNormal()
     elif diag_cov is not None and chol_cov is None:
         ranges = {
             "mean": geometric_ranges.UnboundedRange(),
             "var": geometric_ranges.RightHalfLine(0.)
         }
         super().__init__(name,
                          mean=mu,
                          var=diag_cov,
                          learnable=learnable,
                          ranges=ranges)
         self.distribution = distributions.NormalDistribution()
     else:
          raise ValueError(
              "Either chol_cov (Cholesky factor of the covariance matrix) or " +
              "diag_cov (diagonal of the covariance matrix) needs to be provided as input"
          )
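The diag_cov branch above can delegate to a plain normal distribution because a multivariate normal with diagonal covariance factorizes into independent univariate normals. A short standalone NumPy sketch of that equivalence (illustrative values, not library code):

    import numpy as np

    rng = np.random.default_rng(0)
    mu = np.array([0.0, 2.0])
    diag_cov = np.array([1.0, 0.25])

    # Sampling from the full multivariate normal with a diagonal covariance...
    x_mvn = rng.multivariate_normal(mu, np.diag(diag_cov), size=5)
    # ...is distributed the same as sampling each coordinate independently
    # with standard deviation sqrt(diag_cov).
    x_ind = mu + np.sqrt(diag_cov) * rng.standard_normal((5, 2))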
Example no. 17
 def __init__(self,
              df,
              name,
              learnable=False,
              has_bias=False,
              is_observed=False):
     self._type = "Chi2"
     ranges = {
         "df": geometric_ranges.UnboundedRange()
     }  #TODO: Natural number
     super().__init__(name,
                      df=df,
                      learnable=learnable,
                      has_bias=has_bias,
                      ranges=ranges,
                      is_observed=is_observed)
     self.distribution = distributions.Chi2Distribution()
Example no. 18
    def __init__(self,
                 dataset,
                 name,
                 learnable=False,
                 has_bias=False,
                 is_observed=False,
                 batch_size=None,
                 indices=None,
                 weights=None):  #TODO: Ugly logic
        self._type = "Empirical"
        input_parameters = {
            "dataset": dataset,
            "batch_size": batch_size,
            "indices": indices,
            "weights": weights
        }
        ranges = {
            par_name: geometric_ranges.UnboundedRange()
            for par_name, par_value in input_parameters.items()
            if par_value is not None
        }
        kwargs = {
            par_name: par_value
            for par_name, par_value in input_parameters.items()
            if par_value is not None
        }
        super().__init__(name,
                         **kwargs,
                         learnable=learnable,
                         has_bias=has_bias,
                         ranges=ranges,
                         is_observed=is_observed)

        if not batch_size:
            if indices:
                batch_size = len(indices)
            else:
                raise ValueError(
                    "Either the indices or the batch size has to be given as input"
                )

        self.batch_size = batch_size
        self.distribution = distributions.EmpiricalDistribution(
            batch_size=batch_size, is_observed=is_observed)
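The two dictionary comprehensions above follow a common pattern: keep only the arguments the caller actually supplied, so that None values never reach the parent constructor or the ranges dictionary. A minimal standalone version of the pattern (names here are illustrative):

    def filter_supplied(**candidates):
        """Return only the keyword arguments whose value is not None."""
        return {name: value for name, value in candidates.items() if value is not None}

    kwargs = filter_supplied(dataset=[1, 2, 3], batch_size=2, indices=None, weights=None)
    assert kwargs == {"dataset": [1, 2, 3], "batch_size": 2}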
Example no. 19
 def __init__(self,
              p=None,
              softmax_p=None,
              name="Categorical",
              learnable=False):
     self._type = "Categorical"
     if p is not None and softmax_p is None:
         ranges = {"p": geometric_ranges.Simplex()}
         super().__init__(name, p=p, learnable=learnable, ranges=ranges)
         self.distribution = distributions.CategoricalDistribution()
     elif softmax_p is not None and p is None:
         ranges = {"softmax_p": geometric_ranges.UnboundedRange()}
         super().__init__(name,
                          softmax_p=softmax_p,
                          learnable=learnable,
                          ranges=ranges)
         self.distribution = distributions.CategoricalDistribution()
     else:
         raise ValueError("Either p or " +
                          "softmax_p needs to be provided as input")
Example no. 20
 def __init__(self,
              loc,
              scale,
              name,
              learnable=False,
              has_bias=False,
              is_observed=False):
     self._type = "Cauchy"
     ranges = {
         "loc": geometric_ranges.UnboundedRange(),
         "scale": geometric_ranges.RightHalfLine(0.)
     }
     super().__init__(name,
                      loc=loc,
                      scale=scale,
                      learnable=learnable,
                      has_bias=has_bias,
                      ranges=ranges,
                      is_observed=is_observed)
     self.distribution = distributions.CauchyDistribution()