Example #1
 def __init__(self,
              batch_shape=torch.Size(),
              event_shape=torch.Size(),
              validate_args=None):
     self._batch_shape = batch_shape
     self._event_shape = event_shape
     if validate_args is not None:
         self._validate_args = validate_args
     if self._validate_args:
         try:
             arg_constraints = self.arg_constraints
         except NotImplementedError:
             arg_constraints = {}
             warnings.warn(
                 f'{self.__class__} does not define `arg_constraints`. '
                 'Please set `arg_constraints = {}` or initialize the distribution '
                 'with `validate_args=False` to turn off validation.')
         for param, constraint in arg_constraints.items():
             if constraints.is_dependent(constraint):
                 continue  # skip constraints that cannot be checked
             if param not in self.__dict__ and isinstance(
                     getattr(type(self), param), lazy_property):
                 continue  # skip checking lazily-constructed args
             value = getattr(self, param)
             valid = constraint.check(value)
             if not valid.all():
                 raise ValueError(
                     f"Expected parameter {param} "
                     f"({type(value).__name__} of shape {tuple(value.shape)}) "
                     f"of distribution {repr(self)} "
                     f"to satisfy the constraint {repr(constraint)}, "
                     f"but found invalid values:\n{value}")
     super(Distribution, self).__init__()
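A quick way to watch this validation path fire is to construct a stock distribution with an out-of-constraint parameter and `validate_args=True`. A minimal sketch using `Normal`, whose `scale` must be positive:

    from torch.distributions import Normal

    # scale violates constraints.positive, so the loop above raises.
    try:
        Normal(loc=0.0, scale=-1.0, validate_args=True)
    except ValueError as err:
        print(err)  # names the parameter, its shape, and the violated constraint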
Example #2
 def __init__(self,
              batch_shape=torch.Size(),
              event_shape=torch.Size(),
              validate_args=None):
     self._batch_shape = batch_shape
     self._event_shape = event_shape
     if validate_args is not None:
         self._validate_args = validate_args
     if self._validate_args:
         try:
             arg_constraints = self.arg_constraints
         except NotImplementedError:
             arg_constraints = {}
             warnings.warn(
                 f'{self.__class__} does not define `arg_constraints`. '
                 'Please set `arg_constraints = {}` or initialize the distribution '
                 'with `validate_args=False` to turn off validation.')
         for param, constraint in arg_constraints.items():
             if constraints.is_dependent(constraint):
                 continue  # skip constraints that cannot be checked
             if param not in self.__dict__ and isinstance(
                     getattr(type(self), param), lazy_property):
                 continue  # skip checking lazily-constructed args
             if not constraint.check(getattr(self, param)).all():
                 raise ValueError(
                     "The parameter {} has invalid values".format(param))
     super(Distribution, self).__init__()
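The loop resolves each parameter by name with `getattr`, so a subclass only has to store its parameters under the attribute names it lists in `arg_constraints`, before calling `super().__init__`. A minimal sketch of that contract; the `Rate` class and its single parameter are invented for illustration:

    import torch
    from torch.distributions import constraints
    from torch.distributions.distribution import Distribution

    class Rate(Distribution):
        # hypothetical toy subclass, not part of torch.distributions
        arg_constraints = {'rate': constraints.positive}

        def __init__(self, rate, validate_args=None):
            self.rate = torch.as_tensor(rate)  # set before super().__init__ runs the checks
            super().__init__(batch_shape=self.rate.shape,
                             validate_args=validate_args)

    Rate(torch.tensor([0.5, 2.0]), validate_args=True)   # passes the check
    try:
        Rate(torch.tensor([0.5, -2.0]), validate_args=True)
    except ValueError as err:
        print(err)  # validation fails for the negative entry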
Example #3
 def __init__(self,
              batch_shape=torch.Size(),
              event_shape=torch.Size(),
              validate_args=None):
     self._batch_shape = batch_shape
     self._event_shape = event_shape
     if validate_args is not None:
         self._validate_args = validate_args
     if self._validate_args:
         for param, constraint in self.params.items():
             if constraints.is_dependent(constraint):
                 continue  # skip constraints that cannot be checked
             if not constraint.check(getattr(self, param)).all():
                 raise ValueError(
                     "The parameter {} has invalid values".format(param))
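This older variant reads the constraint table from a `params` attribute; later releases renamed it to `arg_constraints`, as in the other examples here. The `is_dependent` skip exists because some constraints relate parameters to each other and cannot be checked one value at a time. `Uniform`'s bounds are the standard case:

    from torch.distributions import constraints
    from torch.distributions.uniform import Uniform

    # low < high is a relation between parameters, so each bound is
    # marked dependent and the validation loop skips it.
    print(constraints.is_dependent(Uniform.arg_constraints['low']))  # True
    print(constraints.is_dependent(constraints.positive))            # False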
Example #4
 def __init__(self, batch_shape=torch.Size(), event_shape=torch.Size(), validate_args=None):
     self._batch_shape = batch_shape
     self._event_shape = event_shape
     if validate_args is not None:
         self._validate_args = validate_args
     if self._validate_args:
         for param, constraint in self.arg_constraints.items():
             if constraints.is_dependent(constraint):
                 continue  # skip constraints that cannot be checked
             if param not in self.__dict__ and isinstance(getattr(type(self), param), lazy_property):
                 continue  # skip checking lazily-constructed args
             if not constraint.check(getattr(self, param)).all():
                 raise ValueError("The parameter {} has invalid values".format(param))
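The `lazy_property` guard matters for distributions that derive one parameterization from another on demand. `Categorical` is the usual example: whichever of `probs`/`logits` is passed gets stored and checked, while the other remains a lazy class attribute until first access:

    import torch
    from torch.distributions import Categorical

    d = Categorical(logits=torch.zeros(3))
    print('logits' in d.__dict__)  # True: stored at init, so it is checked
    print('probs' in d.__dict__)   # False: still lazy, so it is skipped
    d.probs                        # first access computes and caches it
    print('probs' in d.__dict__)   # True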
Example #5
    def test_params_contains(self):
        for Dist, params in EXAMPLES:
            for i, param in enumerate(params):
                dist = Dist(**param)
                for name, value in param.items():
                    if isinstance(value, Number):
                        value = torch.tensor([value])
                    try:
                        constraint = dist.arg_constraints[name]
                    except KeyError:
                        continue  # ignore optional parameters

                    if is_dependent(constraint):
                        continue

                    message = "{} example {}/{} parameter {} = {}".format(
                        Dist.__name__, i + 1, len(params), name, value)
                    self.assertTrue(constraint.check(value).all(), msg=message)
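The test iterates over an `EXAMPLES` fixture defined elsewhere in the suite, pairing each distribution class with a list of constructor-kwargs dicts. A hypothetical miniature of that fixture, assuming only its shape, would look like:

    import torch
    from torch.distributions import Exponential, Normal

    # stand-in for the real EXAMPLES list in the test suite
    EXAMPLES = [
        (Normal, [dict(loc=0.0, scale=1.0),
                  dict(loc=torch.zeros(5), scale=torch.ones(5))]),
        (Exponential, [dict(rate=torch.tensor([0.5, 1.0]))]),
    ]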