def __init__(self, distribution, lower, upper, transform="infer", *args, **kwargs):
    """Wrap ``distribution`` so that its support is clipped to [lower, upper].

    Parameters
    ----------
    distribution : Distribution
        The distribution class being bounded.
    lower, upper : scalar or tensor, optional
        Bounds; ``None`` means unbounded on that side.
    transform : str or Transform
        ``"infer"`` picks the matching interval/lower/upper transform
        from the supplied bounds; anything else is passed through as-is.
    """
    dtype = kwargs.get("dtype", theano.config.floatX)

    # Promote finite bounds to tensors of the working dtype.
    if lower is not None:
        lower = tt.as_tensor_variable(lower).astype(dtype)
    if upper is not None:
        upper = tt.as_tensor_variable(upper).astype(dtype)

    if transform != "infer":
        # Caller supplied an explicit transform (or None); no default value.
        default = None
    elif lower is None and upper is None:
        transform, default = None, None
    elif lower is not None and upper is not None:
        transform = transforms.interval(lower, upper)
        default = 0.5 * (lower + upper)
    elif upper is not None:
        transform = transforms.upperbound(upper)
        default = upper - 1
    else:
        transform = transforms.lowerbound(lower)
        default = lower + 1

    super().__init__(
        distribution, lower, upper, default, *args, transform=transform, **kwargs
    )
def __init__(self, distribution, lower, upper, transform='infer', *args, **kwargs):
    """Wrap ``distribution`` so that its support is clipped to [lower, upper].

    Parameters
    ----------
    distribution : Distribution
        The distribution class being bounded.
    lower, upper : scalar or tensor, optional
        Bounds; ``None`` means unbounded on that side.
    transform : str or Transform
        ``'infer'`` selects interval/lowerbound/upperbound from the given
        bounds; any other value is forwarded unchanged.
    """
    dtype = kwargs.get('dtype', theano.config.floatX)

    # Promote finite bounds to tensors of the working dtype.
    if lower is not None:
        lower = tt.as_tensor_variable(lower).astype(dtype)
    if upper is not None:
        upper = tt.as_tensor_variable(upper).astype(dtype)

    if transform == 'infer':
        if lower is None and upper is None:
            transform = None
            default = None
        elif lower is not None and upper is not None:
            transform = transforms.interval(lower, upper)
            default = 0.5 * (lower + upper)
        elif upper is not None:
            transform = transforms.upperbound(upper)
            default = upper - 1
        else:
            transform = transforms.lowerbound(lower)
            default = lower + 1
    else:
        default = None

    # Python-3 zero-argument super() replaces the legacy
    # super(_ContinuousBounded, self) spelling.
    # NOTE(review): *args after the keyword arguments will raise
    # "multiple values for argument 'distribution'" whenever args is
    # non-empty — confirm callers never pass extra positionals here.
    super().__init__(
        distribution=distribution, lower=lower, upper=upper,
        transform=transform, default=default, *args, **kwargs)
def __init__(self, distribution, lower, upper, transform="infer", *args, **kwargs):
    """Wrap ``distribution`` so that its support is clipped to [lower, upper].

    With ``transform="infer"`` the appropriate interval/lowerbound/upperbound
    transform (and a test value inside the bounds) is derived from whichever
    bounds are not ``None``; otherwise ``transform`` is forwarded unchanged.
    """
    # Cast any finite bound to a tensor at the configured float precision.
    if lower is not None:
        lower = at.as_tensor_variable(floatX(lower))
    if upper is not None:
        upper = at.as_tensor_variable(floatX(upper))

    if transform != "infer":
        default = None
    elif lower is None and upper is None:
        transform, default = None, None
    elif lower is not None and upper is not None:
        transform = transforms.interval(lower, upper)
        default = 0.5 * (lower + upper)
    elif upper is not None:
        transform = transforms.upperbound(upper)
        default = upper - 1
    else:
        transform = transforms.lowerbound(lower)
        default = lower + 1

    super().__init__(distribution, lower, upper, default, *args, transform=transform, **kwargs)
def __init__(self, distribution, lower, upper, transform='infer', *args, **kwargs):
    """Build a bounded version of ``distribution`` clipped to [lower, upper].

    Instantiates the wrapped distribution and copies its attributes onto
    this object, then (when ``transform == 'infer'``) picks a transform and
    repairs the test value if it falls outside the bounds.

    NOTE(review): ``lower``/``upper`` are compared with ``np.isinf`` below,
    so this variant appears to expect numeric (possibly infinite) bounds
    rather than ``None`` — confirm against callers.
    """
    # Local import, presumably to avoid a circular import at module load time.
    import pymc3.distributions.transforms as transforms
    # Instantiate the underlying distribution and mirror all of its
    # attributes (shape, dtype, testval, ...) onto this wrapper.
    self.dist = distribution.dist(*args, **kwargs)
    self.__dict__.update(self.dist.__dict__)
    # NOTE(review): this also copies every local name — including ``self``,
    # ``args`` and ``kwargs`` — onto the instance as attributes; looks
    # intentional-but-fragile legacy behavior, so left untouched.
    self.__dict__.update(locals())
    if hasattr(self.dist, 'mode'):
        self.mode = self.dist.mode
    if transform == 'infer':
        default = self.dist.default()
        # Both bounds finite: interval transform; recentre testval if the
        # wrapped distribution's default lies outside (lower, upper).
        if not np.isinf(lower) and not np.isinf(upper):
            self.transform = transforms.interval(lower, upper)
            if default <= lower or default >= upper:
                self.testval = 0.5 * (upper + lower)
        # Only the lower bound is finite.
        if not np.isinf(lower) and np.isinf(upper):
            self.transform = transforms.lowerbound(lower)
            if default <= lower:
                self.testval = lower + 1
        # Only the upper bound is finite.
        if np.isinf(lower) and not np.isinf(upper):
            self.transform = transforms.upperbound(upper)
            if default >= upper:
                self.testval = upper - 1
    # Discrete distributions are never transformed to an unconstrained space.
    if issubclass(distribution, Discrete):
        self.transform = None
def __init__(self, distribution, lower, upper, transform="infer", *args, **kwargs):
    """Wrap ``distribution`` so that its support is clipped to [lower, upper].

    ``transform="infer"`` derives the matching transform (and an in-bounds
    default value) from whichever bounds are given; any other value for
    ``transform`` is forwarded to the parent unchanged.
    """
    dtype = kwargs.get("dtype", theano.config.floatX)

    def as_bound(value):
        # Cast a given bound to a tensor of the working dtype; None passes through.
        return None if value is None else tt.as_tensor_variable(value).astype(dtype)

    lower = as_bound(lower)
    upper = as_bound(upper)

    default = None
    if transform == "infer":
        if lower is not None and upper is not None:
            transform = transforms.interval(lower, upper)
            default = 0.5 * (lower + upper)
        elif upper is not None:
            transform = transforms.upperbound(upper)
            default = upper - 1
        elif lower is not None:
            transform = transforms.lowerbound(lower)
            default = lower + 1
        else:
            transform = None

    super().__init__(
        distribution, lower, upper, default, *args, transform=transform, **kwargs
    )
def __init__(self, distribution, lower, upper, transform='infer', *args, **kwargs):
    """Wrap ``distribution`` so that its support is clipped to [lower, upper].

    Parameters
    ----------
    distribution : Distribution
        The distribution class being bounded.
    lower, upper : scalar or tensor, optional
        Bounds; ``None`` means unbounded on that side.
    transform : str or Transform
        ``'infer'`` selects interval/lowerbound/upperbound based on which
        bounds are present; any other value is forwarded unchanged.
    """
    dtype = kwargs.get('dtype', theano.config.floatX)

    # Promote finite bounds to tensors of the working dtype.
    if lower is not None:
        lower = tt.as_tensor_variable(lower).astype(dtype)
    if upper is not None:
        upper = tt.as_tensor_variable(upper).astype(dtype)

    if transform == 'infer':
        if lower is None and upper is None:
            transform = None
            default = None
        elif lower is not None and upper is not None:
            transform = transforms.interval(lower, upper)
            default = 0.5 * (lower + upper)
        elif upper is not None:
            transform = transforms.upperbound(upper)
            default = upper - 1
        else:
            transform = transforms.lowerbound(lower)
            default = lower + 1
    else:
        default = None

    # Modernized from the Python-2 style super(_ContinuousBounded, self).
    # NOTE(review): mixing keyword arguments with a trailing *args means any
    # non-empty args would collide with 'distribution' at call time —
    # confirm callers never rely on extra positionals here.
    super().__init__(
        distribution=distribution, lower=lower, upper=upper,
        transform=transform, default=default, *args, **kwargs)
def test_lowerbound():
    """The lowerbound(0) transform round-trips, has the right Jacobian
    determinant, and maps onto strictly positive values."""
    lower_tr = tr.lowerbound(0.0)
    # Forward followed by backward is the identity on R+.
    check_transform_identity(lower_tr, Rplusbig)
    # Jacobian determinant checks: scalar case, then a length-2 vector.
    check_jacobian_det(lower_tr, Rplusbig, elemwise=True)
    check_jacobian_det(
        lower_tr, Vector(Rplusbig, 2), tt.dvector, [0, 0], elemwise=True
    )
    # Every transformed draw must respect the bound.
    sampled = get_values(lower_tr)
    close_to(sampled > 0, True, tol)
def test_lowerbound():
    """Check the zero-lower-bound transform: identity round-trip, Jacobian
    determinant (scalar and vector), and positivity of produced values."""
    transform_under_test = tr.lowerbound(0.0)
    check_transform_identity(transform_under_test, Rplusbig)
    for domain, extra in ((Rplusbig, ()), (Vector(Rplusbig, 2), (tt.dvector, [0, 0]))):
        check_jacobian_det(transform_under_test, domain, *extra, elemwise=True)
    # All values coming out of the transform must sit above the bound.
    close_to(get_values(transform_under_test) > 0, True, tol)