Example #1
def forward(self, x_):  # Theano tensors: tt = theano.tensor
    # Split the constrained vector into internal-node proportions and the root height.
    non_root_proportions, root_height = self.topology.get_proportions(x_)
    if self.max_height is None:
        # Unbounded root height: map (0, inf) onto the real line with log.
        root_val = tt.log(root_height)
    else:
        # Bounded root height: rescale to (0, 1), then map onto the real line with logit.
        root_val = logit(
            root_height /
            (self.max_height - self.topology.get_max_leaf_height()))
    return tt.concatenate(
        [logit(non_root_proportions),
         tt.stack(root_val)])
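
Every one of these examples leans on a logit helper that the snippets themselves don't define. A minimal sketch of what it presumably looks like in Theano (the source library's actual definition may differ, e.g. in how it handles float casting):

import theano.tensor as tt

def logit(p):
    # Log-odds: maps a probability in (0, 1) onto the real line.
    return tt.log(p / (1.0 - p))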
Example #2
    def __init__(self, p=None, logit_p=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Exactly one of p and logit_p may be given.
        if sum(int(var is None) for var in [p, logit_p]) != 1:
            raise ValueError('Specify one of p and logit_p')
        if p is not None:
            self._is_logit = False
            self.p = p = tt.as_tensor_variable(floatX(p))
            self._logit_p = logit(p)
        else:
            self._is_logit = True
            # Recover the probability from the log-odds.
            self.p = tt.nnet.sigmoid(floatX(logit_p))
            self._logit_p = tt.as_tensor_variable(logit_p)

        # Most likely value: p rounded to the nearest integer (0 or 1).
        self.mode = tt.cast(tround(self.p), 'int8')
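
The constructor can accept either p or logit_p because sigmoid and logit are inverses of each other. A quick round-trip check of that fact (plain NumPy here rather than Theano, purely for illustration):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

p = 0.75
logit_p = np.log(p / (1.0 - p))  # log-odds of p
# sigmoid undoes logit, so either parameter pins down the same distribution.
assert np.isclose(sigmoid(logit_p), p)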
Example #3
    # Same constructor pattern as Example #2, but without the explicit floatX casts.
    def __init__(self, p=None, logit_p=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if sum(int(var is None) for var in [p, logit_p]) != 1:
            raise ValueError('Specify one of p and logit_p')
        if p is not None:
            self._is_logit = False
            self.p = p = tt.as_tensor_variable(p)
            self._logit_p = logit(p)
        else:
            self._is_logit = True
            self.p = tt.nnet.sigmoid(logit_p)
            self._logit_p = tt.as_tensor_variable(logit_p)

        self.mode = tt.cast(tround(self.p), 'int8')
Example #4
def forward(self, x):
    # logit maps a value in (0, 1) onto the whole real line.
    return logit(x)
Example #5
def forward(self, rv_var, rv_value):
    # Same transform as Example #4; rv_var is received but unused here.
    return logit(rv_value)
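
A forward transform like those in Examples #4 and #5 is normally paired with a backward method that inverts it. A minimal sketch in the style of Example #4, assuming Theano's sigmoid (the source library's exact method name and signature may differ):

def backward(self, x):
    # Inverse of logit: maps a real value back into (0, 1).
    return tt.nnet.sigmoid(x)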