def __init__(self):
    """Build a test graph wrapping sigmoid((x + y + z)**2) in an OpFromGraph."""
    x, y, z = scalars("xyz")
    inner = at.sigmoid((x + y + z) ** 2)
    wrapped = OpFromGraph([x, y, z], [inner])
    out = wrapped(x, y, z)
    self.inputs = [x, y, z]
    self.outputs = [out]
def invlogit(x, eps=None):
    """The inverse of the logit function, 1 / (1 + exp(-x)).

    The ``eps`` argument is deprecated and ignored; passing it emits a
    DeprecationWarning.
    """
    if eps is None:
        return at.sigmoid(x)
    warnings.warn(
        "pymc.math.invlogit no longer supports the ``eps`` argument and it will be ignored.",
        DeprecationWarning,
        stacklevel=2,
    )
    return at.sigmoid(x)
def backward(self, rv_var, rv_value):
    """Map an unconstrained value back onto the (possibly bounded) support.

    The bounds come from ``self.param_extract_fn(rv_var)``; a ``None`` bound
    means that side is unbounded.
    """
    lower, upper = self.param_extract_fn(rv_var)
    # Both bounds present: squash through a sigmoid onto (lower, upper).
    if lower is not None and upper is not None:
        s = at.sigmoid(rv_value)
        return s * upper + (1 - s) * lower
    # Only a lower bound: exp keeps the result above it.
    if lower is not None:
        return at.exp(rv_value) + lower
    # Only an upper bound: subtract exp to stay below it.
    if upper is not None:
        return upper - at.exp(rv_value)
    # Unbounded: identity.
    return rv_value
def __init__(self, nfeatures=100, noutputs=10, nhiddens=50, rng=None):
    """Build a one-hidden-layer MLP graph: sigmoid hidden layer, softmax output.

    Parameters
    ----------
    nfeatures : int
        Number of input features (columns of the input matrix ``x``).
    noutputs : int
        Number of output classes.
    nhiddens : int
        Number of hidden units.
    rng : int, numpy Generator, or None
        Seed or generator used to initialize the weights; ``None`` means seed 0.
    """
    # Normalize rng: None -> seed 0, int -> a fresh default_rng Generator.
    if rng is None:
        rng = 0
    if isinstance(rng, int):
        rng = np.random.default_rng(rng)
    self.rng = rng
    self.nfeatures = nfeatures
    self.noutputs = noutputs
    self.nhiddens = nhiddens

    x = dmatrix("x")
    # Hidden-layer parameters; borrow=True avoids copying the freshly
    # allocated arrays into the shared variables.
    wh = shared(self.rng.normal(0, 1, (nfeatures, nhiddens)), borrow=True)
    bh = shared(np.zeros(nhiddens), borrow=True)
    h = at.sigmoid(at.dot(x, wh) + bh)
    # Output-layer parameters. borrow=True added here for consistency with
    # the other shared variables — safe because the normal() result is a
    # fresh array that is not referenced anywhere else.
    wy = shared(self.rng.normal(0, 1, (nhiddens, noutputs)), borrow=True)
    by = shared(np.zeros(noutputs), borrow=True)
    y = at.nnet.softmax(at.dot(h, wy) + by)
    self.inputs = [x]
    self.outputs = [y]