def observe(self, expression, value, label=None, type=False):
    if label is None:
        i = {'instruction': 'observe', 'expression': expression,
             'value': value}
    else:
        label = _symbolize(label)
        i = {'instruction': 'labeled_observe', 'expression': expression,
             'value': value, 'label': label}
    weights = self.execute_instruction(i)['value']
    return v.vector(weights) if type else weights
def forget(self, label_or_did, type=False):
    (tp, val) = _interp_label_or_did(label_or_did)
    if tp == 'did':
        i = {'instruction': 'forget', 'directive_id': val}
        # If asked to forget a prelude instruction, decrement _n_prelude.
        if label_or_did <= self._n_prelude:
            self._n_prelude -= 1
    else:
        # Assume that prelude instructions don't have labels.
        i = {'instruction': 'labeled_forget', 'label': val}
    weights = self.execute_instruction(i)['value']
    return v.vector(weights) if type else weights
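# Hedged usage sketch, not part of the library: shows how observe and forget
# are expected to pair up via a label (the label form routes through
# labeled_observe / labeled_forget, a directive id through the plain forms).
# Assumes get_ripl() from the surrounding test helpers; the expression, value,
# and label below are illustrative only.
def example_observe_forget():
    ripl = get_ripl()
    ripl.assume("x", "(normal 0 1)")
    ripl.observe("(normal x 1)", 2.0, label="obs_x")  # labeled_observe
    ripl.forget("obs_x")                              # labeled_forget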
def particulate(num_obs, epsilon):
    ripl = get_ripl()
    ripl.load_prelude()
    ripl.assume("mu", "(multivariate_normal (zeros 2) (id_matrix 2))")
    # A slow procedure to compute f(m) = m[0:2] * 1.0
    ripl.assume(
        "f",
        "(lambda (m) (map (lambda (i) (* 1.0 (lookup m i))) (range 0 2)))")
    ripl.assume("y", "(lambda () (multivariate_normal (f mu) (id_matrix 2)))")
    for _ in range(num_obs):
        ripl.observe("(y)", val.vector(scipy.stats.norm.rvs(0, 1.0, 2)))
    ripl.infer("(mh default all 1)")
    def do_infer():
        ripl.infer("(subsampled_mh default all 10 3 %f false 0 false 10)"
                   % epsilon)
    return do_infer
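# Hedged usage sketch, not part of the benchmark itself: particulate returns a
# thunk, so a caller builds the model once and then invokes the thunk
# repeatedly to run subsampled MH.  The num_obs and epsilon values here are
# illustrative assumptions, not values taken from the source.
def example_particulate():
    do_infer = particulate(num_obs=20, epsilon=0.5)
    for _ in range(5):
        do_infer()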
def testVector():
    # Test that array-like objects don't get interpreted as expressions.
    ripl = get_ripl()
    ripl.predict(v.vector(numpy.array([1, 2])))
def force(self, expression, value, type=False):
    i = {'instruction': 'force', 'expression': expression, 'value': value}
    weights = self.execute_instruction(i)["value"]
    return v.vector(weights) if type else weights
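# Hedged usage sketch, not part of the library: force issues a 'force'
# instruction shaped like observe and returns the resulting weights.  Assumes
# get_ripl() from the surrounding test helpers; the expression and value are
# illustrative only.
def example_force():
    ripl = get_ripl()
    ripl.assume("x", "(normal 0 1)")
    ripl.force("x", 3.0)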