def testVentureScriptAbstractExample():
    """End-to-end VentureScript test: emap over an enumerable uniform.

    Makes uniform_continuous enumerable (discretized support), conditions a
    mixture-like model on repeated observations near 8, and checks that MAP
    inference selects the "funny" branch with mean near 8.
    """
    class EnumerableUniformOutputPSP(cont.UniformOutputPSP):
        # Discretize the continuous support into 100 evenly spaced points
        # so that enumeration-based inference (emap) can traverse it.
        def canEnumerate(self):
            return True

        def enumerateValues(self, args):
            (lo, hi) = args.operandValues()
            return np.arange(lo, hi, (hi - lo) / 100)

    ripl = get_ripl()
    ripl.set_mode("venture_script")
    ripl.bind_foreign_sp(
        "uniform_continuous",
        typed_nr(EnumerableUniformOutputPSP(),
                 [t.NumberType(), t.NumberType()], t.NumberType()))
    ripl.execute_program("""
        infer resample(2);
        assume is_funny = tag(quote(fun), 0, flip(0.3));
        assume funny_mean = tag(quote(mean), 0, uniform_continuous(-10,10));
        assume mean = if (is_funny) { funny_mean } else { 0 };
        assume trial = proc() { normal(mean, 1) };
        observe trial() = 8;
        observe trial() = 8;
        observe trial() = 8;
        observe trial() = 8;
        infer emap(default, all, 1, false);
    """)
    eq_(True, ripl.sample("is_funny"))
    assert_almost_equal(8, ripl.sample("funny_mean"))
def follow_func(edge, trace):
    """Translate an edge descriptor into a node-follower.

    The returned function maps a trace node to the OrderedFrozenSet of
    nodes reached by traversing `edge` from it, or the empty set when the
    edge does not apply to that kind of node.  Raises Exception for edge
    descriptors of unknown type or name.
    """
    if edge in t.NumberType():
        # Numeric edge: select a subexpression by position.
        def by_position(node):
            if not n.isApplicationNode(node):
                return OrderedFrozenSet([])
            # The subexpressions are the definite parents, accidentally in
            # the order useful for this
            parent = trace.definiteParentsAt(node)[int(edge.getNumber())]
            return OrderedFrozenSet([parent])
        return by_position
    if edge in t.SymbolType() or edge in t.StringType():
        name = edge.getSymbol()
        if name == "operator":
            # The operator is just subexpression 0; reuse the numeric case.
            return follow_func(t.NumberType().asVentureValue(0), trace)
        if name == "source":
            def by_source(node):
                if not n.isLookupNode(node):
                    return OrderedFrozenSet([])
                # The definite parents are the lookup source
                return OrderedFrozenSet([trace.definiteParentsAt(node)[0]])
            return by_source
        if name == "request":
            def by_request(node):
                if not n.isOutputNode(node):
                    return OrderedFrozenSet([])
                # The last definite parent is the request node
                return OrderedFrozenSet([trace.definiteParentsAt(node)[-1]])
            return by_request
        raise Exception("Unknown named edge type %s" % (name, ))
    if edge in t.ForeignBlobType() and isinstance(edge.datum, EsrEdge):
        # ESR edge: select a requested-result parent by index.
        def by_esr(node):
            if not n.isApplicationNode(node):
                return OrderedFrozenSet([])
            parent = trace.esrParentsAt(node)[int(edge.datum.index)]
            return OrderedFrozenSet([parent])
        return by_esr
    raise Exception("Unknown edge type %s" % (edge, ))
def testCollectLogScore():
    """Calling "collect" in the presence of likelihood-free SPs should not
    crash the program."""
    class TestPSP(LikelihoodFreePSP):
        # Likelihood-free: adds standard-normal noise to its argument.
        def simulate(self, args):
            base = args.operandValues()[0]
            return base + stats.distributions.norm.rvs()

    ripl = get_ripl()
    ripl.bind_foreign_sp(
        'test', typed_nr(TestPSP(), [t.NumberType()], t.NumberType()))
    ripl.execute_program('''
        [ASSUME x (test 0)]
        [ASSUME y (normal x 1)]
        [infer (collect x)]''')
def test_profiling_likelihoodfree():
    """Profiling should not break in the presence of likelihood-free SPs."""
    class TestPSP(LikelihoodFreePSP):
        # Likelihood-free: adds standard-normal noise to its argument.
        def simulate(self, args):
            base = args.operandValues()[0]
            return base + stats.distributions.norm.rvs()

    ripl = get_ripl()
    ripl.bind_foreign_sp(
        'test', typed_nr(TestPSP(), [t.NumberType()], t.NumberType()))
    ripl.profiler_enable()
    ripl.execute_program('''
        [ASSUME x (test 0)]
        [INFER (mh default one 10)]''')
def simulate(self, args):
    """Build a collapsed multivariate-normal SP from NIW hyperparameters.

    Operands are (m0, k0, v0, S0): prior mean, prior pseudo-count, degrees
    of freedom, and scale matrix.  Returns a VentureSPRecord whose output
    PSP samples numeric arrays of the mean's dimension.
    """
    (m0, k0, v0, S0) = args.operandValues()
    mean0 = np.mat(m0).transpose()  # column vector
    dim = np.size(mean0)
    out_psp = TypedPSP(
        CMVNOutputPSP(dim, mean0, k0, v0, S0),
        SPType([], t.HomogeneousArrayType(t.NumberType())))
    return VentureSPRecord(CMVNSP(NullRequestPSP(), out_psp, dim))
def setup_likelihood_free():
    """Return a ripl with two likelihood-free SPs bound.

    'test1' adds standard-normal noise to its argument; 'test2' adds
    Bernoulli(0.5) noise.  Both have type Number -> Number.
    """
    class TestPSP1(LikelihoodFreePSP):
        def simulate(self, args):
            base = args.operandValues()[0]
            return base + stats.distributions.norm.rvs()

    class TestPSP2(LikelihoodFreePSP):
        def simulate(self, args):
            base = args.operandValues()[0]
            return base + stats.distributions.bernoulli(0.5).rvs()

    ripl = get_ripl()
    for name, psp in [('test1', TestPSP1()), ('test2', TestPSP2())]:
        ripl.bind_foreign_sp(
            name, typed_nr(psp, [t.NumberType()], t.NumberType()))
    return ripl
def follow_edge(thing, edge, trace):
    """Traverse one edge of a subproblem selection.

    From Top, a numeric edge selects a whole family by directive id;
    named edges from Top are not supported.  From any other selection,
    delegate to the follower built by follow_func.
    """
    if not isinstance(thing, Top):
        return set_bind(thing, follow_func(edge, trace))
    if edge in t.NumberType():
        directive_id = int(edge.getNumber())
        return trace.families[directive_id]
    raise Exception("Selecting subproblems by label is not supported")
def array_unboxed(self, length=None, elt_type=None, **kwargs):
    """Generate a random VentureArrayUnboxed for testing.

    length defaults to a random size in [0, 10); elt_type defaults to
    NumberType.  Each element is drawn independently from the element
    type's distribution.
    """
    if length is None:
        length = npr.randint(0, 10)
    if elt_type is None:
        # TODO Do I want to test on a screwy class of unboxed arrays in general?
        elt_type = t.NumberType()
    contents = []
    for _ in range(length):
        sample = elt_type.distribution(self.__class__, **kwargs).generate()
        contents.append(elt_type.asPython(sample))
    return v.VentureArrayUnboxed(contents, elt_type)
def __venture_start__(ripl):
    """Plugin entry point: define gp_cov_wn (model and inference scope) and
    bind GP helper SPs."""
    ripl.execute_program('''
        assume gp_cov_wn = (c) -> { gp_cov_scale(c, gp_cov_bump(1e-9, 1e-11)) };
        define gp_cov_wn = (c) -> { gp_cov_scale(c, gp_cov_bump(1e-9, 1e-11)) };
    ''')

    def numbers():
        # Shorthand for the unboxed numeric array type.
        return vt.ArrayUnboxedType(vt.NumberType())

    # Inference-scope utilities.
    ripl.bind_foreign_inference_sp(
        'sort',
        deterministic_typed(np.sort, [numbers()], numbers(), min_req_args=1))
    ripl.bind_foreign_inference_sp(
        'get_mean',
        deterministic_typed(np.mean, [numbers()], vt.NumberType(),
                            min_req_args=1))
    ripl.bind_foreign_inference_sp(
        'load_csv',
        deterministic_typed(load_csv, [vt.StringType()], numbers(),
                            min_req_args=1))
    # Model-scope utilities for the embedded covariance DSL.
    ripl.bind_foreign_sp(
        'compile_ast_to_venturescript',
        deterministic_typed(compile_ast_to_embedded_dsl, [vt.AnyType()],
                            vt.StringType(), min_req_args=1))
    ripl.bind_foreign_sp(
        'eval_expr',
        deterministic_typed(interpret_embedded_dsl, [vt.StringType()],
                            gp.gpType, min_req_args=1))
class SuffBernoulliSPAux(SPAux):
    """Sufficient statistics for a Bernoulli SP: success and failure counts."""

    # Venture-side representation: a two-element numeric list [yes, no].
    v_type = t.HomogeneousListType(t.NumberType())

    def __init__(self):
        self.yes = 0.0
        self.no = 0.0

    def copy(self):
        fresh = SuffBernoulliSPAux()
        fresh.yes = self.yes
        fresh.no = self.no
        return fresh

    def asVentureValue(self):
        return SuffBernoulliSPAux.v_type.asVentureValue([self.yes, self.no])

    @staticmethod
    def fromVentureValue(val):
        fresh = SuffBernoulliSPAux()
        (fresh.yes, fresh.no) = SuffBernoulliSPAux.v_type.asPython(val)
        return fresh

    def cts(self):
        # Counts in the order [successes, failures].
        return [self.yes, self.no]
class SuffPoissonSPAux(SPAux):
    """Sufficient statistics for a Poisson SP: sum of observations and count."""

    # Venture-side representation: a two-element numeric list [xsum, ctN].
    v_type = t.HomogeneousListType(t.NumberType())

    def __init__(self):
        self.xsum = 0.0
        self.ctN = 0.0

    def copy(self):
        fresh = SuffPoissonSPAux()
        fresh.xsum = self.xsum
        fresh.ctN = self.ctN
        return fresh

    def asVentureValue(self):
        return SuffPoissonSPAux.v_type.asVentureValue([self.xsum, self.ctN])

    @staticmethod
    def fromVentureValue(val):
        fresh = SuffPoissonSPAux()
        (fresh.xsum, fresh.ctN) = SuffPoissonSPAux.v_type.asPython(val)
        return fresh

    def cts(self):
        # Statistics in the order [sum of observations, number of observations].
        return [self.xsum, self.ctN]
# (at your option) any later version.
#
# Venture is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Venture.  If not, see <http://www.gnu.org/licenses/>.

import numpy as np

from venture.lite.sp import SPType
from venture.lite.sp_help import deterministic_psp
from venture.lite.sp_help import dispatching_psp
from venture.lite.sp_help import no_request
from venture.lite.sp_registry import registerBuiltinSP
import venture.lite.types as t

# biplex dispatches on argument types: the scalar signature picks one of the
# last two arguments by a boolean; the vector signature does so pointwise.
_scalar_biplex = deterministic_psp(
    lambda p, c, a: c if p else a,
    sim_grad=lambda args, direction: (
        [0, direction, 0] if args[0] else [0, 0, direction]),
    descr="biplex returns either its second or third argument, depending on the first.")

_vector_biplex = deterministic_psp(
    np.where,  # TODO sim_grad
    descr="vector-wise biplex")

_num_vec = t.ArrayUnboxedType(t.NumberType())

generic_biplex = dispatching_psp(
    [SPType([t.BoolType(), t.AnyType(), t.AnyType()], t.AnyType()),
     SPType([_num_vec, _num_vec, _num_vec], _num_vec)],
    [_scalar_biplex, _vector_biplex])

registerBuiltinSP("biplex", no_request(generic_biplex))
def binaryNumS(output):
    """Wrap an output PSP as a stochastic SP of type (Number, Number) -> Number."""
    signature = [t.NumberType(), t.NumberType()]
    return typed_nr(output, signature, t.NumberType())
def __venture_start__(ripl):
    """Plugin entry point: define set_value_at_scope_block, bind data-handling
    and plotting inference SPs, and register the change-point covariance SP."""
    ripl.execute_program('''
        define set_value_at_scope_block = (scope, block, value) -> {
            set_value_at2(scope, block, value) };
    ''')

    def numbers():
        # Shorthand for the unboxed numeric array type.
        return vt.ArrayUnboxedType(vt.NumberType())

    def plot_opts():
        # Keyword-style plot options: string keys, arbitrary values.
        return vt.HomogeneousDictType(vt.StringType(), vt.AnyType())

    # (name, function, argument types, return type, min required args),
    # bound in order.
    specs = [
        ('sort', np.sort, [numbers()], numbers(), 1),
        ('get_mean', np.mean, [numbers()], vt.NumberType(), 1),
        ('get_predictive_mean', lambda x: np.mean(x, axis=0),
         [vt.ArrayUnboxedType(numbers())], numbers(), 1),
        ('load_csv', load_csv, [vt.StringType()], numbers(), 1),
        ('concatenate', concatenate, [numbers(), numbers()], numbers(), 2),
        ('scatter_plot', scatter_plot,
         [numbers(), numbers(), plot_opts()], vt.NilType(), 2),
        ('line_plot', line_plot,
         [numbers(), numbers(), plot_opts()], vt.NilType(), 2),
        ('legend', legend, [vt.StringType()], vt.NilType(), 0),
        ('square_heatmap', square_heatmap,
         [numbers(), numbers(), plot_opts()], vt.NilType(), 2),
    ]
    for name, func, arg_types, ret_type, min_args in specs:
        ripl.bind_foreign_inference_sp(
            name,
            deterministic_typed(func, arg_types, ret_type,
                                min_req_args=min_args))

    # Model-scope covariance constructor: change-point combination of two
    # covariance kernels.
    ripl.bind_foreign_sp(
        'gp_cov_cp',
        _cov_sp(
            change_point,
            [vt.NumberType(), vt.NumberType(),
             GPCovarianceType('K'), GPCovarianceType('H')]))
def binaryNumInt(f, sim_grad=None, descr=None):
    """Make a deterministic SP of type (Number, Number) -> Integer from f,
    with optional simulation gradient and description."""
    signature = [t.NumberType(), t.NumberType()]
    return deterministic_typed(f, signature, t.IntegerType(),
                               sim_grad=sim_grad, descr=descr)
def unaryNumS(f):
    """Wrap an output PSP as a stochastic SP of type Number -> Number."""
    return typed_nr(f, [t.NumberType()], t.NumberType())
"than or equal to its second")) registerBuiltinSP("lt", binaryPred(lambda x,y: x.compare(y) < 0, descr="lt returns true if its first argument compares less than its " \ "second")) registerBuiltinSP("lte", binaryPred(lambda x,y: x.compare(y) <= 0, descr="lte returns true if its first argument compares less than or " \ "equal to its second")) # If you are wondering about the type signature, this function # bootstraps the implicit coersion to numbers into an explicit one. registerBuiltinSP( "real", deterministic_typed( lambda x: x, [t.NumberType()], t.NumberType(), descr="real explicitly coerces its argument to a number")) registerBuiltinSP( "atom", deterministic_typed( lambda x: x, [t.IntegerType()], t.AtomType(), descr="atom returns the identity of its argument integer as an atom")) registerBuiltinSP( "atom_index", deterministic_typed( lambda x: x, [t.AtomType()], t.IntegerType(),
elif logodds == -inf: return [False] else: return [True, False] def description(self, name): short = name[len('log_odds_'):] return ' {name}(x) returns true with log odds x and false otherwise. '\ 'Equivalent to ({short} (logistic x)), but reduces the chance of'\ ' pathologies due to rounding (logistic x) to 0 or 1.'\ .format(name=name, short=short) registerBuiltinSP( "log_odds_flip", typed_nr(LogOddsBernoulliOutputPSP(), [t.NumberType()], t.BoolType())) registerBuiltinSP( "log_odds_bernoulli", typed_nr(LogOddsBernoulliOutputPSP(), [t.NumberType()], t.IntegerType())) class BinomialOutputPSP(DiscretePSP): def simulate(self, args): (n, p) = args.operandValues() return args.np_prng().binomial(n, p) def logDensity(self, val, args): (n, p) = args.operandValues() return scipy.stats.binom.logpmf(val, n, p)
from venture.lite.sp_help import deterministic_typed from venture.lite.sp_help import type_test from venture.lite.sp_registry import registerBuiltinSP import venture.lite.value as vv import venture.lite.types as t import venture.lite.utils as u registerBuiltinSP("array", deterministic_typed(lambda *args: np.array(args), [t.AnyType()], t.ArrayType(), variadic=True, sim_grad=lambda args, direction: direction.getArray(), descr="array returns an array initialized with its arguments")) registerBuiltinSP("vector", deterministic_typed(lambda *args: np.array(args), [t.NumberType()], t.ArrayUnboxedType(t.NumberType()), variadic=True, sim_grad=lambda args, direction: direction.getArray(), descr="vector returns an unboxed numeric array initialized with its arguments")) registerBuiltinSP("is_array", type_test(t.ArrayType())) registerBuiltinSP("is_vector", type_test(t.ArrayUnboxedType(t.NumberType()))) registerBuiltinSP("to_array", deterministic_typed(lambda seq: seq.getArray(), [t.HomogeneousSequenceType(t.AnyType())], t.ArrayType(), descr="to_array converts its argument sequence to an array")) registerBuiltinSP("to_vector", deterministic_typed(lambda seq: np.array(seq.getArray(t.NumberType())), [t.HomogeneousSequenceType(t.NumberType())], t.ArrayUnboxedType(t.NumberType()),
def vvsum(venture_array):
    """Sum the numeric contents of a Venture array, returning a VentureNumber."""
    # TODO Why do the directions come in and out as Venture Values
    # instead of being unpacked by f_type.gradient_type()?
    contents = venture_array.getArray(t.NumberType())
    return v.VentureNumber(sum(contents))
def grad_vector_times_scalar(args, direction):
    """Gradient of (vector * scalar): d/dvector is direction * scalar,
    d/dscalar is <vector, direction>."""
    vec = args[0]
    scalar = args[1]
    dot = v.vv_dot_product(
        v.VentureArrayUnboxed(vec, t.NumberType()), direction)
    return [direction * scalar, v.VentureNumber(dot)]
def symbolic_zero_left(n, obj): assert n == 0, "Cannot add non-zero integer %r to %r" % (n, obj) return obj def symbolic_zero_right(obj, n): assert n == 0, "Cannot add non-zero integer %r to %r" % (n, obj) return obj generic_add = dispatching_psp([ SPType([t.Int], t.Int, variadic=True), SPType([t.Int, t.Number], t.Number), SPType([t.Number, t.Int], t.Number), SPType([t.NumberType()], t.NumberType(), variadic=True), SPType([t.ArrayUnboxedType(t.NumberType()), t.NumberType()], t.ArrayUnboxedType(t.NumberType())), SPType([t.NumberType(), t.ArrayUnboxedType(t.NumberType())], t.ArrayUnboxedType(t.NumberType())), SPType([t.ArrayUnboxedType(t.NumberType())], t.ArrayUnboxedType(t.NumberType()), variadic=True), SPType([t.Int, t.Object], t.Object), SPType([t.Object, t.Int], t.Object), SPType([t.Object, t.Object], t.Object), ], [ deterministic_psp( lambda *args: sum(args), sim_grad=lambda args, direction: [direction for _ in args], descr="add returns the sum of all its arguments"),
def naryNum(f, sim_grad=None, descr=None):
    """Make a variadic deterministic SP of type Number... -> Number from f,
    with optional simulation gradient and description."""
    return deterministic_typed(f, [t.NumberType()], t.NumberType(),
                               variadic=True, sim_grad=sim_grad, descr=descr)
def grad_vector_dot(args, direction):
    """Gradient of dot product: each argument's partial is the other
    argument, scaled by the incoming direction."""
    gradient_type = t.HomogeneousArrayType(t.NumberType())
    swapped = [gradient_type.asVentureValue(x) for x in (args[1], args[0])]
    scale = direction.getNumber()
    return [scale * x for x in swapped]
# XXX This implementation will suggest to a multi-site proposal # that there are more distinct possibilities than actually exist, # if more than one table was emptied by recent unincorporations. # This is Github issue #462: # https://github.com/probcomp/Venturecxx/issues/462 if aux.cachedTables: tables += sorted(aux.cachedTables.values()) else: tables.append(aux.nextTable) return tables registerBuiltinSP( 'make_crp', typed_nr(MakeCRPOutputPSP(), [t.NumberType(), t.NumberType()], SPType([], t.AtomType()), min_req_args=1)) def draw_crp_samples(n, alpha, np_rng=None): """Jointly draw n samples from CRP(alpha). This returns an assignment of n objects to clusters, given by a length-n list of cluster ids. """ aux = CRPSPAux() args = MockArgs([], aux, np_rng=np_rng) psp = CRPOutputPSP(alpha, 0) # No dispersion def draw_sample():
def __venture_start__(ripl):
    """Plugin entry point: bind utility inference SPs and define trace helpers."""
    start = time.time()  # NOTE(review): assigned but never read in this chunk
    # NOTE: these are all currently inference SPs

    def numbers():
        # Shorthand for the unboxed numeric array type.
        return t.ArrayUnboxedType(t.NumberType())

    # (name, function, argument types, return type), bound in order.
    specs = [
        ("make_symbol", make_name,
         [t.SymbolType(), t.NumberType()], t.SymbolType()),
        ("logsumexp", logsumexp, [numbers()], t.NumberType()),
        ("concatenate", concatenate, [numbers(), numbers()], numbers()),
        ("sum", sum_sp, [numbers()], t.NumberType()),
        ("mean", mean_sp, [numbers()], t.NumberType()),
        ("stderr", stderr, [numbers()], t.NumberType()),
        ("random_string", random_string, [t.IntegerType()], t.StringType()),
        ("cat_string", cat_string,
         [t.StringType(), t.StringType()], t.StringType()),
        ("start_timer", start_timer, [], t.NumberType()),
        ("time_elapsed", time_elapsed, [t.NumberType()], t.NumberType()),
    ]
    for name, func, arg_types, ret_type in specs:
        ripl.bind_foreign_inference_sp(
            name, deterministic_typed(func, arg_types, ret_type))

    ripl.execute_program("define new_trace = proc() { run(new_model()) };")
    ripl.execute_program(
        "define run_in_trace = proc(trace, program) { first(run(in_model(trace, program))) };"
    )
    ripl.execute_program(
        "define parallel_mapv = proc(f, l) { run(parallel_mapv_action(f, l, 4)) };"
    )
aux.N += 1 aux.xTotal += x aux.STotal += x * x.T def unincorporate(self, x, args): x = np.mat(x).reshape((self.d, 1)) aux = args.spaux() aux.N -= 1 aux.xTotal -= x aux.STotal -= x * x.T def logDensityOfData(self, aux): (mN, kN, vN, SN) = self.updatedParams(aux) term1 = -(aux.N * self.d * math.log(math.pi)) / 2. term2 = logGenGamma(self.d, vN / 2.) term3 = -logGenGamma(self.d, self.v0 / 2.) term4 = (self.v0 / 2.) * np.linalg.slogdet(self.S0)[1] # first is sign term5 = -(vN / 2.) * np.linalg.slogdet(SN)[1] term6 = (self.d / 2.) * math.log(float(self.k0) / kN) return term1 + term2 + term3 + term4 + term5 + term6 registerBuiltinSP( "make_niw_normal", typed_nr(MakeCMVNOutputPSP(), [ t.HomogeneousArrayType(t.NumberType()), t.NumberType(), t.NumberType(), t.MatrixType() ], SPType([], t.HomogeneousArrayType(t.NumberType()))))
return _gp_gradientOfLogDensity(self.mean, self.covariance, samples, [x], [o]) def incorporate(self, o, args): samples = args.spaux().samples x = args.operandValues()[0] samples[x] = o def unincorporate(self, _o, args): samples = args.spaux().samples x = args.operandValues()[0] del samples[x] gpType = SPType([t.ArrayUnboxedType(t.NumericArrayType())], t.ArrayUnboxedType(t.NumberType())) gp1Type = SPType([t.NumberType()], t.NumberType()) class GPSPAux(SPAux): def __init__(self, samples): self.samples = samples def copy(self): return GPSPAux(copy.copy(self.samples)) def asVentureValue(self): def encode(xy): # (x,y) = xy # Since we are assuming the domain of the GP is numeric, the
operator = vals[1] if isinstance(operator, SPRef): # XXX trace.madeSPRecordAt(operator.makerNode) operator = operator.makerNode.madeSPRecord if not isinstance(operator.sp.requestPSP, NullRequestPSP): raise VentureValueError("Cannot assess a requesting SP.") if not operator.sp.outputPSP.isRandom(): raise VentureValueError("Cannot assess a deterministic SP.") assessedArgs = ReplacingArgs(args, vals[2:], operandNodes=args.operandNodes[2:], spaux=operator.spAux) return operator.sp.outputPSP.logDensity(value, assessedArgs) def description(self, name): return " %s(val, func, arg1, arg2, ...) returns the log probability" \ " (density) of simulating val from func(arg1, arg2, ...)" % name registerBuiltinSP( "assess", typed_nr(AssessOutputPSP(), [ t.AnyType("<val>"), SPType([t.AnyType("<args>")], t.AnyType("<val>"), variadic=True), t.AnyType("<args>") ], t.NumberType(), variadic=True))