def __venture_start__(ripl, *args): # External SPs argmaxSP = deterministic_typed(np.argmax, [t.HomogeneousArrayType(t.NumberType())], t.NumberType()) absSP = deterministic_typed(abs, [t.NumberType()], t.NumberType()) ripl.assume('make_squaredexp',VentureFunction(makeSquaredExponential, [t.NumberType(), t.NumberType()], t.AnyType("VentureFunction"))) ripl.assume('make_const_func', VentureFunction(makeConstFunc, [t.NumberType()], constantType)) ripl.bind_foreign_sp('allocate_gpmem', gpmem.allocateGPmemSP) ripl.bind_foreign_inference_sp('argmax_of_array', argmaxSP) ripl.bind_foreign_sp('abs', absSP) def f_true(x): return (0.2 + np.exp(-0.1*abs(x-2))) * np.cos(0.4*x) def plot_current_state(probe_index): plt.clf() x_true = np.linspace(-20,20,100) true_function_plot = plt.plot(x_true,f_true(x_true),label="True Curve") for i in range(200): xpost= np.random.uniform(-20,20,200) sampleString=genSamples(xpost) ypost = ripl.sample(sampleString) yp = [y_temp for (x_temp,y_temp) in sorted(zip(xpost,ypost))] plt.plot(sorted(xpost),yp,c="red",alpha=0.01,linewidth=2) probe_points = plt.scatter(all_probed_x[:-1],all_probed_y[:-1],facecolors='none', edgecolors='black',marker='o',s=50,linewidth='1.5',label="Probed Points") next_point = plt.scatter(all_probed_x[-1],all_probed_y[-1],edgecolor='green', marker='s',s=50,facecolors='none',linewidth='1.5',label="Next Point") plt.legend() plt.savefig("results/probe_"+str(probe_index)+".png") # Gpmem example all_probed_x = [] all_probed_y = [] def make_audited_expensive_function(name): def expensive_f(x): expensive_f.count += 1 # A tracker for how many times I am called ans = f_true(x) print "[PROBE %s] Probe #%d: %s(%f) = %f" % (expensive_f.name, expensive_f.count, expensive_f.name, x, ans) all_probed_x.append(x) all_probed_y.append(ans) if ripl.evaluate("plot_result"): plot_current_state(expensive_f.count) return ans expensive_f.count = 0 expensive_f.name = name audited_sp = deterministic_typed(expensive_f, [t.NumberType()], t.NumberType()) 
return sp.VentureSPRecord(audited_sp) ripl.bind_foreign_sp('make_audited_expensive_function', deterministic_typed( make_audited_expensive_function, [t.StringType()], sp.SPType([t.NumberType()], t.NumberType())))
def __venture_start__(ripl):
    """Venture plugin entry point: expose assorted inference-side
    utility SPs and a few convenience inference procedures."""
    start = time.time()
    # NOTE: these are all currently inference SPs
    num_array = t.ArrayUnboxedType(t.NumberType())
    bind = ripl.bind_foreign_inference_sp
    bind("make_symbol",
         deterministic_typed(make_name,
                             [t.SymbolType(), t.NumberType()], t.SymbolType()))
    bind("logsumexp",
         deterministic_typed(logsumexp, [num_array], t.NumberType()))
    bind("concatenate",
         deterministic_typed(concatenate, [num_array, num_array], num_array))
    bind("sum",
         deterministic_typed(sum_sp, [num_array], t.NumberType()))
    bind("mean",
         deterministic_typed(mean_sp, [num_array], t.NumberType()))
    bind("stderr",
         deterministic_typed(stderr, [num_array], t.NumberType()))
    bind("random_string",
         deterministic_typed(random_string, [t.IntegerType()], t.StringType()))
    bind("cat_string",
         deterministic_typed(cat_string,
                             [t.StringType(), t.StringType()], t.StringType()))
    bind("start_timer",
         deterministic_typed(start_timer, [], t.NumberType()))
    bind("time_elapsed",
         deterministic_typed(time_elapsed, [t.NumberType()], t.NumberType()))
    # Convenience inference procedures written in VentureScript itself.
    ripl.execute_program("define new_trace = proc() { run(new_model()) };")
    ripl.execute_program(
        "define run_in_trace = proc(trace, program) { first(run(in_model(trace, program))) };"
    )
    ripl.execute_program(
        "define parallel_mapv = proc(f, l) { run(parallel_mapv_action(f, l, 4)) };"
    )
def _mean_sp(F, argtypes):
    """Lift the mean-function constructor F into a deterministic SP.

    The simulation gradient threads the incoming direction through the
    parameter structure of the mean object that F builds.
    """
    def _sim_grad(args, direction):
        return parameter_nest(F(*args).parameters, direction.getArray())
    return deterministic_typed(F, argtypes, GPMeanType(),
                               sim_grad=_sim_grad,
                               descr=F.__doc__)
def _cov_sp(F, argtypes):
    """Lift the covariance-function constructor F into a deterministic SP.

    The simulation gradient threads the incoming direction through the
    parameter structure of the covariance object that F builds.
    """
    def _sim_grad(args, direction):
        return parameter_nest(F(*args).parameters, direction.getArray())
    return deterministic_typed(F, argtypes, GPCovarianceType(),
                               sim_grad=_sim_grad,
                               descr=F.__doc__)
def make_audited_expensive_function(name, id_of_preset): f_true = F_TRUES[id_of_preset] def expensive_f(x): expensive_f.count += 1 # A tracker for how many times I am called ans = f_true(x) print "[PROBE %s] Probe #%d: %s(%f) = %f" % ( expensive_f.name, expensive_f.count, expensive_f.name, x, ans) return ans expensive_f.count = 0 expensive_f.name = name audited_sp = deterministic_typed(expensive_f, [t.NumberType()], t.NumberType()) return sp.VentureSPRecord(audited_sp)
def make_data_function(name):
    """Return an SP record exposing the observed data set as a function.

    The resulting SP may only be queried at x values that match an entry
    of data_xs to within 1e-6; it returns the corresponding data_ys entry.

    Raises:
        Exception: if x matches no data point. The message names the
            offending query so failed lookups are debuggable (the previous
            bare 'Illegal query' gave no way to tell which x was rejected).
    """
    def f_restr(x):
        matches = np.argwhere(np.abs(data_xs - x) < 1e-6)
        if matches.size == 0:
            raise Exception('Illegal query: no data point near x = %r' % (x,))
        else:
            # Data points are assumed distinct at the 1e-6 scale.
            assert matches.size == 1
            i = matches[0, 0]
            return data_ys[i]
    f_restr.name = name
    audited_sp = deterministic_typed(f_restr, [t.NumberType()], t.NumberType())
    return sp.VentureSPRecord(audited_sp)
def __venture_start__(ripl):
    """Venture plugin entry point: define the white-noise covariance helper
    (in both model and inference scopes) and bind array/CSV utilities plus
    the AST <-> embedded-DSL bridge SPs."""
    ripl.execute_program('''
        assume gp_cov_wn = (c) -> { gp_cov_scale(c, gp_cov_bump(1e-9, 1e-11)) };
        define gp_cov_wn = (c) -> { gp_cov_scale(c, gp_cov_bump(1e-9, 1e-11)) };
    ''')
    numbers = vt.ArrayUnboxedType(vt.NumberType())
    ripl.bind_foreign_inference_sp(
        'sort',
        deterministic_typed(np.sort, [numbers], numbers, min_req_args=1))
    ripl.bind_foreign_inference_sp(
        'get_mean',
        deterministic_typed(np.mean, [numbers], vt.NumberType(), min_req_args=1))
    ripl.bind_foreign_inference_sp(
        'load_csv',
        deterministic_typed(load_csv, [vt.StringType()], numbers, min_req_args=1))
    ripl.bind_foreign_sp(
        'compile_ast_to_venturescript',
        deterministic_typed(compile_ast_to_embedded_dsl,
                            [vt.AnyType()], vt.StringType(), min_req_args=1))
    ripl.bind_foreign_sp(
        'eval_expr',
        deterministic_typed(interpret_embedded_dsl,
                            [vt.StringType()], gp.gpType, min_req_args=1))
def make_audited_expensive_function(name): def expensive_f(x): expensive_f.count += 1 # A tracker for how many times I am called ans = f_true(x) print "[PROBE %s] Probe #%d: %s(%f) = %f" % (expensive_f.name, expensive_f.count, expensive_f.name, x, ans) all_probed_x.append(x) all_probed_y.append(ans) if ripl.evaluate("plot_result"): plot_current_state(expensive_f.count) return ans expensive_f.count = 0 expensive_f.name = name audited_sp = deterministic_typed(expensive_f, [t.NumberType()], t.NumberType()) return sp.VentureSPRecord(audited_sp)
def record(tag, arity):
    """Build the SP triple for a record type.

    Returns (type-tester, constructor, field-accessors) for records tagged
    `tag` carrying `arity` fields.
    """
    typ = RecordType(tag)
    tester = sp_help.type_test(typ)
    constructor = sp_help.deterministic_typed(
        lambda *fields: VentureRecord(tag, fields),
        [t.AnyType()] * arity, typ,
        descr="%s" + " constructs a %s record" % tag)

    def accessor_func(r, i):
        # Reject values that are not records of this tag.
        if r in typ:
            return r.fields[i]
        else:
            raise VentureTypeError("Accessor for field %s expected record of type %s but got %s" % (i, tag, r))

    def accessor(i):
        return sp_help.deterministic_typed(
            lambda r: accessor_func(r, i), [typ], t.AnyType(),
            descr="%s" + " extracts the %s field of a %s record" % (i, tag))

    accessors = [accessor(i) for i in range(arity)]
    return (tester, constructor, accessors)
def make_gp(self, ripl):
    """Assemble the GP model: base-kernel constructors, memoized
    hyper-parameter priors, a grammar over kernel compositions (including
    change-points), and the GP itself."""
    # Base covariance-function constructors.
    ripl.assume('make_linear', VentureFunction(makeLinear, [t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_periodic', VentureFunction(makePeriodic, [t.NumberType(), t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_se', VentureFunction(makeSquaredExponential, [t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_noise', VentureFunction(makeNoise, [t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    # ripl.assume('make_rq', VentureFunction(makeRQ, [t.NumberType(), t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_const_cov', VentureFunction(makeConst, [t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_CP', VentureFunction(makeCP, [t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))

    # One memoized, tagged uniform prior per (kernel, hyper-parameter) pair.
    ripl.assume('hyper_parameter', '(mem(lambda (i j) (tag (quote i) j (uniform_continuous 0.01 100))))')

    # Kernel instances over those hyper-parameters.
    ripl.assume('lin', "(apply_function make_linear (hyper_parameter 0 0) 0 )")
    ripl.assume('per', "(apply_function make_periodic (hyper_parameter 1 0) (hyper_parameter 1 1) (hyper_parameter 1 2) 1 ) ")
    ripl.assume('se', "(apply_function make_se (hyper_parameter 2 0 ) (hyper_parameter 2 1) 2 )")
    # ripl.assume('rq', "(apply_function make_rq (hyper_parameter 3 0) (hyper_parameter 3 1) (hyper_parameter 3 2) 3)")
    ripl.assume('wn', "(apply_function make_noise (hyper_parameter 3 0) 3 )")
    # ripl.assume('se2', "(apply_function make_se(hyper_parameter 3 0) (hyper_parameter 3 1) 3 )")
    # ripl.assume('rq', "(apply_function make_rq (hyper_parameter 4 0) (hyper_parameter 4 1) (hyper_parameter 4 2) 4)")
    ripl.assume('cp', "(apply_function make_CP (hyper_parameter 4 0) (hyper_parameter 4 1) 4)")
    ripl.assume('cp_inv', "(apply_function make_CP (negate (hyper_parameter 4 0)) (hyper_parameter 4 1) 4)")
    # ripl.assume('c', "(apply_function make_const_cov (hyper_parameter 5 0) 5 )")

    #### GP Structure Prior ######
    # For simplicity, start with the max amount of kernels per type given.
    ripl.assume("func_times", makeLiftedMult(lambda x1, x2: np.multiply(x1, x2)))
    ripl.assume("func_plus", makeLiftedAdd(lambda x1, x2: x1 + x2))
    ripl.assume('cov_list', '(list lin per se wn )')
    ripl.bind_foreign_sp("subset", typed_nr(Subset(), [t.ListType(), t.SimplexType()], t.ListType()))

    # Pseudo-uniform prior over subset sizes, weighted by how many
    # orderings each size admits.
    number = 4
    total_perms = 0
    perms = []
    for i in range(number):
        perms.append(len(list(itertools.permutations([j for j in range(i + 1)]))))
        total_perms += perms[i]
    simplex = "( simplex "
    for i in range(number):
        simplex += str(float(perms[i]) / total_perms) + " "
    simplex += " )"

    ripl.assume('s', '(tag (quote grammar) 1 (subset cov_list ' + simplex + ' ))')
    # Recursive composition grammar over the chosen kernels: with
    # probability 0.8 a plain sum or product, otherwise a change-point
    # combination using cp / cp_inv.
    ripl.assume('cov_compo', """
        (tag (quote grammar) 0
          (lambda (l )
            (if (lte ( size l) 1)
              (first l)
              (if (flip 0.8)
                (if (flip)
                  (apply_function func_plus (first l) (cov_compo (rest l)))
                  (apply_function func_times (first l) (cov_compo (rest l))) )
                (apply_function func_plus
                  (apply_function func_times (first l) cp)
                  (apply_function func_times (cov_compo (rest l)) cp_inv) ) ) )))
    """)
    ripl.assume('cov_structure', '(cov_compo s)')
    ripl.assume('gp', '(tag (quote model) 0 (make_gp_part_der zero cov_structure))')

    # Introspection helpers over the chosen covariance structure.
    ripl.bind_foreign_sp(
        "covariance_string",
        deterministic_typed(lambda x: VentureSymbol(x.stuff['name']),
                            [t.AnyType()], t.AnyType(),
                            descr="returns the covariance type"))
    ripl.bind_foreign_sp(
        "covariance_label",
        deterministic_typed(lambda x: x.stuff['label_list'],
                            [t.AnyType()], t.ArrayType(),
                            descr="returns the covariance label"))
def __venture_start__(ripl, *args): # External SPs argmaxSP = deterministic_typed(np.argmax, [t.HomogeneousArrayType(t.NumberType())], t.NumberType()) absSP = deterministic_typed(abs, [t.NumberType()], t.NumberType()) make_se_SP = deterministic_typed(lambda sf, l: VentureFunction(squared_exponential(sf, l), name="SE", parameter=[sf,l], sp_type=covType), [t.NumberType(), t.NumberType()], t.AnyType("VentureFunction")) make_const_func_SP = deterministic_typed(lambda c: VentureFunction(lambda x: c, sp_type = sp.SPType([], t.NumberType())), [t.NumberType()], t.AnyType("VentureFunction")) ripl.bind_foreign_sp('allocate_gpmem', gpmem.allocateGPmemSP) ripl.bind_foreign_inference_sp('argmax_of_array', argmaxSP) ripl.bind_foreign_sp('abs', absSP) ripl.bind_foreign_sp('make_squaredexp', make_se_SP) ripl.bind_foreign_sp('make_const_func', make_const_func_SP) # Gpmem example def make_audited_expensive_function(name, id_of_preset): f_true = F_TRUES[id_of_preset] def expensive_f(x): expensive_f.count += 1 # A tracker for how many times I am called ans = f_true(x) print "[PROBE %s] Probe #%d: %s(%f) = %f" % ( expensive_f.name, expensive_f.count, expensive_f.name, x, ans) return ans expensive_f.count = 0 expensive_f.name = name audited_sp = deterministic_typed(expensive_f, [t.NumberType()], t.NumberType()) return sp.VentureSPRecord(audited_sp) ripl.bind_foreign_sp('make_audited_expensive_function', deterministic_typed( make_audited_expensive_function, [t.StringType(), t.SymbolType()], sp.SPType([t.NumberType()], t.NumberType()))) # Accumulator for plot datas PLOT_DATAS = [] class AddPlotDataCallback(object): def __call__(self, inferrer, sigma_, l_, stats_): sigma = FSD(sigma_[0]).getNumber() l = FSD(l_[0]).getNumber() all_pairs = [map(getNumber, p) for p in map(getArray, FSD(stats_[0]).getArray())] (Xseen, Yseen) = zip(*all_pairs) if len(all_pairs) > 0 else ([], []) plot_data = BayesOptPlotData(sigma, l, Xseen, Yseen) PLOT_DATAS.append(plot_data) class DumpPlotDataCallback(object): 
def __call__(self, inferrer, strategy_name_, func_id_, user_prefix_): strategy_name = FSD(strategy_name_[0]).getSymbol() func_id = FSD(func_id_[0]).getSymbol() user_prefix = FSD(user_prefix_[0]).getSymbol() assert isinstance(func_id, str) date_fmt = '%Y%m%d_%H%M%S' directory = 'bayesopt_output' def j(fname): return os.path.join(directory, fname) log_fname = 'plot_data_%s_%s_%s_%s.pkl' % ( func_id, strategy_name, user_prefix, datetime.now().strftime(date_fmt),) recents_fname = 'MOST_RECENT' print "Logging to %s" % (j(log_fname),) with open(j(log_fname), 'wb') as f: pickle.dump([func_id, PLOT_DATAS], f) with open(j(recents_fname), 'wb') as f: print >> f, log_fname print "Done." ripl.bind_callback("add_gp_plot_data", AddPlotDataCallback()) ripl.bind_callback("dump_gp_plot_data", DumpPlotDataCallback())
from numbers import Number

import numpy as np

from venture.lite.exception import VentureValueError
from venture.lite.sp_help import deterministic_typed
from venture.lite.sp_help import type_test
from venture.lite.sp_registry import registerBuiltinSP
import venture.lite.value as vv
import venture.lite.types as t
import venture.lite.utils as u

# Builtin SPs for constructing, testing, and converting arrays/vectors.

registerBuiltinSP(
    "array",
    deterministic_typed(lambda *args: np.array(args),
                        [t.AnyType()], t.ArrayType(), variadic=True,
                        sim_grad=lambda args, direction: direction.getArray(),
                        descr="array returns an array initialized with its arguments"))

registerBuiltinSP(
    "vector",
    deterministic_typed(lambda *args: np.array(args),
                        [t.NumberType()], t.ArrayUnboxedType(t.NumberType()),
                        variadic=True,
                        sim_grad=lambda args, direction: direction.getArray(),
                        descr="vector returns an unboxed numeric array initialized with its arguments"))

registerBuiltinSP("is_array", type_test(t.ArrayType()))
registerBuiltinSP("is_vector", type_test(t.ArrayUnboxedType(t.NumberType())))

registerBuiltinSP(
    "to_array",
    deterministic_typed(lambda seq: seq.getArray(),
                        [t.HomogeneousSequenceType(t.AnyType())], t.ArrayType(),
                        descr="to_array converts its argument sequence to an array"))
"than or equal to its second")) registerBuiltinSP("lt", binaryPred(lambda x,y: x.compare(y) < 0, descr="lt returns true if its first argument compares less than its " \ "second")) registerBuiltinSP("lte", binaryPred(lambda x,y: x.compare(y) <= 0, descr="lte returns true if its first argument compares less than or " \ "equal to its second")) # If you are wondering about the type signature, this function # bootstraps the implicit coersion to numbers into an explicit one. registerBuiltinSP( "real", deterministic_typed( lambda x: x, [t.NumberType()], t.NumberType(), descr="real explicitly coerces its argument to a number")) registerBuiltinSP( "atom", deterministic_typed( lambda x: x, [t.IntegerType()], t.AtomType(), descr="atom returns the identity of its argument integer as an atom")) registerBuiltinSP( "atom_index", deterministic_typed( lambda x: x, [t.AtomType()], t.IntegerType(), descr=
def make_gp(self, ripl):
    """Build a GP with lin/per/se/rq base kernels, uniform hyper-priors,
    and a pseudo-uniform grammar over kernel sums and products."""
    # Base kernel constructors.
    ripl.assume('make_linear', VentureFunction(makeLinear, [t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_periodic', VentureFunction(makePeriodic, [t.NumberType(), t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_se', VentureFunction(makeSquaredExponential, [t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_rq', VentureFunction(makeRQ, [t.NumberType(), t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))

    # Hyper-parameter priors, one tagged block each.
    ripl.assume('a', ' (tag (quote hyper ) 0 (uniform_continuous 0 8))')
    ripl.assume('l', ' (tag (quote hyper) 1 (uniform_continuous 0 8))')
    ripl.assume('q', ' (tag (quote hyper) 2 (uniform_continuous 0.01 8))')
    ripl.assume('sf1', '(tag (quote hyper) 3 (uniform_continuous 0 8))')
    ripl.assume('theta_se_1', ' (tag (quote hyper) 4 (uniform_continuous 0 8))')
    ripl.assume('theta_se_2', ' (tag (quote hyper) 5 (uniform_continuous 0 8))')
    ripl.assume('theta_rq_1', '(tag (quote hyper) 6 (uniform_continuous 0 8))')
    ripl.assume('theta_rq_2', '(tag (quote hyper) 7 (uniform_continuous 0 8))')
    ripl.assume('theta_rq_3', '(tag (quote hyper) 8 (uniform_continuous 0 8))')

    # Kernel instances over those hyper-parameters.
    ripl.assume('lin', "(apply_function make_linear a 0 )")
    ripl.assume('per', "(apply_function make_periodic l q sf1 1 ) ")
    ripl.assume('se1', "(apply_function make_se theta_se_1 theta_se_2 2 )")
    ripl.assume('rq', "(apply_function make_rq theta_rq_1 theta_rq_2 theta_rq_3 7 )")

    #### GP Structure Prior ######
    # For simplicity, start with the max amount of kernels per type given.
    ripl.assume("func_times", makeLiftedMult(lambda x1, x2: np.multiply(x1, x2)))
    ripl.assume("func_plus", makeLiftedAdd(lambda x1, x2: x1 + x2))
    ripl.assume('cov_list', '(list lin per se1 rq )')
    ripl.bind_foreign_sp("subset", typed_nr(Subset(), [t.ListType(), t.SimplexType()], t.ListType()))

    # Weight each subset size by the number of orderings it admits.
    number = 4
    total_perms = 0
    perms = []
    for i in range(number):
        perms.append(len(list(itertools.permutations([j for j in range(i + 1)]))))
        total_perms += perms[i]
    simplex = "( simplex "
    for i in range(number):
        simplex += str(float(perms[i]) / total_perms) + " "
    simplex += " )"

    ripl.assume('s', '(tag (quote grammar) 1 (subset cov_list ' + simplex + ' ))')
    # Compose the chosen kernels by random sums/products.
    ripl.assume('cov_compo', """
        (tag (quote grammar) 0
          (lambda (l )
            (if (lte ( size l) 1)
              (first l)
              (if (flip)
                (apply_function func_plus (first l) (cov_compo (rest l)))
                (apply_function func_times (first l) (cov_compo (rest l))) ) )))
    """)
    ripl.assume('cov_structure', '(cov_compo s)')
    ripl.assume('gp', '(tag (quote model) 0 (make_gp_part_der zero cov_structure))')

    # Introspection helpers over the chosen covariance structure.
    ripl.bind_foreign_sp(
        "covariance_string",
        deterministic_typed(lambda x: VentureSymbol(x.stuff['name']),
                            [t.AnyType()], t.AnyType(),
                            descr="returns the covariance type"))
    ripl.bind_foreign_sp(
        "covariance_label",
        deterministic_typed(lambda x: x.stuff['label_list'],
                            [t.AnyType()], t.ArrayType(),
                            descr="returns the covariance label"))
for k1, v1 in thing1.iteritems(): for k2, v2 in thing2.iteritems(): ans[(k1, k2)] = f(v1, v2) return ans elif isinstance(thing2, SamplableMap): return set_fmap(thing2, lambda nodes: f(as_set(thing1), nodes)) else: return set_fmap(thing1, lambda nodes: f(nodes, as_set(thing2))) inf.registerBuiltinInferenceSP( "by_intersection", deterministic_typed( Intersect, [t.ForeignBlobType(), t.ForeignBlobType()], t.ForeignBlobType(), descr=""" Intersect the selected choices. """)) inf.registerBuiltinInferenceSP("by_tag", \ deterministic_typed(FetchTag, [t.AnyType("<tag>")], t.ForeignBlobType(), descr=""" Select the choices tagged by the given tag. They remain keyed by their values, so that `random_singleton` will pick all the choices given by a random tag value, rather than a single choice at random from all choices under that tag. """)) def by_tag_value_fun(tag, val):
def __venture_start__(ripl, *_args):
    """Venture plugin entry point: seed the RNGs, register covariance
    constructors and the structure prior, and expose the selected data set
    (synthetic / airline / co2) through foreign SPs."""
    np.random.seed(3)
    random.seed(3)

    # Covariance-function constructors.
    ripl.assume('make_linear', VentureFunctionDiff(makeLinear, [t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_periodic', VentureFunction(makePeriodic, [t.NumberType(), t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_squaredexp', VentureFunction(makeSquaredExponential, [t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_noise', VentureFunction(makeNoise, [t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_rq', VentureFunction(makeRQ, [t.NumberType(), t.NumberType(), t.NumberType(), t.IntegerType()], t.AnyType("VentureFunction")))
    ripl.assume('make_const_func', VentureFunction(makeConstFunc, [t.NumberType()], constantType))
    ripl.assume("mult_funcs", makeLiftedMult(lambda x1, x2: np.multiply(x1, x2)))
    ripl.assume("add_funcs", makeLiftedAdd(lambda x1, x2: x1 + x2))
    ripl.bind_foreign_sp('allocate_gpmem', gpmem.allocateGPmemSP)

    ## pseude-uniform structure prior
    # Weight each subset size by the number of orderings it admits.
    def uniform_structure_prior(number):
        total_perms = 0
        perms = []
        for i in range(number):
            perms.append(len(list(itertools.permutations([j for j in range(i + 1)]))))
            total_perms += perms[i]
        return [float(perms[i]) / total_perms for i in range(number)]

    uniform_structure = deterministic_typed(uniform_structure_prior,
                                            [t.IntegerType()], t.SimplexType())
    ripl.bind_foreign_sp('uniform_structure', uniform_structure)
    ripl.bind_foreign_sp("subset", typed_nr(Subset(), [t.ListType(), t.SimplexType()], t.ListType()))

    # Pick the data source named by the model's 'data' variable.
    if ripl.evaluate("data") == "synthetic":
        from get_synthetic_data import make_data_function, data_xs
    elif ripl.evaluate("data") == "airline":
        from get_airline_data import make_data_function, data_xs
    elif ripl.evaluate("data") == "co2":
        from get_co2_data import make_data_function, data_xs
    else:
        raise ValueError('Data is not known, please specify synthetic, airline or co2')

    ripl.bind_foreign_sp('make_data_function', deterministic_typed(
        make_data_function,
        [t.StringType()],
        sp.SPType([t.NumberType()], t.NumberType())))

    # helper SP to get the input data
    get_data_xs_SP = deterministic_typed(
        lambda: data_xs, [], t.HomogeneousArrayType(t.NumberType()))
    ripl.bind_foreign_sp('get_data_xs', get_data_xs_SP)

    # SPs to interpret covariance structure
    ripl.bind_foreign_sp(
        "covariance_string",
        deterministic_typed(lambda x: VentureSymbol(x.stuff['name']),
                            [t.AnyType()], t.AnyType(),
                            descr="returns the covariance type"))
    # SP to output covariance label so that we only infer over the
    # hyper-parameters of base kernels that are actually in use
    ripl.bind_foreign_sp(
        "covariance_label",
        deterministic_typed(lambda x: x.stuff['label_list'],
                            [t.AnyType()], t.ArrayType(),
                            descr="returns the covariance label"))
def __venture_start__(ripl):
    """Venture plugin entry point: inference-side utilities (array math,
    CSV loading, plotting helpers) and the change-point covariance SP."""
    ripl.execute_program('''
        define set_value_at_scope_block = (scope, block, value) -> {
            set_value_at2(scope, block, value)
        };
    ''')

    numbers = vt.ArrayUnboxedType(vt.NumberType())
    plot_opts = vt.HomogeneousDictType(vt.StringType(), vt.AnyType())
    bind = ripl.bind_foreign_inference_sp

    bind('sort',
         deterministic_typed(np.sort, [numbers], numbers, min_req_args=1))
    bind('get_mean',
         deterministic_typed(np.mean, [numbers], vt.NumberType(), min_req_args=1))
    bind('get_predictive_mean',
         deterministic_typed(lambda x: np.mean(x, axis=0),
                             [vt.ArrayUnboxedType(numbers)], numbers,
                             min_req_args=1))
    bind('load_csv',
         deterministic_typed(load_csv, [vt.StringType()], numbers, min_req_args=1))
    bind('concatenate',
         deterministic_typed(concatenate, [numbers, numbers], numbers,
                             min_req_args=2))
    bind('scatter_plot',
         deterministic_typed(scatter_plot, [numbers, numbers, plot_opts],
                             vt.NilType(), min_req_args=2))
    bind('line_plot',
         deterministic_typed(line_plot, [numbers, numbers, plot_opts],
                             vt.NilType(), min_req_args=2))
    bind('legend',
         deterministic_typed(legend, [vt.StringType()], vt.NilType(),
                             min_req_args=0))
    bind('square_heatmap',
         deterministic_typed(square_heatmap, [numbers, numbers, plot_opts],
                             vt.NilType(), min_req_args=2))

    # Change-point covariance: switches between kernels K and H around a
    # location/scale pair.
    ripl.bind_foreign_sp(
        'gp_cov_cp',
        _cov_sp(change_point,
                [vt.NumberType(), vt.NumberType(),
                 GPCovarianceType('K'), GPCovarianceType('H')]))
return -abs(max_capacity - capacity) - 3 * discrepancy return max([(w, h) for w in range(1, max_width) for h in range(1, max_height)], key = layout_quality) def plot_samples(data, samples): (block_width, block_height) = block_size(samples[0]) (width, height) = layout(samples) plt.figure(figsize=(block_width * width, block_height * height)) for (i, lw_trace) in enumerate(samples): if i >= width * height: break plt.subplot(height, width, i + 1) lw_trace["data"] = data render_dpmm(lw_trace, show_assignments=True, show_V=True) plt.tight_layout() plt.savefig("results-samples.png") plt.close() def make_plots(data, results): (histories, samples) = zip(*convert_from_venture_value(results)) plot_histories(histories) plot_samples(data, samples) make_plots_sp = deterministic_typed(make_plots, [t.Array(t.Array(t.Number)), t.Object], t.Nil) def __venture_start__(ripl): ripl.bind_foreign_inference_sp("make_plots", make_plots_sp) ripl.bind_foreign_inference_sp('unique', deterministic_typed(lambda l: list(set(l)), [t.HomogeneousListType(t.ExpressionType())], t.HomogeneousListType(t.ExpressionType())))
parser = VentureScriptParser() ast = parser.parse_instruction(expr)['expression'] assert len(ast) == 3 assert ast[0]['value'] == 'make_gp' gp_mean = interpret_mean_kernel(ast[1]) gp_cov = interpret_covariance_kernel(ast[2]) return sp.VentureSPRecord(gp.GPSP(gp_mean, gp_cov)) if __name__ == '__main__': ripl = vs.make_lite_ripl() ripl.bind_foreign_sp( 'interpret_embedded_dsl', deterministic_typed( interpret_embedded_dsl, [vt.StringType()], gp.gpType, min_req_args=1 ) ) ripl.evaluate(""" make_gp(gp_mean_const(0.), gp_cov_scale(0.1, gp_cov_bump(.1,.1))) """) ripl.execute_program(""" assume gp = interpret_embedded_dsl( "make_gp( gp_mean_const(0.), gp_cov_sum( gp_cov_scale(0.1, gp_cov_bump(.1,.1)), gp_cov_se(0.1)))" ) """)
def __venture_start__(ripl):
    """Venture plugin entry point: expose plotting and list-dedup SPs."""
    ripl.bind_foreign_inference_sp("make_plots", make_plots_sp)
    dedup = deterministic_typed(
        lambda l: list(set(l)),
        [t.HomogeneousListType(t.ExpressionType())],
        t.HomogeneousListType(t.ExpressionType()))
    ripl.bind_foreign_inference_sp('unique', dedup)
def accessor(i):
    """Build the SP that extracts field `i` from a record of type `typ`."""
    return sp_help.deterministic_typed(
        lambda r: accessor_func(r, i),
        [typ],
        t.AnyType(),
        descr="%s" + " extracts the %s field of a %s record" % (i, tag))
else: return arg if len(args) == 1: print convert_arg(args[0]) else: print " ".join([str(convert_arg(a)) for a in args]) inf.registerBuiltinInferenceSP( "print", deterministic_typed(print_fun, [t.AnyType()], t.NilType(), variadic=True, descr="""\ Print the given values to the terminal. If you are trying to add a debugging print statement to a VentureScript expression that is not already an inference action, consider using `debug`, which does not require sequencing. """)) def plot_fun(spec, dataset): spec = t.ExpressionType().asPython(spec) if isinstance(dataset, Dataset): PlotSpec(spec).plot(dataset.asPandas(), dataset.ind_names) else: # Assume a raw data frame PlotSpec(spec).plot(dataset, list(dataset.columns.values))
def grad_logisticv(args, direction): # XXX The direction is a Venture value, but the deriv is a Python # (numpy) array :( [x] = args (_, deriv) = T_logistic(x) answer = direction.array * deriv # print "Gradient of logistic got", x, deriv, direction.array, answer return [v.VentureArrayUnboxed(answer, t.Number)] registerBuiltinSP( "logisticv", deterministic_typed(logistic, [t.UArray(t.Number)], t.UArray(t.Number), sim_grad=grad_logisticv, descr="The logistic function: 1/(1+exp(-x))")) registerBuiltinSP( "logit", unaryNum(logit, descr="The logit (inverse logistic) function: log(x/(1-x))")) def grad_log_logistic(args, direction): [x] = args return [direction * d_log_logistic(x)] registerBuiltinSP( "log_logistic",
def __venture_start__(ripl, *args):
    """Venture plugin entry point for the regression demo.

    Registers GP helper SPs, generates the noisy example data set (with a
    planted outlier), and exposes the data through foreign SPs.
    """
    # External SPs
    argmaxSP = deterministic_typed(np.argmax, [t.HomogeneousArrayType(t.NumberType())], t.NumberType())
    absSP = deterministic_typed(abs, [t.NumberType()], t.NumberType())
    ripl.assume('make_squaredexp', VentureFunctionDiff(makeSquaredExponential, [t.NumberType(), t.NumberType()], t.AnyType("VentureFunctionDiff")))
    ripl.assume('make_noise', VentureFunctionDiff(makeNoise, [t.NumberType()], t.AnyType("VentureFunctionDiff")))
    ripl.assume('make_const_func', VentureFunction(makeConstFunc, [t.NumberType()], constantType))
    ripl.bind_foreign_sp("apply_diff_function", applyDiffFunctionSP)
    ripl.assume("add_funcs", makeLiftedAdd(lambda x1, x2: x1 + x2))
    ripl.bind_foreign_sp('make_matrix_gp', gp.makeGPSP)
    # ripl.set_seed(2)

    n = 55

    @np.vectorize
    def regexmpl_f_noiseless(x):
        # Regression example: the smooth target curve.
        return 0.3 + 0.4*x + 0.5*np.sin(2.7*x) + (1.1/(1+x**2))

    @np.vectorize
    def f_noisy(x):
        # Heavy-tailed observation noise: 30% of draws use stdev 1.0.
        p_outlier = 0.3
        stdev = (1.0 if rand() < p_outlier else 0.1)
        return np.random.normal(regexmpl_f_noiseless(x), stdev)

    # generate and save a data set
    # print "generating regression example data set"
    n = 55
    global regexempl_data_xs, regexempl_data_ys  # needs to be global for plotting plugin
    np.random.seed(2)
    regexempl_data_xs = np.random.normal(0, 1, n)
    regexempl_data_ys = f_noisy(regexempl_data_xs)
    # Sort the data by x, keeping y aligned, then plant an outlier at the
    # (post-first-element) minimum.
    i = np.argsort(regexempl_data_xs)
    regexempl_data_xs = np.sort(regexempl_data_xs)
    regexempl_data_ys = regexempl_data_ys[i]
    minimum_index = np.argmin(regexempl_data_ys[1:])
    regexempl_data_ys[minimum_index+1] = -4

    # Gpmem example
    def make_data_function(name):
        # Restrict queries to x values present in the generated data set.
        def f_restr(x):
            matches = np.argwhere(np.abs(regexempl_data_xs - x) < 1e-6)
            if matches.size == 0:
                raise Exception('Illegal query')
            else:
                assert matches.size == 1
                i = matches[0, 0]
                return regexempl_data_ys[i]
        f_restr.name = name
        audited_sp = deterministic_typed(f_restr, [t.NumberType()], t.NumberType())
        return sp.VentureSPRecord(audited_sp)

    ripl.bind_foreign_sp('make_data_function', deterministic_typed(
        make_data_function,
        [t.StringType()],
        sp.SPType([t.NumberType()], t.NumberType())))

    # Helper SPs exposing the generated data set.
    get_regexempl_data_xs_SP = deterministic_typed(
        lambda: regexempl_data_xs, [], t.HomogeneousArrayType(t.NumberType()))
    ripl.bind_foreign_sp('get_regexempl_data_xs', get_regexempl_data_xs_SP)
    get_regexempl_data_ys_SP = deterministic_typed(
        lambda: regexempl_data_ys, [], t.HomogeneousArrayType(t.NumberType()))
    ripl.bind_foreign_sp('get_regexempl_data_ys', get_regexempl_data_ys_SP)