def test_cnf_conversion():
    """Smoke-test CNF conversion and specification canonicalization.

    Builds a 10-ary 'max' specification, runs it through CNFConverter, then
    exercises canonicalize_specification on both that spec and a separable
    binary 'max2' specification.  Relies on module-level names
    (exprtypes, CNFConverter, canonicalize_specification) being in scope.
    """
    from core import synthesis_context
    from semantics import semantics_core
    from semantics import semantics_lia

    ctx = synthesis_context.SynthesisContext(
        semantics_core.CoreInstantiator(),
        semantics_lia.LIAInstantiator())

    # Ten integer variables x0..x9 and a 10-ary synthesis function 'max'.
    xs = [ctx.make_variable_expr(exprtypes.IntType(), 'x%d' % i)
          for i in range(10)]
    max_fun = ctx.make_synth_function('max', [exprtypes.IntType()] * 10,
                                      exprtypes.IntType())
    max_app = ctx.make_function_expr(max_fun, *xs)

    # Spec: max(xs) >= every x, and max(xs) equals at least one x.
    ge_conjuncts = [ctx.make_function_expr('ge', max_app, x) for x in xs]
    eq_disjuncts = [ctx.make_function_expr('eq', max_app, x) for x in xs]
    some_eq = ctx.make_ac_function_expr('or', *eq_disjuncts)
    all_ge = ctx.make_ac_function_expr('and', *ge_conjuncts)
    spec = ctx.make_ac_function_expr('and', some_eq, all_ge)

    converter = CNFConverter()
    cnf_clauses, cnf_expr = converter.apply(spec, ctx)
    # print(exprs.expression_to_string(cnf_expr))
    # print([exprs.expression_to_string(c) for c in cnf_clauses])
    # print(check_single_invocation_property(spec, ctx))

    # Binary max2(a, b) applied in both argument orders.
    bin_max = ctx.make_synth_function(
        'max2', [exprtypes.IntType(), exprtypes.IntType()],
        exprtypes.IntType())
    bin_app = ctx.make_function_expr(bin_max, xs[0], xs[1])
    bin_app_rev = ctx.make_function_expr(bin_max, xs[1], xs[0])
    # non_separable = ctx.make_function_expr('eq', bin_app, bin_app_rev)
    # print(check_single_invocation_property(non_separable, ctx))
    # max_rec = ctx.make_function_expr(bin_max, bin_app, bin_app)
    # non_separable2 = ctx.make_function_expr('eq', max_rec, bin_app)
    # print(check_single_invocation_property(non_separable2, ctx))

    canonicalize_specification(spec, ctx)

    # A separable spec over max2: both orderings dominate x0, and max2
    # equals one of its two arguments.
    sep_ge = ctx.make_function_expr('ge', bin_app, xs[0])
    sep_ge_rev = ctx.make_function_expr('ge', bin_app_rev, xs[0])
    sep_eq = ctx.make_ac_function_expr(
        'or',
        ctx.make_function_expr('eq', bin_app, xs[0]),
        ctx.make_function_expr('eq', bin_app, xs[1]))
    separable = ctx.make_ac_function_expr('and', sep_ge, sep_ge_rev, sep_eq)
    # print(check_single_invocation_property(separable, ctx))
    canonicalize_specification(separable, ctx)
def _generate_test_generators():
    """Build and return the 'Start' term generator for a small LIA grammar.

    Grammar shape:
        Start     -> varA | varB | varC | 0 | 1
                   | add(Start, Start) | sub(Start, Start)
                   | ite(StartBool, Start, Start)
        StartBool -> and/or(StartBool, StartBool) | not(StartBool)
                   | le/eq/ge(Start, Start)

    Relies on module-level names (exprtypes, exprs, LeafGenerator,
    AlternativesGenerator, FunctionalGenerator, RecursiveGeneratorFactory).
    """
    from core import synthesis_context
    from semantics import semantics_core
    from semantics import semantics_lia

    ctx = synthesis_context.SynthesisContext(
        semantics_core.CoreInstantiator(),
        semantics_lia.LIAInstantiator())

    # Leaf terms: three integer variables and the constants 0 and 1.
    info_a = ctx.make_variable(exprtypes.IntType(), 'varA', 0)
    info_b = ctx.make_variable(exprtypes.IntType(), 'varB', 1)
    info_c = ctx.make_variable(exprtypes.IntType(), 'varC', 2)
    leaf_vars = [exprs.VariableExpression(info)
                 for info in (info_a, info_b, info_c)]
    const_zero = exprs.ConstantExpression(exprs.Value(0, exprtypes.IntType()))
    const_one = exprs.ConstantExpression(exprs.Value(1, exprtypes.IntType()))

    var_gen = LeafGenerator(leaf_vars, 'Variable Generator')
    const_gen = LeafGenerator([const_zero, const_one], 'Constant Generator')
    leaf_gen = AlternativesGenerator([var_gen, const_gen],
                                     'Leaf Term Generator')

    # Placeholders let the two nonterminals refer to each other recursively.
    factory = RecursiveGeneratorFactory()
    start_ph = factory.make_placeholder('Start')
    start_bool_ph = factory.make_placeholder('StartBool')

    add_fun = ctx.make_function('add', exprtypes.IntType(), exprtypes.IntType())
    sub_fun = ctx.make_function('sub', exprtypes.IntType(), exprtypes.IntType())
    ite_fun = ctx.make_function('ite', exprtypes.BoolType(),
                                exprtypes.IntType(), exprtypes.IntType())
    and_fun = ctx.make_function('and', exprtypes.BoolType(), exprtypes.BoolType())
    or_fun = ctx.make_function('or', exprtypes.BoolType(), exprtypes.BoolType())
    not_fun = ctx.make_function('not', exprtypes.BoolType())
    le_fun = ctx.make_function('le', exprtypes.IntType(), exprtypes.IntType())
    ge_fun = ctx.make_function('ge', exprtypes.IntType(), exprtypes.IntType())
    eq_fun = ctx.make_function('eq', exprtypes.IntType(), exprtypes.IntType())

    start_alternatives = [leaf_gen] + [
        FunctionalGenerator(add_fun, [start_ph, start_ph]),
        FunctionalGenerator(sub_fun, [start_ph, start_ph]),
        FunctionalGenerator(ite_fun, [start_bool_ph, start_ph, start_ph]),
    ]
    start_gen = factory.make_generator('Start', AlternativesGenerator,
                                       (start_alternatives,))
    factory.make_generator('StartBool', AlternativesGenerator, ([
        FunctionalGenerator(and_fun, [start_bool_ph, start_bool_ph]),
        FunctionalGenerator(or_fun, [start_bool_ph, start_bool_ph]),
        FunctionalGenerator(not_fun, [start_bool_ph]),
        FunctionalGenerator(le_fun, [start_ph, start_ph]),
        FunctionalGenerator(eq_fun, [start_ph, start_ph]),
        FunctionalGenerator(ge_fun, [start_ph, start_ph]),
    ],))
    return start_gen
def get_func_exprs_grammars(benchmark_files):
    """Parse SyGuS benchmark files into (define-fun expressions, grammar) pairs.

    For each benchmark file, every `define-fun` is turned into an expression
    whose variables are rewritten to formal parameters, and the (single)
    synth-fun/synth-inv grammar is extracted.

    NOTE(review): a second function with this same name appears later in the
    file and will shadow this one at import time — confirm which definition
    callers actually need.

    :param benchmark_files: iterable of benchmark file paths.
    :return: list of (fun_exprs, grammar) tuples, one per parsed file;
             grammar is None when only the default grammar is present.
    """
    global eu  # module-level flag selecting eusolver-specific processing
    # Grammars
    results = []
    # for eusolver
    ite_related_macros = []
    for benchmark_file in benchmark_files:
        fun_exprs = []
        print('Loading : ', benchmark_file)
        file_sexp = parser.sexpFromFile(benchmark_file)
        if file_sexp is None:
            continue
        # Fresh instantiators/context per file so definitions don't leak
        # across benchmarks.
        core_instantiator = semantics_core.CoreInstantiator()
        theory_instantiators = [parser.get_theory_instantiator(theory)
                                for theory in parser._known_theories]
        macro_instantiator = semantics_core.MacroInstantiator()
        uf_instantiator = semantics_core.UninterpretedFunctionInstantiator()
        synth_instantiator = semantics_core.SynthFunctionInstantiator()
        syn_ctx = synthesis_context.SynthesisContext(core_instantiator,
                                                     *theory_instantiators,
                                                     macro_instantiator,
                                                     uf_instantiator,
                                                     synth_instantiator)
        syn_ctx.set_macro_instantiator(macro_instantiator)
        defs, _ = parser.filter_sexp_for('define-fun', file_sexp)
        if defs is None:
            defs = []
        for [name, args_data, ret_type_data, interpretation] in defs:
            # NOTE(review): when `eu` is set this body runs twice per def,
            # so macro_instantiator.add_function is called twice with the
            # same name — confirm MacroInstantiator tolerates re-adding.
            for eusolver in ([True, False] if eu else [False]):
                ((arg_vars, arg_types, arg_var_map),
                 return_type) = parser._process_function_defintion(
                     args_data, ret_type_data)
                expr = parser.sexp_to_expr(interpretation, syn_ctx,
                                           arg_var_map)
                macro_func = semantics_types.MacroFunction(
                    name, len(arg_vars), tuple(arg_types), return_type,
                    expr, arg_vars)
                # for eusolver (recording macro functions of which definition include ite)
                if eusolver:
                    app = exprs.find_application(expr, 'ite')
                    if app is not None:
                        ite_related_macros.append(name)
                macro_instantiator.add_function(name, macro_func)
                # Replace named argument variables with positional formal
                # parameters so the expression is caller-independent.
                i = 0
                subs_pairs = []
                for (var_expr, ty) in zip(arg_vars, arg_types):
                    param_expr = exprs.FormalParameterExpression(None, ty, i)
                    subs_pairs.append((var_expr, param_expr))
                    i += 1
                expr = exprs.substitute_all(expr, subs_pairs)
                # resolve macro functions involving ite (for enumeration of pred exprs (eusolver))
                if eusolver:
                    for fname in ite_related_macros:
                        app = exprs.find_application(expr, fname)
                        if app is None:
                            continue
                        expr = macro_instantiator.instantiate_macro(
                            expr, fname)
                fun_exprs.append(expr)

        # NOTE(review): redefined on every file iteration, which resets the
        # static `cnt` attribute per file — confirm that is intended.
        @static_var("cnt", 0)
        def rename(synth_funs_data):
            for synth_fun_data in synth_funs_data:
                # to avoid duplicated names
                synth_fun_data[0] = "__aux_name__" + benchmark_file + str(
                    rename.cnt)
                rename.cnt += 1

        # collect grammars
        synth_funs_data, _ = parser.filter_sexp_for('synth-fun', file_sexp)
        if len(synth_funs_data) == 0:
            # No synth-fun: fall back to invariant-synthesis form.
            synth_funs_data, _ = parser.filter_sexp_for('synth-inv',
                                                        file_sexp)
            rename(synth_funs_data)
            synth_funs_grammar_data = parser.process_synth_invs(
                synth_funs_data, synth_instantiator, syn_ctx)
        else:
            rename(synth_funs_data)
            synth_funs_grammar_data = parser.process_synth_funcs(
                synth_funs_data, synth_instantiator, syn_ctx)
        # Take the first explicitly-specified grammar, if any.
        grammar = None
        for synth_fun, arg_vars, grammar_data in synth_funs_grammar_data:
            if grammar_data != 'Default grammar':
                # we only consider a single function synthesis for now
                grammar = parser.sexp_to_grammar(arg_vars, grammar_data,
                                                 synth_fun, syn_ctx)
                break
        results.append((fun_exprs, grammar))
    return results
def get_func_exprs_grammars(benchmark_files):
    """Categorize solution expressions from solved SyGuS benchmarks.

    NOTE(review): this re-defines (and therefore shadows) the earlier
    function of the same name in this file — confirm the shadowing is
    deliberate before renaming either one.

    :param benchmark_files: iterable of benchmark file paths; each file is
        expected to contain a SyGuS problem, (check-synth), and a single
        solution as its matching define-fun.
    :return: (exprs_per_category, all_vocabs) where exprs_per_category maps
        a category flag (return_type, eusolver, spec_flag) to a set of
        (expr, fetchop_func) pairs, and all_vocabs is the union of vocab
        tokens seen in specifications and grammars.
    """
    # expected format:
    # sygus format problem
    # (check-synth)
    # a single solution
    global eu  # module-level flag selecting eusolver-specific processing

    # NOTE(review): `benchmark_file` is a free variable bound only by the
    # loop below (late binding); all call sites of rename() are currently
    # commented out, so this never fires — verify before re-enabling.
    @static_var("cnt", 0)
    def rename(synth_funs_data):
        for synth_fun_data in synth_funs_data:
            # to avoid duplicated names
            synth_fun_data[0] = "__aux_name__" + benchmark_file + str(
                rename.cnt)
            rename.cnt += 1

    exprs_per_category = {}
    # decision tree : label -> exprs
    ## label : (ret_type, eu, STD spec / PBE spec, spec information ... )
    # for eusolver
    ite_related_macros = []
    # all vocabs
    all_vocabs = set([])
    for benchmark_file in benchmark_files:
        print('Loading : ', benchmark_file)
        file_sexp = parser.sexpFromFile(benchmark_file)
        if file_sexp is None:
            continue
        ## specification
        specification = get_specification(file_sexp)
        all_vocabs.update(basic_vocabs_for_spec(specification))
        # Fresh instantiators/context per file so definitions don't leak
        # across benchmarks.
        core_instantiator = semantics_core.CoreInstantiator()
        theory_instantiators = [parser.get_theory_instantiator(theory)
                                for theory in parser._known_theories]
        macro_instantiator = semantics_core.MacroInstantiator()
        uf_instantiator = semantics_core.UninterpretedFunctionInstantiator()
        synth_instantiator = semantics_core.SynthFunctionInstantiator()
        syn_ctx = synthesis_context.SynthesisContext(core_instantiator,
                                                     *theory_instantiators,
                                                     macro_instantiator,
                                                     uf_instantiator,
                                                     synth_instantiator)
        syn_ctx.set_macro_instantiator(macro_instantiator)
        # collect grammars
        synth_funs_data, _ = parser.filter_sexp_for('synth-fun', file_sexp)
        if len(synth_funs_data) == 0:
            # No synth-fun: fall back to invariant-synthesis form.
            synth_funs_data, _ = parser.filter_sexp_for('synth-inv',
                                                        file_sexp)
            # rename(synth_funs_data)
            synth_funs_grammar_data = parser.process_synth_invs(
                synth_funs_data, synth_instantiator, syn_ctx)
        else:
            # rename(synth_funs_data)
            synth_funs_grammar_data = parser.process_synth_funcs(
                synth_funs_data, synth_instantiator, syn_ctx)
        # handling only single function problems for now
        fetchop_func = fetchop
        spec_flag = ()
        synth_fun_name = ''
        for synth_fun, arg_vars, grammar_data in synth_funs_grammar_data:
            if grammar_data != 'Default grammar':
                synth_fun_name = synth_fun.function_name
                grammar = parser.sexp_to_grammar(arg_vars, grammar_data,
                                                 synth_fun, syn_ctx)
                # spec flag
                spec_flag = get_spec_flag(specification, grammar)
                # fetchop func
                fetchop_func = get_fetchop_func(specification, grammar)
                all_vocabs.update(
                    get_vocabs_from_grammar(grammar, fetchop_func))
        defs, _ = parser.filter_sexp_for('define-fun', file_sexp)
        if defs is None:
            defs = []
        if len(defs) > 0:
            for [name, args_data, ret_type_data, interpretation] in defs:
                print(name, ' ', synth_fun_name)
                # Only defs whose name contains the synth-fun's name are
                # treated as solutions for that function.
                if synth_fun_name in name:
                    for eusolver in ([True] if eu else [False]):
                        ((arg_vars, arg_types, arg_var_map),
                         return_type) = parser._process_function_defintion(
                             args_data, ret_type_data)
                        # category flag
                        flag = (return_type, eusolver, spec_flag)
                        expr = parser.sexp_to_expr(interpretation, syn_ctx,
                                                   arg_var_map)
                        macro_func = semantics_types.MacroFunction(
                            name, len(arg_vars), tuple(arg_types),
                            return_type, expr, arg_vars)
                        # for eusolver (recording macro functions of which definition include ite)
                        if eusolver:
                            app = exprs.find_application(expr, 'ite')
                            if app is not None:
                                ite_related_macros.append(name)
                        macro_instantiator.add_function(name, macro_func)
                        # Replace named argument variables with positional
                        # formal parameters.
                        i = 0
                        subs_pairs = []
                        for (var_expr, ty) in zip(arg_vars, arg_types):
                            param_expr = exprs.FormalParameterExpression(
                                None, ty, i)
                            subs_pairs.append((var_expr, param_expr))
                            i += 1
                        expr = exprs.substitute_all(expr, subs_pairs)
                        # resolve macro functions involving ite (for enumeration of pred exprs (eusolver))
                        if eusolver:
                            for fname in ite_related_macros:
                                app = exprs.find_application(expr, fname)
                                if app is None:
                                    continue
                                expr = macro_instantiator.instantiate_macro(
                                    expr, fname)
                        if flag not in exprs_per_category:
                            exprs_per_category[flag] = set([])
                        exprs_per_category[flag].add((expr, fetchop_func))
    return exprs_per_category, all_vocabs
def extract_benchmark(file_sexp):
    """Extract all components of a SyGuS benchmark from its parsed s-expression.

    Processes, in order: set-logic, define-fun, declare-fun,
    synth-fun/synth-inv (with grammars), declare-var/declare-primed-var,
    constraint/inv-constraint, and finally check-synth/set-options.
    `file_sexp` is consumed destructively via filter_sexp_for and must be
    empty at the end.

    :return: (theories, syn_ctx, synth_instantiator, macro_instantiator,
              uf_instantiator, constraints, grammar_map, forall_vars_map)
    :raises AssertionError: on unknown theories, on anything other than
        exactly one set-logic, on a malformed check-synth, or on leftover
        s-expressions.
    """
    core_instantiator = semantics_core.CoreInstantiator()
    theories, file_sexp = filter_sexp_for('set-logic', file_sexp)
    theories = [x[0] for x in theories]
    assert all([theory in _known_theories for theory in theories])
    assert len(theories) == 1
    theory = theories[0]
    # NOTE(review): dead code — the assert above guarantees len(theories)
    # is 1, so this fallback to all known theories can never run. Either
    # the fallback should precede the assert, or it should be removed.
    if len(theories) == 0:
        theories = _known_theories
    theory_instantiators = [get_theory_instantiator(theory)
                            for theory in theories]
    macro_instantiator = semantics_core.MacroInstantiator()
    uf_instantiator = semantics_core.UninterpretedFunctionInstantiator()
    synth_instantiator = semantics_core.SynthFunctionInstantiator()
    syn_ctx = synthesis_context.SynthesisContext(
        core_instantiator, *theory_instantiators, macro_instantiator,
        uf_instantiator, synth_instantiator)
    syn_ctx.set_macro_instantiator(macro_instantiator)
    defs, file_sexp = filter_sexp_for('define-fun', file_sexp)
    process_definitions(defs, syn_ctx, macro_instantiator)
    ufuncs_data, file_sexp = filter_sexp_for('declare-fun', file_sexp)
    _process_uninterpreted_funcs(ufuncs_data, syn_ctx, uf_instantiator)
    # Synthesis functions and synthesis invariants
    synth_funs_data, file_sexp = filter_sexp_for('synth-fun', file_sexp)
    if len(synth_funs_data) == 0:
        # No synth-fun: treat the problem as invariant synthesis.
        synth_funs_data, file_sexp = filter_sexp_for('synth-inv', file_sexp)
        synth_funs_grammar_data = process_synth_invs(synth_funs_data,
                                                     synth_instantiator,
                                                     syn_ctx)
    else:
        synth_funs_grammar_data = process_synth_funcs(synth_funs_data,
                                                      synth_instantiator,
                                                      syn_ctx)
    # Grammars
    grammar_map = {}
    for synth_fun, arg_vars, grammar_data in synth_funs_grammar_data:
        if grammar_data == 'Default grammar':
            # Keep the sentinel string; replaced by a real grammar below,
            # once constraints are known.
            grammar_map[synth_fun] = grammar_data
        else:
            grammar_map[synth_fun] = sexp_to_grammar(arg_vars, grammar_data,
                                                     synth_fun, syn_ctx)
            # print(grammar_map[synth_fun])
    # Universally quantified variables
    forall_vars_data, file_sexp = filter_sexp_for('declare-var', file_sexp)
    forall_vars_map = _process_forall_vars(forall_vars_data, syn_ctx)
    forall_primed_data, file_sexp = filter_sexp_for('declare-primed-var',
                                                    file_sexp)
    forall_primed_vars_map = _process_forall_primed_vars(forall_primed_data,
                                                         syn_ctx)
    forall_vars_map.update(forall_primed_vars_map)
    # Constraints
    constraints_data, file_sexp = filter_sexp_for('constraint', file_sexp)
    constraints = process_constraints(constraints_data, syn_ctx,
                                      forall_vars_map)
    inv_constraints_data, file_sexp = filter_sexp_for('inv-constraint',
                                                      file_sexp)
    inv_constraints = process_inv_constraints(inv_constraints_data,
                                              synth_instantiator, syn_ctx,
                                              forall_vars_map)
    constraints.extend(inv_constraints)
    # Materialize default grammars now that constants can be mined from
    # the constraints.
    for sf in grammar_map.keys():
        if grammar_map[sf] == 'Default grammar':
            grammar_map[sf] = grammars.make_default_grammar(
                syn_ctx, theory, sf.range_type, sf.formal_parameters)
            grammar_map[sf].add_constant_rules(
                make_constant_rules(constraints))
    check_sats, file_sexp = filter_sexp_for('check-synth', file_sexp)
    options_data, file_sexp = filter_sexp_for('set-options', file_sexp)
    assert check_sats == [[]]
    assert file_sexp == []
    return theories, syn_ctx, synth_instantiator, macro_instantiator, \
        uf_instantiator, constraints, grammar_map, forall_vars_map
# print('median: ', datetime.timedelta(seconds=statistics.median(solved))) # exit(0) core_instantiator = semantics_core.CoreInstantiator() theory_instantiators = [ parser.get_theory_instantiator(theory) for theory in parser._known_theories ] macro_instantiator = semantics_core.MacroInstantiator() uf_instantiator = semantics_core.UninterpretedFunctionInstantiator() synth_instantiator = semantics_core.SynthFunctionInstantiator() syn_ctx = synthesis_context.SynthesisContext(core_instantiator, *theory_instantiators, macro_instantiator, uf_instantiator, synth_instantiator) file_sexp = parser.sexpFromFile(benchmark_file) defs, _ = parser.filter_sexp_for('define-fun', file_sexp) if defs is None or len(defs) == 0: print('No function can be found!') exit(0) [name, args_data, ret_type_data, interpretation] = defs[-1] ((arg_vars, arg_types, arg_var_map), return_type) = parser._process_function_defintion(args_data, ret_type_data) expr = parser.sexp_to_expr(interpretation, syn_ctx, arg_var_map) # if basename(benchmark_file) in solved: