def test_compile(*filenames):
    """Compile rules -> programs for the virtual machine.

    For each '*.theory' file, loads its rules, compiles every sequent both
    unconditionally (compile_full) and per triggering event (compile_given),
    and prints the resulting annotated program listing.
    """
    if not filenames:
        filenames = find_theories()
    for stem_rules in filenames:
        # FIX: header format string had no '{filename}' field, so the kwarg
        # was silently dropped.  (Original literal looked like an extraction
        # artifact -- confirm intended header text.)
        programs = ["# {filename}".format(filename=stem_rules)]
        assert stem_rules[-len(".theory"):] == ".theory", stem_rules
        sequents = load_theory(stem_rules)["rules"]
        for sequent in sequents:
            for cost, seq, plan in compile_full(sequent):
                programs += [
                    "",
                    "# using {}".format(sequent),
                    # FIX: '{}' placeholders were missing, so seq and cost
                    # were never interpolated (cf. the compile_given branch).
                    "# infer {}".format(seq),
                    "# cost = {}".format(cost),
                ]
                plan.program(programs)
            for event in get_events(sequent):
                for cost, seq, plan in compile_given(sequent, event):
                    programs += [
                        "",
                        "# given {}".format(event),
                        "# using {}".format(sequent),
                        "# infer {}".format(seq),
                        "# cost {}".format(cost),
                    ]
                    plan.program(programs)
        # Single-argument parenthesized print works identically on Python 2.
        print("\n".join(programs))
def test_compile(*filenames):
    '''Compile rules -> programs for the virtual machine.

    For each '*.theory' file, loads its rules, compiles every sequent both
    unconditionally (compile_full) and per triggering event (compile_given),
    and prints the resulting annotated program listing.
    '''
    if not filenames:
        filenames = find_theories()
    for stem_rules in filenames:
        # FIX: header format string had no '{filename}' field, so the kwarg
        # was silently dropped.  (Original literal looked like an extraction
        # artifact -- confirm intended header text.)
        programs = ['# {filename}'.format(filename=stem_rules)]
        assert stem_rules[-len('.theory'):] == '.theory', stem_rules
        sequents = load_theory(stem_rules)['rules']
        for sequent in sequents:
            for cost, seq, plan in compile_full(sequent):
                programs += [
                    '',
                    '# using {}'.format(sequent),
                    # FIX: '{}' placeholders were missing, so seq and cost
                    # were never interpolated (cf. the compile_given branch).
                    '# infer {}'.format(seq),
                    '# cost = {}'.format(cost),
                ]
                plan.program(programs)
            for event in get_events(sequent):
                for cost, seq, plan in compile_given(sequent, event):
                    programs += [
                        '',
                        '# given {}'.format(event),
                        '# using {}'.format(sequent),
                        '# infer {}'.format(seq),
                        '# cost {}'.format(cost),
                    ]
                    plan.program(programs)
        # Single-argument parenthesized print works identically on Python 2.
        print('\n'.join(programs))
def measure(*filenames):
    """Measure complexity of rules in files."""
    if not filenames:
        filenames = find_theories()
    # Gather every rule from every theory file, then measure each one.
    rules = []
    for name in filenames:
        rules.extend(load_theory(name)['rules'])
    for rule in rules:
        measure_sequent(rule)
def test_close_rules():
    # group.theory, h4.theory: not abstractable.
    # quote.theory: no validator implemented.
    skip = ('group.theory', 'h4.theory', 'quote.theory')
    for filename in find_theories():
        basename = filename.split('/')[-1]
        yield main.test_close_rules, filename, basename not in skip
def normalize(*filenames):
    """Show normalized rule set derived from each rule."""
    if not filenames:
        filenames = find_theories()
    rules = []
    for name in filenames:
        rules.extend(load_theory(name)['rules'])
    # Print each rule followed by its indented contrapositive forms.
    for rule in rules:
        print(rule.ascii())
        print('')
        for contra in rule.contrapositives():
            print(contra.ascii(indent=4))
            print('')
def contrapositves(*filenames):
    """Close rules under contrapositive.

    Prints every rule from the given theory files (all theories when none
    are given), each followed by its contrapositive forms, indented.
    """
    # NOTE(review): the function name misspells 'contrapositives'; it is
    # left unchanged because callers/CLI dispatch may look it up by name --
    # confirm before renaming.  The body is identical to normalize() above.
    if not filenames:
        filenames = find_theories()
    sequents = []
    for filename in filenames:
        sequents += load_theory(filename)['rules']
    for sequent in sequents:
        print sequent.ascii()
        print
        for neg in sequent.contrapositives():
            print neg.ascii(indent=4)
            print
def batch_extract_tasks(*filenames, **kwargs):
    '''
    Extract tasks from infiles '*.theory', saving to '*.tasks'.
    Options: parallel=true
    '''
    if not filenames:
        filenames = find_theories()
    # Collect (infile, outfile) pairs whose output is stale or missing.
    pairs = []
    for infile in filenames:
        infile = os.path.abspath(infile)
        assert infile.endswith('.theory'), infile
        outfile = infile.replace('.theory', '.tasks')
        if not up_to_date([infile], [outfile]):
            pairs.append((infile, outfile))
    parallel = parse_bool(kwargs.get('parallel', 'true'))
    if parallel:
        pool = multiprocessing.Pool()
        try:
            pool.map(_extract_tasks, pairs)
        finally:
            # FIX: the pool was never closed, leaking worker processes.
            pool.close()
            pool.join()
    else:
        # FIX: builtin map() is lazy on Python 3; list() forces the work
        # to actually run.  (No-op extra list on Python 2.)
        list(map(_extract_tasks, pairs))
def profile_compile(*filenames, **kwargs):
    """Profile full compiler chain (task generation + optimization).

    Optional keyword arguments:
        loadfrom = None
        saveto = 'compile.pstats'
    """
    if not filenames:
        filenames = find_theories()
    loadfrom = kwargs.get("loadfrom")
    saveto = kwargs.get("saveto", "compile.pstats")
    if loadfrom is None:
        # Build a compile(...) invocation with each filename quoted.
        quoted = ", ".join('"{}"'.format(name) for name in filenames)
        command = 'compile({}, frontend_out="/dev/null")'.format(quoted)
        print("profiling {}".format(command))
        profile.runctx(command, {"compile": compile}, None, saveto)
        loadfrom = saveto
    # Report the 50 most time-consuming entries.
    stats = pstats.Stats(loadfrom)
    stats.strip_dirs()
    stats.sort_stats("time")
    stats.print_stats(50)
def profile_tasks(*filenames, **kwargs):
    """Profile task generation (first part of compiler chain).

    Optional keyword arguments:
        loadfrom = None
        saveto = 'tasks.pstats'
    """
    if not filenames:
        filenames = find_theories()
    loadfrom = kwargs.get("loadfrom")
    saveto = kwargs.get("saveto", "tasks.pstats")
    if loadfrom is None:
        # Build a batch_extract_tasks(...) invocation with filenames quoted.
        quoted = ", ".join('"{}"'.format(name) for name in filenames)
        command = "batch_extract_tasks({}, parallel=false)".format(quoted)
        print("profiling {}".format(command))
        profile.run(command, saveto)
        loadfrom = saveto
    # Report the 50 most time-consuming entries.
    stats = pstats.Stats(loadfrom)
    stats.strip_dirs()
    stats.sort_stats("time")
    stats.print_stats(50)
from pomagma.compiler import parser
from pomagma.compiler.util import find_theories
from pomagma.util.testing import for_each


@for_each(find_theories())
def test_parse_theory_file(filename):
    """Every theory file parses without error."""
    parser.parse_theory_file(filename)


@for_each(find_theories())
def test_parse_theory_string(filename):
    """Every theory file's raw text parses as a string."""
    with open(filename) as theory:
        contents = theory.read()
    parser.parse_theory_string(contents)
import os
from pomagma.compiler import sequents
from pomagma.compiler import parser
from pomagma.compiler.util import find_theories
from pomagma.compiler.sugar import desugar_theory

# Desugared rules for every theory, keyed by the theory file's basename.
RULE_SETS = {
    os.path.basename(path): desugar_theory(parser.parse_theory_file(path))['rules']
    for path in find_theories()
}


def _test_contrapositives(name):
    """Print each rule of the named theory with its contrapositives."""
    print('# contrapositives')
    print('')
    for rule in RULE_SETS[name]:
        print(rule.ascii())
        print('')
        if len(rule.succedents) != 1:
            print(' TODO')
            print('')
            continue
        for seq in sequents.get_contrapositives(rule):
            print(seq.ascii(indent=4))
            print('')


def test_contrapositives():
    for name in RULE_SETS:
        yield _test_contrapositives, name
def test_compile_facts():
    # One compile check per theory file.
    for theory in find_theories():
        yield _test_compile, theory
def test_parse_theory_file():
    # One parse check per theory file.
    for path in find_theories():
        yield parser.parse_theory_file, path
def test_parse_theory_string():
    # One string-parse check per theory file.
    for path in find_theories():
        yield _test_parse_theory_string, path