def load_bug(bug_fn, grammar_meta):
    """Parse the bug-reproducing input in `bug_fn` with the grammar from `grammar_meta`.

    Returns the (unchanged) grammar metadata together with the coalesced
    first derivation tree of the parsed input.
    """
    with open(bug_fn) as bug_file:
        bug_source = bug_file.read()
    parser = Parser(grammar_meta['[grammar]'],
                    start_symbol=grammar_meta['[start]'],
                    canonical=True)  # log=True)
    # Materialize the parse forest and keep only the first derivation.
    derivations = list(parser.parse(bug_source.strip()))
    return grammar_meta, coalesce(derivations[0])
def main(grammar, bug_fn, predicate):
    """Evaluate how well the learned "at least one fault" grammar recognizes
    the logged inputs for `bug_fn`.

    `grammar` and `predicate` are accepted for interface compatibility but
    are not used here.  Reads ./results/<bug>.log.json (one JSON record per
    line) and ./results/<bug>_atleast_one_fault_g.json (the learned grammar),
    then prints how many successfully-reproduced inputs the learned grammar
    accepts, and how many failed-to-reproduce inputs it wrongly accepts.
    """
    base = os.path.basename(bug_fn)
    input_file = './results/%s.log.json' % base
    one_fault_grammar_file = './results/%s_atleast_one_fault_g.json' % base
    with open(one_fault_grammar_file) as f:
        one_fault_meta_g = json.load(f)
    one_fault_grammar = one_fault_meta_g['[grammar]']
    one_fault_start = one_fault_meta_g['[start]']
    p = Parser(one_fault_grammar, start_symbol=one_fault_start, canonical=True)

    success_count_total = 0
    success_count_neg = 0
    fail_count_total = 0
    fail_count_neg = 0
    count_total = 0
    with open(input_file) as f:
        jsoninputs = f.readlines()
    for line in jsoninputs:
        res = json.loads(line)
        if res['res'] == 'PRes.invalid':
            continue
        elif res['res'] == 'PRes.success':
            success_count_total += 1
            if not p.can_parse(res['src']):
                # Cannot parse a failure successfully reproduced -- bad.
                success_count_neg += 1
                print('ERROR:', res)
        elif res['res'] == 'PRes.failed':
            fail_count_total += 1
            if p.can_parse(res['src']):
                # Can parse as failure an input that was marked as failed
                # to reproduce -- bad.
                fail_count_neg += 1
        else:
            assert False
        count_total += 1

    # FIX: guard the percentage computations -- the original divided
    # unconditionally and raised ZeroDivisionError whenever the log
    # contained no success (or no failure) records.
    if success_count_total:
        print('Recognize Success: %d/%d = %f%%' % (
            (success_count_total - success_count_neg), success_count_total,
            (success_count_total - success_count_neg) * 100.0 / success_count_total))
    else:
        print('Recognize Success: 0/0 (no success records)')
    if fail_count_total:
        print('Recognize Fail: %d/%d = %f%%' % (
            (fail_count_total - fail_count_neg), fail_count_total,
            (fail_count_total - fail_count_neg) * 100.0 / fail_count_total))
    else:
        print('Recognize Fail: 0/0 (no failure records)')
class EvalMysteryRunner(MysteryRunner):
    """MysteryRunner that reports syntactically invalid inputs as UNRESOLVED."""

    def __init__(self):
        # Parser used only to check that inputs are valid expressions.
        self.parser = EarleyParser(EXPR_GRAMMAR)

    def run(self, inp):
        """Run `inp`; if it does not parse as an expression, mark it UNRESOLVED."""
        try:
            derivation_tree, *_ = self.parser.parse(inp)
        except SyntaxError:
            return (inp, Runner.UNRESOLVED)
        return super().run(inp)
print('\n## Learning Probabilities from Samples')

# ### Counting Expansions

if __name__ == "__main__":
    print('\n### Counting Expansions')

if __package__ is None or __package__ == "":
    from Parser import Parser, EarleyParser, PEGParser
else:
    from .Parser import Parser, EarleyParser, PEGParser

IP_ADDRESS_TOKENS = {"<octet>"}  # EarleyParser needs explicit tokens

if __name__ == "__main__":
    parser = EarleyParser(IP_ADDRESS_GRAMMAR)

if __name__ == "__main__":
    # FIX: EarleyParser.parse() yields derivation trees lazily (it is a
    # generator), so it cannot be indexed with [0]; unpack the first tree
    # instead, as the sibling chunks of this file already do.
    tree, *_ = parser.parse("127.0.0.1")
    display_tree(tree)

class ExpansionCountMiner(object):
    """Counts how often each grammar expansion occurs in parsed samples."""

    def __init__(self, parser, log=False):
        # Require a Parser so we can obtain its grammar below.
        assert isinstance(parser, Parser)
        self.grammar = extend_grammar(parser.grammar())
        self.parser = parser
        self.log = log
        self.reset()  # presumably defined later in this class -- not visible here
if __name__ == "__main__":
    assert is_valid_grammar(XML_GRAMMAR)

if __package__ is None or __package__ == "":
    from Parser import EarleyParser
else:
    from .Parser import EarleyParser

if __package__ is None or __package__ == "":
    from GrammarFuzzer import display_tree
else:
    from .GrammarFuzzer import display_tree

if __name__ == "__main__":
    # Display every derivation of the sample document under the XML grammar.
    parser = EarleyParser(XML_GRAMMAR, tokens=XML_TOKENS)
    for tree in parser.parse("<html>Text</html>"):
        display_tree(tree)

# ### Building the Fragment Pool

if __name__ == "__main__":
    print('\n### Building the Fragment Pool')

class FragmentMutator(Mutator):
    """Mutator that recombines parsed fragments of seed inputs."""

    def __init__(self, parser):
        """Initialize empty fragment pool and add parser"""
        self.parser = parser
        # One (initially empty) fragment list per nonterminal of the grammar.
        self.fragments = {k: [] for k in self.parser.cgrammar}
def __init__(self):
    """Set up an Earley parser over the expression grammar for this runner."""
    self.parser = EarleyParser(EXPR_GRAMMAR)
# ### A Grammmar-Based Reduction Approach

if __name__ == "__main__":
    print('\n### A Grammmar-Based Reduction Approach')

if __package__ is None or __package__ == "":
    from GrammarFuzzer import all_terminals, expansion_to_children, display_tree
else:
    from .GrammarFuzzer import all_terminals, expansion_to_children, display_tree

if __name__ == "__main__":
    # Keep only the first derivation tree produced by the parser.
    derivation_tree, *_ = EarleyParser(EXPR_GRAMMAR).parse(expr_input)
    display_tree(derivation_tree)

# ### Simplifying by Replacing Subtrees

if __name__ == "__main__":
    print('\n### Simplifying by Replacing Subtrees')

import copy

if __name__ == "__main__":
    # Deep copy so subtree replacements do not alter the original tree.
    new_derivation_tree = copy.deepcopy(derivation_tree)
print('\n## Learning Probabilities from Samples')

# ### Counting Expansions

if __name__ == "__main__":
    print('\n### Counting Expansions')

if __package__ is None or __package__ == "":
    from Parser import Parser, EarleyParser, PEGParser
else:
    from .Parser import Parser, EarleyParser, PEGParser

IP_ADDRESS_TOKENS = {"<octet>"}  # EarleyParser needs explicit tokens

if __name__ == "__main__":
    parser = EarleyParser(IP_ADDRESS_GRAMMAR)

if __name__ == "__main__":
    # Unpack the first derivation tree; the parser may yield several.
    tree, *_ = parser.parse("127.0.0.1")
    display_tree(tree)

class ExpansionCountMiner(object):
    """Counts how often each grammar expansion occurs in parsed samples."""

    def __init__(self, parser, log=False):
        # A Parser instance is required so its grammar can be extended.
        assert isinstance(parser, Parser)
        self.grammar = extend_grammar(parser.grammar())
        self.parser = parser
        self.log = log
        self.reset()  # presumably defined later in this class -- not visible here
if __name__ == "__main__":
    print('\n### Counting Expansions')

if __package__ is None or __package__ == "":
    from Parser import Parser, EarleyParser, PEGParser
else:
    from .Parser import Parser, EarleyParser, PEGParser

IP_ADDRESS_TOKENS = {"<octet>"}  # EarleyParser needs explicit tokens

if __name__ == "__main__":
    parser = EarleyParser(IP_ADDRESS_GRAMMAR)

if __name__ == "__main__":
    # Unpack the first derivation tree; the parser may yield several.
    tree, *_ = parser.parse("127.0.0.1")
    display_tree(tree)

class ExpansionCountMiner(object):
    """Counts how often each grammar expansion occurs in parsed samples."""

    def __init__(self, parser, log=False):
        # A Parser instance is required so its grammar can be extended.
        assert isinstance(parser, Parser)
        self.grammar = extend_grammar(parser.grammar())
        self.parser = parser
        self.log = log
        self.reset()  # presumably defined later in this class -- not visible here
if __name__ == "__main__": print('\n## Learning Probabilities from Samples') if __package__ is None or __package__ == "": from GrammarFuzzer import display_tree else: from .GrammarFuzzer import display_tree if __package__ is None or __package__ == "": from Parser import EarleyParser else: from .Parser import EarleyParser if __name__ == "__main__": parser = EarleyParser(IP_ADDRESS_GRAMMAR) if __name__ == "__main__": tree = parser.parse("127.0.0.1")[0] display_tree(tree) if __name__ == "__main__": IP_ADDRESS_GRAMMAR["<octet>"] = list(sorted(decrange(0, 256), reverse=True)) if __package__ is None or __package__ == "": from Parser import PEGParser else: from .Parser import PEGParser if __name__ == "__main__":
from Grammars import EXPR_GRAMMAR else: from .Grammars import EXPR_GRAMMAR if __package__ is None or __package__ == "": from GrammarFuzzer import display_tree else: from .GrammarFuzzer import display_tree if __package__ is None or __package__ == "": from Parser import EarleyParser else: from .Parser import EarleyParser if __name__ == "__main__": parser = EarleyParser(EXPR_GRAMMAR) tree, *_ = parser.parse("1 + 2 * 3") display_tree(tree) def mutate_tree(tree, grammar): pass # ## Lessons Learned if __name__ == "__main__": print('\n## Lessons Learned') # ## Next Steps
from Parser import PEGParser else: from .Parser import PEGParser if __name__ == "__main__": parser = PEGParser(IP_ADDRESS_GRAMMAR) tree = parser.parse("127.0.0.1")[0] display_tree(tree) if __package__ is None or __package__ == "": from Parser import EarleyParser else: from .Parser import EarleyParser if __name__ == "__main__": parser = EarleyParser(IP_ADDRESS_GRAMMAR) if __name__ == "__main__": tree = parser.parse("127.0.0.1")[0] display_tree(tree) # ## Auto-Tuning Probabilities if __name__ == "__main__": print('\n## Auto-Tuning Probabilities') if __package__ is None or __package__ == "": from Coverage import Coverage, cgi_decode else: from .Coverage import Coverage, cgi_decode