def execute():
    """Command-line entry point of the Grammarinator parser tool.

    Builds the argument parser, validates the input grammars, configures
    logging and the interpreter recursion limit, ensures the ANTLR jar is
    available, and then parses every input file into a Grammarinator tree —
    either in a process pool or sequentially, depending on ``--jobs``.
    """
    arg_parser = ArgumentParser(description='Grammarinator: Parser', epilog="""
The tool parses files with ANTLR v4 grammars, builds Grammarinator-
compatible tree representations from them and saves them for further reuse.
""")
    arg_parser.add_argument('files', nargs='+',
                            help='input files to process.')
    arg_parser.add_argument('-g', '--grammars', nargs='+', metavar='FILE', required=True,
                            help='ANTLR grammar files describing the expected format of input to parse.')
    arg_parser.add_argument('-r', '--rule', metavar='NAME',
                            help='name of the rule to start parsing with (default: first parser rule)')
    arg_parser.add_argument('-t', '--transformers', metavar='LIST', nargs='+', default=[],
                            help='list of transformers (in package.module.function format) to postprocess the parsed tree.')
    arg_parser.add_argument('--hidden', nargs='+', metavar='NAME',
                            help='list of hidden tokens to be built into the parsed tree.')
    arg_parser.add_argument('--antlr', metavar='FILE', default=default_antlr_path,
                            help='path of the ANTLR jar file (default: %(default)s).')
    arg_parser.add_argument('--encoding', metavar='ENC', default='utf-8',
                            help='input file encoding (default: %(default)s).')
    arg_parser.add_argument('--disable-cleanup', dest='cleanup', default=True, action='store_false',
                            help='disable the removal of intermediate files.')
    arg_parser.add_argument('-j', '--jobs', default=os.cpu_count(), type=int, metavar='NUM',
                            help='parsing parallelization level (default: number of cpu cores (%(default)d)).')
    arg_parser.add_argument('--max-depth', type=int, default=float('inf'),
                            help='maximum expected tree depth (deeper tests will be discarded (default: %(default)f)).')
    arg_parser.add_argument('-o', '--out', metavar='DIR', default=os.getcwd(),
                            help='directory to save the trees (default: %(default)s).')
    arg_parser.add_argument('--parser-dir', metavar='DIR',
                            help='directory to save the parser grammars (default: <OUTDIR>/grammars).')
    arg_parser.add_argument('--sys-recursion-limit', metavar='NUM', type=int, default=sys.getrecursionlimit(),
                            help='override maximum depth of the Python interpreter stack (default: %(default)d)')
    arg_parser.add_argument('--log-level', default='INFO', metavar='LEVEL',
                            help='verbosity level of diagnostic messages (default: %(default)s).')
    arg_parser.add_argument('--version', action='version', version='%(prog)s {version}'.format(version=__version__))
    args = arg_parser.parse_args()

    # Fail early if any of the grammar files is missing.
    for grammar_path in args.grammars:
        if not exists(grammar_path):
            arg_parser.error('{grammar} does not exist.'.format(grammar=grammar_path))

    # Derive the default parser grammar directory from the output directory.
    args.parser_dir = args.parser_dir or join(args.out, 'grammars')

    logger.setLevel(args.log_level)
    sys.setrecursionlimit(int(args.sys_recursion_limit))

    # Only auto-install the ANTLR jar if the user did not point at a custom one.
    if args.antlr == default_antlr_path:
        antlerinator.install(lazy=True)

    test_args = (args.files, args.rule, args.out, args.encoding)
    with ParserFactory(grammars=args.grammars, hidden=args.hidden, transformers=args.transformers,
                       parser_dir=args.parser_dir, antlr=args.antlr,
                       max_depth=args.max_depth, cleanup=args.cleanup) as factory:
        if args.jobs > 1:
            # Distribute the per-file parsing work across a process pool.
            with Pool(args.jobs) as pool:
                pool.starmap(factory.tree_from_file, iterate_tests(*test_args))
        else:
            for job_args in iterate_tests(*test_args):
                factory.tree_from_file(*job_args)
def process_antlr4_path(antlr=None):
    """Resolve and validate the path of the ANTLR v4 jar.

    :param antlr: path of the ANTLR jar file; falls back to the module-level
        default when ``None`` or empty.
    :return: absolute path of the jar, or ``None`` if it does not exist
        (an error is logged in that case).
    """
    jar_path = antlr or antlr_default_path
    # When running with the default path, make sure the jar gets installed
    # on demand (lazy: no-op if it is already there).
    if jar_path == antlr_default_path:
        antlerinator.install(lazy=True)

    if exists(jar_path):
        return abspath(relpath(jar_path))

    logger.error('%s does not exist.', jar_path)
    return None
def run_antlr(grammar, commandline, tmpdir):
    """
    'ANTLR' test command runner.

    Invokes the ANTLR v4 parser/lexer generator tool for the Python 3 target
    with the given command line, thereby testing whether a grammar is valid
    for ANTLR v4.

    :param grammar: file name of the grammar that contained the test command.
    :param commandline: command line as specified in the test command.
    :param tmpdir: path to a temporary directory (provided by the environment).
    """
    # Make sure the ANTLR jar is available before trying to run it.
    antlerinator.install(lazy=True)

    antlr_cmd = 'java -jar {antlr} -Dlanguage=Python3 {commandline}'.format(
        antlr=antlerinator.antlr_jar_path,
        commandline=commandline)
    run_subprocess(grammar, antlr_cmd, tmpdir)
def execute():
    """Command-line entry point of the Grammarinator processor tool.

    Builds the argument parser, validates the input grammars, ensures the
    ANTLR jar is available, generates the fuzzer sources from the grammars,
    and finally removes the intermediate ANTLR working directory unless
    cleanup has been disabled.
    """
    arg_parser = ArgumentParser(description='Grammarinator: Processor', epilog="""
The tool processes a grammar in ANTLR v4 format (*.g4, either separated
to lexer and parser grammar files, or a single combined grammar) and
creates a fuzzer (a pair of unlexer and unparser) that can generate
randomized content conforming to the format described by the grammar.
""")
    arg_parser.add_argument('grammars', nargs='+', metavar='FILE',
                            help='ANTLR grammar files describing the expected format to generate.')
    arg_parser.add_argument('--antlr', metavar='FILE', default=default_antlr_path,
                            help='path of the ANTLR jar file (default: %(default)s).')
    arg_parser.add_argument('--no-actions', dest='actions', default=True, action='store_false',
                            help='do not process inline actions.')
    arg_parser.add_argument('--encoding', metavar='ENC', default='utf-8',
                            help='grammar file encoding (default: %(default)s).')
    arg_parser.add_argument('--lib', metavar='DIR',
                            help='alternative location of import grammars.')
    arg_parser.add_argument('--disable-cleanup', dest='cleanup', default=True, action='store_false',
                            help='disable the removal of intermediate files.')
    arg_parser.add_argument('--pep8', default=False, action='store_true',
                            help='enable autopep8 to format the generated fuzzer.')
    arg_parser.add_argument('--log-level', metavar='LEVEL', default='INFO',
                            help='verbosity level of diagnostic messages (default: %(default)s).')
    arg_parser.add_argument('-o', '--out', metavar='DIR', default=getcwd(),
                            help='temporary working directory (default: %(default)s).')
    arg_parser.add_argument('--version', action='version', version='%(prog)s {version}'.format(version=__version__))
    args = arg_parser.parse_args()

    logger.setLevel(args.log_level)

    # Fail early if any of the grammar files is missing.
    for grammar_path in args.grammars:
        if not exists(grammar_path):
            arg_parser.error('{grammar} does not exist.'.format(grammar=grammar_path))

    # Only auto-install the ANTLR jar if the user did not point at a custom one.
    if args.antlr == default_antlr_path:
        antlerinator.install(lazy=True)

    FuzzerFactory(args.out, args.antlr).generate_fuzzer(args.grammars,
                                                        encoding=args.encoding,
                                                        lib_dir=args.lib,
                                                        actions=args.actions,
                                                        pep8=args.pep8)

    if args.cleanup:
        # Drop the intermediate ANTLR working directory; ignore if absent.
        rmtree(join(args.out, 'antlr'), ignore_errors=True)
def execute():
    """Command-line entry point of the Generinator:RATS processor.

    Parses the command line, prepares the parser configurations, and
    processes every input test — either with a process pool or sequentially,
    depending on ``--jobs``.
    """
    cli = ArgumentParser(description='Generinator:RATS Processor')
    cli.add_argument('input', nargs='+',
                     help='files or directories to gather information from')
    cli.add_argument('--antlr', metavar='FILE', default=antlr_default_path,
                     help='path of the antlr jar file (default: %(default)s)')
    cli.add_argument('-l', '--log-level', metavar='LEVEL', default=logging.INFO,
                     help='set log level (default: INFO)')
    cli.add_argument('--uri', default='mongodb://localhost/fuzzinator',
                     help='URI of the database to store gathered information (default: %(default)s)')
    cli.add_argument('-j', '--jobs', default=cpu_count(), type=int, metavar='NUM',
                     help='test parsing parallelization level (default: number of cpu cores (%(default)d))')
    cli.add_argument('-o', '--out', metavar='DIR', default=getcwd(),
                     help='temporary working directory (default: .)')
    cli.add_argument('--sys-recursion-limit', metavar='NUM', type=int, default=sys.getrecursionlimit(),
                     help='override maximum depth of the Python interpreter stack (default: %(default)d)')
    cli.add_argument('--version', action='version', version='%(prog)s {version}'.format(version=__version__))
    args = cli.parse_args()

    logger.setLevel(args.log_level)
    sys.setrecursionlimit(args.sys_recursion_limit)

    # Only auto-install the ANTLR jar if the user did not point at a custom one.
    if args.antlr == antlr_default_path:
        antlerinator.install(lazy=True)

    makedirs(args.out, exist_ok=True)
    configs = prepare_parsing(args.antlr, args.out)

    # NOTE(review): the parallel path dispatches `process` while the
    # sequential path calls `update_listeners` and then `process_file`;
    # presumably `process` wraps both — confirm against their definitions.
    if args.jobs > 1:
        with Pool(args.jobs) as pool:
            pool.starmap(process, iterate_tests(args.uri, args.input, configs))
    else:
        update_listeners(configs)
        for job in iterate_tests(args.uri, args.input, configs):
            process_file(*job)
def test_api():
    """Smoke-test the antlerinator install API end to end."""
    # Install with force=True first, then call again with lazy=True
    # (presumably a no-op once the jar is present — semantics defined by
    # antlerinator, not visible here).
    antlerinator.install(force=True)
    antlerinator.install(lazy=True)
    # A plain install over the already-installed jar is expected to raise.
    with pytest.raises(FileExistsError):
        antlerinator.install()
    # NOTE(review): run_antlr is invoked with no arguments here; presumably a
    # zero-argument helper from this test module — confirm, as it is not
    # visible in this chunk.
    run_antlr()
def process_antlr_argument(args):
    """Ensure the ANTLR jar is available when the default path is in use.

    :param args: parsed command-line arguments with an ``antlr`` attribute.
    """
    # A custom jar path is the user's responsibility; only auto-install
    # (lazily) when the default path is selected.
    if default_antlr_path == args.antlr:
        antlerinator.install(lazy=True)