def generate_pyasn_code(definition):
    """Translate an ASN.1 definition string into pyasn1 source code.

    Parses *definition*, builds the semantic model, renders every
    resulting module into one in-memory buffer, and returns the
    accumulated Python source as a string.
    """
    semantic_modules = build_semantic_model(parse_asn1(definition))
    buffer = StringIO()
    for semantic_module in semantic_modules:
        generate_pyasn1(semantic_module, buffer)
    return buffer.getvalue()
def main():
    """Command-line entry point: translate an ASN.1 file to pyasn1 code.

    Writes the generated Python source to stdout, or to one
    ``<module>.py`` file per ASN.1 module when ``--split`` is given.

    Returns:
        0 on success (process exit status).
    """
    arg_parser = argparse.ArgumentParser(
        description='Generate Python classes from an ASN.1 definition '
                    'file. Output to stdout by default.')
    arg_parser.add_argument('file', metavar='file',
                            type=argparse.FileType('r'),
                            help='the ASN.1 file to process')
    arg_parser.add_argument('--split', action='store_true',
                            help='output multiple modules to separate files')
    args = arg_parser.parse_args()
    # argparse.FileType has already opened the file; close it as soon as
    # the contents are read instead of leaking the handle (the original
    # never closed it).
    with args.file as infile:
        asn1def = infile.read()
    parse_tree = parser.parse_asn1(asn1def)
    modules = build_semantic_model(parse_tree)
    if len(modules) > 1 and not args.split:
        print('WARNING: More than one module generated to the same stream.',
              file=sys.stderr)
    output_file = sys.stdout
    for module in modules:
        try:
            if args.split:
                output_file = open(_sanitize_module(module.name) + '.py', 'w')
            print(pygen.auto_generated_header(), file=output_file)
            generate_pyasn1(module, output_file, modules)
        finally:
            # Only close per-module files we opened; never close stdout.
            if output_file != sys.stdout:
                output_file.close()
    return 0
def main():
    """Entry point: parse, analyse, or generate code for an ASN.1 file.

    Exactly one of the ``--parse``, ``--sema`` or ``--gen`` actions is
    performed, checked in that order.

    Returns:
        0 on success, 1 on a command-line usage error.
    """
    args = parse_args()
    with open(args.file) as f:
        asn1def = f.read()
    # --outdir only makes sense when generating code.
    if args.outdir and (args.parse or args.sema):
        # Diagnostics belong on stderr (matches the warning/error style of
        # the other entry points in this tool); the original wrote to stdout.
        print('ERROR: can only use --outdir with --gen', file=sys.stderr)
        return 1
    parse_tree = parser.parse_asn1(asn1def)
    if args.parse:
        parser.print_parse_tree(parse_tree)
        return 0
    modules = sema.build_semantic_model(parse_tree)
    if args.sema:
        for module in modules:
            print(module)
        return 0
    if args.gen:
        for module in modules:
            generate_module_code(args, module, modules)
        return 0
def go(args):
    """Generate code for the single ASN.1 module named by ``args.file``.

    The target backend is selected by ``args.lang`` ('python' or 'c');
    output is written under ``args.outdir`` (default: current directory).

    Raises:
        ValueError: if ``args.lang`` names an unsupported backend.
    """
    with open(args.file) as f:
        asn1def = f.read()
    parse_tree = parser.parse_asn1(asn1def)
    modules = build_semantic_model(parse_tree)
    # This generator only handles single-module definitions.
    assert (len(modules) == 1)
    # Derive the generated module's name from the input file's base name.
    base, ext = os.path.splitext(args.file)
    parts = os.path.split(base)
    module_name = parts[-1]
    if args.outdir:
        path = args.outdir
    else:
        path = "."
    if args.lang == 'python':
        from tinyber.py_nodes import PythonBackend as Backend
        from tinyber import py_nodes as nodes
    elif args.lang == 'c':
        from tinyber.c_nodes import CBackend as Backend
        from tinyber import c_nodes as nodes
    else:
        # Previously an unknown language fell through and crashed later
        # with an opaque NameError on `Backend`; fail fast instead.
        raise ValueError('unsupported language: %r' % (args.lang,))
    # pull in the language-specific node implementations
    walker = Walker(modules[0], nodes)
    walker.walk()
    backend = Backend(args, walker, module_name, path)
    backend.generate_code()
def main():
    """Entry point: generate pyasn1 classes from an ASN.1 definition file.

    Output goes to stdout by default, or to one ``<module>.py`` per
    ASN.1 module when ``--split`` is given.

    Returns:
        0 on success (process exit status).
    """
    # NOTE: the two description literals are concatenated implicitly; the
    # original was missing the separating space, yielding "file.Output".
    arg_parser = argparse.ArgumentParser(
        description='Generate Python classes from an ASN.1 definition file. '
        'Output to stdout by default.')
    arg_parser.add_argument('file', help='the ASN.1 file to process')
    arg_parser.add_argument('--split', action='store_true',
                            help='output multiple modules to separate files')
    args = arg_parser.parse_args()
    with open(args.file, 'r') as data:
        asn1def = data.read()
    parse_tree = parser.parse_asn1(asn1def)
    modules = build_semantic_model(parse_tree)
    if len(modules) > 1 and not args.split:
        print('WARNING: More than one module generated to the same stream.',
              file=sys.stderr)
    output_file = sys.stdout
    for module in modules:
        try:
            if args.split:
                output_file = open(_sanitize_module(module.name) + '.py', 'w')
            print(pygen.auto_generated_header(args.file, __version__),
                  file=output_file)
            generate_pyasn1(module, output_file, modules)
        finally:
            # Only close per-module files we opened; never close stdout.
            if output_file != sys.stdout:
                output_file.close()
    return 0
def main(args):
    """Generate pyasn1 code for every module found in ``args.file``.

    In split mode each module is written to its own ``<module>.py`` with
    its own generated header; otherwise all modules share stdout under a
    single header emitted up front.

    Returns:
        0 on success (process exit status).
    """
    with open(args.file, 'r') as data:
        asn1def = data.read()
    modules = build_semantic_model(parser.parse_asn1(asn1def))
    if not args.split and len(modules) > 1:
        print('WARNING: More than one module generated to the same stream.',
              file=sys.stderr)
    header = pygen.auto_generated_header(args.file, __version__)
    if not args.split:
        # Emit the header a single time up front, then clear it so the
        # per-module generation below does not repeat it.
        print(header, file=sys.stdout)
        header = None
    for module in modules:
        destination = _sanitize_module(module.name) + '.py' if args.split else '-'
        with _maybe_open(destination) as output_file:
            generate_pyasn1(module, output_file, modules, header=header)
    return 0
def go(args):
    """Generate code for the single ASN.1 module named by ``args.file``.

    The target backend is selected by ``args.lang`` ('python' or 'c');
    output is written under ``args.outdir`` (default: current directory).

    Raises:
        ValueError: if ``args.lang`` names an unsupported backend.
    """
    with open(args.file) as f:
        asn1def = f.read()
    parse_tree = parser.parse_asn1(asn1def)
    modules = build_semantic_model(parse_tree)
    # This generator only handles single-module definitions.
    assert (len(modules) == 1)
    # Derive the generated module's name from the input file's base name.
    base, ext = os.path.splitext(args.file)
    parts = os.path.split(base)
    module_name = parts[-1]
    if args.outdir:
        path = args.outdir
    else:
        path = "."
    if args.lang == 'python':
        from tinyber.py_nodes import PythonBackend as Backend
        from tinyber import py_nodes as nodes
    elif args.lang == 'c':
        from tinyber.c_nodes import CBackend as Backend
        from tinyber import c_nodes as nodes
    else:
        # Previously an unknown language fell through and crashed later
        # with an opaque NameError on `Backend`; fail fast instead.
        raise ValueError('unsupported language: %r' % (args.lang,))
    # pull in the language-specific node implementations
    walker = Walker(modules[0], nodes)
    walker.walk()
    backend = Backend(args, walker, module_name, path)
    backend.generate_code()
def main(args):
    """Translate the ASN.1 file named by ``args[0]`` to pyasn1 on stdout.

    ``args`` is a positional argument list (not an argparse namespace).

    Returns:
        0 on success (process exit status).
    """
    with open(args[0]) as source:
        definition = source.read()
    modules = build_semantic_model(parser.parse_asn1(definition))
    if len(modules) > 1:
        print('WARNING: More than one module generated to the same stream.',
              file=sys.stderr)
    # Each module gets its own generated header on the shared stream.
    for module in modules:
        print(pygen.auto_generated_header())
        generate_pyasn1(module, sys.stdout)
    return 0
def generate(infilename, outfilename):
    """Generate C code from the single-module ASN.1 file *infilename*.

    ``outfilename`` is used as the generated module's name; output files
    are written to the current directory.
    """
    class FakeArgs(object):
        # Minimal stand-in for the argparse namespace the backend expects.
        no_standalone = False
    # (Removed an unused function-local `import os`.)
    with open(infilename) as f:
        asn1def = f.read()
    parse_tree = parser.parse_asn1(asn1def)
    modules = build_semantic_model(parse_tree)
    # This generator only supports single-module definitions.
    assert (len(modules) == 1)
    module_name = outfilename
    path = "."
    args = FakeArgs()
    # pull in the python-specific node implementations
    walker = Walker(modules[0], nodes)
    walker.walk()
    backend = CBackend(args, walker, module_name, path)
    backend.generate_code()
def main():
    """Dispatch to parse-tree dump, semantic dump, or code generation.

    Actions are checked in order: ``--parse``, ``--sema``, ``--gen``;
    the first one set wins.

    Returns:
        0 on success (process exit status).
    """
    args = parse_args()
    with open(args.file) as source:
        definition = source.read()
    tree = parser.parse_asn1(definition)
    if args.parse:
        parser.print_parse_tree(tree)
        return 0
    semantic_modules = sema.build_semantic_model(tree)
    if args.sema:
        for semantic_module in semantic_modules:
            print(semantic_module)
        return 0
    if args.gen:
        # Every module is rendered to stdout, each under its own header.
        for semantic_module in semantic_modules:
            print(pygen.auto_generated_header(args.file, __version__))
            pyasn1gen.generate_pyasn1(semantic_module, sys.stdout,
                                      semantic_modules)
        return 0
def main(args):
    """Generate pyasn1 code for ``args.file``, optionally split per module.

    With ``--split`` each module is written to its own sanitized
    ``<module>.py``; otherwise all modules share stdout and the generated
    header is emitted only once.  With ``--include-asn1`` the original
    ASN.1 text of each module is embedded in an ``ASN1_SOURCES`` dict.

    Returns:
        0 on success (process exit status).
    """
    with open(args.file, 'r') as data:
        asn1def = data.read()
    modules = build_semantic_model(parser.parse_asn1(asn1def))
    if len(modules) > 1 and not args.split:
        print('WARNING: More than one module generated to the same stream.',
              file=sys.stderr)
    header = pygen.auto_generated_header(args.file, __version__)
    if args.include_asn1:
        # Declare the source-text registry once, up in the header.
        header += 'ASN1_SOURCES = {}'
        header += os.linesep
    if not args.split:
        # Emit the header a single time up front, then clear it so the
        # per-module generation below does not repeat it.
        print(header, file=sys.stdout)
        header = None
    for module in modules:
        if args.split:
            destination = _sanitize_module(module.name) + '.py'
        else:
            destination = '-'
        footer = None
        if args.include_asn1:
            # Register this module's original ASN.1 text in the footer.
            footer = ('ASN1_SOURCES[%r] = %s'
                      % (module.name, pygen.format_longstring(str(module))))
            footer += os.linesep
        with _maybe_open(destination) as output_file:
            generate_pyasn1(module, output_file, modules,
                            header=header, footer=footer)
    return 0
def main():
    """Dispatch to parse-tree dump, semantic dump, or code generation.

    Returns:
        0 on success, 1 on a command-line usage error.
    """
    args = parse_args()
    with open(args.file) as source:
        definition = source.read()
    # --outdir is meaningful only when generating code.
    if args.outdir and not args.gen:
        print('ERROR: can only use --outdir with --gen', file=sys.stderr)
        return 1
    tree = parser.parse_asn1(definition)
    if args.parse:
        parser.print_parse_tree(tree)
        return 0
    semantic_modules = sema.build_semantic_model(tree)
    if args.sema:
        for semantic_module in semantic_modules:
            print(semantic_module)
        return 0
    if args.gen:
        generate_module_code(args)
        return 0
"""The main program asn2quickder is called with one or more .asn1 files,
the first of which is mapped to a C header file and the rest is loaded to fulfil dependencies.
"""
# Require at least the main .asn1 file on the command line.
if len(sys.argv) < 2:
    sys.stderr.write('Usage: %s main[.asn1] dependency[.asn1]...\n' % sys.argv[0])
    sys.exit(1)
mods = []
for file in sys.argv[1:]:
    # NOTE: `file` shadows the Python 2 builtin of the same name.
    print('Parsing', file)
    with open(file, 'r') as asn1fh:
        asn1txt = asn1fh.read()
    asn1tree = parser.parse_asn1(asn1txt)
    print('Building semantic model for', file)
    asn1sem = build_semantic_model(asn1tree)
    # insert(0, ...) keeps `mods` in reverse command-line order, so the
    # first (main) file ends up at mods[-1].
    mods.insert(0, asn1sem[0])
    print('Realised semantic model for', file)
# NOTE(review): given the reverse ordering above, mods[1:] includes the
# main module itself and omits the last-parsed dependency — presumably the
# intended dependency list is mods[:-1]; confirm against QuickDERgen before
# relying on multi-file input.
cogen = QuickDERgen(mods[-1], os.path.basename(sys.argv[1]), mods[1:])
cogen.generate_head()
cogen.generate_overlay()
cogen.generate_pack()
cogen.generate_tail()
cogen.close()
"""The main program asn2quickder is called with one or more .asn1 files,
the first of which is mapped to a C header file and the rest is loaded
to fulfil dependencies.
"""
if len(sys.argv) < 2:
    # No input given: show usage and exit with an error status.
    sys.stderr.write('Usage: %s main[.asn1] dependency[.asn1]...\n' % sys.argv[0])
    sys.exit(1)
mods = []
for file in sys.argv[1:]:
    print('Parsing', file)
    with open(file, 'r') as asn1fh:
        asn1txt = asn1fh.read()
    asn1tree = parser.parse_asn1(asn1txt)
    print('Building semantic model for', file)
    asn1sem = build_semantic_model(asn1tree)
    # Prepend so the first command-line file ends up last in the list.
    mods.insert(0, asn1sem[0])
    print('Realised semantic model for', file)
# mods[-1] (the first command-line file) drives generation; the remaining
# entries are passed along as the dependency list.
cogen = QuickDERgen(mods[-1], os.path.basename(sys.argv[1]), mods[1:])
cogen.generate_head()
cogen.generate_overlay()
cogen.generate_pack()
cogen.generate_tail()
cogen.close()