def run(name, expr):
    """
    Emit and print the errors we get for the below grammar with "expr" as
    a property in BarNode.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print("== {} ==".format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        prop = Property(expr)

    def lang_def():
        foo_grammar = Grammar("main_rule")
        foo_grammar.add_rules(main_rule=Row("example") ^ BarNode)
        return foo_grammar

    emit_and_print_errors(lang_def)
    print("")
def run(name, *args):
    """
    Emit and print the errors we get for the below grammar with *args as
    a list of NodeMacro classes to use on BarNode.

    This will not only check the consistency of Property diagnostics, but
    also that the SLOCs generated for NodeMacros are good, i.e. they will
    reference the original definition site.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        _macros = args

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(main_rule=Row('example') ^ BarNode)
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(name, expr):
    """
    Emit and print the errors we get for the below grammar with "expr" as
    a property in ListNode.
    """
    global FooNode, BarNode, ListNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print("== {} ==".format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        list_node = Field()

    class ListNode(FooNode):
        nb_list = Field()
        bar_node_parent = Property(Self.parent.cast(BarNode))
        prop = Property(expr)

    def lang_def():
        foo_grammar = Grammar("main_rule")
        foo_grammar.add_rules(
            main_rule=Row("example", foo_grammar.list_rule) ^ BarNode,
            list_rule=Row(List(Tok(Token.Number))) ^ ListNode
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print("")
def run(name, expr):
    """
    Emit and print the errors we get for the below grammar with "expr" as
    a property in BarNode.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        prop = Property(expr)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row('example') ^ BarNode,
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(name, expr):
    """
    Emit and print the errors we get for the below grammar with "expr" as
    a property in BarNode.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        prop_2 = Property(lambda x=LongType: x)
        prop = Property(expr)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(main_rule=Row('example') ^ BarNode)
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(name, *args):
    """
    Emit and print the errors we get for the below grammar with *args as
    a list of NodeMacro classes to use on BarNode.

    This will not only check the consistency of Property diagnostics, but
    also that the SLOCs generated for NodeMacros are good, i.e. they will
    reference the original definition site.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        _macros = args

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(main_rule=Row('example') ^ BarNode)
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(expr):
    """
    Emit and print the errors we get for the below grammar for the given
    "expr" property expression.
    """
    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(expr))

    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    class ExampleNode(FooNode):
        tok = Field()

        implicit_prop = Property(Self, has_implicit_env=True)
        explicit_prop = Property(Self, has_implicit_env=False)

        prop = Property(expr, has_implicit_env=False)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row(Tok('example', keep=True)) ^ ExampleNode,
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(name, expr_fn):
    """
    Emit and print the errors we get for the below grammar with "expr_fn"
    as a property in Example.
    """
    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @abstract
    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    class Example(FooNode):
        name = Field()
        prop = Property(expr_fn)

    class Name(FooNode):
        tok = Field()

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Example('example', Opt(foo_grammar.name)),
            name=Name(Tok(Token.Identifier, keep=True)),
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(name, abstract_prop, prop=None):
    """
    Emit and print the errors we get for the below grammar with
    "abstract_prop" as a property in AbstractExample and, optionally,
    "prop" as a property in Example.
    """
    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    @abstract
    class AbstractExample(FooNode):
        p = abstract_prop()

    class Example(AbstractExample):
        p = prop() if prop else None

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row('example') ^ Example,
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
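Hypothetical invocations, as a sketch only: the property arguments are passed as zero-argument factories so that each run builds fresh Property instances. BoolType, AbstractProperty, Property and Literal are borrowed from the sibling tests, not from this file.

# Sketch, not from the original source.
run('abstract only',
    lambda: AbstractProperty(type=BoolType))
run('abstract and concrete',
    lambda: AbstractProperty(type=BoolType),
    lambda: Property(Literal(True)))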
def run(name, match_expr):
    """
    Emit and print the errors we get for the below grammar with
    "match_expr" as a property in ExampleNode.
    """
    global BodyNode, Compound, Expression, FooNode, NullNode, Number

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @abstract
    @root_grammar_class()
    class FooNode(ASTNode):
        prop = Property(Literal(0))

    @abstract
    class BodyNode(FooNode):
        pass

    class NullNode(BodyNode):
        pass

    @abstract
    class Expression(BodyNode):
        pass

    class Number(Expression):
        tok = Field()

    class Compound(Expression):
        prefix = Field()
        suffix = Field()

    class ExampleNode(FooNode):
        body = Field()
        prop = Property(match_expr)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row(
                'example',
                Or(foo_grammar.expression, Row('null') ^ NullNode)
            ) ^ ExampleNode,

            number=Tok(Token.Number, keep=True) ^ Number,

            expression=Or(
                Row(foo_grammar.number, ',', foo_grammar.expression)
                ^ Compound,
                foo_grammar.number
            ),
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(name, prop):
    """
    Emit and print the errors we get for the below grammar with "prop" as
    a property in Example.
    """
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    class Example(FooNode):
        result = Property(prop)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(main_rule=Row('example') ^ Example)
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(abstract_private, concrete_private):
    """
    Emit and print the errors we get for the below grammar for the given
    privacy levels.
    """
    fmt_privacy = {
        None: 'default',
        True: 'private',
        False: 'public',
    }
    print('== abstract: {}, concrete: {} =='.format(
        fmt_privacy[abstract_private],
        fmt_privacy[concrete_private]
    ))
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))

    @root_grammar_class()
    class RootNode(ASTNode):
        pass

    class AbstractNode(RootNode):
        prop = AbstractProperty(BoolType, private=abstract_private)

    class ConcreteNode(AbstractNode):
        prop = Property(Literal(True), private=concrete_private)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row('example') ^ ConcreteNode,
        )
        return foo_grammar

    if emit_and_print_errors(lang_def):
        for fld in (AbstractNode._fields['prop'],
                    ConcreteNode._fields['prop']):
            print('  {}: {}'.format(fld.qualname,
                                    fmt_privacy[fld.is_private]))
    print('')
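A minimal driver sketch, not part of the original file: it assumes the test wants to exercise every combination of abstract/concrete privacy levels.

# Sketch, assuming all nine combinations are of interest.
import itertools

for abstract_private, concrete_private in itertools.product(
    (None, True, False), repeat=2
):
    run(abstract_private, concrete_private)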
def run(abstract_has_implicit_env, concrete_has_implicit_env):
    """
    Emit and print the errors we get for the below grammar for the given
    "has_implicit_env" attribute values.
    """
    fmt_value = {
        None: 'default',
        True: 'implicit env',
        False: 'no implicit env',
    }
    print('== abstract: {}, concrete: {} =='.format(
        fmt_value[abstract_has_implicit_env],
        fmt_value[concrete_has_implicit_env]
    ))
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))

    @root_grammar_class()
    class RootNode(ASTNode):
        pass

    class AbstractNode(RootNode):
        prop = AbstractProperty(
            BoolType, has_implicit_env=abstract_has_implicit_env
        )

    class ConcreteNode(AbstractNode):
        prop = Property(
            Literal(True), has_implicit_env=concrete_has_implicit_env
        )

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row('example') ^ ConcreteNode,
        )
        return foo_grammar

    if emit_and_print_errors(lang_def):
        for fld in (AbstractNode._fields['prop'],
                    ConcreteNode._fields['prop']):
            print('  {}: {}'.format(fld.qualname,
                                    fmt_value[fld.has_implicit_env]))
    print('')
def run(name, prop_expr):
    """
    Emit and print the errors we get for the below grammar with
    "prop_expr" evaluated and used as the "eq_prop" argument of the Bind
    in BarNode.
    """
    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    prop = None

    @root_grammar_class()
    class FooNode(ASTNode):
        ref_var = UserField(LogicVarType, is_private=True)
        type_var = UserField(LogicVarType, is_private=True)

    class BarNode(FooNode):
        @langkit_property(private=True)
        def main_prop():
            return Bind(Self.type_var, Self.ref_var, eq_prop=prop)

    class BazNode(FooNode):
        prop = Property(12)
        prop2 = Property(True)
        prop3 = Property(lambda _=T.BarNode: True)
        prop4 = Property(lambda other=T.BazNode: Self == other)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(main_rule=Or(
            Row('example') ^ BarNode,
            Row('example') ^ BazNode,
        ))
        return foo_grammar

    prop = eval(prop_expr)
    emit_and_print_errors(lang_def)
    print('')
def run(name, astnode_fn):
    """
    Emit and print the errors we get for the below grammar with
    "astnode_fn(T)" used as the type of the No expression in ExampleNode.
    """
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))
    print('== {} =='.format(name))

    astnode = astnode_fn(T)

    @abstract
    @root_grammar_class
    class FooNode(ASTNode):
        pass

    @abstract
    class MiddleNode(FooNode):
        get_random_node = AbstractProperty(type=T.MiddleNode)

    class ExampleNode(MiddleNode):
        get_random_node = Property(No(astnode))

    @abstract
    class NullNode(FooNode):
        pass

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Or(Row('example') ^ ExampleNode,
                         Row('null') ^ NullNode)
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(name, expr):
    """
    Emit and print the errors we get for the below grammar with "expr" as
    a property in ListNode.
    """
    global FooNode

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    class BarNode(FooNode):
        list_node = Field()

    class ListNode(FooNode):
        nb_list = Field()
        prop = Property(expr)

    class NumberNode(FooNode):
        tok = Field()

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row('example', foo_grammar.list_rule) ^ BarNode,
            list_rule=Row(
                List(Tok(Token.Number, keep=True) ^ NumberNode)
            ) ^ ListNode,
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
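Hypothetical invocations, as a sketch only: the expressions below are not from the original file, they just illustrate the kind of property expression "expr" receives (and they assume Self is imported from langkit.expressions).

# Sketch, not from the original source.
run('Correct code', Self.nb_list)
run('Unknown field', Self.no_such_field)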
def run(name, astnode_fn):
    """
    Emit and print the errors we get for the below grammar with
    "astnode_fn(T)" used as the type of the No expression in ExampleNode.
    """
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))
    print('== {} =='.format(name))

    astnode = astnode_fn(T)

    @abstract
    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    @abstract
    class MiddleNode(FooNode):
        get_random_node = AbstractProperty(type=T.MiddleNode)

    class ExampleNode(MiddleNode):
        get_random_node = Property(No(astnode))

    @abstract
    class NullNode(FooNode):
        pass

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Or(Row('example') ^ ExampleNode,
                         Row('null') ^ NullNode)
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def run(abstract_private, concrete_private):
    """
    Emit and print the errors we get for the below grammar for the given
    privacy levels.
    """
    fmt_privacy = {
        None: 'default',
        True: 'private',
        False: 'public',
    }
    print('== abstract: {}, concrete: {} =='.format(
        fmt_privacy[abstract_private],
        fmt_privacy[concrete_private]
    ))
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))

    @root_grammar_class
    class AbstractNode(ASTNode):
        prop = AbstractProperty(BoolType, private=abstract_private)

    class ConcreteNode(AbstractNode):
        prop = Property(Literal(True), private=concrete_private)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row('example') ^ ConcreteNode,
        )
        return foo_grammar

    if emit_and_print_errors(lang_def):
        for fld in (AbstractNode._fields['prop'],
                    ConcreteNode._fields['prop']):
            print('  {}: {}'.format(fld.qualname,
                                    fmt_privacy[fld.is_private]))
    print('')
def run(name, prop_fn, prop_memoized):
    """
    Emit and print the errors we get for the below grammar with
    "prop_fn()" as a property in LiteralList, memoized according to
    "prop_memoized".
    """
    Diagnostics.set_lang_source_dir(os.path.abspath(__file__))
    print('== {} =='.format(name))

    @root_grammar_class()
    class FooNode(ASTNode):
        pass

    class Literal(FooNode):
        tok = Field()

    class EmptyNode(FooNode):
        pass

    class LiteralList(Literal.list_type()):
        prop = Property(prop_fn(), memoized=prop_memoized)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=foo_grammar.list_rule,
            list_rule=Row(
                '(',
                List(foo_grammar.list_item, sep=',', cls=LiteralList),
                ')'
            )[0],
            list_item=Row(Tok(Token.Number, keep=True)) ^ Literal,
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
def __init__(self, override_lang_source_dir=True):
    self.dirs = Directories(
        # It is assumed that manage.py is at the root of the language
        # definition source directory.
        lang_source_dir=path.dirname(
            path.abspath(inspect.getfile(self.__class__))
        )
    )
    if override_lang_source_dir:
        Diagnostics.set_lang_source_dir(self.dirs.lang_source_dir())

    ########################
    # Main argument parser #
    ########################

    self.args_parser = args_parser = argparse.ArgumentParser(
        description='General manager to handle actions relative to'
                    ' building/testing libadalang.'
    )
    self.subparsers = subparsers = args_parser.add_subparsers()

    args_parser.add_argument(
        '--pp', action='store_true', default=False,
        help='Whether to automatically generate a pretty-printer along'
             ' with the parser for the grammar.'
    )
    args_parser.add_argument(
        '--build-dir', default='build',
        help='Directory to use for generated source code and binaries.'
             ' By default, use "build" in the current directory.'
    )
    args_parser.add_argument(
        '--enable-static', action='store_true',
        help='Enable the generation of static libraries (default:'
             ' disabled).'
    )
    args_parser.add_argument(
        '--disable-static', action='store_false', dest='enable_static',
        help='Disable the generation of static libraries.'
    )
    args_parser.add_argument(
        '--enable-shared', action='store_true', default=True,
        help='Enable the generation (and testing) of shared libraries'
             ' (default: enabled).'
    )
    args_parser.add_argument(
        '--disable-shared', action='store_false', dest='enable_shared',
        help='Disable the generation (and testing) of shared libraries.'
    )
    args_parser.add_argument(
        '--verbosity', '-v', nargs='?',
        type=Verbosity, choices=Verbosity.choices(),
        default=Verbosity('info'), const=Verbosity('debug'),
        help='Verbosity level'
    )
    args_parser.add_argument(
        '--full-error-traces', '-E', action='store_true', default=False,
        help='Always show full error traces, whatever the verbosity'
             ' level (default: disabled).'
    )
    args_parser.add_argument(
        '--trace', '-t', action='append', default=[],
        help='Activate the given debug trace.'
    )
    args_parser.add_argument(
        '--no-langkit-support', action='store_true',
        help='Assume that Langkit_Support is already built and'
             ' installed. This is useful to package the generated'
             ' library only.'
    )

    # Don't enable this by default so that errors will not make
    # automated tasks hang.
    args_parser.add_argument(
        '-g', '--debug', action='store_true',
        help='In case of internal error or diagnostic error, run a'
             ' post-mortem PDB session.'
    )
    args_parser.add_argument(
        '--profile', action='store_true',
        help='Run Langkit under cProfile and generate a data file'
             ' "langkit.prof".'
    )
    args_parser.add_argument(
        '--diagnostic-style', '-D', type=DiagnosticStyle,
        default=DiagnosticStyle.default,
        help='Style for error messages.'
    )

    def create_parser(fn, needs_context=False):
        """
        Create a subparser from a function. Use the name and the
        docstring of the function to document the subparser.

        :param (ManageScript, Namespace) -> None fn: The function to
            use.
        :param bool needs_context: Whether the executed function needs a
            CompileCtx created beforehand.
        :rtype: argparse.ArgumentParser
        """
        p = subparsers.add_parser(
            # Take the name of the function without the do_ prefix and
            # with dashes instead of underscores.
            fn.__name__.replace('do_', '').replace('_', '-'),

            # Take the first paragraph of the function's documentation
            # as help.
            help=fn.__doc__.split('\n\n')[0].strip()
        )

        def internal(*args, **kwargs):
            if needs_context:
                self.set_context(*args, **kwargs)
            fn(*args, **kwargs)

        p.set_defaults(func=internal)
        return p

    ########
    # Help #
    ########

    self.help_parser = create_parser(self.do_help)

    ############
    # Generate #
    ############

    self.generate_parser = generate_parser = create_parser(
        self.do_generate, True
    )
    self.add_generate_args(generate_parser)

    #########
    # Build #
    #########

    self.build_parser = build_parser = create_parser(self.do_build, True)
    self.add_build_args(build_parser)

    ########
    # Make #
    ########

    self.make_parser = make_parser = create_parser(self.do_make, True)
    self.add_generate_args(make_parser)
    self.add_build_args(make_parser)

    ###########
    # Install #
    ###########

    self.install_parser = install_parser = create_parser(
        self.do_install, True
    )
    install_parser.add_argument('install-dir',
                                help='Installation directory.')

    ##########
    # Setenv #
    ##########

    self.setenv_parser = create_parser(self.do_setenv, True)
    self.setenv_parser.add_argument(
        '--json', '-J', action='store_true',
        help='Output the necessary env keys as JSON.'
    )

    ###############################################
    # Generate, Build and Install Langkit_Support #
    ###############################################

    self.generate_lksp_parser = create_parser(
        self.do_generate_langkit_support
    )
    self.build_lksp_parser = create_parser(self.do_build_langkit_support)
    self.install_lksp_parser = create_parser(
        self.do_install_langkit_support
    )
    self.install_lksp_parser.add_argument('install-dir',
                                          help='Installation directory.')

    self.add_build_args(self.build_lksp_parser)
    self.add_build_args(self.install_lksp_parser)

    # The create_context method will create the context and set it here
    # only right before executing commands.
    self.context = None

    # This will be set in the run method, when we have parsed arguments
    # from the command line.
    self.verbosity = None
    ":type: Verbosity"
""" Test that Bind works when binding from env elements. """ import os.path from langkit.compiled_types import (ASTNode, Field, root_grammar_class, LongType, UserField, LogicVarType) from langkit.diagnostics import Diagnostics from langkit.expressions import AbstractProperty, Property, Self, Bind from langkit.parsers import Grammar, Row, Tok from lexer_example import Token from utils import build_and_run Diagnostics.set_lang_source_dir(os.path.abspath(__file__)) @root_grammar_class() class FooNode(ASTNode): prop = AbstractProperty(runtime_check=True, type=LongType) class BarNode(FooNode): pass class Literal(FooNode): tok = Field() a = AbstractProperty(runtime_check=True, type=FooNode.env_el())
def run(self, argv=None):
    parsed_args = self.args_parser.parse_args(argv)

    for trace in parsed_args.trace:
        print("Trace {} is activated".format(trace))
        Log.enable(trace)

    Diagnostics.set_style(parsed_args.diagnostic_style)

    if parsed_args.profile:
        import cProfile
        import pstats

        pr = cProfile.Profile()
        pr.enable()

    # Set the verbosity
    self.verbosity = parsed_args.verbosity

    self.no_ada_api = parsed_args.no_ada_api

    # If asked to, set up the exception hook as a last-chance handler to
    # invoke a debugger in case of uncaught exception.
    if parsed_args.debug:
        # Try to use IPython's debugger if it is available, otherwise
        # fall back to PDB.
        try:
            # noinspection PyPackageRequirements
            from IPython.core import ultratb
        except ImportError:
            ultratb = None  # To keep PyCharm happy...

            def excepthook(type, value, tb):
                traceback.print_exception(type, value, tb)
                pdb.post_mortem(tb)
            sys.excepthook = excepthook
        else:
            sys.excepthook = ultratb.FormattedTB(mode='Verbose',
                                                 color_scheme='Linux',
                                                 call_pdb=1)
            del ultratb

    self.dirs.set_build_dir(parsed_args.build_dir)
    install_dir = getattr(parsed_args, 'install-dir', None)
    if install_dir:
        self.dirs.set_install_dir(install_dir)

    if getattr(parsed_args, 'list_warnings', False):
        WarningSet.print_list()
        return

    # noinspection PyBroadException
    try:
        parsed_args.func(parsed_args)
    except DiagnosticError:
        if parsed_args.debug:
            raise
        if parsed_args.verbosity.debug or parsed_args.full_error_traces:
            traceback.print_exc()
        print(col('Errors, exiting', Colors.FAIL), file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        if parsed_args.debug:
            raise
        ex_type, ex, tb = sys.exc_info()

        # If we have a syntax error, we know for sure the last stack
        # frame points to the code that must be fixed. Otherwise, point
        # to the top-most stack frame that does not belong to Langkit.
        if e.args and e.args[0] == 'invalid syntax':
            loc = Location(e.filename, e.lineno)
        else:
            loc = extract_library_location(traceback.extract_tb(tb))
        with Context("", loc, "recovery"):
            check_source_language(False, str(e), do_raise=False)

        # Keep Langkit bugs "pretty" for users: display the Python stack
        # trace only when requested.
        if parsed_args.verbosity.debug or parsed_args.full_error_traces:
            traceback.print_exc()

        print(col('Internal error! Exiting', Colors.FAIL),
              file=sys.stderr)
        sys.exit(1)
    finally:
        if parsed_args.profile:
            pr.disable()
            ps = pstats.Stats(pr)
            ps.dump_stats('langkit.prof')
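A hypothetical entry point, as a sketch only: a language's manage.py would typically instantiate its ManageScript subclass and delegate to run(). The Manage name below is an assumption, not from the original source.

# Sketch: Manage is a hypothetical ManageScript subclass.
if __name__ == '__main__':
    Manage().run()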
def __init__(self):
    self.dirs = Directories(
        # It is assumed that manage.py is at the root of the language
        # definition source directory.
        lang_source_dir=path.dirname(
            path.abspath(inspect.getfile(self.__class__))
        )
    )
    Diagnostics.set_lang_source_dir(self.dirs.lang_source_dir())

    ########################
    # Main argument parser #
    ########################

    self.args_parser = args_parser = argparse.ArgumentParser(
        description='General manager to handle actions relative to'
                    ' building/testing libadalang'
    )
    self.subparsers = subparsers = args_parser.add_subparsers()

    args_parser.add_argument(
        '--build-dir', default='build',
        help='Directory to use for generated source code and binaries.'
             ' By default, use "build" in the current directory.'
    )
    args_parser.add_argument(
        '--enable-static', action='store_true',
        help='Enable the generation of static libraries (default:'
             ' disabled)'
    )
    args_parser.add_argument(
        '--disable-static', action='store_false', dest='enable_static',
        help='Disable the generation of static libraries'
    )
    args_parser.add_argument(
        '--enable-shared', action='store_true', default=True,
        help='Enable the generation (and testing) of shared libraries'
             ' (default: enabled)'
    )
    args_parser.add_argument(
        '--disable-shared', action='store_false', dest='enable_shared',
        help='Disable the generation (and testing) of shared libraries'
    )
    args_parser.add_argument(
        '--bindings', '-b', nargs='+', choices=('python', ),
        default=['python'],
        help='Bindings to generate (by default: only Python)'
    )
    args_parser.add_argument(
        '--verbosity', '-v', nargs='?',
        type=Verbosity, choices=Verbosity.choices(),
        default=Verbosity('info'), const=Verbosity('debug'),
        help='Verbosity level'
    )

    # Don't enable this by default so that errors will not make
    # automated tasks hang.
    args_parser.add_argument(
        '-g', '--debug', action='store_true',
        help='In case of internal error or diagnostic error, run a'
             ' post-mortem PDB session'
    )
    args_parser.add_argument(
        '--profile', action='store_true',
        help='Run Langkit under cProfile and generate a data file'
             ' "langkit.prof"'
    )
    args_parser.add_argument(
        '--parsable-errors', '-P', action='store_true', default=False,
        help='Generate error messages parsable by tools'
    )

    def create_parser(fn, needs_context=False):
        """
        Create a subparser from a function. Use the name and the
        docstring of the function to document the subparser.

        :param (ManageScript, Namespace) -> None fn: The function to
            use.
        :param bool needs_context: Whether the executed function needs a
            CompileCtx created beforehand.
        :rtype: argparse.ArgumentParser
        """
        p = subparsers.add_parser(
            # Take the name of the function without the do_ prefix
            fn.__name__.replace('do_', ''),

            # Take the first paragraph of the function's documentation
            # as help.
            help=fn.__doc__.split('\n\n')[0].strip()
        )

        def internal(*args, **kwargs):
            if needs_context:
                self.set_context(*args, **kwargs)
            fn(*args, **kwargs)

        p.set_defaults(func=internal)
        return p

    ########
    # Help #
    ########

    self.help_parser = create_parser(self.do_help)

    ############
    # Generate #
    ############

    self.generate_parser = generate_parser = create_parser(
        self.do_generate, True
    )
    self.add_generate_args(generate_parser)

    #########
    # Build #
    #########

    self.build_parser = build_parser = create_parser(self.do_build, True)
    self.add_build_args(build_parser)

    ########
    # Make #
    ########

    self.make_parser = make_parser = create_parser(self.do_make, True)
    self.add_generate_args(make_parser)
    self.add_build_args(make_parser)

    ###########
    # Install #
    ###########

    self.install_parser = install_parser = create_parser(
        self.do_install, True
    )
    install_parser.add_argument('install-dir',
                                help='Installation directory.')

    ##########
    # Setenv #
    ##########

    self.setenv_parser = create_parser(self.do_setenv, True)
    self.setenv_parser.add_argument(
        '--json', '-J', action='store_true',
        help='Output the necessary env keys as JSON'
    )

    # The create_context method will create the context and set it here
    # only right before executing commands, so that coverage computation
    # will apply to create_context.
    self.context = None
def __init__(self):
    self.dirs = Directories(
        # It is assumed that manage.py is at the root of the language
        # definition source directory.
        lang_source_dir=path.dirname(
            path.abspath(inspect.getfile(self.__class__))
        )
    )
    Diagnostics.set_lang_source_dir(self.dirs.lang_source_dir())

    ########################
    # Main argument parser #
    ########################

    self.args_parser = args_parser = argparse.ArgumentParser(
        description='General manager to handle actions relative to'
                    ' building/testing libadalang'
    )
    self.subparsers = subparsers = args_parser.add_subparsers()

    args_parser.add_argument(
        '--build-dir', default='build',
        help='Directory to use for generated source code and binaries.'
             ' By default, use "build" in the current directory.'
    )
    args_parser.add_argument(
        '--enable-static', action='store_true',
        help='Enable the generation of static libraries (default:'
             ' disabled)'
    )
    args_parser.add_argument(
        '--disable-static', action='store_false', dest='enable_static',
        help='Disable the generation of static libraries'
    )
    args_parser.add_argument(
        '--enable-shared', action='store_true', default=True,
        help='Enable the generation (and testing) of shared libraries'
             ' (default: enabled)'
    )
    args_parser.add_argument(
        '--disable-shared', action='store_false', dest='enable_shared',
        help='Disable the generation (and testing) of shared libraries'
    )
    args_parser.add_argument(
        '--bindings', '-b', nargs='+', choices=('python', ),
        default=['python'],
        help='Bindings to generate (by default: only Python)'
    )
    args_parser.add_argument(
        '--verbosity', '-v', nargs='?',
        type=Verbosity, choices=Verbosity.choices(),
        default=Verbosity('info'), const=Verbosity('debug'),
        help='Verbosity level'
    )

    # Don't enable this by default so that errors will not make
    # automated tasks hang.
    args_parser.add_argument(
        '-g', '--debug', action='store_true',
        help='In case of internal error or diagnostic error, run a'
             ' post-mortem PDB session'
    )

    def create_parser(fn, needs_context=False):
        """
        Create a subparser from a function. Use the name and the
        docstring of the function to document the subparser.

        :param (ManageScript, Namespace) -> None fn: The function to
            use.
        :param bool needs_context: Whether the executed function needs a
            CompileCtx created beforehand.
        :rtype: argparse.ArgumentParser
        """
        p = subparsers.add_parser(
            # Take the name of the function without the do_ prefix
            fn.__name__.replace('do_', ''),

            # Take the first paragraph of the function's documentation
            # as help.
            help=fn.__doc__.split('\n\n')[0].strip()
        )

        def internal(*args, **kwargs):
            if needs_context:
                self.set_context(*args, **kwargs)
            fn(*args, **kwargs)

        p.set_defaults(func=internal)
        return p

    ########
    # Help #
    ########

    self.help_parser = create_parser(self.do_help)

    ############
    # Generate #
    ############

    self.generate_parser = generate_parser = create_parser(
        self.do_generate, True
    )
    self.add_generate_args(generate_parser)

    #########
    # Build #
    #########

    self.build_parser = build_parser = create_parser(self.do_build, True)
    self.add_build_args(build_parser)

    ########
    # Make #
    ########

    self.make_parser = make_parser = create_parser(self.do_make, True)
    self.add_generate_args(make_parser)
    self.add_build_args(make_parser)

    ###########
    # Install #
    ###########

    self.install_parser = install_parser = create_parser(
        self.do_install, True
    )
    install_parser.add_argument('install-dir',
                                help='Installation directory.')

    ##########
    # Setenv #
    ##########

    self.setenv_parser = create_parser(self.do_setenv, True)

    # The create_context method will create the context and set it here
    # only right before executing commands, so that coverage computation
    # will apply to create_context.
    self.context = None
def run(name, match_expr):
    """
    Emit and print the errors we get for the below grammar with
    "match_expr" as a property in ExampleNode.
    """
    global BodyNode, Compound, Expression, FooNode, NullNode, Number

    Diagnostics.set_lang_source_dir(path.abspath(__file__))
    print('== {} =='.format(name))

    @abstract
    @root_grammar_class
    class FooNode(ASTNode):
        pass

    @abstract
    class BodyNode(FooNode):
        pass

    class NullNode(BodyNode):
        pass

    @abstract
    class Expression(BodyNode):
        pass

    class Number(Expression):
        tok = Field()

    class Compound(Expression):
        prefix = Field()
        suffix = Field()

    class ExampleNode(FooNode):
        body = Field()
        prop = Property(match_expr)

    def lang_def():
        foo_grammar = Grammar('main_rule')
        foo_grammar.add_rules(
            main_rule=Row(
                'example',
                Or(foo_grammar.expression, Row('null') ^ NullNode)
            ) ^ ExampleNode,

            number=Tok(Token.Number) ^ Number,

            expression=Or(
                Row(foo_grammar.number, ',', foo_grammar.expression)
                ^ Compound,
                foo_grammar.number
            ),
        )
        return foo_grammar

    emit_and_print_errors(lang_def)
    print('')
from os import path

from langkit.compiled_types import ASTNode, Struct, root_grammar_class
from langkit.diagnostics import DiagnosticError, Diagnostics
from langkit.envs import EnvSpec

from utils import emit_and_print_errors

Diagnostics.set_lang_source_dir(path.abspath(__file__))


@root_grammar_class
class FooNode(ASTNode):
    pass


try:
    class StructA(Struct):
        env_spec = EnvSpec()
except DiagnosticError:
    pass

print('')
print('Done')
def run_no_exit(self, argv: Opt[List[str]] = None) -> int:
    parsed_args, unknown_args = self.args_parser.parse_known_args(argv)

    for trace in parsed_args.trace:
        print("Trace {} is activated".format(trace))
        Log.enable(trace)

    Diagnostics.set_style(parsed_args.diagnostic_style)

    if parsed_args.profile:
        import cProfile
        import pstats

        pr = cProfile.Profile()
        pr.enable()

    # Set the verbosity
    self.verbosity = parsed_args.verbosity

    self.enable_build_warnings = getattr(
        parsed_args, "enable_build_warnings", False
    )

    # If there is no build_mode (i.e. we're not running a command that
    # requires it), we still need one to call gnatpp, so set it to a
    # dummy build mode.
    self.build_mode = getattr(
        parsed_args, "build_mode", self.BUILD_MODES[0]
    )

    self.no_ada_api = parsed_args.no_ada_api

    # If asked to, set up the exception hook as a last-chance handler to
    # invoke a debugger in case of uncaught exception.
    if parsed_args.debug:
        # Try to use IPython's debugger if it is available, otherwise
        # fall back to PDB.
        try:
            # noinspection PyPackageRequirements
            from IPython.core import ultratb
        except ImportError:
            def excepthook(typ: Type[BaseException],
                           value: BaseException,
                           tb: TracebackType) -> Any:
                traceback.print_exception(typ, value, tb)
                pdb.post_mortem(tb)
            sys.excepthook = excepthook
        else:
            sys.excepthook = ultratb.FormattedTB(mode='Verbose',
                                                 color_scheme='Linux',
                                                 call_pdb=1)

    self.dirs.set_build_dir(parsed_args.build_dir)
    install_dir = getattr(parsed_args, 'install-dir', None)
    if install_dir:
        self.dirs.set_install_dir(install_dir)

    if getattr(parsed_args, 'list_warnings', False):
        WarningSet.print_list()
        return 0

    # noinspection PyBroadException
    try:
        parsed_args.func(parsed_args, unknown_args)
        return 0
    except DiagnosticError:
        if parsed_args.debug:
            raise
        if parsed_args.verbosity.debug or parsed_args.full_error_traces:
            traceback.print_exc()
        print(col('Errors, exiting', Colors.FAIL))
        return 1
    except Exception as e:
        if parsed_args.debug:
            raise
        ex_type, ex, tb = sys.exc_info()

        # If we have a syntax error, we know for sure the last stack
        # frame points to the code that must be fixed. Otherwise, point
        # to the top-most stack frame that does not belong to Langkit.
        if e.args and e.args[0] == 'invalid syntax':
            assert isinstance(e, SyntaxError)
            loc = Location(cast(str, e.filename), cast(int, e.lineno))
        else:
            loc = cast(
                Location,
                extract_library_location(traceback.extract_tb(tb))
            )
        with diagnostic_context(loc):
            check_source_language(False, str(e), do_raise=False)

        # Keep Langkit bugs "pretty" for users: display the Python stack
        # trace only when requested.
        if parsed_args.verbosity.debug or parsed_args.full_error_traces:
            traceback.print_exc()

        print(col('Internal error! Exiting', Colors.FAIL))
        return 1
    finally:
        if parsed_args.profile:
            pr.disable()
            ps = pstats.Stats(pr)
            ps.dump_stats('langkit.prof')
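A sketch of the corresponding process-level entry point, as an assumption rather than the original source: a thin "run" method would forward run_no_exit's return code to the process exit status.

# Sketch, assuming run_no_exit is the canonical command dispatcher.
def run(self, argv: Opt[List[str]] = None) -> None:
    sys.exit(self.run_no_exit(argv))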