Example #1
    def compile_rules(self, context: CompileCtx) -> None:
        """
        Pass to turn the lexer DSL into our internal regexp objects.
        """
        assert context.nfa_start is None

        regexps = RegexpCollection(case_insensitive=context.case_insensitive)

        # Import patterns into regexps
        for name, pattern, loc in self.patterns:
            with diagnostic_context(loc):
                regexps.add_pattern(name, pattern)

        # Now turn each rule into an NFA
        nfas = []

        for i, a in enumerate(self.rules):
            assert isinstance(a, RuleAssoc)

            # Check that actions never emit Termination or LexingFailure
            # tokens: these tokens are supposed to be emitted by the lexing
            # engine only.
            def check(token: Action) -> None:
                if token in (self.tokens.Termination,
                             self.tokens.LexingFailure):
                    assert isinstance(token, TokenAction)
                    error(f'{token.dsl_name} is reserved for automatic actions'
                          f' only')

            if isinstance(a.action, Case.CaseAction):
                for alt in a.action.all_alts:
                    check(alt.send)
            elif isinstance(a.action, Ignore):
                pass
            else:
                assert isinstance(a.action, TokenAction)
                check(a.action)

            assert a.location is not None
            with diagnostic_context(a.location):
                nfa_start, nfa_end = regexps.nfa_for(a.matcher.regexp)
            nfas.append(nfa_start)

            # The first rule that was added must take precedence when multiple
            # rules compete for the longest match. To implement this behavior,
            # we associate an increasing id with each token action.
            nfa_end.label = (i, a.action)

        # Create a big OR for all possible accepted patterns
        context.nfa_start = NFAState()
        for nfa in nfas:
            context.nfa_start.add_transition(None, nfa)
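
The final loop above builds the lexer's entry point as a single epsilon-union of every rule's NFA, and the `(i, action)` labels let the first-declared rule win when several rules accept the same longest match. Below is a minimal standalone sketch of both ideas; `NFAState` here is a toy re-implementation, not Langkit's class.

class NFAState:
    def __init__(self):
        # (label, destination) pairs; a None label is an epsilon transition
        self.transitions = []
        self.label = None

    def add_transition(self, label, dest):
        self.transitions.append((label, dest))

# Union of several rule NFAs: a fresh start state with an epsilon
# transition into each rule's start state.
rule_starts = [NFAState(), NFAState()]
start = NFAState()
for nfa in rule_starts:
    start.add_transition(None, nfa)

# Ties between accepting states are broken by the smallest rule index,
# i.e. the rule that was declared first.
assert min([(1, 'rule_b'), (0, 'rule_a')]) == (0, 'rule_a')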
Example #2
    def collect_fields(
            cls, owning_type: str, location: Location, dct: Dict[str, Any],
            field_cls: Union[Type[AbstractNodeData],
                             Tuple[Type[AbstractNodeData], ...]],
            only_null_fields: bool) -> List[Tuple[str, AbstractNodeData]]:
        """
        Metaclass helper. Excluding __special__ entries, make sure all entries
        in `dct` are instances of `field_cls` and return them as an annotated
        list: (name, field).

        This ensures that all fields are associated with a legal name, and
        records this name in the field instances.

        :param owning_type: Name of the type that will own the fields. Used
            for diagnostic message formatting purposes.
        :param location: Location for the declaration of the owning type.
        :param dct: Input class dictionary.
        :param field_cls: AbstractNodeData subclass, or list of subclasses.
        :param only_null_fields: Whether syntax fields, if accepted, must be
            null.
        """
        result = []
        for f_n, f_v in dct.items():
            # Ignore __special__ fields
            if f_n.startswith('__') and f_n.endswith('__'):
                continue

            with diagnostic_context(location):
                expected_types = (field_cls if isinstance(field_cls, tuple)
                                  else (field_cls, ))
                check_source_language(
                    isinstance(f_v, field_cls),
                    'Field {f_name} is a {f_type}, but only instances of'
                    ' {exp_type} subclasses are allowed in {metatype}'
                    ' subclasses'.format(
                        f_name=f_n,
                        f_type=type(f_v),
                        exp_type='/'.join(t.__name__ for t in expected_types),
                        metatype=cls.__name__,
                    ))
                check_source_language(
                    not f_n.startswith('_'),
                    'Underscore-prefixed field names are not allowed')
                check_source_language(f_n.lower() == f_n,
                                      'Field names must be lower-case')
                if only_null_fields and isinstance(f_v, _Field):
                    check_source_language(f_v.null,
                                          'Only null fields allowed here')
            result.append((f_n, f_v))

        # Sort fields by creation time so that users get fields in the same
        # order as they were declared in the DSL.
        result.sort(key=lambda kv: kv[1]._serial)
        return result
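
The `_serial` sort relies on fields recording a creation counter, so declaration order survives even if the entries reach the metaclass in some other order. Here is a standalone sketch of that mechanism, with a toy `Field` class standing in for `AbstractNodeData`:

import itertools

class Field:
    _counter = itertools.count()

    def __init__(self):
        # Creation-time serial number, analogous to _serial above
        self._serial = next(Field._counter)

class Node:
    b = Field()
    a = Field()

fields = [(n, v) for n, v in vars(Node).items() if isinstance(v, Field)]
fields.sort(key=lambda kv: kv[1]._serial)
# Declaration order ('b' before 'a'), not alphabetical order
assert [n for n, _ in fields] == ['b', 'a']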
Example #3
def _check_decorator_use(decorator, expected_cls, cls):
    """
    Helper for class decorators below. Raise a diagnostic error if `cls`,
    which is the input parameter of `decorator`, is not a subclass of
    `expected_cls`.
    """
    location = extract_library_location()
    with diagnostic_context(location):
        check_source_language(
            issubtype(cls, expected_cls),
            'The {} decorator must be called on a {} subclass'
            ' (here, got: {})'.format(decorator.__name__,
                                      expected_cls.__name__, cls))
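
A typical consumer of such a helper is a class decorator that validates its target before annotating it. The following standalone sketch reproduces the pattern with a toy checker; the names (`Struct`, `register`, `_check`) are illustrative, not Langkit's:

class Struct:
    pass

def _check(decorator, expected_cls, cls):
    # Toy stand-in for _check_decorator_use, raising a plain TypeError
    if not (isinstance(cls, type) and issubclass(cls, expected_cls)):
        raise TypeError(
            'The {} decorator must be called on a {} subclass'
            ' (here, got: {})'.format(decorator.__name__,
                                      expected_cls.__name__, cls))

def register(cls):
    _check(register, Struct, cls)
    return cls

@register
class Point(Struct):   # accepted: Point is a Struct subclass
    pass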
Example #4
    def process_subclass(mcs, name, bases, dct):
        location = extract_library_location()

        with diagnostic_context(location):
            check_source_language(
                bases == (Struct, ),
                'Struct subclasses must derive from Struct only',
            )

        fields = Struct.collect_fields(
            name, location, dct, _UserField, only_null_fields=False
        )
        DSLType._import_base_type_info(name, location, dct)
        dct['_fields'] = fields
Example #5
    def diagnostic_context(self) -> ContextManager[None]:
        """
        Diagnostic context for env specs.
        """
        assert self.location is not None
        return diagnostic_context(self.location)
Example #6
    def diagnostic_context(self) -> ContextManager[None]:
        assert self.location is not None
        return diagnostic_context(self.location)
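
Both wrappers above just bind a stored location to the shared `diagnostic_context` function so that callers can write a plain `with` statement. Here is a self-contained sketch of the pattern, with a toy context manager in place of Langkit's:

from contextlib import contextmanager
from typing import Iterator

@contextmanager
def diagnostic_context(location: str) -> Iterator[None]:
    # Toy stand-in: prefix any error raised in the block with the location
    try:
        yield
    except AssertionError as exc:
        raise AssertionError('{}: {}'.format(location, exc)) from None

class EnvSpec:
    def __init__(self, location: str) -> None:
        self.location = location

    def diagnostic_context(self):
        assert self.location is not None
        return diagnostic_context(self.location)

try:
    with EnvSpec('spec.py:12').diagnostic_context():
        assert False, 'bad env spec'
except AssertionError as exc:
    print(exc)  # spec.py:12: bad env spec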
Example #7
    def __new__(mcs, name, bases, dct):
        # Don't do anything special for Enum itself
        if not mcs.base_enum_type:
            result = type.__new__(mcs, name, bases, dct)
            mcs.base_enum_type = result
            return result

        location = extract_library_location()
        with diagnostic_context(location):
            check_source_language(
                bases == (Enum, ),
                'Enumeration types must derive from and only from Enum')

            # Get the list of values, initializing their names
            values = []
            default_val_name = None
            for key, value in dct.items():
                # Ignore __special__ fields
                if key.startswith('__') and key.endswith('__'):
                    continue

                check_source_language(
                    isinstance(value, EnumValue),
                    'Enum subclasses can only contain EnumValue instances'
                    ' (here, {} is {})'.format(key, value))
                check_source_language(
                    value._type is None,
                    'EnumValue instances cannot be used in multiple Enum'
                    ' subclasses (here: {})'.format(key))
                value._name = names.Name.from_lower(key)
                values.append(value)

                # If this is the default value for this enum type, store it
                if value.is_default_val:
                    check_source_language(default_val_name is None,
                                          'Only one default value is allowed')
                    default_val_name = value._name

            values.sort(key=lambda v: v._id)
            dct['_values'] = values

        DSLType._import_base_type_info(name, location, dct)

        # Create the subclass and associate values to it
        cls = type.__new__(mcs, name, bases, dct)
        for value in cls._values:
            value._type = cls

        # Now create the CompiledType instance, register it where needed
        enum_type = EnumType(cls._name,
                             cls._location,
                             cls._doc, [v._name for v in cls._values],
                             default_val_name=default_val_name)
        enum_type.dsl_decl = cls
        cls._type = enum_type

        # Associate the DSL enumeration values with their Langkit internal
        # counterparts
        for dsl_val, internal_val in zip(cls._values, enum_type.values):
            dsl_val._type = cls
            dsl_val._value = internal_val
            internal_val.dsl_decl = dsl_val

        return cls
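
The value-collection step boils down to two invariants: every `EnumValue` carries a creation-time id that fixes its order, and at most one value may be flagged as the default. A standalone sketch with a toy `EnumValue`:

import itertools

class EnumValue:
    _ids = itertools.count()

    def __init__(self, is_default=False):
        # Creation-time id, analogous to _id above
        self._id = next(EnumValue._ids)
        self.is_default_val = is_default

dct = {'green': EnumValue(is_default=True), 'red': EnumValue()}
values = [v for v in dct.values() if isinstance(v, EnumValue)]
assert sum(v.is_default_val for v in values) <= 1, \
    'Only one default value is allowed'
values.sort(key=lambda v: v._id)   # declaration order: green, then red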
Example #8
    def node_ctx():
        return diagnostic_context(location)
Example #9
    def _diagnostic_context(cls) -> ContextManager[None]:
        return diagnostic_context(cls._location)
Example #10
    def run_no_exit(self, argv: Opt[List[str]] = None) -> int:
        parsed_args, unknown_args = self.args_parser.parse_known_args(argv)

        for trace in parsed_args.trace:
            print("Trace {} is activated".format(trace))
            Log.enable(trace)

        Diagnostics.set_style(parsed_args.diagnostic_style)

        if parsed_args.profile:
            import cProfile
            import pstats

            pr = cProfile.Profile()
            pr.enable()

        # Set the verbosity
        self.verbosity = parsed_args.verbosity

        self.enable_build_warnings = getattr(parsed_args,
                                             "enable_build_warnings", False)

        # If there is no build_mode (i.e. we're not running a command that
        # requires it), we still need one to call gnatpp, so set it to a dummy
        # build mode.
        self.build_mode = getattr(parsed_args, "build_mode",
                                  self.BUILD_MODES[0])

        self.no_ada_api = parsed_args.no_ada_api

        # If asked to, set up the exception hook as a last-chance handler to
        # invoke a debugger in case of an uncaught exception.
        if parsed_args.debug:
            # Try to use IPython's debugger if it is available, otherwise
            # fall back to PDB.
            try:
                # noinspection PyPackageRequirements
                from IPython.core import ultratb
            except ImportError:

                def excepthook(typ: Type[BaseException], value: BaseException,
                               tb: TracebackType) -> Any:
                    traceback.print_exception(typ, value, tb)
                    pdb.post_mortem(tb)

                sys.excepthook = excepthook
            else:
                sys.excepthook = ultratb.FormattedTB(mode='Verbose',
                                                     color_scheme='Linux',
                                                     call_pdb=1)

        self.dirs.set_build_dir(parsed_args.build_dir)
        install_dir = getattr(parsed_args, 'install-dir', None)
        if install_dir:
            self.dirs.set_install_dir(install_dir)

        if getattr(parsed_args, 'list_warnings', False):
            WarningSet.print_list()
            return 0

        # noinspection PyBroadException
        try:
            parsed_args.func(parsed_args, unknown_args)
            return 0

        except DiagnosticError:
            if parsed_args.debug:
                raise
            if parsed_args.verbosity.debug or parsed_args.full_error_traces:
                traceback.print_exc()
            print(col('Errors, exiting', Colors.FAIL))
            return 1

        except Exception as e:
            if parsed_args.debug:
                raise
            ex_type, ex, tb = sys.exc_info()

            # If we have a syntax error, we know for sure the last stack frame
            # points to the code that must be fixed. Otherwise, point to the
            # top-most stack frame that does not belong to Langkit.
            if e.args and e.args[0] == 'invalid syntax':
                assert isinstance(e, SyntaxError)
                loc = Location(cast(str, e.filename), cast(int, e.lineno))
            else:
                loc = cast(Location,
                           extract_library_location(traceback.extract_tb(tb)))
            with diagnostic_context(loc):
                check_source_language(False, str(e), do_raise=False)

            # Keep Langkit bugs "pretty" for users: display the Python stack
            # trace only when requested.
            if parsed_args.verbosity.debug or parsed_args.full_error_traces:
                traceback.print_exc()

            print(col('Internal error! Exiting', Colors.FAIL))
            return 1

        finally:
            if parsed_args.profile:
                pr.disable()
                ps = pstats.Stats(pr)
                ps.dump_stats('langkit.prof')
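
Stripped of the command dispatch, the profiling bracket in this driver is just enable-before, dump-in-finally, so `langkit.prof` is written even when the command fails. A minimal standalone sketch (the workload is a stand-in for `parsed_args.func(...)`):

import cProfile
import pstats

pr = cProfile.Profile()
pr.enable()
try:
    sum(i * i for i in range(100_000))  # stand-in for the actual command
finally:
    pr.disable()
    pstats.Stats(pr).dump_stats('langkit.prof')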