def func(entity, column=0, lang=lang, **kwargs):
    """
    Format the documentation for ``entity``.

    :param str|compiled_types.CompiledType entity: Name for the entity to
        document, or entity to document.
    :param int column: Indentation level for the result.
    :param str lang: Language for the documentation.
    :param kwargs: Parameters to be passed to the specific formatter.
    :rtype: str
    """
    from langkit.compile_context import get_context

    ctx = get_context()

    if isinstance(entity, str):
        # Named documentation entries are templates: render them with the
        # helpers they expect.
        #
        # Note: the original code also built a "template_ctx" dict with the
        # same values, but never used it; that dead code is removed here.
        doc = ctx.documentations[entity].render(
            ctx=ctx,
            capi=ctx.c_api_settings,
            lang=lang,
            null=null_names[lang],
            TODO=todo_markers[lang])
    else:
        # Compiled types carry their own docstring (possibly absent)
        doc = entity.doc or ''
    return formatter(doc, column, **kwargs)
def func(entity: Union[str, CompiledType],
         column: int = 0,
         lang: str = lang,
         **kwargs: Any) -> str:
    """
    Format the documentation for ``entity``.

    :param entity: Name for the entity to document, or entity to document.
    :param column: Indentation level for the result.
    :param lang: Language for the documentation.
    :param kwargs: Parameters to be passed to the specific formatter.
    """
    # Deferred imports to break circular dependencies
    from langkit.compile_context import get_context
    from langkit.compiled_types import T, resolve_type

    ctx = get_context()

    # Pick the documentation template: a named entry from the context for
    # strings, or the entity's own docstring (when present).
    if isinstance(entity, str):
        doc_template = ctx.documentations[entity]
    elif entity.doc:
        doc_template = Template(entity.doc)
    else:
        doc_template = None

    def node_name(node: Union[CompiledType, TypeRepo.Defer]) -> str:
        # Helper exposed to templates to format node type names
        return get_node_name(ctx, resolve_type(node))

    # Render the template with the standard set of helpers, or fall back to
    # an empty documentation when there is no template at all.
    doc = doc_template.render(
        ctx=get_context(),
        capi=ctx.c_api_settings,
        pyapi=ctx.python_api_settings,
        lang=lang,
        null=null_names[lang],
        TODO=todo_markers[lang],
        T=T,
        node_name=node_name) if doc_template else ''
    return formatter(doc, column, **kwargs)
def func(entity, column=0, lang=lang, **kwargs):
    """
    Format the documentation for ``entity``.

    :param str|compiled_types.CompiledType entity: Name for the entity to
        document, or entity to document.
    :param int column: Indentation level for the result.
    :param str lang: Language for the documentation.
    :param kwargs: Parameters to be passed to the specific formatter.
    :rtype: str
    """
    # Deferred imports to break circular dependencies
    from langkit.compile_context import get_context
    from langkit.compiled_types import T, resolve_type

    ctx = get_context()

    # Pick the documentation template: a named entry from the context for
    # strings, or the entity's own docstring (when present).
    if isinstance(entity, str):
        doc_template = ctx.documentations[entity]
    elif entity.doc:
        doc_template = Template(entity.doc)
    else:
        # No documentation at all: skip rendering and formatting
        return ''

    def node_name(node):
        # Helper exposed to templates to format node type names
        return get_node_name(ctx, resolve_type(node))

    doc = doc_template.render(ctx=get_context(),
                              capi=ctx.c_api_settings,
                              lang=lang,
                              null=null_names[lang],
                              TODO=todo_markers[lang],
                              T=T,
                              node_name=node_name)
    return formatter(doc, column, **kwargs)
def add_to_context(cls):
    """
    Register this list type in the current compilation context.

    Does nothing when the type has already been registered.
    """
    ctx = get_context()
    if cls not in ctx.types:
        element = cls.element_type()
        ctx.types.add(cls)
        ctx.list_types.add(element)
        # Make sure the type this list contains is already declared
        element.add_to_context()
def make_renderer(base_renderer=None):
    """
    Create a template renderer with common helpers.

    :param Renderer base_renderer: The renderer to base the resulting
        renderer on.
    """
    if base_renderer is None:
        base_renderer = common_renderer

    # Type predicates and helpers made available to every template
    template_args = {
        'is_enum': type_check(EnumType),
        'is_long': type_check(LongType),
        'is_bool': type_check(BoolType),
        'is_ast_node': type_check(ASTNode),
        'is_sloc_range': type_check(SourceLocationRangeType),
        'is_token_type': type_check(Token),
        'is_array_type': type_check(ArrayType),
        'decl_type': decl_type,
    }

    # Context-dependent helpers are only available once a compilation
    # context exists.
    if get_context():
        capi = get_context().c_api_settings

        # Name of the root AST node access type
        type_name = get_context().root_grammar_class.name()

        # Name of the root AST node record type
        value_type = type_name + names.Name("Type")

        # Name of the root AST node kind type
        kind_name = value_type + names.Name("Kind")

        template_args.update({
            'root_node_type_name': type_name,
            'root_node_value_type': value_type,
            'root_node_kind_name': kind_name,
            'ctx': get_context(),
            'ada_api': get_context().ada_api_settings,
            'capi': capi,
            'analysis_context_type': CAPIType(capi, 'analysis_context').name,
            'analysis_unit_type': CAPIType(capi, 'analysis_unit').name,
            'node_kind_type': CAPIType(capi, 'node_kind_enum').name,
            'node_type': c_node_type(capi).name,
            'token_type': CAPIType(capi, 'token').name,
            'sloc_type': CAPIType(capi, 'source_location').name,
            'sloc_range_type': SourceLocationRangeType.c_type(capi).name,
            'text_type': CAPIType(capi, 'text').name,
            'diagnostic_type': CAPIType(capi, 'diagnostic').name,
            'exception_type': CAPIType(capi, 'exception').name,
        })
    return base_renderer.update(template_args)
def __init__(self, domain, logic_var_expr, abstract_expr=None):
    """
    Build a resolved expression for a domain equation.

    :param domain: Resolved expression for the domain (see attribute
        docstring below).
    :param logic_var_expr: Resolved expression for the logic variable.
    :param abstract_expr: Original abstract expression, if any.
    """
    from langkit.compile_context import get_context
    self.domain = domain
    ":type: ResolvedExpression"

    self.logic_var_expr = logic_var_expr
    ":type: ResolvedExpression"

    # Generated code relies on the instantiation of a logic binder package
    # for the default case (no convertion nor equality properties).
    get_context().do_generate_logic_binder()

    super().__init__('Domain_Equation', abstract_expr=abstract_expr)
def format_python(text, column, rtype=None):
    """
    Format some text as Python docstring.

    :param str text: Text to format.
    :param int column: Indentation level for the result.
    :param str|None rtype: If non-None, append to the formatted docstring a
        Sphinx-style ``rtype`` annotation, whose type is the given
        ``rtype``.
    :rtype: str
    """
    from langkit.compile_context import get_context

    # Nothing to document and no return type to advertise: emit nothing
    if text.strip() == '' and rtype is None:
        return ''

    available_width = get_available_width(column)
    indent = ' ' * column
    lines = ['"""']
    for i, paragraph in enumerate(split_paragraphs(text)):
        # Separate paragraphs with a single blank line
        if i > 0:
            lines.append('')
        # Wrap each paragraph so the docstring fits in the remaining width
        for line in textwrap.wrap(paragraph, available_width,
                                  drop_whitespace=True):
            lines.append(indent + line)
    if rtype:
        # Separate the annotation from the docstring body with a blank
        # line, but only when there is an actual body.
        if len(lines) > 1:
            lines.append("")
        lines.append("{}:rtype: {}".format(
            indent,
            get_context().python_api_settings.type_public_name(rtype)))
    lines.append(indent + '"""')
    return '\n'.join(lines)
def check(self) -> None:
    """
    Check that the resolver property is conforming.
    """
    ctx = get_context()
    # Let the context know that referenced environments are used
    ctx.has_ref_env = True

    self.resolver = resolve_property(self.resolver)
    # Generated code calls the resolver through an untyped wrapper
    self.resolver.require_untyped_wrapper()

    check_source_language(
        self.resolver.type.matches(T.LexicalEnv),
        'Referenced environment resolver must return a lexical'
        ' environment (not {})'.format(
            self.resolver.type.dsl_name
        )
    )
    check_source_language(
        not self.resolver.natural_arguments,
        'Referenced environment resolver must take no argument'
    )
    check_source_language(
        not self.resolver.dynamic_vars,
        'Referenced environment resolver must have no dynamically bound'
        ' variable'
    )
def write_source_file(file_path, source, post_process=None):
    """
    Helper to write a source file.

    Return whether the file has been updated.

    :param str file_path: Path of the file to write.
    :param str source: Content of the file to write.
    :param post_process: If provided, callable used to transform the source
        file content just before writing it.
    :type post_process: None | (str) -> str
    :rtype: bool
    """
    context = get_context()
    if post_process:
        source = post_process(source)
    # Only write when the file is new or its content is stale, to avoid
    # needlessly touching up-to-date files.
    if (not os.path.exists(file_path)
            or context.emitter.cache.is_stale(file_path, source)):
        if context.verbosity.debug:
            printcol('Rewriting stale source: {}'.format(file_path),
                     Colors.OKBLUE)

        # Emit all source files as UTF-8 with "\n" line endings, no matter
        # the current platform.
        with open(file_path, 'w', encoding='utf-8', newline='') as f:
            f.write(source)
        return True
    return False
def check(self) -> None:
    """
    Check that the mappings property and the (optional) entity resolver
    property are conforming.
    """
    ctx = get_context()
    self.resolver = resolve_property(self.resolver)

    with self.mappings_prop.diagnostic_context:
        mapping_type = self.mappings_prop.type
        if mapping_type.matches(T.env_assoc):
            ctx.has_env_assoc = True
        elif mapping_type.matches(T.env_assoc.array):
            ctx.has_env_assoc = True
            ctx.has_env_assoc_array = True
        else:
            # Fixed: the original message contained a duplicated "must"
            # ("... specification must must be either ...").
            check_source_language(
                False,
                'The bindings expression in environment specification must'
                ' be either an env_assoc or an array of env_assocs: got {}'
                ' instead'.format(mapping_type.dsl_name))

        if self.resolver:
            # Ask for the creation of untyped wrappers for all
            # properties used as entity resolvers.
            self.resolver.require_untyped_wrapper()

            check_source_language(
                self.resolver.type.matches(T.entity),
                'Entity resolver properties must return entities'
                ' (got {})'.format(self.resolver.type.dsl_name))
            check_source_language(
                not self.resolver.dynamic_vars,
                'Entity resolver properties must have no dynamically'
                ' bound variable')
            check_source_language(
                not self.resolver.natural_arguments,
                'Entity resolver properties must have no argument')
def check(self):
    """
    Check that the resolver property is conforming.
    """
    ctx = get_context()
    # Let the context know that referenced environments are used
    ctx.has_ref_env = True

    if self.category:
        # 'nocat' is reserved, so reject it as a user-provided category
        check_source_language(
            self.category != 'nocat',
            'Nocat is not a valid name for a referenced env category')
        self.category = names.Name.from_lower(self.category)
        ctx.ref_cats.add(self.category)

    self.resolver = resolve_property(self.resolver)
    # Generated code calls the resolver through an untyped wrapper
    self.resolver.require_untyped_wrapper()

    check_source_language(
        self.resolver.type.matches(T.LexicalEnv),
        'Referenced environment resolver must return a lexical'
        ' environment (not {})'.format(self.resolver.type.dsl_name))
    check_source_language(
        not self.resolver.natural_arguments,
        'Referenced environment resolver must take no argument')
    check_source_language(
        not self.resolver.dynamic_vars,
        'Referenced environment resolver must have no dynamically bound'
        ' variable')
def enabled(self):
    """
    Return whether this warning is enabled in the current context.

    :rtype: bool
    """
    from langkit.compile_context import get_context
    ctx = get_context()
    return self in ctx.warnings
def _render_pre(self):
    """
    Emit the pre-rendering code for the unit operand, followed by an
    optional null-safety check on it (skipped when property checks are
    disabled for this context).
    """
    from langkit.compile_context import get_context

    unit_code = self.unit_expr.render_pre()
    if get_context().no_property_checks:
        check_code = ''
    else:
        check_code = render('properties/null_safety_check_ada',
                            expr=self.unit_expr,
                            result_var=self.prefix_var)
    return '{}\n{}'.format(unit_code, check_code)
def emit_ocaml_api(self, ctx):
    """
    Generate binding for the external OCaml API.

    Bindings are written to the ``self.ocaml_path`` directory, which is
    created if needed.

    :param ctx: Compilation context.
    """
    # No OCaml API settings: nothing to generate
    if not ctx.ocaml_api_settings:
        return
    ctx.ocaml_api_settings.init_type_graph()

    if not os.path.isdir(self.ocaml_path):
        os.mkdir(self.ocaml_path)

    with names.camel:
        # NOTE(review): this rebinds the "ctx" parameter; both presumably
        # designate the same compilation context -- confirm.
        ctx = get_context()
        code = ctx.render_template(
            "ocaml_api/module_ocaml",
            c_api=ctx.c_api_settings,
            ocaml_api=ctx.ocaml_api_settings
        )

        ocaml_filename = '{}.ml'.format(ctx.c_api_settings.lib_name)
        write_ocaml_file(
            os.path.join(self.ocaml_path, ocaml_filename),
            code
        )

        code = ctx.render_template(
            "ocaml_api/module_sig_ocaml",
            c_api=ctx.c_api_settings,
            ocaml_api=ctx.ocaml_api_settings
        )

        ocaml_filename = '{}.mli'.format(ctx.c_api_settings.lib_name)
        write_ocaml_file(
            os.path.join(self.ocaml_path, ocaml_filename),
            code
        )

        # Emit dune file to easily compile and install bindings
        code = ctx.render_template(
            "ocaml_api/dune_ocaml",
            c_api=ctx.c_api_settings,
            ocaml_api=ctx.ocaml_api_settings
        )

        write_source_file(os.path.join(self.ocaml_path, 'dune'), code)
        write_source_file(os.path.join(self.ocaml_path, 'dune-project'),
                          '(lang dune 1.6)')

        # Write an empty opam file to install the lib with dune
        write_source_file(
            os.path.join(self.ocaml_path,
                         '{}.opam'.format(ctx.c_api_settings.lib_name)),
            ''
        )
def check_source_language(predicate, message, severity=Severity.error,
                          do_raise=True, ok_for_codegen=False):
    """
    Check predicates related to the user's input in the input language
    definition. Show error messages and eventually terminate if those error
    messages are critical.

    :param bool predicate: The predicate to check.
    :param str message: The base message to display if predicate happens to
        be false.
    :param Severity severity: The severity of the diagnostic.
    :param bool do_raise: If True, raise a DiagnosticError if predicate
        happens to be false.
    :param bool ok_for_codegen: If True, allow checks to be performed
        during code generation. This is False by default as it should be an
        exceptional situation: we want, when possible, most checks to be
        performed before we attempt to emit the generated library (for
        --check-only).
    """
    from langkit.compile_context import get_context

    def context_from_node(node):
        # Format a "file:line" location string for a node-based context
        return '{}:{}'.format(get_filename(node.unit.filename),
                              node.sloc_range.start)

    if not ok_for_codegen:
        # Emission has not started yet (emitter is None) unless the caller
        # explicitly allowed codegen-time checks.
        ctx = get_context(or_none=True)
        assert ctx is None or ctx.emitter is None

    severity = assert_type(severity, Severity)
    indent = ' ' * 4

    if not predicate:
        # Indent continuation lines of the message so multi-line messages
        # read as a single diagnostic.
        message_lines = message.splitlines()
        message = '\n'.join(
            message_lines[:1] + [indent + line for line in message_lines[1:]]
        )
        context = get_structured_context()

        # Three display modes: node-based context, parsable one-line
        # diagnostics, or the default human-readable context dump.
        if not isinstance(context, list):
            print('{}: {}: {}'.format(context_from_node(context),
                                      format_severity(severity),
                                      message))
        elif Diagnostics.style != DiagnosticStyle.default:
            print('{}: {}'.format(get_parsable_location(), message))
        else:
            print_context(context)
            print('{}{}: {}'.format(
                indent if context_stack else '',
                format_severity(severity),
                message
            ))

        if severity == Severity.error and do_raise:
            raise DiagnosticError()
        elif severity == Severity.non_blocking_error:
            # Remember the failure so compilation can abort later
            Diagnostics.has_pending_error = True
def ada_token_name(self, token):
    """
    Helper function to get the name of the Ada enumerator to represent the
    kind of "token".

    :param TokenAction|Enum|Name|str token: See the token_base_name method.
    :rtype: str
    """
    base = self.token_base_name(token)
    return (get_context().lang_name + base).camel_with_underscores
def c_token_name(self, token):
    """
    Helper function to get the name of the Quex constant to represent the
    kind of "token".

    :param TokenAction|Enum|Name|str token: See the token_base_name method.
    :rtype: str
    """
    base = self.token_base_name(token)
    return (get_context().lang_name + base).upper
def render(*args, **kwargs):
    """
    Render a template, adding parser-specific helpers on top of the common
    renderer.
    """
    extra_env = {
        'is_tok': type_check_instance(Tok),
        'is_row': type_check_instance(Row),
        'is_class': inspect.isclass,
        'ctx': get_context(),
    }
    renderer = compiled_types.make_renderer().update(extra_env)
    return renderer.render(*args, **kwargs)
def add_to_context(cls):
    """
    Register this AST node class in the current compilation context, along
    with its base class and the types of its fields.
    """
    assert cls.is_typed, (
        "Trying to generate code for a type before typing is complete")
    if cls not in get_context().types and cls != ASTNode:
        base_class = cls.__bases__[0]
        # Make sure the base class is registered first
        if issubclass(base_class, ASTNode):
            base_class.add_to_context()

        get_context().types.add(cls)

        # Register the types of this class's own fields as well
        for f in cls.get_fields(include_inherited=False):
            if f.type:
                f.type.add_to_context()

        cls.compute_properties()

        if cls.env_spec:
            cls.env_spec.compute(cls)
def compile(self):
    """
    Emit code for this parser as a function into the global context.
    """
    env = TemplateEnvironment()
    env._self = self

    ctx = get_context()

    # Don't emit code twice for the same parser
    if self.gen_fn_name in ctx.fns:
        return
    ctx.fns.add(self.gen_fn_name)

    env.parser_context = self.generate_code()

    generated = GeneratedParser(self.gen_fn_name,
                                render('parsers/fn_profile_ada', env),
                                render('parsers/fn_code_ada', env))
    ctx.generated_parsers.append(generated)
def emit_ocaml_api(self, ctx: CompileCtx) -> None:
    """
    Generate binding for the external OCaml API.
    """
    # No OCaml API settings: nothing to generate
    if not ctx.ocaml_api_settings:
        return
    ctx.ocaml_api_settings.init_type_graph()

    if not os.path.isdir(self.ocaml_dir):
        os.mkdir(self.ocaml_dir)

    with names.camel:
        # Write an empty ocamlformat file so we can call ocamlformat
        write_source_file(os.path.join(self.ocaml_dir, '.ocamlformat'), '')

        # NOTE(review): this rebinds the "ctx" parameter; both presumably
        # designate the same compilation context -- confirm.
        ctx = get_context()
        code = ctx.render_template("ocaml_api/module_ocaml",
                                   c_api=ctx.c_api_settings,
                                   ocaml_api=ctx.ocaml_api_settings)
        ocaml_filename = '{}.ml'.format(ctx.c_api_settings.lib_name)
        write_ocaml_file(
            os.path.join(self.ocaml_dir, ocaml_filename),
            code,
            self.post_process_ocaml,
        )

        code = ctx.render_template("ocaml_api/module_sig_ocaml",
                                   c_api=ctx.c_api_settings,
                                   ocaml_api=ctx.ocaml_api_settings)
        ocaml_filename = '{}.mli'.format(ctx.c_api_settings.lib_name)
        write_ocaml_file(
            os.path.join(self.ocaml_dir, ocaml_filename),
            code,
            self.post_process_ocaml,
        )

        # Emit dune file to easily compile and install bindings
        code = ctx.render_template("ocaml_api/dune_ocaml",
                                   c_api=ctx.c_api_settings,
                                   ocaml_api=ctx.ocaml_api_settings)
        write_source_file(os.path.join(self.ocaml_dir, 'dune'), code)
        write_source_file(os.path.join(self.ocaml_dir, 'dune-project'),
                          '(lang dune 1.6)')

        # Write an empty opam file to install the lib with dune
        write_source_file(
            os.path.join(self.ocaml_dir,
                         '{}.opam'.format(ctx.c_api_settings.lib_name)),
            '')
def _render_pre(self):
    """
    Emit pre-rendering code: an optional null-safety check on the receiver
    followed by each argument's own pre-render code.
    """
    from langkit.compile_context import get_context

    # Before accessing the field of a record through an access, we must
    # check whether this access is null in order to raise a
    # Property_Error in the case it is.
    if get_context().no_property_checks:
        null_check = ''
    else:
        null_check = render('properties/null_safety_check_ada',
                            expr=self.receiver_expr,
                            result_var=self.prefix_var)

    args_code = '\n'.join(arg.render_pre() for arg in self.arguments)
    return '{}\n{}'.format(null_check, args_code)
def get_context():
    """
    Return the current compilation context, see
    langkit.compile_context.get_context.

    TODO: this function exists only to workaround circular dependency
    issues. We should get rid of them.

    :rtype: CompileCtx
    """
    from langkit import compile_context
    return compile_context.get_context()
def gen_code_or_fncall(self, pos_name="pos"):
    """
    Return generated code for this parser into the global context.

    `pos_name` is the name of a variable that contains the position of the
    next token in the lexer.

    Either the "parsing code" is returned, either it is emitted in a
    dedicated function and a call to it is returned instead. This method
    relies on the subclasses-defined `generated_code` for "parsing code"
    generation.

    :param str|names.Name pos_name: The name of the position variable.
    :rtype: ParserCodeContext
    """
    if self.name and get_context().verbosity.debug:
        # Fixed: use the print() function instead of the Python 2 print
        # statement, which is a SyntaxError under Python 3.
        print("Compiling rule: {0}".format(
            col(self.gen_fn_name, Colors.HEADER)
        ))

    # Users must be able to run parsers that implement a named rule, so
    # generate dedicated functions for them.
    if self.is_root:
        # The call to compile will add the declaration and the definition
        # (body) of the function to the compile context.
        self.compile()

        # Generate a call to the previously compiled function, and return
        # the context corresponding to this call.
        pos, res = gen_names("fncall_pos", "fncall_res")
        fncall_block = render(
            'parsers/fn_call_ada',
            _self=self, pos_name=pos_name,
            pos=pos, res=res
        )

        return ParserCodeContext(
            pos_var_name=pos,
            res_var_name=res,
            code=fncall_block,
            var_defs=[(pos, Token), (res, self.get_type())]
        )
    else:
        return self.generate_code(pos_name)
def func(entity, column=0, **kwargs):
    """
    Render and format the documentation for ``entity``.

    :type entity: str|compiled_types.CompiledType
    :type column: int
    """
    from langkit.compile_context import get_context

    # Tell _render for which binding we are generating documentation
    if 'lang' not in kwargs:
        kwargs['lang'] = lang
    kwargs['ctx'] = get_context()

    doc = _render(entity, **kwargs)
    if not doc:
        return ''
    return formatter(doc, column)
def func(entity, column=0, **kwargs):
    """
    Render and format the documentation for ``entity``.

    :type entity: str|compiled_types.CompiledType
    :type column: int
    """
    from langkit.compile_context import get_context

    context = get_context()

    # Tell _render for which binding we are generating documentation
    if 'lang' not in kwargs:
        kwargs['lang'] = lang

    doc = _render(context, entity, **kwargs)
    if not doc:
        return ''
    return formatter(doc, column)
def compile(self):
    """
    Emit code for this parser as a function into the global context.
    """
    t_env = TemplateEnvironment()
    t_env.parser = self

    # Root grammar rules must produce AST nodes
    check_source_language(
        self.get_type() is not None
        and issubclass(self.get_type(), ASTNode),
        'Grammar rules must yield an AST node'
    )

    # Don't emit code twice for the same parser
    if self.gen_fn_name in get_context().fns:
        return
    get_context().fns.add(self.gen_fn_name)

    t_env.parser_context = self.generate_code()

    get_context().generated_parsers.append(
        GeneratedParser(self.gen_fn_name,
                        render('parsers/fn_profile_ada', t_env),
                        render('parsers/fn_code_ada', t_env)))
def generate_code(self, pos_name="pos"):
    """
    Emit the Ada code that matches this parser's token kind and return the
    corresponding parser code context.
    """
    # Generate the code to match the token of kind 'token_kind', and return
    # the corresponding context.
    pos, res = gen_names("tk_pos", "tk_res")

    kind = get_context().lexer.ada_token_name(self.val)
    code = render('parsers/tok_code_ada',
                  parser=self,
                  pos_name=pos_name,
                  pos=pos,
                  res=res,
                  match_text=self.match_text,
                  token_kind=kind)

    return ParserCodeContext(
        pos_var_name=pos,
        res_var_name=res,
        code=code,
        var_defs=[(pos, Token), (res, Token)],
    )
def gen_code_or_fncall(self, pos_name="pos"):
    """
    Return generated code for this parser into the global context.

    `pos_name` is the name of a variable that contains the position of the
    next token in the lexer.

    Either the "parsing code" is returned, either it is emitted in a
    dedicated function and a call to it is returned instead. This method
    relies on the subclasses-defined `generated_code` for "parsing code"
    generation.

    :param str|names.Name pos_name: The name of the position variable.
    :rtype: ParserCodeContext
    """
    if self.name and get_context().verbosity.debug:
        # Fixed: use the print() function instead of the Python 2 print
        # statement, which is a SyntaxError under Python 3.
        print("Compiling rule: {0}".format(
            col(self.gen_fn_name, Colors.HEADER)))

    # Users must be able to run parsers that implement a named rule, so
    # generate dedicated functions for them.
    if self.is_root:
        # The call to compile will add the declaration and the definition
        # (body) of the function to the compile context.
        self.compile()

        # Generate a call to the previously compiled function, and return
        # the context corresponding to this call.
        pos, res = gen_names("fncall_pos", "fncall_res")
        fncall_block = render('parsers/fn_call_ada',
                              parser=self,
                              pos_name=pos_name,
                              pos=pos, res=res)

        return ParserCodeContext(pos_var_name=pos,
                                 res_var_name=res,
                                 code=fncall_block,
                                 var_defs=[(pos, Token),
                                           (res, self.get_type())])
    else:
        return self.generate_code(pos_name)
def generate_code(self, pos_name="pos"):
    """
    Emit the Ada code that matches this parser's token kind and return the
    corresponding parser code context.
    """
    # Generate the code to match the token of kind 'token_kind', and return
    # the corresponding context.
    pos, res = gen_names("tk_pos", "tk_res")

    kind = get_context().lexer.ada_token_name(self.val)
    code = render('parsers/tok_code_ada',
                  _self=self,
                  pos_name=pos_name,
                  pos=pos,
                  res=res,
                  token_kind=kind)

    return ParserCodeContext(
        pos_var_name=pos,
        res_var_name=res,
        code=code,
        var_defs=[(pos, Token), (res, Token)],
    )
def fmt_type(t: CompiledType) -> str:
    """Return the public Python API name for type ``t``."""
    pyapi = get_context().python_api_settings
    return pyapi.type_public_name(t)
def c_name(self):
    """
    Return this entity's base name prefixed with the language name, in
    upper-case form (for the C API).
    """
    full_name = get_context().lang_name + self.base_name
    return full_name.upper
def ada_name(self):
    """
    Return this entity's base name prefixed with the language name, in
    Camel_With_Underscores form (for the Ada API).
    """
    full_name = get_context().lang_name + self.base_name
    return full_name.camel_with_underscores
def emit_railroad_diagram(parser: Parser) -> None:
    """
    Pass to emit railroad diagrams using the railroad_diagrams library.
    Railroads will be emitted in $BUILD/railroad-diagrams.
    """
    from railroad import (Choice, Diagram, DiagramItem, End, OneOrMore,
                          Optional, Sequence, Skip, Start, ZeroOrMore)

    from langkit.compile_context import get_context

    def recurse(p: Parser) -> Union[DiagramItem, str, None]:
        """
        Translate parser ``p`` into a diagram item, a plain rule name, or
        None when there is nothing to display for it.
        """
        # Transform parsers are just ignored
        if isinstance(p, _Transform):
            return recurse(p.children[0])
        elif isinstance(p, Opt):
            # Opt parsers are straightforwardly wrapped into an Optional
            return Optional(recurse(p.parser))
        elif isinstance(p, _Extract):
            # Extract is ignored
            return recurse(p.parser)

        # For list parsers, we create a sequence with the right separator
        # and sub-parser.
        elif isinstance(p, List):
            sep = recurse(p.sep) if p.sep else None
            child = recurse(p.parser)
            if p.empty_valid:
                return ZeroOrMore(child, repeat=sep)
            else:
                return OneOrMore(child, repeat=sep)

        # For defers, we just return the rule name
        elif isinstance(p, Defer):
            return p.rule_name

        # For tokens, we return either the quoted original string, or the
        # DSL name.
        elif isinstance(p, _Token):
            if p._original_string:
                return repr(p._original_string)
            else:
                return p.val.dsl_name

        # Generic case: recurse into children, dropping empty results
        children = []
        for c in p.children:
            res = recurse(c)
            if res is not None:
                children.append(res)

        if isinstance(p, Or):
            if len(children) == 0:
                return None
            # Put Skip alternatives last so the diagram reads better
            children = sorted(children, key=lambda c: isinstance(c, Skip))
            return Choice(0, *children)
        elif isinstance(p, _Row):
            if len(children) == 0:
                return Skip()
            return Sequence(*children)
        else:
            return None

    d = Diagram(
        # Explicit start point with the parser's name as label
        Start("simple", label=parser.name.lower()),
        *[c for c in [recurse(parser)] if c is not None],
        End("simple"))

    # Output the diagram to svg in $BUILD/railroad-diagrams/$RULENAME.svg
    emitter = get_context().emitter
    assert emitter is not None
    out_dir = pathlib.Path(emitter.lib_root, "railroad-diagrams")
    out_dir.mkdir(parents=True, exist_ok=True)
    with (out_dir / f"{parser.name.lower()}.svg").open("w") as f:
        d.writeSvg(f.write)
def add_to_context(cls):
    """
    Register this enum type in the current compilation context.

    Does nothing when the type has already been registered.
    """
    ctx = get_context()
    if cls in ctx.types:
        return
    ctx.types.add(cls)
    ctx.enum_types.add(cls)
def do_prepare(self):
    """
    Request generation of the logic binder package for this equation's
    converter and equality properties.
    """
    from langkit.compile_context import get_context
    ctx = get_context()
    ctx.do_generate_logic_binder(self.conv_prop, self.eq_prop)
def fmt_type(t):
    """Return the public Python API name for type ``t``."""
    pyapi = get_context().python_api_settings
    return pyapi.type_public_name(t)
def construct(self):
    """
    Build the resolved expression for this Bind equation: type-check the
    converter/equality properties, then construct both operands.
    """
    from langkit.compile_context import get_context
    self.resolve_props()

    get_context().do_generate_logic_binder(self.conv_prop, self.eq_prop)

    # We have to wait for the construct pass for the following checks
    # because they rely on type information, which is not supposed to be
    # computed before this pass.
    if self.conv_prop:
        check_multiple([
            (self.conv_prop.type.matches(T.root_node.entity),
             'Bind property must return a subtype of {}'.format(
                 T.root_node.entity.dsl_name)),

            (self.conv_prop.struct.matches(T.root_node),
             'Bind property must belong to a subtype of {}'.format(
                 T.root_node.dsl_name)),
        ])

        DynamicVariable.check_call_bindings(
            self.conv_prop, "In Bind's conv_prop {prop}")

    # Those checks are run in construct, because we need the eq_prop to be
    # prepared already, which is not certain in do_prepare (order
    # dependent).
    if self.eq_prop:
        args = self.eq_prop.natural_arguments
        check_multiple([
            (self.eq_prop.type == T.Bool,
             'Equality property must return boolean'),

            (self.eq_prop.struct.matches(T.root_node),
             'Equality property must belong to a subtype of {}'.format(
                 T.root_node.dsl_name)),

            (len(args) == 1,
             'Equality property: expected 1 argument, got {}'.format(
                 len(args))),
        ])

        other_type = args[0].type
        check_source_language(
            other_type.is_entity_type,
            "First arg of equality property should be an entity type")
        check_source_language(
            other_type.element_type == self.eq_prop.struct,
            "Self and first argument should be of the same type")

        DynamicVariable.check_call_bindings(
            self.eq_prop, "In Bind's eq_prop {prop}")

    # Use "Default" placeholders when no custom property was provided
    cprop_uid = (self.conv_prop.uid if self.conv_prop else "Default")
    eprop_uid = (self.eq_prop.uid if self.eq_prop else "Default")

    if self.conv_prop:
        pred_func = Bind.Expr.dynamic_vars_to_holder(
            self.conv_prop, 'Logic_Converter_{}'.format(cprop_uid))
    else:
        pred_func = untyped_literal_expr('No_Logic_Converter_Default')

    # Left operand must be a logic variable. Make sure the resulting
    # equation will work on a clean logic variable.
    lhs = ResetLogicVar(construct(self.from_expr, T.LogicVar))

    # Second one can be either a logic variable or an entity (or an AST
    # node that is promoted to an entity).
    rhs = construct(self.to_expr)

    if rhs.type.matches(T.LogicVar):
        # For this operand too, make sure it will work on a clean logic
        # variable.
        rhs = ResetLogicVar(rhs)
    elif rhs.type.matches(T.root_node):
        from langkit.expressions import make_as_entity
        rhs = make_as_entity(rhs)
    else:
        check_source_language(
            rhs.type.matches(T.root_node.entity),
            'Right operand must be either a logic variable or an entity,'
            ' got {}'.format(rhs.type.dsl_name))

    # Because of Ada OOP typing rules, for code generation to work
    # properly, make sure the type of `rhs` is the root node entity.
    if (rhs.type.matches(T.root_node.entity)
            and rhs.type is not T.root_node.entity):
        from langkit.expressions import Cast
        rhs = Cast.Expr(rhs, T.root_node.entity)

    return Bind.Expr(self.conv_prop, self.eq_prop, cprop_uid, eprop_uid,
                     lhs, rhs, pred_func, abstract_expr=self)
def _resolve_property(name: str,
                      prop_ref: _Any,
                      arity: int) -> Optional[PropertyDef]:
    """
    Resolve the ``prop`` property reference (if any, built in the DSL) to
    the referenced property. If it is present, check its signature.

    :param name: Name of the property in the DSL construct. Used to format
        the error message.
    :param prop_ref: Property reference to resolve.
    :param arity: Expected number of entity arguments for this property
        ("Self" included).
    """
    from langkit.expressions import FieldAccess

    # First, resolve the property
    prop: PropertyDef
    if prop_ref is None:
        return None
    elif isinstance(prop_ref, FieldAccess):
        node_data = prop_ref.resolve_field()
        if isinstance(node_data, PropertyDef):
            prop = node_data
        else:
            error(f"{name} must be a property")
    elif isinstance(prop_ref, T.Defer):
        prop = prop_ref.get()
    elif isinstance(prop_ref, PropertyDef):
        prop = prop_ref
    else:
        error(
            f"{name} must be either a FieldAccess resolving to a property,"
            " or a direct reference to a property")

    # Second, check its signature
    prop = prop.root_property
    assert prop.struct
    check_source_language(
        prop.struct.matches(T.root_node),
        f"{name} must belong to a subtype of {T.root_node.dsl_name}",
    )

    # Check that it takes the expected number of arguments. "Self" counts
    # as an implicit argument, so we expect at least ``arity - 1`` natural
    # arguments.
    n_args = arity - 1
    entity_args = prop.natural_arguments[:n_args]
    extra_args = prop.natural_arguments[n_args:]
    check_source_language(
        len(entity_args) == n_args
        and all(arg.type.is_entity_type for arg in entity_args),
        f"{name} property must accept {n_args} entity arguments (only"
        f" {len(entity_args)} found)",
    )

    # The other arguments must be optional
    check_source_language(
        all(arg.default_value is not None for arg in extra_args),
        f"extra arguments for {name} must be optional",
    )

    # Check the property return type
    check_source_language(
        prop.type.matches(T.root_node.entity),
        f"{name} must return a subtype of {T.entity.dsl_name}",
    )

    # Check that all dynamic variables for this property are bound in the
    # current expression context.
    DynamicVariable.check_call_bindings(prop, f"In call to {{prop}} as {name}")

    # Third, generate a functor for this property, so that equations can
    # refer to it.
    from langkit.compile_context import get_context
    get_context().do_generate_logic_functors(prop, arity)

    return prop