def list(cls):
    """
    Return an ASTNode subclass that represents a list of "cls".

    :rtype: _ASTNodeList
    """
    # Lists are only allowed for concrete subclasses of the root node
    with cls._diagnostic_context():
        check_source_language(cls is not ASTNode,
                              'Lists of base ASTNode are not allowed')

    # Lazily create the list type the first time it is requested, then
    # cache it on the node subclass itself.
    if cls._list_type is None:
        list_type_name = (cls._name + names.Name('List')).camel
        cls._list_type = type(list_type_name, (_ASTNodeList, ),
                              {'_element_type': cls})
    return cls._list_type
def gen_name(var_name):
    """
    Generates a unique name from var_name.

    :param str|names.Name var_name: The base name. If it is a string, it
        needs to be a lower case with underscores string.
    :rtype: names.Name
    """
    # Normalize the base name to a names.Name instance
    base = var_name
    if isinstance(base, str):
        base = names.Name.from_lower(base)

    # Each base name has its own counter, so identifiers are unique per
    # base name.
    unique_id = next(__next_ids[base.lower])
    return base + names.Name(str(unique_id))
def create_context(self, args):
    """
    Build and return the Libadalang compilation context.

    :param args: Parsed command-line arguments; only ``args.verbosity``
        is read here.
    :rtype: CompileCtx
    """
    # Keep these import statements here so that they are executed only
    # after the coverage computation actually started.
    from langkit.compile_context import ADA_BODY, CompileCtx, LibraryEntity

    from ada.lexer import ada_lexer
    from ada.grammar import ada_grammar
    from ada.documentation import libadalang_docs

    ctx = CompileCtx(
        lang_name='Ada',
        short_name='LAL',
        lexer=ada_lexer,
        grammar=ada_grammar,
        # Ada sources are traditionally Latin-1 encoded
        default_charset='iso-8859-1',
        verbosity=args.verbosity,
        default_unit_provider=LibraryEntity(
            'Libadalang.Internal_Default_Provider', 'Create'),
        symbol_canonicalizer=LibraryEntity('Libadalang.Sources',
                                           'Canonicalize'),
        documentations=libadalang_docs,
    )

    # Internals need to access environment hooks and the symbolizer
    ctx.add_with_clause('Implementation', ADA_BODY,
                        'Libadalang.Env_Hooks', use_clause=True)
    ctx.add_with_clause('Implementation', ADA_BODY,
                        'Libadalang.Sources', use_clause=False)

    # Bind Libadalang's custom iterators to the public API
    ctx.add_with_clause('Iterators', ADA_BODY,
                        'Libadalang.Iterators.Extensions')

    # LAL.Analysis.Is_Keyword is implemented using LAL.Lexer's
    ctx.add_with_clause('Analysis', ADA_BODY, 'Libadalang.Lexer')

    # Copyright headers are added to all generated sources
    ctx.post_process_ada = ada.copyright.format_ada
    ctx.post_process_cpp = ada.copyright.format_c
    ctx.post_process_python = ada.copyright.format_python

    # Register our custom exception types
    ctx.register_exception_type(
        package=[names.Name("GNATCOLL"), names.Name("Projects")],
        name=names.Name("Invalid_Project"),
        doc_section="libadalang.project_provider",
    )
    ctx.register_exception_type(
        package=[names.Name("Libadalang"), names.Name("Project_Provider")],
        name=names.Name("Unsupported_View_Error"),
        doc_section="libadalang.project_provider",
    )

    return ctx
def generate_prelude(self, emitter, context):
    """
    Generate the liblktlang-prelude.ads source file from prelude.lkt.
    """
    # Read the prelude as a sequence of bytes, to match the destination
    # String value.
    prelude_path = os.path.join(
        os.path.dirname(__file__), "language", "prelude.lkt",
    )
    with open(prelude_path, "rb") as prelude_file:
        prelude_bytes = prelude_file.read()

    # Format the sources
    source_lines = ["package Liblktlang.Prelude is",
                    " Content : constant String :=",
                    bytes_repr(prelude_bytes, indent=" " * 6),
                    " ;",
                    "end Liblktlang.Prelude;"]

    # Write the source file and register it, so that it is referenced in
    # the generated project file.
    qual_name = [names.Name("Liblktlang"), names.Name("Prelude")]
    write_ada_file(
        out_dir=emitter.src_dir,
        source_kind=ADA_SPEC,
        qual_name=qual_name,
        content="\n".join(source_lines),
        post_process=emitter.post_process_ada,
    )
    emitter.add_library_interface(
        ada_file_path(emitter.src_dir, ADA_SPEC, qual_name),
        generated=True,
    )
def construct_common(self):
    """
    Construct the expressions commonly needed by collection expression
    subclasses, and return them as a tuple constituted of:

    1. The resolved collection expression.
    2. The resolved expression function passed to
       CollectionExpression's constructor.
    3. If the collection is an AST list, the iteration variable, whose
       type is the root grammar type. None otherwise.
    4. The element variable as a resolved expression. In the case of
       an AST list collection, this is just 3. converted to the
       specific type.
    5. The index variable as a resolved expression.
    6. The inner scope for the iteration.

    :rtype: (ResolvedExpression,
             ResolvedExpression,
             ResolvedExpression|None,
             ResolvedExpression,
             ResolvedExpression,
             langkit.expressions.base.LocalVars.Scope)
    """
    # Lower the collection expression, checking it is iterable
    collection_expr = construct(
        self.collection, lambda t: t.is_collection(),
        'Map cannot iterate on {expr_type}, which is not a collection')

    # The user-visible element variable gets the collection's element type
    self.element_var.set_type(collection_expr.type.element_type())

    current_scope = PropertyDef.get_scope()

    # If we are iterating over an AST list, then we get root grammar typed
    # values. We need to convert them to the more specific type to get the
    # rest of the expression machinery work. For this, create a new
    # variable.
    if collection_expr.type.is_list_type:
        self.list_element_var = AbstractVariable(
            names.Name("List_Item_{}".format(
                next(CollectionExpression._counter))),
            type=get_context().root_grammar_class)
        # The user element variable then becomes a regular local,
        # initialized from the root-typed iteration variable.
        self.element_var.add_to_scope(current_scope)

    with current_scope.new_child() as iter_scope:
        if self.index_var:
            PropertyDef.get_scope().add(self.index_var.local_var)

        return (collection_expr,
                construct(self.expr),
                (construct(self.list_element_var)
                 if self.list_element_var else None),
                construct(self.element_var),
                construct(self.index_var) if self.index_var else None,
                iter_scope)
def create_internal_property(self, name, expr, type):
    """
    Create an internal property for this env spec.

    If ``expr`` is None, do not create a property and return None.

    :param str name: Lower-case base name for the property; it is
        decorated since the property is internal.
    :param expr: Abstract expression for the property body, or None.
    :param type: Expected type for the property.
    :rtype: PropertyDef|None
    """
    if expr is None:
        return None

    p = PropertyDef(
        expr, AbstractNodeData.PREFIX_INTERNAL,
        name=names.Name('_{}_{}'.format(name,
                                        next(self.PROPERTY_COUNT))),
        public=False, type=type, ignore_warn_on_node=True
    )
    # Fall back to the env spec's own location when the expression has
    # none. Pass a default to getattr: without it, an expression that
    # lacks the "location" attribute would raise AttributeError instead
    # of triggering the intended fallback.
    p.location = getattr(expr, 'location', None) or self.location
    self.ast_node.add_field(p)
    return p
def emit_mains(self, ctx):
    """
    Emit sources and the project file for mains.

    :param ctx: Compilation context, used to render templates.
    """
    # Ada entity names must be rendered in Camel_With_Underscores
    with names.camel_with_underscores:
        write_ada_file(path.join(self.lib_root, 'src-mains'),
                       ADA_BODY, [names.Name('Parse')],
                       ctx.render_template('main_parse_ada'),
                       self.post_process_ada)

        write_source_file(
            self.mains_project,
            ctx.render_template(
                'mains_project_file',
                lib_name=ctx.ada_api_settings.lib_name,
                source_dirs=self.main_source_dirs,
                main_programs=self.main_programs))
def emit_c_api(self, ctx):
    """
    Generate header and binding body for the external C API.
    """
    # C-level entities use the lower-case naming convention
    with names.lower:
        header_path = path.join(
            self.include_path,
            '{}.h'.format(ctx.c_api_settings.lib_name))
        write_cpp_file(header_path,
                       ctx.render_template('c_api/header_c'),
                       self.post_process_cpp)

    self.write_ada_module(
        self.src_path, 'c_api/pkg_main',
        [names.Name(n) for n in 'Implementation.C'.split('.')])
def import_enum_node_attributes(mcs, dct, qualifier, alts, fields):
    """
    Populate ``dct`` (the class dict of an enum node under construction)
    with parser-creation helpers, alternatives and, for qualifier nodes,
    the synthetic "as_bool" property.

    :param dct: Class dict to mutate in place.
    :param bool qualifier: Whether this enum node is a boolean qualifier.
    :param alts: List of alternative descriptors for this enum node.
    :param fields: List of (name, field) couples, extended in place.
    """
    from langkit.expressions import AbstractProperty
    from langkit.parsers import Opt, _Row, _Transform

    def create_parser_bool_node(cls, *args):
        # If the node is a boolean node, then we want to parse the
        # sub-parsers as an optional parser that will be booleanized.
        return Opt(*args).as_bool(cls)

    def create_parser_enum_node(cls, alt_typeref, *args):
        # Otherwise, we want to parse the sub-parsers as a row + transform
        return _Transform(_Row(*args), alt_typeref)

    def constructor(cls, *args):
        """
        This constructor can be used in the grammar to create a parser
        for this enum node. This is valid only when qualifier is set to
        True.
        """
        assert qualifier
        return create_parser_bool_node(cls, *args)

    dct['__new__'] = constructor
    dct['_create_parser'] = classmethod(
        create_parser_bool_node if qualifier else create_parser_enum_node
    )

    dct['_alternatives'] = alts
    dct['_qualifier'] = qualifier

    # Make _EnumNodeAlternative instances available as attributes of the
    # enum node class for a convenient way to create parsers for them.
    for alt in alts:
        attr_name = (names.Name('Alt') + alt.name).lower
        dct[attr_name] = alt

    if qualifier:
        # Add the synthetic "as_bool" abstract property
        present_alt = alts[0]
        prop = AbstractProperty(
            type=T.Bool, public=True,
            doc='Return whether this is an instance of {}'.format(
                (dct['_name'] + present_alt.name).camel
            )
        )
        prop.location = dct['_location']
        fields.append(('as_bool', prop))
def __new__(mcs, name, bases, dct):
    """
    Metaclass constructor for enum node classes: validate the declared
    alternatives, decorate the class dict and register the new type.
    """
    # Don't do anything for EnumNode itself: it's just a placeholder
    if bases == (BaseStruct, ):
        return type.__new__(mcs, name, bases, dct)

    location = extract_library_location()
    with Context('in {}'.format(name), location):
        qualifier = dct.pop('qualifier', False)
        if qualifier:
            # Qualifier nodes always have exactly these two alternatives
            alternatives = ['present', 'absent']
        else:
            alternatives = dct.pop('alternatives', None)
            check_source_language(alternatives is not None,
                                  'Missing "alternatives" field')
            check_source_language(
                isinstance(alternatives, list)
                and all(isinstance(alt, str) for alt in alternatives),
                'The "alternatives" field must contain a list of strings'
            )

    alts = [EnumNode.Alternative(names.Name.from_lower(alt))
            for alt in alternatives]
    fields = EnumNode.collect_fields(name, location, dct,
                                     (_UserField, PropertyDef))

    DSLType._import_base_type_info(name, location, dct)
    dct['_fields'] = fields
    dct['_alternatives'] = alts
    dct['_qualifier'] = qualifier
    if qualifier:
        dct['_alt_present'], dct['_alt_absent'] = alts

    # Make Alternative instances available as EnumNode class attributes for
    # a convenient way to create parsers for them.
    for alt in alts:
        attr_name = (names.Name('alt') + alt.name).lower
        dct[attr_name] = alt

    cls = type.__new__(mcs, name, bases, dct)
    mcs.enum_types.append(cls)

    # Give each alternative a back-link to the class that owns it
    for alt in alts:
        alt._enum_node_cls = cls

    return cls
def generate_lexer_dfa(self, ctx):
    """
    Generate code for the lexer state machine.
    """
    # Source file that contains the state machine implementation
    sm_body_file = ada_file_path(
        self.src_dir, ADA_BODY,
        [ctx.lib_name, names.Name('Lexer_State_Machine')])

    # Generate the lexer state machine iff the file is missing or its
    # signature has changed since last time.
    signature_file = os.path.join(
        self.lib_root, 'obj',
        '{}_lexer_signature.txt'.format(ctx.short_name_or_long.lower))
    signature_changed = write_source_file(
        signature_file, json.dumps(ctx.lexer.signature, indent=2))

    if signature_changed or not os.path.exists(sm_body_file):
        self.dfa_code = ctx.lexer.build_dfa_code(ctx)
def __init__(self, c_api_settings, name, external=False):
    """Create a stub for a C API type.

    :param CAPISettings c_api_settings: API settings to use for this
        type.

    :param name: The name for the type. Can be either a Name or a
        lower-case formatted name.
    :type name: str|names.Name

    :param bool external: Whether this type is already declared outside
        the C API. For instance: "int" is external, but "node" is not.
    """
    self.c_api_settings = c_api_settings
    self.external = external

    # Make private the following in order to avoid accidental use of
    # these instead of the properties.
    if isinstance(name, names.Name):
        self._name = name
    else:
        self._name = names.Name(name)
def create_internal_property(name, expr, type):
    """
    Create an internal property from ``expr`` and append it to
    ``result``, or return None when ``expr`` is None.

    NOTE: this is a closure: ``self`` and ``result`` are free variables
    captured from the enclosing scope.
    """
    if expr is None:
        return None

    # Set has_implicit_env for these internal properties so that they
    # can use a default environment that the context gives. This
    # default will be the Self_Env of the parent node, which is always
    # the same, regardless of being run in the populate lexical env
    # pass or later on. See Initial_Env_Getter_Fn functions for the
    # code that fetches this default environment.
    p = PropertyDef(expr, AbstractNodeData.PREFIX_INTERNAL,
                    name=names.Name('_{}_{}'.format(
                        name, next(self.PROPERTY_COUNT))),
                    private=True, type=type,
                    has_implicit_env=True)
    result.append(p)
    return p
def emit_c_api(self, ctx):
    """
    Generate header and binding body for the external C API.
    """
    # C-level entities use the lower-case naming convention
    with names.lower:
        # TODO (TA20-017: gprinstall bug): generate the header in
        # "src" and add it to the library interface (see disabled code
        # below).
        header_filename = '{}.h'.format(ctx.c_api_settings.lib_name)
        write_cpp_file(path.join(self.lib_root, header_filename),
                       ctx.render_template('c_api/header_c'),
                       self.post_process_cpp)

    self.write_ada_module(
        self.src_dir, 'c_api/pkg_main',
        [names.Name(n) for n in 'Implementation.C'.split('.')],
        in_library=True)
def __init__(self, template_base_name, rel_qual_name, has_body=True,
             ada_api=False, unparser=False, cached_body=False,
             is_interface=True):
    """
    :param str template_base_name: Common prefix for the name of the
        templates to use in order to generate spec/body sources for this
        unit.
    :param str rel_qual_name: Qualified name for the unit to generate,
        without the top-level library name.
    :param bool ada_api: Whether we can avoid generating this unit if
        the Ada API is disabled.
    :param bool unparser: Whether we can avoid generating this unit if
        unparsing is disabled.
    :param bool has_body: Whether this unit has a body (otherwise, it's
        just a spec).
    :param bool cached_body: If true, only register the body as a
        library interface, i.e. do not generate it, considering that it
        is cached.
    :param bool is_interface: Whether to include this module in the
        generated library interface.
    """
    self.template_base_name = template_base_name

    # An empty relative name means the unit is the library root package
    if rel_qual_name:
        self.qual_name = [names.Name(n)
                          for n in rel_qual_name.split('.')]
    else:
        self.qual_name = []

    self.ada_api = ada_api
    self.unparser = unparser
    self.has_body = has_body
    self.cached_body = cached_body
    self.is_interface = is_interface
def do_prepare(self):
    """
    Expand the lambda passed to this Then expression, after validating
    its signature: exactly one positional parameter, no varargs, no
    keyword arguments and no defaults.
    """
    # If this Then was created using create_from exprs, there is no lambda
    # expansion to do.
    if self.then_expr:
        return

    # inspect.getargspec was deprecated since Python 3.0 and removed in
    # Python 3.11; use getfullargspec instead. Note that the old
    # "keywords" attribute is called "varkw" there, and that we must
    # also reject keyword-only parameters.
    argspec = inspect.getfullargspec(self.then_fn)
    check_source_language(
        len(argspec.args) == 1
        and not argspec.varargs
        and not argspec.varkw
        and not argspec.defaults
        and not argspec.kwonlyargs,
        'Invalid lambda for Then expression: exactly one parameter is'
        ' required, without a default value'
    )

    # Create the variable that will hold the checked expression and feed
    # it to the user lambda to build the "then" sub-expression.
    self.var_expr = AbstractVariable(
        names.Name("Var_Expr"), create_local=True,
        source_name=argspec.args[0])
    self.then_expr = unsugar(self.then_fn(self.var_expr))
def create_internal_property(self, name, expr, type):
    """
    Create an internal property for this env spec.

    If ``expr`` is None, do not create a property and return None.
    Otherwise, unsugar it.

    :param str name: Lower-case name to use to create this property
        name. Since the property is internal, the name is decorated.
    """
    if expr is None:
        return None
    expr = unsugar(expr)

    p = PropertyDef(expr, AbstractNodeData.PREFIX_INTERNAL,
                    name=names.Name('_{}_{}'.format(
                        name, next(self.PROPERTY_COUNT))),
                    public=False, type=type,
                    ignore_warn_on_node=True)
    # Fall back to the env spec's own location when the expression has
    # none. Pass a default to getattr: without it, an expression that
    # lacks the "location" attribute would raise AttributeError instead
    # of triggering the intended fallback.
    p.location = getattr(expr, 'location', None) or self.location
    self.ast_node.add_field(p)
    return p
class LexicalEnv(_BuiltinType):
    """
    Type for lexical environments.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Lexical_Env')
def construct_common(self):
    """
    Construct and return the expressions commonly needed by collection
    expression subclasses.

    :rtype: CollectionExpression.ConstructCommonResult
    """
    current_scope = PropertyDef.get_scope()

    # First, build the collection expression. From the result, we can
    # deduce the type of the element variable.
    collection_expr = construct(self.collection)
    with_entities = collection_expr.type.is_entity_type
    if with_entities:
        # Unwrap the entity: iterate on the bare list node and keep the
        # entity info around to re-wrap elements later.
        saved_entity_coll_expr, collection_expr, entity_info = (
            collection_expr.destructure_entity())
        collection_expr = SequenceExpr(saved_entity_coll_expr,
                                       collection_expr)

    check_source_language(
        collection_expr.type.is_collection,
        'Cannot iterate on {}, which is not a collection'.format(
            collection_expr.type.dsl_name))

    elt_type = collection_expr.type.element_type
    if with_entities:
        elt_type = elt_type.entity
    self.element_var.set_type(elt_type)

    # List of "element" iteration variables
    elt_vars = [construct(self.element_var)]

    # List of initializing expressions for them
    elt_var_inits = []

    if with_entities:
        # Last iteration variable is the entity; add a bare-node one and
        # initialize the entity variable from it.
        entity_var = elt_vars[-1]
        node_var = AbstractVariable(
            names.Name('Bare') + self.element_var._name,
            type=elt_type.element_type)
        elt_var_inits.append(
            make_as_entity(construct(node_var),
                           entity_info=entity_info))
        elt_vars.append(construct(node_var))

    # If we are iterating over an AST list, then we get root grammar typed
    # values. We need to convert them to the more specific type to get the
    # rest of the expression machinery work.
    if collection_expr.type.is_list_type:
        typed_elt_var = elt_vars[-1]
        untyped_elt_var = AbstractVariable(
            names.Name('Untyped') + self.element_var._name,
            type=get_context().root_grammar_class)
        # Initialize the former last variable with a cast from the new last
        # variable and push the new last variable.
        elt_var_inits.append(
            UncheckedCastExpr(construct(untyped_elt_var),
                              typed_elt_var.type))
        elt_vars.append(construct(untyped_elt_var))

    # Only then we can build the inner expression
    with current_scope.new_child() as inner_scope:
        inner_expr = construct(self.expr)

    # All intermediate iteration variables are regular local variables
    # in the iteration scope.
    if with_entities:
        entity_var.abstract_var.create_local_variable(inner_scope)
    if collection_expr.type.is_list_type:
        typed_elt_var.abstract_var.create_local_variable(inner_scope)
    if self.index_var:
        self.index_var.add_to_scope(inner_scope)

    # The last variable (the codegen loop variable) has no initializer
    elt_var_inits.append(None)

    return self.ConstructCommonResult(
        collection_expr,
        funcy.lzip(elt_vars, elt_var_inits),
        construct(self.index_var) if self.index_var else None,
        inner_expr,
        inner_scope)
def get_enum_alternative(self, type_name, alt_name, suffix):
    """
    Return the C name for an enum alternative, built from the enum type
    name and the alternative name.
    """
    combined = '{}_{}'.format(type_name.base_name, alt_name.base_name)
    return self.get_name(names.Name(combined))
class BigInt(_BuiltinType):
    """
    Type for integers of arbitrary precision.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Big_Int')
class Token(_BuiltinType):
    """
    Type for token values, as found in an analysis unit's token data
    handler.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Token')
class Symbol(_BuiltinType):
    """
    Type for symbol values (canonicalized names).
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Symbol')
class Int(_BuiltinType):
    """
    Simple integer type.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Int')
class LongType(_BuiltinType):
    """
    Simple integer type.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Long_Type')
class EnvRebindings(_BuiltinType):
    """
    Type for environment rebinding values.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Env_Rebindings')
def construct_common(self) -> CollectionExpression.ConstructCommonResult:
    """
    Construct and return the expressions commonly needed by collection
    expression subclasses.
    """
    assert self.element_var is not None

    current_scope = PropertyDef.get_scope()

    # Because of the discrepancy between the storage type in list nodes
    # (always root nodes) and the element type that user code deals with
    # (non-root list elements and/or entities), we may need to introduce
    # variables and initializing expressions. This is what the code below
    # does.

    # First, build the collection expression. From the result, we can
    # deduce the type of the user element variable.
    collection_expr = construct(self.collection)

    # If the collection is actually an entity, unwrap the bare list node
    # and save the entity info for later.
    with_entities = collection_expr.type.is_entity_type
    if with_entities:
        saved_entity_coll_expr, collection_expr, entity_info = (
            collection_expr.destructure_entity()
        )
        collection_expr = SequenceExpr(saved_entity_coll_expr,
                                       collection_expr)

    check_source_language(
        collection_expr.type.is_collection,
        'Cannot iterate on {}, which is not a collection'.format(
            collection_expr.type.dsl_name
        )
    )

    # Now that potential entity types are unwrapped, we can look for its
    # element type.
    elt_type = collection_expr.type.element_type
    if with_entities:
        elt_type = elt_type.entity
    self.element_var.set_type(elt_type)

    user_element_var = construct(self.element_var)

    # List of element variables, and the associated initialization
    # expressions (when applicable).
    #
    # Start with the only element variable that exists at this point: the
    # one that the user code for each iteration uses directly. When
    # relevant, each step in the code below creates a new variable N and
    # initialize variable N-1 from it.
    element_vars: List[InitializedVar] = [InitializedVar(user_element_var)]

    # Node lists contain bare nodes: if the user code deals with entities,
    # create a variable to hold a bare node and initialize the user
    # variable using it.
    if with_entities:
        entity_var = element_vars[-1]
        node_var = AbstractVariable(
            names.Name('Bare') + self.element_var._name,
            type=elt_type.element_type
        )
        entity_var.init_expr = make_as_entity(
            construct(node_var), entity_info=entity_info
        )
        element_vars.append(InitializedVar(construct(node_var)))

    # Node lists contain root nodes: if the user code deals with non-root
    # nodes, create a variable to hold the root bare node and initialize
    # the non-root node using it.
    if (
        collection_expr.type.is_list_type
        and not collection_expr.type.is_root_node
    ):
        typed_elt_var = element_vars[-1]
        untyped_elt_var = AbstractVariable(
            names.Name('Untyped') + self.element_var._name,
            type=get_context().root_grammar_class
        )
        typed_elt_var.init_expr = UncheckedCastExpr(
            construct(untyped_elt_var), typed_elt_var.var.type
        )
        element_vars.append(InitializedVar(construct(untyped_elt_var)))

    # Keep track of the ultimate "codegen" element variable. Unlike all
    # other iteration variable, it is the only one that will be defined by
    # the "for" loop in Ada (the other ones must be declared as regular
    # local variables).
    codegen_element_var = element_vars[-1].var

    # Create a scope to contain the code that runs during an iteration and
    # lower the iteration expression.
    with current_scope.new_child() as inner_scope:
        inner_expr = construct(self.expr)

    # Build the list of all iteration variables
    iter_vars = list(element_vars)
    index_var = None
    if self.index_var:
        index_var = construct(self.index_var)
        iter_vars.append(InitializedVar(index_var))

    # Create local variables for all iteration variables that need it
    for v in iter_vars:
        if v.var != codegen_element_var:
            v.var.abstract_var.create_local_variable(inner_scope)

    return self.ConstructCommonResult(
        collection_expr,
        codegen_element_var,
        user_element_var,
        index_var,
        iter_vars,
        inner_expr,
        inner_scope,
    )
class AnalysisUnit(_BuiltinType):
    """
    Type for analysis unit values.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Analysis_Unit')
class Bool(_BuiltinType):
    """
    Type for boolean values.
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Bool')
class LogicVar(_BuiltinType):
    """
    Type for logic variables, to be used in equations (see Equation).
    """
    # Name used to refer to this builtin in generated code
    _name = names.Name('Logic_Var')