def get_string_ast_node(string_ast: LocatableString, mls: bool) -> Union[Literal, StringFormat]:
    """
    Turn a parsed string literal into an AST node: a plain Literal when it contains
    no ``{{...}}`` interpolations, otherwise a StringFormat with located variable
    references (built via create_string_format).

    :param string_ast: the string as received from the lexer, carrying its source location
    :param mls: True when the string was a multi-line (triple-quoted) string, which
        changes the width of the opening delimiter used in column calculations
    """
    matches: List[re.Match[str]] = list( format_regex_compiled.finditer(str(string_ast)))
    if len(matches) == 0:
        # no {{...}} present: plain literal string
        return Literal(str(string_ast))
    start_lnr = string_ast.location.lnr
    start_char_pos = string_ast.location.start_char
    whole_string = str(string_ast)
    mls_offset: int = 3 if mls else 1  # len(""") or len(') or len(")

    def char_count_to_lnr_char(position: int) -> Tuple[int, int]:
        # convert in-string position to lnr/charcount
        before = whole_string[0:position]
        lines = before.count("\n")
        if lines == 0:
            # still on the string's first line: shift by the opening delimiter width
            return start_lnr, start_char_pos + position + mls_offset
        else:
            # on a later line: column is relative to the last newline before position
            return start_lnr + lines, position - before.rindex("\n")

    locatable_matches: List[Tuple[str, LocatableString]] = []
    for match in matches:
        # group 2 is the variable expression inside the {{ }} (see create_string_format)
        start_line, start_char = char_count_to_lnr_char(match.start(2))
        end_line, end_char = char_count_to_lnr_char(match.end(2))
        range: Range = Range(string_ast.location.file, start_line, start_char, end_line, end_char)
        locatable_string = LocatableString(match[2], range, string_ast.lexpos, string_ast.namespace)
        locatable_matches.append((match[1], locatable_string))
    return create_string_format(string_ast, locatable_matches)
def p_error(p: YaccProduction) -> None:
    """
    ply error callback: raise a ParserException located at the offending token.

    Handles three cases: unexpected end of file, a reserved keyword used where an
    identifier was expected (either as the current token or as the symbol on top
    of the parser stack), and any other unexpected token.

    :raises ParserException: always
    """
    pos = lexer.lexpos - lexer.linestart + 1
    r = Range(file, lexer.lineno, pos, lexer.lineno, pos)

    if p is None:
        # at end of file
        raise ParserException(r, None, "Unexpected end of file")

    def raise_reserved(value: object) -> None:
        # Report a reserved keyword used as an identifier, preferring the
        # token's own location when it carries one (falls back to the lexer pos).
        location = value.location if hasattr(value, "location") else r
        raise ParserException(location, str(value), "invalid identifier, %s is a reserved keyword" % value)

    # keyword instead of ID, either as the lookahead token ...
    if p.type in reserved.values():
        raise_reserved(p.value)
    # ... or as the symbol already shifted onto the parser stack
    if parser.symstack[-1].type in reserved.values():
        raise_reserved(parser.symstack[-1].value)

    raise ParserException(r, p.value)
def __init__(
    self,
    namespace: Namespace,
    lname: LocatableString,
    comment: Optional[LocatableString],
    parents: List[LocatableString],
    attributes: List[DefineAttribute],
) -> None:
    """
    Define a new entity in the given namespace.

    :param namespace: namespace the entity is defined in
    :param lname: located name of the entity (hyphenated names trigger a deprecation warning)
    :param comment: optional docstring/comment for the entity
    :param parents: located names of parent entities; when empty, std::Entity is
        implicitly added (except for std::Entity itself)
    :param attributes: attribute definitions of the entity
    """
    name = str(lname)
    TypeDefinitionStatement.__init__(self, namespace, name)
    if "-" in name:
        inmanta_warnings.warn(HyphenDeprecationWarning(lname))

    self.anchors = [TypeReferenceAnchor(namespace, x) for x in parents]
    self.name = name
    self.attributes = attributes
    self.comment = str(comment) if comment is not None else None
    # Copy the list: the implicit std::Entity parent appended below must not
    # mutate the caller's `parents` argument (the original aliased it).
    self.parents = list(parents)

    if len(self.parents) == 0 and not (self.name == "Entity" and self.namespace.name == "std"):
        # every entity except std::Entity itself implicitly inherits from std::Entity
        dummy_location: Range = Range("__internal__", 1, 1, 1, 1)
        self.parents.append(LocatableString("std::Entity", dummy_location, -1, namespace))
    self.type = Entity(self.name, namespace, self.comment)
    self.type.location = lname.location
def t_ANY_error(t: lex.LexToken) -> lex.LexToken:  # noqa: N802
    # Lexer error hook: report the illegal character at its 1-based column.
    offending: str = t.value[0]
    lexer = t.lexer
    column = lexer.lexpos - lexer.linestart + 1
    location: Range = Range(lexer.inmfile, lexer.lineno, column, lexer.lineno, column + 1)
    raise ParserException(location, offending, "Illegal character '%s'" % offending)
def create_string_format(
        format_string: LocatableString, variables: List[Tuple[str, LocatableString]]) -> StringFormat:
    """
    Create a string interpolation statement. This function assumes that the variables of a match are on the same line.

    :param format_string: the LocatableString as it was received by get_string_ast_node()
    :param variables: A list of tuples where each tuple is a combination of a string and LocatableString
        The string is the match containing the {{}} (ex: {{a.b}}) and the LocatableString is composed of just the
        variables and the range for those variables. (ex. LocatableString("a.b", range(a.b), lexpos, namespace))
    """
    assert namespace
    _vars = []
    for match, var in variables:
        var_name: str = str(var)
        var_parts: List[str] = var_name.split(".")
        start_char = var.location.start_char
        end_char = start_char + len(var_parts[0])
        # range for the leading reference part only (before the first '.')
        range: Range = Range(var.location.file, var.location.lnr, start_char, var.location.lnr, end_char)
        ref_locatable_string = LocatableString(var_parts[0], range, var.lexpos, var.namespace)
        ref = Reference(ref_locatable_string)
        ref.namespace = namespace
        if len(var_parts) > 1:
            # Running start column of each attribute part: every part begins one
            # character (the '.') past the end of the previous part.
            attribute_offsets: Iterator[int] = accumulate(
                var_parts[1:], lambda acc, part: acc + len(part) + 1, initial=end_char + 1)
            for attr, char_offset in zip(var_parts[1:], attribute_offsets):
                range_attr: Range = Range(
                    var.location.file, var.location.lnr, char_offset, var.location.lnr, char_offset + len(attr))
                attr_locatable_string: LocatableString = LocatableString(attr, range_attr, var.lexpos, var.namespace)
                # chain attribute accesses: a.b.c -> AttributeReference(AttributeReference(Reference(a), b), c)
                ref = AttributeReference(ref, attr_locatable_string)
                ref.location = range_attr
                ref.namespace = namespace
            _vars.append((ref, match))
        else:
            _vars.append((ref, match))
    return StringFormat(str(format_string), _vars)
def t_STRING(t: lex.LexToken) -> lex.LexToken:  # noqa: N802
    r"(\"([^\\\"]|\\.)*\")|(\'([^\\\']|\\.)*\')"
    # NOTE: the raw-string above is the ply token regex, not a docstring.
    # Resolve backslash escape sequences in the quoted value. Going through
    # latin-1 with backslashreplace (instead of utf-8) keeps non-ASCII text
    # intact: the "unicode_escape" codec interprets its input bytes as
    # latin-1, so decoding utf-8-encoded bytes with it mangles every
    # non-ASCII character (mojibake). latin-1 round-trips chars <= U+00FF
    # and backslashreplace turns the rest into \uXXXX escapes, which
    # unicode_escape then decodes back to the original characters.
    t.value = t.value[1:-1].encode("latin-1", "backslashreplace").decode("unicode_escape")
    lexer = t.lexer
    # 1-based start/end columns of the token on the current line
    end = lexer.lexpos - lexer.linestart + 1
    (s, e) = lexer.lexmatch.span()
    start = end - (e - s)
    t.value = LocatableString(
        t.value, Range(lexer.inmfile, lexer.lineno, start, lexer.lineno, end), lexer.lexpos, lexer.namespace)
    return t
def t_RSTRING(t: lex.LexToken) -> lex.LexToken:  # noqa: N802
    r"r(\"([^\\\"\n]|\\.)*\")|r(\'([^\\\'\n]|\\.)*\')"
    # Raw string literal: drop the leading r and surrounding quotes,
    # keeping escape sequences verbatim.
    raw_value = t.value[2:-1]
    lexer = t.lexer
    match_start, match_end = lexer.lexmatch.span()
    end_column = lexer.lexpos - lexer.linestart + 1
    start_column = end_column - (match_end - match_start)
    location = Range(lexer.inmfile, lexer.lineno, start_column, lexer.lineno, end_column)
    t.value = LocatableString(raw_value, location, lexer.lexpos, lexer.namespace)
    return t
def t_ID(t: lex.LexToken) -> lex.LexToken:  # noqa: N802
    r"[a-zA-Z_][a-zA-Z_0-9-]*"
    # Capitalized names are class identifiers; otherwise check for reserved words.
    t.type = "CID" if t.value[0].isupper() else reserved.get(t.value, "ID")
    lexer = t.lexer
    span_start, span_end = lexer.lexmatch.span()
    end_col = lexer.lexpos - lexer.linestart + 1
    start_col = end_col - (span_end - span_start)
    location = Range(lexer.inmfile, lexer.lineno, start_col, lexer.lineno, end_col)
    t.value = LocatableString(t.value, location, lexer.lexpos, lexer.namespace)
    return t
def to_type(self, arg_type: Optional[object], resolver: Namespace) -> Optional[inmanta_type.Type]:
    """
    Convert a string representation of a type to a type
    """
    if arg_type is None:
        return None

    if not isinstance(arg_type, str):
        raise CompilerException(
            "bad annotation in plugin %s::%s, expected str but got %s (%s)"
            % (self.ns, self.__class__.__function_name__, type(arg_type), arg_type))

    if arg_type == "any":
        return None
    if arg_type == "expression":
        return None

    # quickfix issue #1774
    allowed_element_type: inmanta_type.Type = inmanta_type.Type()
    if arg_type == "list":
        return inmanta_type.TypedList(allowed_element_type)
    if arg_type == "dict":
        return inmanta_type.TypedDict(allowed_element_type)

    plugin_line: Range = Range(self.location.file, self.location.lnr, 1, self.location.lnr + 1, 1)
    locatable_type: LocatableString = LocatableString(arg_type, plugin_line, 0, None)

    # Peel optional "?" and "[]" suffixes off the type name, remembering which
    # wrappers to re-apply around the resolved base type ("?" is outermost).
    wrap_nullable = locatable_type.value.endswith("?")
    if wrap_nullable:
        locatable_type.value = locatable_type.value[0:-1]
    wrap_list = locatable_type.value.endswith("[]")
    if wrap_list:
        locatable_type.value = locatable_type.value[0:-2]

    resolved: inmanta_type.Type = resolver.get_type(locatable_type)
    if wrap_list:
        resolved = inmanta_type.TypedList(resolved)
    if wrap_nullable:
        resolved = inmanta_type.NullableType(resolved)
    return resolved
def t_begin_mls(t: lex.LexToken) -> lex.LexToken:
    r'["]{3}'
    # Opening triple quote: switch the lexer into the "mls" state and emit
    # an empty MLS token located at the delimiter.
    lexer = t.lexer
    lexer.begin("mls")
    t.type = "MLS"
    span_start, span_end = lexer.lexmatch.span()
    end_col = lexer.lexpos - lexer.linestart + 1
    start_col = end_col - (span_end - span_start)
    location = Range(lexer.inmfile, lexer.lineno, start_col, lexer.lineno, end_col)
    t.value = LocatableString("", location, lexer.lexpos, lexer.namespace)
    return t
def __init__(self, instance: Reference, attribute: LocatableString) -> None:
    """
    Build an attribute access (``instance.attribute``) as a Reference.

    :param instance: the reference being accessed
    :param attribute: located name of the attribute
    """
    # range spans from the start of the instance reference to the end of the attribute
    range: Range = Range(
        instance.locatable_name.location.file,
        instance.locatable_name.lnr,
        instance.locatable_name.start,
        attribute.elnr,
        attribute.end,
    )
    # dotted display name, e.g. "a.b"
    reference: LocatableString = LocatableString(
        "%s.%s" % (instance.full_name, attribute), range, instance.locatable_name.lexpos, instance.namespace
    )
    Reference.__init__(self, reference)
    self.attribute = attribute

    # a reference to the instance
    self.instance = instance
def t_REGEX(t: lex.LexToken) -> lex.LexToken:  # noqa: N802
    r"/([^/\\]|\\.)+/"
    # A regex literal constrains an anonymous "self" value.
    value = Reference("self")  # anonymous value
    try:
        expr = Regex(value, t.value[1:-1])
        t.value = expr
        return t
    except RegexError as error:
        # Locate the offending literal and re-raise as a parser error.
        end = t.lexer.lexpos - t.lexer.linestart + 1
        (s, e) = t.lexer.lexmatch.span()
        start = end - (e - s)
        r: Range = Range(t.lexer.inmfile, t.lexer.lineno, start, t.lexer.lineno, end)
        # chain the original cause ("from error") so the underlying regex
        # failure stays visible in the traceback instead of appearing as an
        # error raised "during handling" of another
        raise ParserException(r, t.value, "Regex error in %s: '%s'" % (t.value, error)) from error
def t_mls_end(t: lex.LexToken) -> lex.LexToken:
    r'.*["]{3}'
    # Closing delimiter of a multi-line string: return to the INITIAL lexer
    # state and emit the text that precedes the three quotes.
    lexer = t.lexer
    lexer.begin("INITIAL")
    t.type = "MLS_END"
    body = t.value[:-3]
    span_start, span_end = lexer.lexmatch.span()
    end_col = lexer.lexpos - lexer.linestart + 1
    start_col = end_col - (span_end - span_start)
    location = Range(lexer.inmfile, lexer.lineno, start_col, lexer.lineno, end_col)
    t.value = LocatableString(body, location, lexer.lexpos, lexer.namespace)
    return t
def t_MLS(t: lex.LexToken) -> lex.LexToken:
    r'"{3,5}([\s\S]*?)"{3,5}'
    # Tokenize a complete (possibly multi-line) triple-quoted string in one match.
    value = t.value[3:-3]
    lexer = t.lexer
    match = lexer.lexmatch[0]
    lines = match.split("\n")
    start_line = lexer.lineno
    end_line = lexer.lineno + len(lines) - 1
    # advance the lexer's line counter past the newlines this token consumed
    t.lexer.lineno = end_line
    (s, e) = lexer.lexmatch.span()
    # start column on the first line of the match; end column is measured on
    # the last line of the match (both 1-based)
    start = lexer.lexpos - lexer.linestart - (e - s) + 1
    end = len(lines[-1]) + 1
    t.value = LocatableString(
        value, Range(lexer.inmfile, start_line, start, end_line, end), lexer.lexpos, lexer.namespace)
    return t
def merge_lnr_to_string(p: YaccProduction, starttoken: int = 1, endtoken: int = 2) -> None:
    """
    Rewrap p[0] as a LocatableString whose range spans from `starttoken` to `endtoken`.

    When the start token is not a LocatableString, the range collapses onto the
    end token's own start position.
    """
    value = p[0]
    end_tok = p[endtoken]
    start_tok = p[starttoken]
    if isinstance(start_tok, LocatableString):
        start_line, start_col = start_tok.lnr, start_tok.start
    else:
        start_line, start_col = end_tok.lnr, end_tok.start
    span = Range(file, start_line, start_col, end_tok.elnr, end_tok.end)
    p[0] = LocatableString(value, span, end_tok.end, namespace)
def compile(self) -> Tuple[List["Statement"], List["BasicBlock"]]:
    """
    This method will parse and prepare everything to start evaluation
    the configuration specification.

    This method will:
    - load all modules using Project.get().get_complete_ast()
    - add all plugins
    - create std::Entity

    :return: the statements and basic blocks of the complete AST
    """
    project = module.Project.get()
    self.__root_ns = project.get_root_namespace()

    project.load()
    statements, blocks = project.get_complete_ast()

    # load plugins
    for name, cls in PluginMeta.get_functions().items():
        mod_ns = cls.__module__.split(".")
        if mod_ns[0] != const.PLUGINS_PACKAGE:
            raise Exception(
                "All plugin modules should be loaded in the %s package not in %s" % (const.PLUGINS_PACKAGE, cls.__module__)
            )
        mod_ns = mod_ns[1:]

        # walk the namespace tree down to the plugin module's own namespace
        ns: Optional[Namespace] = self.__root_ns
        for part in mod_ns:
            if ns is None:
                break
            ns = ns.get_child(part)
        if ns is None:
            raise Exception("Unable to find namespace for plugin module %s" % (cls.__module__))

        # keep only the unqualified plugin name
        name = name.split("::")[-1]
        statement = PluginStatement(ns, name, cls)
        statements.append(statement)

    # add the entity type (hack?)
    ns = self.__root_ns.get_child_or_create("std")
    nullrange = Range("internal", 1, 0, 0, 0)
    entity = DefineEntity(
        ns,
        LocatableString("Entity", nullrange, 0, ns),
        LocatableString("The entity all other entities inherit from.", nullrange, 0, ns),
        [],
        [],
    )

    str_std_entity = LocatableString("std::Entity", nullrange, 0, ns)

    # std::Entity carries the implicit requires/provides relation
    requires_rel = DefineRelation(
        (str_std_entity, LocatableString("requires", nullrange, 0, ns), (0, None)),
        (str_std_entity, LocatableString("provides", nullrange, 0, ns), (0, None)),
    )
    requires_rel.namespace = self.__root_ns.get_ns_from_string("std")

    statements.append(entity)
    statements.append(requires_rel)
    return (statements, blocks)
def expand_range(start: Range, end: Range) -> Range:
    """
    Returns a new range from the start of `start` to the end of `end`.

    Assumes both ranges are on the same file.
    """
    first_line = start.lnr
    first_col = start.start_char
    last_line = end.end_lnr
    last_col = end.end_char
    return Range(start.file, first_line, first_col, last_line, last_col)
def test_slots_ast():
    # Location and Range instances must be slotted (no per-instance __dict__).
    for instance in (Location("", 0), Range("", 0, 0, 0, 0)):
        assert_slotted(instance)