def add_messages(self, source):
    """Parse `source` as FTL and register its messages and terms.

    An entry already registered under the same id wins; later
    duplicates are silently skipped.
    """
    # TODO - warn/error about duplicates
    resource = FluentParser().parse(source)
    for entry in resource.body:
        if not isinstance(entry, (Message, Term)):
            continue
        entry_id = ast_to_id(entry)
        if entry_id not in self._messages_and_terms:
            self._messages_and_terms[entry_id] = entry
class FluentParser(Parser):
    # Fluent entries can be skipped individually when merging files.
    capabilities = CAN_SKIP

    def __init__(self):
        super(FluentParser, self).__init__()
        # Syntax-level parser from python-fluent; produces the AST we walk.
        self.ftl_parser = FTLParser()

    def walk(self, only_localizable=False):
        """Yield entities covering the loaded Fluent resource.

        Yields FluentMessage/FluentTerm for localizable entries.  Unless
        ``only_localizable`` is true, also yields Whitespace, Junk and
        FluentComment entities for the remaining spans, so that the
        yielded spans together cover the whole file contents.
        """
        if not self.ctx:
            # loading file failed, or we just didn't load anything
            return
        resource = self.ftl_parser.parse(self.ctx.contents)
        # End offset of the previously handled entry; any gap between it
        # and the next entry's start is emitted as Whitespace.
        last_span_end = 0
        for entry in resource.body:
            if not only_localizable:
                if entry.span.start > last_span_end:
                    yield Whitespace(
                        self.ctx, (last_span_end, entry.span.start))
            if isinstance(entry, ftl.Message):
                yield FluentMessage(self.ctx, entry)
            elif isinstance(entry, ftl.Term):
                yield FluentTerm(self.ctx, entry)
            elif isinstance(entry, ftl.Junk):
                # Junk spans include surrounding blank runs; trim them so
                # the Junk entity covers only the unparsable text itself.
                start = entry.span.start
                end = entry.span.end
                # strip leading whitespace
                start += re.match('[ \t\r\n]*', entry.content).end()
                if not only_localizable and entry.span.start < start:
                    yield Whitespace(
                        self.ctx, (entry.span.start, start)
                    )
                # strip trailing whitespace
                ws, we = re.search('[ \t\r\n]*$', entry.content).span()
                end -= we - ws
                yield Junk(self.ctx, (start, end))
                if not only_localizable and end < entry.span.end:
                    yield Whitespace(
                        self.ctx, (end, entry.span.end)
                    )
            elif isinstance(entry, ftl.BaseComment) and not only_localizable:
                span = (entry.span.start, entry.span.end)
                yield FluentComment(self.ctx, span, entry)
            last_span_end = entry.span.end
        # Yield Whitespace at the EOF.
        if not only_localizable:
            eof_offset = len(self.ctx.contents)
            if eof_offset > last_span_end:
                yield Whitespace(self.ctx, (last_span_end, eof_offset))
def diff_resources(left_path, right_path):
    """Write a unified diff of two normalized FTL files to stdout.

    Both files are parsed (without spans) and re-serialized, junk
    included, so that purely cosmetic formatting differences are
    canonicalized away before diffing.
    """
    parser = FluentParser(with_spans=False)
    serializer = FluentSerializer(with_junk=True)
    normalized = []
    for path in (left_path, right_path):
        with codecs.open(path, encoding='utf-8') as source:
            resource = parser.parse(source.read())
        normalized.append(serializer.serialize(resource).splitlines(True))
    left_lines, right_lines = normalized
    sys.stdout.writelines(
        unified_diff(left_lines, right_lines, left_path, right_path))
def transforms_from(ftl, **substitutions):
    """Parse FTL code into a list of Message nodes with Transforms.

    The FTL may use a fabricated COPY function inside of placeables which
    will be converted into actual COPY migration transform.

        new-key = Hardcoded text { COPY("filepath.dtd", "string.key") }

    For convenience, COPY may also refer to transforms_from's keyword
    arguments via the MessageReference syntax:

        transforms_from(\"""
        new-key = Hardcoded text { COPY(file_dtd, "string.key") }
        \""", file_dtd="very/long/path/to/a/file.dtd")

    """
    resource = FluentParser(with_spans=False).parse(ftl)
    # NOTE(review): "IntoTranforms" looks misspelled, but it is the
    # visitor class's actual name elsewhere in this project — renaming
    # would have to happen at its definition, not here.
    return IntoTranforms(substitutions).visit(resource).body
def pretty_ftl(text):
    """Dedent, parse and re-serialize `text`, discarding junk entries."""
    resource = FluentParser().parse(dedent_ftl(text))
    return FluentSerializer(with_junk=False).serialize(resource)
def transforms_from(ftl, **substitutions):
    """Parse FTL code into a list of Message nodes with Transforms.

    The FTL may use a fabricated COPY function inside of placeables which
    will be converted into actual COPY migration transform.

        new-key = Hardcoded text { COPY("filepath.dtd", "string.key") }

    For convenience, COPY may also refer to transforms_from's keyword
    arguments via the MessageReference syntax:

        transforms_from(\"""
        new-key = Hardcoded text { COPY(file_dtd, "string.key") }
        \""", file_dtd="very/long/path/to/a/file.dtd")

    """
    # CONCAT is applied automatically to every Pattern (see
    # into_transforms below), so spelling it out in the FTL is redundant.
    IMPLICIT_TRANSFORMS = ("CONCAT", )
    # These transforms require extra Python-side configuration and thus
    # cannot be expressed through this FTL shorthand.
    FORBIDDEN_TRANSFORMS = ("PLURALS", "REPLACE", "REPLACE_IN_TEXT")

    def into_argument(node):
        """Convert AST node into an argument to migration transforms.

        Accepts string literals (used verbatim) and message references
        (resolved via the `substitutions` keyword arguments); raises
        InvalidTransformError for anything else.
        """
        if isinstance(node, FTL.StringLiteral):
            return node.value
        if isinstance(node, FTL.MessageReference):
            try:
                return substitutions[node.id.name]
            except KeyError:
                raise InvalidTransformError(
                    "Unknown substitution in COPY: {}".format(node.id.name))
        else:
            raise InvalidTransformError(
                "Invalid argument passed to COPY: {}".format(
                    type(node).__name__))

    def into_transforms(node):
        """Convert AST node into a migration transform."""
        if isinstance(node, FTL.Junk):
            # Surface parse errors in the FTL snippet instead of silently
            # emitting Junk into the migration.
            anno = node.annotations[0]
            raise InvalidTransformError(
                "Transform contains parse error: {}, at {}".format(
                    anno.message, anno.span.start))
        if isinstance(node, FTL.CallExpression):
            name = node.callee.name
            if name == "COPY":
                args = (into_argument(arg) for arg in node.positional)
                return COPY(*args)
            if name in IMPLICIT_TRANSFORMS:
                raise NotSupportedError(
                    "{} may not be used with transforms_from(). It runs "
                    "implicitly on all Patterns anyways.".format(name))
            if name in FORBIDDEN_TRANSFORMS:
                raise NotSupportedError(
                    "{} may not be used with transforms_from(). It requires "
                    "additional logic in Python code.".format(name))
            # Any other call expression falls through unchanged.
        if (isinstance(node, FTL.Placeable) and
                isinstance(node.expression, Transform)):
            # Replace the placeable with the transform it's holding.
            # Transforms evaluate to Patterns which aren't valid Placeable
            # expressions.
            return node.expression
        if isinstance(node, FTL.Pattern):
            # Replace the Pattern with CONCAT which is more accepting of its
            # elements. CONCAT takes PatternElements, Expressions and other
            # Patterns (e.g. returned from evaluating transforms).
            return CONCAT(*node.elements)
        return node

    parser = FluentParser(with_spans=False)
    resource = parser.parse(ftl)
    # traverse applies into_transforms bottom-up across the whole AST.
    return resource.traverse(into_transforms).body
def FluentResource(source):
    """Parse `source` and return the resulting Fluent Resource AST."""
    return FluentParser().parse(source)