def execute(self):
    """Sync each Lokalise key group into its Russian FluentFile target.

    For every group: serialize the Lokalise keys into one FTL document,
    parse it, diff it against the parsed Russian file via the comparer
    manager, and write back any keys that need updating. Missing target
    files are logged and skipped.
    """
    for keys in self.group:
        # Build a single FTL document from all Lokalise keys in this group.
        full_message = FluentSerializedMessage.from_lokalise_keys(
            self.group[keys])
        parsed_message = FluentParser().parse(full_message)

        # All keys in a group share one target file path.
        ru_full_path = self.group[keys][0].get_file_path().ru
        ru_file = FluentFile(ru_full_path)
        try:
            ru_file_parsed = ru_file.read_parsed_data()
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception. A missing/unreadable
            # file is deliberately non-fatal: log and move to the next group.
            logging.error(f'Файла {ru_file.full_path} не существует')
            continue

        manager = LokaliseFluentAstComparerManager(
            sourse_parsed=ru_file_parsed, target_parsed=parsed_message)
        for_update = manager.for_update()
        for_create = manager.for_create()
        for_delete = manager.for_delete()

        # Idiomatic truthiness instead of `if len(for_update):`.
        if for_update:
            updated_ru_file_parsed = manager.update(for_update)
            updated_ru_file_serialized = FluentSerializer(
                with_junk=True).serialize(updated_ru_file_parsed)
            ru_file.save_data(updated_ru_file_serialized)
            updated_keys = [el.get_id_name() for el in for_update]
            logging.info(
                f'Обновлены ключи: {updated_keys} в файле {ru_file.full_path}'
            )
def to_entry(self):
    """Convert this unit into a fluent.syntax AST entry.

    Standalone comment types become bare comment nodes; everything else
    becomes a Message (or the stored entry type) with a parsed value,
    parsed attributes, and an optional attached comment.
    """
    parser = FluentParser(False)

    # Standalone comments carry only note text — no value, no attributes.
    if self._type in (ast.ResourceComment, ast.GroupComment, ast.Comment):
        return self._type(self.getnotes())

    assert self.source is not None
    pattern = parser.maybe_get_pattern(FluentParserStream(self.source))

    # Each stored attribute source is parsed into its own Pattern.
    attributes = []
    for attr_id, attr_source in self._attributes.items():
        attributes.append(
            ast.Attribute(
                ast.Identifier(attr_id),
                parser.maybe_get_pattern(FluentParserStream(attr_source)),
            )
        )

    comment = None
    if self.getnotes():
        comment = ast.Comment(self.getnotes())

    entry_type = self._type if self._type is not None else ast.Message
    return entry_type(
        ast.Identifier(self.getid()),
        value=pattern,
        attributes=attributes,
        comment=comment,
    )
def to_serialized_message(string_message):
    """Round-trip an FTL string through parser and serializer.

    Returns None for falsy input; otherwise the serialized text, or ''
    when serialization produced nothing truthy.
    """
    if not string_message:
        return None
    resource = FluentParser().parse(string_message)
    text = FluentSerializer(with_junk=True).serialize(resource)
    return text or ''
def add_messages(self, source):
    """Parse FTL `source` and register each Message/Term under its id."""
    resource = FluentParser().parse(source)
    # TODO - warn/error about duplicates
    for entry in resource.body:
        if not isinstance(entry, (Message, Term)):
            continue
        full_id = ast_to_id(entry)
        # First occurrence wins; later duplicates are silently ignored.
        self._messages_and_terms.setdefault(full_id, entry)
def diff_resources(left_path, right_path):
    """Write a unified diff of two normalized FTL files to stdout.

    Both files are re-serialized before diffing so purely cosmetic
    formatting differences do not appear in the output.
    """
    parser = FluentParser(with_spans=False)
    serializer = FluentSerializer(with_junk=True)

    def normalized_lines(path):
        # Parse and re-serialize to canonicalize the file's formatting.
        with codecs.open(path, encoding='utf-8') as fh:
            resource = parser.parse(fh.read())
        return serializer.serialize(resource).splitlines(True)

    left = normalized_lines(left_path)
    right = normalized_lines(right_path)
    sys.stdout.writelines(unified_diff(left, right, left_path, right_path))
def migrate_ftl_translations_to_0_6_4(apps, schema):
    """
    Converts all FTL translations to the latest (0.5) syntax and serializes
    them with the latest (0.6.4) serializer to prevent creating duplicate
    translations on save. Also convert corresponding TranslationMemoryEntries.

    See bugs 1441020 and 1442201 for more details.
    """
    parser = FluentParser()
    serializer = FluentSerializer()

    def normalize(text):
        # Round-trip through parse/serialize to canonicalize the syntax.
        return serializer.serialize_entry(parser.parse_entry(text))

    # Translations
    Translation = apps.get_model("base", "Translation")
    translations_to_update = []
    for t in Translation.objects.filter(entity__resource__format="ftl"):
        normalized = normalize(t.string)
        if normalized != t.string:
            t.string = normalized
            translations_to_update.append(t)
    bulk_update(translations_to_update, update_fields=["string"])

    # Translation Memory Entries of the translations changed above.
    TranslationMemoryEntry = apps.get_model("base", "TranslationMemoryEntry")
    updated_pks = [x.pk for x in translations_to_update]
    tms_to_update = []
    for tm in TranslationMemoryEntry.objects.filter(
            translation__pk__in=updated_pks):
        normalized = normalize(tm.target)
        if normalized != tm.target:
            tm.target = normalized
            tms_to_update.append(tm)
    bulk_update(tms_to_update, update_fields=["target"])
def transforms_from(ftl, **substitutions):
    """Parse FTL code into a list of Message nodes with Transforms.

    The FTL may use a fabricated COPY function inside of placeables which
    will be converted into actual COPY migration transform.

        new-key = Hardcoded text { COPY("filepath.dtd", "string.key") }

    For convenience, COPY may also refer to transforms_from's keyword
    arguments via the MessageReference syntax:

        transforms_from(\"""
        new-key = Hardcoded text { COPY(file_dtd, "string.key") }
        \""", file_dtd="very/long/path/to/a/file.dtd")
    """
    resource = FluentParser(with_spans=False).parse(ftl)
    return IntoTranforms(substitutions).visit(resource).body
from allauth.utils import get_request_param
from bleach.linkifier import Linker
from django_jinja import library
from fluent.syntax import FluentParser, FluentSerializer, ast
from fluent.syntax.serializer import serialize_expression
from django import template
from django.conf import settings
from django.contrib.humanize.templatetags import humanize
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.serializers.json import DjangoJSONEncoder
from django.urls import reverse
from django.utils.http import url_has_allowed_host_and_scheme

# Module-level singletons shared by the template helpers below.
register = template.Library()
parser = FluentParser()
serializer = FluentSerializer()


@library.global_function
def url(viewname, *args, **kwargs):
    """Helper for Django's ``reverse`` in templates."""
    return reverse(viewname, args=args, kwargs=kwargs)


@library.global_function
def return_url(request):
    """Get an url of the previous page."""
    # Prefer an explicit return_url from the POST body, falling back to the
    # HTTP Referer header, then to the site root.
    url = request.POST.get("return_url", request.META.get("HTTP_REFERER", "/"))
    # Open-redirect guard: only same-host (per ALLOWED_HOSTS) URLs pass.
    if not url_has_allowed_host_and_scheme(url, settings.ALLOWED_HOSTS):
        return settings.SITE_URL
    # NOTE(review): the success-path return appears to fall outside this
    # view of the file — confirm the function returns `url` after the guard.
def remove_comments_from_ftl_translations(apps, schema):
    """
    Remove comments from the string column of FTL Translations and Entities,
    and source & target columns of FTL TranslationMemoryEntries.

    See bug 1501168 for more details.
    """
    parser = FluentParser()
    serializer = FluentSerializer()

    def strip_comment(text):
        # Parse, drop the attached comment, and re-serialize the entry.
        entry = parser.parse_entry(text)
        entry.comment = None
        return serializer.serialize_entry(entry)

    # Translations
    Translation = apps.get_model("base", "Translation")
    translations_to_update = []
    for t in Translation.objects.filter(entity__resource__format="ftl"):
        stripped = strip_comment(t.string)
        if stripped != t.string:
            t.string = stripped
            translations_to_update.append(t)
    bulk_update(translations_to_update, update_fields=["string"])

    # Entities
    Entity = apps.get_model("base", "Entity")
    entities_to_update = []
    for e in Entity.objects.filter(resource__format="ftl"):
        stripped = strip_comment(e.string)
        if stripped != e.string:
            e.string = stripped
            entities_to_update.append(e)
    bulk_update(entities_to_update, update_fields=["string"])

    # Translation Memory Entries: both source and target columns.
    TranslationMemoryEntry = apps.get_model("base", "TranslationMemoryEntry")
    tmes_to_update = []
    for tme in TranslationMemoryEntry.objects.filter(
            entity__resource__format="ftl"):
        new_source = strip_comment(tme.source)
        new_target = strip_comment(tme.target)
        changed = new_source != tme.source or new_target != tme.target
        tme.source = new_source
        tme.target = new_target
        if changed:
            tmes_to_update.append(tme)
    bulk_update(tmes_to_update, update_fields=["source", "target"])
def pretty_ftl(text):
    """Dedent, parse, and re-serialize FTL text, dropping junk entries."""
    resource = FluentParser().parse(dedent_ftl(text))
    return FluentSerializer(with_junk=False).serialize(resource)
def pretty_variant_key(text, index):
    """Serialize the key of the ``index``-th variant of the entry's first
    select expression."""
    entry = FluentParser().parse_entry(dedent_ftl(text))
    select = entry.value.elements[0].expression
    return serialize_variant_key(select.variants[index].key)
def pretty_expr(text):
    """Serialize the expression held by the entry's first placeable."""
    entry = FluentParser().parse_entry(dedent_ftl(text))
    return serialize_expression(entry.value.elements[0].expression)
def parse_data(self, file_data: typing.AnyStr):
    """Parse raw FTL file data into a fluent.syntax Resource."""
    # Imported lazily so the dependency is only paid when parsing happens.
    from fluent.syntax import FluentParser

    parser = FluentParser()
    return parser.parse(file_data)
def format_serialized_file_data(cls, file_data: typing.AnyStr):
    """Normalize FTL data by parsing and re-serializing it (junk kept)."""
    resource = FluentParser().parse(file_data)
    serializer = FluentSerializer(with_junk=True)
    return serializer.serialize(resource)
def transforms_from(ftl, **substitutions):
    """Parse FTL code into a list of Message nodes with Transforms.

    The FTL may use a fabricated COPY function inside of placeables which
    will be converted into actual COPY migration transform.

        new-key = Hardcoded text { COPY("filepath.dtd", "string.key") }

    For convenience, COPY may also refer to transforms_from's keyword
    arguments via the MessageReference syntax:

        transforms_from(\"""
        new-key = Hardcoded text { COPY(file_dtd, "string.key") }
        \""", file_dtd="very/long/path/to/a/file.dtd")
    """
    # CONCAT is applied implicitly to every Pattern (see into_transforms),
    # so spelling it in the FTL is redundant and rejected.
    IMPLICIT_TRANSFORMS = ("CONCAT", )
    # These transforms need extra Python-side configuration and cannot be
    # expressed in plain FTL.
    FORBIDDEN_TRANSFORMS = ("PLURALS", "REPLACE", "REPLACE_IN_TEXT")

    def into_argument(node):
        """Convert AST node into an argument to migration transforms."""
        if isinstance(node, FTL.StringLiteral):
            # Only COPY string literals are turned into Python strings.
            return node.value
        if isinstance(node, FTL.MessageReference):
            # A bare identifier refers to one of the keyword arguments
            # passed to transforms_from.
            try:
                return substitutions[node.id.name]
            except KeyError:
                raise InvalidTransformError(
                    "Unknown substitution in COPY: {}".format(node.id.name))
        else:
            raise InvalidTransformError(
                "Invalid argument passed to COPY: {}".format(
                    type(node).__name__))

    def into_transforms(node):
        """Convert AST node into a migration transform."""
        if isinstance(node, FTL.Junk):
            # Surface the first parse error instead of silently keeping junk.
            anno = node.annotations[0]
            raise InvalidTransformError(
                "Transform contains parse error: {}, at {}".format(
                    anno.message, anno.span.start))
        if isinstance(node, FTL.CallExpression):
            name = node.callee.name
            if name == "COPY":
                # Positional arguments become COPY's Python arguments.
                args = (into_argument(arg) for arg in node.positional)
                return COPY(*args)
            if name in IMPLICIT_TRANSFORMS:
                raise NotSupportedError(
                    "{} may not be used with transforms_from(). It runs "
                    "implicitly on all Patterns anyways.".format(name))
            if name in FORBIDDEN_TRANSFORMS:
                raise NotSupportedError(
                    "{} may not be used with transforms_from(). It requires "
                    "additional logic in Python code.".format(name))
        if (isinstance(node, FTL.Placeable)
                and isinstance(node.expression, Transform)):
            # Replace the placeable with the transform it's holding.
            # Transforms evaluate to Patterns which aren't valid Placeable
            # expressions.
            return node.expression
        if isinstance(node, FTL.Pattern):
            # Replace the Pattern with CONCAT which is more accepting of its
            # elements. CONCAT takes PatternElements, Expressions and other
            # Patterns (e.g. returned from evaluating transforms).
            return CONCAT(*node.elements)
        # Any other node is passed through unchanged.
        return node

    parser = FluentParser(with_spans=False)
    resource = parser.parse(ftl)
    # traverse applies into_transforms bottom-up across the whole resource.
    return resource.traverse(into_transforms).body
def FluentResource(source):
    """Parse FTL ``source`` text into a fluent.syntax Resource."""
    return FluentParser().parse(source)