Example #1
0
File: fluent.py  Project: nijel/translate
    def to_entry(self):
        """Convert this unit into a fluent.syntax AST entry.

        Standalone comment units become bare comment nodes; everything
        else becomes a Message/Term-style entry with a value, attributes
        and an optional attached comment.
        """
        parser = FluentParser(False)

        # Standalone comments carry only their text — no value and no
        # attributes.
        if self._type in (ast.ResourceComment, ast.GroupComment, ast.Comment):
            return self._type(self.getnotes())

        assert self.source is not None
        value = parser.maybe_get_pattern(FluentParserStream(self.source))

        attributes = []
        for attr_id, attr_source in self._attributes.items():
            pattern = parser.maybe_get_pattern(FluentParserStream(attr_source))
            attributes.append(ast.Attribute(ast.Identifier(attr_id), pattern))

        comment = None
        if self.getnotes():
            comment = ast.Comment(self.getnotes())

        # Default to a plain Message when no explicit entry type was set.
        entry_type = ast.Message if self._type is None else self._type
        return entry_type(
            ast.Identifier(self.getid()),
            value=value,
            attributes=attributes,
            comment=comment,
        )
예제 #2
0
 def add_messages(self, source):
     """Parse *source* as FTL and register its messages and terms."""
     resource = FluentParser().parse(source)
     # TODO - warn/error about duplicates
     for entry in resource.body:
         if not isinstance(entry, (Message, Term)):
             continue
         # First definition wins; later duplicates are silently ignored.
         self._messages_and_terms.setdefault(ast_to_id(entry), entry)
예제 #3
0
def diff_resources(left_path, right_path):
    """Print a unified diff of two FTL files to stdout.

    Both files are round-tripped through parse + serialize first so the
    diff compares normalized output rather than raw formatting.
    """
    parser = FluentParser(with_spans=False)
    serializer = FluentSerializer(with_junk=True)
    sides = []
    for path in (left_path, right_path):
        with codecs.open(path, encoding='utf-8') as handle:
            resource = parser.parse(handle.read())
        sides.append(serializer.serialize(resource).splitlines(True))
    sys.stdout.writelines(
        unified_diff(sides[0], sides[1], left_path, right_path)
    )
예제 #4
0
    def execute(self):
        """Update Russian FTL files from grouped Lokalise keys.

        For every key group: serialize the Lokalise keys into an FTL
        message, parse it, and merge updates into the corresponding
        Russian FluentFile on disk. Groups whose Russian file cannot be
        read are skipped with an error log entry.
        """
        for keys, lokalise_keys in self.group.items():
            full_message = FluentSerializedMessage.from_lokalise_keys(
                lokalise_keys)
            parsed_message = FluentParser().parse(full_message)
            ru_full_path = lokalise_keys[0].get_file_path().ru
            ru_file = FluentFile(ru_full_path)
            try:
                ru_file_parsed = ru_file.read_parsed_data()
            # Fix: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt. Exception keeps the best-effort skip.
            except Exception:
                logging.error(f'Файла {ru_file.full_path} не существует')
                continue

            manager = LokaliseFluentAstComparerManager(
                sourse_parsed=ru_file_parsed, target_parsed=parsed_message)

            for_update = manager.for_update()
            # NOTE(review): for_create/for_delete are computed but never
            # used below — confirm whether create/delete handling is
            # missing or these calls can be dropped.
            for_create = manager.for_create()
            for_delete = manager.for_delete()

            if for_update:
                updated_ru_file_parsed = manager.update(for_update)
                updated_ru_file_serialized = FluentSerializer(
                    with_junk=True).serialize(updated_ru_file_parsed)
                ru_file.save_data(updated_ru_file_serialized)

                updated_keys = [el.get_id_name() for el in for_update]
                logging.info(
                    f'Обновлены ключи: {updated_keys} в файле {ru_file.full_path}'
                )
예제 #5
0
    def to_serialized_message(string_message):
        """Round-trip an FTL string through parse and serialize.

        Returns None for falsy input; otherwise the serialized form, or
        '' when serialization produces a falsy value.
        """
        if not string_message:
            return None

        parsed = FluentParser().parse(string_message)
        result = FluentSerializer(with_junk=True).serialize(parsed)
        return result or ''
def migrate_ftl_translations_to_0_6_4(apps, schema):
    """
    Converts all FTL translations to the latest (0.5) syntax and serializes
    them with the latest (0.6.4) serializer to prevent creating duplicate
    translations on save. Also convert corresponding TranslationMemoryEntries.

    See bugs 1441020 and 1442201 for more details.
    """
    parser = FluentParser()
    serializer = FluentSerializer()

    def _normalize(string):
        # Round-trip a single FTL entry through the current
        # parser/serializer pair (shared by both loops below).
        return serializer.serialize_entry(parser.parse_entry(string))

    # Translations
    Translation = apps.get_model("base", "Translation")
    translations = Translation.objects.filter(entity__resource__format="ftl")
    translations_to_update = []

    for t in translations:
        current = t.string
        t.string = _normalize(current)

        # Only persist rows whose serialized form actually changed.
        if t.string != current:
            translations_to_update.append(t)

    bulk_update(translations_to_update, update_fields=["string"])

    # Translation Memory Entries tied to the translations just changed.
    TranslationMemoryEntry = apps.get_model("base", "TranslationMemoryEntry")
    updated_pks = [x.pk for x in translations_to_update]
    tms = TranslationMemoryEntry.objects.filter(
        translation__pk__in=updated_pks)
    tms_to_update = []

    for tm in tms:
        current = tm.target
        tm.target = _normalize(current)

        if tm.target != current:
            tms_to_update.append(tm)

    bulk_update(tms_to_update, update_fields=["target"])
예제 #7
0
def transforms_from(ftl, **substitutions):
    """Compile FTL source into Message nodes carrying migration Transforms.

    A fabricated COPY function inside placeables is rewritten into a real
    COPY migration transform:

        new-key = Hardcoded text { COPY("filepath.dtd", "string.key") }

    COPY arguments may also name this function's keyword arguments via
    MessageReference syntax:

        transforms_from(\"""
        new-key = Hardcoded text { COPY(file_dtd, "string.key") }
        \""", file_dtd="very/long/path/to/a/file.dtd")

    """
    # Spans are unnecessary here: the result feeds the migration visitor,
    # not a serializer.
    resource = FluentParser(with_spans=False).parse(ftl)
    return IntoTranforms(substitutions).visit(resource).body
예제 #8
0
class FluentParser(Parser):
    """compare-locales parser that walks a Fluent (FTL) resource."""

    # Entities produced here may be skipped by compare-locales checks.
    capabilities = CAN_SKIP

    def __init__(self):
        super(FluentParser, self).__init__()
        # Underlying fluent.syntax parser producing the FTL AST.
        self.ftl_parser = FTLParser()

    def walk(self, only_localizable=False):
        """Yield parse entities for the loaded resource in document order.

        Messages, Terms and Junk are always yielded. Whitespace between
        entries and comments are yielded only when *only_localizable* is
        False.
        """
        if not self.ctx:
            # loading file failed, or we just didn't load anything
            return

        resource = self.ftl_parser.parse(self.ctx.contents)

        # End offset of the previous entry; gaps up to the next entry's
        # start are reported as Whitespace.
        last_span_end = 0

        for entry in resource.body:
            if not only_localizable:
                if entry.span.start > last_span_end:
                    yield Whitespace(
                        self.ctx, (last_span_end, entry.span.start))

            if isinstance(entry, ftl.Message):
                yield FluentMessage(self.ctx, entry)
            elif isinstance(entry, ftl.Term):
                yield FluentTerm(self.ctx, entry)
            elif isinstance(entry, ftl.Junk):
                # Junk spans include surrounding whitespace; trim it off
                # and report the trimmed parts as Whitespace entities.
                start = entry.span.start
                end = entry.span.end
                # strip leading whitespace
                start += re.match('[ \t\r\n]*', entry.content).end()
                if not only_localizable and entry.span.start < start:
                    yield Whitespace(
                        self.ctx, (entry.span.start, start)
                    )
                # strip trailing whitespace
                ws, we = re.search('[ \t\r\n]*$', entry.content).span()
                end -= we - ws
                yield Junk(self.ctx, (start, end))
                if not only_localizable and end < entry.span.end:
                    yield Whitespace(
                        self.ctx, (end, entry.span.end)
                    )
            elif isinstance(entry, ftl.BaseComment) and not only_localizable:
                span = (entry.span.start, entry.span.end)
                yield FluentComment(self.ctx, span, entry)

            last_span_end = entry.span.end

        # Yield Whitespace at the EOF.
        if not only_localizable:
            eof_offset = len(self.ctx.contents)
            if eof_offset > last_span_end:
                yield Whitespace(self.ctx, (last_span_end, eof_offset))
예제 #9
0
File: fluent.py  Project: Floflis/gecko-b2g
class FluentParser(Parser):
    """Parser wrapper exposing an FTL file's entries to compare-locales."""

    # Allows checks to skip entities from this parser.
    capabilities = CAN_SKIP

    def __init__(self):
        super(FluentParser, self).__init__()
        # The real fluent.syntax parser; walk() drives it over file contents.
        self.ftl_parser = FTLParser()

    def walk(self, only_localizable=False):
        """Generate entities for the current file, preserving file order.

        When *only_localizable* is True, Whitespace and comment entities
        are suppressed and only Messages, Terms and Junk are emitted.
        """
        if not self.ctx:
            # loading file failed, or we just didn't load anything
            return

        resource = self.ftl_parser.parse(self.ctx.contents)

        # Tracks where the previous entry ended so inter-entry gaps can
        # be emitted as Whitespace.
        last_span_end = 0

        for entry in resource.body:
            if not only_localizable:
                if entry.span.start > last_span_end:
                    yield Whitespace(
                        self.ctx, (last_span_end, entry.span.start))

            if isinstance(entry, ftl.Message):
                yield FluentMessage(self.ctx, entry)
            elif isinstance(entry, ftl.Term):
                yield FluentTerm(self.ctx, entry)
            elif isinstance(entry, ftl.Junk):
                # A Junk span may contain leading/trailing whitespace;
                # split that off so Junk covers only the bad content.
                start = entry.span.start
                end = entry.span.end
                # strip leading whitespace
                start += re.match('[ \t\r\n]*', entry.content).end()
                if not only_localizable and entry.span.start < start:
                    yield Whitespace(
                        self.ctx, (entry.span.start, start)
                    )
                # strip trailing whitespace
                ws, we = re.search('[ \t\r\n]*$', entry.content).span()
                end -= we - ws
                yield Junk(self.ctx, (start, end))
                if not only_localizable and end < entry.span.end:
                    yield Whitespace(
                        self.ctx, (end, entry.span.end)
                    )
            elif isinstance(entry, ftl.BaseComment) and not only_localizable:
                span = (entry.span.start, entry.span.end)
                yield FluentComment(self.ctx, span, entry)

            last_span_end = entry.span.end

        # Yield Whitespace at the EOF.
        if not only_localizable:
            eof_offset = len(self.ctx.contents)
            if eof_offset > last_span_end:
                yield Whitespace(self.ctx, (last_span_end, eof_offset))
예제 #10
0
def migrate_ftl_translations_to_0_6_4(apps, schema):
    """
    Converts all FTL translations to the latest (0.5) syntax and serializes
    them with the latest (0.6.4) serializer to prevent creating duplicate
    translations on save. Also convert corresponding TranslationMemoryEntries.

    See bugs 1441020 and 1442201 for more details.
    """
    parser = FluentParser()
    serializer = FluentSerializer()

    def _reserialize(string):
        # Shared round-trip used for both Translation.string and
        # TranslationMemoryEntry.target below.
        return serializer.serialize_entry(parser.parse_entry(string))

    # Translations
    Translation = apps.get_model('base', 'Translation')
    translations = Translation.objects.filter(entity__resource__format='ftl')
    translations_to_update = []

    for t in translations:
        current = t.string
        t.string = _reserialize(current)

        # Collect only rows whose normalized form differs.
        if t.string != current:
            translations_to_update.append(t)

    bulk_update(translations_to_update, update_fields=['string'])

    # Translation Memory Entries for the updated translations.
    TranslationMemoryEntry = apps.get_model('base', 'TranslationMemoryEntry')
    updated_pks = [x.pk for x in translations_to_update]
    tms = TranslationMemoryEntry.objects.filter(translation__pk__in=updated_pks)
    tms_to_update = []

    for tm in tms:
        current = tm.target
        tm.target = _reserialize(current)

        if tm.target != current:
            tms_to_update.append(tm)

    bulk_update(tms_to_update, update_fields=['target'])
예제 #11
0
from allauth.utils import get_request_param
from bleach.linkifier import Linker
from django_jinja import library
from fluent.syntax import FluentParser, FluentSerializer, ast
from fluent.syntax.serializer import serialize_expression

from django import template
from django.conf import settings
from django.contrib.humanize.templatetags import humanize
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.serializers.json import DjangoJSONEncoder
from django.urls import reverse
from django.utils.http import url_has_allowed_host_and_scheme

# Django template tag registry for this module's helpers.
register = template.Library()
# Module-level Fluent parser/serializer instances, reused across calls.
parser = FluentParser()
serializer = FluentSerializer()


@library.global_function
def url(viewname, *args, **kwargs):
    """Helper exposing Django's ``reverse`` to templates."""
    resolved = reverse(viewname, args=args, kwargs=kwargs)
    return resolved


@library.global_function
def return_url(request):
    """Get an url of the previous page.

    Falls back to SITE_URL when the candidate URL points at a
    disallowed host or scheme.
    """
    url = request.POST.get("return_url", request.META.get("HTTP_REFERER", "/"))
    if not url_has_allowed_host_and_scheme(url, settings.ALLOWED_HOSTS):
        return settings.SITE_URL
    # Fix: previously the safe path fell through and returned None.
    return url
예제 #12
0
 def __init__(self):
     # Initialize base Parser state, then attach the fluent.syntax parser
     # used for actual FTL parsing.
     super(FluentParser, self).__init__()
     self.ftl_parser = FTLParser()
예제 #13
0
def remove_comments_from_ftl_translations(apps, schema):
    """
    Remove comments from the string column of FTL Translations and Entities,
    and source & target columns of FTL TranslationMemoryEntries.

    See bug 1501168 for more details.
    """
    parser = FluentParser()
    serializer = FluentSerializer()

    def _strip_comment(string):
        # Re-serialize one FTL entry with its attached comment removed;
        # shared by all three loops below.
        entry = parser.parse_entry(string)
        entry.comment = None
        return serializer.serialize_entry(entry)

    # Translations
    Translation = apps.get_model("base", "Translation")
    translations = Translation.objects.filter(entity__resource__format="ftl")
    translations_to_update = []

    for t in translations:
        current = t.string
        t.string = _strip_comment(current)

        if t.string != current:
            translations_to_update.append(t)

    bulk_update(translations_to_update, update_fields=["string"])

    # Entities
    Entity = apps.get_model("base", "Entity")
    entities = Entity.objects.filter(resource__format="ftl")
    entities_to_update = []

    for e in entities:
        current = e.string
        e.string = _strip_comment(current)

        if e.string != current:
            entities_to_update.append(e)

    bulk_update(entities_to_update, update_fields=["string"])

    # Translation Memory Entries: both source and target carry FTL.
    TranslationMemoryEntry = apps.get_model("base", "TranslationMemoryEntry")
    tmes = TranslationMemoryEntry.objects.filter(entity__resource__format="ftl")
    tmes_to_update = []

    for tme in tmes:
        current_source = tme.source
        tme.source = _strip_comment(current_source)

        current_target = tme.target
        tme.target = _strip_comment(current_target)

        if tme.source != current_source or tme.target != current_target:
            tmes_to_update.append(tme)

    bulk_update(tmes_to_update, update_fields=["source", "target"])
예제 #14
0
 def pretty_ftl(text):
     """Normalize FTL source: dedent, parse, re-serialize without junk."""
     resource = FluentParser().parse(dedent_ftl(text))
     return FluentSerializer(with_junk=False).serialize(resource)
예제 #15
0
 def pretty_variant_key(text, index):
     """Serialize the key of the *index*-th variant of the entry's first
     select expression."""
     entry = FluentParser().parse_entry(dedent_ftl(text))
     select = entry.value.elements[0].expression
     return serialize_variant_key(select.variants[index].key)
예제 #16
0
 def pretty_expr(text):
     """Serialize the first placeable expression of the parsed entry."""
     entry = FluentParser().parse_entry(dedent_ftl(text))
     return serialize_expression(entry.value.elements[0].expression)
예제 #17
0
def FluentResource(source):
    """Parse *source* FTL text into a fluent.syntax Resource node."""
    return FluentParser().parse(source)
예제 #18
0
 def __init__(self):
     # Set up inherited Parser state before creating the fluent.syntax
     # parser instance this wrapper delegates to.
     super(FluentParser, self).__init__()
     self.ftl_parser = FTLParser()
예제 #19
0
def transforms_from(ftl, **substitutions):
    """Parse FTL code into a list of Message nodes with Transforms.

    The FTL may use a fabricated COPY function inside of placeables which
    will be converted into actual COPY migration transform.

        new-key = Hardcoded text { COPY("filepath.dtd", "string.key") }

    For convenience, COPY may also refer to transforms_from's keyword
    arguments via the MessageReference syntax:

        transforms_from(\"""
        new-key = Hardcoded text { COPY(file_dtd, "string.key") }
        \""", file_dtd="very/long/path/to/a/file.dtd")

    """

    # CONCAT is applied implicitly to every Pattern (see into_transforms
    # below), so spelling it out in the FTL source is redundant.
    IMPLICIT_TRANSFORMS = ("CONCAT", )
    # Transforms that require extra Python-side configuration and so
    # cannot be expressed in plain FTL passed to this helper.
    FORBIDDEN_TRANSFORMS = ("PLURALS", "REPLACE", "REPLACE_IN_TEXT")

    def into_argument(node):
        """Convert AST node into an argument to migration transforms."""
        # String literals are used verbatim; MessageReferences are looked
        # up in this call's keyword arguments (see docstring above).
        if isinstance(node, FTL.StringLiteral):
            return node.value
        if isinstance(node, FTL.MessageReference):
            try:
                return substitutions[node.id.name]
            except KeyError:
                raise InvalidTransformError(
                    "Unknown substitution in COPY: {}".format(node.id.name))
        else:
            raise InvalidTransformError(
                "Invalid argument passed to COPY: {}".format(
                    type(node).__name__))

    def into_transforms(node):
        """Convert AST node into a migration transform."""

        # Junk means the FTL snippet itself failed to parse.
        if isinstance(node, FTL.Junk):
            anno = node.annotations[0]
            raise InvalidTransformError(
                "Transform contains parse error: {}, at {}".format(
                    anno.message, anno.span.start))
        if isinstance(node, FTL.CallExpression):
            name = node.callee.name
            if name == "COPY":
                args = (into_argument(arg) for arg in node.positional)
                return COPY(*args)
            if name in IMPLICIT_TRANSFORMS:
                raise NotSupportedError(
                    "{} may not be used with transforms_from(). It runs "
                    "implicitly on all Patterns anyways.".format(name))
            if name in FORBIDDEN_TRANSFORMS:
                raise NotSupportedError(
                    "{} may not be used with transforms_from(). It requires "
                    "additional logic in Python code.".format(name))
        if (isinstance(node, FTL.Placeable)
                and isinstance(node.expression, Transform)):
            # Replace the placeable with the transform it's holding.
            # Transforms evaluate to Patterns which aren't valid Placeable
            # expressions.
            return node.expression
        if isinstance(node, FTL.Pattern):
            # Replace the Pattern with CONCAT which is more accepting of its
            # elements. CONCAT takes PatternElements, Expressions and other
            # Patterns (e.g. returned from evaluating transforms).
            return CONCAT(*node.elements)
        return node

    # Spans are omitted: the synthetic transform nodes have no meaningful
    # source positions.
    parser = FluentParser(with_spans=False)
    resource = parser.parse(ftl)
    return resource.traverse(into_transforms).body
예제 #20
0
    def format_serialized_file_data(cls, file_data: typing.AnyStr):
        """Parse raw FTL file data and re-serialize it, keeping junk entries."""
        resource = FluentParser().parse(file_data)
        serializer = FluentSerializer(with_junk=True)
        return serializer.serialize(resource)
예제 #21
0
    def parse_data(self, file_data: typing.AnyStr):
        """Parse raw FTL file data into a fluent.syntax Resource."""
        from fluent.syntax import FluentParser

        parser = FluentParser()
        return parser.parse(file_data)