Example #1
    def __init__(self, lang, reference_dir, localization_dir):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        try:
            self.plural_categories = CATEGORIES_BY_LOCALE[lang]
        except KeyError as locale_key:
            logger = logging.getLogger('migrate')
            logger.warning('Plural rule for "{}" is not defined in '
                           'compare-locales'.format(locale_key))
            self.plural_categories = ('one', 'other')

        # Paths to directories with input data, relative to CWD.
        self.reference_dir = reference_dir
        self.localization_dir = localization_dir

        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}
        self.target_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # A dict whose keys are `(path, key)` tuples corresponding to target
        # FTL translations, and values are sets of `(path, key)` tuples
        # corresponding to localized entities which will be migrated.
        self.dependencies = {}
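
The fallback above is the crux of Example #1: look the locale up, and degrade to the minimal ('one', 'other') rule when it is unknown. A standalone sketch of that logic, with a hypothetical CATEGORIES_BY_LOCALE mapping:

# Hypothetical stand-in for compare-locales' CATEGORIES_BY_LOCALE.
CATEGORIES_BY_LOCALE = {
    'en': ('one', 'other'),
    'pl': ('one', 'few', 'many', 'other'),
}

def plural_categories_for(lang):
    try:
        return CATEGORIES_BY_LOCALE[lang]
    except KeyError:
        # Same fallback as above: the minimal English-like rule.
        return ('one', 'other')

assert plural_categories_for('pl') == ('one', 'few', 'many', 'other')
assert plural_categories_for('xx') == ('one', 'other')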
Example #2
    def __init__(
        self, lang, reference_dir, localization_dir, enforce_translated=False
    ):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        self.plural_categories = get_plural(lang)
        if self.plural_categories is None:
            logger = logging.getLogger('migrate')
            logger.warning(
                'Plural rule for "{}" is not defined in '
                'compare-locales'.format(lang))
            self.plural_categories = ('one', 'other')

        self.enforce_translated = enforce_translated
        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}
        self.target_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # The evaluator instance is an AST transformer capable of walking an
        # AST hierarchy and evaluating nodes which are migration Transforms.
        self.evaluator = Evaluator(self)
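
Example #2 delegates transform evaluation to an Evaluator, described as an AST transformer. A minimal sketch of that pattern, assuming fluent.syntax's Transformer base class (fluent.syntax.visitor in current releases) and assuming fluent.migrate's Transform nodes are callable with the context; an illustration, not the project's actual class:

from fluent.migrate.transforms import Transform
from fluent.syntax.visitor import Transformer

class SketchEvaluator(Transformer):
    def __init__(self, ctx):
        self.ctx = ctx

    def visit(self, node):
        # Migration Transforms evaluate themselves against the context
        # (assumed callable); regular AST nodes are walked as usual.
        if isinstance(node, Transform):
            return node(self.ctx)
        return super().visit(node)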
Example #3
class TestASTJSON(unittest.TestCase):
    maxDiff = None

    def setUp(self):
        self.parser = FluentParser()

    def test_simple_resource(self):
        input = """\
            foo = Foo
        """

        ast1 = self.parser.parse(dedent_ftl(input))
        json1 = ast1.to_json()
        ast2 = from_json(json1)
        json2 = ast2.to_json()

        self.assertEqual(json1, json2)

    def test_complex_resource(self):
        input = """\
            ### A Resource comment

            # A comment about shared-photos
            shared-photos =
                { $user_name } { $photo_count ->
                    [0] hasn't added any photos yet
                    [one] added a new photo
                   *[other] added { $photo_count } new photos
                }.


            ## A Section comment

            // A Syntax 0.4 comment about liked-comment
            liked-comment =
                { $user_name } liked your comment on { $user_gender ->
                    [male] his
                    [female] her
                   *[other] their
                } post.
        """

        ast1 = self.parser.parse(dedent_ftl(input))
        json1 = ast1.to_json()
        ast2 = from_json(json1)
        json2 = ast2.to_json()

        self.assertEqual(json1, json2)

    def test_syntax_error(self):
        input = """\
            foo = Foo {
        """

        ast1 = self.parser.parse(dedent_ftl(input))
        json1 = ast1.to_json()
        ast2 = from_json(json1)
        json2 = ast2.to_json()

        self.assertEqual(json1, json2)
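
The three tests above all assert the same invariant: serializing an AST to JSON and rebuilding it with from_json round-trips, even for Junk produced by a syntax error. A compact sketch outside unittest:

from fluent.syntax import FluentParser
from fluent.syntax.ast import from_json

ast1 = FluentParser().parse("foo = Foo\n")
json1 = ast1.to_json()
assert from_json(json1).to_json() == json1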
Example #4
    def __init__(self, lang, reference_dir, localization_dir):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        try:
            self.plural_categories = get_plural_categories(lang)
        except RuntimeError as e:
            logging.getLogger('migrate').warn(e)
            self.plural_categories = get_plural_categories('en')

        # Paths to directories with input data, relative to CWD.
        self.reference_dir = reference_dir
        self.localization_dir = localization_dir

        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # A dict whose keys are `(path, key)` tuples corresponding to target
        # FTL translations, and values are sets of `(path, key)` tuples
        # corresponding to localized entities which will be migrated.
        self.dependencies = {}
Example #5
    def test_differ_with_spans(self):
        parser = FluentParser()

        strings = [
            ("foo = Foo", "foo =   Foo"),
            ("foo = { $arg }", "foo = {  $arg  }"),
        ]

        messages = [(parser.parse_entry(a), parser.parse_entry(b))
                    for a, b in strings]

        for a, b in messages:
            self.assertFalse(a.equals(b, ignored_fields=None))
Example #6
    def test_default_behavior(self):
        parser = FluentParser()

        strings = [
            ("foo = Foo", "foo = Foo"),
            ("foo = Foo", "foo =   Foo"),
            ("foo = { $arg }", "foo = {  $arg  }"),
        ]

        messages = [(parser.parse_entry(a), parser.parse_entry(b))
                    for a, b in strings]

        for a, b in messages:
            self.assertTrue(a.equals(b))
Example #7
    def test_parser_without_spans(self):
        parser = FluentParser(with_spans=False)

        strings = [
            ("foo = Foo", "foo = Foo"),
            ("foo = Foo", "foo =   Foo"),
            ("foo = { $arg }", "foo = {  $arg  }"),
        ]

        messages = [(parser.parse_entry(a), parser.parse_entry(b))
                    for a, b in strings]

        for a, b in messages:
            self.assertTrue(a.equals(b))
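
Examples #5 through #7 differ only in how spans are treated by BaseNode.equals: spans are in ignored_fields by default, so whitespace-only differences compare equal; passing ignored_fields=None makes them significant. A short sketch:

from fluent.syntax import FluentParser

parser = FluentParser()
a = parser.parse_entry("foo = Foo")
b = parser.parse_entry("foo =   Foo")  # same message, different spans

assert a.equals(b)                           # spans ignored by default
assert not a.equals(b, ignored_fields=None)  # spans compared explicitly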
Example #8
    def __init__(self, lang, reference_dir, localization_dir):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        try:
            self.plural_categories = get_plural_categories(lang)
        except RuntimeError as e:
            logging.getLogger('migrate').warning(e)
            self.plural_categories = get_plural_categories('en')

        # Paths to directories with input data, relative to CWD.
        self.reference_dir = reference_dir
        self.localization_dir = localization_dir

        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # A dict whose keys are `(path, key)` tuples corresponding to target
        # FTL translations, and values are sets of `(path, key)` tuples
        # corresponding to localized entities which will be migrated.
        self.dependencies = {}
Example #9
    def test(self):
        resource = FluentParser().parse(
            dedent_ftl('''\
        one = Message
        two = Messages
        three = Has a
            .an = Message string in the Attribute
        '''))
        prior_res_id = id(resource)
        prior_msg_id = id(resource.body[1].value)
        backup = resource.clone()
        transformed = ReplaceTransformer('Message', 'Term').visit(resource)
        self.assertEqual(prior_res_id, id(transformed))
        self.assertEqual(prior_msg_id, id(transformed.body[1].value))
        self.assertFalse(transformed.equals(backup))
        self.assertEqual(transformed.body[1].value.elements[0].value, 'Terms')
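
Example #9 relies on a test-local ReplaceTransformer. A plausible definition, assuming fluent.syntax's Transformer base class (fluent.syntax.visitor in current releases), which mutates nodes in place and therefore preserves the object identities the test asserts:

from fluent.syntax.visitor import Transformer

class ReplaceTransformer(Transformer):
    """Replace `before` with `after` in every TextElement, in place."""
    def __init__(self, before, after):
        self.before = before
        self.after = after

    def visit_TextElement(self, node):
        node.value = node.value.replace(self.before, self.after)
        return node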
Example #10
class TestOrderEquals(unittest.TestCase):
    def setUp(self):
        self.parser = FluentParser()

    def parse_ftl_entry(self, string):
        return self.parser.parse_entry(dedent_ftl(string))

    def test_attributes(self):
        message1 = self.parse_ftl_entry("""\
            foo
                .attr1 = Attr1
                .attr2 = Attr2
        """)
        message2 = self.parse_ftl_entry("""\
            foo
                .attr2 = Attr2
                .attr1 = Attr1
        """)

        self.assertTrue(message1.equals(message2))
        self.assertTrue(message2.equals(message1))

    def test_variants(self):
        message1 = self.parse_ftl_entry("""\
            foo =
                { $num ->
                    [a] A
                   *[b] B
                }
        """)
        message2 = self.parse_ftl_entry("""\
            foo =
                { $num ->
                   *[b] B
                    [a] A
                }
        """)

        self.assertTrue(message1.equals(message2))
        self.assertTrue(message2.equals(message1))

    def test_variants_with_numbers(self):
        message1 = self.parse_ftl_entry("""\
            foo =
                { $num ->
                    [1] A
                   *[b] B
                }
        """)
        message2 = self.parse_ftl_entry("""\
            foo =
                { $num ->
                   *[b] B
                    [1] A
                }
        """)

        self.assertTrue(message1.equals(message2))
        self.assertTrue(message2.equals(message1))
Example #11
    def test_resource(self):
        resource = FluentParser().parse(
            dedent_ftl('''\
        one = Message
        # Comment
        two = Messages
        three = Messages with
            .an = Attribute
        '''))
        mv = MockVisitor()
        mv.visit(resource)
        self.assertEqual(mv.pattern_calls, 4)
        self.assertDictEqual(
            mv.calls, {
                'Resource': 1,
                'Comment': 1,
                'Message': 3,
                'Identifier': 4,
                'Attribute': 1,
                'Span': 10,
            })
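
Example #11's MockVisitor is test-local too. A sketch consistent with the asserted counts, assuming fluent.syntax's Visitor: generic_visit tallies every visited node type, while visit_Pattern counts patterns without descending into them (which is why no TextElement or Pattern spans show up in the totals):

from collections import defaultdict
from fluent.syntax.visitor import Visitor

class MockVisitor(Visitor):
    def __init__(self):
        self.calls = defaultdict(int)
        self.pattern_calls = 0

    def generic_visit(self, node):
        self.calls[type(node).__name__] += 1
        super().generic_visit(node)

    def visit_Pattern(self, node):
        # Count the pattern but do not descend into its elements.
        self.pattern_calls += 1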
Example #12
class TestIgnoredFields(unittest.TestCase):
    def setUp(self):
        self.parser = FluentParser()

    def parse_ftl_entry(self, string):
        return self.parser.parse_entry(dedent_ftl(string))

    def test_ignore_value(self):
        a = self.parse_ftl_entry("foo = Foo")
        b = self.parse_ftl_entry("foo = Bar")

        self.assertTrue(a.equals(b, ignored_fields=['value']))

    def test_ignore_value_span(self):
        a = self.parse_ftl_entry("foo = Foo")
        b = self.parse_ftl_entry("foo = Foobar")

        self.assertTrue(a.equals(b, ignored_fields=['span', 'value']))
        self.assertFalse(a.equals(b, ignored_fields=['value']))

    def test_ignore_comments(self):
        a = self.parse_ftl_entry("""\
            # Comment A
            foo = Foo
        """)
        b = self.parse_ftl_entry("""\
            # Comment B
            foo = Foo
        """)
        c = self.parse_ftl_entry("""\
            # Comment CC
            foo = Foo
        """)

        self.assertTrue(a.equals(b, ignored_fields=['comment']))
        self.assertFalse(a.equals(c, ignored_fields=['comment']))
        self.assertTrue(a.equals(c, ignored_fields=['comment', 'span']))
Example #13
class MergeContext(object):
    """Stateful context for merging translation resources.

    `MergeContext` must be configured with the target language and the
    directory locations of the input data.

    The transformation takes four types of input data:

        - The en-US FTL reference files which will be used as templates for
          message order, comments and sections.

        - The current FTL files for the given language.

        - The legacy (DTD, properties) translation files for the given
          language.  The translations from these files will be transformed
          into FTL and merged into the existing FTL files for this language.

        - A list of `FTL.Message` objects some of whose nodes are special
          helper or transform nodes:

              helpers: EXTERNAL_ARGUMENT, MESSAGE_REFERENCE
              transforms: COPY, REPLACE_IN_TEXT, REPLACE, PLURALS, CONCAT
    """
    def __init__(self, lang, reference_dir, localization_dir):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        try:
            self.plural_categories = get_plural_categories(lang)
        except RuntimeError as e:
            logging.getLogger('migrate').warn(e)
            self.plural_categories = get_plural_categories('en')

        # Paths to directories with input data, relative to CWD.
        self.reference_dir = reference_dir
        self.localization_dir = localization_dir

        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # A dict whose keys are `(path, key)` tuples corresponding to target
        # FTL translations, and values are sets of `(path, key)` tuples
        # corresponding to localized entities which will be migrated.
        self.dependencies = {}

    def read_ftl_resource(self, path):
        """Read an FTL resource and parse it into an AST."""
        f = codecs.open(path, 'r', 'utf8')
        try:
            contents = f.read()
        except UnicodeDecodeError as err:
            logger = logging.getLogger('migrate')
            logger.warn('Unable to read file {}: {}'.format(path, err))
            raise err
        finally:
            f.close()

        ast = self.fluent_parser.parse(contents)

        annots = [annot for entry in ast.body for annot in entry.annotations]

        if len(annots):
            logger = logging.getLogger('migrate')
            for annot in annots:
                msg = annot.message
                logger.warn('Syntax error in {}: {}'.format(path, msg))

        return ast

    def read_legacy_resource(self, path):
        """Read a legacy resource and parse it into a dict."""
        parser = getParser(path)
        parser.readFile(path)
        # Transform the parsed result which is an iterator into a dict.
        return {entity.key: entity.val for entity in parser}

    def read_reference_ftl(self, path):
        """Read and parse a reference FTL file.

        A missing resource file is a fatal error and will raise an
        UnreadableReferenceError.
        """
        fullpath = os.path.join(self.reference_dir, path)
        try:
            return self.read_ftl_resource(fullpath)
        except IOError as err:
            error_message = 'Missing reference file: {}'.format(fullpath)
            logging.getLogger('migrate').error(error_message)
            raise UnreadableReferenceError(error_message)
        except UnicodeDecodeError as err:
            error_message = 'Error reading file {}: {}'.format(fullpath, err)
            logging.getLogger('migrate').error(error_message)
            raise UnreadableReferenceError(error_message)

    def read_localization_ftl(self, path):
        """Read and parse an existing localization FTL file.

        Create a new FTL.Resource if the file doesn't exist or can't be
        decoded.
        """
        fullpath = os.path.join(self.localization_dir, path)
        try:
            return self.read_ftl_resource(fullpath)
        except IOError:
            logger = logging.getLogger('migrate')
            logger.info('Localization file {} does not exist and '
                        'it will be created'.format(path))
            return FTL.Resource()
        except UnicodeDecodeError:
            logger = logging.getLogger('migrate')
            logger.warn('Localization file {} has broken encoding. '
                        'It will be re-created and some translations '
                        'may be lost'.format(path))
            return FTL.Resource()

    def maybe_add_localization(self, path):
        """Add a localization resource to migrate translations from.

        Only legacy resources can be added as migration sources.  The resource
        may be missing on disk.

        Uses a compare-locales parser to create a dict of (key, string value)
        tuples.
        """
        if path.endswith('.ftl'):
            error_message = (
                'Migrating translations from Fluent files is not supported '
                '({})'.format(path))
            logging.getLogger('migrate').error(error_message)
            raise NotSupportedError(error_message)

        try:
            fullpath = os.path.join(self.localization_dir, path)
            collection = self.read_legacy_resource(fullpath)
        except IOError:
            logger = logging.getLogger('migrate')
            logger.warn('Missing localization file: {}'.format(path))
        else:
            self.localization_resources[path] = collection

    def add_transforms(self, target, reference, transforms):
        """Define transforms for target using reference as template.

        `target` is a path of the destination FTL file relative to the
        localization directory. `reference` is a path to the template FTL
        file relative to the reference directory.

        Each transform is an extended FTL node with `Transform` nodes as some
        values.  Transforms are stored in their lazy AST form until
        `merge_changeset` is called, at which point they are evaluated to real
        FTL nodes with migrated translations.

        Each transform is scanned for `Source` nodes which will be used to
        build the list of dependencies for the transformed message.
        """
        def get_sources(acc, cur):
            if isinstance(cur, Source):
                acc.add((cur.path, cur.key))
            return acc

        reference_ast = self.read_reference_ftl(reference)
        self.reference_resources[target] = reference_ast

        for node in transforms:
            ident = node.id.name
            # Scan `node` for `Source` nodes and collect the information they
            # store into a set of dependencies.
            dependencies = fold(get_sources, node, set())
            # Set these sources as dependencies for the current transform.
            self.dependencies[(target, ident)] = dependencies

            # The target Fluent message should exist in the reference file. If
            # it doesn't, it's probably a typo.
            if get_message(reference_ast.body, ident) is None:
                logger = logging.getLogger('migrate')
                logger.warn('{} "{}" was not found in {}'.format(
                    type(node).__name__, ident, reference))

        # Keep track of localization resource paths which were defined as
        # sources in the transforms.
        expected_paths = set()

        # Read all legacy translation files defined in Source transforms. This
        # may fail but a single missing legacy resource doesn't mean that the
        # migration can't succeed.
        for dependencies in self.dependencies.values():
            for path in set(path for path, _ in dependencies):
                expected_paths.add(path)
                self.maybe_add_localization(path)

        # However, if all legacy resources are missing, bail out early. There
        # are no translations to migrate. We'd also get errors in hg annotate.
        if len(expected_paths) > 0 and len(self.localization_resources) == 0:
            error_message = 'No localization files were found'
            logging.getLogger('migrate').error(error_message)
            raise EmptyLocalizationError(error_message)

        # Add the current transforms to any other transforms added earlier for
        # this path.
        path_transforms = self.transforms.setdefault(target, [])
        path_transforms += transforms

        if target not in self.localization_resources:
            target_ast = self.read_localization_ftl(target)
            self.localization_resources[target] = target_ast

    def get_source(self, path, key):
        """Get an entity value from a localized legacy source.

        Used by the `Source` transform.
        """
        resource = self.localization_resources[path]
        return resource.get(key, None)

    def messages_equal(self, res1, res2):
        """Compare messages of two FTL resources.

        Uses FTL.BaseNode.equals to compare all messages in two FTL resources.
        If the order or number of messages differ, the result is also False.
        """
        def message_id(message):
            "Return the message's identifer name for sorting purposes."
            return message.id.name

        messages1 = sorted(
            (entry for entry in res1.body if isinstance(entry, FTL.Message)),
            key=message_id)
        messages2 = sorted(
            (entry for entry in res2.body if isinstance(entry, FTL.Message)),
            key=message_id)
        for msg1, msg2 in zip_longest(messages1, messages2):
            if msg1 is None or msg2 is None:
                return False
            if not msg1.equals(msg2):
                return False
        return True

    def merge_changeset(self, changeset=None):
        """Return a generator of FTL ASTs for the changeset.

        The input data must be configured earlier using the `add_*` methods.
        If given, `changeset` must be a set of (path, key) tuples describing
        which legacy translations are to be merged. If `changeset` is None,
        all legacy translations will be allowed to be migrated in a single
        changeset.

        The inner `in_changeset` function is used to determine if a message
        should be migrated for the given changeset. It compares the legacy
        dependencies of the transform defined for the message with legacy
        translations available in the changeset. If all dependencies are
        present, the message will be migrated.

        Given `changeset`, return a dict whose keys are resource paths and
        values are `FTL.Resource` instances.  The values will also be used to
        update this context's existing localization resources.
        """

        if changeset is None:
            # Merge all known legacy translations. Used in tests.
            changeset = {
                (path, key)
                for path, strings in self.localization_resources.iteritems()
                if not path.endswith('.ftl') for key in strings.iterkeys()
            }

        for path, reference in self.reference_resources.iteritems():
            current = self.localization_resources[path]
            transforms = self.transforms.get(path, [])

            def in_changeset(ident):
                """Check if a message should be migrated.

                A message will be migrated only if all of its dependencies
                are present in the currently processed changeset.

                If a transform defined for this message points to a missing
                legacy translation, this message will not be merged. The
                missing legacy dependency won't be present in the changeset.

                This also means that partially translated messages (e.g.
                constructed from two legacy strings out of which only one is
                available) will never be migrated.
                """
                message_deps = self.dependencies.get((path, ident), None)

                # Don't merge if we don't have a transform for this message.
                if message_deps is None:
                    return False

                # As a special case, if a transform exists but has no
                # dependencies, it's a hardcoded `FTL.Node` which doesn't
                # migrate any existing translation but rather creates a new
                # one.  Merge it.
                if len(message_deps) == 0:
                    return True

                # Make sure all the dependencies are present in the current
                # changeset. Partial migrations are not currently supported.
                # See https://bugzilla.mozilla.org/show_bug.cgi?id=1321271
                available_deps = message_deps & changeset
                return message_deps == available_deps

            # Merge legacy translations with the existing ones using the
            # reference as a template.
            snapshot = merge_resource(self, reference, current, transforms,
                                      in_changeset)

            # Skip this path if the messages in the merged snapshot are
            # identical to those in the current state of the localization file.
            # This may happen when:
            #
            #   - none of the transforms is in the changeset, or
            #   - all messages which would be migrated by the context's
            #     transforms already exist in the current state.
            if self.messages_equal(current, snapshot):
                continue

            # Store the merged snapshot on the context so that the next merge
            # already takes it into account as the existing localization.
            self.localization_resources[path] = snapshot

            # The result for this path is a complete `FTL.Resource`.
            yield path, snapshot

    def serialize_changeset(self, changeset):
        """Return a dict of serialized FTLs for the changeset.

        Given `changeset`, return a dict whose keys are resource paths and
        values are serialized FTL snapshots.
        """

        return {
            path: self.fluent_serializer.serialize(snapshot)
            for path, snapshot in self.merge_changeset(changeset)
        }
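
A hedged usage sketch of the MergeContext API from Example #13; the paths, message identifier, and DTD key are hypothetical, and COPY is assumed to come from fluent.migrate.transforms:

import fluent.syntax.ast as FTL
from fluent.migrate.transforms import COPY

ctx = MergeContext(
    lang='pl', reference_dir='reference', localization_dir='locales/pl')

ctx.add_transforms('browser/about.ftl', 'browser/about.ftl', [
    FTL.Message(
        id=FTL.Identifier('about-title'),
        value=COPY('browser/about.dtd', 'aboutDialog.title')),
])

for path, snapshot in ctx.merge_changeset():
    # `snapshot` is a complete FTL.Resource ready for serialization.
    print(path)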
Example #14
    def setUp(self):
        parser = FluentParser()
        workload = os.path.join(os.path.dirname(__file__), 'fixtures_perf',
                                'workload-low.ftl')
        with codecs.open(workload, encoding='utf-8') as f:
            self.resource = parser.parse(f.read())
Example #15
# coding=utf8
from __future__ import unicode_literals
from __future__ import absolute_import

import textwrap

import fluent.syntax.ast as FTL
from fluent.syntax.parser import FluentParser, FluentParserStream

fluent_parser = FluentParser(with_spans=False)


def parse(Parser, string):
    if Parser is FluentParser:
        return fluent_parser.parse(string)

    # Parsing a legacy resource.

    # Parse the string into the internal Context.
    parser = Parser()
    # compare-locales expects ASCII strings.
    parser.readContents(string.encode('utf8'))
    # Transform the parsed result which is an iterator into a dict.
    return {ent.key: ent for ent in parser}


def ftl_resource_to_ast(code):
    return fluent_parser.parse(ftl(code))


def ftl_resource_to_json(code):
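    # The body is truncated in the source. A likely completion, mirroring
    # ftl_resource_to_ast() above and the AST's to_json() helper:
    return ftl_resource_to_ast(code).to_json()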
Example #16
class TestEntryEqualToSelf(unittest.TestCase):
    def setUp(self):
        self.parser = FluentParser()

    def parse_ftl_entry(self, string):
        return self.parser.parse_entry(dedent_ftl(string))

    def test_same_simple_message(self):
        message1 = self.parse_ftl_entry("""\
            foo = Foo
        """)

        self.assertTrue(message1.equals(message1))
        self.assertTrue(message1.equals(message1.clone()))

    def test_same_selector_message(self):
        message1 = self.parse_ftl_entry("""\
            foo =
                { $num ->
                    [one] One
                    [two] Two
                    [few] Few
                    [many] Many
                   *[other] Other
                }
        """)

        self.assertTrue(message1.equals(message1))
        self.assertTrue(message1.equals(message1.clone()))

    def test_same_complex_placeable_message(self):
        message1 = self.parse_ftl_entry("""\
            foo = Foo { NUMBER($num, style: "decimal") } Bar
        """)

        self.assertTrue(message1.equals(message1))
        self.assertTrue(message1.equals(message1.clone()))

    def test_same_message_with_attribute(self):
        message1 = self.parse_ftl_entry("""\
            foo =
                .attr = Attr
        """)

        self.assertTrue(message1.equals(message1))
        self.assertTrue(message1.equals(message1.clone()))

    def test_same_message_with_attributes(self):
        message1 = self.parse_ftl_entry("""\
            foo =
                .attr1 = Attr 1
                .attr2 = Attr 2
        """)

        self.assertTrue(message1.equals(message1))
        self.assertTrue(message1.equals(message1.clone()))

    def test_same_junk(self):
        message1 = self.parse_ftl_entry("""\
            foo = Foo {
        """)

        self.assertTrue(message1.equals(message1))
        self.assertTrue(message1.equals(message1.clone()))
Example #17
class InternalContext(object):
    """Internal context for merging translation resources.

    For the public interface, see `context.MigrationContext`.
    """

    def __init__(
        self, lang, reference_dir, localization_dir, enforce_translated=False
    ):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        self.plural_categories = get_plural(lang)
        if self.plural_categories is None:
            logger = logging.getLogger('migrate')
            logger.warning(
                'Plural rule for "{}" is not defined in '
                'compare-locales'.format(lang))
            self.plural_categories = ('one', 'other')

        self.enforce_translated = enforce_translated
        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}
        self.target_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # The evaluator instance is an AST transformer capable of walking an
        # AST hierarchy and evaluating nodes which are migration Transforms.
        self.evaluator = Evaluator(self)

    def read_ftl_resource(self, path):
        """Read an FTL resource and parse it into an AST."""
        f = codecs.open(path, 'r', 'utf8')
        try:
            contents = f.read()
        except UnicodeDecodeError as err:
            logger = logging.getLogger('migrate')
            logger.warning('Unable to read file {}: {}'.format(path, err))
            raise err
        finally:
            f.close()

        ast = self.fluent_parser.parse(contents)

        annots = [
            annot
            for entry in ast.body
            if isinstance(entry, FTL.Junk)
            for annot in entry.annotations
        ]

        if len(annots):
            logger = logging.getLogger('migrate')
            for annot in annots:
                msg = annot.message
                logger.warning('Syntax error in {}: {}'.format(path, msg))

        return ast

    def read_legacy_resource(self, path):
        """Read a legacy resource and parse it into a dict."""
        parser = getParser(path)
        parser.readFile(path)
        # Transform the parsed result which is an iterator into a dict.
        return {
            entity.key: entity.val for entity in parser
            if entity.localized or self.enforce_translated
        }

    def read_reference_ftl(self, path):
        """Read and parse a reference FTL file.

        A missing resource file is a fatal error and will raise an
        UnreadableReferenceError.
        """
        fullpath = os.path.join(self.reference_dir, path)
        try:
            return self.read_ftl_resource(fullpath)
        except IOError:
            error_message = 'Missing reference file: {}'.format(fullpath)
            logging.getLogger('migrate').error(error_message)
            raise UnreadableReferenceError(error_message)
        except UnicodeDecodeError as err:
            error_message = 'Error reading file {}: {}'.format(fullpath, err)
            logging.getLogger('migrate').error(error_message)
            raise UnreadableReferenceError(error_message)

    def read_localization_ftl(self, path):
        """Read and parse an existing localization FTL file.

        Create a new FTL.Resource if the file doesn't exist or can't be
        decoded.
        """
        fullpath = os.path.join(self.localization_dir, path)
        try:
            return self.read_ftl_resource(fullpath)
        except IOError:
            logger = logging.getLogger('migrate')
            logger.info(
                'Localization file {} does not exist and '
                'it will be created'.format(path))
            return FTL.Resource()
        except UnicodeDecodeError:
            logger = logging.getLogger('migrate')
            logger.warning(
                'Localization file {} has broken encoding. '
                'It will be re-created and some translations '
                'may be lost'.format(path))
            return FTL.Resource()

    def maybe_add_localization(self, path):
        """Add a localization resource to migrate translations from.

        Uses a compare-locales parser to create a dict of (key, string value)
        tuples.
        For Fluent sources, we store the AST.
        """
        try:
            fullpath = os.path.join(self.localization_dir, path)
            if not fullpath.endswith('.ftl'):
                collection = self.read_legacy_resource(fullpath)
            else:
                collection = self.read_ftl_resource(fullpath)
        except IOError:
            logger = logging.getLogger('migrate')
            logger.warning('Missing localization file: {}'.format(path))
        else:
            self.localization_resources[path] = collection

    def get_legacy_source(self, path, key):
        """Get an entity value from a localized legacy source.

        Used by the `Source` transform.
        """
        resource = self.localization_resources[path]
        return resource.get(key, None)

    def get_fluent_source_pattern(self, path, key):
        """Get a pattern from a localized Fluent source.

        If the key contains a `.`, does an attribute lookup.
        Used by the `COPY_PATTERN` transform.
        """
        resource = self.localization_resources[path]
        msg_key, _, attr_key = key.partition('.')
        found = None
        for entry in resource.body:
            if isinstance(entry, (FTL.Message, FTL.Term)):
                if entry.id.name == msg_key:
                    found = entry
                    break
        if found is None:
            return None
        if not attr_key:
            return found.value
        for attribute in found.attributes:
            if attribute.id.name == attr_key:
                return attribute.value
        return None
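
    # For example (hypothetical resource): a key of 'welcome.title' splits
    # on the first '.', finds the message or term `welcome`, and returns
    # the pattern of its `title` attribute; a bare 'welcome' key returns
    # the entry's own value.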

    def messages_equal(self, res1, res2):
        """Compare messages and terms of two FTL resources.

        Uses FTL.BaseNode.equals to compare all messages/terms
        in two FTL resources.
        If the order or number of messages differ, the result is also False.
        """
        def message_id(message):
            "Return the message's identifer name for sorting purposes."
            return message.id.name

        messages1 = sorted(
            (entry for entry in res1.body
             if isinstance(entry, FTL.Message)
                or isinstance(entry, FTL.Term)),
            key=message_id)
        messages2 = sorted(
            (entry for entry in res2.body
             if isinstance(entry, FTL.Message)
                or isinstance(entry, FTL.Term)),
            key=message_id)
        for msg1, msg2 in zip_longest(messages1, messages2):
            if msg1 is None or msg2 is None:
                return False
            if not msg1.equals(msg2):
                return False
        return True

    def merge_changeset(self, changeset=None, known_translations=None):
        """Return a generator of FTL ASTs for the changeset.

        The input data must be configured earlier using the `add_*` methods.
        If given, `changeset` must be a set of (path, key) tuples describing
        which legacy translations are to be merged. If `changeset` is None,
        all legacy translations will be allowed to be migrated in a single
        changeset.

        We use the `in_changeset` method to determine if a message should be
        migrated for the given changeset.

        Given `changeset`, return a dict whose keys are resource paths and
        values are `FTL.Resource` instances.  The values will also be used to
        update this context's existing localization resources.
        """

        if changeset is None:
            # Merge all known legacy translations. Used in tests.
            changeset = {
                (path, key)
                for path, strings in self.localization_resources.items()
                if not path.endswith('.ftl')
                for key in strings.keys()
            }

        if known_translations is None:
            known_translations = changeset

        for path, reference in self.reference_resources.items():
            current = self.target_resources[path]
            transforms = self.transforms.get(path, [])
            in_changeset = partial(
                self.in_changeset, changeset, known_translations, path)

            # Merge legacy translations with the existing ones using the
            # reference as a template.
            snapshot = merge_resource(
                self, reference, current, transforms, in_changeset
            )

            # Skip this path if the messages in the merged snapshot are
            # identical to those in the current state of the localization file.
            # This may happen when:
            #
            #   - none of the transforms is in the changeset, or
            #   - all messages which would be migrated by the context's
            #     transforms already exist in the current state.
            if self.messages_equal(current, snapshot):
                continue

            # Store the merged snapshot on the context so that the next merge
            # already takes it into account as the existing localization.
            self.target_resources[path] = snapshot

            # The result for this path is a complete `FTL.Resource`.
            yield path, snapshot

    def in_changeset(self, changeset, known_translations, path, ident):
        """Check if a message should be migrated in this changeset.

        The message is identified by path and ident.

        A message will be migrated only if all of its dependencies
        are present in the currently processed changeset.

        If a transform defined for this message points to a missing
        legacy translation, this message will not be merged. The
        missing legacy dependency won't be present in the changeset.

        This also means that partially translated messages (e.g.
        constructed from two legacy strings out of which only one is
        available) will never be migrated.
        """
        message_deps = self.dependencies.get((path, ident), None)

        # Don't merge if we don't have a transform for this message.
        if message_deps is None:
            return False

        # As a special case, if a transform exists but has no
        # dependencies, it's a hardcoded `FTL.Node` which doesn't
        # migrate any existing translation but rather creates a new
        # one.  Merge it.
        if len(message_deps) == 0:
            return True

        # Make sure all the dependencies are present in the current
        # changeset. Partial migrations are not currently supported.
        # See https://bugzilla.mozilla.org/show_bug.cgi?id=1321271
        # We only return True if our current changeset touches
        # the transform, and we have all of the dependencies.
        active_deps = message_deps & changeset
        available_deps = message_deps & known_translations
        return bool(active_deps) and message_deps == available_deps

    def serialize_changeset(self, changeset, known_translations=None):
        """Return a dict of serialized FTLs for the changeset.

        Given `changeset`, return a dict whose keys are resource paths and
        values are serialized FTL snapshots.
        """

        return {
            path: self.fluent_serializer.serialize(snapshot)
            for path, snapshot in self.merge_changeset(
                changeset, known_translations
            )
        }

    def evaluate(self, node):
        return self.evaluator.visit(node)
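
A small worked sketch of the in_changeset rule from Example #17, with hypothetical resource paths and keys; a message merges only when at least one of its dependencies is touched by the changeset and all of them are known:

dependencies = {
    ('a.ftl', 'one'): {('a.dtd', 'x')},
    ('a.ftl', 'two'): {('a.dtd', 'x'), ('a.dtd', 'y')},  # 'y' never lands
}
changeset = known_translations = {('a.dtd', 'x')}

def in_changeset(path, ident):
    deps = dependencies.get((path, ident))
    if deps is None:      # no transform for this message
        return False
    if not deps:          # hardcoded node: always merge
        return True
    active = deps & changeset
    available = deps & known_translations
    return bool(active) and deps == available

assert in_changeset('a.ftl', 'one')
assert not in_changeset('a.ftl', 'two')  # partial translations never merge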
Example #18
class MergeContext(object):
    """Stateful context for merging translation resources.

    `MergeContext` must be configured with the target language and the
    directory locations of the input data.

    The transformation takes four types of input data:

        - The en-US FTL reference files which will be used as templates for
          message order, comments and sections.

        - The current FTL files for the given language.

        - The legacy (DTD, properties) translation files for the given
          language.  The translations from these files will be transformed
          into FTL and merged into the existing FTL files for this language.

        - A list of `FTL.Message` objects some of whose nodes are special
          helper or transform nodes:

              helpers: EXTERNAL_ARGUMENT, MESSAGE_REFERENCE
              transforms: COPY, REPLACE_IN_TEXT, REPLACE, PLURALS, CONCAT
    """

    def __init__(self, lang, reference_dir, localization_dir):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        try:
            self.plural_categories = get_plural_categories(lang)
        except RuntimeError as e:
            logging.getLogger('migrate').warning(e)
            self.plural_categories = get_plural_categories('en')

        # Paths to directories with input data, relative to CWD.
        self.reference_dir = reference_dir
        self.localization_dir = localization_dir

        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # A dict whose keys are `(path, key)` tuples corresponding to target
        # FTL translations, and values are sets of `(path, key)` tuples
        # corresponding to localized entities which will be migrated.
        self.dependencies = {}

    def read_ftl_resource(self, path):
        """Read an FTL resource and parse it into an AST."""
        f = codecs.open(path, 'r', 'utf8')
        try:
            contents = f.read()
        except UnicodeDecodeError as err:
            logger = logging.getLogger('migrate')
            logger.warn('Unable to read file {}: {}'.format(path, err))
            raise err
        finally:
            f.close()

        ast = self.fluent_parser.parse(contents)

        annots = [
            annot
            for entry in ast.body
            for annot in entry.annotations
        ]

        if len(annots):
            logger = logging.getLogger('migrate')
            for annot in annots:
                msg = annot.message
                logger.warn('Syntax error in {}: {}'.format(path, msg))

        return ast

    def read_legacy_resource(self, path):
        """Read a legacy resource and parse it into a dict."""
        parser = getParser(path)
        parser.readFile(path)
        # Transform the parsed result which is an iterator into a dict.
        return {entity.key: entity.val for entity in parser}

    def maybe_add_localization(self, path):
        """Add a localization resource to migrate translations from.

        Only legacy resources can be added as migration sources.  The resource
        may be missing on disk.

        Uses a compare-locales parser to create a dict of (key, string value)
        tuples.
        """
        if path.endswith('.ftl'):
            error_message = (
                'Migrating translations from Fluent files is not supported '
                '({})'.format(path))
            logging.getLogger('migrate').error(error_message)
            raise NotSupportedError(error_message)

        try:
            fullpath = os.path.join(self.localization_dir, path)
            collection = self.read_legacy_resource(fullpath)
        except IOError:
            logger = logging.getLogger('migrate')
            logger.warn('Missing localization file: {}'.format(path))
        else:
            self.localization_resources[path] = collection

    def add_transforms(self, target, reference, transforms):
        """Define transforms for target using reference as template.

        `target` is a path of the destination FTL file relative to the
        localization directory. `reference` is a path to the template FTL
        file relative to the reference directory.

        Each transform is an extended FTL node with `Transform` nodes as some
        values.  Transforms are stored in their lazy AST form until
        `merge_changeset` is called, at which point they are evaluated to real
        FTL nodes with migrated translations.

        Each transform is scanned for `Source` nodes which will be used to
        build the list of dependencies for the transformed message.
        """
        def get_sources(acc, cur):
            if isinstance(cur, Source):
                acc.add((cur.path, cur.key))
            return acc

        refpath = os.path.join(self.reference_dir, reference)
        try:
            ast = self.read_ftl_resource(refpath)
        except IOError as err:
            error_message = 'Missing reference file: {}'.format(refpath)
            logging.getLogger('migrate').error(error_message)
            raise UnreadableReferenceError(error_message)
        except UnicodeDecodeError as err:
            error_message = 'Error reading file {}: {}'.format(refpath, err)
            logging.getLogger('migrate').error(error_message)
            raise UnreadableReferenceError(error_message)
        else:
            # The reference file will be used by the merge function as
            # a template for serializing the merge results.
            self.reference_resources[target] = ast

        for node in transforms:
            # Scan `node` for `Source` nodes and collect the information they
            # store into a set of dependencies.
            dependencies = fold(get_sources, node, set())
            # Set these sources as dependencies for the current transform.
            self.dependencies[(target, node.id.name)] = dependencies

            # Read all legacy translation files defined in Source transforms.
            for path in set(path for path, _ in dependencies):
                self.maybe_add_localization(path)

        path_transforms = self.transforms.setdefault(target, [])
        path_transforms += transforms

        if target not in self.localization_resources:
            fullpath = os.path.join(self.localization_dir, target)
            try:
                ast = self.read_ftl_resource(fullpath)
            except IOError:
                logger = logging.getLogger('migrate')
                logger.info(
                    'Localization file {} does not exist and '
                    'it will be created'.format(target))
            except UnicodeDecodeError:
                logger = logging.getLogger('migrate')
                logger.warn(
                    'Localization file {} will be re-created and some '
                    'translations might be lost'.format(target))
            else:
                self.localization_resources[target] = ast

    def get_source(self, path, key):
        """Get an entity value from a localized legacy source.

        Used by the `Source` transform.
        """
        resource = self.localization_resources[path]
        return resource.get(key, None)

    def messages_equal(self, res1, res2):
        """Compare messages of two FTL resources.

        Uses FTL.BaseNode.equals to compare all messages in two FTL resources.
        If the order or number of messages differ, the result is also False.
        """
        def message_id(message):
            "Return the message's identifer name for sorting purposes."
            return message.id.name

        messages1 = sorted(
            (entry for entry in res1.body if isinstance(entry, FTL.Message)),
            key=message_id)
        messages2 = sorted(
            (entry for entry in res2.body if isinstance(entry, FTL.Message)),
            key=message_id)
        for msg1, msg2 in zip_longest(messages1, messages2):
            if msg1 is None or msg2 is None:
                return False
            if not msg1.equals(msg2):
                return False
        return True

    def merge_changeset(self, changeset=None):
        """Return a generator of FTL ASTs for the changeset.

        The input data must be configured earlier using the `add_*` methods.
        If given, `changeset` must be a set of (path, key) tuples describing
        which legacy translations are to be merged.

        Given `changeset`, return a dict whose keys are resource paths and
        values are `FTL.Resource` instances.  The values will also be used to
        update this context's existing localization resources.
        """

        if changeset is None:
            # Merge all known legacy translations. Used in tests.
            changeset = {
                (path, key)
                for path, strings in self.localization_resources.iteritems()
                if not path.endswith('.ftl')
                for key in strings.iterkeys()
            }

        for path, reference in self.reference_resources.iteritems():
            current = self.localization_resources.get(path, FTL.Resource())
            transforms = self.transforms.get(path, [])

            def in_changeset(ident):
                """Check if entity should be merged.

                If at least one dependency of the entity is in the current
                set of changeset, merge it.
                """
                message_deps = self.dependencies.get((path, ident), None)

                # Don't merge if we don't have a transform for this message.
                if message_deps is None:
                    return False

                # As a special case, if a transform exists but has no
                # dependencies, it's a hardcoded `FTL.Node` which doesn't
                # migrate any existing translation but rather creates a new
                # one.  Merge it.
                if len(message_deps) == 0:
                    return True

                # If the intersection of the dependencies and the current
                # changeset is non-empty, merge this message.
                return message_deps & changeset

            # Merge legacy translations with the existing ones using the
            # reference as a template.
            snapshot = merge_resource(
                self, reference, current, transforms, in_changeset
            )

            # Skip this path if the messages in the merged snapshot are
            # identical to those in the current state of the localization file.
            # This may happen when:
            #
            #   - none of the transforms is in the changeset, or
            #   - all messages which would be migrated by the context's
            #     transforms already exist in the current state.
            if self.messages_equal(current, snapshot):
                continue

            # Store the merged snapshot on the context so that the next merge
            # already takes it into account as the existing localization.
            self.localization_resources[path] = snapshot

            # The result for this path is a complete `FTL.Resource`.
            yield path, snapshot

    def serialize_changeset(self, changeset):
        """Return a dict of serialized FTLs for the changeset.

        Given `changeset`, return a dict whose keys are resource paths and
        values are serialized FTL snapshots.
        """

        return {
            path: self.fluent_serializer.serialize(snapshot)
            for path, snapshot in self.merge_changeset(changeset)
        }
Example #19
def parse_literal(input):
    parser = FluentParser(with_spans=False)
    ast = parser.parse_entry(input)
    expr = ast.value.elements[0].expression
    return expr.parse()
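
A hypothetical usage of parse_literal; with python-fluent's AST, NumberLiteral.parse() and StringLiteral.parse() return small dicts describing the parsed value:

print(parse_literal('x = { 42 }'))      # e.g. {'value': 42.0, 'precision': 0}
print(parse_literal('x = { "abc" }'))   # e.g. {'value': 'abc'}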
Example #20
class MergeContext(object):
    """Stateful context for merging translation resources.

    `MergeContext` must be configured with the target language and the
    directory locations of the input data.

    The transformation takes four types of input data:

        - The en-US FTL reference files which will be used as templates for
          message order, comments and sections.

        - The current FTL files for the given language.

        - The legacy (DTD, properties) translation files for the given
          language.  The translations from these files will be transformed
          into FTL and merged into the existing FTL files for this language.

        - A list of `FTL.Message` objects some of whose nodes are special
          helper or transform nodes:

              helpers: LITERAL, EXTERNAL_ARGUMENT, MESSAGE_REFERENCE
              transforms: COPY, REPLACE_IN_TEXT, REPLACE, PLURALS, CONCAT
    """
    def __init__(self, lang, reference_dir, localization_dir):
        self.fluent_parser = FluentParser(with_spans=False)
        self.fluent_serializer = FluentSerializer()

        # An iterable of plural category names relevant to the context's
        # language.  E.g. ('one', 'other') for English.
        try:
            self.plural_categories = get_plural_categories(lang)
        except RuntimeError as e:
            logging.getLogger('migrate').warning(e)
            self.plural_categories = get_plural_categories('en')

        # Paths to directories with input data, relative to CWD.
        self.reference_dir = reference_dir
        self.localization_dir = localization_dir

        # Parsed input resources stored by resource path.
        self.reference_resources = {}
        self.localization_resources = {}

        # An iterable of `FTL.Message` objects some of whose nodes can be the
        # transform operations.
        self.transforms = {}

        # A dict whose keys are `(path, key)` tuples corresponding to target
        # FTL translations, and values are sets of `(path, key)` tuples
        # corresponding to localized entities which will be migrated.
        self.dependencies = {}

    def read_ftl_resource(self, path):
        """Read an FTL resource and parse it into an AST."""
        f = codecs.open(path, 'r', 'utf8')
        try:
            contents = f.read()
        finally:
            f.close()

        ast = self.fluent_parser.parse(contents)

        annots = [annot for entry in ast.body for annot in entry.annotations]

        if len(annots):
            logger = logging.getLogger('migrate')
            for annot in annots:
                msg = annot.message
                logger.warn(u'Syntax error in {}: {}'.format(path, msg))

        return ast

    def read_legacy_resource(self, path):
        """Read a legacy resource and parse it into a dict."""
        parser = getParser(path)
        parser.readFile(path)
        # Transform the parsed result which is an iterator into a dict.
        return {entity.key: entity.val for entity in parser}

    def add_reference(self, path, realpath=None):
        """Add an FTL AST to this context's reference resources."""
        fullpath = os.path.join(self.reference_dir, realpath or path)
        try:
            ast = self.read_ftl_resource(fullpath)
        except IOError as err:
            logger = logging.getLogger('migrate')
            logger.error(u'Missing reference file: {}'.format(path))
            raise err
        except UnicodeDecodeError as err:
            logger = logging.getLogger('migrate')
            logger.error(u'Error reading file {}: {}'.format(path, err))
            raise err
        else:
            self.reference_resources[path] = ast

    def add_localization(self, path):
        """Add an existing localization resource.

        If it's an FTL resource, add an FTL AST.  Otherwise, it's a legacy
        resource.  Use a compare-locales parser to create a dict of (key,
        string value) tuples.
        """
        fullpath = os.path.join(self.localization_dir, path)
        if fullpath.endswith('.ftl'):
            try:
                ast = self.read_ftl_resource(fullpath)
            except IOError:
                logger = logging.getLogger('migrate')
                logger.warning(u'Missing localization file: {}'.format(path))
            except UnicodeDecodeError as err:
                logger = logging.getLogger('migrate')
                logger.warning(u'Error reading file {}: {}'.format(path, err))
            else:
                self.localization_resources[path] = ast
        else:
            try:
                collection = self.read_legacy_resource(fullpath)
            except IOError:
                logger = logging.getLogger('migrate')
                logger.warning(u'Missing localization file: {}'.format(path))
            else:
                self.localization_resources[path] = collection

    def add_transforms(self, path, transforms):
        """Define transforms for path.

        Each transform is an extended FTL node with `Transform` nodes as some
        values.  Transforms are stored in their lazy AST form until
        `merge_changeset` is called, at which point they are evaluated to real
        FTL nodes with migrated translations.

        Each transform is scanned for `Source` nodes which will be used to
        build the list of dependencies for the transformed message.
        """
        def get_sources(acc, cur):
            if isinstance(cur, Source):
                acc.add((cur.path, cur.key))
            return acc

        for node in transforms:
            # Scan `node` for `Source` nodes and collect the information they
            # store into a set of dependencies.
            dependencies = fold(get_sources, node, set())
            # Set these sources as dependencies for the current transform.
            self.dependencies[(path, node.id.name)] = dependencies

        path_transforms = self.transforms.setdefault(path, [])
        path_transforms += transforms

    def get_source(self, path, key):
        """Get an entity value from the localized source.

        Used by the `Source` transform.
        """
        if path.endswith('.ftl'):
            resource = self.localization_resources[path]
            return get_message(resource.body, key)
        else:
            resource = self.localization_resources[path]
            return resource.get(key, None)

    def merge_changeset(self, changeset=None):
        """Return a generator of FTL ASTs for the changeset.

        The input data must be configured earlier using the `add_*` methods.
        If given, `changeset` must be a set of (path, key) tuples describing
        which legacy translations are to be merged.

        Given `changeset`, return a dict whose keys are resource paths and
        values are `FTL.Resource` instances.  The values will also be used to
        update this context's existing localization resources.
        """

        if changeset is None:
            # Merge all known legacy translations.
            changeset = {
                (path, key)
                for path, strings in self.localization_resources.items()
                for key in strings.keys()
            }

        for path, reference in self.reference_resources.items():
            current = self.localization_resources.get(path, FTL.Resource())
            transforms = self.transforms.get(path, [])

            def in_changeset(ident):
                """Check if entity should be merged.

                If at least one dependency of the entity is in the current
                changeset, merge it.
                """
                message_deps = self.dependencies.get((path, ident), None)

                # Don't merge if we don't have a transform for this message.
                if message_deps is None:
                    return False

                # As a special case, if a transform exists but has no
                # dependencies, it's a hardcoded `FTL.Node` which doesn't
                # migrate any existing translation but rather creates a new
                # one.  Merge it.
                if len(message_deps) == 0:
                    return True

                # If the intersection of the dependencies and the current
                # changeset is non-empty, merge this message.
                return bool(message_deps & changeset)

            # Merge legacy translations with the existing ones using the
            # reference as a template.
            snapshot = merge_resource(self, reference, current, transforms,
                                      in_changeset)

            # If none of the transforms is in the given changeset, the merged
            # snapshot is identical to the current translation.  We compare
            # JSON trees rather than filter by `in_changeset` to account
            # for translations removed from `reference`.
            if snapshot.to_json() == current.to_json():
                continue

            # Store the merged snapshot on the context so that the next merge
            # already takes it into account as the existing localization.
            self.localization_resources[path] = snapshot

            # The result for this path is a complete `FTL.Resource`.
            yield path, snapshot

    def serialize_changeset(self, changeset):
        """Return a dict of serialized FTLs for the changeset.

        Given `changeset`, return a dict whose keys are resource paths and
        values are serialized FTL snapshots.
        """

        return {
            path: self.fluent_serializer.serialize(snapshot)
            for path, snapshot in self.merge_changeset(changeset)
        }
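
A hedged usage sketch for the class above (not from the original source): the 'pl' locale, the directory and file paths, and the COPY transform from fluent.migrate are illustrative assumptions.

ctx = MergeContext(
    lang='pl',
    reference_dir='l10n/en-US',
    localization_dir='l10n/pl',
)
ctx.add_reference('menu.ftl')
ctx.add_localization('menu.ftl')
ctx.add_localization('menu.dtd')
# COPY is a Source-bearing transform, so add_transforms records
# ('menu.dtd', 'fileMenu.label') as the dependency of file-menu.
ctx.add_transforms('menu.ftl', [
    FTL.Message(
        id=FTL.Identifier('file-menu'),
        value=COPY('menu.dtd', 'fileMenu.label'),
    ),
])
# Merge only the legacy string named in the changeset and print the
# serialized FTL snapshot for each affected resource.
changeset = {('menu.dtd', 'fileMenu.label')}
for path, serialized in ctx.serialize_changeset(changeset).items():
    print(path)
    print(serialized)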
Example #21
    def setUp(self):
        self.parser = FluentParser(with_spans=False)
Example #22
class TestParseEntry(unittest.TestCase):
    maxDiff = None

    def setUp(self):
        self.parser = FluentParser(with_spans=False)

    def test_simple_message(self):
        input = """\
            foo = Foo
        """
        output = {
            "comment": None,
            "value": {
                "elements": [{
                    "span": None,
                    "type": "TextElement",
                    "value": "Foo"
                }],
                "span": None,
                "type": "Pattern"
            },
            "attributes": [],
            "type": "Message",
            "span": None,
            "id": {
                "span": None,
                "type": "Identifier",
                "name": "foo"
            }
        }

        message = self.parser.parse_entry(dedent_ftl(input))
        self.assertEqual(message.to_json(), output)

    def test_ignore_attached_comment(self):
        input = """\
            # Attached Comment
            foo = Foo
        """
        output = {
            "comment": None,
            "value": {
                "elements": [{
                    "span": None,
                    "type": "TextElement",
                    "value": "Foo"
                }],
                "span": None,
                "type": "Pattern"
            },
            "attributes": [],
            "type": "Message",
            "id": {
                "name": "foo",
                "span": None,
                "type": "Identifier"
            },
            "span": None,
            "type": "Message"
        }

        message = self.parser.parse_entry(dedent_ftl(input))
        self.assertEqual(message.to_json(), output)

    def test_return_junk(self):
        input = """\
            # Attached Comment
            junk
        """
        output = {
            "content": "junk\n",
            "annotations": [{
                "arguments": ["="],
                "code": "E0003",
                "message": "Expected token: \"=\"",
                "span": {
                    "end": 23,
                    "start": 23,
                    "type": "Span"
                },
                "type": "Annotation"
            }],
            "span":
            None,
            "type":
            "Junk"
        }

        message = self.parser.parse_entry(dedent_ftl(input))
        self.assertEqual(message.to_json(), output)

    def test_ignore_all_valid_comments(self):
        input = """\
            # Attached Comment
            ## Group Comment
            ### Resource Comment
            foo = Foo
        """
        output = {
            "comment": None,
            "value": {
                "elements": [{
                    "span": None,
                    "type": "TextElement",
                    "value": "Foo"
                }],
                "span": None,
                "type": "Pattern"
            },
            "attributes": [],
            "span": None,
            "type": "Message",
            "id": {
                "name": "foo",
                "span": None,
                "type": "Identifier"
            }
        }

        message = self.parser.parse_entry(dedent_ftl(input))
        self.assertEqual(message.to_json(), output)

    def test_do_not_ignore_invalid_comments(self):
        input = """\
        # Attached Comment
        ##Invalid Comment
        """
        output = {
            "content": "##Invalid Comment\n",
            "annotations": [{
                "arguments": [" "],
                "code": "E0003",
                "message": "Expected token: \" \"",
                "span": {
                    "end": 21,
                    "start": 21,
                    "type": "Span"
                },
                "type": "Annotation"
            }],
            "span":
            None,
            "type":
            "Junk"
        }

        message = self.parser.parse_entry(dedent_ftl(input))
        self.assertEqual(message.to_json(), output)
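
These tests rely on a dedent_ftl helper that is not defined in this excerpt. A minimal sketch of what it plausibly does, based on textwrap (an assumption, not the original definition):

import textwrap

def dedent_ftl(text):
    # Strip the common leading indentation from the triple-quoted FTL
    # snippets and make sure the result ends with a single newline.
    return textwrap.dedent(text.rstrip() + '\n')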
Example #23

    def setUp(self):
        self.parser = FluentParser()
Example #24
        ru_file_full_path = en_analog_file_path.replace('en-US', 'ru-RU')

        if os.path.isfile(ru_file_full_path):
            # The Russian file already exists; nothing to create.
            return

        # Seed the new Russian file with the English content.
        en_file = FluentFile(en_analog_file_path)
        ru_file = FluentFile(ru_file_full_path)
        ru_file.save_data(en_file.read_data())
        logging.info(f'Created Russian locale file {ru_file_full_path}')

        return ru_file_full_path
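
FluentFile is used above but not defined in this excerpt. A speculative sketch of the interface it appears to expose (read_data parses the FTL file into an AST resource, save_data serializes one back to disk), assuming os, FluentParser and FluentSerializer are in scope; this is an assumption, not the project's actual implementation:

class FluentFile:
    def __init__(self, full_path):
        self.full_path = full_path
        self.parser = FluentParser()
        self.serializer = FluentSerializer()

    def read_data(self):
        # Parse the on-disk FTL file into an AST resource (assumed).
        with open(self.full_path, 'r', encoding='utf-8') as f:
            return self.parser.parse(f.read())

    def save_data(self, resource):
        # Serialize the resource and write it out, creating parent
        # directories as needed (assumed behavior).
        os.makedirs(os.path.dirname(self.full_path), exist_ok=True)
        with open(self.full_path, 'w', encoding='utf-8') as f:
            f.write(self.serializer.serialize(resource))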


######################################## Var definitions ###############################################################

logging.basicConfig(level=logging.INFO)
project = Project()
serializer = FluentSerializer()
parser = FluentParser()
formatter = FluentFormatter()

yaml_files_paths = project.get_files_paths_by_dir(project.prototypes_dir_path,
                                                  'yml')
yaml_files = [YAMLFile(yaml_file_path) for yaml_file_path in yaml_files_paths]

########################################################################################################################

logging.info('Searching for YAML files ...')
YAMLExtractor(yaml_files).execute()