Bug 1666232 - Vendor fluent.syntax 0.18.1, fluent.migrate 0.10, compare-locales 8.1.0. r=flod
Differential Revision: https://phabricator.services.mozilla.com/D90851
Parent: 34c6a27d2f
Commit: 9061bd6128
25 changed files with 242 additions and 153 deletions
third_party/python/compare-locales/PKG-INFO | 2

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: compare-locales
-Version: 8.0.0
+Version: 8.1.0
 Summary: Lint Mozilla localizations
 Home-page: UNKNOWN
 Author: Axel Hecht
@@ -1 +1 @@
-version = "8.0.0"
+version = "8.1.0"
@@ -9,6 +9,7 @@ from collections import defaultdict
 from fluent.syntax import ast as ftl
 from fluent.syntax.serializer import serialize_variant_key
+from fluent.syntax.visitor import Visitor

 from .base import Checker, CSSCheckMixin
 from compare_locales import plurals

@@ -45,7 +46,7 @@ def pattern_variants(pattern):
     return []


-class ReferenceMessageVisitor(ftl.Visitor, CSSCheckMixin):
+class ReferenceMessageVisitor(Visitor, CSSCheckMixin):
     def __init__(self):
         # References to Messages, their Attributes, and Terms
         # Store reference name and type
@@ -284,7 +285,7 @@ class L10nMessageVisitor(GenericL10nChecks, ReferenceMessageVisitor):
         )


-class TermVisitor(GenericL10nChecks, ftl.Visitor):
+class TermVisitor(GenericL10nChecks, Visitor):
     def __init__(self, locale):
         super(TermVisitor, self).__init__()
         self.locale = locale
@@ -9,6 +9,7 @@ import re
 from fluent.syntax import FluentParser as FTLParser
 from fluent.syntax import ast as ftl
 from fluent.syntax.serializer import serialize_comment
+from fluent.syntax.visitor import Visitor
 from .base import (
     CAN_SKIP,
     Entry, Entity, Comment, Junk, Whitespace,
@@ -17,7 +18,7 @@ from .base import (
 )


-class WordCounter(ftl.Visitor):
+class WordCounter(Visitor):
     def __init__(self):
         self.word_count = 0

@@ -177,6 +177,7 @@ CATEGORIES_BY_LOCALE = {
     'st': 1,
     'sv': 1,
     'sw': 1,
+    'szl': 9,
     'ta': 1,
     'ta': 1,
     'te': 1,
third_party/python/compare-locales/setup.py | 2

@@ -52,7 +52,7 @@ setup(name="compare-locales",
           'compare_locales.tests': ['data/*.properties', 'data/*.dtd']
       },
       install_requires=[
-          'fluent.syntax >=0.17.0, <0.18',
+          'fluent.syntax >=0.18.0, <0.19',
          'pytoml',
          'six',
      ],
third_party/python/fluent.migrate/PKG-INFO | 2

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fluent.migrate
-Version: 0.9
+Version: 0.10
 Summary: Toolchain to migrate legacy translation to Fluent.
 Home-page: https://hg.mozilla.org/l10n/fluent-migration/
 Author: Mozilla
@@ -14,6 +14,7 @@ from fluent.syntax.serializer import FluentSerializer
 from compare_locales.parser import getParser
 from compare_locales.plurals import get_plural

+from .evaluator import Evaluator
 from .merge import merge_resource
 from .errors import (
     UnreadableReferenceError,
@@ -52,6 +53,10 @@ class InternalContext(object):
         # transform operations.
         self.transforms = {}

+        # The evaluator instance is an AST transformer capable of walking an
+        # AST hierarchy and evaluating nodes which are migration Transforms.
+        self.evaluator = Evaluator(self)
+
     def read_ftl_resource(self, path):
         """Read an FTL resource and parse it into an AST."""
         f = codecs.open(path, 'r', 'utf8')
@@ -321,5 +326,8 @@ class InternalContext(object):
             )
         }

+    def evaluate(self, node):
+        return self.evaluator.visit(node)
+

 logging.basicConfig()
third_party/python/fluent.migrate/fluent/migrate/evaluator.py | 28 (new file)

@@ -0,0 +1,28 @@
+from fluent.syntax import ast as FTL
+from fluent.syntax.visitor import Transformer
+
+from .transforms import Transform
+
+
+class Evaluator(Transformer):
+    """An AST transformer for evaluating migration Transforms.
+
+    An AST transformer (i.e. a visitor capable of modifying the AST) which
+    walks an AST hierarchy and evaluates nodes which are migration Transforms.
+    """
+
+    def __init__(self, ctx):
+        self.ctx = ctx
+
+    def visit(self, node):
+        if not isinstance(node, FTL.BaseNode):
+            return node
+
+        if isinstance(node, Transform):
+            # Some transforms don't expect other transforms as children.
+            # Evaluate the children first.
+            transform = self.generic_visit(node)
+            # Then, evaluate this transform.
+            return transform(self.ctx)
+
+        return self.generic_visit(node)
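The new Evaluator replaces the old module-level evaluate() helper: InternalContext.evaluate(node) now simply delegates to self.evaluator.visit(node). A minimal sketch of the behaviour, assuming fluent.syntax 0.18.1 and fluent.migrate 0.10 are importable; StaticText below is a hypothetical Transform used only for illustration, standing in for real transforms such as COPY or PLURALS.

from fluent.syntax import ast as FTL
from fluent.migrate.evaluator import Evaluator
from fluent.migrate.transforms import Transform


class StaticText(Transform):
    """Hypothetical Transform that evaluates to a fixed TextElement."""
    def __call__(self, ctx):
        return FTL.TextElement("localized text")


# A Pattern whose only element is an unevaluated Transform node.
pattern = FTL.Pattern(elements=[StaticText()])

# Evaluator walks the AST and replaces every Transform node with the node it
# evaluates to; InternalContext.evaluate(node) does exactly this internally.
evaluated = Evaluator(ctx=None).visit(pattern)
assert isinstance(evaluated.elements[0], FTL.TextElement)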
@@ -12,6 +12,7 @@ from __future__ import unicode_literals
 from __future__ import absolute_import

 from fluent.syntax import FluentParser, ast as FTL
+from fluent.syntax.visitor import Transformer
 from .transforms import Transform, CONCAT, COPY, COPY_PATTERN
 from .errors import NotSupportedError, InvalidTransformError

@@ -50,7 +51,7 @@ def TERM_REFERENCE(name):
     )


-class IntoTranforms(FTL.Transformer):
+class IntoTranforms(Transformer):
    IMPLICIT_TRANSFORMS = ("CONCAT",)
    FORBIDDEN_TRANSFORMS = ("PLURALS", "REPLACE", "REPLACE_IN_TEXT")

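Most migration recipes do not subclass IntoTranforms directly; they call fluent.migrate.helpers.transforms_from, which parses an FTL-like string and converts calls such as COPY() into Transform nodes. A rough sketch, assuming fluent.migrate 0.10; the path and message name are made up.

from fluent.migrate.helpers import transforms_from

# Returns a list of FTL.Message nodes whose value patterns contain COPY
# transforms, ready to be added to a migration context.
transforms = transforms_from(
    'new-key = { COPY("browser/old.properties", "old.key") }'
)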
@@ -5,7 +5,6 @@ from __future__ import absolute_import
 import fluent.syntax.ast as FTL

 from .errors import SkipTransform
-from .transforms import evaluate
 from .util import get_message, get_transform


@@ -52,7 +51,7 @@ def merge_resource(ctx, reference, current, transforms, in_changeset):
         if transform.comment is None:
             transform.comment = entry.comment
         try:
-            return evaluate(ctx, transform)
+            return ctx.evaluate(transform)
         except SkipTransform:
             return None

@@ -66,20 +66,11 @@ from __future__ import unicode_literals
 from __future__ import absolute_import
 import re

-import fluent.syntax.ast as FTL
+from fluent.syntax import ast as FTL
+from fluent.syntax.visitor import Transformer
 from .errors import NotSupportedError


-def evaluate(ctx, node):
-    def eval_node(subnode):
-        if isinstance(subnode, Transform):
-            return subnode(ctx)
-        else:
-            return subnode
-
-    return node.traverse(eval_node)
-
-
 def chain_elements(elements):
     '''Flatten a list of FTL nodes into an iterator over PatternElements.'''
     for element in elements:
@@ -238,7 +229,7 @@ class COPY_PATTERN(FluentSource):
     pass


-class TransformPattern(FluentSource, FTL.Transformer):
+class TransformPattern(FluentSource, Transformer):
     """Base class for modifying a Fluent pattern as part of a migration.

     Implement visit_* methods of the Transformer pattern to do the
@@ -395,7 +386,7 @@ class REPLACE_IN_TEXT(Transform):
         # Order the replacements by the position of the original placeable in
         # the translation.
         replacements = (
-            (key, evaluate(ctx, self.replacements[key]))
+            (key, ctx.evaluate(self.replacements[key]))
             for index, key
             in sorted(keys_indexed.items(), key=lambda x: x[0])
         )
@@ -462,7 +453,7 @@ class PLURALS(LegacySource):

     def __call__(self, ctx):
         element = super(PLURALS, self).__call__(ctx)
-        selector = evaluate(ctx, self.selector)
+        selector = ctx.evaluate(self.selector)
         keys = ctx.plural_categories
         forms = [
             FTL.TextElement(part)
@@ -495,7 +486,7 @@ class PLURALS(LegacySource):
         # variant. We don't need to insert a SelectExpression for them.
         if len(pairs) == 1:
             _, only_form = pairs[0]
-            only_variant = evaluate(ctx, self.foreach(only_form))
+            only_variant = ctx.evaluate(self.foreach(only_form))
             return Transform.pattern_of(only_variant)

         # Make sure the default key is defined. If it's missing, use the last
@@ -509,7 +500,7 @@ class PLURALS(LegacySource):
             # Run the legacy plural form through `foreach` which returns an
             # `FTL.Node` describing the transformation required for each
             # variant. Then evaluate it to a migrated FTL node.
-            value = evaluate(ctx, self.foreach(form))
+            value = ctx.evaluate(self.foreach(form))
             return FTL.Variant(
                 key=FTL.Identifier(key),
                 value=value,
@@ -10,6 +10,7 @@ from fluent.migrate import transforms
 from fluent.migrate.errors import MigrationError
 from fluent.migrate.helpers import transforms_from
 from fluent.syntax import ast as FTL
+from fluent.syntax.visitor import Visitor
 from compare_locales import mozpath


@@ -311,7 +312,7 @@ class MigrateAnalyzer(ast.NodeVisitor):
         return True


-class TransformsInspector(FTL.Visitor):
+class TransformsInspector(Visitor):
     def __init__(self):
         super(TransformsInspector, self).__init__()
         self.issues = []
third_party/python/fluent.migrate/setup.py | 6

@@ -4,7 +4,7 @@ from setuptools import setup

 setup(
     name='fluent.migrate',
-    version='0.9',
+    version='0.10',
     description='Toolchain to migrate legacy translation to Fluent.',
     author='Mozilla',
     author_email='l10n-drivers@mozilla.org',
@@ -20,8 +20,8 @@ setup(
     ],
     packages=['fluent', 'fluent.migrate'],
     install_requires=[
-        'compare-locales >=7.6, <8.1',
-        'fluent.syntax >=0.17.0, <0.18',
+        'compare-locales >=8.1, <9.0',
+        'fluent.syntax >=0.18.0, <0.19',
         'six',
     ],
     extras_require={
third_party/python/fluent.syntax/PKG-INFO | 29

@@ -1,12 +1,34 @@
-Metadata-Version: 1.1
+Metadata-Version: 2.1
 Name: fluent.syntax
-Version: 0.17.0
+Version: 0.18.1
 Summary: Localization library for expressive translations.
 Home-page: https://github.com/projectfluent/python-fluent
 Author: Mozilla
 Author-email: l10n-drivers@mozilla.org
 License: APL 2
-Description: See https://github.com/projectfluent/python-fluent/ for more info.
+Description: ``fluent.syntax`` |fluent.syntax|
+        ---------------------------------
+
+        Read, write, and transform `Fluent`_ files.
+
+        This package includes the parser, serializer, and traversal
+        utilities like Visitor and Transformer. You’re looking for this package
+        if you work on tooling for Fluent in Python.
+
+        .. code-block:: python
+
+            >>> from fluent.syntax import parse, ast, serialize
+            >>> resource = parse("a-key = String to localize")
+            >>> resource.body[0].value.elements[0].value = "Localized string"
+            >>> serialize(resource)
+            'a-key = Localized string\n'
+
+
+        Find the full documentation on https://projectfluent.org/python-fluent/fluent.syntax/.
+
+        .. _fluent: https://projectfluent.org/
+        .. |fluent.syntax| image:: https://github.com/projectfluent/python-fluent/workflows/fluent.syntax/badge.svg
+
 Keywords: fluent,localization,l10n
 Platform: UNKNOWN
 Classifier: Development Status :: 3 - Alpha
@@ -14,3 +36,4 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3.5
+Description-Content-Type: text/x-rst
third_party/python/fluent.syntax/README.rst | 22 (new file)

@@ -0,0 +1,22 @@
+``fluent.syntax`` |fluent.syntax|
+---------------------------------
+
+Read, write, and transform `Fluent`_ files.
+
+This package includes the parser, serializer, and traversal
+utilities like Visitor and Transformer. You’re looking for this package
+if you work on tooling for Fluent in Python.
+
+.. code-block:: python
+
+    >>> from fluent.syntax import parse, ast, serialize
+    >>> resource = parse("a-key = String to localize")
+    >>> resource.body[0].value.elements[0].value = "Localized string"
+    >>> serialize(resource)
+    'a-key = Localized string\n'
+
+
+Find the full documentation on https://projectfluent.org/python-fluent/fluent.syntax/.
+
+.. _fluent: https://projectfluent.org/
+.. |fluent.syntax| image:: https://github.com/projectfluent/python-fluent/workflows/fluent.syntax/badge.svg
@@ -3,10 +3,14 @@ from .serializer import FluentSerializer


 def parse(source, **kwargs):
+    """Create an ast.Resource from a Fluent Syntax source.
+    """
     parser = FluentParser(**kwargs)
     return parser.parse(source)


 def serialize(resource, **kwargs):
+    """Serialize an ast.Resource to a unicode string.
+    """
     serializer = FluentSerializer(**kwargs)
     return serializer.serialize(resource)
@@ -6,67 +6,6 @@ import json
 import six


-class Visitor(object):
-    '''Read-only visitor pattern.
-
-    Subclass this to gather information from an AST.
-    To generally define which nodes not to descend in to, overload
-    `generic_visit`.
-    To handle specific node types, add methods like `visit_Pattern`.
-    If you want to still descend into the children of the node, call
-    `generic_visit` of the superclass.
-    '''
-    def visit(self, node):
-        if isinstance(node, list):
-            for child in node:
-                self.visit(child)
-            return
-        if not isinstance(node, BaseNode):
-            return
-        nodename = type(node).__name__
-        visit = getattr(self, 'visit_{}'.format(nodename), self.generic_visit)
-        visit(node)
-
-    def generic_visit(self, node):
-        for propname, propvalue in vars(node).items():
-            self.visit(propvalue)
-
-
-class Transformer(Visitor):
-    '''In-place AST Transformer pattern.
-
-    Subclass this to create an in-place modified variant
-    of the given AST.
-    If you need to keep the original AST around, pass
-    a `node.clone()` to the transformer.
-    '''
-    def visit(self, node):
-        if not isinstance(node, BaseNode):
-            return node
-
-        nodename = type(node).__name__
-        visit = getattr(self, 'visit_{}'.format(nodename), self.generic_visit)
-        return visit(node)
-
-    def generic_visit(self, node):
-        for propname, propvalue in vars(node).items():
-            if isinstance(propvalue, list):
-                new_vals = []
-                for child in propvalue:
-                    new_val = self.visit(child)
-                    if new_val is not None:
-                        new_vals.append(new_val)
-                # in-place manipulation
-                propvalue[:] = new_vals
-            elif isinstance(propvalue, BaseNode):
-                new_val = self.visit(propvalue)
-                if new_val is None:
-                    delattr(node, propname)
-                else:
-                    setattr(node, propname, new_val)
-        return node
-
-
 def to_json(value, fn=None):
     if isinstance(value, BaseNode):
         return value.to_json(fn)
@@ -112,33 +51,6 @@ class BaseNode(object):
     Annotation. Implements __str__, to_json and traverse.
     """

-    def traverse(self, fun):
-        """DEPRECATED. Please use Visitor or Transformer.
-
-        Postorder-traverse this node and apply `fun` to all child nodes.
-
-        Traverse this node depth-first applying `fun` to subnodes and leaves.
-        Children are processed before parents (postorder traversal).
-
-        Return a new instance of the node.
-        """
-
-        def visit(value):
-            """Call `fun` on `value` and its descendants."""
-            if isinstance(value, BaseNode):
-                return value.traverse(fun)
-            if isinstance(value, list):
-                return fun(list(map(visit, value)))
-            else:
-                return fun(value)
-
-        # Use all attributes found on the node as kwargs to the constructor.
-        kwargs = vars(self).items()
-        node = self.__class__(
-            **{name: visit(value) for name, value in kwargs})
-
-        return fun(node)
-
     def clone(self):
         """Create a deep clone of the current node."""
         def visit(value):
@@ -26,10 +26,17 @@ def with_span(fn):


 class FluentParser(object):
+    """This class is used to parse Fluent source content.
+
+    ``with_spans`` enables source information in the form of
+    :class:`.ast.Span` objects for each :class:`.ast.SyntaxNode`.
+    """
     def __init__(self, with_spans=True):
         self.with_spans = with_spans

     def parse(self, source):
+        """Create a :class:`.ast.Resource` from a Fluent source.
+        """
         ps = FluentParserStream(source)
         ps.skip_blank_block()

@@ -73,13 +80,13 @@ class FluentParser(object):
         return res

     def parse_entry(self, source):
-        """Parse the first Message or Term in source.
+        """Parse the first :class:`.ast.Entry` in source.

-        Skip all encountered comments and start parsing at the first Mesage
-        or Term start. Return Junk if the parsing is not successful.
+        Skip all encountered comments and start parsing at the first :class:`.ast.Message`
+        or :class:`.ast.Term` start. Return :class:`.ast.Junk` if the parsing is not successful.

         Preceding comments are ignored unless they contain syntax errors
-        themselves, in which case Junk for the invalid comment is returned.
+        themselves, in which case :class:`.ast.Junk` for the invalid comment is returned.
         """
         ps = FluentParserStream(source)
         ps.skip_blank_block()
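For reference, the two documented entry points behave like this (a small sketch against fluent.syntax 0.18.1; the FTL snippets are made up):

from fluent.syntax import ast
from fluent.syntax.parser import FluentParser

parser = FluentParser(with_spans=False)  # skip Span bookkeeping

# parse() returns an ast.Resource for the whole source.
resource = parser.parse("hello = Hello, world!\n-brand = Firefox\n")
print(len(resource.body))  # 2: one Message and one Term

# parse_entry() returns only the first Entry, skipping leading comments;
# unparseable input comes back as ast.Junk instead of raising.
entry = parser.parse_entry("# a comment\nhello = Hello!\n")
print(isinstance(entry, ast.Message))  # True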
@@ -2,7 +2,7 @@ from __future__ import unicode_literals
 from . import ast


-def indent(content):
+def indent_except_first_line(content):
     return "    ".join(
         content.splitlines(True)
     )
@@ -18,13 +18,32 @@ def is_select_expr(elem):
             isinstance(elem.expression, ast.SelectExpression))


+def should_start_on_new_line(pattern):
+    is_multiline = any(is_select_expr(elem) for elem in pattern.elements) \
+        or any(includes_new_line(elem) for elem in pattern.elements)
+
+    if is_multiline:
+        first_element = pattern.elements[0]
+        if isinstance(first_element, ast.TextElement):
+            first_char = first_element.value[0]
+            if first_char in ("[", ".", "*"):
+                return False
+        return True
+    return False
+
+
 class FluentSerializer(object):
+    """FluentSerializer converts :class:`.ast.SyntaxNode` objects to unicode strings.
+
+    `with_junk` controls if parse errors are written back or not.
+    """
     HAS_ENTRIES = 1

     def __init__(self, with_junk=False):
         self.with_junk = with_junk

     def serialize(self, resource):
+        "Serialize a :class:`.ast.Resource` to a string."
         if not isinstance(resource, ast.Resource):
             raise Exception('Unknown resource type: {}'.format(type(resource)))

@@ -40,6 +59,7 @@ class FluentSerializer(object):
         return "".join(parts)

     def serialize_entry(self, entry, state=0):
+        "Serialize an :class:`.ast.Entry` to a string."
         if isinstance(entry, ast.Message):
             return serialize_message(entry)
         if isinstance(entry, ast.Term):
@@ -113,19 +133,16 @@ def serialize_term(term):
 def serialize_attribute(attribute):
     return "\n    .{} ={}".format(
         attribute.id.name,
-        indent(serialize_pattern(attribute.value))
+        indent_except_first_line(serialize_pattern(attribute.value))
     )


 def serialize_pattern(pattern):
-    content = "".join([
-        serialize_element(elem)
-        for elem in pattern.elements])
-    start_on_new_line = any(
-        includes_new_line(elem) or is_select_expr(elem)
-        for elem in pattern.elements)
-    if start_on_new_line:
-        return '\n    {}'.format(indent(content))
+    content = "".join(serialize_element(elem) for elem in pattern.elements)
+    content = indent_except_first_line(content)
+
+    if should_start_on_new_line(pattern):
+        return '\n    {}'.format(content)

     return ' {}'.format(content)

@@ -187,7 +204,7 @@ def serialize_variant(variant):
     return "\n{}[{}]{}".format(
         "  *" if variant.default else "   ",
         serialize_variant_key(variant.key),
-        indent(serialize_pattern(variant.value))
+        indent_except_first_line(serialize_pattern(variant.value))
     )

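A quick round trip showing what the new should_start_on_new_line() helper controls during serialization (a sketch assuming fluent.syntax 0.18.1; the FTL snippet is made up):

from fluent.syntax import parse, serialize

source = (
    "emails =\n"
    "    { $unread ->\n"
    "        [one] One unread email\n"
    "       *[other] { $unread } unread emails\n"
    "    }\n"
)
resource = parse(source)

# Patterns that contain a select expression are serialized starting on a new
# line, with continuation lines indented by four spaces.
print(serialize(resource))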
third_party/python/fluent.syntax/fluent/syntax/visitor.py | 65 (new file)

@@ -0,0 +1,65 @@
+# coding=utf-8
+from __future__ import unicode_literals, absolute_import
+
+from .ast import BaseNode
+
+
+class Visitor(object):
+    '''Read-only visitor pattern.
+
+    Subclass this to gather information from an AST.
+    To generally define which nodes not to descend in to, overload
+    `generic_visit`.
+    To handle specific node types, add methods like `visit_Pattern`.
+    If you want to still descend into the children of the node, call
+    `generic_visit` of the superclass.
+    '''
+    def visit(self, node):
+        if isinstance(node, list):
+            for child in node:
+                self.visit(child)
+            return
+        if not isinstance(node, BaseNode):
+            return
+        nodename = type(node).__name__
+        visit = getattr(self, 'visit_{}'.format(nodename), self.generic_visit)
+        visit(node)
+
+    def generic_visit(self, node):
+        for propname, propvalue in vars(node).items():
+            self.visit(propvalue)
+
+
+class Transformer(Visitor):
+    '''In-place AST Transformer pattern.
+
+    Subclass this to create an in-place modified variant
+    of the given AST.
+    If you need to keep the original AST around, pass
+    a `node.clone()` to the transformer.
+    '''
+    def visit(self, node):
+        if not isinstance(node, BaseNode):
+            return node
+
+        nodename = type(node).__name__
+        visit = getattr(self, 'visit_{}'.format(nodename), self.generic_visit)
+        return visit(node)
+
+    def generic_visit(self, node):
+        for propname, propvalue in vars(node).items():
+            if isinstance(propvalue, list):
+                new_vals = []
+                for child in propvalue:
+                    new_val = self.visit(child)
+                    if new_val is not None:
+                        new_vals.append(new_val)
+                # in-place manipulation
+                propvalue[:] = new_vals
+            elif isinstance(propvalue, BaseNode):
+                new_val = self.visit(propvalue)
+                if new_val is None:
+                    delattr(node, propname)
+                else:
+                    setattr(node, propname, new_val)
+        return node
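A minimal subclass showing how the relocated Visitor is meant to be used (fluent.syntax 0.18.1; TextElementCounter is a hypothetical name used only for illustration):

from fluent.syntax import parse
from fluent.syntax.visitor import Visitor


class TextElementCounter(Visitor):
    """Count TextElements without modifying the AST."""
    def __init__(self):
        self.count = 0

    def visit_TextElement(self, node):
        self.count += 1
        # Keep descending into the node's children.
        self.generic_visit(node)


counter = TextElementCounter()
counter.visit(parse("key = Value\nother = More text\n"))
print(counter.count)  # 2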
third_party/python/fluent.syntax/setup.cfg | 3

@@ -1,3 +1,6 @@
+[metadata]
+version = 0.18.1
+
 [bdist_wheel]
 universal = 1

third_party/python/fluent.syntax/setup.py | 11

@@ -1,10 +1,15 @@
 #!/usr/bin/env python
 from setuptools import setup
+import os
+
+this_directory = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(this_directory, 'README.rst'), 'rb') as f:
+    long_description = f.read().decode('utf-8')

 setup(name='fluent.syntax',
-      version='0.17.0',
       description='Localization library for expressive translations.',
-      long_description='See https://github.com/projectfluent/python-fluent/ for more info.',
+      long_description=long_description,
+      long_description_content_type='text/x-rst',
       author='Mozilla',
       author_email='l10n-drivers@mozilla.org',
       license='APL 2',
@@ -18,7 +23,7 @@ setup(name='fluent.syntax',
          'Programming Language :: Python :: 3.5',
      ],
      packages=['fluent', 'fluent.syntax'],
-     # These should also be duplicated in tox.ini and ../.travis.yml
+     # These should also be duplicated in tox.ini and /.github/workflow/fluent.syntax.yml
      tests_require=['six'],
      test_suite='tests.syntax'
      )
third_party/python/requirements.in | 5

@@ -19,13 +19,14 @@
 attrs==19.1.0
 biplist==1.0.3
 blessings==1.7
-compare-locales==8.0.0
+compare-locales==8.1.0
 cookies==2.2.1
 coverage==5.1
 distro==1.4.0
 ecdsa==0.15
 esprima==4.0.1
-fluent.migrate==0.9
+fluent.migrate==0.10
+fluent.syntax==0.18.1
 glean_parser==1.28.0
 jsmin==2.1.0
 json-e==2.7.0
third_party/python/requirements.txt | 21

@@ -26,9 +26,9 @@ click==7.0 \
     --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
     --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \
     # via glean-parser, pip-tools
-compare-locales==8.0.0 \
-    --hash=sha256:077b007bd2c025284f73994970e7fada7fbdcbb4199ff010e378b305dee6d469 \
-    --hash=sha256:ee02bdad012cdc9f6c6df24d7518ba2c5084f6bac0d176b4826156accc8d48d6 \
+compare-locales==8.1.0 \
+    --hash=sha256:286270797ce64f7a2f25e734bb437870661409884a4f0971c0bb94fdad6c1f35 \
+    --hash=sha256:3d374ff959d5de2cfd5b94caf6b0fa61445f1d8ede5af384002cb3542aacad3a \
     # via -r requirements-mach-vendor-python.in, fluent.migrate
 cookies==2.2.1 \
     --hash=sha256:15bee753002dff684987b8df8c235288eb8d45f8191ae056254812dfd42c81d3 \
@@ -82,14 +82,14 @@ ecdsa==0.15 \
 esprima==4.0.1 \
     --hash=sha256:08db1a876d3c2910db9cfaeb83108193af5411fc3a3a66ebefacd390d21323ee \
     # via -r requirements-mach-vendor-python.in
-fluent.migrate==0.9 \
-    --hash=sha256:735c86816ef7b7b03b32ff9985685f2d99cb0ed135351e4760a85236538f0beb \
-    --hash=sha256:d42a001bd7292cef400e63f3d77c0c813a6a6162e7bd2dfa14eb01172d21e788 \
+fluent.migrate==0.10 \
+    --hash=sha256:532322b53c895142cf7c1702f95b54b9d3d128fb92eab38f6e8c8a80c447d8c2 \
+    --hash=sha256:ee1b4d827cff6d1df7f9b6a4b3eb78a75f1dd425e2e71b2013fd0dd411167b3e \
     # via -r requirements-mach-vendor-python.in
-fluent.syntax==0.17.0 \
-    --hash=sha256:ac3db2f77d62b032fdf1f17ef5c390b7801a9e9fb58d41eca3825c0d47b88d79 \
-    --hash=sha256:e26be470aeebe4badd84f7bb0b648414e0f2ef95d26e5336d634af99e402ea61 \
-    # via compare-locales, fluent.migrate
+fluent.syntax==0.18.1 \
+    --hash=sha256:0e63679fa4f1b3042565220a5127b4bab842424f07d6a13c12299e3b3835486a \
+    --hash=sha256:3a55f5e605d1b029a65cc8b6492c86ec4608e15447e73db1495de11fd46c104f \
+    # via -r requirements-mach-vendor-python.in, compare-locales, fluent.migrate
 glean_parser==1.28.0 \
     --hash=sha256:994c1d12b686ffe49321496497990e24609e11a8edb27d3afbe0b1f86b01fed3 \
     --hash=sha256:a8402d210fd5a50b05cc64d96a8506be972b35a47c78195f04e745756f75db61 \
@@ -275,5 +275,4 @@ zipp==3.1.0 \

 # WARNING: The following packages were not pinned, but pip requires them to be
 # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
-# pip
 # setuptools