Bug 1877105 - Update to fluent.migrate 0.13.0. r=flod,mach-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D200183
Eemeli Aro 2024-02-02 12:25:28 +00:00
parent 336371826e
commit c66f71a403
23 changed files with 588 additions and 445 deletions

fluent.migrate-0.12.0.dist-info/RECORD (deleted)

@@ -1,20 +0,0 @@
-fluent/__init__.py,sha256=jv2YF__bseklT3OWEzlqJ5qE24c4aWd5F4r0TTjOrWQ,65
-fluent/migrate/__init__.py,sha256=TLqGTEnsuW9uy9WaUKTkeA3AvhyhnCslKMx4f_zV45o,136
-fluent/migrate/_context.py,sha256=kLTbci2fgVBtAXy6sTujse6l9hhgkk62F7sddhD_jhk,12360
-fluent/migrate/blame.py,sha256=Fh645Z1kOZHQN-5fBDdDUOJUf7B3LPf5Qzw-V6tdI8k,2624
-fluent/migrate/changesets.py,sha256=aSoQ5cmoJkP7EbFwNCZ8CL6HVD2cheuOxhJMp8yyzjk,1523
-fluent/migrate/context.py,sha256=Z8AokS8xhFJEUtlq_bHAIJCTPQZfXqiBuwbMy5l8iXg,6090
-fluent/migrate/errors.py,sha256=s7JjvA2yCWogO-Ta4OV3z_Ab31-V_ha_3LGyxF46SRk,313
-fluent/migrate/evaluator.py,sha256=NhLfdlSo1zKBNDS54sa-Xz67CjNYCnAYHRsBx2Gwj2Q,859
-fluent/migrate/helpers.py,sha256=YH6TGE6vjyR7B-d6zJGS2wuz0j-P3SVA22LuplqyCSM,5072
-fluent/migrate/merge.py,sha256=h7W0N3O9VcgZpWqL8JUpNM65p3sbH7Sm4chGZXpMZV0,1854
-fluent/migrate/tool.py,sha256=g0ecdS2vLC71opcHB1k0AX1pD1Dj9xRRV9aLh8gEhmI,5599
-fluent/migrate/transforms.py,sha256=CD5dFwAA9yG1g6nezna8HVVzP8Lx516bQ4cPB2jqkVU,20968
-fluent/migrate/util.py,sha256=V_m009XtdTmPj8YxQP4BQ2949Nar7kLQZQcXXeDLPV0,2875
-fluent/migrate/validator.py,sha256=1qA1Y_lYIpVmSEG_Nt95ZmMt3FZcoTDwSvDFNRZiwyc,11148
-fluent.migrate-0.12.0.dist-info/LICENSE,sha256=yC8xgAJuBJQ0ThoBNcQnXzmBUYVh5xfk3rMDaXQ8gO4,559
-fluent.migrate-0.12.0.dist-info/METADATA,sha256=E8HaaCMrwRrqSquzRcjGmUCOnYDtFMAhRK88F-qakso,2315
-fluent.migrate-0.12.0.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
-fluent.migrate-0.12.0.dist-info/entry_points.txt,sha256=q0mh-Wn0Z8L4j7xyyQhxLDw5yxAMDvSzMgm2uWjIBK8,109
-fluent.migrate-0.12.0.dist-info/top_level.txt,sha256=E6y0EXb_8ntRq2470rEss448Ec6wP_-DI3zVECukrn0,7
-fluent.migrate-0.12.0.dist-info/RECORD,,

fluent.migrate-0.13.0.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fluent.migrate
-Version: 0.12.0
+Version: 0.13.0
 Summary: Toolchain to migrate legacy translation to Fluent.
 Home-page: https://github.com/mozilla/fluent-migrate
 Author: Mozilla
@@ -11,14 +11,15 @@ Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: POSIX
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: compare-locales (<10.0,>=9.0.1)
-Requires-Dist: fluent.syntax (<0.20,>=0.19.0)
+Requires-Dist: compare-locales <10.0,>=9.0.1
+Requires-Dist: fluent.syntax <0.20,>=0.19.0
 Provides-Extra: hg
 Requires-Dist: python-hglib ; extra == 'hg'

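The version bump and the reformatted dependency pins above can be checked against a local install with the standard importlib.metadata API; a minimal sketch (the printed values are assumptions based on the METADATA shown here):

    from importlib.metadata import requires, version

    # Should report the vendored version and the new-style requirement specs.
    print(version("fluent.migrate"))  # e.g. "0.13.0"
    for req in requires("fluent.migrate") or []:
        print(req)  # e.g. "compare-locales <10.0,>=9.0.1"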
fluent.migrate-0.13.0.dist-info/RECORD (new)

@@ -0,0 +1,21 @@
+fluent/__init__.py,sha256=ED6jHcYiuYpr_0vjGz0zx2lrrmJT9sDJCzIljoDfmlM,65
+fluent/migrate/__init__.py,sha256=N1wyurR01T2hdcUhMuvdU9W413HQyw9gi0VJP6dTlm4,129
+fluent/migrate/_context.py,sha256=2NCpsBmG7QzwF33cRcrhzavAW0DYsn5Zyvs1mTpg2YI,12890
+fluent/migrate/blame.py,sha256=r3a9Zjc2SxAKSLcLQH4YmybbDpyRaYIaC3rEabmxqF8,2493
+fluent/migrate/changesets.py,sha256=KKkNk4Ga1rq9QXH5mdb-iy6P86CbXUrUJNhMJQG1s9g,1777
+fluent/migrate/context.py,sha256=ORIO46pTDKKq1z_mpM_E-nQiKzkjOVj8_jlAWiEHYQE,6307
+fluent/migrate/errors.py,sha256=s7JjvA2yCWogO-Ta4OV3z_Ab31-V_ha_3LGyxF46SRk,313
+fluent/migrate/evaluator.py,sha256=NhLfdlSo1zKBNDS54sa-Xz67CjNYCnAYHRsBx2Gwj2Q,859
+fluent/migrate/helpers.py,sha256=8jFxbqMuMYOwGrmtdLv8p46QKh_kGEFAcyn2BNQC4Ps,5150
+fluent/migrate/merge.py,sha256=J9DL-QUoBL3n9UTObhhETq47bCYSsHcW9F_ZIomrwak,1808
+fluent/migrate/repo_client.py,sha256=hZvfD1P-ZOoM6u-aMQ4hNzBtMlcjevZERLfizjcgDWo,3541
+fluent/migrate/tool.py,sha256=hGHq4N7gVxNllVrXQiqiCktzYAiTUMKQIDovAQXCMjE,5759
+fluent/migrate/transforms.py,sha256=aCKY-fGJBv3e5rTBfLYKCo0urzHUjtHpejt0H5Vlors,20689
+fluent/migrate/util.py,sha256=7n0pjmbvyJq7GrWV1gatDj7BYP7amY1S4UfugptWxwk,2853
+fluent/migrate/validator.py,sha256=SpjTfaKvH8ZN7ZKuoJCEWIp3xXEyplzN6vF23piXSGE,11043
+fluent.migrate-0.13.0.dist-info/LICENSE,sha256=yC8xgAJuBJQ0ThoBNcQnXzmBUYVh5xfk3rMDaXQ8gO4,559
+fluent.migrate-0.13.0.dist-info/METADATA,sha256=-mqYB_hRmQqgLT9EyWfNO85wJvKaz3AvY5K1r-jcsZg,2363
+fluent.migrate-0.13.0.dist-info/WHEEL,sha256=-G_t0oGuE7UD0DrSpVZnq1hHMBV9DD2XkS5v7XpmTnk,110
+fluent.migrate-0.13.0.dist-info/entry_points.txt,sha256=q0mh-Wn0Z8L4j7xyyQhxLDw5yxAMDvSzMgm2uWjIBK8,109
+fluent.migrate-0.13.0.dist-info/top_level.txt,sha256=E6y0EXb_8ntRq2470rEss448Ec6wP_-DI3zVECukrn0,7
+fluent.migrate-0.13.0.dist-info/RECORD,,

fluent.migrate-0.13.0.dist-info/WHEEL

@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.40.0)
+Generator: bdist_wheel (0.42.0)
 Root-Is-Purelib: true
 Tag: py2-none-any
 Tag: py3-none-any

fluent/__init__.py

@@ -1 +1 @@
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)

fluent/migrate/__init__.py

@@ -1,3 +1,8 @@
 from .transforms import (  # noqa: F401
-    CONCAT, COPY, COPY_PATTERN, PLURALS, REPLACE, REPLACE_IN_TEXT
+    CONCAT,
+    COPY,
+    COPY_PATTERN,
+    PLURALS,
+    REPLACE,
+    REPLACE_IN_TEXT,
 )

fluent/migrate/_context.py

@@ -1,20 +1,23 @@
+from __future__ import annotations
+from typing import Dict, Optional, Set, Tuple, cast
+
 import os
 import codecs
 from functools import partial
 import logging
 from itertools import zip_longest
 
-from compare_locales.parser import getParser
-from compare_locales.plurals import get_plural
 import fluent.syntax.ast as FTL
 from fluent.syntax.parser import FluentParser
 from fluent.syntax.serializer import FluentSerializer
+from compare_locales.parser import getParser
+from compare_locales.plurals import get_plural
 
+from .changesets import Changes
+from .errors import UnreadableReferenceError
 from .evaluator import Evaluator
 from .merge import merge_resource
-from .errors import (
-    UnreadableReferenceError,
-)
+from .transforms import Source
 
 
 class InternalContext:
@@ -23,9 +26,11 @@ class InternalContext:
     For the public interface, see `context.MigrationContext`.
     """
 
-    def __init__(
-        self, lang, reference_dir, localization_dir, enforce_translated=False
-    ):
+    dependencies: Dict[Tuple[str, str], Set[Tuple[str, Source]]] = {}
+    localization_dir: str
+    reference_dir: str
+
+    def __init__(self, lang, enforce_translated=False):
         self.fluent_parser = FluentParser(with_spans=False)
         self.fluent_serializer = FluentSerializer()
@@ -33,11 +38,11 @@ class InternalContext:
         # language. E.g. ('one', 'other') for English.
         self.plural_categories = get_plural(lang)
         if self.plural_categories is None:
-            logger = logging.getLogger('migrate')
+            logger = logging.getLogger("migrate")
             logger.warning(
-                'Plural rule for "{}" is not defined in '
-                'compare-locales'.format(lang))
-            self.plural_categories = ('one', 'other')
+                f'Plural rule for "{lang}" is not defined in "compare-locales"'
+            )
+            self.plural_categories = ("one", "other")
 
         self.enforce_translated = enforce_translated
         # Parsed input resources stored by resource path.
@@ -53,14 +58,14 @@ class InternalContext:
         # AST hierarchy and evaluating nodes which are migration Transforms.
         self.evaluator = Evaluator(self)
 
-    def read_ftl_resource(self, path):
+    def read_ftl_resource(self, path: str):
         """Read an FTL resource and parse it into an AST."""
-        f = codecs.open(path, 'r', 'utf8')
+        f = codecs.open(path, "r", "utf8")
         try:
             contents = f.read()
         except UnicodeDecodeError as err:
-            logger = logging.getLogger('migrate')
-            logger.warning(f'Unable to read file {path}: {err}')
+            logger = logging.getLogger("migrate")
+            logger.warning(f"Unable to read file {path}: {err}")
             raise err
         finally:
             f.close()
@@ -75,24 +80,25 @@ class InternalContext:
         ]
         if len(annots):
-            logger = logging.getLogger('migrate')
+            logger = logging.getLogger("migrate")
             for annot in annots:
                 msg = annot.message
-                logger.warning(f'Syntax error in {path}: {msg}')
+                logger.warning(f"Syntax error in {path}: {msg}")
 
         return ast
 
-    def read_legacy_resource(self, path):
+    def read_legacy_resource(self, path: str):
         """Read a legacy resource and parse it into a dict."""
         parser = getParser(path)
         parser.readFile(path)
         # Transform the parsed result which is an iterator into a dict.
         return {
-            entity.key: entity.val for entity in parser
+            entity.key: entity.val
+            for entity in parser
             if entity.localized or self.enforce_translated
         }
 
-    def read_reference_ftl(self, path):
+    def read_reference_ftl(self, path: str):
         """Read and parse a reference FTL file.
 
         A missing resource file is a fatal error and will raise an
@@ -102,15 +108,15 @@ class InternalContext:
         try:
             return self.read_ftl_resource(fullpath)
         except OSError:
-            error_message = f'Missing reference file: {fullpath}'
-            logging.getLogger('migrate').error(error_message)
+            error_message = f"Missing reference file: {fullpath}"
+            logging.getLogger("migrate").error(error_message)
             raise UnreadableReferenceError(error_message)
         except UnicodeDecodeError as err:
-            error_message = f'Error reading file {fullpath}: {err}'
-            logging.getLogger('migrate').error(error_message)
+            error_message = f"Error reading file {fullpath}: {err}"
+            logging.getLogger("migrate").error(error_message)
             raise UnreadableReferenceError(error_message)
 
-    def read_localization_ftl(self, path):
+    def read_localization_ftl(self, path: str):
         """Read and parse an existing localization FTL file.
 
         Create a new FTL.Resource if the file doesn't exist or can't be
@@ -120,20 +126,22 @@ class InternalContext:
         try:
             return self.read_ftl_resource(fullpath)
         except OSError:
-            logger = logging.getLogger('migrate')
+            logger = logging.getLogger("migrate")
             logger.info(
-                'Localization file {} does not exist and '
-                'it will be created'.format(path))
+                "Localization file {} does not exist and "
+                "it will be created".format(path)
+            )
             return FTL.Resource()
         except UnicodeDecodeError:
-            logger = logging.getLogger('migrate')
+            logger = logging.getLogger("migrate")
             logger.warning(
-                'Localization file {} has broken encoding. '
-                'It will be re-created and some translations '
-                'may be lost'.format(path))
+                "Localization file {} has broken encoding. "
+                "It will be re-created and some translations "
+                "may be lost".format(path)
+            )
             return FTL.Resource()
 
-    def maybe_add_localization(self, path):
+    def maybe_add_localization(self, path: str):
         """Add a localization resource to migrate translations from.
 
         Uses a compare-locales parser to create a dict of (key, string value)
@@ -142,17 +150,17 @@ class InternalContext:
         """
         try:
             fullpath = os.path.join(self.localization_dir, path)
-            if not fullpath.endswith('.ftl'):
+            if not fullpath.endswith(".ftl"):
                 collection = self.read_legacy_resource(fullpath)
             else:
                 collection = self.read_ftl_resource(fullpath)
         except OSError:
-            logger = logging.getLogger('migrate')
-            logger.warning(f'Missing localization file: {path}')
+            logger = logging.getLogger("migrate")
+            logger.warning(f"Missing localization file: {path}")
         else:
            self.localization_resources[path] = collection
 
-    def get_legacy_source(self, path, key):
+    def get_legacy_source(self, path: str, key: str):
         """Get an entity value from a localized legacy source.
 
         Used by the `Source` transform.
@@ -160,14 +168,14 @@ class InternalContext:
         resource = self.localization_resources[path]
         return resource.get(key, None)
 
-    def get_fluent_source_pattern(self, path, key):
+    def get_fluent_source_pattern(self, path: str, key: str):
         """Get a pattern from a localized Fluent source.
 
         If the key contains a `.`, does an attribute lookup.
         Used by the `COPY_PATTERN` transform.
         """
         resource = self.localization_resources[path]
-        msg_key, _, attr_key = key.partition('.')
+        msg_key, _, attr_key = key.partition(".")
         found = None
         for entry in resource.body:
             if isinstance(entry, (FTL.Message, FTL.Term)):
@@ -190,20 +198,27 @@ class InternalContext:
        in two FTL resources.
 
        If the order or number of messages differ, the result is also False.
        """
+
        def message_id(message):
            "Return the message's identifer name for sorting purposes."
            return message.id.name
 
        messages1 = sorted(
-            (entry for entry in res1.body
-             if isinstance(entry, FTL.Message)
-             or isinstance(entry, FTL.Term)),
-            key=message_id)
+            (
+                entry
+                for entry in res1.body
+                if isinstance(entry, FTL.Message) or isinstance(entry, FTL.Term)
+            ),
+            key=message_id,
+        )
        messages2 = sorted(
-            (entry for entry in res2.body
-             if isinstance(entry, FTL.Message)
-             or isinstance(entry, FTL.Term)),
-            key=message_id)
+            (
+                entry
+                for entry in res2.body
+                if isinstance(entry, FTL.Message) or isinstance(entry, FTL.Term)
+            ),
+            key=message_id,
+        )
        for msg1, msg2 in zip_longest(messages1, messages2):
            if msg1 is None or msg2 is None:
                return False
@@ -211,7 +226,11 @@ class InternalContext:
                 return False
         return True
 
-    def merge_changeset(self, changeset=None, known_translations=None):
+    def merge_changeset(
+        self,
+        changeset: Optional[Changes] = None,
+        known_translations: Optional[Changes] = None,
+    ):
         """Return a generator of FTL ASTs for the changeset.
 
         The input data must be configured earlier using the `add_*` methods.
@@ -233,7 +252,7 @@ class InternalContext:
             changeset = {
                 (path, key)
                 for path, strings in self.localization_resources.items()
-                if not path.endswith('.ftl')
+                if not path.endswith(".ftl")
                 for key in strings.keys()
             }
@@ -244,7 +263,8 @@ class InternalContext:
             current = self.target_resources[path]
             transforms = self.transforms.get(path, [])
             in_changeset = partial(
-                self.in_changeset, changeset, known_translations, path)
+                self.in_changeset, changeset, known_translations, path
+            )
 
             # Merge legacy translations with the existing ones using the
             # reference as a template.
@@ -269,7 +289,9 @@ class InternalContext:
             # The result for this path is a complete `FTL.Resource`.
             yield path, snapshot
 
-    def in_changeset(self, changeset, known_translations, path, ident):
+    def in_changeset(
+        self, changeset: Changes, known_translations: Changes, path: str, ident
+    ) -> bool:
         """Check if a message should be migrated in this changeset.
 
         The message is identified by path and ident.
@@ -304,11 +326,13 @@ class InternalContext:
         # See https://bugzilla.mozilla.org/show_bug.cgi?id=1321271
         # We only return True if our current changeset touches
         # the transform, and we have all of the dependencies.
-        active_deps = message_deps & changeset
+        active_deps = cast(bool, message_deps & changeset)
         available_deps = message_deps & known_translations
         return active_deps and message_deps == available_deps
 
-    def serialize_changeset(self, changeset, known_translations=None):
+    def serialize_changeset(
+        self, changeset: Changes, known_translations: Optional[Changes] = None
+    ):
         """Return a dict of serialized FTLs for the changeset.
 
         Given `changeset`, return a dict whose keys are resource paths and
@@ -317,9 +341,7 @@ class InternalContext:
         return {
             path: self.fluent_serializer.serialize(snapshot)
-            for path, snapshot in self.merge_changeset(
-                changeset, known_translations
-            )
+            for path, snapshot in self.merge_changeset(changeset, known_translations)
         }
 
     def evaluate(self, node):

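The in_changeset gate above reduces to set arithmetic over (path, key) tuples; a minimal standalone sketch of that check, with hypothetical paths and keys:

    # Hypothetical data mirroring the sets used by in_changeset above.
    message_deps = {("old.dtd", "a"), ("old.dtd", "b")}
    changeset = {("old.dtd", "a")}
    known_translations = {("old.dtd", "a"), ("old.dtd", "b")}

    # Migrate only if this changeset touches the transform and every
    # dependency is already among the known translations.
    active_deps = bool(message_deps & changeset)
    available_deps = message_deps & known_translations
    print(active_deps and message_deps == available_deps)  # True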
fluent/migrate/blame.py

@@ -1,36 +1,37 @@
+from __future__ import annotations
+from typing import Dict, Iterable, Tuple, TypedDict, cast
+
 import argparse
 import json
-import os
+from os.path import join
 
-from compare_locales.parser import getParser, Junk
+from compare_locales.parser import Junk, getParser
 from compare_locales.parser.fluent import FluentEntity
-from compare_locales import mozpath
 
-import hglib
-from hglib.util import b, cmdbuilder
+from .repo_client import RepoClient
+
+BlameData = Dict[str, Dict[str, Tuple[int, float]]]
+"File path -> message key -> [userid, timestamp]"
+
+
+class BlameResult(TypedDict):
+    authors: list[str]
+    blame: BlameData
 
 
 class Blame:
-    def __init__(self, client):
+    def __init__(self, client: RepoClient):
         self.client = client
-        self.users = []
-        self.blame = {}
+        self.users: list[str] = []
+        self.blame: BlameData = {}
 
-    def attribution(self, file_paths):
-        args = cmdbuilder(
-            b('annotate'), *[b(p) for p in file_paths], template='json',
-            date=True, user=True, cwd=self.client.root())
-        blame_json = self.client.rawcommand(args)
-        file_blames = json.loads(blame_json)
-
-        for file_blame in file_blames:
-            self.handleFile(file_blame)
-
-        return {'authors': self.users,
-                'blame': self.blame}
-
-    def handleFile(self, file_blame):
-        path = mozpath.normsep(file_blame['path'])
-
+    def attribution(self, file_paths: Iterable[str]) -> BlameResult:
+        for file in file_paths:
+            blame = self.client.blame(file)
+            self.handleFile(file, blame)
+        return {"authors": self.users, "blame": self.blame}
+
+    def handleFile(self, path: str, file_blame: list[Tuple[str, int]]):
         try:
             parser = getParser(path)
         except UserWarning:
@@ -44,37 +45,33 @@
             if isinstance(e, Junk):
                 continue
             if e.val_span:
-                key_vals = [(e.key, e.val_span)]
+                key_vals: list[tuple[str, str]] = [(e.key, e.val_span)]
             else:
                 key_vals = []
             if isinstance(e, FluentEntity):
                 key_vals += [
-                    (f'{e.key}.{attr.key}', attr.val_span)
+                    (f"{e.key}.{attr.key}", cast(str, attr.val_span))
                     for attr in e.attributes
                 ]
 
             for key, (val_start, val_end) in key_vals:
-                entity_lines = file_blame['lines'][
-                    (e.ctx.linecol(val_start)[0] - 1):e.ctx.linecol(val_end)[0]
+                entity_lines = file_blame[
+                    (e.ctx.linecol(val_start)[0] - 1) : e.ctx.linecol(val_end)[0]
                 ]
-                # ignore timezone
-                entity_lines.sort(key=lambda blame: -blame['date'][0])
-                line_blame = entity_lines[0]
-                user = line_blame['user']
-                timestamp = line_blame['date'][0]
+                user, timestamp = max(entity_lines, key=lambda x: x[1])  # ignore timezone
                 if user not in self.users:
                     self.users.append(user)
                 userid = self.users.index(user)
-                self.blame[path][key] = [userid, timestamp]
+                self.blame[path][key] = (userid, timestamp)
 
-    def readFile(self, parser, path):
-        parser.readFile(os.path.join(self.client.root().decode('utf-8'), path))
+    def readFile(self, parser, path: str):
+        parser.readFile(join(self.client.root, path))
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument('repo_path')
-    parser.add_argument('file_path', nargs='+')
+    parser.add_argument("repo_path")
+    parser.add_argument("file_path", nargs="+")
     args = parser.parse_args()
-    blame = Blame(hglib.open(args.repo_path))
+    blame = Blame(RepoClient(args.repo_path))
     attrib = blame.attribution(args.file_path)
-    print(json.dumps(attrib, indent=4, separators=(',', ': ')))
+    print(json.dumps(attrib, indent=4, separators=(",", ": ")))

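For reference, the shape attribution() now returns, following the BlameData alias and BlameResult TypedDict above; the concrete values here are hypothetical:

    # Hypothetical BlameResult: `blame` maps file path -> message key ->
    # (index into `authors`, commit timestamp in seconds).
    result = {
        "authors": ["Jane Doe <jane@example.com>"],
        "blame": {"browser/app.ftl": {"menu-quit": (0, 1706875528.0)}},
    }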
fluent/migrate/changesets.py

@@ -1,12 +1,25 @@
+from __future__ import annotations
+from typing import Set, Tuple, TypedDict
+
 import time
 
+from .blame import BlameResult
+
+Changes = Set[Tuple[str, str]]
+
 
-def by_first_commit(item):
+class Changeset(TypedDict):
+    author: str
+    first_commit: float
+    changes: Changes
+
+
+def by_first_commit(item: Changeset):
     """Order two changesets by their first commit date."""
-    return item['first_commit']
+    return item["first_commit"]
 
 
-def convert_blame_to_changesets(blame_json):
+def convert_blame_to_changesets(blame_json: BlameResult) -> list[Changeset]:
     """Convert a blame dict into a list of changesets.
 
     The blame information in `blame_json` should be a dict of the following
@@ -38,19 +51,16 @@ def convert_blame_to_changesets(blame_json):
     """
     now = time.time()
-    changesets = [
-        {
-            'author': author,
-            'first_commit': now,
-            'changes': set()
-        } for author in blame_json['authors']
+    changesets: list[Changeset] = [
+        {"author": author, "first_commit": now, "changes": set()}
+        for author in blame_json["authors"]
     ]
-    for path, keys_info in blame_json['blame'].items():
+    for path, keys_info in blame_json["blame"].items():
         for key, (author_index, timestamp) in keys_info.items():
             changeset = changesets[author_index]
-            changeset['changes'].add((path, key))
-            if timestamp < changeset['first_commit']:
-                changeset['first_commit'] = timestamp
+            changeset["changes"].add((path, key))
+            if timestamp < changeset["first_commit"]:
+                changeset["first_commit"] = timestamp
     return sorted(changesets, key=by_first_commit)

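A minimal end-to-end sketch of the conversion, with hypothetical authors and timestamps (and assuming the current time is later than both):

    from fluent.migrate.changesets import convert_blame_to_changesets

    blame = {
        "authors": ["alice", "bob"],
        "blame": {
            "old.properties": {"key-a": (0, 1000.0), "key-b": (1, 2000.0)},
        },
    }
    for cs in convert_blame_to_changesets(blame):
        print(cs["author"], cs["first_commit"], cs["changes"])
    # alice 1000.0 {('old.properties', 'key-a')}
    # bob 2000.0 {('old.properties', 'key-b')}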
fluent/migrate/context.py

@@ -1,3 +1,6 @@
+from __future__ import annotations
+from typing import List, Set, Tuple, cast
+
 import logging
 
 import fluent.syntax.ast as FTL
@@ -13,9 +16,9 @@ from ._context import InternalContext
 
 __all__ = [
-    'EmptyLocalizationError',
-    'UnreadableReferenceError',
-    'MigrationContext',
+    "EmptyLocalizationError",
+    "UnreadableReferenceError",
+    "MigrationContext",
 ]
@@ -48,23 +51,31 @@ class MigrationContext(InternalContext):
     """
 
     def __init__(
-        self, locale, reference_dir, localization_dir, enforce_translated=False
+        self,
+        locale: str,
+        reference_dir: str,
+        localization_dir: str,
+        enforce_translated=False,
     ):
         super().__init__(
-            locale, reference_dir, localization_dir,
-            enforce_translated=enforce_translated
+            locale,
+            enforce_translated=enforce_translated,
         )
         self.locale = locale
         # Paths to directories with input data, relative to CWD.
         self.reference_dir = reference_dir
         self.localization_dir = localization_dir
-        # A dict whose keys are `(path, key)` tuples corresponding to target
-        # FTL translations, and values are sets of `(path, key)` tuples
-        # corresponding to localized entities which will be migrated.
         self.dependencies = {}
+        """
+        A dict whose keys are `(path, key)` tuples corresponding to target
+        FTL translations, and values are sets of `(path, key)` tuples
+        corresponding to localized entities which will be migrated.
+        """
 
-    def add_transforms(self, target, reference, transforms):
+    def add_transforms(
+        self, target: str, reference: str, transforms: List[FTL.Message | FTL.Term]
+    ):
         """Define transforms for target using reference as template.
 
         `target` is a path of the destination FTL file relative to the
@@ -82,6 +93,7 @@ class MigrationContext(InternalContext):
         For transforms that merely copy legacy messages or Fluent patterns,
         using `fluent.migrate.helpers.transforms_from` is recommended.
         """
+
         def get_sources(acc, cur):
             if isinstance(cur, Source):
                 acc.add((cur.path, cur.key))
@@ -93,18 +105,16 @@ class MigrationContext(InternalContext):
             reference_ast = self.reference_resources.get(target)
             if reference_ast is None:
                 reference_ast = FTL.Resource()
-            reference_ast.body.extend(
-                skeleton(transform) for transform in transforms
-            )
+            reference_ast.body.extend(skeleton(transform) for transform in transforms)
         else:
             reference_ast = self.read_reference_ftl(reference)
         self.reference_resources[target] = reference_ast
 
         for node in transforms:
-            ident = node.id.name
+            ident = cast(str, node.id.name)
             # Scan `node` for `Source` nodes and collect the information they
             # store into a set of dependencies.
-            dependencies = fold(get_sources, node, set())
+            dependencies = cast(Set[Tuple[str, Source]], fold(get_sources, node, set()))
             # Set these sources as dependencies for the current transform.
             self.dependencies[(target, ident)] = dependencies
@@ -114,10 +124,12 @@ class MigrationContext(InternalContext):
             if self.reference_dir is None:
                 continue
             if get_message(reference_ast.body, ident) is None:
-                logger = logging.getLogger('migrate')
+                logger = logging.getLogger("migrate")
                 logger.warning(
                     '{} "{}" was not found in {}'.format(
-                        type(node).__name__, ident, reference))
+                        type(node).__name__, ident, reference
+                    )
+                )
 
         # Keep track of localization resource paths which were defined as
         # sources in the transforms.
@@ -134,8 +146,8 @@ class MigrationContext(InternalContext):
         # However, if all legacy resources are missing, bail out early. There
         # are no translations to migrate. We'd also get errors in hg annotate.
         if len(expected_paths) > 0 and len(self.localization_resources) == 0:
-            error_message = 'No localization files were found'
-            logging.getLogger('migrate').error(error_message)
+            error_message = "No localization files were found"
+            logging.getLogger("migrate").error(error_message)
             raise EmptyLocalizationError(error_message)
 
         # Add the current transforms to any other transforms added earlier for

View file

@ -7,6 +7,8 @@ They take a string argument and immediately return a corresponding AST node.
(As opposed to Transforms which are AST nodes on their own and only return the (As opposed to Transforms which are AST nodes on their own and only return the
migrated AST nodes when they are evaluated by a MigrationContext.) """ migrated AST nodes when they are evaluated by a MigrationContext.) """
from __future__ import annotations
from typing import List
from fluent.syntax import FluentParser, ast as FTL from fluent.syntax import FluentParser, ast as FTL
from fluent.syntax.visitor import Transformer from fluent.syntax.visitor import Transformer
@ -17,9 +19,7 @@ from .errors import NotSupportedError, InvalidTransformError
def VARIABLE_REFERENCE(name): def VARIABLE_REFERENCE(name):
"""Create an ExternalArgument expression.""" """Create an ExternalArgument expression."""
return FTL.VariableReference( return FTL.VariableReference(id=FTL.Identifier(name))
id=FTL.Identifier(name)
)
def MESSAGE_REFERENCE(name): def MESSAGE_REFERENCE(name):
@ -28,8 +28,8 @@ def MESSAGE_REFERENCE(name):
If the passed name contains a `.`, we're generating If the passed name contains a `.`, we're generating
a message reference with an attribute. a message reference with an attribute.
""" """
if '.' in name: if "." in name:
name, attribute = name.split('.') name, attribute = name.split(".")
attribute = FTL.Identifier(attribute) attribute = FTL.Identifier(attribute)
else: else:
attribute = None attribute = None
@ -43,9 +43,7 @@ def MESSAGE_REFERENCE(name):
def TERM_REFERENCE(name): def TERM_REFERENCE(name):
"""Create a TermReference expression.""" """Create a TermReference expression."""
return FTL.TermReference( return FTL.TermReference(id=FTL.Identifier(name))
id=FTL.Identifier(name)
)
class IntoTranforms(Transformer): class IntoTranforms(Transformer):
@ -59,26 +57,29 @@ class IntoTranforms(Transformer):
anno = node.annotations[0] anno = node.annotations[0]
raise InvalidTransformError( raise InvalidTransformError(
"Transform contains parse error: {}, at {}".format( "Transform contains parse error: {}, at {}".format(
anno.message, anno.span.start)) anno.message, anno.span.start
)
)
def visit_FunctionReference(self, node): def visit_FunctionReference(self, node):
name = node.id.name name = node.id.name
if name in self.IMPLICIT_TRANSFORMS: if name in self.IMPLICIT_TRANSFORMS:
raise NotSupportedError( raise NotSupportedError(
"{} may not be used with transforms_from(). It runs " "{} may not be used with transforms_from(). It runs "
"implicitly on all Patterns anyways.".format(name)) "implicitly on all Patterns anyways.".format(name)
)
if name in self.FORBIDDEN_TRANSFORMS: if name in self.FORBIDDEN_TRANSFORMS:
raise NotSupportedError( raise NotSupportedError(
"{} may not be used with transforms_from(). It requires " "{} may not be used with transforms_from(). It requires "
"additional logic in Python code.".format(name)) "additional logic in Python code.".format(name)
if name in ('COPY', 'COPY_PATTERN'):
args = (
self.into_argument(arg) for arg in node.arguments.positional
) )
if name in ("COPY", "COPY_PATTERN"):
args = (self.into_argument(arg) for arg in node.arguments.positional)
kwargs = { kwargs = {
arg.name.name: self.into_argument(arg.value) arg.name.name: self.into_argument(arg.value)
for arg in node.arguments.named} for arg in node.arguments.named
if name == 'COPY': }
if name == "COPY":
return COPY(*args, **kwargs) return COPY(*args, **kwargs)
return COPY_PATTERN(*args, **kwargs) return COPY_PATTERN(*args, **kwargs)
return self.generic_visit(node) return self.generic_visit(node)
@ -117,15 +118,15 @@ class IntoTranforms(Transformer):
return self.substitutions[node.id.name] return self.substitutions[node.id.name]
except KeyError: except KeyError:
raise InvalidTransformError( raise InvalidTransformError(
"Unknown substitution in COPY: {}".format( "Unknown substitution in COPY: {}".format(node.id.name)
node.id.name)) )
else: else:
raise InvalidTransformError( raise InvalidTransformError(
"Invalid argument passed to COPY: {}".format( "Invalid argument passed to COPY: {}".format(type(node).__name__)
type(node).__name__)) )
def transforms_from(ftl, **substitutions): def transforms_from(ftl, **substitutions) -> List[FTL.Message | FTL.Term]:
"""Parse FTL code into a list of Message nodes with Transforms. """Parse FTL code into a list of Message nodes with Transforms.
The FTL may use a fabricated COPY function inside of placeables which The FTL may use a fabricated COPY function inside of placeables which

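transforms_from is typically driven from a migration recipe's migrate(ctx) function, as its docstring describes; a minimal hedged sketch, in which the file paths, message id, and the ctx variable are all hypothetical:

    from fluent.migrate.helpers import transforms_from

    # Hypothetical recipe body: COPY here is the fabricated FTL function
    # that transforms_from expands into a COPY transform.
    ctx.add_transforms(
        "browser/app.ftl",
        "browser/app.ftl",
        transforms_from(
            """
    menu-quit = { COPY("browser/old.properties", "quitLabel") }
    """
        ),
    )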
fluent/migrate/merge.py

@@ -15,11 +15,7 @@ def merge_resource(ctx, reference, current, transforms, in_changeset):
     """
 
     def merge_body(body):
-        return [
-            entry
-            for entry in map(merge_entry, body)
-            if entry is not None
-        ]
+        return [entry for entry in map(merge_entry, body) if entry is not None]
 
     def merge_entry(entry):
         # All standalone comments will be merged.

fluent/migrate/repo_client.py (new file)

@@ -0,0 +1,106 @@
+from __future__ import annotations
+from typing import Tuple
+
+import json
+from subprocess import run
+from os.path import isdir, join
+
+import hglib
+
+
+def git(root: str, *args: str) -> str:
+    """
+    Wrapper for calling command-line git in the `root` directory.
+
+    Raises an exception on any error, including a non-0 return code.
+
+    Returns the command's stdout as a string.
+    """
+    git = ["git"]
+    git.extend(args)
+    proc = run(git, capture_output=True, cwd=root, encoding="utf-8")
+    if proc.returncode != 0:
+        raise Exception(proc.stderr or f"git command failed: {args}")
+    return proc.stdout
+
+
+class RepoClient:
+    def __init__(self, root: str):
+        self.root = root
+        if isdir(join(root, ".hg")):
+            self.hgclient = hglib.open(root, "utf-8")
+        elif isdir(join(root, ".git")):
+            self.hgclient = None
+            stdout = git(self.root, "rev-parse", "--is-inside-work-tree")
+            if stdout != "true\n":
+                raise Exception("git rev-parse failed")
+        else:
+            raise Exception(f"Unsupported repository: {root}")
+
+    def close(self):
+        if self.hgclient:
+            self.hgclient.close()
+
+    def blame(self, file: str) -> list[Tuple[str, int]]:
+        "Return a list of (author, time) tuples for each line in `file`."
+        if self.hgclient:
+            args = hglib.util.cmdbuilder(
+                b"annotate",
+                file.encode("latin-1"),
+                template="json",
+                date=True,
+                user=True,
+                cwd=self.root,
+            )
+            blame_json = self.hgclient.rawcommand(args)
+            return [
+                (line["user"], int(line["date"][0]))
+                for line in json.loads(blame_json)[0]["lines"]
+            ]
+        else:
+            lines: list[Tuple[str, int]] = []
+            user = ""
+            time = 0
+            stdout = git(self.root, "blame", "--porcelain", file)
+            for line in stdout.splitlines():
+                if line.startswith("author "):
+                    user = line[7:]
+                elif line.startswith("author-mail "):
+                    user += line[11:]  # includes leading space
+                elif line.startswith("author-time "):
+                    time = int(line[12:])
+                elif line.startswith("\t"):
+                    lines.append((user, time))
+            return lines
+
+    def commit(self, message: str, author: str):
+        "Add and commit all work tree files"
+        if self.hgclient:
+            self.hgclient.commit(message, user=author.encode("utf-8"), addremove=True)
+        else:
+            git(self.root, "add", ".")
+            git(self.root, "commit", f"--author={author}", f"--message={message}")
+
+    def head(self) -> str:
+        "Identifier for the most recent commit"
+        if self.hgclient:
+            return self.hgclient.tip().node.decode("utf-8")
+        else:
+            return git(self.root, "rev-parse", "HEAD").strip()
+
+    def log(self, from_commit: str, to_commit: str) -> list[str]:
+        if self.hgclient:
+            return [
+                rev.desc.decode("utf-8")
+                for rev in self.hgclient.log(f"{to_commit} % {from_commit}")
+            ]
+        else:
+            return (
+                git(
+                    self.root,
+                    "log",
+                    "--pretty=format:%s",
+                    f"{from_commit}..{to_commit}",
+                )
+                .strip()
+                .splitlines()
+            )

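The new class abstracts over Mercurial and git checkouts behind one interface; a short usage sketch under the assumption of a hypothetical local checkout path and FTL file:

    from fluent.migrate.repo_client import RepoClient

    # Works for either an .hg or a .git work tree; raises otherwise.
    client = RepoClient("/path/to/l10n-repo")
    try:
        for user, time in client.blame("browser/app.ftl"):
            print(user, time)
        print(client.head())
    finally:
        client.close()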
fluent/migrate/tool.py

@@ -1,16 +1,19 @@
-import os
-import logging
+from __future__ import annotations
+from types import ModuleType
+from typing import Iterable, cast
 import argparse
 from contextlib import contextmanager
 import importlib
+import logging
+import os
 import sys
 
-import hglib
-
+from fluent.migrate.blame import Blame
+from fluent.migrate.changesets import Changes, convert_blame_to_changesets
 from fluent.migrate.context import MigrationContext
 from fluent.migrate.errors import MigrationError
-from fluent.migrate.changesets import convert_blame_to_changesets
-from fluent.migrate.blame import Blame
+from fluent.migrate.repo_client import RepoClient
 
 
 @contextmanager
@@ -22,7 +25,9 @@ def dont_write_bytecode():
 
 
 class Migrator:
-    def __init__(self, locale, reference_dir, localization_dir, dry_run):
+    def __init__(
+        self, locale: str, reference_dir: str, localization_dir: str, dry_run: bool
+    ):
         self.locale = locale
         self.reference_dir = reference_dir
         self.localization_dir = localization_dir
@@ -32,7 +37,7 @@ class Migrator:
     @property
     def client(self):
         if self._client is None:
-            self._client = hglib.open(self.localization_dir, 'utf-8')
+            self._client = RepoClient(self.localization_dir)
         return self._client
 
     def close(self):
@@ -40,26 +45,26 @@ class Migrator:
         if self._client is not None:
             self._client.close()
 
-    def run(self, migration):
-        print('\nRunning migration {} for {}'.format(
-            migration.__name__, self.locale))
+    def run(self, migration: ModuleType):
+        print("\nRunning migration {} for {}".format(migration.__name__, self.locale))
 
         # For each migration create a new context.
-        ctx = MigrationContext(
-            self.locale, self.reference_dir, self.localization_dir
-        )
+        ctx = MigrationContext(self.locale, self.reference_dir, self.localization_dir)
 
         try:
             # Add the migration spec.
             migration.migrate(ctx)
         except MigrationError as e:
-            print('  Skipping migration {} for {}:\n    {}'.format(
-                migration.__name__, self.locale, e))
+            print(
+                "  Skipping migration {} for {}:\n    {}".format(
+                    migration.__name__, self.locale, e
+                )
+            )
             return
 
         # Keep track of how many changesets we're committing.
         index = 0
 
-        description_template = migration.migrate.__doc__
+        description_template = cast(str, migration.migrate.__doc__)
 
         # Annotate localization files used as sources by this migration
         # to preserve attribution of translations.
@@ -70,59 +75,58 @@ class Migrator:
         for changeset in changesets:
             snapshot = self.snapshot(
-                ctx, changeset['changes'], known_legacy_translations
+                ctx, changeset["changes"], known_legacy_translations
             )
             if not snapshot:
                 continue
             self.serialize_changeset(snapshot)
             index += 1
-            self.commit_changeset(
-                description_template, changeset['author'], index
-            )
+            self.commit_changeset(description_template, changeset["author"], index)
 
-    def snapshot(self, ctx, changes_in_changeset, known_legacy_translations):
-        '''Run the migration for the changeset, with the set of
+    def snapshot(
+        self,
+        ctx: MigrationContext,
+        changes_in_changeset: Changes,
+        known_legacy_translations: Changes,
+    ):
+        """Run the migration for the changeset, with the set of
         this and all prior legacy translations.
-        '''
+        """
         known_legacy_translations.update(changes_in_changeset)
-        return ctx.serialize_changeset(
-            changes_in_changeset,
-            known_legacy_translations
-        )
+        return ctx.serialize_changeset(changes_in_changeset, known_legacy_translations)
 
     def serialize_changeset(self, snapshot):
-        '''Write serialized FTL files to disk.'''
+        """Write serialized FTL files to disk."""
        for path, content in snapshot.items():
            fullpath = os.path.join(self.localization_dir, path)
-            print(f'  Writing to {fullpath}')
+            print(f"  Writing to {fullpath}")
            if not self.dry_run:
                fulldir = os.path.dirname(fullpath)
                if not os.path.isdir(fulldir):
                    os.makedirs(fulldir)
-                with open(fullpath, 'wb') as f:
-                    f.write(content.encode('utf8'))
+                with open(fullpath, "wb") as f:
+                    f.write(content.encode("utf8"))
                    f.close()
 
-    def commit_changeset(
-        self, description_template, author, index
-    ):
-        message = description_template.format(
-            index=index,
-            author=author
-        )
+    def commit_changeset(self, description_template: str, author: str, index: int):
+        message = description_template.format(index=index, author=author)
 
-        print(f'  Committing changeset: {message}')
+        print(f"  Committing changeset: {message}")
        if self.dry_run:
            return
        try:
-            self.client.commit(
-                message, user=author.encode('utf-8'), addremove=True
-            )
-        except hglib.error.CommandError as err:
-            print(f'  WARNING: hg commit failed ({err})')
+            self.client.commit(message, author)
+        except Exception as err:
+            print(f"  WARNING: commit failed ({err})")
 
 
-def main(locale, reference_dir, localization_dir, migrations, dry_run):
+def main(
+    locale,
+    reference_dir: str,
+    localization_dir: str,
+    migrations: Iterable[ModuleType],
+    dry_run: bool,
+):
     """Run migrations and commit files with the result."""
 
     migrator = Migrator(locale, reference_dir, localization_dir, dry_run)
@@ -133,32 +137,31 @@ def main(locale, reference_dir, localization_dir, migrations, dry_run):
 
 def cli():
-    parser = argparse.ArgumentParser(
-        description='Migrate translations to FTL.'
-    )
+    parser = argparse.ArgumentParser(description="Migrate translations to FTL.")
     parser.add_argument(
-        'migrations', metavar='MIGRATION', type=str, nargs='+',
-        help='migrations to run (Python modules)'
+        "migrations",
+        metavar="MIGRATION",
+        type=str,
+        nargs="+",
+        help="migrations to run (Python modules)",
     )
     parser.add_argument(
-        '--locale', '--lang', type=str,
-        help='target locale code (--lang is deprecated)'
+        "--locale", "--lang", type=str, help="target locale code (--lang is deprecated)"
     )
     parser.add_argument(
-        '--reference-dir', type=str,
-        help='directory with reference FTL files'
+        "--reference-dir", type=str, help="directory with reference FTL files"
     )
     parser.add_argument(
-        '--localization-dir', type=str,
-        help='directory for localization files'
+        "--localization-dir", type=str, help="directory for localization files"
    )
    parser.add_argument(
-        '--dry-run', action='store_true',
-        help='do not write to disk nor commit any changes'
+        "--dry-run",
+        action="store_true",
+        help="do not write to disk nor commit any changes",
    )
    parser.set_defaults(dry_run=False)
 
-    logger = logging.getLogger('migrate')
+    logger = logging.getLogger("migrate")
    logger.setLevel(logging.INFO)
 
    args = parser.parse_args()
@@ -173,9 +176,9 @@ def cli():
         reference_dir=args.reference_dir,
         localization_dir=args.localization_dir,
         migrations=migrations,
-        dry_run=args.dry_run
+        dry_run=args.dry_run,
     )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     cli()

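The entry_points.txt listed in the RECORD above exposes cli() as a console script; a hedged sketch of an equivalent invocation from Python, in which the script name, locale, directories, and migration module are all assumptions:

    import sys
    from fluent.migrate.tool import cli

    sys.argv = [
        "migrate-l10n",  # assumed console-script name
        "--locale", "fr",
        "--reference-dir", "en-US",
        "--localization-dir", "fr",
        "bug_1234_example_migration",  # hypothetical migration module
    ]
    cli()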
View file

@ -69,7 +69,7 @@ from .errors import NotSupportedError
def chain_elements(elements): def chain_elements(elements):
'''Flatten a list of FTL nodes into an iterator over PatternElements.''' """Flatten a list of FTL nodes into an iterator over PatternElements."""
for element in elements: for element in elements:
if isinstance(element, FTL.Pattern): if isinstance(element, FTL.Pattern):
# PY3 yield from element.elements # PY3 yield from element.elements
@ -79,22 +79,20 @@ def chain_elements(elements):
elif isinstance(element, FTL.Expression): elif isinstance(element, FTL.Expression):
yield FTL.Placeable(element) yield FTL.Placeable(element)
else: else:
raise RuntimeError( raise RuntimeError("Expected Pattern, PatternElement or Expression")
'Expected Pattern, PatternElement or Expression')
re_leading_ws = re.compile( re_leading_ws = re.compile(
r'\A(?:(?P<whitespace> +)(?P<text>.*?)|(?P<block_text>\n.*?))\Z', r"\A(?:(?P<whitespace> +)(?P<text>.*?)|(?P<block_text>\n.*?))\Z",
re.S, re.S,
) )
re_trailing_ws = re.compile( re_trailing_ws = re.compile(
r'\A(?:(?P<text>.*?)(?P<whitespace> +)|(?P<block_text>.*\n))\Z', r"\A(?:(?P<text>.*?)(?P<whitespace> +)|(?P<block_text>.*\n))\Z", re.S
re.S
) )
def extract_whitespace(regex, element): def extract_whitespace(regex, element):
'''Extract leading or trailing whitespace from a TextElement. """Extract leading or trailing whitespace from a TextElement.
Return a tuple of (Placeable, TextElement) in which the Placeable Return a tuple of (Placeable, TextElement) in which the Placeable
encodes the extracted whitespace as a StringLiteral and the encodes the extracted whitespace as a StringLiteral and the
@ -102,18 +100,18 @@ def extract_whitespace(regex, element):
Placeable with the extracted whitespace is always returned first. Placeable with the extracted whitespace is always returned first.
If the element starts or ends with a newline, add an empty If the element starts or ends with a newline, add an empty
StringLiteral. StringLiteral.
''' """
match = re.search(regex, element.value) match = re.search(regex, element.value)
if match: if match:
# If white-space is None, we're a newline. Add an # If white-space is None, we're a newline. Add an
# empty { "" } # empty { "" }
whitespace = match.group('whitespace') or '' whitespace = match.group("whitespace") or ""
placeable = FTL.Placeable(FTL.StringLiteral(whitespace)) placeable = FTL.Placeable(FTL.StringLiteral(whitespace))
if whitespace == element.value: if whitespace == element.value:
return placeable, None return placeable, None
else: else:
# Either text or block_text matched the rest. # Either text or block_text matched the rest.
text = match.group('text') or match.group('block_text') text = match.group("text") or match.group("block_text")
return placeable, FTL.TextElement(text) return placeable, FTL.TextElement(text)
else: else:
return None, element return None, element
@ -137,9 +135,11 @@ class Transform(FTL.BaseNode):
for element in chain_elements(elements): for element in chain_elements(elements):
if isinstance(element, FTL.TextElement): if isinstance(element, FTL.TextElement):
text_content = element.value text_content = element.value
elif isinstance(element, FTL.Placeable) \ elif (
and isinstance(element.expression, FTL.StringLiteral) \ isinstance(element, FTL.Placeable)
and re.match(r'^ *$', element.expression.value): and isinstance(element.expression, FTL.StringLiteral)
and re.match(r"^ *$", element.expression.value)
):
text_content = element.expression.value text_content = element.expression.value
else: else:
# The element does not contain text content which should be # The element does not contain text content which should be
@ -161,7 +161,7 @@ class Transform(FTL.BaseNode):
# Store empty values explicitly as {""}. # Store empty values explicitly as {""}.
if len(normalized) == 0: if len(normalized) == 0:
empty = FTL.Placeable(FTL.StringLiteral('')) empty = FTL.Placeable(FTL.StringLiteral(""))
return FTL.Pattern([empty]) return FTL.Pattern([empty])
# Extract explicit leading whitespace into a StringLiteral. # Extract explicit leading whitespace into a StringLiteral.
@ -174,11 +174,7 @@ class Transform(FTL.BaseNode):
ws, text = extract_whitespace(re_trailing_ws, normalized[-1]) ws, text = extract_whitespace(re_trailing_ws, normalized[-1])
normalized[-1:] = [text, ws] normalized[-1:] = [text, ws]
return FTL.Pattern([ return FTL.Pattern([element for element in normalized if element is not None])
element
for element in normalized
if element is not None
])
class Source(Transform): class Source(Transform):
@ -187,6 +183,7 @@ class Source(Transform):
The contract is that the first argument is the source path, and the The contract is that the first argument is the source path, and the
second is a key representing legacy string IDs, or Fluent id.attr. second is a key representing legacy string IDs, or Fluent id.attr.
""" """
def __init__(self, path, key): def __init__(self, path, key):
self.path = path self.path = path
self.key = key self.key = key
@ -197,16 +194,16 @@ class FluentSource(Source):
When evaluated, it clones the Pattern of the parsed source. When evaluated, it clones the Pattern of the parsed source.
""" """
def __init__(self, path, key): def __init__(self, path, key):
if not path.endswith('.ftl'): if not path.endswith(".ftl"):
raise NotSupportedError( raise NotSupportedError(
'Please use COPY to migrate from legacy files ' "Please use COPY to migrate from legacy files " "({})".format(path)
'({})'.format(path)
) )
if key[0] == '-' and '.' in key: if key[0] == "-" and "." in key:
raise NotSupportedError( raise NotSupportedError(
'Cannot migrate from Term Attributes, as they are' "Cannot migrate from Term Attributes, as they are"
'locale-dependent ({})'.format(path) "locale-dependent ({})".format(path)
) )
super().__init__(path, key) super().__init__(path, key)
@ -222,6 +219,7 @@ class COPY_PATTERN(FluentSource):
Term ID. Accessing Term attributes is not supported, as they're internal Term ID. Accessing Term attributes is not supported, as they're internal
to the localization. to the localization.
""" """
pass pass
@ -231,6 +229,7 @@ class TransformPattern(FluentSource, Transformer):
Implement visit_* methods of the Transformer pattern to do the Implement visit_* methods of the Transformer pattern to do the
actual modifications. actual modifications.
""" """
def __call__(self, ctx): def __call__(self, ctx):
pattern = super().__call__(ctx) pattern = super().__call__(ctx)
return self.visit(pattern) return self.visit(pattern)
@ -275,10 +274,11 @@ class LegacySource(Source):
""" """
def __init__(self, path, key, trim=None): def __init__(self, path, key, trim=None):
if path.endswith('.ftl'): if path.endswith(".ftl"):
raise NotSupportedError( raise NotSupportedError(
'Please use COPY_PATTERN to migrate from Fluent files ' "Please use COPY_PATTERN to migrate from Fluent files "
'({})'.format(path)) "({})".format(path)
)
super().__init__(path, key) super().__init__(path, key)
self.trim = trim self.trim = trim
@ -289,11 +289,11 @@ class LegacySource(Source):
@staticmethod @staticmethod
def trim_text(text): def trim_text(text):
# strip leading white-space from each line # strip leading white-space from each line
text = re.sub('^[ \t]+', '', text, flags=re.M) text = re.sub("^[ \t]+", "", text, flags=re.M)
# strip trailing white-space from each line # strip trailing white-space from each line
text = re.sub('[ \t]+$', '', text, flags=re.M) text = re.sub("[ \t]+$", "", text, flags=re.M)
# strip leading and trailing empty lines # strip leading and trailing empty lines
text = text.strip('\r\n') text = text.strip("\r\n")
return text return text
def __call__(self, ctx): def __call__(self, ctx):
@ -312,11 +312,11 @@ class COPY(LegacySource):
PRINTF = re.compile( PRINTF = re.compile(
r'%(?P<good>%|' r"%(?P<good>%|"
r'(?:(?P<number>[1-9][0-9]*)\$)?' r"(?:(?P<number>[1-9][0-9]*)\$)?"
r'(?P<width>\*|[0-9]+)?' r"(?P<width>\*|[0-9]+)?"
r'(?P<prec>\.(?:\*|[0-9]+)?)?' r"(?P<prec>\.(?:\*|[0-9]+)?)?"
r'(?P<spec>[duxXosScpfg]))' r"(?P<spec>[duxXosScpfg]))"
) )
@ -337,13 +337,13 @@ def normalize_printf(text):
next_number = number() next_number = number()
def normalized(match): def normalized(match):
if match.group('good') == '%': if match.group("good") == "%":
return '%' return "%"
hidden = match.group('width') == '0' hidden = match.group("width") == "0"
if match.group('number'): if match.group("number"):
return '' if hidden else match.group() return "" if hidden else match.group()
num = next(next_number) num = next(next_number)
return '' if hidden else '%{}${}'.format(num, match.group('spec')) return "" if hidden else "%{}${}".format(num, match.group("spec"))
return PRINTF.sub(normalized, text) return PRINTF.sub(normalized, text)
@ -383,8 +383,7 @@ class REPLACE_IN_TEXT(Transform):
# the translation. # the translation.
replacements = ( replacements = (
(key, ctx.evaluate(self.replacements[key])) (key, ctx.evaluate(self.replacements[key]))
for index, key for index, key in sorted(keys_indexed.items(), key=lambda x: x[0])
in sorted(keys_indexed.items(), key=lambda x: x[0])
) )
# A list of PatternElements built from the legacy translation and the # A list of PatternElements built from the legacy translation and the
@ -413,16 +412,14 @@ class REPLACE(LegacySource):
replaced with FTL placeables using the `REPLACE_IN_TEXT` transform. replaced with FTL placeables using the `REPLACE_IN_TEXT` transform.
""" """
def __init__( def __init__(self, path, key, replacements, **kwargs):
self, path, key, replacements, **kwargs
):
# We default normalize_printf to False except for .properties files. # We default normalize_printf to False except for .properties files.
# We still allow the caller to override the default value. # We still allow the caller to override the default value.
normalize_printf = False normalize_printf = False
if 'normalize_printf' in kwargs: if "normalize_printf" in kwargs:
normalize_printf = kwargs['normalize_printf'] normalize_printf = kwargs["normalize_printf"]
del kwargs['normalize_printf'] del kwargs["normalize_printf"]
elif path.endswith('.properties'): elif path.endswith(".properties"):
normalize_printf = True normalize_printf = True
super().__init__(path, key, **kwargs) super().__init__(path, key, **kwargs)
@ -432,8 +429,7 @@ class REPLACE(LegacySource):
def __call__(self, ctx): def __call__(self, ctx):
element = super().__call__(ctx) element = super().__call__(ctx)
return REPLACE_IN_TEXT( return REPLACE_IN_TEXT(
element, self.replacements, element, self.replacements, normalize_printf=self.normalize_printf
normalize_printf=self.normalize_printf
)(ctx) )(ctx)
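For context, a typical REPLACE call in a migration recipe looks roughly like this sketch (hypothetical path, key, message id, and variable name; VARIABLE_REFERENCE is a helper from fluent.migrate.helpers):

import fluent.syntax.ast as FTL
from fluent.migrate.transforms import REPLACE
from fluent.migrate.helpers import VARIABLE_REFERENCE

# Turns "downloads.message = %1$S finished downloading" into
# "downloads-message = { $fileName } finished downloading".
transform = FTL.Message(
    id=FTL.Identifier("downloads-message"),
    value=REPLACE(
        "browser/chrome/browser/downloads.properties",
        "downloads.message",
        {"%1$S": VARIABLE_REFERENCE("fileName")},
    ),
)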
@ -447,10 +443,10 @@ class PLURALS(LegacySource):
return an `FTL.Node` or a `Transform`. By default, the `foreach` function return an `FTL.Node` or a `Transform`. By default, the `foreach` function
creates a valid Pattern from the TextElement passed into it. creates a valid Pattern from the TextElement passed into it.
""" """
DEFAULT_ORDER = ('zero', 'one', 'two', 'few', 'many', 'other')
def __init__(self, path, key, selector, foreach=Transform.pattern_of, DEFAULT_ORDER = ("zero", "one", "two", "few", "many", "other")
**kwargs):
def __init__(self, path, key, selector, foreach=Transform.pattern_of, **kwargs):
super().__init__(path, key, **kwargs) super().__init__(path, key, **kwargs)
self.selector = selector self.selector = selector
self.foreach = foreach self.foreach = foreach
@ -459,27 +455,18 @@ class PLURALS(LegacySource):
element = super().__call__(ctx) element = super().__call__(ctx)
selector = ctx.evaluate(self.selector) selector = ctx.evaluate(self.selector)
keys = ctx.plural_categories keys = ctx.plural_categories
forms = [ forms = [FTL.TextElement(part.strip()) for part in element.value.split(";")]
FTL.TextElement(part.strip())
for part in element.value.split(';')
]
# The default CLDR form should be the last we have in DEFAULT_ORDER, # The default CLDR form should be the last we have in DEFAULT_ORDER,
# usually `other`, but in some cases `many`. If we don't have a variant # usually `other`, but in some cases `many`. If we don't have a variant
# for that, we'll append one, using the, in CLDR order, last existing # for that, we'll append one, using the, in CLDR order, last existing
# variant in the legacy translation. That may or may not be the last # variant in the legacy translation. That may or may not be the last
# variant. # variant.
default_key = [ default_key = [key for key in reversed(self.DEFAULT_ORDER) if key in keys][0]
key for key in reversed(self.DEFAULT_ORDER) if key in keys
][0]
# Match keys to legacy forms in the order they are defined in Gecko's # Match keys to legacy forms in the order they are defined in Gecko's
# PluralForm.jsm. Filter out empty forms. # PluralForm.jsm. Filter out empty forms.
pairs = [ pairs = [(key, var) for key, var in zip(keys, forms) if var.value]
(key, var)
for key, var in zip(keys, forms)
if var.value
]
# A special case for legacy translations which don't define any # A special case for legacy translations which don't define any
# plural forms. # plural forms.
@ -506,17 +493,12 @@ class PLURALS(LegacySource):
# variant. Then evaluate it to a migrated FTL node. # variant. Then evaluate it to a migrated FTL node.
value = ctx.evaluate(self.foreach(form)) value = ctx.evaluate(self.foreach(form))
return FTL.Variant( return FTL.Variant(
key=FTL.Identifier(key), key=FTL.Identifier(key), value=value, default=key == default_key
value=value,
default=key == default_key
) )
select = FTL.SelectExpression( select = FTL.SelectExpression(
selector=selector, selector=selector,
variants=[ variants=[createVariant(key, form) for key, form in pairs],
createVariant(key, form)
for key, form in pairs
]
) )
return Transform.pattern_of(select) return Transform.pattern_of(select)
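For context, a sketch of PLURALS in use (hypothetical path, key, and variable; REPLACE_IN_TEXT as the foreach callback rewrites the legacy #1 placeholder inside each variant):

import fluent.syntax.ast as FTL
from fluent.migrate.transforms import PLURALS, REPLACE_IN_TEXT
from fluent.migrate.helpers import VARIABLE_REFERENCE

# Hypothetical legacy value: "#1 minute left;#1 minutes left", with
# plural categories ("one", "other") for the target locale.
transform = FTL.Message(
    id=FTL.Identifier("time-left"),
    value=PLURALS(
        "toolkit/chrome/mozapps/downloads/downloads.properties",
        "timeLeft.minutes",
        VARIABLE_REFERENCE("minutes"),
        foreach=lambda text: REPLACE_IN_TEXT(
            text, {"#1": VARIABLE_REFERENCE("minutes")}
        ),
    ),
)

# Migrated FTL, roughly:
#     time-left =
#         { $minutes ->
#             [one] { $minutes } minute left
#            *[other] { $minutes } minutes left
#         }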
@ -561,7 +543,7 @@ class CONCAT(Transform):
# migration specs and as elements=[]. The latter is used by # migration specs and as elements=[]. The latter is used by
# FTL.BaseNode.traverse when it recreates the traversed node using its # FTL.BaseNode.traverse when it recreates the traversed node using its
# attributes as kwargs. # attributes as kwargs.
self.elements = list(kwargs.get('elements', elements)) self.elements = list(kwargs.get("elements", elements))
# We want to make CONCAT(COPY()) equivalent to COPY() so that it's # We want to make CONCAT(COPY()) equivalent to COPY() so that it's
# always safe (no-op) to wrap transforms in a CONCAT. This is used by # always safe (no-op) to wrap transforms in a CONCAT. This is used by
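For context, CONCAT joins several transforms into one pattern; a typical use looks roughly like this (hypothetical path and keys):

import fluent.syntax.ast as FTL
from fluent.migrate.transforms import CONCAT, COPY

# Stitch two legacy strings and a literal space into one FTL value.
value = CONCAT(
    COPY("app.properties", "greeting.start"),
    FTL.TextElement(" "),
    COPY("app.properties", "greeting.end"),
)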

fluent/migrate/util.py

@ -16,7 +16,7 @@ def parse(Parser, string):
# Parse the string into the internal Context. # Parse the string into the internal Context.
parser = Parser() parser = Parser()
# compare-locales expects ASCII strings. # compare-locales expects ASCII strings.
parser.readContents(string.encode('utf8')) parser.readContents(string.encode("utf8"))
# Transform the parsed result which is an iterator into a dict. # Transform the parsed result which is an iterator into a dict.
return {ent.key: ent for ent in parser} return {ent.key: ent for ent in parser}
@ -35,10 +35,7 @@ def ftl_pattern_to_json(code):
def to_json(merged_iter): def to_json(merged_iter):
return { return {path: resource.to_json() for path, resource in merged_iter}
path: resource.to_json()
for path, resource in merged_iter
}
LOCALIZABLE_ENTRIES = (FTL.Message, FTL.Term) LOCALIZABLE_ENTRIES = (FTL.Message, FTL.Term)
@ -79,7 +76,7 @@ def ftl(code):
""" """
# The code might be triple-quoted. # The code might be triple-quoted.
code = code.lstrip('\n') code = code.lstrip("\n")
return textwrap.dedent(code) return textwrap.dedent(code)
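For context, the ftl() helper lets tests inline FTL in indented triple-quoted strings; it is equivalent to this sketch:

import textwrap

def ftl(code):
    # Strip the leading newline of a triple-quoted string, then remove
    # the common indentation, yielding valid FTL.
    code = code.lstrip("\n")
    return textwrap.dedent(code)

snippet = ftl(
    """
    key = Value
        .attr = An attribute
    """
)
# snippet == 'key = Value\n    .attr = An attribute\n'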

fluent/migrate/validator.py

@ -19,12 +19,14 @@ class BadContextAPIException(Exception):
def process_assign(node, context): def process_assign(node, context):
if isinstance(node.value, ast.Str): if isinstance(node.value, ast.Constant):
val = node.value.s val = node.value.value
elif isinstance(node.value, ast.Name): elif isinstance(node.value, ast.Name):
val = context.get(node.value.id) val = context.get(node.value.id)
elif isinstance(node.value, ast.Call): elif isinstance(node.value, ast.Call):
val = node.value val = node.value
else:
val = None
if val is None: if val is None:
return return
for target in node.targets: for target in node.targets:
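The ast.Str to ast.Constant switch tracks the standard library: ast.Str has been deprecated since Python 3.8 and is removed in Python 3.12, which the package's new classifiers now cover. The added else branch also keeps val defined when the assigned value is a node type the validator does not model. A minimal illustration:

import ast

tree = ast.parse('REFERENCE = "browser/browser/example.ftl"')
assign = tree.body[0]
# Since Python 3.8, string literals parse as ast.Constant nodes;
# ast.Str was removed entirely in Python 3.12.
assert isinstance(assign.value, ast.Constant)
assert assign.value.value == "browser/browser/example.ftl"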
@ -55,20 +57,15 @@ class Validator:
migrate_func = None migrate_func = None
global_assigns = {} global_assigns = {}
for top_level in ast.iter_child_nodes(self.ast): for top_level in ast.iter_child_nodes(self.ast):
if ( if isinstance(top_level, ast.FunctionDef) and top_level.name == "migrate":
isinstance(top_level, ast.FunctionDef)
and top_level.name == 'migrate'
):
if migrate_func: if migrate_func:
raise MigrateNotFoundException( raise MigrateNotFoundException("Duplicate definition of migrate")
'Duplicate definition of migrate'
)
migrate_func = top_level migrate_func = top_level
details = self.inspect_migrate(migrate_func, global_assigns) details = self.inspect_migrate(migrate_func, global_assigns)
if isinstance(top_level, ast.Assign): if isinstance(top_level, ast.Assign):
process_assign(top_level, global_assigns) process_assign(top_level, global_assigns)
if isinstance(top_level, (ast.Import, ast.ImportFrom)): if isinstance(top_level, (ast.Import, ast.ImportFrom)):
if 'module' in top_level._fields: if "module" in top_level._fields:
module = top_level.module module = top_level.module
else: else:
module = None module = None
@ -76,26 +73,19 @@ class Validator:
asname = alias.asname or alias.name asname = alias.asname or alias.name
dotted = alias.name dotted = alias.name
if module: if module:
dotted = f'{module}.{dotted}' dotted = f"{module}.{dotted}"
global_assigns[asname] = dotted global_assigns[asname] = dotted
if not migrate_func: if not migrate_func:
raise MigrateNotFoundException( raise MigrateNotFoundException("migrate function not found")
'migrate function not found'
)
return details return details
def inspect_migrate(self, migrate_func, global_assigns): def inspect_migrate(self, migrate_func, global_assigns):
if ( if len(migrate_func.args.args) != 1 or any(
len(migrate_func.args.args) != 1 or getattr(migrate_func.args, arg_field)
any( for arg_field in migrate_func.args._fields
getattr(migrate_func.args, arg_field) if arg_field != "args"
for arg_field in migrate_func.args._fields
if arg_field != 'args'
)
): ):
raise MigrateNotFoundException( raise MigrateNotFoundException("migrate takes only one positional argument")
'migrate takes only one positional argument'
)
arg = migrate_func.args.args[0] arg = migrate_func.args.args[0]
if isinstance(arg, ast.Name): if isinstance(arg, ast.Name):
ctx_var = arg.id # python 2 ctx_var = arg.id # python 2
@ -104,8 +94,8 @@ class Validator:
visitor = MigrateAnalyzer(ctx_var, global_assigns) visitor = MigrateAnalyzer(ctx_var, global_assigns)
visitor.visit(migrate_func) visitor.visit(migrate_func)
return { return {
'references': visitor.references, "references": visitor.references,
'issues': visitor.issues, "issues": visitor.issues,
} }
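Taken together, these checks accept a recipe shaped roughly like the following sketch (hypothetical paths and ids): exactly one migrate function with a single positional argument, which only calls add_transforms on its context:

import fluent.syntax.ast as FTL
from fluent.migrate.transforms import COPY

def migrate(ctx):
    ctx.add_transforms(
        "toolkit/toolkit/about/aboutAbout.ftl",
        "toolkit/toolkit/about/aboutAbout.ftl",
        [
            FTL.Message(
                id=FTL.Identifier("about-about-title"),
                value=COPY(
                    "toolkit/chrome/global/aboutAbout.dtd", "aboutAbout.title"
                ),
            )
        ],
    )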
@ -116,7 +106,7 @@ def full_name(node, global_assigns):
node = node.value node = node.value
if isinstance(node, ast.Name): if isinstance(node, ast.Name):
leafs.append(global_assigns.get(node.id, node.id)) leafs.append(global_assigns.get(node.id, node.id))
return '.'.join(reversed(leafs)) return ".".join(reversed(leafs))
PATH_TYPES = (str,) + (ast.Call,) PATH_TYPES = (str,) + (ast.Call,)
@ -144,11 +134,11 @@ class MigrateAnalyzer(ast.NodeVisitor):
def visit_Attribute(self, node): def visit_Attribute(self, node):
if isinstance(node.value, ast.Name) and node.value.id == self.ctx_var: if isinstance(node.value, ast.Name) and node.value.id == self.ctx_var:
if node.attr not in ( if node.attr not in (
'add_transforms', "add_transforms",
'locale', "locale",
): ):
raise BadContextAPIException( raise BadContextAPIException(
'Unexpected attribute access on {}.{}'.format( "Unexpected attribute access on {}.{}".format(
self.ctx_var, node.attr self.ctx_var, node.attr
) )
) )
@ -156,53 +146,55 @@ class MigrateAnalyzer(ast.NodeVisitor):
def visit_Call(self, node): def visit_Call(self, node):
if ( if (
isinstance(node.func, ast.Attribute) and isinstance(node.func, ast.Attribute)
isinstance(node.func.value, ast.Name) and and isinstance(node.func.value, ast.Name)
node.func.value.id == self.ctx_var and node.func.value.id == self.ctx_var
): ):
return self.call_ctx(node) return self.call_ctx(node)
dotted = full_name(node.func, self.global_assigns) dotted = full_name(node.func, self.global_assigns)
if dotted == 'fluent.migrate.helpers.transforms_from': if dotted == "fluent.migrate.helpers.transforms_from":
return self.call_helpers_transforms_from(node) return self.call_helpers_transforms_from(node)
if dotted.startswith('fluent.migrate.'): if dotted.startswith("fluent.migrate."):
return self.call_transform(node, dotted) return self.call_transform(node, dotted)
self.generic_visit(node) self.generic_visit(node)
def call_ctx(self, node): def call_ctx(self, node):
if node.func.attr == 'add_transforms': if node.func.attr == "add_transforms":
return self.call_add_transforms(node) return self.call_add_transforms(node)
raise BadContextAPIException( raise BadContextAPIException(
'Unexpected call on {}.{}'.format( "Unexpected call on {}.{}".format(self.ctx_var, node.func.attr)
self.ctx_var, node.func.attr
)
) )
def call_add_transforms(self, node): def call_add_transforms(self, node):
args_msg = ( args_msg = (
'Expected arguments to {}.add_transforms: ' "Expected arguments to {}.add_transforms: "
'target_ftl_path, reference_ftl_path, list_of_transforms' "target_ftl_path, reference_ftl_path, list_of_transforms"
).format(self.ctx_var) ).format(self.ctx_var)
ref_msg = ( ref_msg = (
'Expected second argument to {}.add_transforms: ' "Expected second argument to {}.add_transforms: "
'reference should be string or variable with string value' "reference should be string or variable with string value"
).format(self.ctx_var) ).format(self.ctx_var)
# Just check call signature here, check actual types below # Just check call signature here, check actual types below
if not self.check_arguments(node, (ast.AST, ast.AST, ast.AST)): if not self.check_arguments(node, (ast.AST, ast.AST, ast.AST)):
self.issues.append({ self.issues.append(
'msg': args_msg, {
'line': node.lineno, "msg": args_msg,
}) "line": node.lineno,
}
)
return return
in_reference = node.args[1] in_reference = node.args[1]
if isinstance(in_reference, ast.Name): if isinstance(in_reference, ast.Name):
in_reference = self.global_assigns.get(in_reference.id) in_reference = self.global_assigns.get(in_reference.id)
if isinstance(in_reference, ast.Str): if isinstance(in_reference, ast.Constant):
in_reference = in_reference.s in_reference = in_reference.value
if not isinstance(in_reference, str): if not isinstance(in_reference, str):
self.issues.append({ self.issues.append(
'msg': ref_msg, {
'line': node.args[1].lineno, "msg": ref_msg,
}) "line": node.args[1].lineno,
}
)
return return
self.references.add(in_reference) self.references.add(in_reference)
# Checked node.args[1]. # Checked node.args[1].
@ -212,93 +204,91 @@ class MigrateAnalyzer(ast.NodeVisitor):
self.generic_visit(node.args[2]) self.generic_visit(node.args[2])
def call_transform(self, node, dotted): def call_transform(self, node, dotted):
module, called = dotted.rsplit('.', 1) module, called = dotted.rsplit(".", 1)
if module not in ('fluent.migrate', 'fluent.migrate.transforms'): if module not in ("fluent.migrate", "fluent.migrate.transforms"):
return return
transform = getattr(transforms, called) transform = getattr(transforms, called)
if not issubclass(transform, transforms.Source): if not issubclass(transform, transforms.Source):
return return
bad_args = f'{called} takes path and key as first two params' bad_args = f"{called} takes path and key as first two params"
if not self.check_arguments( if not self.check_arguments(
node, ((ast.Str, ast.Name), (ast.Str, ast.Name),), node,
allow_more=True, check_kwargs=False (
(ast.Constant, ast.Name),
(ast.Constant, ast.Name),
),
allow_more=True,
check_kwargs=False,
): ):
self.issues.append({ self.issues.append({"msg": bad_args, "line": node.lineno})
'msg': bad_args,
'line': node.lineno
})
return return
path = node.args[0] path = node.args[0]
if isinstance(path, ast.Str): if isinstance(path, ast.Constant):
path = path.s path = path.value
if isinstance(path, ast.Name): if isinstance(path, ast.Name):
path = self.global_assigns.get(path.id) path = self.global_assigns.get(path.id)
if not isinstance(path, PATH_TYPES): if not isinstance(path, PATH_TYPES):
self.issues.append({ self.issues.append({"msg": bad_args, "line": node.lineno})
'msg': bad_args,
'line': node.lineno
})
def call_helpers_transforms_from(self, node): def call_helpers_transforms_from(self, node):
args_msg = ( args_msg = "Expected arguments to transforms_from: " "str, **substitions"
'Expected arguments to transforms_from: ' if not self.check_arguments(node, (ast.Constant,), check_kwargs=False):
'str, **substitions' self.issues.append(
) {
if not self.check_arguments( "msg": args_msg,
node, (ast.Str,), check_kwargs=False "line": node.lineno,
): }
self.issues.append({ )
'msg': args_msg,
'line': node.lineno,
})
return return
kwargs = {} kwargs = {}
found_bad_keywords = False found_bad_keywords = False
for keyword in node.keywords: for keyword in node.keywords:
v = keyword.value v = keyword.value
if isinstance(v, ast.Str): if isinstance(v, ast.Constant):
v = v.s v = v.value
if isinstance(v, ast.Name): if isinstance(v, ast.Name):
v = self.global_assigns.get(v.id) v = self.global_assigns.get(v.id)
if isinstance(v, ast.Call): if isinstance(v, ast.Call):
v = 'determined at runtime' v = "determined at runtime"
if not isinstance(v, PATH_TYPES): if not isinstance(v, PATH_TYPES):
msg = 'Bad keyword arg {} to transforms_from'.format( msg = "Bad keyword arg {} to transforms_from".format(keyword.arg)
keyword.arg self.issues.append(
{
"msg": msg,
"line": node.lineno,
}
) )
self.issues.append({
'msg': msg,
'line': node.lineno,
})
found_bad_keywords = True found_bad_keywords = True
else: else:
kwargs[keyword.arg] = v kwargs[keyword.arg] = v
if found_bad_keywords: if found_bad_keywords:
return return
try: try:
transforms = transforms_from(node.args[0].s, **kwargs) transforms = transforms_from(node.args[0].value, **kwargs)
except MigrationError as e: except MigrationError as e:
self.issues.append({ self.issues.append(
'msg': str(e), {
'line': node.lineno, "msg": str(e),
}) "line": node.lineno,
}
)
return return
ti = TransformsInspector() ti = TransformsInspector()
ti.visit(transforms) ti.visit(transforms)
self.issues.extend({ self.issues.extend(
'msg': issue, {
'line': node.lineno, "msg": issue,
} for issue in set(ti.issues)) "line": node.lineno,
}
for issue in set(ti.issues)
)
def check_arguments( def check_arguments(self, node, argspec, check_kwargs=True, allow_more=False):
self, node, argspec, check_kwargs=True, allow_more=False
):
if check_kwargs and ( if check_kwargs and (
node.keywords or node.keywords or (hasattr(node, "kwargs") and node.kwargs)
(hasattr(node, 'kwargs') and node.kwargs)
): ):
return False return False
if hasattr(node, 'starargs') and node.starargs: if hasattr(node, "starargs") and node.starargs:
return False return False
for arg, NODE_TYPE in zip_longest(node.args, argspec): for arg, NODE_TYPE in zip_longest(node.args, argspec):
if NODE_TYPE is None: if NODE_TYPE is None:
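For context, the transforms_from calls that call_helpers_transforms_from inspects look roughly like this in a recipe (hypothetical FTL ids, keys, and path; substitutions arrive as keyword arguments):

from fluent.migrate.helpers import transforms_from

# COPY is resolved inside the FTL source; the about_props keyword
# substitutes the quoted path wherever the name appears.
transforms = transforms_from(
    """
about-title = { COPY(about_props, "about.title") }
about-version = { COPY(about_props, "about.version") }
""",
    about_props="toolkit/chrome/global/about.properties",
)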
@ -319,17 +309,15 @@ class TransformsInspector(Visitor):
# Source needs paths to be normalized # Source needs paths to be normalized
# https://bugzilla.mozilla.org/show_bug.cgi?id=1568199 # https://bugzilla.mozilla.org/show_bug.cgi?id=1568199
if src != mozpath.normpath(src): if src != mozpath.normpath(src):
self.issues.append( self.issues.append(f'Source "{src}" needs to be a normalized path')
f'Source "{src}" needs to be a normalized path'
)
super().generic_visit(node) super().generic_visit(node)
def cli(): def cli():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('migration') parser.add_argument("migration")
args = parser.parse_args() args = parser.parse_args()
issues = Validator.validate(args.migration)['issues'] issues = Validator.validate(args.migration)["issues"]
for issue in issues: for issue in issues:
print(issue['msg'], 'at line', issue['line']) print(issue["msg"], "at line", issue["line"])
return 1 if issues else 0 return 1 if issues else 0
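The CLI is a thin wrapper over Validator.validate; the same check can be run programmatically (hypothetical recipe path):

from fluent.migrate.validator import Validator

details = Validator.validate("migrations/bug_1877105_example.py")
for issue in details["issues"]:
    print(issue["msg"], "at line", issue["line"])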

third_party/python/poetry.lock (generated, vendored)

@ -495,14 +495,14 @@ files = [
[[package]] [[package]]
name = "fluent-migrate" name = "fluent-migrate"
version = "0.12.0" version = "0.13.0"
description = "Toolchain to migrate legacy translation to Fluent." description = "Toolchain to migrate legacy translation to Fluent."
category = "main" category = "main"
optional = false optional = false
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "fluent.migrate-0.12.0-py2.py3-none-any.whl", hash = "sha256:e3564c92d1f53700e98792f1be1ff954488d431ff9f5ec290a4ab13b5de69487"}, {file = "fluent.migrate-0.13.0-py2.py3-none-any.whl", hash = "sha256:18a5c9d0c00cd50f45754a8e568d1fa57500679bdd00a98604de963cafef5a70"},
{file = "fluent.migrate-0.12.0.tar.gz", hash = "sha256:926e69e94975521a974b206e242a479310c2cbca1865ca26bf40fa3c7a357338"}, {file = "fluent.migrate-0.13.0.tar.gz", hash = "sha256:15d48d51c838167a2ace3788f34b130d40e4946f08f9f48d9495a34ac565bb1c"},
] ]
[package.dependencies] [package.dependencies]
@ -1129,6 +1129,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@ -1136,8 +1137,16 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@ -1154,6 +1163,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@ -1161,6 +1171,7 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@ -1595,4 +1606,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = "^3.8" python-versions = "^3.8"
content-hash = "65677d1751349ca40ddb3353b87d30a37a396424a743bd48e376de46c32c3687" content-hash = "22aaa5192c95ba8c8effd4a6e39b32d356a790e72e7bae067615bbfe0c4e9896"


@ -18,7 +18,7 @@ cram==0.7
distro==1.8.0 distro==1.8.0
ecdsa==0.15 ecdsa==0.15
esprima==4.0.1 esprima==4.0.1
fluent.migrate==0.12.0 fluent.migrate==0.13.0
fluent.syntax==0.19.0 fluent.syntax==0.19.0
# Pin `frozenlist` as it is required for `aiohttp`. Use minimum required version. # Pin `frozenlist` as it is required for `aiohttp`. Use minimum required version.
frozenlist==1.1.1 frozenlist==1.1.1


@ -224,9 +224,9 @@ ecdsa==0.15 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:8f12ac317f8a1318efa75757ef0a651abe12e51fc1af8838fb91079445227277 --hash=sha256:8f12ac317f8a1318efa75757ef0a651abe12e51fc1af8838fb91079445227277
esprima==4.0.1 ; python_version >= "3.8" and python_version < "4.0" \ esprima==4.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:08db1a876d3c2910db9cfaeb83108193af5411fc3a3a66ebefacd390d21323ee --hash=sha256:08db1a876d3c2910db9cfaeb83108193af5411fc3a3a66ebefacd390d21323ee
fluent-migrate==0.12.0 ; python_version >= "3.8" and python_version < "4.0" \ fluent-migrate==0.13.0 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:926e69e94975521a974b206e242a479310c2cbca1865ca26bf40fa3c7a357338 \ --hash=sha256:15d48d51c838167a2ace3788f34b130d40e4946f08f9f48d9495a34ac565bb1c \
--hash=sha256:e3564c92d1f53700e98792f1be1ff954488d431ff9f5ec290a4ab13b5de69487 --hash=sha256:18a5c9d0c00cd50f45754a8e568d1fa57500679bdd00a98604de963cafef5a70
fluent-syntax==0.19.0 ; python_version >= "3.8" and python_version < "4.0" \ fluent-syntax==0.19.0 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:920326d7f46864b9758f0044e9968e3112198bc826acee16ddd8f11d359004fd \ --hash=sha256:920326d7f46864b9758f0044e9968e3112198bc826acee16ddd8f11d359004fd \
--hash=sha256:b352b3475fac6c6ed5f06527921f432aac073d764445508ee5218aeccc7cc5c4 --hash=sha256:b352b3475fac6c6ed5f06527921f432aac073d764445508ee5218aeccc7cc5c4
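Each pin lists both the sdist and wheel hash so pip can verify whichever artifact it resolves; since every requirement in this file is hashed, an install such as "pip install --require-hashes -r requirements.txt" fails closed on any mismatch.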
@ -460,7 +460,9 @@ python-slugify==8.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395 \ --hash=sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395 \
--hash=sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27 --hash=sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27
pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \
--hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \
--hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \
--hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \ --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \
--hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \ --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \
--hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \ --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \
@ -468,7 +470,10 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \ --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \
--hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \ --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \
--hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \ --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \
--hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \
--hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \
--hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \ --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \
--hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \
--hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \ --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \
--hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \ --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \
--hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \ --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \
@ -476,11 +481,15 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \ --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \
--hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \ --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \
--hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \ --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \
--hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \
--hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \ --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \
--hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \ --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \
--hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \ --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \
--hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \
--hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
--hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
--hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
--hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
--hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
--hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
--hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
@ -493,7 +502,9 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \ --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \
--hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \ --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \
--hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \ --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \
--hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \
--hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \ --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \
--hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \
--hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \ --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \
--hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \ --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \
--hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \ --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \