taskcluster/taskgraph/transforms/chunk_partners.py

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Chunk the partner repack tasks by subpartner and locale
"""

from __future__ import absolute_import, print_function, unicode_literals

import copy

from mozbuild.chunkify import chunkify

from taskgraph.transforms.base import TransformSequence
from taskgraph.util.partners import (
    get_repack_ids_by_platform,
    apply_partner_priority,
)

transforms = TransformSequence()
transforms.add(apply_partner_priority)
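

# A job reaching this transform is handled one of four ways: copy-repack-ids
# jobs inherit the full repack_ids list from their primary dependency; the
# first task downstream of the repack is either chunked (mac signing and
# notarization kinds) or fanned out to one task per repack_id (linux and
# windows); tasks that already carry repack_ids are fanned out per repack_id;
# and tasks that already carry a single repack_id simply pass it along.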
@transforms.add
def chunk_partners(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]
        build_platform = dep_job.attributes["build_platform"]
        repack_id = dep_job.task.get("extra", {}).get("repack_id")
        repack_ids = dep_job.task.get("extra", {}).get("repack_ids")
        copy_repack_ids = job.pop("copy-repack-ids", False)
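        # copy the dependency's full repack_ids list through unchanged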
        if copy_repack_ids:
            assert repack_ids, "dep_job {} doesn't have repack_ids!".format(
                dep_job.label
            )
            job.setdefault("extra", {})["repack_ids"] = repack_ids
            yield job
        # first downstream of the repack task, no chunking or fanout has been done yet
        elif not any([repack_id, repack_ids]):
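            # all partner/sub-partner/locale combinations for this platform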
            platform_repack_ids = get_repack_ids_by_platform(config, build_platform)
            # we chunk mac signing
            if config.kind in (
                "release-partner-repack-signing",
                "release-eme-free-repack-signing",
                "release-partner-repack-notarization-part-1",
                "release-eme-free-repack-notarization-part-1",
            ):
                repacks_per_chunk = job.get("repacks-per-chunk")
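                # ceiling division: a remainder means one extra, smaller chunk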
                chunks, remainder = divmod(len(platform_repack_ids), repacks_per_chunk)
                if remainder:
                    chunks = int(chunks + 1)
                for this_chunk in range(1, chunks + 1):
                    chunk = chunkify(platform_repack_ids, this_chunk, chunks)
                    partner_job = copy.deepcopy(job)
                    partner_job.setdefault("extra", {}).setdefault("repack_ids", chunk)
                    partner_job["extra"]["repack_suffix"] = str(this_chunk)
                    yield partner_job
            # on linux and windows we fan out immediately to one task per partner-sub_partner-locale
            else:
                for repack_id in platform_repack_ids:
                    partner_job = copy.deepcopy(job)  # don't overwrite dict values here
                    partner_job.setdefault("extra", {})
                    partner_job["extra"]["repack_id"] = repack_id
                    yield partner_job
        # fan out chunked mac signing for repackage
        elif repack_ids:
            for repack_id in repack_ids:
                partner_job = copy.deepcopy(job)
                partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id)
                yield partner_job
        # otherwise we've fully fanned out already, continue by passing repack_id on
        else:
            partner_job = copy.deepcopy(job)
            partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id)
            yield partner_job