forked from mirrors/gecko-dev
Bug 1901281 - Update Taskgraph vendor to v9.0.0, a=RyanVM
Original Revision: https://phabricator.services.mozilla.com/D213425
Differential Revision: https://phabricator.services.mozilla.com/D215657
This commit is contained in: parent d3da8d137e, commit e4a6fe2194
28 changed files with 167 additions and 101 deletions
@@ -4,9 +4,9 @@
 import os
+from functools import lru_cache

 import yaml
-from taskgraph.util.memoize import memoize

 from android_taskgraph import ANDROID_COMPONENTS_DIR, FENIX_DIR, FOCUS_DIR
@@ -47,7 +47,7 @@ def get_extensions(component):
     ]


-@memoize
+@lru_cache(maxsize=None)
 def _read_build_config(root_dir):
     with open(os.path.join(root_dir, ".buildconfig.yml"), "rb") as f:
         return yaml.safe_load(f)
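This is the pattern repeated across the whole commit: the vendored `taskgraph.util.memoize.memoize` decorator is gone in Taskgraph 9.0.0, and call sites now use `functools.lru_cache(maxsize=None)` directly. A minimal sketch of the replacement and its one behavioral caveat (the caveat is a general property of `lru_cache`, not something this diff spells out):

```python
import functools

@functools.lru_cache(maxsize=None)
def expensive_lookup(key):
    # Computed once per distinct `key`; later calls return the cached value.
    print(f"computing {key}")
    return key.upper()

expensive_lookup("a")  # prints "computing a"
expensive_lookup("a")  # served from the cache, no print

# One caveat: lru_cache keys on the arguments, so they must be hashable.
try:
    expensive_lookup(["not", "hashable"])
except TypeError as e:
    print(e)  # unhashable type: 'list'
```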
@@ -6,14 +6,14 @@ import itertools
 import os
 from copy import deepcopy
 from datetime import datetime
+from functools import lru_cache

 import jsone
-from taskgraph.util.memoize import memoize
 from taskgraph.util.schema import resolve_keyed_by
 from taskgraph.util.taskcluster import get_artifact_prefix
 from taskgraph.util.yaml import load_yaml

-cached_load_yaml = memoize(load_yaml)
+cached_load_yaml = lru_cache(maxsize=None)(load_yaml)


 def generate_beetmover_upstream_artifacts(
third_party/python/poetry.lock (generated, vendored): 8 changes
@@ -1348,13 +1348,13 @@ test = ["aiofiles", "coverage", "flake8", "httmock", "httptest", "hypothesis", "

 [[package]]
 name = "taskcluster-taskgraph"
-version = "8.2.0"
+version = "9.0.0"
 description = "Build taskcluster taskgraphs"
 optional = false
 python-versions = "*"
 files = [
-    {file = "taskcluster-taskgraph-8.2.0.tar.gz", hash = "sha256:af146323402c2d5f67c65e3c232eda953da1ce319e465069e4d5c7aeb212b66e"},
-    {file = "taskcluster_taskgraph-8.2.0-py3-none-any.whl", hash = "sha256:410e9c9ef43eac1d0676f16867137de90f77eb0b4e0cbe746fe5512d1a626822"},
+    {file = "taskcluster-taskgraph-9.0.0.tar.gz", hash = "sha256:3ebc1a07c31168c19159e71507341b40fc33ae3e652c7c80d8871904855021d1"},
+    {file = "taskcluster_taskgraph-9.0.0-py3-none-any.whl", hash = "sha256:306366c70da7353cc198406a36630b28ecc77ded32f7e4ca0187913d56c40713"},
 ]

 [package.dependencies]
@@ -1594,4 +1594,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8"
-content-hash = "9d5a7fe02c75289433a471dfd831a195ff604744f787c6b2b91d4af159c98475"
+content-hash = "f372bf50cc4ec17d72e7ccb25bef84b3fe27913479e4031090e17c0926e849e0"
third_party/python/requirements.in (vendored): 2 changes
@@ -53,7 +53,7 @@ setuptools==70.0.0
 six==1.16.0
 slugid==2.0.0
 taskcluster==44.2.2
-taskcluster-taskgraph==8.2.0
+taskcluster-taskgraph==9.0.0
 taskcluster-urls==13.0.1
 toml==0.10.2
 tomlkit==0.12.3
third_party/python/requirements.txt (vendored): 6 changes
@@ -545,9 +545,9 @@ six==1.16.0 ; python_version >= "3.8" and python_version < "4.0" \
 slugid==2.0.0 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
     --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
-taskcluster-taskgraph==8.2.0 ; python_version >= "3.8" and python_version < "4.0" \
-    --hash=sha256:410e9c9ef43eac1d0676f16867137de90f77eb0b4e0cbe746fe5512d1a626822 \
-    --hash=sha256:af146323402c2d5f67c65e3c232eda953da1ce319e465069e4d5c7aeb212b66e
+taskcluster-taskgraph==9.0.0 ; python_version >= "3.8" and python_version < "4.0" \
+    --hash=sha256:306366c70da7353cc198406a36630b28ecc77ded32f7e4ca0187913d56c40713 \
+    --hash=sha256:3ebc1a07c31168c19159e71507341b40fc33ae3e652c7c80d8871904855021d1
 taskcluster-urls==13.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
     --hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: taskcluster-taskgraph
-Version: 8.2.0
+Version: 9.0.0
 Summary: Build taskcluster taskgraphs
 Home-page: https://github.com/taskcluster/taskgraph
 Classifier: Development Status :: 5 - Production/Stable
@@ -1,8 +1,8 @@
-taskgraph/__init__.py,sha256=Y7AMSO_xkN6zeyK0gagjJ7_kp0ra84C6-RPuLB6FH_A,729
+taskgraph/__init__.py,sha256=NykHPPwI6WNYfpNLs36tia-XBLq5qgICKAaZV_caRa8,729
 taskgraph/config.py,sha256=8vntWUrPwGds22mFKYAgcsD4Mr8hoONTv2ssGBcClLw,5108
 taskgraph/create.py,sha256=_zokjSM3ZaO04l2LiMhenE8qXDZVfYvueIIu5hGUhzc,5185
 taskgraph/decision.py,sha256=gIvVLfMTd6KtnrTFkmFTrky93mknB9dxtL7_aZwEtoA,13088
-taskgraph/docker.py,sha256=Tw2L4A3Mb3P4BdSkVilhSf8Ob38j15xIYYxtUXSDT9s,8415
+taskgraph/docker.py,sha256=86sCVmHWR9t9u1sEFIpzqSEVU_mQRY8BaWOxPNn23lM,8411
 taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
 taskgraph/generator.py,sha256=zrH1zfy-8akksKTSOf6e4FEsdOd5y7-h1Jne_2Jabcc,15703
 taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680
@@ -17,7 +17,7 @@ taskgraph/actions/add_new_jobs.py,sha256=c8vGWGXMr4qqW2Axz9rbBrDopabZB3gf3SVFLBZ
 taskgraph/actions/cancel.py,sha256=xrIzlB5KzcnQ4_HultoIcnlxtbQhUi7723g5K2iQoY0,1263
 taskgraph/actions/cancel_all.py,sha256=zNiHtOiSQQxLyNJYtaW0JKPazHXSgZrq1C6o8DGYxG8,1887
 taskgraph/actions/rebuild_cached_tasks.py,sha256=r1QTri2ey30TdEztUgc-nkiHdJPe8Sbn7FvKeR_kt0Y,1115
-taskgraph/actions/registry.py,sha256=hubblOhL3fbWDRtKv7_6HiD0P94hzQrpjdMkj23CGCg,13564
+taskgraph/actions/registry.py,sha256=StbcyrCjV3J_fM9jsj0QR9yVXsUY1wYHpUybgTOhvqY,13588
 taskgraph/actions/retrigger.py,sha256=MKkoZDAe0SKIq6fHqwAc1Ici_wIGRd7MxeBNhwoDEGE,9388
 taskgraph/actions/util.py,sha256=gB8MZb8juP1S7EsLHJivr6BBY2bf5IUiIpN7Mq9-kXo,10964
 taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -31,30 +31,30 @@ taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
 taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813
 taskgraph/run-task/run-task,sha256=ev64Ud2X3482B05aurUcWD93_sZS1aW2N-eVutRHF5k,45753
 taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-taskgraph/transforms/base.py,sha256=z20Yh619srbwuQJWASRtG2_j6NUbWlCujTTCHWLa0GY,5147
-taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607
+taskgraph/transforms/base.py,sha256=OYIsK4-pPFPhuK2wV7qyeZSSsPaZdJQuI-ixHf4fbeQ,5155
+taskgraph/transforms/cached_tasks.py,sha256=h6UOuxWm2-5oxIE7DJLuCGprQW3Av4j2CkSM19J5LJE,2839
 taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592
 taskgraph/transforms/code_review.py,sha256=tevRFQli3MkzW_0Zhr-hwlVti8hFaXEz94llwhBu_ns,713
 taskgraph/transforms/docker_image.py,sha256=GScS7Lld3YcS57eC30wp3DJM_ATLrmmVfZzINKgC1fM,7546
-taskgraph/transforms/fetch.py,sha256=u1M57LQOi0kHz6FFP1qah3yJh15eXYqQCF_F6r5qjh0,10662
+taskgraph/transforms/fetch.py,sha256=5G29zOgoJNTWJvnZ4JH9hXfrJqtqdxCoEPA264Qv1-0,10607
 taskgraph/transforms/from_deps.py,sha256=_cdIefdRkZYWaFJaWpsglivvG8bBGWd4beg7QgNl0Jc,8885
 taskgraph/transforms/notify.py,sha256=0sga-Ls9dhWLAsL0FBjXmVbbduee8LAZp_1pHBQR0iI,6019
-taskgraph/transforms/task.py,sha256=nRzNAxLjA6BsFktZAA9Upqb_pSFNhjoCzKm0QDxvVgM,52586
+taskgraph/transforms/task.py,sha256=lWAIGINdeikGU1U2T11w56opjZPU0LZj8tv7d8lYC6Y,52604
 taskgraph/transforms/task_context.py,sha256=9v3ke967atAYCtQxIblSFucJA1tum9Q8QpXQeTwNIzU,4278
 taskgraph/transforms/run/__init__.py,sha256=gVJ4eNquKNlygX18OtWTDnl6FFsZlA12bxfvB3kZz14,17761
 taskgraph/transforms/run/common.py,sha256=G3WdMHU5YWUfk1uR6xsxWY7MQKjU9tnqtRDmGttUqt4,5626
 taskgraph/transforms/run/index_search.py,sha256=ABIaX2FFx02o1StZgNAB_ZDXc1lTFO2aUIBH5BuUjtA,1224
 taskgraph/transforms/run/run_task.py,sha256=0GI8syzGtRDT07g_6SXG99JtxDBe09zsW5ltL-aUhYU,8403
-taskgraph/transforms/run/toolchain.py,sha256=KiuBfJ6CShwGYIIljy4i7iYSHFFXF_A_zSvRGUgYboA,6033
+taskgraph/transforms/run/toolchain.py,sha256=3dzNOA2GriihcLgoJLfBG2v1F3Mka8aJdQ2rCXC1lv4,6141
 taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-taskgraph/util/archive.py,sha256=NIqg2Su5PUqOv4JM60uFixsMsOXN26u5paB1Uh6foSI,4707
+taskgraph/util/archive.py,sha256=qfqp00RmH6Fd1cM-7uW8_u3DEZSW2BedxAodg28-RoU,4700
 taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964
 taskgraph/util/cached_tasks.py,sha256=-AqmOjrkI5PMAlAlQl1wShCrs0HA5lWLBgkxTcFstFM,4150
+taskgraph/util/copy.py,sha256=-AgF3oPF3lfiUHvAOiGkhUzS68i9kcWRsyDSiBkAVCw,1577
 taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734
-taskgraph/util/docker.py,sha256=ffQ6KloQNz_kwYemSZEkh4xUMVMeotnnwphWZth1PqQ,8112
-taskgraph/util/hash.py,sha256=U5h6WwC3zs0ooX8odc7AjgPQKKFpDXL7PemoyENPJYo,1644
+taskgraph/util/docker.py,sha256=2boH7CLXA4D7zvpp9S3atqJuKtDq4AzRvKLDCeTp6Tc,8132
+taskgraph/util/hash.py,sha256=l3l_urIfUN0u1S9xa496_p_Te1Ab4rW_gr0h0JcRzEI,1694
 taskgraph/util/keyed_by.py,sha256=EMWNRRqYB0AS7A4Y4lthYf2HB7G2ercGFf4hN9zwyaY,3348
-taskgraph/util/memoize.py,sha256=CvCGl-_qft062b3GZC4aHbPfEOPtqR9oOUEqvk9aojQ,294
 taskgraph/util/parameterization.py,sha256=DiPE-4jappGMPljDhhZI52BP7dLBGZHu5EI1cW4aRYg,3392
 taskgraph/util/path.py,sha256=e-JloOQV2-Oua_pe335bv4xWAB07vb82TKpu_zCOl0w,4466
 taskgraph/util/python_path.py,sha256=ed4F5z2mId56LauVczgxm_LGxgQi8XlxlYDgXOPZyII,1576
@@ -62,18 +62,18 @@ taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1
 taskgraph/util/schema.py,sha256=HmbbJ_i5uxZZHZSJ8sVWaD-VMhZI4ymx0STNcjO5t2M,8260
 taskgraph/util/set_name.py,sha256=cha9awo2nMQ9jfSEcbyNkZkCq_1Yg_kKJTfvDzabHSc,1134
 taskgraph/util/shell.py,sha256=nf__ly0Ikhj92AiEBCQtvyyckm8UfO_3DSgz0SU-7QA,1321
-taskgraph/util/taskcluster.py,sha256=-BlQqkxxH5S2BbZ4X2c0lNd1msU2xLM1S5rr8qrLwkE,15961
+taskgraph/util/taskcluster.py,sha256=IxtqHDTLKiRIfs-rSdCtQ4c9Pzm-jp0OOZ3EecwcSc0,16022
 taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
-taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139
+taskgraph/util/templates.py,sha256=nBYsugF7ER_oo5s8fgPcEj-xv1lt1ofED0XM2oL_6l8,2448
 taskgraph/util/time.py,sha256=XauJ0DbU0fyFvHLzJLG4ehHv9KaKixxETro89GPC1yk,3350
 taskgraph/util/treeherder.py,sha256=kc8jCy_lYduBxVMYOQzWpmI_6i2bRmkQLKq5DGmbiDI,2721
 taskgraph/util/vcs.py,sha256=FjS82fiTsoQ_ArjTCDOtDGfNdVUp_8zvVKB9SoAG3Rs,18019
-taskgraph/util/verify.py,sha256=htrNX7aXMMDzxymsFVcs0kaO5gErFHd62g9cQsZI_WE,8518
-taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498
+taskgraph/util/verify.py,sha256=RbjKw6aAux5skXf2BihCdH7qyLWAU_plTYQ4GlRLBr0,8983
+taskgraph/util/workertypes.py,sha256=jGgd0SS_6YCIYZa_pzv6fQRocIN-tITRa9J_8GYYR0E,2538
 taskgraph/util/yaml.py,sha256=-LaIf3RROuaSWckOOGN5Iviu-DHWxIChgHn9a7n6ec4,1059
-taskcluster_taskgraph-8.2.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
-taskcluster_taskgraph-8.2.0.dist-info/METADATA,sha256=minv1wMCm1M-KJtSo85Cj_tUPkQEdc3OuqHt-HT4tjE,4688
-taskcluster_taskgraph-8.2.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-taskcluster_taskgraph-8.2.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
-taskcluster_taskgraph-8.2.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
-taskcluster_taskgraph-8.2.0.dist-info/RECORD,,
+taskcluster_taskgraph-9.0.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+taskcluster_taskgraph-9.0.0.dist-info/METADATA,sha256=AHniWr500d-iCHqCQGnb4AUyPe3McIlOLFg8Mh7-Th4,4688
+taskcluster_taskgraph-9.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+taskcluster_taskgraph-9.0.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
+taskcluster_taskgraph-9.0.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
+taskcluster_taskgraph-9.0.0.dist-info/RECORD,,
@@ -2,7 +2,7 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

-__version__ = "8.2.0"
+__version__ = "9.0.0"

 # Maximum number of dependencies a single task can have
 # https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask
@@ -3,6 +3,7 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.


+import functools
 import json
 from collections import namedtuple
 from types import FunctionType
@@ -13,7 +14,6 @@ from taskgraph import create
 from taskgraph.config import load_graph_config
 from taskgraph.parameters import Parameters
 from taskgraph.util import hash, taskcluster, yaml
-from taskgraph.util.memoize import memoize
 from taskgraph.util.python_path import import_sibling_modules

 actions = []
@@ -31,13 +31,13 @@ def is_json(data):
     return True


-@memoize
+@functools.lru_cache(maxsize=None)
 def read_taskcluster_yml(filename):
-    """Load and parse .taskcluster.yml, memoized to save some time"""
+    """Load and parse .taskcluster.yml, cached to save some time"""
     return yaml.load_yaml(filename)


-@memoize
+@functools.lru_cache(maxsize=None)
 def hash_taskcluster_yml(filename):
     """
     Generate a hash of the given .taskcluster.yml. This is the first 10 digits
@@ -102,7 +102,7 @@ def build_context(name, outputFile, args=None):

     image_dir = docker.image_path(name)
     if not os.path.isdir(image_dir):
-        raise Exception("image directory does not exist: %s" % image_dir)
+        raise Exception(f"image directory does not exist: {image_dir}")

     docker.create_context_tar(".", image_dir, outputFile, args)

@@ -117,7 +117,7 @@ def build_image(name, tag, args=None):

     image_dir = docker.image_path(name)
     if not os.path.isdir(image_dir):
-        raise Exception("image directory does not exist: %s" % image_dir)
+        raise Exception(f"image directory does not exist: {image_dir}")

     tag = tag or docker.docker_image(name, by_tag=True)

@@ -3,6 +3,7 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.


+import functools
 import re
 from dataclasses import dataclass, field
 from typing import Dict, List, Union
@@ -11,7 +12,6 @@ from taskgraph.task import Task

 from ..config import GraphConfig
 from ..parameters import Parameters
-from ..util.memoize import memoize
 from ..util.schema import Schema, validate_schema


@@ -58,7 +58,7 @@ class TransformConfig:
     write_artifacts: bool

     @property
-    @memoize
+    @functools.lru_cache(maxsize=None)
     def repo_configs(self):
         repositories = self.graph_config["taskgraph"]["repositories"]
         if len(repositories) == 1:
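Stacking `@property` over `@functools.lru_cache(maxsize=None)`, as `repo_configs` does above, keeps the old memoized-property behavior: the instance is part of the cache key, so the body runs once per `TransformConfig`. A sketch, with `functools.cached_property` shown only as an aside (it is not what the diff uses):

```python
import functools

class Config:
    @property
    @functools.lru_cache(maxsize=None)
    def expensive(self):
        # `self` is the cache key, so this body runs once per instance.
        print("computing")
        return 42

    # Alternative sketch: cached_property stores the result on the instance
    # itself, so the cache entry dies with the object.
    @functools.cached_property
    def expensive2(self):
        return 42

c = Config()
c.expensive   # prints "computing"
c.expensive   # served from the cache
c.expensive2  # computed once, stored in c.__dict__
```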
@@ -77,7 +77,14 @@ def cache_task(config, tasks):
                     task["label"], p
                 )
             )
+
     digest_data = cache["digest-data"] + sorted(dependency_digests)
+
+    # Chain of trust affects task artifacts therefore it should influence
+    # cache digest.
+    if task.get("worker", {}).get("chain-of-trust"):
+        digest_data.append(str(task["worker"]["chain-of-trust"]))
+
     add_optimization(
         config,
         task,
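The new hunk folds the worker's `chain-of-trust` flag into the cache digest, so flipping the flag yields a different cache index instead of silently reusing artifacts built without chain of trust. A rough sketch of why appending to the digest data changes the resulting index (the hashing scheme here is illustrative, not the exact one `add_optimization` uses):

```python
import hashlib

def digest(digest_data):
    # Illustrative: hash the concatenated digest-data strings.
    return hashlib.sha256("\n".join(digest_data).encode()).hexdigest()

base = ["toolchain-linux64-clang", "rev abc123"]  # invented digest-data
task = {"worker": {"chain-of-trust": True}}

digest_data = list(base)
if task.get("worker", {}).get("chain-of-trust"):
    digest_data.append(str(task["worker"]["chain-of-trust"]))

# Enabling chain-of-trust now yields a different digest, hence a
# different cache index and no stale artifact reuse.
assert digest(base) != digest(digest_data)
```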
@@ -128,7 +128,7 @@ def make_task(config, tasks):
             "name": name,
             "description": task["description"],
             "expires-after": task.get("expires-after", expires),
-            "label": "fetch-%s" % name,
+            "label": f"fetch-{name}",
             "run-on-projects": [],
             "run": {
                 "using": "run-task",
@@ -265,12 +265,7 @@ def create_fetch_url_task(config, name, fetch):
         for k, v in fetch["headers"].items():
             command.extend(["-H", f"{k}:{v}"])

-    command.extend(
-        [
-            fetch["url"],
-            "/builds/worker/artifacts/%s" % artifact_name,
-        ]
-    )
+    command.extend([fetch["url"], f"/builds/worker/artifacts/{artifact_name}"])

     return {
         "command": command,
@@ -316,7 +311,7 @@ def create_git_fetch_task(config, name, fetch):
         path_prefix,
         fetch["repo"],
         fetch["revision"],
-        "/builds/worker/artifacts/%s" % artifact_name,
+        f"/builds/worker/artifacts/{artifact_name}",
     ]

     ssh_key = fetch.get("ssh-key")
@@ -14,6 +14,7 @@ from taskgraph.transforms.run.common import (
     generic_worker_add_artifacts,
     get_vcsdir_name,
 )
+from taskgraph.util import path as mozpath
 from taskgraph.util.hash import hash_paths
 from taskgraph.util.schema import Schema
 from taskgraph.util.shell import quote as shell_quote
@@ -57,7 +58,8 @@ toolchain_run_schema = Schema(
 def get_digest_data(config, run, taskdesc):
     files = list(run.pop("resources", []))
     # The script
-    files.append("taskcluster/scripts/toolchain/{}".format(run["script"]))
+    script = mozpath.join("taskcluster/scripts/toolchain/", run["script"])
+    files.append(mozpath.normpath(script))

     # Accumulate dependency hashes for index generation.
     data = [hash_paths(config.graph_config.vcs_root, files)]
@@ -126,16 +128,14 @@ def common_toolchain(config, task, taskdesc, is_docker):
         "digest-data": get_digest_data(config, run, taskdesc),
     }

-    script = run.pop("script")
+    script = mozpath.join("taskcluster/scripts/toolchain/", run.pop("script"))
     run["using"] = "run-task"
     run["cwd"] = "{checkout}/.."

     if script.endswith(".ps1"):
         run["exec-with"] = "powershell"

-    command = [f"{srcdir}/taskcluster/scripts/toolchain/{script}"] + run.pop(
-        "arguments", []
-    )
+    command = [f"{srcdir}/{mozpath.normpath(script)}"] + run.pop("arguments", [])

     if not is_docker:
         # Don't quote the first item in the command because it purposely contains
@@ -9,6 +9,7 @@ complexities of worker implementations, scopes, and treeherder annotations.
 """


+import functools
 import hashlib
 import os
 import re
@@ -23,7 +24,6 @@ from taskgraph import MAX_DEPENDENCIES
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.hash import hash_path
 from taskgraph.util.keyed_by import evaluate_keyed_by
-from taskgraph.util.memoize import memoize
 from taskgraph.util.schema import (
     OptimizationSchema,
     Schema,
@@ -43,7 +43,7 @@ RUN_TASK = os.path.join(
 )


-@memoize
+@functools.lru_cache(maxsize=None)
 def _run_task_suffix():
     """String to append to cache names under control of run-task."""
     return hash_path(RUN_TASK)[0:20]
@@ -214,14 +214,14 @@ def get_branch_rev(config):
     return config.params["head_rev"]


-@memoize
+@functools.lru_cache(maxsize=None)
 def get_default_priority(graph_config, project):
     return evaluate_keyed_by(
         graph_config["task-priority"], "Graph Config", {"project": project}
     )


-@memoize
+@functools.lru_cache(maxsize=None)
 def get_default_deadline(graph_config, project):
     return evaluate_keyed_by(
         graph_config["task-deadline-after"], "Graph Config", {"project": project}
@@ -380,10 +380,10 @@ def build_docker_worker_payload(config, task, task_def):
     for v in sorted(volumes):
         if v in worker["volumes"]:
             raise Exception(
-                "volume %s already defined; "
+                f"volume {v} already defined; "
                 "if it is defined in a Dockerfile, "
                 "it does not need to be specified in the "
-                "worker definition" % v
+                "worker definition"
             )

         worker["volumes"].append(v)
@@ -544,9 +544,7 @@ def build_docker_worker_payload(config, task, task_def):
                 suffix=suffix,
             )
         caches[name] = cache["mount-point"]
-        task_def["scopes"].append(
-            {"task-reference": "docker-worker:cache:%s" % name}
-        )
+        task_def["scopes"].append({"task-reference": f"docker-worker:cache:{name}"})

     # Assertion: only run-task is interested in this.
     if run_task:
@@ -80,14 +80,14 @@ def create_tar_from_files(fp, files):
             ti.type = tarfile.REGTYPE

         if not ti.isreg():
-            raise ValueError("not a regular file: %s" % f)
+            raise ValueError(f"not a regular file: {f}")

         # Disallow setuid and setgid bits. This is an arbitrary restriction.
         # However, since we set uid/gid to root:root, setuid and setgid
         # would be a glaring security hole if the archive were
         # uncompressed as root.
         if ti.mode & (stat.S_ISUID | stat.S_ISGID):
-            raise ValueError("cannot add file with setuid or setgid set: " "%s" % f)
+            raise ValueError(f"cannot add file with setuid or setgid set: {f}")

         # Set uid, gid, username, and group as deterministic values.
         ti.uid = 0
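The context lines around these f-string cleanups show why `create_tar_from_files` exists: archive members are normalized to root ownership, so setuid/setgid bits would be dangerous and are rejected up front. A small sketch of the same checks on a hand-built `TarInfo` (values are illustrative):

```python
import stat
import tarfile

ti = tarfile.TarInfo(name="example.txt")
ti.type = tarfile.REGTYPE
ti.mode = 0o4755  # setuid bit set, for demonstration

# The same guard as in the diff: refuse suspicious permission bits.
if ti.mode & (stat.S_ISUID | stat.S_ISGID):
    print("would raise: cannot add file with setuid or setgid set")

# Deterministic ownership, as in the context lines above.
ti.uid = 0
ti.gid = 0
ti.uname = ""
ti.gname = ""
```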
third_party/python/taskcluster_taskgraph/taskgraph/util/copy.py (vendored, new file): 47 additions
@@ -0,0 +1,47 @@
+from typing import Any
+
+from taskgraph.task import Task
+from taskgraph.util.readonlydict import ReadOnlyDict
+
+immutable_types = {int, float, bool, str, type(None), ReadOnlyDict}
+
+
+def deepcopy(obj: Any) -> Any:
+    """Perform a deep copy of an object with a tree like structure.
+
+    This is a re-implementation of Python's `copy.deepcopy` function with a few key differences:
+
+    1. Unlike the stdlib, this does *not* support copying graph-like structure,
+       which allows it to be more efficient than deepcopy on tree-like structures
+       (such as Tasks).
+    2. This special cases support for `taskgraph.task.Task` objects.
+
+    Args:
+        obj: The object to deep copy.
+
+    Returns:
+        A deep copy of the object.
+    """
+    ty = type(obj)
+    if ty in immutable_types:
+        return obj
+    if ty is dict:
+        return {k: deepcopy(v) for k, v in obj.items()}
+    if ty is list:
+        return [deepcopy(elt) for elt in obj]
+    if ty is Task:
+        task = Task(
+            kind=deepcopy(obj.kind),
+            label=deepcopy(obj.label),
+            attributes=deepcopy(obj.attributes),
+            task=deepcopy(obj.task),
+            description=deepcopy(obj.description),
+            optimization=deepcopy(obj.optimization),
+            dependencies=deepcopy(obj.dependencies),
+            soft_dependencies=deepcopy(obj.soft_dependencies),
+            if_dependencies=deepcopy(obj.if_dependencies),
+        )
+        if obj.task_id:
+            task.task_id = obj.task_id
+        return task
+    raise NotImplementedError(f"copying '{ty}' from '{obj}'")
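The new `taskgraph/util/copy.py` trades generality for speed: it handles only the types that appear in task definitions and assumes a tree with no shared references or cycles, which is what lets it beat `copy.deepcopy`. A usage sketch, assuming the vendored taskgraph 9.0.0 is importable; the tuple failure case follows from the visible type checks:

```python
from taskgraph.util.copy import deepcopy

task_def = {"label": "build-android", "tags": ["x", "y"], "retries": 5}
clone = deepcopy(task_def)
clone["tags"].append("z")
assert task_def["tags"] == ["x", "y"]  # the original is untouched

# Unsupported types fail loudly rather than silently sharing state;
# e.g. a tuple is neither in immutable_types nor special-cased.
try:
    deepcopy({"key": ("a", "b")})
except NotImplementedError as e:
    print(e)
```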
@@ -3,6 +3,7 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.


+import functools
 import hashlib
 import io
 import os
@@ -10,7 +11,6 @@ import re
 from typing import Optional

 from taskgraph.util.archive import create_tar_gz_from_files
-from taskgraph.util.memoize import memoize

 IMAGE_DIR = os.path.join(".", "taskcluster", "docker")

@@ -177,15 +177,15 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=N

             p = line[len("# %include ") :].strip()
             if os.path.isabs(p):
-                raise Exception("extra include path cannot be absolute: %s" % p)
+                raise Exception(f"extra include path cannot be absolute: {p}")

             fs_path = os.path.normpath(os.path.join(topsrcdir, p))
             # Check for filesystem traversal exploits.
             if not fs_path.startswith(topsrcdir):
-                raise Exception("extra include path outside topsrcdir: %s" % p)
+                raise Exception(f"extra include path outside topsrcdir: {p}")

             if not os.path.exists(fs_path):
-                raise Exception("extra include path does not exist: %s" % p)
+                raise Exception(f"extra include path does not exist: {p}")

             if os.path.isdir(fs_path):
                 for root, dirs, files in os.walk(fs_path):
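The logic being touched here is the `# %include` handler, which joins the include path onto `topsrcdir`, normalizes it, and rejects anything that escapes the tree. A standalone sketch of that guard (paths are illustrative; the prefix test mirrors the diff):

```python
import os

topsrcdir = "/builds/worker/checkouts/src"  # assumed location

def resolve_include(p):
    if os.path.isabs(p):
        raise Exception(f"extra include path cannot be absolute: {p}")
    fs_path = os.path.normpath(os.path.join(topsrcdir, p))
    # Same traversal guard as the diff: normpath collapses "..", then the
    # result must still live under topsrcdir.
    if not fs_path.startswith(topsrcdir):
        raise Exception(f"extra include path outside topsrcdir: {p}")
    return fs_path

print(resolve_include("taskcluster/docker/base/Dockerfile"))
try:
    resolve_include("../../etc/passwd")  # collapses to /builds/worker/etc/passwd
except Exception as e:
    print(e)
```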
@@ -205,7 +205,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=N
     return writer.hexdigest()


-@memoize
+@functools.lru_cache(maxsize=None)
 def image_paths():
     """Return a map of image name to paths containing their Dockerfile."""
     config = load_yaml("taskcluster", "kinds", "docker-image", "kind.yml")
@@ -222,7 +222,7 @@ def image_path(name):
     return os.path.join(IMAGE_DIR, name)


-@memoize
+@functools.lru_cache(maxsize=None)
 def parse_volumes(image):
     """Parse VOLUME entries from a Dockerfile for an image."""
     volumes = set()
@@ -2,14 +2,14 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

+import functools
 import hashlib
 from pathlib import Path

 from taskgraph.util import path as mozpath
-from taskgraph.util.memoize import memoize


-@memoize
+@functools.lru_cache(maxsize=None)
 def hash_path(path):
     """Hash a single file.
@@ -36,7 +36,7 @@ def hash_paths(base_path, patterns):
         if matches:
             found.update(matches)
         else:
-            raise Exception("%s did not match anything" % pattern)
+            raise Exception(f"{pattern} did not match anything")
     for path in sorted(found):
         h.update(
             f"{hash_path(mozpath.abspath(mozpath.join(base_path, path)))} {mozpath.normsep(path)}\n".encode()
@@ -44,13 +44,13 @@ def hash_paths(base_path, patterns):
     return h.hexdigest()


-@memoize
+@functools.lru_cache(maxsize=None)
 def _find_matching_files(base_path, pattern):
     files = _get_all_files(base_path)
     return [path for path in files if mozpath.match(path, pattern)]


-@memoize
+@functools.lru_cache(maxsize=None)
 def _get_all_files(base_path):
     return [
         mozpath.normsep(str(path))
@@ -1,7 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import functools
-
-memoize = functools.lru_cache(maxsize=None)  # backwards compatibility shim
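The deleted file is the entire compatibility story in one line: `memoize` was already a bare alias for `functools.lru_cache(maxsize=None)`, so dropping the module and importing `functools` at each call site changes no behavior. A sketch of the equivalence:

```python
import functools

# The deleted shim, reproduced:
memoize = functools.lru_cache(maxsize=None)

@memoize
def f(x):
    return x * 2

@functools.lru_cache(maxsize=None)
def g(x):
    return x * 2

# Both decorators produce the same cached-callable behavior.
assert f(21) == g(21) == 42
assert f.cache_info().hits == 0 and f(21) == 42 and f.cache_info().hits == 1
```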
@@ -16,7 +16,6 @@ from requests.packages.urllib3.util.retry import Retry

 from taskgraph.task import Task
 from taskgraph.util import yaml
-from taskgraph.util.memoize import memoize

 logger = logging.getLogger(__name__)

@@ -31,7 +30,7 @@ PRODUCTION_TASKCLUSTER_ROOT_URL = None
 CONCURRENCY = 50


-@memoize
+@functools.lru_cache(maxsize=None)
 def get_root_url(use_proxy):
     """Get the current TASKCLUSTER_ROOT_URL.

@@ -106,7 +105,7 @@ def requests_retry_session(
     return session


-@memoize
+@functools.lru_cache(maxsize=None)
 def get_session():
     return requests_retry_session(retries=5)

@@ -277,7 +276,7 @@ def get_task_url(task_id, use_proxy=False):
     return task_tmpl.format(task_id)


-@memoize
+@functools.lru_cache(maxsize=None)
 def get_task_definition(task_id, use_proxy=False):
     response = _do_request(get_task_url(task_id, use_proxy))
     return response.json()
@@ -446,7 +445,7 @@ def list_task_group_incomplete_tasks(task_group_id):
             break


-@memoize
+@functools.lru_cache(maxsize=None)
 def _get_deps(task_ids, use_proxy):
     upstream_tasks = {}
     for task_id in task_ids:
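A consequence of caching `_get_deps` with `lru_cache`: the arguments form the cache key, so `task_ids` must be hashable (a tuple rather than a list) at every call site. Whether the callers already pass tuples is not visible in this diff; the constraint itself is sketched below:

```python
import functools

@functools.lru_cache(maxsize=None)
def get_deps(task_ids, use_proxy):
    # task_ids must be hashable for the cache lookup to work.
    return {tid: f"deps-of-{tid}" for tid in task_ids}

get_deps(("a", "b"), False)       # fine: a tuple is hashable
try:
    get_deps(["a", "b"], False)   # TypeError: unhashable type: 'list'
except TypeError as e:
    print(e)
```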
@@ -2,8 +2,7 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

-
-import copy
+from taskgraph.util.copy import deepcopy


 def merge_to(source, dest):
@@ -18,9 +17,19 @@ def merge_to(source, dest):
     """

     for key, value in source.items():
+        if (
+            isinstance(value, dict)
+            and len(value) == 1
+            and list(value)[0].startswith("by-")
+        ):
+            # Do not merge by-* values as it will almost certainly not do what
+            # the user expects.
+            dest[key] = value
+            continue
+
         # Override mismatching or empty types
         if type(value) != type(dest.get(key)):  # noqa
-            dest[key] = source[key]
+            dest[key] = value
             continue

         # Merge dict
@@ -29,10 +38,10 @@
             continue

         if isinstance(value, list):
-            dest[key] = dest[key] + source[key]
+            dest[key] = dest[key] + value
             continue

-        dest[key] = source[key]
+        dest[key] = value

     return dest

|
|||
Returns the result without modifying any arguments.
|
||||
"""
|
||||
if len(objects) == 1:
|
||||
return copy.deepcopy(objects[0])
|
||||
return deepcopy(objects[0])
|
||||
return merge_to(objects[-1], merge(*objects[:-1]))
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@@ -5,6 +5,7 @@

 import logging
 import sys
+import warnings
 from abc import ABC, abstractmethod
 from dataclasses import dataclass, field
 from typing import Callable, Dict, List, Union
@@ -192,7 +193,17 @@ def verify_routes_notification_filters(
     if task is None:
         return
     route_prefix = "notify."
-    valid_filters = ("on-any", "on-completed", "on-failed", "on-exception")
+    valid_filters = (
+        "on-any",
+        "on-completed",
+        "on-defined",
+        "on-failed",
+        "on-exception",
+        "on-pending",
+        "on-resolved",
+        "on-running",
+        "on-transition",
+    )
     task_dict = task.task
     routes = task_dict.get("routes", [])
@@ -204,6 +215,13 @@
             raise Exception(
                 f"{task.label} has invalid notification filter ({route_filter})"
             )
+        if route_filter == "on-any":
+            warnings.warn(
+                DeprecationWarning(
+                    f"notification filter '{route_filter}' is deprecated. Use "
+                    "'on-transition' or 'on-resolved'."
+                )
+            )


 @verifications.add("full_task_graph")
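The verifier now accepts the full set of Taskcluster notification filters and deprecates `on-any`. A notify route carries its filter as the last dotted segment; a sketch of what a valid route looks like (the route string and the `rsplit` extraction are illustrative, since the diff does not show how the filter is parsed):

```python
valid_filters = (
    "on-any", "on-completed", "on-defined", "on-failed",
    "on-exception", "on-pending", "on-resolved", "on-running",
    "on-transition",
)

route = "notify.email.releng@example.com.on-resolved"  # invented example
route_filter = route.rsplit(".", 1)[-1]  # -> "on-resolved"
assert route_filter in valid_filters

# "on-any" still validates but now triggers a DeprecationWarning,
# steering tasks toward on-transition/on-resolved.
```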
|
@ -2,10 +2,10 @@
|
|||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import functools
|
||||
from dataclasses import dataclass
|
||||
|
||||
from .keyed_by import evaluate_keyed_by
|
||||
from .memoize import memoize
|
||||
|
||||
|
||||
@dataclass
|
||||
|
|
@ -29,7 +29,7 @@ _BUILTIN_TYPES = {
|
|||
}
|
||||
|
||||
|
||||
@memoize
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def worker_type_implementation(graph_config, worker_type):
|
||||
"""Get the worker implementation and OS for the given workerType, where the
|
||||
OS represents the host system, not the target OS, in the case of
|
||||
|
|
@ -46,7 +46,7 @@ def worker_type_implementation(graph_config, worker_type):
|
|||
return worker_config["implementation"], worker_config.get("os")
|
||||
|
||||
|
||||
@memoize
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def get_worker_type(graph_config, alias, level):
|
||||
"""
|
||||
Get the worker type based, evaluating aliases from the graph config.
|
||||
|
|
|
|||