Bug 1784232 - Bump taskgraph to v3.2.0 r=ahal
Differential Revision: https://phabricator.services.mozilla.com/D155978
Parent: 35f28283e0
Commit: 0e57af4969
63 changed files with 2402 additions and 550 deletions
@@ -37,7 +37,7 @@
 #
 # {
 #     tasks_for: 'action',
-#     push: {owner, pushlog_id, revision},
+#     push: {owner, pushlog_id, revision, base_revision},
 #     repository: {url, project, level},
 #     input,
 #     taskId, // targetted taskId
@@ -196,6 +196,7 @@ tasks:
 # to `mach taskgraph decision` are all on the command line.
 $merge:
   - GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
+    GECKO_BASE_REV: '${push.base_revision}'
     GECKO_HEAD_REPOSITORY: '${repoUrl}'
     GECKO_HEAD_REF: '${push.revision}'
     GECKO_HEAD_REV: '${push.revision}'
@@ -253,6 +254,7 @@ tasks:
 --tasks-for='${tasks_for}'
 --repository-type=hg
 --base-repository="$GECKO_BASE_REPOSITORY"
+--base-rev="$GECKO_BASE_REV"
 --head-repository="$GECKO_HEAD_REPOSITORY"
 --head-ref="$GECKO_HEAD_REF"
 --head-rev="$GECKO_HEAD_REV"
@@ -75,6 +75,7 @@ vendored:third_party/python/ecdsa
 vendored:third_party/python/esprima
 vendored:third_party/python/fluent.migrate
 vendored:third_party/python/fluent.syntax
+vendored:third_party/python/giturlparse
 vendored:third_party/python/gyp/pylib
 vendored:third_party/python/idna
 vendored:third_party/python/idna-ssl
@@ -87,6 +88,7 @@ vendored:third_party/python/jsonschema
 vendored:third_party/python/looseversion
 vendored:third_party/python/MarkupSafe/src
 vendored:third_party/python/mohawk
+vendored:third_party/python/mozilla_repo_urls
 vendored:third_party/python/mozilla_version
 vendored:third_party/python/multidict
 vendored:third_party/python/packaging
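The two paths added above register the new vendored dependencies that taskgraph 3.2.0 needs for repository-URL parsing; their sources appear later in this diff. A minimal sketch of the giturlparse side, assuming the vendored copy is importable (the examples follow its README):

import giturlparse

# Split a git URL into its components (example from the giturlparse README).
p = giturlparse.parse("git@bitbucket.org:AaronO/some-repo.git")
print(p.host, p.owner, p.repo)  # bitbucket.org AaronO some-repo

# The parsed result can also rewrite the URL to another protocol.
p = giturlparse.parse("git@github.com:Org/Private-repo.git")
print(p.url2https)  # https://github.com/Org/Private-repo.git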
@@ -36,9 +36,15 @@ Push Information
 ``base_repository`` in cases where ``base_repository`` is likely to be cached
 and only a few additional commits are needed from ``head_repository``.

+``base_rev``
+   The previous revision before ``head_rev`` got merged into. This can be a short revision string.
+
 ``head_rev``
    The revision to check out; this can be a short revision string

+``base_ref``
+   Reference where ``head_rev`` got merged into. It is usually a branch or a tag.
+
 ``head_ref``
    For Mercurial repositories, this is the same as ``head_rev``. For
    git repositories, which do not allow pulling explicit revisions, this gives
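To make the new fields concrete, here is a hypothetical set of push-related decision parameters after this change; the values are illustrative only (the revisions are borrowed from the test fixtures later in this diff):

# Hypothetical decision parameters showing the new base_rev/base_ref fields.
push_parameters = {
    "base_repository": "https://hg.mozilla.org/mozilla-unified",
    "head_repository": "https://hg.mozilla.org/mozilla-central",
    "base_ref": "default",  # reference head_rev got merged into
    "base_rev": "e8aebe488b2f2e567940577de25013d00e818f7c",  # revision before head_rev
    "head_ref": "default",
    "head_rev": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",  # revision to check out
}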
@@ -183,10 +183,14 @@ def register_callback_action(
         revision = parameters[
             "{}head_rev".format(graph_config["project-repo-param-prefix"])
         ]
+        base_revision = parameters[
+            "{}base_rev".format(graph_config["project-repo-param-prefix"])
+        ]
         push = {
             "owner": "mozilla-taskcluster-maintenance@mozilla.com",
             "pushlog_id": parameters["pushlog_id"],
             "revision": revision,
+            "base_revision": base_revision,
         }

         match = re.match(
@@ -15,11 +15,19 @@ import yaml
 from redo import retry
 from taskgraph import create
 from taskgraph.create import create_tasks
+from taskgraph.decision import (
+    # TODO: Let standalone taskgraph generate parameters instead
+    # of calling internals
+    _determine_more_accurate_base_ref,
+    _determine_more_accurate_base_rev,
+    _get_env_prefix,
+)
 from taskgraph.parameters import Parameters
 from taskgraph.taskgraph import TaskGraph
 from taskgraph.util.python_path import find_object
 from taskgraph.util.schema import Schema, validate_schema
 from taskgraph.util.taskcluster import get_artifact
+from taskgraph.util.vcs import get_repository
 from taskgraph.util.yaml import load_yaml
 from voluptuous import Any, Optional, Required
@@ -283,6 +291,8 @@ def get_decision_parameters(graph_config, options):
         n: options[n]
         for n in [
             "base_repository",
+            "base_ref",
+            "base_rev",
             "head_repository",
             "head_rev",
             "head_ref",
@@ -310,6 +320,23 @@ def get_decision_parameters(graph_config, options):

     commit_message = get_hg_commit_message(os.path.join(GECKO, product_dir))

+    repo_path = os.getcwd()
+    repo = get_repository(repo_path)
+    parameters["base_ref"] = _determine_more_accurate_base_ref(
+        repo,
+        candidate_base_ref=options.get("base_ref"),
+        head_ref=options.get("head_ref"),
+        base_rev=options.get("base_rev"),
+    )
+
+    parameters["base_rev"] = _determine_more_accurate_base_rev(
+        repo,
+        base_ref=parameters["base_ref"],
+        candidate_base_rev=options.get("base_rev"),
+        head_rev=options.get("head_rev"),
+        env_prefix=_get_env_prefix(graph_config),
+    )
+
     # Define default filter list, as most configurations shouldn't need
     # custom filters.
     parameters["filters"] = [
@@ -595,6 +595,15 @@ def image_digest(args):
     help='Type of repository, either "hg" or "git"',
 )
 @argument("--base-repository", required=True, help='URL for "base" repository to clone')
+@argument(
+    "--base-ref", default="", help='Reference of the revision in the "base" repository'
+)
+@argument(
+    "--base-rev",
+    default="",
+    help="Taskgraph decides what to do based on the revision range between "
+    "`--base-rev` and `--head-rev`. Value is determined automatically if not provided",
+)
 @argument(
     "--head-repository",
     required=True,
@@ -75,8 +75,12 @@ class TestGetDecisionParameters(unittest.TestCase):
         }

     @patch("gecko_taskgraph.decision.get_hg_revision_branch")
-    def test_simple_options(self, mock_get_hg_revision_branch):
+    @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+    def test_simple_options(
+        self, mock_determine_more_accurate_base_rev, mock_get_hg_revision_branch
+    ):
         mock_get_hg_revision_branch.return_value = "default"
+        mock_determine_more_accurate_base_rev.return_value = "baserev"
         with MockedOpen({self.ttc_file: None}):
             params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
             self.assertEqual(params["pushlog_id"], "143")
@@ -88,8 +92,12 @@ class TestGetDecisionParameters(unittest.TestCase):
         self.assertEqual(params["try_task_config"], {})

     @patch("gecko_taskgraph.decision.get_hg_revision_branch")
-    def test_no_email_owner(self, mock_get_hg_revision_branch):
+    @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+    def test_no_email_owner(
+        self, mock_determine_more_accurate_base_rev, mock_get_hg_revision_branch
+    ):
         mock_get_hg_revision_branch.return_value = "default"
+        mock_determine_more_accurate_base_rev.return_value = "baserev"
         self.options["owner"] = "ffxbld"
         with MockedOpen({self.ttc_file: None}):
             params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
@@ -97,9 +105,16 @@ class TestGetDecisionParameters(unittest.TestCase):

     @patch("gecko_taskgraph.decision.get_hg_revision_branch")
     @patch("gecko_taskgraph.decision.get_hg_commit_message")
-    def test_try_options(self, mock_get_hg_commit_message, mock_get_hg_revision_branch):
+    @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+    def test_try_options(
+        self,
+        mock_determine_more_accurate_base_rev,
+        mock_get_hg_commit_message,
+        mock_get_hg_revision_branch,
+    ):
         mock_get_hg_commit_message.return_value = "try: -b do -t all --artifact"
         mock_get_hg_revision_branch.return_value = "default"
+        mock_determine_more_accurate_base_rev.return_value = "baserev"
         self.options["project"] = "try"
         with MockedOpen({self.ttc_file: None}):
             params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
@@ -117,11 +132,16 @@ class TestGetDecisionParameters(unittest.TestCase):

     @patch("gecko_taskgraph.decision.get_hg_revision_branch")
     @patch("gecko_taskgraph.decision.get_hg_commit_message")
+    @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
     def test_try_task_config(
-        self, mock_get_hg_commit_message, mock_get_hg_revision_branch
+        self,
+        mock_get_hg_commit_message,
+        mock_get_hg_revision_branch,
+        mock_determine_more_accurate_base_rev,
     ):
         mock_get_hg_commit_message.return_value = "Fuzzy query=foo"
         mock_get_hg_revision_branch.return_value = "default"
+        mock_determine_more_accurate_base_rev.return_value = "baserev"
         ttc = {"tasks": ["a", "b"]}
         self.options["project"] = "try"
         with MockedOpen({self.ttc_file: json.dumps(ttc)}):
@@ -24,6 +24,7 @@ class TestTaskclusterYml(unittest.TestCase):
             "tasks_for": "hg-push",
             "push": {
                 "revision": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",
+                "base_revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
                 "owner": "dustin@mozilla.com",
                 "pushlog_id": 1556565286,
                 "pushdate": 112957,
@@ -51,6 +52,7 @@ class TestTaskclusterYml(unittest.TestCase):
             },
             "push": {
                 "revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
+                "base_revision": "54cbb3745cdb9a8aa0a4428d405b3b2e1c7d13c2",
                 "pushlog_id": -1,
                 "pushdate": 0,
                 "owner": "cron",
@@ -80,6 +82,7 @@ class TestTaskclusterYml(unittest.TestCase):
             },
             "push": {
                 "revision": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",
+                "base_revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
                 "owner": "dustin@mozilla.com",
                 "pushlog_id": 1556565286,
                 "pushdate": 112957,
third_party/python/giturlparse/giturlparse-0.10.0.dist-info/LICENSE (vendored, new file, 191 lines)
@@ -0,0 +1,191 @@
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and
|
||||
distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright
|
||||
owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities
|
||||
that control, are controlled by, or are under common control with that entity.
|
||||
For the purposes of this definition, "control" means (i) the power, direct or
|
||||
indirect, to cause the direction or management of such entity, whether by
|
||||
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising
|
||||
permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including
|
||||
but not limited to software source code, documentation source, and configuration
|
||||
files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or
|
||||
translation of a Source form, including but not limited to compiled object code,
|
||||
generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made
|
||||
available under the License, as indicated by a copyright notice that is included
|
||||
in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that
|
||||
is based on (or derived from) the Work and for which the editorial revisions,
|
||||
annotations, elaborations, or other modifications represent, as a whole, an
|
||||
original work of authorship. For the purposes of this License, Derivative Works
|
||||
shall not include works that remain separable from, or merely link (or bind by
|
||||
name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version
|
||||
of the Work and any modifications or additions to that Work or Derivative Works
|
||||
thereof, that is intentionally submitted to Licensor for inclusion in the Work
|
||||
by the copyright owner or by an individual or Legal Entity authorized to submit
|
||||
on behalf of the copyright owner. For the purposes of this definition,
|
||||
"submitted" means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems, and
|
||||
issue tracking systems that are managed by, or on behalf of, the Licensor for
|
||||
the purpose of discussing and improving the Work, but excluding communication
|
||||
that is conspicuously marked or otherwise designated in writing by the copyright
|
||||
owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
|
||||
of whom a Contribution has been received by Licensor and subsequently
|
||||
incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License.
|
||||
|
||||
Subject to the terms and conditions of this License, each Contributor hereby
|
||||
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the Work and such
|
||||
Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License.
|
||||
|
||||
Subject to the terms and conditions of this License, each Contributor hereby
|
||||
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
|
||||
irrevocable (except as stated in this section) patent license to make, have
|
||||
made, use, offer to sell, sell, import, and otherwise transfer the Work, where
|
||||
such license applies only to those patent claims licensable by such Contributor
|
||||
that are necessarily infringed by their Contribution(s) alone or by combination
|
||||
of their Contribution(s) with the Work to which such Contribution(s) was
|
||||
submitted. If You institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work or a
|
||||
Contribution incorporated within the Work constitutes direct or contributory
|
||||
patent infringement, then any patent licenses granted to You under this License
|
||||
for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution.
|
||||
|
||||
You may reproduce and distribute copies of the Work or Derivative Works thereof
|
||||
in any medium, with or without modifications, and in Source or Object form,
|
||||
provided that You meet the following conditions:
|
||||
|
||||
You must give any other recipients of the Work or Derivative Works a copy of
|
||||
this License; and
|
||||
You must cause any modified files to carry prominent notices stating that You
|
||||
changed the files; and
|
||||
You must retain, in the Source form of any Derivative Works that You distribute,
|
||||
all copyright, patent, trademark, and attribution notices from the Source form
|
||||
of the Work, excluding those notices that do not pertain to any part of the
|
||||
Derivative Works; and
|
||||
If the Work includes a "NOTICE" text file as part of its distribution, then any
|
||||
Derivative Works that You distribute must include a readable copy of the
|
||||
attribution notices contained within such NOTICE file, excluding those notices
|
||||
that do not pertain to any part of the Derivative Works, in at least one of the
|
||||
following places: within a NOTICE text file distributed as part of the
|
||||
Derivative Works; within the Source form or documentation, if provided along
|
||||
with the Derivative Works; or, within a display generated by the Derivative
|
||||
Works, if and wherever such third-party notices normally appear. The contents of
|
||||
the NOTICE file are for informational purposes only and do not modify the
|
||||
License. You may add Your own attribution notices within Derivative Works that
|
||||
You distribute, alongside or as an addendum to the NOTICE text from the Work,
|
||||
provided that such additional attribution notices cannot be construed as
|
||||
modifying the License.
|
||||
You may add Your own copyright statement to Your modifications and may provide
|
||||
additional or different license terms and conditions for use, reproduction, or
|
||||
distribution of Your modifications, or for any such Derivative Works as a whole,
|
||||
provided Your use, reproduction, and distribution of the Work otherwise complies
|
||||
with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions.
|
||||
|
||||
Unless You explicitly state otherwise, any Contribution intentionally submitted
|
||||
for inclusion in the Work by You to the Licensor shall be under the terms and
|
||||
conditions of this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify the terms of
|
||||
any separate license agreement you may have executed with Licensor regarding
|
||||
such Contributions.
|
||||
|
||||
6. Trademarks.
|
||||
|
||||
This License does not grant permission to use the trade names, trademarks,
|
||||
service marks, or product names of the Licensor, except as required for
|
||||
reasonable and customary use in describing the origin of the Work and
|
||||
reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty.
|
||||
|
||||
Unless required by applicable law or agreed to in writing, Licensor provides the
|
||||
Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
|
||||
including, without limitation, any warranties or conditions of TITLE,
|
||||
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
|
||||
solely responsible for determining the appropriateness of using or
|
||||
redistributing the Work and assume any risks associated with Your exercise of
|
||||
permissions under this License.
|
||||
|
||||
8. Limitation of Liability.
|
||||
|
||||
In no event and under no legal theory, whether in tort (including negligence),
|
||||
contract, or otherwise, unless required by applicable law (such as deliberate
|
||||
and grossly negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special, incidental,
|
||||
or consequential damages of any character arising as a result of this License or
|
||||
out of the use or inability to use the Work (including but not limited to
|
||||
damages for loss of goodwill, work stoppage, computer failure or malfunction, or
|
||||
any and all other commercial damages or losses), even if such Contributor has
|
||||
been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability.
|
||||
|
||||
While redistributing the Work or Derivative Works thereof, You may choose to
|
||||
offer, and charge a fee for, acceptance of support, warranty, indemnity, or
|
||||
other liability obligations and/or rights consistent with this License. However,
|
||||
in accepting such obligations, You may act only on Your own behalf and on Your
|
||||
sole responsibility, not on behalf of any other Contributor, and only if You
|
||||
agree to indemnify, defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason of your
|
||||
accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work
|
||||
|
||||
To apply the Apache License to your work, attach the following boilerplate
|
||||
notice, with the fields enclosed by brackets "[]" replaced with your own
|
||||
identifying information. (Don't include the brackets!) The text should be
|
||||
enclosed in the appropriate comment syntax for the file format. We also
|
||||
recommend that a file or class name and description of purpose be included on
|
||||
the same "printed page" as the copyright notice for easier identification within
|
||||
third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
third_party/python/giturlparse/giturlparse-0.10.0.dist-info/METADATA (vendored, new file, 165 lines)
@@ -0,0 +1,165 @@
Metadata-Version: 2.1
|
||||
Name: giturlparse
|
||||
Version: 0.10.0
|
||||
Summary: A Git URL parsing module (supports parsing and rewriting)
|
||||
Home-page: https://github.com/nephila/giturlparse
|
||||
Author: Aaron O Mullan
|
||||
Author-email: aaron@friendco.de
|
||||
Maintainer: Iacopo Spalletti
|
||||
Maintainer-email: i.spalletti@nephila.it
|
||||
License: Apache v2
|
||||
Keywords: giturlparse
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Framework :: Django
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Requires-Python: >=3.6
|
||||
Description-Content-Type: text/x-rst
|
||||
|
||||
===========
|
||||
giturlparse
|
||||
===========
|
||||
|
||||
Parse & rewrite git urls (supports GitHub, Bitbucket, FriendCode, Assembla, Gitlab ...)
|
||||
|
||||
This is a fork of giturlparse.py with updated parsers.
|
||||
|
||||
Original project can be found at https://github.com/FriendCode/giturlparse.py
|
||||
|
||||
************
|
||||
Installing
|
||||
************
|
||||
|
||||
::
|
||||
|
||||
pip install giturlparse
|
||||
|
||||
******************
|
||||
Examples
|
||||
******************
|
||||
|
||||
Exposed attributes
|
||||
==================
|
||||
|
||||
* ``platform``: platform codename
|
||||
* ``host``: server hostname
|
||||
* ``resource``: same as ``host``
|
||||
* ``port``: URL port (only if explicitly defined in URL)
|
||||
* ``protocol``: URL protocol (git, ssh, http/https)
|
||||
* ``protocols``: list of protocols explicitly defined in URL
|
||||
* ``user``: repository user
|
||||
* ``owner``: repository owner (user or organization)
|
||||
* ``repo``: repository name
|
||||
* ``name``: same as ``repo``
|
||||
* ``groups``: list of groups - gitlab only
|
||||
* ``path``: path to file or directory (includes the branch name) - gitlab / github only
|
||||
* ``path_raw``: raw path starting from the repo name (might include platform keyword) - gitlab / github only
|
||||
* ``branch``: branch name (when parseable) - gitlab / github only
|
||||
|
||||
Parse
|
||||
==================
|
||||
|
||||
::
|
||||
|
||||
from giturlparse import parse
|
||||
|
||||
p = parse('git@bitbucket.org:AaronO/some-repo.git')
|
||||
|
||||
p.host, p.owner, p.repo
|
||||
|
||||
# => ('bitbucket.org', 'AaronO', 'some-repo')
|
||||
|
||||
|
||||
Rewrite
|
||||
==================
|
||||
|
||||
::
|
||||
|
||||
from giturlparse import parse
|
||||
|
||||
url = 'git@github.com:Org/Private-repo.git'
|
||||
|
||||
p = parse(url)
|
||||
|
||||
p.url2ssh, p.url2https, p.url2git, p.url2http
|
||||
# => ('git@github.com:Org/Private-repo.git', 'https://github.com/Org/Private-repo.git', 'git://github.com/Org/Private-repo.git', None)
|
||||
|
||||
URLS
|
||||
==================
|
||||
|
||||
Alternative URLs for same repo::
|
||||
|
||||
from giturlparse import parse
|
||||
|
||||
url = 'git@github.com:Org/Private-repo.git'
|
||||
|
||||
parse(url).urls
|
||||
# => {
|
||||
# 'ssh': 'git@github.com:Org/Private-repo.git',
|
||||
# 'https': 'https://github.com/Org/Private-repo.git',
|
||||
# 'git': 'git://github.com/Org/Private-repo.git'
|
||||
# }
|
||||
|
||||
Validate
|
||||
==================
|
||||
|
||||
::
|
||||
|
||||
from giturlparse import parse, validate
|
||||
|
||||
url = 'git@github.com:Org/Private-repo.git'
|
||||
|
||||
parse(url).valid
|
||||
# => True
|
||||
|
||||
# Or
|
||||
|
||||
validate(url)
|
||||
# => True
|
||||
|
||||
Tests
|
||||
==================
|
||||
|
||||
::
|
||||
|
||||
python setup.py test
|
||||
|
||||
License
|
||||
==================
|
||||
|
||||
Apache v2 (Check out LICENSE file)
|
||||
|
||||
.. :changelog:
|
||||
|
||||
*******
|
||||
History
|
||||
*******
|
||||
|
||||
.. towncrier release notes start
|
||||
|
||||
0.10.0 (2020-12-05)
|
||||
===================
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- General matching improvements (#18)
|
||||
- Update tooling, drop python2 (#10213)
|
||||
|
||||
0.9.2 (2018-10-27)
|
||||
==================
|
||||
|
||||
* Removed "s" from the base platform regex
|
||||
* Fix license classifier in setup.py
|
||||
* Update meta files
|
||||
|
||||
0.9.1 (2018-01-20)
|
||||
==================
|
||||
|
||||
* First fork release
|
||||
|
||||
|
||||
third_party/python/giturlparse/giturlparse-0.10.0.dist-info/RECORD (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
giturlparse/__init__.py,sha256=c5WMm7u1auWiuJrsY0bo1IsT6iRi8b6pGebNQC03_PI,332
|
||||
giturlparse/parser.py,sha256=BTaOH--z1-odYdOwEb5iNadYpCvUM4-bKHYXGKxGIZM,1924
|
||||
giturlparse/result.py,sha256=wKg1h9vYXkPseRgEAIk8TDPS1UMIU_z3t4IKbT7uD18,2765
|
||||
giturlparse/platforms/__init__.py,sha256=y8xzQWxqGHwlvx0pY99Hqott-xK2Q0iBzpQ9dTehTrY,527
|
||||
giturlparse/platforms/assembla.py,sha256=iPYpPOu8cNapbniD7sj63aTwPGT4DUH1U8RkvbUkiqE,498
|
||||
giturlparse/platforms/base.py,sha256=cZPxEa1u1WNq6IvhUVp3XWJtks9Dy2sifDaJAdeHclI,1566
|
||||
giturlparse/platforms/bitbucket.py,sha256=R6dsFBhuMlLe9-gIAP7X8hzJn-FHAjI-bBgnfNom4tc,680
|
||||
giturlparse/platforms/friendcode.py,sha256=w__PNSQAkNO2Y45doOw7YMDqwuSyu_FocQTRa305VM0,389
|
||||
giturlparse/platforms/github.py,sha256=G_7VRQpm5ZtvOcc1xbVF3CnC4AcCRnyK7EgkoaoqOEo,1446
|
||||
giturlparse/platforms/gitlab.py,sha256=2K65zlI8CA5OdXV9eXW3SBFH7oW78lFlkhLviW3Mwyo,1794
|
||||
giturlparse/tests/__init__.py,sha256=yBGT6Ycwx1AsTFYemzHoqrJ82seE0gfGti99VyrV3x0,37
|
||||
giturlparse/tests/parse.py,sha256=dpFzvo40qdH7Zg6CmgMqBMeZz473GhbZotmVK_nq_pk,14594
|
||||
giturlparse/tests/rewrite.py,sha256=scB7YGBUeFo3bEyI0Mvc0hK_ajlBY2RkrEGRtnrtukc,3386
|
||||
giturlparse-0.10.0.dist-info/LICENSE,sha256=c7p036pSC0mkAbXSFFmoUjoUbzt1GKgz7qXvqFEwv2g,10273
|
||||
giturlparse-0.10.0.dist-info/METADATA,sha256=NDWxArULRXhAAu2KttDMuZu1k35HvJ1eJHEcWfeB8lI,3511
|
||||
giturlparse-0.10.0.dist-info/WHEEL,sha256=oh0NKYrTcu1i1-wgrI1cnhkjYIi8WJ-8qd9Jrr5_y4E,110
|
||||
giturlparse-0.10.0.dist-info/top_level.txt,sha256=NHfX7iaRAYz-bnROU6Q0tgNInQU-YgIeeii0uznxCLA,12
|
||||
giturlparse-0.10.0.dist-info/RECORD,,
|
||||
third_party/python/giturlparse/giturlparse-0.10.0.dist-info/WHEEL (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
third_party/python/giturlparse/giturlparse-0.10.0.dist-info/top_level.txt (vendored, new file, 1 line)
@@ -0,0 +1 @@
giturlparse
third_party/python/giturlparse/giturlparse/__init__.py (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
from .parser import parse as _parse
from .result import GitUrlParsed

__author__ = "Iacopo Spalletti"
__email__ = "i.spalletti@nephila.it"
__version__ = "0.10.0"


def parse(url, check_domain=True):
    return GitUrlParsed(_parse(url, check_domain))


def validate(url, check_domain=True):
    return parse(url, check_domain).valid
third_party/python/giturlparse/giturlparse/parser.py (vendored, new file, 69 lines)
@@ -0,0 +1,69 @@
from collections import defaultdict
|
||||
|
||||
from .platforms import PLATFORMS
|
||||
|
||||
SUPPORTED_ATTRIBUTES = (
|
||||
"domain",
|
||||
"repo",
|
||||
"owner",
|
||||
"path_raw",
|
||||
"groups_path",
|
||||
"_user",
|
||||
"port",
|
||||
"url",
|
||||
"platform",
|
||||
"protocol",
|
||||
)
|
||||
|
||||
|
||||
def parse(url, check_domain=True):
|
||||
# Values are None by default
|
||||
parsed_info = defaultdict(lambda: None)
|
||||
parsed_info["port"] = ""
|
||||
parsed_info["path_raw"] = ""
|
||||
parsed_info["groups_path"] = ""
|
||||
|
||||
# Defaults to all attributes
|
||||
map(parsed_info.setdefault, SUPPORTED_ATTRIBUTES)
|
||||
|
||||
for name, platform in PLATFORMS:
|
||||
for protocol, regex in platform.COMPILED_PATTERNS.items():
|
||||
# print(name, protocol, regex)
|
||||
# Match current regex against URL
|
||||
match = regex.match(url)
|
||||
|
||||
# Skip if not matched
|
||||
if not match:
|
||||
# print("[%s] URL: %s dit not match %s" % (name, url, regex.pattern))
|
||||
continue
|
||||
|
||||
# Skip if domain is bad
|
||||
domain = match.group("domain")
|
||||
# print('[%s] DOMAIN = %s' % (url, domain,))
|
||||
if check_domain:
|
||||
if platform.DOMAINS and not (domain in platform.DOMAINS):
|
||||
continue
|
||||
if platform.SKIP_DOMAINS and domain in platform.SKIP_DOMAINS:
|
||||
continue
|
||||
|
||||
# add in platform defaults
|
||||
parsed_info.update(platform.DEFAULTS)
|
||||
|
||||
# Get matches as dictionary
|
||||
matches = platform.clean_data(match.groupdict(default=""))
|
||||
|
||||
# Update info with matches
|
||||
parsed_info.update(matches)
|
||||
|
||||
# Update info with platform info
|
||||
parsed_info.update(
|
||||
{
|
||||
"url": url,
|
||||
"platform": name,
|
||||
"protocol": protocol,
|
||||
}
|
||||
)
|
||||
return parsed_info
|
||||
|
||||
# Empty if none matched
|
||||
return parsed_info
|
||||
third_party/python/giturlparse/giturlparse/platforms/__init__.py (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
from .assembla import AssemblaPlatform
from .base import BasePlatform
from .bitbucket import BitbucketPlatform
from .friendcode import FriendCodePlatform
from .github import GitHubPlatform
from .gitlab import GitLabPlatform

# Supported platforms
PLATFORMS = [
    # name -> Platform object
    ("github", GitHubPlatform()),
    ("bitbucket", BitbucketPlatform()),
    ("friendcode", FriendCodePlatform()),
    ("assembla", AssemblaPlatform()),
    ("gitlab", GitLabPlatform()),
    # Match url
    ("base", BasePlatform()),
]
third_party/python/giturlparse/giturlparse/platforms/assembla.py (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
from .base import BasePlatform
|
||||
|
||||
|
||||
class AssemblaPlatform(BasePlatform):
|
||||
DOMAINS = ("git.assembla.com",)
|
||||
PATTERNS = {
|
||||
"ssh": r"(?P<protocols>(git\+)?(?P<protocol>ssh))?(://)?git@(?P<domain>.+?):(?P<pathname>(?P<repo>.+)).git",
|
||||
"git": r"(?P<protocols>(?P<protocol>git))://(?P<domain>.+?)/(?P<pathname>(?P<repo>.+)).git",
|
||||
}
|
||||
FORMATS = {
|
||||
"ssh": r"git@%(domain)s:%(repo)s.git",
|
||||
"git": r"git://%(domain)s/%(repo)s.git",
|
||||
}
|
||||
DEFAULTS = {"_user": "git"}
|
||||
third_party/python/giturlparse/giturlparse/platforms/base.py (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
import itertools
|
||||
import re
|
||||
|
||||
|
||||
class BasePlatform:
|
||||
FORMATS = {
|
||||
"ssh": r"(?P<protocols>(git\+)?(?P<protocol>ssh))?(://)?%(_user)s@%(host)s:%(repo)s.git",
|
||||
"http": r"(?P<protocols>(git\+)?(?P<protocol>http))://%(host)s/%(repo)s.git",
|
||||
"https": r"(?P<protocols>(git\+)?(?P<protocol>https))://%(host)s/%(repo)s.git",
|
||||
"git": r"(?P<protocols>(?P<protocol>git))://%(host)s/%(repo)s.git",
|
||||
}
|
||||
|
||||
PATTERNS = {
|
||||
"ssh": r"(?P<_user>.+)@(?P<domain>[^/]+?):(?P<repo>.+).git",
|
||||
"http": r"http://(?P<domain>[^/]+?)/(?P<repo>.+).git",
|
||||
"https": r"https://(?P<domain>[^/]+?)/(?P<repo>.+).git",
|
||||
"git": r"git://(?P<domain>[^/]+?)/(?P<repo>.+).git",
|
||||
}
|
||||
|
||||
# None means it matches all domains
|
||||
DOMAINS = None
|
||||
SKIP_DOMAINS = None
|
||||
DEFAULTS = {}
|
||||
|
||||
def __init__(self):
|
||||
# Precompile PATTERNS
|
||||
self.COMPILED_PATTERNS = {proto: re.compile(regex, re.IGNORECASE) for proto, regex in self.PATTERNS.items()}
|
||||
|
||||
# Supported protocols
|
||||
self.PROTOCOLS = self.PATTERNS.keys()
|
||||
|
||||
if self.__class__ == BasePlatform:
|
||||
sub = [subclass.SKIP_DOMAINS for subclass in self.__class__.__subclasses__() if subclass.SKIP_DOMAINS]
|
||||
if sub:
|
||||
self.SKIP_DOMAINS = list(itertools.chain.from_iterable(sub))
|
||||
|
||||
@staticmethod
|
||||
def clean_data(data):
|
||||
data["path"] = ""
|
||||
data["branch"] = ""
|
||||
data["protocols"] = list(filter(lambda x: x, data["protocols"].split("+")))
|
||||
data["pathname"] = data["pathname"].strip(":")
|
||||
return data
|
||||
third_party/python/giturlparse/giturlparse/platforms/bitbucket.py (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
from .base import BasePlatform
|
||||
|
||||
|
||||
class BitbucketPlatform(BasePlatform):
|
||||
PATTERNS = {
|
||||
"https": (
|
||||
r"(?P<protocols>(git\+)?(?P<protocol>https))://(?P<_user>.+)@(?P<domain>.+?)"
|
||||
r"(?P<pathname>/(?P<owner>.+)/(?P<repo>.+?)(?:\.git)?)$"
|
||||
),
|
||||
"ssh": (
|
||||
r"(?P<protocols>(git\+)?(?P<protocol>ssh))?(://)?git@(?P<domain>.+?):"
|
||||
r"(?P<pathname>(?P<owner>.+)/(?P<repo>.+?)(?:\.git)?)$"
|
||||
),
|
||||
}
|
||||
FORMATS = {
|
||||
"https": r"https://%(owner)s@%(domain)s/%(owner)s/%(repo)s.git",
|
||||
"ssh": r"git@%(domain)s:%(owner)s/%(repo)s.git",
|
||||
}
|
||||
DOMAINS = ("bitbucket.org",)
|
||||
DEFAULTS = {"_user": "git"}
|
||||
third_party/python/giturlparse/giturlparse/platforms/friendcode.py (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
from .base import BasePlatform
|
||||
|
||||
|
||||
class FriendCodePlatform(BasePlatform):
|
||||
DOMAINS = ("friendco.de",)
|
||||
PATTERNS = {
|
||||
"https": (
|
||||
r"(?P<protocols>(git\+)?(?P<protocol>https))://(?P<domain>.+?)/"
|
||||
r"(?P<pathname>(?P<owner>.+)@user/(?P<repo>.+)).git"
|
||||
),
|
||||
}
|
||||
FORMATS = {
|
||||
"https": r"https://%(domain)s/%(owner)s@user/%(repo)s.git",
|
||||
}
|
||||
third_party/python/giturlparse/giturlparse/platforms/github.py (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
from .base import BasePlatform
|
||||
|
||||
|
||||
class GitHubPlatform(BasePlatform):
|
||||
PATTERNS = {
|
||||
"https": (
|
||||
r"(?P<protocols>(git\+)?(?P<protocol>https))://(?P<domain>[^/]+?)"
|
||||
r"(?P<pathname>/(?P<owner>[^/]+?)/(?P<repo>[^/]+?)(?:\.git)?(?P<path_raw>(/blob/|/tree/).+)?)$"
|
||||
),
|
||||
"ssh": (
|
||||
r"(?P<protocols>(git\+)?(?P<protocol>ssh))?(://)?git@(?P<domain>.+?)(?P<pathname>(:|/)"
|
||||
r"(?P<owner>[^/]+)/(?P<repo>[^/]+?)(?:\.git)"
|
||||
r"(?P<path_raw>(/blob/|/tree/).+)?)$"
|
||||
),
|
||||
"git": (
|
||||
r"(?P<protocols>(?P<protocol>git))://(?P<domain>.+?)"
|
||||
r"(?P<pathname>/(?P<owner>[^/]+)/(?P<repo>[^/]+?)(?:\.git)?"
|
||||
r"(?P<path_raw>(/blob/|/tree/).+)?)$"
|
||||
),
|
||||
}
|
||||
FORMATS = {
|
||||
"https": r"https://%(domain)s/%(owner)s/%(repo)s.git%(path_raw)s",
|
||||
"ssh": r"git@%(domain)s:%(owner)s/%(repo)s.git%(path_raw)s",
|
||||
"git": r"git://%(domain)s/%(owner)s/%(repo)s.git%(path_raw)s",
|
||||
}
|
||||
DOMAINS = (
|
||||
"github.com",
|
||||
"gist.github.com",
|
||||
)
|
||||
DEFAULTS = {"_user": "git"}
|
||||
|
||||
@staticmethod
|
||||
def clean_data(data):
|
||||
data = BasePlatform.clean_data(data)
|
||||
if data["path_raw"].startswith("/blob/"):
|
||||
data["path"] = data["path_raw"].replace("/blob/", "")
|
||||
if data["path_raw"].startswith("/tree/"):
|
||||
data["branch"] = data["path_raw"].replace("/tree/", "")
|
||||
return data
|
||||
third_party/python/giturlparse/giturlparse/platforms/gitlab.py (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
from .base import BasePlatform
|
||||
|
||||
|
||||
class GitLabPlatform(BasePlatform):
|
||||
PATTERNS = {
|
||||
"https": (
|
||||
r"(?P<protocols>(git\+)?(?P<protocol>https))://(?P<domain>.+?)(?P<port>:[0-9]+)?"
|
||||
r"(?P<pathname>/(?P<owner>[^/]+?)/"
|
||||
r"(?P<groups_path>.*?)?(?(groups_path)/)?(?P<repo>[^/]+?)(?:\.git)?"
|
||||
r"(?P<path_raw>(/blob/|/-/tree/).+)?)$"
|
||||
),
|
||||
"ssh": (
|
||||
r"(?P<protocols>(git\+)?(?P<protocol>ssh))?(://)?git@(?P<domain>.+?):(?P<port>[0-9]+)?(?(port))?"
|
||||
r"(?P<pathname>/?(?P<owner>[^/]+)/"
|
||||
r"(?P<groups_path>.*?)?(?(groups_path)/)?(?P<repo>[^/]+?)(?:\.git)?"
|
||||
r"(?P<path_raw>(/blob/|/-/tree/).+)?)$"
|
||||
),
|
||||
"git": (
|
||||
r"(?P<protocols>(?P<protocol>git))://(?P<domain>.+?):(?P<port>[0-9]+)?(?(port))?"
|
||||
r"(?P<pathname>/?(?P<owner>[^/]+)/"
|
||||
r"(?P<groups_path>.*?)?(?(groups_path)/)?(?P<repo>[^/]+?)(?:\.git)?"
|
||||
r"(?P<path_raw>(/blob/|/-/tree/).+)?)$"
|
||||
),
|
||||
}
|
||||
FORMATS = {
|
||||
"https": r"https://%(domain)s/%(owner)s/%(groups_slash)s%(repo)s.git%(path_raw)s",
|
||||
"ssh": r"git@%(domain)s:%(port_slash)s%(owner)s/%(groups_slash)s%(repo)s.git%(path_raw)s",
|
||||
"git": r"git://%(domain)s%(port)s/%(owner)s/%(groups_slash)s%(repo)s.git%(path_raw)s",
|
||||
}
|
||||
SKIP_DOMAINS = (
|
||||
"github.com",
|
||||
"gist.github.com",
|
||||
)
|
||||
DEFAULTS = {"_user": "git", "port": ""}
|
||||
|
||||
@staticmethod
|
||||
def clean_data(data):
|
||||
data = BasePlatform.clean_data(data)
|
||||
if data["path_raw"].startswith("/blob/"):
|
||||
data["path"] = data["path_raw"].replace("/blob/", "")
|
||||
if data["path_raw"].startswith("/-/tree/"):
|
||||
data["branch"] = data["path_raw"].replace("/-/tree/", "")
|
||||
return data
|
||||
third_party/python/giturlparse/giturlparse/result.py (vendored, new file, 131 lines)
@@ -0,0 +1,131 @@
from copy import copy
|
||||
|
||||
from .platforms import PLATFORMS
|
||||
|
||||
# Possible values to extract from a Git Url
|
||||
REQUIRED_ATTRIBUTES = (
|
||||
"domain",
|
||||
"repo",
|
||||
)
|
||||
|
||||
|
||||
class GitUrlParsed:
|
||||
platform = None
|
||||
|
||||
def __init__(self, parsed_info):
|
||||
self._parsed = parsed_info
|
||||
|
||||
# Set parsed objects as attributes
|
||||
for k, v in parsed_info.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
for name, platform in PLATFORMS:
|
||||
if name == self.platform:
|
||||
self._platform_obj = platform
|
||||
break
|
||||
|
||||
def _valid_attrs(self):
|
||||
return all([getattr(self, attr, None) for attr in REQUIRED_ATTRIBUTES]) # NOQA
|
||||
|
||||
@property
|
||||
def valid(self):
|
||||
return all(
|
||||
[
|
||||
self._valid_attrs(),
|
||||
]
|
||||
)
|
||||
|
||||
##
|
||||
# Alias properties
|
||||
##
|
||||
@property
|
||||
def host(self):
|
||||
return self.domain
|
||||
|
||||
@property
|
||||
def resource(self):
|
||||
return self.domain
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.repo
|
||||
|
||||
@property
|
||||
def user(self):
|
||||
if hasattr(self, "_user"):
|
||||
return self._user
|
||||
|
||||
return self.owner
|
||||
|
||||
@property
|
||||
def groups(self):
|
||||
if self.groups_path:
|
||||
return self.groups_path.split("/")
|
||||
else:
|
||||
return []
|
||||
|
||||
def format(self, protocol): # noqa : A0003
|
||||
"""Reformat URL to protocol."""
|
||||
items = copy(self._parsed)
|
||||
items["port_slash"] = "%s/" % self.port if self.port else ""
|
||||
items["groups_slash"] = "%s/" % self.groups_path if self.groups_path else ""
|
||||
return self._platform_obj.FORMATS[protocol] % items
|
||||
|
||||
@property
|
||||
def normalized(self):
|
||||
"""Normalize URL."""
|
||||
return self.format(self.protocol)
|
||||
|
||||
##
|
||||
# Rewriting
|
||||
##
|
||||
@property
|
||||
def url2ssh(self):
|
||||
return self.format("ssh")
|
||||
|
||||
@property
|
||||
def url2http(self):
|
||||
return self.format("http")
|
||||
|
||||
@property
|
||||
def url2https(self):
|
||||
return self.format("https")
|
||||
|
||||
@property
|
||||
def url2git(self):
|
||||
return self.format("git")
|
||||
|
||||
# All supported Urls for a repo
|
||||
@property
|
||||
def urls(self):
|
||||
return {protocol: self.format(protocol) for protocol in self._platform_obj.PROTOCOLS}
|
||||
|
||||
##
|
||||
# Platforms
|
||||
##
|
||||
@property
|
||||
def github(self):
|
||||
return self.platform == "github"
|
||||
|
||||
@property
|
||||
def bitbucket(self):
|
||||
return self.platform == "bitbucket"
|
||||
|
||||
@property
|
||||
def friendcode(self):
|
||||
return self.platform == "friendcode"
|
||||
|
||||
@property
|
||||
def assembla(self):
|
||||
return self.platform == "assembla"
|
||||
|
||||
@property
|
||||
def gitlab(self):
|
||||
return self.platform == "gitlab"
|
||||
|
||||
##
|
||||
# Get data as dict
|
||||
##
|
||||
@property
|
||||
def data(self):
|
||||
return dict(self._parsed)
|
||||
third_party/python/mozilla_repo_urls/mozilla_repo_urls-0.0.3.dist-info/METADATA (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
Metadata-Version: 2.1
Name: mozilla-repo-urls
Version: 0.0.3
Summary: Process Mozilla's repository URLs. The intent is to centralize URLs parsing.
Home-page: https://github.com/mozilla-releng/mozilla-repo-urls
Author: Mozilla Release Engineering
Author-email: release+python@mozilla.com
License: MPL2
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Requires-Dist: giturlparse
third_party/python/mozilla_repo_urls/mozilla_repo_urls-0.0.3.dist-info/RECORD (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
mozilla_repo_urls/__init__.py,sha256=seFB5ueyozmIXZxBWVATYPbQCzNln2SWSTirc0yk_A0,108
|
||||
mozilla_repo_urls/errors.py,sha256=koXTtW_axWwCtWVHkvxXjYqAq1AXYbN54Q-C8veqDNw,485
|
||||
mozilla_repo_urls/parser.py,sha256=a6Q1x8o_yu4Qof86aq2M8dwPhiMeAzCV7LxAw7CzwRg,805
|
||||
mozilla_repo_urls/result.py,sha256=SN8hhIc6R1xLGOYZ0h1CpQbFiRP6LIqzNoup3MjOqnc,744
|
||||
mozilla_repo_urls/platforms/__init__.py,sha256=5gwGbeTZUI-0VR0HmC3913e6AUTylDkjmcXYkg8QwYc,89
|
||||
mozilla_repo_urls/platforms/hgmo.py,sha256=bKpBHAqxlMOmGbfn6aSU_q5N-7LbwNDSrt0TZ0UzTvk,1043
|
||||
test/__init__.py,sha256=ui4glNH_cCoz4Ex7hcZhHTcstOPJb2wcojFiNvvIALI,88
|
||||
test/test_integration.py,sha256=Eq0XjKA0w0Ao50UJMcF5tuPiFqfa1stgl9T2lg1vHJE,10919
|
||||
mozilla_repo_urls-0.0.3.dist-info/METADATA,sha256=4bWx4lq-nBnCq0cnpmwfmfVpmFwX0dbvBi3jg05B1s4,628
|
||||
mozilla_repo_urls-0.0.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
||||
mozilla_repo_urls-0.0.3.dist-info/top_level.txt,sha256=0LuRstNeetmfWdKTPvknIx8aDVzsf1KSmASCgOvKvDM,23
|
||||
mozilla_repo_urls-0.0.3.dist-info/RECORD,,
|
||||
third_party/python/mozilla_repo_urls/mozilla_repo_urls-0.0.3.dist-info/top_level.txt (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
mozilla_repo_urls
test
third_party/python/mozilla_repo_urls/mozilla_repo_urls/__init__.py (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
from mozilla_repo_urls.errors import *  # noqa F401
from mozilla_repo_urls.parser import parse  # noqa F401
third_party/python/mozilla_repo_urls/mozilla_repo_urls/errors.py (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
class RepoUrlsBaseError(Exception):
    pass


class InvalidRepoUrlError(RepoUrlsBaseError):
    def __init__(self, url_string) -> None:
        super().__init__(f"Could not parse URL: {url_string}")


class UnsupportedPlatformError(RepoUrlsBaseError):
    def __init__(self, url_string, platform, supported_platforms) -> None:
        super().__init__(
            f"Unsupported platform. Got: {platform}. "
            f"Expected: {supported_platforms}. URL: {url_string}"
        )
third_party/python/mozilla_repo_urls/mozilla_repo_urls/parser.py (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
import giturlparse

from mozilla_repo_urls.platforms import ADDITIONAL_PLATFORMS

from .errors import InvalidRepoUrlError, UnsupportedPlatformError
from .result import RepoUrlParsed

for i, platform in enumerate(ADDITIONAL_PLATFORMS):
    giturlparse.platforms.PLATFORMS.insert(i, platform)


_SUPPORTED_PLAFORMS = ("hgmo", "github")


def parse(url_string):
    # Workaround for https://github.com/nephila/giturlparse/issues/43
    url_string = url_string.rstrip("/")
    parsed_info = giturlparse.parser.parse(url_string)
    parsed_url = RepoUrlParsed(parsed_info)

    if not parsed_url.valid:
        raise InvalidRepoUrlError(url_string)

    if parsed_url.platform not in _SUPPORTED_PLAFORMS:
        raise UnsupportedPlatformError(url_string, platform, _SUPPORTED_PLAFORMS)

    return parsed_url
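parse() above only accepts hg.mozilla.org and GitHub URLs; anything else raises one of the exceptions defined in errors.py. A small illustration, assuming the vendored package is importable (the URLs mirror the integration tests later in this diff):

import mozilla_repo_urls

# A GitLab URL parses as a git URL but is not a supported platform here.
try:
    mozilla_repo_urls.parse("https://gitlab.com/some-owner/some-repo")
except mozilla_repo_urls.UnsupportedPlatformError as exc:
    print(exc)

# A URL that giturlparse cannot make sense of is rejected outright.
try:
    mozilla_repo_urls.parse("https://some.unknown/repo")
except mozilla_repo_urls.InvalidRepoUrlError as exc:
    print(exc)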
third_party/python/mozilla_repo_urls/mozilla_repo_urls/platforms/__init__.py (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
from .hgmo import HgmoPlatform

ADDITIONAL_PLATFORMS = [
    ("hgmo", HgmoPlatform()),
]
third_party/python/mozilla_repo_urls/mozilla_repo_urls/platforms/hgmo.py (vendored, new file, 34 lines)
@@ -0,0 +1,34 @@
from giturlparse.platforms.base import BasePlatform


class HgmoPlatform(BasePlatform):
    PATTERNS = {
        "https": (
            r"(?P<protocols>(?P<protocol>https))://"
            r"(?P<domain>[^/]+?)"
            r"(?P<pathname>/"
            r"(?P<repo>(([^/]+?)(/)?){1,2}))"
            r"(?P<path_raw>(/raw-file/|/file/).+)?$"
        ),
        "ssh": (
            r"(?P<protocols>(?P<protocol>ssh))(://)?"
            r"(?P<domain>.+?)"
            r"(?P<pathname>(:|/))"
            r"(?P<repo>(([^/]+?)(/)?){1,2})/?$"
        ),
    }
    FORMATS = {
        "https": r"https://%(domain)s/%(repo)s%(path_raw)s",
        "ssh": r"ssh://%(domain)s/%(repo)s",
    }
    DOMAINS = ("hg.mozilla.org",)
    DEFAULTS = {"_user": ""}

    @staticmethod
    def clean_data(data):
        data = BasePlatform.clean_data(data)
        if data["path_raw"].startswith(("/raw-file/", "/file")):
            data["path"] = (
                data["path_raw"].replace("/raw-file/", "").replace("/file/", "")
            )
        return data
third_party/python/mozilla_repo_urls/mozilla_repo_urls/result.py (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
import giturlparse

_DOT_GIT_SUFFIX = ".git"


class RepoUrlParsed(giturlparse.result.GitUrlParsed):
    @property
    def hgmo(self) -> bool:
        return self.platform == "hgmo"

    @property
    def repo_name(self) -> str:
        return self.repo_path.split("/")[-1]

    @property
    def repo_path(self) -> str:
        repo_path = (
            self.pathname[: -len(_DOT_GIT_SUFFIX)]
            if self.pathname.endswith(_DOT_GIT_SUFFIX)
            else self.pathname
        )
        return repo_path.strip("/")

    @property
    def repo_type(self) -> str:
        return "hg" if self.platform == "hgmo" else "git"

    @property
    def taskcluster_role_prefix(self) -> str:
        return f"repo:{self.host}/{self.repo_path}"
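A short usage sketch of the result class defined above, with values taken from the integration tests that follow:

import mozilla_repo_urls

url = mozilla_repo_urls.parse("https://hg.mozilla.org/releases/mozilla-beta")
print(url.hgmo)                     # True
print(url.repo_path)                # "releases/mozilla-beta"
print(url.repo_name)                # "mozilla-beta"
print(url.repo_type)                # "hg"
print(url.taskcluster_role_prefix)  # "repo:hg.mozilla.org/releases/mozilla-beta"

The same object also exposes everything giturlparse already provides (host, owner, urls, and so on).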
third_party/python/mozilla_repo_urls/test/__init__.py (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
from contextlib import contextmanager


@contextmanager
def does_not_raise():
    yield
third_party/python/mozilla_repo_urls/test/test_integration.py (vendored, new file, 280 lines)
@@ -0,0 +1,280 @@
from test import does_not_raise
|
||||
|
||||
import pytest
|
||||
|
||||
import mozilla_repo_urls
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"url_string, expectation, expected",
|
||||
(
|
||||
(
|
||||
"https://hg.mozilla.org/mozilla-central",
|
||||
does_not_raise(),
|
||||
{
|
||||
"github": False,
|
||||
"groups": [],
|
||||
"hgmo": True,
|
||||
"host": "hg.mozilla.org",
|
||||
"name": "mozilla-central",
|
||||
"normalized": "https://hg.mozilla.org/mozilla-central",
|
||||
"path_raw": "",
|
||||
"path": "",
|
||||
"pathname": "/mozilla-central",
|
||||
"platform": "hgmo",
|
||||
"port": "",
|
||||
"protocol": "https",
|
||||
"protocols": ["https"],
|
||||
"repo": "mozilla-central",
|
||||
"repo_name": "mozilla-central",
|
||||
"repo_path": "mozilla-central",
|
||||
"repo_type": "hg",
|
||||
"resource": "hg.mozilla.org",
|
||||
"taskcluster_role_prefix": "repo:hg.mozilla.org/mozilla-central",
|
||||
"urls": {
|
||||
"https": "https://hg.mozilla.org/mozilla-central",
|
||||
"ssh": "ssh://hg.mozilla.org/mozilla-central",
|
||||
},
|
||||
"user": "",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"https://hg.mozilla.org/releases/mozilla-beta",
|
||||
does_not_raise(),
|
||||
{
|
||||
"github": False,
|
||||
"groups": [],
|
||||
"hgmo": True,
|
||||
"host": "hg.mozilla.org",
|
||||
"name": "releases/mozilla-beta",
|
||||
"normalized": "https://hg.mozilla.org/releases/mozilla-beta",
|
||||
"path_raw": "",
|
||||
"path": "",
|
||||
"pathname": "/releases/mozilla-beta",
|
||||
"platform": "hgmo",
|
||||
"port": "",
|
||||
"protocol": "https",
|
||||
"protocols": ["https"],
|
||||
"repo": "releases/mozilla-beta",
|
||||
"repo_name": "mozilla-beta",
|
||||
"repo_path": "releases/mozilla-beta",
|
||||
"repo_type": "hg",
|
||||
"resource": "hg.mozilla.org",
|
||||
"taskcluster_role_prefix": "repo:hg.mozilla.org/releases/mozilla-beta",
|
||||
"urls": {
|
||||
"https": "https://hg.mozilla.org/releases/mozilla-beta",
|
||||
"ssh": "ssh://hg.mozilla.org/releases/mozilla-beta",
|
||||
},
|
||||
"user": "",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"https://hg.mozilla.org/releases/mozilla-release",
|
||||
does_not_raise(),
|
||||
{
|
||||
"github": False,
|
||||
"groups": [],
|
||||
"hgmo": True,
|
||||
"host": "hg.mozilla.org",
|
||||
"name": "releases/mozilla-release",
|
||||
"normalized": "https://hg.mozilla.org/releases/mozilla-release",
|
||||
"path_raw": "",
|
||||
"path": "",
|
||||
"pathname": "/releases/mozilla-release",
|
||||
"platform": "hgmo",
|
||||
"port": "",
|
||||
"protocol": "https",
|
||||
"protocols": ["https"],
|
||||
"repo": "releases/mozilla-release",
|
||||
"repo_name": "mozilla-release",
|
||||
"repo_path": "releases/mozilla-release",
|
||||
"repo_type": "hg",
|
||||
"resource": "hg.mozilla.org",
|
||||
"taskcluster_role_prefix": "repo:hg.mozilla.org/releases/mozilla-release", # noqa: E501
|
||||
"urls": {
|
||||
"https": "https://hg.mozilla.org/releases/mozilla-release",
|
||||
"ssh": "ssh://hg.mozilla.org/releases/mozilla-release",
|
||||
},
|
||||
"user": "",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"https://hg.mozilla.org/try",
|
||||
does_not_raise(),
|
||||
{
|
||||
"groups": [],
|
||||
"hgmo": True,
|
||||
"host": "hg.mozilla.org",
|
||||
"name": "try",
|
||||
"normalized": "https://hg.mozilla.org/try",
|
||||
"path_raw": "",
|
||||
"path": "",
|
||||
"pathname": "/try",
|
||||
"platform": "hgmo",
|
||||
"port": "",
|
||||
"protocol": "https",
|
||||
"protocols": ["https"],
|
||||
"repo": "try",
|
||||
"repo_name": "try",
|
||||
"repo_path": "try",
|
||||
"repo_type": "hg",
|
||||
"resource": "hg.mozilla.org",
|
||||
"taskcluster_role_prefix": "repo:hg.mozilla.org/try",
|
||||
"urls": {
|
||||
"https": "https://hg.mozilla.org/try",
|
||||
"ssh": "ssh://hg.mozilla.org/try",
|
||||
},
|
||||
"user": "",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"https://hg.mozilla.org/mozilla-central/raw-file/tip/taskcluster/ci/config.yml", # noqa: E501
|
||||
does_not_raise(),
|
||||
{
|
||||
"groups": [],
|
||||
"hgmo": True,
|
||||
"host": "hg.mozilla.org",
|
||||
"name": "mozilla-central",
|
||||
"normalized": "https://hg.mozilla.org/mozilla-central/raw-file/tip/taskcluster/ci/config.yml", # noqa: E501
|
||||
"path_raw": "/raw-file/tip/taskcluster/ci/config.yml",
|
||||
"path": "tip/taskcluster/ci/config.yml",
|
||||
"pathname": "/mozilla-central",
|
||||
"platform": "hgmo",
|
||||
"port": "",
|
||||
"protocol": "https",
|
||||
"protocols": ["https"],
|
||||
"repo": "mozilla-central",
|
||||
"repo_name": "mozilla-central",
|
||||
"repo_path": "mozilla-central",
|
||||
"repo_type": "hg",
|
||||
"resource": "hg.mozilla.org",
|
||||
"taskcluster_role_prefix": "repo:hg.mozilla.org/mozilla-central",
|
||||
"urls": {
|
||||
"https": "https://hg.mozilla.org/mozilla-central/raw-file/tip/taskcluster/ci/config.yml", # noqa: E501
|
||||
"ssh": "ssh://hg.mozilla.org/mozilla-central",
|
||||
},
|
||||
"user": "",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/ci/config.yml", # noqa: E501
|
||||
does_not_raise(),
|
||||
{
|
||||
"github": False,
|
||||
"groups": [],
|
||||
"hgmo": True,
|
||||
"host": "hg.mozilla.org",
|
||||
"name": "mozilla-central",
|
||||
"normalized": "https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/ci/config.yml", # noqa: E501
|
||||
"path_raw": "/file/tip/taskcluster/ci/config.yml",
|
||||
"path": "tip/taskcluster/ci/config.yml",
|
||||
"pathname": "/mozilla-central",
|
||||
"platform": "hgmo",
|
||||
"port": "",
|
||||
"protocol": "https",
|
||||
"protocols": ["https"],
|
||||
"repo": "mozilla-central",
|
||||
"repo_name": "mozilla-central",
|
||||
"repo_path": "mozilla-central",
|
||||
"repo_type": "hg",
|
||||
"resource": "hg.mozilla.org",
|
||||
"taskcluster_role_prefix": "repo:hg.mozilla.org/mozilla-central",
|
||||
"urls": {
|
||||
"https": "https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/ci/config.yml", # noqa: E501
|
||||
"ssh": "ssh://hg.mozilla.org/mozilla-central",
|
||||
},
|
||||
"user": "",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"https://github.com/mozilla-mobile/fenix",
|
||||
does_not_raise(),
|
||||
{
|
||||
"github": True,
|
||||
"groups": [],
|
||||
"hgmo": False,
|
||||
"host": "github.com",
|
||||
"name": "fenix",
|
||||
"normalized": "https://github.com/mozilla-mobile/fenix.git",
|
||||
"owner": "mozilla-mobile",
|
||||
"path_raw": "",
|
||||
"path": "",
|
||||
"pathname": "/mozilla-mobile/fenix",
|
||||
"platform": "github",
|
||||
"port": "",
|
||||
"protocol": "https",
|
||||
"protocols": ["https"],
|
||||
"repo": "fenix",
|
||||
"repo_name": "fenix",
|
||||
"repo_path": "mozilla-mobile/fenix",
|
||||
"repo_type": "git",
|
||||
"resource": "github.com",
|
||||
"taskcluster_role_prefix": "repo:github.com/mozilla-mobile/fenix",
|
||||
"urls": {
|
||||
"git": "git://github.com/mozilla-mobile/fenix.git",
|
||||
"https": "https://github.com/mozilla-mobile/fenix.git",
|
||||
"ssh": "git@github.com:mozilla-mobile/fenix.git",
|
||||
},
|
||||
"user": "git",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"git@github.com:mozilla-mobile/firefox-android.git",
|
||||
does_not_raise(),
|
||||
{
|
||||
"github": True,
|
||||
"groups": [],
|
||||
"hgmo": False,
|
||||
"host": "github.com",
|
||||
"name": "firefox-android",
|
||||
"normalized": "git@github.com:mozilla-mobile/firefox-android.git",
|
||||
"owner": "mozilla-mobile",
|
||||
"path_raw": "",
|
||||
"path": "",
|
||||
"pathname": "mozilla-mobile/firefox-android.git",
|
||||
"platform": "github",
|
||||
"port": "",
|
||||
"protocol": "ssh",
|
||||
"protocols": [],
|
||||
"repo": "firefox-android",
|
||||
"repo_name": "firefox-android",
|
||||
"repo_path": "mozilla-mobile/firefox-android",
|
||||
"repo_type": "git",
|
||||
"resource": "github.com",
|
||||
"taskcluster_role_prefix": "repo:github.com/mozilla-mobile/firefox-android", # noqa: E501
|
||||
"urls": {
|
||||
"git": "git://github.com/mozilla-mobile/firefox-android.git",
|
||||
"https": "https://github.com/mozilla-mobile/firefox-android.git",
|
||||
"ssh": "git@github.com:mozilla-mobile/firefox-android.git",
|
||||
},
|
||||
"user": "git",
|
||||
"valid": True,
|
||||
},
|
||||
),
|
||||
(
|
||||
"https://some.unknown/repo",
|
||||
pytest.raises(mozilla_repo_urls.InvalidRepoUrlError),
|
||||
None,
|
||||
),
|
||||
(
|
||||
"https://gitlab.com/some-owner/some-repo",
|
||||
pytest.raises(mozilla_repo_urls.UnsupportedPlatformError),
|
||||
None,
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_parse(url_string, expectation, expected):
|
||||
with expectation:
|
||||
url_object = mozilla_repo_urls.parse(url_string)
|
||||
actual = {
|
||||
attribute_name: getattr(url_object, attribute_name)
|
||||
for attribute_name in expected.keys()
|
||||
}
|
||||
assert actual == expected
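
As a quick illustration of the API these fixtures exercise (a sketch, not part of the patch; both URLs appear in the cases above):

import mozilla_repo_urls

url = mozilla_repo_urls.parse("https://hg.mozilla.org/try")
url.repo_name                  # "try"
url.taskcluster_role_prefix    # "repo:hg.mozilla.org/try"

# GitHub URLs, whether https or ssh, normalize to the same role prefix.
url = mozilla_repo_urls.parse("git@github.com:mozilla-mobile/firefox-android.git")
url.taskcluster_role_prefix    # "repo:github.com/mozilla-mobile/firefox-android"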
|
||||
third_party/python/poetry.lock (36 changes, generated, vendored)
|
|
@ -194,6 +194,14 @@ category = "main"
|
|||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[[package]]
|
||||
name = "giturlparse"
|
||||
version = "0.10.0"
|
||||
description = "A Git URL parsing module (supports parsing and rewriting)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[[package]]
|
||||
name = "glean-parser"
|
||||
version = "6.1.2"
|
||||
|
|
@ -334,6 +342,17 @@ python-versions = "*"
|
|||
[package.dependencies]
|
||||
six = "*"
|
||||
|
||||
[[package]]
|
||||
name = "mozilla-repo-urls"
|
||||
version = "0.0.3"
|
||||
description = "Process Mozilla's repository URLs. The intent is to centralize URLs parsing."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[package.dependencies]
|
||||
giturlparse = "*"
|
||||
|
||||
[[package]]
|
||||
name = "mozilla-version"
|
||||
version = "0.3.4"
|
||||
|
|
@ -615,7 +634,7 @@ test = ["pytest", "pytest-cov", "pytest-mock", "httmock", "mock", "setuptools-li
|
|||
|
||||
[[package]]
|
||||
name = "taskcluster-taskgraph"
|
||||
version = "2.0.0"
|
||||
version = "3.2.0"
|
||||
description = "Build taskcluster taskgraphs"
|
||||
category = "main"
|
||||
optional = false
|
||||
|
|
@ -625,6 +644,7 @@ python-versions = "*"
|
|||
appdirs = ">=1.4"
|
||||
attrs = ">=19.1.0"
|
||||
json-e = ">=2.7"
|
||||
mozilla-repo-urls = "*"
|
||||
PyYAML = ">=5.4"
|
||||
redo = ">=2.0"
|
||||
requests = ">=2.25"
|
||||
|
|
@ -737,7 +757,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt
|
|||
[metadata]
|
||||
lock-version = "1.1"
|
||||
python-versions = "^3.6"
|
||||
content-hash = "9d5442add586f045a8bac2403afaade45b7836ae851e906fd598d48c23075eb1"
|
||||
content-hash = "e350bab49e867e6242ad7be2ca071a731ffe5162e79553e38f82eba26c421da3"
|
||||
|
||||
[metadata.files]
|
||||
aiohttp = [
|
||||
|
|
@ -850,6 +870,10 @@ esprima = [
|
|||
{file = "fluent.syntax-0.18.1-py2.py3-none-any.whl", hash = "sha256:0e63679fa4f1b3042565220a5127b4bab842424f07d6a13c12299e3b3835486a"},
|
||||
{file = "fluent.syntax-0.18.1.tar.gz", hash = "sha256:3a55f5e605d1b029a65cc8b6492c86ec4608e15447e73db1495de11fd46c104f"},
|
||||
]
|
||||
giturlparse = [
|
||||
{file = "giturlparse-0.10.0-py2.py3-none-any.whl", hash = "sha256:04ba1a3a099c3093fa8d24a422913c6a9b2c2cd22bcffc939cf72e3e98f672d7"},
|
||||
{file = "giturlparse-0.10.0.tar.gz", hash = "sha256:2595ab291d30717cda8474b874c9fd509f1b9802ad7f6968c36a45e4b13eb337"},
|
||||
]
|
||||
glean-parser = [
|
||||
{file = "glean_parser-6.1.2-py3-none-any.whl", hash = "sha256:e801af6463b7e0ba79d97ddfc0a58d9d71121c93cea601417571e33fa8142270"},
|
||||
{file = "glean_parser-6.1.2.tar.gz", hash = "sha256:12a0fecedc1144d77fa571e0422ff3fea4dbadc381d631bea800a6b2f58f4f7f"},
|
||||
|
|
@ -945,6 +969,10 @@ mohawk = [
|
|||
{file = "mohawk-0.3.4-py2-none-any.whl", hash = "sha256:b3f85ffa93a5c7d2f9cc591246ef9f8ac4a9fa716bfd5bae0377699a2d89d78c"},
|
||||
{file = "mohawk-0.3.4.tar.gz", hash = "sha256:e98b331d9fa9ece7b8be26094cbe2d57613ae882133cc755167268a984bc0ab3"},
|
||||
]
|
||||
mozilla-repo-urls = [
|
||||
{file = "mozilla-repo-urls-0.0.3.tar.gz", hash = "sha256:3b2f9b42111ce3d50ecdcd081a62229e1fdc8f5472adbf405abe12d3ba8e8af5"},
|
||||
{file = "mozilla_repo_urls-0.0.3-py3-none-any.whl", hash = "sha256:673c80a4d0ed449093203b88e119e0bf1074026f891c1dd50aa04d534fd0658c"},
|
||||
]
|
||||
mozilla-version = [
|
||||
{file = "mozilla-version-0.3.4.tar.gz", hash = "sha256:3ed4deb7a6fb25c83a5346ef4de08ddff9b2ddc4d16dd8fafb4a84978cc71255"},
|
||||
{file = "mozilla_version-0.3.4-py2.py3-none-any.whl", hash = "sha256:ce5741c2e7d12c30b53de9f79e30d6ac2a8bd4c93be711d30c7a7a08e32a094f"},
|
||||
|
|
@ -1125,8 +1153,8 @@ taskcluster = [
|
|||
{file = "taskcluster-44.2.2.tar.gz", hash = "sha256:0266a6a901e1a2ec838984a7f24e7adb6d58f9f2e221a7f613388f8f23f786fc"},
|
||||
]
|
||||
taskcluster-taskgraph = [
|
||||
{file = "taskcluster-taskgraph-2.0.0.tar.gz", hash = "sha256:93eff40ba39a29cd290fc25a2124ed9bf5806d87891edd7e8de35df568708141"},
|
||||
{file = "taskcluster_taskgraph-2.0.0-py3-none-any.whl", hash = "sha256:3d22ab488071ddc82997b33fc6c1c524a44bdc7e14b30a274d99dbbdd7389502"},
|
||||
{file = "taskcluster-taskgraph-3.2.0.tar.gz", hash = "sha256:941d96fd36c20b0e84e322d2c25be04fe5f258405793af5e086bbec698cbcb17"},
|
||||
{file = "taskcluster_taskgraph-3.2.0-py3-none-any.whl", hash = "sha256:fafcd1bfd8ec8b2f57db4e4d87d5b881da3ad7119d78d407d22b4b71b805d1bf"},
|
||||
]
|
||||
taskcluster-urls = [
|
||||
{file = "taskcluster-urls-13.0.1.tar.gz", hash = "sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367"},
|
||||
|
|
|
|||
third_party/python/requirements.in (2 changes, vendored)
|
|
@ -39,7 +39,7 @@ setuptools==51.2.0
|
|||
six==1.13.0
|
||||
slugid==2.0.0
|
||||
taskcluster==44.2.2
|
||||
taskcluster-taskgraph==2.0.0
|
||||
taskcluster-taskgraph==3.2.0
|
||||
taskcluster-urls==13.0.1
|
||||
tqdm==4.62.3
|
||||
urllib3==1.26
|
||||
|
|
|
|||
third_party/python/requirements.txt (12 changes, vendored)
|
|
@ -89,6 +89,9 @@ fluent.migrate==0.11 \
|
|||
fluent.syntax==0.18.1 \
|
||||
--hash=sha256:0e63679fa4f1b3042565220a5127b4bab842424f07d6a13c12299e3b3835486a \
|
||||
--hash=sha256:3a55f5e605d1b029a65cc8b6492c86ec4608e15447e73db1495de11fd46c104f
|
||||
giturlparse==0.10.0; python_version >= "3.6" \
|
||||
--hash=sha256:04ba1a3a099c3093fa8d24a422913c6a9b2c2cd22bcffc939cf72e3e98f672d7 \
|
||||
--hash=sha256:2595ab291d30717cda8474b874c9fd509f1b9802ad7f6968c36a45e4b13eb337
|
||||
glean-parser==6.1.2 \
|
||||
--hash=sha256:e801af6463b7e0ba79d97ddfc0a58d9d71121c93cea601417571e33fa8142270 \
|
||||
--hash=sha256:12a0fecedc1144d77fa571e0422ff3fea4dbadc381d631bea800a6b2f58f4f7f
|
||||
|
|
@ -172,6 +175,9 @@ markupsafe==1.1.1; python_version >= "2.7" and python_full_version < "3.0.0" or
|
|||
mohawk==0.3.4 \
|
||||
--hash=sha256:b3f85ffa93a5c7d2f9cc591246ef9f8ac4a9fa716bfd5bae0377699a2d89d78c \
|
||||
--hash=sha256:e98b331d9fa9ece7b8be26094cbe2d57613ae882133cc755167268a984bc0ab3
|
||||
mozilla-repo-urls==0.0.3 \
|
||||
--hash=sha256:3b2f9b42111ce3d50ecdcd081a62229e1fdc8f5472adbf405abe12d3ba8e8af5 \
|
||||
--hash=sha256:673c80a4d0ed449093203b88e119e0bf1074026f891c1dd50aa04d534fd0658c
|
||||
mozilla-version==0.3.4 \
|
||||
--hash=sha256:3ed4deb7a6fb25c83a5346ef4de08ddff9b2ddc4d16dd8fafb4a84978cc71255 \
|
||||
--hash=sha256:ce5741c2e7d12c30b53de9f79e30d6ac2a8bd4c93be711d30c7a7a08e32a094f
|
||||
|
|
@ -322,9 +328,9 @@ six==1.13.0; (python_version >= "2.6" and python_full_version < "3.0.0") or (pyt
|
|||
slugid==2.0.0 \
|
||||
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c \
|
||||
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297
|
||||
taskcluster-taskgraph==2.0.0 \
|
||||
--hash=sha256:93eff40ba39a29cd290fc25a2124ed9bf5806d87891edd7e8de35df568708141 \
|
||||
--hash=sha256:3d22ab488071ddc82997b33fc6c1c524a44bdc7e14b30a274d99dbbdd7389502
|
||||
taskcluster-taskgraph==3.2.0 \
|
||||
--hash=sha256:941d96fd36c20b0e84e322d2c25be04fe5f258405793af5e086bbec698cbcb17 \
|
||||
--hash=sha256:fafcd1bfd8ec8b2f57db4e4d87d5b881da3ad7119d78d407d22b4b71b805d1bf
|
||||
taskcluster-urls==13.0.1 \
|
||||
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
|
||||
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
Metadata-Version: 2.1
|
||||
Name: taskcluster-taskgraph
|
||||
Version: 3.0.0
|
||||
Version: 3.2.0
|
||||
Summary: Build taskcluster taskgraphs
|
||||
Home-page: https://github.com/taskcluster/taskgraph
|
||||
License: UNKNOWN
|
||||
|
|
@ -18,6 +18,7 @@ License-File: LICENSE
|
|||
Requires-Dist: appdirs (>=1.4)
|
||||
Requires-Dist: attrs (>=19.1.0)
|
||||
Requires-Dist: json-e (>=2.7)
|
||||
Requires-Dist: mozilla-repo-urls
|
||||
Requires-Dist: PyYAML (>=5.4)
|
||||
Requires-Dist: redo (>=2.0)
|
||||
Requires-Dist: requests (>=2.25)
|
||||
|
|
@ -9,7 +9,7 @@ taskgraph/generator.py,sha256=ZfSb8dek6tQRxfpHbvQP2KMxXFzmhqwN821tOlNcvzo,15118
|
|||
taskgraph/graph.py,sha256=9tE3bSSBRHvRLgJzK4dTieGT3RrzQZdR1YbKizEhzlw,4667
|
||||
taskgraph/main.py,sha256=E7dC1q14L4psrNfUe-PMC8QH4cYjsIs91I-aVmzeBaI,23551
|
||||
taskgraph/morph.py,sha256=8qxYdruEQkbHGqv7dh3e1OWhH9Y5i6bFUKzDMs-Ctnw,9625
|
||||
taskgraph/parameters.py,sha256=rye7dxD3A_Voh9w0Ru28zgZ8rGVv5enUu-k5lE7HvEk,11725
|
||||
taskgraph/parameters.py,sha256=Ke-07fA2Qcynzbp-wfmO-VnQ4BGF9inr0hsrC5iyL-8,11792
|
||||
taskgraph/target_tasks.py,sha256=41BIVwiATy8DCQujPduTtnFmgHlKOfw6RPGL4b20WO8,3324
|
||||
taskgraph/task.py,sha256=QCrOzMaTsy5QHShKUo89XgjJVMl3cSZGZJPLuHCXItE,3132
|
||||
taskgraph/taskgraph.py,sha256=tfj0ZMqjuwEQDET0W57EcP-_KBEbqkxJci9Z6DkeOEQ,2397
|
||||
|
|
@ -17,7 +17,7 @@ taskgraph/actions/__init__.py,sha256=lVP1e0YyELg7-_42MWWDbT0cKv_p53BApVE6vWOiPww
|
|||
taskgraph/actions/add_new_jobs.py,sha256=mX_DFDJaQUHetjyMNi5b8zPCCeqfzDrCjDg5DxTaA-I,1831
|
||||
taskgraph/actions/cancel.py,sha256=UQSt_6y3S6PXNmUo_mNaUOuDvK2bixWjzdjTKXieEEg,1309
|
||||
taskgraph/actions/cancel_all.py,sha256=-ETWKl8BHkk5HjGZRIJpUsFOySE6co0pL0dBDupolu8,1947
|
||||
taskgraph/actions/registry.py,sha256=p-YTqnhRPSouOqhSoRL5QgUkpO_ab4XIMSFKreu7E_8,13252
|
||||
taskgraph/actions/registry.py,sha256=xmhoEGMyYj6TTRFwMowZAUp0aqvtLvdVfmRWM7Yh7xo,13122
|
||||
taskgraph/actions/retrigger.py,sha256=awSC8XRtPJxADz5tbEWTKdNEudG8SpwUOM7z2lXxH1U,9382
|
||||
taskgraph/actions/util.py,sha256=jA5xXehV8N2G542LZOEci_gMHEFN-BrIjkA55On0kc0,10673
|
||||
taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
|
|
@ -28,7 +28,7 @@ taskgraph/optimize/strategies.py,sha256=Y5fS-f_3xsQNfFjCXIwDxrwXBvyp4yZxdPVNh49c
|
|||
taskgraph/run-task/fetch-content,sha256=uUoyua3OdIgynY5Q9K6EojBwuaM2zo2OiN9bmNS646Q,24291
|
||||
taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
|
||||
taskgraph/run-task/robustcheckout.py,sha256=xc24zaBd6dyuoga1ace0M27jo14K4UXNwhqcbHutJ7U,28977
|
||||
taskgraph/run-task/run-task,sha256=TVjIoZO9kbpaG-GCMJV_wjlR9H2xk8vJi0wB_rFleEg,46953
|
||||
taskgraph/run-task/run-task,sha256=76p0Zo19a6f4NkwTq8s9y4Emt3YW6Q-VdTInlcqjPjo,46956
|
||||
taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
taskgraph/transforms/base.py,sha256=N9ec4kw65V_J2KY4C4QRPlbIREbRDYwTlhClstYmOBU,5285
|
||||
taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607
|
||||
|
|
@ -36,7 +36,7 @@ taskgraph/transforms/code_review.py,sha256=eE2xrDtdD_n3HT3caQ2HGAkPm6Uutdm4hDCpC
|
|||
taskgraph/transforms/docker_image.py,sha256=ADiOUB-Ngm9Y6uwzGDpQsDJ_-4w6-ZYwLCxQ-0b16E0,7567
|
||||
taskgraph/transforms/fetch.py,sha256=jxJw7wlEh_WxAa1Bmy2WIHfpdvL79PDsKwC1DFymbBQ,9584
|
||||
taskgraph/transforms/release_notifications.py,sha256=jrb9CCT-z_etDf690T-AeCvdzIoVWBAeM_FGoW7FIzA,3305
|
||||
taskgraph/transforms/task.py,sha256=n73lD8XtzpJm2BqJpZb_oiGqNHBJzTcT7GWX6jk7Xqc,47839
|
||||
taskgraph/transforms/task.py,sha256=kWic-qqvK8vEFxQwojRPxc42GAsdkxoV3HVcG1pdBxE,47942
|
||||
taskgraph/transforms/job/__init__.py,sha256=GKYODycxov7u05owF_ZWgczd7WHi2yHTd8L5Ftvxge0,16929
|
||||
taskgraph/transforms/job/common.py,sha256=onHnerPcmmvbSk0oHt8mvJmOo7AnjHQya0ombgMNLG8,7106
|
||||
taskgraph/transforms/job/index_search.py,sha256=Ngh9FFu1bx2kHVTChW2vcrbnb3SzMneRHopXk18RfB4,1220
|
||||
|
|
@ -60,15 +60,15 @@ taskgraph/util/shell.py,sha256=MB9zHVSvxgOuszgmKr2rWUDahANZkbHHNkjjagZG_3I,1317
|
|||
taskgraph/util/taskcluster.py,sha256=cGUGvkrefRHngjyZm_iQRYKRlGi4jMIr7ky0fi_YBrg,12445
|
||||
taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
|
||||
taskgraph/util/templates.py,sha256=Dqxfl244u-PX7dnsk3_vYyzDwpDgJtANK6NmZwN3Qow,1417
|
||||
taskgraph/util/time.py,sha256=dmR9Y0IGKuE1eHfFZjDuBUroK63XLBxEMM5ploO4li4,3490
|
||||
taskgraph/util/time.py,sha256=pNFcTH-iYRfm2-okm1lMATc4B5wO-_FXbOFXEtXD27g,3390
|
||||
taskgraph/util/treeherder.py,sha256=XrdE-Je0ZvXe6_8f0DvvqNbrHherUk-hUuxirImPEIo,2138
|
||||
taskgraph/util/vcs.py,sha256=nCmvO_hHJIM4vIJ0vlpbQjdIFRtkpRImCikYde-C_R0,17328
|
||||
taskgraph/util/vcs.py,sha256=i13idS8y9ooR216mnd1gksdjSgHBNlAZEdq7Xr-ROwE,18536
|
||||
taskgraph/util/verify.py,sha256=YETuZVkwnfYe57GRPx2x_vedstgqdGiH46HLWAdcks8,8827
|
||||
taskgraph/util/workertypes.py,sha256=5g2mgIbEKMzDpZNnmPMoMNyy7Wahi-jmWcV1amDAcPo,2341
|
||||
taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990
|
||||
taskcluster_taskgraph-3.0.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
|
||||
taskcluster_taskgraph-3.0.0.dist-info/METADATA,sha256=rDJwBZW7nHDBPBcMH7n9eTnb2GONIfgG_YHTgsiB7no,1017
|
||||
taskcluster_taskgraph-3.0.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
||||
taskcluster_taskgraph-3.0.0.dist-info/entry_points.txt,sha256=VoXNtZpN4LvyXYB1wq47AU9CO-DMYMJ0VktKxjugzbY,51
|
||||
taskcluster_taskgraph-3.0.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
|
||||
taskcluster_taskgraph-3.0.0.dist-info/RECORD,,
|
||||
taskcluster_taskgraph-3.2.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
|
||||
taskcluster_taskgraph-3.2.0.dist-info/METADATA,sha256=UDpk6d8wAiYmub9zhzF6H5uMIQ4JA91IeeLiZGQw0ls,1050
|
||||
taskcluster_taskgraph-3.2.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
||||
taskcluster_taskgraph-3.2.0.dist-info/entry_points.txt,sha256=VoXNtZpN4LvyXYB1wq47AU9CO-DMYMJ0VktKxjugzbY,51
|
||||
taskcluster_taskgraph-3.2.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
|
||||
taskcluster_taskgraph-3.2.0.dist-info/RECORD,,
|
||||
third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-3.2.0.dist-info/WHEEL (5 changes, vendored, new file)
|
|
@ -0,0 +1,5 @@
|
|||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.37.1)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
|
|
@ -7,6 +7,8 @@ import json
|
|||
from collections import namedtuple
|
||||
from types import FunctionType
|
||||
|
||||
from mozilla_repo_urls import parse
|
||||
|
||||
from taskgraph import create
|
||||
from taskgraph.config import load_graph_config
|
||||
from taskgraph.parameters import Parameters
|
||||
|
|
@ -291,24 +293,20 @@ def sanity_check_task_scope(callback, parameters, graph_config):
|
|||
if action.cb_name == callback:
|
||||
break
|
||||
else:
|
||||
raise Exception(f"No action with cb_name {callback}")
|
||||
raise ValueError(f"No action with cb_name {callback}")
|
||||
|
||||
actionPerm = "generic" if action.generic else action.cb_name
|
||||
|
||||
repo_param = "head_repository"
|
||||
head_repository = parameters[repo_param]
|
||||
if not head_repository.startswith(("https://hg.mozilla.org", "https://github.com")):
|
||||
raise Exception(
|
||||
"{} is not either https://hg.mozilla.org or https://github.com !"
|
||||
)
|
||||
|
||||
expected_scope = f"assume:repo:{head_repository[8:]}:action:{actionPerm}"
|
||||
raw_url = parameters[repo_param]
|
||||
parsed_url = parse(raw_url)
|
||||
expected_scope = f"assume:{parsed_url.taskcluster_role_prefix}:action:{actionPerm}"
|
||||
|
||||
# the scope should appear literally; no need for a satisfaction check. The use of
|
||||
# get_current_scopes here calls the auth service through the Taskcluster Proxy, giving
|
||||
# the precise scopes available to this task.
|
||||
if expected_scope not in taskcluster.get_current_scopes():
|
||||
raise Exception(f"Expected task scope {expected_scope} for this action")
|
||||
raise ValueError(f"Expected task scope {expected_scope} for this action")
|
||||
|
||||
|
||||
def trigger_action_callback(
|
||||
|
|
|
|||
|
|
@ -160,6 +160,7 @@ def create_tasks(
|
|||
target_task_graph.for_each_task(update_dependencies)
|
||||
optimized_task_graph, label_to_taskid = optimize_task_graph(
|
||||
target_task_graph,
|
||||
to_run,
|
||||
params,
|
||||
to_run,
|
||||
decision_task_id,
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ from taskgraph.parameters import Parameters, get_version
|
|||
from taskgraph.taskgraph import TaskGraph
|
||||
from taskgraph.util.python_path import find_object
|
||||
from taskgraph.util.schema import Schema, validate_schema
|
||||
from taskgraph.util.vcs import get_repository
|
||||
from taskgraph.util.vcs import Repository, get_repository
|
||||
from taskgraph.util.yaml import load_yaml
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -143,6 +143,8 @@ def get_decision_parameters(graph_config, options):
|
|||
n: options[n]
|
||||
for n in [
|
||||
"base_repository",
|
||||
"base_ref",
|
||||
"base_rev",
|
||||
"head_repository",
|
||||
"head_rev",
|
||||
"head_ref",
|
||||
|
|
@ -166,6 +168,21 @@ def get_decision_parameters(graph_config, options):
|
|||
except UnicodeDecodeError:
|
||||
commit_message = ""
|
||||
|
||||
parameters["base_ref"] = _determine_more_accurate_base_ref(
|
||||
repo,
|
||||
candidate_base_ref=options.get("base_ref"),
|
||||
head_ref=options.get("head_ref"),
|
||||
base_rev=options.get("base_rev"),
|
||||
)
|
||||
|
||||
parameters["base_rev"] = _determine_more_accurate_base_rev(
|
||||
repo,
|
||||
base_ref=parameters["base_ref"],
|
||||
candidate_base_rev=options.get("base_rev"),
|
||||
head_rev=options.get("head_rev"),
|
||||
env_prefix=_get_env_prefix(graph_config),
|
||||
)
|
||||
|
||||
# Define default filter list, as most configurations shouldn't need
|
||||
# custom filters.
|
||||
parameters["filters"] = [
|
||||
|
|
@ -236,6 +253,68 @@ def get_decision_parameters(graph_config, options):
|
|||
return result
|
||||
|
||||
|
||||
def _determine_more_accurate_base_ref(repo, candidate_base_ref, head_ref, base_rev):
|
||||
base_ref = candidate_base_ref
|
||||
|
||||
if not candidate_base_ref:
|
||||
base_ref = repo.default_branch
|
||||
elif candidate_base_ref == head_ref and base_rev == Repository.NULL_REVISION:
|
||||
logger.info(
|
||||
"base_ref and head_ref are identical but base_rev equals the null revision. "
|
||||
"This is a new branch but Github didn't identify its actual base."
|
||||
)
|
||||
base_ref = repo.default_branch
|
||||
|
||||
if base_ref != candidate_base_ref:
|
||||
logger.info(
|
||||
f'base_ref has been reset from "{candidate_base_ref}" to "{base_ref}".'
|
||||
)
|
||||
|
||||
return base_ref
|
||||
|
||||
|
||||
def _determine_more_accurate_base_rev(
|
||||
repo, base_ref, candidate_base_rev, head_rev, env_prefix
|
||||
):
|
||||
if not candidate_base_rev:
|
||||
logger.info("base_rev is not set.")
|
||||
base_ref_or_rev = base_ref
|
||||
elif candidate_base_rev == Repository.NULL_REVISION:
|
||||
logger.info("base_rev equals the null revision. This branch is a new one.")
|
||||
base_ref_or_rev = base_ref
|
||||
elif not repo.does_revision_exist_locally(candidate_base_rev):
|
||||
logger.warning(
|
||||
"base_rev does not exist locally. It is likely because the branch was force-pushed. "
|
||||
"taskgraph is not able to assess how many commits were changed and assumes it is only "
|
||||
f"the last one. Please set the {env_prefix.upper()}_BASE_REV environment variable "
|
||||
"in the decision task and provide `--base-rev` to taskgraph."
|
||||
)
|
||||
base_ref_or_rev = base_ref
|
||||
else:
|
||||
base_ref_or_rev = candidate_base_rev
|
||||
|
||||
if base_ref_or_rev == base_ref:
|
||||
logger.info(
|
||||
f'Using base_ref "{base_ref}" to determine latest common revision...'
|
||||
)
|
||||
|
||||
base_rev = repo.find_latest_common_revision(base_ref_or_rev, head_rev)
|
||||
if base_rev != candidate_base_rev:
|
||||
if base_ref_or_rev == candidate_base_rev:
|
||||
logger.info("base_rev is not an ancestor of head_rev.")
|
||||
|
||||
logger.info(
|
||||
f'base_rev has been reset from "{candidate_base_rev}" to "{base_rev}".'
|
||||
)
|
||||
|
||||
return base_rev
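
A worked example of how the two helpers combine (a sketch with made-up branch and revision names, e.g. for a brand-new branch where the GitHub event reports the null revision as its base):

base_ref = _determine_more_accurate_base_ref(
    repo,
    candidate_base_ref="main",
    head_ref="my-feature",
    base_rev=Repository.NULL_REVISION,
)
# -> "main": the candidate differs from head_ref, so it is kept as-is.

base_rev = _determine_more_accurate_base_rev(
    repo,
    base_ref=base_ref,
    candidate_base_rev=Repository.NULL_REVISION,
    head_rev="deadbeef" * 5,
    env_prefix="myrepo",
)
# -> repo.find_latest_common_revision("main", head_rev): the null revision
#    forces the fallback to base_ref before computing the merge base.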
|
||||
|
||||
|
||||
def _get_env_prefix(graph_config):
|
||||
repo_keys = list(graph_config["taskgraph"].get("repositories", {}).keys())
|
||||
return repo_keys[0] if repo_keys else ""
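
Sketch of the graph config shape this helper reads; the repository key "myrepo" is hypothetical:

graph_config = {"taskgraph": {"repositories": {"myrepo": {"name": "My Repository"}}}}
assert _get_env_prefix(graph_config) == "myrepo"
# Callers upper-case the prefix to build environment variable names for the
# decision task, e.g. MYREPO_BASE_REV.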
|
||||
|
||||
|
||||
def set_try_config(parameters, task_config_file):
|
||||
if os.path.isfile(task_config_file):
|
||||
logger.info(f"using try tasks from {task_config_file}")
|
||||
|
|
|
|||
|
|
@ -8,23 +8,43 @@ Support for optimizing tasks based on the set of files that have changed.
|
|||
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
from redo import retry
|
||||
|
||||
from .util.memoize import memoize
|
||||
from .util.path import match as match_path
|
||||
from .util.vcs import get_repository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@memoize
|
||||
def get_changed_files(repository, revision):
|
||||
def get_changed_files(head_repository_url, head_rev, base_rev=None):
|
||||
"""
|
||||
Get the set of files changed in the push headed by the given revision.
|
||||
Get the set of files changed between revisions.
|
||||
Responses are cached, so multiple calls with the same arguments are OK.
|
||||
"""
|
||||
url = "{}/json-automationrelevance/{}".format(repository.rstrip("/"), revision)
|
||||
repo_path = os.getcwd()
|
||||
repository = get_repository(repo_path)
|
||||
|
||||
if repository.tool == "hg":
|
||||
# TODO Use VCS version once tested enough
|
||||
return _get_changed_files_json_automationrelevance(
|
||||
head_repository_url, head_rev
|
||||
)
|
||||
|
||||
return repository.get_changed_files(rev=head_rev, base_rev=base_rev)
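
Hedged usage sketch of the rewritten helper; the revision string is a placeholder and arguments are passed positionally, mirroring the call in check() below:

from taskgraph import files_changed

# When the local checkout is Mercurial the helper still queries
# json-automationrelevance on the head repository; for git checkouts it
# diffs base_rev..head_rev locally.
files = files_changed.get_changed_files(
    "https://hg.mozilla.org/try",   # head_repository_url
    "abcdef123456",                 # head_rev (placeholder)
    None,                           # base_rev
)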
|
||||
|
||||
|
||||
def _get_changed_files_json_automationrelevance(head_repository_url, head_rev):
|
||||
"""
|
||||
Get the set of files changed in the push headed by the given revision.
|
||||
"""
|
||||
url = "{}/json-automationrelevance/{}".format(
|
||||
head_repository_url.rstrip("/"), head_rev
|
||||
)
|
||||
logger.debug("Querying version control for metadata: %s", url)
|
||||
|
||||
def get_automationrelevance():
|
||||
|
|
@ -48,18 +68,20 @@ def get_changed_files(repository, revision):
|
|||
|
||||
|
||||
def check(params, file_patterns):
|
||||
"""Determine whether any of the files changed in the indicated push to
|
||||
https://hg.mozilla.org match any of the given file patterns."""
|
||||
repository = params.get("head_repository")
|
||||
revision = params.get("head_rev")
|
||||
if not repository or not revision:
|
||||
"""Determine whether any of the files changed between 2 revisions
|
||||
match any of the given file patterns."""
|
||||
|
||||
head_repository_url = params.get("head_repository")
|
||||
head_rev = params.get("head_rev")
|
||||
if not head_repository_url or not head_rev:
|
||||
logger.warning(
|
||||
"Missing `head_repository` or `head_rev` parameters; "
|
||||
"assuming all files have changed"
|
||||
)
|
||||
return True
|
||||
|
||||
changed_files = get_changed_files(repository, revision)
|
||||
base_rev = params.get("base_rev")
|
||||
changed_files = get_changed_files(head_repository_url, head_rev, base_rev)
|
||||
|
||||
for pattern in file_patterns:
|
||||
for path in changed_files:
|
||||
|
|
|
|||
|
|
@ -51,9 +51,9 @@ class Kind:
|
|||
config = copy.deepcopy(self.config)
|
||||
|
||||
kind_dependencies = config.get("kind-dependencies", [])
|
||||
kind_dependencies_tasks = [
|
||||
task for task in loaded_tasks if task.kind in kind_dependencies
|
||||
]
|
||||
kind_dependencies_tasks = {
|
||||
task.label: task for task in loaded_tasks if task.kind in kind_dependencies
|
||||
}
|
||||
|
||||
inputs = loader(self.name, self.path, config, parameters, loaded_tasks)
|
||||
|
||||
|
|
|
|||
|
|
@ -343,7 +343,7 @@ def show_taskgraph(options):
|
|||
logging.root.setLevel(logging.DEBUG)
|
||||
|
||||
repo = None
|
||||
cur_ref = None
|
||||
cur_rev = None
|
||||
diffdir = None
|
||||
output_file = options["output_file"]
|
||||
|
||||
|
|
@ -361,16 +361,16 @@ def show_taskgraph(options):
|
|||
# as best we can after we're done. In all known cases, using
|
||||
# branch or bookmark (which are both available on the VCS object)
|
||||
# as `branch` is preferable to a specific revision.
|
||||
cur_ref = repo.branch or repo.head_ref[:12]
|
||||
cur_rev = repo.branch or repo.head_rev[:12]
|
||||
|
||||
diffdir = tempfile.mkdtemp()
|
||||
atexit.register(
|
||||
shutil.rmtree, diffdir
|
||||
) # make sure the directory gets cleaned up
|
||||
options["output_file"] = os.path.join(
|
||||
diffdir, f"{options['graph_attr']}_{cur_ref}"
|
||||
diffdir, f"{options['graph_attr']}_{cur_rev}"
|
||||
)
|
||||
print(f"Generating {options['graph_attr']} @ {cur_ref}", file=sys.stderr)
|
||||
print(f"Generating {options['graph_attr']} @ {cur_rev}", file=sys.stderr)
|
||||
|
||||
parameters: List[Any[str, Parameters]] = options.pop("parameters")
|
||||
if not parameters:
|
||||
|
|
@ -418,33 +418,33 @@ def show_taskgraph(options):
|
|||
del sys.modules[mod]
|
||||
|
||||
if options["diff"] == "default":
|
||||
base_ref = repo.base_ref
|
||||
base_rev = repo.base_rev
|
||||
else:
|
||||
base_ref = options["diff"]
|
||||
base_rev = options["diff"]
|
||||
|
||||
try:
|
||||
repo.update(base_ref)
|
||||
base_ref = repo.head_ref[:12]
|
||||
repo.update(base_rev)
|
||||
base_rev = repo.head_rev[:12]
|
||||
options["output_file"] = os.path.join(
|
||||
diffdir, f"{options['graph_attr']}_{base_ref}"
|
||||
diffdir, f"{options['graph_attr']}_{base_rev}"
|
||||
)
|
||||
print(f"Generating {options['graph_attr']} @ {base_ref}", file=sys.stderr)
|
||||
print(f"Generating {options['graph_attr']} @ {base_rev}", file=sys.stderr)
|
||||
generate_taskgraph(options, parameters, logdir)
|
||||
finally:
|
||||
repo.update(cur_ref)
|
||||
repo.update(cur_rev)
|
||||
|
||||
# Generate diff(s)
|
||||
diffcmd = [
|
||||
"diff",
|
||||
"-U20",
|
||||
"--report-identical-files",
|
||||
f"--label={options['graph_attr']}@{base_ref}",
|
||||
f"--label={options['graph_attr']}@{cur_ref}",
|
||||
f"--label={options['graph_attr']}@{base_rev}",
|
||||
f"--label={options['graph_attr']}@{cur_rev}",
|
||||
]
|
||||
|
||||
for spec in parameters:
|
||||
base_path = os.path.join(diffdir, f"{options['graph_attr']}_{base_ref}")
|
||||
cur_path = os.path.join(diffdir, f"{options['graph_attr']}_{cur_ref}")
|
||||
base_path = os.path.join(diffdir, f"{options['graph_attr']}_{base_rev}")
|
||||
cur_path = os.path.join(diffdir, f"{options['graph_attr']}_{cur_rev}")
|
||||
|
||||
params_name = None
|
||||
if len(parameters) > 1:
|
||||
|
|
@ -593,6 +593,15 @@ def image_digest(args):
|
|||
help='Type of repository, either "hg" or "git"',
|
||||
)
|
||||
@argument("--base-repository", required=True, help='URL for "base" repository to clone')
|
||||
@argument(
|
||||
"--base-ref", default="", help='Reference of the revision in the "base" repository'
|
||||
)
|
||||
@argument(
|
||||
"--base-rev",
|
||||
default="",
|
||||
help="Taskgraph decides what to do based on the revision range between "
|
||||
"`--base-rev` and `--head-rev`. Value is determined automatically if not provided",
|
||||
)
|
||||
@argument(
|
||||
"--head-repository",
|
||||
required=True,
|
||||
|
|
|
|||
|
|
@ -33,6 +33,12 @@ here = os.path.abspath(os.path.dirname(__file__))
|
|||
logger = logging.getLogger(__name__)
|
||||
MAX_ROUTES = 10
|
||||
|
||||
registered_morphs = []
|
||||
|
||||
|
||||
def register_morph(func):
|
||||
registered_morphs.append(func)
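
A minimal sketch of how an extra morph could be plugged into this registry (the morph below is hypothetical and only illustrates the expected signature):

@register_morph
def add_hypothetical_morph(taskgraph, label_to_taskid, parameters, graph_config):
    # Inspect or amend the optimized graph here, then hand back the
    # (possibly new) graph and the label to taskId mapping.
    return taskgraph, label_to_taskid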
|
||||
|
||||
|
||||
def amend_taskgraph(taskgraph, label_to_taskid, to_add):
|
||||
"""Add the given tasks to the taskgraph, returning a new taskgraph"""
|
||||
|
|
@ -156,6 +162,7 @@ def make_index_task(parent_task, taskgraph, label_to_taskid, parameters, graph_c
|
|||
return task, taskgraph, label_to_taskid
|
||||
|
||||
|
||||
@register_morph
|
||||
def add_index_tasks(taskgraph, label_to_taskid, parameters, graph_config):
|
||||
"""
|
||||
The TaskCluster queue only allows 10 routes on a task, but we have tasks
|
||||
|
|
@ -196,8 +203,9 @@ def _get_morph_url():
|
|||
return f"{taskgraph_repo}/raw-file/{taskgraph_rev}/src/taskgraph/morph.py"
|
||||
|
||||
|
||||
@register_morph
|
||||
def add_code_review_task(taskgraph, label_to_taskid, parameters, graph_config):
|
||||
logger.debug("Morphing: adding index tasks")
|
||||
logger.debug("Morphing: adding code review task")
|
||||
|
||||
review_config = parameters.get("code-review")
|
||||
if not review_config:
|
||||
|
|
@ -256,12 +264,7 @@ def add_code_review_task(taskgraph, label_to_taskid, parameters, graph_config):
|
|||
|
||||
def morph(taskgraph, label_to_taskid, parameters, graph_config):
|
||||
"""Apply all morphs"""
|
||||
morphs = [
|
||||
add_index_tasks,
|
||||
add_code_review_task,
|
||||
]
|
||||
|
||||
for m in morphs:
|
||||
for m in registered_morphs:
|
||||
taskgraph, label_to_taskid = m(
|
||||
taskgraph, label_to_taskid, parameters, graph_config
|
||||
)
|
||||
|
|
|
|||
|
|
@ -50,13 +50,8 @@ class IndexSearch(OptimizationStrategy):
|
|||
@register_strategy("skip-unless-changed")
|
||||
class SkipUnlessChanged(OptimizationStrategy):
|
||||
def should_remove_task(self, task, params, file_patterns):
|
||||
if params.get("repository_type") != "hg":
|
||||
raise RuntimeError(
|
||||
"SkipUnlessChanged optimization only works with mercurial repositories"
|
||||
)
|
||||
|
||||
# pushlog_id == -1 - this is the case when run from a cron.yml job
|
||||
if params.get("pushlog_id") == -1:
|
||||
# pushlog_id == -1 - this is the case when run from a cron.yml job or on a git repository
|
||||
if params.get("repository_type") == "hg" and params.get("pushlog_id") == -1:
|
||||
return False
|
||||
|
||||
changed = files_changed.check(params, file_patterns)
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ from subprocess import CalledProcessError
|
|||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
|
||||
from mozilla_repo_urls import parse
|
||||
from voluptuous import ALLOW_EXTRA, Any, Optional, Required, Schema
|
||||
|
||||
from taskgraph.util import yaml
|
||||
|
|
@ -31,6 +32,8 @@ class ParameterMismatch(Exception):
|
|||
base_schema = Schema(
|
||||
{
|
||||
Required("base_repository"): str,
|
||||
Required("base_ref"): str,
|
||||
Required("base_rev"): str,
|
||||
Required("build_date"): int,
|
||||
Required("build_number"): int,
|
||||
Required("do_not_optimize"): [str],
|
||||
|
|
@ -77,22 +80,26 @@ def _get_defaults(repo_root=None):
|
|||
repo = get_repository(repo_path)
|
||||
try:
|
||||
repo_url = repo.get_url()
|
||||
project = repo_url.rsplit("/", 1)[1]
|
||||
parsed_url = parse(repo_url)
|
||||
project = parsed_url.repo_name
|
||||
except (CalledProcessError, IndexError):
|
||||
# IndexError is raised if repo url doesn't have any slashes.
|
||||
repo_url = ""
|
||||
project = ""
|
||||
|
||||
default_base_ref = repo.default_branch
|
||||
return {
|
||||
"base_repository": repo_url,
|
||||
"base_ref": default_base_ref,
|
||||
"base_rev": repo.find_latest_common_revision(default_base_ref, repo.head_rev),
|
||||
"build_date": int(time.time()),
|
||||
"build_number": 1,
|
||||
"do_not_optimize": [],
|
||||
"existing_tasks": {},
|
||||
"filters": ["target_tasks_method"],
|
||||
"head_ref": repo.head_ref,
|
||||
"head_ref": repo.branch or repo.head_rev,
|
||||
"head_repository": repo_url,
|
||||
"head_rev": repo.head_ref,
|
||||
"head_rev": repo.head_rev,
|
||||
"head_tag": "",
|
||||
"level": "3",
|
||||
"moz_build_date": datetime.now().strftime("%Y%m%d%H%M%S"),
|
||||
|
|
|
|||
|
|
@ -106,6 +106,13 @@ IS_MACOSX = sys.platform == 'darwin'
|
|||
IS_POSIX = os.name == 'posix'
|
||||
IS_WINDOWS = os.name == 'nt'
|
||||
|
||||
# Both mercurial and git use sha1 as revision identifiers. Luckily, both define
|
||||
# the same value as the null revision.
|
||||
#
|
||||
# https://github.com/git/git/blob/dc04167d378fb29d30e1647ff6ff51dd182bc9a3/t/oid-info/hash-info#L7
|
||||
# https://www.mercurial-scm.org/repo/hg-stable/file/82efc31bd152/mercurial/node.py#l30
|
||||
NULL_REVISION = "0000000000000000000000000000000000000000"
|
||||
|
||||
|
||||
def print_line(prefix, m):
|
||||
now = datetime.datetime.utcnow().isoformat().encode('utf-8')
|
||||
|
|
@ -557,6 +564,8 @@ def git_checkout(
|
|||
destination_path: str,
|
||||
head_repo: str,
|
||||
base_repo: Optional[str],
|
||||
base_ref: Optional[str],
|
||||
base_rev: Optional[str],
|
||||
ref: Optional[str],
|
||||
commit: Optional[str],
|
||||
ssh_key_file: Optional[Path],
|
||||
|
|
@ -591,11 +600,47 @@ def git_checkout(
|
|||
|
||||
retry_required_command(b'vcs', args, extra_env=env)
|
||||
|
||||
if base_ref:
|
||||
args = [
|
||||
'git',
|
||||
'fetch',
|
||||
'origin',
|
||||
base_ref
|
||||
]
|
||||
|
||||
retry_required_command(b'vcs', args, cwd=destination_path, extra_env=env)
|
||||
|
||||
# Create local branch so that taskgraph is able to compute differences
|
||||
# between the head branch and the base one, if needed
|
||||
args = [
|
||||
'git',
|
||||
'checkout',
|
||||
base_ref
|
||||
]
|
||||
|
||||
retry_required_command(b'vcs', args, cwd=destination_path, extra_env=env)
|
||||
|
||||
# When commits are force-pushed (like on a testing branch), base_rev doesn't
|
||||
# exist on base_ref. Fetching it allows taskgraph to compute differences
|
||||
# between the previous state before the force-push and the current state.
|
||||
#
|
||||
# Unlike base_ref just above, there is no need to checkout the revision:
|
||||
# it's immediately available after the fetch.
|
||||
if base_rev and base_rev != NULL_REVISION:
|
||||
args = [
|
||||
'git',
|
||||
'fetch',
|
||||
'origin',
|
||||
base_rev
|
||||
]
|
||||
|
||||
retry_required_command(b'vcs', args, cwd=destination_path, extra_env=env)
|
||||
|
||||
# If a ref isn't provided, we fetch all refs from head_repo, which may be slow
|
||||
args = [
|
||||
'git',
|
||||
'fetch',
|
||||
'--tags',
|
||||
'--no-tags',
|
||||
head_repo,
|
||||
ref if ref else '+refs/heads/*:refs/remotes/work/*'
|
||||
]
|
||||
|
|
@ -606,7 +651,27 @@ def git_checkout(
|
|||
'git',
|
||||
'checkout',
|
||||
'-f',
|
||||
commit if commit else ref
|
||||
]
|
||||
|
||||
if ref:
|
||||
args.extend(['-B', ref])
|
||||
args.append(commit if commit else ref)
|
||||
|
||||
run_required_command(b'vcs', args, cwd=destination_path)
|
||||
|
||||
if os.path.exists(os.path.join(destination_path, '.gitmodules')):
|
||||
args = [
|
||||
'git',
|
||||
'submodule',
|
||||
'init',
|
||||
]
|
||||
|
||||
run_required_command(b'vcs', args, cwd=destination_path)
|
||||
|
||||
args = [
|
||||
'git',
|
||||
'submodule',
|
||||
'update',
|
||||
]
|
||||
|
||||
run_required_command(b'vcs', args, cwd=destination_path)
|
||||
|
|
@ -818,6 +883,8 @@ def collect_vcs_options(args, project, name):
|
|||
|
||||
repo_type = os.environ.get('%s_REPOSITORY_TYPE' % env_prefix)
|
||||
base_repo = os.environ.get('%s_BASE_REPOSITORY' % env_prefix)
|
||||
base_ref = os.environ.get('%s_BASE_REF' % env_prefix)
|
||||
base_rev = os.environ.get('%s_BASE_REV' % env_prefix)
|
||||
head_repo = os.environ.get('%s_HEAD_REPOSITORY' % env_prefix)
|
||||
revision = os.environ.get('%s_HEAD_REV' % env_prefix)
|
||||
ref = os.environ.get('%s_HEAD_REF' % env_prefix)
|
||||
|
|
@ -849,6 +916,8 @@ def collect_vcs_options(args, project, name):
|
|||
'checkout': checkout,
|
||||
'sparse-profile': sparse_profile,
|
||||
'base-repo': base_repo,
|
||||
'base-ref': base_ref,
|
||||
'base-rev': base_rev,
|
||||
'head-repo': head_repo,
|
||||
'revision': revision,
|
||||
'ref': ref,
|
||||
|
|
@ -896,6 +965,8 @@ def vcs_checkout_from_args(options, *, hgmo_fingerprint):
|
|||
options['checkout'],
|
||||
options['head-repo'],
|
||||
options['base-repo'],
|
||||
options['base-ref'],
|
||||
options['base-rev'],
|
||||
ref,
|
||||
revision,
|
||||
ssh_key_file,
|
||||
|
|
|
|||
|
|
@ -46,9 +46,9 @@ class TransformConfig:
|
|||
# the parameters for this task-graph generation run
|
||||
params = attr.ib(type=Parameters)
|
||||
|
||||
# a list of all the tasks associated with the kind dependencies of the
|
||||
# a dict of all the tasks associated with the kind dependencies of the
|
||||
# current kind
|
||||
kind_dependencies_tasks = attr.ib()
|
||||
kind_dependencies_tasks = attr.ib(type=dict)
|
||||
|
||||
# Global configuration of the taskgraph
|
||||
graph_config = attr.ib(type=GraphConfig)
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ def cache_task(config, tasks):
|
|||
return
|
||||
|
||||
digests = {}
|
||||
for task in config.kind_dependencies_tasks:
|
||||
for task in config.kind_dependencies_tasks.values():
|
||||
if "cached_task" in task.attributes:
|
||||
digests[task.label] = format_task_digest(task.attributes["cached_task"])
|
||||
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ def add_dependencies(config, jobs):
|
|||
job.setdefault("soft-dependencies", [])
|
||||
job["soft-dependencies"] += [
|
||||
dep_task.label
|
||||
for dep_task in config.kind_dependencies_tasks
|
||||
for dep_task in config.kind_dependencies_tasks.values()
|
||||
if dep_task.attributes.get("code-review") is True
|
||||
]
|
||||
yield job
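
Because kind_dependencies_tasks is now a dict keyed by label, transforms can also look a dependency up directly instead of scanning; a small illustrative sketch (the label is hypothetical):

dep = config.kind_dependencies_tasks.get("build-linux64/opt")
if dep is not None and dep.attributes.get("code-review"):
    job["soft-dependencies"].append(dep.label)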
|
||||
|
|
|
|||
|
|
@ -67,7 +67,7 @@ transforms.add_validate(docker_image_schema)
|
|||
@transforms.add
|
||||
def fill_template(config, tasks):
|
||||
available_packages = set()
|
||||
for task in config.kind_dependencies_tasks:
|
||||
for task in config.kind_dependencies_tasks.values():
|
||||
if task.kind != "packages":
|
||||
continue
|
||||
name = task.label.replace("packages-", "")
|
||||
|
|
|
|||
|
|
@ -211,7 +211,7 @@ def use_fetches(config, jobs):
|
|||
if value:
|
||||
aliases[f"{config.kind}-{value}"] = label
|
||||
|
||||
for task in config.kind_dependencies_tasks:
|
||||
for task in config.kind_dependencies_tasks.values():
|
||||
if task.kind in ("fetch", "toolchain"):
|
||||
get_attribute(
|
||||
artifact_names,
|
||||
|
|
@ -275,8 +275,8 @@ def use_fetches(config, jobs):
|
|||
else:
|
||||
dep_tasks = [
|
||||
task
|
||||
for task in config.kind_dependencies_tasks
|
||||
if task.label == dep_label
|
||||
for label, task in config.kind_dependencies_tasks.items()
|
||||
if label == dep_label
|
||||
]
|
||||
if len(dep_tasks) != 1:
|
||||
raise Exception(
|
||||
|
|
|
|||
|
|
@ -998,6 +998,8 @@ def build_task(config, tasks):
|
|||
# (and causes scope issues) if it doesn't match the name of the
|
||||
# base repo
|
||||
base_project = config.params["base_repository"].split("/")[-1]
|
||||
if base_project.endswith(".git"):
|
||||
base_project = base_project[:-4]
|
||||
th_project_suffix = "-pr"
|
||||
else:
|
||||
base_project = config.params["project"]
|
||||
|
|
|
|||
|
|
@ -99,9 +99,8 @@ def json_time_from_now(input_str, now=None, datetime_format=False):
|
|||
else:
|
||||
# Sorta a big hack but the json schema validator for date does not like the
|
||||
# ISO dates until 'Z' (for timezone) is added...
|
||||
# the [:23] ensures only whole seconds or milliseconds are included,
|
||||
# not microseconds (see bug 1381801)
|
||||
return time.isoformat()[:23] + "Z"
|
||||
# Microseconds are excluded (see bug 1381801)
|
||||
return time.isoformat(timespec="milliseconds") + "Z"
|
||||
|
||||
|
||||
def current_json_time(datetime_format=False):
|
||||
|
|
@ -112,6 +111,5 @@ def current_json_time(datetime_format=False):
|
|||
if datetime_format is True:
|
||||
return datetime.datetime.utcnow()
|
||||
else:
|
||||
# the [:23] ensures only whole seconds or milliseconds are included,
|
||||
# not microseconds (see bug 1381801)
|
||||
return datetime.datetime.utcnow().isoformat()[:23] + "Z"
|
||||
# Microseconds are excluded (see bug 1381801)
|
||||
return datetime.datetime.utcnow().isoformat(timespec="milliseconds") + "Z"
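
The two forms produce the same string for a timestamp with sub-second precision; the timespec call also keeps emitting milliseconds when microsecond is 0, which the old slice silently dropped. Illustrative values:

import datetime

t = datetime.datetime(2022, 8, 11, 12, 30, 45, 123456)
t.isoformat()[:23] + "Z"                     # '2022-08-11T12:30:45.123Z'
t.isoformat(timespec="milliseconds") + "Z"   # '2022-08-11T12:30:45.123Z'

t0 = datetime.datetime(2022, 8, 11, 12, 30, 45)
t0.isoformat()[:23] + "Z"                    # '2022-08-11T12:30:45Z' (no millis)
t0.isoformat(timespec="milliseconds") + "Z"  # '2022-08-11T12:30:45.000Z'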
|
||||
|
|
|
|||
|
|
@ -3,7 +3,9 @@
|
|||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from abc import ABC, abstractmethod, abstractproperty
|
||||
from shutil import which
|
||||
|
|
@ -15,38 +17,91 @@ from taskgraph.util.path import ancestors
|
|||
|
||||
PUSHLOG_TMPL = "{}/json-pushes?version=2&changeset={}&tipsonly=1&full=1"
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Repository(ABC):
|
||||
# Both mercurial and git use sha1 as revision identifiers. Luckily, both define
|
||||
# the same value as the null revision.
|
||||
#
|
||||
# https://github.com/git/git/blob/dc04167d378fb29d30e1647ff6ff51dd182bc9a3/t/oid-info/hash-info#L7
|
||||
# https://www.mercurial-scm.org/repo/hg-stable/file/82efc31bd152/mercurial/node.py#l30
|
||||
NULL_REVISION = "0000000000000000000000000000000000000000"
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self.binary = which(self.tool)
|
||||
if self.binary is None:
|
||||
raise OSError(f"{self.tool} not found!")
|
||||
self._valid_diff_filter = ("m", "a", "d")
|
||||
|
||||
self._env = os.environ.copy()
|
||||
|
||||
def run(self, *args: str, **kwargs):
|
||||
return_codes = kwargs.pop("return_codes", [])
|
||||
cmd = (self.binary,) + args
|
||||
|
||||
try:
|
||||
return subprocess.check_output(
|
||||
cmd, cwd=self.path, env=self._env, encoding="utf-8", **kwargs
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.returncode in return_codes:
|
||||
return ""
|
||||
raise
|
||||
|
||||
@abstractproperty
|
||||
def tool(self) -> str:
|
||||
"""Version control system being used, either 'hg' or 'git'."""
|
||||
|
||||
@abstractproperty
|
||||
def head_ref(self) -> str:
|
||||
def head_rev(self) -> str:
|
||||
"""Hash of HEAD revision."""
|
||||
|
||||
@abstractproperty
|
||||
def base_ref(self):
|
||||
def base_rev(self):
|
||||
"""Hash of revision the current topic branch is based on."""
|
||||
|
||||
@abstractproperty
|
||||
def branch(self):
|
||||
"""Current branch or bookmark the checkout has active."""
|
||||
|
||||
@abstractproperty
|
||||
def all_remote_names(self):
|
||||
"""Name of all configured remote repositories."""
|
||||
|
||||
@abstractproperty
|
||||
def default_remote_name(self):
|
||||
"""Name the VCS defines for the remote repository when cloning
|
||||
it for the first time. This name may not exist anymore if users
|
||||
changed the default configuration, for instance."""
|
||||
|
||||
@abstractproperty
|
||||
def remote_name(self):
|
||||
"""Name of the remote repository."""
|
||||
|
||||
def _get_most_suitable_remote(self, remote_instructions):
|
||||
remotes = self.all_remote_names
|
||||
if len(remotes) == 1:
|
||||
return remotes[0]
|
||||
|
||||
if self.default_remote_name in remotes:
|
||||
return self.default_remote_name
|
||||
|
||||
first_remote = remotes[0]
|
||||
logger.warning(
|
||||
f"Unable to determine which remote repository to use between: {remotes}. "
|
||||
f'Arbitrarily using the first one "{first_remote}". Please set an '
|
||||
f"`{self.default_remote_name}` remote if the arbitrarily selected one "
|
||||
f"is not right. To do so: {remote_instructions}"
|
||||
)
|
||||
|
||||
return first_remote
|
||||
|
||||
@abstractproperty
|
||||
def default_branch(self):
|
||||
"""Name of the default branch."""
|
||||
|
||||
@abstractmethod
|
||||
def get_url(self, remote=None):
|
||||
"""Get URL of the upstream repository."""
|
||||
|
|
@ -55,6 +110,43 @@ class Repository(ABC):
|
|||
def get_commit_message(self, revision=None):
|
||||
"""Commit message of specified revision or current commit."""
|
||||
|
||||
@abstractmethod
|
||||
def get_changed_files(self, diff_filter, mode="unstaged", rev=None, base_rev=None):
|
||||
"""Return a list of files that are changed in:
|
||||
* either this repository's working copy,
|
||||
* or at a given revision (``rev``)
|
||||
* or between 2 revisions (``base_rev`` and ``rev``)
|
||||
|
||||
``diff_filter`` controls which kinds of modifications are returned.
|
||||
It is a string which may only contain the following characters:
|
||||
|
||||
A - Include files that were added
|
||||
D - Include files that were deleted
|
||||
M - Include files that were modified
|
||||
|
||||
By default, all three will be included.
|
||||
|
||||
``mode`` can be one of 'unstaged', 'staged' or 'all'. Only has an
|
||||
effect on git. Defaults to 'unstaged'.
|
||||
|
||||
``rev`` is a specifier for which changesets to consider for
|
||||
changes. The exact meaning depends on the vcs system being used.
|
||||
|
||||
``base_rev`` specifies the range of changesets. This parameter cannot
|
||||
be used without ``rev``. The range includes ``rev`` but excludes
|
||||
``base_rev``.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def get_outgoing_files(self, diff_filter, upstream):
|
||||
"""Return a list of changed files compared to upstream.
|
||||
|
||||
``diff_filter`` works the same as `get_changed_files`.
|
||||
``upstream`` is a remote ref to compare against. If unspecified,
|
||||
this will be determined automatically. If there is no remote ref,
|
||||
a MissingUpstreamRepo exception will be raised.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def working_directory_clean(self, untracked=False, ignored=False):
|
||||
"""Determine if the working directory is free of modifications.
|
||||
|
|
@ -71,20 +163,33 @@ class Repository(ABC):
|
|||
def update(self, ref):
|
||||
"""Update the working directory to the specified reference."""
|
||||
|
||||
@abstractmethod
|
||||
def find_latest_common_revision(self, base_ref_or_rev, head_rev):
|
||||
"""Find the latest revision that is common to both the given
|
||||
``head_rev`` and ``base_ref_or_rev``"""
|
||||
|
||||
@abstractmethod
|
||||
def does_revision_exist_locally(self, revision):
|
||||
"""Check whether this revision exists in the local repository.
|
||||
|
||||
If this function returns an unexpected value, then make sure
|
||||
the revision was fetched from the remote repository."""
|
||||
|
||||
|
||||
class HgRepository(Repository):
|
||||
tool = "hg"
|
||||
default_remote_name = "default"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._env["HGPLAIN"] = "1"
|
||||
|
||||
@property
|
||||
def head_ref(self):
|
||||
def head_rev(self):
|
||||
return self.run("log", "-r", ".", "-T", "{node}").strip()
|
||||
|
||||
@property
|
||||
def base_ref(self):
|
||||
def base_rev(self):
|
||||
return self.run("log", "-r", "last(ancestors(.) and public())", "-T", "{node}")
|
||||
|
||||
@property
|
||||
|
|
@ -97,13 +202,85 @@ class HgRepository(Repository):
|
|||
|
||||
return None
|
||||
|
||||
@property
|
||||
def all_remote_names(self):
|
||||
remotes = self.run("paths", "--quiet").splitlines()
|
||||
if not remotes:
|
||||
raise RuntimeError("No remotes defined")
|
||||
return remotes
|
||||
|
||||
@property
|
||||
def remote_name(self):
|
||||
return self._get_most_suitable_remote(
|
||||
"Edit .hg/hgrc and add:\n\n[paths]\ndefault = $URL",
|
||||
)
|
||||
|
||||
@property
|
||||
def default_branch(self):
|
||||
# Mercurial recommends keeping "default"
|
||||
# https://www.mercurial-scm.org/wiki/StandardBranching#Don.27t_use_a_name_other_than_default_for_your_main_development_branch
|
||||
return "default"
|
||||
|
||||
def get_url(self, remote="default"):
|
||||
return self.run("path", "-T", "{url}", remote).strip()
|
||||
|
||||
def get_commit_message(self, revision=None):
|
||||
revision = revision or self.head_ref
|
||||
revision = revision or self.head_rev
|
||||
return self.run("log", "-r", ".", "-T", "{desc}")
|
||||
|
||||
def _format_diff_filter(self, diff_filter, for_status=False):
|
||||
df = diff_filter.lower()
|
||||
assert all(f in self._valid_diff_filter for f in df)
|
||||
|
||||
# When looking at the changes in the working directory, the hg status
|
||||
# command uses 'd' for files that have been deleted with a non-hg
|
||||
# command, and 'r' for files that have been `hg rm`ed. Use both.
|
||||
return df.replace("d", "dr") if for_status else df
|
||||
|
||||
def _files_template(self, diff_filter):
|
||||
template = ""
|
||||
df = self._format_diff_filter(diff_filter)
|
||||
if "a" in df:
|
||||
template += "{file_adds % '{file}\\n'}"
|
||||
if "d" in df:
|
||||
template += "{file_dels % '{file}\\n'}"
|
||||
if "m" in df:
|
||||
template += "{file_mods % '{file}\\n'}"
|
||||
return template
|
||||
|
||||
def get_changed_files(
|
||||
self, diff_filter="ADM", mode="unstaged", rev=None, base_rev=None
|
||||
):
|
||||
if rev is None:
|
||||
if base_rev is not None:
|
||||
raise ValueError("Cannot specify `base_rev` without `rev`")
|
||||
# Use --no-status to print just the filename.
|
||||
df = self._format_diff_filter(diff_filter, for_status=True)
|
||||
return self.run("status", "--no-status", f"-{df}").splitlines()
|
||||
else:
|
||||
template = self._files_template(diff_filter)
|
||||
revision_argument = rev if base_rev is None else f"{base_rev}~-1::{rev}"
|
||||
return self.run("log", "-r", revision_argument, "-T", template).splitlines()
|
||||
|
||||
def get_outgoing_files(self, diff_filter="ADM", upstream=None):
|
||||
template = self._files_template(diff_filter)
|
||||
|
||||
if not upstream:
|
||||
return self.run(
|
||||
"log", "-r", "draft() and ancestors(.)", "--template", template
|
||||
).split()
|
||||
|
||||
return self.run(
|
||||
"outgoing",
|
||||
"-r",
|
||||
".",
|
||||
"--quiet",
|
||||
"--template",
|
||||
template,
|
||||
upstream,
|
||||
return_codes=(1,),
|
||||
).split()
|
||||
|
||||
def working_directory_clean(self, untracked=False, ignored=False):
|
||||
args = ["status", "--modified", "--added", "--removed", "--deleted"]
|
||||
if untracked:
|
||||
|
|
@ -118,34 +295,171 @@ class HgRepository(Repository):
|
|||
def update(self, ref):
|
||||
return self.run("update", "--check", ref)
|
||||
|
||||
def find_latest_common_revision(self, base_ref_or_rev, head_rev):
|
||||
return self.run(
|
||||
"log",
|
||||
"-r",
|
||||
f"last(ancestors('{base_ref_or_rev}') and ancestors('{head_rev}'))",
|
||||
"--template",
|
||||
"{node}",
|
||||
).strip()
|
||||
|
||||
def does_revision_exist_locally(self, revision):
|
||||
try:
|
||||
return self.run("log", "-r", revision).strip() != ""
|
||||
except subprocess.CalledProcessError as e:
|
||||
# Error code 255 comes with the message:
|
||||
# "abort: unknown revision $REVISION"
|
||||
if e.returncode == 255:
|
||||
return False
|
||||
raise
|
||||
|
||||
|
||||
class GitRepository(Repository):
|
||||
tool = "git"
|
||||
default_remote_name = "origin"
|
||||
|
||||
_LS_REMOTE_PATTERN = re.compile(r"ref:\s+refs/heads/(?P<branch_name>\S+)\s+HEAD")
|
||||
|
||||
@property
|
||||
def head_ref(self):
|
||||
def head_rev(self):
|
||||
return self.run("rev-parse", "--verify", "HEAD").strip()
|
||||
|
||||
@property
|
||||
def base_ref(self):
|
||||
def base_rev(self):
|
||||
refs = self.run(
|
||||
"rev-list", "HEAD", "--topo-order", "--boundary", "--not", "--remotes"
|
||||
).splitlines()
|
||||
if refs:
|
||||
return refs[-1][1:] # boundary starts with a prefix `-`
|
||||
return self.head_ref
|
||||
return self.head_rev
|
||||
|
||||
@property
|
||||
def branch(self):
|
||||
return self.run("branch", "--show-current").strip() or None
|
||||
|
||||
@property
|
||||
def all_remote_names(self):
|
||||
remotes = self.run("remote").splitlines()
|
||||
if not remotes:
|
||||
raise RuntimeError("No remotes defined")
|
||||
return remotes
|
||||
|
||||
@property
|
||||
def remote_name(self):
|
||||
try:
|
||||
remote_branch_name = self.run(
|
||||
"rev-parse", "--verify", "--abbrev-ref", "--symbolic-full-name", "@{u}"
|
||||
).strip()
|
||||
return remote_branch_name.split("/")[0]
|
||||
except subprocess.CalledProcessError as e:
|
||||
# Error code 128 comes with the message:
|
||||
# "fatal: no upstream configured for branch $BRANCH"
|
||||
if e.returncode != 128:
|
||||
raise
|
||||
|
||||
return self._get_most_suitable_remote("`git remote add origin $URL`")
|
||||
|
||||
@property
|
||||
def default_branch(self):
|
||||
try:
|
||||
# this one works if the current repo was cloned from an existing
|
||||
# repo elsewhere
|
||||
return self._get_default_branch_from_cloned_metadata()
|
||||
except (subprocess.CalledProcessError, RuntimeError):
|
||||
pass
|
||||
|
||||
try:
|
||||
# This call works if you have (network) access to the repo
|
||||
return self._get_default_branch_from_remote_query()
|
||||
except (subprocess.CalledProcessError, RuntimeError):
|
||||
pass
|
||||
|
||||
# this one is the last resort in case the remote is not accessible and
|
||||
# the local repo is where `git init` was made
|
||||
return self._guess_default_branch()
|
||||
|
||||
def _get_default_branch_from_remote_query(self):
|
||||
# This function requires network access to the repo
|
||||
remote_name = self.remote_name
|
||||
output = self.run("ls-remote", "--symref", remote_name, "HEAD")
|
||||
matches = self._LS_REMOTE_PATTERN.search(output)
|
||||
if not matches:
|
||||
raise RuntimeError(
|
||||
f'Could not find the default branch of remote repository "{remote_name}". '
|
||||
"Got: {output}"
|
||||
)
|
||||
|
||||
branch_name = matches.group("branch_name")
|
||||
return f"{remote_name}/{branch_name}"
|
||||
|
||||
def _get_default_branch_from_cloned_metadata(self):
|
||||
return self.run("rev-parse", "--abbrev-ref", f"{self.remote_name}/HEAD").strip()
|
||||
|
||||
def _guess_default_branch(self):
|
||||
branches = [
|
||||
line.strip()
|
||||
for line in self.run(
|
||||
"branch", "--all", "--no-color", "--format=%(refname)"
|
||||
).splitlines()
|
||||
for candidate_branch in ("main", "master", "branches/default/tip")
|
||||
if line.strip().endswith(candidate_branch)
|
||||
]
|
||||
|
||||
if len(branches) == 1:
|
||||
return branches[0]
|
||||
|
||||
raise RuntimeError(f"Unable to find default branch. Got: {branches}")
|
||||
|
||||
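The `default_branch` property above tries three strategies in order: clone metadata (`<remote>/HEAD`), a network query (`git ls-remote --symref`), and finally a guess over well-known branch names. A minimal sketch of the same fallback chain using plain git commands; it assumes a remote named `origin` and is illustrative rather than part of the patch:

import subprocess

def _git(repo, *args):
    return subprocess.run(
        ["git", *args], cwd=repo, capture_output=True, text=True, check=True
    ).stdout.strip()

def default_branch(repo="."):
    # 1. Metadata left behind by `git clone` (origin/HEAD).
    try:
        return _git(repo, "rev-parse", "--abbrev-ref", "origin/HEAD")
    except subprocess.CalledProcessError:
        pass
    # 2. Ask the remote directly (requires network access).
    try:
        for line in _git(repo, "ls-remote", "--symref", "origin", "HEAD").splitlines():
            if line.startswith("ref:"):
                return "origin/" + line.split()[1].rsplit("/", 1)[-1]
    except subprocess.CalledProcessError:
        pass
    # 3. Last resort: guess from well-known local branch names.
    branches = _git(repo, "branch", "--all", "--format=%(refname)").splitlines()
    for candidate in ("main", "master"):
        matches = [b.strip() for b in branches if b.strip().endswith(candidate)]
        if len(matches) == 1:
            return matches[0]
    raise RuntimeError("Unable to determine the default branch")
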
def get_url(self, remote="origin"):
|
||||
return self.run("remote", "get-url", remote).strip()
|
||||
|
||||
def get_commit_message(self, revision=None):
|
||||
revision = revision or self.head_ref
|
||||
revision = revision or self.head_rev
|
||||
return self.run("log", "-n1", "--format=%B")
|
||||
|
||||
def get_changed_files(
|
||||
self, diff_filter="ADM", mode="unstaged", rev=None, base_rev=None
|
||||
):
|
||||
assert all(f.lower() in self._valid_diff_filter for f in diff_filter)
|
||||
|
||||
if rev is None:
|
||||
if base_rev is not None:
|
||||
raise ValueError("Cannot specify `base_rev` without `rev`")
|
||||
cmd = ["diff"]
|
||||
if mode == "staged":
|
||||
cmd.append("--cached")
|
||||
elif mode == "all":
|
||||
cmd.append("HEAD")
|
||||
else:
|
||||
revision_argument = (
|
||||
f"{rev}~1..{rev}" if base_rev is None else f"{base_rev}..{rev}"
|
||||
)
|
||||
cmd = ["log", "--format=format:", revision_argument]
|
||||
|
||||
cmd.append("--name-only")
|
||||
cmd.append("--diff-filter=" + diff_filter.upper())
|
||||
|
||||
files = self.run(*cmd).splitlines()
|
||||
return [f for f in files if f]
|
||||
|
||||
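A quick illustration of the calling modes of `get_changed_files` above; `repo` is assumed to be a `GitRepository` instance and the revision names are illustrative:

repo.get_changed_files(mode="unstaged")                      # git diff --name-only ...
repo.get_changed_files(mode="staged")                        # git diff --cached --name-only ...
repo.get_changed_files(mode="all")                           # git diff HEAD --name-only ...
repo.get_changed_files(rev="HEAD")                           # files touched by HEAD only
repo.get_changed_files(rev="HEAD", base_rev="origin/main")   # files in base_rev..rev
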
def get_outgoing_files(self, diff_filter="ADM", upstream=None):
|
||||
assert all(f.lower() in self._valid_diff_filter for f in diff_filter)
|
||||
|
||||
not_condition = upstream if upstream else "--remotes"
|
||||
|
||||
files = self.run(
|
||||
"log",
|
||||
"--name-only",
|
||||
f"--diff-filter={diff_filter.upper()}",
|
||||
"--oneline",
|
||||
"--pretty=format:",
|
||||
"HEAD",
|
||||
"--not",
|
||||
not_condition,
|
||||
).splitlines()
|
||||
return [f for f in files if f]
|
||||
|
||||
def working_directory_clean(self, untracked=False, ignored=False):
|
||||
args = ["status", "--porcelain"]
|
||||
|
||||
|
|
@@ -167,6 +481,19 @@ class GitRepository(Repository):
    def update(self, ref):
        self.run("checkout", ref)

    def find_latest_common_revision(self, base_ref_or_rev, head_rev):
        return self.run("merge-base", base_ref_or_rev, head_rev).strip()

    def does_revision_exist_locally(self, revision):
        try:
            return self.run("cat-file", "-t", revision).strip() == "commit"
        except subprocess.CalledProcessError as e:
            # Error code 128 comes with the message:
            # "git cat-file: could not get object info"
            if e.returncode == 128:
                return False
            raise

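`find_latest_common_revision` is a thin wrapper around `git merge-base`. A hedged sketch (the base branch name is illustrative) of how a caller could combine it with `does_revision_exist_locally` to pick a safe base revision:

# Illustrative only; `repo` is assumed to be a GitRepository instance.
base_ref = "origin/main"
if repo.does_revision_exist_locally(base_ref):
    base_rev = repo.find_latest_common_revision(base_ref, repo.head_rev)
else:
    # The base ref is not known locally, e.g. a shallow or single-branch clone.
    base_rev = repo.head_rev
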
def get_repository(path):
    """Get a repository object for the repository at `path`.

@@ -13,6 +13,7 @@ from taskgraph.config import GraphConfig
from taskgraph.parameters import Parameters
from taskgraph.taskgraph import TaskGraph
from taskgraph.util.attributes import match_run_on_projects
from taskgraph.util.treeherder import join_symbol

logger = logging.getLogger(__name__)

@@ -131,15 +132,26 @@ def verify_task_graph_symbol(task, taskgraph, scratch_pad, graph_config, paramet
treeherder = extra["treeherder"]
|
||||
|
||||
collection_keys = tuple(sorted(treeherder.get("collection", {}).keys()))
|
||||
if len(collection_keys) != 1:
|
||||
raise Exception(
|
||||
"Task {} can't be in multiple treeherder collections "
|
||||
"(the part of the platform after `/`): {}".format(
|
||||
task.label, collection_keys
|
||||
)
|
||||
)
|
||||
platform = treeherder.get("machine", {}).get("platform")
|
||||
group_symbol = treeherder.get("groupSymbol")
|
||||
symbol = treeherder.get("symbol")
|
||||
|
||||
key = (collection_keys, platform, group_symbol, symbol)
|
||||
key = (platform, collection_keys[0], group_symbol, symbol)
|
||||
if key in scratch_pad:
|
||||
raise Exception(
|
||||
"conflict between `{}`:`{}` for values `{}`".format(
|
||||
task.label, scratch_pad[key], key
|
||||
"Duplicate treeherder platform and symbol in tasks "
|
||||
"`{}`and `{}`: {} {}".format(
|
||||
task.label,
|
||||
scratch_pad[key],
|
||||
f"{platform}/{collection_keys[0]}",
|
||||
join_symbol(group_symbol, symbol),
|
||||
)
|
||||
)
|
||||
else:
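
An illustration of what the reworked duplicate check above catches (task labels and treeherder values below are made up): any two tasks that resolve to the same platform, collection, group symbol and symbol now map to the same key and are reported together with their platform/collection and joined symbol.

scratch_pad = {}
for label, platform, collection, group, symbol in [
    ("test-linux64/opt-mochitest-1", "linux64", "opt", "M", "1"),
    ("test-linux64/opt-mochitest-dup", "linux64", "opt", "M", "1"),
]:
    key = (platform, collection, group, symbol)
    if key in scratch_pad:
        raise Exception(
            "Duplicate treeherder platform and symbol in tasks "
            f"`{label}` and `{scratch_pad[key]}`: {platform}/{collection} {group}({symbol})"
        )
    scratch_pad[key] = label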