Bug 1398277: special-case retriggering of tasks not in the taskgraph; r=bstack

This will apply to cron tasks, action tasks, and decision tasks.  It is a
distinct retrigger implementation because (a) we do not want to follow
dependencies, and (b) it takes a lot of scopes to create a decision task, so we
need to limit access to this action.
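
In outline, the new action re-submits the stored task definition under a fresh
taskId, with its datestamps rewritten relative to creation time, and it
deliberately does not walk dependency edges. A rough sketch of that flow,
assuming a configured taskcluster Queue client (``queue``) and reusing the
helpers added in this patch; error handling is omitted since the action is
best-effort:

from slugid import nice as slugid

from taskgraph.actions.util import create_task_from_def, relativize_datestamps


def retrigger_single_task(parameters, queue, task_id):
    # The task is not in this push's task-graph, so fetch its definition
    # straight from the queue rather than from full-task-graph.json.
    task_def = queue.task(task_id)

    # Rewrite created/deadline/expires (and any nested datestamps) as
    # {'relative-datestamp': ...} so they are re-anchored to "now".
    task_def = relativize_datestamps(task_def)

    # Re-create the task verbatim under a fresh taskId.  Dependencies are
    # deliberately not retriggered, and this may fail if the hook lacks the
    # substantial scopes a decision task needs; it is best-effort.
    create_task_from_def(slugid(), task_def, parameters['level'])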

MozReview-Commit-ID: 21DVSiagcrO

--HG--
extra : rebase_source : 6f027e349e245e4aa4dbed81145a0a5d75218cb1
extra : histedit_source : eff99aee5a0e7496b0734748b29739480eb0e3fb
Dustin J. Mitchell 2018-07-04 02:46:59 +00:00
parent 5ab4495828
commit 91f4fe8c61
9 changed files with 181 additions and 8 deletions


@@ -48,12 +48,18 @@ tasks:
tags:
$if: 'tasks_for == "hg-push"'
then: {createdForUser: "${ownerEmail}"}
then:
createdForUser: "${ownerEmail}"
kind: decision-task
else:
$if: 'tasks_for == "action"'
then:
createdForUser: '${ownerEmail}'
kind: 'action-callback'
else:
$if: 'tasks_for == "cron"'
then:
kind: cron-task
routes:
$flatten:
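
These ``kind`` tags are what the new action keys off: in the actions spec, an
action's ``context`` is a list of tag-sets, and an action applies to a task
when the task's tags are a superset of at least one of those sets. A small
illustration of that matching rule (the ``action_applies_to`` helper is
hypothetical, not part of the patch):

def action_applies_to(context, task_tags):
    """True if the task's tags are a superset of at least one tag-set in context."""
    return any(
        all(task_tags.get(key) == value for key, value in tag_set.items())
        for tag_set in context
    )

# the context used by the retrigger-decision action below
context = [{'kind': 'decision-task'}, {'kind': 'action-callback'}, {'kind': 'cron-task'}]

# a cron task tagged by the template above matches...
assert action_applies_to(context, {'kind': 'cron-task'})
# ...while a task with no 'kind' tag does not
assert not action_applies_to(context, {'createdForUser': 'someone@example.com'})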


@@ -8,13 +8,15 @@ from __future__ import absolute_import, print_function, unicode_literals
import json
import logging
import textwrap
from slugid import nice as slugid
from .util import (
    combine_task_graph_files,
    create_tasks,
    fetch_graph_and_labels,
    relativize_datestamps,
    create_task_from_def,
    fetch_graph_and_labels
)
from ..util.parameterization import resolve_task_references
from .registry import register_callback_action
@@ -143,6 +145,35 @@ def mochitest_retrigger_action(parameters, graph_config, input, task_group_id, t
    create_task_from_def(new_task_id, new_task_definition, parameters['level'])


@register_callback_action(
    title='Retrigger',
    name='retrigger',
    symbol='rt',
    kind='hook',
    cb_name='retrigger-decision',
    description=textwrap.dedent('''\
        Create a clone of the task (retriggering decision, action, and cron tasks requires
        special scopes).'''),
    order=11,
    context=[
        {'kind': 'decision-task'},
        {'kind': 'action-callback'},
        {'kind': 'cron-task'},
    ],
)
def retrigger_decision_action(parameters, graph_config, input, task_group_id, task_id, task):
    """For a single task, we try to just run exactly the same task once more.
    It's quite possible that we don't have the scopes to do so (especially for
    an action), but this is best-effort."""
    decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
        parameters, graph_config)
    # make all of the timestamps relative; they will then be turned back into
    # absolute timestamps relative to the current time.
    task = relativize_datestamps(task)
    create_task_from_def(slugid(), task, parameters['level'])


@register_callback_action(
    title='Retrigger',
    name='retrigger',
@@ -150,9 +181,9 @@ def mochitest_retrigger_action(parameters, graph_config, input, task_group_id, t
    kind='hook',
    generic=True,
    description=(
        'Create a clone of the task.\n\n'
        'Create a clone of the task.'
    ),
    order=11, # must be greater than other orders in this file, as this is the fallback version
    order=19, # must be greater than other orders in this file, as this is the fallback version
    context=[{}],
    schema={
        'type': 'object',
@@ -181,6 +212,7 @@ def retrigger_action(parameters, graph_config, input, task_group_id, task_id, ta
        parameters, graph_config)
    label = task['metadata']['name']
    with_downstream = ' '
    to_run = [label]


@@ -10,6 +10,7 @@ import copy
import logging
import requests
import os
import re
from requests.exceptions import HTTPError
@@ -17,7 +18,13 @@ from taskgraph import create
from taskgraph.decision import read_artifact, write_artifact
from taskgraph.taskgraph import TaskGraph
from taskgraph.optimize import optimize_task_graph
from taskgraph.util.taskcluster import get_session, find_task_id, get_artifact, list_tasks
from taskgraph.util.taskcluster import (
    get_session,
    find_task_id,
    get_artifact,
    list_tasks,
    parse_time,
)
logger = logging.getLogger(__name__)
@@ -164,3 +171,30 @@ def combine_task_graph_files(suffixes):
    for suffix in suffixes:
        all.update(read_artifact('task-graph-{}.json'.format(suffix)))
    write_artifact('task-graph.json', all)


def relativize_datestamps(task_def):
    """
    Given a task definition as received from the queue, convert all datestamps
    to {relative_datestamp: ..} format, with the task creation time as "now".
    The result is useful for handing to ``create_task``.
    """
    base = parse_time(task_def['created'])

    # borrowed from https://github.com/epoberezkin/ajv/blob/master/lib/compile/formats.js
    ts_pattern = re.compile(
        r'^\d\d\d\d-[0-1]\d-[0-3]\d[t\s]'
        r'(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?'
        r'(?:z|[+-]\d\d:\d\d)$', re.I)

    def recurse(value):
        if isinstance(value, basestring):
            if ts_pattern.match(value):
                value = parse_time(value)
                diff = value - base
                return {'relative-datestamp': '{} seconds'.format(int(diff.total_seconds()))}
        if isinstance(value, list):
            return [recurse(e) for e in value]
        if isinstance(value, dict):
            return {k: recurse(v) for k, v in value.items()}
        return value

    return recurse(task_def)
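
A quick illustration of the conversion (the values here are made up; the new
test_actions_util.py below exercises the same behaviour):

task_def = {
    'created': '2018-07-04T00:00:00.000Z',
    'deadline': '2018-07-05T00:00:00.000Z',
    'payload': {'env': {'PUSH_KIND': 'hg-push'}},
}
rel = relativize_datestamps(task_def)
# 'created' is the anchor, the deadline one day later becomes 86400 seconds,
# and strings that are not datestamps pass through untouched.
assert rel['created'] == {'relative-datestamp': '0 seconds'}
assert rel['deadline'] == {'relative-datestamp': '86400 seconds'}
assert rel['payload']['env']['PUSH_KIND'] == 'hg-push'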


@@ -30,6 +30,9 @@ class TaskGraph(object):
        "Get a task by label"
        return self.tasks[label]

    def __contains__(self, label):
        return label in self.tasks

    def __iter__(self):
        "Iterate over tasks in undefined order"
        return self.tasks.itervalues()


@@ -2,6 +2,7 @@
subsuite = taskgraph
skip-if = python == 3
[test_actions_util.py]
[test_create.py]
[test_cron_util.py]
[test_decision.py]
@@ -21,6 +22,7 @@ skip-if = python == 3
[test_util_python_path.py]
[test_util_runnable_jobs.py]
[test_util_schema.py]
[test_util_taskcluster.py]
[test_util_templates.py]
[test_util_time.py]
[test_util_treeherder.py]


@@ -0,0 +1,46 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import unittest
from mozunit import main
from taskgraph.actions.util import (
    relativize_datestamps
)

TASK_DEF = {
    'created': '2017-10-10T18:33:03.460Z',
    # note that this is not an even number of seconds off!
    'deadline': '2017-10-11T18:33:03.461Z',
    'dependencies': [],
    'expires': '2018-10-10T18:33:04.461Z',
    'payload': {
        'artifacts': {
            'public': {
                'expires': '2018-10-10T18:33:03.463Z',
                'path': '/builds/worker/artifacts',
                'type': 'directory',
            },
        },
        'maxRunTime': 1800,
    },
}


class TestRelativize(unittest.TestCase):

    def test_relativize(self):
        rel = relativize_datestamps(TASK_DEF)
        import pprint
        pprint.pprint(rel)
        assert rel['created'] == {'relative-datestamp': '0 seconds'}
        assert rel['deadline'] == {'relative-datestamp': '86400 seconds'}
        assert rel['expires'] == {'relative-datestamp': '31536001 seconds'}
        assert rel['payload']['artifacts']['public']['expires'] == \
            {'relative-datestamp': '31536000 seconds'}


if __name__ == '__main__':
    main()


@@ -74,6 +74,27 @@ class TestTaskGraph(unittest.TestCase):
        tasks, new_graph = TaskGraph.from_json(graph.to_json())
        self.assertEqual(graph, new_graph)

    simple_graph = TaskGraph(tasks={
        'a': Task(
            kind='fancy',
            label='a',
            attributes={},
            dependencies={'prereq': 'b'}, # must match edges, below
            optimization={'seta': None},
            task={'task': 'def'}),
        'b': Task(
            kind='pre',
            label='b',
            attributes={},
            dependencies={},
            optimization={'seta': None},
            task={'task': 'def2'}),
    }, graph=Graph(nodes={'a', 'b'}, edges={('a', 'b', 'prereq')}))

    def test_contains(self):
        assert 'a' in self.simple_graph
        assert 'c' not in self.simple_graph


if __name__ == '__main__':
    main()


@@ -0,0 +1,24 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import datetime
import unittest
import mozunit
from taskgraph.util.taskcluster import (
    parse_time
)


class TestTCUtils(unittest.TestCase):

    def test_parse_time(self):
        exp = datetime.datetime(2018, 10, 10, 18, 33, 3, 463000)
        assert parse_time('2018-10-10T18:33:03.463Z') == exp


if __name__ == '__main__':
    mozunit.main()


@@ -151,10 +151,15 @@ def list_tasks(index_path, use_proxy=False):
    # all of these tasks should be created with the same expires time so they end up in
    # order from earliest to latest action. If more correctness is needed, consider
    # fetching each task and sorting on the created date.
    results.sort(key=lambda t: datetime.datetime.strptime(t['expires'], '%Y-%m-%dT%H:%M:%S.%fZ'))
    results.sort(key=lambda t: parse_time(t['expires']))
    return [t['taskId'] for t in results]


def parse_time(timestamp):
    """Turn a "JSON timestamp" as used in TC APIs into a datetime"""
    return datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')


def get_task_url(task_id, use_proxy=False):
    if use_proxy:
        TASK_URL = 'http://taskcluster/queue/v1/task/{}'