RELENG-805 - Pull taskgraph from pypi

upstream-sync
Authored by Heitor Neiva 2 years ago; committed by mergify[bot]
parent 4268c13f89
commit 1647d93591

@@ -6,301 +6,287 @@ policy:
   pullRequests: collaborators
 tasks:
   - $let:
-      taskgraph:
-        branch: taskgraph
-        revision: b6890b6736403f053b731cbd4aabe43819a14acf
       trustDomain: mobile
-    in:
-      $let:
       # Github events have this stuff in different places...
       ownerEmail:
         $if: 'tasks_for in ["cron", "action"]'
         then: '${tasks_for}@noreply.mozilla.org'
         else:
           $if: 'event.sender.login == "bors[bot]"'
           then: 'skaspari+mozlando@mozilla.com' # It must match what's in bors.toml
           else:
             $if: 'tasks_for == "github-push"'
             then: '${event.pusher.email}'
             else:
               $if: 'tasks_for == "github-pull-request"'
               then: '${event.pull_request.user.login}@users.noreply.github.com'
       baseRepoUrl:
         $if: 'tasks_for == "github-push"'
         then: '${event.repository.html_url}'
         else:
           $if: 'tasks_for == "github-pull-request"'
           then: '${event.pull_request.base.repo.html_url}'
           else:
             $if: 'tasks_for in ["cron", "action"]'
             then: '${repository.url}'
       repoUrl:
         $if: 'tasks_for == "github-push"'
         then: '${event.repository.html_url}'
         else:
           $if: 'tasks_for == "github-pull-request"'
           then: '${event.pull_request.head.repo.html_url}'
           else:
             $if: 'tasks_for in ["cron", "action"]'
             then: '${repository.url}'
       project:
         $if: 'tasks_for == "github-push"'
         then: '${event.repository.name}'
         else:
           $if: 'tasks_for == "github-pull-request"'
           then: '${event.pull_request.head.repo.name}'
           else:
             $if: 'tasks_for in ["cron", "action"]'
             then: '${repository.project}'
       head_branch:
         $if: 'tasks_for == "github-pull-request"'
         then: ${event.pull_request.head.ref}
         else:
           $if: 'tasks_for == "github-push"'
           then: ${event.ref}
           else:
             $if: 'tasks_for in ["action", "cron"]'
             then: '${push.branch}'
       head_sha:
         $if: 'tasks_for == "github-push"'
         then: '${event.after}'
         else:
           $if: 'tasks_for == "github-pull-request"'
           then: '${event.pull_request.head.sha}'
           else:
             $if: 'tasks_for in ["action", "cron"]'
             then: '${push.revision}'
       ownTaskId:
         $if: '"github" in tasks_for'
         then: {$eval: as_slugid("decision_task")}
         else:
           $if: 'tasks_for in ["cron", "action"]'
           then: '${ownTaskId}'
       pullRequestAction:
         $if: 'tasks_for == "github-pull-request"'
         then: ${event.action}
         else: 'UNDEFINED'
     in:
       $if: >
         tasks_for in ["action", "cron"]
         || (tasks_for == "github-pull-request" && pullRequestAction in ["opened", "reopened", "synchronize"])
         || (tasks_for == "github-push" && head_branch[:10] != "refs/tags/") && (head_branch != "staging.tmp") && (head_branch != "trying.tmp") && (head_branch[:8] != "mergify/")
       then:
         $let:
           level:
             $if: 'tasks_for in ["github-push", "action", "cron"] && repoUrl == "https://github.com/mozilla-mobile/fenix"'
             then: '3'
             else: '1'
           short_head_branch:
             $if: 'head_branch[:11] == "refs/heads/"'
             then: {$eval: 'head_branch[11:]'}
         in:
-          $mergeDeep:
-            - $if: 'tasks_for != "action"'
-              then:
-                taskId: '${ownTaskId}'
-            - taskGroupId:
-                $if: 'tasks_for == "action"'
-                then:
-                  '${action.taskGroupId}'
-                else:
-                  '${ownTaskId}' # same as taskId; this is how automation identifies a decision task
+          taskId:
+            $if: 'tasks_for != "action"'
+            then: '${ownTaskId}'
+          taskGroupId:
+            $if: 'tasks_for == "action"'
+            then: '${action.taskGroupId}'
+            else: '${ownTaskId}' # same as taskId; this is how automation identifies a decision task
           schedulerId: '${trustDomain}-level-${level}'
           created: {$fromNow: ''}
           deadline: {$fromNow: '1 day'}
           expires: {$fromNow: '1 year 1 second'} # 1 second so artifacts expire first, despite rounding errors
           metadata:
             $merge:
               - owner: "${ownerEmail}"
                 source: '${repoUrl}/raw/${head_sha}/.taskcluster.yml'
               - $if: 'tasks_for in ["github-push", "github-pull-request"]'
                 then:
                   name: "Decision Task"
                   description: 'The task that creates all of the other tasks in the task graph'
                 else:
                   $if: 'tasks_for == "action"'
                   then:
                     name: "Action: ${action.title}"
                     description: |
                       ${action.description}

                       Action triggered by clientID `${clientId}`
                   else:
                     name: "Decision Task for cron job ${cron.job_name}"
                     description: 'Created by a [cron task](https://firefox-ci-tc.services.mozilla.com/tasks/${cron.task_id})'
-          provisionerId: "mobile-${level}"
+          provisionerId: "${trustDomain}-${level}"
           workerType: "decision"
           tags:
             $if: 'tasks_for in ["github-push", "github-pull-request"]'
             then:
               kind: decision-task
             else:
               $if: 'tasks_for == "action"'
               then:
                 kind: 'action-callback'
               else:
                 $if: 'tasks_for == "cron"'
                 then:
                   kind: cron-task
           routes:
             $flattenDeep:
               - checks
               - $if: 'level == "3" || repoUrl == "https://github.com/mozilla-releng/staging-fenix"'
                 then:
                   - tc-treeherder.v2.${project}.${head_sha}
               # TODO Bug 1601928: Make this scope fork-friendly once ${project} is better defined. This will enable
               # staging release promotion on forks.
               - $if: 'tasks_for == "github-push"'
                 then:
-                  - index.mobile.v2.${project}.branch.${short_head_branch}.latest.taskgraph.decision
-                  - index.mobile.v2.${project}.branch.${short_head_branch}.revision.${head_sha}.taskgraph.decision
-                  - index.mobile.v2.${project}.revision.${head_sha}.taskgraph.decision
+                  - index.${trustDomain}.v2.${project}.branch.${short_head_branch}.latest.taskgraph.decision
+                  - index.${trustDomain}.v2.${project}.branch.${short_head_branch}.revision.${head_sha}.taskgraph.decision
+                  - index.${trustDomain}.v2.${project}.revision.${head_sha}.taskgraph.decision
               - $if: 'tasks_for == "cron"'
                 then:
                   # cron context provides ${head_branch} as a short one
-                  - index.mobile.v2.${project}.branch.${head_branch}.latest.taskgraph.decision-${cron.job_name}
-                  - index.mobile.v2.${project}.branch.${head_branch}.revision.${head_sha}.taskgraph.decision-${cron.job_name}
-                  - index.mobile.v2.${project}.branch.${head_branch}.revision.${head_sha}.taskgraph.cron.${ownTaskId}
+                  - index.${trustDomain}.v2.${project}.branch.${head_branch}.latest.taskgraph.decision-${cron.job_name}
+                  - index.${trustDomain}.v2.${project}.branch.${head_branch}.revision.${head_sha}.taskgraph.decision-${cron.job_name}
+                  - index.${trustDomain}.v2.${project}.branch.${head_branch}.revision.${head_sha}.taskgraph.cron.${ownTaskId}
           scopes:
             $if: 'tasks_for == "github-push"'
             then:
               # `https://` is 8 characters so, ${repoUrl[8:]} is the repository without the protocol.
               - 'assume:repo:${repoUrl[8:]}:branch:${short_head_branch}'
             else:
               $if: 'tasks_for == "github-pull-request"'
               then:
                 - 'assume:repo:github.com/${event.pull_request.base.repo.full_name}:pull-request'
               else:
                 $if: 'tasks_for == "action"'
                 then:
                   # when all actions are hooks, we can calculate this directly rather than using a variable
                   - '${action.repo_scope}'
                 else:
                   - 'assume:repo:${repoUrl[8:]}:cron:${cron.job_name}'
           requires: all-completed
           priority: lowest
           retries: 5
           payload:
             env:
               # run-task uses these to check out the source; the inputs
               # to `mach taskgraph decision` are all on the command line.
               $merge:
                 - MOBILE_BASE_REPOSITORY: '${baseRepoUrl}'
                   MOBILE_HEAD_REPOSITORY: '${repoUrl}'
                   MOBILE_HEAD_REF: '${head_branch}'
                   MOBILE_HEAD_REV: '${head_sha}'
                   MOBILE_REPOSITORY_TYPE: git
-                  TASKGRAPH_BASE_REPOSITORY: https://hg.mozilla.org/ci/taskgraph
-                  TASKGRAPH_HEAD_REPOSITORY: https://hg.mozilla.org/ci/${taskgraph.branch}
-                  TASKGRAPH_HEAD_REV: ${taskgraph.revision}
-                  TASKGRAPH_REPOSITORY_TYPE: hg
-                  REPOSITORIES: {$json: {mobile: "Fenix", taskgraph: "Taskgraph"}}
+                  MOBILE_PIP_REQUIREMENTS: taskcluster/requirements.txt
+                  REPOSITORIES: {$json: {mobile: "Fenix"}}
                   HG_STORE_PATH: /builds/worker/checkouts/hg-store
                   ANDROID_SDK_ROOT: /builds/worker/android-sdk
                 - $if: 'tasks_for in ["github-pull-request"]'
                   then:
                     MOBILE_PULL_REQUEST_NUMBER: '${event.pull_request.number}'
                 - $if: 'tasks_for == "action"'
                   then:
                     ACTION_TASK_GROUP_ID: '${action.taskGroupId}' # taskGroupId of the target task
                     ACTION_TASK_ID: {$json: {$eval: 'taskId'}} # taskId of the target task (JSON-encoded)
                     ACTION_INPUT: {$json: {$eval: 'input'}}
                     ACTION_CALLBACK: '${action.cb_name}'
             features:
               taskclusterProxy: true
               chainOfTrust: true
             # Note: This task is built server side without the context or tooling that
             # exist in tree so we must hard code the hash
-            image:
-              mozillareleases/taskgraph:decision-mobile-44b6b7b4c370220eff56efa8b508aa5157ef9c6e74847c7ecc19d640946ba49e@sha256:4107cbc5e154502529e4d38efa4dc89c05ee54e2cbc6e2e66023e68407502894
+            image: mozillareleases/taskgraph:decision-mobile-7e11b0cc3966ad9729e08b19551399b3343d3b385eac067b6335d4c34431a899@sha256:b309fa59efd59991ba286a326cb43b724c38e6f3872c52d0f85e96428899c2fc
             maxRunTime: 1800
             command:
               - /usr/local/bin/run-task
-              - '--mobile-checkout=/builds/worker/checkouts/vcs'
-              - '--taskgraph-checkout=/builds/worker/checkouts/taskgraph'
-              - '--task-cwd=/builds/worker/checkouts/vcs'
+              - '--mobile-checkout=/builds/worker/checkouts/src'
+              - '--task-cwd=/builds/worker/checkouts/src'
               - '--'
               - bash
               - -cx
               - $let:
-                  extraArgs: {$if: 'tasks_for == "cron"', then: '${cron.quoted_args}', else: ''}
+                  extraArgs:
+                    $if: 'tasks_for == "cron"'
+                    then: '${cron.quoted_args}'
+                    else: ''
                 in:
                   $if: 'tasks_for == "action"'
                   then: >
-                    PIP_IGNORE_INSTALLED=0 pip3 install --user /builds/worker/checkouts/taskgraph &&
-                    PIP_IGNORE_INSTALLED=0 pip3 install --user mozilla-version &&
                     taskcluster/scripts/decision-install-sdk.sh &&
                     ln -s /builds/worker/artifacts artifacts &&
                     ~/.local/bin/taskgraph action-callback
                   else: >
-                    PIP_IGNORE_INSTALLED=0 pip3 install --user /builds/worker/checkouts/taskgraph &&
-                    PIP_IGNORE_INSTALLED=0 pip3 install --user mozilla-version &&
                     taskcluster/scripts/decision-install-sdk.sh &&
                     ln -s /builds/worker/artifacts artifacts &&
                     ~/.local/bin/taskgraph decision
                     --pushlog-id='0'
                     --pushdate='0'
                     --project='${project}'
                     --message=""
                     --owner='${ownerEmail}'
                     --level='${level}'
                     --base-repository="$MOBILE_BASE_REPOSITORY"
                     --head-repository="$MOBILE_HEAD_REPOSITORY"
                     --head-ref="$MOBILE_HEAD_REF"
                     --head-rev="$MOBILE_HEAD_REV"
                     --repository-type="$MOBILE_REPOSITORY_TYPE"
                     --tasks-for='${tasks_for}'
                     ${extraArgs}
             artifacts:
               'public':
                 type: 'directory'
                 path: '/builds/worker/artifacts'
-                expires: {$fromNow: '1 year'}
+                expires:
+                  $fromNow: '1 year'
               'public/docker-contexts':
                 type: 'directory'
-                path: '/builds/worker/checkouts/vcs/docker-contexts'
+                path: '/builds/worker/checkouts/src/docker-contexts'
                 # This needs to be at least the deadline of the
                 # decision task + the docker-image task deadlines.
                 # It is set to a week to allow for some time for
                 # debugging, but they are not useful long-term.
-                expires: {$fromNow: '7 day'}
+                expires:
+                  $fromNow: '7 day'
           extra:
             $merge:
               - treeherder:
                   $merge:
                     - machine:
                         platform: gecko-decision
                     - $if: 'tasks_for in ["github-push", "github-pull-request"]'
                       then:
                         symbol: D
                       else:
                         $if: 'tasks_for == "action"'
                         then:
                           groupName: 'action-callback'
                           groupSymbol: AC
                           symbol: "${action.symbol}"
                         else:
                           groupSymbol: cron
                           symbol: "${cron.job_symbol}"
               - $if: 'tasks_for == "action"'
                 then:
                   parent: '${action.taskGroupId}'
                   action:
                     name: '${action.name}'
                     context:
                       taskGroupId: '${action.taskGroupId}'
                       taskId: {$eval: 'taskId'}
                       input: {$eval: 'input'}
                       clientId: {$eval: 'clientId'}
               - $if: 'tasks_for == "cron"'
                 then:
                   cron: {$json: {$eval: 'cron'}}
               - tasks_for: '${tasks_for}'

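Everything under `tasks:` above is a JSON-e template: Taskcluster renders the `$let`/`$if`/`$merge` expressions against the GitHub event (or cron/action context) to produce the decision task. A minimal sketch of that evaluation, using the `json-e` Python package with a made-up context:

```python
# Sketch only: evaluate a fragment of the template above with json-e.
import jsone  # pip install json-e

template = {
    "$let": {
        "level": {
            "$if": 'tasks_for == "github-push" && repoUrl == "https://github.com/mozilla-mobile/fenix"',
            "then": "3",
            "else": "1",
        },
    },
    "in": {"schedulerId": "mobile-level-${level}"},
}

# Stand-in for the context Taskcluster-GitHub supplies to .taskcluster.yml.
context = {
    "tasks_for": "github-push",
    "repoUrl": "https://github.com/mozilla-mobile/fenix",
}

print(jsone.render(template, context))  # -> {'schedulerId': 'mobile-level-3'}
```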
@@ -2,8 +2,6 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 from importlib import import_module

@@ -12,14 +10,16 @@ def register(graph_config):
     Import all modules that are siblings of this one, triggering decorators in
     the process.
     """
-    _import_modules([
-        "job",
-        "parameters",
-        "release_promotion",
-        "routes",
-        "target_tasks",
-        "worker_types",
-    ])
+    _import_modules(
+        [
+            "job",
+            "parameters",
+            "release_promotion",
+            "routes",
+            "target_tasks",
+            "worker_types",
+        ]
+    )
 
 
 def _import_modules(modules):

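`_import_modules` itself (just below this hunk) only needs to resolve each name relative to the current package; a sketch of the usual taskgraph pattern, assuming the real body matches:

```python
from importlib import import_module

def _import_modules(modules):
    # Importing each sibling module runs its decorators
    # (@transforms.add, @_target_task, ...) as a side effect.
    for module in modules:
        import_module(".{}".format(module), package=__name__)
```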
@@ -2,48 +2,48 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import json
 import subprocess
 
 from taskgraph.util.memoize import memoize
 
 
 def get_variant(build_type):
     all_variants = _fetch_all_variants()
     matching_variants = [
-        variant for variant in all_variants
-        if variant["build_type"] == build_type
+        variant for variant in all_variants if variant["build_type"] == build_type
     ]
     number_of_matching_variants = len(matching_variants)
     if number_of_matching_variants == 0:
-        raise ValueError('No variant found for build type "{}"'.format(
-            build_type
-        ))
+        raise ValueError('No variant found for build type "{}"'.format(build_type))
     elif number_of_matching_variants > 1:
-        raise ValueError('Too many variants found for build type "{}"": {}'.format(
-            build_type, matching_variants
-        ))
+        raise ValueError(
+            'Too many variants found for build type "{}"": {}'.format(
+                build_type, matching_variants
+            )
+        )
 
     return matching_variants.pop()
 
 
 @memoize
 def _fetch_all_variants():
-    output = _run_gradle_process('printVariants')
-    content = _extract_content_from_command_output(output, prefix='variants: ')
+    output = _run_gradle_process("printVariants")
+    content = _extract_content_from_command_output(output, prefix="variants: ")
     return json.loads(content)
 
 
 def _run_gradle_process(gradle_command, **kwargs):
     gradle_properties = [
-        '-P{property_name}={value}'.format(property_name=property_name, value=value)
+        "-P{property_name}={value}".format(property_name=property_name, value=value)
         for property_name, value in kwargs.items()
     ]
 
-    process = subprocess.Popen(["./gradlew", "--no-daemon", "--quiet", gradle_command] + gradle_properties, stdout=subprocess.PIPE, universal_newlines=True)
+    process = subprocess.Popen(
+        ["./gradlew", "--no-daemon", "--quiet", gradle_command] + gradle_properties,
+        stdout=subprocess.PIPE,
+        universal_newlines=True,
+    )
     output, err = process.communicate()
     exit_code = process.wait()

@@ -54,5 +54,5 @@ def _run_gradle_process(gradle_command, **kwargs):
 
 def _extract_content_from_command_output(output, prefix):
-    variants_line = [line for line in output.split('\n') if line.startswith(prefix)][0]
-    return variants_line.split(' ', 1)[1]
+    variants_line = [line for line in output.split("\n") if line.startswith(prefix)][0]
+    return variants_line.split(" ", 1)[1]

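Since `_fetch_all_variants` is decorated with `@memoize`, only the first `get_variant` call pays for the Gradle subprocess; later calls reuse the parsed JSON. A hypothetical usage:

```python
from fenix_taskgraph.gradle import get_variant

# First call shells out to `./gradlew --no-daemon --quiet printVariants`
# and parses the line prefixed with "variants: "; the result is memoized.
debug = get_variant("debug")

# Served from the memoized variant list; no second Gradle invocation.
release = get_variant("release")
```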
@@ -2,50 +2,51 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
 from taskgraph.util import path
 from taskgraph.util.schema import Schema, taskref_or_string
 from voluptuous import Required, Optional
-from six import text_type
 
 from pipes import quote as shell_quote
 
 
 secret_schema = {
-    Required("name"): text_type,
-    Required("path"): text_type,
-    Required("key"): text_type,
+    Required("name"): str,
+    Required("path"): str,
+    Required("key"): str,
     Optional("json"): bool,
 }
 
 dummy_secret_schema = {
-    Required("content"): text_type,
-    Required("path"): text_type,
+    Required("content"): str,
+    Required("path"): str,
     Optional("json"): bool,
 }
 
-gradlew_schema = Schema({
-    Required("using"): "gradlew",
-    Optional("pre-gradlew"): [[text_type]],
-    Required("gradlew"): [text_type],
-    Optional("post-gradlew"): [[text_type]],
-    # Base work directory used to set up the task.
-    Required("workdir"): text_type,
-    Optional("use-caches"): bool,
-    Optional("secrets"): [secret_schema],
-    Optional("dummy-secrets"): [dummy_secret_schema],
-})
+gradlew_schema = Schema(
+    {
+        Required("using"): "gradlew",
+        Optional("pre-gradlew"): [[str]],
+        Required("gradlew"): [str],
+        Optional("post-gradlew"): [[str]],
+        # Base work directory used to set up the task.
+        Required("workdir"): str,
+        Optional("use-caches"): bool,
+        Optional("secrets"): [secret_schema],
+        Optional("dummy-secrets"): [dummy_secret_schema],
+    }
+)
 
-run_commands_schema = Schema({
-    Required("using"): "run-commands",
-    Optional("pre-commands"): [[text_type]],
-    Required("commands"): [[taskref_or_string]],
-    Required("workdir"): text_type,
-    Optional("use-caches"): bool,
-    Optional("secrets"): [secret_schema],
-    Optional("dummy-secrets"): [dummy_secret_schema],
-})
+run_commands_schema = Schema(
+    {
+        Required("using"): "run-commands",
+        Optional("pre-commands"): [[str]],
+        Required("commands"): [[taskref_or_string]],
+        Required("workdir"): str,
+        Optional("use-caches"): bool,
+        Optional("secrets"): [secret_schema],
+        Optional("dummy-secrets"): [dummy_secret_schema],
+    }
+)
 
 
 @run_job_using("docker-worker", "run-commands", schema=run_commands_schema)

@@ -53,7 +54,8 @@ def configure_run_commands_schema(config, job, taskdesc):
     run = job["run"]
     pre_commands = run.pop("pre-commands", [])
     pre_commands += [
-        _generate_dummy_secret_command(secret) for secret in run.pop("dummy-secrets", [])
+        _generate_dummy_secret_command(secret)
+        for secret in run.pop("dummy-secrets", [])
     ]
     pre_commands += [
         _generate_secret_command(secret) for secret in run.get("secrets", [])

@@ -73,11 +75,9 @@ def configure_gradlew(config, job, taskdesc):
     worker = taskdesc["worker"] = job["worker"]
 
     fetches_dir = path.join(run["workdir"], worker["env"]["MOZ_FETCHES_DIR"])
-    worker.setdefault("env", {}).update({
-        "ANDROID_SDK_ROOT": path.join(
-            fetches_dir, "android-sdk-linux"
-        )
-    })
+    worker.setdefault("env", {}).update(
+        {"ANDROID_SDK_ROOT": path.join(fetches_dir, "android-sdk-linux")}
+    )
 
     run["command"] = _extract_gradlew_command(run, fetches_dir)
     _inject_secrets_scopes(run, taskdesc)

@@ -88,7 +88,8 @@ def configure_gradlew(config, job, taskdesc):
 def _extract_gradlew_command(run, fetches_dir):
     pre_gradle_commands = run.pop("pre-gradlew", [])
     pre_gradle_commands += [
-        _generate_dummy_secret_command(secret) for secret in run.pop("dummy-secrets", [])
+        _generate_dummy_secret_command(secret)
+        for secret in run.pop("dummy-secrets", [])
     ]
     pre_gradle_commands += [
         _generate_secret_command(secret) for secret in run.get("secrets", [])

@@ -101,7 +102,9 @@ def _extract_gradlew_command(run, fetches_dir):
         )
         for repo_name in ("google", "central")
     ]
-    gradle_command = ["./gradlew"] + gradle_repos_args + ["listRepositories"] + run.pop("gradlew")
+    gradle_command = (
+        ["./gradlew"] + gradle_repos_args + ["listRepositories"] + run.pop("gradlew")
+    )
     post_gradle_commands = run.pop("post-gradlew", [])
 
     commands = pre_gradle_commands + [gradle_command] + post_gradle_commands

@@ -111,9 +114,12 @@ def _extract_gradlew_command(run, fetches_dir):
 def _generate_secret_command(secret):
     secret_command = [
         "taskcluster/scripts/get-secret.py",
-        "-s", secret["name"],
-        "-k", secret["key"],
-        "-f", secret["path"],
+        "-s",
+        secret["name"],
+        "-k",
+        secret["key"],
+        "-f",
+        secret["path"],
     ]
     if secret.get("json"):
         secret_command.append("--json")

@@ -124,8 +130,10 @@ def _generate_secret_command(secret):
 def _generate_dummy_secret_command(secret):
     secret_command = [
         "taskcluster/scripts/write-dummy-secret.py",
-        "-f", secret["path"],
-        "-c", secret["content"],
+        "-f",
+        secret["path"],
+        "-c",
+        secret["content"],
     ]
     if secret.get("json"):
         secret_command.append("--json")

@@ -149,18 +157,22 @@ def _convert_commands_to_string(commands):
                 part_string = part["task-reference"]
                 should_task_reference = True
             else:
-                raise ValueError('Unsupported dict: {}'.format(part))
+                raise ValueError("Unsupported dict: {}".format(part))
         else:
             part_string = part
 
         sanitized_parts.append(part_string)
     sanitized_commands.append(sanitized_parts)
 
-    shell_quoted_commands = [" ".join(map(shell_quote, command)) for command in sanitized_commands]
+    shell_quoted_commands = [
+        " ".join(map(shell_quote, command)) for command in sanitized_commands
+    ]
     full_string_command = " && ".join(shell_quoted_commands)
 
     if should_artifact_reference and should_task_reference:
-        raise NotImplementedError('"arifact-reference" and "task-reference" cannot be both used')
+        raise NotImplementedError(
+            '"arifact-reference" and "task-reference" cannot be both used'
+        )
     elif should_artifact_reference:
         return {"artifact-reference": full_string_command}
     elif should_task_reference:

@@ -173,7 +185,9 @@ def _inject_secrets_scopes(run, taskdesc):
     secrets = run.pop("secrets", [])
     scopes = taskdesc.setdefault("scopes", [])
     new_secret_scopes = ["secrets:get:{}".format(secret["name"]) for secret in secrets]
-    new_secret_scopes = list(set(new_secret_scopes))  # Scopes must not have any duplicates
+    new_secret_scopes = list(
+        set(new_secret_scopes)
+    )  # Scopes must not have any duplicates
     scopes.extend(new_secret_scopes)

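The schemas above are what `@run_job_using` validates each job's `run` section against. A minimal `run` payload that should satisfy `gradlew_schema` (all values here are hypothetical):

```python
run = {
    "using": "gradlew",
    "gradlew": ["assembleDebug"],
    # Base work directory used to set up the task.
    "workdir": "/builds/worker/checkouts/vcs",
    "dummy-secrets": [
        {"content": "faketoken", "path": ".adjust_token"},
    ],
}

# A voluptuous Schema is callable: it returns the validated data
# or raises voluptuous.MultipleInvalid on a malformed payload.
gradlew_schema(run)
```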
@@ -2,8 +2,6 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import copy

@@ -15,12 +13,12 @@ def group_by(name):
     def wrapper(func):
         GROUP_BY_MAP[name] = func
         return func
-    return wrapper
+
+    return wrapper
 
 
 def group_tasks(config, tasks):
-    group_by_fn = GROUP_BY_MAP[config['group-by']]
+    group_by_fn = GROUP_BY_MAP[config["group-by"]]
 
     groups = group_by_fn(config, tasks)

@@ -29,33 +27,33 @@ def group_tasks(config, tasks):
         yield dependencies
 
 
-@group_by('build-type')
+@group_by("build-type")
 def build_type_grouping(config, tasks):
     groups = {}
-    kind_dependencies = config.get('kind-dependencies')
-    only_build_type = config.get('only-for-build-types')
+    kind_dependencies = config.get("kind-dependencies")
+    only_build_type = config.get("only-for-build-types")
 
     for task in tasks:
         if task.kind not in kind_dependencies:
             continue
 
         if only_build_type:
-            build_type = task.attributes.get('build-type')
+            build_type = task.attributes.get("build-type")
             if build_type not in only_build_type:
                 continue
 
-        build_type = task.attributes.get('build-type')
+        build_type = task.attributes.get("build-type")
 
         groups.setdefault(build_type, []).append(task)
 
     return groups
 
 
-@group_by('attributes')
+@group_by("attributes")
 def attributes_grouping(config, tasks):
     groups = {}
-    kind_dependencies = config.get('kind-dependencies')
-    only_attributes = config.get('only-for-attributes')
+    kind_dependencies = config.get("kind-dependencies")
+    only_attributes = config.get("only-for-attributes")
 
     for task in tasks:
         if task.kind not in kind_dependencies:

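Adding a new grouping strategy is just another decorated function; for example, a hypothetical grouping on an arbitrary attribute, mirroring the shape of `build_type_grouping` above:

```python
@group_by("component")
def component_grouping(config, tasks):
    # Hypothetical: bucket dependent tasks by a "component" attribute.
    groups = {}
    kind_dependencies = config.get("kind-dependencies")
    for task in tasks:
        if task.kind not in kind_dependencies:
            continue
        component = task.attributes.get("component")
        if component is not None:
            groups.setdefault(component, []).append(task)
    return groups
```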
@@ -2,27 +2,25 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import copy
 
 from voluptuous import Required
 
 from taskgraph.task import Task
-from taskgraph.util.attributes import sorted_unique_list
 from taskgraph.util.schema import Schema
 
 from . import group_tasks
 
-schema = Schema({
-    Required('primary-dependency', 'primary dependency task'): Task,
-    Required(
-        'dependent-tasks',
-        'dictionary of dependent tasks, keyed by kind',
-    ): {str: Task},
-})
+schema = Schema(
+    {
+        Required("primary-dependency", "primary dependency task"): Task,
+        Required(
+            "dependent-tasks",
+            "dictionary of dependent tasks, keyed by kind",
+        ): {str: Task},
+    }
+)
 
 
 def loader(kind, path, config, params, loaded_tasks):

@@ -39,7 +37,7 @@ def loader(kind, path, config, params, loaded_tasks):
     Optional ``job-template`` kind configuration value, if specified, will be used to
     pass configuration down to the specified transforms used.
     """
-    job_template = config.get('job-template')
+    job_template = config.get("job-template")
 
     for dep_tasks in group_tasks(config, loaded_tasks):
         kinds = [dep.kind for dep in dep_tasks]

@@ -50,8 +48,8 @@ def loader(kind, path, config, params, loaded_tasks):
             for dep in dep_tasks
         }
 
-        job = {'dependent-tasks': dep_tasks_per_unique_key}
-        job['primary-dependency'] = get_primary_dep(config, dep_tasks_per_unique_key)
+        job = {"dependent-tasks": dep_tasks_per_unique_key}
+        job["primary-dependency"] = get_primary_dep(config, dep_tasks_per_unique_key)
 
         if job_template:
             job.update(copy.deepcopy(job_template))

@@ -65,7 +63,7 @@ def get_primary_dep(config, dep_tasks):
     defined and is a list, the first kind in that list with a matching dep
     is the primary dependency. If it's undefined, return the first dep.
     """
-    primary_dependencies = config.get('primary-dependency')
+    primary_dependencies = config.get("primary-dependency")
     if isinstance(primary_dependencies, str):
         primary_dependencies = [primary_dependencies]
     if not primary_dependencies:

@@ -75,13 +73,16 @@ def get_primary_dep(config, dep_tasks):
     for primary_kind in primary_dependencies:
         for dep_kind in dep_tasks:
             if dep_kind == primary_kind:
-                assert primary_dep is None, \
-                    "Too many primary dependent tasks in dep_tasks: {}!".format(
-                        [t.label for t in dep_tasks]
-                    )
+                assert (
+                    primary_dep is None
+                ), "Too many primary dependent tasks in dep_tasks: {}!".format(
+                    [t.label for t in dep_tasks]
+                )
                 primary_dep = dep_tasks[dep_kind]
     if primary_dep is None:
-        raise Exception("Can't find dependency of {}: {}".format(
-            config['primary-dependency'], config
-        ))
+        raise Exception(
+            "Can't find dependency of {}: {}".format(
+                config["primary-dependency"], config
+            )
+        )
     return primary_dep

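In kind-configuration terms, this loader is driven by entries like the following hypothetical values (expressed as the dict the loader receives); with this config, `get_primary_dep` would pick the `signing` dependency as primary:

```python
config = {
    "group-by": "build-type",
    "kind-dependencies": ["build", "signing"],
    # First matching kind in this list wins as the primary dependency.
    "primary-dependency": ["signing", "build"],
    # Merged into every generated job via copy.deepcopy.
    "job-template": {"attributes": {"release-artifacts": True}},
}
```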
@@ -2,36 +2,21 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import os
-import re
 
-from mozilla_version.fenix import FenixVersion
-from six import text_type
 from taskgraph.parameters import extend_parameters_schema
 from voluptuous import All, Any, Optional, Range, Required
 
-from .release_promotion import read_version_file
-
-
-def get_defaults(repo_root):
-    return {
-        "pull_request_number": None,
-        "release_type": "",
-        "shipping_phase": None,
-        "next_version": "",
-        "version": "",
-    }
-
-
-extend_parameters_schema({
-    Required("pull_request_number"): Any(All(int, Range(min=1)), None),
-    Required("release_type"): text_type,
-    Optional("shipping_phase"): Any('build', 'ship', None),
-    Required("version"): text_type,
-    Required("next_version"): Any(None, text_type),
-}, defaults_fn=get_defaults)
+extend_parameters_schema(
+    {
+        Required("pull_request_number"): Any(All(int, Range(min=1)), None),
+        Required("release_type", default=""): str,
+        Optional("shipping_phase"): Any("build", "ship", None),
+        Required("version", default=""): str,
+        Required("next_version", default=""): Any(None, str),
+    }
+)
 
 
 def get_decision_parameters(graph_config, parameters):

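Once the schema is extended, these keys are validated alongside taskgraph's built-in parameters whenever a `Parameters` object is checked. A sketch, assuming `Parameters(strict=False, ...)` and `.check()` behave as in upstream taskgraph:

```python
from taskgraph.parameters import Parameters

params = Parameters(
    strict=False,  # sketch: don't require every built-in parameter
    pull_request_number=1234,      # hypothetical values throughout
    release_type="beta",
    version="93.0.0-beta.2",
    next_version="93.0.0-beta.3",
)
params.check()  # e.g. pull_request_number=0 would fail Range(min=1)
```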
@@ -2,8 +2,6 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import os
 
 from mozilla_version.fenix import FenixVersion

@@ -13,7 +11,10 @@ from taskgraph.util.taskcluster import get_artifact
 from taskgraph.taskgraph import TaskGraph
 from taskgraph.decision import taskgraph_decision
 from taskgraph.parameters import Parameters
-from taskgraph.util.taskgraph import find_decision_task, find_existing_tasks_from_previous_kinds
+from taskgraph.util.taskgraph import (
+    find_decision_task,
+    find_existing_tasks_from_previous_kinds,
+)
 
 RELEASE_PROMOTION_PROJECTS = (
     "https://github.com/mozilla-mobile/fenix",

@@ -22,74 +23,84 @@ RELEASE_PROMOTION_PROJECTS = (
 
 
 def is_release_promotion_available(parameters):
-    return parameters['head_repository'] in RELEASE_PROMOTION_PROJECTS
+    return parameters["head_repository"] in RELEASE_PROMOTION_PROJECTS
 
 
 @register_callback_action(
-    name='release-promotion',
-    title='Ship Fenix',
-    symbol='${input.release_promotion_flavor}',
+    name="release-promotion",
+    title="Ship Fenix",
+    symbol="${input.release_promotion_flavor}",
     description="Ship Fenix",
     generic=False,
     order=500,
     context=[],
     available=is_release_promotion_available,
     schema=lambda graph_config: {
-        'type': 'object',
-        'properties': {
-            'build_number': {
-                'type': 'integer',
-                'default': 1,
-                'minimum': 1,
-                'title': 'The release build number',
-                'description': ('The release build number. Starts at 1 per '
-                                'release version, and increments on rebuild.'),
+        "type": "object",
+        "properties": {
+            "build_number": {
+                "type": "integer",
+                "default": 1,
+                "minimum": 1,
+                "title": "The release build number",
+                "description": (
+                    "The release build number. Starts at 1 per "
+                    "release version, and increments on rebuild."
+                ),
             },
-            'do_not_optimize': {
-                'type': 'array',
-                'description': ('Optional: a list of labels to avoid optimizing out '
-                                'of the graph (to force a rerun of, say, '
-                                'funsize docker-image tasks).'),
-                'items': {
-                    'type': 'string',
-                },
+            "do_not_optimize": {
+                "type": "array",
+                "description": (
+                    "Optional: a list of labels to avoid optimizing out "
+                    "of the graph (to force a rerun of, say, "
+                    "funsize docker-image tasks)."
+                ),
+                "items": {
+                    "type": "string",
+                },
             },
-            'revision': {
-                'type': 'string',
-                'title': 'Optional: revision to ship',
-                'description': ('Optional: the revision to ship.'),
+            "revision": {
+                "type": "string",
+                "title": "Optional: revision to ship",
+                "description": ("Optional: the revision to ship."),
             },
-            'release_promotion_flavor': {
-                'type': 'string',
-                'description': 'The flavor of release promotion to perform.',
-                'default': 'build',
-                'enum': sorted(graph_config['release-promotion']['flavors'].keys()),
+            "release_promotion_flavor": {
+                "type": "string",
+                "description": "The flavor of release promotion to perform.",
+                "default": "build",
+                "enum": sorted(graph_config["release-promotion"]["flavors"].keys()),
             },
-            'rebuild_kinds': {
-                'type': 'array',
-                'description': ('Optional: an array of kinds to ignore from the previous '
-                                'graph(s).'),
-                'items': {
-                    'type': 'string',
-                },
+            "rebuild_kinds": {
+                "type": "array",
+                "description": (
+                    "Optional: an array of kinds to ignore from the previous "
+                    "graph(s)."
+                ),
+                "items": {
+                    "type": "string",
+                },
             },
-            'previous_graph_ids': {
-                'type': 'array',
-                'description': ('Optional: an array of taskIds of decision or action '
-                                'tasks from the previous graph(s) to use to populate '
-                                'our `previous_graph_kinds`.'),
-                'items': {
-                    'type': 'string',
-                },
+            "previous_graph_ids": {
+                "type": "array",
+                "description": (
+                    "Optional: an array of taskIds of decision or action "
+                    "tasks from the previous graph(s) to use to populate "
+                    "our `previous_graph_kinds`."
+                ),
+                "items": {
+                    "type": "string",
+                },
             },
-            'version': {
-                'type': 'string',
-                'description': ('Optional: override the version for release promotion. '
-                                "Occasionally we'll land a taskgraph fix in a later "
-                                'commit, but want to act on a build from a previous '
-                                'commit. If a version bump has landed in the meantime, '
-                                'relying on the in-tree version will break things.'),
-                'default': '',
+            "version": {
+                "type": "string",
+                "description": (
+                    "Optional: override the version for release promotion. "
+                    "Occasionally we'll land a taskgraph fix in a later "
+                    "commit, but want to act on a build from a previous "
+                    "commit. If a version bump has landed in the meantime, "
+                    "relying on the in-tree version will break things."
+                ),
+                "default": "",
             },
             "next_version": {
                 "type": "string",

@@ -97,23 +108,34 @@ def is_release_promotion_available(parameters):
                 "default": "",
             },
         },
-        "required": ["release_promotion_flavor", "version", "build_number", "next_version"],
-    }
+        "required": [
+            "release_promotion_flavor",
+            "version",
+            "build_number",
+            "next_version",
+        ],
+    },
 )
 def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
-    release_promotion_flavor = input['release_promotion_flavor']
-    promotion_config = graph_config['release-promotion']['flavors'][release_promotion_flavor]
+    release_promotion_flavor = input["release_promotion_flavor"]
+    promotion_config = graph_config["release-promotion"]["flavors"][
+        release_promotion_flavor
+    ]
 
-    target_tasks_method = promotion_config['target-tasks-method'].format(
-        project=parameters['project']
+    target_tasks_method = promotion_config["target-tasks-method"].format(
+        project=parameters["project"]
     )
-    rebuild_kinds = input.get('rebuild_kinds') or promotion_config.get('rebuild-kinds', [])
-    do_not_optimize = input.get('do_not_optimize') or promotion_config.get('do-not-optimize', [])
+    rebuild_kinds = input.get("rebuild_kinds") or promotion_config.get(
+        "rebuild-kinds", []
+    )
+    do_not_optimize = input.get("do_not_optimize") or promotion_config.get(
+        "do-not-optimize", []
+    )
 
     # make parameters read-write
     parameters = dict(parameters)
     # Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
-    previous_graph_ids = input.get('previous_graph_ids')
+    previous_graph_ids = input.get("previous_graph_ids")
     if not previous_graph_ids:
         previous_graph_ids = [find_decision_task(parameters, graph_config)]

@@ -129,25 +151,31 @@ def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
         full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
         combined_full_task_graph.update(full_task_graph)
     _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
-    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
+    parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
         combined_full_task_graph, previous_graph_ids, rebuild_kinds
     )
-    parameters['do_not_optimize'] = do_not_optimize
-    parameters['target_tasks_method'] = target_tasks_method
-    parameters['build_number'] = int(input['build_number'])
+    parameters["do_not_optimize"] = do_not_optimize
+    parameters["target_tasks_method"] = target_tasks_method
+    parameters["build_number"] = int(input["build_number"])
     # When doing staging releases on try, we still want to re-use tasks from
     # previous graphs.
-    parameters['optimize_target_tasks'] = True
-    parameters['shipping_phase'] = input['release_promotion_flavor']
+    parameters["optimize_target_tasks"] = True
+    parameters["shipping_phase"] = input["release_promotion_flavor"]
 
     version_in_file = read_version_file()
-    parameters['version'] = input['version'] if input.get('version') else read_version_file()
-    version_string = parameters['version']
+    parameters["version"] = (
+        input["version"] if input.get("version") else read_version_file()
+    )
+    version_string = parameters["version"]
     if version_string != version_in_file:
-        raise ValueError("Version given in tag ({}) does not match the one in version.txt ({})".format(version_string, version_in_file))
-    parameters['head_tag'] = 'v{}'.format(version_string)
+        raise ValueError(
+            "Version given in tag ({}) does not match the one in version.txt ({})".format(
+                version_string, version_in_file
+            )
+        )
+    parameters["head_tag"] = "v{}".format(version_string)
 
-    parameters['next_version'] = input['next_version']
+    parameters["next_version"] = input["next_version"]
 
     version = FenixVersion.parse(version_string)
     if version.is_beta:

@@ -158,17 +186,17 @@ def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
         release_type = "release"
     else:
         raise ValueError("Unsupported version type: {}".format(version.version_type))
-    parameters['release_type'] = release_type
-    parameters['tasks_for'] = 'action'
+    parameters["release_type"] = release_type
+    parameters["tasks_for"] = "action"
 
-    parameters['pull_request_number'] = None
+    parameters["pull_request_number"] = None
 
     # make parameters read-only
     parameters = Parameters(**parameters)
 
-    taskgraph_decision({'root': graph_config.root_dir}, parameters=parameters)
+    taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)
 
 
 def read_version_file():
-    with open(os.path.join(os.path.dirname(__file__), '..', '..', 'version.txt')) as f:
+    with open(os.path.join(os.path.dirname(__file__), "..", "..", "version.txt")) as f:
        return f.read().strip()

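The release-type gate at the end leans on `mozilla_version`; a sketch of that classification, using only the attributes the action itself touches (`is_beta`, `version_type`) and a hypothetical version string:

```python
from mozilla_version.fenix import FenixVersion

version = FenixVersion.parse("93.0.0-beta.2")  # hypothetical version string
release_type = "beta" if version.is_beta else "release"
print(version.version_type, release_type)
```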
@@ -2,8 +2,6 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import time
 
 from taskgraph.transforms.task import index_builder

@@ -2,12 +2,10 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.target_tasks import _target_task
 
 
-@_target_task('release')
+@_target_task("release")
 def target_tasks_default(full_task_graph, parameters, graph_config):
     # TODO Use shipping-phase

@@ -35,7 +33,11 @@ def _filter_fennec(fennec_type, task, parameters):
 def target_tasks_fennec_nightly(full_task_graph, parameters, graph_config):
     """Select the set of tasks required for a production build signed with the fennec key."""
-    return [l for l, t in full_task_graph.tasks.items() if _filter_fennec("production", t, parameters)]
+    return [
+        l
+        for l, t in full_task_graph.tasks.items()
+        if _filter_fennec("production", t, parameters)
+    ]
 
 
 @_target_task("bump_android_components")

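Target-task methods all share this shape: take the full graph, return the labels to schedule. A hypothetical filter in the same style as `target_tasks_fennec_nightly`:

```python
@_target_task("nightly_sketch")
def target_tasks_nightly_sketch(full_task_graph, parameters, graph_config):
    """Hypothetical: select every task attributed to nightly builds."""
    return [
        label
        for label, task in full_task_graph.tasks.items()
        if task.attributes.get("build-type") == "nightly"
    ]
```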
@@ -5,15 +5,12 @@
 Transform the beetmover task into an actual task description.
 """
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import logging
 
-from six import text_type, ensure_text
 from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
-from voluptuous import Any, Optional, Required, Schema
+from voluptuous import Optional, Required, Schema
 
 from fenix_taskgraph.util.scriptworker import generate_beetmover_artifact_map

@@ -22,13 +19,13 @@ logger = logging.getLogger(__name__)
 beetmover_description_schema = Schema(
     {
         # unique name to describe this beetmover task, defaults to {dep.label}-beetmover
-        Required("name"): text_type,
+        Required("name"): str,
         Required("worker"): {"upstream-artifacts": [dict]},
         # treeherder is allowed here to override any defaults we use for beetmover.
         Optional("treeherder"): task_description_schema["treeherder"],
         Optional("attributes"): task_description_schema["attributes"],
         Optional("dependencies"): task_description_schema["dependencies"],
-        Optional("run-on-tasks-for"): [text_type],
+        Optional("run-on-tasks-for"): [str],
         Optional("bucket-scope"): optionally_keyed_by("level", "build-type", str),
     }
 )

@@ -43,25 +40,23 @@ def make_task_description(config, tasks):
         attributes = task["attributes"]
 
         label = "beetmover-{}".format(task["name"])
-        description = (
-            "Beetmover submission for build type '{build_type}'".format(
-                build_type=attributes.get("build-type"),
-            )
+        description = "Beetmover submission for build type '{build_type}'".format(
+            build_type=attributes.get("build-type"),
         )
 
         if task.get("locale"):
             attributes["locale"] = task["locale"]
 
         resolve_keyed_by(
             task,
             "bucket-scope",
-            item_name=task['name'],
+            item_name=task["name"],
             **{
-                'build-type': task['attributes']['build-type'],
-                'level': config.params["level"]
+                "build-type": task["attributes"]["build-type"],
+                "level": config.params["level"],
             }
         )
-        bucket_scope = task.pop('bucket-scope')
+        bucket_scope = task.pop("bucket-scope")
 
         task = {
             "label": label,

@@ -85,10 +80,10 @@ def make_task_description(config, tasks):
 def craft_release_properties(config, task):
     params = config.params
     return {
-        "app-name": ensure_text(params["project"]),
-        "app-version": ensure_text(params["version"]),
-        "branch": ensure_text(params["project"]),
-        "build-id": ensure_text(params["moz_build_date"]),
+        "app-name": str(params["project"]),
+        "app-version": str(params["version"]),
+        "branch": str(params["project"]),
+        "build-id": str(params["moz_build_date"]),
         "hash-type": "sha512",
         "platform": "android",
     }

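`optionally_keyed_by("level", "build-type", str)` is what lets `bucket-scope` be written keyed by level in the kind definition; `resolve_keyed_by` then flattens it using the extra values passed in. A sketch with invented scope strings:

```python
task = {
    "name": "nightly",
    "attributes": {"build-type": "nightly"},
    "bucket-scope": {
        "by-level": {
            "3": "project:mobile:fenix:releng:beetmover:bucket:nightly",  # invented
            "default": "project:mobile:fenix:releng:beetmover:bucket:dep",
        }
    },
}

resolve_keyed_by(
    task,
    "bucket-scope",
    item_name=task["name"],
    **{"build-type": "nightly", "level": "3"}
)
assert task["bucket-scope"].endswith("bucket:nightly")
```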
@@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
 kind.
 """
 
-from __future__ import absolute_import, print_function, unicode_literals
-
 import copy
 import json

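All of these transform modules follow the same protocol: a module-level `TransformSequence` collects generators that each receive the kind's config plus a stream of task dicts and yield them onward, possibly modified. A minimal sketch:

```python
from taskgraph.transforms.base import TransformSequence

transforms = TransformSequence()


@transforms.add
def add_description(config, tasks):
    # Each transform yields (possibly modified) task dicts downstream.
    for task in tasks:
        task.setdefault("description", "built by the {} kind".format(config.kind))
        yield task
```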
@@ -6,10 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
 kind.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
-import datetime
-
 from taskgraph.transforms.base import TransformSequence
 from fenix_taskgraph.gradle import get_variant
@@ -33,31 +29,49 @@ def add_shippable_secrets(config, tasks):
         secrets = task["run"].setdefault("secrets", [])
         dummy_secrets = task["run"].setdefault("dummy-secrets", [])
 
-        if task.pop("include-shippable-secrets", False) and config.params["level"] == "3":
+        if (
+            task.pop("include-shippable-secrets", False)
+            and config.params["level"] == "3"
+        ):
             build_type = task["attributes"]["build-type"]
             gradle_build_type = task["run"]["gradle-build-type"]
-            secret_index = 'project/mobile/fenix/{}'.format(build_type)
-            secrets.extend([{
-                "key": key,
-                "name": secret_index,
-                "path": target_file,
-            } for key, target_file in (
-                ('adjust', '.adjust_token'),
-                ('firebase', 'app/src/{}/res/values/firebase.xml'.format(gradle_build_type)),
-                ('sentry_dsn', '.sentry_token'),
-                ('mls', '.mls_token'),
-                ('nimbus_url', '.nimbus'),
-                ('wallpaper_url', ".wallpaper_url")
-            )])
+            secret_index = "project/mobile/fenix/{}".format(build_type)
+            secrets.extend(
+                [
+                    {
+                        "key": key,
+                        "name": secret_index,
+                        "path": target_file,
+                    }
+                    for key, target_file in (
+                        ("adjust", ".adjust_token"),
+                        (
+                            "firebase",
+                            "app/src/{}/res/values/firebase.xml".format(
+                                gradle_build_type
+                            ),
+                        ),
+                        ("sentry_dsn", ".sentry_token"),
+                        ("mls", ".mls_token"),
+                        ("nimbus_url", ".nimbus"),
+                        ("wallpaper_url", ".wallpaper_url"),
+                    )
+                ]
+            )
         else:
-            dummy_secrets.extend([{
-                "content": fake_value,
-                "path": target_file,
-            } for fake_value, target_file in (
-                ("faketoken", ".adjust_token"),
-                ("faketoken", ".mls_token"),
-                ("https://fake@sentry.prod.mozaws.net/368", ".sentry_token"),
-            )])
+            dummy_secrets.extend(
+                [
+                    {
+                        "content": fake_value,
+                        "path": target_file,
+                    }
+                    for fake_value, target_file in (
+                        ("faketoken", ".adjust_token"),
+                        ("faketoken", ".mls_token"),
+                        ("https://fake@sentry.prod.mozaws.net/368", ".sentry_token"),
+                    )
+                ]
+            )
 
         yield task
@@ -75,6 +89,7 @@ def build_gradle_command(config, tasks):
         yield task
 
+
 @transforms.add
 def track_apk_size(config, tasks):
     for task in tasks:
@@ -89,6 +104,7 @@ def track_apk_size(config, tasks):
         yield task
 
+
 @transforms.add
 def extra_gradle_options(config, tasks):
     for task in tasks:
@@ -97,14 +113,13 @@ def extra_gradle_options(config, tasks):
         yield task
 
+
 @transforms.add
 def add_test_build_type(config, tasks):
     for task in tasks:
         test_build_type = task["run"].pop("test-build-type", "")
         if test_build_type:
-            task["run"]["gradlew"].append(
-                "-PtestBuildType={}".format(test_build_type)
-            )
+            task["run"]["gradlew"].append("-PtestBuildType={}".format(test_build_type))
 
         yield task
@@ -120,11 +135,13 @@ def add_disable_optimization(config, tasks):
 def add_nightly_version(config, tasks):
     for task in tasks:
         if task.pop("include-nightly-version", False):
-            task["run"]["gradlew"].extend([
-                # We only set the `official` flag here. The actual version name will be determined
-                # by Gradle (depending on the Gecko/A-C version being used)
-                '-Pofficial'
-            ])
+            task["run"]["gradlew"].extend(
+                [
+                    # We only set the `official` flag here. The actual version name will be determined
+                    # by Gradle (depending on the Gecko/A-C version being used)
+                    "-Pofficial"
+                ]
+            )
         yield task
@@ -132,10 +149,9 @@ def add_nightly_version(config, tasks):
 def add_release_version(config, tasks):
     for task in tasks:
         if task.pop("include-release-version", False):
-            task["run"]["gradlew"].extend([
-                '-PversionName={}'.format(config.params["version"]),
-                '-Pofficial'
-            ])
+            task["run"]["gradlew"].extend(
+                ["-PversionName={}".format(config.params["version"]), "-Pofficial"]
+            )
         yield task
@@ -150,23 +166,21 @@ def add_artifacts(config, tasks):
         if "apk-artifact-template" in task:
             artifact_template = task.pop("apk-artifact-template")
 
             for apk in variant_config["apks"]:
-                apk_name = artifact_template["name"].format(
-                    **apk
-                )
-                artifacts.append({
-                    "type": artifact_template["type"],
-                    "name": apk_name,
-                    "path": artifact_template["path"].format(
-                        gradle_build_type=gradle_build_type,
-                        **apk
-                    ),
-                })
+                apk_name = artifact_template["name"].format(**apk)
+                artifacts.append(
+                    {
+                        "type": artifact_template["type"],
+                        "name": apk_name,
+                        "path": artifact_template["path"].format(
+                            gradle_build_type=gradle_build_type, **apk
+                        ),
+                    }
+                )
                 apks[apk["abi"]] = {
                     "name": apk_name,
                     "github-name": artifact_template["github-name"].format(
-                        version=config.params["version"],
-                        **apk
-                    )
+                        version=config.params["version"], **apk
+                    ),
                 }
 
         yield task
@@ -178,5 +192,7 @@ def filter_incomplete_translation(config, tasks):
         if task.pop("filter-incomplete-translations", False):
             # filter-release-translations modifies source, which could cause problems if we ever start caching source
             pre_gradlew = task["run"].setdefault("pre-gradlew", [])
-            pre_gradlew.append(["python", "automation/taskcluster/l10n/filter-release-translations.py"])
+            pre_gradlew.append(
+                ["python", "automation/taskcluster/l10n/filter-release-translations.py"]
+            )
 
         yield task
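All of the transform files in this commit share one shape: a module-level `TransformSequence` collects generator functions that receive `(config, tasks)` and yield each (possibly modified) task dict onward. A minimal sketch of that pattern, with a hypothetical `add_defaults` transform:

    from taskgraph.transforms.base import TransformSequence

    transforms = TransformSequence()

    @transforms.add
    def add_defaults(config, tasks):
        # Transforms lazily consume the task stream and pass it along.
        for task in tasks:
            task.setdefault("attributes", {})
            yield task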

@@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the github_re
 kind.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
@@ -18,14 +16,18 @@ transforms = TransformSequence()
 @transforms.add
 def resolve_keys(config, tasks):
     for task in tasks:
-        for key in ("worker.github-project", "worker.is-prerelease", "worker.release-name"):
+        for key in (
+            "worker.github-project",
+            "worker.is-prerelease",
+            "worker.release-name",
+        ):
             resolve_keyed_by(
                 task,
                 key,
                 item_name=task["name"],
                 **{
-                    'build-type': task["attributes"]["build-type"],
-                    'level': config.params["level"],
+                    "build-type": task["attributes"]["build-type"],
+                    "level": config.params["level"],
                 }
             )
         yield task
@@ -38,13 +40,16 @@ def build_worker_definition(config, tasks):
             "artifact-map": _build_artifact_map(task),
             "git-tag": config.params["head_tag"],
             "git-revision": config.params["head_rev"],
-            "release-name": task["worker"]["release-name"].format(version=config.params["version"]),
+            "release-name": task["worker"]["release-name"].format(
+                version=config.params["version"]
+            ),
         }
 
         task["worker"].update(worker_definition)
         yield task
 
 
 def _build_artifact_map(task):
     artifact_map = []
     github_names_per_path = {
@@ -55,9 +60,7 @@ def _build_artifact_map(task):
     for upstream_artifact_metadata in task["worker"]["upstream-artifacts"]:
         artifacts = {"paths": {}, "taskId": upstream_artifact_metadata["taskId"]}
         for path in upstream_artifact_metadata["paths"]:
-            artifacts["paths"][path] = {
-                "destinations": [github_names_per_path[path]]
-            }
+            artifacts["paths"][path] = {"destinations": [github_names_per_path[path]]}
         artifact_map.append(artifacts)

@@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
 kind.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
@@ -20,10 +18,10 @@ def resolve_keys(config, tasks):
     for task in tasks:
         resolve_keyed_by(
             task,
-            'scopes',
+            "scopes",
             item_name=task["name"],
             **{
-                'level': config.params["level"],
+                "level": config.params["level"],
             }
         )
         yield task
@@ -33,10 +31,12 @@ def resolve_keys(config, tasks):
 def make_task_description(config, jobs):
     for job in jobs:
         product = "Fenix"
-        version = config.params['version'] or "{ver}"
-        job['worker']['release-name'] = '{product}-{version}-build{build_number}'.format(
+        version = config.params["version"] or "{ver}"
+        job["worker"][
+            "release-name"
+        ] = "{product}-{version}-build{build_number}".format(
             product=product,
             version=version,
-            build_number=config.params.get('build_number', 1)
+            build_number=config.params.get("build_number", 1),
         )
         yield job

@@ -5,8 +5,6 @@
 Apply some defaults and minor modifications to the single_dep jobs.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
 from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
@@ -21,21 +19,20 @@ transforms = TransformSequence()
 def build_name_and_attributes(config, tasks):
     for task in tasks:
         task["dependencies"] = {
-            dep_key: dep.label
-            for dep_key, dep in _get_all_deps(task).items()
+            dep_key: dep.label for dep_key, dep in _get_all_deps(task).items()
         }
         primary_dep = task["primary-dependency"]
         copy_of_attributes = primary_dep.attributes.copy()
         task.setdefault("attributes", {}).update(copy_of_attributes)
         # run_on_tasks_for is set as an attribute later in the pipeline
-        task.setdefault("run-on-tasks-for", copy_of_attributes['run_on_tasks_for'])
+        task.setdefault("run-on-tasks-for", copy_of_attributes["run_on_tasks_for"])
         task["name"] = _get_dependent_job_name_without_its_kind(primary_dep)
         yield task
 
 
 def _get_dependent_job_name_without_its_kind(dependent_job):
-    return dependent_job.label[len(dependent_job.kind) + 1:]
+    return dependent_job.label[len(dependent_job.kind) + 1 :]
@@ -53,8 +50,8 @@ def resolve_keys(config, tasks):
             "treeherder.job-symbol",
             item_name=task["name"],
             **{
-                'build-type': task["attributes"]["build-type"],
-                'level': config.params["level"],
+                "build-type": task["attributes"]["build-type"],
+                "level": config.params["level"],
             }
         )
         yield task
@@ -71,21 +68,25 @@ def build_upstream_artifacts(config, tasks):
             # Beetmover tasks use declarative artifacts.
             locale = task["attributes"].get("locale")
             build_type = task["attributes"]["build-type"]
-            worker_definition["upstream-artifacts"] = generate_beetmover_upstream_artifacts(
-                config, task, build_type, locale
-            )
+            worker_definition[
+                "upstream-artifacts"
+            ] = generate_beetmover_upstream_artifacts(config, task, build_type, locale)
         else:
             for dep in _get_all_deps(task).values():
-                paths = sorted([
-                    apk_metadata["name"]
-                    for apk_metadata in dep.attributes.get("apks", {}).values()
-                ])
+                paths = sorted(
+                    [
+                        apk_metadata["name"]
+                        for apk_metadata in dep.attributes.get("apks", {}).values()
+                    ]
+                )
                 if paths:
-                    worker_definition["upstream-artifacts"].append({
-                        "taskId": {"task-reference": "<{}>".format(dep.kind)},
-                        "taskType": dep.kind,
-                        "paths": paths,
-                    })
+                    worker_definition["upstream-artifacts"].append(
+                        {
+                            "taskId": {"task-reference": "<{}>".format(dep.kind)},
+                            "taskType": dep.kind,
+                            "paths": paths,
+                        }
+                    )
 
         task.setdefault("worker", {}).update(worker_definition)
         yield task

@@ -5,14 +5,7 @@
 Handle notifications like emails.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
-import copy
-import json
-
 from taskgraph.transforms.base import TransformSequence
-from taskgraph.util.treeherder import inherit_treeherder_from_dep
-from taskgraph.util.schema import resolve_keyed_by
 
 transforms = TransformSequence()
@@ -20,22 +13,24 @@ transforms = TransformSequence()
 @transforms.add
 def add_notify_email(config, tasks):
     for task in tasks:
-        notify = task.pop('notify', {})
-        email_config = notify.get('email')
+        notify = task.pop("notify", {})
+        email_config = notify.get("email")
         if email_config:
-            extra = task.setdefault('extra', {})
-            notify = extra.setdefault('notify', {})
-            notify['email'] = {
-                'content': email_config['content'],
-                'subject': email_config['subject'],
-                'link': email_config.get('link', None),
+            extra = task.setdefault("extra", {})
+            notify = extra.setdefault("notify", {})
+            notify["email"] = {
+                "content": email_config["content"],
+                "subject": email_config["subject"],
+                "link": email_config.get("link", None),
             }
-            routes = task.setdefault('routes', [])
-            routes.extend([
-                'notify.email.{}.on-{}'.format(address, reason)
-                for address in email_config['to-addresses']
-                for reason in email_config['on-reasons']
-            ])
+            routes = task.setdefault("routes", [])
+            routes.extend(
+                [
+                    "notify.email.{}.on-{}".format(address, reason)
+                    for address in email_config["to-addresses"]
+                    for reason in email_config["on-reasons"]
+                ]
+            )
 
         yield task

@@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
 kind.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
@@ -24,8 +22,8 @@ def resolve_keys(config, tasks):
             key,
             item_name=task["name"],
             **{
-                'build-type': task["attributes"]["build-type"],
-                'level': config.params["level"],
+                "build-type": task["attributes"]["build-type"],
+                "level": config.params["level"],
             }
         )
         yield task

@@ -5,8 +5,6 @@
 Resolve secrets and dummy secrets
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
@@ -19,9 +17,6 @@ def resolve_keys(config, tasks):
     for task in tasks:
         for key in ("run.secrets", "run.dummy-secrets"):
             resolve_keyed_by(
-                task,
-                key,
-                item_name=task["name"],
-                level=config.params["level"]
+                task, key, item_name=task["name"], level=config.params["level"]
             )
         yield task
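For reference, `resolve_keyed_by` collapses a `by-*` block in place, picking the branch that matches the keyword context it is given and falling back to `default`. A sketch under an assumed task layout; the `by-level` values are made up:

    from taskgraph.util.schema import resolve_keyed_by

    task = {
        "name": "secrets-example",
        "run": {"secrets": {"by-level": {"3": ["real-secret"], "default": []}}},
    }
    resolve_keyed_by(task, "run.secrets", item_name=task["name"], level="3")
    assert task["run"]["secrets"] == ["real-secret"]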

@@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
 kind.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
@@ -24,13 +22,14 @@ def resolve_keys(config, tasks):
             key,
             item_name=task["name"],
             **{
-                'build-type': task["attributes"]["build-type"],
-                'level': config.params["level"],
-                'tasks-for': config.params["tasks_for"],
+                "build-type": task["attributes"]["build-type"],
+                "level": config.params["level"],
+                "tasks-for": config.params["tasks_for"],
             }
         )
         yield task
 
+
 @transforms.add
 def set_worker_type(config, tasks):
     for task in tasks:
@@ -38,7 +37,14 @@ def set_worker_type(config, tasks):
         if (
             str(config.params["level"]) == "3"
             and task["attributes"]["build-type"]
-            in ("nightly", "beta", "release", "android-test-nightly", "beta-mozillaonline", "release-mozillaonline")
+            in (
+                "nightly",
+                "beta",
+                "release",
+                "android-test-nightly",
+                "beta-mozillaonline",
+                "release-mozillaonline",
+            )
             and config.params["tasks_for"] in ("cron", "action")
         ):
             worker_type = "signing"
@@ -50,13 +56,18 @@ def set_worker_type(config, tasks):
 def set_signing_type(config, tasks):
     for task in tasks:
         signing_type = "dep-signing"
-        if (
-            str(config.params["level"]) == "3"
-            and config.params["tasks_for"] in ("cron", "action")
-        ):
+        if str(config.params["level"]) == "3" and config.params["tasks_for"] in (
+            "cron",
+            "action",
+        ):
             if task["attributes"]["build-type"] in ("beta", "release"):
                 signing_type = "fennec-production-signing"
-            elif task["attributes"]["build-type"] in ("nightly", "android-test-nightly", "beta-mozillaonline", "release-mozillaonline"):
+            elif task["attributes"]["build-type"] in (
+                "nightly",
+                "android-test-nightly",
+                "beta-mozillaonline",
+                "release-mozillaonline",
+            ):
                 signing_type = "production-signing"
         task.setdefault("worker", {})["signing-type"] = signing_type
         yield task
@@ -66,11 +77,9 @@ def set_signing_type(config, tasks):
 def set_index(config, tasks):
     for task in tasks:
         index = {}
-        if (
-            config.params["tasks_for"] in ("cron", "action")
-            and task["attributes"]["build-type"]
-            in ("nightly", "debut", "nightly-simulation", "beta", "release")
-        ):
+        if config.params["tasks_for"] in ("cron", "action") and task["attributes"][
+            "build-type"
+        ] in ("nightly", "debut", "nightly-simulation", "beta", "release"):
             index["type"] = "signing"
         task["index"] = index
         yield task

@@ -2,8 +2,6 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
@@ -15,6 +13,8 @@ def add_pr_number(config, tasks):
     for task in tasks:
         include_pr = task.pop("include-pull-request-number")
         if include_pr and config.params["pull_request_number"]:
-            task["worker"]["env"]["PULL_REQUEST_NUMBER"] = str(config.params["pull_request_number"])
+            task["worker"]["env"]["PULL_REQUEST_NUMBER"] = str(
+                config.params["pull_request_number"]
+            )
 
         yield task

@@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the version b
 kind.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
@@ -24,8 +22,8 @@ def resolve_keys(config, tasks):
             key,
             item_name=task["name"],
             **{
-                'build-type': task["attributes"]["build-type"],
-                'level': config.params["level"],
+                "build-type": task["attributes"]["build-type"],
+                "level": config.params["level"],
             }
         )
         yield task

@@ -5,9 +5,6 @@
 Generate labels for tasks without names, consistently.
 Uses attributes from `primary-dependency`.
 """
-from __future__ import absolute_import, print_function, unicode_literals
-
-import os
 
 from taskgraph.transforms.base import TransformSequence
@@ -20,31 +17,33 @@ LABEL = "test-vismet-{platform}-{label}"
 @transforms.add
 def make_label(config, jobs):
-    """ Generate a sane label for a new task constructed from a dependency
+    """Generate a sane label for a new task constructed from a dependency
     Using attributes from the dependent job and the current task kind"""
     for job in jobs:
-        dep_job = job['primary-dependency']
+        dep_job = job["primary-dependency"]
         attr = dep_job.attributes.get
 
-        if attr('locale', job.get('locale')):
+        if attr("locale", job.get("locale")):
             template = "{kind}-{locale}-{build_platform}/{build_type}"
-        elif attr('l10n_chunk'):
+        elif attr("l10n_chunk"):
             template = "{kind}-{build_platform}-{l10n_chunk}/{build_type}"
-        elif config.kind.startswith("release-eme-free") or \
-                config.kind.startswith("release-partner-repack"):
-            suffix = job.get("extra", {}).get("repack_suffix", None) or \
-                job.get("extra", {}).get("repack_id", None)
+        elif config.kind.startswith("release-eme-free") or config.kind.startswith(
+            "release-partner-repack"
+        ):
+            suffix = job.get("extra", {}).get("repack_suffix", None) or job.get(
+                "extra", {}
+            ).get("repack_id", None)
             template = "{kind}-{build_platform}"
             if suffix:
-                template += "-{}".format(suffix.replace('/', '-'))
+                template += "-{}".format(suffix.replace("/", "-"))
         else:
             template = "{kind}-{build_platform}/{build_type}"
 
-        job['label'] = template.format(
+        job["label"] = template.format(
             kind=config.kind,
-            build_platform=attr('build_platform'),
-            build_type=attr('build_type'),
-            locale=attr('locale', job.get('locale', '')),  # Locale can be absent
-            l10n_chunk=attr('l10n_chunk', '')  # Can be empty
+            build_platform=attr("build_platform"),
+            build_type=attr("build_type"),
+            locale=attr("locale", job.get("locale", "")),  # Locale can be absent
+            l10n_chunk=attr("l10n_chunk", ""),  # Can be empty
         )
 
         yield job
@@ -53,41 +52,40 @@ def make_label(config, jobs):
 @transforms.add
 def run_visual_metrics(config, jobs):
     for job in jobs:
-        dep_job = job.pop('primary-dependency', None)
+        dep_job = job.pop("primary-dependency", None)
         if dep_job is not None:
-            platform = dep_job.task['extra']['treeherder-platform']
-            job['dependencies'] = {dep_job.label: dep_job.label}
+            platform = dep_job.task["extra"]["treeherder-platform"]
+            job["dependencies"] = {dep_job.label: dep_job.label}
 
             # Add the artifact to be processed as a fetches artifact
-            job['fetches'][dep_job.label] = [{
-                'artifact': 'browsertime-results.tgz',
-                'extract': True
-            }]
+            job["fetches"][dep_job.label] = [
+                {"artifact": "browsertime-results.tgz", "extract": True}
+            ]
 
             # vismet runs on Linux but we want to have it displayed
             # alongside the job it was triggered by to make it easier for
            # people to find it back.
-            job['label'] = LABEL.format(platform=platform, label=dep_job.label)
-            treeherder_info = dict(dep_job.task['extra']['treeherder'])
-            job['treeherder']['platform'] = platform
-            job['treeherder']['symbol'] = SYMBOL.format(
-                groupSymbol=treeherder_info['groupSymbol'],
-                symbol=treeherder_info['symbol']
+            job["label"] = LABEL.format(platform=platform, label=dep_job.label)
+            treeherder_info = dict(dep_job.task["extra"]["treeherder"])
+            job["treeherder"]["platform"] = platform
+            job["treeherder"]["symbol"] = SYMBOL.format(
+                groupSymbol=treeherder_info["groupSymbol"],
+                symbol=treeherder_info["symbol"],
             )
 
             # Store the platform name so we can use it to calculate
             # the similarity metric against other tasks
-            job['worker'].setdefault('env', {})['TC_PLATFORM'] = platform
+            job["worker"].setdefault("env", {})["TC_PLATFORM"] = platform
 
             # run-on-projects needs to be set based on the dependent task
             attributes = dict(dep_job.attributes)
-            job['run-on-projects'] = attributes['run_on_projects']
+            job["run-on-projects"] = attributes["run_on_projects"]
 
             # The run-on-tasks-for also needs to be setup here
-            job['run-on-tasks-for'] = attributes.get('run_on_tasks_for', [])
+            job["run-on-tasks-for"] = attributes.get("run_on_tasks_for", [])
 
             # We can't use the multi_dep transforms which remove this
             # field, so we remove the dependent-tasks entry here
-            del job['dependent-tasks']
+            del job["dependent-tasks"]
 
         yield job

@@ -2,8 +2,6 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-from __future__ import absolute_import, print_function, unicode_literals
-
 import itertools
 import os
 from copy import deepcopy
@@ -44,7 +42,7 @@ def generate_beetmover_upstream_artifacts(
         **{
             "release-type": config.params["release_type"],
             "platform": platform,
-        }
+        },
     )
 
     map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
     upstream_artifacts = list()
@@ -153,7 +151,7 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
         **{
             "release-type": config.params["release_type"],
             "platform": platform,
-        }
+        },
     )
 
     map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
     base_artifact_prefix = map_config.get(
@@ -176,9 +174,7 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
             map_config,
             "s3_bucket_paths",
             job["label"],
-            **{
-                "build-type": job["attributes"]["build-type"]
-            }
+            **{"build-type": job["attributes"]["build-type"]},
         )
 
     for locale, dep in sorted(itertools.product(locales, dependencies)):
@@ -218,12 +214,7 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
                 "pretty_name",
                 "checksums_path",
             ]:
-                resolve_keyed_by(
-                    file_config,
-                    field,
-                    job["label"],
-                    locale=locale
-                )
+                resolve_keyed_by(file_config, field, job["label"], locale=locale)
 
     # This format string should ideally be in the configuration file,
     # but this would mean keeping variable names in sync between code + config.
@@ -271,20 +262,16 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
         version = read_version_file()
         upload_date = datetime.fromtimestamp(config.params["build_date"])
 
         if job["attributes"]["build-type"] == "nightly":
             folder_prefix = upload_date.strftime("%Y/%m/%Y-%m-%d-%H-%M-%S-")
             # TODO: Remove this when version.txt has versioning fixed
-            version = version.split('-')[0]
+            version = version.split("-")[0]
         else:
             folder_prefix = f"{version}/android/"
 
         kwargs.update(
-            {
-                "locale": locale,
-                "version": version,
-                "folder_prefix": folder_prefix
-            }
+            {"locale": locale, "version": version, "folder_prefix": folder_prefix}
         )
         kwargs.update(**platforms)
         paths = jsone.render(paths, kwargs)
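The nightly branch above derives its S3 folder prefix from `build_date`; a quick illustration with a hypothetical timestamp, pinned to UTC so the output is deterministic (the real code uses the naive local-time `fromtimestamp`):

    from datetime import datetime, timezone

    upload_date = datetime.fromtimestamp(1650000000, tz=timezone.utc)
    prefix = upload_date.strftime("%Y/%m/%Y-%m-%d-%H-%M-%S-")
    assert prefix == "2022/04/2022-04-15-05-20-00-"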

@@ -2,10 +2,6 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-from __future__ import absolute_import, print_function, unicode_literals
-
-from six import text_type
-
 from voluptuous import Any, Required, Optional
 
 from taskgraph.util.schema import taskref_or_string
@@ -17,18 +13,18 @@ from taskgraph.transforms.task import payload_builder
     schema={
         # the maximum time to run, in seconds
         Required("max-run-time"): int,
-        Required("signing-type"): text_type,
+        Required("signing-type"): str,
         # list of artifact URLs for the artifacts that should be signed
         Required("upstream-artifacts"): [
             {
                 # taskId of the task with the artifact
                 Required("taskId"): taskref_or_string,
                 # type of signing task (for CoT)
-                Required("taskType"): text_type,
+                Required("taskType"): str,
                 # Paths to the artifacts to sign
-                Required("paths"): [text_type],
+                Required("paths"): [str],
                 # Signing formats to use on each of the paths
-                Required("formats"): [text_type],
+                Required("formats"): [str],
             }
         ],
     },
@@ -62,23 +58,27 @@ def build_scriptworker_signing_payload(config, task, task_def):
 @payload_builder(
     "scriptworker-beetmover",
     schema={
-        Required("action"): text_type,
-        Required("version"): text_type,
-        Required("artifact-map"): [{
-            Required("paths"): {
-                Any(text_type): {
-                    Required("destinations"): [text_type],
+        Required("action"): str,
+        Required("version"): str,
+        Required("artifact-map"): [
+            {
+                Required("paths"): {
+                    Any(str): {
+                        Required("destinations"): [str],
+                    },
                 },
-            },
-            Required("taskId"): taskref_or_string,
-        }],
-        Required("beetmover-application-name"): text_type,
-        Required("bucket"): text_type,
-        Required("upstream-artifacts"): [{
-            Required("taskId"): taskref_or_string,
-            Required("taskType"): text_type,
-            Required("paths"): [text_type],
-        }],
+                Required("taskId"): taskref_or_string,
+            }
+        ],
+        Required("beetmover-application-name"): str,
+        Required("bucket"): str,
+        Required("upstream-artifacts"): [
+            {
+                Required("taskId"): taskref_or_string,
+                Required("taskType"): str,
+                Required("paths"): [str],
+            }
+        ],
     },
 )
 def build_scriptworker_beetmover_payload(config, task, task_def):
@@ -96,14 +96,16 @@ def build_scriptworker_beetmover_payload(config, task, task_def):
         "artifactMap": worker["artifact-map"],
         "releaseProperties": {"appName": worker.pop("beetmover-application-name")},
         "upstreamArtifacts": worker["upstream-artifacts"],
-        "version": worker["version"]
+        "version": worker["version"],
     }
 
     scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
-    task_def["scopes"].extend([
-        "{}:beetmover:action:{}".format(scope_prefix, worker["action"]),
-        "{}:beetmover:bucket:{}".format(scope_prefix, worker["bucket"]),
-    ])
+    task_def["scopes"].extend(
+        [
+            "{}:beetmover:action:{}".format(scope_prefix, worker["action"]),
+            "{}:beetmover:bucket:{}".format(scope_prefix, worker["bucket"]),
+        ]
+    )
 
 
 @payload_builder(
@@ -112,16 +114,16 @@ def build_scriptworker_beetmover_payload(config, task, task_def):
         Required("upstream-artifacts"): [
             {
                 Required("taskId"): taskref_or_string,
-                Required("taskType"): text_type,
-                Required("paths"): [text_type],
+                Required("taskType"): str,
+                Required("paths"): [str],
             }
         ],
-        Required("certificate-alias"): text_type,
-        Required("channel"): text_type,
+        Required("certificate-alias"): str,
+        Required("channel"): str,
         Required("commit"): bool,
-        Required("product"): text_type,
+        Required("product"): str,
         Required("dep"): bool,
-        Optional("google-play-track"): text_type,
+        Optional("google-play-track"): str,
     },
 )
 def build_push_apk_payload(config, task, task_def):
@@ -152,11 +154,11 @@ def build_push_apk_payload(config, task, task_def):
         Required("upstream-artifacts"): [
             {
                 Required("taskId"): taskref_or_string,
-                Required("taskType"): text_type,
-                Required("paths"): [text_type],
+                Required("taskType"): str,
+                Required("paths"): [str],
             }
         ],
-        Required("release-name"): text_type,
+        Required("release-name"): str,
     },
 )
 def build_shipit_payload(config, task, task_def):
@@ -164,9 +166,7 @@ def build_shipit_payload(config, task, task_def):
     task_def["tags"]["worker-implementation"] = "scriptworker"
 
-    task_def['payload'] = {
-        'release_name': worker['release-name']
-    }
+    task_def["payload"] = {"release_name": worker["release-name"]}
@@ -175,17 +175,17 @@ def build_shipit_payload(config, task, task_def):
         Required("upstream-artifacts"): [
             {
                 Required("taskId"): taskref_or_string,
-                Required("taskType"): text_type,
-                Required("paths"): [text_type],
+                Required("taskType"): str,
+                Required("paths"): [str],
             }
         ],
         Required("artifact-map"): [object],
-        Required("action"): text_type,
-        Required("git-tag"): text_type,
-        Required("git-revision"): text_type,
-        Required("github-project"): text_type,
+        Required("action"): str,
+        Required("git-tag"): str,
+        Required("git-revision"): str,
+        Required("github-project"): str,
         Required("is-prerelease"): bool,
-        Required("release-name"): text_type,
+        Required("release-name"): str,
     },
 )
 def build_github_release_payload(config, task, task_def):
@@ -203,10 +203,12 @@ def build_github_release_payload(config, task, task_def):
     }
 
     scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
-    task_def["scopes"].extend([
-        "{}:github:project:{}".format(scope_prefix, worker["github-project"]),
-        "{}:github:action:{}".format(scope_prefix, worker["action"]),
-    ])
+    task_def["scopes"].extend(
+        [
+            "{}:github:project:{}".format(scope_prefix, worker["github-project"]),
+            "{}:github:action:{}".format(scope_prefix, worker["action"]),
+        ]
+    )
@@ -215,38 +217,38 @@ def build_github_release_payload(config, task, task_def):
         Optional("upstream-artifacts"): [
             {
                 Optional("taskId"): taskref_or_string,
-                Optional("taskType"): text_type,
-                Optional("paths"): [text_type],
+                Optional("taskType"): str,
+                Optional("paths"): [str],
             }
         ],
         Required("bump"): bool,
-        Optional("bump-files"): [text_type],
+        Optional("bump-files"): [str],
         Optional("push"): bool,
-        Optional("branch"): text_type,
+        Optional("branch"): str,
     },
 )
 def build_version_bump_payload(config, task, task_def):
     worker = task["worker"]
     task_def["tags"]["worker-implementation"] = "scriptworker"
-    task_def['payload'] = {'actions': []}
-    actions = task_def['payload']['actions']
-    if worker['bump']:
-        if not worker['bump-files']:
+    task_def["payload"] = {"actions": []}
+    actions = task_def["payload"]["actions"]
+    if worker["bump"]:
+        if not worker["bump-files"]:
             raise Exception("Version Bump requested without bump-files")
 
         bump_info = {}
         bump_info["next_version"] = config.params["next_version"]
-        bump_info['files'] = worker['bump-files']
-        task_def['payload']['version_bump_info'] = bump_info
-        actions.append('version_bump')
+        bump_info["files"] = worker["bump-files"]
+        task_def["payload"]["version_bump_info"] = bump_info
+        actions.append("version_bump")
 
     if worker["push"]:
-        task_def['payload']['push'] = True
+        task_def["payload"]["push"] = True
 
-    if worker.get('force-dry-run'):
-        task_def['payload']['dry_run'] = True
+    if worker.get("force-dry-run"):
+        task_def["payload"]["dry_run"] = True
 
     if worker.get("branch"):
         task_def["payload"]["branch"] = worker["branch"]

@@ -0,0 +1,5 @@
# For instructions on managing dependencies, see:
# https://taskcluster-taskgraph.readthedocs.io/en/latest/howto/bootstrap-taskgraph.html
taskcluster-taskgraph>=1.3.0
mozilla-version
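The requirements.txt that follows is generated from this file rather than edited by hand; per its header comment, it is regenerated with pip-compile (from pip-tools) after changing requirements.in:

    pip-compile --generate-hashes --output-file=requirements.txt requirements.in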

@@ -0,0 +1,106 @@
#
# This file is autogenerated by pip-compile with python 3.6
# To update, run:
#
# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
#
appdirs==1.4.4 \
--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \
--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128
# via taskcluster-taskgraph
attrs==21.4.0 \
--hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \
--hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd
# via
# mozilla-version
# taskcluster-taskgraph
certifi==2021.10.8 \
--hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \
--hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569
# via requests
charset-normalizer==2.0.12 \
--hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
--hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
# via requests
future==0.18.2 \
--hash=sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d
# via mozilla-version
idna==3.3 \
--hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \
--hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d
# via requests
json-e==4.4.3 \
--hash=sha256:8ed3974faa887ca96a7987298f6550cf2ad35472419a980766b3abe48258de0a
# via taskcluster-taskgraph
mozilla-version==1.0.0 \
--hash=sha256:678093aacc455a49b6feb6eeb922296c191aca4884e815573156b5966b8b051e \
--hash=sha256:a6d06ac135dd6f4333b9ac1fd04f41f4ba96e83973dd2ac9f70daf577718b96c
# via -r requirements.in
pyyaml==6.0 \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
# via taskcluster-taskgraph
redo==2.0.4 \
--hash=sha256:81066955041c853b0e6491eb65a0877dce45131c4cfa3d42d923fc2aa8f7a043
# via taskcluster-taskgraph
requests==2.27.1 \
--hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \
--hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d
# via
# requests-unixsocket
# taskcluster-taskgraph
requests-unixsocket==0.3.0 \
--hash=sha256:28304283ea9357d45fff58ad5b11e47708cfbf5806817aa59b2a363228ee971e \
--hash=sha256:c685c680f0809e1b2955339b1e5afc3c0022b3066f4f7eb343f43a6065fc0e5d
# via taskcluster-taskgraph
slugid==2.0.0 \
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
# via taskcluster-taskgraph
taskcluster-taskgraph==1.3.0 \
--hash=sha256:a1050f8a613e31fcd7bc4fed85e07e57baa5675a5a5719065caebada533eb8f5 \
--hash=sha256:bd3c35d82296c323064c8b6e6d4a88ab186bcd872bdd930bfe734a0b2525a082
# via -r requirements.in
taskcluster-urls==13.0.1 \
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
--hash=sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b
# via taskcluster-taskgraph
urllib3==1.26.8 \
--hash=sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed \
--hash=sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c
# via requests
voluptuous==0.12.2 \
--hash=sha256:4db1ac5079db9249820d49c891cb4660a6f8cae350491210abce741fabf56513
# via taskcluster-taskgraph