RELENG-805 - Pull taskgraph from pypi

Branch: upstream-sync
Author: Heitor Neiva, committed by mergify[bot]
Parent: 4268c13f89
Commit: 1647d93591

@ -6,301 +6,287 @@ policy:
  pullRequests: collaborators
tasks:
  - $let:
      trustDomain: mobile
    in:
      $let:
        # Github events have this stuff in different places...
        ownerEmail:
          $if: 'tasks_for in ["cron", "action"]'
          then: '${tasks_for}@noreply.mozilla.org'
          else:
            $if: 'event.sender.login == "bors[bot]"'
            then: 'skaspari+mozlando@mozilla.com' # It must match what's in bors.toml
            else:
              $if: 'tasks_for == "github-push"'
              then: '${event.pusher.email}'
              else:
                $if: 'tasks_for == "github-pull-request"'
                then: '${event.pull_request.user.login}@users.noreply.github.com'
        baseRepoUrl:
          $if: 'tasks_for == "github-push"'
          then: '${event.repository.html_url}'
          else:
            $if: 'tasks_for == "github-pull-request"'
            then: '${event.pull_request.base.repo.html_url}'
            else:
              $if: 'tasks_for in ["cron", "action"]'
              then: '${repository.url}'
        repoUrl:
          $if: 'tasks_for == "github-push"'
          then: '${event.repository.html_url}'
          else:
            $if: 'tasks_for == "github-pull-request"'
            then: '${event.pull_request.head.repo.html_url}'
            else:
              $if: 'tasks_for in ["cron", "action"]'
              then: '${repository.url}'
        project:
          $if: 'tasks_for == "github-push"'
          then: '${event.repository.name}'
          else:
            $if: 'tasks_for == "github-pull-request"'
            then: '${event.pull_request.head.repo.name}'
            else:
              $if: 'tasks_for in ["cron", "action"]'
              then: '${repository.project}'
        head_branch:
          $if: 'tasks_for == "github-pull-request"'
          then: ${event.pull_request.head.ref}
          else:
            $if: 'tasks_for == "github-push"'
            then: ${event.ref}
            else:
              $if: 'tasks_for in ["action", "cron"]'
              then: '${push.branch}'
        head_sha:
          $if: 'tasks_for == "github-push"'
          then: '${event.after}'
          else:
            $if: 'tasks_for == "github-pull-request"'
            then: '${event.pull_request.head.sha}'
            else:
              $if: 'tasks_for in ["action", "cron"]'
              then: '${push.revision}'
        ownTaskId:
          $if: '"github" in tasks_for'
          then: {$eval: as_slugid("decision_task")}
          else:
            $if: 'tasks_for in ["cron", "action"]'
            then: '${ownTaskId}'
        pullRequestAction:
          $if: 'tasks_for == "github-pull-request"'
          then: ${event.action}
          else: 'UNDEFINED'
      in:
        $if: >
          tasks_for in ["action", "cron"]
          || (tasks_for == "github-pull-request" && pullRequestAction in ["opened", "reopened", "synchronize"])
          || (tasks_for == "github-push" && head_branch[:10] != "refs/tags/") && (head_branch != "staging.tmp") && (head_branch != "trying.tmp") && (head_branch[:8] != "mergify/")
        then:
          $let:
            level:
              $if: 'tasks_for in ["github-push", "action", "cron"] && repoUrl == "https://github.com/mozilla-mobile/fenix"'
              then: '3'
              else: '1'
            short_head_branch:
              $if: 'head_branch[:11] == "refs/heads/"'
              then: {$eval: 'head_branch[11:]'}
          in:
            taskId:
              $if: 'tasks_for != "action"'
              then: '${ownTaskId}'
            taskGroupId:
              $if: 'tasks_for == "action"'
              then: '${action.taskGroupId}'
              else: '${ownTaskId}' # same as taskId; this is how automation identifies a decision task
            schedulerId: '${trustDomain}-level-${level}'
            created: {$fromNow: ''}
            deadline: {$fromNow: '1 day'}
            expires: {$fromNow: '1 year 1 second'} # 1 second so artifacts expire first, despite rounding errors
            metadata:
              $merge:
                - owner: "${ownerEmail}"
                  source: '${repoUrl}/raw/${head_sha}/.taskcluster.yml'
                - $if: 'tasks_for in ["github-push", "github-pull-request"]'
                  then:
                    name: "Decision Task"
                    description: 'The task that creates all of the other tasks in the task graph'
                  else:
                    $if: 'tasks_for == "action"'
                    then:
                      name: "Action: ${action.title}"
                      description: |
                        ${action.description}

                        Action triggered by clientID `${clientId}`
                    else:
                      name: "Decision Task for cron job ${cron.job_name}"
                      description: 'Created by a [cron task](https://firefox-ci-tc.services.mozilla.com/tasks/${cron.task_id})'
            provisionerId: "${trustDomain}-${level}"
            workerType: "decision"
            tags:
              $if: 'tasks_for in ["github-push", "github-pull-request"]'
              then:
                kind: decision-task
              else:
                $if: 'tasks_for == "action"'
                then:
                  kind: 'action-callback'
                else:
                  $if: 'tasks_for == "cron"'
                  then:
                    kind: cron-task
            routes:
              $flattenDeep:
                - checks
                - $if: 'level == "3" || repoUrl == "https://github.com/mozilla-releng/staging-fenix"'
                  then:
                    - tc-treeherder.v2.${project}.${head_sha}
                # TODO Bug 1601928: Make this scope fork-friendly once ${project} is better defined. This will enable
                # staging release promotion on forks.
                - $if: 'tasks_for == "github-push"'
                  then:
                    - index.${trustDomain}.v2.${project}.branch.${short_head_branch}.latest.taskgraph.decision
                    - index.${trustDomain}.v2.${project}.branch.${short_head_branch}.revision.${head_sha}.taskgraph.decision
                    - index.${trustDomain}.v2.${project}.revision.${head_sha}.taskgraph.decision
                - $if: 'tasks_for == "cron"'
                  then:
                    # cron context provides ${head_branch} as a short one
                    - index.${trustDomain}.v2.${project}.branch.${head_branch}.latest.taskgraph.decision-${cron.job_name}
                    - index.${trustDomain}.v2.${project}.branch.${head_branch}.revision.${head_sha}.taskgraph.decision-${cron.job_name}
                    - index.${trustDomain}.v2.${project}.branch.${head_branch}.revision.${head_sha}.taskgraph.cron.${ownTaskId}
            scopes:
              $if: 'tasks_for == "github-push"'
              then:
                # `https://` is 8 characters so, ${repoUrl[8:]} is the repository without the protocol.
                - 'assume:repo:${repoUrl[8:]}:branch:${short_head_branch}'
              else:
                $if: 'tasks_for == "github-pull-request"'
                then:
                  - 'assume:repo:github.com/${event.pull_request.base.repo.full_name}:pull-request'
                else:
                  $if: 'tasks_for == "action"'
                  then:
                    # when all actions are hooks, we can calculate this directly rather than using a variable
                    - '${action.repo_scope}'
                  else:
                    - 'assume:repo:${repoUrl[8:]}:cron:${cron.job_name}'
            requires: all-completed
            priority: lowest
            retries: 5
            payload:
              env:
                # run-task uses these to check out the source; the inputs
                # to `mach taskgraph decision` are all on the command line.
                $merge:
                  - MOBILE_BASE_REPOSITORY: '${baseRepoUrl}'
                    MOBILE_HEAD_REPOSITORY: '${repoUrl}'
                    MOBILE_HEAD_REF: '${head_branch}'
                    MOBILE_HEAD_REV: '${head_sha}'
                    MOBILE_REPOSITORY_TYPE: git
                    MOBILE_PIP_REQUIREMENTS: taskcluster/requirements.txt
                    REPOSITORIES: {$json: {mobile: "Fenix"}}
                    HG_STORE_PATH: /builds/worker/checkouts/hg-store
                    ANDROID_SDK_ROOT: /builds/worker/android-sdk
                  - $if: 'tasks_for in ["github-pull-request"]'
                    then:
                      MOBILE_PULL_REQUEST_NUMBER: '${event.pull_request.number}'
                  - $if: 'tasks_for == "action"'
                    then:
                      ACTION_TASK_GROUP_ID: '${action.taskGroupId}' # taskGroupId of the target task
                      ACTION_TASK_ID: {$json: {$eval: 'taskId'}} # taskId of the target task (JSON-encoded)
                      ACTION_INPUT: {$json: {$eval: 'input'}}
                      ACTION_CALLBACK: '${action.cb_name}'
              features:
                taskclusterProxy: true
                chainOfTrust: true
              # Note: This task is built server side without the context or tooling that
              # exist in tree so we must hard code the hash
              image: mozillareleases/taskgraph:decision-mobile-7e11b0cc3966ad9729e08b19551399b3343d3b385eac067b6335d4c34431a899@sha256:b309fa59efd59991ba286a326cb43b724c38e6f3872c52d0f85e96428899c2fc
              maxRunTime: 1800
              command:
                - /usr/local/bin/run-task
                - '--mobile-checkout=/builds/worker/checkouts/src'
                - '--task-cwd=/builds/worker/checkouts/src'
                - '--'
                - bash
                - -cx
                - $let:
                    extraArgs:
                      $if: 'tasks_for == "cron"'
                      then: '${cron.quoted_args}'
                      else: ''
                  in:
                    $if: 'tasks_for == "action"'
                    then: >
                      taskcluster/scripts/decision-install-sdk.sh &&
                      ln -s /builds/worker/artifacts artifacts &&
                      ~/.local/bin/taskgraph action-callback
                    else: >
                      taskcluster/scripts/decision-install-sdk.sh &&
                      ln -s /builds/worker/artifacts artifacts &&
                      ~/.local/bin/taskgraph decision
                      --pushlog-id='0'
                      --pushdate='0'
                      --project='${project}'
                      --message=""
                      --owner='${ownerEmail}'
                      --level='${level}'
                      --base-repository="$MOBILE_BASE_REPOSITORY"
                      --head-repository="$MOBILE_HEAD_REPOSITORY"
                      --head-ref="$MOBILE_HEAD_REF"
                      --head-rev="$MOBILE_HEAD_REV"
                      --repository-type="$MOBILE_REPOSITORY_TYPE"
                      --tasks-for='${tasks_for}'
                      ${extraArgs}
              artifacts:
                'public':
                  type: 'directory'
                  path: '/builds/worker/artifacts'
                  expires:
                    $fromNow: '1 year'
                'public/docker-contexts':
                  type: 'directory'
                  path: '/builds/worker/checkouts/src/docker-contexts'
                  # This needs to be at least the deadline of the
                  # decision task + the docker-image task deadlines.
                  # It is set to a week to allow for some time for
                  # debugging, but they are not useful long-term.
                  expires:
                    $fromNow: '7 day'
            extra:
              $merge:
                - treeherder:
                    $merge:
                      - machine:
                          platform: gecko-decision
                      - $if: 'tasks_for in ["github-push", "github-pull-request"]'
                        then:
                          symbol: D
                        else:
                          $if: 'tasks_for == "action"'
                          then:
                            groupName: 'action-callback'
                            groupSymbol: AC
                            symbol: "${action.symbol}"
                          else:
                            groupSymbol: cron
                            symbol: "${cron.job_symbol}"
                - $if: 'tasks_for == "action"'
                  then:
                    parent: '${action.taskGroupId}'
                    action:
                      name: '${action.name}'
                      context:
                        taskGroupId: '${action.taskGroupId}'
                        taskId: {$eval: 'taskId'}
                        input: {$eval: 'input'}
                        clientId: {$eval: 'clientId'}
                - $if: 'tasks_for == "cron"'
                  then:
                    cron: {$json: {$eval: 'cron'}}
                - tasks_for: '${tasks_for}'
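
Taskcluster renders this file with JSON-e; a minimal sketch of how the $let/$if constructs above evaluate, using the json-e Python package (template and values are illustrative only, not taken from this file):

    import jsone

    template = {
        "$let": {"trustDomain": "mobile"},
        "in": {
            "$if": 'tasks_for == "github-push"',
            "then": "${trustDomain}-level-3",
            "else": "skipped",
        },
    }
    jsone.render(template, {"tasks_for": "github-push"})
    # -> "mobile-level-3"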

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from importlib import import_module


@ -12,14 +10,16 @@ def register(graph_config):
    Import all modules that are siblings of this one, triggering decorators in
    the process.
    """
    _import_modules(
        [
            "job",
            "parameters",
            "release_promotion",
            "routes",
            "target_tasks",
            "worker_types",
        ]
    )


def _import_modules(modules):
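    # (The hunk ends at the signature; the loop below is a sketch of the
    # likely body, an assumption based on the import_module import above
    # and on how sibling taskgraph repositories implement this helper.)
    for module in modules:
        import_module(".{}".format(module), package=__name__)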

@ -2,48 +2,48 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import subprocess

from taskgraph.util.memoize import memoize


def get_variant(build_type):
    all_variants = _fetch_all_variants()
    matching_variants = [
        variant for variant in all_variants if variant["build_type"] == build_type
    ]
    number_of_matching_variants = len(matching_variants)
    if number_of_matching_variants == 0:
        raise ValueError('No variant found for build type "{}"'.format(build_type))
    elif number_of_matching_variants > 1:
        raise ValueError(
            'Too many variants found for build type "{}": {}'.format(
                build_type, matching_variants
            )
        )

    return matching_variants.pop()


@memoize
def _fetch_all_variants():
    output = _run_gradle_process("printVariants")
    content = _extract_content_from_command_output(output, prefix="variants: ")
    return json.loads(content)


def _run_gradle_process(gradle_command, **kwargs):
    gradle_properties = [
        "-P{property_name}={value}".format(property_name=property_name, value=value)
        for property_name, value in kwargs.items()
    ]
    process = subprocess.Popen(
        ["./gradlew", "--no-daemon", "--quiet", gradle_command] + gradle_properties,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    output, err = process.communicate()
    exit_code = process.wait()

@ -54,5 +54,5 @@ def _run_gradle_process(gradle_command, **kwargs):
def _extract_content_from_command_output(output, prefix):
    variants_line = [line for line in output.split("\n") if line.startswith(prefix)][0]
    return variants_line.split(" ", 1)[1]
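
For example, assuming `./gradlew printVariants` emits a line like `variants: [...]` (hypothetical output), the helpers above recover the JSON payload:

    output = 'variants: [{"build_type": "debug", "apks": []}]\n'
    content = _extract_content_from_command_output(output, prefix="variants: ")
    # content == '[{"build_type": "debug", "apks": []}]'
    variants = json.loads(content)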

@ -2,50 +2,51 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
from taskgraph.util import path
from taskgraph.util.schema import Schema, taskref_or_string
from voluptuous import Required, Optional

from pipes import quote as shell_quote


secret_schema = {
    Required("name"): str,
    Required("path"): str,
    Required("key"): str,
    Optional("json"): bool,
}

dummy_secret_schema = {
    Required("content"): str,
    Required("path"): str,
    Optional("json"): bool,
}

gradlew_schema = Schema(
    {
        Required("using"): "gradlew",
        Optional("pre-gradlew"): [[str]],
        Required("gradlew"): [str],
        Optional("post-gradlew"): [[str]],
        # Base work directory used to set up the task.
        Required("workdir"): str,
        Optional("use-caches"): bool,
        Optional("secrets"): [secret_schema],
        Optional("dummy-secrets"): [dummy_secret_schema],
    }
)

run_commands_schema = Schema(
    {
        Required("using"): "run-commands",
        Optional("pre-commands"): [[str]],
        Required("commands"): [[taskref_or_string]],
        Required("workdir"): str,
        Optional("use-caches"): bool,
        Optional("secrets"): [secret_schema],
        Optional("dummy-secrets"): [dummy_secret_schema],
    }
)


@run_job_using("docker-worker", "run-commands", schema=run_commands_schema)
@ -53,7 +54,8 @@ def configure_run_commands_schema(config, job, taskdesc):
    run = job["run"]
    pre_commands = run.pop("pre-commands", [])
    pre_commands += [
        _generate_dummy_secret_command(secret)
        for secret in run.pop("dummy-secrets", [])
    ]
    pre_commands += [
        _generate_secret_command(secret) for secret in run.get("secrets", [])
@ -73,11 +75,9 @@ def configure_gradlew(config, job, taskdesc):
    worker = taskdesc["worker"] = job["worker"]

    fetches_dir = path.join(run["workdir"], worker["env"]["MOZ_FETCHES_DIR"])
    worker.setdefault("env", {}).update(
        {"ANDROID_SDK_ROOT": path.join(fetches_dir, "android-sdk-linux")}
    )

    run["command"] = _extract_gradlew_command(run, fetches_dir)
    _inject_secrets_scopes(run, taskdesc)
@ -88,7 +88,8 @@ def configure_gradlew(config, job, taskdesc):
def _extract_gradlew_command(run, fetches_dir):
    pre_gradle_commands = run.pop("pre-gradlew", [])
    pre_gradle_commands += [
        _generate_dummy_secret_command(secret)
        for secret in run.pop("dummy-secrets", [])
    ]
    pre_gradle_commands += [
        _generate_secret_command(secret) for secret in run.get("secrets", [])
@ -101,7 +102,9 @@ def _extract_gradlew_command(run, fetches_dir):
        )
        for repo_name in ("google", "central")
    ]
    gradle_command = (
        ["./gradlew"] + gradle_repos_args + ["listRepositories"] + run.pop("gradlew")
    )
    post_gradle_commands = run.pop("post-gradlew", [])

    commands = pre_gradle_commands + [gradle_command] + post_gradle_commands
@ -111,9 +114,12 @@ def _extract_gradlew_command(run, fetches_dir):
def _generate_secret_command(secret):
    secret_command = [
        "taskcluster/scripts/get-secret.py",
        "-s",
        secret["name"],
        "-k",
        secret["key"],
        "-f",
        secret["path"],
    ]
    if secret.get("json"):
        secret_command.append("--json")
@ -124,8 +130,10 @@ def _generate_secret_command(secret):
def _generate_dummy_secret_command(secret):
    secret_command = [
        "taskcluster/scripts/write-dummy-secret.py",
        "-f",
        secret["path"],
        "-c",
        secret["content"],
    ]
    if secret.get("json"):
        secret_command.append("--json")
@ -149,18 +157,22 @@ def _convert_commands_to_string(commands):
                    part_string = part["task-reference"]
                    should_task_reference = True
                else:
                    raise ValueError("Unsupported dict: {}".format(part))
            else:
                part_string = part

            sanitized_parts.append(part_string)
        sanitized_commands.append(sanitized_parts)

    shell_quoted_commands = [
        " ".join(map(shell_quote, command)) for command in sanitized_commands
    ]
    full_string_command = " && ".join(shell_quoted_commands)

    if should_artifact_reference and should_task_reference:
        raise NotImplementedError(
            '"artifact-reference" and "task-reference" cannot both be used'
        )
    elif should_artifact_reference:
        return {"artifact-reference": full_string_command}
    elif should_task_reference:
@ -173,7 +185,9 @@ def _inject_secrets_scopes(run, taskdesc):
    secrets = run.pop("secrets", [])
    scopes = taskdesc.setdefault("scopes", [])
    new_secret_scopes = ["secrets:get:{}".format(secret["name"]) for secret in secrets]
    new_secret_scopes = list(
        set(new_secret_scopes)
    )  # Scopes must not have any duplicates
    scopes.extend(new_secret_scopes)
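
As an illustration, a secret declaration becomes an argv list for the get-secret script (values hypothetical):

    secret = {
        "name": "project/mobile/fenix/nightly",
        "key": "adjust",
        "path": ".adjust_token",
    }
    _generate_secret_command(secret)
    # -> ["taskcluster/scripts/get-secret.py",
    #     "-s", "project/mobile/fenix/nightly",
    #     "-k", "adjust",
    #     "-f", ".adjust_token"]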

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import copy
@ -15,12 +13,12 @@ def group_by(name):
    def wrapper(func):
        GROUP_BY_MAP[name] = func
        return func

    return wrapper


def group_tasks(config, tasks):
    group_by_fn = GROUP_BY_MAP[config["group-by"]]

    groups = group_by_fn(config, tasks)
@ -29,33 +27,33 @@ def group_tasks(config, tasks):
        yield dependencies


@group_by("build-type")
def build_type_grouping(config, tasks):
    groups = {}
    kind_dependencies = config.get("kind-dependencies")
    only_build_type = config.get("only-for-build-types")

    for task in tasks:
        if task.kind not in kind_dependencies:
            continue

        if only_build_type:
            build_type = task.attributes.get("build-type")
            if build_type not in only_build_type:
                continue

        build_type = task.attributes.get("build-type")
        groups.setdefault(build_type, []).append(task)

    return groups


@group_by("attributes")
def attributes_grouping(config, tasks):
    groups = {}
    kind_dependencies = config.get("kind-dependencies")
    only_attributes = config.get("only-for-attributes")

    for task in tasks:
        if task.kind not in kind_dependencies:
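
For instance, a kind could select one of these registered grouping functions through its configuration (hypothetical values):

    config = {
        "group-by": "build-type",
        "kind-dependencies": ["build"],
        "only-for-build-types": ["nightly", "beta"],
    }
    grouping_fn = GROUP_BY_MAP[config["group-by"]]  # -> build_type_grouping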

@ -2,27 +2,25 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import copy

from voluptuous import Required

from taskgraph.task import Task
from taskgraph.util.attributes import sorted_unique_list
from taskgraph.util.schema import Schema

from . import group_tasks


schema = Schema(
    {
        Required("primary-dependency", "primary dependency task"): Task,
        Required(
            "dependent-tasks",
            "dictionary of dependent tasks, keyed by kind",
        ): {str: Task},
    }
)


def loader(kind, path, config, params, loaded_tasks):
@ -39,7 +37,7 @@ def loader(kind, path, config, params, loaded_tasks):
    Optional ``job-template`` kind configuration value, if specified, will be used to
    pass configuration down to the specified transforms used.
    """
    job_template = config.get("job-template")

    for dep_tasks in group_tasks(config, loaded_tasks):
        kinds = [dep.kind for dep in dep_tasks]
@ -50,8 +48,8 @@ def loader(kind, path, config, params, loaded_tasks):
            for dep in dep_tasks
        }

        job = {"dependent-tasks": dep_tasks_per_unique_key}
        job["primary-dependency"] = get_primary_dep(config, dep_tasks_per_unique_key)

        if job_template:
            job.update(copy.deepcopy(job_template))
@ -65,7 +63,7 @@ def get_primary_dep(config, dep_tasks):
    defined and is a list, the first kind in that list with a matching dep
    is the primary dependency. If it's undefined, return the first dep.
    """
    primary_dependencies = config.get("primary-dependency")
    if isinstance(primary_dependencies, str):
        primary_dependencies = [primary_dependencies]
    if not primary_dependencies:
@ -75,13 +73,16 @@ def get_primary_dep(config, dep_tasks):
    for primary_kind in primary_dependencies:
        for dep_kind in dep_tasks:
            if dep_kind == primary_kind:
                assert (
                    primary_dep is None
                ), "Too many primary dependent tasks in dep_tasks: {}!".format(
                    [t.label for t in dep_tasks]
                )
                primary_dep = dep_tasks[dep_kind]
    if primary_dep is None:
        raise Exception(
            "Can't find dependency of {}: {}".format(
                config["primary-dependency"], config
            )
        )

    return primary_dep
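
For example (build_task and signing_task standing in for real Task objects):

    config = {"primary-dependency": ["signing", "build"]}
    dep_tasks = {"build": build_task, "signing": signing_task}
    get_primary_dep(config, dep_tasks)  # -> signing_task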

@ -2,36 +2,21 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os

from taskgraph.parameters import extend_parameters_schema
from voluptuous import All, Any, Optional, Range, Required


extend_parameters_schema(
    {
        Required("pull_request_number"): Any(All(int, Range(min=1)), None),
        Required("release_type", default=""): str,
        Optional("shipping_phase"): Any("build", "ship", None),
        Required("version", default=""): str,
        Required("next_version", default=""): Any(None, str),
    }
)
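
With the defaults now declared in the schema itself, voluptuous fills them in at validation time; a minimal standalone illustration:

    from voluptuous import Required, Schema

    schema = Schema({Required("release_type", default=""): str})
    schema({})  # -> {"release_type": ""}
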
def get_decision_parameters(graph_config, parameters):

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os

from mozilla_version.fenix import FenixVersion
@ -13,7 +11,10 @@ from taskgraph.util.taskcluster import get_artifact
from taskgraph.taskgraph import TaskGraph
from taskgraph.decision import taskgraph_decision
from taskgraph.parameters import Parameters
from taskgraph.util.taskgraph import (
    find_decision_task,
    find_existing_tasks_from_previous_kinds,
)


RELEASE_PROMOTION_PROJECTS = (
    "https://github.com/mozilla-mobile/fenix",
@ -22,74 +23,84 @@ RELEASE_PROMOTION_PROJECTS = (


def is_release_promotion_available(parameters):
    return parameters["head_repository"] in RELEASE_PROMOTION_PROJECTS


@register_callback_action(
    name="release-promotion",
    title="Ship Fenix",
    symbol="${input.release_promotion_flavor}",
    description="Ship Fenix",
    generic=False,
    order=500,
    context=[],
    available=is_release_promotion_available,
    schema=lambda graph_config: {
        "type": "object",
        "properties": {
            "build_number": {
                "type": "integer",
                "default": 1,
                "minimum": 1,
                "title": "The release build number",
                "description": (
                    "The release build number. Starts at 1 per "
                    "release version, and increments on rebuild."
                ),
            },
            "do_not_optimize": {
                "type": "array",
                "description": (
                    "Optional: a list of labels to avoid optimizing out "
                    "of the graph (to force a rerun of, say, "
                    "funsize docker-image tasks)."
                ),
                "items": {
                    "type": "string",
                },
            },
            "revision": {
                "type": "string",
                "title": "Optional: revision to ship",
                "description": ("Optional: the revision to ship."),
            },
            "release_promotion_flavor": {
                "type": "string",
                "description": "The flavor of release promotion to perform.",
                "default": "build",
                "enum": sorted(graph_config["release-promotion"]["flavors"].keys()),
            },
            "rebuild_kinds": {
                "type": "array",
                "description": (
                    "Optional: an array of kinds to ignore from the previous "
                    "graph(s)."
                ),
                "items": {
                    "type": "string",
                },
            },
            "previous_graph_ids": {
                "type": "array",
                "description": (
                    "Optional: an array of taskIds of decision or action "
                    "tasks from the previous graph(s) to use to populate "
                    "our `previous_graph_kinds`."
                ),
                "items": {
                    "type": "string",
                },
            },
            "version": {
                "type": "string",
                "description": (
                    "Optional: override the version for release promotion. "
                    "Occasionally we'll land a taskgraph fix in a later "
                    "commit, but want to act on a build from a previous "
                    "commit. If a version bump has landed in the meantime, "
                    "relying on the in-tree version will break things."
                ),
                "default": "",
            },
            "next_version": {
                "type": "string",
@ -97,23 +108,34 @@ def is_release_promotion_available(parameters):
                "default": "",
            },
        },
        "required": [
            "release_promotion_flavor",
            "version",
            "build_number",
            "next_version",
        ],
    },
)
def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
    release_promotion_flavor = input["release_promotion_flavor"]
    promotion_config = graph_config["release-promotion"]["flavors"][
        release_promotion_flavor
    ]

    target_tasks_method = promotion_config["target-tasks-method"].format(
        project=parameters["project"]
    )
    rebuild_kinds = input.get("rebuild_kinds") or promotion_config.get(
        "rebuild-kinds", []
    )
    do_not_optimize = input.get("do_not_optimize") or promotion_config.get(
        "do-not-optimize", []
    )

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
    previous_graph_ids = input.get("previous_graph_ids")
    if not previous_graph_ids:
        previous_graph_ids = [find_decision_task(parameters, graph_config)]
@ -129,25 +151,31 @@ def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds
    )
    parameters["do_not_optimize"] = do_not_optimize
    parameters["target_tasks_method"] = target_tasks_method
    parameters["build_number"] = int(input["build_number"])
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters["optimize_target_tasks"] = True
    parameters["shipping_phase"] = input["release_promotion_flavor"]

    version_in_file = read_version_file()
    parameters["version"] = (
        input["version"] if input.get("version") else read_version_file()
    )
    version_string = parameters["version"]
    if version_string != version_in_file:
        raise ValueError(
            "Version given in tag ({}) does not match the one in version.txt ({})".format(
                version_string, version_in_file
            )
        )
    parameters["head_tag"] = "v{}".format(version_string)

    parameters["next_version"] = input["next_version"]

    version = FenixVersion.parse(version_string)
    if version.is_beta:
@ -158,17 +186,17 @@ def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
        release_type = "release"
    else:
        raise ValueError("Unsupported version type: {}".format(version.version_type))
    parameters["release_type"] = release_type
    parameters["tasks_for"] = "action"

    parameters["pull_request_number"] = None

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)


def read_version_file():
    with open(os.path.join(os.path.dirname(__file__), "..", "..", "version.txt")) as f:
        return f.read().strip()
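
A release-promotion action input accepted by the schema above might look like this (all values hypothetical):

    input = {
        "release_promotion_flavor": "ship",
        "version": "98.2.0",
        "build_number": 1,
        "next_version": "98.3.0",
    }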

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
from taskgraph.transforms.task import index_builder

@ -2,12 +2,10 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from taskgraph.target_tasks import _target_task


@_target_task("release")
def target_tasks_default(full_task_graph, parameters, graph_config):
    # TODO Use shipping-phase
@ -35,7 +33,11 @@ def _filter_fennec(fennec_type, task, parameters):
def target_tasks_fennec_nightly(full_task_graph, parameters, graph_config):
    """Select the set of tasks required for a production build signed with the fennec key."""
    return [
        l
        for l, t in full_task_graph.tasks.items()
        if _filter_fennec("production", t, parameters)
    ]
@_target_task("bump_android_components")

@ -5,15 +5,12 @@
Transform the beetmover task into an actual task description.
"""
import logging

from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.task import task_description_schema
from voluptuous import Optional, Required, Schema

from fenix_taskgraph.util.scriptworker import generate_beetmover_artifact_map
@ -22,13 +19,13 @@ logger = logging.getLogger(__name__)

beetmover_description_schema = Schema(
    {
        # unique name to describe this beetmover task, defaults to {dep.label}-beetmover
        Required("name"): str,
        Required("worker"): {"upstream-artifacts": [dict]},
        # treeherder is allowed here to override any defaults we use for beetmover.
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("attributes"): task_description_schema["attributes"],
        Optional("dependencies"): task_description_schema["dependencies"],
        Optional("run-on-tasks-for"): [str],
        Optional("bucket-scope"): optionally_keyed_by("level", "build-type", str),
    }
)
@ -43,25 +40,23 @@ def make_task_description(config, tasks):
        attributes = task["attributes"]

        label = "beetmover-{}".format(task["name"])
        description = "Beetmover submission for build type '{build_type}'".format(
            build_type=attributes.get("build-type"),
        )

        if task.get("locale"):
            attributes["locale"] = task["locale"]

        resolve_keyed_by(
            task,
            "bucket-scope",
            item_name=task["name"],
            **{
                "build-type": task["attributes"]["build-type"],
                "level": config.params["level"],
            }
        )
        bucket_scope = task.pop("bucket-scope")

        task = {
            "label": label,
@ -85,10 +80,10 @@ def make_task_description(config, tasks):
def craft_release_properties(config, task):
    params = config.params
    return {
        "app-name": str(params["project"]),
        "app-version": str(params["version"]),
        "branch": str(params["project"]),
        "build-id": str(params["moz_build_date"]),
        "hash-type": "sha512",
        "platform": "android",
    }
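
With hypothetical params {"project": "fenix", "version": "98.2.0", "moz_build_date": "20220301120000"}, craft_release_properties would return:

    {
        "app-name": "fenix",
        "app-version": "98.2.0",
        "branch": "fenix",
        "build-id": "20220301120000",
        "hash-type": "sha512",
        "platform": "android",
    }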

@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
kind.
"""
import copy
import json

@ -6,10 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
kind.
"""
import datetime

from taskgraph.transforms.base import TransformSequence

from fenix_taskgraph.gradle import get_variant
@ -33,31 +29,49 @@ def add_shippable_secrets(config, tasks):
        secrets = task["run"].setdefault("secrets", [])
        dummy_secrets = task["run"].setdefault("dummy-secrets", [])

        if (
            task.pop("include-shippable-secrets", False)
            and config.params["level"] == "3"
        ):
            build_type = task["attributes"]["build-type"]
            gradle_build_type = task["run"]["gradle-build-type"]
            secret_index = "project/mobile/fenix/{}".format(build_type)
            secrets.extend(
                [
                    {
                        "key": key,
                        "name": secret_index,
                        "path": target_file,
                    }
                    for key, target_file in (
                        ("adjust", ".adjust_token"),
                        (
                            "firebase",
                            "app/src/{}/res/values/firebase.xml".format(
                                gradle_build_type
                            ),
                        ),
                        ("sentry_dsn", ".sentry_token"),
                        ("mls", ".mls_token"),
                        ("nimbus_url", ".nimbus"),
                        ("wallpaper_url", ".wallpaper_url"),
                    )
                ]
            )
        else:
            dummy_secrets.extend(
                [
                    {
                        "content": fake_value,
                        "path": target_file,
                    }
                    for fake_value, target_file in (
                        ("faketoken", ".adjust_token"),
                        ("faketoken", ".mls_token"),
                        ("https://fake@sentry.prod.mozaws.net/368", ".sentry_token"),
                    )
                ]
            )

        yield task
@ -75,6 +89,7 @@ def build_gradle_command(config, tasks):
        yield task


@transforms.add
def track_apk_size(config, tasks):
    for task in tasks:
@ -89,6 +104,7 @@ def track_apk_size(config, tasks):
        yield task


@transforms.add
def extra_gradle_options(config, tasks):
    for task in tasks:
@ -97,14 +113,13 @@ def extra_gradle_options(config, tasks):
        yield task


@transforms.add
def add_test_build_type(config, tasks):
    for task in tasks:
        test_build_type = task["run"].pop("test-build-type", "")
        if test_build_type:
            task["run"]["gradlew"].append("-PtestBuildType={}".format(test_build_type))
        yield task
@ -120,11 +135,13 @@ def add_disable_optimization(config, tasks):
def add_nightly_version(config, tasks):
    for task in tasks:
        if task.pop("include-nightly-version", False):
            task["run"]["gradlew"].extend(
                [
                    # We only set the `official` flag here. The actual version name will be determined
                    # by Gradle (depending on the Gecko/A-C version being used)
                    "-Pofficial"
                ]
            )
        yield task
@ -132,10 +149,9 @@ def add_nightly_version(config, tasks):
def add_release_version(config, tasks):
    for task in tasks:
        if task.pop("include-release-version", False):
            task["run"]["gradlew"].extend(
                ["-PversionName={}".format(config.params["version"]), "-Pofficial"]
            )
        yield task
@ -150,23 +166,21 @@ def add_artifacts(config, tasks):
        if "apk-artifact-template" in task:
            artifact_template = task.pop("apk-artifact-template")
            for apk in variant_config["apks"]:
                apk_name = artifact_template["name"].format(**apk)
                artifacts.append(
                    {
                        "type": artifact_template["type"],
                        "name": apk_name,
                        "path": artifact_template["path"].format(
                            gradle_build_type=gradle_build_type, **apk
                        ),
                    }
                )
                apks[apk["abi"]] = {
                    "name": apk_name,
                    "github-name": artifact_template["github-name"].format(
                        version=config.params["version"], **apk
                    ),
                }

        yield task
@ -178,5 +192,7 @@ def filter_incomplete_translation(config, tasks):
        if task.pop("filter-incomplete-translations", False):
            # filter-release-translations modifies source, which could cause problems if we ever start caching source
            pre_gradlew = task["run"].setdefault("pre-gradlew", [])
            pre_gradlew.append(
                ["python", "automation/taskcluster/l10n/filter-release-translations.py"]
            )
        yield task
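
For example, with config.params["version"] set to "98.2.0" (hypothetical), add_release_version extends the Gradle invocation with:

    task["run"]["gradlew"].extend(["-PversionName=98.2.0", "-Pofficial"])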

@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the github_re
kind.
"""
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
@ -18,14 +16,18 @@ transforms = TransformSequence()
@transforms.add
def resolve_keys(config, tasks):
    for task in tasks:
        for key in (
            "worker.github-project",
            "worker.is-prerelease",
            "worker.release-name",
        ):
            resolve_keyed_by(
                task,
                key,
                item_name=task["name"],
                **{
                    "build-type": task["attributes"]["build-type"],
                    "level": config.params["level"],
                }
            )
        yield task
@ -38,13 +40,16 @@ def build_worker_definition(config, tasks):
"artifact-map": _build_artifact_map(task),
"git-tag": config.params["head_tag"],
"git-revision": config.params["head_rev"],
"release-name": task["worker"]["release-name"].format(version=config.params["version"]),
"release-name": task["worker"]["release-name"].format(
version=config.params["version"]
),
}
task["worker"].update(worker_definition)
yield task
def _build_artifact_map(task):
artifact_map = []
github_names_per_path = {
@ -55,9 +60,7 @@ def _build_artifact_map(task):
for upstream_artifact_metadata in task["worker"]["upstream-artifacts"]:
artifacts = {"paths": {}, "taskId": upstream_artifact_metadata["taskId"]}
for path in upstream_artifact_metadata["paths"]:
artifacts["paths"][path] = {
"destinations": [github_names_per_path[path]]
}
artifacts["paths"][path] = {"destinations": [github_names_per_path[path]]}
artifact_map.append(artifacts)

@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
kind.
"""
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
@ -20,10 +18,10 @@ def resolve_keys(config, tasks):
    for task in tasks:
        resolve_keyed_by(
            task,
            "scopes",
            item_name=task["name"],
            **{
                "level": config.params["level"],
            }
        )
        yield task
@ -33,10 +31,12 @@ def resolve_keys(config, tasks):
def make_task_description(config, jobs):
    for job in jobs:
        product = "Fenix"
        version = config.params["version"] or "{ver}"
        job["worker"][
            "release-name"
        ] = "{product}-{version}-build{build_number}".format(
            product=product,
            version=version,
            build_number=config.params.get("build_number", 1),
        )

        yield job

@ -5,8 +5,6 @@
Apply some defaults and minor modifications to the single_dep jobs.
"""
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
@ -21,21 +19,20 @@ transforms = TransformSequence()
def build_name_and_attributes(config, tasks):
    for task in tasks:
        task["dependencies"] = {
            dep_key: dep.label for dep_key, dep in _get_all_deps(task).items()
        }
        primary_dep = task["primary-dependency"]
        copy_of_attributes = primary_dep.attributes.copy()
        task.setdefault("attributes", {}).update(copy_of_attributes)
        # run_on_tasks_for is set as an attribute later in the pipeline
        task.setdefault("run-on-tasks-for", copy_of_attributes["run_on_tasks_for"])
        task["name"] = _get_dependent_job_name_without_its_kind(primary_dep)

        yield task


def _get_dependent_job_name_without_its_kind(dependent_job):
    return dependent_job.label[len(dependent_job.kind) + 1 :]


def _get_all_deps(task):
@ -53,8 +50,8 @@ def resolve_keys(config, tasks):
            "treeherder.job-symbol",
            item_name=task["name"],
            **{
                "build-type": task["attributes"]["build-type"],
                "level": config.params["level"],
            }
        )
        yield task
@ -71,21 +68,25 @@ def build_upstream_artifacts(config, tasks):
            # Beetmover tasks use declarative artifacts.
            locale = task["attributes"].get("locale")
            build_type = task["attributes"]["build-type"]
            worker_definition[
                "upstream-artifacts"
            ] = generate_beetmover_upstream_artifacts(config, task, build_type, locale)
        else:
            for dep in _get_all_deps(task).values():
                paths = sorted(
                    [
                        apk_metadata["name"]
                        for apk_metadata in dep.attributes.get("apks", {}).values()
                    ]
                )
                if paths:
                    worker_definition["upstream-artifacts"].append(
                        {
                            "taskId": {"task-reference": "<{}>".format(dep.kind)},
                            "taskType": dep.kind,
                            "paths": paths,
                        }
                    )

        task.setdefault("worker", {}).update(worker_definition)
        yield task

@ -5,14 +5,7 @@
Handle notifications like emails.
"""
from __future__ import absolute_import, print_function, unicode_literals
import copy
import json
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.treeherder import inherit_treeherder_from_dep
from taskgraph.util.schema import resolve_keyed_by
transforms = TransformSequence()
@ -20,22 +13,24 @@ transforms = TransformSequence()
@transforms.add
def add_notify_email(config, tasks):
for task in tasks:
notify = task.pop('notify', {})
email_config = notify.get('email')
notify = task.pop("notify", {})
email_config = notify.get("email")
if email_config:
extra = task.setdefault('extra', {})
notify = extra.setdefault('notify', {})
notify['email'] = {
'content': email_config['content'],
'subject': email_config['subject'],
'link': email_config.get('link', None),
extra = task.setdefault("extra", {})
notify = extra.setdefault("notify", {})
notify["email"] = {
"content": email_config["content"],
"subject": email_config["subject"],
"link": email_config.get("link", None),
}
routes = task.setdefault('routes', [])
routes.extend([
'notify.email.{}.on-{}'.format(address, reason)
for address in email_config['to-addresses']
for reason in email_config['on-reasons']
])
routes = task.setdefault("routes", [])
routes.extend(
[
"notify.email.{}.on-{}".format(address, reason)
for address in email_config["to-addresses"]
for reason in email_config["on-reasons"]
]
)
yield task
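
A minimal sketch of how the route list above expands; the addresses and reasons are invented example values:

email_config = {
    "to-addresses": ["releng@example.com", "qa@example.com"],
    "on-reasons": ["completed", "failed"],
}
# Outer loop over addresses, inner loop over reasons, as in the transform.
routes = [
    "notify.email.{}.on-{}".format(address, reason)
    for address in email_config["to-addresses"]
    for reason in email_config["on-reasons"]
]
# routes ->
# ['notify.email.releng@example.com.on-completed',
#  'notify.email.releng@example.com.on-failed',
#  'notify.email.qa@example.com.on-completed',
#  'notify.email.qa@example.com.on-failed']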

@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
kind.
"""
from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
@ -24,8 +22,8 @@ def resolve_keys(config, tasks):
key,
item_name=task["name"],
**{
'build-type': task["attributes"]["build-type"],
'level': config.params["level"],
"build-type": task["attributes"]["build-type"],
"level": config.params["level"],
}
)
yield task
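
A simplified stand-in for taskgraph's resolve_keyed_by (illustrative only, not its real implementation, which also handles nested fields and pattern keys), showing how a "by-level" keyed value collapses to a concrete one; the secret path is hypothetical:

def resolve_by_key(value, key, key_value):
    # Collapse a {"by-<key>": {...}} mapping to the matching entry,
    # falling back to "default" when the exact value is absent.
    keyed = "by-{}".format(key)
    if isinstance(value, dict) and keyed in value:
        choices = value[keyed]
        return choices.get(str(key_value), choices.get("default"))
    return value

secrets = {"by-level": {"3": ["hypothetical/secret/path"], "default": []}}
assert resolve_by_key(secrets, "level", 3) == ["hypothetical/secret/path"]
assert resolve_by_key(secrets, "level", 1) == []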

@ -5,8 +5,6 @@
Resolve secrets and dummy secrets
"""
from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
@ -19,9 +17,6 @@ def resolve_keys(config, tasks):
for task in tasks:
for key in ("run.secrets", "run.dummy-secrets"):
resolve_keyed_by(
task,
key,
item_name=task["name"],
level=config.params["level"]
task, key, item_name=task["name"], level=config.params["level"]
)
yield task

@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the build
kind.
"""
from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
@ -24,13 +22,14 @@ def resolve_keys(config, tasks):
key,
item_name=task["name"],
**{
'build-type': task["attributes"]["build-type"],
'level': config.params["level"],
'tasks-for': config.params["tasks_for"],
"build-type": task["attributes"]["build-type"],
"level": config.params["level"],
"tasks-for": config.params["tasks_for"],
}
)
yield task
@transforms.add
def set_worker_type(config, tasks):
for task in tasks:
@ -38,7 +37,14 @@ def set_worker_type(config, tasks):
if (
str(config.params["level"]) == "3"
and task["attributes"]["build-type"]
in ("nightly", "beta", "release", "android-test-nightly", "beta-mozillaonline", "release-mozillaonline")
in (
"nightly",
"beta",
"release",
"android-test-nightly",
"beta-mozillaonline",
"release-mozillaonline",
)
and config.params["tasks_for"] in ("cron", "action")
):
worker_type = "signing"
@ -50,13 +56,18 @@ def set_worker_type(config, tasks):
def set_signing_type(config, tasks):
for task in tasks:
signing_type = "dep-signing"
if (
str(config.params["level"]) == "3"
and config.params["tasks_for"] in ("cron", "action")
if str(config.params["level"]) == "3" and config.params["tasks_for"] in (
"cron",
"action",
):
if task["attributes"]["build-type"] in ("beta", "release"):
signing_type = "fennec-production-signing"
elif task["attributes"]["build-type"] in ("nightly", "android-test-nightly", "beta-mozillaonline", "release-mozillaonline"):
elif task["attributes"]["build-type"] in (
"nightly",
"android-test-nightly",
"beta-mozillaonline",
"release-mozillaonline",
):
signing_type = "production-signing"
task.setdefault("worker", {})["signing-type"] = signing_type
yield task
@ -66,11 +77,9 @@ def set_signing_type(config, tasks):
def set_index(config, tasks):
for task in tasks:
index = {}
if (
config.params["tasks_for"] in ("cron", "action")
and task["attributes"]["build-type"]
in ("nightly", "debut", "nightly-simulation", "beta", "release")
):
if config.params["tasks_for"] in ("cron", "action") and task["attributes"][
"build-type"
] in ("nightly", "debut", "nightly-simulation", "beta", "release"):
index["type"] = "signing"
task["index"] = index
yield task
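
The signing transforms above reduce to a small decision table; a condensed sketch that mirrors the set_signing_type logic in the diff:

def pick_signing_type(level, tasks_for, build_type):
    # Production signing only at level 3 and only for cron/action graphs;
    # everything else falls back to dep-signing.
    if str(level) == "3" and tasks_for in ("cron", "action"):
        if build_type in ("beta", "release"):
            return "fennec-production-signing"
        if build_type in (
            "nightly",
            "android-test-nightly",
            "beta-mozillaonline",
            "release-mozillaonline",
        ):
            return "production-signing"
    return "dep-signing"

assert pick_signing_type(3, "cron", "beta") == "fennec-production-signing"
assert pick_signing_type(3, "cron", "nightly") == "production-signing"
assert pick_signing_type(1, "github-pull-request", "beta") == "dep-signing"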

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
@ -15,6 +13,8 @@ def add_pr_number(config, tasks):
for task in tasks:
include_pr = task.pop("include-pull-request-number")
if include_pr and config.params["pull_request_number"]:
task["worker"]["env"]["PULL_REQUEST_NUMBER"] = str(config.params["pull_request_number"])
task["worker"]["env"]["PULL_REQUEST_NUMBER"] = str(
config.params["pull_request_number"]
)
yield task
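
A small sketch of the env injection above, with a hypothetical parameter value standing in for config.params["pull_request_number"]:

task = {"worker": {"env": {}}}
pull_request_number = 12345  # hypothetical; None/0 would skip the injection
if pull_request_number:
    task["worker"]["env"]["PULL_REQUEST_NUMBER"] = str(pull_request_number)
# task["worker"]["env"] -> {"PULL_REQUEST_NUMBER": "12345"}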

@ -6,8 +6,6 @@ Apply some defaults and minor modifications to the jobs defined in the version b
kind.
"""
from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
@ -24,8 +22,8 @@ def resolve_keys(config, tasks):
key,
item_name=task["name"],
**{
'build-type': task["attributes"]["build-type"],
'level': config.params["level"],
"build-type": task["attributes"]["build-type"],
"level": config.params["level"],
}
)
yield task

@ -5,9 +5,6 @@
Generate labels for tasks without names, consistently.
Uses attributes from `primary-dependency`.
"""
from __future__ import absolute_import, print_function, unicode_literals
import os
from taskgraph.transforms.base import TransformSequence
@ -20,31 +17,33 @@ LABEL = "test-vismet-{platform}-{label}"
@transforms.add
def make_label(config, jobs):
""" Generate a sane label for a new task constructed from a dependency
"""Generate a sane label for a new task constructed from a dependency
Using attributes from the dependent job and the current task kind"""
for job in jobs:
dep_job = job['primary-dependency']
dep_job = job["primary-dependency"]
attr = dep_job.attributes.get
if attr('locale', job.get('locale')):
if attr("locale", job.get("locale")):
template = "{kind}-{locale}-{build_platform}/{build_type}"
elif attr('l10n_chunk'):
elif attr("l10n_chunk"):
template = "{kind}-{build_platform}-{l10n_chunk}/{build_type}"
elif config.kind.startswith("release-eme-free") or \
config.kind.startswith("release-partner-repack"):
suffix = job.get("extra", {}).get("repack_suffix", None) or \
job.get("extra", {}).get("repack_id", None)
elif config.kind.startswith("release-eme-free") or config.kind.startswith(
"release-partner-repack"
):
suffix = job.get("extra", {}).get("repack_suffix", None) or job.get(
"extra", {}
).get("repack_id", None)
template = "{kind}-{build_platform}"
if suffix:
template += "-{}".format(suffix.replace('/', '-'))
template += "-{}".format(suffix.replace("/", "-"))
else:
template = "{kind}-{build_platform}/{build_type}"
job['label'] = template.format(
job["label"] = template.format(
kind=config.kind,
build_platform=attr('build_platform'),
build_type=attr('build_type'),
locale=attr('locale', job.get('locale', '')), # Locale can be absent
l10n_chunk=attr('l10n_chunk', '') # Can be empty
build_platform=attr("build_platform"),
build_type=attr("build_type"),
locale=attr("locale", job.get("locale", "")), # Locale can be absent
l10n_chunk=attr("l10n_chunk", ""), # Can be empty
)
yield job
@ -53,41 +52,40 @@ def make_label(config, jobs):
@transforms.add
def run_visual_metrics(config, jobs):
for job in jobs:
dep_job = job.pop('primary-dependency', None)
dep_job = job.pop("primary-dependency", None)
if dep_job is not None:
platform = dep_job.task['extra']['treeherder-platform']
job['dependencies'] = {dep_job.label: dep_job.label}
platform = dep_job.task["extra"]["treeherder-platform"]
job["dependencies"] = {dep_job.label: dep_job.label}
# Add the artifact to be processed as a fetches artifact
job['fetches'][dep_job.label] = [{
'artifact': 'browsertime-results.tgz',
'extract': True
}]
job["fetches"][dep_job.label] = [
{"artifact": "browsertime-results.tgz", "extract": True}
]
# vismet runs on Linux but we want to have it displayed
# alongside the job it was triggered by to make it easier for
# people to find.
job['label'] = LABEL.format(platform=platform, label=dep_job.label)
treeherder_info = dict(dep_job.task['extra']['treeherder'])
job['treeherder']['platform'] = platform
job['treeherder']['symbol'] = SYMBOL.format(
groupSymbol=treeherder_info['groupSymbol'],
symbol=treeherder_info['symbol']
job["label"] = LABEL.format(platform=platform, label=dep_job.label)
treeherder_info = dict(dep_job.task["extra"]["treeherder"])
job["treeherder"]["platform"] = platform
job["treeherder"]["symbol"] = SYMBOL.format(
groupSymbol=treeherder_info["groupSymbol"],
symbol=treeherder_info["symbol"],
)
# Store the platform name so we can use it to calculate
# the similarity metric against other tasks
job['worker'].setdefault('env', {})['TC_PLATFORM'] = platform
job["worker"].setdefault("env", {})["TC_PLATFORM"] = platform
# run-on-projects needs to be set based on the dependent task
attributes = dict(dep_job.attributes)
job['run-on-projects'] = attributes['run_on_projects']
job["run-on-projects"] = attributes["run_on_projects"]
# The run-on-tasks-for also needs to be setup here
job['run-on-tasks-for'] = attributes.get('run_on_tasks_for', [])
job["run-on-tasks-for"] = attributes.get("run_on_tasks_for", [])
# We can't use the multi_dep transforms which remove this
# field, so we remove the dependent-tasks entry here
del job['dependent-tasks']
del job["dependent-tasks"]
yield job
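
A minimal sketch of the vismet labelling above; the platform and dependent label are invented stand-ins for a real browsertime task:

LABEL = "test-vismet-{platform}-{label}"  # as defined earlier in this file
platform = "android-hw-g5"                # hypothetical treeherder platform
dep_label = "browsertime-speedometer"     # hypothetical dependent task label
print(LABEL.format(platform=platform, label=dep_label))
# -> test-vismet-android-hw-g5-browsertime-speedometer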

@ -2,8 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import itertools
import os
from copy import deepcopy
@ -44,7 +42,7 @@ def generate_beetmover_upstream_artifacts(
**{
"release-type": config.params["release_type"],
"platform": platform,
}
},
)
map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
upstream_artifacts = list()
@ -153,7 +151,7 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
**{
"release-type": config.params["release_type"],
"platform": platform,
}
},
)
map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
base_artifact_prefix = map_config.get(
@ -176,9 +174,7 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
map_config,
"s3_bucket_paths",
job["label"],
**{
"build-type": job["attributes"]["build-type"]
}
**{"build-type": job["attributes"]["build-type"]},
)
for locale, dep in sorted(itertools.product(locales, dependencies)):
@ -218,12 +214,7 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
"pretty_name",
"checksums_path",
]:
resolve_keyed_by(
file_config,
field,
job["label"],
locale=locale
)
resolve_keyed_by(file_config, field, job["label"], locale=locale)
# This format string should ideally be in the configuration file,
# but this would mean keeping variable names in sync between code + config.
@ -271,20 +262,16 @@ def generate_beetmover_artifact_map(config, job, **kwargs):
version = read_version_file()
upload_date = datetime.fromtimestamp(config.params["build_date"])
if job["attributes"]["build-type"] == "nightly":
folder_prefix = upload_date.strftime("%Y/%m/%Y-%m-%d-%H-%M-%S-")
# TODO: Remove this when version.txt has versioning fixed
version = version.split('-')[0]
version = version.split("-")[0]
else:
folder_prefix = f"{version}/android/"
kwargs.update(
{
"locale": locale,
"version": version,
"folder_prefix": folder_prefix
}
{"locale": locale, "version": version, "folder_prefix": folder_prefix}
)
kwargs.update(**platforms)
paths = jsone.render(paths, kwargs)
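
A condensed sketch of the nightly-vs-release folder_prefix logic above, with invented versions and a fixed timestamp (local-time output will vary):

from datetime import datetime

def folder_prefix_for(build_type, version, build_date):
    upload_date = datetime.fromtimestamp(build_date)
    if build_type == "nightly":
        # The diff also strips the suffix that version.txt currently carries.
        return upload_date.strftime("%Y/%m/%Y-%m-%d-%H-%M-%S-"), version.split("-")[0]
    return f"{version}/android/", version

print(folder_prefix_for("nightly", "96.0a1-default", 1640995200))
print(folder_prefix_for("release", "96.0", 1640995200))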

@ -2,10 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
from six import text_type
from voluptuous import Any, Required, Optional
from taskgraph.util.schema import taskref_or_string
@ -17,18 +13,18 @@ from taskgraph.transforms.task import payload_builder
schema={
# the maximum time to run, in seconds
Required("max-run-time"): int,
Required("signing-type"): text_type,
Required("signing-type"): str,
# list of artifact URLs for the artifacts that should be signed
Required("upstream-artifacts"): [
{
# taskId of the task with the artifact
Required("taskId"): taskref_or_string,
# type of signing task (for CoT)
Required("taskType"): text_type,
Required("taskType"): str,
# Paths to the artifacts to sign
Required("paths"): [text_type],
Required("paths"): [str],
# Signing formats to use on each of the paths
Required("formats"): [text_type],
Required("formats"): [str],
}
],
},
@ -62,23 +58,27 @@ def build_scriptworker_signing_payload(config, task, task_def):
@payload_builder(
"scriptworker-beetmover",
schema={
Required("action"): text_type,
Required("version"): text_type,
Required("artifact-map"): [{
Required("paths"): {
Any(text_type): {
Required("destinations"): [text_type],
Required("action"): str,
Required("version"): str,
Required("artifact-map"): [
{
Required("paths"): {
Any(str): {
Required("destinations"): [str],
},
},
},
Required("taskId"): taskref_or_string,
}],
Required("beetmover-application-name"): text_type,
Required("bucket"): text_type,
Required("upstream-artifacts"): [{
Required("taskId"): taskref_or_string,
Required("taskType"): text_type,
Required("paths"): [text_type],
}],
Required("taskId"): taskref_or_string,
}
],
Required("beetmover-application-name"): str,
Required("bucket"): str,
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): str,
Required("paths"): [str],
}
],
},
)
def build_scriptworker_beetmover_payload(config, task, task_def):
@ -96,14 +96,16 @@ def build_scriptworker_beetmover_payload(config, task, task_def):
"artifactMap": worker["artifact-map"],
"releaseProperties": {"appName": worker.pop("beetmover-application-name")},
"upstreamArtifacts": worker["upstream-artifacts"],
"version": worker["version"]
"version": worker["version"],
}
scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
task_def["scopes"].extend([
"{}:beetmover:action:{}".format(scope_prefix, worker["action"]),
"{}:beetmover:bucket:{}".format(scope_prefix, worker["bucket"]),
])
task_def["scopes"].extend(
[
"{}:beetmover:action:{}".format(scope_prefix, worker["action"]),
"{}:beetmover:bucket:{}".format(scope_prefix, worker["bucket"]),
]
)
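
The scopes extended above are plain strings; a tiny sketch with a hypothetical scope prefix and worker values:

scope_prefix = "project:mobile:fenix:releng"                 # hypothetical
worker = {"action": "push-to-nightly", "bucket": "nightly"}  # hypothetical
scopes = [
    "{}:beetmover:action:{}".format(scope_prefix, worker["action"]),
    "{}:beetmover:bucket:{}".format(scope_prefix, worker["bucket"]),
]
# -> ['project:mobile:fenix:releng:beetmover:action:push-to-nightly',
#     'project:mobile:fenix:releng:beetmover:bucket:nightly']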
@payload_builder(
@ -112,16 +114,16 @@ def build_scriptworker_beetmover_payload(config, task, task_def):
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): text_type,
Required("paths"): [text_type],
Required("taskType"): str,
Required("paths"): [str],
}
],
Required("certificate-alias"): text_type,
Required("channel"): text_type,
Required("certificate-alias"): str,
Required("channel"): str,
Required("commit"): bool,
Required("product"): text_type,
Required("product"): str,
Required("dep"): bool,
Optional("google-play-track"): text_type,
Optional("google-play-track"): str,
},
)
def build_push_apk_payload(config, task, task_def):
@ -152,11 +154,11 @@ def build_push_apk_payload(config, task, task_def):
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): text_type,
Required("paths"): [text_type],
Required("taskType"): str,
Required("paths"): [str],
}
],
Required("release-name"): text_type,
Required("release-name"): str,
},
)
def build_shipit_payload(config, task, task_def):
@ -164,9 +166,7 @@ def build_shipit_payload(config, task, task_def):
task_def["tags"]["worker-implementation"] = "scriptworker"
task_def['payload'] = {
'release_name': worker['release-name']
}
task_def["payload"] = {"release_name": worker["release-name"]}
@payload_builder(
@ -175,17 +175,17 @@ def build_shipit_payload(config, task, task_def):
Required("upstream-artifacts"): [
{
Required("taskId"): taskref_or_string,
Required("taskType"): text_type,
Required("paths"): [text_type],
Required("taskType"): str,
Required("paths"): [str],
}
],
Required("artifact-map"): [object],
Required("action"): text_type,
Required("git-tag"): text_type,
Required("git-revision"): text_type,
Required("github-project"): text_type,
Required("action"): str,
Required("git-tag"): str,
Required("git-revision"): str,
Required("github-project"): str,
Required("is-prerelease"): bool,
Required("release-name"): text_type,
Required("release-name"): str,
},
)
def build_github_release_payload(config, task, task_def):
@ -203,10 +203,12 @@ def build_github_release_payload(config, task, task_def):
}
scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
task_def["scopes"].extend([
"{}:github:project:{}".format(scope_prefix, worker["github-project"]),
"{}:github:action:{}".format(scope_prefix, worker["action"]),
])
task_def["scopes"].extend(
[
"{}:github:project:{}".format(scope_prefix, worker["github-project"]),
"{}:github:action:{}".format(scope_prefix, worker["action"]),
]
)
@payload_builder(
@ -215,38 +217,38 @@ def build_github_release_payload(config, task, task_def):
Optional("upstream-artifacts"): [
{
Optional("taskId"): taskref_or_string,
Optional("taskType"): text_type,
Optional("paths"): [text_type],
Optional("taskType"): str,
Optional("paths"): [str],
}
],
Required("bump"): bool,
Optional("bump-files"): [text_type],
Optional("bump-files"): [str],
Optional("push"): bool,
Optional("branch"): text_type,
Optional("branch"): str,
},
)
def build_version_bump_payload(config, task, task_def):
worker = task["worker"]
task_def["tags"]["worker-implementation"] = "scriptworker"
task_def['payload'] = {'actions': []}
actions = task_def['payload']['actions']
task_def["payload"] = {"actions": []}
actions = task_def["payload"]["actions"]
if worker['bump']:
if not worker['bump-files']:
if worker["bump"]:
if not worker["bump-files"]:
raise Exception("Version Bump requested without bump-files")
bump_info = {}
bump_info["next_version"] = config.params["next_version"]
bump_info['files'] = worker['bump-files']
task_def['payload']['version_bump_info'] = bump_info
actions.append('version_bump')
bump_info["files"] = worker["bump-files"]
task_def["payload"]["version_bump_info"] = bump_info
actions.append("version_bump")
if worker["push"]:
task_def['payload']['push'] = True
task_def["payload"]["push"] = True
if worker.get('force-dry-run'):
task_def['payload']['dry_run'] = True
if worker.get("force-dry-run"):
task_def["payload"]["dry_run"] = True
if worker.get("branch"):
task_def["payload"]["branch"] = worker["branch"]

@ -0,0 +1,5 @@
# For instructions on managing dependencies, see:
# https://taskcluster-taskgraph.readthedocs.io/en/latest/howto/bootstrap-taskgraph.html
taskcluster-taskgraph>=1.3.0
mozilla-version

@ -0,0 +1,106 @@
#
# This file is autogenerated by pip-compile with python 3.6
# To update, run:
#
# pip-compile --generate-hashes --output-file=requirements.txt requirements.in
#
appdirs==1.4.4 \
--hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \
--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128
# via taskcluster-taskgraph
attrs==21.4.0 \
--hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \
--hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd
# via
# mozilla-version
# taskcluster-taskgraph
certifi==2021.10.8 \
--hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \
--hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569
# via requests
charset-normalizer==2.0.12 \
--hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
--hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
# via requests
future==0.18.2 \
--hash=sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d
# via mozilla-version
idna==3.3 \
--hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \
--hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d
# via requests
json-e==4.4.3 \
--hash=sha256:8ed3974faa887ca96a7987298f6550cf2ad35472419a980766b3abe48258de0a
# via taskcluster-taskgraph
mozilla-version==1.0.0 \
--hash=sha256:678093aacc455a49b6feb6eeb922296c191aca4884e815573156b5966b8b051e \
--hash=sha256:a6d06ac135dd6f4333b9ac1fd04f41f4ba96e83973dd2ac9f70daf577718b96c
# via -r requirements.in
pyyaml==6.0 \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
# via taskcluster-taskgraph
redo==2.0.4 \
--hash=sha256:81066955041c853b0e6491eb65a0877dce45131c4cfa3d42d923fc2aa8f7a043
# via taskcluster-taskgraph
requests==2.27.1 \
--hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \
--hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d
# via
# requests-unixsocket
# taskcluster-taskgraph
requests-unixsocket==0.3.0 \
--hash=sha256:28304283ea9357d45fff58ad5b11e47708cfbf5806817aa59b2a363228ee971e \
--hash=sha256:c685c680f0809e1b2955339b1e5afc3c0022b3066f4f7eb343f43a6065fc0e5d
# via taskcluster-taskgraph
slugid==2.0.0 \
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
# via taskcluster-taskgraph
taskcluster-taskgraph==1.3.0 \
--hash=sha256:a1050f8a613e31fcd7bc4fed85e07e57baa5675a5a5719065caebada533eb8f5 \
--hash=sha256:bd3c35d82296c323064c8b6e6d4a88ab186bcd872bdd930bfe734a0b2525a082
# via -r requirements.in
taskcluster-urls==13.0.1 \
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
--hash=sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b
# via taskcluster-taskgraph
urllib3==1.26.8 \
--hash=sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed \
--hash=sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c
# via requests
voluptuous==0.12.2 \
--hash=sha256:4db1ac5079db9249820d49c891cb4660a6f8cae350491210abce741fabf56513
# via taskcluster-taskgraph