From 004b6ee56e02145113cbb362a03d7106bd83b94d Mon Sep 17 00:00:00 2001 From: Mark Hammond Date: Tue, 10 Dec 2024 10:18:19 -0500 Subject: [PATCH] Reformat Python code so it would pass linting checks in moz-central. This was done by: * Temporarily copying pyproject.toml from the root of m-c to the root here. * Running `ruff check --fix --select I {path}` * Running `ruff check {path}` to see what ruff didn't fix, and hand-fixing them. * Running `black {path}` for final "correct" formatting. * Removing pyproject.toml ruff/black: taskcluster --- automation/cargo-update-pr.py | 43 +- automation/check_protobuf_files_current.py | 14 +- automation/prepare-release.py | 34 +- .../publish_to_maven_local_if_modified.py | 24 +- automation/shared.py | 44 +- automation/smoke-test-android-components.py | 74 ++- automation/smoke-test-fenix.py | 90 +-- automation/smoke-test-fxios.py | 61 +- automation/symbols-generation/symbolstore.py | 492 ++++++++++------- .../symbols-generation/upload_symbols.py | 38 +- automation/tag-release.py | 23 +- automation/tests.py | 423 +++++++++----- .../cirrus/tests/python-tests/conftest.py | 7 +- .../cirrus/tests/python-tests/test_cirrus.py | 7 +- .../cirrus/tests/python-tests/test_fml.py | 3 +- .../python/test/conftest.py | 3 +- .../python/test/test_smoke_test.py | 4 +- .../app_services_taskgraph/__init__.py | 82 +-- .../app_services_taskgraph/branch_builds.py | 30 +- .../app_services_taskgraph/build_config.py | 69 +-- taskcluster/app_services_taskgraph/job.py | 103 ++-- .../loader/build_config.py | 9 +- .../app_services_taskgraph/target_tasks.py | 62 ++- .../transforms/__init__.py | 36 +- .../transforms/appservices.py | 8 +- .../transforms/beetmover.py | 2 +- .../transforms/branch_build.py | 25 +- .../transforms/deps_complete.py | 55 +- .../transforms/module_build.py | 50 +- .../transforms/nimbus.py | 180 +++--- .../transforms/release_artifacts.py | 6 +- .../transforms/release_publish.py | 25 +- .../transforms/secrets.py | 7 +- .../transforms/server_megazord.py | 150 ++--- .../transforms/signing.py | 2 +- .../transforms/toolchain.py | 21 +- .../transforms/worker.py | 15 +- .../app_services_taskgraph/worker_types.py | 98 ++-- taskcluster/scripts/build-and-test-swift.py | 126 +++-- taskcluster/scripts/deps-complete.py | 33 +- .../scripts/generate-nimbus-cli-json.py | 20 +- taskcluster/scripts/generate-release-json.py | 67 ++- taskcluster/scripts/get-secret.py | 86 ++- taskcluster/scripts/nimbus-build.py | 51 +- taskcluster/scripts/server-megazord-build.py | 205 ++++--- .../setup-branch-build-firefox-android.py | 46 +- .../scripts/setup-branch-build-firefox-ios.py | 57 +- taskcluster/scripts/write-dummy-secret.py | 12 +- tools/clean-gradle-autopublish.py | 43 +- tools/clean.py | 33 +- tools/dependency_summary.py | 522 ++++++++++-------- tools/loc_summary.py | 152 ++--- tools/update-moz-central-vendoring.py | 30 +- 53 files changed, 2346 insertions(+), 1556 deletions(-) diff --git a/automation/cargo-update-pr.py b/automation/cargo-update-pr.py index 72f71a5714..43ff2b0e21 100755 --- a/automation/cargo-update-pr.py +++ b/automation/cargo-update-pr.py @@ -8,16 +8,21 @@ # Usage: ./automation/cargo-update-pr.py import argparse -from datetime import datetime import subprocess +import sys import webbrowser +from datetime import datetime -from shared import step_msg, fatal_err, run_cmd_checked, ensure_working_tree_clean +from shared import ensure_working_tree_clean, fatal_err, run_cmd_checked, step_msg -parser = argparse.ArgumentParser(description="Run cargo update and make a 
pull-request against main") -parser.add_argument("--remote", - default="origin", - help="The remote name that corresponds to the Application Services main repository.") +parser = argparse.ArgumentParser( + description="Run cargo update and make a pull-request against main" +) +parser.add_argument( + "--remote", + default="origin", + help="The remote name that corresponds to the Application Services main repository.", +) args = parser.parse_args() remote = args.remote @@ -29,7 +34,11 @@ step_msg(f"Check if branch {branch_name} already exists") -res = subprocess.run(["git", "show-ref", "--verify", f"refs/heads/{branch_name}"], capture_output=True) +res = subprocess.run( + ["git", "show-ref", "--verify", f"refs/heads/{branch_name}"], + capture_output=True, + check=False, +) if res.returncode == 0: fatal_err(f"The branch {branch_name} already exists!") @@ -45,21 +54,27 @@ while True: step_msg("Regenerating dependency summaries") - res = subprocess.run(["./tools/regenerate_dependency_summaries.sh"]) + res = subprocess.run(["./tools/regenerate_dependency_summaries.sh"], check=False) if res.returncode == 0: break print("It looks like the dependency summary generation script couldn't complete.") input("Please fix the issue then press any key to try again.") -step_msg(f"Creating a commit with the changes") -run_cmd_checked(["git", "add", "-A"]) # We can use -A since we checked the working dir is clean. +step_msg("Creating a commit with the changes") +run_cmd_checked( + ["git", "add", "-A"] +) # We can use -A since we checked the working dir is clean. run_cmd_checked(["git", "commit", "-m", "Run cargo update [ci full]"]) step_msg("Print summary of changes") run_cmd_checked(["git", "show", "--stat"]) -response = input("Great! Would you like to push and open a pull-request? ([Y]/N)").lower() -if response != "y" and response != "" and response != "yes": - exit(0) +response = input( + "Great! Would you like to push and open a pull-request? ([Y]/N)" +).lower() +if response not in ("y", "", "yes"): + sys.exit(0) run_cmd_checked(["git", "push", remote, branch_name]) -webbrowser.open_new_tab(f"https://github.com/mozilla/application-services/pull/new/{branch_name}") +webbrowser.open_new_tab( + f"https://github.com/mozilla/application-services/pull/new/{branch_name}" +) diff --git a/automation/check_protobuf_files_current.py b/automation/check_protobuf_files_current.py index 5b8a040b5b..fd53e652e5 100755 --- a/automation/check_protobuf_files_current.py +++ b/automation/check_protobuf_files_current.py @@ -7,7 +7,13 @@ # Dependencies: None # Usage: ./automation/cargo-update-pr.py -from shared import step_msg, fatal_err, run_cmd_checked, find_app_services_root, ensure_working_tree_clean +from shared import ( + # ensure_working_tree_clean, + fatal_err, + find_app_services_root, + run_cmd_checked, + step_msg, +) step_msg("Checking that the generated protobuf Rust files are up-to-date") # ensure_working_tree_clean() @@ -16,7 +22,9 @@ if run_cmd_checked(["git", "status", "--porcelain"], capture_output=True).stdout: run_cmd_checked(["git", "status"]) - fatal_err(""" + fatal_err( + """ The protobuf rust files are outdated. 
You can fix this yourself by running cargo run --bin protobuf-gen /tools/protobuf_files.toml - """) + """ + ) diff --git a/automation/prepare-release.py b/automation/prepare-release.py index 01de005b31..5fa7c32717 100755 --- a/automation/prepare-release.py +++ b/automation/prepare-release.py @@ -7,11 +7,19 @@ # Dependencies: yaml # Usage: ./automation/prepare-release.py -from datetime import datetime +import sys import webbrowser +from datetime import datetime -from shared import (RefNames, get_moz_remote, step_msg, fatal_err, run_cmd_checked, - ensure_working_tree_clean, check_output) +from shared import ( + RefNames, + check_output, + ensure_working_tree_clean, + fatal_err, + get_moz_remote, + run_cmd_checked, + step_msg, +) # Constants VERSION_FILE = "version.txt" @@ -24,7 +32,7 @@ with open(VERSION_FILE, "r") as stream: cur_version = stream.read().strip() -major_version_number = int(cur_version.split('.')[0]) +major_version_number = int(cur_version.split(".")[0]) next_version_number = major_version_number + 1 release_version = f"{major_version_number}.0" refs = RefNames(major_version_number, 0) @@ -97,7 +105,7 @@ f"# v{next_version_number}.0 (In progress)", "", "[Full Changelog](In progress)", - "" + "", ] with open(CHANGELOG_FILE, "w") as stream: stream.write("\n".join(changelog)) @@ -108,13 +116,19 @@ run_cmd_checked(["git", "commit", "-m", f"Start release v{next_version_number}"]) print() -response = input("Great! Would you like to push and open the two pull-requests? ([Y]/N)").lower() -if response != "y" and response != "" and response != "yes": - exit(0) +response = input( + "Great! Would you like to push and open the two pull-requests? ([Y]/N)" +).lower() +if response not in ("y", "", "yes"): + sys.exit(0) run_cmd_checked(["git", "push", moz_remote, refs.release_pr]) run_cmd_checked(["git", "push", moz_remote, refs.start_release_pr]) -webbrowser.open_new_tab(f"https://github.com/mozilla/application-services/compare/{refs.release}...{refs.release_pr}") -webbrowser.open_new_tab(f"https://github.com/mozilla/application-services/compare/{refs.main}...{refs.start_release_pr}") +webbrowser.open_new_tab( + f"https://github.com/mozilla/application-services/compare/{refs.release}...{refs.release_pr}" +) +webbrowser.open_new_tab( + f"https://github.com/mozilla/application-services/compare/{refs.main}...{refs.start_release_pr}" +) run_cmd_checked(["git", "checkout", refs.main]) diff --git a/automation/publish_to_maven_local_if_modified.py b/automation/publish_to_maven_local_if_modified.py index a0de7874c3..1c3d7a14b5 100755 --- a/automation/publish_to_maven_local_if_modified.py +++ b/automation/publish_to_maven_local_if_modified.py @@ -7,30 +7,38 @@ # Dependencies: None # Usage: ./automation/publish_to_maven_local_if_modified.py +import argparse +import hashlib import os import sys import time -import hashlib -import argparse -from shared import run_cmd_checked, find_app_services_root, fatal_err -import re + +from shared import fatal_err, find_app_services_root, run_cmd_checked LAST_CONTENTS_HASH_FILE = ".lastAutoPublishContentsHash" GITIGNORED_FILES_THAT_AFFECT_THE_BUILD = ["local.properties"] -parser = argparse.ArgumentParser(description="Publish android packages to local maven repo, but only if changed since last publish") +parser = argparse.ArgumentParser( + description="Publish android packages to local maven repo, but only if changed since last publish" +) parser.parse_args() root_dir = find_app_services_root() if str(root_dir) != os.path.abspath(os.curdir): - fatal_err(f"This only 
works if run from the repo root ({root_dir!r} != {os.path.abspath(os.curdir)!r})") + fatal_err( + f"This only works if run from the repo root ({root_dir!r} != {os.path.abspath(os.curdir)!r})" + ) # This doesn't work on "native" windows, so let's get that out of the way now. if sys.platform.startswith("win"): print("NOTE: The autoPublish workflows do not work on native windows.") - print("You must follow the instructions in /docs/howtos/setup-android-build-environment.md#using-windows") - print("then, manually ensure that the following command has completed successfully in WSL:") + print( + "You must follow the instructions in /docs/howtos/setup-android-build-environment.md#using-windows" + ) + print( + "then, manually ensure that the following command has completed successfully in WSL:" + ) print(sys.argv) print(f"(from the '{root_dir}' directory)") print("Then restart the build") diff --git a/automation/shared.py b/automation/shared.py index 171258fc26..3a490e7cb2 100644 --- a/automation/shared.py +++ b/automation/shared.py @@ -4,31 +4,38 @@ # Common code used by the automation python scripts. +import os import subprocess +import sys from pathlib import Path def step_msg(msg): print(f"> \033[34m{msg}\033[0m") + def fatal_err(msg): print(f"\033[31mError: {msg}\033[0m") - exit(1) + sys.exit(1) + def run_cmd_checked(*args, **kwargs): """Run a command, throwing an exception if it exits with non-zero status.""" kwargs["check"] = True - return subprocess.run(*args, **kwargs) + return subprocess.run(*args, **kwargs) # noqa: PLW1510 + def check_output(*args, **kwargs): """Run a command, throwing an exception if it exits with non-zero status.""" - return subprocess.check_output(*args, **kwargs, encoding='utf8') + return subprocess.check_output(*args, **kwargs, encoding="utf8") + def ensure_working_tree_clean(): """Error out if there are un-committed or staged files in the working tree.""" if run_cmd_checked(["git", "status", "--porcelain"], capture_output=True).stdout: fatal_err("The working tree has un-committed or staged files.") + def find_app_services_root(): """Find the absolute path of the Application services repository root.""" cur_dir = Path(__file__).parent @@ -36,18 +43,23 @@ def find_app_services_root(): cur_dir = cur_dir.parent return cur_dir.absolute() + def get_moz_remote(): """ Get the name of the remote for the official mozilla application-services repo """ for line in check_output(["git", "remote", "-v"]).splitlines(): split = line.split() - if (len(split) == 3 - and split[1] == 'git@github.com:mozilla/application-services.git' - and split[2] == '(push)'): + if ( + len(split) == 3 + and split[1] == "git@github.com:mozilla/application-services.git" + and split[2] == "(push)" + ): return split[0] - else: - fatal_err("Can't find remote origin for git@github.com:mozilla/application-services.git") + fatal_err( + "Can't find remote origin for git@github.com:mozilla/application-services.git" + ) + def set_gradle_substitution_path(project_dir, name, value): """Set a substitution path property in a gradle `local.properties` file. @@ -69,22 +81,25 @@ def set_gradle_substitution_path(project_dir, name, value): for ln in f: # Not exactly a thorough parser, but should be good enough... 
if ln.startswith(name_eq): - cur_value = ln[len(name_eq):].strip() + cur_value = ln[len(name_eq) :].strip() if Path(project_dir, cur_value).resolve() != abs_value: - fatal_error(f"Conflicting property {name}={cur_value} (not {abs_value})") + fatal_err( + f"Conflicting property {name}={cur_value} (not {abs_value})" + ) return # The file does not contain the required property, append it. # Note that the project probably expects a path relative to the project root. ancestor = Path(os.path.commonpath([project_dir, abs_value])) relpath = Path(".") - for _ in project_dir.parts[len(ancestor.parts):]: + for _ in project_dir.parts[len(ancestor.parts) :]: relpath /= ".." - for nm in abs_value.parts[len(ancestor.parts):]: + for nm in abs_value.parts[len(ancestor.parts) :]: relpath /= nm step_msg(f"Setting relative path from {project_dir} to {abs_value} as {relpath}") with properties_file.open("a") as f: f.write(f"{name}={relpath}\n") + class RefNames: """ Contains the branch and tag names we use for automation. @@ -95,6 +110,7 @@ class RefNames: release_pr_branch -- Used for PRs against release_branch for a new version start_release_pr_branch -- Used for PRs against main to start a new major release """ + def __init__(self, major_version_number, minor_version_number): major_version_number = int(major_version_number) minor_version_number = int(minor_version_number) @@ -106,4 +122,6 @@ def __init__(self, major_version_number, minor_version_number): if minor_version_number == 0: self.previous_version_tag = f"v{major_version_number-1}.0" else: - self.previous_version_tag = f"v{major_version_number}.{minor_version_number-1}" + self.previous_version_tag = ( + f"v{major_version_number}.{minor_version_number-1}" + ) diff --git a/automation/smoke-test-android-components.py b/automation/smoke-test-android-components.py index 89d2551ee1..f86e8241b9 100755 --- a/automation/smoke-test-android-components.py +++ b/automation/smoke-test-android-components.py @@ -9,29 +9,43 @@ import argparse import re -import subprocess +import sys import tempfile from pathlib import Path + import yaml -from shared import step_msg, fatal_err, run_cmd_checked, find_app_services_root, set_gradle_substitution_path +from shared import ( + fatal_err, + find_app_services_root, + run_cmd_checked, + set_gradle_substitution_path, + step_msg, +) -parser = argparse.ArgumentParser(description="Run android-components tests against this application-services working tree.") +parser = argparse.ArgumentParser( + description="Run android-components tests against this application-services working tree." +) group = parser.add_mutually_exclusive_group() -group.add_argument("--use-local-repo", - metavar="LOCAL_REPO_PATH", - help="Use a local copy of a-c instead of cloning it.") -group.add_argument("--remote-repo-url", - metavar="REMOTE_REPO_URL", - help="Clone a different a-c repository.") -parser.add_argument("--branch", - help="Branch of a-c to use.") -parser.add_argument("--action", - # XXX TODO: it would be very nice to have a "launch sample app" helper here as well. 
- choices=["run-tests", "do-nothing"], - help="Run the following action once a-c is set up.") +group.add_argument( + "--use-local-repo", + metavar="LOCAL_REPO_PATH", + help="Use a local copy of a-c instead of cloning it.", +) +group.add_argument( + "--remote-repo-url", + metavar="REMOTE_REPO_URL", + help="Clone a different a-c repository.", +) +parser.add_argument("--branch", help="Branch of a-c to use.") +parser.add_argument( + "--action", + # XXX TODO: it would be very nice to have a "launch sample app" helper here as well. + choices=["run-tests", "do-nothing"], + help="Run the following action once a-c is set up.", +) -DEFAULT_REMOTE_REPO_URL="https://github.com/mozilla-mobile/android-components.git" +DEFAULT_REMOTE_REPO_URL = "https://github.com/mozilla-mobile/android-components.git" args = parser.parse_args() local_repo_path = args.use_local_repo @@ -49,21 +63,29 @@ if branch is not None: run_cmd_checked(["git", "checkout", branch], cwd=repo_path) elif branch is not None: - fatal_err("Cannot specify fenix branch when using a local repo; check it out locally and try again.") + fatal_err( + "Cannot specify fenix branch when using a local repo; check it out locally and try again." + ) step_msg(f"Configuring {repo_path} to autopublish appservices") -set_gradle_substitution_path(repo_path, "autoPublish.application-services.dir", find_app_services_root()) +set_gradle_substitution_path( + repo_path, "autoPublish.application-services.dir", find_app_services_root() +) if action == "do-nothing": - exit(0) + sys.exit(0) elif action == "run-tests" or action is None: # There are a lot of non-app-services-related components and we don't want to run all their tests. # Read the build config to find which projects actually depend on appservices. # It's a bit gross but it makes the tests run faster! # First, find out what names a-c uses to refer to apservices projects in dependency declarations. dep_names = set() - dep_pattern = re.compile("\s*const val ([A-Za-z0-9_]+) = .*Versions.mozilla_appservices") - with Path(repo_path, "buildSrc", "src", "main", "java", "Dependencies.kt").open() as f: + dep_pattern = re.compile( + "\\s*const val ([A-Za-z0-9_]+) = .*Versions.mozilla_appservices" + ) + with Path( + repo_path, "buildSrc", "src", "main", "java", "Dependencies.kt" + ).open() as f: for ln in f: m = dep_pattern.match(ln) if m is not None: @@ -73,7 +95,7 @@ projects = set() with Path(repo_path, ".buildconfig.yml").open() as f: buildconfig = yaml.safe_load(f.read()) - for (project, details) in buildconfig["projects"].items(): + for project, details in buildconfig["projects"].items(): build_dot_gradle = Path(repo_path, details["path"], "build.gradle") if not build_dot_gradle.exists(): build_dot_gradle = Path(repo_path, details["path"], "build.gradle.kts") @@ -85,7 +107,11 @@ if dep_name in ln: projects.add(project) break - step_msg(f"Running android-components tests for {len(projects)} projects: {projects}") - run_cmd_checked(["./gradlew"] + [f"{project}:test" for project in projects], cwd=repo_path) + step_msg( + f"Running android-components tests for {len(projects)} projects: {projects}" + ) + run_cmd_checked( + ["./gradlew"] + [f"{project}:test" for project in projects], cwd=repo_path + ) else: print("Sorry I did not understand what you wanted. 
Good luck!") diff --git a/automation/smoke-test-fenix.py b/automation/smoke-test-fenix.py index 00a1e0e51f..c3afffb76e 100755 --- a/automation/smoke-test-fenix.py +++ b/automation/smoke-test-fenix.py @@ -8,38 +8,55 @@ # Usage: ./automation/smoke-test-fenix.py import argparse -import subprocess +import sys import tempfile -from pathlib import Path -from shared import step_msg, fatal_err, run_cmd_checked, find_app_services_root, set_gradle_substitution_path -parser = argparse.ArgumentParser(description="Run Fenix tests against this application-services working tree.") +from shared import ( + fatal_err, + find_app_services_root, + run_cmd_checked, + set_gradle_substitution_path, + step_msg, +) + +parser = argparse.ArgumentParser( + description="Run Fenix tests against this application-services working tree." +) group = parser.add_mutually_exclusive_group() -group.add_argument("--use-local-repo", - metavar="LOCAL_REPO_PATH", - help="Use a local copy of fenix instead of cloning it.") -group.add_argument("--remote-repo-url", - metavar="REMOTE_REPO_URL", - help="Clone a different fenix repository.") +group.add_argument( + "--use-local-repo", + metavar="LOCAL_REPO_PATH", + help="Use a local copy of fenix instead of cloning it.", +) +group.add_argument( + "--remote-repo-url", + metavar="REMOTE_REPO_URL", + help="Clone a different fenix repository.", +) group = parser.add_mutually_exclusive_group() -group.add_argument("--use-local-ac-repo", - metavar="LOCAL_AC_REPO_PATH", - help="Use a local copy of a-c instead of latest release") -group.add_argument("--remote-ac-repo-url", - metavar="REMOTE_AC_REPO_URL", - help="Use a clone of a-c repo instead of latest release.") -parser.add_argument("--branch", - help="Branch of fenix to use.") -parser.add_argument("--ac-branch", - default="main", - help="Branch of android-components to use.") -parser.add_argument("--action", - # XXX TODO: it would be very nice to have a "launch the app" helper here as well. - choices=["run-tests", "do-nothing"], - help="Run the following action once fenix is set up.") +group.add_argument( + "--use-local-ac-repo", + metavar="LOCAL_AC_REPO_PATH", + help="Use a local copy of a-c instead of latest release", +) +group.add_argument( + "--remote-ac-repo-url", + metavar="REMOTE_AC_REPO_URL", + help="Use a clone of a-c repo instead of latest release.", +) +parser.add_argument("--branch", help="Branch of fenix to use.") +parser.add_argument( + "--ac-branch", default="main", help="Branch of android-components to use." +) +parser.add_argument( + "--action", + # XXX TODO: it would be very nice to have a "launch the app" helper here as well. + choices=["run-tests", "do-nothing"], + help="Run the following action once fenix is set up.", +) -DEFAULT_REMOTE_REPO_URL="https://github.com/mozilla-mobile/fenix.git" +DEFAULT_REMOTE_REPO_URL = "https://github.com/mozilla-mobile/fenix.git" args = parser.parse_args() local_repo_path = args.use_local_repo @@ -60,7 +77,9 @@ if fenix_branch is not None: run_cmd_checked(["git", "checkout", fenix_branch], cwd=repo_path) elif fenix_branch is not None: - fatal_err("Cannot specify fenix branch when using a local repo; check it out locally and try again.") + fatal_err( + "Cannot specify fenix branch when using a local repo; check it out locally and try again." 
+ ) ac_repo_path = local_ac_repo_path if ac_repo_path is None: @@ -72,16 +91,23 @@ run_cmd_checked(["git", "checkout", ac_branch], cwd=ac_repo_path) elif ac_branch is not None: fatal_err( - "Cannot specify a-c branch when using a local repo; check it out locally and try again.") + "Cannot specify a-c branch when using a local repo; check it out locally and try again." + ) step_msg(f"Configuring {repo_path} to autopublish appservices") -set_gradle_substitution_path(repo_path, "autoPublish.application-services.dir", find_app_services_root()) +set_gradle_substitution_path( + repo_path, "autoPublish.application-services.dir", find_app_services_root() +) if ac_repo_path is not None: - step_msg(f"Configuring {repo_path} to autopublish android-components from {ac_repo_path}") - set_gradle_substitution_path(repo_path, "autoPublish.android-components.dir", ac_repo_path) + step_msg( + f"Configuring {repo_path} to autopublish android-components from {ac_repo_path}" + ) + set_gradle_substitution_path( + repo_path, "autoPublish.android-components.dir", ac_repo_path + ) if action == "do-nothing": - exit(0) + sys.exit(0) elif action == "run-tests" or action is None: # Fenix has unittest targets for a wide variety of different configurations. # It's not useful to us to run them all, so just pick the one that sounds like it's diff --git a/automation/smoke-test-fxios.py b/automation/smoke-test-fxios.py index 8e86598708..eb8df81183 100755 --- a/automation/smoke-test-fxios.py +++ b/automation/smoke-test-fxios.py @@ -8,26 +8,34 @@ import argparse import subprocess +import sys import tempfile -from pathlib import Path -from shared import step_msg, fatal_err, run_cmd_checked, find_app_services_root -parser = argparse.ArgumentParser(description="Run Firefox-iOS tests against this application-services working tree.") +from shared import fatal_err, find_app_services_root, run_cmd_checked, step_msg + +parser = argparse.ArgumentParser( + description="Run Firefox-iOS tests against this application-services working tree." 
+) group = parser.add_mutually_exclusive_group() -group.add_argument("--use-local-repo", - metavar="LOCAL_REPO_PATH", - help="Use a local copy of firefox-ios instead of cloning it.") -group.add_argument("--remote-repo-url", - metavar="REMOTE_REPO_PATH", - help="Clone a different firefox-ios repository.") -parser.add_argument("--branch", - help="Branch of firefox-ios to use.") -parser.add_argument("--action", - choices=["open-project", "run-tests", "do-nothing"], - help="Run the following action once firefox-ios is set up.") +group.add_argument( + "--use-local-repo", + metavar="LOCAL_REPO_PATH", + help="Use a local copy of firefox-ios instead of cloning it.", +) +group.add_argument( + "--remote-repo-url", + metavar="REMOTE_REPO_PATH", + help="Clone a different firefox-ios repository.", +) +parser.add_argument("--branch", help="Branch of firefox-ios to use.") +parser.add_argument( + "--action", + choices=["open-project", "run-tests", "do-nothing"], + help="Run the following action once firefox-ios is set up.", +) -DEFAULT_REMOTE_REPO_URL="https://github.com/mozilla-mobile/firefox-ios.git" -DEFAULT_RCS_REPO_URL="https://github.com/mozilla/rust-components-swift.git" +DEFAULT_REMOTE_REPO_URL = "https://github.com/mozilla-mobile/firefox-ios.git" +DEFAULT_RCS_REPO_URL = "https://github.com/mozilla/rust-components-swift.git" args = parser.parse_args() firefox_ios_branch = args.branch @@ -47,28 +55,33 @@ if firefox_ios_branch is not None: run_cmd_checked(["git", "checkout", firefox_ios_branch], cwd=ios_repo_path) elif firefox_ios_branch is not None: - fatal_err("Cannot specify branch when using a local repo; check it out locally and try again.") + fatal_err( + "Cannot specify branch when using a local repo; check it out locally and try again." + ) step_msg("Cloning rust-components-swift") rcs_repo_path = tempfile.mkdtemp(suffix="-rcs") run_cmd_checked(["git", "clone", DEFAULT_RCS_REPO_URL, rcs_repo_path]) step_msg("Setting up iOS to use the local application services") -run_cmd_checked(["./rust_components_local.sh", "-a", appservices_path, rcs_repo_path], cwd=ios_repo_path) +run_cmd_checked( + ["./rust_components_local.sh", "-a", appservices_path, rcs_repo_path], + cwd=ios_repo_path, +) step_msg("Running the firefox-ios bootstrap script") run_cmd_checked(["./bootstrap.sh"], cwd=ios_repo_path) - if action == "do-nothing": - exit(0) + sys.exit(0) elif action == "open-project": run_cmd_checked(["open", "Client.xcodeproj"], cwd=ios_repo_path) elif action == "run-tests" or action is None: # TODO: we specify scheme = Fennec, but it might be wrong? Check with iOS peeps. step_msg("Running firefox-ios tests") - subprocess.run("""\ + subprocess.run( + """\ set -o pipefail && \ xcodebuild \ -workspace ./Client.xcodeproj/project.xcworkspace \ @@ -78,6 +91,10 @@ test | \ tee raw_xcodetest.log | \ xcpretty && exit "${PIPESTATUS[0]}" - """, cwd=ios_repo_path, shell=True) + """, + cwd=ios_repo_path, + shell=True, + check=False, + ) else: print("Sorry I did not understand what you wanted. 
Good luck!") diff --git a/automation/symbols-generation/symbolstore.py b/automation/symbols-generation/symbolstore.py index e891ac36f1..64a0605257 100644 --- a/automation/symbols-generation/symbolstore.py +++ b/automation/symbols-generation/symbolstore.py @@ -23,34 +23,34 @@ from __future__ import print_function +import ctypes import errno -import sys -import platform import io import os +import platform import re import shutil -import textwrap import subprocess +import sys +import textwrap import time -import ctypes - from optparse import OptionParser # Utility classes + class VCSFileInfo: - """ A base class for version-controlled file information. Ensures that the - following attributes are generated only once (successfully): + """A base class for version-controlled file information. Ensures that the + following attributes are generated only once (successfully): - self.root - self.clean_root - self.revision - self.filename + self.root + self.clean_root + self.revision + self.filename - The attributes are generated by a single call to the GetRoot, - GetRevision, and GetFilename methods. Those methods are explicitly not - implemented here and must be implemented in derived classes. """ + The attributes are generated by a single call to the GetRoot, + GetRevision, and GetFilename methods. Those methods are explicitly not + implemented here and must be implemented in derived classes.""" def __init__(self, file): if not file: @@ -58,10 +58,10 @@ def __init__(self, file): self.file = file def __getattr__(self, name): - """ __getattr__ is only called for attributes that are not set on self, - so setting self.[attr] will prevent future calls to the GetRoot, - GetRevision, and GetFilename methods. We don't set the values on - failure on the off chance that a future call might succeed. """ + """__getattr__ is only called for attributes that are not set on self, + so setting self.[attr] will prevent future calls to the GetRoot, + GetRevision, and GetFilename methods. We don't set the values on + failure on the off chance that a future call might succeed.""" if name == "root": root = self.GetRoot() @@ -90,32 +90,37 @@ def __getattr__(self, name): raise AttributeError def GetRoot(self): - """ This method should return the unmodified root for the file or 'None' - on failure. """ + """This method should return the unmodified root for the file or 'None' + on failure.""" raise NotImplementedError def GetCleanRoot(self): - """ This method should return the repository root for the file or 'None' - on failure. """ + """This method should return the repository root for the file or 'None' + on failure.""" raise NotImplementedError def GetRevision(self): - """ This method should return the revision number for the file or 'None' - on failure. """ + """This method should return the revision number for the file or 'None' + on failure.""" raise NotImplementedError def GetFilename(self): - """ This method should return the repository-specific filename for the - file or 'None' on failure. """ + """This method should return the repository-specific filename for the + file or 'None' on failure.""" raise NotImplementedError + # This regex finds out the org and the repo from a git remote URL. 
-githubRegex = re.compile(r'^(?:https://github.com/|git@github.com:)([^/]+)/([^/]+?)(?:.git)?$') +githubRegex = re.compile( + r"^(?:https://github.com/|git@github.com:)([^/]+)/([^/]+?)(?:.git)?$" +) + def read_output(*args): (stdout, _) = subprocess.Popen(args=args, stdout=subprocess.PIPE).communicate() return stdout.decode("utf-8").rstrip() + def get_version(srcdirs): for dir in srcdirs: path_try = os.path.join(dir, "version.txt") @@ -123,34 +128,54 @@ def get_version(srcdirs): return open(path_try).read().strip() return "Unknown" + class GitHubRepoInfo: """ Info about a locally cloned Git repository that has its "origin" remote on GitHub. """ + def __init__(self, path): self.path = path - if 'APPSERVICES_HEAD_REPOSITORY' in os.environ: - remote_url = os.environ['APPSERVICES_HEAD_REPOSITORY'] + if "APPSERVICES_HEAD_REPOSITORY" in os.environ: + remote_url = os.environ["APPSERVICES_HEAD_REPOSITORY"] else: - remote_url = read_output('git', '-C', path, 'remote', 'get-url', 'origin') + remote_url = read_output("git", "-C", path, "remote", "get-url", "origin") match = githubRegex.match(remote_url) if match is None: - print(textwrap.dedent("""\ + print( + textwrap.dedent( + """\ Could not determine repo info for %s (%s). This is probably because - the repo is not one that was cloned from a GitHub remote.""") % (path), file=sys.stderr) + the repo is not one that was cloned from a GitHub remote.""" + ) + % (path), + file=sys.stderr, + ) sys.exit(1) (org, repo) = match.groups() cleanroot = "github.com/%s/%s" % (org, repo) # Try to get a tag if possible, otherwise get a git hash. rev = None - p = subprocess.Popen(args=['git', '-C', path, 'name-rev', '--name-only', '--tags', 'HEAD', '--no-undefined'], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p = subprocess.Popen( + args=[ + "git", + "-C", + path, + "name-rev", + "--name-only", + "--tags", + "HEAD", + "--no-undefined", + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) (stdout, _) = p.communicate() if p.returncode == 0: rev = stdout.decode("utf-8").rstrip() else: - rev = read_output('git', '-C', path, 'rev-parse', 'HEAD') + rev = read_output("git", "-C", path, "rev-parse", "HEAD") root = "https://raw.githubusercontent.com/%s/%s/%s/" % (org, repo, rev) @@ -161,6 +186,7 @@ def __init__(self, path): def GetFileInfo(self, file): return GitFileInfo(file, self) + class GitFileInfo(VCSFileInfo): def __init__(self, file, repo): VCSFileInfo.__init__(self, file) @@ -183,76 +209,81 @@ def GetFilename(self): return "git:%s:%s:%s" % (self.clean_root, self.file, self.revision) return self.file + # Utility functions # A cache of files for which VCS info has already been determined. Used to # prevent extra filesystem activity or process launching. vcsFileInfoCache = {} -if platform.system() == 'Windows': +if platform.system() == "Windows": + def normpath(path): - ''' + """ Normalize a path using `GetFinalPathNameByHandleW` to get the path with all components in the case they exist in on-disk, so that making links to a case-sensitive server (hg.mozilla.org) works. This function also resolves any symlinks in the path. - ''' + """ # Return the original path if something fails, which can happen for paths that # don't exist on this system (like paths from the CRT). 
         result = path
         ctypes.windll.kernel32.SetErrorMode(ctypes.c_uint(1))
-        if not isinstance(path, unicode):
-            path = unicode(path, sys.getfilesystemencoding())
-        handle = ctypes.windll.kernel32.CreateFileW(path,
-                                                    # GENERIC_READ
-                                                    0x80000000,
-                                                    # FILE_SHARE_READ
-                                                    1,
-                                                    None,
-                                                    # OPEN_EXISTING
-                                                    3,
-                                                    # FILE_FLAG_BACKUP_SEMANTICS
-                                                    # This is necessary to open
-                                                    # directory handles.
-                                                    0x02000000,
-                                                    None)
+        # if "bytes" was possible we'd decode it using sys.getfilesystemencoding()
+        assert isinstance(path, str)
+        handle = ctypes.windll.kernel32.CreateFileW(
+            path,
+            # GENERIC_READ
+            0x80000000,
+            # FILE_SHARE_READ
+            1,
+            None,
+            # OPEN_EXISTING
+            3,
+            # FILE_FLAG_BACKUP_SEMANTICS
+            # This is necessary to open
+            # directory handles.
+            0x02000000,
+            None,
+        )
         if handle != -1:
-            size = ctypes.windll.kernel32.GetFinalPathNameByHandleW(handle,
-                                                                    None,
-                                                                    0,
-                                                                    0)
+            size = ctypes.windll.kernel32.GetFinalPathNameByHandleW(handle, None, 0, 0)
             buf = ctypes.create_unicode_buffer(size)
-            if ctypes.windll.kernel32.GetFinalPathNameByHandleW(handle,
-                                                                buf,
-                                                                size,
-                                                                0) > 0:
+            if (
+                ctypes.windll.kernel32.GetFinalPathNameByHandleW(handle, buf, size, 0)
+                > 0
+            ):
                 # The return value of GetFinalPathNameByHandleW uses the
                 # '\\?\' prefix.
                 result = buf.value.encode(sys.getfilesystemencoding())[4:]
             ctypes.windll.kernel32.CloseHandle(handle)
         return result
+
 else:
     # Just use the os.path version otherwise.
     normpath = os.path.normpath
 
+
 def IsInDir(file, dir):
     # the lower() is to handle win32+vc8, where
     # the source filenames come out all lowercase,
     # but the srcdir can be mixed case
     return os.path.abspath(file).lower().startswith(os.path.abspath(dir).lower())
 
+
 def GetVCSFilenameFromSrcdir(file, srcdir):
     if srcdir not in Dumper.srcdirRepoInfo:
         # Not in cache, so find it and cache it
-        if os.path.isdir(os.path.join(srcdir, '.git')):
+        if os.path.isdir(os.path.join(srcdir, ".git")):
             Dumper.srcdirRepoInfo[srcdir] = GitHubRepoInfo(srcdir)
         else:
             # Unknown VCS or file is not in a repo.
             return None
     return Dumper.srcdirRepoInfo[srcdir].GetFileInfo(file)
 
+
 def GetVCSFilename(file, srcdirs):
     """Given a full path to a file, and the top source directory,
     look for version control information about this file, and return
@@ -264,11 +295,11 @@ def GetVCSFilename(file, srcdirs):
     cvs:cvs.mozilla.org/cvsroot:mozilla/browser/app/nsBrowserApp.cpp:1.36
     2) the unmodified root information if it exists"""
     (path, filename) = os.path.split(file)
-    if path == '' or filename == '':
+    if path == "" or filename == "":
         return (file, None)
 
     fileInfo = None
-    root = ''
+    root = ""
     if file in vcsFileInfoCache:
         # Already cached this info, use it.
fileInfo = vcsFileInfoCache[file] @@ -292,9 +323,10 @@ def GetVCSFilename(file, srcdirs): def GetPlatformSpecificDumper(**kwargs): """This function simply returns a instance of a subclass of Dumper that is appropriate for the current platform.""" - return {'WINNT': Dumper_Win32, - 'Linux': Dumper_Linux, - 'Darwin': Dumper_Mac}[platform.system()](**kwargs) + return {"WINNT": Dumper_Win32, "Linux": Dumper_Linux, "Darwin": Dumper_Mac}[ + platform.system() + ](**kwargs) + # Git source indexing cargo culted from https://gist.github.com/baldurk/c6feb31b0305125c6d1a def SourceIndex(fileStream, outputPath, vcs_root): @@ -303,11 +335,19 @@ def SourceIndex(fileStream, outputPath, vcs_root): # Create the srcsrv data block that indexes the pdb file result = True pdbStreamFile = open(outputPath, "w") - pdbStreamFile.write('''SRCSRV: ini ------------------------------------------------\r\nVERSION=2\r\nINDEXVERSION=2\r\nVERCTRL=http\r\nSRCSRV: variables ------------------------------------------\r\nHTTP_ALIAS=''') + pdbStreamFile.write( + """SRCSRV: ini ------------------------------------------------\r\nVERSION=2\r\nINDEXVERSION=2\r\nVERCTRL=http\r\nSRCSRV: variables ------------------------------------------\r\nHTTP_ALIAS=""" + ) pdbStreamFile.write(vcs_root) - pdbStreamFile.write('''\r\nHTTP_EXTRACT_TARGET=%HTTP_ALIAS%/%var3%/%var2%\r\nSRCSRVTRG=%http_extract_target%\r\nSRCSRV: source files ---------------------------------------\r\n''') - pdbStreamFile.write(fileStream) # can't do string interpolation because the source server also uses this and so there are % in the above - pdbStreamFile.write("SRCSRV: end ------------------------------------------------\r\n\n") + pdbStreamFile.write( + """\r\nHTTP_EXTRACT_TARGET=%HTTP_ALIAS%/%var3%/%var2%\r\nSRCSRVTRG=%http_extract_target%\r\nSRCSRV: source files ---------------------------------------\r\n""" + ) + pdbStreamFile.write( + fileStream + ) # can't do string interpolation because the source server also uses this and so there are % in the above + pdbStreamFile.write( + "SRCSRV: end ------------------------------------------------\r\n\n" + ) pdbStreamFile.close() return result @@ -327,23 +367,28 @@ class Dumper: You don't want to use this directly if you intend to process files. Instead, call GetPlatformSpecificDumper to get an instance of a subclass.""" + srcdirRepoInfo = {} - def __init__(self, dump_syms, symbol_path, - archs=None, - srcdirs=[], - copy_debug=False, - vcsinfo=False, - srcsrv=False, - file_mapping=None): + def __init__( + self, + dump_syms, + symbol_path, + archs=None, + srcdirs=[], + copy_debug=False, + vcsinfo=False, + srcsrv=False, + file_mapping=None, + ): # popen likes absolute paths, at least on windows self.dump_syms = os.path.abspath(dump_syms) self.symbol_path = symbol_path if archs is None: # makes the loop logic simpler - self.archs = [''] + self.archs = [""] else: - self.archs = ['-a %s' % a for a in archs.split()] + self.archs = ["-a %s" % a for a in archs.split()] # Any paths that get compared to source file names need to go through normpath. 
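         # (For reference: on Windows the normpath defined above canonicalizes
         # on-disk case and resolves symlinks via GetFinalPathNameByHandleW;
         # elsewhere it is simply os.path.normpath.)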
self.srcdirs = [normpath(s) for s in srcdirs] self.copy_debug = copy_debug @@ -359,7 +404,7 @@ def RunFileCommand(self, file): """Utility function, returns the output of file(1)""" # we use -L to read the targets of symlinks, # and -b to print just the content, not the filename - return read_output('file', '-Lb', file) + return read_output("file", "-Lb", file) # This is a no-op except on Win32 def SourceServerIndexing(self, debug_file, guid, sourceFileStream, vcs_root): @@ -387,9 +432,9 @@ def ProcessFile(self, file, dsymbundle=None): self.ProcessFileWork(file, arch_num, arch, None, dsymbundle) def dump_syms_cmdline(self, file, arch, dsymbundle=None): - ''' + """ Get the commandline used to invoke dump_syms. - ''' + """ # The Mac dumper overrides this. return [self.dump_syms, "--inlines", file] + self.dump_syms_extra_info() @@ -405,13 +450,14 @@ def ProcessFileWork(self, file, arch_num, arch, vcs_root, dsymbundle=None): t_start = time.time() print("Processing file: %s" % file, file=sys.stderr) - sourceFileStream = '' + sourceFileStream = "" code_id, code_file = None, None try: cmd = self.dump_syms_cmdline(file, arch, dsymbundle=dsymbundle) - print(' '.join(cmd), file=sys.stderr) - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=open(os.devnull, 'wb')) + print(" ".join(cmd), file=sys.stderr) + proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=open(os.devnull, "wb") + ) stdout = io.TextIOWrapper(proc.stdout, encoding="utf-8") module_line = stdout.readline() if module_line.startswith("MODULE"): @@ -420,14 +466,11 @@ def ProcessFileWork(self, file, arch_num, arch, vcs_root, dsymbundle=None): # strip off .pdb extensions, and append .sym sym_file = re.sub(r"\.pdb$", "", debug_file) + ".sym" # we do want forward slashes here - rel_path = os.path.join(debug_file, - guid, - sym_file).replace("\\", "/") - full_path = os.path.normpath(os.path.join(self.symbol_path, - rel_path)) + rel_path = os.path.join(debug_file, guid, sym_file).replace("\\", "/") + full_path = os.path.normpath(os.path.join(self.symbol_path, rel_path)) try: os.makedirs(os.path.dirname(full_path)) - except OSError: # already exists + except OSError: # already exists pass f = open(full_path, "w") f.write(module_line) @@ -442,14 +485,18 @@ def ProcessFileWork(self, file, arch_num, arch, vcs_root, dsymbundle=None): if filename in self.file_mapping: filename = self.file_mapping[filename] if self.vcsinfo: - (filename, rootname) = GetVCSFilename(filename, self.srcdirs) + (filename, rootname) = GetVCSFilename( + filename, self.srcdirs + ) # sets vcs_root in case the loop through files were to end on an empty rootname if vcs_root is None: - if rootname: - vcs_root = rootname + if rootname: + vcs_root = rootname # gather up files with git for indexing if filename.startswith("git"): - (vcs, checkout, source_file, revision) = filename.split(":", 3) + (vcs, checkout, source_file, revision) = filename.split( + ":", 3 + ) # Contrary to HG we do not include the revision as it is part of the # repo URL. 
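                                # (Format note, not text from the patch: each
                                # srcsrv stream entry therefore comes out as
                                # "sourcepath*source_file".)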
sourceFileStream += sourcepath + "*" + source_file + "\r\n" @@ -468,18 +515,18 @@ def ProcessFileWork(self, file, arch_num, arch, vcs_root, dsymbundle=None): f.close() retcode = proc.wait() if retcode != 0: - raise RuntimeError( - "dump_syms failed with error code %d" % retcode) + raise RuntimeError("dump_syms failed with error code %d" % retcode) # we output relative paths so callers can get a list of what # was generated print(rel_path) if self.srcsrv and vcs_root: # add source server indexing to the pdb file - self.SourceServerIndexing(debug_file, guid, sourceFileStream, vcs_root) + self.SourceServerIndexing( + debug_file, guid, sourceFileStream, vcs_root + ) # only copy debug the first time if we have multiple architectures if self.copy_debug and arch_num == 0: - self.CopyDebug(file, debug_file, guid, - code_file, code_id) + self.CopyDebug(file, debug_file, guid, code_file, code_id) except StopIteration: pass except Exception as e: @@ -490,21 +537,22 @@ def ProcessFileWork(self, file, arch_num, arch, vcs_root, dsymbundle=None): shutil.rmtree(dsymbundle) elapsed = time.time() - t_start - print('Finished processing %s in %.2fs' % (file, elapsed), - file=sys.stderr) + print("Finished processing %s in %.2fs" % (file, elapsed), file=sys.stderr) + # Platform-specific subclasses. For the most part, these just have # logic to determine what files to extract symbols from. + def locate_pdb(path): - '''Given a path to a binary, attempt to locate the matching pdb file with simple heuristics: + """Given a path to a binary, attempt to locate the matching pdb file with simple heuristics: * Look for a pdb file with the same base name next to the binary * Look for a pdb file with the same base name in the cwd Returns the path to the pdb file if it exists, or None if it could not be located. 
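     For example (hypothetical paths): for "app.exe" this first checks for
     "app.pdb" next to the binary, then for "app.pdb" in the current directory.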
- ''' + """ path, ext = os.path.splitext(path) - pdb = path + '.pdb' + pdb = path + ".pdb" if os.path.isfile(pdb): return pdb # If there's no pdb next to the file, see if there's a pdb with the same root name @@ -516,6 +564,7 @@ def locate_pdb(path): return pdb return None + class Dumper_Win32(Dumper): fixedFilenameCaseCache = {} @@ -527,44 +576,39 @@ def ShouldProcess(self, file): return True return False - def CopyDebug(self, file, debug_file, guid, code_file, code_id): file = locate_pdb(file) + def compress(path): - compressed_file = path[:-1] + '_' + compressed_file = path[:-1] + "_" # ignore makecab's output - makecab = os.environ['MAKECAB'] - success = subprocess.call([makecab, "-D", - "CompressionType=MSZIP", - path, compressed_file], - stdout=open(os.devnull, 'w'), - stderr=subprocess.STDOUT) + makecab = os.environ["MAKECAB"] + success = subprocess.call( + [makecab, "-D", "CompressionType=MSZIP", path, compressed_file], + stdout=open(os.devnull, "w"), + stderr=subprocess.STDOUT, + ) if success == 0 and os.path.exists(compressed_file): os.unlink(path) return True return False - rel_path = os.path.join(debug_file, - guid, - debug_file).replace("\\", "/") - full_path = os.path.normpath(os.path.join(self.symbol_path, - rel_path)) + rel_path = os.path.join(debug_file, guid, debug_file).replace("\\", "/") + full_path = os.path.normpath(os.path.join(self.symbol_path, rel_path)) shutil.copyfile(file, full_path) if compress(full_path): - print(rel_path[:-1] + '_') + print(rel_path[:-1] + "_") else: print(rel_path) # Copy the binary file as well if code_file and code_id: - full_code_path = os.path.join(os.path.dirname(file), - code_file) + full_code_path = os.path.join(os.path.dirname(file), code_file) if os.path.exists(full_code_path): - rel_path = os.path.join(code_file, - code_id, - code_file).replace("\\", "/") - full_path = os.path.normpath(os.path.join(self.symbol_path, - rel_path)) + rel_path = os.path.join(code_file, code_id, code_file).replace( + "\\", "/" + ) + full_path = os.path.normpath(os.path.join(self.symbol_path, rel_path)) try: os.makedirs(os.path.dirname(full_path)) except OSError as e: @@ -572,7 +616,7 @@ def compress(path): raise shutil.copyfile(full_code_path, full_path) if compress(full_path): - print(rel_path[:-1] + '_') + print(rel_path[:-1] + "_") else: print(rel_path) @@ -585,16 +629,24 @@ def SourceServerIndexing(self, debug_file, guid, sourceFileStream, vcs_root): if self.copy_debug: pdbstr_path = os.environ.get("PDBSTR_PATH") pdbstr = os.path.normpath(pdbstr_path) - subprocess.call([pdbstr, "-w", "-p:" + os.path.basename(debug_file), - "-i:" + os.path.basename(streamFilename), "-s:srcsrv"], - cwd=os.path.dirname(stream_output_path)) + subprocess.call( + [ + pdbstr, + "-w", + "-p:" + os.path.basename(debug_file), + "-i:" + os.path.basename(streamFilename), + "-s:srcsrv", + ], + cwd=os.path.dirname(stream_output_path), + ) # clean up all the .stream files when done os.remove(stream_output_path) return result class Dumper_Linux(Dumper): - objcopy = os.environ['OBJCOPY'] if 'OBJCOPY' in os.environ else 'objcopy' + objcopy = os.environ["OBJCOPY"] if "OBJCOPY" in os.environ else "objcopy" + def ShouldProcess(self, file): """This function will allow processing of files that are executable, or end with the .so extension, and additionally @@ -612,21 +664,28 @@ def CopyDebug(self, file, debug_file, guid, code_file, code_id): # .gnu_debuglink section, and objcopy doesn't want to add one in # such cases, so we make it remove it any existing one first. 
file_dbg = file + ".dbg" - if subprocess.call([self.objcopy, '--only-keep-debug', file, file_dbg]) == 0 and \ - subprocess.call([self.objcopy, '--remove-section', '.gnu_debuglink', - '--add-gnu-debuglink=%s' % file_dbg, file]) == 0: - rel_path = os.path.join(debug_file, - guid, - debug_file + ".dbg") - full_path = os.path.normpath(os.path.join(self.symbol_path, - rel_path)) + if ( + subprocess.call([self.objcopy, "--only-keep-debug", file, file_dbg]) == 0 + and subprocess.call( + [ + self.objcopy, + "--remove-section", + ".gnu_debuglink", + "--add-gnu-debuglink=%s" % file_dbg, + file, + ] + ) + == 0 + ): + rel_path = os.path.join(debug_file, guid, debug_file + ".dbg") + full_path = os.path.normpath(os.path.join(self.symbol_path, rel_path)) shutil.move(file_dbg, full_path) # gzip the shipped debug files os.system("gzip -4 -f %s" % full_path) print(rel_path + ".gz") - else: - if os.path.isfile(file_dbg): - os.unlink(file_dbg) + elif os.path.isfile(file_dbg): + os.unlink(file_dbg) + class Dumper_Mac(Dumper): def ShouldProcess(self, file): @@ -639,28 +698,29 @@ def ShouldProcess(self, file): return False def ProcessFile(self, file): - print("Starting Mac pre-processing on file: %s" % file, - file=sys.stderr) + print("Starting Mac pre-processing on file: %s" % file, file=sys.stderr) dsymbundle = self.GenerateDSYM(file) if dsymbundle: # kick off new jobs per-arch with our new list of files Dumper.ProcessFile(self, file, dsymbundle=dsymbundle) def dump_syms_cmdline(self, file, arch, dsymbundle=None): - ''' + """ Get the commandline used to invoke dump_syms. - ''' + """ # dump_syms wants the path to the original binary and the .dSYM # in order to dump all the symbols. if dsymbundle: # This is the .dSYM bundle. cmdline = [self.dump_syms] cmdline.extend(arch.split()) - cmdline.extend([ - "--inlines", - "-j", - "2", - ]) + cmdline.extend( + [ + "--inlines", + "-j", + "2", + ] + ) cmdline.extend(self.dump_syms_extra_info()) cmdline.extend([dsymbundle, file]) return cmdline @@ -671,21 +731,22 @@ def GenerateDSYM(self, file): by dsymutil(1), so run dsymutil here and pass the bundle name down to the superclass method instead.""" t_start = time.time() - print("Running Mac pre-processing on file: %s" % (file,), - file=sys.stderr) + print("Running Mac pre-processing on file: %s" % (file,), file=sys.stderr) dsymbundle = file + ".dSYM" if os.path.exists(dsymbundle): shutil.rmtree(dsymbundle) # dsymutil takes --arch=foo instead of -a foo like everything else try: - cmd = (["dsymutil"] + - [a.replace('-a ', '--arch=') for a in self.archs if a] + - [file]) - print(' '.join(cmd), file=sys.stderr) - subprocess.check_call(cmd, stdout=open(os.devnull, 'w')) + cmd = ( + ["dsymutil"] + + [a.replace("-a ", "--arch=") for a in self.archs if a] + + [file] + ) + print(" ".join(cmd), file=sys.stderr) + subprocess.check_call(cmd, stdout=open(os.devnull, "w")) except subprocess.CalledProcessError as e: - print('Error running dsymutil: %s' % str(e), file=sys.stderr) + print("Error running dsymutil: %s" % str(e), file=sys.stderr) raise if not os.path.exists(dsymbundle): @@ -694,8 +755,7 @@ def GenerateDSYM(self, file): return False elapsed = time.time() - t_start - print('Finished processing %s in %.2fs' % (file, elapsed), - file=sys.stderr) + print("Finished processing %s in %.2fs" % (file, elapsed), file=sys.stderr) return dsymbundle def CopyDebug(self, file, debug_file, guid, code_file, code_id): @@ -704,59 +764,93 @@ def CopyDebug(self, file, debug_file, guid, code_file, code_id): into a .tar.bz2 because the debug symbols 
are pretty huge, and also because it's a bundle, so it's a directory.
        |file| here is the original filename."""
-        dsymbundle = file + '.dSYM'
-        rel_path = os.path.join(debug_file,
-                                guid,
-                                os.path.basename(dsymbundle) + ".tar.bz2")
-        full_path = os.path.abspath(os.path.join(self.symbol_path,
-                                                 rel_path))
-        success = subprocess.call(["tar", "cjf", full_path, os.path.basename(dsymbundle)],
-                                  cwd=os.path.dirname(dsymbundle),
-                                  stdout=open(os.devnull, 'w'), stderr=subprocess.STDOUT)
+        dsymbundle = file + ".dSYM"
+        rel_path = os.path.join(
+            debug_file, guid, os.path.basename(dsymbundle) + ".tar.bz2"
+        )
+        full_path = os.path.abspath(os.path.join(self.symbol_path, rel_path))
+        success = subprocess.call(
+            ["tar", "cjf", full_path, os.path.basename(dsymbundle)],
+            cwd=os.path.dirname(dsymbundle),
+            stdout=open(os.devnull, "w"),
+            stderr=subprocess.STDOUT,
+        )
         if success == 0 and os.path.exists(full_path):
             print(rel_path)
 
+
 # Entry point if called as a standalone program
 def main():
-    parser = OptionParser(usage="usage: %prog [options] <dump_syms binary> <symbol store path> <debug info files>")
-    parser.add_option("-c", "--copy",
-                      action="store_true", dest="copy_debug", default=False,
-                      help="Copy debug info files into the same directory structure as symbol files")
-    parser.add_option("-a", "--archs",
-                      action="store", dest="archs",
-                      help="Run dump_syms -a <arch> for each space separated cpu architecture in ARCHS (only on OS X)")
-    parser.add_option("-s", "--srcdir",
-                      action="append", dest="srcdir", default=[],
-                      help="Use SRCDIR to determine relative paths to source files")
-    parser.add_option("-v", "--vcs-info",
-                      action="store_true", dest="vcsinfo",
-                      help="Try to retrieve VCS info for each FILE listed in the output")
-    parser.add_option("-i", "--source-index",
-                      action="store_true", dest="srcsrv", default=False,
-                      help="Add source index information to debug files, making them suitable for use in a source server.")
+    parser = OptionParser(
+        usage="usage: %prog [options] <dump_syms binary> <symbol store path> <debug info files>"
+    )
+    parser.add_option(
+        "-c",
+        "--copy",
+        action="store_true",
+        dest="copy_debug",
+        default=False,
+        help="Copy debug info files into the same directory structure as symbol files",
+    )
+    parser.add_option(
+        "-a",
+        "--archs",
+        action="store",
+        dest="archs",
+        help="Run dump_syms -a <arch> for each space separated cpu architecture in ARCHS (only on OS X)",
+    )
+    parser.add_option(
+        "-s",
+        "--srcdir",
+        action="append",
+        dest="srcdir",
+        default=[],
+        help="Use SRCDIR to determine relative paths to source files",
+    )
+    parser.add_option(
+        "-v",
+        "--vcs-info",
+        action="store_true",
+        dest="vcsinfo",
+        help="Try to retrieve VCS info for each FILE listed in the output",
+    )
+    parser.add_option(
+        "-i",
+        "--source-index",
+        action="store_true",
+        dest="srcsrv",
+        default=False,
+        help="Add source index information to debug files, making them suitable for use in a source server.",
+    )
     (options, args) = parser.parse_args()
 
-    #check to see if the pdbstr.exe exists
+    # check to see if the pdbstr.exe exists
     if options.srcsrv:
         pdbstr = os.environ.get("PDBSTR_PATH")
         if not os.path.exists(pdbstr):
-            print("Invalid path to pdbstr.exe - please set/check PDBSTR_PATH.\n", file=sys.stderr)
+            print(
+                "Invalid path to pdbstr.exe - please set/check PDBSTR_PATH.\n",
+                file=sys.stderr,
+            )
             sys.exit(1)
 
     if len(args) < 3:
         parser.error("not enough arguments")
-        exit(1)
-
-    dumper = GetPlatformSpecificDumper(dump_syms=args[0],
-                                       symbol_path=args[1],
-                                       copy_debug=options.copy_debug,
-                                       archs=options.archs,
-                                       srcdirs=options.srcdir,
-                                       vcsinfo=options.vcsinfo,
-                                       srcsrv=options.srcsrv)
+        sys.exit(1)
+
+    dumper = GetPlatformSpecificDumper(
+        dump_syms=args[0],
+        symbol_path=args[1],
+        copy_debug=options.copy_debug,
+        archs=options.archs,
+        srcdirs=options.srcdir,
+        vcsinfo=options.vcsinfo,
+        srcsrv=options.srcsrv,
+    )
     dumper.Process(args[2])
 
+
 # run main if run directly
 if __name__ == "__main__":
     main()
diff --git a/automation/symbols-generation/upload_symbols.py b/automation/symbols-generation/upload_symbols.py
index 56c7e45fee..71aed84ffc 100644
--- a/automation/symbols-generation/upload_symbols.py
+++ b/automation/symbols-generation/upload_symbols.py
@@ -5,30 +5,34 @@
 
 from __future__ import print_function
 
-import json
 import os
-import redo
-import requests
 import shutil
 import sys
 from optparse import OptionParser
 
+import redo
+import requests
+
 DEFAULT_SYMBOL_URL = "https://symbols.mozilla.org/upload/"
 MAX_RETRIES = 5
 
+
 def upload_symbols(zip_file, token_file):
-    print("Uploading symbols file '{0}' to '{1}'".format(zip_file, DEFAULT_SYMBOL_URL), file=sys.stdout)
+    print(
+        "Uploading symbols file '{0}' to '{1}'".format(zip_file, DEFAULT_SYMBOL_URL),
+        file=sys.stdout,
+    )
     zip_name = os.path.basename(zip_file)
 
     # XXX: fetch the symbol upload token from local file, taskgraph handles
     # already that communication with Taskcluster to get the credentials for
     # communicating with the server
-    auth_token = ''
-    with open(token_file, 'r') as f:
+    auth_token = ""
+    with open(token_file, "r") as f:
         auth_token = f.read().strip()
     if len(auth_token) == 0:
         print("Failed to get the symbol token.", file=sys.stderr)
 
-    if auth_token == 'faketoken':
+    if auth_token == "faketoken":
         print("'faketoken' detected, not pushing anything", file=sys.stdout)
         sys.exit(0)
 
@@ -38,7 +42,7 @@ def upload_symbols(zip_file, token_file):
     if zip_file.startswith("http"):
         zip_arg = {"data": {"url": zip_file}}
     else:
-        zip_arg = {"files": {zip_name: open(zip_file, 'rb')}}
+        zip_arg = {"files": {zip_name: open(zip_file, "rb")}}
     r = requests.post(
         DEFAULT_SYMBOL_URL,
         headers={"Auth-Token": auth_token},
@@ -47,7 +51,8 @@ def upload_symbols(zip_file, token_file):
         # has to fetch the entire zip file, which can take a while. The load balancer
         # in front of symbols.mozilla.org has a 300 second timeout, so we'll use that.
         timeout=(10, 300),
-        **zip_arg)
+        **zip_arg
+    )
 
     # 500 is likely to be a transient failure.
     # Break out for success or other error codes.
if r.status_code < 500: @@ -68,23 +73,30 @@ def upload_symbols(zip_file, token_file): print(r.text, file=sys.stderr) return False + def main(): parser = OptionParser(usage="usage: ") - parser.add_option('-t', '--tokenfile', dest='token_file', - help='upload symbols token file', default='.symbols_upload_token') + parser.add_option( + "-t", + "--tokenfile", + dest="token_file", + help="upload symbols token file", + default=".symbols_upload_token", + ) (options, args) = parser.parse_args() if len(args) < 1: parser.error("not enough arguments") - exit(1) + sys.exit(1) symbol_path = args[0] token_file = options.token_file - shutil.make_archive(symbol_path , "zip", symbol_path) + shutil.make_archive(symbol_path, "zip", symbol_path) upload_success = upload_symbols(symbol_path + ".zip", token_file) if not upload_success: sys.exit(2) + # run main if run directly if __name__ == "__main__": main() diff --git a/automation/tag-release.py b/automation/tag-release.py index 930db722bd..67f81d6ab4 100755 --- a/automation/tag-release.py +++ b/automation/tag-release.py @@ -8,8 +8,9 @@ # Usage: ./automation/tag-release.py [major-version-number] import argparse +import sys -from shared import RefNames, get_moz_remote, step_msg, run_cmd_checked, check_output +from shared import RefNames, check_output, get_moz_remote, run_cmd_checked, step_msg parser = argparse.ArgumentParser(description="Tags an application-services release") parser.add_argument("major_version_number", type=int) @@ -20,11 +21,13 @@ step_msg("Getting version number") run_cmd_checked(["git", "fetch", moz_remote]) -version = check_output([ - "git", - "show", - f"{moz_remote}/{branch}:version.txt", -]).strip() +version = check_output( + [ + "git", + "show", + f"{moz_remote}/{branch}:version.txt", + ] +).strip() tag = f"v{version}" step_msg("Getting commit") @@ -34,9 +37,11 @@ print(f"Branch: {branch}") print(f"Commit: {logline}") print(f"Tag: {tag}") -response = input("Would you like to add the tag to the commit listed above? ([Y]/N)").lower() -if response != "y" and response != "" and response != "yes": - exit(0) +response = input( + "Would you like to add the tag to the commit listed above? ([Y]/N)" +).lower() +if response not in ("y", "", "yes"): + sys.exit(0) run_cmd_checked(["git", "tag", tag, commit]) run_cmd_checked(["git", "push", moz_remote, tag]) diff --git a/automation/tests.py b/automation/tests.py index fa9fe980be..d70c038d5c 100755 --- a/automation/tests.py +++ b/automation/tests.py @@ -33,8 +33,6 @@ - python-tests """ -from enum import Enum -from pathlib import Path import argparse import json import os @@ -43,60 +41,76 @@ import subprocess import sys import traceback +from enum import Enum +from pathlib import Path PROJECT_ROOT = Path(__file__).parent.parent -AUTOMATION_DIR = PROJECT_ROOT / 'automation' -COMPONENTS_DIR = PROJECT_ROOT / 'components' -GRADLE = PROJECT_ROOT / 'gradlew' +AUTOMATION_DIR = PROJECT_ROOT / "automation" +COMPONENTS_DIR = PROJECT_ROOT / "components" +GRADLE = PROJECT_ROOT / "gradlew" # Ensure this is a proper path, so we can execute it without searching $PATH. 
GRADLE = GRADLE.resolve() -IGNORE_PATHS = set([ - # let's not run tests just for dependency changes - 'megazords/full/DEPENDENCIES.md', - 'megazords/full/android/dependency-licenses.xml', - 'megazords/ios-rust/DEPENDENCIES.md', -]) +IGNORE_PATHS = set( + [ + # let's not run tests just for dependency changes + "megazords/full/DEPENDENCIES.md", + "megazords/full/android/dependency-licenses.xml", + "megazords/ios-rust/DEPENDENCIES.md", + ] +) + def blue_text(text): if not sys.stdout.isatty(): return text - return '\033[96m{}\033[0m'.format(text) + return "\033[96m{}\033[0m".format(text) + def yellow_text(text): if not sys.stdout.isatty(): return text - return '\033[93m{}\033[0m'.format(text) + return "\033[93m{}\033[0m".format(text) + def get_output(cmdline, **kwargs): - output = subprocess.check_output(cmdline, **kwargs).decode('utf8') + output = subprocess.check_output(cmdline, **kwargs).decode("utf8") return output + def run_command(cmdline, **kwargs): - print(yellow_text(' '.join(shlex.quote(str(part)) for part in cmdline))) + print(yellow_text(" ".join(shlex.quote(str(part)) for part in cmdline))) subprocess.check_call(cmdline, **kwargs) + def path_is_relative_to(path, other): """ Implementation of Path.is_relative_to() which was only added in python 3.9 """ return str(path.resolve()).startswith(str(other.resolve())) + def parse_args(): parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('mode', help='Testing mode', metavar='MODE') - parser.add_argument('--base-branch', dest='base_branch', default='main', - help='git base branch') + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument("mode", help="Testing mode", metavar="MODE") + parser.add_argument( + "--base-branch", dest="base_branch", default="main", help="git base branch" + ) return parser.parse_args() + def on_darwin(): - return platform.system() == 'Darwin' + return platform.system() == "Darwin" + def get_default_target(): - return get_output([PROJECT_ROOT / 'taskcluster' / 'scripts' / 'detect-target.sh', '--use-rustc']) + return get_output( + [PROJECT_ROOT / "taskcluster" / "scripts" / "detect-target.sh", "--use-rustc"] + ) + def should_run_rust_tests(package, min_version): # There are no tests in examples/ packages, so don't waste time on them. @@ -108,50 +122,64 @@ def should_run_rust_tests(package, min_version): return False return True + class BranchChanges: """ Tracks which files have been changed in this branch """ + def __init__(self, base_branch): # Calculate the merge base, this is the last commit from the base # branch that's present in this branch. - self.merge_base = get_output([ - 'git', 'merge-base', 'HEAD', base_branch, - ]).strip() + self.merge_base = get_output( + [ + "git", + "merge-base", + "HEAD", + base_branch, + ] + ).strip() # Use the merge base to calculate which files have changed from the # base branch. 
- raw_paths = get_output([ - 'git', 'diff', '--name-only', self.merge_base, - ]).split('\n') + raw_paths = get_output( + [ + "git", + "diff", + "--name-only", + self.merge_base, + ] + ).split("\n") raw_paths = [p for p in raw_paths if p not in IGNORE_PATHS] self.paths = [PROJECT_ROOT.joinpath(p) for p in raw_paths] @staticmethod def has_unstanged_changes(): - output = get_output(['git', 'status', '--porcelain=v1']) - return any(line and line[1] == 'M' for line in output.split('\n')) + output = get_output(["git", "status", "--porcelain=v1"]) + return any(line and line[1] == "M" for line in output.split("\n")) + class RustPackage: """ A rust package that we want to test and run clippy on """ + def __init__(self, cargo_metadata): self.cargo_metadata = cargo_metadata - self.name = cargo_metadata['name'] + self.name = cargo_metadata["name"] # Use manifest path to select the package in cargo. This works better # when using the --no-default-features flag - self.manifest_path = Path(cargo_metadata['manifest_path']) + self.manifest_path = Path(cargo_metadata["manifest_path"]) self.directory = self.manifest_path.parent def has_default_features(self): - return bool(self.cargo_metadata.get('features').get('default')) + return bool(self.cargo_metadata.get("features").get("default")) def has_features(self): - return bool(self.cargo_metadata.get('features')) + return bool(self.cargo_metadata.get("features")) def has_changes(self, branch_changes): - return any(path_is_relative_to(p, self.directory) - for p in branch_changes.paths) + return any(path_is_relative_to(p, self.directory) for p in branch_changes.paths) + class RustFeatures(Enum): """ @@ -161,9 +189,10 @@ class RustFeatures(Enum): Therefore we want to use different combinations of features when running tests/clippy. 
""" - DEFAULT = 'default features' - ALL = 'all features' - NONE = 'no features' + + DEFAULT = "default features" + ALL = "all features" + NONE = "no features" def label(self): return self.value @@ -172,9 +201,10 @@ def cmdline_args(self): if self == RustFeatures.DEFAULT: return [] elif self == RustFeatures.ALL: - return ['--all-features'] + return ["--all-features"] elif self == RustFeatures.NONE: - return ['--no-default-features'] + return ["--no-default-features"] + def calc_rust_items(branch_changes=None, default_features_only=False): """ @@ -186,11 +216,19 @@ def calc_rust_items(branch_changes=None, default_features_only=False): Returns: list of (RustPackage, RustFeatures) items """ - json_data = json.loads(get_output([ - 'cargo', 'metadata', '--no-deps', '--format-version', '1', - ])) + json_data = json.loads( + get_output( + [ + "cargo", + "metadata", + "--no-deps", + "--format-version", + "1", + ] + ) + ) - packages = [RustPackage(p) for p in json_data['packages']] + packages = [RustPackage(p) for p in json_data["packages"]] if branch_changes: packages = [p for p in packages if p.has_changes(branch_changes)] @@ -208,6 +246,7 @@ def calc_rust_items(branch_changes=None, default_features_only=False): if p.has_default_features(): yield p, RustFeatures.NONE + def calc_non_workspace_rust_items(branch_changes=None, default_features_only=False): """ Calculate which items are not in our default workspace, but we might want to @@ -216,11 +255,21 @@ def calc_non_workspace_rust_items(branch_changes=None, default_features_only=Fal Returns the same as calc_rust_items """ for path in ["testing/sync-test/Cargo.toml"]: - json_data = json.loads(get_output([ - 'cargo', 'metadata', '--no-deps', '--format-version', '1', '--manifest-path', path - ])) + json_data = json.loads( + get_output( + [ + "cargo", + "metadata", + "--no-deps", + "--format-version", + "1", + "--manifest-path", + path, + ] + ) + ) - packages = [RustPackage(p) for p in json_data['packages']] + packages = [RustPackage(p) for p in json_data["packages"]] if branch_changes: packages = [p for p in packages if p.has_changes(branch_changes)] @@ -246,11 +295,13 @@ def calc_non_workspace_rust_items(branch_changes=None, default_features_only=Fal # we use the faster method of touching the changed files so only they get # rebuilt. + def cargo_clean(): """ Force cargo to rebuild rust files """ - run_command(['cargo', 'clean']) + run_command(["cargo", "clean"]) + def touch_changed_paths(branch_changes): """ @@ -263,54 +314,71 @@ def touch_changed_paths(branch_changes): if path.exists(): path.touch() + def print_rust_environment(): - print('platform: {}'.format(platform.uname())) - print('rustc version: {}'.format( - get_output(['rustc', '--version']).strip())) - print('cargo version: {}'.format( - get_output(['cargo', '--version']).strip())) - print('rustfmt version: {}'.format( - get_output(['rustfmt', '--version']).strip())) - print('GCC version: {}'.format( - get_output(['gcc', '--version']).split('\n')[0])) + print("platform: {}".format(platform.uname())) + print("rustc version: {}".format(get_output(["rustc", "--version"]).strip())) + print("cargo version: {}".format(get_output(["cargo", "--version"]).strip())) + print("rustfmt version: {}".format(get_output(["rustfmt", "--version"]).strip())) + print("GCC version: {}".format(get_output(["gcc", "--version"]).split("\n")[0])) print() + def calc_rust_env(package, features): if features == RustFeatures.ALL: # nss-sys's --features handling is broken. Workaround it by using a # custom --cfg. 
This shouldn't be this way! - return { - **os.environ, - 'RUSTFLAGS' : "--cfg __appsvc_ci_hack" - } + return {**os.environ, "RUSTFLAGS": "--cfg __appsvc_ci_hack"} else: return None + def run_rust_test(package, features): - run_command([ - 'cargo', 'test', - '--manifest-path', package.manifest_path, - ] + features.cmdline_args(), env=calc_rust_env(package, features)) + run_command( + [ + "cargo", + "test", + "--manifest-path", + package.manifest_path, + ] + + features.cmdline_args(), + env=calc_rust_env(package, features), + ) + def run_nss_bindings_test(): - run_command([ - 'cargo', 'run', '-p', 'systest', - ]) + run_command( + [ + "cargo", + "run", + "-p", + "systest", + ] + ) + def run_clippy(package, features): - run_command([ - 'cargo', 'clippy', '--all-targets', - '--manifest-path', package.manifest_path, - ] + features.cmdline_args() + [ - '--', '-D', 'warnings' - ], env=calc_rust_env(package, features)) + run_command( + [ + "cargo", + "clippy", + "--all-targets", + "--manifest-path", + package.manifest_path, + ] + + features.cmdline_args() + + ["--", "-D", "warnings"], + env=calc_rust_env(package, features), + ) + def run_ktlint(): - run_command([GRADLE, 'ktlint', 'detekt']) + run_command([GRADLE, "ktlint", "detekt"]) + def run_swiftlint(): if on_darwin(): - run_command(['swiftlint', '--strict']) + run_command(["swiftlint", "--strict"]) elif not docker_installed(): print("WARNING: On non-Darwin hosts, docker is required to run swiftlint") print("WARNING: skipping swiftlint on non-Darwin host") @@ -329,45 +397,62 @@ def run_swiftlint(): cwd, "ghcr.io/realm/swiftlint:latest", "swiftlint", - "--strict" + "--strict", ] ) + def run_gradle_tests(): - run_command([GRADLE, 'test']) + run_command([GRADLE, "test"]) + def run_ios_tests(): if on_darwin(): - run_command([AUTOMATION_DIR / 'run_ios_tests.sh']) + run_command([AUTOMATION_DIR / "run_ios_tests.sh"]) else: print("WARNING: skipping iOS tests on non-Darwin host") + def run_python_tests(): - target=get_default_target() - run_command([PROJECT_ROOT / 'taskcluster/scripts/server-megazord-build.py', 'cirrus', target]) - run_command([PROJECT_ROOT / 'taskcluster/scripts/server-megazord-build.py', 'nimbus-experimenter', target]) + target = get_default_target() + run_command( + [ + PROJECT_ROOT / "taskcluster/scripts/server-megazord-build.py", + "cirrus", + target, + ] + ) + run_command( + [ + PROJECT_ROOT / "taskcluster/scripts/server-megazord-build.py", + "nimbus-experimenter", + target, + ] + ) + def cargo_fmt(package=None, fix_issues=False): - cmdline = ['cargo', 'fmt'] + cmdline = ["cargo", "fmt"] if package: - cmdline.extend(['--manifest-path', package.manifest_path]) + cmdline.extend(["--manifest-path", package.manifest_path]) else: - cmdline.append('--all') + cmdline.append("--all") if not fix_issues: - cmdline.extend(['--', '--check']) + cmdline.extend(["--", "--check"]) run_command(cmdline) + def swift_format(): swift_format_args = [ - 'megazords', - 'components/*/ios', - '--exclude', - '**/Generated', - '--exclude', - 'components/nimbus/ios/Nimbus/Utils', - '--lint', - '--swiftversion', - '5', + "megazords", + "components/*/ios", + "--exclude", + "**/Generated", + "--exclude", + "components/nimbus/ios/Nimbus/Utils", + "--lint", + "--swiftversion", + "5", ] if on_darwin(): run_command(["swiftformat", *swift_format_args]) @@ -392,6 +477,7 @@ def swift_format(): ] ) + def check_for_fmt_changes(branch_changes): print() if branch_changes.has_unstanged_changes(): @@ -399,10 +485,12 @@ def check_for_fmt_changes(branch_changes): else: 
print("All checks passed!") + class Step: """ Represents a single step of the testing process """ + def __init__(self, name, func, *args, **kwargs): self.name = name self.func = func @@ -411,15 +499,18 @@ def __init__(self, name, func, *args, **kwargs): def run(self): print() - print(blue_text('Running {}'.format(self.name))) + print(blue_text("Running {}".format(self.name))) try: self.func(*self.args, **self.kwargs) except subprocess.CalledProcessError: - exit_with_error(1, 'Error while running {}'.format(self.name)) - except: + exit_with_error(1, "Error while running {}".format(self.name)) + except Exception: exit_with_error( - 2, 'Unexpected exception while running {}'.format(self.name), - print_exception=True) + 2, + "Unexpected exception while running {}".format(self.name), + print_exception=True, + ) + def calc_steps(args): """ @@ -427,97 +518,121 @@ def calc_steps(args): Yields a list of (name, func) items. """ - if args.mode == 'changes': + if args.mode == "changes": # changes mode is complicated enough that it's split off into its own # function for step in calc_steps_change_mode(args): yield step - elif args.mode == 'rust-tests': + elif args.mode == "rust-tests": print_rust_environment() - yield Step('cargo clean', cargo_clean) + yield Step("cargo clean", cargo_clean) for package, features in calc_rust_items(): if should_run_rust_tests(package, False): yield Step( - 'tests for {} ({})'.format(package.name, features.label()), - run_rust_test, package, features) - elif args.mode == 'rust-min-version-tests': + "tests for {} ({})".format(package.name, features.label()), + run_rust_test, + package, + features, + ) + elif args.mode == "rust-min-version-tests": print_rust_environment() - yield Step('cargo clean', cargo_clean) + yield Step("cargo clean", cargo_clean) for package, features in calc_rust_items(): if should_run_rust_tests(package, True): yield Step( - 'tests for {} ({})'.format(package.name, features.label()), - run_rust_test, package, features) - elif args.mode == 'rust-clippy': + "tests for {} ({})".format(package.name, features.label()), + run_rust_test, + package, + features, + ) + elif args.mode == "rust-clippy": print_rust_environment() - yield Step('cargo clean', cargo_clean) + yield Step("cargo clean", cargo_clean) for package, features in calc_rust_items(): yield Step( - 'clippy for {} ({})'.format(package.name, features.label()), - run_clippy, package, features) + "clippy for {} ({})".format(package.name, features.label()), + run_clippy, + package, + features, + ) # non-workspace items aren't tested, but we do run clippy on them to # make sure they don't go stale. 
for package, features in calc_non_workspace_rust_items(): yield Step( - 'clippy for {} ({})'.format(package.name, features.label()), - run_clippy, package, features) - elif args.mode == 'rust-fmt': + "clippy for {} ({})".format(package.name, features.label()), + run_clippy, + package, + features, + ) + elif args.mode == "rust-fmt": print_rust_environment() - yield Step('cargo fmt', cargo_fmt) - elif args.mode == 'ktlint': - yield Step('ktlint', run_ktlint) - elif args.mode == 'swiftlint': - yield Step('swiftlint', run_swiftlint) - elif args.mode == 'swiftformat': - yield Step('swiftformat', swift_format) - elif args.mode == 'nss-bindings': + yield Step("cargo fmt", cargo_fmt) + elif args.mode == "ktlint": + yield Step("ktlint", run_ktlint) + elif args.mode == "swiftlint": + yield Step("swiftlint", run_swiftlint) + elif args.mode == "swiftformat": + yield Step("swiftformat", swift_format) + elif args.mode == "nss-bindings": print_rust_environment() - yield Step('NSS bindings test', run_nss_bindings_test) - elif args.mode == 'gradle': - yield Step('gradle tests', run_gradle_tests) - elif args.mode == 'ios-tests': - yield Step('ios tests', run_ios_tests) - elif args.mode == 'python-tests': - yield Step('python tests', run_python_tests) + yield Step("NSS bindings test", run_nss_bindings_test) + elif args.mode == "gradle": + yield Step("gradle tests", run_gradle_tests) + elif args.mode == "ios-tests": + yield Step("ios tests", run_ios_tests) + elif args.mode == "python-tests": + yield Step("python tests", run_python_tests) else: - print('Invalid mode: {}'.format(args.mode)) + print("Invalid mode: {}".format(args.mode)) sys.exit(1) + def calc_steps_change_mode(args): """ Calculate the steps needed for change mode """ print_rust_environment() branch_changes = BranchChanges(args.base_branch) - rust_items = list(calc_rust_items(branch_changes, - default_features_only=True)) + rust_items = list(calc_rust_items(branch_changes, default_features_only=True)) rust_packages = list(set(package for package, _ in rust_items)) if not rust_items: - print('no changes found.') + print("no changes found.") return if branch_changes.has_unstanged_changes(): - subprocess.run(['git', 'status']) + subprocess.run(["git", "status"], check=False) print() - print('WARNING: unstaged changes in your branch:') - print('Consider git add or git commit to stage them since this ' - 'script will run cargo fmt') + print("WARNING: unstaged changes in your branch:") + print( + "Consider git add or git commit to stage them since this " + "script will run cargo fmt" + ) print("Continue (Y/N)?") - if input().lower() != 'y': + if input().lower() != "y": sys.exit(0) - yield Step('touch changed paths', touch_changed_paths, branch_changes) + yield Step("touch changed paths", touch_changed_paths, branch_changes) for package, features in rust_items: - yield Step('tests for {} ({})'.format(package.name, features.label()), - run_rust_test, package, features) + yield Step( + "tests for {} ({})".format(package.name, features.label()), + run_rust_test, + package, + features, + ) for package, features in rust_items: - yield Step('clippy for {} ({})'.format(package.name, features.label()), - run_clippy, package, features) + yield Step( + "clippy for {} ({})".format(package.name, features.label()), + run_clippy, + package, + features, + ) for package in rust_packages: - yield Step('rustfmt for {}'.format(package.name), cargo_fmt, package, - fix_issues=True) - yield Step('Check for changes', check_for_fmt_changes, branch_changes) + yield Step( + 
"rustfmt for {}".format(package.name), cargo_fmt, package, fix_issues=True + ) + yield Step("Check for changes", check_for_fmt_changes, branch_changes) + def main(): args = parse_args() @@ -525,23 +640,27 @@ def main(): for step in calc_steps(args): step.run() + def exit_with_error(code, text, print_exception=False): print() - print('-' * 78) + print("-" * 78) print() print(text) if print_exception: traceback.print_exc() sys.exit(code) + def docker_installed(): result = subprocess.run( ["docker"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, + check=False, ) return result.returncode == 0 -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/megazords/cirrus/tests/python-tests/conftest.py b/megazords/cirrus/tests/python-tests/conftest.py index 5894e8b471..a689216ecc 100644 --- a/megazords/cirrus/tests/python-tests/conftest.py +++ b/megazords/cirrus/tests/python-tests/conftest.py @@ -2,15 +2,18 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import json + import pytest -from cirrus import CirrusClient, MetricsHandler, EnrollmentStatusExtraDef +from cirrus import CirrusClient, EnrollmentStatusExtraDef, MetricsHandler from fml import FmlClient class TestMetricsHandler(MetricsHandler): recordings = [] - def record_enrollment_statuses(self, enrollment_status_extras: [EnrollmentStatusExtraDef]): + def record_enrollment_statuses( + self, enrollment_status_extras: [EnrollmentStatusExtraDef] + ): self.recordings.clear() self.recordings.extend(enrollment_status_extras) diff --git a/megazords/cirrus/tests/python-tests/test_cirrus.py b/megazords/cirrus/tests/python-tests/test_cirrus.py index 003236323a..14946a417a 100644 --- a/megazords/cirrus/tests/python-tests/test_cirrus.py +++ b/megazords/cirrus/tests/python-tests/test_cirrus.py @@ -1,10 +1,11 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from cirrus import NimbusError, CirrusClient -from conftest import TestMetricsHandler import json +from cirrus import CirrusClient, NimbusError +from conftest import TestMetricsHandler + def test_one_experiment_should_enroll(client, req): response = json.loads(client.handle_enrollment(req())) @@ -56,6 +57,6 @@ def test_metrics_handler(app_context, experiment, req): data = json.dumps({"data": [experiment]}) client.set_experiments(data) - response = json.loads(client.handle_enrollment(req())) + json.loads(client.handle_enrollment(req())) assert len(test_metrics.recordings) == 1 diff --git a/megazords/cirrus/tests/python-tests/test_fml.py b/megazords/cirrus/tests/python-tests/test_fml.py index db1fe6e58c..a6ce567539 100644 --- a/megazords/cirrus/tests/python-tests/test_fml.py +++ b/megazords/cirrus/tests/python-tests/test_fml.py @@ -2,6 +2,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import json + import pytest from fml import FmlError, InternalError @@ -97,4 +98,4 @@ def test_get_coenrolling_feature_ids(fml_client): client = fml_client("test-include-import.fml.yml", "developer") result = client.get_coenrolling_feature_ids() - assert result == ["example-feature","imported-module-1-included-feature-1"] + assert result == ["example-feature", "imported-module-1-included-feature-1"] diff --git a/megazords/nimbus-experimenter/python/test/conftest.py b/megazords/nimbus-experimenter/python/test/conftest.py index 98ab1d5e74..8876e1c2de 100644 --- a/megazords/nimbus-experimenter/python/test/conftest.py +++ b/megazords/nimbus-experimenter/python/test/conftest.py @@ -1,10 +1,11 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import json + import pytest from fml import FmlClient + @pytest.fixture def fml_client(): def _client(path, channel): diff --git a/megazords/nimbus-experimenter/python/test/test_smoke_test.py b/megazords/nimbus-experimenter/python/test/test_smoke_test.py index 117d7347da..25f5c4dc37 100644 --- a/megazords/nimbus-experimenter/python/test/test_smoke_test.py +++ b/megazords/nimbus-experimenter/python/test/test_smoke_test.py @@ -1,9 +1,7 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import json -import pytest -from fml import FmlError, InternalError + def test_smoke_test(fml_client): fml_client("test.fml.yml", "developer") diff --git a/taskcluster/app_services_taskgraph/__init__.py b/taskcluster/app_services_taskgraph/__init__.py index e834589a76..c01451715c 100644 --- a/taskcluster/app_services_taskgraph/__init__.py +++ b/taskcluster/app_services_taskgraph/__init__.py @@ -2,19 +2,19 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from importlib import import_module -from voluptuous import Optional import os import re +from importlib import import_module -from taskgraph.parameters import extend_parameters_schema from mozilla_taskgraph import register as mozilla_taskgraph_register from mozilla_taskgraph.actions import enable_action +from taskgraph.parameters import extend_parameters_schema +from voluptuous import Optional from . 
import branch_builds -from .build_config import get_version_from_version_txt +from .build_config import get_version_from_version_txt # noqa: F401 -PREVIEW_RE = re.compile(r'\[preview ([\w-]+)\]') +PREVIEW_RE = re.compile(r"\[preview ([\w-]+)\]") RELEASE_PROMOTION_PROJECTS = ( "https://github.com/mozilla/application-services", "https://github.com/mozilla-releng/staging-application-services", @@ -27,32 +27,36 @@ def is_relpro_available(params): def register(graph_config): # Import modules to register decorated functions - _import_modules([ - "branch_builds", - "job", - "target_tasks", - "transforms", - "worker_types", - ]) - - extend_parameters_schema({ - Optional('branch-build'): { - Optional('firefox-android'): { - Optional('owner'): str, - Optional('branch'): str, - }, - Optional('firefox-ios'): { - Optional('owner'): str, - Optional('branch'): str, + _import_modules( + [ + "branch_builds", + "job", + "target_tasks", + "transforms", + "worker_types", + ] + ) + + extend_parameters_schema( + { + Optional("branch-build"): { + Optional("firefox-android"): { + Optional("owner"): str, + Optional("branch"): str, + }, + Optional("firefox-ios"): { + Optional("owner"): str, + Optional("branch"): str, + }, }, - }, - # Publish a "preview build" for a future version. This is set to - # "nightly" for the nightly builds. Other strings indicate making a - # preview build for a particular application-services branch. - 'preview-build': Optional(str), - # Release type. Set to `release` or `nightly` when we're building release artifacts. - 'release-type': Optional(str), - }) + # Publish a "preview build" for a future version. This is set to + # "nightly" for the nightly builds. Other strings indicate making a + # preview build for a particular application-services branch. + "preview-build": Optional(str), + # Release type. Set to `release` or `nightly` when we're building release artifacts. 
+ "release-type": Optional(str), + } + ) # Register mozilla-taskgraph extensions mozilla_taskgraph_register(graph_config) @@ -71,14 +75,16 @@ def get_decision_parameters(graph_config, parameters): pr_title = os.environ.get("APPSERVICES_PULL_REQUEST_TITLE", "") preview_match = PREVIEW_RE.search(pr_title) if preview_match is not None: - if preview_match.group(1) == 'nightly': + if preview_match.group(1) == "nightly": parameters["preview-build"] = "nightly" parameters["target_tasks_method"] = "full" - elif preview_match.group(1) == 'release': + elif preview_match.group(1) == "release": parameters["target_tasks_method"] = "full" parameters["release-type"] = "release" else: - raise NotImplemented("Only nightly preview builds are currently supported") + raise NotImplementedError( + "Only nightly preview builds are currently supported" + ) elif "[ci full]" in pr_title: parameters["target_tasks_method"] = "pr-full" elif "[ci skip]" in pr_title: @@ -95,7 +101,9 @@ def get_decision_parameters(graph_config, parameters): parameters["preview-build"] = "nightly" parameters["release-type"] = "nightly" - parameters['branch-build'] = branch_builds.calc_branch_build_param(parameters) - parameters['filters'].extend([ - 'branch-build', - ]) + parameters["branch-build"] = branch_builds.calc_branch_build_param(parameters) + parameters["filters"].extend( + [ + "branch-build", + ] + ) diff --git a/taskcluster/app_services_taskgraph/branch_builds.py b/taskcluster/app_services_taskgraph/branch_builds.py index a312a9fc18..c21f5df973 100644 --- a/taskcluster/app_services_taskgraph/branch_builds.py +++ b/taskcluster/app_services_taskgraph/branch_builds.py @@ -7,9 +7,10 @@ from taskgraph.filter_tasks import filter_task -REPO_RE = r'((?P[\.\w-]+)[/:])?(?P[\.\w-]+)' -FIREFOX_IOS_BRANCH_RE = re.compile(r'\[firefox-ios:\s*' + REPO_RE + r'\]') -FIREFOX_ANDROID_BRANCH_RE = re.compile(r'\[firefox-android:\s*' + REPO_RE + r'\]') +REPO_RE = r"((?P[\.\w-]+)[/:])?(?P[\.\w-]+)" +FIREFOX_IOS_BRANCH_RE = re.compile(r"\[firefox-ios:\s*" + REPO_RE + r"\]") +FIREFOX_ANDROID_BRANCH_RE = re.compile(r"\[firefox-android:\s*" + REPO_RE + r"\]") + def calc_branch_build_param(parameters): title = os.environ.get("APPSERVICES_PULL_REQUEST_TITLE", "") @@ -17,25 +18,27 @@ def calc_branch_build_param(parameters): firefox_android_branch_match = FIREFOX_ANDROID_BRANCH_RE.search(title) if firefox_android_branch_match: - branch_build['firefox-android'] = { - 'owner': calc_owner(firefox_android_branch_match), - 'branch': firefox_android_branch_match.group('branch'), + branch_build["firefox-android"] = { + "owner": calc_owner(firefox_android_branch_match), + "branch": firefox_android_branch_match.group("branch"), } firefox_ios_branch_match = FIREFOX_IOS_BRANCH_RE.search(title) if firefox_ios_branch_match: - branch_build['firefox-ios'] = { - 'owner': calc_owner(firefox_ios_branch_match), - 'branch': firefox_ios_branch_match.group('branch'), + branch_build["firefox-ios"] = { + "owner": calc_owner(firefox_ios_branch_match), + "branch": firefox_ios_branch_match.group("branch"), } return branch_build + def calc_owner(match): - if match.group('owner'): - return match.group('owner') + if match.group("owner"): + return match.group("owner") else: - return 'mozilla-mobile' + return "mozilla-mobile" + @filter_task("branch-build") def filter_branch_build_tasks(full_task_graph, parameters, graph_config): @@ -47,5 +50,6 @@ def should_keep_task(task): else: # For tasks with a `branch-build` attribute, include them if there's a matching # parameter - return 
task_branch_build in parameters.get('branch-build', {}) + return task_branch_build in parameters.get("branch-build", {}) + return [l for l, task in full_task_graph.tasks.items() if should_keep_task(task)] diff --git a/taskcluster/app_services_taskgraph/build_config.py b/taskcluster/app_services_taskgraph/build_config.py index 4513344768..4e7288d04b 100644 --- a/taskcluster/app_services_taskgraph/build_config.py +++ b/taskcluster/app_services_taskgraph/build_config.py @@ -5,66 +5,71 @@ import functools import os -import yaml -EXTENSIONS = { - 'aar': ('.aar', '.pom', '-sources.jar'), - 'jar': ('.jar', '.pom') +import yaml -} -CHECKSUMS_EXTENSIONS = ('.sha1', '.md5') +EXTENSIONS = {"aar": (".aar", ".pom", "-sources.jar"), "jar": (".jar", ".pom")} +CHECKSUMS_EXTENSIONS = (".sha1", ".md5") def get_components(): build_config = _read_build_config() - return [{ - 'name': name, - 'path': project['path'], - 'artifactId': project['artifactId'], - 'uploadSymbols': project.get('uploadSymbols'), - 'publications': [{ - 'name': publication['name'], - 'type': publication['type'], - } for publication in project['publications']] - } for (name, project) in build_config['projects'].items()] + return [ + { + "name": name, + "path": project["path"], + "artifactId": project["artifactId"], + "uploadSymbols": project.get("uploadSymbols"), + "publications": [ + { + "name": publication["name"], + "type": publication["type"], + } + for publication in project["publications"] + ], + } + for (name, project) in build_config["projects"].items() + ] def get_version(params): version = get_version_from_version_txt() - preview_build = params.get('preview-build') - if preview_build == 'nightly': - components = version.split('.') + preview_build = params.get("preview-build") + if preview_build == "nightly": + components = version.split(".") assert len(components) == 2 - components[1] = params['moz_build_date'] - return '.'.join(components) + components[1] = params["moz_build_date"] + return ".".join(components) elif preview_build is not None: - raise NotImplemented("Only nightly preview builds are currently supported") + raise NotImplementedError("Only nightly preview builds are currently supported") else: return version + @functools.cache def get_version_from_version_txt(): current_dir = os.path.dirname(os.path.realpath(__file__)) - project_dir = os.path.realpath(os.path.join(current_dir, '..', '..')) + project_dir = os.path.realpath(os.path.join(current_dir, "..", "..")) - with open(os.path.join(project_dir, 'version.txt'), 'r') as f: + with open(os.path.join(project_dir, "version.txt"), "r") as f: return f.read().strip() + def get_extensions(module_name): - publications = _read_build_config()["projects"][module_name]['publications'] + publications = _read_build_config()["projects"][module_name]["publications"] extensions = {} for publication in publications: - artifact_type = publication['type'] + artifact_type = publication["type"] if artifact_type not in EXTENSIONS: raise ValueError( "For '{}', 'publication->type' must be one of {}".format( module_name, repr(EXTENSIONS.keys()) ) ) - extensions[publication['name']] = [ - extension + checksum_extension - for extension in EXTENSIONS[artifact_type] - for checksum_extension in ('',) + CHECKSUMS_EXTENSIONS + extensions[publication["name"]] = [ + extension + checksum_extension + for extension in EXTENSIONS[artifact_type] + for checksum_extension in ("",) + CHECKSUMS_EXTENSIONS ] return extensions @@ -72,7 +77,7 @@ def get_extensions(module_name): @functools.cache def 
_read_build_config(): current_dir = os.path.dirname(os.path.realpath(__file__)) - project_dir = os.path.realpath(os.path.join(current_dir, '..', '..')) + project_dir = os.path.realpath(os.path.join(current_dir, "..", "..")) - with open(os.path.join(project_dir, '.buildconfig-android.yml'), 'rb') as f: + with open(os.path.join(project_dir, ".buildconfig-android.yml"), "rb") as f: return yaml.safe_load(f) diff --git a/taskcluster/app_services_taskgraph/job.py b/taskcluster/app_services_taskgraph/job.py index 4789c40b3d..245ae8b035 100644 --- a/taskcluster/app_services_taskgraph/job.py +++ b/taskcluster/app_services_taskgraph/job.py @@ -3,12 +3,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from taskgraph.transforms.run import run_task_using, configure_taskdesc_for_run -from taskgraph.util.schema import Schema, taskref_or_string -from voluptuous import Required, Optional - from pipes import quote as shell_quote +from taskgraph.transforms.run import configure_taskdesc_for_run, run_task_using +from taskgraph.util.schema import Schema, taskref_or_string +from voluptuous import Optional, Required + secret_schema = { Required("name"): str, Required("path"): str, @@ -22,28 +22,32 @@ Optional("json"): bool, } -gradlew_schema = Schema({ - Required("using"): "gradlew", - Optional("pre-gradlew"): [[str]], - Required("gradlew"): [str], - Optional("post-gradlew"): [[str]], - # Base work directory used to set up the task. - Required("workdir"): str, - Optional("use-caches"): bool, - Optional("secrets"): [secret_schema], - Optional("dummy-secrets"): [dummy_secret_schema], -}) - -run_commands_schema = Schema({ - Required("using"): "run-commands", - Optional("pre-commands"): [[str]], - Required("commands"): [[taskref_or_string]], - Required("workdir"): str, - Optional("use-caches"): bool, - Optional("secrets"): [secret_schema], - Optional("dummy-secrets"): [dummy_secret_schema], - Optional("run-task-command"): [str], -}) +gradlew_schema = Schema( + { + Required("using"): "gradlew", + Optional("pre-gradlew"): [[str]], + Required("gradlew"): [str], + Optional("post-gradlew"): [[str]], + # Base work directory used to set up the task. 
+ Required("workdir"): str, + Optional("use-caches"): bool, + Optional("secrets"): [secret_schema], + Optional("dummy-secrets"): [dummy_secret_schema], + } +) + +run_commands_schema = Schema( + { + Required("using"): "run-commands", + Optional("pre-commands"): [[str]], + Required("commands"): [[taskref_or_string]], + Required("workdir"): str, + Optional("use-caches"): bool, + Optional("secrets"): [secret_schema], + Optional("dummy-secrets"): [dummy_secret_schema], + Optional("run-task-command"): [str], + } +) @run_task_using("docker-worker", "run-commands", schema=run_commands_schema) @@ -51,7 +55,8 @@ def configure_run_commands_schema(config, job, taskdesc): run = job["run"] pre_commands = run.pop("pre-commands", []) pre_commands += [ - _generate_dummy_secret_command(secret) for secret in run.pop("dummy-secrets", []) + _generate_dummy_secret_command(secret) + for secret in run.pop("dummy-secrets", []) ] pre_commands += [ _generate_secret_command(secret) for secret in run.get("secrets", []) @@ -64,12 +69,14 @@ def configure_run_commands_schema(config, job, taskdesc): _set_run_task_attributes(job) configure_taskdesc_for_run(config, job, taskdesc, job["worker"]["implementation"]) + @run_task_using("generic-worker", "run-commands", schema=run_commands_schema) def configure_run_commands_schema_generic(config, job, taskdesc): run = job["run"] pre_commands = run.pop("pre-commands", []) pre_commands += [ - _generate_dummy_secret_command(secret) for secret in run.pop("dummy-secrets", []) + _generate_dummy_secret_command(secret) + for secret in run.pop("dummy-secrets", []) ] pre_commands += [ _generate_secret_command(secret) for secret in run.get("secrets", []) @@ -86,13 +93,13 @@ def configure_run_commands_schema_generic(config, job, taskdesc): @run_task_using("docker-worker", "gradlew", schema=gradlew_schema) def configure_gradlew(config, job, taskdesc): run = job["run"] - worker = taskdesc["worker"] = job["worker"] + taskdesc["worker"] = job["worker"] # TODO: to uncomment later when we'll port over logic from bug 1622339 # worker.setdefault("env", {}).update({ - # "ANDROID_SDK_ROOT": path.join( - # run["workdir"], worker["env"]["MOZ_FETCHES_DIR"], "android-sdk-linux" - # ) + # "ANDROID_SDK_ROOT": path.join( + # run["workdir"], worker["env"]["MOZ_FETCHES_DIR"], "android-sdk-linux" + # ) # }) run["command"] = _extract_gradlew_command(run) @@ -104,7 +111,8 @@ def configure_gradlew(config, job, taskdesc): def _extract_gradlew_command(run): pre_gradle_commands = run.pop("pre-gradlew", []) pre_gradle_commands += [ - _generate_dummy_secret_command(secret) for secret in run.pop("dummy-secrets", []) + _generate_dummy_secret_command(secret) + for secret in run.pop("dummy-secrets", []) ] pre_gradle_commands += [ _generate_secret_command(secret) for secret in run.get("secrets", []) @@ -123,9 +131,12 @@ def _generate_secret_command(secret): secret_command = [ "python3", "taskcluster/scripts/get-secret.py", - "-s", secret["name"], - "-k", secret["key"], - "-f", secret["path"], + "-s", + secret["name"], + "-k", + secret["key"], + "-f", + secret["path"], ] if secret.get("json"): secret_command.append("--json") @@ -136,8 +147,10 @@ def _generate_secret_command(secret): def _generate_dummy_secret_command(secret): secret_command = [ "taskcluster/scripts/write-dummy-secret.py", - "-f", secret["path"], - "-c", secret["content"], + "-f", + secret["path"], + "-c", + secret["content"], ] if secret.get("json"): secret_command.append("--json") @@ -161,18 +174,22 @@ def _convert_commands_to_string(commands): part_string = 
part["task-reference"] should_task_reference = True else: - raise ValueError(f'Unsupported dict: {part}') + raise ValueError(f"Unsupported dict: {part}") else: part_string = part sanitized_parts.append(part_string) sanitized_commands.append(sanitized_parts) - shell_quoted_commands = [" ".join(map(shell_quote, command)) for command in sanitized_commands] + shell_quoted_commands = [ + " ".join(map(shell_quote, command)) for command in sanitized_commands + ] full_string_command = " && ".join(shell_quoted_commands) if should_artifact_reference and should_task_reference: - raise NotImplementedError('"arifact-reference" and "task-reference" cannot be both used') + raise NotImplementedError( + '"arifact-reference" and "task-reference" cannot be both used' + ) elif should_artifact_reference: return {"artifact-reference": full_string_command} elif should_task_reference: @@ -185,7 +202,9 @@ def _inject_secrets_scopes(run, taskdesc): secrets = run.pop("secrets", []) scopes = taskdesc.setdefault("scopes", []) new_secret_scopes = ["secrets:get:{}".format(secret["name"]) for secret in secrets] - new_secret_scopes = list(set(new_secret_scopes)) # Scopes must not have any duplicates + new_secret_scopes = list( + set(new_secret_scopes) + ) # Scopes must not have any duplicates scopes.extend(new_secret_scopes) diff --git a/taskcluster/app_services_taskgraph/loader/build_config.py b/taskcluster/app_services_taskgraph/loader/build_config.py index 514f00c794..33d61b34b3 100644 --- a/taskcluster/app_services_taskgraph/loader/build_config.py +++ b/taskcluster/app_services_taskgraph/loader/build_config.py @@ -6,19 +6,14 @@ # `.buildconfig-android.yml` - from taskgraph.loader.transform import loader as base_loader from ..build_config import get_components def loader(kind, path, config, params, loaded_tasks): - config['tasks'] = jobs = { - component['name']: { - 'attributes': { - 'buildconfig': component - } - } + config["tasks"] = { + component["name"]: {"attributes": {"buildconfig": component}} for component in get_components() } diff --git a/taskcluster/app_services_taskgraph/target_tasks.py b/taskcluster/app_services_taskgraph/target_tasks.py index c097c2c4b6..b232715678 100644 --- a/taskcluster/app_services_taskgraph/target_tasks.py +++ b/taskcluster/app_services_taskgraph/target_tasks.py @@ -2,10 +2,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import json import logging -from taskgraph.target_tasks import register_target_task, filter_for_tasks_for +from taskgraph.target_tasks import filter_for_tasks_for, register_target_task from taskgraph.util import taskcluster logger = logging.getLogger(__name__) @@ -15,16 +14,17 @@ def filter_out_shipping_phase(task): """Return False for "release" tasks, i.e. tasks with a promote/ship "shipping_phase" attribute unless they also have the "nightly" attribute. """ - return ( - task.attributes.get("nightly") - or task.attributes.get("shipping_phase") in {None, "build"} - ) + return task.attributes.get("nightly") or task.attributes.get("shipping_phase") in { + None, + "build", + } -@register_target_task('pr-skip') +@register_target_task("pr-skip") def target_tasks_pr_skip(full_task_graph, parameters, graph_config): return [] + # Don't filter out any tasks. 
We use this for: # - Pushes to a release branch # - PRs with `[preview: (nightly|release)]` @@ -32,17 +32,19 @@ def target_tasks_pr_skip(full_task_graph, parameters, graph_config): # This runs the same tasks as `pr-full`, plus: # - build-summary, which sends a slack alert if the build fails # - release-publish, which creates the `release.json` or `nightly.json` artifact -@register_target_task('full') +@register_target_task("full") def target_tasks_release(full_task_graph, parameters, graph_config): return full_task_graph.tasks -@register_target_task('nightly') + +@register_target_task("nightly") def target_tasks_nightly(full_task_graph, parameters, graph_config): head_rev = parameters["head_rev"] try: taskcluster.find_task_id( - f'project.application-services.v2.branch.main.revision.' - f'{head_rev}.taskgraph.decision-nightly') + f"project.application-services.v2.branch.main.revision." + f"{head_rev}.taskgraph.decision-nightly" + ) except BaseException: # No nightly decision task run for this commit, which is expected pass @@ -50,35 +52,44 @@ def target_tasks_nightly(full_task_graph, parameters, graph_config): # We already ran the nightly decision task and tried to build the nightly. Don't try again. logger.info(f"Nightly already ran for {head_rev}, skipping") return [] - return [l for l, task in full_task_graph.tasks.items() if filter_out_shipping_phase(task)] + return [ + l + for l, task in full_task_graph.tasks.items() + if filter_out_shipping_phase(task) + ] -@register_target_task('pr-full') + +@register_target_task("pr-full") def target_tasks_all(full_task_graph, parameters, graph_config): """Target the tasks which have indicated they should be run on this project via the `run_on_projects` attributes.""" + def filter(task): - return (filter_for_tasks_for(task, parameters) - and task.attributes.get("run-on-pr-type", "all") in ("full-ci", "all") - and task.attributes.get('release-type') != 'release-only') + return ( + filter_for_tasks_for(task, parameters) + and task.attributes.get("run-on-pr-type", "all") in ("full-ci", "all") + and task.attributes.get("release-type") != "release-only" + ) return [l for l, task in full_task_graph.tasks.items() if filter(task)] -@register_target_task('pr-normal') +@register_target_task("pr-normal") def target_tasks_default(full_task_graph, parameters, graph_config): """Target the tasks which have indicated they should be run on this project via the `run_on_projects` attributes.""" + def filter(task): - return (filter_for_tasks_for(task, parameters) - and task.attributes.get("run-on-pr-type", "all") in ("normal-ci", "all") - and task.attributes.get('release-type') != 'release-only') + return ( + filter_for_tasks_for(task, parameters) + and task.attributes.get("run-on-pr-type", "all") in ("normal-ci", "all") + and task.attributes.get("release-type") != "release-only" + ) return [l for l, task in full_task_graph.tasks.items() if filter(task)] -def filter_release_promotion( - full_task_graph, filtered_for_candidates, shipping_phase -): +def filter_release_promotion(full_task_graph, filtered_for_candidates, shipping_phase): def filter(task): # Include promotion tasks; these will be optimized out if task.label in filtered_for_candidates: @@ -87,10 +98,7 @@ def filter(task): if task.attributes.get("shipping_phase") == shipping_phase: return True - return [ - label for label, task in full_task_graph.tasks.items() - if filter(task) - ] + return [label for label, task in full_task_graph.tasks.items() if filter(task)] @register_target_task("promote") diff --git 
a/taskcluster/app_services_taskgraph/transforms/__init__.py b/taskcluster/app_services_taskgraph/transforms/__init__.py
index b1967267a8..a13f494955 100644
--- a/taskcluster/app_services_taskgraph/transforms/__init__.py
+++ b/taskcluster/app_services_taskgraph/transforms/__init__.py
@@ -8,9 +8,14 @@
 
 from ..build_config import EXTENSIONS
 
+
 def _extensions(type, secondary_extensions):
     primary_extensions = EXTENSIONS[type]
-    return [package_ext + secondary_ext for package_ext in primary_extensions for secondary_ext in secondary_extensions]
+    return [
+        package_ext + secondary_ext
+        for package_ext in primary_extensions
+        for secondary_ext in secondary_extensions
+    ]
 
 
 def _artifact_filename(name, version, extension):
@@ -21,32 +26,40 @@ def publications_to_artifact_paths(version, publications, secondary_extensions=(
     paths = []
     for publication in publications:
         for extension in _extensions(publication["type"], secondary_extensions):
-            artifact_filename = _artifact_filename(publication['name'], version, extension)
+            artifact_filename = _artifact_filename(
+                publication["name"], version, extension
+            )
             paths.append(f"public/build/{artifact_filename}")
 
     return paths
 
 
-def publications_to_artifact_map_paths(version, publications, preview_build, secondary_extensions):
+def publications_to_artifact_map_paths(
+    version, publications, preview_build, secondary_extensions
+):
     build_map_paths = {}
     for publication in publications:
         for extension in _extensions(publication["type"], secondary_extensions):
-            publication_name = publication['name']
+            publication_name = publication["name"]
             artifact_filename = _artifact_filename(publication_name, version, extension)
             if preview_build is not None:
                 # Both nightly and other preview builds are placed in a separate directory
-                destination = "maven2/org/mozilla/appservices/nightly/{}/{}/{}".format(publication_name, version, artifact_filename)
+                destination = "maven2/org/mozilla/appservices/nightly/{}/{}/{}".format(
+                    publication_name, version, artifact_filename
+                )
             else:
-                destination = "maven2/org/mozilla/appservices/{}/{}/{}".format(publication_name, version, artifact_filename)
+                destination = "maven2/org/mozilla/appservices/{}/{}/{}".format(
+                    publication_name, version, artifact_filename
+                )
 
             build_map_paths[f"public/build/{artifact_filename}"] = {
                 "checksums_path": "",  # XXX beetmover marks this as required, but it's not needed
-                "destinations": [destination]
+                "destinations": [destination],
             }
 
     return build_map_paths
 
 
-@group_by('component')
+@group_by("component")
 def component_grouping(config, tasks):
     """Custom group-by function for `from_deps` transforms"""
     groups = {}
@@ -62,10 +75,11 @@ def component_grouping(config, tasks):
         groups.setdefault(component, []).append(task)
 
     tasks_for_all_components = [
-        task for task in tasks
+        task
+        for task in tasks
         if task.attributes.get("buildconfig", {}).get("name", "") == "all"
     ]
-    for _, tasks in groups.items():
-        tasks.extend(copy.deepcopy(tasks_for_all_components))
+    for _, grouped_tasks in groups.items():
+        grouped_tasks.extend(copy.deepcopy(tasks_for_all_components))
 
     return groups.values()
diff --git a/taskcluster/app_services_taskgraph/transforms/appservices.py b/taskcluster/app_services_taskgraph/transforms/appservices.py
index 2bea6d4348..5828531e5b 100644
--- a/taskcluster/app_services_taskgraph/transforms/appservices.py
+++ b/taskcluster/app_services_taskgraph/transforms/appservices.py
@@ -9,16 +9,18 @@
 
 transforms = TransformSequence()
 
+
 @transforms.add
 def add_release_routes(config, tasks):
     for task in tasks:
         # Add routes listed in `release-routes` if we're building for a release
-        release_routes = task.get('attributes', {}).get('release-routes')
-        release_type = config.params.get('release-type')
+        release_routes = task.get("attributes", {}).get("release-routes")
+        release_type = config.params.get("release-type")
         if release_type and release_routes:
-            task.setdefault('routes', []).extend(release_routes)
+            task.setdefault("routes", []).extend(release_routes)
         yield task
 
+
 @transforms.add
 def transform_routes(config, tasks):
     version = get_version(config.params)
diff --git a/taskcluster/app_services_taskgraph/transforms/beetmover.py b/taskcluster/app_services_taskgraph/transforms/beetmover.py
index a4aabc9d78..532e7a7da2 100644
--- a/taskcluster/app_services_taskgraph/transforms/beetmover.py
+++ b/taskcluster/app_services_taskgraph/transforms/beetmover.py
@@ -9,8 +9,8 @@
 from taskgraph.util.dependencies import get_dependencies, get_primary_dependency
 from taskgraph.util.schema import resolve_keyed_by
 
-from . import publications_to_artifact_paths, publications_to_artifact_map_paths
 from ..build_config import get_version
+from . import publications_to_artifact_map_paths, publications_to_artifact_paths
 
 transforms = TransformSequence()
 
diff --git a/taskcluster/app_services_taskgraph/transforms/branch_build.py b/taskcluster/app_services_taskgraph/transforms/branch_build.py
index 3e97ad9fe4..f3e59c7045 100644
--- a/taskcluster/app_services_taskgraph/transforms/branch_build.py
+++ b/taskcluster/app_services_taskgraph/transforms/branch_build.py
@@ -6,22 +6,29 @@
 
 transforms = TransformSequence()
 
+
 @transforms.add
 def setup(config, tasks):
-    branch_build_params = config.params.get('branch-build', {})
+    branch_build_params = config.params.get("branch-build", {})
 
     for task in tasks:
         if "run" in task:
             run = task["run"]
             if "pre-gradlew" in task["run"]:
-                run["pre-gradlew"] = transform_commands(branch_build_params, run["pre-gradlew"])
+                run["pre-gradlew"] = transform_commands(
+                    branch_build_params, run["pre-gradlew"]
+                )
             if "pre-commands" in task["run"]:
-                run["pre-commands"] = transform_commands(branch_build_params, run["pre-commands"])
+                run["pre-commands"] = transform_commands(
+                    branch_build_params, run["pre-commands"]
+                )
         yield task
 
+
 def transform_commands(branch_build_params, command_list):
     return [transform_command(branch_build_params, command) for command in command_list]
 
+
 def transform_command(branch_build_params, command):
     if command == "setup-branch-build-firefox-android":
         try:
@@ -31,9 +38,9 @@ def transform_command(branch_build_params, command):
             # filter_branch_build_tasks. In the meantime, return a placeholder value.
             return ["/bin/false"]
         return [
-            'taskcluster/scripts/setup-branch-build-firefox-android.py',
-            firefox_android_params.get('owner', 'mozilla-mobile'),
-            firefox_android_params.get('branch', 'main'),
+            "taskcluster/scripts/setup-branch-build-firefox-android.py",
+            firefox_android_params.get("owner", "mozilla-mobile"),
+            firefox_android_params.get("branch", "main"),
         ]
     elif command == "setup-branch-build-firefox-ios":
         try:
@@ -43,9 +50,9 @@ def transform_command(branch_build_params, command):
             # filter_branch_build_tasks. In the meantime, return a placeholder value. 
return ["/bin/false"] return [ - 'taskcluster/scripts/setup-branch-build-firefox-ios.py', - firefox_ios_params.get('owner', 'mozilla-mobile'), - firefox_ios_params.get('branch', 'main'), + "taskcluster/scripts/setup-branch-build-firefox-ios.py", + firefox_ios_params.get("owner", "mozilla-mobile"), + firefox_ios_params.get("branch", "main"), ] else: return command diff --git a/taskcluster/app_services_taskgraph/transforms/deps_complete.py b/taskcluster/app_services_taskgraph/transforms/deps_complete.py index 8fb1d8a0cb..aa6004af47 100644 --- a/taskcluster/app_services_taskgraph/transforms/deps_complete.py +++ b/taskcluster/app_services_taskgraph/transforms/deps_complete.py @@ -1,5 +1,5 @@ -from copy import deepcopy import itertools +from copy import deepcopy from taskgraph import MAX_DEPENDENCIES from taskgraph.transforms.base import TransformSequence @@ -7,35 +7,35 @@ transforms = TransformSequence() alerts = TransformSequence() + @transforms.add def deps_complete_script(config, tasks): """Setup the deps-complete.py script""" for task in tasks: - task.update({ - # Run this task when all dependencies are completed, rather than - # requiring them to be successful - 'requires': 'all-resolved', - 'worker-type': 'b-linux', - 'worker': { - 'chain-of-trust': True, - 'docker-image': { 'in-tree': 'linux' }, - 'max-run-time': 1800, - 'env': { - 'DECISION_TASK_ID': { - 'task-reference': '' - }, - 'TASK_ID': { - 'task-reference': '' + task.update( + { + # Run this task when all dependencies are completed, rather than + # requiring them to be successful + "requires": "all-resolved", + "worker-type": "b-linux", + "worker": { + "chain-of-trust": True, + "docker-image": {"in-tree": "linux"}, + "max-run-time": 1800, + "env": { + "DECISION_TASK_ID": {"task-reference": ""}, + "TASK_ID": {"task-reference": ""}, }, }, - }, - 'run': { - 'using': 'run-task', - 'command': '/builds/worker/checkouts/vcs/taskcluster/scripts/deps-complete.py', + "run": { + "using": "run-task", + "command": "/builds/worker/checkouts/vcs/taskcluster/scripts/deps-complete.py", + }, } - }) + ) yield task + @transforms.add def convert_dependencies(config, tasks): """ @@ -47,11 +47,11 @@ def convert_dependencies(config, tasks): for task in tasks: task.setdefault("soft-dependencies", []) task["soft-dependencies"] += [ - dep_task.label - for dep_task in config.kind_dependencies_tasks.values() + dep_task.label for dep_task in config.kind_dependencies_tasks.values() ] yield task + @alerts.add @transforms.add def add_alert_routes(config, tasks): @@ -64,13 +64,14 @@ def add_alert_routes(config, tasks): yield task continue - task.setdefault('routes', []) + task.setdefault("routes", []) for name, value in alerts.items(): if name not in ("slack-channel", "email", "pulse", "matrix-room"): raise KeyError("Unknown alert type: {}".format(name)) - task['routes'].append("notify.{}.{}.on-failed".format(name, value)) + task["routes"].append("notify.{}.{}.on-failed".format(name, value)) yield task + # Transform that adjusts the dependencies to not exceed MAX_DEPENDENCIES # # This transform checks if the dependency count exceeds MAX_DEPENDENCIES. 
If @@ -83,13 +84,15 @@ def add_alert_routes(config, tasks): # This code is based off the reverse_chunk_deps transform from Gecko reverse_chunk = TransformSequence() + def adjust_dependencies_child_job(orig_job, deps, count): job = deepcopy(orig_job) job["soft-dependencies"] = deps job["label"] = "{} - {}".format(orig_job["label"], count) - del job["routes"] # don't send alerts for child jobs + del job["routes"] # don't send alerts for child jobs return job + @reverse_chunk.add def adjust_dependencies(config, jobs): for job in jobs: diff --git a/taskcluster/app_services_taskgraph/transforms/module_build.py b/taskcluster/app_services_taskgraph/transforms/module_build.py index 917698f1c0..048346c084 100644 --- a/taskcluster/app_services_taskgraph/transforms/module_build.py +++ b/taskcluster/app_services_taskgraph/transforms/module_build.py @@ -5,22 +5,23 @@ from taskgraph.transforms.base import TransformSequence -from ..build_config import get_version, get_extensions - +from ..build_config import get_extensions, get_version transforms = TransformSequence() + @transforms.add def rustup_setup(config, tasks): for task in tasks: task["run"].setdefault("pre-gradlew", []) - task["run"]["pre-gradlew"].insert(0, + task["run"]["pre-gradlew"].insert( + 0, [ "git", "submodule", "update", "--init", - ] + ], ) yield task @@ -28,9 +29,13 @@ def rustup_setup(config, tasks): @transforms.add def build_task(config, tasks): if config.params.get("preview-build") is None: - path_prefix = "/builds/worker/checkouts/vcs/build/maven/org/mozilla/appservices/" + path_prefix = ( + "/builds/worker/checkouts/vcs/build/maven/org/mozilla/appservices/" + ) else: - path_prefix = "/builds/worker/checkouts/vcs/build/maven/org/mozilla/appservices/nightly" + path_prefix = ( + "/builds/worker/checkouts/vcs/build/maven/org/mozilla/appservices/nightly" + ) for task in tasks: module_info = task["attributes"]["buildconfig"] @@ -41,9 +46,11 @@ def build_task(config, tasks): if module_info.get("uploadSymbols", False): task["attributes"]["uploadSymbols"] = "yes" - for i,item in enumerate(task["run"]["gradlew"]): - task["run"]["gradlew"][i] = task["run"]["gradlew"][i].format(module_name=name) - if config.params.get('preview-build') is not None: + for i, item in enumerate(task["run"]["gradlew"]): + task["run"]["gradlew"][i] = task["run"]["gradlew"][i].format( + module_name=name + ) + if config.params.get("preview-build") is not None: task["run"]["gradlew"].append(f"-PnightlyVersion={version}") task["description"] = task["description"].format(module_name=name) task["worker"]["artifacts"] = artifacts = [] @@ -52,14 +59,17 @@ def build_task(config, tasks): for publication_name, extensions in all_extensions.items(): for extension in extensions: artifact_filename = f"{publication_name}-{version}{extension}" - artifacts.append({ - "name": f"public/build/{artifact_filename}", - "path": f"{path_prefix}/{publication_name}/{version}/{artifact_filename}", - "type": "file", - }) + artifacts.append( + { + "name": f"public/build/{artifact_filename}", + "path": f"{path_prefix}/{publication_name}/{version}/{artifact_filename}", + "type": "file", + } + ) yield task + @transforms.add def generate_symbols(config, tasks): for task in tasks: @@ -76,10 +86,12 @@ def generate_symbols(config, tasks): symbols_path, ] ) - artifacts.append({ - "name": "public/build/crashreporter-symbols.tar.gz", - "path": symbols_path, - "type": "file", - }) + artifacts.append( + { + "name": "public/build/crashreporter-symbols.tar.gz", + "path": symbols_path, + "type": "file", + } + 
) yield task diff --git a/taskcluster/app_services_taskgraph/transforms/nimbus.py b/taskcluster/app_services_taskgraph/transforms/nimbus.py index 3134fb6275..cbc8817027 100644 --- a/taskcluster/app_services_taskgraph/transforms/nimbus.py +++ b/taskcluster/app_services_taskgraph/transforms/nimbus.py @@ -3,28 +3,30 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. from collections import namedtuple + from taskgraph.transforms.base import TransformSequence LINUX_BUILD_TARGETS = ( - 'aarch64-unknown-linux-gnu', - 'x86_64-unknown-linux-gnu', - 'x86_64-unknown-linux-musl', - 'x86_64-pc-windows-gnu', + "aarch64-unknown-linux-gnu", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-musl", + "x86_64-pc-windows-gnu", ) MAC_BUILD_TARGETS = ( - 'x86_64-apple-darwin', - 'aarch64-apple-darwin', + "x86_64-apple-darwin", + "aarch64-apple-darwin", ) # Transform for the nimbus-build tasks build = TransformSequence() + @build.add def setup_build_tasks(config, tasks): for task in tasks: - binary = task['attributes']['binary'] - target = task['attributes']['target'] + binary = task["attributes"]["binary"] + target = task["attributes"]["target"] if target in LINUX_BUILD_TARGETS: setup_linux_build_task(task, target, binary) elif target in MAC_BUILD_TARGETS: @@ -33,72 +35,75 @@ def setup_build_tasks(config, tasks): raise ValueError(f"Unknown target for nimbus build task: {target}") yield task + def setup_linux_build_task(task, target, binary): - docker_image = 'linux' + docker_image = "linux" - if target in ('aarch64-unknown-linux-gnu', 'x86_64-unknown-linux-gnu'): - docker_image = 'linux2004' + if target in ("aarch64-unknown-linux-gnu", "x86_64-unknown-linux-gnu"): + docker_image = "linux2004" - task['description'] = f'Build {binary} ({target})' - task['worker-type'] = 'b-linux' - task['worker'] = { - 'max-run-time': 1800, - 'docker-image': { 'in-tree': docker_image }, - 'artifacts': [ + task["description"] = f"Build {binary} ({target})" + task["worker-type"] = "b-linux" + task["worker"] = { + "max-run-time": 1800, + "docker-image": {"in-tree": docker_image}, + "artifacts": [ { - 'name': f'public/build/{binary}-{target}.zip', - 'path': f'/builds/worker/checkouts/vcs/build/{binary}-{target}.zip', - 'type': 'file', + "name": f"public/build/{binary}-{target}.zip", + "path": f"/builds/worker/checkouts/vcs/build/{binary}-{target}.zip", + "type": "file", } - ] + ], } - task['run'] = { - 'using': 'run-commands', - 'pre-commands': [ - ['git', 'submodule', 'update', '--init'], - ['source', 'taskcluster/scripts/toolchain/setup-fetched-rust-toolchain.sh'], + task["run"] = { + "using": "run-commands", + "pre-commands": [ + ["git", "submodule", "update", "--init"], + ["source", "taskcluster/scripts/toolchain/setup-fetched-rust-toolchain.sh"], ], - 'commands': [ - ['taskcluster/scripts/nimbus-build.py', 'build/', binary, target], + "commands": [ + ["taskcluster/scripts/nimbus-build.py", "build/", binary, target], ], - 'use-caches': True, + "use-caches": True, } - task['fetches'] = { - 'toolchain': [ - 'rust', + task["fetches"] = { + "toolchain": [ + "rust", ] } + def setup_mac_build_task(task, target, binary): - task['description'] = f'Build {binary} ({target})' - task['worker-type'] = 'b-osx' - task['worker'] = { - 'max-run-time': 1800, - 'artifacts': [ + task["description"] = f"Build {binary} ({target})" + task["worker-type"] = "b-osx" + task["worker"] = { + "max-run-time": 1800, + "artifacts": [ { - 'name': f'public/build/{binary}-{target}.zip', - 'path': f'checkouts/vcs/build/{binary}-{target}.zip', - 
'type': 'file', + "name": f"public/build/{binary}-{target}.zip", + "path": f"checkouts/vcs/build/{binary}-{target}.zip", + "type": "file", } - ] + ], } - task['run'] = { - 'using': 'run-commands', - 'run-task-command': ["/usr/local/bin/python3", "run-task"], - 'pre-commands': [ + task["run"] = { + "using": "run-commands", + "run-task-command": ["/usr/local/bin/python3", "run-task"], + "pre-commands": [ ["source", "taskcluster/scripts/setup-mac-worker.sh"], ["source", "taskcluster/scripts/toolchain/setup-fetched-rust-toolchain.sh"], ], - 'commands': [ - [ "taskcluster/scripts/nimbus-build-osx.sh", "build/", binary, target ] + "commands": [ + ["taskcluster/scripts/nimbus-build-osx.sh", "build/", binary, target] ], } - task['fetches'] = { - 'toolchain': [ - 'rust-osx', + task["fetches"] = { + "toolchain": [ + "rust-osx", ] } + # Transform for the nimbus-assemble task # # This task produces a single zip file + checksum that combines the binaries from each individual @@ -108,72 +113,71 @@ def setup_mac_build_task(task, target, binary): # nimbus-build task that a nimbus-binaries-assemble task depends on NimbusBuildDep = namedtuple("NimbusBuildDep", "label target") + @assemble.add def setup_assemble_tasks(config, tasks): for task in tasks: # Which nimbus binary are we assembling? - binary = task['attributes']['nimbus-binary'] + binary = task["attributes"]["nimbus-binary"] # Find nimbus-build task dependencies for our binary. build_task_deps = [ - NimbusBuildDep(label, build_task.attributes['target']) + NimbusBuildDep(label, build_task.attributes["target"]) for (label, build_task) in config.kind_dependencies_tasks.items() - if build_task.kind == "nimbus-build" and build_task.attributes.get('binary') == binary + if build_task.kind == "nimbus-build" + and build_task.attributes.get("binary") == binary ] - task['dependencies'] = { - dep.label: dep.label - for dep in build_task_deps - } - task['fetches'] = { + task["dependencies"] = {dep.label: dep.label for dep in build_task_deps} + task["fetches"] = { dep.label: [ { - 'artifact': f'{binary}-{dep.target}.zip', - 'dest': binary, - 'extract': True if binary == 'nimbus-fml' else False + "artifact": f"{binary}-{dep.target}.zip", + "dest": binary, + "extract": True if binary == "nimbus-fml" else False, } ] for dep in build_task_deps } - artifact_path = '/builds/worker/artifacts' - if binary == 'nimbus-fml': + artifact_path = "/builds/worker/artifacts" + if binary == "nimbus-fml": # For nimbus-fml, we zip all binaries together and include the sha256 - task['release-artifacts'] = [ - f'{binary}.{ext}' for ext in ('zip', 'sha256') - ] - - task['run'] = { - 'using': 'run-commands', - 'commands': [ - ['mkdir', '-p', artifact_path], - ['cd', '/builds/worker/fetches/nimbus-fml'], - ['zip', f'{artifact_path}/nimbus-fml.zip', '-r', '.'], - ['cd', artifact_path], - ['eval', 'sha256sum', 'nimbus-fml.zip', '>', 'nimbus-fml.sha256'], - ] + task["release-artifacts"] = [f"{binary}.{ext}" for ext in ("zip", "sha256")] + + task["run"] = { + "using": "run-commands", + "commands": [ + ["mkdir", "-p", artifact_path], + ["cd", "/builds/worker/fetches/nimbus-fml"], + ["zip", f"{artifact_path}/nimbus-fml.zip", "-r", "."], + ["cd", artifact_path], + ["eval", "sha256sum", "nimbus-fml.zip", ">", "nimbus-fml.sha256"], + ], } elif binary == "nimbus-cli": # For nimbus-cli, we just publish the binaries separately - task['release-artifacts'] = [ - f'{binary}-{dep.target}.zip' for dep in build_task_deps + task["release-artifacts"] = [ + f"{binary}-{dep.target}.zip" for dep in 
build_task_deps ] # Publish a JSON file with information about the build - task['release-artifacts'].append('nimbus-cli.json') + task["release-artifacts"].append("nimbus-cli.json") sources = [ - f'/builds/worker/fetches/{binary}/{binary}-{dep.target}.zip' + f"/builds/worker/fetches/{binary}/{binary}-{dep.target}.zip" for dep in build_task_deps ] - task['run'] = { - 'using': 'run-commands', - 'commands': [ - ['mkdir', '-p', artifact_path], - ['cp'] + sources + [artifact_path], - ['taskcluster/scripts/generate-nimbus-cli-json.py', f'{artifact_path}/nimbus-cli.json'], - ] + task["run"] = { + "using": "run-commands", + "commands": [ + ["mkdir", "-p", artifact_path], + ["cp"] + sources + [artifact_path], + [ + "taskcluster/scripts/generate-nimbus-cli-json.py", + f"{artifact_path}/nimbus-cli.json", + ], + ], } - yield task diff --git a/taskcluster/app_services_taskgraph/transforms/release_artifacts.py b/taskcluster/app_services_taskgraph/transforms/release_artifacts.py index f900c19a71..6e47cb54b0 100644 --- a/taskcluster/app_services_taskgraph/transforms/release_artifacts.py +++ b/taskcluster/app_services_taskgraph/transforms/release_artifacts.py @@ -6,6 +6,7 @@ under 'public/build' and adds the corresponding attribute needed by downstream release tasks. """ + import os from taskgraph.transforms.base import TransformSequence @@ -14,7 +15,6 @@ from taskgraph.util.workertypes import worker_type_implementation from voluptuous import Extra, Optional, Required - transforms = TransformSequence() release_artifacts_schema = Schema( @@ -35,7 +35,9 @@ def add_release_artifacts(config, tasks): yield task continue - release_artifacts = task.setdefault("attributes", {}).setdefault("release-artifacts", []) + release_artifacts = task.setdefault("attributes", {}).setdefault( + "release-artifacts", [] + ) impl, _ = worker_type_implementation(config.graph_config, task["worker-type"]) if impl == "generic-worker": diff --git a/taskcluster/app_services_taskgraph/transforms/release_publish.py b/taskcluster/app_services_taskgraph/transforms/release_publish.py index 1a1876c89c..59a83a127e 100644 --- a/taskcluster/app_services_taskgraph/transforms/release_publish.py +++ b/taskcluster/app_services_taskgraph/transforms/release_publish.py @@ -9,32 +9,35 @@ transforms = TransformSequence() + @transforms.add def setup_command(config, tasks): version = get_version(config.params) instance = "production" if config.params["level"] == "3" else "staging" nightly = "-nightly" if config.params.get("preview-build") else "" maven_channel = f"maven{nightly}-{instance}" - release_type = config.params.get('release-type', 'nightly') + release_type = config.params.get("release-type", "nightly") head_rev = config.params["head_rev"] for task in tasks: task["run"]["commands"] = [ - [ - "/builds/worker/checkouts/vcs/taskcluster/scripts/generate-release-json.py", - f"/builds/worker/checkouts/vcs/build/{release_type}.json", - "--version", version, - "--maven-channel", maven_channel, - ] + [ + "/builds/worker/checkouts/vcs/taskcluster/scripts/generate-release-json.py", + f"/builds/worker/checkouts/vcs/build/{release_type}.json", + "--version", + version, + "--maven-channel", + maven_channel, + ] ] - task['worker']['artifacts'] = [ + task["worker"]["artifacts"] = [ { "name": f"public/build/{release_type}.json", "path": f"/builds/worker/checkouts/vcs/build/{release_type}.json", "type": "file", } ] - if config.params['level'] == '3': + if config.params["level"] == "3": task["routes"] = [ 
f"index.project.application-services.v2.{release_type}.latest", f"index.project.application-services.v2.{release_type}.{version}", @@ -42,6 +45,7 @@ def setup_command(config, tasks): ] yield task + @transforms.add def convert_dependencies(config, tasks): """ @@ -54,7 +58,6 @@ def convert_dependencies(config, tasks): for task in tasks: task.setdefault("soft-dependencies", []) task["soft-dependencies"] += [ - dep_task.label - for dep_task in config.kind_dependencies_tasks.values() + dep_task.label for dep_task in config.kind_dependencies_tasks.values() ] yield task diff --git a/taskcluster/app_services_taskgraph/transforms/secrets.py b/taskcluster/app_services_taskgraph/transforms/secrets.py index 7c39a2453f..310866f8f0 100644 --- a/taskcluster/app_services_taskgraph/transforms/secrets.py +++ b/taskcluster/app_services_taskgraph/transforms/secrets.py @@ -5,11 +5,9 @@ Resolve secrets and dummy secrets """ - from taskgraph.transforms.base import TransformSequence from taskgraph.util.schema import resolve_keyed_by - transforms = TransformSequence() @@ -18,9 +16,6 @@ def resolve_keys(config, tasks): for task in tasks: for key in ("run.secrets", "run.dummy-secrets"): resolve_keyed_by( - task, - key, - item_name=task["name"], - level=config.params["level"] + task, key, item_name=task["name"], level=config.params["level"] ) yield task diff --git a/taskcluster/app_services_taskgraph/transforms/server_megazord.py b/taskcluster/app_services_taskgraph/transforms/server_megazord.py index b03a504030..7fa1075a78 100644 --- a/taskcluster/app_services_taskgraph/transforms/server_megazord.py +++ b/taskcluster/app_services_taskgraph/transforms/server_megazord.py @@ -3,28 +3,30 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. from collections import namedtuple + from taskgraph.transforms.base import TransformSequence # Transform for the nimbus-build tasks build = TransformSequence() LINUX_BUILD_TARGETS = ( - 'aarch64-unknown-linux-gnu', - 'x86_64-unknown-linux-gnu', - 'x86_64-unknown-linux-musl', - 'x86_64-pc-windows-gnu', + "aarch64-unknown-linux-gnu", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-musl", + "x86_64-pc-windows-gnu", ) MAC_BUILD_TARGETS = ( - 'x86_64-apple-darwin', - 'aarch64-apple-darwin', + "x86_64-apple-darwin", + "aarch64-apple-darwin", ) + @build.add def setup_build_tasks(config, tasks): for task in tasks: - binary = task['attributes']['megazord'] - target = task['attributes']['target'] + binary = task["attributes"]["megazord"] + target = task["attributes"]["target"] if target in LINUX_BUILD_TARGETS: setup_linux_build_task(task, target, binary) elif target in MAC_BUILD_TARGETS: @@ -33,70 +35,73 @@ def setup_build_tasks(config, tasks): raise ValueError(f"Unknown target for nimbus build task: {target}") yield task + def setup_linux_build_task(task, target, binary): - task['description'] = f'Build {binary} ({target})' - task['worker-type'] = 'b-linux' - docker_image = 'linux' - if target in ('aarch64-unknown-linux-gnu', 'x86_64-unknown-linux-gnu'): - docker_image = 'linux2004' - task['worker'] = { - 'max-run-time': 1800, - 'docker-image': { 'in-tree': docker_image }, - 'artifacts': [ + task["description"] = f"Build {binary} ({target})" + task["worker-type"] = "b-linux" + docker_image = "linux" + if target in ("aarch64-unknown-linux-gnu", "x86_64-unknown-linux-gnu"): + docker_image = "linux2004" + task["worker"] = { + "max-run-time": 1800, + "docker-image": {"in-tree": docker_image}, + "artifacts": [ { - 'name': f'public/build/{binary}-{target}.zip', - 'path': 
f'/builds/worker/checkouts/vcs/build/{binary}-{target}.zip', - 'type': 'file', + "name": f"public/build/{binary}-{target}.zip", + "path": f"/builds/worker/checkouts/vcs/build/{binary}-{target}.zip", + "type": "file", } - ] + ], } - task['run'] = { - 'using': 'run-commands', - 'pre-commands': [ - ['git', 'submodule', 'update', '--init'], - ['source', 'taskcluster/scripts/toolchain/setup-fetched-rust-toolchain.sh'], + task["run"] = { + "using": "run-commands", + "pre-commands": [ + ["git", "submodule", "update", "--init"], + ["source", "taskcluster/scripts/toolchain/setup-fetched-rust-toolchain.sh"], ], - 'commands': [ - ['taskcluster/scripts/server-megazord-build.py', binary, target, 'build/'], + "commands": [ + ["taskcluster/scripts/server-megazord-build.py", binary, target, "build/"], ], - 'use-caches': True, + "use-caches": True, } - task['fetches'] = { - 'toolchain': [ - 'rust', + task["fetches"] = { + "toolchain": [ + "rust", ] } + def setup_mac_build_task(task, target, binary): - task['description'] = f'Build {binary} ({target})' - task['worker-type'] = 'b-osx' - task['worker'] = { - 'max-run-time': 1800, - 'artifacts': [ + task["description"] = f"Build {binary} ({target})" + task["worker-type"] = "b-osx" + task["worker"] = { + "max-run-time": 1800, + "artifacts": [ { - 'name': f'public/build/{binary}-{target}.zip', - 'path': f'checkouts/vcs/build/{binary}-{target}.zip', - 'type': 'file', + "name": f"public/build/{binary}-{target}.zip", + "path": f"checkouts/vcs/build/{binary}-{target}.zip", + "type": "file", } - ] + ], } - task['run'] = { - 'using': 'run-commands', - 'run-task-command': ["/usr/local/bin/python3", "run-task"], - 'pre-commands': [ + task["run"] = { + "using": "run-commands", + "run-task-command": ["/usr/local/bin/python3", "run-task"], + "pre-commands": [ ["source", "taskcluster/scripts/setup-mac-worker.sh"], ["source", "taskcluster/scripts/toolchain/setup-fetched-rust-toolchain.sh"], ], - 'commands': [ - [ "taskcluster/scripts/server-megazord-build.py", binary, target, "build/" ] + "commands": [ + ["taskcluster/scripts/server-megazord-build.py", binary, target, "build/"] ], } - task['fetches'] = { - 'toolchain': [ - 'rust-osx', + task["fetches"] = { + "toolchain": [ + "rust-osx", ] } + # Transform for the server-megazord-assemble task # # This task produces a single zip file + checksum that combines the binaries from each individual @@ -106,49 +111,46 @@ def setup_mac_build_task(task, target, binary): # server-megazord task that a server-megazord-assemble task depends on MegazordBuildDep = namedtuple("MegazordBuildDep", "label target") + @assemble.add def setup_assemble_tasks(config, tasks): for task in tasks: # Which megazord binary are we assembling? - binary = task['attributes']['megazord'] + binary = task["attributes"]["megazord"] # Find server-megazord-build task dependencies for our binary. 
build_task_deps = [ - MegazordBuildDep(label, build_task.attributes['target']) + MegazordBuildDep(label, build_task.attributes["target"]) for (label, build_task) in config.kind_dependencies_tasks.items() - if build_task.kind == "server-megazord-build" and build_task.attributes.get('megazord') == binary + if build_task.kind == "server-megazord-build" + and build_task.attributes.get("megazord") == binary ] - task['dependencies'] = { - dep.label: dep.label - for dep in build_task_deps - } - task['fetches'] = { + task["dependencies"] = {dep.label: dep.label for dep in build_task_deps} + task["fetches"] = { dep.label: [ { - 'artifact': f'{binary}-{dep.target}.zip', - 'dest': binary, - 'extract': True, + "artifact": f"{binary}-{dep.target}.zip", + "dest": binary, + "extract": True, } ] for dep in build_task_deps } - artifact_path = '/builds/worker/artifacts' + artifact_path = "/builds/worker/artifacts" # For server megazords, we zip all binaries together and include the sha256 - task['release-artifacts'] = [ - f'{binary}.{ext}' for ext in ('zip', 'sha256') - ] + task["release-artifacts"] = [f"{binary}.{ext}" for ext in ("zip", "sha256")] - task['run'] = { - 'using': 'run-commands', - 'commands': [ - ['mkdir', '-p', artifact_path], - ['cd', f'/builds/worker/fetches/{binary}'], - ['zip', f'{artifact_path}/{binary}.zip', '-r', '.'], - ['cd', artifact_path], - ['eval', 'sha256sum', f'{binary}.zip', '>', f'{binary}.sha256'], - ] + task["run"] = { + "using": "run-commands", + "commands": [ + ["mkdir", "-p", artifact_path], + ["cd", f"/builds/worker/fetches/{binary}"], + ["zip", f"{artifact_path}/{binary}.zip", "-r", "."], + ["cd", artifact_path], + ["eval", "sha256sum", f"{binary}.zip", ">", f"{binary}.sha256"], + ], } yield task diff --git a/taskcluster/app_services_taskgraph/transforms/signing.py b/taskcluster/app_services_taskgraph/transforms/signing.py index 558f5bc1bf..6c8110fb7a 100644 --- a/taskcluster/app_services_taskgraph/transforms/signing.py +++ b/taskcluster/app_services_taskgraph/transforms/signing.py @@ -6,8 +6,8 @@ from taskgraph.transforms.base import TransformSequence from taskgraph.util.schema import resolve_keyed_by -from . import publications_to_artifact_paths from ..build_config import get_version +from . 
import publications_to_artifact_paths transforms = TransformSequence() diff --git a/taskcluster/app_services_taskgraph/transforms/toolchain.py b/taskcluster/app_services_taskgraph/transforms/toolchain.py index a1ed54f213..0b2f0270eb 100644 --- a/taskcluster/app_services_taskgraph/transforms/toolchain.py +++ b/taskcluster/app_services_taskgraph/transforms/toolchain.py @@ -14,9 +14,9 @@ # TODO: Bug 1637695 to be removed once we retire these old indexes TOOLCHAIN_OLD_INDEX = { - 'android': 'index.project.application-services.application-services.build.libs.android.{sha}', - 'desktop-linux': 'index.project.application-services.application-services.build.libs.desktop.linux.{sha}', - 'desktop-macos': 'index.project.application-services.application-services.build.libs.desktop.macos.{sha}', + "android": "index.project.application-services.application-services.build.libs.android.{sha}", + "desktop-linux": "index.project.application-services.application-services.build.libs.desktop.linux.{sha}", + "desktop-macos": "index.project.application-services.application-services.build.libs.desktop.macos.{sha}", } @@ -30,14 +30,17 @@ def git_sha_for_directory(directory): @transforms.add def resolve_keys(config, tasks): for task in tasks: - resolve_keyed_by(task, "routes", item_name=task["name"], **{ - "tasks-for": config.params["tasks_for"] - }) + resolve_keyed_by( + task, + "routes", + item_name=task["name"], + **{"tasks-for": config.params["tasks_for"]}, + ) # TODO: Bug 1637695 - temp solution to unblock local building of # application-services. Once we switch to new indexes, we should clean this up - if task['name'] in TOOLCHAIN_OLD_INDEX.keys() and config.params["level"] == "3": + if task["name"] in TOOLCHAIN_OLD_INDEX.keys() and config.params["level"] == "3": sha = git_sha_for_directory("libs") - routes = task['routes'] - routes.append(TOOLCHAIN_OLD_INDEX[task['name']].format(sha=sha)) + routes = task["routes"] + routes.append(TOOLCHAIN_OLD_INDEX[task["name"]].format(sha=sha)) yield task diff --git a/taskcluster/app_services_taskgraph/transforms/worker.py b/taskcluster/app_services_taskgraph/transforms/worker.py index 0d6edafb64..f0d7d63008 100644 --- a/taskcluster/app_services_taskgraph/transforms/worker.py +++ b/taskcluster/app_services_taskgraph/transforms/worker.py @@ -6,19 +6,20 @@ transforms = TransformSequence() + @transforms.add def setup_worker(_, tasks): for task in tasks: - task_name = task['name'] + task_name = task["name"] try: - worker_type = task['worker-type'] + worker_type = task["worker-type"] except KeyError: raise ValueError(f"worker-type not set for {task_name}") - if worker_type == 'b-linux': - worker = task.setdefault('worker', {}) - worker['docker-image'] = {'in-tree': 'linux'} - elif worker_type == 'b-osx': - pass # nothing to do here except avoid raising a ValueError + if worker_type == "b-linux": + worker = task.setdefault("worker", {}) + worker["docker-image"] = {"in-tree": "linux"} + elif worker_type == "b-osx": + pass # nothing to do here except avoid raising a ValueError else: raise ValueError(f"Unknown worker type for {task_name} ({worker_type})") yield task diff --git a/taskcluster/app_services_taskgraph/worker_types.py b/taskcluster/app_services_taskgraph/worker_types.py index 27dddf603b..d3d6741f2c 100644 --- a/taskcluster/app_services_taskgraph/worker_types.py +++ b/taskcluster/app_services_taskgraph/worker_types.py @@ -4,10 +4,9 @@ from datetime import datetime -from voluptuous import Required - -from taskgraph.util.schema import taskref_or_string from 
taskgraph.transforms.task import payload_builder +from taskgraph.util.schema import taskref_or_string +from voluptuous import Required @payload_builder( @@ -15,13 +14,15 @@ schema={ Required("max-run-time"): int, Required("cert"): str, - Required("upstream-artifacts"): [{ - Required("taskId"): taskref_or_string, - Required("taskType"): str, - Required("paths"): [str], - Required("formats"): [str], - }] - } + Required("upstream-artifacts"): [ + { + Required("taskId"): taskref_or_string, + Required("taskType"): str, + Required("paths"): [str], + Required("formats"): [str], + } + ], + }, ) def build_scriptworker_signing_payload(config, task, task_def): worker = task["worker"] @@ -36,12 +37,13 @@ def build_scriptworker_signing_payload(config, task, task_def): formats.update(artifacts["formats"]) scope_prefix = config.graph_config["scriptworker"]["scope-prefix"] - task_def["scopes"].append( - "{}:signing:cert:{}".format(scope_prefix, worker["cert"]) + task_def["scopes"].append("{}:signing:cert:{}".format(scope_prefix, worker["cert"])) + task_def["scopes"].extend( + [ + f"{scope_prefix}:signing:format:{signing_format}" + for signing_format in sorted(formats) + ] ) - task_def["scopes"].extend([ - f"{scope_prefix}:signing:format:{signing_format}" for signing_format in sorted(formats) - ]) @payload_builder( @@ -52,17 +54,21 @@ def build_scriptworker_signing_payload(config, task, task_def): Required("max-run-time"): int, Required("version"): str, Required("app-name"): str, - Required("upstream-artifacts"): [{ - Required("taskId"): taskref_or_string, - Required("taskType"): str, - Required("paths"): [str], - }], - Required("artifact-map"): [{ - Required("task-id"): taskref_or_string, - Required("locale"): str, - Required("paths"): {str: dict}, - }], - } + Required("upstream-artifacts"): [ + { + Required("taskId"): taskref_or_string, + Required("taskType"): str, + Required("paths"): [str], + } + ], + Required("artifact-map"): [ + { + Required("task-id"): taskref_or_string, + Required("locale"): str, + Required("paths"): {str: dict}, + } + ], + }, ) def build_scriptworker_beetmover_payload(config, task, task_def): worker = task["worker"] @@ -71,26 +77,31 @@ def build_scriptworker_beetmover_payload(config, task, task_def): task_def["payload"] = { "maxRunTime": worker["max-run-time"], "upstreamArtifacts": worker["upstream-artifacts"], - "artifactMap": [{ - "taskId": entry["task-id"], - "locale": entry["locale"], - "paths": entry["paths"], - } for entry in worker["artifact-map"]], + "artifactMap": [ + { + "taskId": entry["task-id"], + "locale": entry["locale"], + "paths": entry["paths"], + } + for entry in worker["artifact-map"] + ], "version": worker["version"], "releaseProperties": { "appName": worker["app-name"], - } + }, } if worker["action"] != "push-to-maven": task_def["payload"]["upload_date"] = int(datetime.now().timestamp()) - task_def["payload"]["releaseProperties"].update({ - "appVersion": worker["version"], - "branch": config.params["head_ref"], - "buildid": config.params["moz_build_date"], - "hashType": "sha512", - "platform": task["attributes"]["build-type"], - }) + task_def["payload"]["releaseProperties"].update( + { + "appVersion": worker["version"], + "branch": config.params["head_ref"], + "buildid": config.params["moz_build_date"], + "hashType": "sha512", + "platform": task["attributes"]["build-type"], + } + ) for artifact in task_def["payload"]["upstreamArtifacts"]: artifact["locale"] = "multi" @@ -100,10 +111,5 @@ def build_scriptworker_beetmover_payload(config, task, task_def): 
path_config["checksums_path"] = "" scope_prefix = config.graph_config["scriptworker"]["scope-prefix"] - task_def["scopes"].append( - f"{scope_prefix}:beetmover:bucket:{worker['bucket']}" - ) - task_def["scopes"].append( - f"{scope_prefix}:beetmover:action:{worker['action']}" - ) - + task_def["scopes"].append(f"{scope_prefix}:beetmover:bucket:{worker['bucket']}") + task_def["scopes"].append(f"{scope_prefix}:beetmover:action:{worker['action']}") diff --git a/taskcluster/scripts/build-and-test-swift.py b/taskcluster/scripts/build-and-test-swift.py index 51695ba407..c960125b39 100755 --- a/taskcluster/scripts/build-and-test-swift.py +++ b/taskcluster/scripts/build-and-test-swift.py @@ -3,11 +3,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at https://mozilla.org/MPL/2.0/. -from collections import namedtuple import argparse -import subprocess -import pathlib import os +import pathlib +import subprocess +from collections import namedtuple # Repository root dir ROOT_DIR = pathlib.Path(__file__).parent.parent.parent @@ -44,14 +44,16 @@ def main(): copy_source_dirs(args) log("build complete") + def parse_args(): - parser = argparse.ArgumentParser(prog='build-and-test-swift.py') - parser.add_argument('out_dir', type=pathlib.Path) - parser.add_argument('xcframework_dir', type=pathlib.Path) - parser.add_argument('glean_work_dir', type=pathlib.Path) - parser.add_argument('--force_build', action="store_true") + parser = argparse.ArgumentParser(prog="build-and-test-swift.py") + parser.add_argument("out_dir", type=pathlib.Path) + parser.add_argument("xcframework_dir", type=pathlib.Path) + parser.add_argument("glean_work_dir", type=pathlib.Path) + parser.add_argument("--force_build", action="store_true") return parser.parse_args() + def run_tests(args): # FIXME: this is currently failing with `Package.resolved file is corrupted or malformed; fix or # delete the file to continue` @@ -60,29 +62,33 @@ def run_tests(args): # ]) pass -XCFrameworkBuildInfo = namedtuple("XCFrameworkBuildInfo", "filename out_path build_command") + +XCFrameworkBuildInfo = namedtuple( + "XCFrameworkBuildInfo", "filename out_path build_command" +) XCFRAMEWORK_BUILDS = [ XCFrameworkBuildInfo( - 'MozillaRustComponents.xcframework.zip', - 'megazords/ios-rust/MozillaRustComponents.xcframework.zip', + "MozillaRustComponents.xcframework.zip", + "megazords/ios-rust/MozillaRustComponents.xcframework.zip", [ - 'megazords/ios-rust/build-xcframework.sh', - '--build-profile', - 'release', + "megazords/ios-rust/build-xcframework.sh", + "--build-profile", + "release", ], ), XCFrameworkBuildInfo( - 'FocusRustComponents.xcframework.zip', - 'megazords/ios-rust/focus/FocusRustComponents.xcframework.zip', + "FocusRustComponents.xcframework.zip", + "megazords/ios-rust/focus/FocusRustComponents.xcframework.zip", [ - 'megazords/ios-rust/build-xcframework.sh', - '--build-profile', - 'release', - '--focus', + "megazords/ios-rust/build-xcframework.sh", + "--build-profile", + "release", + "--focus", ], ), ] + def xcframework_build(args, filename): for build_info in XCFRAMEWORK_BUILDS: if build_info.filename == filename: @@ -95,48 +101,62 @@ def xcframework_build(args, filename): subprocess.check_call(build_info.build_command) # Copy the XCFramework to our output directory - subprocess.check_call(['cp', '-a', build_info.out_path, args.xcframework_dir]) + subprocess.check_call(["cp", "-a", build_info.out_path, args.xcframework_dir]) + """Generate Glean metrics. 
 Run this first, because it appears to delete any other .swift files in the
 output directory.
 """
+
+
 def generate_glean_metrics(args):
     # Make sure there's a python venv for glean to use
-    venv_dir = args.glean_work_dir / '.venv'
+    venv_dir = args.glean_work_dir / ".venv"
     if not venv_dir.exists():
         log("setting up Glean venv")
-        subprocess.check_call(['python3', '-m', 'venv', str(venv_dir)])
+        subprocess.check_call(["python3", "-m", "venv", str(venv_dir)])
 
     log("Running Glean for nimbus")
     # sdk_generator wants to be run from inside Xcode, so we set some env vars to fake it out.
     env = {
-        'SOURCE_ROOT': str(args.glean_work_dir),
-        'PROJECT': "MozillaAppServices",
-        'GLEAN_PYTHON': '/usr/bin/env python3',
-        'LC_ALL': 'C.UTF-8',
-        'LANG': 'C.UTF-8',
-        'PATH': os.environ['PATH'],
+        "SOURCE_ROOT": str(args.glean_work_dir),
+        "PROJECT": "MozillaAppServices",
+        "GLEAN_PYTHON": "/usr/bin/env python3",
+        "LC_ALL": "C.UTF-8",
+        "LANG": "C.UTF-8",
+        "PATH": os.environ["PATH"],
     }
-    glean_script = ROOT_DIR / "components/external/glean/glean-core/ios/sdk_generator.sh"
-    out_dir = args.out_dir / 'all' / 'Generated' / 'Metrics'
-    focus_out_dir = args.out_dir / 'focus' / 'Generated' / 'Metrics'
+    glean_script = (
+        ROOT_DIR / "components/external/glean/glean-core/ios/sdk_generator.sh"
+    )
+    out_dir = args.out_dir / "all" / "Generated" / "Metrics"
+    focus_out_dir = args.out_dir / "focus" / "Generated" / "Metrics"
     focus_glean_files = map(str, [ROOT_DIR / "components/nimbus/metrics.yaml"])
-    firefox_glean_files = map(str, [ROOT_DIR / "components/nimbus/metrics.yaml", ROOT_DIR / "components/sync_manager/metrics.yaml", ROOT_DIR / "components/sync_manager/pings.yaml"])
+    firefox_glean_files = map(
+        str,
+        [
+            ROOT_DIR / "components/nimbus/metrics.yaml",
+            ROOT_DIR / "components/sync_manager/metrics.yaml",
+            ROOT_DIR / "components/sync_manager/pings.yaml",
+        ],
+    )
     generate_glean_metrics_for_target(env, glean_script, out_dir, firefox_glean_files)
-    generate_glean_metrics_for_target(env, glean_script, focus_out_dir, focus_glean_files)
+    generate_glean_metrics_for_target(
+        env, glean_script, focus_out_dir, focus_glean_files
+    )
+
 
 def generate_glean_metrics_for_target(env, glean_script, out_dir, input_files):
     ensure_dir(out_dir)
-    subprocess.check_call([
-        str(glean_script),
-        "-o", str(out_dir),
-        *input_files
-    ], env=env)
+    subprocess.check_call(
+        [str(glean_script), "-o", str(out_dir), *input_files], env=env
+    )
+
 
 def generate_uniffi_bindings(args):
-    out_dir = args.out_dir / 'all' / 'Generated'
-    focus_out_dir = args.out_dir / 'focus' / 'Generated'
+    out_dir = args.out_dir / "all" / "Generated"
+    focus_out_dir = args.out_dir / "focus" / "Generated"
 
     ensure_dir(out_dir)
 
@@ -146,38 +166,44 @@ def generate_uniffi_bindings(args):
     # Generate sources for Focus
     generate_uniffi_bindings_for_target(focus_out_dir, "megazord_focus")
 
+
 def generate_uniffi_bindings_for_target(out_dir, megazord):
     log(f"generating sources for {megazord}")
     # We can't use the `-m` flag here because the megazord library was cross-compiled and the
     # `uniffi-bindgen-library-mode` tool can't handle that yet. Instead, send one of the library
     # paths using the `-l` flag. Pick an arbitrary target, since it doesn't affect the UniFFI
     # bindings.
- lib_path = f'target/aarch64-apple-ios/release/lib{megazord}.a' - subprocess.check_call([ - 'cargo', 'uniffi-bindgen-library-mode', '-l', lib_path, "swift", out_dir - ]) + lib_path = f"target/aarch64-apple-ios/release/lib{megazord}.a" + subprocess.check_call( + ["cargo", "uniffi-bindgen-library-mode", "-l", lib_path, "swift", out_dir] + ) + def copy_source_dirs(args): - out_dir = args.out_dir / 'all' - focus_out_dir = args.out_dir / 'focus' + out_dir = args.out_dir / "all" + focus_out_dir = args.out_dir / "focus" copy_sources(out_dir, SOURCE_TO_COPY) copy_sources(focus_out_dir, FOCUS_SOURCE_TO_COPY) + def copy_sources(out_dir, sources): ensure_dir(out_dir) for source in sources: log(f"copying {source}") for path in ROOT_DIR.glob(source): - subprocess.check_call(['cp', '-r', path, out_dir]) + subprocess.check_call(["cp", "-r", path, out_dir]) + def ensure_dir(path): if not path.exists(): os.makedirs(path) + def log(message): print() - print(f'* {message}', flush=True) + print(f"* {message}", flush=True) + -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/deps-complete.py b/taskcluster/scripts/deps-complete.py index df1deb3bb0..bdf65f1124 100755 --- a/taskcluster/scripts/deps-complete.py +++ b/taskcluster/scripts/deps-complete.py @@ -8,9 +8,10 @@ import sys from urllib.request import urlopen -TASKCLUSTER_PROXY_URL = os.environ['TASKCLUSTER_PROXY_URL'] -DECISION_TASK_ID = os.environ['DECISION_TASK_ID'] -TASK_ID = os.environ['TASK_ID'] +TASKCLUSTER_PROXY_URL = os.environ["TASKCLUSTER_PROXY_URL"] +DECISION_TASK_ID = os.environ["DECISION_TASK_ID"] +TASK_ID = os.environ["TASK_ID"] + def get_tasks_from_group(task_group_id): continuation_token = None @@ -20,42 +21,46 @@ def get_tasks_from_group(task_group_id): if continuation_token: url += f"?continuationToken={continuation_token}" data = json.load(urlopen(url)) - tasks.extend(data['tasks']) - continuation_token = data.get('continuationToken') + tasks.extend(data["tasks"]) + continuation_token = data.get("continuationToken") if continuation_token is None: break return tasks + def get_dependent_task_data(): task_map = { - t['status']['taskId']: t - for t in get_tasks_from_group(DECISION_TASK_ID) + t["status"]["taskId"]: t for t in get_tasks_from_group(DECISION_TASK_ID) } - dependency_ids = task_map.get(TASK_ID)['task']['dependencies'] + dependency_ids = task_map.get(TASK_ID)["task"]["dependencies"] return [ - task_map[task_id] for task_id in dependency_ids + task_map[task_id] + for task_id in dependency_ids # Missing keys indicate cached dependencies, which won't be included in # the task group data if task_id in task_map ] + def check_dependent_tasks(): some_task_failed = False for task in get_dependent_task_data(): - if task['status']['state'] != 'completed': + if task["status"]["state"] != "completed": some_task_failed = True - name = task['task']['metadata']['name'] + name = task["task"]["metadata"]["name"] print(f"Failed task: {name}") return some_task_failed + def main(): print() - print('---- Checking for failed tasks ----') + print("---- Checking for failed tasks ----") if check_dependent_tasks(): sys.exit(1) else: - print('All successful!') + print("All successful!") sys.exit(0) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/generate-nimbus-cli-json.py b/taskcluster/scripts/generate-nimbus-cli-json.py index 791cb0f11c..89145d03d8 100755 --- a/taskcluster/scripts/generate-nimbus-cli-json.py +++ b/taskcluster/scripts/generate-nimbus-cli-json.py 
@@ -9,18 +9,21 @@ import json import os import pathlib + import toml ROOT_DIR = pathlib.Path(__file__).parent.parent.parent + def main(): args = parse_args() dump_json(args) + def dump_json(args): data = { - 'version': find_version(), - 'commit': os.environ['APPSERVICES_HEAD_REV'], + "version": find_version(), + "commit": os.environ["APPSERVICES_HEAD_REV"], } dir = os.path.dirname(args.path) @@ -29,16 +32,21 @@ def dump_json(args): with open(args.path, "wt") as f: json.dump(data, f) + def find_version(): path = ROOT_DIR.joinpath("components", "support", "nimbus-cli", "Cargo.toml") with open(path) as f: data = toml.load(f) - return data['package']['version'] + return data["package"]["version"] + def parse_args(): - parser = argparse.ArgumentParser(description='Generate JSON file with information about the nimbus-cli build') - parser.add_argument('path') + parser = argparse.ArgumentParser( + description="Generate JSON file with information about the nimbus-cli build" + ) + parser.add_argument("path") return parser.parse_args() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/generate-release-json.py b/taskcluster/scripts/generate-release-json.py index 3952dfc991..bc1858774f 100755 --- a/taskcluster/scripts/generate-release-json.py +++ b/taskcluster/scripts/generate-release-json.py @@ -5,37 +5,48 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. -from urllib.parse import quote_plus import argparse import json import os +from urllib.parse import quote_plus + def main(): args = parse_args() dump_json(args) + def indexed_artifact_url(index_name, filename): - return 'https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/{}/artifacts/public{}'.format( - index_name, quote_plus(f"/build/{filename}")) + return "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/{}/artifacts/public{}".format( + index_name, quote_plus(f"/build/{filename}") + ) + def dump_json(args): data = { - 'version': args.version, - 'channel': args.maven_channel, - 'commit': os.environ['APPSERVICES_HEAD_REV'], - 'nimbus-fml.zip': indexed_artifact_url( - f'project.application-services.v2.nimbus-fml.{args.version}', 'nimbus-fml.zip'), - 'nimbus-fml.sha256': indexed_artifact_url( - f'project.application-services.v2.nimbus-fml.{args.version}', 'nimbus-fml.sha256'), - 'FocusRustComponents.xcframework.zip': indexed_artifact_url( - f'project.application-services.v2.swift.{args.version}', - 'FocusRustComponents.xcframework.zip'), - 'MozillaRustComponents.xcframework.zip': indexed_artifact_url( - f'project.application-services.v2.swift.{args.version}', - 'MozillaRustComponents.xcframework.zip'), - 'swift-components.tar.xz': indexed_artifact_url( - f'project.application-services.v2.swift.{args.version}', - 'swift-components.tar.xz'), + "version": args.version, + "channel": args.maven_channel, + "commit": os.environ["APPSERVICES_HEAD_REV"], + "nimbus-fml.zip": indexed_artifact_url( + f"project.application-services.v2.nimbus-fml.{args.version}", + "nimbus-fml.zip", + ), + "nimbus-fml.sha256": indexed_artifact_url( + f"project.application-services.v2.nimbus-fml.{args.version}", + "nimbus-fml.sha256", + ), + "FocusRustComponents.xcframework.zip": indexed_artifact_url( + f"project.application-services.v2.swift.{args.version}", + "FocusRustComponents.xcframework.zip", + ), + "MozillaRustComponents.xcframework.zip": indexed_artifact_url( + f"project.application-services.v2.swift.{args.version}", + "MozillaRustComponents.xcframework.zip", + ), + "swift-components.tar.xz": 
indexed_artifact_url( + f"project.application-services.v2.swift.{args.version}", + "swift-components.tar.xz", + ), } dir = os.path.dirname(args.path) @@ -44,12 +55,20 @@ def dump_json(args): with open(args.path, "wt") as f: json.dump(data, f) + def parse_args(): - parser = argparse.ArgumentParser(description='Publish information about the release builds') - parser.add_argument('path') - parser.add_argument('--version', help='version string', required=True) - parser.add_argument('--maven-channel', help='channel the maven packages were uploaded to', required=True) + parser = argparse.ArgumentParser( + description="Publish information about the release builds" + ) + parser.add_argument("path") + parser.add_argument("--version", help="version string", required=True) + parser.add_argument( + "--maven-channel", + help="channel the maven packages were uploaded to", + required=True, + ) return parser.parse_args() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/get-secret.py b/taskcluster/scripts/get-secret.py index acb34114e7..7869d5e994 100755 --- a/taskcluster/scripts/get-secret.py +++ b/taskcluster/scripts/get-secret.py @@ -10,11 +10,14 @@ import errno import json import os + import taskcluster -def write_secret_to_file(path, data, key, base64decode=False, json_secret=False, append=False, prefix=''): - path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../' + path)) +def write_secret_to_file( + path, data, key, base64decode=False, json_secret=False, append=False, prefix="" +): + path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../" + path)) try: os.makedirs(os.path.dirname(path)) except OSError as error: @@ -22,8 +25,8 @@ def write_secret_to_file(path, data, key, base64decode=False, json_secret=False, raise print(f"Outputting secret to: {path}") - with open(path, 'a' if append else 'w') as f: - value = data['secret'][key] + with open(path, "a" if append else "w") as f: + value = data["secret"][key] if base64decode: value = base64.b64decode(value) if json_secret: @@ -33,35 +36,76 @@ def write_secret_to_file(path, data, key, base64decode=False, json_secret=False, def fetch_secret_from_taskcluster(name): try: - secrets = taskcluster.Secrets({ - # BaseUrl is still needed for tasks that haven't migrated to taskgraph yet. - 'baseUrl': 'http://taskcluster/secrets/v1', - }) + secrets = taskcluster.Secrets( + { + # BaseUrl is still needed for tasks that haven't migrated to taskgraph yet. 
+ "baseUrl": "http://taskcluster/secrets/v1", + } + ) except taskcluster.exceptions.TaskclusterFailure: # taskcluster library >=5 errors out when `baseUrl` is used - secrets = taskcluster.Secrets({ - 'rootUrl': os.environ.get('TASKCLUSTER_PROXY_URL', 'https://taskcluster.net'), - }) + secrets = taskcluster.Secrets( + { + "rootUrl": os.environ.get( + "TASKCLUSTER_PROXY_URL", "https://taskcluster.net" + ), + } + ) return secrets.get(name) def main(): parser = argparse.ArgumentParser( - description='Fetch a taskcluster secret value and save it to a file.') - - parser.add_argument('-s', dest="secret", action="store", help="name of the secret") - parser.add_argument('-k', dest='key', action="store", help='key of the secret') - parser.add_argument('-f', dest="path", action="store", help='file to save secret to') - parser.add_argument('--decode', dest="decode", action="store_true", default=False, help='base64 decode secret before saving to file') - parser.add_argument('--json', dest="json", action="store_true", default=False, help='serializes the secret to JSON format') - parser.add_argument('--append', dest="append", action="store_true", default=False, help='append secret to existing file') - parser.add_argument('--prefix', dest="prefix", action="store", default="", help='add prefix when writing secret to file') + description="Fetch a taskcluster secret value and save it to a file." + ) + + parser.add_argument("-s", dest="secret", action="store", help="name of the secret") + parser.add_argument("-k", dest="key", action="store", help="key of the secret") + parser.add_argument( + "-f", dest="path", action="store", help="file to save secret to" + ) + parser.add_argument( + "--decode", + dest="decode", + action="store_true", + default=False, + help="base64 decode secret before saving to file", + ) + parser.add_argument( + "--json", + dest="json", + action="store_true", + default=False, + help="serializes the secret to JSON format", + ) + parser.add_argument( + "--append", + dest="append", + action="store_true", + default=False, + help="append secret to existing file", + ) + parser.add_argument( + "--prefix", + dest="prefix", + action="store", + default="", + help="add prefix when writing secret to file", + ) result = parser.parse_args() secret = fetch_secret_from_taskcluster(result.secret) - write_secret_to_file(result.path, secret, result.key, result.decode, result.json, result.append, result.prefix) + write_secret_to_file( + result.path, + secret, + result.key, + result.decode, + result.json, + result.append, + result.prefix, + ) if __name__ == "__main__": diff --git a/taskcluster/scripts/nimbus-build.py b/taskcluster/scripts/nimbus-build.py index e4f3abf1ac..2a75266e8f 100755 --- a/taskcluster/scripts/nimbus-build.py +++ b/taskcluster/scripts/nimbus-build.py @@ -4,39 +4,56 @@ # file, You can obtain one at https://mozilla.org/MPL/2.0/. 
import argparse -import subprocess -import pathlib import os +import pathlib +import subprocess # Repository root dir + def main(): args = parse_args() binary = args.binary target = args.target os.makedirs(args.out_dir, exist_ok=True) - filename = f'{binary}.exe' if '-windows-' in target else binary + filename = f"{binary}.exe" if "-windows-" in target else binary env = os.environ - if target == 'aarch64-unknown-linux-gnu': + if target == "aarch64-unknown-linux-gnu": env = os.environ.copy() - env['RUSTFLAGS'] = '-C linker=aarch64-linux-gnu-gcc' + env["RUSTFLAGS"] = "-C linker=aarch64-linux-gnu-gcc" + + subprocess.check_call( + [ + "cargo", + "build", + "--bin", + binary, + "--release", + "--target", + target, + ], + env=env, + ) + subprocess.check_call( + [ + "zip", + "-r", + f"../build/{binary}-{target}.zip", + pathlib.Path(target).joinpath("release", filename), + ], + cwd="target", + ) - subprocess.check_call([ - 'cargo', 'build', '--bin', binary, '--release', '--target', target, - ], env=env) - subprocess.check_call([ - 'zip', '-r', f'../build/{binary}-{target}.zip', - pathlib.Path(target).joinpath('release', filename), - ], cwd='target') def parse_args(): - parser = argparse.ArgumentParser(prog='nimbus-build.py') - parser.add_argument('out_dir', type=pathlib.Path) - parser.add_argument('binary') - parser.add_argument('target') + parser = argparse.ArgumentParser(prog="nimbus-build.py") + parser.add_argument("out_dir", type=pathlib.Path) + parser.add_argument("binary") + parser.add_argument("target") return parser.parse_args() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/server-megazord-build.py b/taskcluster/scripts/server-megazord-build.py index 99e1ea379a..1095753f44 100755 --- a/taskcluster/scripts/server-megazord-build.py +++ b/taskcluster/scripts/server-megazord-build.py @@ -5,20 +5,23 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import argparse -import subprocess -import pathlib import os +import pathlib import shutil +import subprocess import tempfile # Repository root dir SRC_ROOT = pathlib.Path( - subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).decode('utf8').strip() + subprocess.check_output(["git", "rev-parse", "--show-toplevel"]) + .decode("utf8") + .strip() ).resolve() -PATH_NOT_SPECIFIED = pathlib.Path('/not specified').resolve() +PATH_NOT_SPECIFIED = pathlib.Path("/not specified").resolve() PWD = pathlib.Path().resolve() DEBUG = False -TARGET_DETECTOR_SCRIPT = SRC_ROOT / 'taskcluster' / 'scripts'/ 'detect-target.sh' +TARGET_DETECTOR_SCRIPT = SRC_ROOT / "taskcluster" / "scripts" / "detect-target.sh" + def main(): args = parse_args() @@ -26,13 +29,13 @@ def main(): target = args.target out_dir = PWD / args.out_dir - _dir = SRC_ROOT / 'megazords' / megazord + _dir = SRC_ROOT / "megazords" / megazord if not _dir.is_dir(): - raise NotADirectoryError(f'Megazord {megazord} does not exist to build') + raise NotADirectoryError(f"Megazord {megazord} does not exist to build") if DEBUG: temp_dir = None - dist_dir = PWD / 'dist' + dist_dir = PWD / "dist" os.makedirs(dist_dir, exist_ok=True) else: temp_dir = tempfile.TemporaryDirectory() @@ -52,70 +55,103 @@ def main(): if not DEBUG: temp_dir.cleanup() + def _build_shared_library(megazord, target, dist_dir): env = os.environ.copy() - binary=megazord.replace('-', '_') - - if '-linux' in target: - filename = f'lib{binary}.so' - elif '-darwin' in target: - filename = f'lib{binary}.dylib' - elif '-win' in target: - filename = f'{binary}.dll' + binary = megazord.replace("-", "_") + + if "-linux" in target: + filename = f"lib{binary}.so" + elif "-darwin" in target: + filename = f"lib{binary}.dylib" + elif "-win" in target: + filename = f"{binary}.dll" else: - raise NotImplementedError(f'Only targets for linux, darwin or windows available') + raise NotImplementedError("Only targets for linux, darwin or windows available") if "-musl" in target: - env["RUSTFLAGS"] = ( - env.get("RUSTFLAGS", "") + " -C target-feature=-crt-static" - ) - if _host_os() == 'unknown-linux': - env["RUSTFLAGS"] = ( - env.get("RUSTFLAGS", "") + " -C link-arg=-lgcc" - ) - elif _host_os() == 'apple-darwin': + env["RUSTFLAGS"] = env.get("RUSTFLAGS", "") + " -C target-feature=-crt-static" + if _host_os() == "unknown-linux": + env["RUSTFLAGS"] = env.get("RUSTFLAGS", "") + " -C link-arg=-lgcc" + elif _host_os() == "apple-darwin": if "x86_64" in target: - env["CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER"] = "x86_64-linux-musl-gcc" + env["CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER"] = ( + "x86_64-linux-musl-gcc" + ) env["TARGET_CC"] = "x86_64-linux-musl-gcc" elif "aarch64" in target: - env["CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER"] = "aarch64-linux-musl-gcc" + env["CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER"] = ( + "aarch64-linux-musl-gcc" + ) if target == "x86_64-pc-windows-gnu": env["RUSTFLAGS"] = env.get("RUSTFLAGS", "") + " -C panic=abort" - elif target == 'aarch64-unknown-linux-gnu': - env["CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER"] = 'aarch64-linux-gnu-gcc' - - subprocess.check_call([ - 'cargo', 'build', '--manifest-path', f'{SRC_ROOT}/megazords/{megazord}/Cargo.toml', '--release', '--target', target, - ], env=env, cwd=SRC_ROOT) + elif target == "aarch64-unknown-linux-gnu": + env["CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER"] = "aarch64-linux-gnu-gcc" + + subprocess.check_call( + [ + "cargo", + "build", + "--manifest-path", + 
f"{SRC_ROOT}/megazords/{megazord}/Cargo.toml", + "--release", + "--target", + target, + ], + env=env, + cwd=SRC_ROOT, + ) # This is only temporary, until cirrus uses pre-built binaries. _patch_uniffi_tomls() - library_path = SRC_ROOT / 'target' / target / 'release' / filename + library_path = SRC_ROOT / "target" / target / "release" / filename # Generate the Python FFI. We do this with `uniffi-bindgen-library-mode` so we don't have to specify the UDL or the uniffi.toml file. # Use the `-l` flag rather than `-m` since we want to specify a particular target. - subprocess.check_call([ - 'cargo', 'uniffi-bindgen-library-mode', '-l', library_path.as_posix(), 'python', dist_dir - ], env=env, cwd=SRC_ROOT) + subprocess.check_call( + [ + "cargo", + "uniffi-bindgen-library-mode", + "-l", + library_path.as_posix(), + "python", + dist_dir, + ], + env=env, + cwd=SRC_ROOT, + ) # Move the .so file to the dist_directory - shutil.move(SRC_ROOT / 'target' / target / 'release' / filename, dist_dir / filename) + shutil.move( + SRC_ROOT / "target" / target / "release" / filename, dist_dir / filename + ) return filename + def _patch_uniffi_tomls(): - _replace_text(SRC_ROOT / 'components' / 'support' / 'nimbus-fml' / 'uniffi.toml', '\ncdylib_name', '\n# cdylib_name') - _replace_text(SRC_ROOT / 'components' / 'nimbus' / 'uniffi.toml', '\ncdylib_name', '\n# cdylib_name') + _replace_text( + SRC_ROOT / "components" / "support" / "nimbus-fml" / "uniffi.toml", + "\ncdylib_name", + "\n# cdylib_name", + ) + _replace_text( + SRC_ROOT / "components" / "nimbus" / "uniffi.toml", + "\ncdylib_name", + "\n# cdylib_name", + ) + def _replace_text(filename, search, replace): - with open(filename, 'r') as file: + with open(filename, "r") as file: data = file.read() data = data.replace(search, replace) - with open(filename, 'w') as file: + with open(filename, "w") as file: file.write(data) + def _run_python_tests(megazord, dist_dir): env = os.environ.copy() existing = env.get("PYTHONPATH", None) @@ -127,43 +163,60 @@ def _run_python_tests(megazord, dist_dir): test_dirs = _python_tests(megazord) for d in test_dirs: - subprocess.check_call([ - 'pytest', '-s', d, - ], env=env, cwd=SRC_ROOT) + subprocess.check_call( + [ + "pytest", + "-s", + d, + ], + env=env, + cwd=SRC_ROOT, + ) + def _target_matches_host(target): return _host_os() in target and _host_machine() in target + def _host_machine(): import platform + m = platform.machine().lower() - if m in ('i386', 'amd64', 'x86_64'): - return 'x86_64' - elif m in ('arm64', 'aarch64'): - return 'aarch64' + if m in ("i386", "amd64", "x86_64"): + return "x86_64" + elif m in ("arm64", "aarch64"): + return "aarch64" else: return m + def _host_os(): import platform + s = platform.system().lower() - if 'windows' in s: - return 'windows' - elif 'linux' in s: - return 'unknown-linux' - elif 'darwin' in s: - return 'apple-darwin' + if "windows" in s: + return "windows" + elif "linux" in s: + return "unknown-linux" + elif "darwin" in s: + return "apple-darwin" else: return s + def _python_sources(megazord): - return _dirs(f'{SRC_ROOT}/megazords/{megazord}', ['python/lib', 'python/src']) + return _dirs(f"{SRC_ROOT}/megazords/{megazord}", ["python/lib", "python/src"]) + def _python_tests(megazord): - return _dirs(f'{SRC_ROOT}/megazords/{megazord}', ['tests/python-tests', 'python/test']) + return _dirs( + f"{SRC_ROOT}/megazords/{megazord}", ["tests/python-tests", "python/test"] + ) + def _dirs(prefix, list): - return [f'{prefix}/{f}' for f in list if os.path.isdir(f'{prefix}/{f}')] + return 
[f"{prefix}/{f}" for f in list if os.path.isdir(f"{prefix}/{f}")] + def _prepare_artifact(megazord, target, filename, dist_dir): for f in _python_sources(megazord): @@ -176,27 +229,39 @@ def _prepare_artifact(megazord, target, filename, dist_dir): os.makedirs(target_dir, exist_ok=True) shutil.move(dist_dir / filename, target_dir / filename) - scripts_dir = dist_dir / 'scripts' + scripts_dir = dist_dir / "scripts" os.makedirs(scripts_dir, exist_ok=True) shutil.copy(TARGET_DETECTOR_SCRIPT, scripts_dir) + def _create_artifact(megazord, target, dist_dir, out_dir): - archive = out_dir / f'{megazord}-{target}.zip' - subprocess.check_call([ - 'zip', archive, - '-r', '.', - '-x', '*/__pycache__/*', '__pycache__/*', - ], cwd=dist_dir) + archive = out_dir / f"{megazord}-{target}.zip" + subprocess.check_call( + [ + "zip", + archive, + "-r", + ".", + "-x", + "*/__pycache__/*", + "__pycache__/*", + ], + cwd=dist_dir, + ) print(f"Archive complete: {archive}") + def parse_args(): - parser = argparse.ArgumentParser(prog='server-megazord-build.py') - parser.add_argument('megazord') - parser.add_argument('target') - parser.add_argument('out_dir', nargs='?', type=pathlib.Path, default=PATH_NOT_SPECIFIED) + parser = argparse.ArgumentParser(prog="server-megazord-build.py") + parser.add_argument("megazord") + parser.add_argument("target") + parser.add_argument( + "out_dir", nargs="?", type=pathlib.Path, default=PATH_NOT_SPECIFIED + ) return parser.parse_args() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/setup-branch-build-firefox-android.py b/taskcluster/scripts/setup-branch-build-firefox-android.py index 2600dc09d7..fcb5e7020e 100755 --- a/taskcluster/scripts/setup-branch-build-firefox-android.py +++ b/taskcluster/scripts/setup-branch-build-firefox-android.py @@ -7,35 +7,59 @@ import os import subprocess + def main(): args = parse_args() git_checkout(firefox_android_repo(args), args.branch) - local_properties = '\n'.join([ - "autoPublish.application-services.dir=../../", - ]) + local_properties = "\n".join( + [ + "autoPublish.application-services.dir=../../", + ] + ) print("Local properties:") print(local_properties) - write_local_properties("firefox-android/android-components/local.properties", local_properties) + write_local_properties( + "firefox-android/android-components/local.properties", local_properties + ) write_local_properties("firefox-android/fenix/local.properties", local_properties) + def parse_args(): - parser = argparse.ArgumentParser(description='Setup a firefox-android branch build in taskcluster') - parser.add_argument('owner', help='firefox-android repository owner') - parser.add_argument('branch', help='firefox-android branch') + parser = argparse.ArgumentParser( + description="Setup a firefox-android branch build in taskcluster" + ) + parser.add_argument("owner", help="firefox-android repository owner") + parser.add_argument("branch", help="firefox-android branch") return parser.parse_args() + def firefox_android_repo(args): - return f'https://github.com/{args.owner}/firefox-android' + return f"https://github.com/{args.owner}/firefox-android" + def git_checkout(url, branch): - subprocess.check_call(['git', 'clone', '--branch', branch, '--recurse-submodules', '--depth', '1', '--', url]) + subprocess.check_call( + [ + "git", + "clone", + "--branch", + branch, + "--recurse-submodules", + "--depth", + "1", + "--", + url, + ] + ) + def write_local_properties(path, local_properties): path = os.path.abspath(path) print(f"Writing local 
properties to {path}") - with open(path, 'w') as f: + with open(path, "w") as f: f.write(local_properties) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/setup-branch-build-firefox-ios.py b/taskcluster/scripts/setup-branch-build-firefox-ios.py index 41131adbaf..c8217d880e 100755 --- a/taskcluster/scripts/setup-branch-build-firefox-ios.py +++ b/taskcluster/scripts/setup-branch-build-firefox-ios.py @@ -7,33 +7,64 @@ import os import subprocess + def main(): args = parse_args() git_checkout(firefox_ios_repo(args), args.branch) - subprocess.check_call(["./rust_components_local.sh", "-a", "../", "../rust-components-swift"], - cwd="firefox-ios") + subprocess.check_call( + ["./rust_components_local.sh", "-a", "../", "../rust-components-swift"], + cwd="firefox-ios", + ) + def parse_args(): - parser = argparse.ArgumentParser(description='Setup a firefox-ios branch build in taskcluster') - parser.add_argument('owner', help='firefox-ios repository owner') - parser.add_argument('branch', help='firefox-ios branch') + parser = argparse.ArgumentParser( + description="Setup a firefox-ios branch build in taskcluster" + ) + parser.add_argument("owner", help="firefox-ios repository owner") + parser.add_argument("branch", help="firefox-ios branch") return parser.parse_args() + def firefox_ios_repo(args): - return f'https://github.com/{args.owner}/firefox-ios' + return f"https://github.com/{args.owner}/firefox-ios" + def git_checkout(url, branch): - subprocess.check_call(['git', 'clone', '--branch', branch, '--recurse-submodules', '--depth', '1', '--', url]) - subprocess.check_call([ - 'git', 'clone', '--branch', 'main', '--recurse-submodules', '--depth', '1', - '--', 'https://github.com/mozilla/rust-components-swift', - ]) + subprocess.check_call( + [ + "git", + "clone", + "--branch", + branch, + "--recurse-submodules", + "--depth", + "1", + "--", + url, + ] + ) + subprocess.check_call( + [ + "git", + "clone", + "--branch", + "main", + "--recurse-submodules", + "--depth", + "1", + "--", + "https://github.com/mozilla/rust-components-swift", + ] + ) + def write_local_properties(path, local_properties): path = os.path.abspath(path) print(f"Writing local properties to {path}") - with open(path, 'w') as f: + with open(path, "w") as f: f.write(local_properties) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/taskcluster/scripts/write-dummy-secret.py b/taskcluster/scripts/write-dummy-secret.py index 92db32a2d2..3687361810 100755 --- a/taskcluster/scripts/write-dummy-secret.py +++ b/taskcluster/scripts/write-dummy-secret.py @@ -11,7 +11,7 @@ def write_secret_to_file(path, secret): - path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../' + path)) + path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../" + path)) try: os.makedirs(os.path.dirname(path)) except OSError as error: @@ -19,15 +19,19 @@ def write_secret_to_file(path, secret): raise print(f"Outputting secret to: {path}") - with open(path, 'w') as f: + with open(path, "w") as f: f.write(secret) def main(): parser = argparse.ArgumentParser(description="Store a dummy secret to a file") - parser.add_argument("-c", dest="content", action="store", help="content of the secret") - parser.add_argument("-f", dest="path", action="store", help="file to save secret to") + parser.add_argument( + "-c", dest="content", action="store", help="content of the secret" + ) + parser.add_argument( + "-f", dest="path", action="store", help="file to save secret to" + ) 
result = parser.parse_args() diff --git a/tools/clean-gradle-autopublish.py b/tools/clean-gradle-autopublish.py index c3f9fb3d0b..7c5a7e6e46 100755 --- a/tools/clean-gradle-autopublish.py +++ b/tools/clean-gradle-autopublish.py @@ -3,12 +3,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at https://mozilla.org/MPL/2.0/. -from configparser import RawConfigParser -from pathlib import Path import io import shutil import subprocess import sys +from configparser import RawConfigParser +from pathlib import Path + def main(args): if len(args) != 1: @@ -21,35 +22,46 @@ def main(args): appservices_path = Path(__file__).parent.parent check_rust_targets(appservices_path) # Delete lastAutoPublishContentsHash to force gradle to rebuild/republish our maven packages - delete_if_exists(appservices_path / '.lastAutoPublishContentsHash') + delete_if_exists(appservices_path / ".lastAutoPublishContentsHash") # Delete the packages in our local maven repository as well - delete_if_exists(Path.home().joinpath('.m2', 'repository', 'org', 'mozilla', 'appservices')) - subprocess.run(["cargo", "clean"], cwd=appservices_path) - subprocess.run(["./gradlew", "clean"], cwd=appservices_path) - subprocess.run(["./gradlew", "clean"], cwd=ff_android_path / 'android-components') - subprocess.run(["./gradlew", "clean"], cwd=ff_android_path / 'fenix') + delete_if_exists( + Path.home().joinpath(".m2", "repository", "org", "mozilla", "appservices") + ) + subprocess.run(["cargo", "clean"], cwd=appservices_path, check=False) + subprocess.run(["./gradlew", "clean"], cwd=appservices_path, check=False) + subprocess.run(["./gradlew", "clean"], cwd=ff_android_path / "android-components", check=False) + subprocess.run(["./gradlew", "clean"], cwd=ff_android_path / "fenix", check=False) + def path_looks_like_firefox_android(path): - return path.joinpath('android-components').exists() and path.joinpath("fenix").exists() + return ( + path.joinpath("android-components").exists() and path.joinpath("fenix").exists() + ) + def check_rust_targets(appservices_path): # config parser expects a header, but properties files don't have them. So add one manually: f = io.StringIO() f.write("[main]\n") - f.write((appservices_path / 'local.properties').open().read()) + f.write((appservices_path / "local.properties").open().read()) f.seek(0) config = RawConfigParser() config.read_file(f) - rust_targets = config['main'].get('rust.targets') + rust_targets = config["main"].get("rust.targets") if rust_targets is not None: - if "linux-x86-64" not in rust_targets.split(','): - print("rust.targets set in local.properties, but linux-x86-64 is not included.") - print("This will cause builds to fail, please fix this before running clean-gradle-autopublish.py") + if "linux-x86-64" not in rust_targets.split(","): + print( + "rust.targets set in local.properties, but linux-x86-64 is not included." 
+ ) + print( + "This will cause builds to fail, please fix this before running clean-gradle-autopublish.py" + ) sys.exit(1) print(f"rust targets set to: {rust_targets}") print("Note: this means that only APKs for those targets will work") input("\nPress enter to continue") + def delete_if_exists(path): if path.exists(): if path.is_file(): @@ -57,5 +69,6 @@ def delete_if_exists(path): else: shutil.rmtree(path) -if __name__ == '__main__': + +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/tools/clean.py b/tools/clean.py index ba99f2de6f..bf0b3281dc 100644 --- a/tools/clean.py +++ b/tools/clean.py @@ -14,30 +14,33 @@ """ import argparse -import subprocess -from pathlib import Path import shlex import shutil +import subprocess +from pathlib import Path PROJECT_ROOT = Path(__file__).parent.parent + def run_command(dry_run, cmdline, **kwargs): - print('Executing:', ' '.join(shlex.quote(str(part)) for part in cmdline)) + print("Executing:", " ".join(shlex.quote(str(part)) for part in cmdline)) if not dry_run: subprocess.check_call(cmdline, **kwargs) + def find_generated_directories(look_dir): for child in look_dir.iterdir(): - if child.name == 'support': + if child.name == "support": for sub in find_generated_directories(child): yield sub else: # `android/build` directories should be removed. - sub = child / 'android' / 'build' + sub = child / "android" / "build" if sub.is_dir(): yield sub # TODO: ios/swift? + def clean_android(dry_run): # pathlib.Path will join "." and "gradlew" as "gradlew", which doesn't # work as "." is not on the path! @@ -50,18 +53,25 @@ def clean_android(dry_run): print("`./gradle clean` failed, but looking for other Android stuff...") # ... and still try and find obviously generated directories. for to_rm in find_generated_directories(PROJECT_ROOT / "components"): - print('Removing:', to_rm) + print("Removing:", to_rm) if not dry_run: shutil.rmtree(to_rm) + def parse_args(): parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('-n', '--dry-run', dest="dry_run", action="store_true", - help='show what would be executed/removed without actually doing it.') + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument( + "-n", + "--dry-run", + dest="dry_run", + action="store_true", + help="show what would be executed/removed without actually doing it.", + ) return parser.parse_args() + def main(): args = parse_args() run_command(args.dry_run, ["cargo", "clean"]) @@ -69,5 +79,6 @@ def main(): # TODO: add swift etc. print("We should be clean! (except for iOS - fix me? :)") -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/tools/dependency_summary.py b/tools/dependency_summary.py index d8ecf416c5..0ecff5e24f 100755 --- a/tools/dependency_summary.py +++ b/tools/dependency_summary.py @@ -8,26 +8,26 @@ # It shells out to `cargo metadata` to gather information about the full dependency tree # and to `cargo build --build-plan` to figure out the dependencies of the specific target package. 
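# For reference, the two cargo command lines this script constructs below,
# shown with the default "megazord" package and one possible target triple:
#
#   cargo metadata --locked --format-version 1
#   cargo -Z unstable-options build --build-plan --quiet --locked \
#       --package megazord --target x86_64-apple-darwin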
-import io -import re -import sys -import os.path import argparse -import subprocess +import collections +import difflib import hashlib +import io +import itertools import json import logging +import os.path +import re +import subprocess +import sys import textwrap -import difflib -import itertools -import collections -from urllib.parse import urlparse, urlunparse +from urllib.parse import urlparse from xml.sax import saxutils import requests # handy for debugging; WARNING (default), INFO, and DEBUG can all be useful -logging.basicConfig(level = logging.WARNING) +logging.basicConfig(level=logging.WARNING) # The targets used by rust-android-gradle, including the ones for unit testing. # https://github.com/mozilla/rust-android-gradle/blob/master/plugin/src/main/kotlin/com/nishtahir/RustAndroidPlugin.kt @@ -52,10 +52,11 @@ # in CI that runs on a Mac). # # The alternative is to only let you generate these summaries on a Mac, which is bleh. -ALL_IOS_TARGETS = ["fake-target-for-ios"] if sys.platform != "darwin" else [ - "x86_64-apple-ios", - "aarch64-apple-ios" -] +ALL_IOS_TARGETS = ( + ["fake-target-for-ios"] + if sys.platform != "darwin" + else ["x86_64-apple-ios", "aarch64-apple-ios"] +) ALL_TARGETS = ALL_ANDROID_TARGETS + ALL_IOS_TARGETS @@ -89,12 +90,14 @@ # Packages that get pulled into our dependency tree but we know we definitely don't # ever build with in practice, typically because they're platform-specific support # for platforms we don't actually support. -EXCLUDED_PACKAGES = set([ - "cloudabi", - "fuchsia-cprng", - "fuchsia-zircon", - "fuchsia-zircon-sys", -]) +EXCLUDED_PACKAGES = set( + [ + "cloudabi", + "fuchsia-cprng", + "fuchsia-zircon", + "fuchsia-zircon-sys", + ] +) # Known metadata for special extra packages that are not managed by cargo. EXTRA_PACKAGE_METADATA = { @@ -114,7 +117,7 @@ "name": "SwiftKeychainWrapper", "repository": "https://github.com/jrendel/SwiftKeychainWrapper", "license": "MIT", - "license_file": "https://raw.githubusercontent.com/jrendel/SwiftKeychainWrapper/develop/LICENSE" + "license_file": "https://raw.githubusercontent.com/jrendel/SwiftKeychainWrapper/develop/LICENSE", }, "ext-nss": { "name": "NSS", @@ -143,7 +146,8 @@ "license_url": "https://github.com/briansmith/ring/blob/master/LICENSE", # We're only using the API surface from ring, not its internals, # and all the relevant files and under this ISC-style license. - "license_text": textwrap.dedent(r""" + "license_text": textwrap.dedent( + r""" Copyright 2015-2016 Brian Smith. Permission to use, copy, modify, and/or distribute this software for any @@ -157,15 +161,16 @@ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - """) + """ + ), }, "ext-sqlite": { "name": "sqlite", "repository": "https://www.sqlite.org/", "license": "EXT-SQLITE", "license_file": "https://sqlite.org/copyright.html", - "license_text": "This software makes use of the 'SQLite' database engine, and we are very"\ - " grateful to D. Richard Hipp and team for producing it.", + "license_text": "This software makes use of the 'SQLite' database engine, and we are very" + " grateful to D. 
Richard Hipp and team for producing it.", }, } @@ -210,69 +215,57 @@ "tracing": { "license_file": { "check": None, - "fixup": "https://raw.githubusercontent.com/tokio-rs/tracing/master/LICENSE" + "fixup": "https://raw.githubusercontent.com/tokio-rs/tracing/master/LICENSE", } }, "tracing-futures": { "license_file": { "check": None, - "fixup": "https://raw.githubusercontent.com/tokio-rs/tracing/master/LICENSE" + "fixup": "https://raw.githubusercontent.com/tokio-rs/tracing/master/LICENSE", } }, # These packages do not unambiguously declare their licensing file. "publicsuffix": { - "license": { - "check": "MIT/Apache-2.0" - }, + "license": {"check": "MIT/Apache-2.0"}, "license_file": { "check": None, "fixup": "LICENSE-APACHE", - } + }, }, "siphasher": { - "license": { - "check": "MIT/Apache-2.0" - }, + "license": {"check": "MIT/Apache-2.0"}, "license_file": { "check": None, "fixup": "COPYING", - } + }, }, "futures-task": { - "license": { - "check": "MIT OR Apache-2.0" - }, + "license": {"check": "MIT OR Apache-2.0"}, "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/rust-lang/futures-rs/master/LICENSE-APACHE", - } + }, }, "typenum": { - "license": { - "check": "MIT OR Apache-2.0" - }, + "license": {"check": "MIT OR Apache-2.0"}, "license_file": { "check": None, "fixup": "LICENSE-APACHE", - } + }, }, "ohttp": { - "license": { - "check": "MIT OR Apache-2.0" - }, + "license": {"check": "MIT OR Apache-2.0"}, "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/martinthomson/ohttp/main/LICENSE-APACHE", - } + }, }, "bhttp": { - "license": { - "check": "MIT OR Apache-2.0" - }, + "license": {"check": "MIT OR Apache-2.0"}, "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/martinthomson/ohttp/main/LICENSE-APACHE", - } + }, }, # These packages do not include their license file in their release distributions, # so we have to fetch it over the network. Each has been manually checked and resolved @@ -287,7 +280,7 @@ "check": None, # N.B. this was moved to rust-lang org, but the repo link in the distribution hasn't been updated. 
"fixup": "https://raw.githubusercontent.com/rust-lang/backtrace-rs/master/LICENSE-APACHE", - } + }, }, "base16": { "repository": { @@ -296,7 +289,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/thomcc/rust-base16/master/LICENSE-APACHE", - } + }, }, "bitvec": { "license": { @@ -311,13 +304,11 @@ "repository": { "check": "https://github.com/rust-lang/cargo", }, - "license": { - "check": "MIT OR Apache-2.0" - }, + "license": {"check": "MIT OR Apache-2.0"}, "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/rust-lang/cargo/master/LICENSE-APACHE", - } + }, }, "failure_derive": { "repository": { @@ -326,7 +317,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/rust-lang-nursery/failure/master/LICENSE-APACHE", - } + }, }, "funty": { "license": { @@ -338,13 +329,11 @@ }, }, "fxhash": { - "license": { - "check": "Apache-2.0/MIT" - }, + "license": {"check": "Apache-2.0/MIT"}, "license_file": { "check": None, - "fixup": "https://www.apache.org/licenses/LICENSE-2.0.txt" - } + "fixup": "https://www.apache.org/licenses/LICENSE-2.0.txt", + }, }, "hawk": { "repository": { @@ -353,7 +342,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/taskcluster/rust-hawk/main/LICENSE", - } + }, }, "oneshot-uniffi": { "repository": { @@ -362,7 +351,7 @@ "license_file": { "check": None, "fixup": "https://www.apache.org/licenses/LICENSE-2.0.txt", - } + }, }, "windows-sys": { "repository": { @@ -409,7 +398,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/retep998/winapi-rs/0.3/LICENSE-MIT", - } + }, }, "libsqlite3-sys": { "repository": { @@ -418,17 +407,14 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/rusqlite/rusqlite/master/LICENSE", - } + }, }, "miniz_oxide": { - "license": { - "check": "MIT OR Zlib OR Apache-2.0", - "fixup": "MIT" - }, + "license": {"check": "MIT OR Zlib OR Apache-2.0", "fixup": "MIT"}, "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/Frommi/miniz_oxide/master/miniz_oxide/LICENSE-MIT.md", - } + }, }, "parking_lot_core": { "repository": { @@ -450,7 +436,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/sfackler/rust-phf/master/LICENSE", - } + }, }, "phf_codegen": { "repository": { @@ -459,7 +445,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/sfackler/rust-phf/master/LICENSE", - } + }, }, "phf_generator": { "repository": { @@ -507,13 +493,11 @@ }, }, "radium": { - "license": { - "check": "MIT" - }, + "license": {"check": "MIT"}, "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/bitvecto-rs/radium/master/LICENSE.txt", - } + }, }, "security-framework": { "repository": { @@ -534,22 +518,18 @@ }, }, "shlex": { - "repository": { - "check": "https://github.com/comex/rust-shlex" - }, + "repository": {"check": "https://github.com/comex/rust-shlex"}, "license_file": { "check": None, "fixup": "https://www.apache.org/licenses/LICENSE-2.0.txt", }, }, "tinyvec_macros": { - "license": { - "check": "MIT OR Apache-2.0 OR Zlib" - }, + "license": {"check": "MIT OR Apache-2.0 OR Zlib"}, "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/Soveu/tinyvec_macros/master/LICENSE-APACHE.md", - } + }, }, "url_serde": { "repository": { @@ -601,30 +581,22 @@ }, }, "proc-macro-error": { - "license": { - "check": "MIT OR Apache-2.0" - }, - "repository": { - "check": 
"https://gitlab.com/CreepySkeleton/proc-macro-error" - }, + "license": {"check": "MIT OR Apache-2.0"}, + "repository": {"check": "https://gitlab.com/CreepySkeleton/proc-macro-error"}, "license_file": { "check": None, - "fixup": "https://gitlab.com/CreepySkeleton/proc-macro-error/-/raw/master/LICENSE-APACHE" - } + "fixup": "https://gitlab.com/CreepySkeleton/proc-macro-error/-/raw/master/LICENSE-APACHE", + }, }, "proc-macro-error-attr": { # This is path dependency in `proc-macro-error` and uses # it's same license - "license": { - "check": "MIT OR Apache-2.0" - }, - "repository": { - "check": "https://gitlab.com/CreepySkeleton/proc-macro-error" - }, + "license": {"check": "MIT OR Apache-2.0"}, + "repository": {"check": "https://gitlab.com/CreepySkeleton/proc-macro-error"}, "license_file": { "check": None, - "fixup": "https://gitlab.com/CreepySkeleton/proc-macro-error/-/raw/master/LICENSE-APACHE" - } + "fixup": "https://gitlab.com/CreepySkeleton/proc-macro-error/-/raw/master/LICENSE-APACHE", + }, }, # Based on https://github.com/Alexhuszagh/minimal-lexical/blob/main/LICENSE.md # the library is licensed as dual licensed and a portion of the @@ -637,13 +609,11 @@ "license": { "check": "MIT/Apache-2.0", }, - "repository": { - "check": "https://github.com/Alexhuszagh/minimal-lexical" - }, + "repository": {"check": "https://github.com/Alexhuszagh/minimal-lexical"}, "license_file": { "check": None, - "fixup": "https://github.com/Alexhuszagh/minimal-lexical/blob/main/LICENSE-APACHE" - } + "fixup": "https://github.com/Alexhuszagh/minimal-lexical/blob/main/LICENSE-APACHE", + }, }, # These packages do not make it easy to infer a URL at which their license can be read, # so we track it down by hand and hard-code it here. @@ -653,13 +623,13 @@ }, "license_url": { "check": None, - "fixup": "https://github.com/cryptocorrosion/cryptocorrosion/blob/master/stream-ciphers/chacha/LICENSE-APACHE" + "fixup": "https://github.com/cryptocorrosion/cryptocorrosion/blob/master/stream-ciphers/chacha/LICENSE-APACHE", }, }, "ffi-support": { "license_url": { "check": None, - "fixup": "https://raw.githubusercontent.com/mozilla/ffi-support/main/LICENSE-APACHE" + "fixup": "https://raw.githubusercontent.com/mozilla/ffi-support/main/LICENSE-APACHE", }, }, "mime": { @@ -670,7 +640,7 @@ }, "license_url": { "check": None, - "fixup": "https://github.com/hyperium/mime/blob/v0.3.17/LICENSE-APACHE" + "fixup": "https://github.com/hyperium/mime/blob/v0.3.17/LICENSE-APACHE", }, }, "ppv-lite86": { @@ -679,7 +649,7 @@ }, "license_url": { "check": None, - "fixup": "https://github.com/cryptocorrosion/cryptocorrosion/blob/master/utils-simd/ppv-lite86/LICENSE-APACHE" + "fixup": "https://github.com/cryptocorrosion/cryptocorrosion/blob/master/utils-simd/ppv-lite86/LICENSE-APACHE", }, }, "time": { @@ -688,7 +658,7 @@ }, "license_url": { "check": None, - "fixup": "https://github.com/time-rs/time/blob/master/LICENSE-Apache" + "fixup": "https://github.com/time-rs/time/blob/master/LICENSE-Apache", }, }, "winapi": { @@ -703,17 +673,15 @@ }, }, "tinyvec": { - "repository": { - "check": "https://github.com/Lokathor/tinyvec" - }, + "repository": {"check": "https://github.com/Lokathor/tinyvec"}, "license_url": { "check": None, - "fixup": "https://github.com/Lokathor/tinyvec/blob/main/LICENSE-ZLIB.md" + "fixup": "https://github.com/Lokathor/tinyvec/blob/main/LICENSE-ZLIB.md", }, "license_file": { "check": None, - "fixup": "https://raw.githubusercontent.com/Lokathor/tinyvec/main/LICENSE-ZLIB.md" - } + "fixup": 
"https://raw.githubusercontent.com/Lokathor/tinyvec/main/LICENSE-ZLIB.md", + }, }, "glean-core": { "license_url": { @@ -723,7 +691,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/glean/main/LICENSE", - } + }, }, "glean-ffi": { "license_url": { @@ -733,7 +701,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/glean/main/LICENSE", - } + }, }, "lmdb-rkv-sys": { "license_url": { @@ -743,7 +711,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/lmdb-rs/master/LICENSE", - } + }, }, "uniffi_bindgen": { "license_url": { @@ -753,7 +721,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi_build": { "license_url": { @@ -763,7 +731,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi_checksum_derive": { "license_url": { @@ -773,7 +741,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi_core": { "license_url": { @@ -783,7 +751,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi_macros": { "license_url": { @@ -793,7 +761,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi_meta": { "license_url": { @@ -803,7 +771,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi_testing": { "license_url": { @@ -813,7 +781,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi_udl": { "license_url": { @@ -823,7 +791,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "uniffi": { "license_url": { @@ -833,7 +801,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/uniffi-rs/main/LICENSE", - } + }, }, "jexl-eval": { "license_url": { @@ -843,7 +811,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/jexl-rs/main/LICENSE", - } + }, }, "jexl-parser": { "license_url": { @@ -853,7 +821,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/mozilla/jexl-rs/main/LICENSE", - } + }, }, "lalrpop-util": { "license": { @@ -866,7 +834,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/lalrpop/lalrpop/master/LICENSE-APACHE", - } + }, }, "humansize": { "license": { @@ -879,7 +847,7 @@ "license_file": { "check": None, "fixup": "https://raw.githubusercontent.com/LeopoldArkham/humansize/master/LICENSE-APACHE", - } + }, }, "encoding_rs": { "license": { @@ -887,12 +855,12 @@ }, "license_url": { "check": None, - "fixup": "https://github.com/hsivonen/encoding_rs/blob/master/COPYRIGHT" + "fixup": "https://github.com/hsivonen/encoding_rs/blob/master/COPYRIGHT", }, "license_file": { "check": None, - "fixup": "https://raw.githubusercontent.com/hsivonen/encoding_rs/master/COPYRIGHT" - } + "fixup": "https://raw.githubusercontent.com/hsivonen/encoding_rs/master/COPYRIGHT", + }, }, "enum_variant_macros_macros": { "license": { @@ -900,12 +868,12 @@ }, "license_url": { "check": None, - "fixup": 
"https://github.com/MisterEggnog/enum-variant-macros/blob/trunk/LICENSE-APACHE" + "fixup": "https://github.com/MisterEggnog/enum-variant-macros/blob/trunk/LICENSE-APACHE", }, "license_file": { "check": None, - "fixup": "https://raw.githubusercontent.com/MisterEggnog/enum-variant-macros/trunk/LICENSE-APACHE" - } + "fixup": "https://raw.githubusercontent.com/MisterEggnog/enum-variant-macros/trunk/LICENSE-APACHE", + }, }, "extension-trait": { "license": { @@ -913,34 +881,33 @@ }, "license_url": { "check": None, - "fixup": "https://gitlab.com/KonradBorowski/extension-trait/-/blob/master/LICENSE-APACHE" + "fixup": "https://gitlab.com/KonradBorowski/extension-trait/-/blob/master/LICENSE-APACHE", }, "license_file": { "check": None, - "fixup": "https://gitlab.com/KonradBorowski/extension-trait/-/raw/master/LICENSE-APACHE" - } + "fixup": "https://gitlab.com/KonradBorowski/extension-trait/-/raw/master/LICENSE-APACHE", + }, }, - "xshell-macros": { "license": { "check": "MIT OR Apache-2.0", }, "license_url": { "check": None, - "fixup": "https://github.com/matklad/xshell/blob/master/LICENSE-APACHE" + "fixup": "https://github.com/matklad/xshell/blob/master/LICENSE-APACHE", }, "license_file": { "check": None, - "fixup": "https://raw.githubusercontent.com/matklad/xshell/master/LICENSE-APACHE" - } + "fixup": "https://raw.githubusercontent.com/matklad/xshell/master/LICENSE-APACHE", + }, }, # The following crates do not have repositories in the package metadata "openssl-macros": { "repository": { "check": None, - "fixup": "https://github.com/sfackler/rust-openssl" + "fixup": "https://github.com/sfackler/rust-openssl", } - } + }, } # Sets of common licence file names, by license type. @@ -972,10 +939,11 @@ def subprocess_run_cargo(args): env = os.environ.copy() env["RUSTUP_TOOLCHAIN"] = "nightly" p = subprocess.run( - ('cargo',) + args, + ("cargo",) + args, env=env, stdout=subprocess.PIPE, universal_newlines=True, + check=False, ) p.check_returncode() return p.stdout @@ -983,9 +951,11 @@ def subprocess_run_cargo(args): def get_workspace_metadata(): """Get metadata for all dependencies in the workspace.""" - return WorkspaceMetadata(json.loads(subprocess_run_cargo(( - 'metadata', '--locked', '--format-version', '1' - )))) + return WorkspaceMetadata( + json.loads( + subprocess_run_cargo(("metadata", "--locked", "--format-version", "1")) + ) + ) class WorkspaceMetadata(object): @@ -1013,7 +983,8 @@ def __init__(self, metadata): for key, change in fixups.items(): if info.get(key, None) != change["check"]: assert False, "Fixup check failed for {}.{}: {} != {}".format( - info["name"], key, info.get(key, None), change["check"]) + info["name"], key, info.get(key, None), change["check"] + ) if "fixup" in change: info[key] = change["fixup"] # Index packages for fast lookup. 
@@ -1063,24 +1034,30 @@ def get_package_dependencies(self, name, targets=None): """ targets = self.get_compatible_targets_for_package(name, targets) cargo_args = ( - '-Z', 'unstable-options', - 'build', - '--build-plan', - '--quiet', - '--locked', - '--package', name, + "-Z", + "unstable-options", + "build", + "--build-plan", + "--quiet", + "--locked", + "--package", + name, ) deps = set() for target in targets: - if target == "fake-target-for-ios": - target = "x86_64-apple-darwin" - buildPlan = subprocess_run_cargo(cargo_args + ('--target', target,)) + this_target = "x86_64-apple-darwin" if target == "fake-target-for-ios" else target + buildPlan = subprocess_run_cargo( + cargo_args + + ( + "--target", + this_target, + ) + ) buildPlan = json.loads(buildPlan) - for manifestPath in buildPlan['inputs']: + for manifestPath in buildPlan["inputs"]: info = self.get_package_by_manifest_path(manifestPath) - deps.add(info['id']) - deps |= self.get_extra_dependencies_not_managed_by_cargo( - name, targets, deps) + deps.add(info["id"]) + deps |= self.get_extra_dependencies_not_managed_by_cargo(name, targets, deps) return deps def get_extra_dependencies_not_managed_by_cargo(self, name, targets, deps): @@ -1117,7 +1094,8 @@ def get_compatible_targets_for_package(self, name, targets=None): for buildTarget in pkgInfo["targets"]: if "cdylib" in buildTarget["kind"]: targets = [ - target for target in targets if not self.target_is_ios(target)] + target for target in targets if not self.target_is_ios(target) + ] return targets def target_is_android(self, target): @@ -1142,8 +1120,7 @@ def is_external_dependency(self, id): # There's no "source" key in info for externally-managed dependencies return True manifest = pkgInfo["manifest_path"] - root = os.path.commonprefix( - [manifest, self.metadata["workspace_root"]]) + root = os.path.commonprefix([manifest, self.metadata["workspace_root"]]) if root != self.metadata["workspace_root"]: return True return False @@ -1155,19 +1132,22 @@ def get_manifest_path(self, id): def get_license_info(self, id): """Get the licensing info for the named dependency, or error if it can't be determined.""" pkgInfo = self.pkgInfoById[id] - chosenLicense = self.pick_most_acceptable_license( - id, pkgInfo["license"]) + chosenLicense = self.pick_most_acceptable_license(id, pkgInfo["license"]) licenseFile = self._find_license_file(id, chosenLicense, pkgInfo) assert pkgInfo["name"] is not None assert pkgInfo["repository"] is not None return { "name": pkgInfo["name"], - "id": pkgInfo.get("id", pkgInfo["name"]), # Our fake external packages don't have an id. + "id": pkgInfo.get( + "id", pkgInfo["name"] + ), # Our fake external packages don't have an id. "repository": pkgInfo["repository"], "license": chosenLicense, "license_file": licenseFile, - "license_text": self._fetch_license_text(id, licenseFile, pkgInfo), - "license_url": self._find_license_url(id, chosenLicense, licenseFile, pkgInfo) + "license_text": self._fetch_license_text(id, licenseFile, pkgInfo), + "license_url": self._find_license_url( + id, chosenLicense, licenseFile, pkgInfo + ), } def pick_most_acceptable_license(self, id, licenseId): @@ -1184,14 +1164,16 @@ def pick_most_acceptable_license(self, id, licenseId): return licenseId # Split "A/B" and "A OR B" into individual license names. - licenses = set(l.strip() - for l in re.split(r"\s*(?:/|\sOR\s)\s*", licenseId)) + licenses = set(l.strip() for l in re.split(r"\s*(?:/|\sOR\s)\s*", licenseId)) # Try to pick the "best" compatible license available. 
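        # (For example, a declared license of "MIT/Apache-2.0" or
        # "MIT OR Apache-2.0" has just been split into {"MIT", "Apache-2.0"};
        # whichever of those appears first in LICENSES_IN_PREFERENCE_ORDER is
        # the one returned below.)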
for license in LICENSES_IN_PREFERENCE_ORDER: if license in licenses: return license raise RuntimeError( - "Could not determine acceptable license for {}; license is '{}'".format(id, licenseId)) + "Could not determine acceptable license for {}; license is '{}'".format( + id, licenseId + ) + ) def _find_license_file(self, id, license, pkgInfo): logging.info("finding license file for %s", id) @@ -1206,23 +1188,25 @@ def _find_license_file(self, id, license, pkgInfo): except KeyError: licenseFileNames = COMMON_LICENSE_FILE_NAMES[""] logging.debug("candidate license filenames: %s", licenseFileNames) - foundLicenseFiles = [nm for nm in os.listdir( - pkgRoot) if nm.lower() in licenseFileNames] + foundLicenseFiles = [ + nm for nm in os.listdir(pkgRoot) if nm.lower() in licenseFileNames + ] if len(foundLicenseFiles) == 1: return foundLicenseFiles[0] # We couldn't find the right license file. Let's do what we can to help a human # pick the right one and add it to the list of manual fixups. if len(foundLicenseFiles) > 1: err = "Multiple ambiguous license files found for '{}'.\n".format( - pkgInfo["name"]) + pkgInfo["name"] + ) err += "Please select the correct license file and add it to `PACKAGE_METADATA_FIXUPS`.\n" err += "Potential license files: {}".format(foundLicenseFiles) else: - err = "Could not find license file for '{}'.\n".format( - pkgInfo["name"]) + err = "Could not find license file for '{}'.\n".format(pkgInfo["name"]) err += "Please locate the correct license file and add it to `PACKAGE_METADATA_FIXUPS`.\n" err += "You may need to poke around in the source repository at {}".format( - pkgInfo["repository"]) + pkgInfo["repository"] + ) raise RuntimeError(err) def _fetch_license_text(self, id, licenseFile, pkgInfo): @@ -1253,12 +1237,23 @@ def _find_license_url(self, id, chosenLicense, licenseFile, pkgInfo): repo = repo.replace("http://", "https://") if repo.startswith("https://github.com/"): # Some projects include extra context in their repo URL; strip it off. - for strip_suffix in [".git", "/tree/main/{}".format(pkgInfo["name"]), "/tree/master/{}".format(pkgInfo["name"]),]: + for strip_suffix in [ + ".git", + "/tree/main/{}".format(pkgInfo["name"]), + "/tree/master/{}".format(pkgInfo["name"]), + ]: if repo.endswith(strip_suffix): - repo = repo[:-len(strip_suffix)] + repo = repo[: -len(strip_suffix)] # Try a couple of common locations for the license file. - for path in ["/main/", "/master/", "/main/{}/".format(pkgInfo["name"]), "/master/{}/".format(pkgInfo["name"]),]: - licenseUrl = repo.replace("github.com", "raw.githubusercontent.com") + for path in [ + "/main/", + "/master/", + "/main/{}/".format(pkgInfo["name"]), + "/master/{}/".format(pkgInfo["name"]), + ]: + licenseUrl = repo.replace( + "github.com", "raw.githubusercontent.com" + ) licenseUrl += path + licenseFile r = requests.get(licenseUrl) if r.status_code == 200: @@ -1271,14 +1266,18 @@ def _find_license_url(self, id, chosenLicense, licenseFile, pkgInfo): if licenseUrl is None: err = "Could not infer license URL for '{}'.\n".format(pkgInfo["name"]) err += "Please locate the correct license URL and add it to `PACKAGE_METADATA_FIXUPS`.\n" - err += "You may need to poke around in the source repository at {}".format(repo) + err += "You may need to poke around in the source repository at {}".format( + repo + ) err += " for a {} license file named {}.".format(chosenLicense, licenseFile) raise RuntimeError(err) # As a special case, convert raw github URLs back into human-friendly page URLs. 
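        # (e.g. https://raw.githubusercontent.com/mozilla/glean/main/LICENSE
        # becomes https://github.com/mozilla/glean/blob/main/LICENSE)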
if licenseUrl.startswith("https://raw.githubusercontent.com/"): - licenseUrl = re.sub(r"raw.githubusercontent.com/([^/]+)/([^/]+)/", - r"github.com/\1/\2/blob/", - licenseUrl) + licenseUrl = re.sub( + r"raw.githubusercontent.com/([^/]+)/([^/]+)/", + r"github.com/\1/\2/blob/", + licenseUrl, + ) return licenseUrl @@ -1311,23 +1310,26 @@ def group_dependencies_for_printing(deps): # Group by shared license text where possible. depsByLicenseTextHash = collections.defaultdict(list) for info in deps: - if info["license"] in ("MPL-2.0", "Apache-2.0") or info["license"].startswith("EXT-"): + if info["license"] in ("MPL-2.0", "Apache-2.0") or info["license"].startswith( + "EXT-" + ): # We know these licenses to have shared license text, sometimes differing on e.g. punctuation details. # XXX TODO: should check this more explicitly to ensure they contain the expected text. licenseTextHash = info["license"] else: # Other license texts typically include copyright notices that we can't dedupe, except on whitespace. text = "".join(info["license_text"].split()) - licenseTextHash = info["license"] + ":" + \ - hashlib.sha256(text.encode("utf8")).hexdigest() + licenseTextHash = ( + info["license"] + ":" + hashlib.sha256(text.encode("utf8")).hexdigest() + ) depsByLicenseTextHash[licenseTextHash].append(info) # Add summary information for each group. groups = [] - for licenseTextHash, deps in depsByLicenseTextHash.items(): + for licenseTextHash, unsorted_deps in depsByLicenseTextHash.items(): # Sort by name and then by full package id, to produce a stable total order # that makes sense to humans and handles multiple versions of the same package. - deps = sorted(deps, key=lambda i: (i["name"], i["id"])) + deps = sorted(unsorted_deps, key=lambda i: (i["name"], i["id"])) # Find single canonical license text for the group, which is the whole point of grouping. license = deps[0]["license"] @@ -1341,8 +1343,7 @@ def group_dependencies_for_printing(deps): if "[yyyy]" in licenseText and "NSS" not in licenseText: break else: - raise RuntimeError( - "Could not find appropriate apache license text") + raise RuntimeError("Could not find appropriate apache license text") # Make a nice human-readable description for the group. # For some licenses we don't want to list all the deps in the title. @@ -1351,14 +1352,16 @@ def group_dependencies_for_printing(deps): else: title = make_license_title(license, deps) - groups.append({ - "title": title, - "dependencies": deps, - "license": license, - "license_text_hash": licenseTextHash, - "license_text": licenseText, - "license_url": deps[0].get("license_url"), - }) + groups.append( + { + "title": title, + "dependencies": deps, + "license": license, + "license_text_hash": licenseTextHash, + "license_text": licenseText, + "license_url": deps[0].get("license_url"), + } + ) # List groups in the order in which we prefer their license, then in alphabetical order # of the dependency names. This ensures a convenient and stable ordering. 
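The grouping key computed above deserves a concrete illustration: license text
is hashed with every whitespace character removed, so two copies of the same
license that differ only in wrapping or indentation land in a single group. A
minimal sketch with made-up values:

    import hashlib

    info = {"license": "MIT", "license_text": "Permission is hereby granted ..."}
    text = "".join(info["license_text"].split())  # drop ALL whitespace
    key = info["license"] + ":" + hashlib.sha256(text.encode("utf8")).hexdigest()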
@@ -1374,6 +1377,7 @@ def sort_key(group): def print_dependency_summary_markdown(deps, file=sys.stdout): """Print a nicely-formatted summary of dependencies and their license info.""" + def pf(string, *args): if args: string = string.format(*args) @@ -1381,8 +1385,12 @@ def pf(string, *args): pf("# Licenses for Third-Party Dependencies") pf("") - pf("Binary distributions of this software incorporate code from a number of third-party dependencies.") - pf("These dependencies are available under a variety of free and open source licenses,") + pf( + "Binary distributions of this software incorporate code from a number of third-party dependencies." + ) + pf( + "These dependencies are available under a variety of free and open source licenses," + ) pf("the details of which are reproduced below.") pf("") @@ -1391,8 +1399,13 @@ def pf(string, *args): # First a "table of contents" style thing. for section in sections: header = section["title"] - anchor = header.lower().replace(" ", "-").replace(".", - "").replace(",", "").replace(":", "") + anchor = ( + header.lower() + .replace(" ", "-") + .replace(".", "") + .replace(",", "") + .replace(":", "") + ) pf("* [{}](#{})", header, anchor) pf("-------------") @@ -1401,11 +1414,16 @@ def pf(string, *args): for section in sections: pf("## {}", section["title"]) pf("") - pkgs = ["[{}]({})".format(info["name"], info["repository"]) - for info in section["dependencies"]] + pkgs = [ + "[{}]({})".format(info["name"], info["repository"]) + for info in section["dependencies"] + ] # Dedupe in case of multiple versions of dependencies. pkgs = sorted(set(pkgs)) - pf("The following text applies to code linked from these dependencies:\n{}", ",\n".join(pkgs)) + pf( + "The following text applies to code linked from these dependencies:\n{}", + ",\n".join(pkgs), + ) pf("") pf("```") assert "```" not in section["license_text"] @@ -1416,6 +1434,7 @@ def pf(string, *args): def print_dependency_summary_pom(deps, file=sys.stdout): """Print a summary of dependencies and their license info in .pom file XML format.""" + def pf(string, *args): if args: string = string.format(*args) @@ -1423,8 +1442,12 @@ def pf(string, *args): pf("") pf("") @@ -1433,12 +1456,22 @@ def pf(string, *args): for section in sections: # For the .pom file we want to list each dependency separately unless the name/license/url are identical. 
# First collapse to a tuple as that's hashable, and put it in a set - deps = sorted(set(map(lambda d: (d["name"], d["license"], d["license_url"]), section["dependencies"]))) + deps = sorted( + set( + map( + lambda d: (d["name"], d["license"], d["license_url"]), + section["dependencies"], + ) + ) + ) - for (name, license, license_url) in deps: + for name, license, license_url in deps: dep = {"name": name, "license": license, "license_url": license_url} pf(" ") - pf(" {}", saxutils.escape(make_license_title(dep["license"], [dep]))) + pf( + " {}", + saxutils.escape(make_license_title(dep["license"], [dep])), + ) pf(" {}", saxutils.escape(dep["license_url"])) pf(" ") @@ -1447,19 +1480,33 @@ def pf(string, *args): if __name__ == "__main__": parser = argparse.ArgumentParser( - description="summarize dependencies and license information") - parser.add_argument('-p', '--package', action="append", dest="packages") - parser.add_argument('--target', action="append", dest="targets") - parser.add_argument('--all-android-targets', action="append_const", - dest="targets", const=ALL_ANDROID_TARGETS) - parser.add_argument('--all-ios-targets', action="append_const", - dest="targets", const=ALL_IOS_TARGETS) - parser.add_argument('--format', - choices=["markdown", "json", "pom"], - default="markdown", - help="output format to generate") - parser.add_argument('--check', action="store", - help="suppress output, instead checking that it matches the given file") + description="summarize dependencies and license information" + ) + parser.add_argument("-p", "--package", action="append", dest="packages") + parser.add_argument("--target", action="append", dest="targets") + parser.add_argument( + "--all-android-targets", + action="append_const", + dest="targets", + const=ALL_ANDROID_TARGETS, + ) + parser.add_argument( + "--all-ios-targets", + action="append_const", + dest="targets", + const=ALL_IOS_TARGETS, + ) + parser.add_argument( + "--format", + choices=["markdown", "json", "pom"], + default="markdown", + help="output format to generate", + ) + parser.add_argument( + "--check", + action="store", + help="suppress output, instead checking that it matches the given file", + ) args = parser.parse_args() # Default to listing dependencies for the "megazord" and "megazord_ios" packages, @@ -1469,8 +1516,9 @@ def pf(string, *args): if args.targets: # Flatten the lists introduced by --all-XXX-targets options. 
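        # (e.g. on a darwin host, ["x86_64-linux-android", ALL_IOS_TARGETS]
        # flattens to
        # ["x86_64-linux-android", "x86_64-apple-ios", "aarch64-apple-ios"])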
- args.targets = list(itertools.chain( - *([t] if isinstance(t, str) else t for t in args.targets))) + args.targets = list( + itertools.chain(*([t] if isinstance(t, str) else t for t in args.targets)) + ) metadata = get_workspace_metadata() deps = metadata.get_dependency_summary(args.packages, args.targets) @@ -1490,11 +1538,11 @@ def pf(string, *args): if args.check: output.seek(0) outlines = output.readlines() - with open(args.check, 'r') as f: + with open(args.check, "r") as f: checklines = f.readlines() if outlines != checklines: raise RuntimeError( "Dependency details have changed from those in {}:\n{}".format( - args.check, - "".join(difflib.unified_diff(checklines, outlines)) - )) + args.check, "".join(difflib.unified_diff(checklines, outlines)) + ) + ) diff --git a/tools/loc_summary.py b/tools/loc_summary.py index 9250790357..f8d9cbfb9b 100644 --- a/tools/loc_summary.py +++ b/tools/loc_summary.py @@ -25,13 +25,13 @@ # [1] https://github.com/XAMPPRocky/tokei # -import os.path import argparse -import subprocess import json +import os.path +import subprocess ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -DEFAULT_PATH = os.path.join(ROOT_DIR, 'components') +DEFAULT_PATH = os.path.join(ROOT_DIR, "components") # For each type of file in our repo, is it: # * shared code across platforms? @@ -40,86 +40,90 @@ # * some sort of meta file that we shouldn't count at all? FILETYPE_TO_SUMMARYTYPE = { - 'CHeader': 'ios', - 'Java': 'android', - 'Json': None, - 'Kotlin': 'android', - 'Markdown': None, - 'Prolog': 'android', # Our .pro files are actually proguard rules, not prolog... - 'Protobuf': 'shared', - 'Rust': 'shared', - 'Sql': 'shared', - 'Swift': 'ios', - 'Toml': None, - 'XcodeConfig': 'ios', - 'Xml': None, + "CHeader": "ios", + "Java": "android", + "Json": None, + "Kotlin": "android", + "Markdown": None, + "Prolog": "android", # Our .pro files are actually proguard rules, not prolog... 
+ "Protobuf": "shared", + "Rust": "shared", + "Sql": "shared", + "Swift": "ios", + "Toml": None, + "XcodeConfig": "ios", + "Xml": None, } def get_loc_summary(path): - path = os.path.abspath(path) - p = subprocess.run([ - 'tokei', - '--output', 'json', - '--exclude', 'examples', - path, - ], stdout=subprocess.PIPE, universal_newlines=True) - p.check_returncode() - summary = { - 'shared': 0, - 'android': 0, - 'ios': 0, - 'all': 0, - } - lineOfCode = json.loads(p.stdout)['inner'] - for fileType in lineOfCode: - summaryType = FILETYPE_TO_SUMMARYTYPE[fileType] - if summaryType is not None: - summary[summaryType] += lineOfCode[fileType]['code'] - summary['all'] += lineOfCode[fileType]['code'] - if path.startswith(ROOT_DIR): - summary['path'] = os.path.join('.', path[len(ROOT_DIR) + 1:]) - else: - summary['path'] = path - return summary + path = os.path.abspath(path) + p = subprocess.run( + [ + "tokei", + "--output", + "json", + "--exclude", + "examples", + path, + ], + stdout=subprocess.PIPE, + universal_newlines=True, + check=False, + ) + p.check_returncode() + summary = { + "shared": 0, + "android": 0, + "ios": 0, + "all": 0, + } + lineOfCode = json.loads(p.stdout)["inner"] + for fileType in lineOfCode: + summaryType = FILETYPE_TO_SUMMARYTYPE[fileType] + if summaryType is not None: + summary[summaryType] += lineOfCode[fileType]["code"] + summary["all"] += lineOfCode[fileType]["code"] + if path.startswith(ROOT_DIR): + summary["path"] = os.path.join(".", path[len(ROOT_DIR) + 1 :]) + else: + summary["path"] = path + return summary def print_loc_summaries(paths): - summaries = [get_loc_summary(path) for path in paths] - headers = ['Path', 'Shared', 'Android', 'iOS', 'Total', 'Shared %'] - nameWidth = max( - len(headers[0]), - max(len(summary['path']) for summary in summaries) - ) - numWidth = max( - max(len(h) for h in headers[1:]), - max(len(str(summary['all'])) for summary in summaries) - ) - totalWidth = (nameWidth + 5) + (numWidth + 3) * 5 - 1 - print("-" * totalWidth) - print( - f"| {headers[0]:<{nameWidth}} | " - f"{headers[1]:>{numWidth}} | " - f"{headers[2]:>{numWidth}} | " - f"{headers[3]:>{numWidth}} | " - f"{headers[4]:>{numWidth}} | " - f"{headers[5]:>{numWidth}} |" - ) - print("-" * totalWidth) - for summary in summaries: + summaries = [get_loc_summary(path) for path in paths] + headers = ["Path", "Shared", "Android", "iOS", "Total", "Shared %"] + nameWidth = max(len(headers[0]), max(len(summary["path"]) for summary in summaries)) # noqa: PLW3301 + numWidth = max( # noqa: PLW3301 + max(len(h) for h in headers[1:]), + max(len(str(summary["all"])) for summary in summaries), + ) + totalWidth = (nameWidth + 5) + (numWidth + 3) * 5 - 1 + print("-" * totalWidth) print( - f"| {summary['path']:<{nameWidth}} | " - f"{summary['shared']:>{numWidth}} | " - f"{summary['android']:>{numWidth}} | " - f"{summary['ios']:>{numWidth}} | " - f"{summary['all']:>{numWidth}} | " - f"{(summary['shared'] / summary['all']):>{numWidth}.2%} |" + f"| {headers[0]:<{nameWidth}} | " + f"{headers[1]:>{numWidth}} | " + f"{headers[2]:>{numWidth}} | " + f"{headers[3]:>{numWidth}} | " + f"{headers[4]:>{numWidth}} | " + f"{headers[5]:>{numWidth}} |" ) - print("-" * totalWidth) + print("-" * totalWidth) + for summary in summaries: + print( + f"| {summary['path']:<{nameWidth}} | " + f"{summary['shared']:>{numWidth}} | " + f"{summary['android']:>{numWidth}} | " + f"{summary['ios']:>{numWidth}} | " + f"{summary['all']:>{numWidth}} | " + f"{(summary['shared'] / summary['all']):>{numWidth}.2%} |" + ) + print("-" * totalWidth) if 
__name__ == "__main__": - parser = argparse.ArgumentParser(description="summarize lines-of-code statistics") - parser.add_argument('paths', type=str, nargs='*', default=[DEFAULT_PATH]) - args = parser.parse_args() - print_loc_summaries(args.paths) + parser = argparse.ArgumentParser(description="summarize lines-of-code statistics") + parser.add_argument("paths", type=str, nargs="*", default=[DEFAULT_PATH]) + args = parser.parse_args() + print_loc_summaries(args.paths) diff --git a/tools/update-moz-central-vendoring.py b/tools/update-moz-central-vendoring.py index 8b4ccd96ea..d08d118b25 100755 --- a/tools/update-moz-central-vendoring.py +++ b/tools/update-moz-central-vendoring.py @@ -14,6 +14,7 @@ ) APP_SERVICES_ROOT = pathlib.Path(__file__).parent.parent + def main(): args = parse_args() moz_central_root = pathlib.Path(args.moz_central_dir) @@ -23,30 +24,38 @@ def main(): run_process(["./mach", "uniffi", "generate"], cwd=moz_central_root) print("The vendoring was successful") - print(" - If you saw a warning saying `There are 2 different versions of crate X`, then " - "follow the listed steps to resolve that issue") + print( + " - If you saw a warning saying `There are 2 different versions of crate X`, then " + "follow the listed steps to resolve that issue" + ) print(" - Run `./mach cargo vet` to manually vet any new dependencies") print(" - Commit any changes and submit a phabricator patch") print() - print("Details here: https://github.com/mozilla/application-services/blob/main/docs/howtos/vendoring-into-mozilla-central.md") + print( + "Details here: https://github.com/mozilla/application-services/blob/main/docs/howtos/vendoring-into-mozilla-central.md" + ) + def run_process(command, cwd): - result = subprocess.run(command, cwd=cwd) + result = subprocess.run(command, cwd=cwd, check=False) if result.returncode != 0: print("Vendoring failed, please see above errors", file=sys.stderr) sys.exit(1) + def parse_args(): parser = argparse.ArgumentParser() parser.add_argument("moz_central_dir") return parser.parse_args() + def get_app_services_rev(): return subprocess.check_output( - ["git", "rev-parse", "HEAD"], - encoding="utf-8", - cwd=APP_SERVICES_ROOT, - ).strip() + ["git", "rev-parse", "HEAD"], + encoding="utf-8", + cwd=APP_SERVICES_ROOT, + ).strip() + def update_cargo_toml(cargo_toml_path, app_services_rev): print(f"Updating application-services revision to {app_services_rev}") @@ -57,10 +66,13 @@ def update_cargo_toml(cargo_toml_path, app_services_rev): m = APP_SERVICES_DEPENDENCY_RE.match(line) if m: crate = m.group(1) - lines[i] = f'{crate} = {{ git = "https://github.com/mozilla/application-services", rev = "{app_services_rev}" }}\n' + lines[i] = ( + f'{crate} = {{ git = "https://github.com/mozilla/application-services", rev = "{app_services_rev}" }}\n' + ) with open(cargo_toml_path, "w") as f: f.write("".join(lines)) + if __name__ == "__main__": main()
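For reference, a sketch of the rewrite that update_cargo_toml performs on each
matching dependency line, assuming APP_SERVICES_DEPENDENCY_RE captures the
crate name and using a hypothetical crate and revision:

    # before:
    #   sync15 = { git = "https://github.com/mozilla/application-services", rev = "0abc..." }
    # after update_cargo_toml(cargo_toml_path, "1def..."):
    #   sync15 = { git = "https://github.com/mozilla/application-services", rev = "1def..." }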