diff --git a/.buckconfig b/.buckconfig
index fdff03c4de..cf218e51ad 100644
--- a/.buckconfig
+++ b/.buckconfig
@@ -1,4 +1,4 @@
-[repositories]
+[cells]
 root = .
 bxl = bxl
 prelude = prelude
@@ -6,7 +6,7 @@ prelude-si = prelude-si
 toolchains = toolchains
 none = none

-[repository_aliases]
+[cell_aliases]
 buck = none
 config = prelude
 fbcode = none
diff --git a/flake.lock b/flake.lock
index 47e65e5720..b167df906a 100644
--- a/flake.lock
+++ b/flake.lock
@@ -5,11 +5,11 @@
         "systems": "systems"
       },
       "locked": {
-        "lastModified": 1701680307,
-        "narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
+        "lastModified": 1710146030,
+        "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
+        "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
         "type": "github"
       },
       "original": {
@@ -20,11 +20,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1701626906,
-        "narHash": "sha256-ugr1QyzzwNk505ICE4VMQzonHQ9QS5W33xF2FXzFQ00=",
+        "lastModified": 1718089647,
+        "narHash": "sha256-COO4Xk2EzlZ3x9KCiJildlAA6cYDSPlnY8ms7pKl2Iw=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "0c6d8c783336a59f4c59d4a6daed6ab269c4b361",
+        "rev": "f7207adcc68d9cafa29e3cd252a18743ae512c6a",
         "type": "github"
       },
       "original": {
@@ -51,11 +51,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1715221036,
-        "narHash": "sha256-81EKOdlmT/4hZpImRlvMVPgmCcJYZjwlWbJese/XqUw=",
+        "lastModified": 1718072316,
+        "narHash": "sha256-p33h73iQ1HkLalCplV5MH0oP3HXRaH3zufnFqb5//ps=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "5c4bc8a0a70093a31a12509c5653c147f2310bd2",
+        "rev": "bedc47af18fc41bb7d2edc2b212d59ca36253f59",
         "type": "github"
       },
       "original": {
diff --git a/prelude-si/cargo.bzl b/prelude-si/cargo.bzl
index f8a278900f..3642f5b293 100644
--- a/prelude-si/cargo.bzl
+++ b/prelude-si/cargo.bzl
@@ -8,7 +8,7 @@ load(
 )
 load(
     "@prelude//tests:re_utils.bzl",
-    "get_re_executor_from_props",
+    "get_re_executors_from_props",
 )
 load(
     "@prelude-si//:test.bzl",
@@ -34,7 +34,7 @@ def cargo_clippy_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("cargo-clippy-args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -51,6 +51,7 @@ def cargo_clippy_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
@@ -126,7 +127,7 @@ def cargo_check_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("cargo-check-args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -143,6 +144,7 @@ def cargo_check_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
@@ -225,7 +227,7 @@ def cargo_doc_check_impl(ctx: AnalysisContext) -> list[[
     run_cmd_args = cmd_args([script]).hidden(ctx.attrs.srcs)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -242,6 +244,7 @@ def cargo_doc_check_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
@@ -312,7 +315,7 @@ def cargo_fmt_check_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("cargo-fmt-args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -329,6 +332,7 @@ def cargo_fmt_check_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
diff --git a/prelude-si/pnpm.bzl b/prelude-si/pnpm.bzl
index f33388b592..f40fc5e8ba 100644
--- a/prelude-si/pnpm.bzl
+++ b/prelude-si/pnpm.bzl
@@ -20,7 +20,7 @@ load(
 )
 load(
     "@prelude//tests:re_utils.bzl",
-    "get_re_executor_from_props",
+    "get_re_executors_from_props",
 )
 load(
     "@prelude-si//:test.bzl",
@@ -74,7 +74,7 @@ def _npm_test_impl(
     args_file = ctx.actions.write("args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -91,6 +91,7 @@ def _npm_test_impl(
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
@@ -504,11 +505,11 @@ package_node_modules = rule(
     attrs = {
         "turbo": attrs.dep(
             providers = [RunInfo],
-            default = "//third-party/node/turbo:turbo",
+            default = "root//third-party/node/turbo:turbo",
             doc = """Turbo dependency.""",
         ),
         "pnpm_lock": attrs.source(
-            default = "//:pnpm-lock.yaml",
+            default = "root//:pnpm-lock.yaml",
             doc = """Workspace Pnpm lock file""",
         ),
         "package_name": attrs.option(
@@ -548,7 +549,7 @@ pnpm_lock = rule(
             doc = """pnpm-lock.yaml source.""",
         ),
         "pnpm_workspace": attrs.dep(
-            default = "//:pnpm-workspace.yaml",
+            default = "root//:pnpm-workspace.yaml",
             doc = """Pnpm Workspace dependency.""",
         ),
     },
@@ -880,7 +881,7 @@ workspace_node_modules = rule(
     impl = workspace_node_modules_impl,
     attrs = {
         "pnpm_lock": attrs.source(
-            default = "//:pnpm-lock.yaml",
+            default = "root//:pnpm-lock.yaml",
             doc = """Workspace Pnpm lock file""",
         ),
         "root_workspace": attrs.bool(
diff --git a/prelude-si/python.bzl b/prelude-si/python.bzl
index a48f8ee1e9..284432aca3 100644
--- a/prelude-si/python.bzl
+++ b/prelude-si/python.bzl
@@ -20,7 +20,7 @@ load(
 )
 load(
     "@prelude//tests:re_utils.bzl",
-    "get_re_executor_from_props",
+    "get_re_executors_from_props",
 )
 load(
     "@prelude-si//:test.bzl",
@@ -59,7 +59,7 @@ def yapf_check_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -76,6 +76,7 @@ def yapf_check_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
diff --git a/prelude-si/rust.bzl b/prelude-si/rust.bzl
index bfaee26334..1266881ce7 100644
--- a/prelude-si/rust.bzl
+++ b/prelude-si/rust.bzl
@@ -16,7 +16,7 @@ load(
 )
 load(
     "@prelude//tests:re_utils.bzl",
-    "get_re_executor_from_props",
+    "get_re_executors_from_props",
 )
 load(
     "@prelude-si//:test.bzl",
@@ -41,7 +41,7 @@ def clippy_check_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -58,6 +58,7 @@ def clippy_check_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
@@ -105,7 +106,7 @@ def rustfmt_check_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -122,6 +123,7 @@ def rustfmt_check_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
diff --git a/prelude-si/shell.bzl b/prelude-si/shell.bzl
index fb9f6bc3cf..ae1b3bbc17 100644
--- a/prelude-si/shell.bzl
+++ b/prelude-si/shell.bzl
@@ -16,7 +16,7 @@ load(
 )
 load(
     "@prelude//tests:re_utils.bzl",
-    "get_re_executor_from_props",
+    "get_re_executors_from_props",
 )
 load(
     "@prelude-si//:test.bzl",
@@ -40,7 +40,7 @@ def shellcheck_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -57,6 +57,7 @@ def shellcheck_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
@@ -100,7 +101,7 @@ def shfmt_check_impl(ctx: AnalysisContext) -> list[[
     args_file = ctx.actions.write("args.txt", run_cmd_args)

     # Setup a RE executor based on the `remote_execution` param.
-    re_executor = get_re_executor_from_props(ctx)
+    re_executor, executor_overrides = get_re_executors_from_props(ctx)

     # We implicitly make the target run from the project root if remote
     # excution options were specified
@@ -117,6 +118,7 @@ def shfmt_check_impl(ctx: AnalysisContext) -> list[[
             labels = ctx.attrs.labels,
             contacts = ctx.attrs.contacts,
             default_executor = re_executor,
+            executor_overrides = executor_overrides,
             run_from_project_root = run_from_project_root,
             use_project_relative_paths = run_from_project_root,
         ),
diff --git a/prelude/.buckconfig b/prelude/.buckconfig
index 222fc0462a..1fe72c2033 100644
--- a/prelude/.buckconfig
+++ b/prelude/.buckconfig
@@ -5,17 +5,6 @@ prelude = .
 # but our custom config format (yuk) doesn't accept inline comments.
 # Therefore, we hide the name of the group when not open source.

-[not_repositories] # @oss-enable
-fbcode = ../..
-fbsource = ../../..
-ovr_config = ../../../arvr/tools/build_defs/config
-bazel_skylib = ../../../third-party/bazel-skylib
-fbcode_macros = ../../../tools/build_defs/fbcode_macros
-fbobjc_dylibs = ../../../tools/build_defs/fbobjc_dylibs
-buck = ../../../xplat/build_infra/buck_client
-buck_bazel_skylib = ../../../xplat/build_infra/buck_client/third-party/skylark/bazel-skylib
-toolchains = ../toolchains
-
 [repository_aliases]
 [not_repository_aliases] # @oss-enable
 config = ovr_config
diff --git a/prelude/BUCK b/prelude/BUCK
index f12663a051..befb579ce4 100644
--- a/prelude/BUCK
+++ b/prelude/BUCK
@@ -1,7 +1,10 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
 load(":native.bzl", prelude = "native")

 oncall("build_infra")

+source_listing()
+
 # Done to avoid triggering a lint rule that replaces glob with an fbcode macro
 globby = glob

diff --git a/prelude/CHANGELOG.md b/prelude/CHANGELOG.md
index 0ab36850df..524da63093 100644
--- a/prelude/CHANGELOG.md
+++ b/prelude/CHANGELOG.md
@@ -1,3 +1,3 @@
 # Buck2 Prelude

-* Initial version.
+- Initial version.
diff --git a/prelude/CONTRIBUTING.md b/prelude/CONTRIBUTING.md
index e2f05f03e6..7f7c52bbb8 100644
--- a/prelude/CONTRIBUTING.md
+++ b/prelude/CONTRIBUTING.md
@@ -1,15 +1,16 @@
 # Contributing to Buck2 Prelude

-This repository is a subset of <https://github.com/facebook/buck2>.
-You can contribute to either that repo, or this repo - changes will be mirrored to both.
+This repository is a subset of <https://github.com/facebook/buck2>. You can
+contribute to either that repo, or this repo - changes will be mirrored to both.

-We want to make contributing to this project as easy and transparent as possible.
+We want to make contributing to this project as easy and transparent as
+possible.

 ## Our Development Process

-Buck2 Prelude is currently developed in Facebook's internal repositories and then exported
-out to GitHub by a Facebook team member; however, we invite you to submit pull
-requests as described below.
+Buck2 Prelude is currently developed in Facebook's internal repositories and
+then exported out to GitHub by a Facebook team member; however, we invite you to
+submit pull requests as described below.

 ## Pull Requests

@@ -45,5 +46,6 @@ We use several Python formatters.
 ## License

 By contributing to Buck2 Prelude, you agree that your contributions will be
-licensed under both the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE)
-files in the root directory of this source tree.
+licensed under both the [LICENSE-MIT](LICENSE-MIT) and
+[LICENSE-APACHE](LICENSE-APACHE) files in the root directory of this source
+tree.
diff --git a/prelude/README.md b/prelude/README.md
index e41bdc072c..6830efe26d 100644
--- a/prelude/README.md
+++ b/prelude/README.md
@@ -1,12 +1,16 @@
 # Buck2 Prelude

-This repo contains a copy of the Buck2 Prelude, which is often included as a submodule with a Buck2 project.
-To obtain a copy of this repo, and set up other details of a Buck2, you should usually run `buck2 init --git`.
-Most information can be found on the main [Buck2 GitHub project](https://github.com/facebook/buck2).
+This repo contains a copy of the Buck2 Prelude, which is often included as a
+submodule with a Buck2 project. To obtain a copy of this repo, and set up other
+details of a Buck2, you should usually run `buck2 init --git`. Most information
+can be found on the main
+[Buck2 GitHub project](https://github.com/facebook/buck2).

-Pull requests and issues should be raised at [facebook/buck2](https://github.com/facebook/buck2) as that project
-is more closely monitored and contains CI checks.
+Pull requests and issues should be raised at
+[facebook/buck2](https://github.com/facebook/buck2) as that project is more
+closely monitored and contains CI checks.

 ## License

-Buck2 Prelude is both MIT and Apache License, Version 2.0 licensed, as found in the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) files.
+Buck2 Prelude is both MIT and Apache License, Version 2.0 licensed, as found in
+the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) files.
diff --git a/prelude/abi/BUCK.v2 b/prelude/abi/BUCK.v2
index bb72595e77..aa06c41471 100644
--- a/prelude/abi/BUCK.v2
+++ b/prelude/abi/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
 config_setting(
     name = "gnu",
     constraint_values = [
diff --git a/prelude/abi/constraints/BUCK.v2 b/prelude/abi/constraints/BUCK.v2
index 9b5673523b..7448fa7c4a 100644
--- a/prelude/abi/constraints/BUCK.v2
+++ b/prelude/abi/constraints/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
 # Used by open source projects to support `prelude//`

 constraint_setting(
diff --git a/prelude/alias.bzl b/prelude/alias.bzl
index 0593f3a841..d11ef4f884 100644
--- a/prelude/alias.bzl
+++ b/prelude/alias.bzl
@@ -19,6 +19,9 @@ def configured_alias_impl(ctx: AnalysisContext) -> list[Provider]:
         return ctx.attrs.fallback_actual.providers
     fail("must set one of `configured_actual` or `fallback_actual`")

+def toolchain_alias_impl(ctx: AnalysisContext) -> list[Provider]:
+    return ctx.attrs.actual.providers
+
 def versioned_alias_impl(_ctx: AnalysisContext) -> list[Provider]:
     # Should be intercepted in macro stub and converted to `alias`.
     fail("unsupported")
diff --git a/prelude/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl
index 6a0ddea23a..46d4fd8cd5 100644
--- a/prelude/android/aapt2_link.bzl
+++ b/prelude/android/aapt2_link.bzl
@@ -15,6 +15,7 @@ def get_aapt2_link(
         android_toolchain: AndroidToolchainInfo,
         resource_infos: list[AndroidResourceInfo],
         android_manifest: Artifact,
+        manifest_entries: dict,
         includes_vector_drawables: bool,
         no_auto_version: bool,
         no_version_transitions: bool,
@@ -22,9 +23,8 @@ def get_aapt2_link(
         no_resource_removal: bool,
         should_keep_raw_values: bool,
         package_id_offset: int,
-        resource_stable_ids: [Artifact, None],
+        resource_stable_ids: Artifact | None,
         preferred_density: [str, None],
-        min_sdk: [str, None],
         filter_locales: bool,
         locales: list[str],
         compiled_resource_apks: list[Artifact],
@@ -33,9 +33,9 @@ def get_aapt2_link(
     link_infos = []
     for use_proto_format in [False, True]:
         if use_proto_format:
-            identifier = "use_proto_format"
+            identifier = "use_proto"
         else:
-            identifier = "not_proto_format"
+            identifier = "not_proto"

         aapt2_command = cmd_args(android_toolchain.aapt2)
         aapt2_command.add("link")
@@ -48,8 +48,13 @@
             aapt2_command.add(["--proguard", proguard_config.as_output()])

         # We don't need the R.java output, but aapt2 won't output R.txt unless we also request R.java.
-        r_dot_java = ctx.actions.declare_output("{}/initial-rdotjava".format(identifier), dir = True)
+        # A drawback of this is that the directory structure for the R.java output is deep, resulting
+        # in long path issues on Windows. The structure is <output_dir>/<identifier>/unused-rjava/<package>/R.java
+        # We can declare a custom dummy package to drastically shorten <package>, which is sketchy, but effective
+        r_dot_java = ctx.actions.declare_output("{}/unused-rjava".format(identifier), dir = True)
         aapt2_command.add(["--java", r_dot_java.as_output()])
+        aapt2_command.add(["--custom-package", "dummy.package"])
+
         r_dot_txt = ctx.actions.declare_output("{}/R.txt".format(identifier))
         aapt2_command.add(["--output-text-symbols", r_dot_txt.as_output()])
@@ -76,8 +81,23 @@
             aapt2_command.add(["--stable-ids", resource_stable_ids])
         if preferred_density != None:
             aapt2_command.add(["--preferred-density", preferred_density])
-        if min_sdk != None:
-            aapt2_command.add(["--min-sdk-version", min_sdk])
+
+        manifest_entries_min_sdk = manifest_entries.get("min_sdk_version", None)
+        if manifest_entries_min_sdk != None:
+            aapt2_command.add(["--min-sdk-version", str(manifest_entries_min_sdk)])
+        manifest_entries_target_sdk = manifest_entries.get("target_sdk_version", None)
+        if manifest_entries_target_sdk != None:
+            aapt2_command.add(["--target-sdk-version", str(manifest_entries_target_sdk)])
+        manifest_entries_version_code = manifest_entries.get("version_code", None)
+        if manifest_entries_version_code != None:
+            aapt2_command.add(["--version-code", manifest_entries_version_code])
+        manifest_entries_version_name = manifest_entries.get("version_name", None)
+        if manifest_entries_version_name != None:
+            aapt2_command.add(["--version-name", manifest_entries_version_name])
+        manifest_entries_debug_mode = str(manifest_entries.get("debug_mode", "False")).lower() == "true"
+        if manifest_entries_debug_mode:
+            aapt2_command.add(["--debug-mode"])
+
         if filter_locales and len(locales) > 0:
             aapt2_command.add("-c")
@@ -97,8 +117,11 @@
             aapt2_compile_rules_args_file = ctx.actions.write("{}/aapt2_compile_rules_args_file".format(identifier), cmd_args(aapt2_compile_rules, delimiter = " "))

             aapt2_command.add("-R")
-            aapt2_command.add(cmd_args(aapt2_compile_rules_args_file, format = "@{}"))
-            aapt2_command.hidden(aapt2_compile_rules)
+            aapt2_command.add(cmd_args(
+                aapt2_compile_rules_args_file,
+                format = "@{}",
+                hidden = aapt2_compile_rules,
+            ))

         aapt2_command.add(additional_aapt2_params)
diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl
index 48cec50fb8..bc1a3cf22d 100644
--- a/prelude/android/android.bzl
+++ b/prelude/android/android.bzl
@@ -5,6 +5,10 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.

+load(
+    "@prelude//:validation_deps.bzl",
+    "VALIDATION_DEPS_ATTR_NAME",
+)
 load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS")
 load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version")
 load("@prelude//decls/android_rules.bzl", "AaptMode", "DuplicateResourceBehaviour")
@@ -51,6 +55,7 @@ implemented_rules = {

 # Can't load `read_bool` here because it will cause circular load.
 FORCE_SINGLE_CPU = read_root_config("buck2", "android_force_single_cpu") in ("True", "true")
 FORCE_SINGLE_DEFAULT_CPU = read_root_config("buck2", "android_force_single_default_cpu") in ("True", "true")
+DISABLE_STRIPPING = read_root_config("android", "disable_stripping") in ("True", "true")

 extra_attributes = {
     "android_aar": {
@@ -58,10 +63,13 @@ extra_attributes = {
         "compress_asset_libraries": attrs.default_only(attrs.bool(default = False)),
         "cpu_filters": attrs.list(attrs.enum(TargetCpuType), default = ALL_CPU_FILTERS),
         "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []),
+        "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None),
         "min_sdk_version": attrs.option(attrs.int(), default = None),
         "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None),
+        "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None),
         "package_asset_libraries": attrs.default_only(attrs.bool(default = True)),
         "resources_root": attrs.option(attrs.string(), default = None),
+        "strip_libraries": attrs.default_only(attrs.bool(default = not DISABLE_STRIPPING)),
         "_android_toolchain": toolchains_common.android(),
         "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"),
         "_is_building_android_binary": attrs.default_only(attrs.bool(default = True)),
@@ -87,6 +95,8 @@ extra_attributes = {
         "module_manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
         "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None),
         "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None),
+        "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None),
+        "strip_libraries": attrs.bool(default = not DISABLE_STRIPPING),
         "_android_toolchain": toolchains_common.android(),
         "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"),
         "_dex_toolchain": toolchains_common.dex(),
@@ -95,6 +105,7 @@ extra_attributes = {
         "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)),
         "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)),
         "_java_toolchain": toolchains_common.java_for_android(),
+        VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.transition_dep(cfg = cpu_transition), sorted = True, default = []),
     },
     "android_build_config": {
         "_android_toolchain": toolchains_common.android(),
@@ -115,6 +126,8 @@ extra_attributes = {
         "module_manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
         "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None),
         "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None),
+        "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None),
+        "use_derived_apk": attrs.bool(default = False),
         "_android_toolchain": toolchains_common.android(),
         "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"),
         "_dex_toolchain": toolchains_common.dex(),
@@ -123,6 +136,7 @@ extra_attributes = {
"_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.transition_dep(cfg = cpu_transition), sorted = True, default = []), }, "android_instrumentation_apk": { "aapt_mode": attrs.enum(AaptMode, default = "aapt1"), # Match default in V1 @@ -130,6 +144,7 @@ extra_attributes = { "cpu_filters": attrs.list(attrs.enum(TargetCpuType), default = []), "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), "dex_tool": attrs.string(default = "d8"), # Match default in V1 + "is_self_instrumenting": attrs.bool(default = False), "manifest": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "min_sdk_version": attrs.option(attrs.int(), default = None), @@ -137,21 +152,28 @@ extra_attributes = { "native_library_merge_sequence": attrs.option(attrs.list(attrs.any()), default = None), "_android_toolchain": toolchains_common.android(), "_dex_toolchain": toolchains_common.dex(), + "_exec_os_type": buck.exec_os_type_arg(), "_is_building_android_binary": attrs.default_only(attrs.bool(default = True)), "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), }, "android_instrumentation_test": { - "instrumentation_test_listener": attrs.option(attrs.source(), default = None), + "extra_instrumentation_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None), + "instrumentation_test_listener": attrs.option(attrs.exec_dep(), default = None), "instrumentation_test_listener_class": attrs.option(attrs.string(), default = None), + "is_self_instrumenting": attrs.bool(default = False), "_android_toolchain": toolchains_common.android(), "_exec_os_type": buck.exec_os_type_arg(), + "_java_test_toolchain": toolchains_common.java_for_host_test(), "_java_toolchain": toolchains_common.java_for_android(), }, "android_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "android_optional_jars": attrs.option(attrs.list(attrs.source()), default = None), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.default_only(attrs.option(attrs.int(), default = dex_min_sdk_version())), @@ -185,8 +207,13 @@ extra_attributes = { "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), }, "apk_genrule": genrule_attributes() | { + "default_outs": attrs.option(attrs.set(attrs.string(), sorted = False), default = None), + "outs": attrs.option(attrs.dict(key = attrs.string(), value = attrs.set(attrs.string(), sorted = False), sorted = False), default = None), "type": attrs.string(default = "apk"), + "use_derived_apk": attrs.bool(default = False), 
"_android_toolchain": toolchains_common.android(), + "_exec_os_type": buck.exec_os_type_arg(), + "_java_toolchain": toolchains_common.java_for_android(), }, "gen_aidl": { "import_paths": attrs.list(attrs.arg(), default = []), @@ -199,8 +226,12 @@ extra_attributes = { }, "robolectric_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "android_optional_jars": attrs.option(attrs.list(attrs.source()), default = None), + "java_agents": attrs.list(attrs.source(), default = []), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "robolectric_runtime_dependencies": attrs.list(attrs.source(), default = []), + "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), diff --git a/prelude/android/android_aar.bzl b/prelude/android/android_aar.bzl index 1b07a4ed8e..d2fdd7dd46 100644 --- a/prelude/android/android_aar.bzl +++ b/prelude/android/android_aar.bzl @@ -14,18 +14,22 @@ load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//android:configuration.bzl", "get_deps_by_platform") load("@prelude//android:cpu_filters.bzl", "CPU_FILTER_FOR_DEFAULT_PLATFORM", "CPU_FILTER_FOR_PRIMARY_PLATFORM") load("@prelude//android:util.bzl", "create_enhancement_context") -load("@prelude//java:java_providers.bzl", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") +load("@prelude//java:java_providers.bzl", "create_java_packaging_dep", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") +load("@prelude//utils:argfile.bzl", "argfile") +load("@prelude//utils:set.bzl", "set") def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: deps_by_platform = get_deps_by_platform(ctx) primary_platform = CPU_FILTER_FOR_PRIMARY_PLATFORM if CPU_FILTER_FOR_PRIMARY_PLATFORM in deps_by_platform else CPU_FILTER_FOR_DEFAULT_PLATFORM deps = deps_by_platform[primary_platform] - java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, deps)] + excluded_java_packaging_deps = get_all_java_packaging_deps(ctx, ctx.attrs.excluded_java_deps) + excluded_java_packaging_deps_targets = set([excluded_dep.label.raw_target() for excluded_dep in excluded_java_packaging_deps]) + java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, deps) if not excluded_java_packaging_deps_targets.contains(packaging_dep.label.raw_target())] android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, deps) - android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries = {}) + android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries = {}, should_replace_application_id_placeholders = False) if ctx.attrs.include_build_config_class: build_config_infos = list(android_packageable_info.build_config_infos.traverse()) if android_packageable_info.build_config_infos else [] @@ -34,16 +38,23 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: get_build_config_java_libraries(ctx, build_config_infos, package_type = "release", exopackage_modes = []), )) + enhancement_ctx = 
+    android_binary_native_library_info = get_android_binary_native_library_info(enhancement_ctx, android_packageable_info, deps_by_platform)
+    java_packaging_deps.extend([create_java_packaging_dep(
+        ctx,
+        lib.library_output.full_library,
+    ) for lib in android_binary_native_library_info.generated_java_code])
+
     jars = [dep.jar for dep in java_packaging_deps if dep.jar]
     classes_jar = ctx.actions.declare_output("classes.jar")
     java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo]
     classes_jar_cmd = cmd_args([
         java_toolchain.jar_builder,
         "--entries-to-jar",
-        ctx.actions.write("classes_jar_entries.txt", jars),
+        argfile(actions = ctx.actions, name = "classes_jar_entries.txt", args = jars),
         "--output",
         classes_jar.as_output(),
-    ]).hidden(jars)
+    ])

     if ctx.attrs.remove_classes:
         remove_classes_file = ctx.actions.write("remove_classes.txt", ctx.attrs.remove_classes)
@@ -56,6 +67,31 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]:

     ctx.actions.run(classes_jar_cmd, category = "create_classes_jar")

+    sub_targets = {}
+    dependency_sources_jars = [dep.sources_jar for dep in java_packaging_deps if dep.sources_jar]
+    if dependency_sources_jars:
+        combined_sources_jar = ctx.actions.declare_output("sources.jar")
+        java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo]
+        combined_sources_jar_cmd = cmd_args([
+            java_toolchain.jar_builder,
+            "--entries-to-jar",
+            argfile(actions = ctx.actions, name = "combined_sources_jar_entries.txt", args = dependency_sources_jars),
+            "--output",
+            combined_sources_jar.as_output(),
+        ])
+
+        if ctx.attrs.remove_classes:
+            remove_classes_file = ctx.actions.write("sources_remove_classes.txt", ctx.attrs.remove_classes)
+            combined_sources_jar_cmd.add([
+                "--blocklist-patterns",
+                remove_classes_file,
+                "--blocklist-patterns-matcher",
+                "remove_classes_patterns_matcher",
+            ])
+
+        ctx.actions.run(combined_sources_jar_cmd, category = "create_sources_jar")
+        sub_targets["sources.jar"] = [DefaultInfo(default_output = combined_sources_jar)]
+
     entries = [android_manifest, classes_jar]

     resource_infos = list(android_packageable_info.resource_infos.traverse()) if android_packageable_info.resource_infos else []
@@ -68,10 +104,10 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]:
         merge_resource_sources_cmd = cmd_args([
             android_toolchain.merge_android_resource_sources[RunInfo],
             "--resource-paths",
-            ctx.actions.write("resource_paths.txt", res_dirs),
+            argfile(actions = ctx.actions, name = "resource_paths.txt", args = res_dirs),
             "--output",
             merged_resource_sources_dir.as_output(),
-        ]).hidden(res_dirs)
+        ])

         ctx.actions.run(merge_resource_sources_cmd, category = "merge_android_resource_sources")

@@ -85,28 +121,31 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]:
     if cxx_resources:
         entries.append(cxx_resources)

-    enhancement_ctx = create_enhancement_context(ctx)
-    android_binary_native_library_info = get_android_binary_native_library_info(enhancement_ctx, android_packageable_info, deps_by_platform)
-    native_libs_file = ctx.actions.write("native_libs_entries.txt", android_binary_native_library_info.native_libs_for_primary_apk)
-    native_libs_assets_file = ctx.actions.write("native_libs_assets_entries.txt", android_binary_native_library_info.root_module_native_lib_assets)
+    native_libs_file = argfile(actions = ctx.actions, name = "native_libs_entries.txt", args = android_binary_native_library_info.native_libs_for_primary_apk)
+    native_libs_assets_file = argfile(actions = ctx.actions, name = "native_libs_assets_entries.txt", args = android_binary_native_library_info.root_module_native_lib_assets)
"native_libs_assets_entries.txt", args = android_binary_native_library_info.root_module_native_lib_assets) entries_file = ctx.actions.write("entries.txt", entries) aar = ctx.actions.declare_output("{}.aar".format(ctx.label.name)) - create_aar_cmd = cmd_args([ - android_toolchain.aar_builder, - "--output_path", - aar.as_output(), - "--entries_file", - entries_file, - "--on_duplicate_entry", - "fail", - "--native_libs_file", - native_libs_file, - "--native_libs_assets_file", - native_libs_assets_file, - ]).hidden(entries, android_binary_native_library_info.native_libs_for_primary_apk, android_binary_native_library_info.root_module_native_lib_assets) + create_aar_cmd = cmd_args( + [ + android_toolchain.aar_builder, + "--output_path", + aar.as_output(), + "--entries_file", + entries_file, + "--on_duplicate_entry", + "fail", + "--native_libs_file", + native_libs_file, + "--native_libs_assets_file", + native_libs_assets_file, + ], + hidden = [ + entries, + ], + ) ctx.actions.run(create_aar_cmd, category = "create_aar") - return [DefaultInfo(default_outputs = [aar], sub_targets = enhancement_ctx.get_sub_targets())] + return [DefaultInfo(default_outputs = [aar], sub_targets = enhancement_ctx.get_sub_targets() | sub_targets)] diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index 84c5e9deaf..ae5cfa93c4 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -5,12 +5,16 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnderTestInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo", "ExopackageInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:class_to_srcs.bzl", "merge_class_to_source_map_from_jar") load("@prelude//java:java_providers.bzl", "KeystoreInfo") +load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//java/utils:java_utils.bzl", "get_class_to_source_map_info") +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:set.bzl", "set") def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: @@ -31,35 +35,64 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: native_library_info = native_library_info, resources_info = resources_info, compress_resources_dot_arsc = ctx.attrs.resource_compression == "enabled" or ctx.attrs.resource_compression == "enabled_with_strings_as_assets", + validation_deps_outputs = get_validation_deps_outputs(ctx), + packaging_options = ctx.attrs.packaging_options, ) - exopackage_info = ExopackageInfo( - secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, - native_library_info = native_library_info.exopackage_info, - resources_info = resources_info.exopackage_info, - ) + if dex_files_info.secondary_dex_exopackage_info or native_library_info.exopackage_info or resources_info.exopackage_info: + exopackage_info = ExopackageInfo( + secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, + native_library_info = native_library_info.exopackage_info, + resources_info = resources_info.exopackage_info, + ) + default_output = ctx.actions.write( + "{}_exopackage_apk_warning".format(ctx.label.name), + "exopackage apks should 
+        )
+        sub_targets["exo_apk"] = [DefaultInfo(default_output = output_apk)]  # Used by tests
+    else:
+        exopackage_info = None
+        default_output = output_apk

-    class_to_srcs, class_to_srcs_subtargets = get_class_to_source_map_info(
+    class_to_srcs, _, class_to_srcs_subtargets = get_class_to_source_map_info(
         ctx,
         outputs = None,
         deps = android_binary_info.deps_by_platform[android_binary_info.primary_platform],
     )
+    transitive_class_to_src_map = merge_class_to_source_map_from_jar(
+        actions = ctx.actions,
+        name = ctx.label.name + ".transitive_class_to_src.json",
+        java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo],
+        relative_to = None,
+        deps = [class_to_srcs],
+    )
+    sub_targets["transitive_class_to_src_map"] = [DefaultInfo(default_output = transitive_class_to_src_map)]
+
+    # We can only be sure that an APK has native libs if it has any shared libraries. Prebuilt native libraries dirs can exist but be empty.
+    definitely_has_native_libs = bool(native_library_info.shared_libraries)
+
+    install_info = get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs)

     return [
-        AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest),
+        AndroidApkInfo(
+            apk = output_apk,
+            manifest = resources_info.manifest,
+            materialized_artifacts = android_binary_info.materialized_artifacts,
+            unstripped_shared_libraries = native_library_info.unstripped_shared_libraries,
+        ),
         AndroidApkUnderTestInfo(
             java_packaging_deps = set([dep.label.raw_target() for dep in java_packaging_deps]),
             keystore = keystore,
             manifest_entries = ctx.attrs.manifest_entries,
-            prebuilt_native_library_dirs = set([native_lib.raw_target for native_lib in native_library_info.apk_under_test_prebuilt_native_library_dirs]),
+            prebuilt_native_library_dirs = set([native_lib.raw_target for native_lib in native_library_info.prebuilt_native_library_dirs]),
             platforms = android_binary_info.deps_by_platform.keys(),
             primary_platform = android_binary_info.primary_platform,
             resource_infos = set([info.raw_target for info in resources_info.unfiltered_resource_infos]),
             r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]),
-            shared_libraries = set(native_library_info.apk_under_test_shared_libraries),
+            shared_libraries = set(native_library_info.shared_libraries),
         ),
-        DefaultInfo(default_output = output_apk, other_outputs = _get_exopackage_outputs(exopackage_info), sub_targets = sub_targets | class_to_srcs_subtargets),
-        get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info),
+        DefaultInfo(default_output = default_output, other_outputs = install_info.files.values() + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets),
+        install_info,
         TemplatePlaceholderInfo(
             keyed_variables = {
                 "classpath": cmd_args([dep.jar for dep in java_packaging_deps if dep.jar], delimiter = get_path_separator_for_exec_os(ctx)),
@@ -77,10 +110,12 @@ def build_apk(
         dex_files_info: DexFilesInfo,
         native_library_info: AndroidBinaryNativeLibsInfo,
         resources_info: AndroidBinaryResourcesInfo,
-        compress_resources_dot_arsc: bool = False) -> Artifact:
+        compress_resources_dot_arsc: bool = False,
+        validation_deps_outputs: [list[Artifact], None] = None,
+        packaging_options: dict | None = None) -> Artifact:
     output_apk = actions.declare_output("{}.apk".format(label.name))

-    apk_builder_args = cmd_args([
+    apk_builder_args = cmd_args(
         android_toolchain.apk_builder[RunInfo],
         "--output-apk",
         output_apk.as_output(),
@@ -94,12 +129,12 @@ def build_apk(
         keystore.properties,
         "--zipalign_tool",
         android_toolchain.zipalign[RunInfo],
-    ])
-
-    if android_toolchain.package_meta_inf_version_files:
-        apk_builder_args.add("--package-meta-inf-version-files")
-    if compress_resources_dot_arsc:
-        apk_builder_args.add("--compress-resources-dot-arsc")
+        "--package-meta-inf-version-files" if android_toolchain.package_meta_inf_version_files else [],
+        "--compress-resources-dot-arsc" if compress_resources_dot_arsc else [],
+        # The outputs of validation_deps need to be added as hidden arguments
+        # to an action for the validation_deps targets to be built and enforced.
+        hidden = validation_deps_outputs or [],
+    )

     asset_directories = (
         native_library_info.root_module_native_lib_assets +
@@ -108,15 +143,11 @@ def build_apk(
         dex_files_info.non_root_module_secondary_dex_dirs +
         resources_info.module_manifests
     )
-    asset_directories_file = actions.write("asset_directories.txt", asset_directories)
-    apk_builder_args.hidden(asset_directories)
-    native_library_directories = actions.write("native_library_directories", native_library_info.native_libs_for_primary_apk)
-    apk_builder_args.hidden(native_library_info.native_libs_for_primary_apk)
+    asset_directories_file = argfile(actions = actions, name = "asset_directories.txt", args = asset_directories)
+    native_library_directories = argfile(actions = actions, name = "native_library_directories", args = native_library_info.native_libs_for_primary_apk)
     all_zip_files = [resources_info.packaged_string_assets] if resources_info.packaged_string_assets else []
-    zip_files = actions.write("zip_files", all_zip_files)
-    apk_builder_args.hidden(all_zip_files)
-    jar_files_that_may_contain_resources = actions.write("jar_files_that_may_contain_resources", resources_info.jar_files_that_may_contain_resources)
-    apk_builder_args.hidden(resources_info.jar_files_that_may_contain_resources)
+    zip_files = argfile(actions = actions, name = "zip_files", args = all_zip_files)
+    jar_files_that_may_contain_resources = argfile(actions = actions, name = "jar_files_that_may_contain_resources", args = resources_info.jar_files_that_may_contain_resources)

     apk_builder_args.add([
         "--asset-directories-list",
@@ -129,15 +160,28 @@ def build_apk(
         jar_files_that_may_contain_resources,
     ])

+    if packaging_options:
+        for key, value in packaging_options.items():
+            if key != "excluded_resources":
+                fail("Only 'excluded_resources' is supported in packaging_options right now!")
+            else:
+                apk_builder_args.add("--excluded-resources", actions.write("excluded_resources.txt", value))
+
     actions.run(apk_builder_args, category = "apk_build")

     return output_apk

-def get_install_info(ctx: AnalysisContext, output_apk: Artifact, manifest: Artifact, exopackage_info: [ExopackageInfo, None]) -> InstallInfo:
+def get_install_info(
+        ctx: AnalysisContext,
+        output_apk: Artifact,
+        manifest: Artifact,
+        exopackage_info: [ExopackageInfo, None],
+        definitely_has_native_libs: bool = True,
+        staged_install_mode_default: bool = False) -> InstallInfo:
     files = {
         ctx.attrs.name: output_apk,
         "manifest": manifest,
-        "options": generate_install_config(ctx),
+        "options": generate_install_config(ctx, staged_install_mode_default),
     }

     if exopackage_info:
@@ -168,7 +212,7 @@ def get_install_info(ctx: AnalysisContext, output_apk: Artifact, manifest: Artif
     if secondary_dex_exopackage_info or native_library_exopackage_info or resources_info:
         files["exopackage_agent_apk"] = ctx.attrs._android_toolchain[AndroidToolchainInfo].exopackage_agent_apk

-    if hasattr(ctx.attrs, "cpu_filters"):
+    if definitely_has_native_libs and hasattr(ctx.attrs, "cpu_filters"):
         files["cpu_filters"] = ctx.actions.write("cpu_filters.txt", ctx.attrs.cpu_filters)

     return InstallInfo(
@@ -176,35 +220,13 @@ def get_install_info(ctx: AnalysisContext, output_apk: Artifact, manifest: Artif
         files = files,
     )

-def _get_exopackage_outputs(exopackage_info: ExopackageInfo) -> list[Artifact]:
-    outputs = []
-    secondary_dex_exopackage_info = exopackage_info.secondary_dex_info
-    if secondary_dex_exopackage_info:
-        outputs.append(secondary_dex_exopackage_info.metadata)
-        outputs.append(secondary_dex_exopackage_info.directory)
-
-    native_library_exopackage_info = exopackage_info.native_library_info
-    if native_library_exopackage_info:
-        outputs.append(native_library_exopackage_info.metadata)
-        outputs.append(native_library_exopackage_info.directory)
-
-    resources_info = exopackage_info.resources_info
-    if resources_info:
-        outputs.append(resources_info.res)
-        outputs.append(resources_info.res_hash)
-
-        if resources_info.assets:
-            outputs.append(resources_info.assets)
-            outputs.append(resources_info.assets_hash)
-
-    return outputs
-
-def generate_install_config(ctx: AnalysisContext) -> Artifact:
-    data = get_install_config()
+def generate_install_config(ctx: AnalysisContext, staged_install_mode_default: bool) -> Artifact:
+    data = get_install_config(staged_install_mode_default)
     return ctx.actions.write_json("install_android_options.json", data)

-def get_install_config() -> dict[str, typing.Any]:
+def get_install_config(staged_install_mode_default: bool) -> dict[str, typing.Any]:
     # TODO: read from toolchains
+    staged_install_mode = read_root_config("adb", "staged_install_mode", None)
     install_config = {
         "adb_restart_on_failure": read_root_config("adb", "adb_restart_on_failure", "true"),
         "agent_port_base": read_root_config("adb", "agent_port_base", "2828"),
@@ -214,7 +236,7 @@ def get_install_config() -> dict[str, typing.Any]:
         "multi_install_mode": read_root_config("adb", "multi_install_mode", "false"),
         "retry_delay_millis": read_root_config("adb", "retry_delay_millis", "500"),
         "skip_install_metadata": read_root_config("adb", "skip_install_metadata", "false"),
-        "staged_install_mode": read_root_config("adb", "staged_install_mode", "false"),
+        "staged_install_mode": staged_install_mode_default if staged_install_mode == None else staged_install_mode,
     }

     adb_executable = read_root_config("android", "adb", None)
diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl
index 6a270ba5d5..f1f2908f3b 100644
--- a/prelude/android/android_binary.bzl
+++ b/prelude/android/android_binary.bzl
@@ -10,6 +10,8 @@ load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary
 load("@prelude//android:android_build_config.bzl", "generate_android_build_config", "get_build_config_fields")
 load(
     "@prelude//android:android_providers.bzl",
+    "AndroidBinaryNativeLibsInfo",  # @unused Used as type
+    "AndroidBinaryResourcesInfo",  # @unused Used as type
     "AndroidBuildConfigInfo",  # @unused Used as type
     "BuildConfigField",
     "DexFilesInfo",
@@ -24,21 +26,30 @@ load("@prelude//android:preprocess_java_classes.bzl", "get_preprocessed_java_cla
 load("@prelude//android:proguard.bzl", "get_proguard_output")
 load("@prelude//android:util.bzl", "create_enhancement_context")
"create_enhancement_context") load("@prelude//android:voltron.bzl", "get_target_to_module_mapping") -load("@prelude//java:java_providers.bzl", "JavaPackagingInfo", "create_java_packaging_dep", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") +load( + "@prelude//java:java_providers.bzl", + "JavaPackagingDep", # @unused Used as type + "JavaPackagingInfo", + "create_java_packaging_dep", + "get_all_java_packaging_deps", + "get_all_java_packaging_deps_from_packaging_infos", +) load("@prelude//utils:expect.bzl", "expect") AndroidBinaryInfo = record( sub_targets = dict, - java_packaging_deps = list["JavaPackagingDep"], + java_packaging_deps = list[JavaPackagingDep], deps_by_platform = dict, primary_platform = str, dex_files_info = DexFilesInfo, - native_library_info = "AndroidBinaryNativeLibsInfo", - resources_info = "AndroidBinaryResourcesInfo", + native_library_info = AndroidBinaryNativeLibsInfo, + resources_info = AndroidBinaryResourcesInfo, + materialized_artifacts = list[Artifact], ) def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBinaryInfo: sub_targets = {} + materialized_artifacts = [] _verify_params(ctx) @@ -80,8 +91,10 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina use_proto_format = use_proto_format, referenced_resources_lists = referenced_resources_lists, manifest_entries = ctx.attrs.manifest_entries, + generate_strings_and_ids_separately = should_pre_dex, aapt2_preferred_density = ctx.attrs.aapt2_preferred_density, ) + sub_targets["manifest"] = [DefaultInfo(default_output = resources_info.manifest)] android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] compiled_r_dot_java_deps = [ create_java_packaging_dep( @@ -123,7 +136,9 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina else: jars_to_owners = {packaging_dep.jar: packaging_dep.jar.owner.raw_target() for packaging_dep in dex_java_packaging_deps} if ctx.attrs.preprocess_java_classes_bash: - jars_to_owners = get_preprocessed_java_classes(ctx, jars_to_owners) + jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(enhancement_ctx, jars_to_owners) + if materialized_artifacts_dir: + materialized_artifacts.append(materialized_artifacts_dir) if has_proguard_config: proguard_output = get_proguard_output( ctx, @@ -132,6 +147,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina resources_info.proguard_config_file, [no_dx[DefaultInfo].default_outputs[0] for no_dx in ctx.attrs.no_dx if len(no_dx[DefaultInfo].default_outputs) == 1], ) + materialized_artifacts.extend(proguard_output.proguard_artifacts) jars_to_owners = proguard_output.jars_to_owners dir_srcs = {artifact.basename: artifact for artifact in proguard_output.proguard_artifacts} for i, hidden_artifact in enumerate(proguard_output.proguard_hidden_artifacts): @@ -184,6 +200,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina dex_files_info = dex_files_info, native_library_info = native_library_info, resources_info = resources_info, + materialized_artifacts = materialized_artifacts, ) def get_build_config_java_libraries( @@ -204,6 +221,8 @@ def get_build_config_java_libraries( default_build_config_fields = get_build_config_fields(ctx.attrs.build_config_values) + android_binary_values_file = ctx.attrs.build_config_values_file[DefaultInfo].default_outputs[0] if isinstance(ctx.attrs.build_config_values_file, Dependency) else 
+
     java_libraries = []
     java_packages_seen = []
     for build_config_info in build_config_infos:
@@ -215,13 +234,14 @@ def get_build_config_java_libraries(
             for build_config_field in build_config_info.build_config_fields + default_build_config_fields + build_config_constants:
                 all_build_config_values[build_config_field.name] = build_config_field

+            values_file = android_binary_values_file if android_binary_values_file else build_config_info.values_file
             java_libraries.append(generate_android_build_config(
                 ctx,
                 java_package,
                 java_package,
                 True,  # use_constant_expressions
                 all_build_config_values.values(),
-                ctx.attrs.build_config_values_file[DefaultInfo].default_outputs[0] if isinstance(ctx.attrs.build_config_values_file, Dependency) else ctx.attrs.build_config_values_file,
+                values_file,
             )[1])

     return java_libraries
diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl
index 4fb656aa51..ed725e4ebf 100644
--- a/prelude/android/android_binary_native_library_rules.bzl
+++ b/prelude/android/android_binary_native_library_rules.bzl
@@ -17,7 +17,7 @@ load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
 load("@prelude//android:cpu_filters.bzl", "CPU_FILTER_FOR_PRIMARY_PLATFORM", "CPU_FILTER_TO_ABI_DIRECTORY")
 load("@prelude//android:util.bzl", "EnhancementContext")
 load("@prelude//android:voltron.bzl", "ROOT_MODULE", "all_targets_in_root_module", "get_apk_module_graph_info", "is_root_module")
-load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
+load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior")
 load(
     "@prelude//cxx:link.bzl",
     "cxx_link_shared_library",
@@ -36,9 +36,11 @@ load(
     "LibOutputStyle",
     "LinkArgs",
     "LinkInfo",
-    "Linkage",
+    "LinkOrdering",
     "SharedLibLinkable",
+    "get_lib_output_style",
     "set_link_info_link_whole",
+    "unpack_link_args",
     "wrap_link_info",
 )
 load(
@@ -52,14 +54,19 @@ load(
     "@prelude//linking:shared_libraries.bzl",
     "SharedLibrary",  # @unused Used as a type
     "SharedLibraryInfo",  # @unused Used as a type
+    "create_shlib",
     "get_strip_non_global_flags",
     "merge_shared_libraries",
     "traverse_shared_library_info",
+    "with_unique_str_sonames",
 )
 load("@prelude//linking:strip.bzl", "strip_object")
-load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", "post_order_traversal", "pre_order_traversal", "pre_order_traversal_by")
+load("@prelude//linking:types.bzl", "Linkage")
+load("@prelude//utils:argfile.bzl", "argfile")
+load("@prelude//utils:expect.bzl", "expect")
+load("@prelude//utils:graph_utils.bzl", "GraphTraversal", "depth_first_traversal_by", "post_order_traversal", "pre_order_traversal")
 load("@prelude//utils:set.bzl", "set", "set_type")  # @unused Used as a type
-load("@prelude//utils:utils.bzl", "dedupe_by_value", "expect")
+load("@prelude//utils:utils.bzl", "dedupe_by_value")

 # Native libraries on Android are built for a particular Application Binary Interface (ABI). We
 # package native libraries for one (or more, for multi-arch builds) ABIs into an Android APK.
@@ -76,22 +83,19 @@ load("@prelude//utils:utils.bzl", "dedupe_by_value", "expect")
 # 2. As assets. These are passed to the APK build as assets, and are stored at
 #    `assets/lib/<abi>/library.so` In the root module, we only package a native library as an
 #    asset if it is eligible to be an asset (e.g. `can_be_asset` on a `cxx_library`), and
-#    `package_asset_libraries` is set to True for the APK. We will additionally compress all the
-#    assets into a single `assets/lib/libs.xz` (or `assets/libs/libs.zstd` for `zstd` compression)
-#    if `compress_asset_libraries` is set to True for the APK. Regardless of whether we compress
-#    the assets or not, we create a metadata file at `assets/libs/metadata.txt` that has a single
-#    line entry for each packaged asset consisting of ' '.
+#    `package_asset_libraries` is set to True for the APK. We create a metadata file at
+#    `assets/libs/metadata.txt` that has a single line entry for each packaged asset consisting of
+#    ' '.
 #
 # Any native library that is not part of the root module (i.e. it is part of some other Voltron
-# module) is automatically packaged as an asset, and the assets for each module are compressed
-# to a single `assets/<module_name>/libs.xz`. Similarly, the metadata for each module is stored
+# module) is automatically packaged as an asset. Similarly, the metadata for each module is stored
 # at `assets/<module_name>/libs.txt`.

 def get_android_binary_native_library_info(
         enhance_ctx: EnhancementContext,
         android_packageable_info: AndroidPackageableInfo,
         deps_by_platform: dict[str, list[Dependency]],
-        apk_module_graph_file: [Artifact, None] = None,
+        apk_module_graph_file: Artifact | None = None,
         prebuilt_native_library_dirs_to_exclude: [set_type, None] = None,
         shared_libraries_to_exclude: [set_type, None] = None) -> AndroidBinaryNativeLibsInfo:
     ctx = enhance_ctx.ctx
@@ -104,14 +108,28 @@ def get_android_binary_native_library_info(
     ]
     included_shared_lib_targets = []
-    platform_to_original_native_linkables = {}
+    original_shared_libs_by_platform = {}  # dict[str, dict[str (soname), list[SharedLibrary]]]
     for platform, deps in deps_by_platform.items():
-        if platform == CPU_FILTER_FOR_PRIMARY_PLATFORM and platform not in ctx.attrs.cpu_filters:
+        if platform == CPU_FILTER_FOR_PRIMARY_PLATFORM and platform not in ctx.attrs.cpu_filters and len(ctx.attrs.cpu_filters) != 0:
             continue
-        native_linkables = get_native_linkables_by_default(ctx, platform, deps, shared_libraries_to_exclude)
-        included_shared_lib_targets.extend([lib.label.raw_target() for lib in native_linkables.values()])
-        platform_to_original_native_linkables[platform] = native_linkables
+        shared_libs = get_default_shared_libs(ctx, deps, shared_libraries_to_exclude)
+        included_shared_lib_targets.extend([lib.label.raw_target() for lib in shared_libs.values()])
+        original_shared_libs_by_platform[platform] = shared_libs
+
+    if not all_prebuilt_native_library_dirs and not included_shared_lib_targets:
+        enhance_ctx.debug_output("unstripped_native_libraries", ctx.actions.write("unstripped_native_libraries", []))
+        enhance_ctx.debug_output("unstripped_native_libraries_json", ctx.actions.write_json("unstripped_native_libraries_json", {}))
+        return AndroidBinaryNativeLibsInfo(
+            prebuilt_native_library_dirs = [],
+            shared_libraries = [],
+            native_libs_for_primary_apk = [],
+            exopackage_info = None,
+            root_module_native_lib_assets = [],
+            non_root_module_native_lib_assets = [],
+            generated_java_code = [],
+            unstripped_shared_libraries = None,
+        )

     native_libs = ctx.actions.declare_output("native_libs_symlink")
     native_libs_metadata = ctx.actions.declare_output("native_libs_metadata_symlink")
@@ -119,9 +137,8 @@ def get_android_binary_native_library_info(
     native_lib_assets_for_primary_apk = ctx.actions.declare_output("native_lib_assets_for_primary_apk_symlink")
     stripped_native_linkable_assets_for_primary_apk = ctx.actions.declare_output("stripped_native_linkable_assets_for_primary_apk_symlink")
root_module_metadata_assets = ctx.actions.declare_output("root_module_metadata_assets_symlink") - root_module_compressed_lib_assets = ctx.actions.declare_output("root_module_compressed_lib_assets_symlink") non_root_module_metadata_assets = ctx.actions.declare_output("non_root_module_metadata_assets_symlink") - non_root_module_compressed_lib_assets = ctx.actions.declare_output("non_root_module_compressed_lib_assets_symlink") + non_root_module_lib_assets = ctx.actions.declare_output("non_root_module_lib_assets_symlink") unstripped_native_libraries = ctx.actions.declare_output("unstripped_native_libraries") unstripped_native_libraries_json = ctx.actions.declare_output("unstripped_native_libraries_json") @@ -137,9 +154,8 @@ def get_android_binary_native_library_info( unstripped_native_libraries_files, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets, - root_module_compressed_lib_assets, non_root_module_metadata_assets, - non_root_module_compressed_lib_assets, + non_root_module_lib_assets, ] fake_input = ctx.actions.write("dynamic.trigger", "") @@ -148,7 +164,7 @@ def get_android_binary_native_library_info( dynamic_inputs = [fake_input] if apk_module_graph_file: dynamic_inputs.append(apk_module_graph_file) - native_library_merge_map = None + split_groups_map = None native_library_merge_dir = None native_merge_debug = None generated_java_code = [] @@ -164,12 +180,14 @@ def get_android_binary_native_library_info( expect(glue_linkable.preferred_linkage == Linkage("static"), "buck2 currently only supports preferred_linkage='static' native_library_merge_glue") glue_linkables[platform] = (glue.label, glue_linkable.link_infos[LibOutputStyle("pic_archive")].default) - flattened_linkable_graphs_by_platform = {} + linkable_nodes_by_platform = {} native_library_merge_sequence = getattr(ctx.attrs, "native_library_merge_sequence", None) - has_native_merging = native_library_merge_sequence or getattr(ctx.attrs, "native_library_merge_map", None) + native_library_merge_map = getattr(ctx.attrs, "native_library_merge_map", None) + has_native_merging = native_library_merge_sequence or native_library_merge_map + enable_relinker = getattr(ctx.attrs, "enable_relinker", False) - if has_native_merging: - native_merge_debug = ctx.actions.declare_output("native_merge.debug") + if has_native_merging or enable_relinker: + native_merge_debug = ctx.actions.declare_output("native_merge_debug", dir = True) dynamic_outputs.append(native_merge_debug) # We serialize info about the linkable graph and the apk module mapping and pass that to an @@ -179,14 +197,15 @@ def get_android_binary_native_library_info( graph_node_map = get_linkable_graph_node_map_func(linkable_graph)() linkables_debug = ctx.actions.write("linkables." + platform, list(graph_node_map.keys())) enhance_ctx.debug_output("linkables." 
+ platform, linkables_debug) + linkable_nodes_by_platform[platform] = graph_node_map - flattened_linkable_graphs_by_platform[platform] = graph_node_map + lib_outputs_by_platform = _declare_library_subtargets(ctx, dynamic_outputs, original_shared_libs_by_platform, native_library_merge_map, native_library_merge_sequence, enable_relinker) if native_library_merge_sequence: native_library_merge_input_file = ctx.actions.write_json("mergemap.input", { - "linkable_graphs_by_platform": encode_linkable_graph_for_mergemap(flattened_linkable_graphs_by_platform), + "linkable_graphs_by_platform": encode_linkable_graph_for_mergemap(linkable_nodes_by_platform), "native_library_merge_sequence": ctx.attrs.native_library_merge_sequence, - "native_library_merge_sequence_blocklist": ctx.attrs.native_library_merge_sequence_blocklist, + "native_library_merge_sequence_blocklist": ctx.attrs.native_library_merge_sequence_blocklist or [], }) mergemap_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].mergemap_tool) mergemap_cmd.add(cmd_args(native_library_merge_input_file, format = "--mergemap-input={}")) @@ -194,11 +213,13 @@ def get_android_binary_native_library_info( mergemap_cmd.add(cmd_args(apk_module_graph_file, format = "--apk-module-graph={}")) native_library_merge_dir = ctx.actions.declare_output("merge_sequence_output") native_library_merge_map = native_library_merge_dir.project("merge.map") + split_groups_map = native_library_merge_dir.project("split_groups.map") mergemap_cmd.add(cmd_args(native_library_merge_dir.as_output(), format = "--output={}")) ctx.actions.run(mergemap_cmd, category = "compute_mergemap") enhance_ctx.debug_output("compute_merge_sequence", native_library_merge_dir) dynamic_inputs.append(native_library_merge_map) + dynamic_inputs.append(split_groups_map) mergemap_gencode_jar = None if has_native_merging and ctx.attrs.native_library_merge_code_generator: @@ -223,6 +244,8 @@ def get_android_binary_native_library_info( if apk_module_graph_file: get_module_from_target = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts).target_to_module_mapping_function + split_groups = None + merged_shared_lib_targets_by_platform = {} # dict[str, dict[Label, str]] if has_native_merging: native_library_merge_debug_outputs = {} @@ -230,10 +253,11 @@ def get_android_binary_native_library_info( # then set it as the binary's precomputed_apk_module_graph attr. 
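# Editorial sketch: hypothetical values showing the shapes of the two merge-spec attrs consumed in
# this region, as this file reads them. `native_library_merge_map` maps a merged soname to fancy
# regexes matched against constituent target strings; `native_library_merge_sequence` entries are
# (soname, patterns) tuples or lists of such tuples (groups the mergemap tool may split, hence the
# `split_groups.map` output below).
native_library_merge_map = {
    "libeverything.so": ["//native/.*", "//third-party/.*"],
}
native_library_merge_sequence = [
    ("libgroup1.so", ["//native/group1/.*"]),
    [("libgroup2a.so", ["//native/group2/a/.*"]), ("libgroup2b.so", ["//native/group2/b/.*"])],
]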
if ctx.attrs.native_library_merge_sequence: merge_map_by_platform = artifacts[native_library_merge_map].read_json() + split_groups = artifacts[split_groups_map].read_json() native_library_merge_debug_outputs["merge_sequence_output"] = native_library_merge_dir elif ctx.attrs.native_library_merge_map: merge_map_by_platform = {} - for platform, linkable_nodes in flattened_linkable_graphs_by_platform.items(): + for platform, linkable_nodes in linkable_nodes_by_platform.items(): merge_map = merge_map_by_platform.setdefault(platform, {}) merge_lib_to_fancy_regexes = { merge_lib: [regex(pattern, fancy = True) for pattern in patterns] @@ -251,28 +275,47 @@ def get_android_binary_native_library_info( break if merge_result: merge_map[str(target)] = merge_result - merge_map = ctx.actions.write_json("merge.map", merge_map_by_platform) + merge_map = ctx.actions.write_json("merge.map", merge_map_by_platform, pretty = True) native_library_merge_debug_outputs["merge_map_output"] = merge_map else: fail("unreachable") - merged_linkables = _get_merged_linkables( - ctx, - { - platform: LinkableMergeData( - glue_linkable = glue_linkables[platform] if glue_linkables else None, - default_shared_libs = platform_to_original_native_linkables[platform], - linkable_nodes = flattened_linkable_graphs_by_platform[platform], - merge_map = merge_map_by_platform[platform], - apk_module_graph = get_module_from_target, - ) - for platform in platform_to_original_native_linkables - }, - ) - debug_data_json = ctx.actions.write_json("native_merge_debug.json", merged_linkables.debug_info) + shared_object_targets = {} + debug_info_by_platform = {} # dict[str, MergedLinkablesDebugInfo] + merged_shared_libs_by_platform = {} # dict[str, dict[str, MergedSharedLibrary]] + for platform in original_shared_libs_by_platform: + merged_shared_libs, debug_info = _get_merged_linkables_for_platform( + ctx, + ctx.attrs._cxx_toolchain[platform][CxxToolchainInfo], + platform if len(original_shared_libs_by_platform) > 1 else None, + glue_linkable = glue_linkables[platform] if glue_linkables else None, + default_shared_libs = original_shared_libs_by_platform[platform], + linkable_nodes = linkable_nodes_by_platform[platform], + merge_map = merge_map_by_platform[platform], + merge_linker_args = ctx.attrs.native_library_merge_linker_args or {}, + apk_module_graph = get_module_from_target, + ) + debug_info_by_platform[platform] = debug_info + merged_shared_libs_by_platform[platform] = merged_shared_libs + merged_shared_lib_targets = {} + for soname, lib in merged_shared_libs.items(): + shared_object_targets[soname] = [str(target.raw_target()) for target in lib.primary_constituents] + + for target in lib.primary_constituents: + merged_shared_lib_targets[target] = soname + merged_shared_lib_targets_by_platform[platform] = merged_shared_lib_targets + + debug_data_json = ctx.actions.write_json("native_merge_debug.json", debug_info_by_platform, pretty = True) native_library_merge_debug_outputs["native_merge_debug.json"] = debug_data_json + + shared_object_targets_lines = "" + for soname, targets in shared_object_targets.items(): + shared_object_targets_lines += soname + " " + " ".join(targets) + "\n" + shared_object_targets_txt = ctx.actions.write("shared_object_targets.txt", shared_object_targets_lines) + native_library_merge_debug_outputs["shared_object_targets.txt"] = shared_object_targets_txt + if mergemap_gencode_jar: - merged_library_map = write_merged_library_map(ctx, merged_linkables) + merged_library_map = write_merged_library_map(ctx, 
merged_shared_libs_by_platform) mergemap_gencode = run_mergemap_codegen(ctx, merged_library_map) compile_to_jar(ctx, [mergemap_gencode], output = outputs[mergemap_gencode_jar]) native_library_merge_debug_outputs["NativeLibraryMergeGeneratedCode.java"] = mergemap_gencode @@ -281,18 +324,30 @@ def get_android_binary_native_library_info( ctx.actions.symlinked_dir(outputs[native_merge_debug], native_library_merge_debug_outputs) - final_platform_to_native_linkables = { + final_shared_libs_by_platform = { platform: {soname: d.lib for soname, d in merged_shared_libs.items()} - for platform, merged_shared_libs in merged_linkables.shared_libs_by_platform.items() + for platform, merged_shared_libs in merged_shared_libs_by_platform.items() } + elif enable_relinker: + final_shared_libs_by_platform, native_library_merge_debug_outputs = _create_all_relinkable_links( + ctx, + original_shared_libs_by_platform, + linkable_nodes_by_platform, + ) + ctx.actions.symlinked_dir(outputs[native_merge_debug], native_library_merge_debug_outputs) + else: - final_platform_to_native_linkables = platform_to_original_native_linkables + final_shared_libs_by_platform = original_shared_libs_by_platform + + if enable_relinker: + unrelinked_shared_libs_by_platform = final_shared_libs_by_platform + final_shared_libs_by_platform = relink_libraries(ctx, final_shared_libs_by_platform) + _link_library_subtargets(ctx, outputs, lib_outputs_by_platform, original_shared_libs_by_platform, unrelinked_shared_libs_by_platform, merged_shared_lib_targets_by_platform, split_groups, native_merge_debug, unrelinked = True) - if getattr(ctx.attrs, "enable_relinker", False): - final_platform_to_native_linkables = relink_libraries(ctx, final_platform_to_native_linkables) + _link_library_subtargets(ctx, outputs, lib_outputs_by_platform, original_shared_libs_by_platform, final_shared_libs_by_platform, merged_shared_lib_targets_by_platform, split_groups, native_merge_debug) unstripped_libs = {} - for platform, libs in final_platform_to_native_linkables.items(): + for platform, libs in final_shared_libs_by_platform.items(): for lib in libs.values(): unstripped_libs[lib.lib.output] = platform ctx.actions.write(outputs[unstripped_native_libraries], unstripped_libs.keys()) @@ -306,7 +361,7 @@ def get_android_binary_native_library_info( ctx, get_module_from_target, all_prebuilt_native_library_dirs, - final_platform_to_native_linkables, + final_shared_libs_by_platform, ) # Since we are using a dynamic action, we need to declare the outputs in advance. 
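# Editorial sketch: the dynamic-output pattern this function is built around, reduced to a minimal
# hypothetical example using only API calls that appear in this file. Outputs are declared eagerly
# (so subtargets can reference them) and bound inside the callback once dynamic inputs are readable.
def _sketch_dynamic_output(ctx, merge_map_artifact):
    out = ctx.actions.declare_output("derived.txt")

    def _bind(ctx, artifacts, outputs):
        merge_map = artifacts[merge_map_artifact].read_json()  # only readable at dynamic time
        ctx.actions.write(outputs[out], str(len(merge_map)))

    ctx.actions.dynamic_output(dynamic = [merge_map_artifact], inputs = [], outputs = [out.as_output()], f = _bind)
    return out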
@@ -318,14 +373,14 @@ def get_android_binary_native_library_info( ctx.actions.symlink_file(outputs[native_lib_assets_for_primary_apk], dynamic_info.native_lib_assets_for_primary_apk if dynamic_info.native_lib_assets_for_primary_apk else ctx.actions.symlinked_dir("empty_native_lib_assets", {})) ctx.actions.symlink_file(outputs[stripped_native_linkable_assets_for_primary_apk], dynamic_info.stripped_native_linkable_assets_for_primary_apk if dynamic_info.stripped_native_linkable_assets_for_primary_apk else ctx.actions.symlinked_dir("empty_stripped_native_linkable_assets", {})) ctx.actions.symlink_file(outputs[root_module_metadata_assets], dynamic_info.root_module_metadata_assets) - ctx.actions.symlink_file(outputs[root_module_compressed_lib_assets], dynamic_info.root_module_compressed_lib_assets) ctx.actions.symlink_file(outputs[non_root_module_metadata_assets], dynamic_info.non_root_module_metadata_assets) - ctx.actions.symlink_file(outputs[non_root_module_compressed_lib_assets], dynamic_info.non_root_module_compressed_lib_assets) + ctx.actions.symlink_file(outputs[non_root_module_lib_assets], dynamic_info.non_root_module_lib_assets if dynamic_info.non_root_module_lib_assets else ctx.actions.symlinked_dir("empty_non_root_module_lib_assets", {})) - ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = dynamic_outputs, f = dynamic_native_libs_info) + ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = [o.as_output() for o in dynamic_outputs], f = dynamic_native_libs_info) all_native_libs = ctx.actions.symlinked_dir("debug_all_native_libs", {"others": native_libs, "primary": native_libs_always_in_primary_apk}) - enhance_ctx.debug_output("debug_native_libs", all_native_libs) + lib_subtargets = _create_library_subtargets(lib_outputs_by_platform, native_libs) + enhance_ctx.debug_output("native_libs", all_native_libs, sub_targets = lib_subtargets) if native_merge_debug: enhance_ctx.debug_output("native_merge_debug", native_merge_debug) @@ -334,15 +389,134 @@ def get_android_binary_native_library_info( native_libs_for_primary_apk, exopackage_info = _get_exopackage_info(ctx, native_libs_always_in_primary_apk, native_libs, native_libs_metadata) return AndroidBinaryNativeLibsInfo( - apk_under_test_prebuilt_native_library_dirs = all_prebuilt_native_library_dirs, - apk_under_test_shared_libraries = included_shared_lib_targets, + prebuilt_native_library_dirs = all_prebuilt_native_library_dirs, + shared_libraries = included_shared_lib_targets, native_libs_for_primary_apk = native_libs_for_primary_apk, exopackage_info = exopackage_info, - root_module_native_lib_assets = [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets, root_module_compressed_lib_assets], - non_root_module_native_lib_assets = [non_root_module_metadata_assets, non_root_module_compressed_lib_assets], + root_module_native_lib_assets = [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets], + non_root_module_native_lib_assets = [non_root_module_metadata_assets, non_root_module_lib_assets], generated_java_code = generated_java_code, + unstripped_shared_libraries = unstripped_native_libraries_files, ) +_NativeLibSubtargetArtifacts = record( + default = Artifact, + unrelinked = Artifact | None, +) + +# Merged libraries are dynamic dependencies, but outputs need to be declared in advance to be used by subtargets. 
+# This means we have to declare outputs for all possible merged libs (every merged name and every unmerged library name). +def _declare_library_subtargets( + ctx: AnalysisContext, + dynamic_outputs: list[Artifact], + original_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], + native_library_merge_map, + native_library_merge_sequence, + enable_relinker: bool) -> dict[str, dict[str, _NativeLibSubtargetArtifacts]]: + lib_outputs_by_platform = {} + for platform, original_shared_libs in original_shared_libs_by_platform.items(): + sonames = set() + sonames.update(original_shared_libs.keys()) + if native_library_merge_map: + sonames.update(native_library_merge_map.keys()) + elif native_library_merge_sequence: + for entry in native_library_merge_sequence: + if type(entry) == "list": + sonames.update([soname for (soname, _) in entry]) + else: + (soname, _) = entry + sonames.add(soname) + + lib_outputs = {} + for soname in sonames.list(): + output_path = _platform_output_path(soname, platform if len(original_shared_libs_by_platform) > 1 else None) + lib_output = ctx.actions.declare_output(output_path, dir = True) + dynamic_outputs.append(lib_output) + if enable_relinker: + output_path = output_path + ".unrelinked" + unrelinked_lib_output = ctx.actions.declare_output(output_path, dir = True) + dynamic_outputs.append(unrelinked_lib_output) + lib_outputs[soname] = _NativeLibSubtargetArtifacts( + default = lib_output, + unrelinked = unrelinked_lib_output, + ) + else: + lib_outputs[soname] = _NativeLibSubtargetArtifacts( + default = lib_output, + unrelinked = None, + ) + + lib_outputs_by_platform[platform] = lib_outputs + return lib_outputs_by_platform + +# Bind debug library subtarget outputs to actual outputs. +# For individual libraries, link to either the unmerged or merged output. +# For merged libraries, link to either the merged output, or a symlinked dir of all merged split group outputs. 
+def _link_library_subtargets( + ctx: AnalysisContext, + outputs, # IndexSet[OutputArtifact] + lib_outputs_by_platform: dict[str, dict[str, _NativeLibSubtargetArtifacts]], # dict[platform, dict[soname, _NativeLibSubtargetArtifacts]] + original_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], + final_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], + merged_shared_lib_targets_by_platform: dict[str, dict[Label, str]], + split_groups: dict[str, str] | None, + native_merge_debug, + unrelinked: bool = False): + for platform, final_shared_libs in final_shared_libs_by_platform.items(): + merged_lib_outputs = {} + for soname, lib in final_shared_libs.items(): + base_soname = soname + if split_groups and soname in split_groups: + base_soname = split_groups[soname] + + group_outputs = merged_lib_outputs.setdefault(base_soname, {}) + group_outputs[soname] = lib.lib.output + + for soname, lib_outputs in lib_outputs_by_platform[platform].items(): + if soname in merged_lib_outputs: + group_outputs = merged_lib_outputs[soname] + elif soname in original_shared_libs_by_platform[platform]: + # link unmerged soname to merged output + original_shared_lib = original_shared_libs_by_platform[platform][soname] + merged_soname = merged_shared_lib_targets_by_platform[platform][original_shared_lib.label] + if split_groups and merged_soname in split_groups: + merged_soname = split_groups[merged_soname] + group_outputs = merged_lib_outputs[merged_soname] + else: + # merged group name has no constituents, link to debug output + group_outputs = {soname: native_merge_debug} + + output = lib_outputs.default + if unrelinked: + output = lib_outputs.unrelinked + ctx.actions.symlinked_dir(outputs[output], group_outputs) + +def _create_library_subtargets(lib_outputs_by_platform: dict[str, dict[str, _NativeLibSubtargetArtifacts]], native_libs: Artifact): + def create_library_subtarget(output: _NativeLibSubtargetArtifacts): + if output.unrelinked: + sub_targets = {"unrelinked": [DefaultInfo(default_outputs = [output.unrelinked])]} + return [DefaultInfo(default_outputs = [output.default], sub_targets = sub_targets)] + return [DefaultInfo(default_outputs = [output.default])] + + if len(lib_outputs_by_platform) > 1: + return { + platform: [DefaultInfo(default_outputs = [native_libs], sub_targets = { + soname: create_library_subtarget(output) + for soname, output in lib_outputs.items() + })] + for platform, lib_outputs in lib_outputs_by_platform.items() + } + elif len(lib_outputs_by_platform) == 1: + lib_outputs = list(lib_outputs_by_platform.values())[0] + return { + soname: create_library_subtarget(output) + for soname, output in lib_outputs.items() + } + else: + # TODO(ctolliday) at this point we should have thrown an error earlier if no libraries matched cpu_filters + # (or returned earlier if there are no native library deps) + return {} + # We could just return two artifacts of libs (one for the primary APK, one which can go # either into the primary APK or be exopackaged), and one artifact of assets, # but we'd need an extra action in order to combine them (we can't use `symlinked_dir` since @@ -351,12 +525,11 @@ _NativeLibsAndAssetsInfo = record( native_libs = Artifact, native_libs_metadata = Artifact, native_libs_always_in_primary_apk = Artifact, - native_lib_assets_for_primary_apk = [Artifact, None], - stripped_native_linkable_assets_for_primary_apk = [Artifact, None], + native_lib_assets_for_primary_apk = Artifact | None, + stripped_native_linkable_assets_for_primary_apk = Artifact | None, 
root_module_metadata_assets = Artifact, - root_module_compressed_lib_assets = Artifact, non_root_module_metadata_assets = Artifact, - non_root_module_compressed_lib_assets = Artifact, + non_root_module_lib_assets = [Artifact, None], ) def _get_exopackage_info( @@ -381,6 +554,7 @@ def _get_native_libs_and_assets( prebuilt_native_library_dirs_always_in_primary_apk = [] prebuilt_native_library_dir_assets_for_primary_apk = [] prebuilt_native_library_dir_module_assets_map = {} + prebuilt_native_library_dir_module_libs_map = {} for native_lib in all_prebuilt_native_library_dirs: native_lib_target = str(native_lib.raw_target) module = get_module_from_target(native_lib_target) @@ -396,7 +570,7 @@ def _get_native_libs_and_assets( if native_lib.is_asset: prebuilt_native_library_dir_module_assets_map.setdefault(module, []).append(native_lib) else: - prebuilt_native_library_dirs.append(native_lib) + prebuilt_native_library_dir_module_libs_map.setdefault(module, []).append(native_lib) elif native_lib.is_asset and is_packaging_native_libs_as_assets_supported: expect(not native_lib.for_primary_apk, "{} which is marked as needing to be in the primary APK cannot be an asset".format(native_lib_target)) prebuilt_native_library_dir_assets_for_primary_apk.append(native_lib) @@ -424,40 +598,53 @@ def _get_native_libs_and_assets( ) if prebuilt_native_library_dir_assets_for_primary_apk else None native_lib_module_assets_map = {} for module, native_lib_dir in prebuilt_native_library_dir_module_assets_map.items(): - native_lib_module_assets_map[module] = [_filter_prebuilt_native_library_dir( + native_lib_module_assets_map.setdefault(module, []).append(_filter_prebuilt_native_library_dir( ctx, native_lib_dir, "native_lib_assets_for_module_{}".format(module), package_as_assets = True, module = module, - )] + )) + for module, native_lib_dir in prebuilt_native_library_dir_module_libs_map.items(): + native_lib_module_assets_map.setdefault(module, []).append(_filter_prebuilt_native_library_dir( + ctx, + native_lib_dir, + "native_lib_libs_for_module_{}".format(module), + package_as_assets = False, + module = module, + )) stripped_linkables = _get_native_linkables(ctx, platform_to_native_linkables, get_module_from_target, is_packaging_native_libs_as_assets_supported) for module, native_linkable_assets in stripped_linkables.linkable_module_assets_map.items(): native_lib_module_assets_map.setdefault(module, []).append(native_linkable_assets) root_module_metadata_srcs = {} - root_module_compressed_lib_srcs = {} non_root_module_metadata_srcs = {} - non_root_module_compressed_lib_srcs = {} + non_root_module_libs_srcs = [] assets_for_primary_apk = filter(None, [native_lib_assets_for_primary_apk, stripped_linkables.linkable_assets_for_primary_apk]) stripped_linkable_assets_for_primary_apk = stripped_linkables.linkable_assets_for_primary_apk if assets_for_primary_apk: - metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, assets_for_primary_apk, ROOT_MODULE) + metadata_file = _get_native_libs_as_assets_metadata(ctx, assets_for_primary_apk, ROOT_MODULE) root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(ROOT_MODULE), "metadata.txt")] = metadata_file - if ctx.attrs.compress_asset_libraries: - compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, assets_for_primary_apk, native_library_paths, ROOT_MODULE) - root_module_compressed_lib_srcs[_get_native_libs_as_assets_dir(ROOT_MODULE)] = compressed_lib_dir - - # Since we're storing these as compressed assets, we need to ignore the 
uncompressed libs. - native_lib_assets_for_primary_apk = None - stripped_linkable_assets_for_primary_apk = None for module, native_lib_assets in native_lib_module_assets_map.items(): - metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, native_lib_assets, module) - non_root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(module), "libs.txt")] = metadata_file - compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, native_lib_assets, native_library_paths, module) - non_root_module_compressed_lib_srcs[_get_native_libs_as_assets_dir(module)] = compressed_lib_dir + metadata_file = _get_native_libs_as_assets_metadata(ctx, native_lib_assets, module) + libs_metadata_path = paths.join("assets", "libs.txt") + non_root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(module), libs_metadata_path)] = metadata_file + non_root_module_libs_srcs.extend(native_lib_assets) + + non_root_module_libs = None + if non_root_module_libs_srcs: + non_root_module_libs = ctx.actions.declare_output("non_root_module_libs") + ctx.actions.run( + cmd_args([ + ctx.attrs._android_toolchain[AndroidToolchainInfo].combine_native_library_dirs[RunInfo], + "--output-dir", + non_root_module_libs.as_output(), + "--library-dirs", + ] + non_root_module_libs_srcs), + category = "combine_non_root_module_native_libs", + ) combined_native_libs = ctx.actions.declare_output("combined_native_libs", dir = True) native_libs_metadata = ctx.actions.declare_output("native_libs_metadata.txt") @@ -489,9 +676,8 @@ def _get_native_libs_and_assets( native_lib_assets_for_primary_apk = native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk = stripped_linkable_assets_for_primary_apk, root_module_metadata_assets = ctx.actions.symlinked_dir("root_module_metadata_assets", root_module_metadata_srcs), - root_module_compressed_lib_assets = ctx.actions.symlinked_dir("root_module_compressed_lib_assets", root_module_compressed_lib_srcs), non_root_module_metadata_assets = ctx.actions.symlinked_dir("non_root_module_metadata_assets", non_root_module_metadata_srcs), - non_root_module_compressed_lib_assets = ctx.actions.symlinked_dir("non_root_module_compressed_lib_assets", non_root_module_compressed_lib_srcs), + non_root_module_lib_assets = non_root_module_libs, ) def _filter_prebuilt_native_library_dir( @@ -503,12 +689,16 @@ def _filter_prebuilt_native_library_dir( cpu_filters = ctx.attrs.cpu_filters or CPU_FILTER_TO_ABI_DIRECTORY.keys() abis = [CPU_FILTER_TO_ABI_DIRECTORY[cpu] for cpu in cpu_filters] filter_tool = ctx.attrs._android_toolchain[AndroidToolchainInfo].filter_prebuilt_native_library_dir[RunInfo] - native_libs_dirs = [native_lib.dir for native_lib in native_libs] - native_libs_dirs_file = ctx.actions.write("{}_list.txt".format(identifier), native_libs_dirs) + native_libs_dirs_file = argfile(actions = ctx.actions, name = "{}_list.txt".format(identifier), args = [native_lib.dir for native_lib in native_libs]) base_output_dir = ctx.actions.declare_output(identifier, dir = True) - output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module)) if package_as_assets else base_output_dir + if module == ROOT_MODULE: + output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module)) if package_as_assets else base_output_dir + elif package_as_assets: + output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), "assets")) + else: + output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), 
"lib")) ctx.actions.run( - cmd_args([filter_tool, native_libs_dirs_file, output_dir.as_output(), "--abis"] + abis).hidden(native_libs_dirs), + cmd_args([filter_tool, native_libs_dirs_file, output_dir.as_output(), "--abis"] + abis), category = "filter_prebuilt_native_library_dir", identifier = identifier, ) @@ -518,7 +708,7 @@ def _filter_prebuilt_native_library_dir( _StrippedNativeLinkables = record( linkables = Artifact, linkables_always_in_primary_apk = Artifact, - linkable_assets_for_primary_apk = [Artifact, None], + linkable_assets_for_primary_apk = Artifact | None, linkable_module_assets_map = dict[str, Artifact], ) @@ -531,6 +721,7 @@ def _get_native_linkables( stripped_native_linkables_always_in_primary_apk_srcs = {} stripped_native_linkable_assets_for_primary_apk_srcs = {} stripped_native_linkable_module_assets_srcs = {} + strip_libraries = getattr(ctx.attrs, "strip_libraries", True) cpu_filters = ctx.attrs.cpu_filters for platform, native_linkables in platform_to_native_linkables.items(): @@ -541,6 +732,7 @@ def _get_native_linkables( for so_name, native_linkable in native_linkables.items(): native_linkable_target = str(native_linkable.label.raw_target()) module = get_module_from_target(native_linkable_target) + lib = native_linkable.stripped_lib if strip_libraries else native_linkable.lib.output expect( not native_linkable.for_primary_apk or is_root_module(module), @@ -550,18 +742,22 @@ def _get_native_linkables( not native_linkable.for_primary_apk or not native_linkable.can_be_asset, "{} which is marked as needing to be in the primary APK cannot be an asset".format(native_linkable_target), ) - if native_linkable.can_be_asset and not is_root_module(module): - so_name_path = paths.join(_get_native_libs_as_assets_dir(module), abi_directory, so_name) - stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = native_linkable.stripped_lib - elif native_linkable.can_be_asset and package_native_libs_as_assets_enabled: - so_name_path = paths.join(_get_native_libs_as_assets_dir(module), abi_directory, so_name) - stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = native_linkable.stripped_lib - else: - so_name_path = paths.join(abi_directory, so_name) - if native_linkable.for_primary_apk: - stripped_native_linkables_always_in_primary_apk_srcs[so_name_path] = native_linkable.stripped_lib + + if is_root_module(module): + if native_linkable.can_be_asset and package_native_libs_as_assets_enabled: + native_libs_assets_dir = paths.join(_get_native_libs_as_assets_dir(module)) + so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) + stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = lib else: - stripped_native_linkables_srcs[so_name_path] = native_linkable.stripped_lib + so_name_path = paths.join(abi_directory, so_name) + if native_linkable.for_primary_apk: + stripped_native_linkables_always_in_primary_apk_srcs[so_name_path] = lib + else: + stripped_native_linkables_srcs[so_name_path] = lib + else: + module_dir = "assets" if native_linkable.can_be_asset else "lib" + so_name_path = paths.join(_get_native_libs_as_assets_dir(module), module_dir, abi_directory, so_name) + stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = lib stripped_native_linkables = ctx.actions.symlinked_dir( "stripped_native_linkables", @@ -592,53 +788,34 @@ def _get_native_linkables( def _get_native_libs_as_assets_metadata( ctx: AnalysisContext, native_lib_assets: list[Artifact], - module: str) -> (Artifact, 
Artifact): - native_lib_assets_file = ctx.actions.write("{}/native_lib_assets".format(module), [cmd_args([native_lib_asset, _get_native_libs_as_assets_dir(module)], delimiter = "/") for native_lib_asset in native_lib_assets]) + module: str) -> Artifact: + native_lib_assets_file = argfile( + actions = ctx.actions, + name = "{}/native_lib_assets".format(module), + args = [cmd_args([native_lib_asset, _get_native_libs_as_assets_dir(module)], delimiter = "/") for native_lib_asset in native_lib_assets], + ) metadata_output = ctx.actions.declare_output("{}/native_libs_as_assets_metadata.txt".format(module)) - native_library_paths = ctx.actions.declare_output("{}/native_libs_as_assets_paths.txt".format(module)) metadata_cmd = cmd_args([ ctx.attrs._android_toolchain[AndroidToolchainInfo].native_libs_as_assets_metadata[RunInfo], "--native-library-dirs", native_lib_assets_file, "--metadata-output", metadata_output.as_output(), - "--native-library-paths-output", - native_library_paths.as_output(), - ]).hidden(native_lib_assets) + ]) ctx.actions.run(metadata_cmd, category = "get_native_libs_as_assets_metadata", identifier = module) - return metadata_output, native_library_paths - -def _get_compressed_native_libs_as_assets( - ctx: AnalysisContext, - native_lib_assets: list[Artifact], - native_library_paths: Artifact, - module: str) -> Artifact: - output_dir = ctx.actions.declare_output("{}/compressed_native_libs_as_assets_dir".format(module)) - compressed_libraries_cmd = cmd_args([ - ctx.attrs._android_toolchain[AndroidToolchainInfo].compress_libraries[RunInfo], - "--libraries", - native_library_paths, - "--output-dir", - output_dir.as_output(), - "--compression-type", - ctx.attrs.asset_compression_algorithm or "xz", - "--xz-compression-level", - str(ctx.attrs.xz_compression_level), - ]).hidden(native_lib_assets) - ctx.actions.run(compressed_libraries_cmd, category = "compress_native_libs_as_assets", identifier = module) - return output_dir + return metadata_output def _get_native_libs_as_assets_dir(module: str) -> str: return "assets/{}".format("lib" if is_root_module(module) else module) -def get_native_linkables_by_default(ctx: AnalysisContext, _platform: str, deps: list[Dependency], shared_libraries_to_exclude) -> dict[str, SharedLibrary]: +def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared_libraries_to_exclude) -> dict[str, SharedLibrary]: shared_library_info = merge_shared_libraries( ctx.actions, deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) return { - so_name: shared_lib - for so_name, shared_lib in traverse_shared_library_info(shared_library_info).items() + soname: shared_lib + for soname, shared_lib in with_unique_str_sonames(traverse_shared_library_info(shared_library_info)).items() if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target())) } @@ -656,6 +833,9 @@ def encode_linkable_graph_for_mergemap(graph_node_map_by_platform: dict[str, dic platform: { target: _LinkableSharedNode( raw_target = str(target.raw_target()), + # FIXME(JakobDegen): The definition of `LinkableNode` claims that it's ok for this + # to be `None` (I assume in the case of static preferred linkage), so either that is + # wrong or this is. 
See the diff that added this FIXME for how to reproduce soname = node.default_soname, labels = node.labels, deps = node.deps + node.exported_deps, @@ -684,23 +864,7 @@ MergedSharedLibrary = record( # this only includes solib constituents that are included in the android merge map solib_constituents = list[str], is_actually_merged = bool, -) - -# Output of the linkables merge process, the list of shared libs for each platform and -# debug information about the merge process itself. -MergedLinkables = record( - # dict[platform, dict[final_soname, MergedSharedLibrary]] - shared_libs_by_platform = dict[str, dict[str, MergedSharedLibrary]], - debug_info = dict[str, MergedLinkablesDebugInfo], -) - -# Input data to the linkables merge process -LinkableMergeData = record( - glue_linkable = [(Label, LinkInfo), None], - default_shared_libs = dict[str, SharedLibrary], - linkable_nodes = dict[Label, LinkableNode], - merge_map = dict[str, [str, None]], - apk_module_graph = typing.Callable, + primary_constituents = list[Label], ) # information about a link group derived from the merge mapping @@ -710,22 +874,31 @@ LinkGroupData = record( apk_module = str, ) +# Lookup key for somerge groups, either the soname for shared libraries or the target name for unmerged statics +GroupLabel = str + +# Represents the primary constituents and deps of primary constituents used to create a LinkGroupLinkableNode for a non-prebuilt shared library. +LinkGroupMergeInfo = record( + label = GroupLabel, + deps = list[GroupLabel], + exported_deps = list[GroupLabel], + constituent_link_infos = list[LinkInfo], +) + # Represents a node in the final merged linkable map. Most of these will be shared libraries, either prebuilt shared libs or -# libraries that are created below for a node in the link_groups_graph. The exception is for non-merged static-only nodes, in -# that case this +# libraries that are created below for a node in the link_groups_graph. The exception is for non-merged static-only nodes. LinkGroupLinkableNode = record( # The LinkInfo to add to the link line for a node that links against this. link = LinkInfo, - deps = list[str], - exported_deps = list[str], + deps = list[GroupLabel], + exported_deps = list[GroupLabel], shared_lib = [SharedLibrary, None], - # linker flags to be exported by any node that links against this. This can only be non-None for non-merged static only nodes (as we don't # propagate exported linker flags through transitive shared lib deps). exported_linker_flags = [(list[typing.Any], list[typing.Any]), None], ) -def write_merged_library_map(ctx: AnalysisContext, merged_linkables: MergedLinkables) -> Artifact: +def write_merged_library_map(ctx: AnalysisContext, shared_libs_by_platform: dict[str, dict[str, MergedSharedLibrary]]) -> Artifact: """ Writes the "merged library map". 
This is a map of original soname to final soname of the form: @@ -737,7 +910,7 @@ def write_merged_library_map(ctx: AnalysisContext, merged_linkables: MergedLinka ``` """ solib_map = {} # dict[final_soname, set[original_soname]] - for _, shared_libs in merged_linkables.shared_libs_by_platform.items(): + for _, shared_libs in shared_libs_by_platform.items(): for soname in shared_libs.keys(): merged_shared_lib = shared_libs[soname] if merged_shared_lib.is_actually_merged: @@ -758,12 +931,6 @@ def run_mergemap_codegen(ctx: AnalysisContext, merged_library_map: Artifact) -> ctx.actions.run(args, category = "mergemap_codegen") return mapping_java -def expect_dedupe(v): - # asserts that the input list is unique - o = dedupe_by_value(v) - expect(len(o) == len(v), "expected `{}` to be a list of unique items, but it wasn't. deduped list was `{}`.", v, o) - return v - # We can't merge a prebuilt shared (that has no archive) and must use its original info. # Ideally this would probably be structured info on the LinkableNode. def _is_prebuilt_shared(node_data: LinkableNode) -> bool: @@ -781,9 +948,83 @@ def _has_linkable(node_data: LinkableNode) -> bool: return True return False -def _get_merged_linkables( +def _platform_output_path(path: str, platform: [str, None] = None): + if platform: + return platform + "/" + path + return path + +def _transitive_has_linkable( + target: Label, + linkable_nodes: dict[Label, LinkableNode], + transitive_linkable_cache: dict[Label, bool]) -> bool: + if target in transitive_linkable_cache: + return transitive_linkable_cache[target] + + target_node = linkable_nodes.get(target) + for dep in target_node.deps: + if _has_linkable(linkable_nodes.get(dep)) or _transitive_has_linkable(dep, linkable_nodes, transitive_linkable_cache): + transitive_linkable_cache[target] = True + return True + for dep in target_node.exported_deps: + if _has_linkable(linkable_nodes.get(dep)) or _transitive_has_linkable(dep, linkable_nodes, transitive_linkable_cache): + transitive_linkable_cache[target] = True + return True + + transitive_linkable_cache[target] = False + return False + +def _shared_lib_for_prebuilt_shared( ctx: AnalysisContext, - merged_data_by_platform: dict[str, LinkableMergeData]) -> MergedLinkables: + cxx_toolchain: CxxToolchainInfo, + target: Label, + node_data: LinkableNode, + linkable_nodes: dict[Label, LinkableNode], + transitive_linkable_cache: dict[Label, bool], + platform: [str, None] = None) -> SharedLibrary: + expect( + len(node_data.shared_libs.libraries) == 1, + "unexpected shared_libs length for somerge of {} ({})".format(target, node_data.shared_libs), + ) + + # TODO(cjhopman): We don't currently support prebuilt shared libs with deps on other libs because + # we don't compute the shared lib deps of prebuilt shared libs here. That + # shouldn't be too hard, but we haven't needed it.
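# Editorial note: concretely (hypothetical targets), if `//x:prebuilt` provides exactly one .so but
# depends on a `//y:impl` that contributes linkable code anywhere in its subtree, the expects below
# fire; only dep subtrees with no linkables at all (e.g. header-only targets) are accepted.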
+ for dep in node_data.deps: + expect( + not _transitive_has_linkable(dep, linkable_nodes, transitive_linkable_cache), + "prebuilt shared library `{}` with deps not supported by somerge".format(target), + ) + for dep in node_data.exported_deps: + expect( + not _transitive_has_linkable(dep, linkable_nodes, transitive_linkable_cache), + "prebuilt shared library `{}` with exported_deps not supported by somerge".format(target), + ) + + shlib = node_data.shared_libs.libraries[0] + soname = shlib.soname.ensure_str() + shlib = shlib.lib + output_path = _platform_output_path(soname, platform) + return create_shlib( + lib = shlib, + stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output, output_path), + link_args = None, + shlib_deps = None, + can_be_asset = node_data.can_be_asset, + for_primary_apk = False, + soname = soname, + label = target, + ) + +def _get_merged_linkables_for_platform( + ctx: AnalysisContext, + cxx_toolchain: CxxToolchainInfo, + platform: str | None, + glue_linkable: [(Label, LinkInfo), None], + default_shared_libs: dict[str, SharedLibrary], + linkable_nodes: dict[Label, LinkableNode], + merge_map: dict[str, [str, None]], + merge_linker_args: dict[str, typing.Any], + apk_module_graph: typing.Callable) -> (dict[str, MergedSharedLibrary], MergedLinkablesDebugInfo): """ This takes the merge mapping and constructs the resulting merged shared libraries. @@ -825,350 +1066,505 @@ def _get_merged_linkables( of a primary constituent. A public node is linked via "link whole". 2. linker_flags of primary constituents are included in the link, for non primary they are not """ - debug_info_by_platform = {} - shared_libs_by_platform = {} - for platform, merge_data in merged_data_by_platform.items(): - debug_info = debug_info_by_platform.setdefault(platform, MergedLinkablesDebugInfo( - unmerged_statics = [], - group_debug = {}, - with_default_soname = [], - missing_default_solibs = [], - )) - linkable_nodes = merge_data.linkable_nodes - - linkable_nodes_graph = {k: dedupe(v.deps + v.exported_deps) for k, v in linkable_nodes.items()} - topo_sorted_targets = pre_order_traversal(linkable_nodes_graph) - - # first we collect basic information about each link group, this will populate the fields in LinkGroupData and - # map target labels to their link group name. - link_groups = {} - target_to_link_group = {} - - # Additional caching for later. Needs to be per-platform - has_transitive_linkable_cache = dict[Label, bool] - - for target in topo_sorted_targets: - expect(target not in target_to_link_group, "prelude internal error, target seen twice?") - target_apk_module = merge_data.apk_module_graph(str(target.raw_target())) - - link_group = merge_data.merge_map.get(str(target), None) - if not link_group: - link_group = str(target) - link_groups[link_group] = LinkGroupData( - group_name = target, - constituents = [target], - apk_module = target_apk_module, + debug_info = MergedLinkablesDebugInfo( + unmerged_statics = [], + group_debug = {}, + with_default_soname = [], + missing_default_solibs = [], + ) + + linkable_nodes_graph = {k: dedupe(v.deps + v.exported_deps) for k, v in linkable_nodes.items()} + topo_sorted_targets = pre_order_traversal(linkable_nodes_graph) + + # first we collect basic information about each link group, this will populate the fields in LinkGroupData and + # map target labels to their link group name. 
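# Editorial sketch: the group-assignment rule implemented below, as a self-contained helper.
# Targets named in the merge map share a group keyed by the merged soname; every other target
# forms a singleton group keyed by its own label string.
def _sketch_assign_groups(topo_sorted_targets, merge_map):
    groups = {}
    for target in topo_sorted_targets:
        group = merge_map.get(str(target)) or str(target)
        groups.setdefault(group, []).append(target)
    return groups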
+ link_groups = {} + target_to_link_group = {} + + # Because we cannot attach this to the LinkableNode after the fact, declare a cache for each platform + transitive_linkable_cache = {} + + for target in topo_sorted_targets: + expect(target not in target_to_link_group, "prelude internal error, target seen twice?") + target_apk_module = apk_module_graph(str(target.raw_target())) + + link_group = merge_map.get(str(target), None) + if not link_group: + link_group = str(target) + link_groups[link_group] = LinkGroupData( + group_name = target, + constituents = [target], + apk_module = target_apk_module, + ) + elif link_group in link_groups: + link_group_data = link_groups[link_group] + + # TODO(cjhopman): buck1 provides a more useful error here in that it lists the module mappings for all + # constituents of the merge group (rather than just one conflict). That allows users to resolve all the + # issues at once. With merge sequence merging (the replacement for merge map), this error shouldn't ever be hit + # and so maybe it's not necessary to improve it. + expect( + link_group_data.apk_module == target_apk_module, + "Native library merge of {} has inconsistent application module mappings:\n{} is in module {}\n{} is in module {}", + link_group_data.group_name, + target, + target_apk_module, + link_group_data.constituents[0], + link_group_data.apk_module, + ) + link_groups[link_group].constituents.append(target) + else: + link_groups[link_group] = LinkGroupData( + group_name = link_group, + constituents = [target], + apk_module = target_apk_module, + ) + + target_to_link_group[target] = link_group + + # Now that all targets are assigned to a link group, build up the link group graph. + link_groups_graph_builder = {} + for target in topo_sorted_targets: + target_group = target_to_link_group[target] + group_deps = link_groups_graph_builder.setdefault(target_group, {}) + for dep in linkable_nodes_graph[target]: + dep_group = target_to_link_group[dep] + if target_group != dep_group: + group_deps[dep_group] = True + link_groups_graph = {k: list(v.keys()) for k, v in link_groups_graph_builder.items()} + + archive_output_style = LibOutputStyle("pic_archive") + shlib_output_style = LibOutputStyle("shared_lib") + + link_group_linkable_nodes = {} + group_shared_libs = {} + included_default_solibs = {} + + # Now we will traverse from the leaves up the graph (the link groups graph). As we traverse, we will produce + # a link group linkablenode for each group. 
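# Editorial note: post-order guarantees a group is visited only after every group it depends on,
# so `link_group_linkable_nodes` already holds an entry for each dep by the time a group is linked.
# A toy example of the traversal order (sonames hypothetical):
#     post_order_traversal({"libapp.so": ["libbase.so"], "libbase.so": []})  # ["libbase.so", "libapp.so"]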
+ for group in post_order_traversal(link_groups_graph): + group_data = link_groups[group] + is_actually_merged = len(group_data.constituents) > 1 + + can_be_asset = True + for target in group_data.constituents: + if not linkable_nodes[target].can_be_asset: + can_be_asset = False + break + + if not is_actually_merged: + target = group_data.constituents[0] + node_data = linkable_nodes[target] + + if node_data.preferred_linkage == Linkage("static") or not _has_linkable(node_data): + debug_info.unmerged_statics.append(target) + link_group_linkable_nodes[group] = LinkGroupLinkableNode( + link = node_data.link_infos[archive_output_style].default, + deps = dedupe_by_value([target_to_link_group[t] for t in node_data.deps]), + exported_deps = dedupe_by_value([target_to_link_group[t] for t in node_data.exported_deps]), + shared_lib = None, + exported_linker_flags = (node_data.linker_flags.exported_flags, node_data.linker_flags.exported_post_flags), ) - elif link_group in link_groups: - link_group_data = link_groups[link_group] - - # TODO(cjhopman): buck1 provides a more useful error here in that it lists the module mappings for all - # constituents of the merge group (rather than just one conflict). That allows users to resolve all the - # issues at once. With merge sequence merging (the replacement for merge map), this error shouldn't ever be hit - # and so maybe it's not necessary to improve it. - expect( - link_group_data.apk_module == target_apk_module, - "Native library merge of {} has inconsistent application module mappings:\n{} is in module {}\n{} is in module {}", - link_group_data.group_name, + continue + + if _is_prebuilt_shared(node_data): + shared_lib = _shared_lib_for_prebuilt_shared( + ctx, + cxx_toolchain, target, - target_apk_module, - link_group_data.constituents[0], - link_group_data.apk_module, + node_data, + linkable_nodes, + transitive_linkable_cache, + platform, ) - link_groups[link_group].constituents.append(target) - else: - link_groups[link_group] = LinkGroupData( - group_name = link_group, - constituents = [target], - apk_module = target_apk_module, + link_group_linkable_nodes[group] = LinkGroupLinkableNode( + link = node_data.link_infos[shlib_output_style].default, + deps = [], + exported_deps = [], + shared_lib = shared_lib, + # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents + exported_linker_flags = None, + ) + group_shared_libs[shared_lib.soname.ensure_str()] = MergedSharedLibrary( + soname = shared_lib.soname.ensure_str(), + lib = shared_lib, + apk_module = group_data.apk_module, + solib_constituents = [], + is_actually_merged = False, + primary_constituents = [target], ) + continue + + exported_linker_flags = [] + exported_linker_post_flags = [] + links = [] + + if is_actually_merged and glue_linkable: + links.append(set_link_info_link_whole(glue_linkable[1])) + + solib_constituents = [] + group_deps = [] + group_exported_deps = [] + for key in group_data.constituents: + expect(target_to_link_group[key] == group) + node = linkable_nodes[key] + + default_solibs = list([shlib.soname.ensure_str() for shlib in node.shared_libs.libraries]) + if not default_solibs and node.preferred_linkage == Linkage("static"): + default_solibs = [node.default_soname] + + for soname in default_solibs: + included_default_solibs[soname] = True + if node.include_in_android_mergemap: + solib_constituents.append(soname) + + node = linkable_nodes[key] + link_info = node.link_infos[archive_output_style].default + + # the propagated link 
info should already be wrapped with exported flags. + link_info = wrap_link_info( + link_info, + pre_flags = node.linker_flags.flags, + post_flags = node.linker_flags.post_flags, + ) + exported_linker_flags.extend(node.linker_flags.exported_flags) + exported_linker_post_flags.extend(node.linker_flags.exported_post_flags) + links.append(set_link_info_link_whole(link_info)) - target_to_link_group[target] = link_group + dep_groups = [target_to_link_group[dep] for dep in node.deps] + group_deps.extend([dep_group for dep_group in dep_groups if dep_group != group]) - # Now that all targets are assigned to a link group, build up the link group graph. - link_groups_graph_builder = {} - for target in topo_sorted_targets: - target_group = target_to_link_group[target] - group_deps = link_groups_graph_builder.setdefault(target_group, {}) - for dep in linkable_nodes_graph[target]: - dep_group = target_to_link_group[dep] - if target_group != dep_group: - group_deps[dep_group] = True - link_groups_graph = {k: list(v.keys()) for k, v in link_groups_graph_builder.items()} + exported_dep_groups = [target_to_link_group[dep] for dep in node.exported_deps] + group_exported_deps.extend([dep_group for dep_group in exported_dep_groups if dep_group != group]) - archive_output_style = LibOutputStyle("pic_archive") - shlib_output_style = LibOutputStyle("shared_lib") + soname = group + if not is_actually_merged: + soname = linkable_nodes[group_data.constituents[0]].default_soname + debug_info.with_default_soname.append((soname, group_data.constituents[0])) - cxx_toolchain = ctx.attrs._cxx_toolchain[platform][CxxToolchainInfo] + output_path = _platform_output_path(soname, platform) - link_group_linkable_nodes = {} - group_shared_libs = {} - included_default_solibs = {} - - def platform_output_path(path): - if len(merged_data_by_platform) > 1: - return platform + "/" + path - return path - - def set_has_transitive_linkable_cache(target: Label, result: bool) -> bool: - has_transitive_linkable_cache[target] = result - return result - - def transitive_has_linkable(target: Label) -> bool: - if target in has_transitive_linkable_cache: - return has_transitive_linkable_cache[target] - - target_node = linkable_nodes.get(target) - for dep in target_node.deps: - if _has_linkable(linkable_nodes.get(dep)) or transitive_has_linkable(dep): - return set_has_transitive_linkable_cache(target, True) - for dep in target_node.exported_deps: - if _has_linkable(linkable_nodes.get(dep)) or transitive_has_linkable(dep): - return set_has_transitive_linkable_cache(target, True) - - return set_has_transitive_linkable_cache(target, False) - - # Now we will traverse from the leaves up the graph (the link groups graph). As we traverse, we will produce - # a link group linkablenode for each group. 
- for group in post_order_traversal(link_groups_graph): - group_data = link_groups[group] - is_actually_merged = len(group_data.constituents) > 1 - can_be_asset = True - - if not is_actually_merged: - target = group_data.constituents[0] - node_data = linkable_nodes[target] - can_be_asset = node_data.can_be_asset - - if node_data.preferred_linkage == Linkage("static") or not _has_linkable(node_data): - debug_info.unmerged_statics.append(target) - link_group_linkable_nodes[group] = LinkGroupLinkableNode( - link = node_data.link_infos[archive_output_style].default, - deps = dedupe_by_value([target_to_link_group[t] for t in node_data.deps]), - exported_deps = dedupe_by_value([target_to_link_group[t] for t in node_data.exported_deps]), - shared_lib = None, - exported_linker_flags = (node_data.linker_flags.exported_flags, node_data.linker_flags.exported_post_flags), - ) - continue - - if _is_prebuilt_shared(node_data): - expect( - len(node_data.shared_libs) == 1, - "unexpected shared_libs length for somerge of {} ({})".format(target, node_data.shared_libs), - ) - - # TODO(cjhopman): We don't currently support prebuilt shared libs with deps on other libs because - # we don't compute the shared lib deps of prebuilt shared libs here. That - # shouldn't be too hard, but we haven't needed it. - for dep in node_data.deps: - expect(not transitive_has_linkable(dep), "prebuilt shared library `{}` with deps not supported by somerge".format(target)) - for dep in node_data.exported_deps: - expect(not transitive_has_linkable(dep), "prebuilt shared library `{}` with exported_deps not supported by somerge".format(target)) - soname, shlib = node_data.shared_libs.items()[0] - - output_path = platform_output_path(shlib.output.short_path) - shared_lib = SharedLibrary( - lib = shlib, - stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output, output_path = output_path), - link_args = None, - shlib_deps = None, - can_be_asset = can_be_asset, - for_primary_apk = False, - soname = soname, - label = target, - ) - - link_group_linkable_nodes[group] = LinkGroupLinkableNode( - link = node_data.link_infos[shlib_output_style].default, - deps = [], - exported_deps = [], - shared_lib = shared_lib, - # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents - exported_linker_flags = None, - ) - group_shared_libs[soname] = MergedSharedLibrary( - soname = soname, - lib = shared_lib, - apk_module = group_data.apk_module, - solib_constituents = [], - is_actually_merged = False, - ) - continue - - # Keys in the current group stay as a Label, deps get converted to the group key. - def convert_to_merged_graph_deps(deps: list[Label], curr_group: str) -> list[[Label, str]]: - converted = [] - for dep in deps: - dep_group = target_to_link_group[dep] - if dep_group == curr_group: - converted.append(dep) - elif dep_group: - converted.append(dep_group) - return dedupe_by_value(converted) - - # For the current group, this will traverse the original linkable graph to find the LinkableNodes for - # the constituents of the group and traverses the link_group graph for non-constituent deps. 
- def get_merged_graph_traversal(curr_group: str, exported_only: bool) -> typing.Callable: - def traversal(key: [Label, str]) -> list[[Label, str]]: - if eval_type(Label).matches(key): - expect(target_to_link_group[key] == curr_group) - node = linkable_nodes[key] - if exported_only: - return convert_to_merged_graph_deps(node.exported_deps, curr_group) - return convert_to_merged_graph_deps(node.deps + node.exported_deps, curr_group) - else: - link_group_node = link_group_linkable_nodes[key] - if exported_only: - return link_group_node.exported_deps - return dedupe_by_value(link_group_node.deps + link_group_node.exported_deps) - - # It's easy for us to accidentally get this merged traversal wrong, so this provides one guardrail - def checked_traversal(key: [Label, str]) -> list[[Label, str]]: - return expect_dedupe(traversal(key)) - - return checked_traversal - - # note that this will possibly contain shared lib dependencies which aren't really public. that's handled below. - public_node_roots = group_data.constituents - - # this is a hybrid of buck1 somerge behavior and what we do for link groups. - # like link groups, we expose link group by setting link_whole on its link infos (this matches buck1 for - # primary constituents, but not for other constituents). - # like buck1, we treat all primary constituents as public node roots (as opposed to link groups that only treats - # preferred_linkage=shared and edges with an outbound dep as public roots), and then traverse exported deps from - # those roots to find all public nodes. - # the main thing to note from this is that for non-primary constituents that are identified as public, we will - # use link_whole whereas buck1 will make dependents link against them directly - exported_public_nodes = { - d: True - for d in breadth_first_traversal_by( - None, - public_node_roots, - get_merged_graph_traversal(group, True), - ) - } + link_merge_info = LinkGroupMergeInfo( + label = group, + deps = dedupe_by_value(group_deps), + exported_deps = dedupe_by_value(group_exported_deps), + constituent_link_infos = links, + ) + link_args, shlib_deps, link_deps_graph = _create_merged_link_args( + root_target = link_merge_info, + linkable_nodes = link_group_linkable_nodes, + cxx_toolchain = cxx_toolchain, + ) + link_args = [link_args] + if soname in merge_linker_args: + link_args += [LinkArgs(infos = [LinkInfo(pre_flags = merge_linker_args[soname])])] - exported_linker_flags = [] - exported_linker_post_flags = [] - links = [] - shared_lib_deps = [] - real_constituents = [] - - if is_actually_merged and merge_data.glue_linkable: - real_constituents.append(merge_data.glue_linkable[0]) - links.append(set_link_info_link_whole(merge_data.glue_linkable[1])) - - solib_constituents = [] - link_group_deps = [] - ordered_group_constituents = pre_order_traversal_by(group_data.constituents, get_merged_graph_traversal(group, False)) - representative_label = ordered_group_constituents[0] - for key in ordered_group_constituents: - real_constituents.append(key) - if eval_type(Label).matches(key): - # This is handling targets within this link group - expect(target_to_link_group[key] == group) - node = linkable_nodes[key] - - default_solibs = list(node.shared_libs.keys()) - if not default_solibs and node.preferred_linkage == Linkage("static"): - default_solibs = [node.default_soname] - - for soname in default_solibs: - included_default_solibs[soname] = True - if node.include_in_android_mergemap: - solib_constituents.append(soname) - - node = linkable_nodes[key] - link_info = 
node.link_infos[archive_output_style].default - - # the propagated link info should already be wrapped with exported flags. - link_info = wrap_link_info( - link_info, - pre_flags = node.linker_flags.flags, - post_flags = node.linker_flags.post_flags, - ) - exported_linker_flags.extend(node.linker_flags.exported_flags) - exported_linker_post_flags.extend(node.linker_flags.exported_post_flags) - if key in exported_public_nodes: - link_info = set_link_info_link_whole(link_info) - else: - # This is cross-link-group deps. We add information to the link line from the LinkGroupLinkableNode of the dep. - link_group_node = link_group_linkable_nodes[key] - link_info = link_group_node.link - if link_group_node.shared_lib: - shared_lib_deps.append(link_group_node.shared_lib.soname) - link_group_deps.append(key) - elif key in exported_public_nodes: - link_info = set_link_info_link_whole(link_info) - - if link_group_node.exported_linker_flags: - exported_linker_flags.extend(link_group_node.exported_linker_flags[0]) - exported_linker_post_flags.extend(link_group_node.exported_linker_flags[1]) - - links.append(link_info) - - soname = group - if not is_actually_merged: - soname = linkable_nodes[group_data.constituents[0]].default_soname - debug_info.with_default_soname.append((soname, group_data.constituents[0])) - - debug_info.group_debug.setdefault( - group, - struct( - soname = soname, - merged = is_actually_merged, - constituents = real_constituents, - shlib_deps = shared_lib_deps, - exported_public_nodes = exported_public_nodes, - exported_linker_flags = exported_linker_flags, - exported_linker_post_flags = exported_linker_post_flags, - ), + shared_lib = create_shared_lib( + ctx, + output_path = output_path, + soname = soname, + link_args = link_args, + cxx_toolchain = cxx_toolchain, + shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname.ensure_str() for label in shlib_deps], + label = group_data.constituents[0], + can_be_asset = can_be_asset, + ) + + link_group_linkable_nodes[group] = LinkGroupLinkableNode( + link = LinkInfo( + name = soname, + pre_flags = exported_linker_flags, + linkables = [SharedLibLinkable( + lib = shared_lib.lib.output, + )], + post_flags = exported_linker_post_flags, + ), + deps = link_merge_info.deps, + exported_deps = link_merge_info.exported_deps, + shared_lib = shared_lib, + # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents + exported_linker_flags = None, + ) + group_shared_libs[soname] = MergedSharedLibrary( + soname = soname, + lib = shared_lib, + apk_module = group_data.apk_module, + solib_constituents = solib_constituents, + is_actually_merged = is_actually_merged, + primary_constituents = group_data.constituents, + ) + + debug_info.group_debug.setdefault( + group, + struct( + soname = soname, + merged = is_actually_merged, + primary_constituents = group_data.constituents, + real_constituents = link_deps_graph.keys(), + shlib_deps = shlib_deps, + exported_linker_flags = exported_linker_flags, + exported_linker_post_flags = exported_linker_post_flags, + ), + ) + + debug_info.missing_default_solibs.extend([d for d in default_shared_libs if d not in included_default_solibs]) + + return group_shared_libs, debug_info + +# The current default link strategies don't produce enough information in the +# SharedLibrary objects to perform relinking. To do that best, linking should be based on +# the LinkableGraph rather than the current approach with MergedLinkInfo. 
+# The overall plan for linking is to move to linkable graph-based linking, but for now
+# we can do it just for the case that we need it.
+def _create_all_relinkable_links(
+        ctx: AnalysisContext,
+        platform_to_original_native_linkables: dict[str, dict[str, SharedLibrary]],
+        graph_node_map_by_platform: dict[str, dict[Label, LinkableNode]]) -> (dict[str, dict[str, SharedLibrary]], dict[str, typing.Any]):
+    final_platform_to_native_linkables = {}
+    link_graphs_by_platform = {}
+    for platform in platform_to_original_native_linkables:
+        linkables, link_graphs = _create_relinkable_links(
+            ctx,
+            cxx_toolchain = ctx.attrs._cxx_toolchain[platform][CxxToolchainInfo],
+            linkable_nodes = graph_node_map_by_platform[platform],
+            platform = platform,
+        )
+        link_graphs_by_platform[platform] = link_graphs
+        final_platform_to_native_linkables[platform] = linkables
+
+    # sanity check that we produce the same list of linkables that are produced by standard linking.
+    original_sonames = sorted(platform_to_original_native_linkables.keys())
+    final_sonames = sorted(final_platform_to_native_linkables.keys())
+    expect(original_sonames == final_sonames, "Unexpected differences in final sonames! {} {}".format(original_sonames, final_sonames))
+
+    debug_outputs = {}
+
+    # The biggest issue we could run into here is that we produce different link args than the original, so let's make that easy to debug.
+    for platform in platform_to_original_native_linkables:
+        for soname, lib in platform_to_original_native_linkables[platform].items():
+            final = final_platform_to_native_linkables[platform][soname]
+            original_args, _ = ctx.actions.write(
+                "{}/{}/original.args".format(platform, soname),
+                [unpack_link_args(args, LinkOrdering("topological")) for args in lib.link_args] if lib.link_args else "",
+                allow_args = True,
+            )
+            final_args, _ = ctx.actions.write(
+                "{}/{}/final.args".format(platform, soname),
+                [unpack_link_args(args, LinkOrdering("topological")) for args in final.link_args] if final.link_args else "",
+                allow_args = True,
             )
+            debug_outputs["{}/{}/original.args".format(platform, soname)] = original_args
+            debug_outputs["{}/{}/final.args".format(platform, soname)] = final_args
+
+            if lib.label in link_graphs_by_platform[platform]:
+                link_graph = ctx.actions.write_json(
+                    "{}/{}/link.graph".format(platform, soname),
+                    link_graphs_by_platform[platform][lib.label],
+                    pretty = True,
+                )
+                debug_outputs["{}/{}/link.graph".format(platform, soname)] = link_graph
+
+    # TODO(cjhopman): should we also just produce a diff here? We could also consider creating sort of a merged diff or a list
+    # of the differing argsfiles.
+    # We can't compare them eagerly because the link args have large tsets that we don't want to traverse at analysis time.
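One way to act on the TODO above, sketched as a minimal Starlark helper rather than part of this diff: run `diff` over the two already-written argsfiles at build time, which avoids the analysis-time tset traversal the comment warns about. This assumes a POSIX `sh` and `diff` are available on the execution platform; the helper name `_write_link_args_diff` and its output layout are illustrative only.

def _write_link_args_diff(
        ctx: AnalysisContext,
        platform: str,
        soname: str,
        original_args: Artifact,
        final_args: Artifact) -> Artifact:
    # Materialize `diff original.args final.args` as a build output. `diff`
    # exits non-zero when the files differ, so the exit code is masked to keep
    # the action green while still producing the diff for inspection.
    diff_output = ctx.actions.declare_output("{}/{}/args.diff".format(platform, soname))
    ctx.actions.run(
        cmd_args("sh", "-c", 'diff "$1" "$2" > "$3" || true', "--", original_args, final_args, diff_output.as_output()),
        category = "link_args_diff",
        identifier = "{}/{}".format(platform, soname),
    )
    return diff_output

Because the action only reads the two argsfiles produced above, it adds no analysis-time cost and the resulting args.diff can be published alongside the other debug_outputs.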
- output_path = platform_output_path(soname) - link_args = [LinkArgs(infos = links)] + return final_platform_to_native_linkables, debug_outputs +def _create_relinkable_links( + ctx: AnalysisContext, + *, + cxx_toolchain: CxxToolchainInfo, + linkable_nodes: dict[Label, LinkableNode], + platform: str) -> (dict[str, SharedLibrary], dict[Label, dict[Label, list[Label]]]): + linkable_nodes_graph = {target: value.deps + value.exported_deps for target, value in linkable_nodes.items()} + shared_libs = {} + shared_lib_overrides = {} + transitive_linkable_cache = {} + debug_link_deps = {} + for target in post_order_traversal(linkable_nodes_graph): + node = linkable_nodes[target] + if node.preferred_linkage == Linkage("static") or not _has_linkable(node): + continue + + if _is_prebuilt_shared(node): + shared_lib = _shared_lib_for_prebuilt_shared(ctx, cxx_toolchain, target, node, linkable_nodes, transitive_linkable_cache, platform) + else: + soname = node.default_soname + output_path = "relinkable-libs/{}/{}".format(platform, soname) + link_args, shlib_deps, link_deps_graph = _create_link_args( + cxx_toolchain = cxx_toolchain, + root_target = target, + node = node, + graph = linkable_nodes, + shared_lib_overrides = shared_lib_overrides, + ) + debug_link_deps[target] = link_deps_graph shared_lib = create_shared_lib( ctx, output_path = output_path, soname = soname, - link_args = link_args, + link_args = [link_args], cxx_toolchain = cxx_toolchain, - shared_lib_deps = shared_lib_deps, - label = representative_label, - can_be_asset = can_be_asset, + shared_lib_deps = [shared_lib_overrides[lib].name for lib in shlib_deps if lib in shared_lib_overrides], + label = target, + can_be_asset = node.can_be_asset, ) + shared_lib_overrides[target] = LinkInfo( + name = shared_lib.soname.ensure_str(), + pre_flags = node.linker_flags.exported_flags, + linkables = [SharedLibLinkable( + lib = shared_lib.lib.output, + )], + post_flags = node.linker_flags.exported_post_flags, + ) + shared_libs[shared_lib.soname.ensure_str()] = shared_lib - link_group_linkable_nodes[group] = LinkGroupLinkableNode( - link = LinkInfo( - name = soname, - pre_flags = exported_linker_flags, - linkables = [SharedLibLinkable( - lib = shared_lib.lib.output, - )], - post_flags = exported_linker_post_flags, - ), - deps = link_group_deps, - exported_deps = [], - shared_lib = shared_lib, - # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents - exported_linker_flags = None, - ) - group_shared_libs[soname] = MergedSharedLibrary( - soname = soname, - lib = shared_lib, - apk_module = group_data.apk_module, - solib_constituents = solib_constituents, - is_actually_merged = is_actually_merged, + return {lib.soname.ensure_str(): lib for lib in shared_libs.values()}, debug_link_deps + +# To support migration from a tset-based link strategy, we are trying to match buck's internal tset +# traversal logic here. 
Look for implementation of TopologicalTransitiveSetIteratorGen
+def _rust_matching_topological_traversal(
+        roots: list[typing.Any],
+        get_nodes_to_traverse_func: typing.Callable) -> list[typing.Any]:
+    counts = {}
+
+    for label in depth_first_traversal_by(None, roots, get_nodes_to_traverse_func, GraphTraversal("preorder-right-to-left")):
+        for dep in get_nodes_to_traverse_func(label):
+            if dep in counts:
+                counts[dep] += 1
+            else:
+                counts[dep] = 1
+
+    # some of the targets in roots might be transitive deps of others, we only put those that are true roots
+    # in the stack at this point
+    stack = [root_target for root_target in roots if not root_target in counts]
+    true_roots = len(stack)
+
+    result = []
+    for _ in range(2000000000):
+        if not stack:
+            break
+        next = stack.pop()
+        result.append(next)
+        deps = get_nodes_to_traverse_func(next)
+        for child in deps[::-1]:  # reverse order ensures we put things on the stack in the same order as rust's tset traversal
+            counts[child] -= 1
+            if counts[child] == 0:
+                stack.append(child)
+
+    if len(result) != true_roots + len(counts):
+        fail()  # fail_cycle
+
+    return result
+
+def _create_link_args(
+        *,
+        cxx_toolchain: CxxToolchainInfo,
+        root_target: Label,
+        node: LinkableNode,
+        graph: dict[Label, LinkableNode],
+        shared_lib_overrides: dict[Label, LinkInfo] | None = None) -> (LinkArgs, list[Label], dict[Label, list[Label]]):
+    if LinkOrdering(cxx_toolchain.linker_info.link_ordering) != LinkOrdering("topological"):
+        fail("don't yet support link ordering {}".format(cxx_toolchain.linker_info.link_ordering))
+
+    # TODO(cjhopman): verify picbehavior == pic
+    link_strategy = node.default_link_strategy
+    if not shared_lib_overrides:
+        shared_lib_overrides = {}
+
+    link_traversal_cache = {}
+
+    def link_traversal(label: Label) -> list[Label]:
+        def link_traversal_deps(label):
+            node = graph[label]
+            if label == root_target:
+                return node.deps + node.exported_deps
+            actual_linkable_type = get_lib_output_style(link_strategy, node.preferred_linkage, PicBehavior("supported"))
+            if actual_linkable_type == LibOutputStyle("shared_lib"):
+                return node.exported_deps
+            else:
+                return node.deps + node.exported_deps
+
+        res = link_traversal_cache.get(label, None)
+        if res:
+            return res
+        res = link_traversal_deps(label)
+        link_traversal_cache[label] = res
+        return res
+
+    links = []
+    shlib_deps = []
+    for target in _rust_matching_topological_traversal([root_target], link_traversal):
+        is_root = target == root_target
+        node = graph[target]
+        preferred_linkable_type = get_lib_output_style(link_strategy, node.preferred_linkage, PicBehavior("supported"))
+
+        if is_root:
+            link_info = node.link_infos[LibOutputStyle("pic_archive")].default
+            link_info = wrap_link_info(
+                link_info,
+                pre_flags = node.linker_flags.flags,
+                post_flags = node.linker_flags.post_flags,
             )
+            links.append(set_link_info_link_whole(link_info))
+        elif preferred_linkable_type == LibOutputStyle("shared_lib"):
+            if target in shared_lib_overrides:
+                links.append(shared_lib_overrides[target])
+            else:
+                links.append(node.link_infos[LibOutputStyle("shared_lib")].default)
+            shlib_deps.append(target)
+        else:
+            links.append(node.link_infos[preferred_linkable_type].default)
-        shared_libs_by_platform[platform] = group_shared_libs
-    debug_info.missing_default_solibs.extend([d for d in merge_data.default_shared_libs if d not in included_default_solibs])
+    extra_runtime_flags = cxx_toolchain.linker_info.shared_dep_runtime_ld_flags or []
+    if extra_runtime_flags:
+        links.append(LinkInfo(pre_flags = extra_runtime_flags))
+    return LinkArgs(infos = links), shlib_deps, link_traversal_cache
-    return MergedLinkables(
-        shared_libs_by_platform = shared_libs_by_platform,
-        debug_info = debug_info_by_platform,
-    )
+# Equivalent to _create_link_args but for somerge
+def _create_merged_link_args(
+        *,
+        cxx_toolchain: CxxToolchainInfo,
+        root_target: LinkGroupMergeInfo,
+        linkable_nodes: dict[GroupLabel, LinkGroupLinkableNode]) -> (LinkArgs, list[GroupLabel], dict[GroupLabel, list[GroupLabel]]):
+    if LinkOrdering(cxx_toolchain.linker_info.link_ordering) != LinkOrdering("topological"):
+        fail("don't yet support link ordering {}".format(cxx_toolchain.linker_info.link_ordering))
+
+    link_traversal_cache = {}
+
+    def link_traversal(label: GroupLabel) -> list[GroupLabel]:
+        def link_traversal_deps(label: GroupLabel):
+            if label == root_target.label:
+                return root_target.deps + root_target.exported_deps
+
+            linkable_node = linkable_nodes[label]
+            if linkable_node.shared_lib:
+                return linkable_node.exported_deps
+            else:
+                return linkable_node.deps + linkable_node.exported_deps
+
+        res = link_traversal_cache.get(label, None)
+        if res:
+            return res
+        res = link_traversal_deps(label)
+        link_traversal_cache[label] = res
+        return res
+
+    links = []
+    shlib_deps = []
+    for label in _rust_matching_topological_traversal([root_target.label], link_traversal):
+        if label == root_target.label:
+            links.extend(root_target.constituent_link_infos)
+        else:
+            linkable_node = linkable_nodes[label]
+            links.append(linkable_node.link)
+            if linkable_node.shared_lib:
+                shlib_deps.append(label)
+
+    extra_runtime_flags = cxx_toolchain.linker_info.shared_dep_runtime_ld_flags or []
+    if extra_runtime_flags:
+        links.append(LinkInfo(pre_flags = extra_runtime_flags))
+    return LinkArgs(infos = links), shlib_deps, link_traversal_cache
 
 # When linking shared libraries, by default, all symbols are exported from the library. In a
 # particular application, though, many of those symbols may never be used.
Ideally, in each apk, @@ -1225,7 +1621,7 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict create_relinker_version_script( ctx.actions, output = relinker_version_script, - relinker_blocklist = [regex(s) for s in ctx.attrs.relinker_whitelist], + relinker_allowlist = [regex(s) for s in ctx.attrs.relinker_whitelist], provided_symbols = provided_symbols_file, needed_symbols = needed_symbols_for_this, ) @@ -1253,7 +1649,7 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict def extract_provided_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, lib: Artifact) -> Artifact: return extract_global_syms(ctx, toolchain, lib, "relinker_extract_provided_symbols") -def create_relinker_version_script(actions: AnalysisActions, relinker_blocklist: list[regex], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): +def create_relinker_version_script(actions: AnalysisActions, relinker_allowlist: list[regex], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): def create_version_script(ctx, artifacts, outputs): all_needed_symbols = {} for symbols_file in needed_symbols: @@ -1270,7 +1666,7 @@ def create_relinker_version_script(actions: AnalysisActions, relinker_blocklist: elif "Java_" in symbol: keep_symbol = True else: - for pattern in relinker_blocklist: + for pattern in relinker_allowlist: if pattern.match(symbol): keep_symbol = True break @@ -1287,7 +1683,7 @@ def create_relinker_version_script(actions: AnalysisActions, relinker_blocklist: version_script += "};\n" ctx.actions.write(outputs[output], version_script) - actions.dynamic_output(dynamic = needed_symbols + [provided_symbols], inputs = [], outputs = [output], f = create_version_script) + actions.dynamic_output(dynamic = needed_symbols + [provided_symbols], inputs = [], outputs = [output.as_output()], f = create_version_script) def extract_undefined_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, lib: Artifact) -> Artifact: return extract_undefined_syms(ctx, toolchain, lib, "relinker_extract_undefined_symbols") @@ -1301,7 +1697,7 @@ def union_needed_symbols(actions: AnalysisActions, output: Artifact, needed_symb symbols = sorted(unioned_symbols.keys()) ctx.actions.write(outputs[output], symbols) - actions.dynamic_output(dynamic = needed_symbols, inputs = [], outputs = [output], f = compute_union) + actions.dynamic_output(dynamic = needed_symbols, inputs = [], outputs = [output.as_output()], f = compute_union) def strip_lib(ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, shlib: Artifact, output_path: [str, None] = None): strip_flags = cmd_args(get_strip_non_global_flags(cxx_toolchain)) @@ -1323,6 +1719,15 @@ def create_shared_lib( shared_lib_deps: list[str], label: Label, can_be_asset: bool) -> SharedLibrary: + for link_arg in link_args: + flags = link_arg.flags or [] + for info in link_arg.infos or []: + flags += info.pre_flags or [] + flags += info.post_flags or [] + for flag in flags: + flag = str(flag) + if flag.endswith("--exclude-libs,ALL") or flag.endswith("--exclude-libs=ALL"): + fail("The behavior of --exclude-libs,ALL is not predictable when building Android binaries and may cause runtime crashes, remove it from {} (or its merged constituents)".format(label)) link_result = cxx_link_shared_library( ctx = ctx, output = output_path, @@ -1337,7 +1742,7 @@ def create_shared_lib( ) shlib = link_result.linked_object - return SharedLibrary( + return create_shlib( lib = shlib, stripped_lib = strip_lib(ctx, 
cxx_toolchain, shlib.output),
         shlib_deps = shared_lib_deps,
diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl
index b339eab132..ed84eede8e 100644
--- a/prelude/android/android_binary_resources_rules.bzl
+++ b/prelude/android/android_binary_resources_rules.bzl
@@ -24,8 +24,8 @@ load("@prelude//decls/android_rules.bzl", "RType")
 _FilteredResourcesOutput = record(
     resource_infos = list[AndroidResourceInfo],
     voltron_res = list[Artifact],
-    override_symbols = [Artifact, None],
-    string_files_list = [Artifact, None],
+    override_symbols = Artifact | None,
+    string_files_list = Artifact | None,
     string_files_res_dirs = list[Artifact],
 )
 
@@ -36,19 +36,20 @@ def get_android_binary_resources_info(
         java_packaging_deps: list[JavaPackagingDep],
         use_proto_format: bool,
         referenced_resources_lists: list[Artifact],
-        apk_module_graph_file: [Artifact, None] = None,
+        apk_module_graph_file: Artifact | None = None,
         manifest_entries: dict = {},
         resource_infos_to_exclude: [set_type, None] = None,
         r_dot_java_packages_to_exclude: [list[str], None] = [],
         generate_strings_and_ids_separately: [bool, None] = True,
-        aapt2_min_sdk: [str, None] = None,
         aapt2_preferred_density: [str, None] = None) -> AndroidBinaryResourcesInfo:
     android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]
-    unfiltered_resource_infos = [
+
+    # Use reverse topological sort in resource merging to make sure a resource target will overwrite its dependencies.
+    unfiltered_resource_infos = reversed([
         resource_info
-        for resource_info in list(android_packageable_info.resource_infos.traverse() if android_packageable_info.resource_infos else [])
+        for resource_info in list(android_packageable_info.resource_infos.traverse(ordering = "topological") if android_packageable_info.resource_infos else [])
         if not (resource_infos_to_exclude and resource_infos_to_exclude.contains(resource_info.raw_target))
-    ]
+    ])
     filtered_resources_output = _maybe_filter_resources(
         ctx,
         unfiltered_resource_infos,
@@ -56,13 +57,14 @@ def get_android_binary_resources_info(
     )
     resource_infos = filtered_resources_output.resource_infos
 
-    android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries)
+    android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries, should_replace_application_id_placeholders = True)
 
     non_proto_format_aapt2_link_info, proto_format_aapt2_link_info = get_aapt2_link(
         ctx,
         ctx.attrs._android_toolchain[AndroidToolchainInfo],
         resource_infos,
         android_manifest,
+        manifest_entries = getattr(ctx.attrs, "manifest_entries", {}),
         includes_vector_drawables = getattr(ctx.attrs, "includes_vector_drawables", False),
         no_auto_version = getattr(ctx.attrs, "no_auto_version_resources", False),
         no_version_transitions = getattr(ctx.attrs, "no_version_transitions_resources", False),
@@ -76,7 +78,6 @@ def get_android_binary_resources_info(
         extra_filtered_resources = getattr(ctx.attrs, "extra_filtered_resources", []),
         locales = getattr(ctx.attrs, "locales", []) or getattr(ctx.attrs, "locales_for_binary_resources", []),
         filter_locales = getattr(ctx.attrs, "aapt2_locale_filtering", False) or bool(getattr(ctx.attrs, "locales_for_binary_resources", [])),
-        min_sdk = aapt2_min_sdk,
         preferred_density = aapt2_preferred_density,
     )
 
@@ -94,12 +95,14 @@ def get_android_binary_resources_info(
     cxx_resources = get_cxx_resources(ctx, deps)
 
     is_exopackaged_enabled_for_resources = "resources" in getattr(ctx.attrs, "exopackage_modes", [])
-    primary_resources_apk, exopackaged_assets, 
exopackaged_assets_hash = _merge_assets( + primary_resources_apk, exopackaged_assets, exopackaged_assets_hash, module_assets_apks_dir = _merge_assets( ctx, is_exopackaged_enabled_for_resources, aapt2_link_info.primary_resources_apk, resource_infos, cxx_resources, + use_proto_format, # indicates that this is a .aab build + apk_module_graph_file, ) if is_exopackaged_enabled_for_resources: @@ -180,6 +183,7 @@ def get_android_binary_resources_info( exopackage_info = exopackage_info, manifest = android_manifest, module_manifests = module_manifests, + module_assets = module_assets_apks_dir, packaged_string_assets = packaged_string_assets, primary_resources_apk = primary_resources_apk, proguard_config_file = aapt2_link_info.proguard_config_file, @@ -237,15 +241,17 @@ def _maybe_filter_resources( filter_resources_cmd = cmd_args(android_toolchain.filter_resources[RunInfo]) in_res_dirs = res_to_out_res_dir.keys() - filter_resources_cmd.hidden(in_res_dirs) - filter_resources_cmd.hidden([out_res.as_output() for out_res in res_to_out_res_dir.values()]) + filter_resources_cmd.add(cmd_args( + hidden = + in_res_dirs + [out_res.as_output() for out_res in res_to_out_res_dir.values()], + )) filter_resources_cmd.add([ "--in-res-dir-to-out-res-dir-map", ctx.actions.write_json("in_res_dir_to_out_res_dir_map", {"res_dir_map": res_to_out_res_dir}), ]) if is_voltron_language_pack_enabled: - filter_resources_cmd.hidden([out_res.as_output() for out_res in voltron_res_to_out_res_dir.values()]) + filter_resources_cmd.add(cmd_args(hidden = [out_res.as_output() for out_res in voltron_res_to_out_res_dir.values()])) filter_resources_cmd.add([ "--voltron-in-res-dir-to-out-res-dir-map", ctx.actions.write_json("voltron_in_res_dir_to_out_res_dir_map", {"res_dir_map": voltron_res_to_out_res_dir}), @@ -363,7 +369,7 @@ def _maybe_generate_string_source_map( should_build_source_string_map: bool, res_dirs: list[Artifact], android_toolchain: AndroidToolchainInfo, - is_voltron_string_source_map: bool = False) -> [Artifact, None]: + is_voltron_string_source_map: bool = False) -> Artifact | None: if not should_build_source_string_map or len(res_dirs) == 0: return None @@ -376,7 +382,7 @@ def _maybe_generate_string_source_map( res_dirs_file, "--output", output.as_output(), - ]).hidden(res_dirs) + ], hidden = res_dirs) if is_voltron_string_source_map: generate_string_source_map_cmd.add("--is-voltron") @@ -387,10 +393,10 @@ def _maybe_generate_string_source_map( def _maybe_package_strings_as_assets( ctx: AnalysisContext, - string_files_list: [Artifact, None], + string_files_list: Artifact | None, string_files_res_dirs: list[Artifact], r_dot_txt: Artifact, - android_toolchain: AndroidToolchainInfo) -> [Artifact, None]: + android_toolchain: AndroidToolchainInfo) -> Artifact | None: resource_compression_mode = getattr(ctx.attrs, "resource_compression", "disabled") is_store_strings_as_assets = _is_store_strings_as_assets(resource_compression_mode) expect(is_store_strings_as_assets == (string_files_list != None)) @@ -416,7 +422,7 @@ def _maybe_package_strings_as_assets( string_assets_zip.as_output(), "--all-locales-string-assets-zip", all_locales_string_assets_zip.as_output(), - ]).hidden(string_files_res_dirs) + ], hidden = string_files_res_dirs) if locales: package_strings_as_assets_cmd.add("--locales", ",".join(locales)) @@ -428,7 +434,8 @@ def _maybe_package_strings_as_assets( def get_manifest( ctx: AnalysisContext, android_packageable_info: AndroidPackageableInfo, - manifest_entries: dict) -> Artifact: + manifest_entries: dict, + 
should_replace_application_id_placeholders: bool) -> Artifact: robolectric_manifest = getattr(ctx.attrs, "robolectric_manifest", None) if robolectric_manifest: return robolectric_manifest @@ -456,7 +463,7 @@ def get_manifest( manifest_entries.get("placeholders", {}), ) - if android_toolchain.set_application_id_to_specified_package: + if android_toolchain.set_application_id_to_specified_package and should_replace_application_id_placeholders: android_manifest_with_replaced_application_id = ctx.actions.declare_output("android_manifest_with_replaced_application_id/AndroidManifest.xml") replace_application_id_placeholders_cmd = cmd_args([ ctx.attrs._android_toolchain[AndroidToolchainInfo].replace_application_id_placeholders[RunInfo], @@ -476,7 +483,7 @@ def get_manifest( def _get_module_manifests( ctx: AnalysisContext, manifest_entries: dict, - apk_module_graph_file: [Artifact, None], + apk_module_graph_file: Artifact | None, use_proto_format: bool, primary_resources_apk: Artifact) -> list[Artifact]: if not apk_module_graph_file: @@ -528,7 +535,7 @@ def _get_module_manifests( ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = [module_manifests_dir], + outputs = [module_manifests_dir.as_output()], f = get_manifests_modular, ) @@ -541,35 +548,99 @@ def _merge_assets( is_exopackaged_enabled_for_resources: bool, base_apk: Artifact, resource_infos: list[AndroidResourceInfo], - cxx_resources: [Artifact, None]) -> (Artifact, [Artifact, None], [Artifact, None]): - assets_dirs = [resource_info.assets for resource_info in resource_infos if resource_info.assets] - if cxx_resources != None: - assets_dirs.extend([cxx_resources]) - if len(assets_dirs) == 0: - return base_apk, None, None - - merge_assets_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_assets[RunInfo]) + cxx_resources: Artifact | None, + is_bundle_build: bool, + apk_module_graph_file: Artifact | None) -> (Artifact, Artifact | None, Artifact | None, Artifact | None): + expect( + not (is_exopackaged_enabled_for_resources and is_bundle_build), + "Cannot use exopackage-for-resources with AAB builds.", + ) + expect( + not (is_exopackaged_enabled_for_resources and apk_module_graph_file), + "Cannot use exopackage-for-resources with Voltron builds.", + ) + asset_resource_infos = [resource_info for resource_info in resource_infos if resource_info.assets] + if not asset_resource_infos and not cxx_resources: + return base_apk, None, None, None merged_assets_output = ctx.actions.declare_output("merged_assets.ap_") - merge_assets_cmd.add(["--output-apk", merged_assets_output.as_output()]) - if is_exopackaged_enabled_for_resources: - merged_assets_output_hash = ctx.actions.declare_output("merged_assets.ap_.hash") - merge_assets_cmd.add(["--output-apk-hash", merged_assets_output_hash.as_output()]) - else: - merge_assets_cmd.add(["--base-apk", base_apk]) - merged_assets_output_hash = None + def get_common_merge_assets_cmd( + ctx: AnalysisContext, + output_apk: Artifact) -> (cmd_args, Artifact | None): + merge_assets_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_assets[RunInfo]) + merge_assets_cmd.add(["--output-apk", output_apk.as_output()]) - assets_dirs_file = ctx.actions.write("assets_dirs", assets_dirs) - merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) - merge_assets_cmd.hidden(assets_dirs) + if getattr(ctx.attrs, "extra_no_compress_asset_extensions", None): + merge_assets_cmd.add("--extra-no-compress-asset-extensions") + 
merge_assets_cmd.add(ctx.attrs.extra_no_compress_asset_extensions) - ctx.actions.run(merge_assets_cmd, category = "merge_assets") + if is_exopackaged_enabled_for_resources: + merged_assets_output_hash = ctx.actions.declare_output("merged_assets.ap_.hash") + merge_assets_cmd.add(["--output-apk-hash", merged_assets_output_hash.as_output()]) + else: + merge_assets_cmd.add(["--base-apk", base_apk]) + merged_assets_output_hash = None + + merge_assets_cmd.add("--binary-type", "aab" if is_bundle_build else "apk") + + return merge_assets_cmd, merged_assets_output_hash + + if apk_module_graph_file: + declared_outputs = [merged_assets_output] + if is_bundle_build: + # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. + module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + declared_outputs.append(module_assets_apks_dir) + else: + module_assets_apks_dir = None + + def merge_assets_modular(ctx: AnalysisContext, artifacts, outputs): + apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) + + module_to_assets_dirs = {} + if cxx_resources != None: + module_to_assets_dirs.setdefault(ROOT_MODULE, []).extend([cxx_resources]) + for asset_resource_info in asset_resource_infos: + module_name = apk_module_graph_info.target_to_module_mapping_function(str(asset_resource_info.raw_target)) + module_to_assets_dirs.setdefault(module_name, []).append(asset_resource_info.assets) + + merge_assets_cmd, _ = get_common_merge_assets_cmd(ctx, outputs[merged_assets_output]) + + if is_bundle_build: + merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) + + assets_dirs_file = ctx.actions.write_json("assets_dirs.json", module_to_assets_dirs) + merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) + merge_assets_cmd.add(cmd_args(hidden = [resource_info.assets for resource_info in asset_resource_infos])) + + ctx.actions.run(merge_assets_cmd, category = "merge_assets") + + ctx.actions.dynamic_output( + dynamic = [apk_module_graph_file], + inputs = [], + outputs = [o.as_output() for o in declared_outputs], + f = merge_assets_modular, + ) + + return merged_assets_output, None, None, module_assets_apks_dir - if is_exopackaged_enabled_for_resources: - return base_apk, merged_assets_output, merged_assets_output_hash else: - return merged_assets_output, None, None + merge_assets_cmd, merged_assets_output_hash = get_common_merge_assets_cmd(ctx, merged_assets_output) + + assets_dirs = [resource_info.assets for resource_info in asset_resource_infos] + if cxx_resources: + assets_dirs.extend([cxx_resources]) + assets_dirs_file = ctx.actions.write_json("assets_dirs.json", {ROOT_MODULE: assets_dirs}) + merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) + merge_assets_cmd.add(cmd_args(hidden = assets_dirs)) + + ctx.actions.run(merge_assets_cmd, category = "merge_assets") + + if is_exopackaged_enabled_for_resources: + return base_apk, merged_assets_output, merged_assets_output_hash, None + else: + return merged_assets_output, None, None, None def get_effective_banned_duplicate_resource_types( duplicate_resource_behavior: str, @@ -590,7 +661,7 @@ def get_effective_banned_duplicate_resource_types( else: fail("Unrecognized duplicate_resource_behavior: {}".format(duplicate_resource_behavior)) -def get_cxx_resources(ctx: AnalysisContext, deps: list[Dependency], dir_name: str = "cxx_resources_dir") -> [Artifact, None]: +def get_cxx_resources(ctx: AnalysisContext, deps: list[Dependency], dir_name: str = 
"cxx_resources_dir") -> Artifact | None: cxx_resources = gather_resources( label = ctx.label, resources = {}, diff --git a/prelude/android/android_build_config.bzl b/prelude/android/android_build_config.bzl index b1a32b8a3f..95ab62c55c 100644 --- a/prelude/android/android_build_config.bzl +++ b/prelude/android/android_build_config.bzl @@ -22,11 +22,11 @@ def android_build_config_impl(ctx: AnalysisContext) -> list[Provider]: providers = [] default_build_config_fields = get_build_config_fields(ctx.attrs.values) - android_build_config_info = AndroidBuildConfigInfo(package = ctx.attrs.package, build_config_fields = default_build_config_fields) + android_build_config_info = AndroidBuildConfigInfo(package = ctx.attrs.package, build_config_fields = default_build_config_fields, values_file = ctx.attrs.values_file) providers.append(android_build_config_info) providers.append(merge_android_packageable_info(ctx.label, ctx.actions, deps = [], build_config_info = android_build_config_info)) - build_config_dot_java_library, java_packaging_info = generate_android_build_config( + build_config_dot_java_library, java_packaging_info, build_config_dot_java = generate_android_build_config( ctx, ctx.attrs.name, ctx.attrs.package, @@ -38,7 +38,14 @@ def android_build_config_impl(ctx: AnalysisContext) -> list[Provider]: providers.append(java_packaging_info) providers.append(build_config_dot_java_library) - providers.append(DefaultInfo(default_output = build_config_dot_java_library.library_output.full_library)) + providers.append( + DefaultInfo( + default_output = build_config_dot_java_library.library_output.full_library, + sub_targets = { + "build_config_dot_java": [DefaultInfo(default_output = build_config_dot_java)], + }, + ), + ) return providers def generate_android_build_config( @@ -47,7 +54,7 @@ def generate_android_build_config( java_package: str, use_constant_expressions: bool, default_values: list[BuildConfigField], - values_file: [Artifact, None]) -> (JavaLibraryInfo, JavaPackagingInfo): + values_file: Artifact | None) -> (JavaLibraryInfo, JavaPackagingInfo, Artifact): build_config_dot_java = _generate_build_config_dot_java(ctx, source, java_package, use_constant_expressions, default_values, values_file) compiled_build_config_dot_java = _compile_and_package_build_config_dot_java(ctx, java_package, build_config_dot_java) @@ -61,7 +68,7 @@ def generate_android_build_config( output_for_classpath_macro = library_output.full_library, ), JavaPackagingInfo( packaging_deps = packaging_deps, - )) + ), build_config_dot_java) def _generate_build_config_dot_java( ctx: AnalysisContext, @@ -69,7 +76,7 @@ def _generate_build_config_dot_java( java_package: str, use_constant_expressions: bool, default_values: list[BuildConfigField], - values_file: [Artifact, None]) -> Artifact: + values_file: Artifact | None) -> Artifact: generate_build_config_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].generate_build_config[RunInfo]) generate_build_config_cmd.add([ "--source", diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index a6bbffbdc7..3168922a11 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -5,10 +5,14 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidAabInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:bundletool_util.bzl", "derive_universal_apk") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") +load("@prelude//utils:argfile.bzl", "argfile") def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: android_binary_info = get_binary_info(ctx, use_proto_format = True) @@ -20,12 +24,31 @@ def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: dex_files_info = android_binary_info.dex_files_info, native_library_info = android_binary_info.native_library_info, resources_info = android_binary_info.resources_info, + bundle_config = ctx.attrs.bundle_config_file, + validation_deps_outputs = get_validation_deps_outputs(ctx), + packaging_options = ctx.attrs.packaging_options, ) + sub_targets = {} + sub_targets.update(android_binary_info.sub_targets) + if ctx.attrs.use_derived_apk: + keystore = ctx.attrs.keystore[KeystoreInfo] + default_output = derive_universal_apk( + ctx, + android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo], + app_bundle = output_bundle, + keystore = keystore, + ) + sub_targets["aab"] = [DefaultInfo( + default_outputs = [output_bundle], + )] + else: + default_output = output_bundle + java_packaging_deps = android_binary_info.java_packaging_deps return [ - DefaultInfo(default_output = output_bundle, sub_targets = android_binary_info.sub_targets), - AndroidAabInfo(aab = output_bundle, manifest = android_binary_info.resources_info.manifest), + DefaultInfo(default_output = default_output, other_outputs = android_binary_info.materialized_artifacts, sub_targets = sub_targets), + AndroidAabInfo(aab = output_bundle, manifest = android_binary_info.resources_info.manifest, materialized_artifacts = android_binary_info.materialized_artifacts), TemplatePlaceholderInfo( keyed_variables = { "classpath": cmd_args([dep.jar for dep in java_packaging_deps if dep.jar], delimiter = get_path_separator_for_exec_os(ctx)), @@ -40,10 +63,13 @@ def build_bundle( android_toolchain: AndroidToolchainInfo, dex_files_info: DexFilesInfo, native_library_info: AndroidBinaryNativeLibsInfo, - resources_info: AndroidBinaryResourcesInfo) -> Artifact: + resources_info: AndroidBinaryResourcesInfo, + bundle_config: Artifact | None, + validation_deps_outputs: [list[Artifact], None] = None, + packaging_options: dict | None = None) -> Artifact: output_bundle = actions.declare_output("{}.aab".format(label.name)) - bundle_builder_args = cmd_args([ + bundle_builder_args = cmd_args( android_toolchain.bundle_builder[RunInfo], "--output-bundle", output_bundle.as_output(), @@ -51,30 +77,39 @@ def build_bundle( resources_info.primary_resources_apk, "--dex-file", dex_files_info.primary_dex, - ]) + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. 
+ hidden = validation_deps_outputs or [], + ) + + if bundle_config: + bundle_builder_args.add(["--path-to-bundle-config-file", bundle_config]) if android_toolchain.package_meta_inf_version_files: bundle_builder_args.add("--package-meta-inf-version-files") root_module_asset_directories = native_library_info.root_module_native_lib_assets + dex_files_info.root_module_secondary_dex_dirs - root_module_asset_directories_file = actions.write("root_module_asset_directories.txt", root_module_asset_directories) - bundle_builder_args.hidden(root_module_asset_directories) - non_root_module_asset_directories = resources_info.module_manifests + native_library_info.non_root_module_native_lib_assets + dex_files_info.non_root_module_secondary_dex_dirs - non_root_module_asset_directories_file = actions.write("non_root_module_asset_directories.txt", non_root_module_asset_directories) - bundle_builder_args.hidden(non_root_module_asset_directories) - native_library_directories = actions.write("native_library_directories", native_library_info.native_libs_for_primary_apk) - bundle_builder_args.hidden(native_library_info.native_libs_for_primary_apk) + root_module_asset_directories_file = argfile(actions = actions, name = "root_module_asset_directories.txt", args = root_module_asset_directories) + + non_root_module_asset_directories = resources_info.module_manifests + dex_files_info.non_root_module_secondary_dex_dirs + non_root_module_asset_directories_file = argfile(actions = actions, name = "non_root_module_asset_directories.txt", args = non_root_module_asset_directories) + non_root_module_asset_native_lib_directories = argfile(actions = actions, name = "non_root_module_asset_native_lib_directories.txt", args = native_library_info.non_root_module_native_lib_assets) + + native_library_directories = argfile(actions = actions, name = "native_library_directories", args = native_library_info.native_libs_for_primary_apk) all_zip_files = [resources_info.packaged_string_assets] if resources_info.packaged_string_assets else [] - zip_files = actions.write("zip_files", all_zip_files) - bundle_builder_args.hidden(all_zip_files) - jar_files_that_may_contain_resources = actions.write("jar_files_that_may_contain_resources", resources_info.jar_files_that_may_contain_resources) - bundle_builder_args.hidden(resources_info.jar_files_that_may_contain_resources) + zip_files = argfile(actions = actions, name = "zip_files", args = all_zip_files) + jar_files_that_may_contain_resources = argfile(actions = actions, name = "jar_files_that_may_contain_resources", args = resources_info.jar_files_that_may_contain_resources) + + if resources_info.module_assets: + bundle_builder_args.add(["--module-assets-dir", resources_info.module_assets]) bundle_builder_args.add([ "--root-module-asset-directories-list", root_module_asset_directories_file, "--non-root-module-asset-directories-list", non_root_module_asset_directories_file, + "--non-root-module-asset-native-lib-directories-list", + non_root_module_asset_native_lib_directories, "--native-libraries-directories-list", native_library_directories, "--zip-files-list", @@ -85,6 +120,13 @@ def build_bundle( android_toolchain.zipalign[RunInfo], ]) + if packaging_options: + for key, value in packaging_options.items(): + if key != "excluded_resources": + fail("Only 'excluded_resources' is supported in packaging_options right now!") + else: + bundle_builder_args.add("--excluded-resources", actions.write("excluded_resources.txt", value)) + actions.run(bundle_builder_args, category = "bundle_build") return 
output_bundle diff --git a/prelude/android/android_instrumentation_apk.bzl b/prelude/android/android_instrumentation_apk.bzl index f8c9b315f9..3e82fc211f 100644 --- a/prelude/android/android_instrumentation_apk.bzl +++ b/prelude/android/android_instrumentation_apk.bzl @@ -12,6 +12,7 @@ load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnd load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//android:configuration.bzl", "get_deps_by_platform") load("@prelude//android:dex_rules.bzl", "get_multi_dex", "get_single_primary_dex", "get_split_dex_merge_config", "merge_to_single_dex", "merge_to_split_dex") +load("@prelude//android:preprocess_java_classes.bzl", "get_preprocessed_java_classes") load("@prelude//android:util.bzl", "create_enhancement_context") load("@prelude//java:java_providers.bzl", "create_java_packaging_dep", "get_all_java_packaging_deps") load("@prelude//java/utils:java_utils.bzl", "get_class_to_source_map_info") @@ -20,6 +21,9 @@ load("@prelude//utils:expect.bzl", "expect") def android_instrumentation_apk_impl(ctx: AnalysisContext): _verify_params(ctx) + # jar preprocessing cannot be used when the jars were dexed already, so we have to disable predex when we want to preprocess the jars. + disable_pre_dex = ctx.attrs.disable_pre_dex or ctx.attrs.preprocess_java_classes_bash + apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo] # android_instrumentation_apk uses the same platforms as the APK-under-test @@ -34,10 +38,12 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): # We use the deps that don't have _build_only_native_code = True deps = unfiltered_deps_by_platform.values()[0] + is_self_instrumenting = ctx.attrs.is_self_instrumenting + java_packaging_deps = [ packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, deps) - if packaging_dep.dex and not apk_under_test_info.java_packaging_deps.contains(packaging_dep.label.raw_target()) + if packaging_dep.dex and (is_self_instrumenting or not apk_under_test_info.java_packaging_deps.contains(packaging_dep.label.raw_target())) ] android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, deps) @@ -50,8 +56,8 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): use_proto_format = False, referenced_resources_lists = [], manifest_entries = apk_under_test_info.manifest_entries, - resource_infos_to_exclude = apk_under_test_info.resource_infos, - r_dot_java_packages_to_exclude = apk_under_test_info.r_dot_java_packages.list(), + resource_infos_to_exclude = apk_under_test_info.resource_infos if not is_self_instrumenting else None, + r_dot_java_packages_to_exclude = apk_under_test_info.r_dot_java_packages.list() if not is_self_instrumenting else [], ) android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] java_packaging_deps += [ @@ -63,7 +69,9 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): for r_dot_java in resources_info.r_dot_java_infos ] - if not ctx.attrs.disable_pre_dex: + enhance_ctx = create_enhancement_context(ctx) + materialized_artifacts = [] + if not disable_pre_dex: pre_dexed_libs = [java_packaging_dep.dex for java_packaging_dep in java_packaging_deps] if ctx.attrs.use_split_dex: dex_merge_config = get_split_dex_merge_config(ctx, android_toolchain) @@ -77,6 +85,10 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): dex_files_info = merge_to_single_dex(ctx, android_toolchain, pre_dexed_libs) else: jars_to_owners = {packaging_dep.jar: 
packaging_dep.jar.owner.raw_target() for packaging_dep in java_packaging_deps} + if ctx.attrs.preprocess_java_classes_bash: + jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(enhance_ctx, jars_to_owners) + if materialized_artifacts_dir: + materialized_artifacts.append(materialized_artifacts_dir) if ctx.attrs.use_split_dex: dex_files_info = get_multi_dex( ctx, @@ -91,13 +103,12 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): jars_to_owners.keys(), ) - enhance_ctx = create_enhancement_context(ctx) native_library_info = get_android_binary_native_library_info( enhance_ctx, android_packageable_info, filtered_deps_by_platform, - prebuilt_native_library_dirs_to_exclude = apk_under_test_info.prebuilt_native_library_dirs, - shared_libraries_to_exclude = apk_under_test_info.shared_libraries, + prebuilt_native_library_dirs_to_exclude = apk_under_test_info.prebuilt_native_library_dirs if not is_self_instrumenting else None, + shared_libraries_to_exclude = apk_under_test_info.shared_libraries if not is_self_instrumenting else None, ) output_apk = build_apk( @@ -110,16 +121,16 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): resources_info = resources_info, ) - class_to_srcs, _ = get_class_to_source_map_info( + class_to_srcs, _, _ = get_class_to_source_map_info( ctx, outputs = None, deps = deps, ) return [ - AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest), + AndroidApkInfo(apk = output_apk, materialized_artifacts = materialized_artifacts, manifest = resources_info.manifest), AndroidInstrumentationApkInfo(apk_under_test = ctx.attrs.apk[AndroidApkInfo].apk), - DefaultInfo(default_output = output_apk, sub_targets = enhance_ctx.get_sub_targets()), + DefaultInfo(default_output = output_apk, other_outputs = materialized_artifacts, sub_targets = enhance_ctx.get_sub_targets()), class_to_srcs, ] diff --git a/prelude/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl index 27d56c21f7..82b1b32fa3 100644 --- a/prelude/android/android_instrumentation_test.bzl +++ b/prelude/android/android_instrumentation_test.bzl @@ -8,28 +8,56 @@ load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidInstrumentationApkInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//java:class_to_srcs.bzl", "JavaClassToSourceMapInfo") +load("@prelude//java:java_providers.bzl", "JavaPackagingInfo", "get_all_java_packaging_deps_tset") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:expect.bzl", "expect") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") +ANDROID_EMULATOR_ABI_LABEL_PREFIX = "tpx-re-config::" DEFAULT_ANDROID_SUBPLATFORM = "android-30" +DEFAULT_ANDROID_PLATFORM = "android-emulator" +DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE = "instrumentation-tests" def android_instrumentation_test_impl(ctx: AnalysisContext): android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] - cmd = [ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests] + cmd = [ctx.attrs._java_test_toolchain[JavaToolchainInfo].java_for_tests] classpath = android_toolchain.instrumentation_test_runner_classpath 
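    # Annotation for readers, not part of this diff: the command assembled
    # below is effectively `java -classpath @<argfile> <runner main class> ...`.
    # With this change the classpath argfile also carries the packaged jars of
    # `instrumentation_test_listener` (projected from its JavaPackagingInfo
    # tset), and the listener's shared libraries reach the runner through the
    # BUCK_LD_SYMLINK_TREE symlink tree set up further down.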
classpath_args = cmd_args() classpath_args.add("-classpath") + env = ctx.attrs.env or {} extra_classpath = [] if ctx.attrs.instrumentation_test_listener != None: - extra_classpath.append(ctx.attrs.instrumentation_test_listener) - classpath_args.add(cmd_args(classpath + extra_classpath, delimiter = get_path_separator_for_exec_os(ctx))) - classpath_args_file = ctx.actions.write("classpath_args_file", classpath_args) - cmd.append(cmd_args(classpath_args_file, format = "@{}").hidden(classpath_args)) + extra_classpath.extend([ + get_all_java_packaging_deps_tset(ctx, java_packaging_infos = [ctx.attrs.instrumentation_test_listener[JavaPackagingInfo]]) + .project_as_args("full_jar_args", ordering = "bfs"), + ]) + + shared_library_info = merge_shared_libraries( + ctx.actions, + deps = [ctx.attrs.instrumentation_test_listener[SharedLibraryInfo]], + ) + + cxx_library_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "cxx_library_symlink_tree", + shared_libs = traverse_shared_library_info(shared_library_info), + ) + + env["BUCK_LD_SYMLINK_TREE"] = cxx_library_symlink_tree + classpath_args.add(cmd_args(extra_classpath + classpath, delimiter = get_path_separator_for_exec_os(ctx))) + cmd.append(at_argfile(actions = ctx.actions, name = "classpath_args_file", args = classpath_args)) cmd.append(android_toolchain.instrumentation_test_runner_main_class) @@ -39,6 +67,17 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): instrumentation_apk_info = ctx.attrs.apk.get(AndroidInstrumentationApkInfo) if instrumentation_apk_info != None: cmd.extend(["--apk-under-test-path", instrumentation_apk_info.apk_under_test]) + if ctx.attrs.is_self_instrumenting: + cmd.extend(["--is-self-instrumenting"]) + extra_instrumentation_args = ctx.attrs.extra_instrumentation_args + if extra_instrumentation_args: + for arg_name, arg_value in extra_instrumentation_args.items(): + cmd.extend( + [ + "--extra-instrumentation-argument", + cmd_args([arg_name, arg_value], delimiter = "="), + ], + ) target_package_file = ctx.actions.declare_output("target_package_file") package_file = ctx.actions.declare_output("package_file") @@ -78,10 +117,18 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): ], ) + remote_execution_properties = { + "platform": _compute_emulator_platform(ctx.attrs.labels or []), + "subplatform": _compute_emulator_subplatform(ctx.attrs.labels or []), + } + re_emulator_abi = _compute_emulator_abi(ctx.attrs.labels or []) + if re_emulator_abi != None: + remote_execution_properties["abi"] = re_emulator_abi + test_info = ExternalRunnerTestInfo( type = "android_instrumentation", command = cmd, - env = ctx.attrs.env, + env = env, labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, run_from_project_root = True, @@ -90,11 +137,8 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): "android-emulator": CommandExecutorConfig( local_enabled = android_toolchain.instrumentation_test_can_run_locally, remote_enabled = True, - remote_execution_properties = { - "platform": "android-emulator", - "subplatform": _compute_emulator_target(ctx.attrs.labels or []), - }, - remote_execution_use_case = "instrumentation-tests", + remote_execution_properties = remote_execution_properties, + remote_execution_use_case = _compute_re_use_case(ctx.attrs.labels or []), ), "static-listing": CommandExecutorConfig( local_enabled = True, @@ -112,15 +156,45 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): classmap_source_info = [ctx.attrs.apk[JavaClassToSourceMapInfo]] if 
JavaClassToSourceMapInfo in ctx.attrs.apk else [] - return inject_test_run_info(ctx, test_info) + [ + test_info, run_info = inject_test_run_info(ctx, test_info) + + # We append additional args so that "buck2 run" will work with sane defaults + run_info.args.add(cmd_args(["--auto-run-on-connected-device", "--output", ".", "--adb-executable-path", "adb"])) + return [ + test_info, + run_info, DefaultInfo(), ] + classmap_source_info +def _compute_emulator_abi(labels: list[str]): + emulator_abi_labels = [label for label in labels if label.startswith(ANDROID_EMULATOR_ABI_LABEL_PREFIX)] + expect(len(emulator_abi_labels) <= 1, "multiple '{}' labels were found:[{}], there must be only one!".format(ANDROID_EMULATOR_ABI_LABEL_PREFIX, ", ".join(emulator_abi_labels))) + if len(emulator_abi_labels) == 0: + return None + else: # len(emulator_abi_labels) == 1: + return emulator_abi_labels[0].replace(ANDROID_EMULATOR_ABI_LABEL_PREFIX, "") + # replicating the logic in https://fburl.com/code/1fqowxu4 to match buck1's behavior -def _compute_emulator_target(labels: list[str]) -> str: - emulator_target_labels = [label for label in labels if label.startswith("re_emulator_")] - expect(len(emulator_target_labels) <= 1, "multiple 're_emulator_' labels were found:[{}], there must be only one!".format(", ".join(emulator_target_labels))) - if len(emulator_target_labels) == 0: +def _compute_emulator_subplatform(labels: list[str]) -> str: + emulator_subplatform_labels = [label for label in labels if label.startswith("re_emulator_")] + expect(len(emulator_subplatform_labels) <= 1, "multiple 're_emulator_' labels were found:[{}], there must be only one!".format(", ".join(emulator_subplatform_labels))) + if len(emulator_subplatform_labels) == 0: return DEFAULT_ANDROID_SUBPLATFORM - else: # len(emulator_target_labels) == 1: - return emulator_target_labels[0].replace("re_emulator_", "") + else: # len(emulator_subplatform_labels) == 1: + return emulator_subplatform_labels[0].replace("re_emulator_", "") + +def _compute_emulator_platform(labels: list[str]) -> str: + emulator_platform_labels = [label for label in labels if label.startswith("re_platform_")] + expect(len(emulator_platform_labels) <= 1, "multiple 're_platform_' labels were found:[{}], there must be only one!".format(", ".join(emulator_platform_labels))) + if len(emulator_platform_labels) == 0: + return DEFAULT_ANDROID_PLATFORM + else: # len(emulator_platform_labels) == 1: + return emulator_platform_labels[0].replace("re_platform_", "") + +def _compute_re_use_case(labels: list[str]) -> str: + re_use_case_labels = [label for label in labels if label.startswith("re_opts_use_case=")] + expect(len(re_use_case_labels) <= 1, "multiple 're_opts_use_case' labels were found:[{}], there must be only one!".format(", ".join(re_use_case_labels))) + if len(re_use_case_labels) == 0: + return DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE + else: # len(re_use_case_labels) == 1: + return re_use_case_labels[0].replace("re_opts_use_case=", "") diff --git a/prelude/android/android_library.bzl b/prelude/android/android_library.bzl index 7e7ad6ea04..df3e9c8392 100644 --- a/prelude/android/android_library.bzl +++ b/prelude/android/android_library.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
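A worked example for the remote-execution label helpers added in android_instrumentation_test.bzl above; the label values here are hypothetical:

# labels = [
#     "re_emulator_android-33",
#     "re_platform_android-emulator-arm",
#     "tpx-re-config::arm64",
#     "re_opts_use_case=my-instrumentation-tests",
# ]
# produces:
#   remote_execution_properties = {
#       "platform": "android-emulator-arm",   # _compute_emulator_platform
#       "subplatform": "android-33",          # _compute_emulator_subplatform
#       "abi": "arm64",                       # _compute_emulator_abi
#   }
#   remote_execution_use_case = "my-instrumentation-tests"
# With none of these labels present, the defaults apply: platform
# "android-emulator", subplatform "android-30", no "abi" key, and use case
# "instrumentation-tests".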
+load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load( "@prelude//android:android_providers.bzl", "AndroidLibraryIntellijInfo", @@ -39,7 +40,10 @@ def android_library_impl(ctx: AnalysisContext) -> list[Provider]: }), ] - java_providers, android_library_intellij_info = build_android_library(ctx) + java_providers, android_library_intellij_info = build_android_library( + ctx = ctx, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) android_providers = [android_library_intellij_info] if android_library_intellij_info else [] return to_list(java_providers) + [ @@ -52,14 +56,19 @@ def android_library_impl(ctx: AnalysisContext) -> list[Provider]: merge_exported_android_resource_info(ctx.attrs.exported_deps), ] + android_providers +def optional_jars(ctx: AnalysisContext) -> list[Artifact]: + return ctx.attrs.android_optional_jars or [] + def build_android_library( ctx: AnalysisContext, - r_dot_java: [Artifact, None] = None) -> (JavaProviders, [AndroidLibraryIntellijInfo, None]): - bootclasspath_entries = [] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath + r_dot_java: Artifact | None = None, + extra_sub_targets = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> (JavaProviders, [AndroidLibraryIntellijInfo, None]): + bootclasspath_entries = [] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath + optional_jars(ctx) additional_classpath_entries = [] dummy_r_dot_java, android_library_intellij_info = _get_dummy_r_dot_java(ctx) - extra_sub_targets = {} + extra_sub_targets = dict(extra_sub_targets) if r_dot_java: additional_classpath_entries.append(r_dot_java) @@ -73,6 +82,7 @@ def build_android_library( additional_classpath_entries = additional_classpath_entries, bootclasspath_entries = bootclasspath_entries, extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ), android_library_intellij_info else: return build_java_library( @@ -81,10 +91,11 @@ def build_android_library( additional_classpath_entries = additional_classpath_entries, bootclasspath_entries = bootclasspath_entries, extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ), android_library_intellij_info def _get_dummy_r_dot_java( - ctx: AnalysisContext) -> ([Artifact, None], [AndroidLibraryIntellijInfo, None]): + ctx: AnalysisContext) -> (Artifact | None, [AndroidLibraryIntellijInfo, None]): android_resources = dedupe([resource for resource in filter(None, [ x.get(AndroidResourceInfo) for x in ctx.attrs.deps + ctx.attrs.provided_deps + (getattr(ctx.attrs, "provided_deps_query", []) or []) diff --git a/prelude/android/android_manifest.bzl b/prelude/android/android_manifest.bzl index 2f1d889f8b..af7e06791d 100644 --- a/prelude/android/android_manifest.bzl +++ b/prelude/android/android_manifest.bzl @@ -13,6 +13,7 @@ load( ) load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//android:voltron.bzl", "ROOT_MODULE") +load("@prelude//utils:argfile.bzl", "argfile") def android_manifest_impl(ctx: AnalysisContext) -> list[Provider]: output, merge_report = generate_android_manifest( @@ -49,10 +50,9 @@ def generate_android_manifest( elif type(manifests) == "transitive_set": manifests = manifests.project_as_args("artifacts", ordering = "bfs") - library_manifest_paths_file = ctx.actions.write("{}/library_manifest_paths_file".format(module_name), manifests) + library_manifest_paths_file = argfile(actions = ctx.actions, name = 
"{}/library_manifest_paths_file".format(module_name), args = manifests) generate_manifest_cmd.add(["--library-manifests-list", library_manifest_paths_file]) - generate_manifest_cmd.hidden(manifests) placeholder_entries_args = cmd_args() for key, val in placeholder_entries.items(): diff --git a/prelude/android/android_prebuilt_aar.bzl b/prelude/android/android_prebuilt_aar.bzl index eda9485c4f..820f5940bd 100644 --- a/prelude/android/android_prebuilt_aar.bzl +++ b/prelude/android/android_prebuilt_aar.bzl @@ -25,6 +25,7 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: jni = ctx.actions.declare_output("jni", dir = True) annotation_jars_dir = ctx.actions.declare_output("annotation_jars", dir = True) proguard_config = ctx.actions.declare_output("proguard.txt") + lint_jar = ctx.actions.declare_output("lint.jar") android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] unpack_aar_tool = android_toolchain.unpack_aar[RunInfo] @@ -53,6 +54,8 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: proguard_config.as_output(), "--jar-builder-tool", jar_builder_tool, + "--lint-jar-path", + lint_jar.as_output(), ] ctx.actions.run(unpack_aar_cmd, category = "android_unpack_aar") @@ -82,10 +85,13 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: ctx = ctx, library_output = library_output_classpath_entry, exported_deps = ctx.attrs.deps, + provided_deps = ctx.attrs.desugar_deps, needs_desugar = True, is_prebuilt_jar = True, annotation_jars_dir = annotation_jars_dir, proguard_config = proguard_config, + lint_jar = lint_jar, + sources_jar = ctx.attrs.source_jar, ) native_library = PrebuiltNativeLibraryDir( @@ -103,7 +109,15 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: linkable_graph, template_placeholder_info, java_library_intellij_info, - merge_android_packageable_info(ctx.label, ctx.actions, ctx.attrs.deps, manifest = manifest, prebuilt_native_library_dir = native_library, resource_info = resource_info), + merge_android_packageable_info( + ctx.label, + ctx.actions, + ctx.attrs.deps, + manifest = manifest, + prebuilt_native_library_dir = native_library, + resource_info = resource_info, + for_primary_apk = ctx.attrs.for_primary_apk, + ), resource_info, DefaultInfo(default_output = all_classes_jar, other_outputs = [ manifest, diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index f520a545e7..b47bb853b2 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -34,8 +34,8 @@ ExopackageNativeInfo = record( ) ExopackageResourcesInfo = record( - assets = [Artifact, None], - assets_hash = [Artifact, None], + assets = Artifact | None, + assets_hash = Artifact | None, res = Artifact, res_hash = Artifact, ) @@ -47,15 +47,16 @@ RDotJavaInfo = record( ) AndroidBinaryNativeLibsInfo = record( - apk_under_test_prebuilt_native_library_dirs = list[PrebuiltNativeLibraryDir], + prebuilt_native_library_dirs = list[PrebuiltNativeLibraryDir], # Indicates which shared lib producing targets are included in the binary. Used by instrumentation tests # to exclude those from the test apk. 
- apk_under_test_shared_libraries = list[TargetLabel], - exopackage_info = ["ExopackageNativeInfo", None], + shared_libraries = list[TargetLabel], + exopackage_info = [ExopackageNativeInfo, None], root_module_native_lib_assets = list[Artifact], non_root_module_native_lib_assets = list[Artifact], native_libs_for_primary_apk = list[Artifact], generated_java_code = list[JavaLibraryInfo], + unstripped_shared_libraries = [Artifact, None], ) AndroidBinaryResourcesInfo = record( @@ -65,8 +66,10 @@ AndroidBinaryResourcesInfo = record( manifest = Artifact, # per-module manifests (packaged as assets) module_manifests = list[Artifact], + # per-module assets APKs (for .aabs only) + module_assets = Artifact | None, # zip containing any strings packaged as assets - packaged_string_assets = [Artifact, None], + packaged_string_assets = Artifact | None, # "APK" containing resources to be used by the Android binary primary_resources_apk = Artifact, # proguard config needed to retain used resources @@ -74,13 +77,13 @@ AndroidBinaryResourcesInfo = record( # R.java jars containing all the linked resources r_dot_java_infos = list[RDotJavaInfo], # directory containing filtered string resources files - string_source_map = [Artifact, None], + string_source_map = Artifact | None, # directory containing filtered string resources files for Voltron language packs - voltron_string_source_map = [Artifact, None], + voltron_string_source_map = Artifact | None, # list of jars that could contain resources that should be packaged into the APK jar_files_that_may_contain_resources = list[Artifact], # The resource infos that are used in this APK - unfiltered_resource_infos = list["AndroidResourceInfo"], + unfiltered_resource_infos = list, # list[AndroidResourceInfo] ) # Information about an `android_build_config` @@ -95,6 +98,7 @@ AndroidBuildConfigInfo = provider( fields = { "package": str, "build_config_fields": list[BuildConfigField], + "values_file": provider_field(typing.Any, default = None), }, ) @@ -110,6 +114,8 @@ AndroidApkInfo = provider( fields = { "apk": provider_field(typing.Any, default = None), "manifest": provider_field(typing.Any, default = None), + "materialized_artifacts": provider_field(typing.Any, default = None), + "unstripped_shared_libraries": provider_field(typing.Any, default = None), # artifact }, ) @@ -117,6 +123,7 @@ AndroidAabInfo = provider( fields = { "aab": provider_field(typing.Any, default = None), "manifest": provider_field(typing.Any, default = None), + "materialized_artifacts": provider_field(typing.Any, default = None), }, ) @@ -158,6 +165,7 @@ ResourceInfoTSet = transitive_set() DepsInfo = record( name = TargetLabel, deps = list[TargetLabel], + for_primary_apk = bool, ) AndroidPackageableInfo = provider( @@ -213,11 +221,11 @@ ExportedAndroidResourceInfo = provider( DexFilesInfo = record( primary_dex = Artifact, - primary_dex_class_names = [Artifact, None], + primary_dex_class_names = Artifact | None, root_module_secondary_dex_dirs = list[Artifact], non_root_module_secondary_dex_dirs = list[Artifact], secondary_dex_exopackage_info = [ExopackageDexInfo, None], - proguard_text_files_path = [Artifact, None], + proguard_text_files_path = Artifact | None, ) ExopackageInfo = record( @@ -240,9 +248,10 @@ def merge_android_packageable_info( actions: AnalysisActions, deps: list[Dependency], build_config_info: [AndroidBuildConfigInfo, None] = None, - manifest: [Artifact, None] = None, + manifest: Artifact | None = None, prebuilt_native_library_dir: [PrebuiltNativeLibraryDir, None] = None, - 
resource_info: [AndroidResourceInfo, None] = None) -> AndroidPackageableInfo: + resource_info: [AndroidResourceInfo, None] = None, + for_primary_apk: bool = False) -> AndroidPackageableInfo: android_packageable_deps = filter(None, [x.get(AndroidPackageableInfo) for x in deps]) build_config_infos = _get_transitive_set( @@ -258,6 +267,7 @@ def merge_android_packageable_info( DepsInfo( name = label.raw_target(), deps = [dep.target_label for dep in android_packageable_deps], + for_primary_apk = for_primary_apk, ), AndroidDepsTSet, ) diff --git a/prelude/android/android_resource.bzl b/prelude/android/android_resource.bzl index e51a21cfe9..120a2a38d7 100644 --- a/prelude/android/android_resource.bzl +++ b/prelude/android/android_resource.bzl @@ -6,13 +6,14 @@ # of this source tree. load("@prelude//java:java_providers.bzl", "get_java_packaging_info") +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:expect.bzl", "expect") load(":android_providers.bzl", "AndroidResourceInfo", "ExportedAndroidResourceInfo", "RESOURCE_PRIORITY_NORMAL", "merge_android_packageable_info") load(":android_toolchain.bzl", "AndroidToolchainInfo") JAVA_PACKAGE_FILENAME = "java_package.txt" -def _convert_to_artifact_dir(ctx: AnalysisContext, attr: [Dependency, dict, Artifact, None], attr_name: str) -> [Artifact, None]: +def _convert_to_artifact_dir(ctx: AnalysisContext, attr: [Dependency, dict, Artifact, None], attr_name: str) -> Artifact | None: if isinstance(attr, Dependency): expect(len(attr[DefaultInfo].default_outputs) == 1, "Expect one default output from build dep of attr {}!".format(attr_name)) return attr[DefaultInfo].default_outputs[0] @@ -82,20 +83,20 @@ def aapt2_compile( android_toolchain: AndroidToolchainInfo, skip_crunch_pngs: bool = False, identifier: [str, None] = None) -> Artifact: - aapt2_command = cmd_args(android_toolchain.aapt2) - aapt2_command.add("compile") - aapt2_command.add("--legacy") + aapt2_command = [cmd_args(android_toolchain.aapt2)] + aapt2_command.append("compile") + aapt2_command.append("--legacy") if skip_crunch_pngs: - aapt2_command.add("--no-crunch") - aapt2_command.add(["--dir", resources_dir]) + aapt2_command.append("--no-crunch") + aapt2_command.extend(["--dir", resources_dir]) aapt2_output = ctx.actions.declare_output("{}_resources.flata".format(identifier) if identifier else "resources.flata") - aapt2_command.add("-o", aapt2_output.as_output()) + aapt2_command.extend(["-o", aapt2_output.as_output()]) - ctx.actions.run(aapt2_command, category = "aapt2_compile", identifier = identifier) + ctx.actions.run(cmd_args(aapt2_command), category = "aapt2_compile", identifier = identifier) return aapt2_output -def _get_package(ctx: AnalysisContext, package: [str, None], manifest: [Artifact, None]) -> Artifact: +def _get_package(ctx: AnalysisContext, package: [str, None], manifest: Artifact | None) -> Artifact: if package: return ctx.actions.write(JAVA_PACKAGE_FILENAME, package) else: @@ -104,9 +105,13 @@ def _get_package(ctx: AnalysisContext, package: [str, None], manifest: [Artifact def extract_package_from_manifest(ctx: AnalysisContext, manifest: Artifact) -> Artifact: r_dot_java_package = ctx.actions.declare_output(JAVA_PACKAGE_FILENAME) - extract_package_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo]) - extract_package_cmd.add(["--manifest-path", manifest]) - extract_package_cmd.add(["--package-output", r_dot_java_package.as_output()]) + extract_package_cmd = cmd_args( + 
ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo], + "--manifest-path", + manifest, + "--package-output", + r_dot_java_package.as_output(), + ) ctx.actions.run(extract_package_cmd, category = "android_extract_package") @@ -125,10 +130,9 @@ def get_text_symbols( dep_symbols = _get_dep_symbols(deps) dep_symbol_paths.add(dep_symbols) - dep_symbol_paths_file, _ = ctx.actions.write("{}_dep_symbol_paths_file".format(identifier) if identifier else "dep_symbol_paths_file", dep_symbol_paths, allow_args = True) + dep_symbol_paths_file = argfile(actions = ctx.actions, name = "{}_dep_symbol_paths_file".format(identifier) if identifier else "dep_symbol_paths_file", args = dep_symbol_paths, allow_args = True) mini_aapt_cmd.add(["--dep-symbol-paths", dep_symbol_paths_file]) - mini_aapt_cmd.hidden(dep_symbols) text_symbols = ctx.actions.declare_output("{}_R.txt".format(identifier) if identifier else "R.txt") mini_aapt_cmd.add(["--output-path", text_symbols.as_output()]) diff --git a/prelude/android/android_toolchain.bzl b/prelude/android/android_toolchain.bzl index b7da69cc71..9113911fc1 100644 --- a/prelude/android/android_toolchain.bzl +++ b/prelude/android/android_toolchain.bzl @@ -19,12 +19,13 @@ AndroidToolchainInfo = provider( "aidl": provider_field(typing.Any, default = None), "android_jar": provider_field(typing.Any, default = None), "android_bootclasspath": provider_field(typing.Any, default = None), + "android_optional_jars": provider_field(typing.Any, default = None), "apk_builder": provider_field(typing.Any, default = None), "apk_module_graph": provider_field(typing.Any, default = None), "app_without_resources_stub": provider_field(typing.Any, default = None), + "bundle_apks_builder": provider_field(typing.Any, default = None), "bundle_builder": provider_field(typing.Any, default = None), "combine_native_library_dirs": provider_field(typing.Any, default = None), - "compress_libraries": provider_field(typing.Any, default = None), "d8_command": provider_field(typing.Any, default = None), "exo_resources_rewriter": provider_field(typing.Any, default = None), "exopackage_agent_apk": provider_field(typing.Any, default = None), @@ -49,6 +50,7 @@ AndroidToolchainInfo = provider( "mini_aapt": provider_field(typing.Any, default = None), "native_libs_as_assets_metadata": provider_field(typing.Any, default = None), "optimized_proguard_config": provider_field(typing.Any, default = None), + "p7zip": provider_field(typing.Any, default = None), "package_meta_inf_version_files": provider_field(typing.Any, default = None), "package_strings_as_assets": provider_field(typing.Any, default = None), "prebuilt_aar_resources_have_low_priority": provider_field(typing.Any, default = None), diff --git a/prelude/android/apk_genrule.bzl b/prelude/android/apk_genrule.bzl index fe91f284a5..926beae709 100644 --- a/prelude/android/apk_genrule.bzl +++ b/prelude/android/apk_genrule.bzl @@ -8,19 +8,30 @@ load("@prelude//:genrule.bzl", "process_genrule") load("@prelude//android:android_apk.bzl", "get_install_info") load("@prelude//android:android_providers.bzl", "AndroidAabInfo", "AndroidApkInfo", "AndroidApkUnderTestInfo") +load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:bundletool_util.bzl", "derive_universal_apk") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") load("@prelude//utils:expect.bzl", "expect") load("@prelude//java/class_to_srcs.bzl", "JavaClassToSourceMapInfo") def apk_genrule_impl(ctx: AnalysisContext) -> list[Provider]: 
expect((ctx.attrs.apk == None) != (ctx.attrs.aab == None), "Exactly one of 'apk' and 'aab' must be specified") + input_android_apk_under_test_info = None + input_unstripped_shared_libraries = None if ctx.attrs.apk != None: # TODO(T104150125) The underlying APK should not have exopackage enabled input_android_apk_info = ctx.attrs.apk[AndroidApkInfo] expect(input_android_apk_info != None, "'apk' attribute must be an Android APK!") input_apk = input_android_apk_info.apk input_manifest = input_android_apk_info.manifest + input_materialized_artifacts = input_android_apk_info.materialized_artifacts + input_unstripped_shared_libraries = input_android_apk_info.unstripped_shared_libraries input_android_apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo] + + env_vars = { + "APK": cmd_args(input_apk), + } else: input_android_aab_info = ctx.attrs.aab[AndroidAabInfo] expect(input_android_aab_info != None, "'aab' attribute must be an Android Bundle!") @@ -28,35 +39,94 @@ def apk_genrule_impl(ctx: AnalysisContext) -> list[Provider]: # It's not an APK, but buck1 does this so we do it too for compatibility input_apk = input_android_aab_info.aab input_manifest = input_android_aab_info.manifest + input_materialized_artifacts = input_android_aab_info.materialized_artifacts - env_vars = { - "APK": cmd_args(input_apk), - } + env_vars = { + "AAB": cmd_args(input_apk), + } - # Like buck1, we ignore the 'out' attribute and construct the output path ourselves. - output_apk_name = "{}.apk".format(ctx.label.name) + genrule_providers = process_genrule(ctx, ctx.attrs.out, ctx.attrs.outs, env_vars, other_outputs = input_materialized_artifacts) - genrule_providers = process_genrule(ctx, output_apk_name, None, env_vars) + genrule_default_info = filter(lambda x: isinstance(x, DefaultInfo), genrule_providers) expect( - len(genrule_providers) == 1 and isinstance(genrule_providers[0], DefaultInfo), - "Expecting just a single DefaultInfo, but got {}".format(genrule_providers), + len(genrule_default_info) == 1, + "Expecting a single DefaultInfo, but got {}", + genrule_default_info, ) - output_apk = genrule_providers[0].default_outputs[0] - class_to_src_map = [ctx.attrs.apk[JavaClassToSourceMapInfo]] if (ctx.attrs.apk and JavaClassToSourceMapInfo in ctx.attrs.apk) else [] + genrule_default_output = genrule_default_info[0].default_outputs[0] + genrule_default_output_is_aab = genrule_default_output.extension == ".aab" + genrule_default_output_is_apk = genrule_default_output.extension == ".apk" - install_info = get_install_info( - ctx, - output_apk = output_apk, - manifest = input_manifest, - exopackage_info = None, + expect( + genrule_default_output_is_aab or genrule_default_output_is_apk, + "apk_genrule must output a '.apk' or '.aab' file, but got {}", + genrule_default_info, ) - return genrule_providers + [ - AndroidApkInfo( - apk = output_apk, - manifest = input_manifest, - ), - install_info, - ] + filter(None, [input_android_apk_under_test_info]) + class_to_src_map + if ctx.attrs.aab: + if genrule_default_output_is_aab: + output_aab_info = AndroidAabInfo( + aab = genrule_default_output, + manifest = input_manifest, + materialized_artifacts = input_materialized_artifacts, + ) + output_apk = None + else: + output_aab_info = None + output_apk = genrule_default_output + + if ctx.attrs.use_derived_apk: + expect(genrule_default_output_is_aab, "Default genrule output must end in '.aab' if use_derived_apk is True.") + + output_apk = derive_universal_apk( + ctx = ctx, + android_toolchain = 
ctx.attrs._android_toolchain[AndroidToolchainInfo], + app_bundle = genrule_default_output, + keystore = ctx.attrs.keystore[KeystoreInfo] if ctx.attrs.keystore else None, + ) + default_providers = [ + DefaultInfo( + default_output = output_apk, + other_outputs = input_materialized_artifacts + genrule_default_info[0].other_outputs, + sub_targets = { + "aab": [DefaultInfo( + default_outputs = [genrule_default_output], + )], + }, + ), + ] + filter(lambda x: not isinstance(x, DefaultInfo), genrule_providers) + else: + default_providers = genrule_providers + + else: + default_providers = genrule_providers + expect(genrule_default_output_is_apk, "apk_genrule output must end in '.apk'") + output_apk = genrule_default_output + output_aab_info = None + + class_to_src_map = [ctx.attrs.apk[JavaClassToSourceMapInfo]] if (ctx.attrs.apk and JavaClassToSourceMapInfo in ctx.attrs.apk) else [] + + if output_apk: + apk_providers = [ + AndroidApkInfo( + apk = output_apk, + manifest = input_manifest, + materialized_artifacts = input_materialized_artifacts, + unstripped_shared_libraries = input_unstripped_shared_libraries, + ), + get_install_info( + ctx, + output_apk = output_apk, + manifest = input_manifest, + exopackage_info = None, + ), + ] + else: + apk_providers = [] + + aab_providers = filter(None, [output_aab_info]) + apk_under_test_providers = filter(None, [input_android_apk_under_test_info]) + + return default_providers + apk_providers + aab_providers + apk_under_test_providers + class_to_src_map diff --git a/prelude/android/bundletool_util.bzl b/prelude/android/bundletool_util.bzl new file mode 100644 index 0000000000..9105e1ccbe --- /dev/null +++ b/prelude/android/bundletool_util.bzl @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") # @unused used as type + +def derive_universal_apk( + ctx: AnalysisContext, + android_toolchain: AndroidToolchainInfo, + app_bundle: Artifact, + keystore: [KeystoreInfo, None]) -> Artifact: + output_apk = ctx.actions.declare_output("universal.apk") + + bundle_apks_builder_args = cmd_args([ + android_toolchain.bundle_apks_builder[RunInfo], + "--input-bundle", + app_bundle, + "--p7zip", + android_toolchain.p7zip, + "--aapt2", + android_toolchain.aapt2, + "--zipalign", + android_toolchain.zipalign[RunInfo], + "--output-apk", + output_apk.as_output(), + ]) + + if keystore: + bundle_apks_builder_args.add(cmd_args([ + "--keystore", + keystore.store, + "--keystore-properties", + keystore.properties, + ])) + + ctx.actions.run(bundle_apks_builder_args, category = "bundle_build", identifier = "build_universal_apk") + + return output_apk diff --git a/prelude/android/configuration.bzl b/prelude/android/configuration.bzl index ea18b6d51f..70ad1ef967 100644 --- a/prelude/android/configuration.bzl +++ b/prelude/android/configuration.bzl @@ -23,17 +23,16 @@ load("@prelude//utils:expect.bzl", "expect") # platforms). We only use the "arm64" native libraries if it is one of the specified platforms. We # "throw away" the non-native libraries for all other configured sub-graphs. 
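Taken together, the `apk_genrule` rework above means the rule now accepts exactly one of `apk`/`aab`, exposes the input to the genrule script as `$APK` or `$AAB` respectively, honors the user-declared `out`/`outs` instead of synthesizing an output name, and can derive a universal APK from an `.aab` via bundletool. An illustrative target under those assumptions (the labels and script are invented; `bash` and `out` are the standard genrule attributes the implementation forwards to `process_genrule`):

    apk_genrule(
        name = "resigned_bundle",
        aab = ":my_app_bundle",        # exactly one of 'apk' / 'aab' may be set
        bash = 'cp "$AAB" "$OUT"',     # $AAB points at the input bundle
        out = "resigned.aab",          # use_derived_apk requires a '.aab' default output
        use_derived_apk = True,        # additionally build a universal APK from the bundle
        keystore = ":debug_keystore",  # optional; forwarded to derive_universal_apk
    )

When `use_derived_apk` is set, the derived APK becomes the default output and the bundle itself stays reachable through the `aab` sub-target. The configuration change below is in the same spirit of removing hard-coded defaults: `read_root_config("build", "default_platform", ...)` lets a repository choose the split transition's default platform with a `default_platform` key in the `[build]` section of its .buckconfig.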
+_DEFAULT_PLATFORM = "config//platform/android:arm64-fbsource" + _REFS = { "arm64": "config//cpu/constraints:arm64", "armv7": "config//cpu/constraints:arm32", "build_only_native_code": "prelude//android/constraints:build_only_native_code", "building_android_binary": "prelude//os:building_android_binary", "cpu": "config//cpu/constraints:cpu", - "default_platform": "config//platform/android:x86_32-fbsource", "maybe_build_only_native_code": "prelude//android/constraints:maybe_build_only_native_code", "maybe_building_android_binary": "prelude//os:maybe_building_android_binary", - "maybe_merge_native_libraries": "config//features/android/constraints:maybe_merge_native_libraries", - "merge_native_libraries": "config//features/android/constraints:merge_native_libraries", "min_sdk_version": "prelude//android/constraints:min_sdk_version", "x86": "config//cpu/constraints:x86_32", "x86_64": "config//cpu/constraints:x86_64", @@ -42,6 +41,8 @@ for min_sdk in get_min_sdk_version_range(): constraint_value_name = get_min_sdk_version_constraint_value_name(min_sdk) _REFS[constraint_value_name] = "prelude//android/constraints:{}".format(constraint_value_name) +_REFS["default_platform"] = read_root_config("build", "default_platform", _DEFAULT_PLATFORM) + def _cpu_split_transition_impl( platform: PlatformInfo, refs: struct, @@ -57,17 +58,13 @@ def _cpu_split_transition_impl( refs, cpu_filters, attrs.min_sdk_version, - attrs.native_library_merge_map, - attrs.native_library_merge_sequence, ) def _cpu_split_transition( platform: PlatformInfo, refs: struct, cpu_filters: list[str], - min_sdk_version: [int, None], - native_library_merge_map: [dict[str, list[str]], None], - native_library_merge_sequence: [list[list[tuple] | tuple], None]) -> dict[str, PlatformInfo]: + min_sdk_version: [int, None]) -> dict[str, PlatformInfo]: cpu = refs.cpu x86 = refs.x86[ConstraintValueInfo] x86_64 = refs.x86_64[ConstraintValueInfo] @@ -101,9 +98,6 @@ def _cpu_split_transition( base_constraints[refs.maybe_building_android_binary[ConstraintSettingInfo].label] = refs.building_android_binary[ConstraintValueInfo] - if native_library_merge_map or native_library_merge_sequence: - base_constraints[refs.maybe_merge_native_libraries[ConstraintSettingInfo].label] = refs.merge_native_libraries[ConstraintValueInfo] - if min_sdk_version: base_constraints[refs.min_sdk_version[ConstraintSettingInfo].label] = _get_min_sdk_constraint_value(min_sdk_version, refs) @@ -136,8 +130,6 @@ cpu_split_transition = transition( attrs = [ "cpu_filters", "min_sdk_version", - "native_library_merge_map", - "native_library_merge_sequence", "_is_force_single_cpu", "_is_force_single_default_cpu", ], @@ -157,8 +149,6 @@ cpu_transition = transition( attrs = [ "cpu_filters", "min_sdk_version", - "native_library_merge_map", - "native_library_merge_sequence", "_is_force_single_cpu", "_is_force_single_default_cpu", ], diff --git a/prelude/android/constraints/BUCK.v2 b/prelude/android/constraints/BUCK.v2 index 4efea2b3e2..a211f28db0 100644 --- a/prelude/android/constraints/BUCK.v2 +++ b/prelude/android/constraints/BUCK.v2 @@ -1,28 +1,35 @@ load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range") +load("@prelude//utils:source_listing.bzl", "source_listing") -native.constraint_setting( +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + +prelude.constraint_setting( name = "maybe_build_only_native_code", visibility = ["PUBLIC"], ) -native.constraint_value( 
+prelude.constraint_value( name = "build_only_native_code", constraint_setting = ":maybe_build_only_native_code", visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "maybe_merge_native_libraries", visibility = ["PUBLIC"], ) -native.constraint_value( +prelude.constraint_value( name = "merge_native_libraries", constraint_setting = ":maybe_merge_native_libraries", visibility = ["PUBLIC"], ) -native.filegroup( +prelude.filegroup( name = "files", srcs = glob( ["**"], @@ -30,13 +37,13 @@ native.filegroup( visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "min_sdk_version", visibility = ["PUBLIC"], ) [ - native.constraint_value( + prelude.constraint_value( name = get_min_sdk_version_constraint_value_name(min_sdk), constraint_setting = ":min_sdk_version", ) diff --git a/prelude/android/cpu_filters.bzl b/prelude/android/cpu_filters.bzl index 796d6a16bc..814a38c2da 100644 --- a/prelude/android/cpu_filters.bzl +++ b/prelude/android/cpu_filters.bzl @@ -14,7 +14,7 @@ CPU_FILTER_TO_ABI_DIRECTORY = { ALL_CPU_FILTERS = CPU_FILTER_TO_ABI_DIRECTORY.keys() -CPU_FILTER_FOR_DEFAULT_PLATFORM = "x86" +CPU_FILTER_FOR_DEFAULT_PLATFORM = "arm64" # The "primary platform" is the one that we use for all # the non-native targets. We keep this consistent regardless diff --git a/prelude/android/dex_rules.bzl b/prelude/android/dex_rules.bzl index af03ea65d7..f1928a4df6 100644 --- a/prelude/android/dex_rules.bzl +++ b/prelude/android/dex_rules.bzl @@ -11,6 +11,7 @@ load("@prelude//android:voltron.bzl", "ROOT_MODULE", "get_apk_module_graph_info" load("@prelude//java:dex.bzl", "DexLibraryInfo", "get_dex_produced_from_java_library") load("@prelude//java:dex_toolchain.bzl", "DexToolchainInfo") load("@prelude//java:java_library.bzl", "compile_to_jar") +load("@prelude//utils:argfile.bzl", "argfile", "at_argfile") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten") load("@prelude//paths.bzl", "paths") @@ -102,15 +103,14 @@ def get_single_primary_dex( output_dex_file = ctx.actions.declare_output("classes.dex") d8_cmd.add(["--output-dex-file", output_dex_file.as_output()]) - jar_to_dex_file = ctx.actions.write("jar_to_dex_file.txt", java_library_jars) + jar_to_dex_file = argfile(actions = ctx.actions, name = "jar_to_dex_file.txt", args = java_library_jars) d8_cmd.add(["--files-to-dex-list", jar_to_dex_file]) - d8_cmd.hidden(java_library_jars) d8_cmd.add(["--android-jar", android_toolchain.android_jar]) if not is_optimized: d8_cmd.add("--no-optimize") - ctx.actions.run(d8_cmd, category = "d8", identifier = "{}:{}".format(ctx.label.package, ctx.label.name)) + ctx.actions.run(d8_cmd, category = "get_single_primary_dex", identifier = "{}:{}".format(ctx.label.package, ctx.label.name)) return DexFilesInfo( primary_dex = output_dex_file, @@ -126,10 +126,10 @@ def get_multi_dex( android_toolchain: AndroidToolchainInfo, java_library_jars_to_owners: dict[Artifact, TargetLabel], primary_dex_patterns: list[str], - proguard_configuration_output_file: [Artifact, None] = None, - proguard_mapping_output_file: [Artifact, None] = None, + proguard_configuration_output_file: Artifact | None = None, + proguard_mapping_output_file: Artifact | None = None, is_optimized: bool = False, - apk_module_graph_file: [Artifact, None] = None) -> DexFilesInfo: + apk_module_graph_file: Artifact | None = None) -> DexFilesInfo: expect( not _is_exopackage_enabled_for_secondary_dex(ctx), "secondary dex exopackage can only be enabled on pre-dexed 
builds!", @@ -154,7 +154,7 @@ def get_multi_dex( secondary_dex_dir_srcs = {} all_jars = flatten(module_to_jars.values()) - all_jars_list = ctx.actions.write("all_jars_classpath.txt", all_jars) + all_jars_list = argfile(actions = ctx.actions, name = "all_jars_classpath.txt", args = all_jars) for module, jars in module_to_jars.items(): multi_dex_cmd = cmd_args(android_toolchain.multi_dex_command[RunInfo]) secondary_dex_compression_cmd = cmd_args(android_toolchain.secondary_dex_compression_command[RunInfo]) @@ -175,9 +175,8 @@ def get_multi_dex( android_toolchain, ) - primary_dex_jar_to_dex_file = ctx.actions.write("primary_dex_jars_to_dex_file_for_root_module.txt", primary_dex_jars) + primary_dex_jar_to_dex_file = argfile(actions = ctx.actions, name = "primary_dex_jars_to_dex_file_for_root_module.txt", args = primary_dex_jars) multi_dex_cmd.add("--primary-dex-files-to-dex-list", primary_dex_jar_to_dex_file) - multi_dex_cmd.hidden(primary_dex_jars) multi_dex_cmd.add("--minimize-primary-dex") else: jars_to_dex = jars @@ -194,16 +193,14 @@ def get_multi_dex( secondary_dex_compression_cmd.add("--secondary-dex-output-dir", secondary_dex_dir_for_module.as_output()) jars_to_dex = jars multi_dex_cmd.add("--classpath-files", all_jars_list) - multi_dex_cmd.hidden(all_jars) multi_dex_cmd.add("--module", module) multi_dex_cmd.add("--canary-class-name", apk_module_graph_info.module_to_canary_class_name_function(module)) secondary_dex_compression_cmd.add("--module", module) secondary_dex_compression_cmd.add("--canary-class-name", apk_module_graph_info.module_to_canary_class_name_function(module)) - jar_to_dex_file = ctx.actions.write("jars_to_dex_file_for_module_{}.txt".format(module), jars_to_dex) + jar_to_dex_file = argfile(actions = ctx.actions, name = "jars_to_dex_file_for_module_{}.txt".format(module), args = jars_to_dex) multi_dex_cmd.add("--files-to-dex-list", jar_to_dex_file) - multi_dex_cmd.hidden(jars_to_dex) multi_dex_cmd.add("--android-jar", android_toolchain.android_jar) if not is_optimized: @@ -222,7 +219,7 @@ def get_multi_dex( ctx.actions.symlinked_dir(outputs[secondary_dex_dir], secondary_dex_dir_srcs) - ctx.actions.dynamic_output(dynamic = inputs, inputs = [], outputs = outputs, f = do_multi_dex) + ctx.actions.dynamic_output(dynamic = inputs, inputs = [], outputs = [o.as_output() for o in outputs], f = do_multi_dex) return DexFilesInfo( primary_dex = primary_dex_file, @@ -238,8 +235,8 @@ def _get_primary_dex_and_secondary_dex_jars( jars: list[Artifact], java_library_jars_to_owners: dict[Artifact, TargetLabel], primary_dex_patterns_file: Artifact, - proguard_configuration_output_file: [Artifact, None], - proguard_mapping_output_file: [Artifact, None], + proguard_configuration_output_file: Artifact | None, + proguard_mapping_output_file: Artifact | None, android_toolchain: AndroidToolchainInfo) -> (list[Artifact], list[Artifact]): primary_dex_jars = [] secondary_dex_jars = [] @@ -321,7 +318,7 @@ DexInputsWithClassNamesAndWeightEstimatesFile = record( SecondaryDexMetadataConfig = record( secondary_dex_compression = str, secondary_dex_metadata_path = [str, None], - secondary_dex_metadata_file = [Artifact, None], + secondary_dex_metadata_file = Artifact | None, secondary_dex_metadata_line = Artifact, secondary_dex_canary_class_name = str, ) @@ -365,8 +362,7 @@ def _filter_pre_dexed_libs( batch_number: int) -> DexInputsWithClassNamesAndWeightEstimatesFile: weight_estimate_and_filtered_class_names_file = 
actions.declare_output("class_names_and_weight_estimates_for_batch_{}".format(batch_number)) - filter_dex_cmd = cmd_args([ - android_toolchain.filter_dex_class_names[RunInfo], + filter_dex_cmd_args = cmd_args([ "--primary-dex-patterns", primary_dex_patterns_file, "--dex-target-identifiers", @@ -378,6 +374,15 @@ def _filter_pre_dexed_libs( "--output", weight_estimate_and_filtered_class_names_file.as_output(), ]) + + filter_dex_cmd = cmd_args([ + android_toolchain.filter_dex_class_names[RunInfo], + at_argfile( + actions = actions, + name = "filter_dex_cmd_args_{}".format(batch_number), + args = filter_dex_cmd_args, + ), + ]) actions.run(filter_dex_cmd, category = "filter_dex", identifier = "batch_{}".format(batch_number)) return DexInputsWithClassNamesAndWeightEstimatesFile(libs = pre_dexed_libs, weight_estimate_and_filtered_class_names_file = weight_estimate_and_filtered_class_names_file) @@ -393,7 +398,7 @@ def merge_to_split_dex( android_toolchain: AndroidToolchainInfo, pre_dexed_libs: list[DexLibraryInfo], split_dex_merge_config: SplitDexMergeConfig, - apk_module_graph_file: [Artifact, None] = None) -> DexFilesInfo: + apk_module_graph_file: Artifact | None = None) -> DexFilesInfo: is_exopackage_enabled_for_secondary_dex = _is_exopackage_enabled_for_secondary_dex(ctx) if is_exopackage_enabled_for_secondary_dex: expect( @@ -549,7 +554,7 @@ def merge_to_split_dex( metadata_lines.append(artifacts[metadata_line_artifact].read_string().strip()) ctx.actions.write(outputs[metadata_dot_txt], metadata_lines) - ctx.actions.dynamic_output(dynamic = flatten(metadata_line_artifacts_by_module.values()), inputs = [], outputs = metadata_dot_txt_files_by_module.values(), f = write_metadata_dot_txts) + ctx.actions.dynamic_output(dynamic = flatten(metadata_line_artifacts_by_module.values()), inputs = [], outputs = [o.as_output() for o in metadata_dot_txt_files_by_module.values()], f = write_metadata_dot_txts) ctx.actions.symlinked_dir( outputs[root_module_secondary_dexes_dir], @@ -560,7 +565,7 @@ def merge_to_split_dex( non_root_module_secondary_dexes_for_symlinking, ) - ctx.actions.dynamic_output(dynamic = input_artifacts, inputs = [], outputs = outputs, f = merge_pre_dexed_libs) + ctx.actions.dynamic_output(dynamic = input_artifacts, inputs = [], outputs = [o.as_output() for o in outputs], f = merge_pre_dexed_libs) if is_exopackage_enabled_for_secondary_dex: root_module_secondary_dex_dirs = [] @@ -587,15 +592,14 @@ def _merge_dexes( output_dex_file: Artifact, pre_dexed_artifacts: list[Artifact], pre_dexed_artifacts_file: Artifact, - class_names_to_include: [Artifact, None] = None, - secondary_output_dex_file: [Artifact, None] = None, + class_names_to_include: Artifact | None = None, + secondary_output_dex_file: Artifact | None = None, secondary_dex_metadata_config: [SecondaryDexMetadataConfig, None] = None): d8_cmd = cmd_args(android_toolchain.d8_command[RunInfo]) d8_cmd.add(["--output-dex-file", output_dex_file.as_output()]) - pre_dexed_artifacts_to_dex_file = ctx.actions.write(pre_dexed_artifacts_file.as_output(), pre_dexed_artifacts) + pre_dexed_artifacts_to_dex_file = argfile(actions = ctx.actions, name = pre_dexed_artifacts_file, args = pre_dexed_artifacts) d8_cmd.add(["--files-to-dex-list", pre_dexed_artifacts_to_dex_file]) - d8_cmd.hidden(pre_dexed_artifacts) d8_cmd.add(["--android-jar", android_toolchain.android_jar]) d8_cmd.add(_DEX_MERGE_OPTIONS) @@ -615,7 +619,7 @@ def _merge_dexes( ctx.actions.run( d8_cmd, - category = "d8", + category = "merge_dexes", identifier = "{}:{} 
{}".format(ctx.label.package, ctx.label.name, output_dex_file.short_path), ) diff --git a/prelude/android/gen_aidl.bzl b/prelude/android/gen_aidl.bzl index 2e5e0297f3..a2b56d27b7 100644 --- a/prelude/android/gen_aidl.bzl +++ b/prelude/android/gen_aidl.bzl @@ -15,14 +15,14 @@ _AidlSourceInfo = provider(fields = { def gen_aidl_impl(ctx: AnalysisContext) -> list[Provider]: android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] - aidl_cmd = cmd_args(android_toolchain.aidl) - aidl_cmd.add("-p", android_toolchain.framework_aidl_file) - aidl_cmd.add("-I", ctx.attrs.import_path) - for path in ctx.attrs.import_paths: - aidl_cmd.add("-I", path) - - # We need the `aidl_srcs` files - otherwise the search on the `import_path` won't find anything. - aidl_cmd.hidden(ctx.attrs.aidl_srcs) + aidl_cmd = cmd_args( + [android_toolchain.aidl] + + ["-p", android_toolchain.framework_aidl_file] + + ["-I", ctx.attrs.import_path] + + [a for path in ctx.attrs.import_paths for a in ["-I", path]], + # We need the `aidl_srcs` files - otherwise the search on the `import_path` won't find anything. + hidden = ctx.attrs.aidl_srcs, + ) # Allow gen_aidl rules to depend on other gen_aidl rules, and make the source files from the # deps accessible in this context. This is an alternative to adding dependent files in @@ -35,7 +35,7 @@ def gen_aidl_impl(ctx: AnalysisContext) -> list[Provider]: else: warning("`{}` dependency `{}` is not a `gen_aidl` rule and will be ignored".format(ctx.label, dep.label)) - aidl_cmd.hidden(dep_srcs) + aidl_cmd.add(cmd_args(hidden = dep_srcs)) aidl_out = ctx.actions.declare_output("aidl_output", dir = True) aidl_cmd.add("-o", aidl_out.as_output()) diff --git a/prelude/android/preprocess_java_classes.bzl b/prelude/android/preprocess_java_classes.bzl index a8e04c3734..4f4e8cc705 100644 --- a/prelude/android/preprocess_java_classes.bzl +++ b/prelude/android/preprocess_java_classes.bzl @@ -6,33 +6,43 @@ # of this source tree. 
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:util.bzl", "EnhancementContext") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//utils:expect.bzl", "expect") -def get_preprocessed_java_classes(ctx: AnalysisContext, input_jars = {"artifact": "target_label"}) -> dict[Artifact, TargetLabel]: +def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: dict[Artifact, TargetLabel]) -> (dict[Artifact, TargetLabel], Artifact | None): if not input_jars: - return {} + return {}, None + + ctx = enhance_ctx.ctx input_srcs = {} output_jars_to_owners = {} output_dir = ctx.actions.declare_output("preprocessed_java_classes/output_dir") + input_jars_to_owners = {} for i, (input_jar, target_label) in enumerate(input_jars.items()): expect(input_jar.extension == ".jar", "Expected {} to have extension .jar!".format(input_jar)) jar_name = "{}_{}".format(i, input_jar.basename) input_srcs[jar_name] = input_jar + input_jars_to_owners[jar_name] = target_label output_jar = output_dir.project(jar_name) output_jars_to_owners[output_jar] = target_label input_dir = ctx.actions.symlinked_dir("preprocessed_java_classes/input_dir", input_srcs) + input_jars_map = ctx.actions.write_json("preprocessed_java_classes/input_jars_map.json", input_jars_to_owners) + materialized_artifacts_dir = ctx.actions.declare_output("preprocessed_java_classes/materialized_artifacts") + android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] env = { "ANDROID_BOOTCLASSPATH": cmd_args( - ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath, + android_toolchain.android_bootclasspath + android_toolchain.android_optional_jars, delimiter = get_path_separator_for_exec_os(ctx), ), "IN_JARS_DIR": cmd_args(input_dir), + "IN_JARS_MAP": cmd_args(input_jars_map), + "MATERIALIZED_ARTIFACTS_DIR": materialized_artifacts_dir.as_output(), "OUT_JARS_DIR": output_dir.as_output(), "PREPROCESS": ctx.attrs.preprocess_java_classes_bash, "ZIP_SCRUBBER": ctx.attrs._java_toolchain[JavaToolchainInfo].zip_scrubber, @@ -46,16 +56,24 @@ def get_preprocessed_java_classes(ctx: AnalysisContext, input_jars = {"artifact" "bash", "-c", # Note: ZIP_SCRUBBER might expand to multiple words, so no quoting there. 
- 'mkdir -p "$OUT_JARS_DIR" && eval "$PREPROCESS" && $ZIP_SCRUBBER --paths-to-scrub "$@"', + 'mkdir -p "$OUT_JARS_DIR" && mkdir -p "$MATERIALIZED_ARTIFACTS_DIR" && eval "$PREPROCESS" && $ZIP_SCRUBBER --paths-to-scrub "$@"', "--", output_jars_file, ] - preprocess_cmd = cmd_args(preprocess_cmd) - preprocess_cmd.hidden([output_jar.as_output() for output_jar in output_jars]) - for dep in ctx.attrs.preprocess_java_classes_deps: - preprocess_cmd.hidden(dep[DefaultInfo].default_outputs + dep[DefaultInfo].other_outputs) + preprocess_cmd = cmd_args( + preprocess_cmd, + hidden = [output_jar.as_output() for output_jar in output_jars] + + [ + dep[DefaultInfo].default_outputs + dep[DefaultInfo].other_outputs + for dep in ctx.attrs.preprocess_java_classes_deps + ], + ) ctx.actions.run(preprocess_cmd, env = env, category = "preprocess_java_classes") - return output_jars_to_owners + enhance_ctx.debug_output("preprocess_java_classes_input_dir", input_dir) + enhance_ctx.debug_output("preprocess_java_classes_input_jars_map", input_jars_map) + enhance_ctx.debug_output("preprocess_java_classes_materialized_artifacts_dir", materialized_artifacts_dir) + + return output_jars_to_owners, materialized_artifacts_dir diff --git a/prelude/android/proguard.bzl b/prelude/android/proguard.bzl index b2a3b8abb4..e51278988d 100644 --- a/prelude/android/proguard.bzl +++ b/prelude/android/proguard.bzl @@ -12,11 +12,12 @@ load( ) load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") +load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:expect.bzl", "expect") ProguardOutput = record( jars_to_owners = dict[Artifact, TargetLabel], - proguard_configuration_output_file = [Artifact, None], + proguard_configuration_output_file = Artifact | None, proguard_mapping_output_file = Artifact, proguard_artifacts = list[Artifact], proguard_hidden_artifacts = list[Artifact], @@ -28,9 +29,9 @@ def _get_proguard_command_line_args( proguard_configs: list[Artifact], additional_library_jars: list[Artifact], mapping: Artifact, - configuration: [Artifact, None], - seeds: [Artifact, None], - usage: [Artifact, None], + configuration: Artifact | None, + seeds: Artifact | None, + usage: Artifact | None, android_toolchain: AndroidToolchainInfo) -> (cmd_args, list[Artifact]): cmd = cmd_args() hidden = [] @@ -55,7 +56,7 @@ def _get_proguard_command_line_args( for jar_input, jar_output in input_jars_to_output_jars.items(): cmd.add("-injars", jar_input, "-outjars", jar_output if jar_output == jar_input else jar_output.as_output()) - library_jars = android_toolchain.android_bootclasspath + additional_library_jars + library_jars = android_toolchain.android_bootclasspath + android_toolchain.android_optional_jars + additional_library_jars cmd.add("-libraryjars") cmd.add(cmd_args(library_jars, delimiter = get_path_separator_for_exec_os(ctx))) hidden.extend(library_jars) @@ -88,25 +89,40 @@ def run_proguard( "-jar", android_toolchain.proguard_jar, ) - run_proguard_cmd.add(cmd_args(command_line_args_file, format = "@{}")) - run_proguard_cmd.hidden(command_line_args) + run_proguard_cmd.add( + cmd_args(command_line_args_file, format = "@{}", hidden = command_line_args), + ) output_jars_file = ctx.actions.write("proguard/output_jars.txt", output_jars) + is_windows = hasattr(ctx.attrs, "_exec_os_type") and ctx.attrs._exec_os_type[OsLookup].platform == "windows" + # Some proguard configs can propagate the "-dontobfuscate" flag which disables # obfuscation and 
prevents the mapping.txt and usage.txt files from being generated. # Scrub all jars emitted from proguard to make them deterministic. - sh_cmd = cmd_args([ - "sh", - "-c", - "touch $1 && touch $2 && $3 && $4 --paths-to-scrub $5 --create-if-not-present", - "--", - mapping_file.as_output(), - usage_file.as_output(), - cmd_args(run_proguard_cmd, delimiter = " "), - cmd_args(ctx.attrs._java_toolchain[JavaToolchainInfo].zip_scrubber, delimiter = " "), - output_jars_file, - ]) + if not is_windows: + sh_cmd = cmd_args([ + "sh", + "-c", + "touch $1 && touch $2 && $3 && $4 --paths-to-scrub $5 --create-if-not-present", + "--", + mapping_file.as_output(), + usage_file.as_output(), + cmd_args(run_proguard_cmd, delimiter = " "), + cmd_args(ctx.attrs._java_toolchain[JavaToolchainInfo].zip_scrubber, delimiter = " "), + output_jars_file, + ]) + else: + sh_cmd = cmd_args([ + "cmd.exe", + "/c", + cmd_args([ + cmd_args([mapping_file.as_output()], format = "echo. > {}"), + cmd_args([usage_file.as_output()], format = "echo. > {}"), + cmd_args(run_proguard_cmd, delimiter = " "), + cmd_args(ctx.attrs._java_toolchain[JavaToolchainInfo].zip_scrubber, "--paths-to-scrub", output_jars_file, "--create-if-not-present", delimiter = " "), + ], delimiter = " && "), + ]) ctx.actions.run(sh_cmd, category = "run_proguard") @@ -116,7 +132,7 @@ def get_proguard_output( ctx: AnalysisContext, input_jars: dict[Artifact, TargetLabel], java_packaging_deps: list[JavaPackagingDep], - aapt_generated_proguard_config: [Artifact, None], + aapt_generated_proguard_config: Artifact | None, additional_library_jars: list[Artifact]) -> ProguardOutput: proguard_configs = [packaging_dep.proguard_config for packaging_dep in java_packaging_deps if packaging_dep.proguard_config] if ctx.attrs.proguard_config: diff --git a/prelude/android/r_dot_java.bzl b/prelude/android/r_dot_java.bzl index a21e69a4aa..071ab419aa 100644 --- a/prelude/android/r_dot_java.bzl +++ b/prelude/android/r_dot_java.bzl @@ -9,15 +9,16 @@ load("@prelude//android:android_providers.bzl", "AndroidResourceInfo", "RDotJava load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//java:java_library.bzl", "compile_to_jar") load("@prelude//java:java_providers.bzl", "JavaClasspathEntry", "JavaLibraryInfo", "derive_compiling_deps") +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:set.bzl", "set") RDotJavaSourceCode = record( r_dot_java_source_code_dir = Artifact, r_dot_java_source_code_zipped = Artifact, - strings_source_code_dir = [Artifact, None], - strings_source_code_zipped = [Artifact, None], - ids_source_code_dir = [Artifact, None], - ids_source_code_zipped = [Artifact, None], + strings_source_code_dir = Artifact | None, + strings_source_code_zipped = Artifact | None, + ids_source_code_dir = Artifact | None, + ids_source_code_zipped = Artifact | None, ) def get_dummy_r_dot_java( @@ -39,7 +40,7 @@ def generate_r_dot_javas( banned_duplicate_resource_types: list[str], uber_r_dot_txt_files: list[Artifact], override_symbols_paths: list[Artifact], - duplicate_resources_allowlist: [Artifact, None], + duplicate_resources_allowlist: Artifact | None, union_package: [str, None], referenced_resources_lists: list[Artifact], generate_strings_and_ids_separately: [bool, None] = True, @@ -107,7 +108,7 @@ def _generate_r_dot_java_source_code( banned_duplicate_resource_types: list[str] = [], uber_r_dot_txt_files: list[Artifact] = [], override_symbols_paths: list[Artifact] = [], - duplicate_resources_allowlist: [Artifact, None] = None, + 
duplicate_resources_allowlist: Artifact | None = None, union_package: [str, None] = None, referenced_resources_lists: list[Artifact] = []) -> RDotJavaSourceCode: merge_resources_cmd = cmd_args(merge_android_resources_tool) @@ -119,8 +120,11 @@ def _generate_r_dot_java_source_code( r_dot_txt_info_file = ctx.actions.write("r_dot_txt_info_file_for_{}.txt".format(identifier), r_dot_txt_info) merge_resources_cmd.add(["--symbol-file-info", r_dot_txt_info_file]) - merge_resources_cmd.hidden([android_resource.r_dot_java_package for android_resource in android_resources]) - merge_resources_cmd.hidden([android_resource.text_symbols for android_resource in android_resources]) + merge_resources_cmd.add(cmd_args( + hidden = + [android_resource.r_dot_java_package for android_resource in android_resources] + + [android_resource.text_symbols for android_resource in android_resources], + )) output_dir = ctx.actions.declare_output("{}_source_code".format(identifier), dir = True) merge_resources_cmd.add(["--output-dir", output_dir.as_output()]) @@ -150,14 +154,12 @@ def _generate_r_dot_java_source_code( merge_resources_cmd.add(["--banned-duplicate-resource-types", banned_duplicate_resource_types_file]) if len(uber_r_dot_txt_files) > 0: - uber_r_dot_txt_files_list = ctx.actions.write("uber_r_dot_txt_files_list", uber_r_dot_txt_files) + uber_r_dot_txt_files_list = argfile(actions = ctx.actions, name = "uber_r_dot_txt_files_list", args = uber_r_dot_txt_files) merge_resources_cmd.add(["--uber-r-dot-txt", uber_r_dot_txt_files_list]) - merge_resources_cmd.hidden(uber_r_dot_txt_files) if len(override_symbols_paths) > 0: - override_symbols_paths_list = ctx.actions.write("override_symbols_paths_list", override_symbols_paths) + override_symbols_paths_list = argfile(actions = ctx.actions, name = "override_symbols_paths_list", args = override_symbols_paths) merge_resources_cmd.add(["--override-symbols", override_symbols_paths_list]) - merge_resources_cmd.hidden(override_symbols_paths) if duplicate_resources_allowlist != None: merge_resources_cmd.add(["--duplicate-resource-allowlist-path", duplicate_resources_allowlist]) @@ -166,9 +168,8 @@ def _generate_r_dot_java_source_code( merge_resources_cmd.add(["--union-package", union_package]) if referenced_resources_lists: - referenced_resources_file = ctx.actions.write("referenced_resources_lists", referenced_resources_lists) + referenced_resources_file = argfile(actions = ctx.actions, name = "referenced_resources_lists", args = referenced_resources_lists) merge_resources_cmd.add(["--referenced-resources-lists", referenced_resources_file]) - merge_resources_cmd.hidden(referenced_resources_lists) ctx.actions.run(merge_resources_cmd, category = "r_dot_java_merge_resources", identifier = identifier) diff --git a/prelude/android/robolectric_test.bzl b/prelude/android/robolectric_test.bzl index 394e76d5ff..b2210de2da 100644 --- a/prelude/android/robolectric_test.bzl +++ b/prelude/android/robolectric_test.bzl @@ -6,9 +6,10 @@ # of this source tree. 
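One contract change above is easy to miss: user-supplied `preprocess_java_classes_bash` scripts now run with two extra environment variables, `IN_JARS_MAP` (a JSON file mapping each staged jar name to its owning target) and `MATERIALIZED_ARTIFACTS_DIR` (a directory the script may fill with additional outputs to materialize), and `ANDROID_BOOTCLASSPATH` now includes the toolchain's optional jars. A minimal script value under those assumptions (the one-liner is invented; the wrapper only guarantees that `$OUT_JARS_DIR` and `$MATERIALIZED_ARTIFACTS_DIR` exist and scrubs whatever lands in `$OUT_JARS_DIR`):

    # Illustrative attribute on a target that opts into jar preprocessing:
    preprocess_java_classes_bash = 'for jar in "$IN_JARS_DIR"/*.jar; do cp "$jar" "$OUT_JARS_DIR/"; done'

The new `preprocess_java_classes_*` debug sub-targets expose the staged input dir, the jars map, and the materialized-artifacts dir for inspection.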
load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary_resources_info") -load("@prelude//android:android_library.bzl", "build_android_library") +load("@prelude//android:android_library.bzl", "build_android_library", "optional_jars") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:java_providers.bzl", "JavaLibraryInfo") load("@prelude//java:java_test.bzl", "build_junit_test") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//utils:expect.bzl", "expect") @@ -45,7 +46,6 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: use_proto_format = False, referenced_resources_lists = [], generate_strings_and_ids_separately = False, - aapt2_min_sdk = ctx.attrs.manifest_entries.get("min_sdk_version", None), aapt2_preferred_density = ctx.attrs.preferred_density_for_binary_resources, ) @@ -69,14 +69,20 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: ".", ]) ctx.actions.run(jar_cmd, category = "test_config_properties_jar_cmd") - extra_cmds.append(cmd_args().hidden(resources_info.primary_resources_apk, resources_info.manifest)) + extra_cmds.append(cmd_args(hidden = [resources_info.primary_resources_apk, resources_info.manifest])) r_dot_javas = [r_dot_java.library_info.library_output.full_library for r_dot_java in resources_info.r_dot_java_infos if r_dot_java.library_info.library_output] expect(len(r_dot_javas) <= 1, "android_library only works with single R.java") - java_providers, _ = build_android_library(ctx, r_dot_java = r_dot_javas[0] if r_dot_javas else None) + extra_sub_targets = {} + if r_dot_javas: + r_dot_java = r_dot_javas[0] + extra_sub_targets["r_dot_java"] = [DefaultInfo(default_output = r_dot_java)] + else: + r_dot_java = None + java_providers, _ = build_android_library(ctx, r_dot_java = r_dot_java, extra_sub_targets = extra_sub_targets) - extra_classpath_entries = [test_config_properties_jar] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath + extra_classpath_entries = [test_config_properties_jar] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath + optional_jars(ctx) extra_classpath_entries.extend(r_dot_javas) external_runner_test_info = build_junit_test( ctx, @@ -87,11 +93,22 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: extra_classpath_entries = extra_classpath_entries, ) - return inject_test_run_info(ctx, external_runner_test_info) + [ - java_providers.java_library_info, + providers = inject_test_run_info(ctx, external_runner_test_info) + [ java_providers.java_library_intellij_info, java_providers.java_packaging_info, java_providers.template_placeholder_info, java_providers.default_info, java_providers.class_to_src_map, ] + + if ctx.attrs.used_as_dependency_deprecated_do_not_use: + providers.append(java_providers.java_library_info) + else: + java_library_without_compiling_deps = JavaLibraryInfo( + compiling_deps = None, + library_output = java_providers.java_library_info.library_output, + output_for_classpath_macro = java_providers.java_library_info.output_for_classpath_macro, + ) + providers.append(java_library_without_compiling_deps) + + return providers diff --git a/prelude/android/tools/BUCK.v2 b/prelude/android/tools/BUCK.v2 index 35e57623c0..9f6de47604 100644 --- a/prelude/android/tools/BUCK.v2 +++ b/prelude/android/tools/BUCK.v2 @@ -1,21 +1,29 @@ -native.python_bootstrap_binary( 
+load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + +prelude.python_bootstrap_binary( name = "unpack_aar", main = "unpack_aar.py", visibility = ["PUBLIC"], deps = [ - ":unpack_aar_lib", "prelude//java/tools:utils_lib", + ":unpack_aar_lib", ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "unpack_aar_lib", srcs = [ "unpack_aar.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_dex", main = "filter_dex.py", visibility = ["PUBLIC"], @@ -24,14 +32,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "filter_dex_lib", srcs = [ "filter_dex.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "combine_native_library_dirs", main = "combine_native_library_dirs.py", visibility = ["PUBLIC"], @@ -40,14 +48,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "combine_native_library_dirs_lib", srcs = [ "combine_native_library_dirs.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_prebuilt_native_library_dir", main = "filter_prebuilt_native_library_dir.py", visibility = ["PUBLIC"], @@ -56,14 +64,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "filter_prebuilt_native_library_dir_lib", srcs = [ "filter_prebuilt_native_library_dir.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "native_libs_as_assets_metadata", main = "native_libs_as_assets_metadata.py", visibility = ["PUBLIC"], @@ -72,20 +80,20 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "native_libs_as_assets_metadata_lib", srcs = [ "native_libs_as_assets_metadata.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "compute_merge_sequence", main = "merge_sequence.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_extra_resources", main = "filter_extra_resources.py", visibility = ["PUBLIC"], @@ -94,7 +102,7 @@ native.python_bootstrap_binary( ], ) -native.zip_file( +prelude.zip_file( name = "app_without_resources_stub", srcs = ["com/facebook/buck_generated/AppWithoutResourcesStub.java"], out = "app_without_resources_stub.src.zip", diff --git a/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java b/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java index 1c8a8df449..9d3de2fc3a 100644 --- a/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java +++ b/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java @@ -1,4 +1,11 @@ -// (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary. +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under both the MIT license found in the + * LICENSE-MIT file in the root directory of this source tree and the Apache + * License, Version 2.0 found in the LICENSE-APACHE file in the root directory + * of this source tree. 
+ */ package com.facebook.buck_generated; diff --git a/prelude/android/tools/combine_native_library_dirs.py b/prelude/android/tools/combine_native_library_dirs.py index a7aa7e5897..bbb52597e9 100644 --- a/prelude/android/tools/combine_native_library_dirs.py +++ b/prelude/android/tools/combine_native_library_dirs.py @@ -51,8 +51,12 @@ def main() -> None: lib, ) - output_path.parent.mkdir(exist_ok=True) - output_path.symlink_to(os.readlink(lib)) + output_path.parent.mkdir(exist_ok=True, parents=True) + relative_path_to_lib = os.path.relpath( + os.path.realpath(lib), + start=os.path.realpath(os.path.dirname(output_path)), + ) + output_path.symlink_to(relative_path_to_lib) if args.metadata_file: with open(lib, "rb") as f: diff --git a/prelude/android/tools/filter_dex.py b/prelude/android/tools/filter_dex.py index e26d507e0a..808f586e21 100644 --- a/prelude/android/tools/filter_dex.py +++ b/prelude/android/tools/filter_dex.py @@ -72,7 +72,8 @@ def class_name_matches_filter(self, class_name): def _parse_args(): parser = argparse.ArgumentParser( - description="Tool to filter a dex for primary class names." + description="Tool to filter a dex for primary class names.", + fromfile_prefix_chars="@", ) parser.add_argument( diff --git a/prelude/android/tools/merge_sequence.py b/prelude/android/tools/merge_sequence.py index 4478e681bb..e5d5ffca35 100644 --- a/prelude/android/tools/merge_sequence.py +++ b/prelude/android/tools/merge_sequence.py @@ -291,7 +291,9 @@ def assign_names( final_lib_graph[key] = list(dep_data.deps) # this topo_sort also verifies that we produced an acyclic final lib graph - sorted_final_lib_keys = topo_sort(final_lib_graph) + sorted_final_lib_keys = topo_sort( + final_lib_graph, lambda x: self.graph[x].module if self.graph[x] else str(x) + ) name_counters = {} final_lib_names: dict[FinalLibKey, str] = {} @@ -576,7 +578,9 @@ def get_split_group( def post_order_traversal_by( - roots: list[T], get_nodes_to_traverse_func: typing.Callable[[T], list[T]] + roots: list[T], + get_nodes_to_traverse_func: typing.Callable[[T], list[T]], + get_node_str: typing.Callable[[T], str] = None, ) -> list[T]: """ Returns the post-order sorted list of the nodes in the traversal. @@ -605,9 +609,17 @@ def post_order_traversal_by( work.append((OUTPUT, node)) for dep in get_nodes_to_traverse_func(node): if dep in current_parents: + current_parents_strs = [] + for k in current_parents: + current_parents_strs.append( + get_node_str(k) if get_node_str else str(k) + ) raise AssertionError( "detected cycle: {}".format( - " -> ".join(current_parents + [dep]) + " -> ".join( + current_parents_strs + + [get_node_str(dep) if get_node_str else str(dep)] + ) ) ) @@ -626,7 +638,9 @@ def is_root_module(module: str) -> bool: return module == ROOT_MODULE -def topo_sort(graph: dict[T, list[T]]) -> list[T]: +def topo_sort( + graph: dict[T, list[T]], get_node_str: typing.Callable[[T], str] = None +) -> list[T]: """ Topo-sort the given graph. 
""" @@ -642,7 +656,7 @@ def topo_sort(graph: dict[T, list[T]]) -> list[T]: if in_degree == 0: roots.append(node) - postordered = post_order_traversal_by(roots, lambda x: graph[x]) + postordered = post_order_traversal_by(roots, lambda x: graph[x], get_node_str) postordered.reverse() return postordered @@ -683,6 +697,7 @@ def main() -> int: # noqa: C901 final_result = {} debug_results = {} + split_groups = {} mergemap_input = read_mergemap_input(args.mergemap_input) for platform, nodes in mergemap_input.nodes_by_platform.items(): ( @@ -704,6 +719,9 @@ def main() -> int: # noqa: C901 final_mapping[target] = None else: final_mapping[target] = final_lib_names[node.final_lib_key] + split_groups[final_lib_names[node.final_lib_key]] = ( + node.base_library_name + ) else: final_mapping[target] = str(target) debug_results[platform] = ( @@ -717,6 +735,8 @@ def main() -> int: # noqa: C901 pathlib.Path(args.output).mkdir(parents=True, exist_ok=True) with open(os.path.join(args.output, "merge.map"), "w") as outfile: json.dump(final_result, outfile, indent=2) + with open(os.path.join(args.output, "split_groups.map"), "w") as outfile: + json.dump(split_groups, outfile, indent=2) # When writing an output dir we also produce some debugging information. for platform, result in final_result.items(): diff --git a/prelude/android/tools/native_libs_as_assets_metadata.py b/prelude/android/tools/native_libs_as_assets_metadata.py index 6b31c0b7e6..87f8d5a1c3 100644 --- a/prelude/android/tools/native_libs_as_assets_metadata.py +++ b/prelude/android/tools/native_libs_as_assets_metadata.py @@ -45,11 +45,6 @@ def main() -> None: type=Path, help="Metadata is written to this file", ) - parser.add_argument( - "--native-library-paths-output", - type=Path, - help="The actual paths of all the native libraries", - ) args = parser.parse_args() native_libraries = [] @@ -83,11 +78,6 @@ def main() -> None: ) ) - with open(args.native_library_paths_output, "w") as f: - f.write( - "\n".join([str(native_lib.full_path) for native_lib in native_libraries]) - ) - if __name__ == "__main__": main() diff --git a/prelude/android/tools/unpack_aar.py b/prelude/android/tools/unpack_aar.py index 7c0d77dc0b..b86bfb1382 100644 --- a/prelude/android/tools/unpack_aar.py +++ b/prelude/android/tools/unpack_aar.py @@ -8,6 +8,7 @@ import argparse import pathlib +import platform import shutil import zipfile from tempfile import TemporaryDirectory @@ -76,6 +77,12 @@ def _parse_args(): required=True, help="a path to the proguard config that is unpacked", ) + parser.add_argument( + "--lint-jar-path", + type=pathlib.Path, + required=True, + help="a path to the lint jar file that is unpacked", + ) parser.add_argument( "--jar-builder-tool", type=str, @@ -98,6 +105,7 @@ def main(): r_dot_txt_path = args.r_dot_txt_path annotation_jars_dir = args.annotation_jars_dir proguard_config_path = args.proguard_config_path + lint_jar_path = args.lint_jar_path jar_builder_tool = args.jar_builder_tool with TemporaryDirectory() as temp_dir: @@ -105,10 +113,11 @@ def main(): with zipfile.ZipFile(aar_path, "r") as aar_zip: aar_zip.extractall(unpack_dir) - # If the zip file was built on e.g. Windows, then it might not have - # correct permissions (which means we can't read any of the files), so - # make sure we actually read everything here. - utils.execute_command(["chmod", "-R", "+rX", unpack_dir]) + if platform.system() != "Windows": + # If the zip file was built on e.g. 
Windows, then it might not have + # correct permissions (which means we can't read any of the files), so + # make sure we actually read everything here. + utils.execute_command(["chmod", "-R", "+rX", unpack_dir]) unpacked_manifest = unpack_dir / "AndroidManifest.xml" assert unpacked_manifest.exists() @@ -149,6 +158,12 @@ def main(): else: proguard_config_path.touch() + unpacked_lint_jar = unpack_dir / "lint.jar" + if unpacked_lint_jar.exists(): + shutil.copyfile(unpacked_lint_jar, lint_jar_path) + else: + lint_jar_path.touch() + # Java .class files can exist at `classes.jar` or any jar file in /libs, # so combine them into a single `.jar` file. all_jars = [] diff --git a/prelude/android/util.bzl b/prelude/android/util.bzl index 484b2cddae..6c64544520 100644 --- a/prelude/android/util.bzl +++ b/prelude/android/util.bzl @@ -21,9 +21,9 @@ EnhancementContext = record( def create_enhancement_context(ctx: AnalysisContext) -> EnhancementContext: extra_sub_targets = {} - def debug_output(name: str, output: Artifact, other_outputs = []): + def debug_output(name: str, output: Artifact, other_outputs = [], sub_targets: dict[str, typing.Any] = {}): """Adds a subtarget to expose debugging outputs.""" - extra_sub_targets[name] = [DefaultInfo(default_outputs = [output], other_outputs = other_outputs)] + extra_sub_targets[name] = [DefaultInfo(default_outputs = [output], other_outputs = other_outputs, sub_targets = sub_targets)] def get_sub_targets(): return extra_sub_targets diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index 5e8f697bbf..cd2b2f8f26 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ -15,7 +15,9 @@ load( "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:expect.bzl", "expect") +load("@prelude//utils:set.bzl", "set") load("@prelude//utils:utils.bzl", "flatten") # "Voltron" gives us the ability to split our Android APKs into different "modules". 
These @@ -67,7 +69,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions, ctx.label, [android_packageable_info], - traversed_shared_library_info.values(), + traversed_shared_library_info, ctx.attrs._android_toolchain[AndroidToolchainInfo], ctx.attrs.application_module_configs, ctx.attrs.application_module_dependencies, @@ -78,19 +80,19 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: no_dx_target_labels = [no_dx_target.label.raw_target() for no_dx_target in ctx.attrs.no_dx] java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, all_deps) if packaging_dep.dex and packaging_dep.dex.dex.owner.raw_target() not in no_dx_target_labels] targets_to_jars_args = [cmd_args([str(packaging_dep.label.raw_target()), packaging_dep.jar], delimiter = " ") for packaging_dep in java_packaging_deps] - targets_to_jars = ctx.actions.write("targets_to_jars.txt", targets_to_jars_args) + targets_to_jars = argfile(actions = ctx.actions, name = "targets_to_jars.txt", args = targets_to_jars_args) cmd.add([ "--targets-to-jars", targets_to_jars, - ]).hidden(targets_to_jars_args) + ]) if ctx.attrs.should_include_libraries: - targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), so_name], delimiter = " ") for so_name, shared_lib in traversed_shared_library_info.items()] - targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args) + targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname.ensure_str()], delimiter = " ") for shared_lib in traversed_shared_library_info] + targets_to_so_names = argfile(actions = ctx.actions, name = "targets_to_so_names.txt", args = targets_to_so_names_args) cmd.add([ "--targets-to-so-names", targets_to_so_names, - ]).hidden(targets_to_so_names_args) + ]) traversed_prebuilt_native_library_dirs = android_packageable_info.prebuilt_native_library_dirs.traverse() if android_packageable_info.prebuilt_native_library_dirs else [] targets_to_non_assets_prebuilt_native_library_dirs_args = [ @@ -98,17 +100,21 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: for prebuilt_native_library_dir in traversed_prebuilt_native_library_dirs if not prebuilt_native_library_dir.is_asset and not prebuilt_native_library_dir.for_primary_apk ] - targets_to_non_assets_prebuilt_native_library_dirs = ctx.actions.write("targets_to_non_assets_prebuilt_native_library_dirs.txt", targets_to_non_assets_prebuilt_native_library_dirs_args) + targets_to_non_assets_prebuilt_native_library_dirs = argfile( + actions = ctx.actions, + name = "targets_to_non_assets_prebuilt_native_library_dirs.txt", + args = targets_to_non_assets_prebuilt_native_library_dirs_args, + ) cmd.add([ "--targets-to-non-asset-prebuilt-native-library-dirs", targets_to_non_assets_prebuilt_native_library_dirs, - ]).hidden(targets_to_non_assets_prebuilt_native_library_dirs_args) + ]) ctx.actions.run(cmd, category = "apk_module_graph") return [DefaultInfo(default_output = output)] -def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[str, list[Dependency]]) -> [Artifact, None]: +def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[str, list[Dependency]]) -> Artifact | None: if not ctx.attrs.application_module_configs: return None @@ -120,7 +126,7 @@ def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[st ctx.actions, deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) - 
shared_libraries.extend(traverse_shared_library_info(shared_library_info).values()) + shared_libraries.extend(traverse_shared_library_info(shared_library_info)) cmd, output = _get_base_cmd_and_output( ctx.actions, @@ -149,11 +155,14 @@ def _get_base_cmd_and_output( application_module_dependencies: [dict[str, list[str]], None], application_module_blocklist: [list[list[Dependency]], None]) -> (cmd_args, Artifact): deps_map = {} + primary_apk_deps = set() for android_packageable_info in android_packageable_infos: if android_packageable_info.deps: for deps_info in android_packageable_info.deps.traverse(): deps = deps_map.setdefault(deps_info.name, []) deps_map[deps_info.name] = dedupe(deps + deps_info.deps) + if deps_info.for_primary_apk: + primary_apk_deps.add(deps_info.name) target_graph_file = actions.write_json("target_graph.json", deps_map) application_module_configs_map = { @@ -183,8 +192,8 @@ def _get_base_cmd_and_output( used_by_wrap_script_libs = [str(shared_lib.label.raw_target()) for shared_lib in shared_libraries if shared_lib.for_primary_apk] prebuilt_native_library_dirs = flatten([list(android_packageable_info.prebuilt_native_library_dirs.traverse()) if android_packageable_info.prebuilt_native_library_dirs else [] for android_packageable_info in android_packageable_infos]) prebuilt_native_library_targets_for_primary_apk = dedupe([str(native_lib_dir.raw_target) for native_lib_dir in prebuilt_native_library_dirs if native_lib_dir.for_primary_apk]) - if application_module_blocklist or used_by_wrap_script_libs or prebuilt_native_library_targets_for_primary_apk: - all_blocklisted_deps = used_by_wrap_script_libs + prebuilt_native_library_targets_for_primary_apk + if application_module_blocklist or used_by_wrap_script_libs or prebuilt_native_library_targets_for_primary_apk or primary_apk_deps.size() > 0: + all_blocklisted_deps = used_by_wrap_script_libs + prebuilt_native_library_targets_for_primary_apk + primary_apk_deps.list() if application_module_blocklist: all_blocklisted_deps.extend([str(blocklisted_dep.label.raw_target()) for blocklisted_dep in flatten(application_module_blocklist)]) diff --git a/prelude/apple/apple_asset_catalog.bzl b/prelude/apple/apple_asset_catalog.bzl index a336cc2be6..a8550ba891 100644 --- a/prelude/apple/apple_asset_catalog.bzl +++ b/prelude/apple/apple_asset_catalog.bzl @@ -111,5 +111,5 @@ def _get_actool_command(ctx: AnalysisContext, info: AppleAssetCatalogSpec, catal ], allow_args = True, ) - command = cmd_args(["/bin/sh", wrapper_script]).hidden([actool_command, catalog_output]) + command = cmd_args(["/bin/sh", wrapper_script], hidden = [actool_command, catalog_output]) return command diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index 514ec2c23a..3c1f1dcdd9 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -6,7 +6,7 @@ # of this source tree. 
load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_buck2_compatibility.bzl", "apple_check_buck2_compatibility") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "add_extra_linker_outputs") load( @@ -23,13 +23,13 @@ load( "@prelude//cxx:argsfiles.bzl", "CompileArgsfiles", ) +load("@prelude//cxx:cxx_executable.bzl", "cxx_executable") +load("@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_deps", "cxx_attr_exported_deps") load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type + "get_srcs_with_flags", ) -load("@prelude//cxx:cxx_executable.bzl", "cxx_executable") -load("@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_deps", "cxx_attr_exported_deps") -load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load( "@prelude//cxx:cxx_types.bzl", "CxxRuleAdditionalParams", @@ -52,6 +52,7 @@ load( ) load( "@prelude//linking:link_info.bzl", + "CxxSanitizerRuntimeInfo", "LinkCommandDebugOutputInfo", "UnstrippedLinkOutputInfo", ) @@ -63,7 +64,6 @@ load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo") load(":apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym") load(":apple_entitlements.bzl", "entitlements_link_flags") load(":apple_frameworks.bzl", "get_framework_search_path_flags") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback") load(":debug.bzl", "AppleDebuggableInfo") @@ -71,8 +71,6 @@ load(":resource_groups.bzl", "create_resource_graph") load(":xcode.bzl", "apple_populate_xcode_attributes") def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: - apple_check_buck2_compatibility(ctx) - def get_apple_binary_providers(deps_providers) -> list[Provider]: # FIXME: Ideally we'd like to remove the support of "bridging header", # cause it affects build time and in general considered a bad practise. 
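
One recurring mechanical change in this patch is the move away from the chained, mutating cmd_args(...).hidden(...) form toward passing hidden inputs at construction time, as in the apple_asset_catalog.bzl hunk above. A minimal before/after sketch, using a hypothetical script and dep rather than names from this patch:

    # Before: hidden inputs were attached by mutating the value afterwards.
    command = cmd_args(["/bin/sh", script]).hidden([dep])

    # After: artifacts the command needs at run time, but which never appear
    # in the rendered argv, are declared up front via the `hidden` parameter.
    command = cmd_args(["/bin/sh", script], hidden = [dep])
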
@@ -80,9 +78,10 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: objc_bridging_header_flags = _get_bridging_header_flags(ctx) cxx_srcs, swift_srcs = _filter_swift_srcs(ctx) + contains_swift_sources = len(swift_srcs) > 0 framework_search_path_flags = get_framework_search_path_flags(ctx) - swift_compile = compile_swift( + swift_compile, _ = compile_swift( ctx, swift_srcs, False, # parse_as_library @@ -101,26 +100,23 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: extra_link_flags = get_min_deployment_version_target_linker_flags(ctx) + entitlements_link_flags(ctx) + extra_linker_output_flags framework_search_path_pre = CPreprocessor( - relative_args = CPreprocessorArgs(args = [framework_search_path_flags]), + args = CPreprocessorArgs(args = [framework_search_path_flags]), ) - swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, None, deps_providers) + swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, deps_providers) swift_debug_info = get_swift_debug_infos( ctx, swift_dependency_info, swift_compile, ) - genrule_deps_outputs = [] - if get_apple_build_genrule_deps_attr_value(ctx): - genrule_deps_outputs = get_apple_genrule_deps_outputs(cxx_attr_deps(ctx)) - + validation_deps_outputs = get_validation_deps_outputs(ctx) stripped = get_apple_stripped_attr_value_with_default_fallback(ctx) constructor_params = CxxRuleConstructorParams( rule_type = "apple_binary", headers_layout = get_apple_cxx_headers_layout(ctx), extra_link_flags = extra_link_flags, - extra_hidden = genrule_deps_outputs, + extra_hidden = validation_deps_outputs, srcs = cxx_srcs, additional = CxxRuleAdditionalParams( srcs = swift_srcs, @@ -144,12 +140,20 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: extra_preprocessors = get_min_deployment_version_target_preprocessor_flags(ctx) + [framework_search_path_pre] + swift_preprocessor, strip_executable = stripped, strip_args_factory = apple_strip_args, - cxx_populate_xcode_attributes_func = apple_populate_xcode_attributes, + cxx_populate_xcode_attributes_func = lambda local_ctx, **kwargs: apple_populate_xcode_attributes(local_ctx, contains_swift_sources = contains_swift_sources, **kwargs), link_group_info = get_link_group_info(ctx), prefer_stripped_objects = ctx.attrs.prefer_stripped_objects, # Some apple rules rely on `static` libs *not* following dependents. 
link_groups_force_static_follows_dependents = False, swiftmodule_linkable = get_swiftmodule_linkable(swift_compile), + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) cxx_output = cxx_executable(ctx, constructor_params) @@ -190,16 +194,20 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: if cxx_output.link_command_debug_output: link_command_providers.append(LinkCommandDebugOutputInfo(debug_outputs = [cxx_output.link_command_debug_output])) + sanitizer_runtime_providers = [] + if cxx_output.sanitizer_runtime_files: + sanitizer_runtime_providers.append(CxxSanitizerRuntimeInfo(runtime_files = cxx_output.sanitizer_runtime_files)) + return [ DefaultInfo(default_output = cxx_output.binary, sub_targets = cxx_output.sub_targets), - RunInfo(args = cmd_args(cxx_output.binary).hidden(cxx_output.runtime_files)), + RunInfo(args = cmd_args(cxx_output.binary, hidden = cxx_output.runtime_files)), AppleEntitlementsInfo(entitlements_file = ctx.attrs.entitlements_file), AppleDebuggableInfo(dsyms = [dsym_artifact], debug_info_tset = cxx_output.external_debug_info), cxx_output.xcode_data, cxx_output.compilation_db, merge_bundle_linker_maps_info(bundle_infos), UnstrippedLinkOutputInfo(artifact = unstripped_binary), - ] + [resource_graph] + min_version_providers + link_command_providers + ] + [resource_graph] + min_version_providers + link_command_providers + sanitizer_runtime_providers if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_binary_providers) @@ -230,8 +238,7 @@ def _get_bridging_header_flags(ctx: AnalysisContext) -> list[ArgLike]: header_map = {paths.join(h.namespace, h.name): h.artifact for h in headers} # We need to expose private headers to swift-compile action, in case something is imported to bridging header. - # TODO(chatatap): Handle absolute paths here. 
- header_root = prepare_headers(ctx, header_map, "apple-binary-private-headers", None) + header_root = prepare_headers(ctx, header_map, "apple-binary-private-headers") if header_root != None: private_headers_args = [cmd_args("-I"), header_root.include_path] else: diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index c517763edd..65b9aca855 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -12,7 +12,7 @@ load( "project_artifacts", ) load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_buck2_compatibility.bzl", "apple_check_buck2_compatibility") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "subtargets_for_apple_bundle_extra_outputs") load("@prelude//apple/user:apple_selected_debug_path_file.bzl", "SELECTED_DEBUG_PATH_FILE_NAME") @@ -34,7 +34,6 @@ load( "make_link_command_debug_output_json_info", ) load("@prelude//utils:arglike.bzl", "ArgLike") -load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") load( "@prelude//utils:set.bzl", @@ -46,7 +45,7 @@ load( ) load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart", "SwiftStdlibArguments", "assemble_bundle", "bundle_output", "get_apple_bundle_part_relative_destination_path", "get_bundle_dir_name") -load(":apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list", "get_is_watch_bundle") +load(":apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list") load( ":apple_bundle_types.bzl", "AppleBinaryExtraOutputsInfo", @@ -57,11 +56,10 @@ load( "AppleBundleResourceInfo", "AppleBundleType", "AppleBundleTypeDefault", - "AppleBundleTypeWatchApp", ) load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_binary_dep", "get_flattened_binary_deps", "get_product_name") -load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") +load(":apple_code_signing_types.bzl", "CodeSignConfiguration") +load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info_json") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_universal_binaries.bzl", "create_universal_binary") load( @@ -72,7 +70,6 @@ load( ) load(":xcode.bzl", "apple_xcode_data_add_xctoolchain") -INSTALL_DATA_SUB_TARGET = "install-data" _INSTALL_DATA_FILE_NAME = "install_apple_data.json" _PLIST = "plist" @@ -80,8 +77,7 @@ _PLIST = "plist" _XCTOOLCHAIN_SUB_TARGET = "xctoolchain" AppleBundleDebuggableInfo = record( - # Can be `None` for WatchKit stub - binary_info = field([AppleDebuggableInfo, None]), + binary_info = field(AppleDebuggableInfo), # Debugable info of all bundle deps dep_infos = field(list[AppleDebuggableInfo]), # Concat of `binary_info` and `dep_infos` @@ -101,13 +97,6 @@ AppleBundlePartListOutput = record( ) def _get_binary(ctx: AnalysisContext) -> AppleBundleBinaryOutput: - # No binary means we are building watchOS bundle. In v1 bundle binary is present, but its sources are empty. 
- if ctx.attrs.binary == None: - return AppleBundleBinaryOutput( - binary = _get_watch_kit_stub_artifact(ctx), - is_watchkit_stub_binary = True, - ) - if len(get_flattened_binary_deps(ctx.attrs.binary)) > 1: if ctx.attrs.selective_debugging != None: fail("Selective debugging is not supported for universal binaries.") @@ -130,7 +119,8 @@ def _get_bundle_dsym_name(ctx: AnalysisContext) -> str: def _scrub_binary(ctx, binary: Artifact, binary_execution_preference_info: None | LinkExecutionPreferenceInfo) -> Artifact: # If fast adhoc code signing is enabled, we need to resign the binary as it won't be signed later. - if ctx.attrs._fast_adhoc_signing_enabled: + code_signing_configuration = CodeSignConfiguration(ctx.attrs._code_signing_configuration) + if code_signing_configuration == CodeSignConfiguration("fast-adhoc"): apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] adhoc_codesign_tool = apple_tools.adhoc_codesign_tool else: @@ -184,9 +174,6 @@ def _get_binary_bundle_parts(ctx: AnalysisContext, binary_output: AppleBundleBin """Returns a tuple of all binary bundle parts and the primary bundle binary.""" result = [] - if binary_output.is_watchkit_stub_binary: - # If we're using a stub binary from watchkit, we also need to add extra part for stub. - result.append(AppleBundlePart(source = binary_output.binary, destination = AppleBundleDestination("watchkitstub"), new_name = "WK")) primary_binary_part = AppleBundlePart(source = binary_output.binary, destination = AppleBundleDestination("executables"), new_name = get_product_name(ctx)) result.append(primary_binary_part) @@ -197,10 +184,6 @@ def _get_binary_bundle_parts(ctx: AnalysisContext, binary_output: AppleBundleBin return result, primary_binary_part def _get_dsym_input_binary_arg(ctx: AnalysisContext, primary_binary_path_arg: cmd_args) -> cmd_args: - # No binary means we are building watchOS bundle. In v1 bundle binary is present, but its sources are empty. - if ctx.attrs.binary == None: - return cmd_args(_get_watch_kit_stub_artifact(ctx)) - binary_dep = get_default_binary_dep(ctx.attrs.binary) default_binary = binary_dep[DefaultInfo].default_outputs[0] @@ -216,13 +199,6 @@ def _get_dsym_input_binary_arg(ctx: AnalysisContext, primary_binary_path_arg: cm else: return primary_binary_path_arg -def _get_watch_kit_stub_artifact(ctx: AnalysisContext) -> Artifact: - expect(ctx.attrs.binary == None, "Stub is useful only when binary is not set which means watchOS bundle is built.") - stub_binary = ctx.attrs._apple_toolchain[AppleToolchainInfo].watch_kit_stub_binary - if stub_binary == None: - fail("Expected Watch Kit stub binary to be provided when bundle binary is not set.") - return stub_binary - def _apple_bundle_run_validity_checks(ctx: AnalysisContext): if ctx.attrs.extension == None: fail("`extension` attribute is required") @@ -238,10 +214,6 @@ def _get_deps_debuggable_infos(ctx: AnalysisContext) -> list[AppleDebuggableInfo return deps_debuggable_infos def _get_bundle_binary_dsym_artifacts(ctx: AnalysisContext, binary_output: AppleBundleBinaryOutput, executable_arg: ArgLike) -> list[Artifact]: - # We don't care to process the watchkit stub binary. - if binary_output.is_watchkit_stub_binary: - return [] - if not ctx.attrs.split_arch_dsym: # Calling `dsymutil` on the correctly named binary in the _final bundle_ to yield dsym files # with naming convention compatible with Meta infra. 
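
The _scrub_binary hunk above replaces the `_fast_adhoc_signing_enabled` boolean check with a `CodeSignConfiguration` enum read from `_code_signing_configuration`. A sketch of the resulting dispatch, using only the three values visible in this patch (the helper name is illustrative, not part of the change):

    CodeSignConfiguration = enum("dry-run", "fast-adhoc", "none")

    def _codesign_configuration_args(configuration: CodeSignConfiguration) -> list[str]:
        # "dry-run" and "fast-adhoc" are forwarded to the bundling tool as
        # --codesign-configuration; "none" contributes no flags.
        if configuration == CodeSignConfiguration("dry-run"):
            return ["--codesign-configuration", "dry-run"]
        elif configuration == CodeSignConfiguration("fast-adhoc"):
            return ["--codesign-configuration", "fast-adhoc"]
        elif configuration == CodeSignConfiguration("none"):
            return []
        else:
            fail("Code signing configuration `{}` not supported".format(configuration))

A single enum makes the dry-run and fast-adhoc states mutually exclusive, which two independent booleans could not guarantee.
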
@@ -261,18 +233,21 @@ def _get_bundle_binary_dsym_artifacts(ctx: AnalysisContext, binary_output: Apple return binary_output.debuggable_info.dsyms def _get_all_agg_debug_info(ctx: AnalysisContext, binary_output: AppleBundleBinaryOutput, deps_debuggable_infos: list[AppleDebuggableInfo]) -> AggregatedAppleDebugInfo: - all_debug_infos = deps_debuggable_infos - if not binary_output.is_watchkit_stub_binary: - binary_debuggable_info = binary_output.debuggable_info - all_debug_infos = all_debug_infos + [binary_debuggable_info] + all_debug_infos = deps_debuggable_infos + ([binary_output.debuggable_info] if binary_output.debuggable_info else []) return get_aggregated_debug_info(ctx, all_debug_infos) +def _maybe_scrub_selected_debug_paths_file(ctx: AnalysisContext, package_names: list[str]) -> Artifact: + if not ctx.attrs.selective_debugging: + return ctx.actions.write(SELECTED_DEBUG_PATH_FILE_NAME, sorted(set(package_names).list())) + + selective_debugging_info = ctx.attrs.selective_debugging[AppleSelectiveDebuggingInfo] + return selective_debugging_info.scrub_selected_debug_paths_file(ctx, package_names, SELECTED_DEBUG_PATH_FILE_NAME) + def _get_selected_debug_targets_part(ctx: AnalysisContext, agg_debug_info: AggregatedAppleDebugInfo) -> [AppleBundlePart, None]: # Only app bundle need this, and this file is searched by FBReport at the bundle root if ctx.attrs.extension == "app" and agg_debug_info.debug_info.filtered_map: package_names = [label.package for label in agg_debug_info.debug_info.filtered_map.keys()] - package_names = set(package_names).list() - output = ctx.actions.write(SELECTED_DEBUG_PATH_FILE_NAME, package_names) + output = _maybe_scrub_selected_debug_paths_file(ctx, package_names) return AppleBundlePart(source = output, destination = AppleBundleDestination("bundleroot"), new_name = SELECTED_DEBUG_PATH_FILE_NAME) else: return None @@ -293,12 +268,6 @@ def get_apple_bundle_part_list(ctx: AnalysisContext, params: AppleBundlePartList ) def _infer_apple_bundle_type(ctx: AnalysisContext) -> AppleBundleType: - is_watchos = get_is_watch_bundle(ctx) - if is_watchos and ctx.attrs.bundle_type: - fail("Cannot have a watchOS app with an explicit `bundle_type`, target: {}".format(ctx.label)) - - if is_watchos: - return AppleBundleTypeWatchApp if ctx.attrs.bundle_type != None: return AppleBundleType(ctx.attrs.bundle_type) @@ -306,7 +275,6 @@ def _infer_apple_bundle_type(ctx: AnalysisContext) -> AppleBundleType: def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: _apple_bundle_run_validity_checks(ctx) - apple_check_buck2_compatibility(ctx) binary_outputs = _get_binary(ctx) @@ -320,22 +288,20 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, primary_binary_part) - genrule_deps_outputs = [] - if get_apple_build_genrule_deps_attr_value(ctx): - genrule_deps_outputs = get_apple_genrule_deps_outputs(ctx.attrs.deps) - - sub_targets = assemble_bundle( + validation_deps_outputs = get_validation_deps_outputs(ctx) + bundle_result = assemble_bundle( ctx, bundle, apple_bundle_part_list_output.parts, apple_bundle_part_list_output.info_plist_part, SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path), - genrule_deps_outputs, + validation_deps_outputs, ) + sub_targets = bundle_result.sub_targets sub_targets.update(aggregated_debug_info.sub_targets) primary_binary_path = cmd_args([bundle, primary_binary_rel_path], delimiter = "/") - primary_binary_path_arg = 
cmd_args(primary_binary_path).hidden(bundle) + primary_binary_path_arg = cmd_args(primary_binary_path, hidden = bundle) linker_maps_directory, linker_map_info = _linker_maps_data(ctx) sub_targets["linker-maps"] = [DefaultInfo(default_output = linker_maps_directory)] @@ -352,7 +318,8 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: if dsym_artifacts: sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_outputs = dsym_artifacts)] - dsym_info = get_apple_dsym_info(ctx, binary_dsyms = binary_dsym_artifacts, dep_dsyms = dep_dsym_artifacts) + dsym_info_json = get_apple_dsym_info_json(binary_dsym_artifacts, dep_dsym_artifacts) + dsym_info = ctx.actions.write_json("dsym-info.json", dsym_info_json) sub_targets[DSYM_INFO_SUBTARGET] = [ DefaultInfo(default_output = dsym_info, other_outputs = dsym_artifacts), ] @@ -373,6 +340,18 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: # @oss-disable: extra_output_subtargets = subtargets_for_apple_bundle_extra_outputs(ctx, extra_output_provider) # @oss-disable: sub_targets.update(extra_output_subtargets) + bundle_and_dsym_info_json = { + "bundle": bundle, + "dsym": dsym_info_json, + } + bundle_and_dsym_info = ctx.actions.write_json("bundle-and-dsym-info.json", bundle_and_dsym_info_json) + sub_targets["bundle-and-dsym-info"] = [ + DefaultInfo( + default_output = bundle_and_dsym_info, + other_outputs = [bundle] + dsym_artifacts, + ), + ] + return [ DefaultInfo(default_output = bundle, sub_targets = sub_targets), AppleBundleInfo( @@ -399,7 +378,7 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: xcode_data_info, extra_output_provider, link_cmd_debug_info, - ] + ] + bundle_result.providers def _xcode_populate_attributes(ctx, processed_info_plist: Artifact) -> dict[str, typing.Any]: data = { @@ -465,6 +444,7 @@ def generate_install_data( data = { "fullyQualifiedName": ctx.label, "info_plist": plist_path, + "platform_name": get_apple_sdk_name(ctx), "use_idb": "true", ## TODO(T110665037): read from .buckconfig # We require the user to have run `xcode-select` and `/var/db/xcode_select_link` to symlink diff --git a/prelude/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl index 507d9f9de0..002dc35399 100644 --- a/prelude/apple/apple_bundle_config.bzl +++ b/prelude/apple/apple_bundle_config.bzl @@ -5,35 +5,39 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load(":apple_code_signing_types.bzl", "CodeSignConfiguration") + def _maybe_get_bool(config: str, default: [None, bool]) -> [None, bool]: result = read_root_config("apple", config, None) if result == None: return default return result.lower() == "true" -def _get_bundling_path_conflicts_check_enabled(): - check_enabled = _maybe_get_bool("bundling_path_conflicts_check_enabled", None) - if check_enabled != None: - return check_enabled +def _get_code_signing_configuration() -> str: + is_dry_run = _maybe_get_bool("dry_run_code_signing", False) + + # This is a kill switch for the feature, it can also be disabled by setting + # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. 
+ is_fast_adhoc_signing_enabled = _maybe_get_bool("fast_adhoc_signing_enabled", True) - return select({ - "DEFAULT": True, - "ovr_config//features/apple/constraints:bundling_path_conflicts_check_disabled": False, - "ovr_config//features/apple/constraints:bundling_path_conflicts_check_enabled": True, - }) + if is_dry_run: + return CodeSignConfiguration("dry-run").value + elif is_fast_adhoc_signing_enabled: + return CodeSignConfiguration("fast-adhoc").value + else: + return CodeSignConfiguration("none").value def apple_bundle_config() -> dict[str, typing.Any]: return { "_bundling_cache_buster": read_root_config("apple", "bundling_cache_buster", None), "_bundling_log_file_enabled": _maybe_get_bool("bundling_log_file_enabled", True), "_bundling_log_file_level": read_root_config("apple", "bundling_log_file_level", None), - "_bundling_path_conflicts_check_enabled": _get_bundling_path_conflicts_check_enabled(), + "_code_signing_configuration": _get_code_signing_configuration(), + "_codesign_identities_command_override": read_root_config("apple", "codesign_identities_command_override", None), "_codesign_type": read_root_config("apple", "codesign_type_override", None), "_compile_resources_locally_override": _maybe_get_bool("compile_resources_locally_override", None), - "_dry_run_code_signing": _maybe_get_bool("dry_run_code_signing", False), - # This is a kill switch for the feature, it can also be disabled by setting - # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. - "_fast_adhoc_signing_enabled": _maybe_get_bool("fast_adhoc_signing_enabled", True), + "_embed_provisioning_profile_when_adhoc_code_signing": _maybe_get_bool("embed_provisioning_profile_when_adhoc_code_signing", None), + "_fast_provisioning_profile_parsing_enabled": _maybe_get_bool("fast_provisioning_profile_parsing_enabled", False), "_incremental_bundling_enabled": _maybe_get_bool("incremental_bundling_enabled", True), "_info_plist_identify_build_system_default": _maybe_get_bool("info_plist_identify_build_system", True), "_profile_bundling_enabled": _maybe_get_bool("profile_bundling_enabled", False), diff --git a/prelude/apple/apple_bundle_destination.bzl b/prelude/apple/apple_bundle_destination.bzl index 2d54b273d1..04c4d40006 100644 --- a/prelude/apple/apple_bundle_destination.bzl +++ b/prelude/apple/apple_bundle_destination.bzl @@ -22,7 +22,6 @@ AppleBundleDestination = enum( "headers", "modules", "quicklook", - "watchkitstub", "bundleroot", "loginitems", "appclips", @@ -39,7 +38,6 @@ AppleBundleDestinationPaths = record( headers = field(str, ""), modules = field(str, ""), quicklook = field(str, ""), - watchkitstub = field(str, ""), bundleroot = field(str, ""), loginitems = field(str, ""), appclips = field(str, ""), @@ -51,7 +49,6 @@ _IOSBundleDestinationPaths = AppleBundleDestinationPaths( xpcservices = "XPCServices", watchapp = "Watch", quicklook = "Library/QuickLook", - watchkitstub = "_WatchKitStub", appclips = "AppClips", ) @@ -74,7 +71,6 @@ _MacOSBundleDestinationPaths = AppleBundleDestinationPaths( headers = macOS_content_path, modules = macOS_content_path, quicklook = paths.join(macOS_content_path, "Library/QuickLook"), - watchkitstub = macOS_content_path, bundleroot = macOS_content_path, loginitems = paths.join(macOS_content_path, "Library/LoginItems"), ) @@ -88,21 +84,35 @@ _MacOSFrameworkBundleDestinationPaths = AppleBundleDestinationPaths( modules = "Modules", ) +macOS_versioned_path = "Versions/A" +_MacOSVersionedFrameworkBundleDestinationPaths = AppleBundleDestinationPaths( + resources = 
paths.join(macOS_versioned_path, "Resources"), + frameworks = paths.join(macOS_versioned_path, "Frameworks"), + xpcservices = paths.join(macOS_versioned_path, "XPCServices"), + metadata = paths.join(macOS_versioned_path, "Resources"), + headers = paths.join(macOS_versioned_path, "Headers"), + modules = paths.join(macOS_versioned_path, "Modules"), + executables = macOS_versioned_path, +) + def _get_apple_bundle_destinations_for_sdk_name(name: str) -> AppleBundleDestinationPaths: if name == "macosx" or name == "maccatalyst": return _MacOSBundleDestinationPaths else: return _IOSBundleDestinationPaths -def _get_apple_framework_bundle_destinations_for_sdk_name(name: str) -> AppleBundleDestinationPaths: +def _get_apple_framework_bundle_destinations_for_sdk_name(name: str, versioned_macos_bundle: bool) -> AppleBundleDestinationPaths: if name == "macosx" or name == "maccatalyst": - return _MacOSFrameworkBundleDestinationPaths + if versioned_macos_bundle: + return _MacOSVersionedFrameworkBundleDestinationPaths + else: + return _MacOSFrameworkBundleDestinationPaths else: return _IOSFrameworkBundleDestinationPaths -def bundle_relative_path_for_destination(destination: AppleBundleDestination, sdk_name: str, extension: str) -> str: +def bundle_relative_path_for_destination(destination: AppleBundleDestination, sdk_name: str, extension: str, versioned_macos_bundle: bool) -> str: if extension == "framework": - bundle_destinations = _get_apple_framework_bundle_destinations_for_sdk_name(sdk_name) + bundle_destinations = _get_apple_framework_bundle_destinations_for_sdk_name(sdk_name, versioned_macos_bundle) else: bundle_destinations = _get_apple_bundle_destinations_for_sdk_name(sdk_name) @@ -126,8 +136,6 @@ def bundle_relative_path_for_destination(destination: AppleBundleDestination, sd return bundle_destinations.modules elif destination.value == "quicklook": return bundle_destinations.quicklook - elif destination.value == "watchkitstub": - return bundle_destinations.watchkitstub elif destination.value == "bundleroot": return bundle_destinations.bundleroot elif destination.value == "loginitems": diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 15055d5575..724834bbb7 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -7,10 +7,12 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//utils:expect.bzl", "expect") +load("@prelude//utils:utils.bzl", "value_or") load(":apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination") +load(":apple_bundle_types.bzl", "AppleBundleManifest", "AppleBundleManifestInfo", "AppleBundleManifestLogFiles") load(":apple_bundle_utility.bzl", "get_extension_attr", "get_product_name") -load(":apple_code_signing_types.bzl", "CodeSignType") -load(":apple_entitlements.bzl", "get_entitlements_codesign_args") +load(":apple_code_signing_types.bzl", "CodeSignConfiguration", "CodeSignType") +load(":apple_entitlements.bzl", "get_entitlements_codesign_args", "should_include_entitlements") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_sdk_metadata.bzl", "get_apple_sdk_metadata_for_sdk_name") load(":apple_swift_stdlib.bzl", "should_copy_swift_stdlib") @@ -30,12 +32,21 @@ AppleBundlePart = record( new_name = field([str, None], None), # Marks parts which should be code signed separately from the whole bundle. codesign_on_copy = field(bool, False), + # Entitlements to use when this part is code signed separately. 
+    codesign_entitlements = field(Artifact | None, None),
+    # If present, overrides the codesign flags with these flags when this part is code signed separately.
+    codesign_flags_override = field([list[str], None], None),
 )
 
 SwiftStdlibArguments = record(
     primary_binary_rel_path = field(str),
 )
 
+AppleBundleConstructionResult = record(
+    providers = field(list[Provider]),
+    sub_targets = field(dict[str, list[Provider]]),
+)
+
 def bundle_output(ctx: AnalysisContext) -> Artifact:
     bundle_dir_name = get_bundle_dir_name(ctx)
     output = ctx.actions.declare_output(bundle_dir_name)
@@ -48,27 +59,30 @@ def assemble_bundle(
         info_plist_part: [AppleBundlePart, None],
         swift_stdlib_args: [SwiftStdlibArguments, None],
         extra_hidden: list[Artifact] = [],
-        skip_adhoc_signing: bool = False) -> dict[str, list[Provider]]:
+        skip_adhoc_signing: bool = False) -> AppleBundleConstructionResult:
     """
     Returns extra subtargets related to bundling.
     """
     all_parts = parts + [info_plist_part] if info_plist_part else []
-    spec_file = _bundle_spec_json(ctx, all_parts)
+    codesign_type = _detect_codesign_type(ctx, skip_adhoc_signing)
+    spec_file = _bundle_spec_json(ctx, all_parts, codesign_type)
     tools = ctx.attrs._apple_tools[AppleToolsInfo]
     tool = tools.assemble_bundle
 
     codesign_args = []
-    codesign_type = _detect_codesign_type(ctx, skip_adhoc_signing)
     codesign_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign
 
-    if ctx.attrs._dry_run_code_signing:
+    code_signing_configuration = CodeSignConfiguration(ctx.attrs._code_signing_configuration)
+    if code_signing_configuration == CodeSignConfiguration("dry-run"):
         codesign_configuration_args = ["--codesign-configuration", "dry-run"]
         codesign_tool = tools.dry_codesign_tool
-    elif ctx.attrs._fast_adhoc_signing_enabled:
+    elif code_signing_configuration == CodeSignConfiguration("fast-adhoc"):
         codesign_configuration_args = ["--codesign-configuration", "fast-adhoc"]
-    else:
+    elif code_signing_configuration == CodeSignConfiguration("none"):
         codesign_configuration_args = []
+    else:
+        fail("Code signing configuration `{}` not supported".format(code_signing_configuration))
 
     codesign_required = codesign_type.value in ["distribution", "adhoc"]
     swift_support_required = swift_stdlib_args and (not ctx.attrs.skip_copying_swift_stdlib) and should_copy_swift_stdlib(bundle.extension)
@@ -84,11 +98,11 @@ def assemble_bundle(
             "--binary-destination",
             swift_stdlib_args.primary_binary_rel_path,
             "--frameworks-destination",
-            bundle_relative_path_for_destination(AppleBundleDestination("frameworks"), sdk_name, ctx.attrs.extension),
+            bundle_relative_path_for_destination(AppleBundleDestination("frameworks"), sdk_name, ctx.attrs.extension, ctx.attrs.versioned_macos_bundle),
             "--plugins-destination",
-            bundle_relative_path_for_destination(AppleBundleDestination("plugins"), sdk_name, ctx.attrs.extension),
+            bundle_relative_path_for_destination(AppleBundleDestination("plugins"), sdk_name, ctx.attrs.extension, ctx.attrs.versioned_macos_bundle),
             "--appclips-destination",
-            bundle_relative_path_for_destination(AppleBundleDestination("appclips"), sdk_name, ctx.attrs.extension),
+            bundle_relative_path_for_destination(AppleBundleDestination("appclips"), sdk_name, ctx.attrs.extension, ctx.attrs.versioned_macos_bundle),
             "--swift-stdlib-command",
             cmd_args(ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info.swift_stdlib_tool, delimiter = " ", quote = "shell"),
             "--sdk-root",
@@ -104,7 +118,8 @@
             codesign_tool,
         ]
 
-        if codesign_type.value != "adhoc":
+        profile_selection_required = _should_embed_provisioning_profile(ctx, codesign_type)
+        if profile_selection_required:
             provisioning_profiles = ctx.attrs._provisioning_profiles[DefaultInfo]
             expect(
                 len(provisioning_profiles.default_outputs) == 1,
@@ -114,14 +129,20 @@
             codesign_args.extend(provisioning_profiles_args)
 
             identities_command = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign_identities_command
+            if ctx.attrs._codesign_identities_command_override:
+                identities_command = ctx.attrs._codesign_identities_command_override[RunInfo]
             identities_command_args = ["--codesign-identities-command", cmd_args(identities_command)] if identities_command else []
             codesign_args.extend(identities_command_args)
-        else:
+
+        if codesign_type.value == "adhoc":
             codesign_args.append("--ad-hoc")
             if ctx.attrs.codesign_identity:
                 codesign_args.extend(["--ad-hoc-codesign-identity", ctx.attrs.codesign_identity])
+            if profile_selection_required:
+                codesign_args.append("--embed-provisioning-profile-when-signing-ad-hoc")
 
         codesign_args += get_entitlements_codesign_args(ctx, codesign_type)
+        codesign_args += _get_extra_codesign_args(ctx)
 
         info_plist_args = [
             "--info-plist-source",
@@ -130,19 +151,29 @@
             get_apple_bundle_part_relative_destination_path(ctx, info_plist_part),
         ] if info_plist_part else []
         codesign_args.extend(info_plist_args)
+
+        strict_provisioning_profile_search = value_or(ctx.attrs.strict_provisioning_profile_search, ctx.attrs._strict_provisioning_profile_search_default)
+        if strict_provisioning_profile_search:
+            codesign_args.append("--strict-provisioning-profile-search")
     elif codesign_type.value == "skip":
         pass
     else:
        fail("Code sign type `{}` not supported".format(codesign_type))
 
-    command = cmd_args([
-        tool,
-        "--output",
-        bundle.as_output(),
-        "--spec",
-        spec_file,
-    ] + codesign_args + platform_args + swift_args)
-    command.hidden([part.source for part in all_parts])
+    command = cmd_args(
+        [
+            tool,
+            "--output",
+            bundle.as_output(),
+            "--spec",
+            spec_file,
+        ] + codesign_args + platform_args + swift_args,
+        hidden =
+            [part.source for part in all_parts] +
+            [part.codesign_entitlements for part in all_parts if part.codesign_entitlements] +
+            # Ensures any genrule deps get built; such targets are used for validation
+            extra_hidden,
+    )
 
     run_incremental_args = {}
     incremental_state = ctx.actions.declare_output("incremental_state.json").as_output()
@@ -161,14 +192,18 @@
         # overwrite file with incremental state so if previous and next builds are incremental
         # (as opposed to the current non-incremental one), next one won't assume there is a
         # valid incremental state.
- command.hidden(ctx.actions.write_json(incremental_state, {})) + command.add(cmd_args(hidden = ctx.actions.write_json(incremental_state, {}))) category = "apple_assemble_bundle" if ctx.attrs._profile_bundling_enabled: profile_output = ctx.actions.declare_output("bundling_profile.txt").as_output() command.add("--profile-output", profile_output) + if ctx.attrs._fast_provisioning_profile_parsing_enabled: + command.add("--fast-provisioning-profile-parsing") + subtargets = {} + bundling_log_output = None if ctx.attrs._bundling_log_file_enabled: bundling_log_output = ctx.actions.declare_output("bundling_log.txt") command.add("--log-file", bundling_log_output.as_output()) @@ -176,13 +211,37 @@ def assemble_bundle( command.add("--log-level-file", ctx.attrs._bundling_log_file_level) subtargets["bundling-log"] = [DefaultInfo(default_output = bundling_log_output)] - if ctx.attrs._bundling_path_conflicts_check_enabled: - command.add("--check-conflicts") - + command.add("--check-conflicts") + if ctx.attrs.versioned_macos_bundle: + command.add("--versioned-if-macos") command.add(codesign_configuration_args) - # Ensures any genrule deps get built, such targets are used for validation - command.hidden(extra_hidden) + command_json = ctx.actions.declare_output("bundling_command.json") + command_json_cmd_args = ctx.actions.write_json(command_json, command, with_inputs = True, pretty = True) + subtargets["command"] = [DefaultInfo(default_output = command_json, other_outputs = [command_json_cmd_args])] + + bundle_manifest_log_file_map = { + ctx.label: AppleBundleManifestLogFiles( + command_file = command_json, + spec_file = spec_file, + log_file = bundling_log_output, + ), + } + + if hasattr(ctx.attrs, "deps"): + for dep in ctx.attrs.deps: + dep_manifest_info = dep.get(AppleBundleManifestInfo) + if dep_manifest_info: + bundle_manifest_log_file_map.update(dep_manifest_info.manifest.log_file_map) + + bundle_manifest = AppleBundleManifest(log_file_map = bundle_manifest_log_file_map) + bundle_manifest_json_object = _convert_bundle_manifest_to_json_object(bundle_manifest) + + bundle_manifest_json_file = ctx.actions.declare_output("bundle_manifest.json") + bundle_manifest_cmd_args = ctx.actions.write_json(bundle_manifest_json_file, bundle_manifest_json_object, with_inputs = True, pretty = True) + subtargets["manifest"] = [DefaultInfo(default_output = bundle_manifest_json_file, other_outputs = [bundle_manifest_cmd_args])] + + providers = [AppleBundleManifestInfo(manifest = bundle_manifest)] env = {} cache_buster = ctx.attrs._bundling_cache_buster @@ -196,21 +255,23 @@ def assemble_bundle( prefer_local = not force_local_bundling, category = category, env = env, + error_handler = _apple_bundle_error_handler, **run_incremental_args ) - return subtargets + return AppleBundleConstructionResult(sub_targets = subtargets, providers = providers) def get_bundle_dir_name(ctx: AnalysisContext) -> str: return paths.replace_extension(get_product_name(ctx), "." 
+ get_extension_attr(ctx)) def get_apple_bundle_part_relative_destination_path(ctx: AnalysisContext, part: AppleBundlePart) -> str: - bundle_relative_path = bundle_relative_path_for_destination(part.destination, get_apple_sdk_name(ctx), ctx.attrs.extension) + bundle_relative_path = bundle_relative_path_for_destination(part.destination, get_apple_sdk_name(ctx), ctx.attrs.extension, ctx.attrs.versioned_macos_bundle) destination_file_or_directory_name = part.new_name if part.new_name != None else paths.basename(part.source.short_path) return paths.join(bundle_relative_path, destination_file_or_directory_name) # Returns JSON to be passed into bundle assembling tool. It should contain a dictionary which maps bundle relative destination paths to source paths." -def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart]) -> Artifact: +def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart], codesign_type: CodeSignType) -> Artifact: specs = [] + include_entitlements = should_include_entitlements(ctx, codesign_type) for part in parts: part_spec = { @@ -219,9 +280,13 @@ def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart]) -> Art } if part.codesign_on_copy: part_spec["codesign_on_copy"] = True + if include_entitlements and part.codesign_entitlements: + part_spec["codesign_entitlements"] = part.codesign_entitlements + if part.codesign_flags_override: + part_spec["codesign_flags_override"] = part.codesign_flags_override specs.append(part_spec) - return ctx.actions.write_json("bundle_spec.json", specs) + return ctx.actions.write_json("bundle_spec.json", specs, pretty = True) def _get_codesign_type_from_attribs(ctx: AnalysisContext) -> [CodeSignType, None]: # Target-level attribute takes highest priority @@ -235,7 +300,7 @@ def _get_codesign_type_from_attribs(ctx: AnalysisContext) -> [CodeSignType, None def _detect_codesign_type(ctx: AnalysisContext, skip_adhoc_signing: bool) -> CodeSignType: def compute_codesign_type(): - if ctx.attrs.extension not in ["app", "appex", "xctest"]: + if ctx.attrs.extension not in ["app", "appex", "xctest", "driver"]: # Only code sign application bundles, extensions and test bundles return CodeSignType("skip") @@ -252,3 +317,39 @@ def _detect_codesign_type(ctx: AnalysisContext, skip_adhoc_signing: bool) -> Cod codesign_type = CodeSignType("skip") return codesign_type + +def _get_extra_codesign_args(ctx: AnalysisContext) -> list[str]: + codesign_args = ctx.attrs.codesign_flags if hasattr(ctx.attrs, "codesign_flags") else [] + return ["--codesign-args={}".format(flag) for flag in codesign_args] + +def _should_embed_provisioning_profile(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: + if codesign_type.value == "distribution": + return True + + if codesign_type.value == "adhoc": + # The config-based override value takes priority over target value + if ctx.attrs._embed_provisioning_profile_when_adhoc_code_signing != None: + return ctx.attrs._embed_provisioning_profile_when_adhoc_code_signing + return ctx.attrs.embed_provisioning_profile_when_adhoc_code_signing + + return False + +def _convert_bundle_manifest_to_json_object(manifest: AppleBundleManifest) -> dict[Label, typing.Any]: + manifest_dict = {} + for target_label, logs in manifest.log_file_map.items(): + manifest_dict[target_label] = { + "command": logs.command_file, + "log": logs.log_file, + "spec": logs.spec_file, + } + return manifest_dict + +def _apple_bundle_error_handler(ctx: ActionErrorCtx) -> list[ActionSubError]: + categories = [] + + if 
"CodeSignProvisioningError" in ctx.stderr: + categories.append(ctx.new_sub_error( + category = "code_sign_provisioning_error", + )) + + return categories diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index 0dbc6e93f8..3032addbad 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -8,6 +8,10 @@ load("@prelude//:artifacts.bzl", "single_artifact") load("@prelude//:paths.bzl", "paths") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load( + "@prelude//linking:link_info.bzl", + "CxxSanitizerRuntimeInfo", +) load("@prelude//utils:utils.bzl", "flatten_dict") load( ":apple_asset_catalog.bzl", @@ -20,13 +24,14 @@ load( load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart") load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleTypeAppClip", "AppleBundleTypeDefault", "AppleBundleTypeWatchApp") -load(":apple_bundle_utility.bzl", "get_bundle_resource_processing_options", "get_extension_attr", "get_product_name") +load(":apple_bundle_utility.bzl", "get_bundle_resource_processing_options", "get_default_binary_dep", "get_extension_attr", "get_flattened_binary_deps", "get_product_name") load(":apple_core_data.bzl", "compile_apple_core_data") load( ":apple_core_data_types.bzl", "AppleCoreDataSpec", # @unused Used as a type ) load(":apple_info_plist.bzl", "process_info_plist", "process_plist") +load(":apple_library.bzl", "AppleLibraryForDistributionInfo", "AppleLibraryInfo") load( ":apple_resource_types.bzl", "AppleResourceDestination", @@ -58,6 +63,7 @@ def get_apple_bundle_resource_part_list(ctx: AnalysisContext) -> AppleBundleReso parts = [] parts.extend(_create_pkg_info_if_needed(ctx)) + parts.extend(_copy_privacy_manifest_if_needed(ctx)) (resource_specs, asset_catalog_specs, core_data_specs, scene_kit_assets_spec, cxx_resource_specs) = _select_resources(ctx) @@ -79,6 +85,16 @@ def get_apple_bundle_resource_part_list(ctx: AnalysisContext) -> AppleBundleReso ), ) + cxx_sanitizer_runtime_info = get_default_binary_dep(ctx.attrs.binary).get(CxxSanitizerRuntimeInfo) if ctx.attrs.binary else None + if cxx_sanitizer_runtime_info: + runtime_resource_spec = AppleResourceSpec( + files = cxx_sanitizer_runtime_info.runtime_files, + destination = AppleResourceDestination("frameworks"), + # Sanitizer dylibs require signing, for hardened runtime on macOS and iOS device builds + codesign_files_on_copy = True, + ) + resource_specs.append(runtime_resource_spec) + asset_catalog_result = compile_apple_asset_catalog(ctx, asset_catalog_specs) if asset_catalog_result != None: asset_catalog_part = AppleBundlePart( @@ -114,6 +130,9 @@ def get_apple_bundle_resource_part_list(ctx: AnalysisContext) -> AppleBundleReso parts.extend(_copy_resources(ctx, resource_specs)) parts.extend(_copy_first_level_bundles(ctx)) + parts.extend(_copy_public_headers(ctx)) + parts.extend(_copy_module_map(ctx)) + parts.extend(_copy_swift_library_evolution_support(ctx)) return AppleBundleResourcePartListOutput( resource_parts = parts, @@ -128,6 +147,19 @@ def _create_pkg_info_if_needed(ctx: AnalysisContext) -> list[AppleBundlePart]: artifact = ctx.actions.write("PkgInfo", "APPLWRUN\n") return [AppleBundlePart(source = artifact, destination = AppleBundleDestination("metadata"))] +def _copy_privacy_manifest_if_needed(ctx: AnalysisContext) -> list[AppleBundlePart]: + privacy_manifest = ctx.attrs.privacy_manifest + if privacy_manifest == None: + return [] + + # 
According to Apple docs, the privacy manifest has to be named `PrivacyInfo.xcprivacy`
+    if privacy_manifest.short_path.split("/", 1)[-1] == "PrivacyInfo.xcprivacy":
+        artifact = privacy_manifest
+    else:
+        output = ctx.actions.declare_output("PrivacyInfo.xcprivacy")
+        artifact = ctx.actions.copy_file(output.as_output(), privacy_manifest)
+    return [AppleBundlePart(source = artifact, destination = AppleBundleDestination("metadata"))]
+
 def _select_resources(ctx: AnalysisContext) -> ((list[AppleResourceSpec], list[AppleAssetCatalogSpec], list[AppleCoreDataSpec], list[SceneKitAssetsSpec], list[CxxResourceSpec])):
     resource_group_info = get_resource_group_info(ctx)
     if resource_group_info:
@@ -140,13 +172,77 @@ def _select_resources(ctx: AnalysisContext) -> ((list[AppleResourceSpec], list[A
     resource_graph = create_resource_graph(
         ctx = ctx,
         labels = [],
-        bundle_binary = ctx.attrs.binary,
+        bundle_binary = get_default_binary_dep(ctx.attrs.binary),
         deps = ctx.attrs.deps + resource_groups_deps,
         exported_deps = [],
     )
     resource_graph_node_map_func = get_resource_graph_node_map_func(resource_graph)
     return get_filtered_resources(ctx.label, resource_graph_node_map_func, ctx.attrs.resource_group, resource_group_mappings)
 
+def _copy_swift_library_evolution_support(ctx: AnalysisContext) -> list[AppleBundlePart]:
+    extension = get_extension_attr(ctx)
+    if extension != "framework":
+        return []
+
+    binary_deps = getattr(ctx.attrs, "binary")
+    if binary_deps == None:
+        return []
+
+    swiftmodule_files = {}
+
+    module_name = None
+    for binary in get_flattened_binary_deps(binary_deps):
+        apple_library_for_distribution_info = binary.get(AppleLibraryForDistributionInfo)
+        if apple_library_for_distribution_info == None:
+            continue
+        module_name = apple_library_for_distribution_info.module_name
+        swiftmodule_files.update({
+            apple_library_for_distribution_info.target_triple + ".swiftinterface": apple_library_for_distribution_info.swiftinterface,
+            apple_library_for_distribution_info.target_triple + ".private.swiftinterface": apple_library_for_distribution_info.private_swiftinterface,
+            apple_library_for_distribution_info.target_triple + ".swiftdoc": apple_library_for_distribution_info.swiftdoc,
+        })
+
+    if len(swiftmodule_files) == 0 or module_name == None:
+        return []
+
+    framework_module_dir = ctx.actions.declare_output(module_name + "framework.swiftmodule", dir = True)
+    ctx.actions.copied_dir(framework_module_dir.as_output(), swiftmodule_files)
+    return [AppleBundlePart(source = framework_module_dir, destination = AppleBundleDestination("modules"), new_name = module_name + ".swiftmodule")]
+
+def _copy_public_headers(ctx: AnalysisContext) -> list[AppleBundlePart]:
+    if not ctx.attrs.copy_public_framework_headers:
+        return []
+    binary_deps = getattr(ctx.attrs, "binary")
+    if binary_deps == None:
+        return []
+
+    binary = get_default_binary_dep(binary_deps)
+    apple_library_info = binary.get(AppleLibraryInfo)
+    if apple_library_info == None:
+        return []
+    tset = apple_library_info.public_framework_headers
+
+    bundle_parts = []
+    if tset._tset:
+        for public_framework_headers in tset._tset.traverse():
+            for public_framework_header in public_framework_headers:
+                for artifact in public_framework_header.artifacts:
+                    bundle_parts.append(AppleBundlePart(source = artifact, destination = AppleBundleDestination("headers")))
+
+    if apple_library_info.swift_header:
+        bundle_parts.append(AppleBundlePart(source = apple_library_info.swift_header, destination = AppleBundleDestination("headers")))
+
+    return bundle_parts
+
+def _copy_module_map(ctx: AnalysisContext) -> list[AppleBundlePart]:
+    extension = get_extension_attr(ctx)
+    if extension != "framework":
+        return []
+    module_map = ctx.attrs.module_map
+    if not module_map:
+        return []
+    return [AppleBundlePart(source = module_map, destination = AppleBundleDestination("modules"))]
+
 def _copy_resources(ctx: AnalysisContext, specs: list[AppleResourceSpec]) -> list[AppleBundlePart]:
     result = []
@@ -158,6 +254,8 @@ def _copy_resources(ctx: AnalysisContext, specs: list[AppleResourceSpec]) -> lis
             destination = bundle_destination,
             destination_relative_path = None,
             codesign_on_copy = spec.codesign_files_on_copy,
+            codesign_entitlements = spec.codesign_entitlements,
+            codesign_flags_override = spec.codesign_flags_override,
         ) for x in spec.files]
         result += _bundle_parts_for_dirs(spec.dirs, bundle_destination, False)
         result += _bundle_parts_for_dirs(spec.content_dirs, bundle_destination, True)
@@ -280,13 +378,23 @@ def _run_ibtool(
             ],
             allow_args = True,
         )
-        command = cmd_args(["/bin/sh", wrapper_script]).hidden([ibtool_command, output])
+        command = cmd_args(["/bin/sh", wrapper_script], hidden = [ibtool_command, output])
     else:
         command = ibtool_command
 
     processing_options = get_bundle_resource_processing_options(ctx)
     ctx.actions.run(command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_ibtool", identifier = action_identifier)
 
+def _ibtool_identifier(action: str, raw_file: Artifact) -> str:
+    "*.xib files can live in .lproj folders and share the same name, so the action identifier needs disambiguation"
+    identifier_parts = []
+    variant_name = _get_variant_dirname(raw_file)
+    if variant_name:
+        # variant_name is like "zh_TW.lproj"; include it so same-named files stay distinct
+        identifier_parts.append(variant_name)
+    identifier_parts += [raw_file.basename]
+    return "ibtool_" + action + " " + "/".join(identifier_parts)
+
 def _compile_ui_resource(
     ctx: AnalysisContext,
     raw_file: Artifact,
@@ -299,7 +407,7 @@ def _compile_ui_resource(
         output = output,
         action_flags = ["--compile"],
         target_device = target_device,
-        action_identifier = "compile_" + raw_file.basename,
+        action_identifier = _ibtool_identifier("compile", raw_file),
         output_is_dir = output_is_dir,
     )
 
@@ -315,7 +423,7 @@ def _link_ui_resource(
         output = output,
         action_flags = ["--link"],
         target_device = target_device,
-        action_identifier = "link_" + raw_file.basename,
+        action_identifier = _ibtool_identifier("link", raw_file),
         output_is_dir = output_is_dir,
     )
 
@@ -324,7 +432,9 @@ def _process_apple_resource_file_if_needed(
         file: Artifact,
         destination: AppleBundleDestination,
         destination_relative_path: [str, None],
-        codesign_on_copy: bool = False) -> AppleBundlePart:
+        codesign_on_copy: bool = False,
+        codesign_entitlements: Artifact | None = None,
+        codesign_flags_override: list[str] | None = None) -> AppleBundlePart:
     output_dir = "_ProcessedResources"
     basename = paths.basename(file.short_path)
     output_is_contents_dir = False
@@ -337,6 +447,8 @@ def _process_apple_resource_file_if_needed(
             action_id = destination_relative_path,
         )
     elif basename.endswith(".storyboard"):
+        if destination_relative_path:
+            destination_relative_path = paths.replace_extension(destination_relative_path, ".storyboardc")
         compiled = ctx.actions.declare_output(paths.join(output_dir, paths.replace_extension(file.short_path, ".storyboardc")), dir = True)
         if get_is_watch_bundle(ctx):
             output_is_contents_dir = True
@@ -347,6 +459,8 @@ def _process_apple_resource_file_if_needed(
             processed = compiled
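        # For illustration (hypothetical path, not part of this diff): for a raw
        # file at Settings/zh_TW.lproj/Root.storyboard, the compile action below
        # gets the identifier "ibtool_compile zh_TW.lproj/Root.storyboard", so
        # same-named files in different .lproj variant folders stay distinct.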
_compile_ui_resource(ctx, file, processed.as_output()) elif basename.endswith(".xib"): + if destination_relative_path: + destination_relative_path = paths.replace_extension(destination_relative_path, ".nib") processed = ctx.actions.declare_output(paths.join(output_dir, paths.replace_extension(file.short_path, ".nib"))) _compile_ui_resource(ctx, file, processed.as_output()) else: @@ -355,17 +469,21 @@ def _process_apple_resource_file_if_needed( # When name is empty string only content of the directory will be copied, as opposed to the directory itself. # When name is `None`, directory or file will be copied as it is, without renaming. new_name = destination_relative_path if destination_relative_path else ("" if output_is_contents_dir else None) - return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy) + return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy, codesign_entitlements = codesign_entitlements, codesign_flags_override = codesign_flags_override) # Returns a path relative to the _parent_ of the lproj dir. # For example, given a variant file with a short path of`XX/YY.lproj/ZZ` # it would return `YY.lproj/ZZ`. def _get_dest_subpath_for_variant_file(variant_file: Artifact) -> str: - dir_name = paths.basename(paths.dirname(variant_file.short_path)) - if not dir_name.endswith("lproj"): + dir_name = _get_variant_dirname(variant_file) + if not dir_name: fail("Variant files have to be in a directory with name ending in '.lproj' but `{}` was not.".format(variant_file.short_path)) file_name = paths.basename(variant_file.short_path) return paths.join(dir_name, file_name) +def _get_variant_dirname(variant_file: Artifact) -> str | None: + dir_name = paths.basename(paths.dirname(variant_file.short_path)) + return dir_name if dir_name.endswith("lproj") else None + def get_is_watch_bundle(ctx: AnalysisContext) -> bool: - return ctx.attrs._apple_toolchain[AppleToolchainInfo].watch_kit_stub_binary != None + return ctx.attrs._apple_toolchain[AppleToolchainInfo].sdk_name.startswith("watch") diff --git a/prelude/apple/apple_bundle_types.bzl b/prelude/apple/apple_bundle_types.bzl index 527d13c733..9ac49d3f51 100644 --- a/prelude/apple/apple_bundle_types.bzl +++ b/prelude/apple/apple_bundle_types.bzl @@ -15,6 +15,22 @@ AppleBundleType = enum( "appclip", ) +AppleBundleManifestLogFiles = record( + command_file = field(Artifact), + spec_file = field(Artifact), + log_file = field([Artifact, None], None), +) + +AppleBundleManifest = record( + log_file_map = dict[Label, AppleBundleManifestLogFiles], +) + +AppleBundleManifestInfo = provider( + fields = { + "manifest": provider_field(AppleBundleManifest), + }, +) + # Provider flagging that result of the rule contains Apple bundle. # It might be copied into main bundle to appropriate place if rule # with this provider is a dependency of `apple_bundle`. @@ -26,7 +42,7 @@ AppleBundleInfo = provider( "bundle_type": provider_field(AppleBundleType), # The name of the executable within the bundle. "binary_name": provider_field([str, None], default = None), - # If the bundle contains a Watch Extension executable, we have to update the packaging. + # If the bundle contains a Watch bundle, we have to update the packaging. # Similar to `is_watchos`, this might be omitted for certain types of bundles which don't depend on it. 
"contains_watchapp": provider_field([bool, None]), # By default, non-framework, non-appex binaries copy Swift libraries into the final @@ -63,8 +79,6 @@ AppleBundleExtraOutputsInfo = provider(fields = { AppleBundleBinaryOutput = record( binary = field(Artifact), debuggable_info = field([AppleDebuggableInfo, None], None), - # In the case of watchkit, the `ctx.attrs.binary`'s not set, and we need to create a stub binary. - is_watchkit_stub_binary = field(bool, False), ) AppleBundleTypeDefault = AppleBundleType("default") @@ -74,4 +88,5 @@ AppleBundleTypeAppClip = AppleBundleType("appclip") # Represents the user-visible type which is distinct from the internal one (`AppleBundleType`) AppleBundleTypeAttributeType = enum( "appclip", + "watchapp", ) diff --git a/prelude/apple/apple_bundle_utility.bzl b/prelude/apple/apple_bundle_utility.bzl index 72ef8d2659..b490988963 100644 --- a/prelude/apple/apple_bundle_utility.bzl +++ b/prelude/apple/apple_bundle_utility.bzl @@ -27,7 +27,10 @@ def get_product_name(ctx: AnalysisContext) -> str: def get_extension_attr(ctx: AnalysisContext) -> typing.Any: return ctx.attrs.extension -def get_default_binary_dep(binary_deps: dict[str, Dependency]) -> [Dependency, None]: +def get_default_binary_dep(binary_deps: [dict[str, Dependency], Dependency, None]) -> [Dependency, None]: + if not type(binary_deps) == "dict": + return binary_deps + if len(binary_deps.items()) == 1: return binary_deps.values()[0] @@ -39,20 +42,12 @@ def get_flattened_binary_deps(binary_deps: dict[str, Dependency]) -> list[Depend # Derives the effective deployment target for the bundle. It's # usually the deployment target of the binary if present, # otherwise it falls back to other values (see implementation). -def get_bundle_min_target_version(ctx: AnalysisContext, binary: [Dependency, None]) -> str: +def get_bundle_min_target_version(ctx: AnalysisContext, binary_or_binaries: [dict[str, Dependency], Dependency, None]) -> str: + binary = get_default_binary_dep(binary_or_binaries) + binary_min_version = None - # Could be not set for e.g. watchOS bundles which have a stub - # binary that comes from the apple_toolchain(), not from the - # apple_bundle() itself (i.e., binary field will be None). - # - # TODO(T114147746): The top-level stub bundle for a watchOS app - # does not have the ability to set its deployment target via - # a binary (as that field is empty). If it contains asset - # catalogs (can it?), we need to use correct target version. - # - # The solution might to be support SDK version from - # Info.plist (T110378109). + # apple_xcuitest bundles do not have a binary if binary != None: min_version_info = binary[AppleMinDeploymentVersionInfo] if AppleMinDeploymentVersionInfo in binary else None if min_version_info != None: diff --git a/prelude/apple/apple_code_signing_types.bzl b/prelude/apple/apple_code_signing_types.bzl index 555a04f8aa..66ac6cad95 100644 --- a/prelude/apple/apple_code_signing_types.bzl +++ b/prelude/apple/apple_code_signing_types.bzl @@ -7,7 +7,7 @@ # Provider which exposes a field from `apple_binary` to `apple_bundle` as it might be used during code signing. 
 AppleEntitlementsInfo = provider(fields = {
-    "entitlements_file": provider_field([Artifact, None], default = None),
+    "entitlements_file": provider_field(Artifact | None, default = None),
 })
 
 CodeSignType = enum(
@@ -15,3 +15,9 @@
     "adhoc",
     "distribution",
 )
+
+CodeSignConfiguration = enum(
+    "dry-run",
+    "fast-adhoc",
+    "none",
+)
diff --git a/prelude/decls/apple_common.bzl b/prelude/apple/apple_common.bzl
similarity index 96%
rename from prelude/decls/apple_common.bzl
rename to prelude/apple/apple_common.bzl
index 3c44e6c83e..92ff680364 100644
--- a/prelude/decls/apple_common.bzl
+++ b/prelude/apple/apple_common.bzl
@@ -53,22 +53,20 @@ def _header_path_prefix_arg():
     using
 ```
-
 apple_library(
     name = "Library",
     headers = glob(["**/*.h"]),
     header_path_prefix = "Lib",
 )
-
 ```
+ can be imported using the following mapping
 ```
-
 Library/SubDir/Header1.h -> Lib/Header1.h
 Library/Header2.h -> Lib/Header2.h
-
 ```
+
 Defaults to the short name of the target. Can contain forward slashes (`/`), but
 cannot start with one. See `headers` for more information.
 """),
@@ -128,6 +126,13 @@ def _extra_xcode_files():
 """),
     }
 
+def _privacy_manifest_arg():
+    return {
+        "privacy_manifest": attrs.option(attrs.source(), default = None, doc = """
+    A path to an `.xcprivacy` file that will be placed in the bundle.
+"""),
+    }
+
 apple_common = struct(
     headers_arg = _headers_arg,
     exported_headers_arg = _exported_headers_arg,
@@ -138,4 +143,5 @@ apple_common = struct(
     info_plist_substitutions_arg = _info_plist_substitutions_arg,
     extra_xcode_sources = _extra_xcode_sources,
     extra_xcode_files = _extra_xcode_files,
+    privacy_manifest_arg = _privacy_manifest_arg,
 )
diff --git a/prelude/apple/apple_core_data.bzl b/prelude/apple/apple_core_data.bzl
index 82721a4af9..2b67c64ef0 100644
--- a/prelude/apple/apple_core_data.bzl
+++ b/prelude/apple/apple_core_data.bzl
@@ -5,6 +5,7 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
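# A usage sketch for the new `module` attribute handled below (hypothetical
# BUCK file; target and file names are illustrative):
#
#     apple_core_data(
#         name = "Model",
#         path = "Model.xcdatamodeld",
#         # Overrides the value passed to the tool via --module; when omitted,
#         # the enclosing bundle's product name is used instead.
#         module = "MyAppModel",
#     )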
+load("@prelude//:paths.bzl", "paths") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_bundle_resource_processing_options") load(":apple_core_data_types.bzl", "AppleCoreDataSpec") @@ -13,6 +14,7 @@ load(":resource_groups.bzl", "create_resource_graph") def apple_core_data_impl(ctx: AnalysisContext) -> list[Provider]: spec = AppleCoreDataSpec( + module = ctx.attrs.module, path = ctx.attrs.path, ) graph = create_resource_graph( @@ -24,17 +26,18 @@ def apple_core_data_impl(ctx: AnalysisContext) -> list[Provider]: ) return [DefaultInfo(), graph] -def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec], product_name: str) -> [Artifact, None]: +def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec], product_name: str) -> Artifact | None: if len(specs) == 0: return None output = ctx.actions.declare_output("AppleCoreDataCompiled") - # Aggregate all the coredata momc commands together - momc_commands = [] + # Aggregate all the coredata momc and mapc commands together + tool_commands = [] for spec in specs: - momc_command = _get_momc_command(ctx, spec, product_name, cmd_args("$TMPDIR")) - momc_commands.append(momc_command) + tool, output_path = _get_model_args(ctx, spec) + tool_command = _get_tool_command(ctx, spec, product_name, tool, output_path) + tool_commands.append(tool_command) # Sandboxing and fs isolation on RE machines results in Xcode tools failing # when those are working in freshly created directories in buck-out. @@ -42,29 +45,38 @@ def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec] # As a workaround create a directory in tmp, use it for Xcode tools, then # copy the result to buck-out. 
    wrapper_script, _ = ctx.actions.write(
-        "momc_wrapper.sh",
+        "tool_wrapper.sh",
        [
            cmd_args("set -euo pipefail"),
            cmd_args('export TMPDIR="$(mktemp -d)"'),
-            cmd_args(momc_commands),
+            cmd_args(tool_commands),
            cmd_args(output, format = 'mkdir -p {} && cp -r "$TMPDIR"/ {}'),
        ],
        allow_args = True,
    )
-    combined_command = cmd_args(["/bin/sh", wrapper_script]).hidden(momc_commands + [output.as_output()])
+    combined_command = cmd_args(["/bin/sh", wrapper_script], hidden = tool_commands + [output.as_output()])
     processing_options = get_bundle_resource_processing_options(ctx)
     ctx.actions.run(combined_command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_core_data")
     return output
 
-def _get_momc_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, product_name: str, output_directory: cmd_args) -> cmd_args:
+def _get_model_args(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec):
+    toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo]
+
+    if core_data_spec.path.extension == ".xcmappingmodel":
+        filename = paths.replace_extension(core_data_spec.path.basename, ".cdm")
+        return toolchain.mapc, cmd_args("$TMPDIR/" + filename)
+    else:
+        return toolchain.momc, cmd_args("$TMPDIR")
+
+def _get_tool_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, product_name: str, tool: RunInfo, output: cmd_args) -> cmd_args:
     return cmd_args([
-        ctx.attrs._apple_toolchain[AppleToolchainInfo].momc,
+        tool,
        "--sdkroot",
        ctx.attrs._apple_toolchain[AppleToolchainInfo].sdk_path,
        "--" + get_apple_sdk_name(ctx) + "-deployment-target",
        get_bundle_min_target_version(ctx, ctx.attrs.binary),
        "--module",
-        product_name,
-        core_data_spec.path,
-        output_directory,
-    ], delimiter = " ")
+        core_data_spec.module if core_data_spec.module else product_name,
+        cmd_args(core_data_spec.path, format = "./{}"),
+        output,
+    ], delimiter = " ", hidden = core_data_spec.path)
diff --git a/prelude/apple/apple_core_data_types.bzl b/prelude/apple/apple_core_data_types.bzl
index 700a5d602b..a3cc54c047 100644
--- a/prelude/apple/apple_core_data_types.bzl
+++ b/prelude/apple/apple_core_data_types.bzl
@@ -6,5 +6,6 @@
 # of this source tree.
 
 AppleCoreDataSpec = record(
+    module = field(str | None),
     path = field(Artifact),
 )
diff --git a/prelude/apple/apple_dsym.bzl b/prelude/apple/apple_dsym.bzl
index 7feabb29ee..819cd4468d 100644
--- a/prelude/apple/apple_dsym.bzl
+++ b/prelude/apple/apple_dsym.bzl
@@ -17,38 +17,37 @@ def get_apple_dsym(ctx: AnalysisContext, executable: Artifact, debug_info: list[
     return get_apple_dsym_ext(ctx, executable, debug_info, action_identifier, output_path)
 
 # TODO(T110672942): Things which are still unsupported:
-# - pass in dsymutil_extra_flags
 # - oso_prefix
 # - dsym_verification
 def get_apple_dsym_ext(ctx: AnalysisContext, executable: [ArgLike, Artifact], debug_info: list[ArgLike], action_identifier: str, output_path: str) -> Artifact:
     dsymutil = ctx.attrs._apple_toolchain[AppleToolchainInfo].dsymutil
     output = ctx.actions.declare_output(output_path, dir = True)
-    cmd = cmd_args([dsymutil, "-o", output.as_output()])
-    cmd.add(executable)
+    cmd = cmd_args(
+        [dsymutil] + ctx.attrs._dsymutil_extra_flags + ["-o", output.as_output()],
+        executable,
+        # Mach-O executables don't contain DWARF data.
+        # Instead, they contain paths to the object files which themselves contain DWARF data.
+        #
+        # So, those object files are needed for dsymutil to be able to create the dSYM bundle.
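        # For illustration (hypothetical buckconfig value): with
        #
        #     [apple]
        #       dsymutil_extra_flags = --verbose
        #
        # in the root .buckconfig, the action assembled here runs roughly
        # `dsymutil --verbose -o <output_path> <executable>`.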
+ hidden = debug_info, + ) - # Mach-O executables don't contain DWARF data. - # Instead, they contain paths to the object files which themselves contain DWARF data. - # - # So, those object files are needed for dsymutil to be to create the dSYM bundle. - cmd.hidden(debug_info) ctx.actions.run(cmd, category = "apple_dsym", identifier = action_identifier) return output -def get_apple_dsym_info(ctx: AnalysisContext, binary_dsyms: list[Artifact], dep_dsyms: list[Artifact]) -> Artifact: +def get_apple_dsym_info_json(binary_dsyms: list[Artifact], dep_dsyms: list[Artifact]) -> dict[str, typing.Any]: dsym_info = {} - # WatchOS stub does not have a dSYM, so it's possible that we get zero `binary_dsyms` if len(binary_dsyms) == 1: dsym_info["binary"] = binary_dsyms[0] - elif len(binary_dsyms) > 1: - fail("There cannot be more than one binary dSYM") + else: + fail("There can only be one binary dSYM") if dep_dsyms: # `dedupe` needed as it's possible for the same dSYM to bubble up # through multiple paths in a graph (e.g., including both a binary # + bundle in the `deps` field of a parent bundle). dsym_info["deps"] = dedupe(dep_dsyms) - - return ctx.actions.write_json("dsym-info.json", dsym_info) + return dsym_info diff --git a/prelude/apple/apple_dsym_config.bzl b/prelude/apple/apple_dsym_config.bzl new file mode 100644 index 0000000000..1e29bb0fdc --- /dev/null +++ b/prelude/apple/apple_dsym_config.bzl @@ -0,0 +1,13 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//utils:buckconfig.bzl", "read_list") + +def apple_dsym_config() -> dict[str, typing.Any]: + return { + "_dsymutil_extra_flags": read_list("apple", "dsymutil_extra_flags", delimiter = " ", default = [], root_cell = True), + } diff --git a/prelude/apple/apple_entitlements.bzl b/prelude/apple/apple_entitlements.bzl index 6a44ffb45a..6342f926eb 100644 --- a/prelude/apple/apple_entitlements.bzl +++ b/prelude/apple/apple_entitlements.bzl @@ -12,12 +12,12 @@ load(":apple_sdk_metadata.bzl", "IPhoneSimulatorSdkMetadata", "MacOSXCatalystSdk load(":apple_toolchain_types.bzl", "AppleToolchainInfo") def get_entitlements_codesign_args(ctx: AnalysisContext, codesign_type: CodeSignType) -> list[ArgLike]: - include_entitlements = _should_include_entitlements(ctx, codesign_type) + include_entitlements = should_include_entitlements(ctx, codesign_type) maybe_entitlements = _entitlements_file(ctx) if include_entitlements else None entitlements_args = ["--entitlements", maybe_entitlements] if maybe_entitlements else [] return entitlements_args -def _should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: +def should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: if codesign_type.value == "distribution": return True @@ -29,7 +29,7 @@ def _should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignTy return False -def _entitlements_file(ctx: AnalysisContext) -> [Artifact, None]: +def _entitlements_file(ctx: AnalysisContext) -> Artifact | None: if hasattr(ctx.attrs, "entitlements_file"): # Bundling `apple_test` which doesn't have a binary to provide the entitlements, so they are provided via `entitlements_file` attribute directly. 
return ctx.attrs.entitlements_file @@ -38,7 +38,7 @@ def _entitlements_file(ctx: AnalysisContext) -> [Artifact, None]: return None # The `binary` attribute can be either an apple_binary or a dynamic library from apple_library - binary_entitlement_info = get_default_binary_dep(ctx.attrs.binary)[AppleEntitlementsInfo] + binary_entitlement_info = get_default_binary_dep(ctx.attrs.binary).get(AppleEntitlementsInfo) if binary_entitlement_info and binary_entitlement_info.entitlements_file: return binary_entitlement_info.entitlements_file diff --git a/prelude/apple/apple_framework_versions.bzl b/prelude/apple/apple_framework_versions.bzl index 3f6761b8a2..ea06cd1d21 100644 --- a/prelude/apple/apple_framework_versions.bzl +++ b/prelude/apple/apple_framework_versions.bzl @@ -621,6 +621,10 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "watchos": (5, 0, 0), }, "MediaSetup": {"iphoneos": (14, 0, 0), "maccatalyst": (15, 4, 0)}, + "MediaToolbox": { + "iphoneos": (6, 0, 0), + "macosx": (10, 9, 0), + }, "MessageUI": {"iphoneos": (3, 0, 0), "maccatalyst": (13, 0, 0)}, "Messages": {"iphoneos": (10, 0, 0), "maccatalyst": (14, 0, 0)}, "Metal": { @@ -660,6 +664,7 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "MobileCoreServices": { "appletvos": (9, 0, 0), "iphoneos": (2, 0, 0), + "maccatalyst": (14, 0, 0), "watchos": (1, 0, 0), }, "ModelIO": { @@ -834,6 +839,11 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { }, "SecurityFoundation": {"maccatalyst": (13, 0, 0), "macosx": (10, 3, 0)}, "SecurityInterface": {"macosx": (10, 3, 0)}, + "SensitiveContentAnalysis": { + "iphoneos": (17, 0, 0), + "maccatalyst": (17, 0, 0), + "macosx": (14, 0, 0), + }, "SensorKit": { "iphoneos": (14, 0, 0), "maccatalyst": (14, 0, 0), @@ -1015,6 +1025,13 @@ def _parse_version(version: str) -> (int, int, int): result[i] = components[i] return (result[0], result[1], result[2]) +def validate_sdk_frameworks(frameworks: list[str]) -> None: + for framework in frameworks: + if framework.startswith("$SDKROOT/System/Library/Frameworks"): + framework_name = framework[len("$SDKROOT/System/Library/Frameworks/"):-len(".framework")] + if framework_name not in _FRAMEWORK_INTRODUCED_VERSIONS: + fail("Framework {} is missing version information".format(framework_name)) + def get_framework_linker_args(ctx: AnalysisContext, framework_names: list[str]) -> list[str]: if not has_apple_toolchain(ctx): return _get_unchecked_framework_linker_args(framework_names) diff --git a/prelude/apple/apple_frameworks.bzl b/prelude/apple/apple_frameworks.bzl index 970c5c650a..2957cbc660 100644 --- a/prelude/apple/apple_frameworks.bzl +++ b/prelude/apple/apple_frameworks.bzl @@ -23,7 +23,7 @@ load( "merge_swiftmodule_linkables", ) load("@prelude//utils:expect.bzl", "expect") -load(":apple_framework_versions.bzl", "get_framework_linker_args") +load(":apple_framework_versions.bzl", "get_framework_linker_args", "validate_sdk_frameworks") load(":apple_toolchain_types.bzl", "AppleToolchainInfo") _IMPLICIT_SDKROOT_FRAMEWORK_SEARCH_PATHS = [ @@ -55,6 +55,7 @@ def _get_apple_frameworks_linker_flags(ctx: AnalysisContext, linkable: [Framewor return flags def get_framework_search_path_flags(ctx: AnalysisContext) -> cmd_args: + validate_sdk_frameworks(ctx.attrs.frameworks) unresolved_framework_dirs = _get_non_sdk_unresolved_framework_directories(ctx.attrs.frameworks) expanded_framework_dirs = _expand_sdk_framework_paths(ctx, unresolved_framework_dirs) return _get_framework_search_path_flags(expanded_framework_dirs) diff --git a/prelude/apple/apple_genrule_deps.bzl b/prelude/apple/apple_genrule_deps.bzl deleted file 
mode 100644 index 756a5bdd3e..0000000000 --- a/prelude/apple/apple_genrule_deps.bzl +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -load("@prelude//:genrule_types.bzl", "GENRULE_MARKER_SUBTARGET_NAME") - -def get_apple_genrule_deps_outputs(deps: list[Dependency]) -> list[Artifact]: - artifacts = [] - for dep in deps: - default_info = dep[DefaultInfo] - if GENRULE_MARKER_SUBTARGET_NAME in default_info.sub_targets: - artifacts += default_info.default_outputs - return artifacts - -def get_apple_build_genrule_deps_attr_value(ctx: AnalysisContext) -> bool: - build_genrule_deps = ctx.attrs.build_genrule_deps - if build_genrule_deps != None: - # `build_genrule_deps` present on a target takes priority - return build_genrule_deps - - # Fallback to the default value which is driven by buckconfig + select() - return ctx.attrs._build_genrule_deps - -def get_apple_build_genrule_deps_default_kwargs() -> dict[str, typing.Any]: - return { - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: _build_genrule_deps_default_enabled(), - } - -def _build_genrule_deps_default_enabled() -> typing.Any: - buckconfig_value = read_root_config("apple", "build_genrule_deps", None) - if buckconfig_value != None: - return buckconfig_value.lower() == "true" - - return select({ - "DEFAULT": False, - # TODO(mgd): Make `config//` references possible from macro layer - "ovr_config//features/apple/constraints:build_genrule_deps_enabled": True, - }) - -APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME = "_build_genrule_deps" -APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE = attrs.bool(default = False) - -APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME = "build_genrule_deps" -APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE = attrs.option(attrs.bool(), default = None) diff --git a/prelude/apple/apple_info_plist.bzl b/prelude/apple/apple_info_plist.bzl index 44a33f7a51..06e1c23de9 100644 --- a/prelude/apple/apple_info_plist.bzl +++ b/prelude/apple/apple_info_plist.bzl @@ -7,7 +7,7 @@ load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart") -load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_product_name") +load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_binary_dep", "get_product_name") load(":apple_sdk.bzl", "get_apple_sdk_name") load( ":apple_sdk_metadata.bzl", @@ -20,7 +20,7 @@ load( ) load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") -def process_info_plist(ctx: AnalysisContext, override_input: [Artifact, None]) -> AppleBundlePart: +def process_info_plist(ctx: AnalysisContext, override_input: Artifact | None) -> AppleBundlePart: input = _preprocess_info_plist(ctx) output = ctx.actions.declare_output("Info.plist") additional_keys = _additional_keys_as_json_file(ctx) @@ -64,7 +64,7 @@ def _preprocess_info_plist(ctx: AnalysisContext) -> Artifact: ctx.actions.run(command, category = "apple_preprocess_info_plist", **_get_plist_run_options()) return output -def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> [Artifact, None]: +def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> Artifact | None: info_plist_substitutions = ctx.attrs.info_plist_substitutions if not info_plist_substitutions: return None @@ -72,7 
+72,7 @@ def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> [Artifact, None]: substitutions_json = ctx.actions.write_json("plist_substitutions.json", info_plist_substitutions) return substitutions_json -def process_plist(ctx: AnalysisContext, input: Artifact, output: OutputArtifact, override_input: [Artifact, None] = None, additional_keys: [Artifact, None] = None, override_keys: [Artifact, None] = None, action_id: [str, None] = None): +def process_plist(ctx: AnalysisContext, input: Artifact, output: OutputArtifact, override_input: Artifact | None = None, additional_keys: Artifact | None = None, override_keys: Artifact | None = None, action_id: [str, None] = None): apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] processor = apple_tools.info_plist_processor override_input_arguments = ["--override-input", override_input] if override_input != None else [] @@ -112,7 +112,7 @@ def _info_plist_additional_keys(ctx: AnalysisContext) -> dict[str, typing.Any]: xcode_version = ctx.attrs._apple_toolchain[AppleToolchainInfo].xcode_version if xcode_version: result["DTXcode"] = xcode_version - result[sdk_metadata.min_version_plist_info_key] = get_bundle_min_target_version(ctx, ctx.attrs.binary) + result[sdk_metadata.min_version_plist_info_key] = get_bundle_min_target_version(ctx, get_default_binary_dep(ctx.attrs.binary)) identify_build_system = ctx.attrs._info_plist_identify_build_system_default if ctx.attrs.info_plist_identify_build_system != None: @@ -124,7 +124,7 @@ def _info_plist_additional_keys(ctx: AnalysisContext) -> dict[str, typing.Any]: return result def _extra_mac_info_plist_keys(sdk_metadata: AppleSdkMetadata, extension: str) -> dict[str, typing.Any]: - if sdk_metadata.name == MacOSXSdkMetadata.name and extension == "xpc": + if sdk_metadata.name == MacOSXSdkMetadata.name and extension != "xpc": return { "NSHighResolutionCapable": True, "NSSupportsAutomaticGraphicsSwitching": True, @@ -142,6 +142,9 @@ def _info_plist_override_keys(ctx: AnalysisContext) -> dict[str, typing.Any]: if sdk_name == MacOSXSdkMetadata.name: if ctx.attrs.extension != "xpc": result["LSRequiresIPhoneOS"] = False - elif sdk_name not in [WatchOSSdkMetadata.name, WatchSimulatorSdkMetadata.name, MacOSXCatalystSdkMetadata.name]: + elif sdk_name in [WatchOSSdkMetadata.name, WatchSimulatorSdkMetadata.name]: + result["UIDeviceFamily"] = [4] + result["WKApplication"] = True + elif sdk_name not in [MacOSXCatalystSdkMetadata.name]: result["LSRequiresIPhoneOS"] = True return result diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index ce7cf47040..12a025563d 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -7,15 +7,18 @@ load( "@prelude//:artifact_tset.bzl", + "ArtifactTSet", + "make_artifact_tset", "project_artifacts", ) -load("@prelude//apple:apple_buck2_compatibility.bzl", "apple_check_buck2_compatibility") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "add_extra_linker_outputs") load( "@prelude//apple/swift:swift_compilation.bzl", + "SwiftLibraryForDistributionOutput", # @unused Used as a type "compile_swift", "get_swift_anonymous_targets", "get_swift_debug_infos", @@ -30,10 +33,6 @@ load( "CompileArgsfile", # @unused Used as a type 
"CompileArgsfiles", ) -load( - "@prelude//cxx:compile.bzl", - "CxxSrcWithFlags", # @unused Used as a type -) load( "@prelude//cxx:cxx_library.bzl", "CxxLibraryOutput", # @unused Used as a type @@ -44,7 +43,11 @@ load( "cxx_attr_deps", "cxx_attr_exported_deps", ) -load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") +load( + "@prelude//cxx:cxx_sources.bzl", + "CxxSrcWithFlags", # @unused Used as a type + "get_srcs_with_flags", +) load( "@prelude//cxx:cxx_types.bzl", "CxxRuleAdditionalParams", @@ -52,7 +55,7 @@ load( "CxxRuleProviderParams", "CxxRuleSubTargetParams", ) -load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers") +load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers", "cxx_attr_headers_list") load( "@prelude//cxx:linker.bzl", "SharedLibraryFlagOverrides", @@ -69,11 +72,11 @@ load( ) load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") +load("@prelude//apple/mockingbird/mockingbird_types.bzl", "MockingbirdLibraryInfo", "MockingbirdLibraryInfoTSet", "MockingbirdLibraryRecord", "MockingbirdSourcesInfo", "MockingbirdTargetType") load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentVersionInfo") load(":apple_frameworks.bzl", "get_framework_search_path_flags") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH") -load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") +load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags", "get_unversioned_target_triple", "get_versioned_target_triple") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback", "get_module_name") load( ":debug.bzl", @@ -85,6 +88,13 @@ load(":resource_groups.bzl", "create_resource_graph") load(":xcode.bzl", "apple_populate_xcode_attributes") load(":xctest_swift_support.bzl", "xctest_swift_support_info") +AppleSharedLibraryMachOFileType = enum( + # dynamically bound shared library file + "dylib", + # dynamically bound bundle file aka Mach-O bundle + "bundle", +) + AppleLibraryAdditionalParams = record( # Name of the top level rule utilizing the apple_library rule. 
rule_type = str, @@ -107,11 +117,36 @@ AppleLibraryAdditionalParams = record( force_link_group_linking = field(bool, False), ) -def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: - apple_check_buck2_compatibility(ctx) +AppleLibraryForDistributionInfo = provider( + fields = { + "module_name": str, + "private_swiftinterface": Artifact, + "swiftdoc": Artifact, + "swiftinterface": Artifact, + "target_triple": str, + }, +) +AppleLibraryInfo = provider( + fields = { + "public_framework_headers": ArtifactTSet, + "swift_header": [Artifact, None], + }, +) +def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: def get_apple_library_providers(deps_providers) -> list[Provider]: - constructor_params = apple_library_rule_constructor_params_and_swift_providers( + shared_type = AppleSharedLibraryMachOFileType(ctx.attrs.shared_library_macho_file_type) + if shared_type == AppleSharedLibraryMachOFileType("bundle"): + shared_library_flags_overrides = SharedLibraryFlagOverrides( + # When `-bundle` is used we can't use the `-install_name` args, thus we keep this field empty. + shared_library_name_linker_flags_format = [], + shared_library_flags = ["-bundle"], + ) + elif shared_type == AppleSharedLibraryMachOFileType("dylib"): + shared_library_flags_overrides = None + else: + fail("Unsupported `shared_library_macho_file_type` attribute value: `{}`".format(shared_type)) + constructor_params, swift_library_for_distribution = apple_library_rule_constructor_params_and_swift_providers( ctx, AppleLibraryAdditionalParams( rule_type = "apple_library", @@ -122,19 +157,97 @@ def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: # We generate a provider on our own, disable to avoid several providers of same type. cxx_resources_as_apple_resources = False, ), + shared_library_flags = shared_library_flags_overrides, ), deps_providers, ) + swift_header = constructor_params.swift_objc_header output = cxx_library_parameterized(ctx, constructor_params) - return output.providers + + return output.providers + _make_mockingbird_library_info_provider(ctx) + _make_apple_library_info_provider(ctx, swift_header) + _make_apple_library_for_distribution_info_provider(ctx, swift_library_for_distribution) if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_library_providers) else: return get_apple_library_providers([]) -def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisContext, params: AppleLibraryAdditionalParams, deps_providers: list = [], is_test_target: bool = False) -> CxxRuleConstructorParams: - cxx_srcs, swift_srcs = _filter_swift_srcs(ctx) +def _make_apple_library_for_distribution_info_provider(ctx: AnalysisContext, swift_library_for_distribution: [None, SwiftLibraryForDistributionOutput]) -> list[AppleLibraryForDistributionInfo]: + if not swift_library_for_distribution: + return [] + return [AppleLibraryForDistributionInfo( + target_triple = get_unversioned_target_triple(ctx).replace("macosx", "macos"), + swiftinterface = swift_library_for_distribution.swiftinterface, + private_swiftinterface = swift_library_for_distribution.private_swiftinterface, + swiftdoc = swift_library_for_distribution.swiftdoc, + module_name = get_module_name(ctx), + )] + +def _make_apple_library_info_provider(ctx: AnalysisContext, swift_header: [None, Artifact]) -> list[AppleLibraryInfo]: + public_framework_headers = cxx_attr_headers_list(ctx, ctx.attrs.public_framework_headers, [], get_apple_cxx_headers_layout(ctx)) + all_deps = 
cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx) + apple_library_infos = filter(None, [dep.get(AppleLibraryInfo) for dep in all_deps]) + + public_framework_header_tset = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = [header.artifact for header in public_framework_headers], + children = [apple_library.public_framework_headers for apple_library in apple_library_infos], + ) + + return [AppleLibraryInfo(public_framework_headers = public_framework_header_tset, swift_header = swift_header)] + +def _make_mockingbird_library_info_provider(ctx: AnalysisContext) -> list[MockingbirdLibraryInfo]: + _, swift_sources = _filter_swift_srcs(ctx) + + if len(swift_sources) == 0: + return [] + + deps_mockingbird_infos = filter(None, [dep.get(MockingbirdLibraryInfo) for dep in cxx_attr_deps(ctx)]) + exported_deps_mockingbird_infos = filter(None, [dep.get(MockingbirdLibraryInfo) for dep in cxx_attr_exported_deps(ctx)]) + + children = [] + dep_names = [] + exported_dep_names = [] + for info in deps_mockingbird_infos: + dep_names.append(info.name) + children.append(info.tset) + + for info in exported_deps_mockingbird_infos: + exported_dep_names.append(info.name) + children.append(info.tset) + + mockingbird_srcs_folder = ctx.actions.declare_output("mockingbird_srcs_" + ctx.attrs.name, dir = True) + + ctx.actions.symlinked_dir( + mockingbird_srcs_folder, + {source.file.basename: source.file for source in swift_sources}, + ) + + mockingbird_record = MockingbirdLibraryRecord( + name = ctx.attrs.name, + srcs = [src.file for src in swift_sources], + dep_names = dep_names, + exported_dep_names = exported_dep_names, + type = MockingbirdTargetType("library"), + src_dir = mockingbird_srcs_folder, + ) + + mockingbird_tset = ctx.actions.tset(MockingbirdLibraryInfoTSet, value = mockingbird_record, children = children) + + return [MockingbirdLibraryInfo( + name = ctx.attrs.name, + tset = mockingbird_tset, + )] + +def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisContext, params: AppleLibraryAdditionalParams, deps_providers: list = [], is_test_target: bool = False) -> (CxxRuleConstructorParams, SwiftLibraryForDistributionOutput | None): + mockingbird_gen_sources = [] + if not "dummy_library" in ctx.attrs.labels: + for dep in cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx): + if MockingbirdSourcesInfo in dep: + for src in dep[MockingbirdSourcesInfo].srcs: + mockingbird_gen_sources.append(src) + + cxx_srcs, swift_srcs = _filter_swift_srcs(ctx, mockingbird_gen_sources) # First create a modulemap if necessary. This is required for importing # ObjC code in Swift so must be done before Swift compilation. 
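    # For illustration (hypothetical sources, not from this diff): a target with
    # srcs = ["Lib.swift", "Shim.m"] plus one Mockingbird-generated .swift source
    # splits into swift_srcs = [Lib.swift, <mock>.swift] and cxx_srcs = [Shim.m],
    # keyed purely on each file's extension.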
@@ -145,7 +258,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte modulemap_pre = None framework_search_paths_flags = get_framework_search_path_flags(ctx) - swift_compile = compile_swift( + swift_compile, swift_interface = compile_swift( ctx, swift_srcs, True, # parse_as_library @@ -173,7 +286,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte else: exported_pre = None - swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, None, deps_providers) + swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, deps_providers) swift_debug_info = get_swift_debug_infos( ctx, swift_dependency_info, @@ -218,19 +331,23 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte return providers framework_search_path_pre = CPreprocessor( - relative_args = CPreprocessorArgs(args = [framework_search_paths_flags]), + args = CPreprocessorArgs(args = [framework_search_paths_flags]), ) - genrule_deps_outputs = [] - if get_apple_build_genrule_deps_attr_value(ctx): - genrule_deps_outputs = get_apple_genrule_deps_outputs(cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx)) + validation_deps_outputs = get_validation_deps_outputs(ctx) + if swift_compile: + swift_objc_header = swift_compile.exported_swift_header + swift_library_for_distribution_output = swift_compile.swift_library_for_distribution_output + else: + swift_objc_header = None + swift_library_for_distribution_output = None return CxxRuleConstructorParams( rule_type = params.rule_type, is_test = (params.rule_type == "apple_test"), headers_layout = get_apple_cxx_headers_layout(ctx), extra_exported_link_flags = params.extra_exported_link_flags, - extra_hidden = genrule_deps_outputs, + extra_hidden = validation_deps_outputs, extra_link_flags = [_get_linker_flags(ctx)], extra_link_input = swift_object_files, extra_link_input_has_external_debug_info = True, @@ -257,6 +374,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte default_outputs = swift_compile.object_files if swift_compile else None, ), ], + "swift-interface": [swift_interface], "swift-output-file-map": [ DefaultInfo( default_output = swift_compile.output_map_artifact if swift_compile else None, @@ -272,19 +390,30 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte ), output_style_sub_targets_and_providers_factory = _get_link_style_sub_targets_and_providers, shared_library_flags = params.shared_library_flags, + shared_library_interface_target = get_versioned_target_triple(ctx), # apple_library's 'stripped' arg only applies to shared subtargets, or, # targets with 'preferred_linkage = "shared"' strip_executable = get_apple_stripped_attr_value_with_default_fallback(ctx), strip_args_factory = apple_strip_args, force_link_group_linking = params.force_link_group_linking, - cxx_populate_xcode_attributes_func = lambda local_ctx, **kwargs: _xcode_populate_attributes(ctx = local_ctx, populate_xcode_attributes_func = params.populate_xcode_attributes_func, **kwargs), + cxx_populate_xcode_attributes_func = lambda local_ctx, **kwargs: _xcode_populate_attributes(ctx = local_ctx, populate_xcode_attributes_func = params.populate_xcode_attributes_func, contains_swift_sources = contains_swift_sources, **kwargs), generate_sub_targets = params.generate_sub_targets, generate_providers = params.generate_providers, # Some apple rules rely on `static` 
libs *not* following dependents. link_groups_force_static_follows_dependents = False, extra_linker_outputs_factory = _get_extra_linker_flags_and_outputs, swiftmodule_linkable = get_swiftmodule_linkable(swift_compile), - ) + extra_shared_library_interfaces = [swift_compile.exported_symbols] if (swift_compile and swift_compile.exported_symbols) else None, + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, + swift_objc_header = swift_objc_header, + ), swift_library_for_distribution_output def _get_extra_linker_flags_and_outputs( ctx: AnalysisContext) -> (list[ArgLike], dict[str, list[DefaultInfo]]): @@ -292,10 +421,10 @@ def _get_extra_linker_flags_and_outputs( # @oss-disable: return add_extra_linker_outputs(ctx) return [], {} # @oss-enable -def _filter_swift_srcs(ctx: AnalysisContext) -> (list[CxxSrcWithFlags], list[CxxSrcWithFlags]): +def _filter_swift_srcs(ctx: AnalysisContext, additional_srcs: list = []) -> (list[CxxSrcWithFlags], list[CxxSrcWithFlags]): cxx_srcs = [] swift_srcs = [] - for s in get_srcs_with_flags(ctx): + for s in get_srcs_with_flags(ctx, additional_srcs): if s.file.extension == SWIFT_EXTENSION: swift_srcs.append(s) else: @@ -373,7 +502,8 @@ def _xcode_populate_attributes( srcs: list[CxxSrcWithFlags], argsfiles: dict[str, CompileArgsfile], populate_xcode_attributes_func: typing.Callable, + contains_swift_sources: bool, **_kwargs) -> dict[str, typing.Any]: # Overwrite the product name - data = populate_xcode_attributes_func(ctx, srcs = srcs, argsfiles = argsfiles, product_name = ctx.attrs.name) + data = populate_xcode_attributes_func(ctx, srcs = srcs, argsfiles = argsfiles, product_name = ctx.attrs.name, contains_swift_sources = contains_swift_sources) return data diff --git a/prelude/apple/apple_macro_layer.bzl b/prelude/apple/apple_macro_layer.bzl index ee4611f098..8cdb75bfd1 100644 --- a/prelude/apple/apple_macro_layer.bzl +++ b/prelude/apple/apple_macro_layer.bzl @@ -6,7 +6,7 @@ # of this source tree. 
load(":apple_bundle_config.bzl", "apple_bundle_config") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_default_kwargs") +load(":apple_dsym_config.bzl", "apple_dsym_config") load(":apple_info_plist_substitutions_parsing.bzl", "parse_codesign_entitlements") load(":apple_package_config.bzl", "apple_package_config") load(":apple_resource_bundle.bzl", "make_resource_bundle_rule") @@ -32,6 +32,13 @@ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE = AppleBuckConfigAttributeOverride( skip_if_false = True, ) +APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE = AppleBuckConfigAttributeOverride( + name = "link_execution_preference", + key = "link_libraries_remotely_override", + value_if_true = "remote", + skip_if_false = True, +) + APPLE_STRIPPED_DEFAULT = AppleBuckConfigAttributeOverride( name = "_stripped_default", key = "stripped_default", @@ -40,20 +47,29 @@ APPLE_STRIPPED_DEFAULT = AppleBuckConfigAttributeOverride( _APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES = [ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, + APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE, AppleBuckConfigAttributeOverride(name = APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME, key = "archive_objects_locally_override"), ] -_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES = [ +# If both configs are set the last one wins +_APPLE_BINARY_EXECUTION_OVERRIDES = [ AppleBuckConfigAttributeOverride( name = "link_execution_preference", key = "link_binaries_locally_override", value_if_true = "local", skip_if_false = True, ), + AppleBuckConfigAttributeOverride( + name = "link_execution_preference", + key = "link_binaries_remotely_override", + value_if_true = "remote", + skip_if_false = True, + ), ] _APPLE_TEST_LOCAL_EXECUTION_OVERRIDES = [ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, + APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE, ] def apple_macro_layer_set_bool_override_attrs_from_config(overrides: list[AppleBuckConfigAttributeOverride]) -> dict[str, Select]: @@ -73,8 +89,8 @@ def apple_macro_layer_set_bool_override_attrs_from_config(overrides: list[AppleB def apple_test_macro_impl(apple_test_rule, apple_resource_bundle_rule, **kwargs): kwargs.update(apple_bundle_config()) + kwargs.update(apple_dsym_config()) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES)) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) # `extension` is used both by `apple_test` and `apple_resource_bundle`, so provide default here kwargs["extension"] = kwargs.pop("extension", "xctest") @@ -83,10 +99,16 @@ def apple_test_macro_impl(apple_test_rule, apple_resource_bundle_rule, **kwargs) **kwargs ) +def apple_xcuitest_macro_impl(apple_xcuitest_rule, **kwargs): + kwargs.update(apple_bundle_config()) + apple_xcuitest_rule( + **kwargs + ) + def apple_bundle_macro_impl(apple_bundle_rule, apple_resource_bundle_rule, **kwargs): info_plist_substitutions = kwargs.get("info_plist_substitutions") kwargs.update(apple_bundle_config()) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) + kwargs.update(apple_dsym_config()) apple_bundle_rule( _codesign_entitlements = parse_codesign_entitlements(info_plist_substitutions), _resource_bundle = make_resource_bundle_rule(apple_resource_bundle_rule, **kwargs), @@ -94,15 +116,16 @@ def apple_bundle_macro_impl(apple_bundle_rule, apple_resource_bundle_rule, **kwa ) def apple_library_macro_impl(apple_library_rule = None, **kwargs): + kwargs.update(apple_dsym_config()) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES)) 
kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) apple_library_rule(**kwargs) def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable = None, **kwargs): - kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES)) + dsym_args = apple_dsym_config() + kwargs.update(dsym_args) + kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_EXECUTION_OVERRIDES)) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) original_binary_name = kwargs.pop("name") @@ -115,6 +138,7 @@ def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable labels = kwargs.get("labels"), visibility = kwargs.get("visibility"), default_target_platform = kwargs.get("default_target_platform"), + **dsym_args ) else: binary_name = original_binary_name @@ -126,3 +150,9 @@ def apple_package_macro_impl(apple_package_rule = None, **kwargs): apple_package_rule( **kwargs ) + +def apple_universal_executable_macro_impl(apple_universal_executable_rule = None, **kwargs): + kwargs.update(apple_dsym_config()) + apple_universal_executable_rule( + **kwargs + ) diff --git a/prelude/apple/apple_native.bzl b/prelude/apple/apple_native.bzl new file mode 100644 index 0000000000..a64ccd68bd --- /dev/null +++ b/prelude/apple/apple_native.bzl @@ -0,0 +1,96 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
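# A usage sketch for the wrappers defined below (hypothetical BUCK file; target
# and file names are illustrative):
#
#     load("@prelude//apple:apple_native.bzl", "apple_native")
#
#     apple_native.apple_library(
#         name = "MyLib",
#         srcs = ["Lib.swift"],
#     )
#
# When neither default_target_platform nor config_backed_target_platform is
# given, the wrapper fills in a default derived from the cxx.default_platform
# buckconfig, falling back to APPLE_SDK_DEFAULT_PLATFORM_MAP.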
+ +load("@prelude//:prelude.bzl", "native") +load( + "@prelude//platforms/apple:constants.bzl", + "APPLE", +) +load("@prelude//platforms/apple:platforms.bzl", "config_backed_apple_target_platform", "get_default_target_platform_for_platform", "set_apple_platforms") +load("@prelude//platforms/apple/platforms_map.bzl", "APPLE_SDK_DEFAULT_PLATFORM_MAP") +load("@prelude//utils/buckconfig.bzl", "read") + +def _apple_library(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_library(**kwargs) + +def _apple_asset_catalog(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_asset_catalog(**kwargs) + +def _apple_binary(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_binary(**kwargs) + +def _apple_bundle(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_bundle(**kwargs) + +def _apple_watchos_bundle(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_watchos_bundle(**kwargs) + +def _apple_package(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_package(**kwargs) + +def _apple_resource(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_resource(**kwargs) + +def _apple_test(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_test(**kwargs) + +def _apple_xcuitest(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_xcuitest(**kwargs) + +def _apple_xcframework(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_xcframework(**kwargs) + +def _update_platforms(**kwargs): + platform = _get_default_platform() + + default_target_platform = kwargs.pop("default_target_platform", None) + base_config_backed_target_platform = kwargs.pop("config_backed_target_platform", None) + + if default_target_platform != None and base_config_backed_target_platform != None: + name = kwargs.get("name", "UNKNOWN_TARGET") + fail("{} has both a default_target_platform and a config_backed_target_platform, which is not allowed".format(name)) + + if base_config_backed_target_platform != None: + default_target_platform = config_backed_apple_target_platform(base_config_backed_target_platform, platform) + elif default_target_platform == None: + default_target_platform = get_default_target_platform_for_platform(platform) + + if default_target_platform != None: + kwargs["default_target_platform"] = default_target_platform + + kwargs = set_apple_platforms(platform, base_config_backed_target_platform, kwargs) + + return kwargs + +def _get_default_platform(): + config_platform = read("cxx", "default_platform") + if config_platform != None: + return config_platform + return APPLE_SDK_DEFAULT_PLATFORM_MAP.get(APPLE) + +apple_native = struct( + apple_asset_catalog = _apple_asset_catalog, + apple_binary = _apple_binary, + apple_bundle = _apple_bundle, + apple_watchos_bundle = _apple_watchos_bundle, + apple_library = _apple_library, + apple_package = _apple_package, + apple_resource = _apple_resource, + apple_test = _apple_test, + apple_xcuitest = _apple_xcuitest, + apple_xcframework = _apple_xcframework, +) diff --git a/prelude/apple/apple_package.bzl b/prelude/apple/apple_package.bzl index fc0227933f..c01228ae6d 100644 --- a/prelude/apple/apple_package.bzl +++ b/prelude/apple/apple_package.bzl @@ -15,36 +15,46 @@ load(":apple_swift_stdlib.bzl", "should_copy_swift_stdlib") load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: - package = ctx.actions.declare_output("{}.{}".format(ctx.attrs.bundle.label.name, ctx.attrs.ext)) + 
package_name = ctx.attrs.package_name if ctx.attrs.package_name else ctx.attrs.bundle.label.name
+    package = ctx.actions.declare_output("{}.{}".format(package_name, ctx.attrs.ext))
+    contents = (
+        ctx.attrs.bundle[DefaultInfo].default_outputs[0] if ctx.attrs.packager else _get_ipa_contents(ctx)
+    )
     if ctx.attrs.packager:
         process_ipa_cmd = cmd_args([
             ctx.attrs.packager[RunInfo],
             "--app-bundle-path",
-            ctx.attrs.bundle[DefaultInfo].default_outputs[0],
+            contents,
             "--output-path",
             package.as_output(),
             ctx.attrs.packager_args,
         ])
         category = "apple_package_make_custom"
     else:
-        unprocessed_ipa_contents = _get_ipa_contents(ctx)
         process_ipa_cmd = _get_default_package_cmd(
             ctx,
-            unprocessed_ipa_contents,
+            contents,
             package.as_output(),
         )
         category = "apple_package_make"
 
-    if ctx.attrs.validator != None:
-        process_ipa_cmd.add([
-            "--validator",
-            ctx.attrs.validator[RunInfo],
-            [cmd_args(["--validator-args=", arg], delimiter = "") for arg in ctx.attrs.validator_args],
-        ])
+    sub_targets = {}
+
+    prepackaged_validators_artifacts = _get_prepackaged_validators_outputs(ctx, contents)
+    if prepackaged_validators_artifacts:
+        # Add the artifacts to packaging cmd so that they are run.
+        process_ipa_cmd.add(cmd_args(hidden = prepackaged_validators_artifacts))
+        sub_targets["prepackaged_validators"] = [
+            DefaultInfo(default_outputs = prepackaged_validators_artifacts),
+        ]
+
     ctx.actions.run(process_ipa_cmd, category = category)
 
-    return [DefaultInfo(default_output = package)]
+    return [DefaultInfo(
+        default_output = package,
+        sub_targets = sub_targets,
+    )]
 
 def _get_default_package_cmd(ctx: AnalysisContext, unprocessed_ipa_contents: Artifact, output: OutputArtifact) -> cmd_args:
     apple_tools = ctx.attrs._apple_tools[AppleToolsInfo]
@@ -112,7 +122,7 @@ def _get_swift_support_dir(ctx, bundle_output: Artifact, bundle_info: AppleBundl
         cmd_args(
             [
                 bundle_output,
-                bundle_relative_path_for_destination(AppleBundleDestination("executables"), sdk_name, extension),
+                bundle_relative_path_for_destination(AppleBundleDestination("executables"), sdk_name, extension, False),
                 bundle_info.binary_name,
             ],
             delimiter = "/",
@@ -130,7 +140,7 @@ def _get_swift_support_dir(ctx, bundle_output: Artifact, bundle_info: AppleBundl
         allow_args = True,
     )
     ctx.actions.run(
-        cmd_args(["/bin/sh", script]).hidden([stdlib_tool, bundle_output, swift_support_dir.as_output()]),
+        cmd_args(["/bin/sh", script], hidden = [stdlib_tool, bundle_output, swift_support_dir.as_output()]),
         category = "copy_swift_stdlibs",
     )
 
@@ -143,7 +153,7 @@ def _get_scan_folder_args(dest: AppleBundleDestination, bundle_output: Artifact,
         cmd_args(
             [
                 bundle_output,
-                bundle_relative_path_for_destination(dest, sdk_name, extension),
+                bundle_relative_path_for_destination(dest, sdk_name, extension, False),
             ],
             delimiter = "/",
         ),
@@ -161,3 +171,32 @@ def _compression_level_arg(compression_level: IpaCompressionLevel) -> str:
         return "9"
     else:
         fail("Unknown .ipa compression level: " + str(compression_level))
+
+def _get_prepackaged_validators_outputs(ctx: AnalysisContext, prepackaged_contents: Artifact) -> list[Artifact]:
+    if not ctx.attrs.prepackaged_validators:
+        return []
+
+    outputs = []
+    for idx, validator in enumerate(ctx.attrs.prepackaged_validators):
+        if type(validator) == "tuple":
+            validator, validator_args = validator
+        else:
+            validator_args = []
+
+        output = ctx.actions.declare_output(validator.label.name + "_{}".format(idx))
+        outputs.append(output)
+
+        ctx.actions.run(
+            cmd_args([
+                validator[RunInfo],
+                "--contents-dir",
prepackaged_contents, + "--output-path", + output.as_output(), + validator_args, + ]), + category = "prepackaged_validator", + identifier = str(idx), + ) + + return outputs diff --git a/prelude/apple/xcode_prebuild_script.bzl b/prelude/apple/apple_platforms.bzl similarity index 75% rename from prelude/apple/xcode_prebuild_script.bzl rename to prelude/apple/apple_platforms.bzl index dc56f17ef9..c9beeaa1ad 100644 --- a/prelude/apple/xcode_prebuild_script.bzl +++ b/prelude/apple/apple_platforms.bzl @@ -5,5 +5,4 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -def xcode_prebuild_script_impl(_ctx: AnalysisContext) -> list[Provider]: - return [DefaultInfo()] +APPLE_PLATFORMS_KEY = "_apple_platforms" diff --git a/prelude/apple/apple_resource.bzl b/prelude/apple/apple_resource.bzl index 7955fe1eaa..a67ebcb418 100644 --- a/prelude/apple/apple_resource.bzl +++ b/prelude/apple/apple_resource.bzl @@ -18,6 +18,8 @@ def apple_resource_impl(ctx: AnalysisContext) -> list[Provider]: variant_files = ctx.attrs.variants or [], named_variant_files = ctx.attrs.named_variants or {}, codesign_files_on_copy = ctx.attrs.codesign_on_copy, + codesign_entitlements = ctx.attrs.codesign_entitlements, + codesign_flags_override = ctx.attrs.codesign_flags_override, ) # `files` can contain `apple_library()` which in turn can have `apple_resource()` deps diff --git a/prelude/apple/apple_resource_bundle.bzl b/prelude/apple/apple_resource_bundle.bzl index 0ed45dfb4d..db6ccbc89e 100644 --- a/prelude/apple/apple_resource_bundle.bzl +++ b/prelude/apple/apple_resource_bundle.bzl @@ -43,6 +43,7 @@ load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_syste _RESOURCE_BUNDLE_FIELDS = [ "asset_catalogs_compilation_options", "binary", + "copy_public_framework_headers", "default_target_platform", "deps", "extension", @@ -50,7 +51,9 @@ _RESOURCE_BUNDLE_FIELDS = [ "ibtool_module_flag", "info_plist", "info_plist_substitutions", + "module_map", "product_name", + "privacy_manifest", "resource_group", "resource_group_map", "within_view", diff --git a/prelude/apple/apple_resource_dedupe_alias.bzl b/prelude/apple/apple_resource_dedupe_alias.bzl new file mode 100644 index 0000000000..448e197738 --- /dev/null +++ b/prelude/apple/apple_resource_dedupe_alias.bzl @@ -0,0 +1,21 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
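For context, a minimal usage sketch of the two code-signing attributes added to `AppleResourceSpec` above; the target and file names are invented, and only `codesign_entitlements` / `codesign_flags_override` are new surface (both default to `None` in the `apple_resource` attrs later in this diff):

apple_resource(
    name = "PushServiceResources",  # hypothetical target
    files = ["PushService.appex"],  # hypothetical prebuilt bundle, re-signed on copy
    codesign_on_copy = True,
    codesign_entitlements = "PushService.entitlements",  # new: per-resource entitlements
    codesign_flags_override = ["--force"],  # new: presumably replaces the default codesign flags (assumption from the attr name)
)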
+ +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load("@prelude//apple/user/apple_resource_transition.bzl", "apple_resource_transition") + +def _impl(ctx: AnalysisContext) -> list[Provider]: + return ctx.attrs.actual.providers + +registration_spec = RuleRegistrationSpec( + name = "apple_resource_dedupe_alias", + impl = _impl, + attrs = { + "actual": attrs.transition_dep(cfg = apple_resource_transition), + "skip_universal_resource_dedupe": attrs.bool(default = False), + }, +) diff --git a/prelude/apple/apple_resource_types.bzl b/prelude/apple/apple_resource_types.bzl index 3104682fbd..5d8a2773c9 100644 --- a/prelude/apple/apple_resource_types.bzl +++ b/prelude/apple/apple_resource_types.bzl @@ -28,9 +28,11 @@ AppleResourceSpec = record( # `{ "ru.lproj" : ["Localizable.strings"] }` named_variant_files = field(dict[str, list[Artifact]], {}), codesign_files_on_copy = field(bool, False), + codesign_entitlements = field(Artifact | None, None), + codesign_flags_override = field(list[str] | None, None), ) -# Used when invoking `ibtool`, `actool` and `momc` +# Used when invoking `ibtool`, `actool`, `mapc` and `momc` AppleResourceProcessingOptions = record( prefer_local = field(bool, False), allow_cache_upload = field(bool, False), diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 902262370d..2d37fe8dac 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -5,29 +5,27 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//apple:apple_buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load( - "@prelude//apple:apple_genrule_deps.bzl", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE", + "@prelude//:validation_deps.bzl", + "VALIDATION_DEPS_ATTR_NAME", + "VALIDATION_DEPS_ATTR_TYPE", ) load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") +load("@prelude//apple/user:apple_xcframework.bzl", "apple_xcframework_extra_attrs") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") -load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") -load("@prelude//decls/common.bzl", "Linkage") +load("@prelude//linking:types.bzl", "Linkage") load(":apple_asset_catalog.bzl", "apple_asset_catalog_impl") load(":apple_binary.bzl", "apple_binary_impl") load(":apple_bundle.bzl", "apple_bundle_impl") load(":apple_bundle_types.bzl", "AppleBundleInfo") load(":apple_core_data.bzl", "apple_core_data_impl") -load(":apple_library.bzl", "apple_library_impl") +load(":apple_library.bzl", "AppleSharedLibraryMachOFileType", "apple_library_impl") load(":apple_package.bzl", "apple_package_impl") load(":apple_package_config.bzl", "IpaCompressionLevel") load(":apple_resource.bzl", "apple_resource_impl") @@ -35,20 +33,22 @@ load( ":apple_rules_impl_utility.bzl", 
"APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME", "apple_bundle_extra_attrs", + "apple_dsymutil_attrs", "apple_test_extra_attrs", + "apple_xcuitest_extra_attrs", "get_apple_bundle_toolchain_attr", "get_apple_toolchain_attr", "get_apple_xctoolchain_attr", "get_apple_xctoolchain_bundle_id_attr", + "get_enable_library_evolution", ) load(":apple_test.bzl", "apple_test_impl") load(":apple_toolchain.bzl", "apple_toolchain_impl") load(":apple_toolchain_types.bzl", "AppleToolsInfo") load(":apple_universal_executable.bzl", "apple_universal_executable_impl") +load(":apple_xcuitest.bzl", "apple_xcuitest_impl") load(":prebuilt_apple_framework.bzl", "prebuilt_apple_framework_impl") load(":scene_kit_assets.bzl", "scene_kit_assets_impl") -load(":xcode_postbuild_script.bzl", "xcode_postbuild_script_impl") -load(":xcode_prebuild_script.bzl", "xcode_prebuild_script_impl") implemented_rules = { "apple_asset_catalog": apple_asset_catalog_impl, @@ -60,12 +60,11 @@ implemented_rules = { "apple_test": apple_test_impl, "apple_toolchain": apple_toolchain_impl, "apple_universal_executable": apple_universal_executable_impl, + "apple_xcuitest": apple_xcuitest_impl, "core_data_model": apple_core_data_impl, "prebuilt_apple_framework": prebuilt_apple_framework_impl, "scene_kit_assets": scene_kit_assets_impl, "swift_toolchain": swift_toolchain_impl, - "xcode_postbuild_script": xcode_postbuild_script_impl, - "xcode_prebuild_script": xcode_prebuild_script_impl, } _APPLE_TOOLCHAIN_ATTR = get_apple_toolchain_attr() @@ -77,68 +76,104 @@ ApplePackageExtension = enum( "zip", ) -extra_attributes = { - "apple_asset_catalog": { - "dirs": attrs.list(attrs.source(allow_directory = True), default = []), - }, - "apple_binary": { +def _apple_binary_extra_attrs(): + attribs = { "binary_linker_flags": attrs.list(attrs.arg(), default = []), "enable_distributed_thinlto": attrs.bool(default = False), "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), + "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.option(attrs.bool(), default = None), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), + "_enable_library_evolution": get_enable_library_evolution(), "_stripped_default": attrs.bool(default = False), - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, - }, - "apple_bundle": apple_bundle_extra_attrs(), - "apple_library": { + VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE, + } + 
attribs.update(apple_dsymutil_attrs()) + return attribs + +def _apple_library_extra_attrs(): + attribs = { + "enable_distributed_thinlto": attrs.bool(default = False), "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []), "header_mode": attrs.option(attrs.enum(HeaderMode.values()), default = None), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "serialize_debugging_options": attrs.bool(default = True), + # Mach-O file type for binary when the target is built as a shared library. + "shared_library_macho_file_type": attrs.enum(AppleSharedLibraryMachOFileType.values(), default = "dylib"), "stripped": attrs.option(attrs.bool(), default = None), "supports_header_symlink_subtarget": attrs.bool(default = False), "supports_shlib_interfaces": attrs.bool(default = True), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "use_archive": attrs.option(attrs.bool(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), + "_enable_library_evolution": get_enable_library_evolution(), "_stripped_default": attrs.bool(default = False), APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, + VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE, + } + attribs.update(apple_dsymutil_attrs()) + return attribs + +def _apple_universal_executable_extra_attrs(): + attribs = { + "executable": attrs.split_transition_dep(cfg = cpu_split_transition), + "executable_name": attrs.option(attrs.string(), default = None), + "labels": attrs.list(attrs.string(), default = []), + "split_arch_dsym": attrs.bool(default = False), + "universal": attrs.option(attrs.bool(), default = None), + "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, + "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), + } + attribs.update(apple_dsymutil_attrs()) + return attribs + +extra_attributes = { + "apple_asset_catalog": { + "dirs": attrs.list(attrs.source(allow_directory = True), default = []), + "skip_universal_resource_dedupe": attrs.bool(default = False), }, + "apple_binary": _apple_binary_extra_attrs(), + "apple_bundle": apple_bundle_extra_attrs(), + "apple_library": _apple_library_extra_attrs(), "apple_package": { "bundle": attrs.dep(providers = [AppleBundleInfo]), "ext": attrs.enum(ApplePackageExtension.values(), default = "ipa"), + "package_name": attrs.option(attrs.string(), default = None), "packager": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "packager_args": attrs.list(attrs.arg(), default = []), - 
"validator": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - "validator_args": attrs.list(attrs.arg(), default = []), + "prepackaged_validators": attrs.list( + attrs.one_of( + attrs.exec_dep(providers = [RunInfo]), + attrs.tuple(attrs.exec_dep(providers = [RunInfo]), attrs.list(attrs.arg())), + ), + default = [], + ), "_apple_toolchain": get_apple_bundle_toolchain_attr(), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_ipa_compression_level": attrs.enum(IpaCompressionLevel.values()), }, "apple_resource": { + "codesign_entitlements": attrs.option(attrs.source(), default = None), + "codesign_flags_override": attrs.option(attrs.list(attrs.string()), default = None), "codesign_on_copy": attrs.bool(default = False), "content_dirs": attrs.list(attrs.source(allow_directory = True), default = []), "dirs": attrs.list(attrs.source(allow_directory = True), default = []), @@ -154,7 +189,7 @@ extra_attributes = { "codesign": attrs.exec_dep(providers = [RunInfo]), "codesign_allocate": attrs.exec_dep(providers = [RunInfo]), "codesign_identities_command": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - # Controls invocations of `ibtool`, `actool` and `momc` + # Controls invocations of `ibtool`, `actool` `mapc`and `momc` "compile_resources_locally": attrs.bool(default = False), "copy_scene_kit_assets": attrs.exec_dep(providers = [RunInfo]), "cxx_toolchain": attrs.toolchain_dep(), @@ -165,9 +200,10 @@ extra_attributes = { "installer": attrs.default_only(attrs.label(default = "buck//src/com/facebook/buck/installer/apple:apple_installer")), "libtool": attrs.exec_dep(providers = [RunInfo]), "lipo": attrs.exec_dep(providers = [RunInfo]), - "min_version": attrs.option(attrs.string(), default = None), + "mapc": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "min_version": attrs.string(), "momc": attrs.exec_dep(providers = [RunInfo]), - "odrcov": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "objdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), # A placeholder tool that can be used to set up toolchain constraints. # Useful when fat and thin toolchahins share the same underlying tools via `command_alias()`, # which requires setting up separate platform-specific aliases with the correct constraints. @@ -179,6 +215,10 @@ extra_attributes = { "requires_xcode_version_match": attrs.bool(default = False), "sdk_path": attrs.option(attrs.source(), default = None), # Mark as optional until we remove `_internal_sdk_path` "swift_toolchain": attrs.option(attrs.toolchain_dep(), default = None), + # The requested deployment target for a build. This will be used to + # form the versioned target triple when building, and compared with + # each build rule's target_sdk_version to ensure compatibility. + "target_sdk_version": attrs.option(attrs.string(), default = None), "version": attrs.option(attrs.string(), default = None), "xcode_build_version": attrs.option(attrs.string(), default = None), "xcode_version": attrs.option(attrs.string(), default = None), @@ -190,32 +230,28 @@ extra_attributes = { # pass abs paths during development and using the currently selected Xcode. 
"_internal_sdk_path": attrs.option(attrs.string(), default = None), }, - "apple_universal_executable": { - "executable": attrs.split_transition_dep(cfg = cpu_split_transition), - "executable_name": attrs.option(attrs.string(), default = None), - "labels": attrs.list(attrs.string()), - "split_arch_dsym": attrs.bool(default = False), - "universal": attrs.option(attrs.bool(), default = None), - "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, - "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), - }, + "apple_universal_executable": _apple_universal_executable_extra_attrs(), + "apple_xcframework": apple_xcframework_extra_attrs(), + "apple_xcuitest": apple_xcuitest_extra_attrs(), "core_data_model": { + "module": attrs.option(attrs.string(), default = None), "path": attrs.source(allow_directory = True), }, "prebuilt_apple_framework": { "framework": attrs.option(attrs.source(allow_directory = True), default = None), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, }, "scene_kit_assets": { "path": attrs.source(allow_directory = True), }, "swift_library": { - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), }, "swift_toolchain": { "architecture": attrs.option(attrs.string(), default = None), # TODO(T115173356): Make field non-optional "make_swift_comp_db": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//apple/tools:make_swift_comp_db")), + "make_swift_interface": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//apple/tools:make_swift_interface")), "object_format": attrs.enum(SwiftObjectFormat.values(), default = "object"), # A placeholder tool that can be used to set up toolchain constraints. # Useful when fat and thin toolchahins share the same underlying tools via `command_alias()`, @@ -224,6 +260,7 @@ extra_attributes = { "platform_path": attrs.option(attrs.source(), default = None), # Mark as optional until we remove `_internal_platform_path` "sdk_modules": attrs.list(attrs.exec_dep(), default = []), # A list or a root target that represent a graph of sdk modules (e.g Frameworks) "sdk_path": attrs.option(attrs.source(), default = None), # Mark as optional until we remove `_internal_sdk_path` + "swift_ide_test_tool": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "swift_stdlib_tool": attrs.exec_dep(providers = [RunInfo]), "swiftc": attrs.exec_dep(providers = [RunInfo]), # TODO(T111858757): Mirror of `platform_path` but treated as a string. It allows us to diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 91e1dc6cee..1d12235db8 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -5,27 +5,20 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//apple:apple_buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo", "AppleBundleTypeAttributeType") -load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") -load( - "@prelude//apple:apple_genrule_deps.bzl", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE", -) +load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignConfiguration", "CodeSignType") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo") load("@prelude//apple/user:apple_simulators.bzl", "apple_simulators_transition") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") -load("@prelude//apple/user:resource_group_map.bzl", "resource_group_map_attr") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") -load("@prelude//decls/common.bzl", "LinkableDepType", "Linkage") +load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//decls/common.bzl", "LinkableDepType") def get_apple_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) @@ -43,35 +36,64 @@ def get_apple_xctoolchain_bundle_id_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) return attrs.toolchain_dep(default = "fbcode//buck2/platform/toolchain:apple-xctoolchain-bundle-id") +def get_enable_library_evolution(): + return attrs.bool(default = select({ + "DEFAULT": False, + "config//features/apple:swift_library_evolution_enabled": True, + })) + +def _strict_provisioning_profile_search_default_attr(): + default_value = (read_root_config("apple", "strict_provisioning_profile_search", "").lower() == "true") + return attrs.bool(default = select({ + "DEFAULT": default_value, + "config//features/apple:strict_provisioning_profile_search_enabled": True, + })) + APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME = "_archive_objects_locally_override" APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME = "_use_entitlements_when_adhoc_code_signing" APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME = "use_entitlements_when_adhoc_code_signing" +APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME = "_embed_provisioning_profile_when_adhoc_code_signing" +APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME = "embed_provisioning_profile_when_adhoc_code_signing" + +APPLE_VALIDATION_DEPS_ATTR_NAME = "validation_deps" +APPLE_VALIDATION_DEPS_ATTR_TYPE = attrs.set(attrs.dep(), sorted = True, default = []) + +def apple_dsymutil_attrs(): + return { + "_dsymutil_extra_flags": attrs.list(attrs.string()), + } def _apple_bundle_like_common_attrs(): # `apple_bundle()` and `apple_test()` share a common set of extra attrs attribs = { "codesign_type": attrs.option(attrs.enum(CodeSignType.values()), default = None), + 
"strict_provisioning_profile_search": attrs.option(attrs.bool(), default = None), + "versioned_macos_bundle": attrs.bool(default = False), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_bundling_cache_buster": attrs.option(attrs.string(), default = None), "_bundling_log_file_enabled": attrs.bool(default = False), "_bundling_log_file_level": attrs.option(attrs.string(), default = None), - "_bundling_path_conflicts_check_enabled": attrs.bool(default = False), + "_code_signing_configuration": attrs.option(attrs.enum(CodeSignConfiguration.values()), default = None), + "_codesign_identities_command_override": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "_codesign_type": attrs.option(attrs.enum(CodeSignType.values()), default = None), "_compile_resources_locally_override": attrs.option(attrs.bool(), default = None), - "_dry_run_code_signing": attrs.bool(default = False), - "_fast_adhoc_signing_enabled": attrs.bool(default = False), + "_fast_provisioning_profile_parsing_enabled": attrs.bool(default = False), "_incremental_bundling_enabled": attrs.bool(default = False), "_profile_bundling_enabled": attrs.bool(default = False), # FIXME: prelude// should be standalone (not refer to fbsource//) "_provisioning_profiles": attrs.dep(default = "fbsource//xplat/buck2/platform/apple:provisioning_profiles"), "_resource_bundle": attrs.option(attrs.dep(providers = [AppleBundleResourceInfo]), default = None), + "_strict_provisioning_profile_search_default": _strict_provisioning_profile_search_default_attr(), APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, + APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), + APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), + APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } attribs.update(get_apple_info_plist_build_system_identification_attrs()) + attribs.update(apple_dsymutil_attrs()) return attribs def apple_test_extra_attrs(): @@ -91,37 +113,57 @@ def apple_test_extra_attrs(): "link_style": attrs.enum(LinkableDepType, default = "static"), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), # The test source code and lib dependencies should be built into a shared library. - "preferred_linkage": attrs.enum(Linkage, default = "shared"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "shared"), # Expected by `apple_bundle`, for `apple_test` this field is always None. "resource_group": attrs.option(attrs.string(), default = None), # Expected by `apple_bundle`, for `apple_test` this field is always None. 
"resource_group_map": attrs.option(attrs.string(), default = None), + "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.bool(default = False), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "_apple_toolchain": get_apple_toolchain_attr(), + "_enable_library_evolution": get_enable_library_evolution(), "_ios_booted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_booted_simulator", providers = [LocalResourceInfo]), "_ios_unbooted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_unbooted_simulator", providers = [LocalResourceInfo]), "_macos_idb_companion": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:macos_idb_companion", providers = [LocalResourceInfo]), } attribs.update(_apple_bundle_like_common_attrs()) - attribs.update({ - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, - }) + return attribs + +def apple_xcuitest_extra_attrs(): + attribs = { + # This is ignored, but required for info plist processing. + "binary": attrs.option(attrs.source(), default = None), + "codesign_identity": attrs.option(attrs.string(), default = None), + "entitlements_file": attrs.option(attrs.source(), default = None), + "extension": attrs.default_only(attrs.string(default = "app")), + "incremental_bundling_enabled": attrs.bool(default = False), + "info_plist": attrs.source(), + "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), + "target_sdk_version": attrs.option(attrs.string(), default = None), + # The test bundle to package in the UI test runner app. 
+ "test_bundle": attrs.dep(), + "_apple_toolchain": get_apple_toolchain_attr(), + "_enable_library_evolution": get_enable_library_evolution(), + } + attribs.update(_apple_bundle_like_common_attrs()) + attribs.pop("_dsymutil_extra_flags", None) + return attribs def apple_bundle_extra_attrs(): attribs = { "binary": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "bundle_type": attrs.option(attrs.enum(AppleBundleTypeAttributeType.values()), default = None), - "resource_group_map": resource_group_map_attr(), + "copy_public_framework_headers": attrs.option(attrs.bool(), default = None), + "module_map": attrs.option(attrs.source(), default = None), + "resource_group_map": RESOURCE_GROUP_MAP_ATTR, "selective_debugging": attrs.option(attrs.dep(providers = [AppleSelectiveDebuggingInfo]), default = None), "split_arch_dsym": attrs.bool(default = False), "universal": attrs.option(attrs.bool(), default = None), "_apple_toolchain": get_apple_bundle_toolchain_attr(), "_codesign_entitlements": attrs.option(attrs.source(), default = None), - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, } attribs.update(_apple_bundle_like_common_attrs()) return attribs diff --git a/prelude/apple/apple_target_sdk_version.bzl b/prelude/apple/apple_target_sdk_version.bzl index 4b1290384e..6cc2760e8f 100644 --- a/prelude/apple/apple_target_sdk_version.bzl +++ b/prelude/apple/apple_target_sdk_version.bzl @@ -9,6 +9,19 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load("@prelude//cxx:preprocessor.bzl", "CPreprocessor", "CPreprocessorArgs") load(":apple_sdk.bzl", "get_apple_sdk_name") +_TARGET_TRIPLE_MAP = { + "appletvos": "{architecture}-apple-tvos{version}", + "appletvsimulator": "{architecture}-apple-tvos{version}-simulator", + "iphoneos": "{architecture}-apple-ios{version}", + "iphonesimulator": "{architecture}-apple-ios{version}-simulator", + "maccatalyst": "{architecture}-apple-ios{version}-macabi", + "macosx": "{architecture}-apple-macosx{version}", + "visionos": "{architecture}-apple-xros{version}", + "visionsimulator": "{architecture}-apple-xros{version}-simulator", + "watchos": "{architecture}-apple-watchos{version}", + "watchsimulator": "{architecture}-apple-watchos{version}-simulator", +} + # TODO(T112099448): In the future, the min version flag should live on the apple_toolchain() # TODO(T113776898): Switch to -mtargetos= flag which should live on the apple_toolchain() _APPLE_MIN_VERSION_FLAG_SDK_MAP = { @@ -20,22 +33,65 @@ _APPLE_MIN_VERSION_FLAG_SDK_MAP = { "watchsimulator": "-mwatchsimulator-version-min", } +_APPLE_MIN_VERSION_CLAMP_MAP = { + "maccatalyst": "13.1", # Earliest possible Catalyst version +} + +# Compares and returns the the maximum of two version numbers. Assumes +# they are both formatted as dot-separted strings (e.g "14.0.3"). +# If they are otherwise equal but one is longer, the longer is returned. 
+def max_sdk_version(left: str, right: str): + left_components = left.split(".") + right_components = right.split(".") + for component in zip(left_components, right_components): + diff = int(component[0]) - int(component[1]) + if diff < 0: + return right + if diff > 0: + return left + + length_diff = len(left_components) - len(right_components) + if length_diff < 0: + return right + else: + return left + # Returns the target SDK version for apple_(binary|library) and uses # apple_toolchain() min version as a fallback. This is the central place # where the version for a particular node is defined, no other places # should be accessing `attrs.target_sdk_version` or `attrs.min_version`. -def get_min_deployment_version_for_node(ctx: AnalysisContext) -> [None, str]: +def get_min_deployment_version_for_node(ctx: AnalysisContext) -> str: toolchain_min_version = ctx.attrs._apple_toolchain[AppleToolchainInfo].min_version - if toolchain_min_version == "": - toolchain_min_version = None - return getattr(ctx.attrs, "target_sdk_version", None) or toolchain_min_version + min_version = getattr(ctx.attrs, "target_sdk_version", None) or toolchain_min_version + clamp_version = _APPLE_MIN_VERSION_CLAMP_MAP.get(get_apple_sdk_name(ctx)) + if clamp_version: + min_version = max_sdk_version(min_version, clamp_version) + + return min_version + +def get_versioned_target_triple(ctx: AnalysisContext) -> str: + target_sdk_version = get_min_deployment_version_for_node(ctx) or "" + return _get_target_triple(ctx, target_sdk_version) + +def get_unversioned_target_triple(ctx: AnalysisContext) -> str: + return _get_target_triple(ctx, "") + +def _get_target_triple(ctx: AnalysisContext, target_sdk_version: str) -> str: + apple_toolchain_info = ctx.attrs._apple_toolchain[AppleToolchainInfo] + architecture = apple_toolchain_info.architecture + if architecture == None: + fail("Need to set `architecture` field of apple_toolchain(), target: {}".format(ctx.label)) + + sdk_name = apple_toolchain_info.sdk_name + target_triple_format_str = _TARGET_TRIPLE_MAP.get(sdk_name) + if target_triple_format_str == None: + fail("Could not find target triple for sdk = {}".format(sdk_name)) + + return target_triple_format_str.format(architecture = architecture, version = target_sdk_version) # Returns the min deployment flag to pass to the compiler + linker def _get_min_deployment_version_target_flag(ctx: AnalysisContext) -> [None, str]: target_sdk_version = get_min_deployment_version_for_node(ctx) - if target_sdk_version == None: - return None - sdk_name = get_apple_sdk_name(ctx) min_version_flag = _APPLE_MIN_VERSION_FLAG_SDK_MAP.get(sdk_name) if min_version_flag == None: @@ -76,5 +132,5 @@ def get_min_deployment_version_target_preprocessor_flags(ctx: AnalysisContext) - args = cmd_args(min_version_flag) return [CPreprocessor( - relative_args = CPreprocessorArgs(args = [args]), + args = CPreprocessorArgs(args = [args]), )] diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index ea5a765fda..a75096eea0 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -5,8 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
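As a quick illustration of the comparison rules in `max_sdk_version()` above (per-component numeric comparison, with the longer string winning an otherwise-equal tie), these hypothetical calls behave as follows:

max_sdk_version("13.4", "13.10")   # -> "13.10": numeric, not lexicographic, comparison
max_sdk_version("14.0", "14.0.3")  # -> "14.0.3": equal common prefix, longer wins
max_sdk_version("15.2", "15.2")    # -> "15.2": equal versions return left

This is also the check `apple_toolchain_impl()` uses later in this diff to reject a `target_sdk_version` lower than the toolchain `min_version`.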
+load( + "@prelude//:artifact_tset.bzl", + "project_artifacts", +) load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_buck2_compatibility.bzl", "apple_check_buck2_compatibility") load("@prelude//apple:apple_library.bzl", "AppleLibraryAdditionalParams", "apple_library_rule_constructor_params_and_swift_providers") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") # @oss-disable: load("@prelude//apple/meta_only:apple_test_re_capabilities.bzl", "ios_test_re_capabilities", "macos_test_re_capabilities") @@ -16,11 +19,11 @@ load( "@prelude//cxx:argsfiles.bzl", "CompileArgsfile", # @unused Used as a type ) +load("@prelude//cxx:cxx_library.bzl", "cxx_library_parameterized") load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) -load("@prelude//cxx:cxx_library.bzl", "cxx_library_parameterized") load("@prelude//cxx:cxx_types.bzl", "CxxRuleProviderParams", "CxxRuleSubTargetParams") load( "@prelude//cxx:linker.bzl", @@ -43,18 +46,17 @@ load( ":apple_sdk_metadata.bzl", "MacOSXSdkMetadata", ) -load(":debug.bzl", "DEBUGINFO_SUBTARGET") +load(":debug.bzl", "AppleDebuggableInfo") load(":xcode.bzl", "apple_populate_xcode_attributes") load(":xctest_swift_support.bzl", "XCTestSwiftSupportInfo") def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: - apple_check_buck2_compatibility(ctx) - def get_apple_test_providers(deps_providers) -> list[Provider]: xctest_bundle = bundle_output(ctx) test_host_app_bundle = _get_test_host_app_bundle(ctx) test_host_app_binary = _get_test_host_app_binary(ctx, test_host_app_bundle) + ui_test_target_app_bundle = _get_ui_test_target_app_bundle(ctx) objc_bridging_header_flags = [ # Disable bridging header -> PCH compilation to mitigate an issue in Xcode 13 beta. @@ -69,11 +71,11 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # any xctests altogether, provided the test dylib is adhoc signed shared_library_flags += entitlements_link_flags(ctx) - # The linker will incluide adhoc signature for ARM64 by default, lets + # The linker will include adhoc signature for ARM64 by default, let's # ensure we always have an adhoc signature regardless of arch/linker logic. 
shared_library_flags += ["-Wl,-adhoc_codesign"] - constructor_params = apple_library_rule_constructor_params_and_swift_providers( + constructor_params, _ = apple_library_rule_constructor_params_and_swift_providers( ctx, AppleLibraryAdditionalParams( rule_type = "apple_test", @@ -95,7 +97,7 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: compilation_database = True, default = False, linkable_graph = False, - link_style_outputs = False, + link_style_outputs = True, merged_native_link_info = False, omnibus_root = False, preprocessors = False, @@ -123,18 +125,31 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: part_list_output = get_apple_bundle_part_list(ctx, AppleBundlePartListConstructorParams(binaries = [binary_part])) xctest_swift_support_needed = None + debug_info = None for p in cxx_library_output.providers: if isinstance(p, XCTestSwiftSupportInfo): xctest_swift_support_needed = p.support_needed - break + if isinstance(p, AppleDebuggableInfo): + debug_info = project_artifacts(ctx.actions, [p.debug_info_tset]) expect(xctest_swift_support_needed != None, "Expected `XCTestSwiftSupportInfo` provider to be present") + expect(debug_info != None, "Expected `AppleDebuggableInfo` provider to be present") bundle_parts = part_list_output.parts + _get_xctest_framework(ctx, xctest_swift_support_needed) + for sanitizer_runtime_dylib in cxx_library_output.sanitizer_runtime_files: + frameworks_destination = AppleBundleDestination("frameworks") + bundle_parts.append( + AppleBundlePart( + source = sanitizer_runtime_dylib, + destination = frameworks_destination, + codesign_on_copy = True, + ), + ) + primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, binary_part) swift_stdlib_args = SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path) - sub_targets = assemble_bundle( + bundle_result = assemble_bundle( ctx, xctest_bundle, bundle_parts, @@ -144,43 +159,44 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # + includes any entitlements if present. skip_adhoc_signing = True, ) - + sub_targets = bundle_result.sub_targets sub_targets.update(cxx_library_output.sub_targets) - (debuginfo,) = sub_targets[DEBUGINFO_SUBTARGET] + dsym_artifact = get_apple_dsym( ctx = ctx, executable = test_binary, - debug_info = debuginfo.default_outputs, + debug_info = debug_info, action_identifier = "generate_apple_test_dsym", output_path_override = get_bundle_dir_name(ctx) + ".dSYM", ) sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_output = dsym_artifact)] - # If the test has a test host, add a subtarget to build the test host app bundle. + # If the test has a test host and a ui test target, add the subtargets to build the app bundles. 
sub_targets["test-host"] = [DefaultInfo(default_output = test_host_app_bundle)] if test_host_app_bundle else [DefaultInfo()] + sub_targets["ui-test-target"] = [DefaultInfo(default_output = ui_test_target_app_bundle)] if ui_test_target_app_bundle else [DefaultInfo()] sub_targets[DWARF_AND_DSYM_SUBTARGET] = [ DefaultInfo(default_output = xctest_bundle, other_outputs = [dsym_artifact]), - _get_test_info(ctx, xctest_bundle, test_host_app_bundle, dsym_artifact), + _get_test_info(ctx, xctest_bundle, test_host_app_bundle, dsym_artifact, ui_test_target_app_bundle), ] return [ DefaultInfo(default_output = xctest_bundle, sub_targets = sub_targets), - _get_test_info(ctx, xctest_bundle, test_host_app_bundle), + _get_test_info(ctx, xctest_bundle, test_host_app_bundle, ui_test_target_app_bundle = ui_test_target_app_bundle), cxx_library_output.xcode_data_info, cxx_library_output.cxx_compilationdb_info, - ] + ] + bundle_result.providers if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_test_providers) else: return get_apple_test_providers([]) -def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_bundle: [Artifact, None], dsym_artifact: [Artifact, None] = None) -> Provider: +def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_bundle: Artifact | None, dsym_artifact: Artifact | None = None, ui_test_target_app_bundle: Artifact | None = None) -> Provider: # When interacting with Tpx, we just pass our various inputs via env vars, # since Tpx basiclaly wants structured output for this. - xctest_bundle = cmd_args(xctest_bundle).hidden(dsym_artifact) if dsym_artifact else xctest_bundle + xctest_bundle = cmd_args(xctest_bundle, hidden = dsym_artifact) if dsym_artifact else xctest_bundle env = {"XCTEST_BUNDLE": xctest_bundle} if test_host_app_bundle == None: @@ -189,6 +205,10 @@ def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_ env["HOST_APP_BUNDLE"] = test_host_app_bundle tpx_label = "tpx:apple_test:buck2:appTest" + if ui_test_target_app_bundle != None: + env["TARGET_APP_BUNDLE"] = ui_test_target_app_bundle + tpx_label = "tpx:apple_test:buck2:uiTest" + labels = ctx.attrs.labels + [tpx_label] labels.append(tpx_label) @@ -232,7 +252,7 @@ def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_ }, ) -def _get_test_host_app_bundle(ctx: AnalysisContext) -> [Artifact, None]: +def _get_test_host_app_bundle(ctx: AnalysisContext) -> Artifact | None: """ Get the bundle for the test host app, if one exists for this test. """ if ctx.attrs.test_host_app: # Copy the test host app bundle into test's output directory @@ -243,18 +263,29 @@ def _get_test_host_app_bundle(ctx: AnalysisContext) -> [Artifact, None]: return None -def _get_test_host_app_binary(ctx: AnalysisContext, test_host_app_bundle: [Artifact, None]) -> [cmd_args, None]: +def _get_test_host_app_binary(ctx: AnalysisContext, test_host_app_bundle: Artifact | None) -> [cmd_args, None]: """ Reference to the binary with the test host app bundle, if one exists for this test. Captures the bundle as an artifact in the cmd_args. 
""" if ctx.attrs.test_host_app == None: return None parts = [test_host_app_bundle] - rel_path = bundle_relative_path_for_destination(AppleBundleDestination("executables"), get_apple_sdk_name(ctx), ctx.attrs.extension) + rel_path = bundle_relative_path_for_destination(AppleBundleDestination("executables"), get_apple_sdk_name(ctx), ctx.attrs.extension, False) if len(rel_path) > 0: parts.append(rel_path) parts.append(ctx.attrs.test_host_app[AppleBundleInfo].binary_name) return cmd_args(parts, delimiter = "/") +def _get_ui_test_target_app_bundle(ctx: AnalysisContext) -> Artifact | None: + """ Get the bundle for the ui test target app, if one exists for this test. """ + if ctx.attrs.ui_test_target_app: + # Copy the ui test target app bundle into test's output directory + original_bundle = ctx.attrs.ui_test_target_app[AppleBundleInfo].bundle + ui_test_target_app_bundle = ctx.actions.declare_output(original_bundle.basename) + ctx.actions.copy_file(ui_test_target_app_bundle, original_bundle) + return ui_test_target_app_bundle + + return None + def _get_bundle_loader_flags(binary: [cmd_args, None]) -> list[typing.Any]: if binary: # During linking we need to link the test shared lib against the test host binary. The diff --git a/prelude/apple/apple_toolchain.bzl b/prelude/apple/apple_toolchain.bzl index 2f84d2506c..cf8d907cdf 100644 --- a/prelude/apple/apple_toolchain.bzl +++ b/prelude/apple/apple_toolchain.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//apple:apple_target_sdk_version.bzl", "max_sdk_version") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftToolchainInfo") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") @@ -12,10 +13,21 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainIn def apple_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: sdk_path = ctx.attrs._internal_sdk_path or ctx.attrs.sdk_path platform_path = ctx.attrs._internal_platform_path or ctx.attrs.platform_path + + if ctx.attrs.min_version != None and \ + ctx.attrs.target_sdk_version != None and \ + ctx.attrs.min_version != ctx.attrs.target_sdk_version and \ + max_sdk_version(ctx.attrs.target_sdk_version, ctx.attrs.min_version) == ctx.attrs.min_version: + fail("target_sdk_version {} is less than toolchain min_version {}".format( + ctx.attrs.target_sdk_version, + ctx.attrs.min_version, + )) + return [ DefaultInfo(), AppleToolchainInfo( actool = ctx.attrs.actool[RunInfo], + architecture = ctx.attrs.architecture, codesign = ctx.attrs.codesign[RunInfo], codesign_allocate = ctx.attrs.codesign_allocate[RunInfo], codesign_identities_command = ctx.attrs.codesign_identities_command[RunInfo] if ctx.attrs.codesign_identities_command else None, @@ -30,16 +42,17 @@ def apple_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: installer = ctx.attrs.installer, libtool = ctx.attrs.libtool[RunInfo], lipo = ctx.attrs.lipo[RunInfo], + mapc = ctx.attrs.mapc[RunInfo] if ctx.attrs.mapc else None, min_version = ctx.attrs.min_version, momc = ctx.attrs.momc[RunInfo], - odrcov = ctx.attrs.odrcov[RunInfo] if ctx.attrs.odrcov else None, + objdump = ctx.attrs.objdump[RunInfo] if ctx.attrs.objdump else None, platform_path = platform_path, sdk_build_version = ctx.attrs.build_version, sdk_name = ctx.attrs.sdk_name, sdk_path = sdk_path, sdk_version = ctx.attrs.version, swift_toolchain_info = 
ctx.attrs.swift_toolchain[SwiftToolchainInfo] if ctx.attrs.swift_toolchain else None, - watch_kit_stub_binary = ctx.attrs.watch_kit_stub_binary, + target_sdk_version = ctx.attrs.target_sdk_version, xcode_build_version = ctx.attrs.xcode_build_version, xcode_version = ctx.attrs.xcode_version, xctest = ctx.attrs.xctest[RunInfo], diff --git a/prelude/apple/apple_toolchain_types.bzl b/prelude/apple/apple_toolchain_types.bzl index 0f7435a91a..fead17a1c1 100644 --- a/prelude/apple/apple_toolchain_types.bzl +++ b/prelude/apple/apple_toolchain_types.bzl @@ -5,54 +5,59 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftToolchainInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") + AppleToolchainInfo = provider( # @unsorted-dict-items fields = { - "actool": provider_field(typing.Any, default = None), # "RunInfo" - "codesign_allocate": provider_field(typing.Any, default = None), # "RunInfo" - "codesign_identities_command": provider_field(typing.Any, default = None), # ["RunInfo", None] - "codesign": provider_field(typing.Any, default = None), # "RunInfo" - "compile_resources_locally": provider_field(typing.Any, default = None), # bool - "copy_scene_kit_assets": provider_field(typing.Any, default = None), # "RunInfo" - "cxx_platform_info": provider_field(typing.Any, default = None), # "CxxPlatformInfo" - "cxx_toolchain_info": provider_field(typing.Any, default = None), # "CxxToolchainInfo" - "dsymutil": provider_field(typing.Any, default = None), # "RunInfo" - "dwarfdump": provider_field(typing.Any, default = None), # ["RunInfo", None] - "extra_linker_outputs": provider_field(typing.Any, default = None), # [str] - "ibtool": provider_field(typing.Any, default = None), # "RunInfo" - "installer": provider_field(typing.Any, default = None), # label - "libtool": provider_field(typing.Any, default = None), # "RunInfo" - "lipo": provider_field(typing.Any, default = None), # "RunInfo" - "min_version": provider_field(typing.Any, default = None), # [None, str] - "momc": provider_field(typing.Any, default = None), # "RunInfo" - "odrcov": provider_field(typing.Any, default = None), # ["RunInfo", None] - "platform_path": provider_field(typing.Any, default = None), # [str, artifact] - "sdk_build_version": provider_field(typing.Any, default = None), # "[None, str]" + "actool": provider_field(RunInfo), + "architecture": provider_field(str), + "codesign_allocate": provider_field(RunInfo), + "codesign_identities_command": provider_field(RunInfo | None, default = None), + "codesign": provider_field(RunInfo), + "compile_resources_locally": provider_field(bool), + "copy_scene_kit_assets": provider_field(RunInfo), + "cxx_platform_info": provider_field(CxxPlatformInfo), + "cxx_toolchain_info": provider_field(CxxToolchainInfo), + "dsymutil": provider_field(RunInfo), + "dwarfdump": provider_field(RunInfo | None, default = None), + "extra_linker_outputs": provider_field(list[str]), + "ibtool": provider_field(RunInfo), + "installer": provider_field(Label), + "libtool": provider_field(RunInfo), + "lipo": provider_field(RunInfo), + "mapc": provider_field(RunInfo | None, default = None), + "min_version": provider_field(str), + "momc": provider_field(RunInfo), + "objdump": provider_field(RunInfo | None, default = None), + "platform_path": provider_field(str | Artifact), + "sdk_build_version": provider_field(str | None, default = None), # SDK name to be passed to tools (e.g. 
actool), equivalent to ApplePlatform::getExternalName() in v1. - "sdk_name": provider_field(typing.Any, default = None), # str - "sdk_path": provider_field(typing.Any, default = None), # [str, artifact] + "sdk_name": provider_field(str), + "sdk_path": provider_field(str | Artifact), # TODO(T124581557) Make it non-optional once there is no "selected xcode" toolchain - "sdk_version": provider_field(typing.Any, default = None), # [None, str] - "swift_toolchain_info": provider_field(typing.Any, default = None), # "SwiftToolchainInfo" - "watch_kit_stub_binary": provider_field(typing.Any, default = None), # "artifact" - "xcode_build_version": provider_field(typing.Any, default = None), # "[None, str]" - "xcode_version": provider_field(typing.Any, default = None), # "[None, str]" - "xctest": provider_field(typing.Any, default = None), # "RunInfo" + "sdk_version": provider_field(str | None, default = None), + "swift_toolchain_info": provider_field(SwiftToolchainInfo), + "target_sdk_version": provider_field(str | None, default = None), + "xcode_build_version": provider_field(str | None, default = None), + "xcode_version": provider_field(str | None, default = None), + "xctest": provider_field(RunInfo), }, ) AppleToolsInfo = provider( # @unsorted-dict-items fields = { - "assemble_bundle": provider_field(typing.Any, default = None), # RunInfo - "split_arch_combine_dsym_bundles_tool": provider_field(typing.Any, default = None), # RunInfo - "dry_codesign_tool": provider_field(typing.Any, default = None), # "RunInfo" - "adhoc_codesign_tool": provider_field(typing.Any, default = None), # "RunInfo" - "selective_debugging_scrubber": provider_field(typing.Any, default = None), # "RunInfo" - "info_plist_processor": provider_field(typing.Any, default = None), # RunInfo - "ipa_package_maker": provider_field(typing.Any, default = None), # RunInfo - "make_modulemap": provider_field(typing.Any, default = None), # "RunInfo" - "make_vfsoverlay": provider_field(typing.Any, default = None), # "RunInfo" - "swift_objc_header_postprocess": provider_field(typing.Any, default = None), # "RunInfo" + "assemble_bundle": provider_field(RunInfo), + "split_arch_combine_dsym_bundles_tool": provider_field(RunInfo), + "dry_codesign_tool": provider_field(RunInfo), + "adhoc_codesign_tool": provider_field(RunInfo), + "selective_debugging_scrubber": provider_field(RunInfo), + "info_plist_processor": provider_field(RunInfo), + "ipa_package_maker": provider_field(RunInfo), + "make_modulemap": provider_field(RunInfo), + "make_vfsoverlay": provider_field(RunInfo), + "xcframework_maker": provider_field(RunInfo), }, ) diff --git a/prelude/apple/apple_universal_binaries.bzl b/prelude/apple/apple_universal_binaries.bzl index 43c76fc801..fdcb6c78e9 100644 --- a/prelude/apple/apple_universal_binaries.bzl +++ b/prelude/apple/apple_universal_binaries.bzl @@ -18,25 +18,32 @@ def create_universal_binary( dsym_bundle_name: [str, None], split_arch_dsym: bool) -> AppleBundleBinaryOutput: binary_output = ctx.actions.declare_output("UniversalBinary" if binary_name == None else binary_name, dir = False) - lipo_cmd = cmd_args([ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo]) + lipo_cmd = [ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo] for (_, binary) in binary_deps.items(): - lipo_cmd.add(cmd_args(binary[DefaultInfo].default_outputs[0])) + lipo_cmd.append(cmd_args(binary[DefaultInfo].default_outputs[0])) - lipo_cmd.add(["-create", "-output", binary_output.as_output()]) - ctx.actions.run(lipo_cmd, category = "lipo") + lipo_cmd.extend(["-create", 
"-output", binary_output.as_output()]) + ctx.actions.run(cmd_args(lipo_cmd), category = "lipo") + + # Universal binaries can be created out of plain `cxx_binary()` / `cxx_library()` + # which lack the `AppleDebuggableInfo` provider. + # TODO(T174234334): Uniformly support debuggable info for apple_*/cxx_* + contains_full_debuggable_info = _all_binaries_have_apple_debuggable_info(binary_deps) dsym_output = None - if split_arch_dsym: + if split_arch_dsym and contains_full_debuggable_info: dsym_output = ctx.actions.declare_output("UniversalBinary.dSYM" if dsym_bundle_name == None else dsym_bundle_name, dir = True) - dsym_combine_cmd = cmd_args([ctx.attrs._apple_tools[AppleToolsInfo].split_arch_combine_dsym_bundles_tool]) + dsym_combine_cmd = [ctx.attrs._apple_tools[AppleToolsInfo].split_arch_combine_dsym_bundles_tool] for (arch, binary) in binary_deps.items(): - dsym_combine_cmd.add(["--dsym-bundle", cmd_args(binary.get(AppleDebuggableInfo).dsyms[0]), "--arch", arch]) - dsym_combine_cmd.add(["--output", dsym_output.as_output()]) - ctx.actions.run(dsym_combine_cmd, category = "universal_binaries_dsym") + dsym_combine_cmd.extend(["--dsym-bundle", cmd_args(binary.get(AppleDebuggableInfo).dsyms[0]), "--arch", arch]) + dsym_combine_cmd.extend(["--output", dsym_output.as_output()]) + ctx.actions.run(cmd_args(dsym_combine_cmd), category = "universal_binaries_dsym") - all_debug_info_tsets = [binary.get(AppleDebuggableInfo).debug_info_tset for binary in binary_deps.values()] + all_debug_info_tsets = [] + if contains_full_debuggable_info: + all_debug_info_tsets = [binary.get(AppleDebuggableInfo).debug_info_tset for binary in binary_deps.values()] return AppleBundleBinaryOutput( binary = binary_output, @@ -50,3 +57,10 @@ def create_universal_binary( ), ), ) + +def _all_binaries_have_apple_debuggable_info(binary_deps: dict[str, Dependency]) -> bool: + for binary in binary_deps.values(): + info = binary.get(AppleDebuggableInfo) + if info == None: + return False + return True diff --git a/prelude/apple/apple_universal_executable.bzl b/prelude/apple/apple_universal_executable.bzl index 87cac4d55f..a8ca1e605d 100644 --- a/prelude/apple/apple_universal_executable.bzl +++ b/prelude/apple/apple_universal_executable.bzl @@ -14,7 +14,7 @@ load(":apple_bundle_utility.bzl", "get_default_binary_dep", "get_flattened_binar load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo") load(":apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym_ext") load(":apple_universal_binaries.bzl", "create_universal_binary") -load(":debug.bzl", "AppleDebuggableInfo") +load(":debug.bzl", "AppleDebuggableInfo", "DEBUGINFO_SUBTARGET") load(":resource_groups.bzl", "ResourceGraphInfo") _FORWARDED_PROVIDER_TYPES = [ @@ -51,6 +51,11 @@ def apple_universal_executable_impl(ctx: AnalysisContext) -> list[Provider]: split_arch_dsym = ctx.attrs.split_arch_dsym, ) + debug_info = project_artifacts( + actions = ctx.actions, + tsets = [binary_outputs.debuggable_info.debug_info_tset], + ) + sub_targets = {} if ctx.attrs.split_arch_dsym: dsyms = binary_outputs.debuggable_info.dsyms @@ -58,15 +63,19 @@ def apple_universal_executable_impl(ctx: AnalysisContext) -> list[Provider]: dsyms = [get_apple_dsym_ext( ctx = ctx, executable = binary_outputs.binary, - debug_info = project_artifacts( - actions = ctx.actions, - tsets = [binary_outputs.debuggable_info.debug_info_tset], - ), + debug_info = debug_info, action_identifier = ctx.attrs.name + "_dsym", output_path = dsym_name, )] sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_outputs = dsyms)] + 
debug_info_artifacts_manifest = ctx.actions.write( + "debuginfo.artifacts", + debug_info, + with_inputs = True, + ) + sub_targets[DEBUGINFO_SUBTARGET] = [DefaultInfo(default_output = debug_info_artifacts_manifest)] + default_binary = get_default_binary_dep(ctx.attrs.executable) forwarded_providers = [] for forward_provider_type in _FORWARDED_PROVIDER_TYPES: diff --git a/prelude/apple/apple_utility.bzl b/prelude/apple/apple_utility.bzl index 81fcb867a0..7e12625f70 100644 --- a/prelude/apple/apple_utility.bzl +++ b/prelude/apple/apple_utility.bzl @@ -8,20 +8,6 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load("@prelude//cxx:headers.bzl", "CxxHeadersLayout", "CxxHeadersNaming") load("@prelude//utils:utils.bzl", "value_or") -load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node") - -_VERSION_PLACEHOLDER = "(VERSION)" - -# TODO(T115177501): Make target triples part of the toolchains -# Map from SDK name -> target triple _without_ leading architecture -_TARGET_TRIPLE_MAP = { - "iphoneos": "apple-ios{}".format(_VERSION_PLACEHOLDER), - "iphonesimulator": "apple-ios{}-simulator".format(_VERSION_PLACEHOLDER), - "maccatalyst": "apple-ios{}-macabi".format(_VERSION_PLACEHOLDER), - "macosx": "apple-macosx{}".format(_VERSION_PLACEHOLDER), - "watchos": "apple-watchos{}".format(_VERSION_PLACEHOLDER), - "watchsimulator": "apple-watchos{}-simulator".format(_VERSION_PLACEHOLDER), -} def get_apple_cxx_headers_layout(ctx: AnalysisContext) -> CxxHeadersLayout: namespace = value_or(ctx.attrs.header_path_prefix, ctx.attrs.name) @@ -33,23 +19,8 @@ def get_module_name(ctx: AnalysisContext) -> str: def has_apple_toolchain(ctx: AnalysisContext) -> bool: return hasattr(ctx.attrs, "_apple_toolchain") -def get_versioned_target_triple(ctx: AnalysisContext) -> str: - apple_toolchain_info = ctx.attrs._apple_toolchain[AppleToolchainInfo] - swift_toolchain_info = apple_toolchain_info.swift_toolchain_info - - architecture = swift_toolchain_info.architecture - if architecture == None: - fail("Need to set `architecture` field of swift_toolchain(), target: {}".format(ctx.label)) - - target_sdk_version = get_min_deployment_version_for_node(ctx) or "" - - sdk_name = apple_toolchain_info.sdk_name - target_triple_with_version_placeholder = _TARGET_TRIPLE_MAP.get(sdk_name) - if target_triple_with_version_placeholder == None: - fail("Could not find target triple for sdk = {}".format(sdk_name)) - - versioned_target_triple = target_triple_with_version_placeholder.replace(_VERSION_PLACEHOLDER, target_sdk_version) - return "{}-{}".format(architecture, versioned_target_triple) +def get_apple_architecture(ctx: AnalysisContext) -> str: + return ctx.attrs._apple_toolchain[AppleToolchainInfo].architecture def get_apple_stripped_attr_value_with_default_fallback(ctx: AnalysisContext) -> bool: stripped = ctx.attrs.stripped @@ -70,15 +41,15 @@ def expand_relative_prefixed_sdk_path( "$RESOURCEDIR": swift_resource_dir, "$SDKROOT": sdk_path, } - expanded_cmd = cmd_args() + expanded_cmd = [] for (path_variable, path_value) in path_expansion_map.items(): if path_to_expand.startswith(path_variable): path = path_to_expand[len(path_variable):] if path.find("$") == 0: fail("Failed to expand framework path: {}".format(path)) - expanded_cmd.add(cmd_args([path_value, path], delimiter = "")) + expanded_cmd.append(cmd_args([path_value, path], delimiter = "")) - return expanded_cmd + return cmd_args(expanded_cmd) def get_disable_pch_validation_flags() -> list[str]: """ diff --git 
a/prelude/apple/apple_xcuitest.bzl b/prelude/apple/apple_xcuitest.bzl new file mode 100644 index 0000000000..d08d726460 --- /dev/null +++ b/prelude/apple/apple_xcuitest.bzl @@ -0,0 +1,92 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load(":apple_bundle_destination.bzl", "AppleBundleDestination") +load(":apple_bundle_part.bzl", "AppleBundlePart", "assemble_bundle") +load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleType") +load(":apple_info_plist.bzl", "process_info_plist") +load(":apple_utility.bzl", "get_apple_architecture") + +def apple_xcuitest_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: + # The XCUITest runner app bundle copies the application from the platform + # directory, and includes the UI test bundle in the PlugIns folder. + output_bundle = ctx.actions.declare_output(ctx.attrs.name + "." + ctx.attrs.extension) + bundle_parts = [ + _get_xctrunner_binary(ctx), + _get_uitest_bundle(ctx), + ] + _get_xctrunner_frameworks(ctx) + bundle_result = assemble_bundle( + ctx = ctx, + bundle = output_bundle, + info_plist_part = process_info_plist(ctx, override_input = None), + parts = bundle_parts, + swift_stdlib_args = None, + ) + + return [ + DefaultInfo(default_output = output_bundle), + AppleBundleInfo( + bundle = output_bundle, + bundle_type = AppleBundleType("default"), + binary_name = ctx.attrs.name, + contains_watchapp = False, + # The test runner binary does not contain Swift + skip_copying_swift_stdlib = True, + ), + ] + bundle_result.providers + +def _get_uitest_bundle(ctx: AnalysisContext) -> AppleBundlePart: + return AppleBundlePart( + source = ctx.attrs.test_bundle[DefaultInfo].default_outputs[0], + destination = AppleBundleDestination("plugins"), + ) + +def _get_xctrunner_binary(ctx: AnalysisContext) -> AppleBundlePart: + arch = get_apple_architecture(ctx) + lipo = ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo + platform_path = ctx.attrs._apple_toolchain[AppleToolchainInfo].platform_path + thin_binary = ctx.actions.declare_output(ctx.attrs.name) + xctrunner_path = cmd_args(platform_path, "Developer/Library/Xcode/Agents/XCTRunner.app/XCTRunner", delimiter = "/") + ctx.actions.run([ + lipo, + xctrunner_path, + "-extract", + arch, + "-output", + thin_binary.as_output(), + ], category = "copy_xctrunner") + + return AppleBundlePart( + source = thin_binary, + destination = AppleBundleDestination("executables"), + ) + +def _get_xctrunner_frameworks(ctx: AnalysisContext) -> list[AppleBundlePart]: + # We need to copy the framework as AppleBundlePart requires an artifact. + # It would be nicer to make this an arglike and avoid the copies. + # It would also be nicer to exclude the headers. 
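The runner binary above is produced by thinning the prebuilt fat XCTRunner with lipo -extract before bundling. A condensed, hypothetical restatement of that action (the framework-copying helper the comment describes follows next):

def _thin_xctrunner(ctx: AnalysisContext, lipo, fat_binary, arch: str) -> Artifact:
    # lipo -extract keeps only the named architecture slice of a fat binary.
    thin_binary = ctx.actions.declare_output(ctx.attrs.name)
    ctx.actions.run(
        [lipo, fat_binary, "-extract", arch, "-output", thin_binary.as_output()],
        category = "copy_xctrunner",
    )
    return thin_binary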
+ def copy_platform_framework(platform_relative_path: str) -> AppleBundlePart: + copied_framework = ctx.actions.declare_output(paths.basename(platform_relative_path)) + path = cmd_args(ctx.attrs._apple_toolchain[AppleToolchainInfo].platform_path, platform_relative_path, delimiter = "/") + ctx.actions.run(["cp", "-PR", path, copied_framework.as_output()], category = "copy_framework", identifier = platform_relative_path) + return AppleBundlePart( + source = copied_framework, + destination = AppleBundleDestination("frameworks"), + codesign_on_copy = True, + ) + + runner_frameworks = [ + "Developer/Library/Frameworks/XCTest.framework", + "Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework", + "Developer/Library/PrivateFrameworks/XCTestCore.framework", + "Developer/Library/PrivateFrameworks/XCTestSupport.framework", + "Developer/Library/PrivateFrameworks/XCUIAutomation.framework", + "Developer/Library/PrivateFrameworks/XCUnit.framework", + ] + return [copy_platform_framework(p) for p in runner_frameworks] diff --git a/prelude/apple/debug.bzl b/prelude/apple/debug.bzl index 7b6b7239bd..e7a91d1147 100644 --- a/prelude/apple/debug.bzl +++ b/prelude/apple/debug.bzl @@ -26,13 +26,13 @@ AppleDebuggableInfo = provider( # a. the owning library target to artifacts, or # b. the owning bundle target to filtered artifacts "debug_info_tset": provider_field(ArtifactTSet), - # In the case of b above, contians the map of library target to artifacts, else None + # In the case of b above, contains the map of library target to artifacts, else None "filtered_map": provider_field([dict[Label, list[Artifact]], None], default = None), }, ) _AppleDebugInfo = record( - debug_info_tset = "ArtifactTSet", + debug_info_tset = ArtifactTSet, filtered_map = field([dict[Label, list[Artifact]], None]), ) diff --git a/prelude/apple/mockingbird/mockingbird_mock.bzl b/prelude/apple/mockingbird/mockingbird_mock.bzl new file mode 100644 index 0000000000..61adf51739 --- /dev/null +++ b/prelude/apple/mockingbird/mockingbird_mock.bzl @@ -0,0 +1,162 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load(":mockingbird_types.bzl", "MockingbirdLibraryInfo", "MockingbirdLibraryRecord", "MockingbirdSourcesInfo") + +def _impl(ctx: AnalysisContext) -> list[Provider]: + mockingbird_info = ctx.attrs.module[MockingbirdLibraryInfo] + + dep_names = [dep[MockingbirdLibraryInfo].name for dep in ctx.attrs.deps] + + json_project_description = _get_mockingbird_json_project_description(info = mockingbird_info, included_srcs = ctx.attrs.srcs, excluded_srcs = ctx.attrs.excluded_srcs, dep_names = dep_names) + json_project_description_output = ctx.actions.declare_output("mockingbird_project.json") + ctx.actions.write_json(json_project_description_output.as_output(), json_project_description) + + mockingbird_source = ctx.actions.declare_output(mockingbird_info.name + "Mocks.generated.swift", dir = False) + cmd = cmd_args( + hidden = [record.src_dir for record in mockingbird_info.tset.traverse()], + ) + + params = [ + ctx.attrs._mockingbird_bin[RunInfo], + "generate", + "--target", + mockingbird_info.name, + "--project", + json_project_description_output, + "--output", + mockingbird_source.as_output(), + "--header", + "// (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.", + "--support", + ctx.attrs._mockingbird_support[DefaultInfo].default_outputs, + "--verbose", + "--disable-cache", + ] + + if ctx.attrs.only_protocols: + params.append("--only-protocols") + + cmd.add(params) + + ctx.actions.run( + cmd, + category = "mockingbird", + local_only = True, + ) + # TODO: T182716646 Remove local_only + + return [ + DefaultInfo(mockingbird_source), + MockingbirdSourcesInfo(srcs = [mockingbird_source]), + ] + +def _attrs(): + attribs = { + ## If the superclass for an object being mocked is in another module add it as a dep so mockingbird can find the implementation. + "deps": attrs.list(attrs.dep(), default = []), + ## The list of source files to exclude. Only the name of the file, excluding the path, should be set. If set, the srcs attribute will be ignored. + "excluded_srcs": attrs.list(attrs.string(), default = []), + ## The module to generate mocks for. + "module": attrs.dep(), + ## Whether to only generate mocks for Swift protocols. + "only_protocols": attrs.bool(default = False), + ## A list of source files to include. Only the name of the file, excluding the path, should be set. By default all source files are included and this doesn't need to be specified. 
+ "srcs": attrs.list(attrs.string(), default = []), + "_mockingbird_bin": attrs.exec_dep(providers = [RunInfo], default = "fbsource//fbobjc/VendorLib/Mockingbird:mockingbird-binary"), + "_mockingbird_support": attrs.dep(providers = [DefaultInfo], default = "fbsource//fbobjc/VendorLib/Mockingbird:MockingbirdSupport"), + } + return attribs + +registration_spec = RuleRegistrationSpec( + name = "mockingbird_mock", + impl = _impl, + attrs = _attrs(), +) + +# Produce JSON project description for Mockingbird codegen +# https://mockingbirdswift.com/json-project-description +# { +# "targets": [ +# { +# "name": "MyLibrary", +# "type": "library", +# "path": "/path/to/MyLibrary", +# "dependencies": [], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# }, +# { +# "name": "MyOtherLibrary", +# "type": "library", +# "path": "/path/to/MyOtherLibrary", +# "dependencies": [ +# "MyLibrary" +# ], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# }, +# { +# "name": "MyLibraryTests", +# "type": "test", +# "path": "/path/to/MyLibraryTests", +# "dependencies": [ +# "MyLibrary" +# ], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# } +# ] +# } +def _get_mockingbird_json_project_description(info: MockingbirdLibraryInfo, included_srcs: list[str], excluded_srcs: list[str], dep_names: list[str]) -> dict: + targets = [] + for record in info.tset.traverse(): + if record.name == info.name: + targets.append(_target_dict_for_mockingbird_record(record = record, included_srcs = included_srcs, excluded_srcs = excluded_srcs, include_non_exported_deps = True)) + elif record.name in dep_names: + targets.append(_target_dict_for_mockingbird_record(record = record, included_srcs = [], excluded_srcs = [], include_non_exported_deps = False)) + json = { + "targets": targets, + } + + return json + +def _target_dict_for_mockingbird_record(record: MockingbirdLibraryRecord, included_srcs: list[str], excluded_srcs: list[str], include_non_exported_deps: bool) -> dict: + srcs = [] + if len(included_srcs) > 0 and len(excluded_srcs) > 0: + fail("Included srcs and excluded srcs cannot both be set at the same time") + + if len(included_srcs) > 0: + for src in record.srcs: + if src.basename in included_srcs: + srcs.append(src.basename) + elif len(excluded_srcs) > 0: + for src in record.srcs: + if src.basename not in excluded_srcs: + srcs.append(src.basename) + else: + srcs = [src.basename for src in record.srcs] + + deps = record.exported_dep_names + + if include_non_exported_deps: + deps = deps + record.dep_names + + return { + "dependencies": deps, + "name": record.name, + "path": record.src_dir, + "sources": srcs, + "type": record.type, + } diff --git a/prelude/apple/mockingbird/mockingbird_types.bzl b/prelude/apple/mockingbird/mockingbird_types.bzl new file mode 100644 index 0000000000..0eee9cb67f --- /dev/null +++ b/prelude/apple/mockingbird/mockingbird_types.bzl @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +MockingbirdLibraryInfoTSet = transitive_set() + +MockingbirdTargetType = enum("library", "test") + +MockingbirdLibraryInfo = provider( + fields = { + # The name of the target. 
+ "name": provider_field(str), + # Contains a tset with this target's MockingbirdLibraryRecord as the value + # and all of its dependency's MockingbirdLibraryRecord in the children. + "tset": provider_field(MockingbirdLibraryInfoTSet), + }, +) + +MockingbirdLibraryRecord = record( + # The names of this target's dependencies. + dep_names = field(list[str]), + # The names of this target's exported dependencies. + exported_dep_names = field(list[str]), + # The name of the target. + name = str, + # Swift sources in this target. + srcs = field(list[Artifact]), + # Whether this is a library or a test. + type = field(MockingbirdTargetType), + # Symlinked directory containing the source files. + src_dir = field(Artifact), +) + +MockingbirdSourcesInfo = provider( + fields = { + # Source files containing the auto generated mocks produced by mockingbird-cli. + "srcs": provider_field(list[Artifact]), + }, +) diff --git a/prelude/apple/modulemap.bzl b/prelude/apple/modulemap.bzl index 7cba3e9eb9..7cfb0b7eb7 100644 --- a/prelude/apple/modulemap.bzl +++ b/prelude/apple/modulemap.bzl @@ -17,7 +17,7 @@ load( ) load(":apple_utility.bzl", "get_module_name") -def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: list[CHeader], swift_header: [Artifact, None]) -> CPreprocessor: +def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: list[CHeader], swift_header: Artifact | None) -> CPreprocessor: # We don't want to name this module.modulemap to avoid implicit importing if name == "module": fail("Don't use the name `module` for modulemaps, this will allow for implicit importing.") @@ -69,20 +69,20 @@ def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: li ctx.actions.run(cmd, category = "modulemap", identifier = name) return CPreprocessor( - relative_args = CPreprocessorArgs(args = _exported_preprocessor_args(symlink_tree)), - absolute_args = CPreprocessorArgs(args = _exported_preprocessor_args(symlink_tree)), + args = CPreprocessorArgs(args = _exported_preprocessor_args(symlink_tree)), modular_args = _args_for_modulemap(output, symlink_tree, swift_header), - modulemap_path = cmd_args(output).hidden(cmd_args(symlink_tree)), + modulemap_path = cmd_args(output, hidden = cmd_args(symlink_tree)), ) def _args_for_modulemap( modulemap: Artifact, symlink_tree: Artifact, - swift_header: [Artifact, None]) -> list[cmd_args]: - cmd = cmd_args(modulemap, format = "-fmodule-map-file={}") - cmd.hidden(symlink_tree) - if swift_header: - cmd.hidden(swift_header) + swift_header: Artifact | None) -> list[cmd_args]: + cmd = cmd_args( + modulemap, + format = "-fmodule-map-file={}", + hidden = [symlink_tree] + ([swift_header] if swift_header else []), + ) return [cmd] diff --git a/prelude/apple/prebuilt_apple_framework.bzl b/prelude/apple/prebuilt_apple_framework.bzl index e717eb40b3..8d2dafba1b 100644 --- a/prelude/apple/prebuilt_apple_framework.bzl +++ b/prelude/apple/prebuilt_apple_framework.bzl @@ -9,6 +9,7 @@ load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_exported_linker_flags", + "cxx_attr_preferred_linkage", "cxx_platform_supported", ) load( @@ -27,7 +28,6 @@ load( "LibOutputStyle", "LinkInfo", "LinkInfos", - "Linkage", "create_merged_link_info", ) load( @@ -63,7 +63,7 @@ def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: inherited_pp_info = cxx_inherited_preprocessor_infos(ctx.attrs.deps) providers.append(cxx_merge_cpreprocessors( ctx, - 
[CPreprocessor(relative_args = CPreprocessorArgs(args = ["-F", framework_dir]))], + [CPreprocessor(args = CPreprocessorArgs(args = ["-F", framework_dir]))], inherited_pp_info, )) @@ -78,10 +78,12 @@ def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: name = framework_name, pre_flags = args, ) + link_info = LinkInfos(default = link) + providers.append(create_merged_link_info( ctx, get_cxx_toolchain_info(ctx).pic_behavior, - {output_style: LinkInfos(default = link) for output_style in LibOutputStyle}, + {output_style: link_info for output_style in LibOutputStyle}, )) # Create, augment and provide the linkable graph. @@ -91,8 +93,8 @@ def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: ctx, linkable_node = create_linkable_node( ctx, - preferred_linkage = Linkage("shared"), - link_infos = {LibOutputStyle("shared_lib"): LinkInfos(default = link)}, + preferred_linkage = cxx_attr_preferred_linkage(ctx), + link_infos = {output_style: link_info for output_style in LibOutputStyle}, # TODO(cjhopman): this should be set to non-None default_soname = None, ), diff --git a/prelude/apple/resource_groups.bzl b/prelude/apple/resource_groups.bzl index 8b5f63ba2d..43f8d95f13 100644 --- a/prelude/apple/resource_groups.bzl +++ b/prelude/apple/resource_groups.bzl @@ -5,14 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load( - "@prelude//cxx:groups.bzl", - "Group", - "MATCH_ALL_LABEL", -) +load("@prelude//cxx:groups_types.bzl", "Group", "MATCH_ALL_LABEL") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", ) load(":apple_asset_catalog_types.bzl", "AppleAssetCatalogSpec") load(":apple_core_data_types.bzl", "AppleCoreDataSpec") @@ -35,6 +31,8 @@ ResourceGroupInfo = provider( }, ) +RESOURCE_GROUP_MAP_ATTR = attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None) + ResourceGraphNode = record( label = field(Label), # Attribute labels on the target. 
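The hunk that follows switches the resource-graph walk from breadth-first to depth-first. A minimal sketch of the traversal utility's calling convention, with the node shape taken from the surrounding code (each node exposes deps and exported_deps):

def _transitive_resource_deps(resource_graph_node_map: dict, root):
    def get_traversed_deps(target):
        node = resource_graph_node_map[target]
        return node.exported_deps + node.deps

    # Returns the reachable targets in depth-first order.
    return depth_first_traversal_by(
        resource_graph_node_map,
        get_traversed_deps(root),
        get_traversed_deps,
    )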
@@ -160,7 +158,7 @@ def get_filtered_resources( node = resource_graph_node_map[target] # buildifier: disable=uninitialized return node.exported_deps + node.deps - targets = breadth_first_traversal_by( + targets = depth_first_traversal_by( resource_graph_node_map, get_traversed_deps(root), get_traversed_deps, diff --git a/prelude/apple/scene_kit_assets.bzl b/prelude/apple/scene_kit_assets.bzl index 650919b339..caedfbed86 100644 --- a/prelude/apple/scene_kit_assets.bzl +++ b/prelude/apple/scene_kit_assets.bzl @@ -24,7 +24,7 @@ def scene_kit_assets_impl(ctx: AnalysisContext) -> list[Provider]: ) return [DefaultInfo(), graph] -def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpec]) -> [Artifact, None]: +def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpec]) -> Artifact | None: if len(specs) == 0: return None @@ -50,7 +50,7 @@ def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpe ], allow_args = True, ) - combined_command = cmd_args(["/bin/sh", wrapper_script]).hidden(copy_scene_kit_assets_cmds + [output.as_output()]) + combined_command = cmd_args(["/bin/sh", wrapper_script], hidden = copy_scene_kit_assets_cmds + [output.as_output()]) processing_options = get_bundle_resource_processing_options(ctx) ctx.actions.run(combined_command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "scene_kit_assets") return output diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 81217d81a4..f0444dd329 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -11,16 +11,19 @@ load( "make_artifact_tset", "project_artifacts", ) -load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") -load("@prelude//apple:apple_utility.bzl", "get_disable_pch_validation_flags", "get_module_name", "get_versioned_target_triple") +load("@prelude//apple:apple_target_sdk_version.bzl", "get_versioned_target_triple") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load("@prelude//apple:apple_utility.bzl", "get_disable_pch_validation_flags", "get_module_name") load("@prelude//apple:modulemap.bzl", "preprocessor_info_for_modulemap") load("@prelude//apple/swift:swift_types.bzl", "SWIFTMODULE_EXTENSION", "SWIFT_EXTENSION") load("@prelude//cxx:argsfiles.bzl", "CompileArgsfile", "CompileArgsfiles") +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:cxx_library_utility.bzl", "cxx_use_shlib_intfs_mode") load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) +load("@prelude//cxx:cxx_toolchain_types.bzl", "ShlibInterfacesMode") load("@prelude//cxx:headers.bzl", "CHeader") load( "@prelude//cxx:link_groups.bzl", @@ -52,7 +55,7 @@ load( "get_incremental_swiftmodule_compilation_flags", "should_build_swift_incrementally", ) -load(":swift_module_map.bzl", "write_swift_module_map_with_swift_deps") +load(":swift_module_map.bzl", "write_swift_module_map_with_deps") load(":swift_pcm_compilation.bzl", "compile_underlying_pcm", "get_compiled_pcm_deps_tset", "get_swift_pcm_anon_targets") load( ":swift_pcm_compilation_types.bzl", @@ -68,12 +71,8 @@ load( "SwiftToolchainInfo", ) -# {"module_name": [exported_headers]}, used for Swift header post processing -ExportedHeadersTSet = transitive_set() - SwiftDependencyInfo 
= provider(fields = { "debug_info_tset": provider_field(ArtifactTSet), - "exported_headers": provider_field(ExportedHeadersTSet), # Includes modules through exported_deps, used for compilation "exported_swiftmodules": provider_field(SwiftCompiledModuleTset), }) @@ -86,7 +85,13 @@ SwiftCompilationDatabase = record( SwiftObjectOutput = record( object_files = field(list[Artifact]), argsfiles = field(CompileArgsfiles), - output_map_artifact = field([Artifact, None]), + output_map_artifact = field(Artifact | None), +) + +SwiftLibraryForDistributionOutput = record( + swiftinterface = field(Artifact), + private_swiftinterface = field(Artifact), + swiftdoc = field(Artifact), ) SwiftCompilationOutput = record( @@ -102,6 +107,8 @@ SwiftCompilationOutput = record( pre = field(CPreprocessor), # Exported preprocessor info required for ObjC compilation of rdeps. exported_pre = field(CPreprocessor), + # Exported -Swift.h header + exported_swift_header = field(Artifact), # Argsfiles used to compile object files. argsfiles = field(CompileArgsfiles), # A tset of (SDK/first-party) swiftmodule artifacts required to be linked into binary. @@ -112,7 +119,11 @@ SwiftCompilationOutput = record( # Info required for `[swift-compilation-database]` subtarget. compilation_database = field(SwiftCompilationDatabase), # An artifact that represent the Swift module map for this target. - output_map_artifact = field([Artifact, None]), + output_map_artifact = field(Artifact | None), + # An optional artifact of the exported symbols emitted for this module. + exported_symbols = field(Artifact | None), + # An optional artifact with files that support consuming the generated library with later versions of the swift compiler. + swift_library_for_distribution_output = field(SwiftLibraryForDistributionOutput | None), ) SwiftDebugInfo = record( @@ -194,10 +205,7 @@ def compile_swift( exported_headers: list[CHeader], objc_modulemap_pp_info: [CPreprocessor, None], framework_search_paths_flags: cmd_args, - extra_search_paths_flags: list[ArgLike] = []) -> [SwiftCompilationOutput, None]: - if not srcs: - return None - + extra_search_paths_flags: list[ArgLike] = []) -> ([SwiftCompilationOutput, None], DefaultInfo): # If this target imports XCTest we need to pass the search path to its swiftmodule. framework_search_paths = cmd_args() framework_search_paths.add(_get_xctest_swiftmodule_search_path(ctx)) @@ -209,7 +217,7 @@ def compile_swift( # If a target exports ObjC headers and Swift explicit modules are enabled, # we need to precompile a PCM of the underlying module and supply it to the Swift compilation. 
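Per the signature change above, compile_swift now always returns a pair: the optional compilation output plus a DefaultInfo carrying the generated Swift interface, which is produced even when the target has no Swift sources. A sketch of the unpacking a call site would do (the subtarget name is invented for illustration):

def _register_swift_outputs(sub_targets: dict, compile_swift_result):
    swift_output, swift_interface_info = compile_swift_result
    # The interface subtarget exists whether or not Swift was compiled.
    sub_targets["swift-interface"] = [swift_interface_info]
    # May be None when the target had no Swift sources.
    return swift_output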
- if objc_modulemap_pp_info and ctx.attrs.uses_explicit_modules: + if objc_modulemap_pp_info and uses_explicit_modules(ctx): underlying_swift_pcm_uncompiled_info = get_swift_pcm_uncompile_info( ctx, None, @@ -228,12 +236,7 @@ def compile_swift( else: compiled_underlying_pcm = None - toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info - module_name = get_module_name(ctx) - output_header = ctx.actions.declare_output(module_name + "-Swift.h") - - output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) shared_flags = _get_shared_flags( ctx, @@ -246,13 +249,38 @@ def compile_swift( extra_search_paths_flags, ) shared_flags.add(framework_search_paths) + swift_interface_info = _create_swift_interface(ctx, shared_flags, module_name) - if toolchain.can_toolchain_emit_obj_c_header_textually: - _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header) - else: - unprocessed_header = ctx.actions.declare_output(module_name + "-SwiftUnprocessed.h") - _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, unprocessed_header) - _perform_swift_postprocessing(ctx, module_name, unprocessed_header, output_header) + if not srcs: + return (None, swift_interface_info) + + toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + + if ctx.attrs.serialize_debugging_options: + if exported_headers: + # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable + # serializing debugging options for mixed libraries to debug successfully + warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + elif not toolchain.prefix_serialized_debugging_options: + warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + + output_header = ctx.actions.declare_output(module_name + "-Swift.h") + output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) + + swift_framework_output = None + if ctx.attrs._enable_library_evolution: + swift_framework_output = SwiftLibraryForDistributionOutput( + swiftinterface = ctx.actions.declare_output(module_name + ".swiftinterface"), + private_swiftinterface = ctx.actions.declare_output(module_name + ".private.swiftinterface"), + swiftdoc = ctx.actions.declare_output(module_name + ".swiftdoc"), #this is generated automatically once we pass -emit-module-info, so must have this name + ) + + output_symbols = None + + if cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): + output_symbols = ctx.actions.declare_output("__tbd__/" + module_name + ".swift_symbols.txt") + + _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header, output_symbols, swift_framework_output) object_output = _compile_object(ctx, toolchain, shared_flags, srcs) @@ -267,7 +295,7 @@ def compile_swift( exported_pp_info = CPreprocessor( headers = [exported_swift_header], modular_args = modulemap_pp_info.modular_args, - relative_args = CPreprocessorArgs(args = modulemap_pp_info.relative_args.args), + args = CPreprocessorArgs(args = modulemap_pp_info.args.args), modulemap_path = modulemap_pp_info.modulemap_path, ) @@ -281,43 +309,22 @@ def compile_swift( pre = CPreprocessor(headers = [swift_header]) # Pass up the swiftmodule paths for this module and its exported_deps - return SwiftCompilationOutput( + return 
(SwiftCompilationOutput( output_map_artifact = object_output.output_map_artifact, object_files = object_output.object_files, object_format = toolchain.object_format, swiftmodule = output_swiftmodule, - dependency_info = get_swift_dependency_info(ctx, exported_pp_info, output_swiftmodule, deps_providers), + dependency_info = get_swift_dependency_info(ctx, output_swiftmodule, deps_providers), pre = pre, exported_pre = exported_pp_info, + exported_swift_header = exported_swift_header.artifact, argsfiles = object_output.argsfiles, swift_debug_info = extract_and_merge_swift_debug_infos(ctx, deps_providers, [output_swiftmodule]), clang_debug_info = extract_and_merge_clang_debug_infos(ctx, deps_providers), - compilation_database = _create_compilation_database(ctx, srcs, object_output.argsfiles.absolute[SWIFT_EXTENSION]), - ) - -# Swift headers are postprocessed to make them compatible with Objective-C -# compilation that does not use -fmodules. This is a workaround for the bad -# performance of -fmodules without Explicit Modules, once Explicit Modules is -# supported, this postprocessing should be removed. -def _perform_swift_postprocessing( - ctx: AnalysisContext, - module_name: str, - unprocessed_header: Artifact, - output_header: Artifact): - transitive_exported_headers = { - module: module_exported_headers - for exported_headers_map in _get_exported_headers_tset(ctx).traverse() - if exported_headers_map - for module, module_exported_headers in exported_headers_map.items() - } - deps_json = ctx.actions.write_json(module_name + "-Deps.json", transitive_exported_headers) - postprocess_cmd = cmd_args(ctx.attrs._apple_tools[AppleToolsInfo].swift_objc_header_postprocess) - postprocess_cmd.add([ - unprocessed_header, - deps_json, - output_header.as_output(), - ]) - ctx.actions.run(postprocess_cmd, category = "swift_objc_header_postprocess") + compilation_database = _create_compilation_database(ctx, srcs, object_output.argsfiles.relative[SWIFT_EXTENSION]), + exported_symbols = output_symbols, + swift_library_for_distribution_output = swift_framework_output, + ), swift_interface_info) # We use separate actions for swiftmodule and object file output. 
This # improves build parallelism at the cost of duplicated work, but by disabling @@ -329,7 +336,9 @@ def _compile_swiftmodule( shared_flags: cmd_args, srcs: list[CxxSrcWithFlags], output_swiftmodule: Artifact, - output_header: Artifact) -> CompileArgsfiles: + output_header: Artifact, + output_symbols: Artifact | None, + swift_framework_output: SwiftLibraryForDistributionOutput | None) -> CompileArgsfiles: argfile_cmd = cmd_args(shared_flags) argfile_cmd.add([ "-emit-module", @@ -337,6 +346,10 @@ def _compile_swiftmodule( "-experimental-skip-non-inlinable-function-bodies-without-types", ]) + if ctx.attrs._enable_library_evolution: + argfile_cmd.add(["-enable-library-evolution"]) + argfile_cmd.add(["-emit-module-interface"]) + cmd = cmd_args([ "-emit-objc-header", "-emit-objc-header-path", @@ -345,6 +358,16 @@ def _compile_swiftmodule( output_swiftmodule.as_output(), ]) + if swift_framework_output: + # this is generated implicitly once we pass -emit-module + cmd.add(cmd_args(hidden = swift_framework_output.swiftdoc.as_output())) + cmd.add([ + "-emit-parseable-module-interface-path", + swift_framework_output.swiftinterface.as_output(), + "-emit-private-module-interface-path", + swift_framework_output.private_swiftinterface.as_output(), + ]) + if should_build_swift_incrementally(ctx, len(srcs)): incremental_compilation_output = get_incremental_swiftmodule_compilation_flags(ctx, srcs) cmd.add(incremental_compilation_output.incremental_flags_cmd) @@ -356,7 +379,31 @@ def _compile_swiftmodule( "-wmo", ]) - return _compile_with_argsfile(ctx, "swiftmodule_compile", SWIFTMODULE_EXTENSION, argfile_cmd, srcs, cmd, toolchain) + output_tbd = None + if output_symbols != None: + # Two step process, first we need to emit the TBD + output_tbd = ctx.actions.declare_output("__tbd__/" + ctx.attrs.name + "-Swift.tbd") + cmd.add([ + "-emit-tbd", + "-emit-tbd-path", + output_tbd.as_output(), + ]) + + ret = _compile_with_argsfile(ctx, "swiftmodule_compile", SWIFTMODULE_EXTENSION, argfile_cmd, srcs, cmd, toolchain) + + if output_tbd != None: + # Now we have run the TBD action we need to extract the symbols + extract_cmd = cmd_args([ + get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo], + "extract", + "-o", + output_symbols.as_output(), + "--tbd", + output_tbd, + ]) + ctx.actions.run(extract_cmd, category = "extract_tbd_symbols") + + return ret def _compile_object( ctx: AnalysisContext, @@ -388,6 +435,9 @@ def _compile_object( if embed_bitcode: cmd.add("--embed-bitcode") + if ctx.attrs._enable_library_evolution: + cmd.add(["-enable-library-evolution"]) + argsfiles = _compile_with_argsfile(ctx, "swift_compile", SWIFT_EXTENSION, shared_flags, srcs, cmd, toolchain) return SwiftObjectOutput( @@ -407,7 +457,7 @@ def _compile_with_argsfile( shell_quoted_args = cmd_args(shared_flags, quote = "shell") argsfile, _ = ctx.actions.write(extension + ".argsfile", shell_quoted_args, allow_args = True) input_args = [shared_flags] - cmd_form = cmd_args(cmd_args(argsfile, format = "@{}", delimiter = "")).hidden(input_args) + cmd_form = cmd_args(cmd_args(argsfile, format = "@{}", delimiter = ""), hidden = input_args) cmd_form.add([s.file for s in srcs]) cmd = cmd_args(toolchain.compiler) @@ -431,7 +481,7 @@ def _compile_with_argsfile( no_outputs_cleanup = should_build_swift_incrementally(ctx, len(srcs)), ) - relative_argsfile = CompileArgsfile( + argsfile = CompileArgsfile( file = argsfile, cmd_form = cmd_form, input_args = input_args, @@ -439,8 +489,8 @@ def _compile_with_argsfile( args_without_file_prefix_args = 
shared_flags, ) - # Swift correctly handles relative paths and we can utilize the relative argsfile for absolute paths. - return CompileArgsfiles(relative = {extension: relative_argsfile}, absolute = {extension: relative_argsfile}) + # Swift correctly handles relative paths and we can utilize the relative argsfile for Xcode. + return CompileArgsfiles(relative = {extension: argsfile}, xcode = {extension: argsfile}) def _get_shared_flags( ctx: AnalysisContext, @@ -454,7 +504,7 @@ def _get_shared_flags( toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info cmd = cmd_args() - if not toolchain.supports_relative_resource_dir: + if not (uses_explicit_modules(ctx) and toolchain.supports_relative_resource_dir): # Setting this to empty will get the driver to make all paths absolute when # passed to the frontend. We later debug prefix these to ensure relative paths # in the debug info. @@ -479,14 +529,27 @@ def _get_shared_flags( "-parse-as-library", ]) + if ctx.attrs.swift_package_name != None: + cmd.add([ + "-package-name", + ctx.attrs.swift_package_name, + ]) + if uses_explicit_modules(ctx): - # We set -fmodule-file-home-is-cwd as this is used to correctly - # set the working directory of modules when generating debug info. cmd.add([ "-Xcc", "-Xclang", "-Xcc", + # We set -fmodule-file-home-is-cwd as this is used to correctly + # set the working directory of modules when generating debug info. "-fmodule-file-home-is-cwd", + "-Xcc", + "-Xclang", + "-Xcc", + # This is the default for compilation, but not in sourcekitd. + # Set it explicitly here so that indexing will not fail with + # invalid module format errors. + "-fmodule-format=obj", ]) cmd.add(get_disable_pch_validation_flags()) @@ -505,19 +568,7 @@ def _get_shared_flags( else: cmd.add(["-enable-experimental-cxx-interop"]) - serialize_debugging_options = False - if ctx.attrs.serialize_debugging_options: - if objc_headers: - # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable - # serializing debugging options for mixed libraries to debug successfully - warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - elif not toolchain.prefix_serialized_debugging_options: - warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - else: - # Apply the debug prefix map to Swift serialized debugging info. - # This will allow for debugging remotely built swiftmodule files. - serialize_debugging_options = True - + serialize_debugging_options = ctx.attrs.serialize_debugging_options and not objc_headers and toolchain.prefix_serialized_debugging_options if serialize_debugging_options: cmd.add([ "-Xfrontend", @@ -552,12 +603,20 @@ def _get_shared_flags( ]) pcm_deps_tset = get_compiled_pcm_deps_tset(ctx, deps_providers) - sdk_clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, deps_providers) - sdk_swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, deps_providers) - # Add flags required to import ObjC module dependencies - _add_clang_deps_flags(ctx, pcm_deps_tset, sdk_clang_deps_tset, cmd) - _add_swift_deps_flags(ctx, sdk_swift_deps_tset, cmd) + # If Swift Explicit Modules are enabled, a few things must be provided to a compilation job: + # 1. Direct and transitive SDK deps from `sdk_modules` attribute. + # 2. Direct and transitive user-defined deps. + # 3. Transitive SDK deps of user-defined deps. 
+ # (This is the case, when a user-defined dep exports a type from SDK module, + # thus such SDK module should be implicitly visible to consumers of that custom dep) + if uses_explicit_modules(ctx): + sdk_clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, deps_providers) + sdk_swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, deps_providers) + _add_swift_module_map_args(ctx, sdk_swift_deps_tset, pcm_deps_tset, sdk_clang_deps_tset, cmd) + + _add_clang_deps_flags(ctx, pcm_deps_tset, cmd) + _add_swift_deps_flags(ctx, cmd) # Add flags for importing the ObjC part of this library _add_mixed_library_flags_to_cmd(ctx, cmd, underlying_module, objc_headers, objc_modulemap_pp_info) @@ -569,28 +628,34 @@ def _get_shared_flags( return cmd -def _add_swift_deps_flags( +def _add_swift_module_map_args( ctx: AnalysisContext, + sdk_swiftmodule_deps_tset: SwiftCompiledModuleTset, + pcm_deps_tset: SwiftCompiledModuleTset, sdk_deps_tset: SwiftCompiledModuleTset, cmd: cmd_args): - # If Explicit Modules are enabled, a few things must be provided to a compilation job: - # 1. Direct and transitive SDK deps from `sdk_modules` attribute. - # 2. Direct and transitive user-defined deps. - # 3. Transitive SDK deps of user-defined deps. - # (This is the case, when a user-defined dep exports a type from SDK module, - # thus such SDK module should be implicitly visible to consumers of that custom dep) + module_name = get_module_name(ctx) + sdk_swiftmodule_deps_tset = [sdk_swiftmodule_deps_tset] if sdk_swiftmodule_deps_tset else [] + all_deps_tset = ctx.actions.tset( + SwiftCompiledModuleTset, + children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps) + [pcm_deps_tset, sdk_deps_tset] + sdk_swiftmodule_deps_tset, + ) + swift_module_map_artifact = write_swift_module_map_with_deps( + ctx, + module_name, + all_deps_tset, + ) + cmd.add([ + "-Xfrontend", + "-explicit-swift-module-map-file", + "-Xfrontend", + swift_module_map_artifact, + ]) + +def _add_swift_deps_flags( + ctx: AnalysisContext, + cmd: cmd_args): if uses_explicit_modules(ctx): - module_name = get_module_name(ctx) - swift_deps_tset = ctx.actions.tset( - SwiftCompiledModuleTset, - children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps), - ) - swift_module_map_artifact = write_swift_module_map_with_swift_deps( - ctx, - module_name, - sdk_deps_tset, - swift_deps_tset, - ) cmd.add([ "-Xcc", "-fno-implicit-modules", @@ -598,10 +663,6 @@ def _add_swift_deps_flags( "-fno-implicit-module-maps", "-Xfrontend", "-disable-implicit-swift-modules", - "-Xfrontend", - "-explicit-swift-module-map-file", - "-Xfrontend", - swift_module_map_artifact, ]) else: depset = ctx.actions.tset(SwiftCompiledModuleTset, children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps)) @@ -610,15 +671,9 @@ def _add_swift_deps_flags( def _add_clang_deps_flags( ctx: AnalysisContext, pcm_deps_tset: SwiftCompiledModuleTset, - sdk_deps_tset: SwiftCompiledModuleTset, cmd: cmd_args) -> None: - # If a module uses Explicit Modules, all direct and - # transitive Clang deps have to be explicitly added. 
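_add_swift_module_map_args above funnels every dependency kind (user-defined deps, PCM deps, and both SDK tsets) into one tset and one JSON module map handed to the frontend. Reduced to its core, as a sketch (children stands for whichever per-kind tsets the caller collected):

def _module_map_flags(ctx: AnalysisContext, module_name: str, children: list) -> list:
    all_deps_tset = ctx.actions.tset(SwiftCompiledModuleTset, children = children)
    swift_module_map_artifact = write_swift_module_map_with_deps(ctx, module_name, all_deps_tset)
    return [
        "-Xfrontend",
        "-explicit-swift-module-map-file",
        "-Xfrontend",
        swift_module_map_artifact,
    ]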
if uses_explicit_modules(ctx): - cmd.add(pcm_deps_tset.project_as_args("clang_deps")) - - # Add Clang sdk modules which do not go to swift modulemap - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) + cmd.add(pcm_deps_tset.project_as_args("clang_importer_flags")) else: inherited_preprocessor_infos = cxx_inherited_preprocessor_infos(ctx.attrs.deps + ctx.attrs.exported_deps) preprocessors = cxx_merge_cpreprocessors(ctx, [], inherited_preprocessor_infos) @@ -635,23 +690,25 @@ def _add_mixed_library_flags_to_cmd( if uses_explicit_modules(ctx): if underlying_module: cmd.add(underlying_module.clang_importer_args) + cmd.add(underlying_module.clang_module_file_args) cmd.add("-import-underlying-module") return if not objc_headers: return - # TODO(T99100029): We cannot use VFS overlays to mask this import from - # the debugger as they require absolute paths. Instead we will enforce - # that mixed libraries do not have serialized debugging info and rely on - # rdeps to serialize the correct paths. - for arg in objc_modulemap_pp_info.relative_args.args: - cmd.add("-Xcc") - cmd.add(arg) + if objc_modulemap_pp_info: + # TODO(T99100029): We cannot use VFS overlays to mask this import from + # the debugger as they require absolute paths. Instead we will enforce + # that mixed libraries do not have serialized debugging info and rely on + # rdeps to serialize the correct paths. + for arg in objc_modulemap_pp_info.args.args: + cmd.add("-Xcc") + cmd.add(arg) - for arg in objc_modulemap_pp_info.modular_args: - cmd.add("-Xcc") - cmd.add(arg) + for arg in objc_modulemap_pp_info.modular_args: + cmd.add("-Xcc") + cmd.add(arg) cmd.add("-import-underlying-module") @@ -669,17 +726,6 @@ def _get_external_debug_info_tsets(deps: list[Dependency]) -> list[ArtifactTSet] if SwiftDependencyInfo in d ] -def _get_exported_headers_tset(ctx: AnalysisContext, exported_headers: [list[str], None] = None) -> ExportedHeadersTSet: - return ctx.actions.tset( - ExportedHeadersTSet, - value = {get_module_name(ctx): exported_headers} if exported_headers else None, - children = [ - dep.exported_headers - for dep in [x.get(SwiftDependencyInfo) for x in ctx.attrs.exported_deps] - if dep and dep.exported_headers - ], - ) - def get_swift_pcm_uncompile_info( ctx: AnalysisContext, propagated_exported_preprocessor_info: [CPreprocessorInfo, None], @@ -692,7 +738,7 @@ def get_swift_pcm_uncompile_info( name = get_module_name(ctx), is_transient = not ctx.attrs.modular or not exported_pre, exported_preprocessor = exported_pre, - exported_deps = ctx.attrs.exported_deps, + exported_deps = _exported_deps(ctx), propagated_preprocessor_args_cmd = propagated_pp_args_cmd, uncompiled_sdk_modules = ctx.attrs.sdk_modules, ) @@ -700,21 +746,9 @@ def get_swift_pcm_uncompile_info( def get_swift_dependency_info( ctx: AnalysisContext, - exported_pre: [CPreprocessor, None], - output_module: [Artifact, None], + output_module: Artifact | None, deps_providers: list) -> SwiftDependencyInfo: - all_deps = ctx.attrs.exported_deps + ctx.attrs.deps - if ctx.attrs.reexport_all_header_dependencies: - exported_deps = all_deps - else: - exported_deps = ctx.attrs.exported_deps - - # We only need to pass up the exported_headers for Swift header post-processing. - # If the toolchain can emit textual imports already then we skip the extra work. 
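The mixed-library hunk above forwards each clang argument behind its own -Xcc so the Swift driver hands it through to the embedded clang instance. That interleaving pattern as a stand-alone helper (an assumed shape for illustration, not part of the PR):

def _as_xcc_args(clang_args: list) -> list:
    # ["-I", "include/"] -> ["-Xcc", "-I", "-Xcc", "include/"]
    interleaved = []
    for arg in clang_args:
        interleaved.append("-Xcc")
        interleaved.append(arg)
    return interleaved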
- exported_headers = [] - if not ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info.can_toolchain_emit_obj_c_header_textually: - exported_headers = [_header_basename(header) for header in ctx.attrs.exported_headers] - exported_headers += [header.name for header in exported_pre.headers] if exported_pre else [] + exported_deps = _exported_deps(ctx) # We pass through the SDK swiftmodules here to match Buck 1 behaviour. This is # pretty loose, but it matches Buck 1 behavior so cannot be improved until @@ -723,6 +757,7 @@ def get_swift_dependency_info( if output_module: compiled_info = SwiftCompiledModuleInfo( is_framework = False, + is_sdk_module = False, is_swiftmodule = True, module_name = get_module_name(ctx), output_artifact = output_module, @@ -734,22 +769,15 @@ def get_swift_dependency_info( debug_info_tset = make_artifact_tset( actions = ctx.actions, artifacts = [output_module] if output_module != None else [], - children = _get_external_debug_info_tsets(all_deps), + children = _get_external_debug_info_tsets(ctx.attrs.deps + ctx.attrs.exported_deps), label = ctx.label, ) return SwiftDependencyInfo( debug_info_tset = debug_info_tset, - exported_headers = _get_exported_headers_tset(ctx, exported_headers), exported_swiftmodules = exported_swiftmodules, ) -def _header_basename(header: [Artifact, str]) -> str: - if type(header) == type(""): - return paths.basename(header) - else: - return header.basename - def uses_explicit_modules(ctx: AnalysisContext) -> bool: swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info return ctx.attrs.uses_explicit_modules and is_sdk_modules_provided(swift_toolchain) @@ -792,7 +820,7 @@ def get_swift_debug_infos( ctx: AnalysisContext, swift_dependency_info: [SwiftDependencyInfo, None], swift_output: [SwiftCompilationOutput, None]) -> SwiftDebugInfo: - # When determing the debug info for shared libraries, if the shared library is a link group, we rely on the link group links to + # When determining the debug info for shared libraries, if the shared library is a link group, we rely on the link group links to # obtain the debug info for linked libraries and only need to provide any swift debug info for this library itself. Otherwise # if linking standard shared, we need to obtain the transitive debug info. 
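In get_swift_dependency_info above, the compiled swiftmodule itself becomes a debug artifact whose tset children carry the transitive debug info of deps and exported deps. The construction in isolation, as a sketch using the same make_artifact_tset call shape:

def _swift_debug_info_tset(ctx: AnalysisContext, output_module: Artifact | None, children: list) -> ArtifactTSet:
    return make_artifact_tset(
        actions = ctx.actions,
        artifacts = [output_module] if output_module != None else [],
        children = children,
        label = ctx.label,
    )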
if get_link_group(ctx): @@ -836,3 +864,50 @@ def _create_compilation_database( ctx.actions.run(cmd, category = "swift_compilation_database", identifier = identifier) return SwiftCompilationDatabase(db = cdb_artifact, other_outputs = argfile.cmd_form) + +def _create_swift_interface(ctx: AnalysisContext, shared_flags: cmd_args, module_name: str) -> DefaultInfo: + swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + swift_ide_test_tool = swift_toolchain.swift_ide_test_tool + if not swift_ide_test_tool: + return DefaultInfo() + mk_swift_interface = swift_toolchain.mk_swift_interface + + identifier = module_name + ".interface.swift" + + argsfile, _ = ctx.actions.write( + identifier + ".argsfile", + shared_flags, + allow_args = True, + ) + interface_artifact = ctx.actions.declare_output(identifier) + + mk_swift_args = cmd_args( + mk_swift_interface, + "--swift-ide-test-tool", + swift_ide_test_tool, + "--module", + module_name, + "--out", + interface_artifact.as_output(), + "--", + cmd_args(cmd_args(argsfile, format = "@{}", delimiter = ""), hidden = [shared_flags]), + ) + + ctx.actions.run( + mk_swift_args, + category = "mk_swift_interface", + identifier = identifier, + ) + + return DefaultInfo( + default_output = interface_artifact, + other_outputs = [ + argsfile, + ], + ) + +def _exported_deps(ctx) -> list[Dependency]: + if ctx.attrs.reexport_all_header_dependencies: + return ctx.attrs.exported_deps + ctx.attrs.deps + else: + return ctx.attrs.exported_deps diff --git a/prelude/apple/swift/swift_incremental_support.bzl b/prelude/apple/swift/swift_incremental_support.bzl index 183b4c985d..a3bcf304eb 100644 --- a/prelude/apple/swift/swift_incremental_support.bzl +++ b/prelude/apple/swift/swift_incremental_support.bzl @@ -8,9 +8,10 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load("@prelude//apple:apple_utility.bzl", "get_module_name") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") +load("@prelude//apple/swift:swift_types.bzl", "SwiftCompilationModes") load( - "@prelude//cxx:compile.bzl", - "CxxSrcWithFlags", + "@prelude//cxx:cxx_sources.bzl", + "CxxSrcWithFlags", # @unused Used as a type ) _WriteOutputFileMapOutput = record( @@ -26,11 +27,7 @@ IncrementalCompilationOutput = record( output_map_artifact = field(Artifact), ) -SwiftCompilationMode = enum( - "wmo", - "incremental", - "auto", -) +SwiftCompilationMode = enum(*SwiftCompilationModes) SwiftIncrementalBuildFilesTreshold = 20 @@ -59,20 +56,22 @@ def get_incremental_swiftmodule_compilation_flags(ctx: AnalysisContext, srcs: li def _get_incremental_compilation_flags_and_objects( output_file_map: _WriteOutputFileMapOutput, additional_flags: cmd_args) -> IncrementalCompilationOutput: - cmd = cmd_args([ - "-incremental", - "-enable-incremental-imports", - "-enable-batch-mode", - "-driver-batch-count", - "1", - "-output-file-map", - output_file_map.output_map_artifact, - ]) - cmd.add(additional_flags) - - cmd = cmd.hidden([swiftdep.as_output() for swiftdep in output_file_map.swiftdeps]) - cmd = cmd.hidden([artifact.as_output() for artifact in output_file_map.artifacts]) - cmd = cmd.hidden(output_file_map.main_swiftdeps.as_output()) + cmd = cmd_args( + [ + "-incremental", + "-enable-incremental-imports", + "-disable-cmo", # To minimize changes in generated swiftmodule file. 
+ "-enable-batch-mode", + "-driver-batch-count", + "1", + "-output-file-map", + output_file_map.output_map_artifact, + additional_flags, + ], + hidden = [swiftdep.as_output() for swiftdep in output_file_map.swiftdeps] + + [artifact.as_output() for artifact in output_file_map.artifacts] + + [output_file_map.main_swiftdeps.as_output()], + ) return IncrementalCompilationOutput( incremental_flags_cmd = cmd, @@ -86,7 +85,8 @@ def _write_output_file_map( srcs: list[CxxSrcWithFlags], compilation_mode: str, # Either "object" or "swiftmodule" extension: str) -> _WriteOutputFileMapOutput: # Either ".o" or ".swiftmodule" - module_swiftdeps = ctx.actions.declare_output("module-build-record." + compilation_mode + ".swiftdeps") + # swift-driver doesn't respect extension for root swiftdeps file and it always has to be `.priors`. + module_swiftdeps = ctx.actions.declare_output("module-build-record." + compilation_mode + ".priors") output_file_map = { "": { diff --git a/prelude/apple/swift/swift_module_map.bzl b/prelude/apple/swift/swift_module_map.bzl index 5547232f07..dd60747cfe 100644 --- a/prelude/apple/swift/swift_module_map.bzl +++ b/prelude/apple/swift/swift_module_map.bzl @@ -8,22 +8,10 @@ load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type load(":swift_toolchain_types.bzl", "SwiftCompiledModuleTset") -def write_swift_module_map( +def write_swift_module_map_with_deps( ctx: AnalysisContext, module_name: str, - sdk_deps: SwiftCompiledModuleTset) -> ArgLike: - return write_swift_module_map_with_swift_deps(ctx, module_name, sdk_deps, None) - -def write_swift_module_map_with_swift_deps( - ctx: AnalysisContext, - module_name: str, - sdk_swift_deps: SwiftCompiledModuleTset, - swift_deps: [SwiftCompiledModuleTset, None]) -> ArgLike: - if swift_deps: - all_deps = ctx.actions.tset(SwiftCompiledModuleTset, children = [sdk_swift_deps, swift_deps]) - else: - all_deps = sdk_swift_deps - + all_deps: SwiftCompiledModuleTset) -> ArgLike: return ctx.actions.write_json( module_name + ".swift_module_map.json", all_deps.project_as_json("swift_module_map"), diff --git a/prelude/apple/swift/swift_pcm_compilation.bzl b/prelude/apple/swift/swift_pcm_compilation.bzl index 01626fce7e..554a2ef1fa 100644 --- a/prelude/apple/swift/swift_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_pcm_compilation.bzl @@ -57,13 +57,13 @@ def _compile_with_argsfile( argfile, _ = ctx.actions.write(module_name + ".pcm.argsfile", shell_quoted_cmd, allow_args = True) swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info - cmd = cmd_args(swift_toolchain.compiler) - cmd.add(cmd_args(["@", argfile], delimiter = "")) - - # Action should also depend on all artifacts from the argsfile, otherwise they won't be materialised. - cmd.hidden([args]) - - cmd.add(additional_cmd) + cmd = cmd_args( + swift_toolchain.compiler, + cmd_args(["@", argfile], delimiter = ""), + additional_cmd, + # Action should also depend on all artifacts from the argsfile, otherwise they won't be materialised. 
+ hidden = args, + ) ctx.actions.run( cmd, @@ -77,9 +77,9 @@ def _compiled_module_info( module_name: str, pcm_output: Artifact, pcm_info: SwiftPCMUncompiledInfo) -> SwiftCompiledModuleInfo: - clang_importer_args = cmd_args() - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args = cmd_args() + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-file=", @@ -90,8 +90,8 @@ def _compiled_module_info( delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-map-file=", @@ -100,16 +100,22 @@ def _compiled_module_info( delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add(pcm_info.exported_preprocessor.relative_args.args) - clang_importer_args.hidden(pcm_info.exported_preprocessor.modular_args) + + clang_importer_args = cmd_args( + "-Xcc", + pcm_info.exported_preprocessor.args.args, + hidden = pcm_info.exported_preprocessor.modular_args, + ) return SwiftCompiledModuleInfo( + clang_module_file_args = clang_deps_args, clang_importer_args = clang_importer_args, is_framework = False, + is_sdk_module = False, is_swiftmodule = False, module_name = module_name, output_artifact = pcm_output, + clang_modulemap = pcm_info.exported_preprocessor.modulemap_path, ) def _swift_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: @@ -240,7 +246,7 @@ def compile_underlying_pcm( "-Xcc", "-I", "-Xcc", - cmd_args([cmd_args(modulemap_path).parent(), "exported_symlink_tree"], delimiter = "/"), + cmd_args([cmd_args(modulemap_path, parent = 1), "exported_symlink_tree"], delimiter = "/"), ]) cmd.add(framework_search_path_flags) @@ -261,41 +267,37 @@ def _get_base_pcm_flags( pcm_deps_tset: SwiftCompiledModuleTset, swift_cxx_args: list[str]) -> (cmd_args, cmd_args, Artifact): swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + modulemap_path = uncompiled_pcm_info.exported_preprocessor.modulemap_path + pcm_output = ctx.actions.declare_output(module_name + ".pcm") - cmd = cmd_args() - cmd.add(get_shared_pcm_compilation_args(module_name)) - cmd.add(["-sdk", swift_toolchain.sdk_path]) - cmd.add(swift_toolchain.compiler_flags) - - if swift_toolchain.resource_dir: - cmd.add([ + cmd = cmd_args( + get_shared_pcm_compilation_args(module_name), + ["-sdk", swift_toolchain.sdk_path], + swift_toolchain.compiler_flags, + ([ "-resource-dir", swift_toolchain.resource_dir, - ]) - - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) - cmd.add(pcm_deps_tset.project_as_args("clang_deps")) - - modulemap_path = uncompiled_pcm_info.exported_preprocessor.modulemap_path - pcm_output = ctx.actions.declare_output(module_name + ".pcm") + ] if swift_toolchain.resource_dir else []), + sdk_deps_tset.project_as_args("clang_module_file_flags"), + pcm_deps_tset.project_as_args("clang_module_file_flags"), + pcm_deps_tset.project_as_args("clang_importer_flags"), + # To correctly resolve modulemap's headers, + # a search path to the root of modulemap should be passed. + [ + "-Xcc", + "-I", + "-Xcc", + cmd_args(modulemap_path, parent = 1), + ], + # Modular deps like `-Swift.h` have to be materialized. 
+ hidden = uncompiled_pcm_info.exported_preprocessor.modular_args, + ) - additional_cmd = cmd_args(swift_cxx_args) - additional_cmd.add([ + additional_cmd = cmd_args( + swift_cxx_args, "-o", pcm_output.as_output(), modulemap_path, - ]) - - # To correctly resolve modulemap's headers, - # a search path to the root of modulemap should be passed. - cmd.add([ - "-Xcc", - "-I", - "-Xcc", - cmd_args(modulemap_path).parent(), - ]) - - # Modular deps like `-Swift.h` have to be materialized. - cmd.hidden(uncompiled_pcm_info.exported_preprocessor.modular_args) + ) return (cmd, additional_cmd, pcm_output) diff --git a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl index f4e3d0ac5e..5c70ec3dd8 100644 --- a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl @@ -44,6 +44,10 @@ def get_shared_pcm_compilation_args(module_name: str) -> cmd_args: # to avoid serializing it as an absolute path. "-Xcc", "-working-directory=", + # AssetsLibrary is shipping with a #warning, which we shouldn't error on when compiling + # the SDK module. I don't think this is actually avoidable or removable until the next Xcode major version. + "-Xcc", + "-Wno-error=#warnings", ]) cmd.add(get_disable_pch_validation_flags()) @@ -139,7 +143,7 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov "-I.", ]) - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) + cmd.add(sdk_deps_tset.project_as_args("clang_module_file_flags")) expanded_modulemap_path_cmd = expand_relative_prefixed_sdk_path( cmd_args(swift_toolchain.sdk_path), @@ -179,9 +183,9 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ) # Construct the args needed to be passed to the clang importer - clang_importer_args = cmd_args() - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args = cmd_args() + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-file=", @@ -192,8 +196,8 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-map-file=", @@ -204,11 +208,13 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ) compiled_sdk = SwiftCompiledModuleInfo( - clang_importer_args = clang_importer_args, + clang_module_file_args = clang_deps_args, is_framework = uncompiled_sdk_module_info.is_framework, + is_sdk_module = True, is_swiftmodule = False, module_name = module_name, output_artifact = pcm_output, + clang_modulemap = expanded_modulemap_path_cmd, ) return [ diff --git a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl index 20667ddaf8..8bd7b8a7c9 100644 --- a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl @@ -14,7 +14,7 @@ load( "extract_and_merge_clang_debug_infos", "extract_and_merge_swift_debug_infos", ) -load(":swift_module_map.bzl", "write_swift_module_map") +load(":swift_module_map.bzl", "write_swift_module_map_with_deps") load(":swift_sdk_pcm_compilation.bzl", "get_swift_sdk_pcm_anon_targets") load(":swift_toolchain_types.bzl", "SdkUncompiledModuleInfo", "SwiftCompiledModuleInfo", "SwiftCompiledModuleTset", "WrappedSdkCompiledModuleInfo") @@ -52,12 +52,12 @@ def
_swift_interface_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Pr clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, sdk_deps_providers) swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, sdk_deps_providers) - swift_module_map_artifact = write_swift_module_map(ctx, uncompiled_module_info_name, swift_deps_tset) + swift_module_map_artifact = write_swift_module_map_with_deps(ctx, uncompiled_module_info_name, swift_deps_tset) cmd.add([ "-explicit-swift-module-map-file", swift_module_map_artifact, ]) - cmd.add(clang_deps_tset.project_as_args("clang_deps")) + cmd.add(clang_deps_tset.project_as_args("clang_module_file_flags")) swiftmodule_output = ctx.actions.declare_output(uncompiled_module_info_name + SWIFTMODULE_EXTENSION) expanded_swiftinterface_cmd = expand_relative_prefixed_sdk_path( @@ -80,6 +80,7 @@ def _swift_interface_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Pr compiled_sdk = SwiftCompiledModuleInfo( is_framework = uncompiled_sdk_module_info.is_framework, + is_sdk_module = True, is_swiftmodule = True, module_name = uncompiled_module_info_name, output_artifact = swiftmodule_output, diff --git a/prelude/apple/swift/swift_toolchain.bzl b/prelude/apple/swift/swift_toolchain.bzl index c3a7a0a128..7e89e77436 100644 --- a/prelude/apple/swift/swift_toolchain.bzl +++ b/prelude/apple/swift/swift_toolchain.bzl @@ -69,6 +69,8 @@ def swift_toolchain_impl(ctx): sdk_path = ctx.attrs._internal_sdk_path or ctx.attrs.sdk_path, swift_stdlib_tool = ctx.attrs.swift_stdlib_tool[RunInfo], swift_stdlib_tool_flags = ctx.attrs.swift_stdlib_tool_flags, + swift_ide_test_tool = ctx.attrs.swift_ide_test_tool[RunInfo] if ctx.attrs.swift_ide_test_tool else None, + mk_swift_interface = cmd_args(ctx.attrs._swiftc_wrapper[RunInfo]).add(ctx.attrs.make_swift_interface[RunInfo]), supports_relative_resource_dir = ctx.attrs.supports_relative_resource_dir, supports_swift_cxx_interoperability_mode = ctx.attrs.supports_swift_cxx_interoperability_mode, supports_swift_importing_objc_forward_declarations = ctx.attrs.supports_swift_importing_obj_c_forward_declarations, diff --git a/prelude/apple/swift/swift_toolchain_types.bzl b/prelude/apple/swift/swift_toolchain_types.bzl index e2f7c8241e..5ec41d73d6 100644 --- a/prelude/apple/swift/swift_toolchain_types.bzl +++ b/prelude/apple/swift/swift_toolchain_types.bzl @@ -31,6 +31,8 @@ SwiftToolchainInfo = provider( "sdk_path": provider_field(typing.Any, default = None), "swift_stdlib_tool_flags": provider_field(typing.Any, default = None), "swift_stdlib_tool": provider_field(typing.Any, default = None), + "swift_ide_test_tool": provider_field(typing.Any, default = None), + "mk_swift_interface": provider_field(typing.Any, default = None), "runtime_run_paths": provider_field(typing.Any, default = None), # [str] "supports_relative_resource_dir": provider_field(typing.Any, default = None), # bool "supports_swift_cxx_interoperability_mode": provider_field(typing.Any, default = None), # bool @@ -64,8 +66,11 @@ SdkSwiftOverlayInfo = provider(fields = { }) SwiftCompiledModuleInfo = provider(fields = { - "clang_importer_args": provider_field(typing.Any, default = None), # cmd_args of include flags for the clang importer. + "clang_importer_args": provider_field(typing.Any, default = None), # cmd_args of additional flags for the clang importer. + "clang_module_file_args": provider_field(typing.Any, default = None), # cmd_args of include flags for the clang importer. 
+ "clang_modulemap": provider_field(typing.Any, default = None), # Clang modulemap file which is required for generation of swift_module_map. "is_framework": provider_field(typing.Any, default = None), + "is_sdk_module": provider_field(bool, default = False), "is_swiftmodule": provider_field(typing.Any, default = None), # If True then contains a compiled swiftmodule, otherwise Clang's pcm. "module_name": provider_field(typing.Any, default = None), # A real name of a module, without distinguishing suffixes. "output_artifact": provider_field(typing.Any, default = None), # Compiled artifact either swiftmodule or pcm. @@ -73,24 +78,43 @@ SwiftCompiledModuleInfo = provider(fields = { def _add_swiftmodule_search_path(module_info: SwiftCompiledModuleInfo): # We need to import the containing folder, not the file itself. - return ["-I", cmd_args(module_info.output_artifact).parent()] if module_info.is_swiftmodule else [] + # We skip SDK modules as those are found via the -sdk flag. + if module_info.is_swiftmodule and not module_info.is_sdk_module: + return ["-I", cmd_args(module_info.output_artifact, parent = 1)] -def _add_clang_import_flags(module_info: SwiftCompiledModuleInfo): + return [] + +def _add_clang_module_file_flags(module_info: SwiftCompiledModuleInfo): if module_info.is_swiftmodule: return [] else: - return [module_info.clang_importer_args] + return [module_info.clang_module_file_args] + +def _add_clang_importer_flags(module_info: SwiftCompiledModuleInfo): + if module_info.is_swiftmodule: + return [] + else: + return [module_info.clang_importer_args] if module_info.clang_importer_args else [] def _swift_module_map_struct(module_info: SwiftCompiledModuleInfo): - return struct( - isFramework = module_info.is_framework, - moduleName = module_info.module_name, - modulePath = module_info.output_artifact, - ) + if module_info.is_swiftmodule: + return struct( + isFramework = module_info.is_framework, + moduleName = module_info.module_name, + modulePath = module_info.output_artifact, + ) + else: + return struct( + isFramework = module_info.is_framework, + moduleName = module_info.module_name, + clangModulePath = module_info.output_artifact, + clangModuleMapPath = cmd_args([module_info.clang_modulemap], delimiter = ""), + ) SwiftCompiledModuleTset = transitive_set( args_projections = { - "clang_deps": _add_clang_import_flags, + "clang_importer_flags": _add_clang_importer_flags, # Additional clang flags required for compilation. + "clang_module_file_flags": _add_clang_module_file_flags, # Projects pcm modules as cli flags. 
"module_search_path": _add_swiftmodule_search_path, }, json_projections = { diff --git a/prelude/apple/swift/swift_types.bzl b/prelude/apple/swift/swift_types.bzl index eac1b0f839..30d9e42239 100644 --- a/prelude/apple/swift/swift_types.bzl +++ b/prelude/apple/swift/swift_types.bzl @@ -8,3 +8,5 @@ SWIFT_EXTENSION = ".swift" SWIFTMODULE_EXTENSION = ".swiftmodule" + +SwiftCompilationModes = ["wmo", "incremental", "auto"] diff --git a/prelude/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 index 3c12c4007b..fe87357b76 100644 --- a/prelude/apple/tools/BUCK.v2 +++ b/prelude/apple/tools/BUCK.v2 @@ -1,16 +1,28 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + apple_tools( name = "apple-tools", - assemble_bundle = "prelude//apple/tools/bundling:assemble_bundle", - # @oss-disable: adhoc_codesign_tool = "prelude//apple/tools/meta_only/codesign_rust:adhoc-signer", adhoc_codesign_tool = None, # @oss-enable - split_arch_combine_dsym_bundles_tool = ":split_arch_combine_dsym_bundles_tool", + # @oss-disable: adhoc_codesign_tool = "prelude//apple/tools/meta_only/codesign_rust:adhoc-signer", + assemble_bundle = "prelude//apple/tools/bundling:assemble_bundle", dry_codesign_tool = ":dry_codesign_tool", info_plist_processor = "prelude//apple/tools/info_plist_processor:tool", ipa_package_maker = ":ipa_package_maker", make_modulemap = ":make_modulemap", make_vfsoverlay = ":make_vfsoverlay", selective_debugging_scrubber = "prelude//apple/tools/selective_debugging:tool", - swift_objc_header_postprocess = ":swift_objc_header_postprocess", + split_arch_combine_dsym_bundles_tool = ":split_arch_combine_dsym_bundles_tool", + visibility = ["PUBLIC"], + xcframework_maker = ":xcframework_maker", +) + +python_binary( + name = "xcframework_maker", + main = "xcframework_maker.py", visibility = ["PUBLIC"], ) @@ -41,6 +53,12 @@ python_bootstrap_binary( visibility = ["PUBLIC"], ) +python_bootstrap_binary( + name = "make_swift_interface", + main = "make_swift_interface.py", + visibility = ["PUBLIC"], +) + python_bootstrap_binary( name = "make_vfsoverlay", main = "make_vfsoverlay.py", @@ -69,9 +87,3 @@ export_file( name = "swift_exec.sh", src = "swift_exec.sh", ) - -python_bootstrap_binary( - name = "swift_objc_header_postprocess", - main = "swift_objc_header_postprocess.py", - visibility = ["PUBLIC"], -) diff --git a/prelude/apple/tools/bundling/BUCK.v2 b/prelude/apple/tools/bundling/BUCK.v2 index 4a5265c0cf..91dacb0ed2 100644 --- a/prelude/apple/tools/bundling/BUCK.v2 +++ b/prelude/apple/tools/bundling/BUCK.v2 @@ -1,4 +1,9 @@ -# @oss-disable: load("@prelude//apple/tools/defs.bzl", "meta_python_test") +load("@prelude//utils:source_listing.bzl", "source_listing") +load("@prelude//apple/tools/defs.bzl", "meta_python_test") + +oncall("build_infra") + +source_listing() python_binary( name = "assemble_bundle", diff --git a/prelude/apple/tools/bundling/action_metadata.py b/prelude/apple/tools/bundling/action_metadata.py index 8f73315f8f..ade702a703 100644 --- a/prelude/apple/tools/bundling/action_metadata.py +++ b/prelude/apple/tools/bundling/action_metadata.py @@ -5,12 +5,14 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import json import os from dataclasses import dataclass from io import TextIOBase from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union _METADATA_VERSION = 1 @@ -27,7 +29,7 @@ class _Metadata: digests: List[_Item] -def _object_hook(dict: Dict[str, Any]) -> Any: +def _object_hook(dict: Dict[str, Any]) -> Union[_Item, _Metadata]: if "version" in dict: return _Metadata(**dict) else: diff --git a/prelude/apple/tools/bundling/assemble_bundle.py b/prelude/apple/tools/bundling/assemble_bundle.py index 1748400152..144390ec2b 100644 --- a/prelude/apple/tools/bundling/assemble_bundle.py +++ b/prelude/apple/tools/bundling/assemble_bundle.py @@ -5,11 +5,13 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import logging import os import shutil from pathlib import Path -from typing import cast, Dict, List, Optional +from typing import Any, cast, Dict, List, Optional from .assemble_bundle_types import BundleSpecItem, IncrementalContext from .incremental_state import IncrementalState, IncrementalStateItem @@ -18,7 +20,7 @@ should_assemble_incrementally, ) -_LOGGER = logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) def assemble_bundle( @@ -26,37 +28,30 @@ def assemble_bundle( bundle_path: Path, incremental_context: Optional[IncrementalContext], check_conflicts: bool, + versioned_if_macos: bool, ) -> Optional[List[IncrementalStateItem]]: - # It's possible to have the same spec multiple times as different - # apple_resource() targets can refer to the _same_ resource file. - # - # On RE, we're not allowed to overwrite files, so prevent doing - # identical file copies. - # - # Do not reorder spec items to achieve determinism. - # Rely on the fact that `dict` preserves key order. - deduplicated_spec = list(dict.fromkeys(spec)) - # Force same sorting as in Buck1 for `SourcePathWithAppleBundleDestination` - # WARNING: This logic is tightly coupled with how spec filtering is done in `_filter_conflicting_paths` method during incremental bundling. Don't change unless you fully understand what is going on here. 
- deduplicated_spec.sort() - incremental_result = None if incremental_context: - if should_assemble_incrementally(deduplicated_spec, incremental_context): + if should_assemble_incrementally(spec, incremental_context): incremental_result = _assemble_incrementally( bundle_path, - deduplicated_spec, + spec, incremental_context.metadata, cast(IncrementalState, incremental_context.state), check_conflicts, + versioned_if_macos, ) else: - _assemble_non_incrementally(bundle_path, deduplicated_spec, check_conflicts) + _assemble_non_incrementally( + bundle_path, spec, check_conflicts, versioned_if_macos + ) incremental_result = calculate_incremental_state( - deduplicated_spec, incremental_context.metadata + spec, incremental_context.metadata ) else: - _assemble_non_incrementally(bundle_path, deduplicated_spec, check_conflicts) + _assemble_non_incrementally( + bundle_path, spec, check_conflicts, versioned_if_macos + ) # External tooling (e.g., Xcode) might depend on the timestamp of the bundle bundle_path.touch() @@ -71,14 +66,17 @@ def _cleanup_output(incremental: bool, path: Path) -> None: def _assemble_non_incrementally( - bundle_path: Path, spec: List[BundleSpecItem], check_conflicts: bool + bundle_path: Path, + spec: List[BundleSpecItem], + check_conflicts: bool, + versioned_if_macos: bool, ) -> None: logging.getLogger(__name__).info("Assembling bundle non-incrementally.") _cleanup_output(incremental=False, path=bundle_path) - copied_contents = {} + copied_contents: Dict[Path, str] = {} - def _copy(src, dst, **kwargs) -> None: + def _copy(src: str, dst: Path, **kwargs: Any) -> None: if check_conflicts: if dst in copied_contents: raise RuntimeError( @@ -88,11 +86,21 @@ def _copy(src, dst, **kwargs) -> None: if check_conflicts: copied_contents[dst] = src + symlinks = set() + for spec_item in spec: source_path = spec_item.src destination_path = bundle_path / spec_item.dst destination_path.parent.mkdir(parents=True, exist_ok=True) + if spec_item.dst.startswith("Versions/A") and versioned_if_macos: + parts = Path(spec_item.dst).parts + if len(parts) <= 2: + raise RuntimeError( + "Versioned bundles cannot be created from a single copy directly to Versions/A" + ) + symlinks.add(parts[2]) + if os.path.isdir(source_path): shutil.copytree( source_path, @@ -104,6 +112,16 @@ def _copy(src, dst, **kwargs) -> None: else: _copy(source_path, destination_path) + _create_symlinks(symlinks, bundle_path) + + +def _create_symlinks(symlinks: set[str], bundle_path: Path) -> None: + if symlinks and not Path.exists(bundle_path / "Versions/Current"): + os.symlink("A", bundle_path / "Versions/Current") + for dir_to_link in symlinks: + if not Path.exists(bundle_path / dir_to_link): + os.symlink("Versions/Current/" + dir_to_link, bundle_path / dir_to_link) + def _assemble_incrementally( bundle_path: Path, @@ -111,6 +129,7 @@ def _assemble_incrementally( action_metadata: Dict[Path, str], incremental_state: IncrementalState, check_conflicts: bool, + versioned_if_macos: bool, ) -> List[IncrementalStateItem]: logging.getLogger(__name__).info("Assembling bundle incrementally.") _cleanup_output(incremental=True, path=bundle_path) @@ -136,6 +155,9 @@ def _assemble_incrementally( else: new_incremental_state = _filter_conflicting_paths(new_incremental_state) + new_symlinks = set() + versioned_subdir = Path("Versions/A") + for item in new_incremental_state: # Added file might not be present in old result, need to check first. 
dst = item.destination_relative_to_bundle @@ -161,6 +183,12 @@ def _assemble_incrementally( ) project_relative_dst.parent.mkdir(parents=True, exist_ok=True) shutil.copy2(item.source, project_relative_dst, follow_symlinks=False) + if Path(dst).is_relative_to(versioned_subdir): + symlink = Path(dst).relative_to(versioned_subdir).parts[0] + new_symlinks.add(symlink) + + if versioned_if_macos: + _create_symlinks(new_symlinks, bundle_path) for path in paths_to_delete: (bundle_path / path).unlink() @@ -225,5 +253,8 @@ def _cleanup_empty_redundant_directories( new_directories = { p for item in new_state for p in item.destination_relative_to_bundle.parents } + versioned_subdir = Path("Versions/A") for redundant_directory in old_directories - new_directories: shutil.rmtree(bundle_path / redundant_directory, ignore_errors=True) + if redundant_directory.parent == versioned_subdir: + Path.unlink(bundle_path / redundant_directory.name) diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index 2f0ea75970..563b3bef97 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -5,10 +5,14 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + +from __future__ import annotations + import functools from dataclasses import dataclass from pathlib import Path -from typing import Dict, Optional +from typing import Dict, List, Optional from apple.tools.code_signing.codesign_bundle import CodesignConfiguration @@ -22,26 +26,64 @@ class BundleSpecItem: # Should be bundle relative path, empty string means the root of the bundle dst: str codesign_on_copy: bool = False + codesign_entitlements: Optional[str] = None + codesign_flags_override: Optional[List[str]] = None - def __eq__(self, other) -> bool: + def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: return ( - other + other is not None and self.src == other.src and self.dst == other.dst and self.codesign_on_copy == other.codesign_on_copy + and self.codesign_entitlements == other.codesign_entitlements + and self.codesign_flags_override == other.codesign_flags_override ) - def __ne__(self, other) -> bool: + def __ne__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return not self.__eq__(other) - def __hash__(self) -> int: - return hash((self.src, self.dst, self.codesign_on_copy)) + def __hash__(self: BundleSpecItem) -> int: + return hash( + ( + self.src, + self.dst, + self.codesign_on_copy, + self.codesign_entitlements, + ( + tuple(self.codesign_flags_override) + if self.codesign_flags_override is not None + else hash(None) + ), + ) + ) - def __lt__(self, other) -> bool: + def __lt__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return ( self.src < other.src or self.dst < other.dst or self.codesign_on_copy < other.codesign_on_copy + or ( + self.codesign_entitlements < other.codesign_entitlements + if ( + self.codesign_entitlements is not None + and other.codesign_entitlements is not None + ) + else ( + self.codesign_entitlements is None + and other.codesign_entitlements is not None + ) + ) + or ( + self.codesign_flags_override < other.codesign_flags_override + if ( + self.codesign_flags_override is not None + and other.codesign_flags_override is not None + ) + else ( + self.codesign_flags_override is None + and other.codesign_flags_override is not None + ) + ) ) @@ -58,3 +100,5 @@ class IncrementalContext: codesigned: bool 
codesign_configuration: Optional[CodesignConfiguration] codesign_identity: Optional[str] + codesign_arguments: List[str] + versioned_if_macos: bool diff --git a/prelude/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py index d35daecf58..49bb78f8ac 100644 --- a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -5,15 +5,19 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + +from __future__ import annotations + import json from dataclasses import dataclass from io import TextIOBase from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from apple.tools.code_signing.codesign_bundle import CodesignConfiguration -_VERSION = 3 +_VERSION = 7 @dataclass @@ -33,6 +37,35 @@ class IncrementalStateItem: """ +@dataclass +class CodesignedOnCopy: + path: Path + """ + Path relative to bundle root which needs to be codesigned + """ + entitlements_digest: Optional[str] + """ + Digest of entitlements used when the given path is codesigned on copy + """ + codesign_flags_override: Optional[List[str]] + """ + If present, overrides codesign arguments (which are used for root bundle) when the given path is codesigned on copy + """ + + def __hash__(self: CodesignedOnCopy) -> int: + return hash( + ( + self.path, + self.entitlements_digest, + ( + tuple(self.codesign_flags_override) + if self.codesign_flags_override is not None + else hash(None) + ), + ) + ) + + @dataclass class IncrementalState: """ @@ -42,25 +75,29 @@ class IncrementalState: items: List[IncrementalStateItem] codesigned: bool codesign_configuration: CodesignConfiguration - codesign_on_copy_paths: List[Path] + codesigned_on_copy: List[CodesignedOnCopy] codesign_identity: Optional[str] + codesign_arguments: List[str] swift_stdlib_paths: List[Path] + versioned_if_macos: bool version: int = _VERSION class IncrementalStateJSONEncoder(json.JSONEncoder): - def default(self, o: Any) -> Any: + def default(self, o: object) -> object: if isinstance(o, IncrementalState): return { "items": [self.default(i) for i in o.items], "codesigned": o.codesigned, - "codesign_configuration": o.codesign_configuration.value - if o.codesign_configuration - else None, - "codesign_on_copy_paths": [str(p) for p in o.codesign_on_copy_paths], + "codesign_configuration": ( + o.codesign_configuration.value if o.codesign_configuration else None + ), + "codesigned_on_copy": [self.default(i) for i in o.codesigned_on_copy], "codesign_identity": o.codesign_identity, "swift_stdlib_paths": [str(p) for p in o.swift_stdlib_paths], "version": o.version, + "codesign_arguments": o.codesign_arguments, + "versioned_if_macos": o.versioned_if_macos, } elif isinstance(o, IncrementalStateItem): result = { @@ -72,15 +109,22 @@ def default(self, o: Any) -> Any: if o.resolved_symlink is not None: result["resolved_symlink"] = str(o.resolved_symlink) return result + elif isinstance(o, CodesignedOnCopy): + result = {} + result["path"] = str(o.path) + if o.entitlements_digest is not None: + result["entitlements_digest"] = str(o.entitlements_digest) + if o.codesign_flags_override is not None: + result["codesign_flags_override"] = o.codesign_flags_override + return result else: return super().default(o) -def _object_hook(dict: Dict[str, Any]) -> Any: +def _object_hook( + dict: Dict[str, Any] +) -> Union[IncrementalState, IncrementalStateItem, CodesignedOnCopy]: if "version" 
in dict: - dict["codesign_on_copy_paths"] = [ - Path(p) for p in dict.pop("codesign_on_copy_paths") - ] codesign_configuration = dict.pop("codesign_configuration") dict["codesign_configuration"] = ( CodesignConfiguration(codesign_configuration) @@ -89,7 +133,7 @@ def _object_hook(dict: Dict[str, Any]) -> Any: ) dict["swift_stdlib_paths"] = [Path(p) for p in dict.pop("swift_stdlib_paths")] return IncrementalState(**dict) - else: + elif "destination_relative_to_bundle" in dict: dict["source"] = Path(dict.pop("source")) dict["destination_relative_to_bundle"] = Path( dict.pop("destination_relative_to_bundle") @@ -98,6 +142,11 @@ def _object_hook(dict: Dict[str, Any]) -> Any: resolved_symlink = dict.pop("resolved_symlink", None) dict["resolved_symlink"] = Path(resolved_symlink) if resolved_symlink else None return IncrementalStateItem(**dict) + else: + dict["path"] = Path(dict.pop("path")) + dict["entitlements_digest"] = dict.pop("entitlements_digest", None) + dict["codesign_flags_override"] = dict.pop("codesign_flags_override", None) + return CodesignedOnCopy(**dict) def parse_incremental_state(data: TextIOBase) -> IncrementalState: diff --git a/prelude/apple/tools/bundling/incremental_state_test.py b/prelude/apple/tools/bundling/incremental_state_test.py index 3f55977505..b893e1cd20 100644 --- a/prelude/apple/tools/bundling/incremental_state_test.py +++ b/prelude/apple/tools/bundling/incremental_state_test.py @@ -5,20 +5,72 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +import io +import json import unittest -from json import JSONDecodeError from pathlib import Path import pkg_resources from .incremental_state import ( + CodesignedOnCopy, IncrementalState, IncrementalStateItem, + IncrementalStateJSONEncoder, parse_incremental_state, ) class TestIncrementalState(unittest.TestCase): + def test_state_serialization_and_deserialization(self): + expected = IncrementalState( + items=[ + IncrementalStateItem( + source=Path("repo/foo.txt"), + destination_relative_to_bundle=Path("foo.txt"), + digest="foo_digest", + resolved_symlink=None, + ), + IncrementalStateItem( + source=Path("buck-out/bar.txt"), + destination_relative_to_bundle=Path("Resources/bar.txt"), + digest="bar_digest", + resolved_symlink=None, + ), + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("Resources/bar.txt"), + entitlements_digest=None, + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/baz.txt"), + entitlements_digest="abc", + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/qux.txt"), + entitlements_digest=None, + codesign_flags_override=["--deep", "--force"], + ), + ], + codesign_identity="Johnny Appleseed", + codesign_arguments=[ + "--force", + ], + swift_stdlib_paths=[Path("Frameworks/libswiftCore.dylib")], + versioned_if_macos=False, + ) + json_result = json.dumps(expected, cls=IncrementalStateJSONEncoder) + result = parse_incremental_state(io.StringIO(json_result)) + self.assertEqual( + result, + expected, + ) + def test_valid_state_is_parsed_successfully(self): file_content = pkg_resources.resource_stream( __name__, "test_resources/valid_incremental_state.json" @@ -47,9 +99,30 @@ def test_valid_state_is_parsed_successfully(self): ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[Path("Resources/bar.txt")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("Resources/bar.txt"), + entitlements_digest=None, + 
codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/baz.txt"), + entitlements_digest="abc", + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/qux.txt"), + entitlements_digest=None, + codesign_flags_override=["--deep", "--force"], + ), + ], codesign_identity="Johny Appleseed", + codesign_arguments=[ + "--force", + "--deep", + ], swift_stdlib_paths=[Path("Frameworks/libswiftCore.dylib")], + versioned_if_macos=True, ) self.assertEqual( result, @@ -60,7 +133,7 @@ def test_error_when_invalid_metadata(self): file_content = pkg_resources.resource_stream( __name__, "test_resources/the.broken_json" ) - with self.assertRaises(JSONDecodeError): + with self.assertRaises(json.JSONDecodeError): _ = parse_incremental_state(file_content) def test_user_friendly_error_when_metadata_with_newer_version(self): diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index a92b6463f0..03e0f9a0c4 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -5,13 +5,15 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import logging import os from pathlib import Path -from typing import Dict, List, Set, Tuple +from typing import Dict, List, Optional, Set, Tuple from .assemble_bundle_types import BundleSpecItem, IncrementalContext -from .incremental_state import IncrementalStateItem +from .incremental_state import CodesignedOnCopy, IncrementalStateItem FILES_TO_BE_IGNORED: Set[str] = { # Storage of Finder settings, which shouldn't be added when enumerating files from sources @@ -28,6 +30,12 @@ def should_assemble_incrementally( "Decided not to assemble incrementally — no incremental state for previous build." ) return False + if previous_run_state.versioned_if_macos != incremental_context.versioned_if_macos: + logging.getLogger(__name__).info( + "Decided not to assemble incrementally — current build and previous build have different versioned_if_macos settings." + ) + return False + previously_codesigned = previous_run_state.codesigned # If previously bundle was not code signed there should be no problems with code signing # currently in incremental mode. Existing binaries could be code signed "on @@ -50,6 +58,12 @@ "Decided not to assemble incrementally — previous vs current builds have mismatching codesigning identities." ) return False + # If previous codesign arguments differ from the current ones, also perform a non-incremental run. + if previous_run_state.codesign_arguments != incremental_context.codesign_arguments: + logging.getLogger(__name__).info( + "Decided not to assemble incrementally — previous vs current builds have mismatching codesigning arguments." + ) + return False # If bundle from previous run was signed in a different configuration vs the current run (e.g. dry code signed while now regular code signing is required) perform non-incremental run. if ( previous_run_state.codesign_configuration @@ -62,29 +76,45 @@ # If there is an artifact that was code signed on copy in previous run which is # present in current run and not code signed on copy, we should perform # non-incremental run for simplicity and correctness reasons.
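# A minimal sketch of the compatibility rule the rewritten check below
# implements (not part of the patch): every artifact that was codesign-on-copy
# in the previous build and is still among the current inputs must reappear in
# the current spec with the same (path, entitlements digest, flags) triple;
# otherwise the build falls back to non-incremental assembly. Plain tuples
# stand in for the real CodesignedOnCopy items.
from pathlib import Path

previous = {(Path("Resources/bar.txt"), "digest-a", ("--force",))}
current = {(Path("Resources/bar.txt"), "digest-a", ("--force",))}
current_inputs = {Path("Resources/bar.txt")}

still_present = {entry for entry in previous if entry[0] in current_inputs}
assert still_present.issubset(current)  # any mismatch forces a full rebuild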
- current_codesigned_on_copy_paths = {Path(i.dst) for i in spec if i.codesign_on_copy} + current_codesigned_on_copy_items = { + codesigned_on_copy_item( + path=Path(i.dst), + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + incremental_context=incremental_context, + codesign_flags_override=i.codesign_flags_override, + ) + for i in spec + if i.codesign_on_copy + } + codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build = _codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build( - set(previous_run_state.codesign_on_copy_paths), + previous_run_state.codesigned_on_copy, {Path(i.dst) for i in spec}, ) codesign_on_copy_paths_are_compatible = codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build.issubset( - current_codesigned_on_copy_paths + current_codesigned_on_copy_items ) if not codesign_on_copy_paths_are_compatible: logging.getLogger(__name__).info( - f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_paths)[0]}` that was code signed on copy in previous build which is present in current run and not code signed on copy." + f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_items)[0]}` that was code signed on copy in the previous build which is present in the current run and not code signed on copy (or codesigned but with a different set of entitlements and flags)." ) return codesign_on_copy_paths_are_compatible def _codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build( - previously_codesigned_on_copy_paths: Set[Path], + previously_codesigned_on_copy: List[CodesignedOnCopy], all_input_files: Set[Path], -): +) -> Set[CodesignedOnCopy]: all_input_files_and_directories = all_input_files | { i for file in all_input_files for i in file.parents } - return previously_codesigned_on_copy_paths & all_input_files_and_directories + return { + i + for i in previously_codesigned_on_copy + if i.path in all_input_files_and_directories + } def _get_new_digest(action_metadata: Dict[Path, str], path: Path) -> str: @@ -111,7 +141,7 @@ def calculate_incremental_state( """ result = [] source_with_destination_files = _source_with_destination_files(spec) - for (src, dst) in source_with_destination_files: + for src, dst in source_with_destination_files: is_symlink = src.is_symlink() new_digest = _get_new_digest(action_metadata, src) if not is_symlink else None resolved_symlink = Path(os.readlink(src)) if is_symlink else None @@ -165,3 +195,24 @@ def _list_directory_deterministically(directory: Path) -> List[Path]: # Sort in order for walk to be deterministic. dir_names.sort() return result + + +def codesigned_on_copy_item( + path: Path, + entitlements: Optional[Path], + incremental_context: IncrementalContext, + codesign_flags_override: Optional[List[str]], +) -> CodesignedOnCopy: + if entitlements is not None: + digest = incremental_context.metadata.get(entitlements) + if digest is None: + raise RuntimeError( + f"Expected digest for entitlements file path `{entitlements}` to be present in action metadata."
+ ) + else: + digest = None + return CodesignedOnCopy( + path=path, + entitlements_digest=digest, + codesign_flags_override=codesign_flags_override, + ) diff --git a/prelude/apple/tools/bundling/incremental_utils_test.py b/prelude/apple/tools/bundling/incremental_utils_test.py index de2f48f284..47e7b1b975 100644 --- a/prelude/apple/tools/bundling/incremental_utils_test.py +++ b/prelude/apple/tools/bundling/incremental_utils_test.py @@ -14,7 +14,7 @@ from apple.tools.code_signing.codesign_bundle import CodesignConfiguration from .assemble_bundle_types import BundleSpecItem -from .incremental_state import IncrementalState, IncrementalStateItem +from .incremental_state import CodesignedOnCopy, IncrementalState, IncrementalStateItem from .incremental_utils import ( calculate_incremental_state, IncrementalContext, @@ -53,6 +53,8 @@ def test_not_run_incrementally_when_previous_build_not_incremental(self): codesigned=False, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) @@ -77,13 +79,17 @@ def test_run_incrementally_when_previous_build_not_codesigned(self): ], codesigned=False, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, ) self.assertTrue(should_assemble_incrementally(spec, incremental_context)) @@ -110,13 +116,17 @@ def test_not_run_incrementally_when_previous_build_codesigned_and_current_is_not ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=False, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) # Check that behavior changes when both builds are codesigned @@ -146,13 +156,17 @@ def test_not_run_incrementally_when_previous_build_codesigned_with_different_ide ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity="old_identity", + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="new_identity", + codesign_arguments=[], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) # Check that behavior changes when identities are same @@ -172,9 +186,19 @@ def test_run_incrementally_when_codesign_on_copy_paths_match(self): dst="bar", codesign_on_copy=True, ), + BundleSpecItem( + src="src/baz", + dst="baz", + codesign_on_copy=True, + codesign_entitlements="entitlements.plist", + ), ] incremental_context = IncrementalContext( - metadata={Path("src/foo"): "digest"}, + metadata={ + Path("src/foo"): "digest", + Path("src/baz"): "digest2", + Path("entitlements.plist"): "entitlements_digest", + }, state=IncrementalState( items=[ IncrementalStateItem( @@ -182,17 +206,38 @@ def test_run_incrementally_when_codesign_on_copy_paths_match(self): destination_relative_to_bundle=Path("foo"), digest="digest", resolved_symlink=None, - ) + ), + IncrementalStateItem( + source=Path("src/baz"), + destination_relative_to_bundle=Path("baz"), 
+ digest="digest2", + resolved_symlink=None, + ), ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("baz"), + entitlements_digest="entitlements_digest", + codesign_flags_override=None, + ), + ], codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, ) self.assertTrue(should_assemble_incrementally(spec, incremental_context)) @@ -219,15 +264,160 @@ def test_not_run_incrementally_when_codesign_on_copy_paths_mismatch(self): codesigned=True, codesign_configuration=None, # but it was codesigned in old build - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ) + ], + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + swift_stdlib_paths=[], + ), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + spec[0].codesign_on_copy = True + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + + def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + codesign_on_copy=True, + codesign_entitlements="baz/entitlements.plist", + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + Path("baz/entitlements.plist"): "new_digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest="old_digest", + codesign_flags_override=None, + ) + ], + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + swift_stdlib_paths=[], + ), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.metadata[Path("baz/entitlements.plist")] = "old_digest" + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + + def test_not_run_incrementally_when_codesign_on_copy_flags_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + codesign_on_copy=True, + codesign_flags_override=["--force"], + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=["--force", "--deep"], + ) + ], + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + swift_stdlib_paths=[], + 
), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.state.codesigned_on_copy[0].codesign_flags_override = [ + "--force" + ] + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + + def test_not_run_incrementally_when_codesign_arguments_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[], codesign_identity="same_identity", + codesign_arguments=["--force"], swift_stdlib_paths=[], + versioned_if_macos=True, ), codesigned=True, codesign_configuration=None, codesign_identity="same_identity", + codesign_arguments=["--force", "--deep"], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.codesign_arguments = ["--force"] + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) def test_not_run_incrementally_when_codesign_configurations_mismatch(self): spec = [ @@ -251,13 +441,23 @@ def test_not_run_incrementally_when_codesign_configurations_mismatch(self): codesigned=True, # Dry codesigned in old build codesign_configuration=CodesignConfiguration.dryRun, - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ) + ], codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=CodesignConfiguration.dryRun, codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, ) # Canary self.assertTrue(should_assemble_incrementally(spec, incremental_context)) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 30dde321b7..895216c652 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
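# A small aside on stdlib argparse behavior (assumed, not code from this
# patch): the new --codesign-args option below uses action="append", and its
# help text insists on the `--codesign-args=ARG` spelling because a separate
# value that itself starts with a dash (e.g. --deep) would otherwise be
# parsed as another option rather than as the value.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--codesign-args", type=str, default=[], action="append")

ns = parser.parse_args(["--codesign-args=--deep", "--codesign-args=--force"])
assert ns.codesign_args == ["--deep", "--force"]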
+# pyre-strict + import argparse import cProfile import json @@ -13,17 +15,19 @@ import shlex import sys from pathlib import Path -from typing import List, Optional +from typing import Dict, List, Optional from apple.tools.code_signing.apple_platform import ApplePlatform from apple.tools.code_signing.codesign_bundle import ( AdhocSigningContext, codesign_bundle, CodesignConfiguration, - non_adhoc_signing_context, + CodesignedPath, + signing_context_with_profile_selection, ) -from apple.tools.code_signing.list_codesign_identities_command_factory import ( - ListCodesignIdentitiesCommandFactory, +from apple.tools.code_signing.list_codesign_identities import ( + AdHocListCodesignIdentities, + ListCodesignIdentities, ) from apple.tools.re_compatibility_utils.writable import make_dir_recursively_writable @@ -33,11 +37,13 @@ from .assemble_bundle import assemble_bundle from .assemble_bundle_types import BundleSpecItem, IncrementalContext from .incremental_state import ( + CodesignedOnCopy, IncrementalState, IncrementalStateItem, IncrementalStateJSONEncoder, parse_incremental_state, ) +from .incremental_utils import codesigned_on_copy_item from .swift_support import run_swift_stdlib_tool, SwiftSupportArguments @@ -74,6 +80,20 @@ def _args_parser() -> argparse.ArgumentParser: required=False, help="Path to code signing utility. If not provided standard `codesign` tool will be used.", ) + parser.add_argument( + "--strict-provisioning-profile-search", + action="store_true", + required=False, + help="Fail code signing if more than one matching profile is found.", + ) + parser.add_argument( + "--codesign-args", + type=str, + default=[], + required=False, + action="append", + help="Add additional args to pass during codesigning. Pass as `--codesign-args=ARG` to ensure correct arg parsing.", + ) parser.add_argument( "--info-plist-source", metavar="", @@ -114,12 +134,17 @@ action="store_true", help="Perform ad-hoc signing if set.", ) + parser.add_argument( + "--embed-provisioning-profile-when-signing-ad-hoc", + action="store_true", + help="Perform selection of a provisioning profile and embed it into the final bundle when ad-hoc signing, if set.", + ) parser.add_argument( "--ad-hoc-codesign-identity", metavar="", type=str, required=False, - help="Codesign identity to use when ad-hoc signing is performed.", + help="Codesign identity to use when ad-hoc signing is performed. Should be present when selection of a provisioning profile is requested for ad-hoc signing.", ) parser.add_argument( "--codesign-configuration", @@ -223,6 +248,17 @@ action="store_true", help="Check there are no path conflicts between different source parts of the bundle if enabled.", ) + parser.add_argument( + "--fast-provisioning-profile-parsing", + action="store_true", + help="Uses experimental faster provisioning profile parsing.", + ) + parser.add_argument( + "--versioned-if-macos", + action="store_true", + help="Create symlinks for a versioned macOS bundle.", + ) + return parser @@ -249,28 +285,71 @@ pr.enable() if args.codesign: - assert args.info_plist_source and args.info_plist_destination and args.platform + if not args.info_plist_source: + raise RuntimeError( + "Path to Info.plist source file should be set when code signing is required." + ) + if not args.info_plist_destination: + raise RuntimeError( + "Info.plist destination path should be set when code signing is required."
+ ) + if not args.platform: + raise RuntimeError( + "Apple platform should be set when code signing is required." + ) + list_codesign_identities = ( + ListCodesignIdentities.override( + shlex.split(args.codesign_identities_command) + ) + if args.codesign_identities_command + else ListCodesignIdentities.default() + ) if args.ad_hoc: + if args.embed_provisioning_profile_when_signing_ad_hoc: + if not args.profiles_dir: + raise RuntimeError( + "Path to directory with provisioning profile files should be set when selection of a provisioning profile is enabled for ad-hoc code signing." + ) + if not args.ad_hoc_codesign_identity: + raise RuntimeError( + "Code signing identity should be set when selection of a provisioning profile is enabled for ad-hoc code signing." + ) + profile_selection_context = signing_context_with_profile_selection( + info_plist_source=args.info_plist_source, + info_plist_destination=args.info_plist_destination, + provisioning_profiles_dir=args.profiles_dir, + entitlements_path=args.entitlements, + platform=args.platform, + list_codesign_identities=AdHocListCodesignIdentities( + original=list_codesign_identities, + subject_common_name=args.ad_hoc_codesign_identity, + ), + log_file_path=args.log_file, + should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, + strict_provisioning_profile_search=args.strict_provisioning_profile_search, + ) + else: + profile_selection_context = None signing_context = AdhocSigningContext( - codesign_identity=args.ad_hoc_codesign_identity + codesign_identity=args.ad_hoc_codesign_identity, + profile_selection_context=profile_selection_context, ) selected_identity_argument = args.ad_hoc_codesign_identity else: - assert ( - args.profiles_dir - ), "Path to directory with provisioning profile files should be set when signing is not ad-hoc." - signing_context = non_adhoc_signing_context( + if not args.profiles_dir: + raise RuntimeError( + "Path to directory with provisioning profile files should be set when signing is not ad-hoc."
+ ) + signing_context = signing_context_with_profile_selection( info_plist_source=args.info_plist_source, info_plist_destination=args.info_plist_destination, provisioning_profiles_dir=args.profiles_dir, entitlements_path=args.entitlements, platform=args.platform, - list_codesign_identities_command_factory=ListCodesignIdentitiesCommandFactory.override( - shlex.split(args.codesign_identities_command) - ) - if args.codesign_identities_command - else None, + list_codesign_identities=list_codesign_identities, log_file_path=args.log_file, + should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, + strict_provisioning_profile_search=args.strict_provisioning_profile_search, ) selected_identity_argument = ( signing_context.selected_profile_info.identity.fingerprint @@ -281,12 +360,15 @@ def _main() -> None: with args.spec.open(mode="rb") as spec_file: spec = json.load(spec_file, object_hook=lambda d: BundleSpecItem(**d)) + spec = _deduplicate_spec(spec) incremental_context = _incremental_context( incremenatal_state_path=args.incremental_state, codesigned=args.codesign, codesign_configuration=args.codesign_configuration, codesign_identity=selected_identity_argument, + codesign_arguments=args.codesign_args, + versioned_if_macos=args.versioned_if_macos, ) incremental_state = assemble_bundle( @@ -294,6 +376,7 @@ def _main() -> None: bundle_path=args.output, incremental_context=incremental_context, check_conflicts=args.check_conflicts, + versioned_if_macos=args.versioned_if_macos, ) swift_support_args = _swift_support_arguments( @@ -304,7 +387,6 @@ def _main() -> None: if swift_support_args: swift_stdlib_paths = run_swift_stdlib_tool( bundle_path=args.output, - signing_identity=selected_identity_argument, args=swift_support_args, ) else: @@ -319,18 +401,47 @@ def _main() -> None: raise RuntimeError( "Expected signing context to be created before bundling is done if codesign is requested." ) + + bundle_path = CodesignedPath( + path=args.output, entitlements=args.entitlements, flags=args.codesign_args + ) + codesign_on_copy_paths = [ + CodesignedPath( + path=bundle_path.path / i.dst, + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + flags=( + i.codesign_flags_override + if (i.codesign_flags_override is not None) + else args.codesign_args + ), + ) + for i in spec + if i.codesign_on_copy + ] + [ + CodesignedPath( + path=bundle_path.path / path, + entitlements=None, + flags=args.codesign_args, + ) + for path in swift_stdlib_paths + ] + codesign_bundle( - bundle_path=args.output, + bundle_path=bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=[i.dst for i in spec if i.codesign_on_copy], - codesign_args=[], + codesign_on_copy_paths=codesign_on_copy_paths, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, ) if incremental_state: + if incremental_context is None: + raise RuntimeError( + "Expected incremental context to be present when incremental state is non-null." 
+ ) _write_incremental_state( spec=spec, items=incremental_state, @@ -338,7 +449,10 @@ def _main() -> None: codesigned=args.codesign, codesign_configuration=args.codesign_configuration, selected_codesign_identity=selected_identity_argument, + codesign_arguments=args.codesign_args, swift_stdlib_paths=swift_stdlib_paths, + versioned_if_macos=args.versioned_if_macos, + incremental_context=incremental_context, ) if profiling_enabled: @@ -354,6 +468,8 @@ def _incremental_context( codesigned: bool, codesign_configuration: CodesignConfiguration, codesign_identity: Optional[str], + codesign_arguments: List[str], + versioned_if_macos: bool, ) -> Optional[IncrementalContext]: action_metadata = action_metadata_if_present(_METADATA_PATH_KEY) if action_metadata is None: @@ -372,6 +488,8 @@ def _incremental_context( codesigned=codesigned, codesign_configuration=codesign_configuration, codesign_identity=codesign_identity, + codesign_arguments=codesign_arguments, + versioned_if_macos=versioned_if_macos, ) @@ -443,15 +561,31 @@ def _write_incremental_state( codesigned: bool, codesign_configuration: CodesignConfiguration, selected_codesign_identity: Optional[str], + codesign_arguments: List[str], swift_stdlib_paths: List[Path], -): + versioned_if_macos: bool, + incremental_context: IncrementalContext, +) -> None: state = IncrementalState( items, codesigned=codesigned, codesign_configuration=codesign_configuration, - codesign_on_copy_paths=[Path(i.dst) for i in spec if i.codesign_on_copy], + codesigned_on_copy=[ + codesigned_on_copy_item( + path=Path(i.dst), + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + incremental_context=incremental_context, + codesign_flags_override=i.codesign_flags_override, + ) + for i in spec + if i.codesign_on_copy + ], codesign_identity=selected_codesign_identity, + codesign_arguments=codesign_arguments, swift_stdlib_paths=swift_stdlib_paths, + versioned_if_macos=versioned_if_macos, ) path.touch() try: @@ -462,6 +596,22 @@ def _write_incremental_state( raise +def _deduplicate_spec(spec: List[BundleSpecItem]) -> List[BundleSpecItem]: + # It's possible to have the same spec multiple times as different + # apple_resource() targets can refer to the _same_ resource file. + # + # On RE, we're not allowed to overwrite files, so prevent doing + # identical file copies. + # + # Do not reorder spec items to achieve determinism. + # Rely on the fact that `dict` preserves key order. + deduplicated_spec = list(dict.fromkeys(spec)) + # Force same sorting as in Buck1 for `SourcePathWithAppleBundleDestination` + # WARNING: This logic is tightly coupled with how spec filtering is done in `_filter_conflicting_paths` method during incremental bundling. Don't change unless you fully understand what is going on here. 
+ deduplicated_spec.sort() + return deduplicated_spec + + def _setup_logging( stderr_level: int, file_level: int, log_path: Optional[Path] ) -> None: @@ -489,7 +639,7 @@ class ColoredLogFormatter(logging.Formatter): - _colors = { + _colors: Dict[int, str] = { logging.DEBUG: "\x1b[m", logging.INFO: "\x1b[37m", logging.WARNING: "\x1b[33m", @@ -498,10 +648,10 @@ } _reset_color = "\x1b[0m" - def __init__(self, text_format: str): + def __init__(self, text_format: str) -> None: self.text_format = text_format - def format(self, record: logging.LogRecord): + def format(self, record: logging.LogRecord) -> str: colored_format = ( self._colors[record.levelno] + self.text_format + self._reset_color ) diff --git a/prelude/apple/tools/bundling/swift_support.py b/prelude/apple/tools/bundling/swift_support.py index b9ecd81d85..d1c53bf783 100644 --- a/prelude/apple/tools/bundling/swift_support.py +++ b/prelude/apple/tools/bundling/swift_support.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + +import logging import os import shlex import shutil @@ -14,6 +17,8 @@ from pathlib import Path from typing import List, Optional, Union +_LOGGER: logging.Logger = logging.getLogger(__name__) + @dataclass class SwiftSupportArguments: @@ -26,10 +31,8 @@ sdk_root: Path -def run_swift_stdlib_tool( - bundle_path: Path, signing_identity: Optional[str], args: SwiftSupportArguments -) -> List[Path]: - # TODO(akozhevnikov) when incremental bundling is on, binary, frameworks and plugins are not changed, signing identity is unchanged skip this step. +def run_swift_stdlib_tool(bundle_path: Path, args: SwiftSupportArguments) -> List[Path]: + # TODO(T181556849): when incremental bundling is on and the binary, frameworks, plugins, and signing identity are all unchanged, skip this step. bundle_relative_output_paths = [] with tempfile.TemporaryDirectory() as tmp_dir: # When signing, swift-stdlib-tool needs a proper PATH environment variable. @@ -37,10 +40,11 @@ env = os.environ.copy() # xcrun doesn't like relative paths env["SDKROOT"] = os.path.abspath(args.sdk_root) - result = subprocess.run( - _execution_command(bundle_path, signing_identity, args, tmp_dir), - env=env, + cmd = _execution_command(bundle_path, args, tmp_dir) + _LOGGER.info( + f"Running Swift stdlib tool with command: `{cmd}` and environment `{env}`."
) + result = subprocess.run(cmd, env=env) result.check_returncode() outputs = sorted(os.listdir(tmp_dir)) frameworks_path = bundle_path / args.frameworks_destination @@ -56,28 +60,22 @@ def run_swift_stdlib_tool( def _execution_command( bundle_path: Path, - signing_identity: Optional[str], args: SwiftSupportArguments, tmp_dir: str, ) -> List[Union[str, Path]]: - signing_args = ["--sign", signing_identity] if signing_identity else [] - return ( - shlex.split(args.swift_stdlib_command) - + [ - "--copy", - "--strip-bitcode", - "--scan-executable", - bundle_path / args.binary_destination, - "--scan-executable", - bundle_path / args.appclips_destination, - "--scan-folder", - bundle_path / args.frameworks_destination, - "--scan-folder", - bundle_path / args.plugins_destination, - "--destination", - tmp_dir, - "--platform", - args.platform, - ] - + signing_args - ) + return shlex.split(args.swift_stdlib_command) + [ + "--copy", + "--strip-bitcode", + "--scan-executable", + bundle_path / args.binary_destination, + "--scan-executable", + bundle_path / args.appclips_destination, + "--scan-folder", + bundle_path / args.frameworks_destination, + "--scan-folder", + bundle_path / args.plugins_destination, + "--destination", + tmp_dir, + "--platform", + args.platform, + ] diff --git a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json index 7e4b6d2366..79b549518e 100644 --- a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json @@ -1,5 +1,5 @@ { - "version": 4, + "version": 7, "data": { "something": [] } diff --git a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json index dd43ce55d7..ff8bc2e49b 100644 --- a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json @@ -18,12 +18,27 @@ ], "codesign_configuration": null, "codesigned": true, - "codesign_on_copy_paths": [ - "Resources/bar.txt" + "codesigned_on_copy": [ + { + "path": "Resources/bar.txt" + }, + { + "path": "Resources/baz.txt", + "entitlements_digest": "abc" + }, + { + "path": "Resources/qux.txt", + "codesign_flags_override": ["--deep", "--force"] + } ], "codesign_identity": "Johny Appleseed", + "codesign_arguments": [ + "--force", + "--deep" + ], + "versioned_if_macos": true, "swift_stdlib_paths": [ "Frameworks/libswiftCore.dylib" ], - "version": 3 + "version": 7 } diff --git a/prelude/apple/tools/code_signing/BUCK.v2 b/prelude/apple/tools/code_signing/BUCK.v2 index a55b3d96d1..3ca90c27e3 100644 --- a/prelude/apple/tools/code_signing/BUCK.v2 +++ b/prelude/apple/tools/code_signing/BUCK.v2 @@ -1,4 +1,9 @@ -# @oss-disable: load("@prelude//apple/tools/defs.bzl", "meta_python_test") +load("@prelude//utils:source_listing.bzl", "source_listing") +load("@prelude//apple/tools/defs.bzl", "meta_python_test") + +oncall("build_infra") + +source_listing() python_library( name = "lib", @@ -11,11 +16,11 @@ python_library( "main.py", ], ), + visibility = ["PUBLIC"], deps = [ "prelude//apple/tools:plistlib_utils", "prelude//apple/tools/info_plist_processor:process", ], - visibility = ["PUBLIC"], ) # @oss-disable: meta_python_test( @@ -35,6 +40,7 @@ python_binary( main = "main.py", visibility = ["PUBLIC"], deps = [ + 
"fbsource//third-party/pypi/typed-argument-parser:typed-argument-parser", ":lib", ], ) diff --git a/prelude/apple/tools/code_signing/app_id.py b/prelude/apple/tools/code_signing/app_id.py index fbd70e5171..d657a4da94 100644 --- a/prelude/apple/tools/code_signing/app_id.py +++ b/prelude/apple/tools/code_signing/app_id.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations import re @@ -22,11 +24,11 @@ class _ReGroupName(str, Enum): team_id = "team_id" bundle_id = "bundle_id" - _re_string = "^(?P<{team_id}>[A-Z0-9]{{10}})\\.(?P<{bundle_id}>.+)$".format( + _re_string: str = "^(?P<{team_id}>[A-Z0-9]{{10}})\\.(?P<{bundle_id}>.+)$".format( team_id=_ReGroupName.team_id, bundle_id=_ReGroupName.bundle_id, ) - _re_pattern = re.compile(_re_string) + _re_pattern: re.Pattern[str] = re.compile(_re_string) # Takes a application identifier and splits it into Team ID and bundle ID. # Prefix is always a ten-character alphanumeric sequence. Bundle ID may be a fully-qualified name or a wildcard ending in *. @@ -34,7 +36,12 @@ class _ReGroupName(str, Enum): def from_string(cls, string: str) -> AppId: match = re.match(cls._re_pattern, string) if not match: - raise RuntimeError("Malformed app ID string: {}".format(string)) + raise RuntimeError( + "Malformed app ID string: '{}'. " + "We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.".format( + string + ) + ) return AppId( match.group(cls._ReGroupName.team_id), match.group(cls._ReGroupName.bundle_id), @@ -43,8 +50,13 @@ def from_string(cls, string: str) -> AppId: # Returns the App ID if it can be inferred from keys in the entitlement. Otherwise, it returns `None`. @staticmethod def infer_from_entitlements(entitlements: Dict[str, Any]) -> Optional[AppId]: - keychain_access_groups = entitlements.get("keychain-access-groups") - if not keychain_access_groups: - return None - app_id_string = keychain_access_groups[0] - return AppId.from_string(app_id_string) + try: + keychain_access_groups = entitlements.get("keychain-access-groups") + if not keychain_access_groups: + return None + app_id_string = keychain_access_groups[0] + return AppId.from_string(app_id_string) + except Exception as e: + raise RuntimeError( + "Error when parsing the entitlements for the app ID: {}".format(e) + ) diff --git a/prelude/apple/tools/code_signing/app_id_test.py b/prelude/apple/tools/code_signing/app_id_test.py index 2875a129a0..ab1c32bb83 100644 --- a/prelude/apple/tools/code_signing/app_id_test.py +++ b/prelude/apple/tools/code_signing/app_id_test.py @@ -23,14 +23,36 @@ def test_string_parsing(self): expected = AppId("ABCDE12345", "*") self.assertEqual(expected, result) - with self.assertRaisesRegex(RuntimeError, "Malformed app ID string: invalid."): + with self.assertRaisesRegex( + RuntimeError, + "Malformed app ID string: 'invalid.'. 
We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.", + ): _ = AppId.from_string("invalid.") def test_entitlements_parsing(self): - file = pkg_resources.resource_stream( + with pkg_resources.resource_stream( __name__, "test_resources/Entitlements.plist" - ) - entitlements = plistlib.load(file) - result = AppId.infer_from_entitlements(entitlements) - expected = AppId("ABCDE12345", "com.example.TestApp") - self.assertEqual(expected, result) + ) as file: + entitlements = plistlib.load(file) + result = AppId.infer_from_entitlements(entitlements) + expected = AppId("ABCDE12345", "com.example.TestApp") + self.assertEqual(expected, result) + + invalid_file = b"""<?xml version="1.0" encoding="UTF-8"?> + <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> + <plist version="1.0"> + <dict> + <key>keychain-access-groups</key> + <array> + <string>com.facebook.CommonTestHost</string> + </array> + </dict> + </plist> + """ + + invalid_entitlement = plistlib.loads(invalid_file) + with self.assertRaisesRegex( + RuntimeError, + "Error when parsing the entitlements for the app ID: Malformed app ID string: 'com.facebook.CommonTestHost'. " + "We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.", + ): + AppId.infer_from_entitlements(invalid_entitlement) diff --git a/prelude/apple/tools/code_signing/apple_platform.py b/prelude/apple/tools/code_signing/apple_platform.py index e45b0905c8..ee32486c85 100644 --- a/prelude/apple/tools/code_signing/apple_platform.py +++ b/prelude/apple/tools/code_signing/apple_platform.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from enum import Enum from typing import Optional @@ -36,12 +38,12 @@ def provisioning_profile_name(self) -> Optional[str]: else: return None - def embedded_provisioning_profile_file_name(self) -> str: + def embedded_provisioning_profile_path(self) -> str: """ Returns: The name of the provisioning profile in the final application bundle. """ if self.is_desktop(): - return "embedded.provisionprofile" + return "Contents/embedded.provisionprofile" else: return "embedded.mobileprovision" diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 9303d6a744..05a08afcb3 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
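For context on the `AppId` changes above: the parser accepts a ten-character uppercase alphanumeric team ID, a literal dot, then a bundle ID that is either a fully-qualified name or a `*` wildcard. A minimal standalone sketch of that split (illustrative names, not the prelude's API):

    import re
    from typing import NamedTuple

    class ParsedAppId(NamedTuple):
        team_id: str
        bundle_id: str

    # Same shape as the pattern in app_id.py: ten uppercase alphanumerics, a dot, then the bundle ID.
    _APP_ID_RE = re.compile(r"^(?P<team_id>[A-Z0-9]{10})\.(?P<bundle_id>.+)$")

    def parse_app_id(value: str) -> ParsedAppId:
        match = _APP_ID_RE.match(value)
        if not match:
            raise ValueError(f"Malformed app ID string: {value!r}")
        return ParsedAppId(match.group("team_id"), match.group("bundle_id"))

    assert parse_app_id("ABCDE12345.com.example.TestApp") == ParsedAppId("ABCDE12345", "com.example.TestApp")
    assert parse_app_id("ABCDE12345.*").bundle_id == "*"

This is also why the wrapped `infer_from_entitlements` error in the test above reports `com.facebook.CommonTestHost` as malformed: the first keychain access group lacks the team-ID prefix.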
+# pyre-strict + +import asyncio import logging import os import shutil @@ -15,7 +18,7 @@ from dataclasses import dataclass from enum import Enum from pathlib import Path -from typing import Any, Dict, List, Optional, Union +from typing import Any, cast, Dict, List, Optional, Union from apple.tools.plistlib_utils import detect_format_and_load @@ -26,17 +29,14 @@ ICodesignCommandFactory, ) from .fast_adhoc import is_fast_adhoc_codesign_allowed, should_skip_adhoc_signing_path -from .identity import CodeSigningIdentity from .info_plist_metadata import InfoPlistMetadata -from .list_codesign_identities_command_factory import ( - IListCodesignIdentitiesCommandFactory, - ListCodesignIdentitiesCommandFactory, -) +from .list_codesign_identities import IListCodesignIdentities from .prepare_code_signing_entitlements import prepare_code_signing_entitlements from .prepare_info_plist import prepare_info_plist from .provisioning_profile_diagnostics import ( interpret_provisioning_profile_diagnostics, META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK, + META_IOS_PROVISIONING_PROFILES_COMMAND, META_IOS_PROVISIONING_PROFILES_LINK, ) from .provisioning_profile_metadata import ProvisioningProfileMetadata @@ -54,7 +54,23 @@ DefaultReadProvisioningProfileCommandFactory() ) -_LOGGER = logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +@dataclass +class CodesignedPath: + path: Path + """ + Path relative to bundle root which needs to be codesigned + """ + entitlements: Optional[Path] + """ + Path to entitlements to be used when codesigning, relative to buck project + """ + flags: List[str] + """ + Flags to be passed to codesign command when codesigning this particular path + """ def _select_provisioning_profile( @@ -62,17 +78,39 @@ def _select_provisioning_profile( provisioning_profiles_dir: Path, entitlements_path: Optional[Path], platform: ApplePlatform, - list_codesign_identities_command_factory: IListCodesignIdentitiesCommandFactory, - read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory = _default_read_provisioning_profile_command_factory, + list_codesign_identities: IListCodesignIdentities, + should_use_fast_provisioning_profile_parsing: bool, + strict_provisioning_profile_search: bool, log_file_path: Optional[Path] = None, ) -> SelectedProvisioningProfileInfo: - identities = _list_identities(list_codesign_identities_command_factory) - provisioning_profiles = _read_provisioning_profiles( - provisioning_profiles_dir, read_provisioning_profile_command_factory + read_provisioning_profile_command_factory = ( + _default_read_provisioning_profile_command_factory + ) + identities = list_codesign_identities.list_codesign_identities() + _LOGGER.info( + f"Fast provisioning profile parsing enabled: {should_use_fast_provisioning_profile_parsing}" ) + provisioning_profiles = [] + if should_use_fast_provisioning_profile_parsing: + provisioning_profiles = asyncio.run( + _fast_read_provisioning_profiles_async( + provisioning_profiles_dir, + read_provisioning_profile_command_factory, + ) + ) + else: + provisioning_profiles = _read_provisioning_profiles( + provisioning_profiles_dir, + read_provisioning_profile_command_factory, + ) if not provisioning_profiles: raise CodeSignProvisioningError( - f"\n\nFailed to find any provisioning profiles. 
Please make sure to install required provisioning profiles and make sure they are located at '{provisioning_profiles_dir}'.\n\nPlease follow the wiki to build & run on device: {META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK}.\nProvisioning profiles for your app can be downloaded from {META_IOS_PROVISIONING_PROFILES_LINK}.\n" + ( + f"\n\nFailed to find any provisioning profiles. Please make sure to install required provisioning profiles and make sure they are located at '{provisioning_profiles_dir}'.\n\n" + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` to download the profiles.\n" + f"Please follow the wiki to build & run on device: {META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK}.\n" + f"Provisioning profiles for your app can also be downloaded from {META_IOS_PROVISIONING_PROFILES_LINK}.\n" + ) ) entitlements = _read_entitlements_file(entitlements_path) selected_profile_info, mismatches = select_best_provisioning_profile( @@ -81,6 +119,7 @@ def _select_provisioning_profile( provisioning_profiles, entitlements, platform, + strict_provisioning_profile_search, ) if selected_profile_info is None: if not mismatches: @@ -92,6 +131,7 @@ def _select_provisioning_profile( diagnostics=mismatches, bundle_id=info_plist_metadata.bundle_id, provisioning_profiles_dir=provisioning_profiles_dir, + identities=identities, log_file_path=log_file_path, ) ) @@ -99,32 +139,38 @@ def _select_provisioning_profile( @dataclass -class AdhocSigningContext: - codesign_identity: str - - def __init__(self, codesign_identity: Optional[str] = None): - self.codesign_identity = codesign_identity or "-" - - -@dataclass -class NonAdhocSigningContext: +class SigningContextWithProfileSelection: info_plist_source: Path info_plist_destination: Path info_plist_metadata: InfoPlistMetadata selected_profile_info: SelectedProvisioningProfileInfo -def non_adhoc_signing_context( +@dataclass +class AdhocSigningContext: + codesign_identity: str + profile_selection_context: Optional[SigningContextWithProfileSelection] + + def __init__( + self, + codesign_identity: Optional[str] = None, + profile_selection_context: Optional[SigningContextWithProfileSelection] = None, + ) -> None: + self.codesign_identity = codesign_identity or "-" + self.profile_selection_context = profile_selection_context + + +def signing_context_with_profile_selection( info_plist_source: Path, info_plist_destination: Path, provisioning_profiles_dir: Path, entitlements_path: Optional[Path], platform: ApplePlatform, - list_codesign_identities_command_factory: Optional[ - IListCodesignIdentitiesCommandFactory - ] = None, + list_codesign_identities: IListCodesignIdentities, log_file_path: Optional[Path] = None, -) -> NonAdhocSigningContext: + should_use_fast_provisioning_profile_parsing: bool = False, + strict_provisioning_profile_search: bool = False, +) -> SigningContextWithProfileSelection: with open(info_plist_source, mode="rb") as info_plist_file: info_plist_metadata = InfoPlistMetadata.from_file(info_plist_file) selected_profile_info = _select_provisioning_profile( @@ -132,12 +178,13 @@ def non_adhoc_signing_context( provisioning_profiles_dir=provisioning_profiles_dir, entitlements_path=entitlements_path, platform=platform, - list_codesign_identities_command_factory=list_codesign_identities_command_factory - or ListCodesignIdentitiesCommandFactory.default(), + list_codesign_identities=list_codesign_identities, log_file_path=log_file_path, + should_use_fast_provisioning_profile_parsing=should_use_fast_provisioning_profile_parsing, + 
strict_provisioning_profile_search=strict_provisioning_profile_search, ) - return NonAdhocSigningContext( + return SigningContextWithProfileSelection( info_plist_source, info_plist_destination, info_plist_metadata, @@ -152,42 +199,45 @@ class CodesignConfiguration(str, Enum): def codesign_bundle( - bundle_path: Path, - signing_context: Union[AdhocSigningContext, NonAdhocSigningContext], - entitlements_path: Optional[Path], + bundle_path: CodesignedPath, + signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], platform: ApplePlatform, - codesign_on_copy_paths: List[Path], - codesign_args: List[str], + codesign_on_copy_paths: List[CodesignedPath], codesign_tool: Optional[Path] = None, codesign_configuration: Optional[CodesignConfiguration] = None, ) -> None: with tempfile.TemporaryDirectory() as tmp_dir: - if isinstance(signing_context, NonAdhocSigningContext): - info_plist_metadata = signing_context.info_plist_metadata - selected_profile_info = signing_context.selected_profile_info - prepared_entitlements_path = prepare_code_signing_entitlements( - entitlements_path, - info_plist_metadata.bundle_id, - selected_profile_info.profile, - tmp_dir, - ) - prepared_info_plist_path = prepare_info_plist( - signing_context.info_plist_source, - info_plist_metadata, - selected_profile_info.profile, - tmp_dir, + if isinstance(signing_context, SigningContextWithProfileSelection): + selection_profile_context = signing_context + elif isinstance(signing_context, AdhocSigningContext): + selection_profile_context = signing_context.profile_selection_context + else: + raise RuntimeError( + f"Unexpected type of signing context `{type(signing_context)}`" ) - os.replace( - prepared_info_plist_path, - bundle_path / signing_context.info_plist_destination, + + if selection_profile_context: + bundle_path_with_prepared_entitlements = ( + _prepare_entitlements_and_info_plist( + bundle_path=bundle_path, + platform=platform, + signing_context=selection_profile_context, + tmp_dir=tmp_dir, + ) ) - shutil.copy2( - selected_profile_info.profile.file_path, - bundle_path / platform.embedded_provisioning_profile_file_name(), + selected_identity_fingerprint = ( + selection_profile_context.selected_profile_info.identity.fingerprint ) - selected_identity_fingerprint = selected_profile_info.identity.fingerprint else: - prepared_entitlements_path = entitlements_path + if not isinstance(signing_context, AdhocSigningContext): + raise AssertionError( + f"Expected `AdhocSigningContext`, got `{type(signing_context)}` instead." + ) + if signing_context.profile_selection_context: + raise AssertionError( + "Expected no profile selection context in `AdhocSigningContext` when `selection_profile_context` is `None`." + ) + bundle_path_with_prepared_entitlements = bundle_path selected_identity_fingerprint = signing_context.codesign_identity if codesign_configuration is CodesignConfiguration.dryRun: @@ -196,14 +246,12 @@ def codesign_bundle( "Expected codesign tool not to be the default one when dry run codesigning is requested." 
) _dry_codesign_everything( - bundle_path=bundle_path, + root=bundle_path_with_prepared_entitlements, codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, - entitlements=prepared_entitlements_path, platform=platform, - codesign_args=codesign_args, ) else: fast_adhoc_signing_enabled = ( @@ -212,36 +260,95 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - bundle_path=bundle_path, + root=bundle_path_with_prepared_entitlements, codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), - entitlements=prepared_entitlements_path, platform=platform, fast_adhoc_signing=fast_adhoc_signing_enabled, - codesign_args=codesign_args, ) -def _list_identities( - list_codesign_identities_command_factory: IListCodesignIdentitiesCommandFactory, -) -> List[CodeSigningIdentity]: - output = subprocess.check_output( - list_codesign_identities_command_factory.list_codesign_identities_command(), - encoding="utf-8", +def _prepare_entitlements_and_info_plist( + bundle_path: CodesignedPath, + platform: ApplePlatform, + signing_context: SigningContextWithProfileSelection, + tmp_dir: str, +) -> CodesignedPath: + info_plist_metadata = signing_context.info_plist_metadata + selected_profile = signing_context.selected_profile_info.profile + prepared_entitlements_path = prepare_code_signing_entitlements( + bundle_path.entitlements, + info_plist_metadata.bundle_id, + selected_profile, + tmp_dir, + ) + prepared_info_plist_path = prepare_info_plist( + signing_context.info_plist_source, + info_plist_metadata, + selected_profile, + tmp_dir, + ) + os.replace( + prepared_info_plist_path, + bundle_path.path / signing_context.info_plist_destination, + ) + shutil.copy2( + selected_profile.file_path, + bundle_path.path / platform.embedded_provisioning_profile_path(), + ) + return CodesignedPath( + path=bundle_path.path, + entitlements=prepared_entitlements_path, + flags=bundle_path.flags, + ) + + +async def _fast_read_provisioning_profiles_async( + dirpath: Path, + read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, +) -> List[ProvisioningProfileMetadata]: + tasks = [] + for f in os.listdir(dirpath): + if f.endswith(".mobileprovision") or f.endswith(".provisionprofile"): + filepath = dirpath / f + tasks.append( + _provisioning_profile_from_file_path_async( + filepath, + read_provisioning_profile_command_factory, + should_use_fast_provisioning_profile_parsing=True, + ) + ) + results = await asyncio.gather(*tasks) + return cast(List[ProvisioningProfileMetadata], results) + + +async def _provisioning_profile_from_file_path_async( + path: Path, + read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, + should_use_fast_provisioning_profile_parsing: bool, +) -> ProvisioningProfileMetadata: + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, + _provisioning_profile_from_file_path, + path, + read_provisioning_profile_command_factory, + should_use_fast_provisioning_profile_parsing, ) - return CodeSigningIdentity.parse_security_stdout(output) def _read_provisioning_profiles( dirpath: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, ) -> List[ProvisioningProfileMetadata]: + return [ _provisioning_profile_from_file_path( dirpath / f, 
read_provisioning_profile_command_factory, + should_use_fast_provisioning_profile_parsing=False, ) for f in os.listdir(dirpath) if (f.endswith(".mobileprovision") or f.endswith(".provisionprofile")) @@ -251,8 +358,36 @@ def _read_provisioning_profiles( def _provisioning_profile_from_file_path( path: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, + should_use_fast_provisioning_profile_parsing: bool, +) -> ProvisioningProfileMetadata: + if should_use_fast_provisioning_profile_parsing: + # Provisioning profiles have a plist embedded in them that we can extract directly. + # This is much faster than calling an external command like openssl. + with open(path, "rb") as f: + content = f.read() + start_index = content.find(b"<?xml") + end_index = content.find(b"</plist>", start_index) + len(b"</plist>") + if start_index >= 0 and end_index >= 0: + plist_data = content[start_index:end_index] + return ProvisioningProfileMetadata.from_provisioning_profile_file_content( + path, plist_data + ) + else: + _LOGGER.warning( + f"Failed to find plist in provisioning profile at {path}. Falling back to slow parsing." + ) + + # Fallback to slow parsing if fast parsing is disabled or fails + return _provisioning_profile_from_file_path_using_factory( + path, read_provisioning_profile_command_factory + ) + + +def _provisioning_profile_from_file_path_using_factory( + path: Path, + read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, ) -> ProvisioningProfileMetadata: - output = subprocess.check_output( + output: bytes = subprocess.check_output( read_provisioning_profile_command_factory.read_provisioning_profile_command( path ), @@ -271,20 +406,17 @@ def _read_entitlements_file(path: Optional[Path]) -> Optional[Dict[str, Any]]: def _dry_codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[Path], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_tool: Path, - entitlements: Optional[Path], platform: ApplePlatform, - codesign_args: List[str], ) -> None: codesign_command_factory = DryRunCodesignCommandFactory(codesign_tool) - codesign_on_copy_abs_paths = [bundle_path / path for path in codesign_on_copy_paths] codesign_on_copy_directory_paths = [ - p for p in codesign_on_copy_abs_paths if p.is_dir() + p for p in codesign_on_copy_paths if p.path.is_dir() ] # First sign codesign-on-copy directory paths @@ -293,15 +425,15 @@ def _dry_codesign_everything( identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=None, platform=platform, - codesign_args=codesign_args, ) # Dry codesigning creates a .plist inside every directory it signs. # That approach doesn't work for files so those files are written into .plist for root bundle.
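The fast-parsing branch restored above avoids spawning `security cms`/`openssl` per profile by slicing the embedded XML plist straight out of the signed blob; the `asyncio` reader earlier in this file then fans the same helper out across a thread pool via `run_in_executor`. A self-contained sketch of the byte scan (standard library only, helper name invented):

    import plistlib
    from pathlib import Path
    from typing import Any, Dict

    def read_embedded_plist(profile: Path) -> Dict[str, Any]:
        # A .mobileprovision/.provisionprofile is a CMS-signed blob with an XML
        # plist payload between the XML prolog and the closing </plist> tag.
        content = profile.read_bytes()
        start = content.find(b"<?xml")
        end = content.find(b"</plist>", max(start, 0)) + len(b"</plist>")
        if start < 0 or end < len(b"</plist>"):
            raise ValueError(f"no embedded plist found in {profile}")
        return plistlib.loads(content[start:end])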
codesign_on_copy_file_paths = [ - p.relative_to(bundle_path) for p in codesign_on_copy_abs_paths if p.is_file() + p.path.relative_to(root.path) + for p in codesign_on_copy_paths + if p.path.is_file() ] codesign_command_factory.set_codesign_on_copy_file_paths( codesign_on_copy_file_paths @@ -309,32 +441,27 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[bundle_path], + paths=[root], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, platform=platform, - codesign_args=codesign_args, ) def _codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[Path], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, - codesign_args: List[str], ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path / path for path in codesign_on_copy_paths], + paths=codesign_on_copy_paths, identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) @@ -343,82 +470,90 @@ def _codesign_everything( identity_fingerprint, tmp_dir, codesign_command_factory, - None, platform, - codesign_args, ) # Lastly sign whole bundle - root_bundle_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path], + root_filtered_paths = _filter_out_fast_adhoc_paths( + paths=[root], identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) _codesign_paths( - root_bundle_paths, + root_filtered_paths, identity_fingerprint, tmp_dir, codesign_command_factory, - entitlements, platform, - codesign_args, ) @dataclass -class CodesignProcess: - process: subprocess.Popen - stdout_path: str +class ParallelProcess: + process: subprocess.Popen[bytes] + stdout_path: Optional[str] stderr_path: str def check_result(self) -> None: if self.process.returncode == 0: return - with open(self.stdout_path, encoding="utf8") as stdout, open( - self.stderr_path, encoding="utf8" - ) as stderr: - raise RuntimeError( - "\nstdout:\n{}\n\nstderr:\n{}\n".format(stdout.read(), stderr.read()) + with ExitStack() as stack: + stderr = stack.enter_context(open(self.stderr_path, encoding="utf8")) + stderr_string = f"\nstderr:\n{stderr.read()}\n" + stdout = ( + stack.enter_context(open(self.stdout_path, encoding="utf8")) + if self.stdout_path + else None ) + stdout_string = f"\nstdout:\n{stdout.read()}\n" if stdout else "" + raise RuntimeError(f"{stdout_string}{stderr_string}") -def _spawn_codesign_process( - path: Path, - identity_fingerprint: str, +def _spawn_process( + command: List[Union[str, Path]], tmp_dir: str, - codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], stack: ExitStack, - codesign_args: List[str], -) -> CodesignProcess: - stdout_path = os.path.join(tmp_dir, uuid.uuid4().hex) - stdout = stack.enter_context(open(stdout_path, "w")) + pipe_stdout: bool = False, +) -> ParallelProcess: + if pipe_stdout: + stdout_path = None + stdout = subprocess.PIPE + else: + stdout_path = os.path.join(tmp_dir, uuid.uuid4().hex) + stdout = stack.enter_context(open(stdout_path, "w")) stderr_path = os.path.join(tmp_dir, uuid.uuid4().hex) stderr = stack.enter_context(open(stderr_path, "w")) - command = 
codesign_command_factory.codesign_command( - path, identity_fingerprint, entitlements, codesign_args - ) - _LOGGER.info(f"Executing codesign command: {command}") + _LOGGER.info(f"Executing command: {command}") process = subprocess.Popen(command, stdout=stdout, stderr=stderr) - return CodesignProcess( + return ParallelProcess( process, stdout_path, stderr_path, ) +def _spawn_codesign_process( + path: CodesignedPath, + identity_fingerprint: str, + tmp_dir: str, + codesign_command_factory: ICodesignCommandFactory, + stack: ExitStack, +) -> ParallelProcess: + command = codesign_command_factory.codesign_command( + path.path, identity_fingerprint, path.entitlements, path.flags + ) + return _spawn_process(command=command, tmp_dir=tmp_dir, stack=stack) + + def _codesign_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, - codesign_args: List[str], ) -> None: """Codesigns several paths in parallel.""" - processes: List[CodesignProcess] = [] + processes: List[ParallelProcess] = [] with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( @@ -426,9 +561,7 @@ def _codesign_paths( identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, stack=stack, - codesign_args=codesign_args, ) processes.append(process) for p in processes: @@ -438,12 +571,11 @@ def _codesign_paths( def _filter_out_fast_adhoc_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, - entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, -) -> List[Path]: +) -> List[CodesignedPath]: if not fast_adhoc_signing: return paths # TODO(T149863217): Make skip checks run in parallel, they're usually fast (~15ms) @@ -452,6 +584,6 @@ def _filter_out_fast_adhoc_paths( p for p in paths if not should_skip_adhoc_signing_path( - p, identity_fingerprint, entitlements, platform + p.path, identity_fingerprint, p.entitlements, platform ) ] diff --git a/prelude/apple/tools/code_signing/codesign_command_factory.py b/prelude/apple/tools/code_signing/codesign_command_factory.py index 2d122ce45a..9a90e03231 100644 --- a/prelude/apple/tools/code_signing/codesign_command_factory.py +++ b/prelude/apple/tools/code_signing/codesign_command_factory.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
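The `ParallelProcess` refactor above generalizes what `CodesignProcess` did for `codesign` alone: start every subprocess first, then wait and surface the captured stderr on failure. A reduced sketch of that spawn-all-then-wait pattern (hypothetical helper, standard library only):

    import subprocess
    from typing import List

    def run_all(commands: List[List[str]]) -> None:
        # Launch everything before waiting on anything so the work overlaps.
        procs = [
            subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            for cmd in commands
        ]
        failures = []
        for cmd, proc in zip(commands, procs):
            _, stderr = proc.communicate()
            if proc.returncode != 0:
                failures.append(f"{cmd} failed:\n{stderr.decode(errors='replace')}")
        if failures:
            raise RuntimeError("\n".join(failures))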
+# pyre-strict + from abc import ABCMeta, abstractmethod from pathlib import Path from typing import List, Optional, Union @@ -23,9 +25,10 @@ def codesign_command( class DefaultCodesignCommandFactory(ICodesignCommandFactory): - _command_args = ["--force", "--sign"] + codesign_tool: Path + _command_args: List[str] = ["--force", "--sign"] - def __init__(self, codesign_tool: Optional[Path]): + def __init__(self, codesign_tool: Optional[Path]) -> None: self.codesign_tool = codesign_tool or Path("codesign") def codesign_command( @@ -39,15 +42,18 @@ def codesign_command( return ( [self.codesign_tool] + DefaultCodesignCommandFactory._command_args - + codesign_args + [identity_fingerprint] + + codesign_args + entitlements_args + [path] ) class DryRunCodesignCommandFactory(ICodesignCommandFactory): - def __init__(self, codesign_tool: Path): + codesign_tool: Path + codesign_on_copy_file_paths: Optional[List[Path]] + + def __init__(self, codesign_tool: Path) -> None: self.codesign_tool = codesign_tool self.codesign_on_copy_file_paths = None @@ -64,7 +70,8 @@ def codesign_command( args = [path, "--identity", identity_fingerprint] if entitlements: args += ["--entitlements", entitlements] if entitlements else [] - if self.codesign_on_copy_file_paths: + codesign_on_copy_file_paths = self.codesign_on_copy_file_paths + if codesign_on_copy_file_paths: args += ["--extra-paths-to-sign"] - args += self.codesign_on_copy_file_paths + args += codesign_on_copy_file_paths return [self.codesign_tool] + args diff --git a/prelude/apple/tools/code_signing/fast_adhoc.py b/prelude/apple/tools/code_signing/fast_adhoc.py index 8d3fb16c6b..9d79c57e8d 100644 --- a/prelude/apple/tools/code_signing/fast_adhoc.py +++ b/prelude/apple/tools/code_signing/fast_adhoc.py @@ -5,17 +5,19 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import logging import os import subprocess import sys from pathlib import Path -from typing import Optional +from typing import List, Optional, Union from .apple_platform import ApplePlatform -_LOGGER = logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) def _find_executable_for_signed_path(path: Path, platform: ApplePlatform) -> Path: @@ -29,7 +31,9 @@ def _find_executable_for_signed_path(path: Path, platform: ApplePlatform) -> Pat return contents_dir / path.stem -def _logged_subprocess_run(name, why, args): +def _logged_subprocess_run( + name: str, why: str, args: List[Union[str, Path]] +) -> subprocess.CompletedProcess[str]: _LOGGER.info(f" Calling {name} to {why}: `{args}`") result = subprocess.run( args, @@ -74,7 +78,7 @@ def should_skip_adhoc_signing_path( identity_fingerprint: str, entitlements_path: Optional[Path], platform: ApplePlatform, -): +) -> bool: logging.getLogger(__name__).info( f"Checking if should skip adhoc signing path `{path}` with identity `{identity_fingerprint}` and entitlements `{entitlements_path}` for platform `{platform}`" ) diff --git a/prelude/apple/tools/code_signing/identity.py b/prelude/apple/tools/code_signing/identity.py index 7893a6ff9a..191e526916 100644 --- a/prelude/apple/tools/code_signing/identity.py +++ b/prelude/apple/tools/code_signing/identity.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
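One behavioral detail in the `DefaultCodesignCommandFactory` hunk above: per-path `codesign_args` now come after `--sign` and the identity instead of before them, so the identity always directly follows `--sign`. A sketch of the resulting command layout (illustrative only, not the factory itself):

    from pathlib import Path
    from typing import List, Optional, Union

    def build_codesign_command(
        path: Path,
        identity_fingerprint: str,
        entitlements: Optional[Path],
        codesign_args: List[str],
    ) -> List[Union[str, Path]]:
        # codesign --force --sign <identity> [per-path flags] [--entitlements <plist>] <path>
        entitlements_args: List[Union[str, Path]] = (
            ["--entitlements", entitlements] if entitlements else []
        )
        return (
            ["codesign", "--force", "--sign", identity_fingerprint]
            + codesign_args
            + entitlements_args
            + [path]
        )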
+# pyre-strict + from __future__ import annotations import re @@ -22,12 +24,14 @@ class _ReGroupName(str, Enum): fingerprint = "fingerprint" subject_common_name = "subject_common_name" - _re_string = '(?P<{fingerprint}>[A-F0-9]{{40}}) "(?P<{subject_common_name}>.+)"(?!.*CSSMERR_.+)'.format( - fingerprint=_ReGroupName.fingerprint, - subject_common_name=_ReGroupName.subject_common_name, + _re_string: str = ( + '(?P<{fingerprint}>[A-F0-9]{{40}}) "(?P<{subject_common_name}>.+)"(?!.*CSSMERR_.+)'.format( + fingerprint=_ReGroupName.fingerprint, + subject_common_name=_ReGroupName.subject_common_name, + ) ) - _pattern = re.compile(_re_string) + _pattern: re.Pattern[str] = re.compile(_re_string) @classmethod def parse_security_stdout(cls, text: str) -> List[CodeSigningIdentity]: diff --git a/prelude/apple/tools/code_signing/info_plist_metadata.py b/prelude/apple/tools/code_signing/info_plist_metadata.py index 21942eecbb..beb99b5ead 100644 --- a/prelude/apple/tools/code_signing/info_plist_metadata.py +++ b/prelude/apple/tools/code_signing/info_plist_metadata.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations from dataclasses import dataclass @@ -20,10 +22,10 @@ class InfoPlistMetadata: is_watchos_app: bool @staticmethod - def from_file(info_plist_file: IO) -> InfoPlistMetadata: + def from_file(info_plist_file: IO[bytes]) -> InfoPlistMetadata: root = detect_format_and_load(info_plist_file) return InfoPlistMetadata( root["CFBundleIdentifier"], root.get("CFBundlePackageType"), - root.get("WKWatchKitApp", False), + root.get("WKApplication", False), ) diff --git a/prelude/apple/tools/code_signing/info_plist_metadata_test.py b/prelude/apple/tools/code_signing/info_plist_metadata_test.py index 9b0c91b060..98ab53adac 100644 --- a/prelude/apple/tools/code_signing/info_plist_metadata_test.py +++ b/prelude/apple/tools/code_signing/info_plist_metadata_test.py @@ -22,7 +22,7 @@ def test_canary(self): <string>com.company.application</string> <key>CFBundlePackageType</key> <string>APPL</string> - <key>WKWatchKitApp</key> + <key>WKApplication</key> diff --git a/prelude/apple/tools/code_signing/list_codesign_identities.py b/prelude/apple/tools/code_signing/list_codesign_identities.py new file mode 100644 index 0000000000..2a4d458b9d --- /dev/null +++ b/prelude/apple/tools/code_signing/list_codesign_identities.py @@ -0,0 +1,75 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree.
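For reference, the `_re_string` reformatted above parses `security find-identity -v -p codesigning` output: each usable row carries a 40-digit hex fingerprint plus a quoted common name, and the negative lookahead rejects rows flagged with a `CSSMERR_` status (revoked or otherwise invalid). A standalone sketch:

    import re
    from typing import List, Tuple

    _IDENTITY_RE = re.compile(
        r'(?P<fingerprint>[A-F0-9]{40}) "(?P<name>.+)"(?!.*CSSMERR_.+)'
    )

    def parse_identities(security_output: str) -> List[Tuple[str, str]]:
        # Returns (fingerprint, subject common name) pairs for valid identities.
        return [
            (m.group("fingerprint"), m.group("name"))
            for m in _IDENTITY_RE.finditer(security_output)
        ]

    sample = '1) 0123456789ABCDEF0123456789ABCDEF01234567 "Apple Development: Jane Doe (ABCDE12345)"'
    assert parse_identities(sample)[0][1].startswith("Apple Development")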
+ +# pyre-strict + +from __future__ import annotations + +import subprocess + +from abc import ABCMeta, abstractmethod +from typing import List + +from .identity import CodeSigningIdentity + + +class IListCodesignIdentities(metaclass=ABCMeta): + @abstractmethod + def list_codesign_identities(self) -> List[CodeSigningIdentity]: + raise NotImplementedError + + +class ListCodesignIdentities(IListCodesignIdentities): + _default_command = ["security", "find-identity", "-v", "-p", "codesigning"] + + def __init__(self, command: List[str]) -> None: + self.command = command + + @classmethod + def default(cls) -> IListCodesignIdentities: + return cls(cls._default_command) + + @classmethod + def override(cls, command: List[str]) -> IListCodesignIdentities: + return cls(command) + + def list_codesign_identities(self) -> List[CodeSigningIdentity]: + return _list_identities(self.command) + + +def _list_identities( + command: List[str], +) -> List[CodeSigningIdentity]: + output = subprocess.check_output( + command, + encoding="utf-8", + ) + return CodeSigningIdentity.parse_security_stdout(output) + + +class AdHocListCodesignIdentities(IListCodesignIdentities): + def __init__( + self, original: IListCodesignIdentities, subject_common_name: str + ) -> None: + self.original = original + self.subject_common_name = subject_common_name + + def list_codesign_identities(self) -> List[CodeSigningIdentity]: + unfiltered_identities = self.original.list_codesign_identities() + identity = next( + ( + i + for i in unfiltered_identities + if i.subject_common_name == self.subject_common_name + ), + None, + ) + if not identity: + raise RuntimeError( + f"No identity found with subject common name `{self.subject_common_name}`" + ) + return [identity] diff --git a/prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py b/prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py deleted file mode 100644 index ad92e239b9..0000000000 --- a/prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. 
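A possible use of the new `AdHocListCodesignIdentities` wrapper from the file above, pinning ad-hoc signing to a single certificate; the common name here is made up:

    lister = AdHocListCodesignIdentities(
        original=ListCodesignIdentities.default(),
        subject_common_name="Apple Development: Jane Doe (ABCDE12345)",
    )
    # Returns exactly one CodeSigningIdentity, or raises if no certificate
    # with that subject common name is installed in the keychain.
    identities = lister.list_codesign_identities()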
- -from __future__ import annotations - -from abc import ABCMeta, abstractmethod -from typing import List - - -class IListCodesignIdentitiesCommandFactory(metaclass=ABCMeta): - @abstractmethod - def list_codesign_identities_command(self) -> List[str]: - raise NotImplementedError - - -class ListCodesignIdentitiesCommandFactory(IListCodesignIdentitiesCommandFactory): - _default_command = ["security", "find-identity", "-v", "-p", "codesigning"] - - def __init__(self, command: List[str]): - self.command = command - - @classmethod - def default(cls) -> ListCodesignIdentitiesCommandFactory: - return cls(cls._default_command) - - @classmethod - def override(cls, command: List[str]) -> ListCodesignIdentitiesCommandFactory: - return cls(command) - - def list_codesign_identities_command(self) -> List[str]: - return self.command diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 549e324990..f9fb7a9717 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -5,80 +5,113 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -import argparse +# pyre-strict + import pathlib import sys +from typing import List, Optional + +from tap import Tap from .apple_platform import ApplePlatform from .codesign_bundle import ( AdhocSigningContext, codesign_bundle, - non_adhoc_signing_context, + CodesignedPath, + signing_context_with_profile_selection, ) +from .list_codesign_identities import ListCodesignIdentities from .provisioning_profile_selection import CodeSignProvisioningError -def _args_parser() -> argparse.ArgumentParser: - parser = argparse.ArgumentParser( - description="Tool which code signs the Apple bundle. `Info.plist` file is amended as a part of it." - ) - parser.add_argument( - "--bundle-path", - metavar="", - type=pathlib.Path, - required=True, - help="Absolute path to Apple bundle result.", - ) - parser.add_argument( - "--info-plist", - metavar="", - type=pathlib.Path, - required=True, - help="Bundle relative destination path to Info.plist file if it is present in bundle.", - ) - parser.add_argument( - "--entitlements", - metavar="", - type=pathlib.Path, - required=False, - help="Path to file with entitlements to be used during code signing. If it's not provided the minimal entitlements are going to be generated.", - ) - parser.add_argument( - "--profiles-dir", - metavar="", - type=pathlib.Path, - required=False, - help="Path to directory with provisioning profile files. Required if code signing is not ad-hoc.", - ) - parser.add_argument( - "--ad-hoc", - action="store_true", - help="Perform ad-hoc signing if set.", - ) - parser.add_argument( - "--ad-hoc-codesign-identity", - metavar="", - type=str, - required=False, - help="Codesign identity to use when ad-hoc signing is performed.", - ) - parser.add_argument( - "--platform", - metavar="", - type=ApplePlatform, - required=True, - help="Apple platform for which the bundle was built.", - ) - parser.add_argument( - "--codesign-on-copy", - metavar="", - type=pathlib.Path, - action="append", - required=False, - help="Bundle relative path that should be codesigned prior to result bundle.", - ) - - return parser +class Arguments(Tap): # pyre-ignore[13] ignore uninitialized attributes for typed argument parser + """ + Tool which code signs the Apple bundle. `Info.plist` file is amended as a part of it. 
+ """ + + bundle_path: pathlib.Path + info_plist: pathlib.Path + entitlements: Optional[pathlib.Path] = None + profiles_dir: Optional[pathlib.Path] = None + ad_hoc: bool = False + ad_hoc_codesign_identity: Optional[str] = None + platform: ApplePlatform + codesign_on_copy: Optional[List[pathlib.Path]] = None + fast_provisioning_profile_parsing: bool = False + strict_provisioning_profile_search: bool = False + + def configure(self) -> None: + """ + Configure the arguments. + """ + self.add_argument( + "--bundle-path", + metavar="", + type=pathlib.Path, + required=True, + help="Absolute path to Apple bundle result.", + ) + self.add_argument( + "--info-plist", + metavar="", + type=pathlib.Path, + required=True, + help="Bundle relative destination path to Info.plist file if it is present in bundle.", + ) + self.add_argument( + "--entitlements", + metavar="", + type=pathlib.Path, + required=False, + help="Path to file with entitlements to be used during code signing. If it's not provided the minimal entitlements are going to be generated.", + ) + self.add_argument( + "--profiles-dir", + metavar="", + type=pathlib.Path, + required=False, + help="Path to directory with provisioning profile files. Required if code signing is not ad-hoc.", + ) + self.add_argument( + "--ad-hoc", + action="store_true", + required=False, + help="Perform ad-hoc signing if set.", + ) + self.add_argument( + "--ad-hoc-codesign-identity", + metavar="", + type=str, + required=False, + help="Codesign identity to use when ad-hoc signing is performed.", + ) + self.add_argument( + "--platform", + metavar="", + type=ApplePlatform, + required=True, + help="Apple platform for which the bundle was built.", + ) + self.add_argument( + "--codesign-on-copy", + metavar="", + type=pathlib.Path, + action="append", + required=False, + help="Bundle relative path that should be codesigned prior to result bundle.", + ) + self.add_argument( + "--fast-provisioning-profile-parsing", + action="store_true", + required=False, + help="Uses experimental faster provisioning profile parsing.", + ) + self.add_argument( + "--strict-provisioning-profile-search", + action="store_true", + required=False, + help="Fail code signing if more than one matching profile found.", + ) # Add emoji to beginning of actionable error message so it stands out more. @@ -86,8 +119,8 @@ def decorate_error_message(message: str) -> str: return " ".join(["❗️", message]) -def _main(): - args = _args_parser().parse_args() +def _main() -> None: + args = Arguments().parse_args() try: if args.ad_hoc: signing_context = AdhocSigningContext( @@ -97,20 +130,37 @@ def _main(): assert ( args.profiles_dir ), "Path to directory with provisioning profile files should be set when signing is not ad-hoc." 
- signing_context = non_adhoc_signing_context( + non_optional_profiles_dir = args.profiles_dir + signing_context = signing_context_with_profile_selection( info_plist_source=args.bundle_path / args.info_plist, info_plist_destination=args.info_plist, - provisioning_profiles_dir=args.profiles_dir, + provisioning_profiles_dir=non_optional_profiles_dir, entitlements_path=args.entitlements, + list_codesign_identities=ListCodesignIdentities.default(), platform=args.platform, + should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, + strict_provisioning_profile_search=args.strict_provisioning_profile_search, ) + + bundle_path = CodesignedPath( + path=args.bundle_path, entitlements=args.entitlements, flags=[] + ) + codesign_on_copy_paths = ( + [ + CodesignedPath( + path=bundle_path.path / path, entitlements=None, flags=[] + ) + for path in args.codesign_on_copy + ] + if args.codesign_on_copy + else [] + ) + codesign_bundle( - bundle_path=args.bundle_path, + bundle_path=bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=args.codesign_on_copy or [], - codesign_args=[], + codesign_on_copy_paths=codesign_on_copy_paths, ) except CodeSignProvisioningError as e: print(decorate_error_message(str(e)), file=sys.stderr) diff --git a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py index 90ffe1c212..2ed16222bf 100644 --- a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py +++ b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import os import plistlib import tempfile @@ -15,6 +17,7 @@ from .provisioning_profile_metadata import ProvisioningProfileMetadata + # Buck v1 corresponding code is in `ProvisioningProfileCopyStep::execute` in `ProvisioningProfileCopyStep.java` def prepare_code_signing_entitlements( entitlements_path: Optional[Path], diff --git a/prelude/apple/tools/code_signing/prepare_info_plist.py b/prelude/apple/tools/code_signing/prepare_info_plist.py index 8130a949e4..a5e7104e64 100644 --- a/prelude/apple/tools/code_signing/prepare_info_plist.py +++ b/prelude/apple/tools/code_signing/prepare_info_plist.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
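Under the new `codesign_bundle` signature wired up above, every signable location travels as a `CodesignedPath` carrying its own entitlements and flags. A hypothetical ad-hoc invocation mirroring `_main` (paths invented):

    from pathlib import Path

    bundle = CodesignedPath(path=Path("/out/App.app"), entitlements=None, flags=[])
    on_copy = [
        CodesignedPath(path=bundle.path / rel, entitlements=None, flags=[])
        for rel in [Path("Frameworks/SomeDep.framework")]
    ]
    codesign_bundle(
        bundle_path=bundle,
        signing_context=AdhocSigningContext(),  # identity defaults to "-"
        platform=ApplePlatform.ios_device,
        codesign_on_copy_paths=on_copy,
    )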
+# pyre-strict + import os import tempfile from pathlib import Path @@ -15,6 +17,7 @@ from .info_plist_metadata import InfoPlistMetadata from .provisioning_profile_metadata import ProvisioningProfileMetadata + # Buck v1 corresponding code is in `ProvisioningProfileCopyStep::execute` in `ProvisioningProfileCopyStep.java` def prepare_info_plist( info_plist: Path, diff --git a/prelude/apple/tools/code_signing/prepare_info_plist_test.py b/prelude/apple/tools/code_signing/prepare_info_plist_test.py index dadaca18f0..bf4d0061d4 100644 --- a/prelude/apple/tools/code_signing/prepare_info_plist_test.py +++ b/prelude/apple/tools/code_signing/prepare_info_plist_test.py @@ -53,7 +53,7 @@ def test_app_id_set_for_non_watchos_apps(self): info_plist = { "CFBundleIdentifier": "com.facebook.test", "CFBundlePackageType": "APPL", - "WKWatchKitApp": True, + "WKApplication": True, } info_plist_path, info_plist_metadata = _write_info_plist( info_plist, tmp_dir, "Info.plist" diff --git a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py index a94b1da8d6..f0b6bd7719 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py @@ -5,25 +5,34 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from abc import ABCMeta, abstractmethod from pathlib import Path from typing import List, Optional, Type, TypeVar from .apple_platform import ApplePlatform +from .identity import CodeSigningIdentity + from .provisioning_profile_metadata import ProvisioningProfileMetadata -META_IOS_DEVELOPER_CERTIFICATE_LINK: str = "https://www.internalfb.com/intern/qa/5198/how-do-i-get-the-fb-ios-developer-certificate" +META_IOS_DEVELOPER_CERTIFICATE_LINK: str = ( + "https://www.internalfb.com/intern/qa/5198/how-do-i-get-the-fb-ios-developer-certificate" +) META_IOS_PROVISIONING_PROFILES_LINK: str = ( "https://www.internalfb.com/intern/apple/download-provisioning-profile/" ) -META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK: str = "https://www.internalfb.com/intern/wiki/Ios-first-steps/running-on-device/#2-register-your-device-i" +META_IOS_PROVISIONING_PROFILES_COMMAND: str = "arc download-provisioning-profiles" +META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK: str = ( + "https://www.internalfb.com/intern/wiki/Ios-first-steps/running-on-device/#2-register-your-device-i" +) class IProvisioningProfileDiagnostics(metaclass=ABCMeta): profile: ProvisioningProfileMetadata - def __init__(self, profile: ProvisioningProfileMetadata): + def __init__(self, profile: ProvisioningProfileMetadata) -> None: self.profile = profile @abstractmethod @@ -40,7 +49,7 @@ def __init__( profile: ProvisioningProfileMetadata, team_id: str, team_id_constraint: str, - ): + ) -> None: super().__init__(profile) self.team_id = team_id self.team_id_constraint = team_id_constraint @@ -58,7 +67,7 @@ def __init__( profile: ProvisioningProfileMetadata, bundle_id: str, bundle_id_constraint: str, - ): + ) -> None: super().__init__(profile) self.bundle_id = bundle_id self.bundle_id_constraint = bundle_id_constraint @@ -74,7 +83,7 @@ def __init__( self, profile: ProvisioningProfileMetadata, bundle_id_match_length: int, - ): + ) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length @@ -91,7 +100,7 @@ def __init__( profile: ProvisioningProfileMetadata, bundle_id_match_length: int, platform_constraint: ApplePlatform, - ): + 
) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length self.platform_constraint = platform_constraint @@ -112,7 +121,7 @@ def __init__( bundle_id_match_length: int, mismatched_key: str, mismatched_value: str, - ): + ) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length self.mismatched_key = mismatched_key @@ -129,7 +138,7 @@ def __init__( self, profile: ProvisioningProfileMetadata, bundle_id_match_length: int, - ): + ) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length @@ -147,6 +156,7 @@ def interpret_provisioning_profile_diagnostics( diagnostics: List[IProvisioningProfileDiagnostics], bundle_id: str, provisioning_profiles_dir: Path, + identities: List[CodeSigningIdentity], log_file_path: Optional[Path] = None, ) -> str: if not diagnostics: @@ -182,10 +192,16 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: ) if mismatch := find_mismatch(DeveloperCertificateMismatch): + identities_description = ( + "WARNING: NO SIGNING IDENTITIES FOUND!" + if len(identities) == 0 + else f"List of signing identities: `{identities}`." + ) return "".join( [ header, f"The provisioning profile `{mismatch.profile.file_path.name}` satisfies all constraints, but no matching certificates were found in your keychain. ", + identities_description, f"Please download and install the latest certificate from {META_IOS_DEVELOPER_CERTIFICATE_LINK}.", footer, ] ) @@ -197,7 +213,8 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: header, f"The provisioning profile `{mismatch.profile.file_path.name}` is the best match, but it doesn't contain all the needed entitlements. ", f"Expected entitlement item with key `{mismatch.mismatched_key}` and value `{mismatch.mismatched_value}` is missing. ", - f"Usually that means the application entitlements were changed recently, provisioning profile was updated and you need to download & install the latest version of provisioning profile for Bundle ID `{bundle_id}` from {META_IOS_PROVISIONING_PROFILES_LINK}", + f"Usually that means the application entitlements were changed recently, provisioning profile was updated and you need to download & install the latest version of provisioning profile for Bundle ID `{bundle_id}`.", + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` or download from {META_IOS_PROVISIONING_PROFILES_LINK}", footer, ] ) @@ -218,7 +235,8 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: [ header, f"The provisioning profile `{mismatch.profile.file_path.name}` is the best match; however, it has expired. ", - f"Please download and install a valid profile from {META_IOS_PROVISIONING_PROFILES_LINK}", + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` to get the latest provisioning profiles.", + f"Alternatively, please download and install a valid profile from {META_IOS_PROVISIONING_PROFILES_LINK}", footer, ] ) @@ -227,7 +245,8 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: [ header, f"No provisioning profile matching the Bundle ID `{bundle_id}` was found. 
", - f"Please download and install the appropriate profile from {META_IOS_PROVISIONING_PROFILES_LINK}", + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` to get the latest provisioning profiles.", + f"Alternatively, please download and install the appropriate profile from {META_IOS_PROVISIONING_PROFILES_LINK}", footer, ] ) diff --git a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py index 331ded4e7e..733b32e6a4 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py @@ -5,13 +5,15 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations import hashlib from dataclasses import dataclass from datetime import datetime from pathlib import Path -from typing import Any, Dict, Set +from typing import Any, Dict, FrozenSet, Set from apple.tools.plistlib_utils import detect_format_and_loads @@ -25,18 +27,20 @@ class ProvisioningProfileMetadata: uuid: str # Naïve object with ignored timezone, see https://bugs.python.org/msg110249 expiration_date: datetime - platforms: Set[str] + platforms: FrozenSet[str] # Let's agree they are uppercased - developer_certificate_fingerprints: Set[str] + developer_certificate_fingerprints: FrozenSet[str] entitlements: Dict[str, Any] - _mergeable_entitlements_keys = { - "application-identifier", - "beta-reports-active", - "get-task-allow", - "com.apple.developer.aps-environment", - "com.apple.developer.team-identifier", - } + _mergeable_entitlements_keys: FrozenSet[str] = frozenset( + [ + "application-identifier", + "beta-reports-active", + "get-task-allow", + "com.apple.developer.aps-environment", + "com.apple.developer.team-identifier", + ] + ) # See `ProvisioningProfileMetadataFactory::getAppIDFromEntitlements` from `ProvisioningProfileMetadataFactory.java` in Buck v1 def get_app_id(self) -> AppId: @@ -73,7 +77,20 @@ def from_provisioning_profile_file_content( file_path=file_path, uuid=root["UUID"], expiration_date=root["ExpirationDate"], - platforms=set(root["Platform"]), - developer_certificate_fingerprints=developer_certificate_fingerprints, + platforms=frozenset(root["Platform"]), + developer_certificate_fingerprints=frozenset( + developer_certificate_fingerprints + ), entitlements=root["Entitlements"], ) + + def __hash__(self) -> int: + return hash( + ( + self.file_path, + self.uuid, + self.expiration_date, + self.platforms, + self.developer_certificate_fingerprints, + ) + ) diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection.py b/prelude/apple/tools/code_signing/provisioning_profile_selection.py index 421f2d5678..b7feac1df4 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_selection.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_selection.py @@ -5,8 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import datetime import logging +from collections import defaultdict from dataclasses import dataclass from typing import Any, cast, Dict, List, Optional, Tuple @@ -25,7 +28,7 @@ ) from .provisioning_profile_metadata import ProvisioningProfileMetadata -_LOGGER = logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) class CodeSignProvisioningError(Exception): @@ -45,8 +48,8 @@ def _parse_team_id_from_entitlements( def _matches_or_array_is_subset_of( entitlement_name: str, - expected_value: Any, - actual_value: Any, + expected_value: object, + actual_value: object, platform: ApplePlatform, ) -> bool: if expected_value is None: @@ -98,7 +101,7 @@ def _check_entitlements_match( ) -> Tuple[bool, Optional[EntitlementsMismatch]]: if expected_entitlements is None: return (True, None) - for (key, value) in expected_entitlements.items(): + for key, value in expected_entitlements.items(): profile_entitlement = profile.entitlements.get(key) if (key not in _IGNORE_MISMATCH_ENTITLEMENTS_KEYS) and ( not _matches_or_array_is_subset_of( @@ -133,6 +136,33 @@ def _check_developer_certificates_match( ) +def _make_multiple_matching_profiles_message( + profiles: list[ProvisioningProfileMetadata], + strict_search: bool, +) -> str: + messages = [f"Found MULTIPLE matching profiles: {len(profiles)}"] + messages += [ + f" Matching Profile = UUID:{profile.uuid}, file path: {profile.file_path}" + for profile in profiles + ] + + if strict_search: + messages += [ + "Strict provisioning profile search is ENABLED, build will FAIL due to ambiguous provisioning profile search results.", + "To resolve the problem, ensure only a single profile matches.", + "To unblock, you have two options:", + "Option 1: Disable strict provisioning profile search for the targets failing to build.", + " If the target failing to build is an `apple_bundle()`, set the `strict_provisioning_profile_search` attribute to `False`.", + " If the target failing to build is produced by `ios_binary()`, set the `bundle_strict_provisioning_profile_search` attribute to `False`.", + " You can commit such a change, so that the issue can be investigated without blocking other developers.", + " NB: This is a TEMPORARY WORKAROUND, as it only disables the strict checking, it does not resolve the ambiguity.", + "Option 2: Pass `--config apple.strict_provisioning_profile_search=false` as part of your build command.", + " DO NOT COMMIT such a change by adding this to any CI configs.", + ] + + return "\n".join(messages) + + @dataclass class SelectedProvisioningProfileInfo: profile: ProvisioningProfileMetadata @@ -146,6 +176,7 @@ def select_best_provisioning_profile( provisioning_profiles: List[ProvisioningProfileMetadata], entitlements: Optional[Dict[str, Any]], platform: ApplePlatform, + strict_search: bool, ) -> Tuple[ Optional[SelectedProvisioningProfileInfo], List[IProvisioningProfileDiagnostics] ]: @@ -170,7 +201,7 @@ def select_best_provisioning_profile( result = None # Used for error messages - diagnostics = [] + diagnostics: List[IProvisioningProfileDiagnostics] = [] def log_mismatched_profile(mismatch: IProvisioningProfileDiagnostics) -> None: diagnostics.append(mismatch) @@ -178,6 +209,8 @@ def log_mismatched_profile(mismatch: IProvisioningProfileDiagnostics) -> None: f"Skipping provisioning profile `{mismatch.profile.file_path.name}`: {mismatch.log_message()}" ) + profiles_for_match_length = defaultdict(list) + for profile in provisioning_profiles: app_id = profile.get_app_id() if maybe_team_id_constraint and 
maybe_team_id_constraint != app_id.team_id: @@ -245,8 +278,41 @@ def log_mismatched_profile(mismatch: IProvisioningProfileDiagnostics) -> None: log_mismatched_profile(cast(DeveloperCertificateMismatch, mismatch)) continue + _LOGGER.info( + f"Matching provisioning profile `{profile.file_path.name}` with score {current_match_length}" + ) + + profiles_for_match_length[current_match_length] += [profile] + if current_match_length > best_match_length: best_match_length = current_match_length result = SelectedProvisioningProfileInfo(profile, certificate) + all_matching_profiles = ( + profiles_for_match_length[best_match_length] if result else [] + ) + if len(all_matching_profiles) > 1: + multiple_profiles_message = _make_multiple_matching_profiles_message( + all_matching_profiles, + strict_search, + ) + _LOGGER.info(multiple_profiles_message) + if strict_search: + raise CodeSignProvisioningError(multiple_profiles_message) + + if result: + _LOGGER.info( + ( + f"Found matching provisioning profile and identity\n" + f" Selected Identity: {result.identity}\n" + f" Provisioning Profile: `{result.profile.file_path.name}`\n" + f" UUID: {result.profile.uuid}\n" + f" File: {result.profile.file_path}\n" + f" Expiration: {result.profile.expiration_date}\n" + f" Platforms: {result.profile.platforms}\n" + f" Fingerprints: {result.profile.developer_certificate_fingerprints}\n" + f" Entitlements: {result.profile.entitlements}" + ) + ) + return result, diagnostics diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py b/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py index 5a06e61065..72d17f23f5 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py @@ -55,6 +55,7 @@ def test_expired_profiles_are_ignored(self): [expired_provisioning_profile], {}, ApplePlatform.ios_device, + False, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( @@ -70,9 +71,52 @@ def test_expired_profiles_are_ignored(self): [fresh_provisioning_profiles], {}, ApplePlatform.ios_device, + False, ) self.assertIsNotNone(selected) + def test_multiple_matching_profiles_strict_mode(self): + info_plist = InfoPlistMetadata("com.company.application", None, False) + identity = CodeSigningIdentity( + "fingerprint", + "name", + ) + first = ProvisioningProfileMetadata( + Path("/foo.first"), + "00000000-0000-0000-0000-000000000000", + datetime.max, + {"iOS"}, + {identity.fingerprint}, + {"application-identifier": "AAAAAAAAAA.*"}, + ) + second = ProvisioningProfileMetadata( + Path("/foo.second"), + "00000000-0000-0000-0000-000000000000", + datetime.max, + {"iOS"}, + {identity.fingerprint}, + {"application-identifier": "AAAAAAAAAA.*"}, + ) + profiles = [ + first, + second, + ] + + selection_failed = False + try: + _, _ = select_best_provisioning_profile( + info_plist, + [identity], + profiles, + {"keychain-access-groups": ["AAAAAAAAAA.*"]}, + ApplePlatform.ios_device, + True, + ) + except Exception: + selection_failed = True + + self.assertTrue(selection_failed) + def test_prefix_override(self): info_plist = InfoPlistMetadata("com.company.application", None, False) identity = CodeSigningIdentity( @@ -104,6 +148,7 @@ def test_prefix_override(self): profiles, {"keychain-access-groups": ["AAAAAAAAAA.*"]}, ApplePlatform.ios_device, + False, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -153,6 +198,7 @@ def 
test_entitlement_keys_are_matched(self): "com.apple.security.application-groups": ["foo", "bar"], }, ApplePlatform.ios_device, + False, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -165,6 +211,7 @@ def test_entitlement_keys_are_matched(self): "com.apple.security.application-groups": ["foo", "bar"], }, ApplePlatform.ios_device, + False, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -177,6 +224,7 @@ def test_entitlement_keys_are_matched(self): "com.apple.security.application-groups": ["foo", "xxx"], }, ApplePlatform.ios_device, + False, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( @@ -222,6 +270,7 @@ def test_only_profiles_containing_valid_fingerprints_are_matched(self): profiles, {}, ApplePlatform.ios_device, + False, ) self.assertEqual( selected, SelectedProvisioningProfileInfo(expected, valid_identity) @@ -232,6 +281,7 @@ def test_only_profiles_containing_valid_fingerprints_are_matched(self): [unexpected], {}, ApplePlatform.ios_device, + False, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( @@ -274,6 +324,7 @@ def test_matches_specific_app(self): profiles, {}, ApplePlatform.ios_device, + False, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -283,6 +334,7 @@ def test_matches_specific_app(self): reversed(profiles), {}, ApplePlatform.ios_device, + False, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -308,6 +360,7 @@ def test_matches_wildcard(self): [expected], None, ApplePlatform.ios_device, + False, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -340,6 +393,7 @@ def test_force_included_app_entitlements(self): "aps-environment": "production", }, ApplePlatform.ios_device, + False, ) self.assertIsNotNone(selected) @@ -371,6 +425,7 @@ def test_unmatched_app_entitlement(self): "com.made.up.entitlement": "buck", }, ApplePlatform.ios_device, + False, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( diff --git a/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py b/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py index 0d4d753623..c6f09fce8c 100644 --- a/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py +++ b/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from abc import ABCMeta, abstractmethod from pathlib import Path from typing import List, Union @@ -27,6 +29,7 @@ class DefaultReadProvisioningProfileCommandFactory( "der", "-verify", "-noverify", + "-nosigs", "-in", ] diff --git a/prelude/apple/tools/defs.bzl b/prelude/apple/tools/defs.bzl index 03e8f4233f..55f20fa397 100644 --- a/prelude/apple/tools/defs.bzl +++ b/prelude/apple/tools/defs.bzl @@ -5,16 +5,15 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
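The selection and test hunks above introduce a strict mode that fails the build when several profiles tie for the best match score instead of picking one arbitrarily. A hypothetical sketch of that bucket-by-score pattern, not the actual implementation:

```python
# Hypothetical sketch: group candidates by match score, then refuse to
# pick arbitrarily among equally good matches when strict mode is on.
from collections import defaultdict
from typing import Dict, List, Tuple


def pick(scored: List[Tuple[str, int]], strict: bool) -> str:
    buckets: Dict[int, List[str]] = defaultdict(list)
    for name, score in scored:
        buckets[score].append(name)
    winners = buckets[max(buckets)]
    if strict and len(winners) > 1:
        raise RuntimeError(f"ambiguous profiles: {winners}")
    return winners[0]


assert pick([("foo.first", 2), ("foo.second", 1)], strict=True) == "foo.first"
```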
-load( - "@fbsource//tools/build_defs:python_platform.bzl", - "set_platform_decorator_for_python", -) +# @oss-disable: load("@fbsource//tools/build_defs:python_platform.bzl", "set_platform_decorator_for_python") load("@prelude//:native.bzl", _native = "native") +set_platform_decorator_for_python = lambda **kwargs: kwargs # @oss-enable + def meta_python_test(name, **kwargs): # Set the platform attributes as needed for proper exec platform resolution kwargs = set_platform_decorator_for_python( - set_python_constraint_overrides = True, + # @oss-disable: set_python_constraint_overrides = True, **kwargs ) diff --git a/prelude/apple/tools/dry_codesign_tool.py b/prelude/apple/tools/dry_codesign_tool.py index 5e445b3acb..38a34e7996 100644 --- a/prelude/apple/tools/dry_codesign_tool.py +++ b/prelude/apple/tools/dry_codesign_tool.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import plistlib import shutil @@ -48,7 +50,7 @@ def _args_parser() -> argparse.ArgumentParser: return parser -def _main(): +def _main() -> None: args = _args_parser().parse_args() content = { # This is always empty string if you check `DryCodeSignStep` class usages in buck1 diff --git a/prelude/apple/tools/info_plist_processor/BUCK.v2 b/prelude/apple/tools/info_plist_processor/BUCK.v2 index dd48ee60ae..5cae7b5e8c 100644 --- a/prelude/apple/tools/info_plist_processor/BUCK.v2 +++ b/prelude/apple/tools/info_plist_processor/BUCK.v2 @@ -1,4 +1,9 @@ -# @oss-disable: load("@prelude//apple/tools/defs.bzl", "meta_python_test") +load("@prelude//utils:source_listing.bzl", "source_listing") +load("@prelude//apple/tools/defs.bzl", "meta_python_test") + +oncall("build_infra") + +source_listing() python_library( name = "preprocess", @@ -14,8 +19,8 @@ python_library( python_library( name = "process", srcs = ["process.py"], - deps = ["prelude//apple/tools:plistlib_utils"], visibility = ["PUBLIC"], + deps = ["prelude//apple/tools:plistlib_utils"], ) # @oss-disable: meta_python_test( @@ -27,9 +32,9 @@ python_library( python_binary( name = "tool", main = "main.py", + visibility = ["PUBLIC"], deps = [ ":preprocess", ":process", ], - visibility = ["PUBLIC"], ) diff --git a/prelude/apple/tools/info_plist_processor/main.py b/prelude/apple/tools/info_plist_processor/main.py index b1d3e6b670..995b385772 100644 --- a/prelude/apple/tools/info_plist_processor/main.py +++ b/prelude/apple/tools/info_plist_processor/main.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse from contextlib import ExitStack from enum import Enum @@ -19,7 +21,9 @@ class _SubcommandName(str, Enum): process = "process" -def _create_preprocess_subparser(subparsers): +def _create_preprocess_subparser( + subparsers: "argparse._SubParsersAction[argparse.ArgumentParser]", +) -> None: parser = subparsers.add_parser( _SubcommandName.preprocess.value, description="Sub-command to expand macro variables in parametrized Info.plist files. 
It's the Buck v2 equivalent of what `FindAndReplaceStep` and `InfoPlistSubstitution` do.", @@ -53,7 +57,9 @@ def _create_preprocess_subparser(subparsers): ) -def _create_process_subparser(subparsers): +def _create_process_subparser( + subparsers: "argparse._SubParsersAction[argparse.ArgumentParser]", +) -> None: parser = subparsers.add_parser( _SubcommandName.process.value, description="Sub-command to do the final processing of the Info.plist before it's copied to the application bundle. It's the Buck v2 equivalent of what `PlistProcessStep` does in v1.", @@ -92,7 +98,7 @@ def _create_process_subparser(subparsers): ) -def _parse_args(): +def _parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description="Tool to process Info.plist file before it is placed into the bundle. It's the Buck v2 equivalent of what `AppleInfoPlist` build rule from v1 does." ) @@ -102,7 +108,7 @@ def _parse_args(): return parser.parse_args() -def main(): +def main() -> None: args = _parse_args() if args.subcommand_name == _SubcommandName.preprocess: with ExitStack() as stack: diff --git a/prelude/apple/tools/info_plist_processor/preprocess.py b/prelude/apple/tools/info_plist_processor/preprocess.py index 18b8e71a9c..937959fe83 100644 --- a/prelude/apple/tools/info_plist_processor/preprocess.py +++ b/prelude/apple/tools/info_plist_processor/preprocess.py @@ -5,9 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json import re from enum import Enum +from typing import Dict, TextIO class _ReGroupName(str, Enum): @@ -17,15 +20,19 @@ class _ReGroupName(str, Enum): closeparen = "closeparen" -_re_string = "\\$(?P<{openparen}>[\\{{\\(])(?P<{variable}>[^\\}}\\):]+)(?::(?P<{modifier}>[^\\}}\\)]+))?(?P<{closeparen}>[\\}}\\)])".format( - openparen=_ReGroupName.openparen, - variable=_ReGroupName.variable, - modifier=_ReGroupName.modifier, - closeparen=_ReGroupName.closeparen, +_re_string: str = ( + "\\$(?P<{openparen}>[\\{{\\(])(?P<{variable}>[^\\}}\\):]+)(?::(?P<{modifier}>[^\\}}\\)]+))?(?P<{closeparen}>[\\}}\\)])".format( + openparen=_ReGroupName.openparen, + variable=_ReGroupName.variable, + modifier=_ReGroupName.modifier, + closeparen=_ReGroupName.closeparen, + ) ) -def _make_substitution_dict(substitutions_json_file, product_name): +def _make_substitution_dict( + substitutions_json_file: TextIO, product_name: str +) -> Dict[str, str]: result = { "EXECUTABLE_NAME": product_name, "PRODUCT_NAME": product_name, @@ -36,7 +43,9 @@ def _make_substitution_dict(substitutions_json_file, product_name): return result -def _process_line(line, pattern, substitutions): +def _process_line( + line: str, pattern: re.Pattern[str], substitutions: Dict[str, str] +) -> str: result = line pos = 0 substituted_keys = set() @@ -62,7 +71,12 @@ def _process_line(line, pattern, substitutions): return result -def preprocess(input_file, output_file, substitutions_file, product_name): +def preprocess( + input_file: TextIO, + output_file: TextIO, + substitutions_file: TextIO, + product_name: str, +) -> None: pattern = re.compile(_re_string) substitutions = _make_substitution_dict(substitutions_file, product_name) for line in input_file: diff --git a/prelude/apple/tools/info_plist_processor/process.py b/prelude/apple/tools/info_plist_processor/process.py index 91f1d5d386..178bcbfc22 100644 --- a/prelude/apple/tools/info_plist_processor/process.py +++ b/prelude/apple/tools/info_plist_processor/process.py @@ -5,12 +5,15 @@ # License, Version 2.0 found in 
the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json import plistlib -from typing import Any, Dict, IO, Optional +from typing import Any, Dict, IO, Optional, TextIO from apple.tools.plistlib_utils import detect_format_and_load + # Corresponding v1 code is contained in `com/facebook/buck/apple/PlistProcessStep.java`, `PlistProcessStep::execute` method. def _merge_plist_dicts( source: Dict[str, Any], destination: Dict[str, Any], override: bool = False @@ -26,12 +29,12 @@ def _merge_plist_dicts( def process( - input_file: IO, - output_file: IO, - override_input_file: Optional[IO] = None, + input_file: IO[bytes], + output_file: IO[bytes], + override_input_file: Optional[IO[bytes]] = None, additional_keys: Optional[Dict[str, Any]] = None, - additional_keys_file: Optional[IO] = None, - override_keys_file: Optional[IO] = None, + additional_keys_file: Optional[TextIO] = None, + override_keys_file: Optional[TextIO] = None, output_format: plistlib.PlistFormat = plistlib.FMT_BINARY, ) -> None: root = detect_format_and_load(input_file) diff --git a/prelude/apple/tools/info_plist_processor/process_test.py b/prelude/apple/tools/info_plist_processor/process_test.py index bfd2a43137..d53829d47b 100644 --- a/prelude/apple/tools/info_plist_processor/process_test.py +++ b/prelude/apple/tools/info_plist_processor/process_test.py @@ -56,7 +56,7 @@ def test_additional_input_given_no_keys_conflict(self): process(input_file, output_file, override_input_file) output_file.seek(0) root = plistlib.load(output_file) - self.assertEquals(root, {"foo": "bar", "baz": "qux"}) + self.assertEqual(root, {"foo": "bar", "baz": "qux"}) def test_additional_input_given_keys_conflict(self): input_file = io.BytesIO( @@ -106,7 +106,7 @@ def test_additional_input_given_keys_conflict(self): process(input_file, output_file, override_input_file) output_file.seek(0) root = plistlib.load(output_file) - self.assertEquals( + self.assertEqual( root, {"foo": "baz", "qux": {"a": "z", "b": "c", "c": "x"}, "foobar": "zanzibar"}, ) @@ -128,7 +128,7 @@ def test_additional_keys(self): process(input_file, output_file, additional_keys=additional_keys) output_file.seek(0) root = plistlib.load(output_file) - self.assertEquals(root, {"foo": "bar", "baz": "qux"}) + self.assertEqual(root, {"foo": "bar", "baz": "qux"}) def test_additional_keys_do_not_override(self): input_file = io.BytesIO( @@ -147,7 +147,7 @@ def test_additional_keys_do_not_override(self): process(input_file, output_file, additional_keys=additional_keys) output_file.seek(0) root = plistlib.load(output_file) - self.assertEquals(root, {"foo": "bar"}) + self.assertEqual(root, {"foo": "bar"}) def test_additional_keys_from_file(self): input_file = io.BytesIO( @@ -166,7 +166,7 @@ def test_additional_keys_from_file(self): process(input_file, output_file, additional_keys_file=additional_keys_file) output_file.seek(0) root = plistlib.load(output_file) - self.assertEquals(root, {"foo": "bar", "baz": "qux"}) + self.assertEqual(root, {"foo": "bar", "baz": "qux"}) def test_override_keys_from_file(self): input_file = io.BytesIO( @@ -185,4 +185,4 @@ def test_override_keys_from_file(self): process(input_file, output_file, override_keys_file=override_keys_file) output_file.seek(0) root = plistlib.load(output_file) - self.assertEquals(root, {"foo": "baz"}) + self.assertEqual(root, {"foo": "baz"}) diff --git a/prelude/apple/tools/ipa_package_maker.py b/prelude/apple/tools/ipa_package_maker.py index 710872b94f..ac3275d67c 100644 --- 
a/prelude/apple/tools/ipa_package_maker.py +++ b/prelude/apple/tools/ipa_package_maker.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import os import shutil @@ -14,18 +16,16 @@ from pathlib import Path -from typing import List, Optional - from apple.tools.re_compatibility_utils.writable import make_dir_recursively_writable -def _copy_ipa_contents(ipa_contents_dir: Path, output_dir: Path): +def _copy_ipa_contents(ipa_contents_dir: Path, output_dir: Path) -> None: if os.path.exists(output_dir): shutil.rmtree(output_dir, ignore_errors=False) shutil.copytree(ipa_contents_dir, output_dir, symlinks=True, dirs_exist_ok=False) -def _delete_empty_SwiftSupport_dir(output_dir: Path): +def _delete_empty_SwiftSupport_dir(output_dir: Path) -> None: swiftSupportDir = output_dir / "SwiftSupport" if not swiftSupportDir.exists(): return @@ -44,9 +44,7 @@ def _package_ipa_contents( ipa_contents_dir: Path, ipa_output_path: Path, compression_level: int, - validator: Optional[Path], - validator_args: List[str], -): +) -> None: with tempfile.TemporaryDirectory() as processed_package_dir: processed_package_dir_path = Path(processed_package_dir) _copy_ipa_contents(ipa_contents_dir, processed_package_dir_path) @@ -62,18 +60,6 @@ def _package_ipa_contents( # and mirror behavior which Apple expects, so we're future-proof. make_dir_recursively_writable(str(processed_package_dir_path)) - if validator: - validation_command = [ - str(validator), - "--ipa-contents-dir", - str(processed_package_dir_path), - *validator_args, - ] - subprocess.run( - validation_command, - check=True, - ) - with open(ipa_output_path, "wb") as ipa_file: zip_cmd = ["zip", "-X", "-r", f"-{compression_level}", "-", "."] subprocess.run( @@ -86,7 +72,7 @@ def _package_ipa_contents( ) -def main(): +def main() -> None: parser = argparse.ArgumentParser(description="Tool to make an .ipa package file.") parser.add_argument( "--ipa-contents-dir", @@ -106,21 +92,12 @@ def main(): required=True, help="The compression level to use for 'zip'.", ) - parser.add_argument( - "--validator", - type=Path, - required=False, - help="A path to an executable which will be passed the path to the IPA contents dir to validate", - ) - parser.add_argument("--validator-args", required=False, default=[], action="append") args = parser.parse_args() _package_ipa_contents( args.ipa_contents_dir, args.ipa_output_path, args.compression_level, - args.validator, - args.validator_args, ) diff --git a/prelude/apple/tools/make_modulemap.py b/prelude/apple/tools/make_modulemap.py index f92f037df3..b73d43e2ed 100755 --- a/prelude/apple/tools/make_modulemap.py +++ b/prelude/apple/tools/make_modulemap.py @@ -81,7 +81,7 @@ def _write_submodules( module = root_module for i, component in enumerate(h.split(os.sep)): if i == 0 and component == name: - # The common case is we have a singe header path prefix that matches the module name. + # The common case is we have a single header path prefix that matches the module name. # In this case we add the headers directly to the root module. pass else: diff --git a/prelude/apple/tools/make_swift_interface.py b/prelude/apple/tools/make_swift_interface.py new file mode 100755 index 0000000000..13c91db7e4 --- /dev/null +++ b/prelude/apple/tools/make_swift_interface.py @@ -0,0 +1,282 @@ +#!/usr/bin/env fbpython +# Copyright (c) Meta Platforms, Inc. and affiliates. 
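The `ipa_package_maker.py` hunk above removes the validator hook but keeps the packaging step itself. A minimal sketch of that step, assuming a `zip` binary on `PATH` (function and argument names are illustrative):

```python
# Sketch: zip the staged contents recursively from inside the directory
# and stream the archive straight into the output file.
import subprocess
from pathlib import Path


def package_ipa(contents_dir: Path, ipa_out: Path, compression_level: int = 9) -> None:
    with open(ipa_out, "wb") as ipa_file:
        subprocess.run(
            ["zip", "-X", "-r", f"-{compression_level}", "-", "."],
            cwd=contents_dir,
            stdout=ipa_file,
            check=True,
        )
```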
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Remaps swiftc arguments to be usable by swift-ide-test, and invokes +swift-ide-test with said arguments. +""" + +import argparse +import dataclasses +import optparse +import pathlib +import subprocess as proc +import sys + +from contextlib import contextmanager +from typing import Iterable, List, Optional + + +@dataclasses.dataclass +class SwiftIDETestArguments(object): + sdk: Optional[str] + target: Optional[str] + xcc: Iterable[str] + defines: Iterable[str] + frameworks: Iterable[str] + includes: Iterable[str] + resource_dir: str + enable_cxx_interop: bool + cxx_interoperability_mode: Optional[str] + upcoming_features: Iterable[str] + explicit_swift_module_map: Optional[str] + swift_version: Optional[str] + + def to_args(self) -> List[str]: + args = [] + if self.target: + args.append("--target") + args.append(self.target) + + if self.sdk: + args.append("--sdk") + args.append(self.sdk) + + for define in self.defines: + args.append("-D") + args.append(define) + + for include in self.includes: + args.append("-I") + args.append(include) + + for framework in self.frameworks: + args.append("-F") + args.append(framework) + + for xcc in self.xcc: + args.append("--Xcc") + args.append(xcc) + + args.append("--resource-dir") + args.append(self.resource_dir) + + if self.enable_cxx_interop: + args.append("-enable-experimental-cxx-interop") + + if self.cxx_interoperability_mode: + # swift-ide-test only understands -enable-experimental-cxx-interop, + # not the versioned -cxx-interoperability-mode=. + args.append("-enable-experimental-cxx-interop") + + if self.upcoming_features: + for feature in self.upcoming_features: + args.append("-enable-upcoming-feature") + args.append(feature) + + if self.explicit_swift_module_map: + args.append("--explicit-swift-module-map-file") + args.append(self.explicit_swift_module_map) + + if self.swift_version: + args.append("-swift-version") + args.append(self.swift_version) + return args + + +class LongSingleDashOpt(optparse.Option): + """ + This Option subclass allows for long arguments specified with single dashes, + e.g. -sdk (the default implementation only allows long options with two + dashes) + """ + + def _set_opt_strings(self, opts): + for opt in opts: + if len(opt) < 2: + raise optparse.OptionError( + "invalid option string %r: " + "must be at least two characters long" % opt, + self, + ) + elif len(opt) == 2: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + + +class IgnoreUnknownLongSingleDashOptParser(optparse.OptionParser): + """ + This OptionParser subclass allows for + (a) long arguments specified with single dashes (e.g. -sdk) + (b) ignoring unknown arguments + The default OptionParser doesn't have either of these behaviors. 
+ """ + + def __init__(self, *args, **kwargs): + kwargs["option_class"] = LongSingleDashOpt + super().__init__(*args, **kwargs) + + def _process_args(self, largs, rargs, values): + while rargs: + try: + arg = rargs[0] + if arg == "--": + del rargs[0] + return + elif arg[0:2] == "--": + self._process_long_opt(rargs, values) + elif arg[:1] == "-" and len(arg) > 1: + if len(arg) > 2: + self._process_long_opt(rargs, values) + else: + self._process_short_opts(rargs, values) + elif self.allow_interspersed_args: + largs.append(arg) + del rargs[0] + else: + return + except optparse.BadOptionError: + continue + + +def parse_swiftc_args(arguments: List[str]) -> SwiftIDETestArguments: # noqa: C901 + """ + We can't use argparse to do our parsing because arguments like -Xcc + need to accept arguments that are prefixed with `-`. + + optparse can handle this, and it's only soft deprecated (i.e. it should + stay around, just not actively developed), so we should be safe to use it. + + Additionally, our subclasses above are safe, since optparse is no longer + actively developed. + """ + parser = IgnoreUnknownLongSingleDashOptParser() + + parser.add_option("-sdk", dest="sdk") + parser.add_option("-target", dest="target") + parser.add_option("-Xcc", action="append", default=[], dest="xcc") + parser.add_option("-D", dest="defines", action="append", default=[]) + parser.add_option("-F", dest="frameworks", action="append", default=[]) + parser.add_option("-I", dest="includes", action="append", default=[]) + parser.add_option("-resource-dir", dest="resource_dir") + parser.add_option( + "-enable-experimental-cxx-interop", + action="store_true", + default=False, + dest="enable_experimental_cxx_interop", + ) + parser.add_option("-Xfrontend", action="append", default=[], dest="xfrontend") + parser.add_option("-swift-version", dest="swift_version") + parser.add_option("-cxx-interoperability-mode", dest="cxx_interoperability_mode") + + options, leftovers = parser.parse_args(arguments) + + frontend_parser = IgnoreUnknownLongSingleDashOptParser() + frontend_parser.add_option( + "-enable-upcoming-feature", + dest="enable_upcoming_feature", + action="append", + default=[], + ) + frontend_parser.add_option( + "-explicit-swift-module-map-file", dest="explicit_swift_module_map" + ) + frontend_options = frontend_parser.parse_args(options.xfrontend)[0] + + resource_dir = options.resource_dir + if not resource_dir: + # If an explicit resource dir was not provided, we need to figure out + # which resource id would have been used, which, in the case of Xcode, + # is relative to the swiftc used. 
+ assert len(leftovers) >= 1 + compiler_path = pathlib.Path(leftovers[0]) + assert compiler_path.name == "swiftc" + resource_dir_path = compiler_path.parents[1] / "lib" / "swift" + assert resource_dir_path.exists() + resource_dir = str(resource_dir_path) + + return SwiftIDETestArguments( + options.sdk, + options.target, + options.xcc, + options.defines, + options.frameworks, + options.includes, + resource_dir, + options.enable_experimental_cxx_interop, + options.cxx_interoperability_mode, + frontend_options.enable_upcoming_feature, + frontend_options.explicit_swift_module_map, + options.swift_version, + ) + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Uses swift-ide-test to generate a swift interface", + fromfile_prefix_chars="@", + ) + parser.add_argument( + "--swift-ide-test-tool", + required=True, + help="Path to swift-ide-test binary.", + ) + parser.add_argument( + "--module", + required=True, + help="Name of the module to generate the interface for.", + ) + parser.add_argument( + "--out", + help="Path to output file.", + default="-", + ) + parser.add_argument( + "arguments", + nargs="*", + default=[], + help="File containing compiler arguments to use to invoke" + + " swift-ide-test. Note these arguments should be in the format CC" + + " expects, not swift-ide-test, as this tool converts the arguments" + + " as needed", + ) + return parser.parse_args() + + +@contextmanager +def open_or_stdout(out): + if out == "-": + yield sys.stdout + else: + with open(out, "w") as f: + yield f + + +def main() -> None: + args = parse_args() + + parsed = parse_swiftc_args(args.arguments) + with open_or_stdout(args.out) as out: + proc.run( + [ + args.swift_ide_test_tool, + "--source-filename=x", + "--print-module", + "--module-to-print", + args.module, + "--module-print-submodules", + ] + + parsed.to_args(), + stdout=out, + check=True, + ) + + +if __name__ == "__main__": + main() diff --git a/prelude/apple/tools/plistlib_utils.py b/prelude/apple/tools/plistlib_utils.py index 2f927c38ce..39141677c7 100644 --- a/prelude/apple/tools/plistlib_utils.py +++ b/prelude/apple/tools/plistlib_utils.py @@ -5,15 +5,18 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
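`make_swift_interface.py` above writes to either a file or stdout through a small context manager, so `sys.stdout` is never closed when the output is `-`. A self-contained sketch of the same pattern:

```python
# Sketch of the open_or_stdout pattern: "-" means write to stdout (left
# open), anything else is opened as a real file and closed on exit.
import sys
from contextlib import contextmanager
from typing import Iterator, TextIO


@contextmanager
def open_or_stdout(out: str) -> Iterator[TextIO]:
    if out == "-":
        yield sys.stdout
    else:
        with open(out, "w") as f:
            yield f


with open_or_stdout("-") as stream:
    print("hello", file=stream)
```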
+# pyre-strict + import plistlib from io import BytesIO +from typing import Any, Dict, IO -def _is_fmt_binary(header): +def _is_fmt_binary(header: bytes) -> bool: return header[:8] == b"bplist00" -def detect_format_and_load(fp): +def detect_format_and_load(fp: IO[bytes]) -> Dict[str, Any]: header = fp.read(32) fp.seek(0) if _is_fmt_binary(header): @@ -23,6 +26,6 @@ def detect_format_and_load(fp): return plistlib.load(fp, fmt=fmt) -def detect_format_and_loads(value): +def detect_format_and_loads(value: bytes) -> Dict[str, Any]: fp = BytesIO(value) return detect_format_and_load(fp) diff --git a/prelude/apple/tools/re_compatibility_utils/BUCK b/prelude/apple/tools/re_compatibility_utils/BUCK index 3e117edb1e..a567b6984b 100644 --- a/prelude/apple/tools/re_compatibility_utils/BUCK +++ b/prelude/apple/tools/re_compatibility_utils/BUCK @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_library( diff --git a/prelude/apple/tools/re_compatibility_utils/writable.py b/prelude/apple/tools/re_compatibility_utils/writable.py index 54bdd59798..b6c0ee90d1 100644 --- a/prelude/apple/tools/re_compatibility_utils/writable.py +++ b/prelude/apple/tools/re_compatibility_utils/writable.py @@ -5,12 +5,14 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import os import platform import stat -def make_path_user_writable(path: str): +def make_path_user_writable(path: str) -> None: # On Linux, `os.chmod()` does not support setting the permissions on a symlink. # `chmod` manpage says: # > AT_SYMLINK_NOFOLLOW If pathname is a symbolic link, do not @@ -22,11 +24,11 @@ def make_path_user_writable(path: str): # # Darwin supports permission setting on symlinks. follow_symlinks = platform.system() != "Darwin" - st = os.stat(path) + st = os.stat(path, follow_symlinks=False) os.chmod(path, st.st_mode | stat.S_IWUSR, follow_symlinks=follow_symlinks) -def make_dir_recursively_writable(dir: str): +def make_dir_recursively_writable(dir: str) -> None: for dirpath, _, filenames in os.walk(dir): make_path_user_writable(dirpath) for filename in filenames: diff --git a/prelude/apple/tools/resource_broker/BUCK.v2 b/prelude/apple/tools/resource_broker/BUCK.v2 new file mode 100644 index 0000000000..0a3e34a211 --- /dev/null +++ b/prelude/apple/tools/resource_broker/BUCK.v2 @@ -0,0 +1,38 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +python_binary( + name = "resource_broker", + main = "main.py", + visibility = ["PUBLIC"], + deps = [ + ":main", + ], +) + +python_library( + name = "main", + srcs = ["main.py"], + deps = [ + ":lib", + ], +) + +python_library( + name = "lib", + srcs = glob( + [ + "*.py", + ], + exclude = [ + "main.py", + ], + ), + deps = [ + "fbsource//third-party/pypi/dataclasses-json:dataclasses-json", + "fbsource//third-party/pypi/packaging:packaging", + ], +) diff --git a/prelude/apple/tools/resource_broker/idb_companion.py b/prelude/apple/tools/resource_broker/idb_companion.py new file mode 100644 index 0000000000..aa2b450a35 --- /dev/null +++ b/prelude/apple/tools/resource_broker/idb_companion.py @@ -0,0 +1,24 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
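`plistlib_utils.py` above picks the parser by sniffing the file header, since binary property lists always begin with the magic bytes `bplist00`. A standalone sketch of that detection:

```python
# Sketch: binary plists start with b"bplist00"; anything else falls back
# to the XML parser.
import plistlib
from io import BytesIO
from typing import Any, Dict, IO


def load_any(fp: IO[bytes]) -> Dict[str, Any]:
    header = fp.read(32)
    fp.seek(0)
    fmt = plistlib.FMT_BINARY if header[:8] == b"bplist00" else plistlib.FMT_XML
    return plistlib.load(fp, fmt=fmt)


xml_bytes = plistlib.dumps({"foo": "bar"})  # plistlib emits XML by default
assert load_any(BytesIO(xml_bytes)) == {"foo": "bar"}
```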
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +import os +import signal +from dataclasses import dataclass +from io import TextIOWrapper + + +@dataclass +class IdbCompanion: + socket_address: str + pid: int + stderr: TextIOWrapper + + def cleanup(self) -> None: + os.kill(self.pid, signal.SIGTERM) + self.stderr.close() diff --git a/prelude/apple/tools/resource_broker/idb_target.py b/prelude/apple/tools/resource_broker/idb_target.py new file mode 100644 index 0000000000..f856bd75b9 --- /dev/null +++ b/prelude/apple/tools/resource_broker/idb_target.py @@ -0,0 +1,48 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +import json +from dataclasses import dataclass +from enum import Enum +from typing import List, Optional + +from dataclasses_json import dataclass_json + + +class SimState(str, Enum): + booted = "Booted" + shutdown = "Shutdown" + + +@dataclass_json +@dataclass +class IdbTarget: + name: str + os_version: str + udid: str + state: SimState + host: str = "" + port: int = 0 + + +@dataclass +class SimulatorInfo: + udid: str + device_set_path: str + + +def managed_simulators_from_stdout(stdout: Optional[str]) -> List[IdbTarget]: + if not stdout: + return [] + targets = map( + # pyre-ignore[16]: `from_dict` is dynamically provided by `dataclass_json` + IdbTarget.from_dict, + json.loads(stdout), + ) + return list(targets) diff --git a/prelude/apple/tools/resource_broker/ios.py b/prelude/apple/tools/resource_broker/ios.py new file mode 100644 index 0000000000..60d8e190e5 --- /dev/null +++ b/prelude/apple/tools/resource_broker/ios.py @@ -0,0 +1,220 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
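`idb_target.py` above relies on the third-party `dataclasses_json` package to decode `idb` JSON output into typed records. A small sketch of that decode path (the field names here are illustrative):

```python
# Sketch: @dataclass_json adds from_dict/to_dict to a plain dataclass,
# so each JSON object from the tool's stdout maps to a typed record.
import json
from dataclasses import dataclass
from typing import List

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Target:
    name: str
    udid: str


def targets_from_stdout(stdout: str) -> List[Target]:
    return [Target.from_dict(obj) for obj in json.loads(stdout)]


print(targets_from_stdout('[{"name": "iPhone 8", "udid": "ABCD-1234"}]'))
```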
+ +# pyre-strict + +import os +from typing import List, Optional + +from packaging.version import Version + +from .idb_companion import IdbCompanion + +from .idb_target import ( + IdbTarget, + managed_simulators_from_stdout, + SimState, + SimulatorInfo, +) + +from .simctl_runtime import list_ios_runtimes, XCSimRuntime + +from .timeouts import SIMULATOR_BOOT_TIMEOUT + +from .utils import ( + execute_generic_text_producing_command, + spawn_companion, + wait_for_idb_companions, +) + + +def _device_set_path() -> str: + return os.path.expanduser("~/Library/Developer/Buck2IdbDeviceSet") + + +def _list_managed_simulators_command(simulator_manager: str) -> List[str]: + return [ + simulator_manager, + "list", + "--device-set-path", + _device_set_path(), + "--only", + "simulator", + ] + + +def _create_simulator_command(simulator_manager: str, sim_spec: str) -> List[str]: + return [ + simulator_manager, + "create", + "--device-set-path", + _device_set_path(), + "--configuration", + sim_spec, + ] + + +def _boot_simulator_command(simulator_manager: str, udid: str) -> List[str]: + return [ + simulator_manager, + "boot", + "--device-set-path", + _device_set_path(), + udid, + ] + + +def _compatible_device_type_from_runtime(runtime: XCSimRuntime) -> Optional[str]: + # Materialize the matches first: a bare `filter` object is always truthy, + # so the emptiness check would never fire and `next()` could raise + # `StopIteration` when the runtime has no iPhone device types. + iphones = [ + t for t in runtime.supported_device_types if t.product_family == "iPhone" + ] + if not iphones: + return None + return next( + (device_type.name for device_type in iphones if device_type.name == "iPhone 8"), + iphones[0].name, + ) + + +def _select_latest_simulator_spec(runtimes: List[XCSimRuntime]) -> str: + runtimes.sort(key=lambda x: Version(x.version), reverse=True) + for runtime in runtimes: + device_type = _compatible_device_type_from_runtime(runtime) + if device_type: + return f"{device_type},{runtime.name}" + raise RuntimeError( + "No Xcode simctl compatible iOS runtime and device available. Try to `sudo xcode-select -s ` and *open Xcode to install all required components*."
+ ) + + +def _spawn_companion_for_simulator_command( + udid: str, grpc_domain_sock: str +) -> List[str]: + return [ + "idb_companion", + "--device-set-path", + _device_set_path(), + "--udid", + udid, + "--only", + "simulator", + "--grpc-domain-sock", + grpc_domain_sock, + ] + + +async def _generic_managed_simulators_command( + name: str, cmd: List[str] +) -> List[IdbTarget]: + stdout = await execute_generic_text_producing_command(name=name, cmd=cmd) + return managed_simulators_from_stdout(stdout) + + +async def _list_managed_simulators(simulator_manager: str) -> List[IdbTarget]: + list_cmd = _list_managed_simulators_command(simulator_manager=simulator_manager) + return await _generic_managed_simulators_command( + name="list managed simulators", cmd=list_cmd + ) + + +async def _create_simulator(simulator_manager: str) -> List[IdbTarget]: + runtimes = await list_ios_runtimes() + spec = _select_latest_simulator_spec(runtimes) + create_cmd = _create_simulator_command( + simulator_manager=simulator_manager, sim_spec=spec + ) + return await _generic_managed_simulators_command( + name="create simulators", cmd=create_cmd + ) + + +async def _get_managed_simulators_create_if_needed( + simulator_manager: str, +) -> List[IdbTarget]: + managed_simulators = await _list_managed_simulators( + simulator_manager=simulator_manager + ) + if managed_simulators: + return managed_simulators + + managed_simulators = await _create_simulator(simulator_manager=simulator_manager) + if managed_simulators: + return managed_simulators + + raise RuntimeError( + "Failed to create an iOS simulator. Try to `sudo xcode-select -s ` and *open Xcode to install all required components*." + ) + + +def _select_simulator( + only_booted: bool, all_simulators: List[IdbTarget] +) -> Optional[IdbTarget]: + return next( + filter( + lambda s: s.state == SimState.booted if only_booted else True, + iter(all_simulators), + ), + None, + ) + + +def _select_simulator_with_preference( + prefer_booted: bool, all_simulators: List[IdbTarget] +) -> IdbTarget: + simulator = _select_simulator( + only_booted=prefer_booted, all_simulators=all_simulators + ) + if not simulator and prefer_booted: + simulator = _select_simulator(only_booted=False, all_simulators=all_simulators) + if not simulator: + raise RuntimeError("Expected at least unbooted simulator entity to be selected") + return simulator + + +async def prepare_simulator(simulator_manager: str, booted: bool) -> SimulatorInfo: + managed_simulators = await _get_managed_simulators_create_if_needed( + simulator_manager=simulator_manager + ) + simulator = _select_simulator_with_preference( + prefer_booted=booted, all_simulators=managed_simulators + ) + if simulator.state != SimState.booted and booted: + boot_cmd = _boot_simulator_command( + simulator_manager=simulator_manager, udid=simulator.udid + ) + await execute_generic_text_producing_command( + name="boot simulator", + cmd=boot_cmd, + timeout=SIMULATOR_BOOT_TIMEOUT, + ) + return SimulatorInfo( + udid=simulator.udid, + device_set_path=_device_set_path(), + ) + + +async def _ios_simulator(simulator_manager: str, booted: bool) -> List[IdbCompanion]: + simulator = await prepare_simulator( + simulator_manager=simulator_manager, booted=booted + ) + grpc_domain_sock = f"/tmp/buck2_idb_companion_{simulator.udid}" + process = await spawn_companion( + command=_spawn_companion_for_simulator_command( + simulator.udid, grpc_domain_sock + ), + log_file_suffix=f"companion_launch_logs_for_{simulator.udid}.log", + ) + return await 
wait_for_idb_companions([process]) + + +async def ios_unbooted_simulator(simulator_manager: str) -> List[IdbCompanion]: + return await _ios_simulator(simulator_manager=simulator_manager, booted=False) + + +async def ios_booted_simulator(simulator_manager: str) -> List[IdbCompanion]: + return await _ios_simulator(simulator_manager=simulator_manager, booted=True) diff --git a/prelude/apple/tools/resource_broker/macos.py b/prelude/apple/tools/resource_broker/macos.py new file mode 100644 index 0000000000..ad103a031a --- /dev/null +++ b/prelude/apple/tools/resource_broker/macos.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +import asyncio +from typing import cast, List + +from .idb_companion import IdbCompanion + +from .utils import IdbCompanionProcess, spawn_companion, wait_for_idb_companions + + +def _boot_macos_companion_command(grpc_domain_sock: str) -> List[str]: + return [ + "idb_companion", + "--udid", + "mac", + "--grpc-domain-sock", + grpc_domain_sock, + ] + + +async def macos_idb_companions() -> List[IdbCompanion]: + addresses = [(i, f"/tmp/buck2_idb_companion_mac_{i}") for i in range(10)] + awaitables = [ + spawn_companion( + command=_boot_macos_companion_command(addr), + log_file_suffix=f"macos_companion_{i}.log", + ) + for i, addr in addresses + ] + results = await asyncio.gather(*awaitables, return_exceptions=True) + + if exception := next(filter(lambda r: isinstance(r, BaseException), results), None): + [r.cleanup() for r in results if isinstance(r, IdbCompanionProcess)] + raise cast(BaseException, exception) + + return await wait_for_idb_companions(cast(List[IdbCompanionProcess], results)) diff --git a/prelude/apple/tools/resource_broker/main.py b/prelude/apple/tools/resource_broker/main.py new file mode 100644 index 0000000000..e6a422e541 --- /dev/null +++ b/prelude/apple/tools/resource_broker/main.py @@ -0,0 +1,135 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +import argparse +import asyncio +import json +import os +import signal +import sys +from enum import Enum +from time import sleep +from typing import List, Optional + +from .idb_companion import IdbCompanion + +from .ios import ios_booted_simulator, ios_unbooted_simulator, prepare_simulator + +from .macos import macos_idb_companions + +idb_companions: List[IdbCompanion] = [] + + +def _args_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description="Utility which helps to set up IDB companions which are used later by buck2 when it runs tests locally." + ) + parser.add_argument( + "--simulator-manager", + required=False, + type=str, + help="Tool to manage simulators and their lifecycle. Required for iOS testing", + ) + parser.add_argument( + "--type", + metavar="", + type=_ResourceType, + choices=[e.value for e in _ResourceType], + required=True, + help=f""" + Type of required resources. + Pass `{_ResourceType.iosUnbootedSimulator}` to get a companion for iOS unbooted simulator.
+ Pass `{_ResourceType.iosBootedSimulator}` to get a companion for iOS booted simulator. + Pass `{_ResourceType.macosIdbCompanion}` to get macOS companions. + """, + ) + parser.add_argument( + "--no-companion", + default=False, + action="store_true", + help=""" + If passed, will only create a simulator. No idb_companion will be spawned. + """, + ) + return parser + + +class _ResourceType(str, Enum): + iosUnbootedSimulator = "ios_unbooted_simulator" + iosBootedSimulator = "ios_booted_simulator" + macosIdbCompanion = "macos_idb_companion" + + +def _exit_gracefully(*args: List[object]) -> None: + for idb_companion in idb_companions: + idb_companion.cleanup() + exit(0) + + +def _check_simulator_manager_exists(simulator_manager: Optional[str]) -> None: + if not simulator_manager: + raise Exception("Simulator manager is not specified") + + +def main() -> None: + args = _args_parser().parse_args() + if args.no_companion: + if args.type == _ResourceType.macosIdbCompanion: + raise Exception( + "No resource broker is required for macOS tests without companion" + ) + + booted = args.type == _ResourceType.iosBootedSimulator + sim = asyncio.run( + prepare_simulator(simulator_manager=args.simulator_manager, booted=booted) + ) + result = { + "resources": [ + { + "udid": sim.udid, + "device_set_path": sim.device_set_path, + } + ] + } + json.dump(result, sys.stdout) + else: + _create_companion(args) + + +def _create_companion(args: argparse.Namespace) -> None: + if args.type == _ResourceType.iosBootedSimulator: + _check_simulator_manager_exists(args.simulator_manager) + idb_companions.extend(asyncio.run(ios_booted_simulator(args.simulator_manager))) + elif args.type == _ResourceType.iosUnbootedSimulator: + _check_simulator_manager_exists(args.simulator_manager) + idb_companions.extend( + asyncio.run(ios_unbooted_simulator(args.simulator_manager)) + ) + elif args.type == _ResourceType.macosIdbCompanion: + idb_companions.extend(asyncio.run(macos_idb_companions())) + pid = os.fork() + if pid == 0: + # child + signal.signal(signal.SIGINT, _exit_gracefully) + signal.signal(signal.SIGTERM, _exit_gracefully) + while True: + sleep(0.1) + else: + # Do not leak open FDs in parent + for c in idb_companions: + c.stderr.close() + result = { + "pid": pid, + "resources": [{"socket_address": c.socket_address} for c in idb_companions], + } + json.dump(result, sys.stdout) + + +if __name__ == "__main__": + main() diff --git a/prelude/apple/tools/resource_broker/simctl_runtime.py b/prelude/apple/tools/resource_broker/simctl_runtime.py new file mode 100644 index 0000000000..6787b2b5c9 --- /dev/null +++ b/prelude/apple/tools/resource_broker/simctl_runtime.py @@ -0,0 +1,66 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree.
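`main.py` above keeps the spawned companions alive by forking: the child idles until it is signalled, while the parent reports the child's pid as JSON and exits. A stripped-down sketch of that handoff (POSIX-only, since it relies on `os.fork`; the child here loops until SIGTERM):

```python
# Sketch: the child process lingers as the resource owner and exits
# cleanly on SIGTERM; the parent emits a JSON payload naming the child.
import json
import os
import signal
import sys
from time import sleep


def run() -> None:
    pid = os.fork()
    if pid == 0:  # child: wait until told to clean up
        signal.signal(signal.SIGTERM, lambda *_: sys.exit(0))
        while True:
            sleep(0.1)
    else:  # parent: report the child pid to the caller and return
        json.dump({"pid": pid, "resources": []}, sys.stdout)


if __name__ == "__main__":
    run()
```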
+ +# pyre-strict + +import json +from dataclasses import dataclass, field +from typing import List, Optional + +from dataclasses_json import config, dataclass_json + +from .utils import execute_generic_text_producing_command + + +@dataclass_json +@dataclass +class XCSimDevice: + name: str + product_family: str = field(metadata=config(field_name="productFamily")) + + +@dataclass_json +@dataclass +class XCSimRuntime: + name: str + version: str + supported_device_types: List[XCSimDevice] = field( + metadata=config(field_name="supportedDeviceTypes") + ) + + +@dataclass_json +@dataclass +class _XCSimRuntimes: + runtimes: List[XCSimRuntime] + + +def _list_ios_runtimes_command() -> List[str]: + return [ + "xcrun", + "simctl", + "list", + "runtimes", + "iOS", + "available", + "--json", + ] + + +def _simctl_runtimes_from_stdout(stdout: Optional[str]) -> List[XCSimRuntime]: + if not stdout: + return [] + data = json.loads(stdout) + # pyre-ignore[16]: `from_dict` is dynamically provided by `dataclass_json` + return _XCSimRuntimes.from_dict(data).runtimes + + +async def list_ios_runtimes() -> List[XCSimRuntime]: + stdout = await execute_generic_text_producing_command( + name="list iOS runtimes", cmd=_list_ios_runtimes_command() + ) + return _simctl_runtimes_from_stdout(stdout) diff --git a/prelude/genrule_types.bzl b/prelude/apple/tools/resource_broker/timeouts.py similarity index 58% rename from prelude/genrule_types.bzl rename to prelude/apple/tools/resource_broker/timeouts.py index 0793c705d4..a5694dd677 100644 --- a/prelude/genrule_types.bzl +++ b/prelude/apple/tools/resource_broker/timeouts.py @@ -5,8 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# A provider that's used as a marker for `genrule()`, allows dependents -# to distinguish such outputs -GenruleMarkerInfo = provider(fields = {}) +# pyre-strict -GENRULE_MARKER_SUBTARGET_NAME = "genrule_marker" +DEFAULT_OPERATION_TIMEOUT = 10 + +# Simulator boot is an expensive command and can take a long time to complete +# depending on machine configuration and current machine load. +SIMULATOR_BOOT_TIMEOUT = 90 diff --git a/prelude/apple/tools/resource_broker/utils.py b/prelude/apple/tools/resource_broker/utils.py new file mode 100644 index 0000000000..80d36e7169 --- /dev/null +++ b/prelude/apple/tools/resource_broker/utils.py @@ -0,0 +1,142 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
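`simctl_runtime.py` above shells out to `xcrun simctl` for the available iOS runtimes. A minimal version of that query, assuming macOS with Xcode installed and extracting only the `name`/`version` fields:

```python
# Sketch: ask simctl for available iOS runtimes as JSON and pull out the
# fields the broker cares about.
import json
import subprocess
from typing import List, Tuple


def list_runtime_versions() -> List[Tuple[str, str]]:
    result = subprocess.run(
        ["xcrun", "simctl", "list", "runtimes", "iOS", "available", "--json"],
        check=True,
        capture_output=True,
        text=True,
    )
    runtimes = json.loads(result.stdout).get("runtimes", [])
    return [(r["name"], r["version"]) for r in runtimes]
```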
+ +# pyre-strict + +import asyncio +import json +import shlex +from dataclasses import dataclass +from io import TextIOWrapper +from pathlib import Path +from typing import Any, List, Tuple + +from dataclasses_json import dataclass_json + +from .idb_companion import IdbCompanion +from .timeouts import DEFAULT_OPERATION_TIMEOUT + + +@dataclass_json +@dataclass +class _IdbStdout: + grpc_path: str + + +@dataclass +class IdbCompanionProcess: + process: asyncio.subprocess.Process + stderr: TextIOWrapper + stderr_path: Path + + def cleanup(self) -> None: + self.process.terminate() + self.stderr.close() + + +async def _read_until_valid_json(stream: asyncio.StreamReader) -> object: + buffer = b"" + while True: + data = await stream.readuntil(b"}") + buffer += data + try: + return json.loads(buffer.decode()) + except json.JSONDecodeError: + pass + raise RuntimeError( + "Should not be reachable since either the valid JSON is there or `asyncio.IncompleteReadError` is raised." + ) + + +async def _read_stdout(p: IdbCompanionProcess) -> Tuple[int, TextIOWrapper, object]: + if not p.process.stdout: + raise ValueError("Expected stdout to be set for idb companion launch process.") + try: + json = await _read_until_valid_json(p.process.stdout) + except asyncio.IncompleteReadError as e: + if not e.partial: + with open(p.stderr_path) as f: + lines = f.readlines() + raise RuntimeError( + f"idb companion terminated unexpectedly with the following stderr:\n{lines}" + ) from e + else: + raise + return p.process.pid, p.stderr, json + + +async def wait_for_idb_companions( + processes: List[IdbCompanionProcess], + timeout: float = DEFAULT_OPERATION_TIMEOUT, +) -> List[IdbCompanion]: + reads = [asyncio.Task(_read_stdout(p)) for p in processes] + done, pending = await asyncio.wait( + reads, + timeout=timeout, + ) + if not pending: + results = [task.result() for task in done] + return [ + IdbCompanion( + # pyre-ignore[16]: `from_dict` is dynamically provided by `dataclass_json` + socket_address=_IdbStdout.from_dict(json_dict).grpc_path, + pid=pid, + stderr=stderr, + ) + for pid, stderr, json_dict in results + ] + + process_index = {reads[i]: processes[i] for i in range(len(processes))} + + stderr_paths = [] + + for task in pending: + task.cancel() + process_info = process_index[task] + stderr_paths.append(str(process_info.stderr_path)) + process_info.process.terminate() + + raise RuntimeError( + f"Timeout when trying to launch idb companions. 
List of files with stderr for pending companions: {stderr_paths}" + ) + + +async def execute_generic_text_producing_command( + name: str, cmd: List[str], timeout: float = DEFAULT_OPERATION_TIMEOUT +) -> str: + process = await asyncio.create_subprocess_exec( + *cmd, + stdin=asyncio.subprocess.DEVNULL, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=timeout) + if process.returncode != 0: + raise RuntimeError( + f"Failed to {name} with command:\n```\n{shlex.join(cmd)}\n```\nstdout:\n```\n{stdout.decode(errors='ignore')}\n```\nstderr:\n```\n{stderr.decode(errors='ignore')}\n```\n" + ) + return stdout.decode() + + +async def spawn_companion( + command: List[str], + log_file_suffix: str, +) -> IdbCompanionProcess: + stderr_path = Path("/tmp/buck2_idb_companion_logs") / f"stderr-{log_file_suffix}" + stderr_path.parent.mkdir(parents=True, exist_ok=True) + stderr = stderr_path.open(mode="w") + process = await asyncio.create_subprocess_exec( + *command, + stdin=asyncio.subprocess.DEVNULL, + stdout=asyncio.subprocess.PIPE, + stderr=stderr, + ) + return IdbCompanionProcess( + process=process, + stderr=stderr, + stderr_path=stderr_path, + ) diff --git a/prelude/apple/tools/selective_debugging/BUCK.v2 b/prelude/apple/tools/selective_debugging/BUCK.v2 index e191c614cd..0311f9f3ce 100644 --- a/prelude/apple/tools/selective_debugging/BUCK.v2 +++ b/prelude/apple/tools/selective_debugging/BUCK.v2 @@ -1,4 +1,9 @@ -# @oss-disable: load("@prelude//apple/tools/defs.bzl", "meta_python_test") +load("@prelude//utils:source_listing.bzl", "source_listing") +load("@prelude//apple/tools/defs.bzl", "meta_python_test") + +oncall("build_infra") + +source_listing() python_library( name = "lib", diff --git a/prelude/apple/tools/selective_debugging/macho.py b/prelude/apple/tools/selective_debugging/macho.py index 966bee5671..8371cbeb6c 100644 --- a/prelude/apple/tools/selective_debugging/macho.py +++ b/prelude/apple/tools/selective_debugging/macho.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from dataclasses import dataclass MH_MAGIC = 0xFEEDFACE @@ -20,7 +22,7 @@ class MachO: - def __str__(self): + def __str__(self) -> str: props = {} for k, v in self.__dict__.items(): props[k] = hex(v) @@ -39,7 +41,7 @@ class MachOHeader(MachO): reserved: int @property - def is_valid(self): + def is_valid(self) -> bool: return self.magic in (MH_CIGAM_64, MH_MAGIC_64) diff --git a/prelude/apple/tools/selective_debugging/macho_parser.py b/prelude/apple/tools/selective_debugging/macho_parser.py index 76e60512fa..d9717c9db8 100644 --- a/prelude/apple/tools/selective_debugging/macho_parser.py +++ b/prelude/apple/tools/selective_debugging/macho_parser.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import sys from typing import BinaryIO, List, Optional, Tuple diff --git a/prelude/apple/tools/selective_debugging/main.py b/prelude/apple/tools/selective_debugging/main.py index 20920c02a0..d1dc98de32 100644 --- a/prelude/apple/tools/selective_debugging/main.py +++ b/prelude/apple/tools/selective_debugging/main.py @@ -6,13 +6,15 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
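The `execute_generic_text_producing_command` helper above runs a command asynchronously, bounds the wait with a timeout, and raises on a non-zero exit. A condensed sketch of the same shape (helper name is illustrative):

```python
# Sketch: run a command asynchronously, bound the wait with a timeout,
# and surface stderr in the failure message.
import asyncio
import shlex
from typing import List


async def run_text(cmd: List[str], timeout: float = 10.0) -> str:
    process = await asyncio.create_subprocess_exec(
        *cmd,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=timeout)
    if process.returncode != 0:
        raise RuntimeError(f"`{shlex.join(cmd)}` failed: {stderr.decode(errors='ignore')}")
    return stdout.decode()


print(asyncio.run(run_text(["echo", "hello"])))
```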
+# pyre-strict + import argparse import sys from .scrubber import scrub -def _parse_args(): +def _parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description="Tool to postprocess executables/dylibs." ) @@ -38,7 +40,7 @@ def _parse_args(): return parser.parse_args() -def main(): +def main() -> None: args = _parse_args() try: scrub( diff --git a/prelude/apple/tools/selective_debugging/scrubber.py b/prelude/apple/tools/selective_debugging/scrubber.py index f0d4f88c6b..a3f75ab166 100644 --- a/prelude/apple/tools/selective_debugging/scrubber.py +++ b/prelude/apple/tools/selective_debugging/scrubber.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json import os @@ -55,7 +57,7 @@ def load_focused_targets_output_paths(json_file_path: str) -> Set[str]: # Visible for testing def _get_target_output_path_from_debug_file_path( debug_target_path: str, -): +) -> str: # This function assumes the debug file path created by buck2 is in the following format: # buck-out/isolation_dir/gen/project_cell/{hash}/.../__name__/libFoo.a parts = debug_target_path.split("/") diff --git a/prelude/apple/tools/selective_debugging/spec.py b/prelude/apple/tools/selective_debugging/spec.py index 250a2fa430..35fa26ca82 100644 --- a/prelude/apple/tools/selective_debugging/spec.py +++ b/prelude/apple/tools/selective_debugging/spec.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json import re from dataclasses import dataclass, field @@ -46,11 +48,11 @@ class Spec: include_build_target_patterns: List[BuildTargetPatternOutputPathMatcher] = field( init=False ) - include_regular_expressions: List[re.Pattern] = field(init=False) + include_regular_expressions: List[re.Pattern[str]] = field(init=False) exclude_build_target_patterns: List[BuildTargetPatternOutputPathMatcher] = field( init=False ) - exclude_regular_expressions: List[re.Pattern] = field(init=False) + exclude_regular_expressions: List[re.Pattern[str]] = field(init=False) def __post_init__(self) -> None: with open(self.spec_path, "r") as f: @@ -95,7 +97,7 @@ def scrub_debug_file_path(self, debug_file_path: str) -> bool: def _path_matches_pattern_or_expression( debug_file_path: str, patterns: List[BuildTargetPatternOutputPathMatcher], - expressions: List[re.Pattern], + expressions: List[re.Pattern[str]], ) -> bool: for pattern in patterns: if pattern.match_path(debug_file_path): diff --git a/prelude/apple/tools/selective_debugging/utils.py b/prelude/apple/tools/selective_debugging/utils.py index e9a92803aa..0d0d78230c 100644 --- a/prelude/apple/tools/selective_debugging/utils.py +++ b/prelude/apple/tools/selective_debugging/utils.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + class MachOException(Exception): pass diff --git a/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py b/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py index b7e99d8531..f3897b9367 100644 --- a/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py +++ b/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
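`spec.py` above decides whether a debug file path gets scrubbed by matching it against lists of compiled patterns. The core check reduces to the following sketch (the pattern strings are illustrative):

```python
# Sketch: a path is scrubbed or kept depending on whether any compiled
# include/exclude expression matches it.
import re
from typing import List


def path_matches_any(path: str, expressions: List[re.Pattern]) -> bool:
    return any(expr.search(path) for expr in expressions)


exclude = [re.compile(r"__third_party__"), re.compile(r"libVendor\.a$")]
assert path_matches_any("buck-out/gen/cell/abc/__third_party__/libFoo.a", exclude)
assert not path_matches_any("buck-out/gen/cell/abc/__app__/libFoo.a", exclude)
```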
+# pyre-strict + import argparse import os import shutil @@ -31,7 +33,7 @@ def _args_parser() -> argparse.ArgumentParser: return parser -def _main(): +def _main() -> None: args = _args_parser().parse_args() output_dwarf_path = os.path.join(args.output, "Contents/Resources/DWARF") diff --git a/prelude/apple/tools/swift_objc_header_postprocess.py b/prelude/apple/tools/swift_objc_header_postprocess.py deleted file mode 100755 index f3ccfc5fcb..0000000000 --- a/prelude/apple/tools/swift_objc_header_postprocess.py +++ /dev/null @@ -1,312 +0,0 @@ -#!/usr/bin/env fbpython -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -import argparse -import json -import os -import re -import sys -from typing import Dict, Iterable, TextIO - -# Out-of-date? Update with this command: -# -# xcode-select --print-path | xargs printf '%s/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/System/Library/Frameworks/' | xargs ls | rg '^([A-Z].+)\.framework$' -r '${1}' | xargs printf ' "%s",\n' && xcode-select --print-path | xargs printf '%s/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/usr/include/module.modulemap' | xargs cat | rg '^module ([a-zA-Z0-9_]*) .*$' -r '${1}'| xargs printf ' "%s",\n' -APPLE_SYSTEM_MODULES = { - "ARKit", - "AVFAudio", - "AVFoundation", - "AVKit", - "Accelerate", - "Accessibility", - "Accounts", - "AdServices", - "AdSupport", - "AddressBook", - "AddressBookUI", - "AppClip", - "AppTrackingTransparency", - "AssetsLibrary", - "AudioToolbox", - "AudioUnit", - "AuthenticationServices", - "AutomaticAssessmentConfiguration", - "BackgroundTasks", - "BusinessChat", - "CFNetwork", - "CallKit", - "CarPlay", - "ClassKit", - "ClockKit", - "CloudKit", - "Combine", - "Contacts", - "ContactsUI", - "CoreAudio", - "CoreAudioKit", - "CoreAudioTypes", - "CoreBluetooth", - "CoreData", - "CoreFoundation", - "CoreGraphics", - "CoreHaptics", - "CoreImage", - "CoreLocation", - "CoreLocationUI", - "CoreMIDI", - "CoreML", - "CoreMedia", - "CoreMotion", - "CoreNFC", - "CoreServices", - "CoreSpotlight", - "CoreTelephony", - "CoreText", - "CoreVideo", - "CryptoKit", - "CryptoTokenKit", - "DataDetection", - "DeveloperToolsSupport", - "DeviceActivity", - "DeviceCheck", - "EventKit", - "EventKitUI", - "ExposureNotification", - "ExternalAccessory", - "FamilyControls", - "FileProvider", - "FileProviderUI", - "Foundation", - "GLKit", - "GSS", - "GameController", - "GameKit", - "GameplayKit", - "GroupActivities", - "HealthKit", - "HealthKitUI", - "HomeKit", - "IOKit", - "IOSurface", - "IdentityLookup", - "IdentityLookupUI", - "ImageCaptureCore", - "ImageIO", - "Intents", - "IntentsUI", - "JavaScriptCore", - "LinkPresentation", - "LocalAuthentication", - "ManagedSettings", - "ManagedSettingsUI", - "MapKit", - "MediaAccessibility", - "MediaPlayer", - "MediaToolbox", - "MessageUI", - "Messages", - "Metal", - "MetalKit", - "MetalPerformanceShaders", - "MetalPerformanceShadersGraph", - "MetricKit", - "MobileCoreServices", - "ModelIO", - "MultipeerConnectivity", - "MusicKit", - "NaturalLanguage", - "NearbyInteraction", - "Network", - "NetworkExtension", - "NewsstandKit", - "NotificationCenter", - "OSLog", - "OpenAL", - "OpenGLES", - "PDFKit", - "PHASE", - "PassKit", - "PencilKit", - "Photos", - "PhotosUI", - "PushKit", - "QuartzCore", 
- "QuickLook", - "QuickLookThumbnailing", - "RealityFoundation", - "RealityKit", - "ReplayKit", - "SafariServices", - "SceneKit", - "ScreenTime", - "Security", - "SensorKit", - "ShazamKit", - "Social", - "SoundAnalysis", - "Speech", - "SpriteKit", - "StoreKit", - "SwiftUI", - "SystemConfiguration", - "TabularData", - "Twitter", - "UIKit", - "UniformTypeIdentifiers", - "UserNotifications", - "UserNotificationsUI", - "VideoSubscriberAccount", - "VideoToolbox", - "Vision", - "VisionKit", - "WatchConnectivity", - "WebKit", - "WidgetKit", - "AppleTextureEncoder", - "Compression", - "Darwin", - "asl", - "dnssd", - "os", - "os_object", - "os_workgroup", - "libkern", - "notify", - "zlib", - "SQLite3", -} - -APPLE_TEST_FRAMEWORKS = { - "XCTest", -} - - -# These modules require specific handling, as they do not have an umbrella -# header that matches the module name, as typical Apple frameworks do. -APPLE_SYSTEM_MODULE_OVERRIDES = { - "Dispatch": ("dispatch", ("dispatch.h",)), - "ObjectiveC": ("objc", ("runtime.h",)), -} - - -def write_imports_for_headers(out: TextIO, prefix: str, headers: Iterable[str]) -> None: - for header in headers: - print(f"#import <{prefix}/{header}>", file=out) - - -def write_imports_for_modules( - out: TextIO, - postprocessing_module_name: str, - modules: Iterable[str], - deps: Dict[str, Iterable[str]], -) -> None: - # We only include the traditional textual imports when modules are disabled, so - # that the behavior with modules enabled is identical to the behavior without - # the postprocessing. - print("#else", file=out) - for module in modules: - if headers := deps.get(module): - write_imports_for_headers(out, module, headers) - elif override := APPLE_SYSTEM_MODULE_OVERRIDES.get(module): - write_imports_for_headers(out, override[0], override[1]) - elif module in APPLE_SYSTEM_MODULES or module in APPLE_TEST_FRAMEWORKS: - # When we don't have an explicit override for the module, we use the module's - # name as an umbrella header. This is used for typical Apple frameworks like - # Foundation and UIKit. - write_imports_for_headers(out, module, (f"{module}.h",)) - else: - print( - f""" -The module "{module}" was imported as a dependency of Swift code in "{postprocessing_module_name}", but could not be mapped to a list of header imports by Buck's Swift header postprocessing. There are two possibilities: - -1. If "{module}" is an internal library, it is likely that the exported_deps of "{postprocessing_module_name}" are incorrect. Try fixing them manually or with "arc fixmydeps". This is the most likely issue. - -2. If "{module}" is a system (Apple) framework, the list of Apple system modules in {os.path.basename(__file__)} is out-of-date. There is a command to fix it in that file. This issue is unlikely. -""", - file=sys.stderr, - ) - sys.exit(1) - - -def main() -> None: - parser = argparse.ArgumentParser() - parser.add_argument("header") - parser.add_argument("deps") - parser.add_argument("out") - args = parser.parse_args() - - with open(args.deps) as f: - deps = json.load(f) - - # Strips the suffix from the header name, leaving us with just the name - # of the module that we are postprocessing the header for. This is used - # for error reporting. 
- postprocessing_module_name = os.path.basename(args.header).split("-")[0] - - # The Swift compiler's output looks like this for Swift5.8: - # - # #if __has_feature(objc_modules) - # #if __has_warning("-Watimport-in-framework-header") - # #pragma clang diagnostic ignored "-Watimport-in-framework-header" - # #endif - # @import ModuleA; - # @import ModuleB; - # @import ModuleC; - # #endif - # - # The implementation here balances being somewhat flexible to changes to the compiler's - # output, unlikely though they may be, with avoiding adding too much complexity and getting - # too close to implementing a full parser for Objective-C un-preprocessed header files. - - with open(args.header) as header, open(args.out, "w") as out: - # When this is None, it means that we are still searching for the start of the conditional - # @import block in the generated header. - modules = None - # The Swift compiler emits an additional #if gate inside the conditional @import block, so - # we need to track whether we're in a further nested conditional so that we know when the - # main conditional block has ended. - if_level = 0 - - for line in header: - line = line.rstrip("\n") - # When the modules has not been set, we are still searching for the start of the - # modules @import section. - if modules is None: - # The line changed from __has_feature(modules) to __has_feature(objc_modules) between Swift5.7 and Swift5.8. - # For the time being, we need to check for either to support both Xcode14.2 and Xcode14.3 onwards. - if ( - line == "#if __has_feature(objc_modules)" - or line == "#if __has_feature(modules)" - ): - modules = [] - if_level = 1 - else: - if line.startswith("@import"): - # Splitting on: - # "@import ": to separate from the @import. - # Semicolon and period: to separate the main module name from submodules or EOL. - # The module name will then be the first item. - modules.append(re.split(r"@import |[;.]", line)[1]) - elif line.startswith("#if"): - # This allows us to handle the Clang diagnostic #if block that the compiler inserts - # within the main #if block for modules. - if_level += 1 - elif line.startswith("#endif"): - if_level -= 1 - if if_level == 0: - write_imports_for_modules( - out, - postprocessing_module_name, - modules, - deps, - ) - modules = None - print(line, file=out) - - -if __name__ == "__main__": - main() diff --git a/prelude/apple/tools/xcframework_maker.py b/prelude/apple/tools/xcframework_maker.py new file mode 100644 index 0000000000..17806db0e5 --- /dev/null +++ b/prelude/apple/tools/xcframework_maker.py @@ -0,0 +1,163 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +""" +Packages given input files into the correct format for an XCFramework, and generates +the required Info.plist. + +Example Usage: +xcframework_maker.py --name FooKit --output_path /tmp/FooKit.xcframework \ + --framework_path ios-arm64 input/ios/FooKit.xcframework \ + --dsym_path ios-arm64 input/ios/dSYM \ + --framework_path ios-arm64_x86_64-simulator input/ios-simulator/FooKit.xcframework \ + --dsym_path ios-arm64_x86_64-simulator input/ios-simulator/dSYM +""" + +import argparse +import plistlib +import shutil + +from pathlib import Path +from typing import Any, Optional + +# functions that take architecture specifiers as 'item'. 
+# Examples: +# ios-arm64_x86_64-simulator +# -> supported platform: ios +# -> supported architectures [arm64, x86_64] +# -> supported platform variant: simulator +# watchos-arm64_arm64_32 +# -> supported platform: watchos +# -> supported architectures: [arm64, arm64_32] +# -> supported platform variant: None + + +def _supported_architectures(item: str) -> list[str]: + archs = [] + # order is important so that we can + # consume 'arm64_32' first to prevent it + # later matching arm64 + for arch in ["arm64_32", "arm64", "x86_64"]: + if arch in item: + archs.append(arch) + item = item.replace(arch, "") + return archs + + +def _supported_platform(item: str) -> str: + return item.split("-")[0] + + +def _supported_platform_variant(item: str) -> Optional[str]: + components = item.split("-") + if len(components) > 2: + return components[2] + else: + return None + + +def _make_plist_entry( + item: str, binary_path: str, library_path: str, dsym_path: Optional[str] +) -> dict[str, Any]: + entry = { + "BinaryPath": binary_path, + "LibraryIdentifier": item, + "LibraryPath": library_path, + "SupportedArchitectures": _supported_architectures(item), + "SupportedPlatform": _supported_platform(item), + } + variant = _supported_platform_variant(item) + if variant is not None: + entry["SupportedPlatformVariant"] = variant + + if dsym_path is not None: + entry["DebugSymbolsPath"] = dsym_path + + return entry + + +def _make_plist( + items: list[str], + binary_paths: list[str], + library_path: str, + dsym_paths: list[Optional[str]], +) -> bytes: + d = {} + d["AvailableLibraries"] = [ + _make_plist_entry(item, binary_path, library_path, dsym_path) + for (item, binary_path, dsym_path) in zip(items, binary_paths, dsym_paths) + ] + d["CFBundlePackageType"] = "XFWK" + d["XCFrameworkFormatVersion"] = "1.0" + return plistlib.dumps(d) + + +def _find_binary_path(framework_fullpath: str, binary_name: str) -> str: + fullpath = Path(framework_fullpath) + versioned_binary_paths = sorted(fullpath.glob("Versions/Current/" + binary_name)) + if len(versioned_binary_paths) > 0: + return versioned_binary_paths[-1].relative_to(fullpath.parents[0]).as_posix() + return fullpath.name + "/" + binary_name + + +def main() -> None: + parser = argparse.ArgumentParser(description="Tool to make an xcframework bundle.") + parser.add_argument("--output-path") + parser.add_argument("--name") + parser.add_argument("--framework-path", action="append", nargs="+") + parser.add_argument( + "--dsym-path", action="append", nargs="+", default=[], required=False + ) + args = parser.parse_args() + + out_path = Path(args.output_path) + out_path.mkdir(parents=True, exist_ok=False) + + plist_path = out_path / "Info.plist" + items = [fp_args[0] for fp_args in args.framework_path] + binary_paths = [] + dsym_path_map = {} + + for framework_path in args.framework_path: + + # args are structured like this + # --framework_path ios-arm64 buck-out/path/to/MyPkg.framework + + framework_arch = framework_path[0] + framework_fullpath = framework_path[1] + framework_basename = Path(framework_fullpath).name + + shutil.copytree( + framework_fullpath, + out_path / framework_arch / framework_basename, + symlinks=True, + dirs_exist_ok=False, + ) + + binary_paths.append(_find_binary_path(framework_fullpath, args.name)) + + for dsym_path in args.dsym_path: + dsym_arch = dsym_path[0] + dsym_fullpath = dsym_path[1] + shutil.copytree( + dsym_fullpath, + out_path / dsym_arch / "dSYMs" / (args.name + ".framework.dSYM"), + symlinks=True, + dirs_exist_ok=False, + ) + 
dsym_path_map[dsym_arch] = "dSYMs" + + dsym_paths = [dsym_path_map.get(arch) for arch in items] + + library_path = args.name + ".framework" + plist_path.write_bytes(_make_plist(items, binary_paths, library_path, dsym_paths)) + + +if __name__ == "__main__": + main() diff --git a/prelude/apple/user/apple_resource_bundle.bzl b/prelude/apple/user/apple_resource_bundle.bzl index 18b82ec23b..d5235e157a 100644 --- a/prelude/apple/user/apple_resource_bundle.bzl +++ b/prelude/apple/user/apple_resource_bundle.bzl @@ -9,9 +9,10 @@ load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_syste load("@prelude//apple:apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") +load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") -load(":resource_group_map.bzl", "resource_group_map_attr") def _get_apple_resources_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) @@ -29,7 +30,8 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: def _apple_resource_bundle_attrs(): attribs = { "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}), - "binary": attrs.option(attrs.dep(), default = None), + "binary": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), + "copy_public_framework_headers": attrs.option(attrs.bool(), default = None), "deps": attrs.list(attrs.dep(), default = []), "extension": attrs.one_of(attrs.enum(AppleBundleExtension), attrs.string()), "ibtool_flags": attrs.option(attrs.list(attrs.string()), default = None), @@ -37,9 +39,12 @@ def _apple_resource_bundle_attrs(): "info_plist": attrs.source(), "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), + "module_map": attrs.option(attrs.source(), default = None), + "privacy_manifest": attrs.option(attrs.source(), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": resource_group_map_attr(), + "resource_group_map": RESOURCE_GROUP_MAP_ATTR, + "universal": attrs.option(attrs.bool(), default = None), # Only include macOS hosted toolchains, so we compile resources directly on Mac RE "_apple_toolchain": _get_apple_resources_toolchain_attr(), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), diff --git a/prelude/apple/user/apple_resource_transition.bzl b/prelude/apple/user/apple_resource_transition.bzl index 99464c6a53..85055c271f 100644 --- a/prelude/apple/user/apple_resource_transition.bzl +++ b/prelude/apple/user/apple_resource_transition.bzl @@ -14,11 +14,10 @@ def _impl(platform: PlatformInfo, refs: struct, attrs: struct) -> PlatformInfo: return platform else: cpu_constraint_label = refs.cpu[ConstraintSettingInfo].label - universal_constraint_label = refs.universal[ConstraintSettingInfo].label filtered_constraints = { constraint_setting_label: constraint_setting_value for (constraint_setting_label, constraint_setting_value) in 
platform.configuration.constraints.items() - if constraint_setting_label != cpu_constraint_label and constraint_setting_label != universal_constraint_label + if constraint_setting_label != cpu_constraint_label } return PlatformInfo( label = "apple_universal_deduped_resource", @@ -32,8 +31,8 @@ apple_resource_transition = transition( impl = _impl, refs = { "cpu": "config//cpu/constraints:cpu", - "universal": "config//build_mode/apple/constraints:universal", - "universal_enabled": "config//build_mode/apple/constraints:universal-enabled", + "universal": "config//cpu/constraints:universal", + "universal_enabled": "config//cpu/constraints:universal-enabled", }, attrs = [ "skip_universal_resource_dedupe", diff --git a/prelude/apple/user/apple_selective_debugging.bzl b/prelude/apple/user/apple_selective_debugging.bzl index 66fe2b27b3..fad01e2d89 100644 --- a/prelude/apple/user/apple_selective_debugging.bzl +++ b/prelude/apple/user/apple_selective_debugging.bzl @@ -20,6 +20,7 @@ load( "parse_build_target_pattern", ) load("@prelude//utils:lazy.bzl", "lazy") +load("@prelude//utils:set.bzl", "set") _SelectionCriteria = record( include_build_target_patterns = field(list[BuildTargetPattern], []), @@ -33,6 +34,7 @@ AppleSelectiveDebuggingInfo = provider( fields = { "scrub_binary": provider_field(typing.Callable), "filter": provider_field(typing.Callable), + "scrub_selected_debug_paths_file": provider_field(typing.Callable), }, ) @@ -72,10 +74,12 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: scrubber = ctx.attrs._apple_tools[AppleToolsInfo].selective_debugging_scrubber + targets_json_file = None cmd = cmd_args(scrubber) if json_type == _SelectiveDebuggingJsonType("targets"): + targets_json_file = ctx.attrs.targets_json_file or ctx.actions.write_json("targets.json", {"targets": []}) + # If a targets json file is not provided, write an empty json file: - targets_json_file = ctx.attrs.targets_json_file or ctx.actions.write_json("targets_json.txt", {"targets": []}) cmd.add("--targets-file") cmd.add(targets_json_file) elif json_type == _SelectiveDebuggingJsonType("spec"): @@ -98,6 +102,35 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: exclude_regular_expressions = exclude_regular_expressions, ) + def scrub_selected_debug_paths_file(inner_ctx: AnalysisContext, package_names: list[str], output_name: str) -> Artifact: + # In the event that _SelectiveDebuggingJsonType was "spec", we expect that `package_names` + # was already filtered as part of scrubbing the binary in the apple_bundle. 
+ # + # See `_maybe_scrub_binary()` in apple_bundle.bzl + if json_type != _SelectiveDebuggingJsonType("targets"): + return inner_ctx.actions.write(output_name, sorted(set(package_names).list())) + + def scrub_selected_debug_paths_action(dynamic_ctx: AnalysisContext, artifacts, outputs): + packages = [ + # "cell//path/to/some/thing:target" -> "path/to/some/thing" + target.split("//")[1].split(":")[0] + for target in artifacts[targets_json_file].read_json()["targets"] + ] + dynamic_ctx.actions.write( + outputs.values()[0], + sorted(set(filter(lambda p: p in packages, package_names)).list()), + ) + + output = inner_ctx.actions.declare_output(output_name) + inner_ctx.actions.dynamic_output( + dynamic = [targets_json_file], + inputs = [], + outputs = [output.as_output()], + f = scrub_selected_debug_paths_action, + ) + + return output + def scrub_binary(inner_ctx, executable: Artifact, executable_link_execution_preference: LinkExecutionPreference, adhoc_codesign_tool: [RunInfo, None]) -> Artifact: inner_cmd = cmd_args(cmd) output = inner_ctx.actions.declare_output("debug_scrubbed/{}".format(executable.short_path)) @@ -126,8 +159,12 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: for infos in debug_info: for info in infos: if _is_label_included(info.label, selection_criteria): - map[info.label] = info.artifacts - + # There might be a few ArtifactInfo corresponding to the same Label, + # so to avoid overwriting, we need to preserve all artifacts. + if info.label in map: + map[info.label] += info.artifacts + else: + map[info.label] = list(info.artifacts) return AppleSelectiveDebuggingFilteredDebugInfo(map = map) def preference_for_links(links: list[Label], deps_preferences: list[LinkExecutionPreferenceInfo]) -> LinkExecutionPreference: @@ -155,6 +192,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: AppleSelectiveDebuggingInfo( scrub_binary = scrub_binary, filter = filter_debug_info, + scrub_selected_debug_paths_file = scrub_selected_debug_paths_file, ), LinkExecutionPreferenceDeterminatorInfo(preference_for_links = preference_for_links), ] diff --git a/prelude/apple/user/apple_toolchain_override.bzl b/prelude/apple/user/apple_toolchain_override.bzl index 0a3886e49f..fab6676c1a 100644 --- a/prelude/apple/user/apple_toolchain_override.bzl +++ b/prelude/apple/user/apple_toolchain_override.bzl @@ -16,6 +16,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: DefaultInfo(), AppleToolchainInfo( actool = base.actool, + architecture = base.architecture, codesign = base.codesign, codesign_allocate = base.codesign_allocate, copy_scene_kit_assets = base.copy_scene_kit_assets, @@ -30,15 +31,15 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: libtool = base.libtool, lipo = base.lipo, min_version = base.min_version, + mapc = base.mapc, momc = base.momc, - odrcov = base.odrcov, + objdump = base.objdump, platform_path = base.platform_path, sdk_build_version = base.sdk_build_version, sdk_name = base.sdk_name, sdk_path = base.sdk_path, sdk_version = base.sdk_version, swift_toolchain_info = base.swift_toolchain_info, - watch_kit_stub_binary = base.watch_kit_stub_binary, xcode_build_version = base.xcode_build_version, xcode_version = base.xcode_version, xctest = base.xctest, diff --git a/prelude/apple/user/apple_tools.bzl b/prelude/apple/user/apple_tools.bzl index fc131c4b2e..0bc8b7fac4 100644 --- a/prelude/apple/user/apple_tools.bzl +++ b/prelude/apple/user/apple_tools.bzl @@ -21,7 +21,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: make_modulemap = ctx.attrs.make_modulemap[RunInfo], 
make_vfsoverlay = ctx.attrs.make_vfsoverlay[RunInfo], selective_debugging_scrubber = ctx.attrs.selective_debugging_scrubber[RunInfo], - swift_objc_header_postprocess = ctx.attrs.swift_objc_header_postprocess[RunInfo], + xcframework_maker = ctx.attrs.xcframework_maker[RunInfo], ), ] @@ -41,6 +41,6 @@ registration_spec = RuleRegistrationSpec( "make_vfsoverlay": attrs.dep(providers = [RunInfo]), "selective_debugging_scrubber": attrs.dep(providers = [RunInfo]), "split_arch_combine_dsym_bundles_tool": attrs.dep(providers = [RunInfo]), - "swift_objc_header_postprocess": attrs.dep(providers = [RunInfo]), + "xcframework_maker": attrs.dep(providers = [RunInfo]), }, ) diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl index 261f68b633..428b21a3b6 100644 --- a/prelude/apple/user/apple_watchos_bundle.bzl +++ b/prelude/apple/user/apple_watchos_bundle.bzl @@ -6,9 +6,10 @@ # of this source tree. load("@prelude//apple:apple_bundle.bzl", "apple_bundle_impl") +load("@prelude//apple:apple_platforms.bzl", "APPLE_PLATFORMS_KEY") load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_bundle_extra_attrs") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Traversal") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") load(":watch_transition.bzl", "watch_transition") @@ -34,7 +35,7 @@ def _apple_bundle_base_attrs(): "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "resource_group_map": attrs.option(RESOURCE_GROUP_MAP_ATTR, default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), @@ -44,6 +45,10 @@ def _apple_watchos_bundle_attrs(): attributes = {} attributes.update(_apple_bundle_base_attrs()) attributes.update(apple_bundle_extra_attrs()) + attributes.update({ + APPLE_PLATFORMS_KEY: attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}), + "bundle_type": attrs.string(default = "watchapp"), + }) return attributes def apple_watchos_bundle_impl(ctx: AnalysisContext) -> list[Provider]: diff --git a/prelude/apple/user/apple_xcframework.bzl b/prelude/apple/user/apple_xcframework.bzl new file mode 100644 index 0000000000..0177e50bbe --- /dev/null +++ b/prelude/apple/user/apple_xcframework.bzl @@ -0,0 +1,186 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
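Before the rule that drives it, it may help to see what `xcframework_maker.py` above actually assembles: the Info.plist is a plain dict serialized with `plistlib`. A runnable sketch under the same conventions — `FooKit` comes from the tool's own usage example, and `supported_architectures` is a condensed stand-in for `_supported_architectures`/`_make_plist`:

```python
import plistlib

# Architecture specifiers in the same shape the tool's comments describe.
items = ["ios-arm64", "ios-arm64_x86_64-simulator"]


def supported_architectures(item: str) -> list[str]:
    # Order matters: consume "arm64_32" before "arm64" so the substring
    # match cannot misfire, exactly as in _supported_architectures.
    archs = []
    for arch in ["arm64_32", "arm64", "x86_64"]:
        if arch in item:
            archs.append(arch)
            item = item.replace(arch, "")
    return archs


libraries = []
for item in items:
    entry = {
        "BinaryPath": "FooKit.framework/FooKit",
        "LibraryIdentifier": item,
        "LibraryPath": "FooKit.framework",
        "SupportedArchitectures": supported_architectures(item),
        "SupportedPlatform": item.split("-")[0],
    }
    parts = item.split("-")
    if len(parts) > 2:
        entry["SupportedPlatformVariant"] = parts[2]
    libraries.append(entry)

plist = {
    "AvailableLibraries": libraries,
    "CFBundlePackageType": "XFWK",
    "XCFrameworkFormatVersion": "1.0",
}
print(plistlib.dumps(plist).decode())
```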
+ +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo") +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") + +def _impl(ctx: AnalysisContext) -> list[Provider]: + apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] + + xcframework_dir = ctx.actions.declare_output(ctx.attrs.framework_name + ".xcframework", dir = True) + xcframework_command = cmd_args([ + apple_tools.xcframework_maker, + "--output-path", + xcframework_dir.as_output(), + "--name", + ctx.attrs.framework_name, + ]) + + for arch in ctx.attrs.framework: + framework_dep = ctx.attrs.framework[arch] + framework_paths = framework_dep[DefaultInfo].default_outputs + if len(framework_paths) > 1: + fail("xcframework's framework target {} must only produce one output".format(framework_dep.label)) + + xcframework_command.add("--framework-path") + xcframework_command.add(arch) + xcframework_command.add(framework_paths[0]) + + if ctx.attrs.include_dsym: + dsym_dep = framework_dep[DefaultInfo].sub_targets["dsym"] + dsym_path = dsym_dep[DefaultInfo].default_outputs + xcframework_command.add("--dsym-path") + xcframework_command.add(arch) + xcframework_command.add(dsym_path) + + ctx.actions.run(xcframework_command, category = "apple_xcframework") + return [ + DefaultInfo(default_output = xcframework_dir), + ] + +def _strip_os_sdk_and_runtime_constraints(platform: PlatformInfo, refs: struct) -> dict[TargetLabel, ConstraintValueInfo]: + return { + constraint_setting_label: constraint_setting_value + for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items() + if constraint_setting_label not in [refs.os[ConstraintSettingInfo].label, refs.sdk[ConstraintSettingInfo].label, refs.universal[ConstraintSettingInfo].label, refs.runtime[ConstraintSettingInfo].label] + } + +# provides a map of os-platform to cpu architectures +# so we can identify when universal binaries can be created instead of +# two separate frameworks +# +# e.g. 
input of ["ios-arm64", "iphonesimulator-x86_64", "iphonesimulator-arm64"] +# will produce {"ios": ["arm64"], "iphonesimulator": ["arm64", "x86_64"]} + +def _normalize_platforms(platforms: list[str]) -> dict[str, list[str]]: + result = {} + for platform in platforms: + plat_list = platform.split("-") + plat_type = plat_list[0] + plat_archs = plat_list[1:] + previous_archs = result.get(plat_type, []) + result[plat_type] = sorted(plat_archs + previous_archs) + + return result + +def _apple_xcframework_framework_attrib_split_transition_impl( + platform: PlatformInfo, + refs: struct, + attrs: struct) -> dict[str, PlatformInfo]: + result = {} + + new_platforms = _normalize_platforms(attrs.platforms).items() + for os_value, cpu_values in new_platforms: + updated_constraints = _strip_os_sdk_and_runtime_constraints(platform, refs) + updated_constraints[refs.swift_library_evolution[ConstraintSettingInfo].label] = refs.swift_library_evolution_enabled[ConstraintValueInfo] + + canonical_platform_suffix = "" + + if os_value == "macos": + canonical_platform_prefix = "macos" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.macos[ConstraintValueInfo] + elif os_value == "iphoneos": + canonical_platform_prefix = "ios" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.ios[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.ios_device_sdk[ConstraintValueInfo] + elif os_value == "watchos": + canonical_platform_prefix = "watchos" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.watchos[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.watchos_device_sdk[ConstraintValueInfo] + elif os_value == "iphonesimulator": + canonical_platform_prefix = "ios" + canonical_platform_suffix = "simulator" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.ios[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.ios_simulator_sdk[ConstraintValueInfo] + elif os_value == "watchossimulator": + canonical_platform_prefix = "watchos" + canonical_platform_suffix = "simulator" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.watchos[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.watchos_simulator_sdk[ConstraintValueInfo] + elif os_value == "maccatalyst": + canonical_platform_prefix = "ios" + canonical_platform_suffix = "maccatalyst" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.ios[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.maccatalyst_sdk[ConstraintValueInfo] + updated_constraints[refs.runtime[ConstraintSettingInfo].label] = refs.maccatalyst_runtime[ConstraintValueInfo] + else: + fail("Unsupported OS value {} in apple_xcframework() platforms.".format(os_value)) + + cpu_constraint_name = refs.cpu[ConstraintSettingInfo].label + + if len(cpu_values) > 1: + updated_constraints[refs.universal[ConstraintSettingInfo].label] = refs.universal_enabled[ConstraintValueInfo] + elif cpu_values[0] == "arm64": + updated_constraints[cpu_constraint_name] = refs.arm64[ConstraintValueInfo] + elif cpu_values[0] == "x86_64": + updated_constraints[cpu_constraint_name] = refs.x86_64[ConstraintValueInfo] + else: + fail("Unsupported CPU value {} in apple_xcframework().".format(cpu_values[0])) + + new_cfg = ConfigurationInfo( + constraints = updated_constraints, + values = platform.configuration.values, + ) + + canonical_platform_name = canonical_platform_prefix 
+ "-" + "_".join(cpu_values) + if len(canonical_platform_suffix) > 0: + canonical_platform_name += "-" + canonical_platform_suffix + + result.update({canonical_platform_name: PlatformInfo( + label = canonical_platform_name + "_transition", + configuration = new_cfg, + )}) + + return result + +framework_split_transition = transition( + impl = _apple_xcframework_framework_attrib_split_transition_impl, + refs = { + "arm32": "config//cpu/constraints:arm32", + "arm64": "config//cpu/constraints:arm64", + "cpu": "config//cpu/constraints:cpu", + "ios": "config//os/constraints:iphoneos", + "ios_device_sdk": "config//os/sdk/apple/constraints:iphoneos", + "ios_simulator_sdk": "config//os/sdk/apple/constraints:iphonesimulator", + "maccatalyst_runtime": "config//runtime/constraints:maccatalyst", + "maccatalyst_sdk": "config//os/sdk/apple/constraints:maccatalyst", + "macos": "config//os/constraints:macos", + "os": "config//os/constraints:os", + "runtime": "config//runtime/constraints:runtime", + "sdk": "config//os/sdk/apple/constraints:_", + "swift_library_evolution": "config//features/apple/constraints:swift_library_evolution", + "swift_library_evolution_enabled": "config//features/apple/constraints:swift_library_evolution_enabled", + "universal": "config//cpu/constraints:universal", + "universal_enabled": "config//cpu/constraints:universal-enabled", + "watchos": "config//os/constraints:watchos", + "watchos_device_sdk": "config//os/sdk/apple/constraints:watchos", + "watchos_simulator_sdk": "config//os/sdk/apple/constraints:watchsimulator", + "x86_64": "config//cpu/constraints:x86_64", + }, + attrs = [ + "platforms", + ], + split = True, +) + +registration_spec = RuleRegistrationSpec( + name = "apple_xcframework", + impl = _impl, + attrs = { + "framework": attrs.split_transition_dep(cfg = framework_split_transition), + "framework_name": attrs.string(), + "include_dsym": attrs.option(attrs.bool(), default = None), + "platforms": attrs.list(attrs.string(), default = []), + "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), + }, +) + +def apple_xcframework_extra_attrs(): + attribs = { + "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), + } + return attribs diff --git a/prelude/apple/user/cpu_split_transition.bzl b/prelude/apple/user/cpu_split_transition.bzl index 027259d98a..0fbbdf6d73 100644 --- a/prelude/apple/user/cpu_split_transition.bzl +++ b/prelude/apple/user/cpu_split_transition.bzl @@ -27,6 +27,15 @@ def _universal_constraint_value(platform: PlatformInfo, refs: struct) -> [None, universal = platform.configuration.constraints.get(refs.universal[ConstraintSettingInfo].label) return universal.label == refs.universal_enabled[ConstraintValueInfo].label if universal != None else False +def _filter_incompatible_constraints(platform_name: str, constraints: dict[TargetLabel, ConstraintValueInfo]) -> dict[TargetLabel, ConstraintValueInfo]: + filtered = dict() + for constraint_setting_label, constraint_value_info in constraints.items(): + incompatible_constraint_name = "//cpu/x86" if platform_name == "arm64" else "//cpu/arm" + if incompatible_constraint_name in str(constraint_value_info.label): + continue + filtered[constraint_setting_label] = constraint_value_info + return filtered + def _cpu_split_transition_impl( platform: PlatformInfo, refs: struct, @@ -43,7 +52,7 @@ def _cpu_split_transition_impl( cpu_name_to_cpu_constraint = {} if os_label == refs.ios[ConstraintValueInfo].label: - if sdk 
== None or sdk_label == refs.ios_simulator_sdk[ConstraintValueInfo].label: + if sdk == None or sdk_label == refs.ios_simulator_sdk[ConstraintValueInfo].label or sdk_label == refs.maccatalyst_sdk[ConstraintValueInfo].label: # default to simulator if SDK is not specified cpu_name_to_cpu_constraint["arm64"] = refs.arm64[ConstraintValueInfo] cpu_name_to_cpu_constraint["x86_64"] = refs.x86_64[ConstraintValueInfo] @@ -68,8 +77,8 @@ def _cpu_split_transition_impl( cpu_constraint_name = refs.cpu[ConstraintSettingInfo].label base_constraints = { - constraint_setting_label: constraint_setting_value - for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items() + constraint_setting_label: constraint_value_info + for (constraint_setting_label, constraint_value_info) in platform.configuration.constraints.items() if constraint_setting_label != cpu_constraint_name } @@ -77,6 +86,8 @@ def _cpu_split_transition_impl( for platform_name, cpu_constraint in cpu_name_to_cpu_constraint.items(): updated_constraints = dict(base_constraints) updated_constraints[cpu_constraint_name] = cpu_constraint + updated_constraints = _filter_incompatible_constraints(platform_name, updated_constraints) + new_configs[platform_name] = PlatformInfo( label = platform_name, configuration = ConfigurationInfo( @@ -96,11 +107,12 @@ cpu_split_transition = transition( "ios": "config//os/constraints:iphoneos", "ios_device_sdk": "config//os/sdk/apple/constraints:iphoneos", "ios_simulator_sdk": "config//os/sdk/apple/constraints:iphonesimulator", + "maccatalyst_sdk": "config//os/sdk/apple/constraints:maccatalyst", "macos": "config//os/constraints:macos", "os": "config//os/constraints:os", "sdk": "config//os/sdk/apple/constraints:_", - "universal": "config//build_mode/apple/constraints:universal", - "universal_enabled": "config//build_mode/apple/constraints:universal-enabled", + "universal": "config//cpu/constraints:universal", + "universal_enabled": "config//cpu/constraints:universal-enabled", "watchos": "config//os/constraints:watchos", "watchos_device_sdk": "config//os/sdk/apple/constraints:watchos", "watchos_simulator_sdk": "config//os/sdk/apple/constraints:watchsimulator", diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index 4b46e4f51f..5c5c315ec6 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -7,6 +7,7 @@ load( "@prelude//apple:resource_groups.bzl", + "ResourceGraphNode", # @unused Used as a type "ResourceGroupInfo", "create_resource_graph", "get_resource_graph_node_map_func", @@ -15,24 +16,27 @@ load( "@prelude//cxx:groups.bzl", "compute_mappings", "create_group", + "get_roots_from_mapping", + "make_info_subtarget_providers", "parse_groups_definitions", ) +load( + "@prelude//cxx:groups_types.bzl", + "GroupMapping", # @unused Used as a type + "Traversal", +) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Traversal") - -def resource_group_map_attr(): - return attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None) +load("@prelude//utils:utils.bzl", "flatten") def _impl(ctx: AnalysisContext) -> list[Provider]: resource_groups = parse_groups_definitions(ctx.attrs.map, lambda root: root.label) - # Extract deps from the roots via the raw attrs, as `parse_groups_definitions` - # parses them as labels. 
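The comment above introduces the block deleted just below, which collected only `mapping[0]` per mapping. Its replacement flattens `get_roots_from_mapping(...)` because a mapping root may now be a single dep or a list of deps (see the `attrs.one_of` change at the end of this file's hunk). A plain-Python model of that shape, with string labels standing in for deps; everything here is illustrative except the two function names:

```python
from typing import List, Optional, Tuple, Union

Root = Union[str, List[str]]
Mapping = Tuple[Root, str, Optional[str]]  # (root(s), traversal, filter)


def get_roots_from_mapping(mapping: Mapping) -> List[str]:
    # Normalize a single root or a list of roots to a list.
    root = mapping[0]
    return root if isinstance(root, list) else [root]


def flatten(lists: List[List[str]]) -> List[str]:
    return [x for xs in lists for x in xs]


entries = [
    ("group1", [("//a:a", "tree", None), (["//b:b", "//c:c"], "node", None)]),
]
deps = flatten([
    get_roots_from_mapping(mapping)
    for _name, mappings in entries
    for mapping in mappings
])
print(deps)  # ['//a:a', '//b:b', '//c:c']
```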
- resource_groups_deps = [ - mapping[0] + resource_groups_deps = flatten([ + get_roots_from_mapping(mapping) for entry in ctx.attrs.map for mapping in entry[1] - ] + ]) + resource_graph = create_resource_graph( ctx = ctx, labels = [], @@ -48,18 +52,19 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: # ResourceGraphInfo, which `create_resource_graph` removes above. # So make sure we remove them from the mappings too, otherwise # `compute_mappings` crashes on the inconsistency. - mappings = [ - mapping - for mapping in group.mappings - if mapping.root == None or mapping.root in resource_graph_node_map - ], + mappings = filter( + None, + [_fixup_mapping_to_only_include_roots_in_the_map(m, resource_graph_node_map) for m in group.mappings], + ), ) for group in resource_groups }, graph_map = resource_graph_node_map, ) return [ - DefaultInfo(), + DefaultInfo(sub_targets = { + "info": make_info_subtarget_providers(ctx, resource_groups, mappings), + }), ResourceGroupInfo( groups = resource_groups, groups_hash = hash(str(resource_groups)), @@ -72,10 +77,40 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: ), ] +def _fixup_mapping_to_only_include_roots_in_the_map(mapping: GroupMapping, node_map: dict[Label, ResourceGraphNode]) -> GroupMapping | None: + if not mapping.roots: + return mapping + + filtered_roots = [ + root + for root in mapping.roots + if root in node_map + ] + if not filtered_roots: + return None + + return GroupMapping( + roots = filtered_roots, + traversal = mapping.traversal, + filters = mapping.filters, + preferred_linkage = mapping.preferred_linkage, + ) + registration_spec = RuleRegistrationSpec( name = "resource_group_map", impl = _impl, attrs = { - "map": attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), + "map": attrs.list( + attrs.tuple( + attrs.string(), + attrs.list( + attrs.tuple( + attrs.one_of(attrs.dep(), attrs.list(attrs.dep())), + attrs.enum(Traversal.values()), + attrs.option(attrs.string()), + ), + ), + ), + ), }, ) diff --git a/prelude/apple/xcode.bzl b/prelude/apple/xcode.bzl index 691f23501a..e46ffad47c 100644 --- a/prelude/apple/xcode.bzl +++ b/prelude/apple/xcode.bzl @@ -7,13 +7,13 @@ load("@prelude//apple:apple_sdk.bzl", "get_apple_sdk_name") load("@prelude//apple:apple_target_sdk_version.bzl", "get_min_deployment_version_for_node") -load("@prelude//apple:apple_utility.bzl", "has_apple_toolchain") +load("@prelude//apple:apple_utility.bzl", "get_apple_architecture", "has_apple_toolchain") load( "@prelude//cxx:argsfiles.bzl", "CompileArgsfile", # @unused Used as a type ) load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) load("@prelude//cxx:xcode.bzl", "cxx_populate_xcode_attributes") @@ -23,10 +23,14 @@ def apple_populate_xcode_attributes( ctx, srcs: list[CxxSrcWithFlags], argsfiles: dict[str, CompileArgsfile], - product_name: str) -> dict[str, typing.Any]: + product_name: str, + contains_swift_sources: bool = False) -> dict[str, typing.Any]: data = cxx_populate_xcode_attributes(ctx = ctx, srcs = srcs, argsfiles = argsfiles, product_name = product_name) + data["contains_swift_sources"] = contains_swift_sources + if has_apple_toolchain(ctx): + data["arch"] = get_apple_architecture(ctx) data["sdk"] = get_apple_sdk_name(ctx) data["deployment_version"] = get_min_deployment_version_for_node(ctx) @@ -64,7 +68,3 @@ def _get_attribute_with_output(ctx: AnalysisContext, attr_name: str) -> [Depende # So, an empty 
`DefaultInfo` basically signifies that there's no xctoolchain. return dep return None - -def get_project_root_file(ctx) -> Artifact: - content = cmd_args(ctx.label.project_root) - return ctx.actions.write("project_root", content, absolute = True) diff --git a/prelude/artifacts.bzl b/prelude/artifacts.bzl index 18d14f1839..46bfb94629 100644 --- a/prelude/artifacts.bzl +++ b/prelude/artifacts.bzl @@ -35,6 +35,26 @@ ArtifactOutputs = record( other_outputs = field(list[ArgLike]), ) +# Wrapper to support wrapping `Artifact`s referencing paths behind external +# symlinks. +ArtifactExt = record( + artifact = field(Artifact), + # If the `artifact` above is a symlink referencing an external path, this + # is an optional sub-path to append when accessing the path. + sub_path = field(str | None, None), + # Returns the resolved path as a `cmd_arg()`, with the optional sub-path + # appended. + as_arg = field(typing.Callable), +) + +# A Provider that mirrors `DefaultInfo` for `Artifact` outputs, but allows +# specifying an `ArtifactExt` as it's default output. +DefaultOutputExt = provider( + fields = dict( + default_output = provider_field(ArtifactExt), + ), +) + def single_artifact(dep: Artifact | Dependency) -> ArtifactOutputs: if type(dep) == "artifact": return ArtifactOutputs( @@ -123,3 +143,26 @@ def unpack_artifact_map(artifacts: dict[str, Artifact | Dependency]) -> dict[str out[name] = single_artifact(artifact) return out + +def _as_arg(artifact: Artifact, sub_path: str | None) -> ArgLike: + if sub_path == None: + return artifact + return cmd_args(artifact, format = "{{}}/{}".format(sub_path)) + +def artifact_ext( + artifact: Artifact, + sub_path: str | None = None) -> ArtifactExt: + return ArtifactExt( + artifact = artifact, + sub_path = sub_path, + as_arg = lambda: _as_arg(artifact, sub_path), + ) + +def to_arglike(src: Artifact | Dependency) -> ArgLike: + if type(src) == "dependency": + ext = src.get(DefaultOutputExt) + if ext != None: + src = ext.default_output.as_arg() + else: + (src,) = src[DefaultInfo].default_outputs + return src diff --git a/prelude/apple/apple_buck2_compatibility.bzl b/prelude/buck2_compatibility.bzl similarity index 94% rename from prelude/apple/apple_buck2_compatibility.bzl rename to prelude/buck2_compatibility.bzl index 09d870c0f7..439b344af5 100644 --- a/prelude/apple/apple_buck2_compatibility.bzl +++ b/prelude/buck2_compatibility.bzl @@ -15,6 +15,6 @@ Buck2Compatibility = enum( BUCK2_COMPATIBILITY_ATTRIB_NAME = "buck2_compatibility" BUCK2_COMPATIBILITY_ATTRIB_TYPE = attrs.enum(Buck2Compatibility.values(), default = "unknown") -def apple_check_buck2_compatibility(ctx: AnalysisContext): +def check_buck2_compatibility(ctx: AnalysisContext): if hasattr(ctx.attrs, "buck2_compatibility") and ctx.attrs.buck2_compatibility == "incompatible": warning("The target '{}' is marked as incompatible with buck2, output might be incorrect".format(ctx.label)) diff --git a/prelude/command_alias.bzl b/prelude/command_alias.bzl index 3b7a4db583..97d12b0ee0 100644 --- a/prelude/command_alias.bzl +++ b/prelude/command_alias.bzl @@ -24,56 +24,55 @@ def _command_alias_impl_target_unix(ctx, exec_is_windows: bool): else: base = _get_run_info_from_exe(ctx.attrs.exe) - run_info_args = cmd_args() + trampoline_args = cmd_args() + trampoline_args.add("#!/usr/bin/env bash") + trampoline_args.add("set -euo pipefail") + trampoline_args.add('BUCK_COMMAND_ALIAS_ABSOLUTE=$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)') + + for (k, v) in ctx.attrs.env.items(): + # TODO(akozhevnikov): maybe check 
environment variable is not conflicting with pre-existing one + trampoline_args.add(cmd_args(["export ", k, "=", cmd_args(v, quote = "shell")], delimiter = "")) + + if len(ctx.attrs.platform_exe.items()) > 0: + trampoline_args.add('case "$(uname)" in') + for platform, exe in ctx.attrs.platform_exe.items(): + # Only linux and macos are supported. + if platform == "linux": + _add_platform_case_to_trampoline_args(trampoline_args, "Linux", _get_run_info_from_exe(exe), ctx.attrs.args) + elif platform == "macos": + _add_platform_case_to_trampoline_args(trampoline_args, "Darwin", _get_run_info_from_exe(exe), ctx.attrs.args) + + # Default case + _add_platform_case_to_trampoline_args(trampoline_args, "*", base, ctx.attrs.args) + trampoline_args.add("esac") + else: + _add_args_declaration_to_trampoline_args(trampoline_args, base, ctx.attrs.args) - if len(ctx.attrs.env) > 0 or len(ctx.attrs.platform_exe.items()) > 0: - trampoline_args = cmd_args() - trampoline_args.add("#!/usr/bin/env bash") - trampoline_args.add("set -euo pipefail") - trampoline_args.add('BUCK_COMMAND_ALIAS_ABSOLUTE=$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)') - - for (k, v) in ctx.attrs.env.items(): - # TODO(akozhevnikov): maybe check environment variable is not conflicting with pre-existing one - trampoline_args.add(cmd_args(["export ", k, "=", cmd_args(v, quote = "shell")], delimiter = "")) - - if len(ctx.attrs.platform_exe.items()) > 0: - trampoline_args.add('case "$(uname)" in') - for platform, exe in ctx.attrs.platform_exe.items(): - # Only linux and macos are supported. - if platform == "linux": - _add_platform_case_to_trampoline_args(trampoline_args, "Linux", _get_run_info_from_exe(exe), ctx.attrs.args) - elif platform == "macos": - _add_platform_case_to_trampoline_args(trampoline_args, "Darwin", _get_run_info_from_exe(exe), ctx.attrs.args) - - # Default case - _add_platform_case_to_trampoline_args(trampoline_args, "*", base, ctx.attrs.args) - trampoline_args.add("esac") - else: - _add_args_declaration_to_trampoline_args(trampoline_args, base, ctx.attrs.args) - - trampoline_args.add('exec "${ARGS[@]}"') - - trampoline = _relativize_path( - ctx, - trampoline_args, - "sh", - "$BUCK_COMMAND_ALIAS_ABSOLUTE", - exec_is_windows, - ) + trampoline_args.add('exec "${ARGS[@]}"') - run_info_args.add(trampoline) - run_info_args.hidden([trampoline_args]) + trampoline = _relativize_path( + ctx, + trampoline_args, + "sh", + "$BUCK_COMMAND_ALIAS_ABSOLUTE", + exec_is_windows, + ) + + run_info_args_args = [] + run_info_args_hidden = [] + if len(ctx.attrs.env) > 0 or len(ctx.attrs.platform_exe.items()) > 0: + run_info_args_args.append(trampoline) + run_info_args_hidden.append(trampoline_args) else: - run_info_args.add(base.args) - run_info_args.add(ctx.attrs.args) + run_info_args_args.append(base.args) + run_info_args_args.append(ctx.attrs.args) - run_info_args.hidden(ctx.attrs.resources) + run_info_args_hidden.append(ctx.attrs.resources) + + run_info_args = cmd_args(run_info_args_args, hidden = run_info_args_hidden) - # TODO(cjhopman): Consider what this should have for default outputs. Using - # the base's default outputs may not really be correct (it makes more sense to - # be the outputs required by the args). 
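For reference, a runnable sketch that assembles the same kind of bash trampoline the unix branch writes, with hypothetical env/exe/args; forwarding caller arguments with `"$@"` inside `ARGS` is an assumption here, since `_add_args_declaration_to_trampoline_args` is only partially visible in this hunk:

```python
import shlex

# Hypothetical stand-ins for ctx.attrs.env, the resolved exe, and ctx.attrs.args.
env = {"MY_TOOL_HOME": "tools/home"}
exe = "$BUCK_COMMAND_ALIAS_ABSOLUTE/bin/tool"
args = ["--flag", "value with spaces"]

lines = [
    "#!/usr/bin/env bash",
    "set -euo pipefail",
    'BUCK_COMMAND_ALIAS_ABSOLUTE=$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)',
]
for k, v in env.items():
    lines.append(f"export {k}={shlex.quote(v)}")
# The exe is deliberately left unquoted so $BUCK_COMMAND_ALIAS_ABSOLUTE expands;
# the fixed args are shell-quoted, matching cmd_args(v, quote = "shell") above.
lines.append("ARGS=(" + " ".join([exe] + [shlex.quote(a) for a in args]) + ' "$@")')
lines.append('exec "${ARGS[@]}"')
print("\n".join(lines))
```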
return [ - DefaultInfo(), + DefaultInfo(default_output = trampoline, other_outputs = [trampoline_args] + ctx.attrs.resources), RunInfo(args = run_info_args), ] @@ -87,50 +86,51 @@ def _command_alias_impl_target_windows(ctx, exec_is_windows: bool): else: base = RunInfo() - run_info_args = cmd_args() + trampoline_args = cmd_args() + trampoline_args.add("@echo off") + + # Set BUCK_COMMAND_ALIAS_ABSOLUTE to the drive and full path of the script being created here + # We use this below to prefix any artifacts being referenced in the script + trampoline_args.add("set BUCK_COMMAND_ALIAS_ABSOLUTE=%~dp0") + + # Handle envs + for (k, v) in ctx.attrs.env.items(): + # TODO(akozhevnikov): maybe check environment variable is not conflicting with pre-existing one + trampoline_args.add(cmd_args(["set ", k, "=", v], delimiter = "")) + + # Handle args + # We shell quote the args but not the base. This is due to the same limitation detailed below with T111687922 + cmd = cmd_args([base.args], delimiter = " ") + for arg in ctx.attrs.args: + cmd.add(cmd_args(arg, quote = "shell")) + + # Add on %* to handle any other args passed through the command + cmd.add("%*") + trampoline_args.add(cmd) + + trampoline = _relativize_path( + ctx, + trampoline_args, + "bat", + "%BUCK_COMMAND_ALIAS_ABSOLUTE%", + exec_is_windows, + ) + + run_info_args_args = [] + run_info_args_hidden = [] if len(ctx.attrs.env) > 0: - trampoline_args = cmd_args() - trampoline_args.add("@echo off") - - # Set BUCK_COMMAND_ALIAS_ABSOLUTE to the drive and full path of the script being created here - # We use this below to prefix any artifacts being referenced in the script - trampoline_args.add("set BUCK_COMMAND_ALIAS_ABSOLUTE=%~dp0") - - # Handle envs - for (k, v) in ctx.attrs.env.items(): - # TODO(akozhevnikov): maybe check environment variable is not conflicting with pre-existing one - trampoline_args.add(cmd_args(["set ", k, "=", v], delimiter = "")) - - # Handle args - # We shell quote the args but not the base. This is due to the same limitation detailed below with T111687922 - cmd = cmd_args([base.args], delimiter = " ") - for arg in ctx.attrs.args: - cmd.add(cmd_args(arg, quote = "shell")) - - # Add on %* to handle any other args passed through the command - cmd.add("%*") - trampoline_args.add(cmd) - - trampoline = _relativize_path( - ctx, - trampoline_args, - "bat", - "%BUCK_COMMAND_ALIAS_ABSOLUTE%", - exec_is_windows, - ) - run_info_args.add(trampoline) - run_info_args.hidden([trampoline_args]) + run_info_args_args.append(trampoline) + run_info_args_hidden.append(trampoline_args) else: - run_info_args.add(base.args) - run_info_args.add(ctx.attrs.args) + run_info_args_args.append(base.args) + run_info_args_args.append(ctx.attrs.args) - run_info_args.hidden(ctx.attrs.resources) + run_info_args_hidden.append(ctx.attrs.resources) + + run_info_args = cmd_args(run_info_args_args, hidden = run_info_args_hidden) - # TODO(cjhopman): Consider what this should have for default outputs. Using - # the base's default outputs may not really be correct (it makes more sense to - # be the outputs required by the args). 
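The Windows branch follows the same pattern with a .bat script. A sketch with the same hypothetical inputs; the quoting here is deliberately naive, since per the T111687922 note above the base is not quoted at all and batch quoting rules are limited:

```python
env = {"MY_TOOL_HOME": r"tools\home"}
base = r"%BUCK_COMMAND_ALIAS_ABSOLUTE%\bin\tool.exe"
args = ["--flag", "value"]

lines = ["@echo off", "set BUCK_COMMAND_ALIAS_ABSOLUTE=%~dp0"]
for k, v in env.items():
    lines.append(f"set {k}={v}")
# Base unquoted, args naively double-quoted for illustration,
# %* appended so any extra caller arguments pass straight through.
lines.append(" ".join([base] + [f'"{a}"' for a in args] + ["%*"]))
print("\r\n".join(lines))
```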
return [ - DefaultInfo(), + DefaultInfo(default_output = trampoline, other_outputs = [trampoline_args] + ctx.attrs.resources), RunInfo(args = run_info_args), ] @@ -154,7 +154,11 @@ def _relativize_path_unix( trampoline_args: cmd_args) -> Artifact: # FIXME(ndmitchell): more straightforward relativization with better API non_materialized_reference = ctx.actions.write("dummy", "") - trampoline_args.relative_to(non_materialized_reference, parent = 1).absolute_prefix("__BUCK_COMMAND_ALIAS_ABSOLUTE__/") + trampoline_args = cmd_args( + trampoline_args, + relative_to = (non_materialized_reference, 1), + absolute_prefix = "__BUCK_COMMAND_ALIAS_ABSOLUTE__/", + ) trampoline_tmp, _ = ctx.actions.write("__command_alias_trampoline.{}.pre".format(extension), trampoline_args, allow_args = True) @@ -184,7 +188,11 @@ def _relativize_path_windows( trampoline_args: cmd_args) -> Artifact: # FIXME(ndmitchell): more straightforward relativization with better API non_materialized_reference = ctx.actions.write("dummy", "") - trampoline_args.relative_to(non_materialized_reference, parent = 1).absolute_prefix(var + "/") + trampoline_args = cmd_args( + trampoline_args, + relative_to = (non_materialized_reference, 1), + absolute_prefix = var + "/", + ) trampoline, _ = ctx.actions.write("__command_alias_trampoline.{}".format(extension), trampoline_args, allow_args = True) @@ -215,7 +223,10 @@ def _add_args_declaration_to_trampoline_args(trampoline_args: cmd_args, base: Ru trampoline_args.add(")") -def _get_run_info_from_exe(exe: Dependency) -> RunInfo: +def _get_run_info_from_exe(exe: Dependency | Artifact) -> RunInfo: + if isinstance(exe, Artifact): + return RunInfo(args = cmd_args(exe)) + run_info = exe.get(RunInfo) if run_info == None: run_info = RunInfo( diff --git a/prelude/configurations/rules.bzl b/prelude/configurations/rules.bzl index 323560833d..66536b8333 100644 --- a/prelude/configurations/rules.bzl +++ b/prelude/configurations/rules.bzl @@ -67,6 +67,9 @@ def platform_impl(ctx): ), ] +def configuration_alias_impl(ctx: AnalysisContext) -> list[Provider]: + return ctx.attrs.actual.providers + # TODO(cjhopman): Update the attributes for these ruletypes to declare the types of providers that they expect in their references. 
extra_attributes = { "platform": { @@ -76,6 +79,7 @@ extra_attributes = { implemented_rules = { "config_setting": config_setting_impl, + "configuration_alias": configuration_alias_impl, "constraint_setting": constraint_setting_impl, "constraint_value": constraint_value_impl, "platform": platform_impl, diff --git a/prelude/cpu/BUCK.v2 b/prelude/cpu/BUCK.v2 index 30d019bfd9..ec7c7b4ed3 100644 --- a/prelude/cpu/BUCK.v2 +++ b/prelude/cpu/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + config_setting( name = "x86_64", constraint_values = [ diff --git a/prelude/cpu/constraints/BUCK.v2 b/prelude/cpu/constraints/BUCK.v2 index 6a0e5dd00e..14971dbc1f 100644 --- a/prelude/cpu/constraints/BUCK.v2 +++ b/prelude/cpu/constraints/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + # Used by open source projects to support `prelude//` constraint_setting( diff --git a/prelude/csharp/csharp.bzl b/prelude/csharp/csharp.bzl index 7651b5763c..53ddfb7c38 100644 --- a/prelude/csharp/csharp.bzl +++ b/prelude/csharp/csharp.bzl @@ -19,14 +19,14 @@ def csharp_library_impl(ctx: AnalysisContext) -> list[Provider]: library = ctx.actions.declare_output(dll_name) # Create a command invoking a wrapper script that calls csc.exe to compile the .dll. - cmd = cmd_args(toolchain.csc) + cmd = [toolchain.csc] # Add caller specified compiler flags. - cmd.add(ctx.attrs.compiler_flags) + cmd.append(ctx.attrs.compiler_flags) # Set the output target as a .NET library. - cmd.add("/target:library") - cmd.add(cmd_args( + cmd.append("/target:library") + cmd.append(cmd_args( library.as_output(), format = "/out:{}", )) @@ -34,29 +34,29 @@ def csharp_library_impl(ctx: AnalysisContext) -> list[Provider]: # Don't include any default .NET framework assemblies like "mscorlib" or "System" unless # explicitly requested with `/reference:{}`. This flag also stops injection of other # default compiler flags. - cmd.add("/noconfig") + cmd.append("/noconfig") # Don't reference mscorlib.dll unless asked for. This is required for targets that target # embedded platforms such as Silverlight or WASM. (Originally for Buck1 compatibility.) - cmd.add("/nostdlib") + cmd.append("/nostdlib") # Don't search any paths for .NET libraries unless explicitly referenced with `/lib:{}`. - cmd.add("/nosdkpath") + cmd.append("/nosdkpath") # Let csc know the directory path where it can find system assemblies. This is the path # that is searched by `/reference:{libname}` if `libname` is just a DLL name. - cmd.add(cmd_args(toolchain.framework_dirs[ctx.attrs.framework_ver], format = "/lib:{}")) + cmd.append(cmd_args(toolchain.framework_dirs[ctx.attrs.framework_ver], format = "/lib:{}")) # Add a `/reference:{name}` argument for each dependency. # Buck target refs should be absolute paths and system assemblies just the DLL name. child_deps = generate_target_tset_children(ctx.attrs.deps, ctx) deps_tset = ctx.actions.tset(DllDepTSet, children = child_deps) - cmd.add(deps_tset.project_as_args("reference")) + cmd.append(deps_tset.project_as_args("reference")) # Specify the C# source code files that should be compiled into this target. # NOTE: This must happen after /out and /target! - cmd.add(ctx.attrs.srcs) + cmd.append(ctx.attrs.srcs) # Run the C# compiler to produce the output artifact. 
ctx.actions.run(cmd, category = "csharp_compile") diff --git a/prelude/cxx/anon_link.bzl b/prelude/cxx/anon_link.bzl index 354f4aba15..9da24b7ebb 100644 --- a/prelude/cxx/anon_link.bzl +++ b/prelude/cxx/anon_link.bzl @@ -11,6 +11,7 @@ load( "make_artifact_tset", ) load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_info.bzl", @@ -168,7 +169,7 @@ def deserialize_anon_attrs( category_suffix = attrs.category_suffix, identifier = attrs.identifier, enable_distributed_thinlto = attrs.enable_distributed_thinlto, - allow_cache_upload = attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(attrs), ) result_type = CxxLinkResultType(attrs.result_type) diff --git a/prelude/cxx/archive.bzl b/prelude/cxx/archive.bzl index e594162957..f7431d4f18 100644 --- a/prelude/cxx/archive.bzl +++ b/prelude/cxx/archive.bzl @@ -7,6 +7,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo") load("@prelude//linking:link_info.bzl", "Archive") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:utils.bzl", "value_or") load(":cxx_context.bzl", "get_cxx_toolchain_info") @@ -67,9 +68,13 @@ def _archive(ctx: AnalysisContext, name: str, args: cmd_args, thin: bool, prefer shell_quoted_args = cmd_args(args, quote = "shell") if toolchain.linker_info.use_archiver_flags and toolchain.linker_info.archiver_flags != None: shell_quoted_args.add(toolchain.linker_info.archiver_flags) - argfile, _ = ctx.actions.write(name + ".argsfile", shell_quoted_args, allow_args = True) - command.hidden([shell_quoted_args]) - command.add(cmd_args(["@", argfile], delimiter = "")) + + command.add(at_argfile( + actions = ctx.actions, + name = name + ".argsfile", + args = shell_quoted_args, + allow_args = True, + )) else: command.add(args) diff --git a/prelude/cxx/argsfiles.bzl b/prelude/cxx/argsfiles.bzl index 81dbdfeea7..b3f8627848 100644 --- a/prelude/cxx/argsfiles.bzl +++ b/prelude/cxx/argsfiles.bzl @@ -6,7 +6,6 @@ # of this source tree. ARGSFILES_SUBTARGET = "argsfiles" -ABS_ARGSFILES_SUBTARGET = "abs-argsfiles" # Information on argsfiles created for compilation. CompileArgsfile = record( @@ -15,7 +14,7 @@ CompileArgsfile = record( # This argsfile as a command form that would use the argsfile (includes dependent inputs). cmd_form = field(cmd_args), # Input args necessary for the argsfile to reference. - input_args = field(list[["artifacts", cmd_args]]), + input_args = field(list[cmd_args]), # Args as written to the argsfile (with shell quoting applied). args = field(cmd_args), # Args aggregated for the argsfile excluding file prefix args (excludes shell quoting). @@ -25,19 +24,19 @@ CompileArgsfile = record( CompileArgsfiles = record( # Relative path argsfiles used for build actions, mapped by extension. relative = field(dict[str, CompileArgsfile], default = {}), - # Absolute path argsfiles used for extra outputs, mapped by extension. - absolute = field(dict[str, CompileArgsfile], default = {}), + # Argsfiles used for Xcode integration, mapped by extension. 
+ xcode = field(dict[str, CompileArgsfile], default = {}), ) def get_argsfiles_output(ctx: AnalysisContext, argsfile_by_ext: dict[str, CompileArgsfile], summary_name: str) -> DefaultInfo: argsfiles = [] - argsfile_names = cmd_args() + argsfile_names = [] dependent_outputs = [] for _, argsfile in argsfile_by_ext.items(): argsfiles.append(argsfile.file) - argsfile_names.add(cmd_args(argsfile.file).ignore_artifacts()) + argsfile_names.append(cmd_args(argsfile.file, ignore_artifacts = True)) dependent_outputs.extend(argsfile.input_args) - argsfiles_summary = ctx.actions.write(summary_name, argsfile_names) + argsfiles_summary = ctx.actions.write(summary_name, cmd_args(argsfile_names)) return DefaultInfo(default_outputs = [argsfiles_summary] + argsfiles, other_outputs = dependent_outputs) diff --git a/prelude/cxx/attr_selection.bzl b/prelude/cxx/attr_selection.bzl index 020040d8d5..cfd8338380 100644 --- a/prelude/cxx/attr_selection.bzl +++ b/prelude/cxx/attr_selection.bzl @@ -32,7 +32,7 @@ def cxx_by_language_ext(x: dict[typing.Any, typing.Any], ext: str) -> list[typin # And you can see them in java code, but somehow it works with # this one, which is seen across the repo. Find out what's happening. key_compiler = "c_cpp_output" - elif ext in (".cpp", ".cc", ".cxx", ".c++"): + elif ext in (".cpp", ".cc", ".cxx", ".c++", ".bc"): key_pp = "cxx" key_compiler = "cxx_cpp_output" elif ext == ".m": @@ -41,7 +41,7 @@ def cxx_by_language_ext(x: dict[typing.Any, typing.Any], ext: str) -> list[typin elif ext == ".mm": key_pp = "objcxx" key_compiler = "objcxx_cpp_output" - elif ext in (".s", ".S"): + elif ext in (".s", ".sx", ".S"): key_pp = "assembler_with_cpp" key_compiler = "assembler" elif ext == ".cu": diff --git a/prelude/cxx/bitcode.bzl b/prelude/cxx/bitcode.bzl index 782e8a30b3..d157583e7c 100644 --- a/prelude/cxx/bitcode.bzl +++ b/prelude/cxx/bitcode.bzl @@ -6,6 +6,7 @@ # of this source tree.
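The archive.bzl hunk above and the bitcode.bzl hunk that follows both replace the hand-written write-an-argsfile-then-reference-it-with-@ sequence with the `at_argfile` helper. A sketch of the pattern, under the assumption that `at_argfile` behaves as used in these hunks: it writes `args` to the named argfile and returns an `@argfile` argument carrying the original args as hidden inputs. The `_packer` tool attribute and the rule shape are made up for illustration:

```
load("@prelude//utils:argfile.bzl", "at_argfile")

def _pack_impl(ctx: AnalysisContext) -> list[Provider]:
    out = ctx.actions.declare_output(ctx.label.name)
    cmd = cmd_args(
        ctx.attrs._packer[RunInfo],  # hypothetical tool dependency
        at_argfile(
            actions = ctx.actions,
            name = ctx.label.name + ".argsfile",
            args = cmd_args(ctx.attrs.srcs, quote = "shell"),
            allow_args = True,
        ),
        "-o",
        out.as_output(),
    )
    ctx.actions.run(cmd, category = "pack")
    return [DefaultInfo(default_output = out)]
```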
load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:utils.bzl", "value_or") load(":cxx_context.bzl", "get_cxx_toolchain_info") @@ -35,14 +36,19 @@ def _bundle(ctx: AnalysisContext, name: str, args: cmd_args, prefer_local: bool) bundle_output = ctx.actions.declare_output(name) - argsfile, _ = ctx.actions.write(name + ".argsfile", args, allow_args = True) - - command = cmd_args(argsfile, format = "@{}", delimiter = "").hidden(args) - llvm_cmd = cmd_args(llvm_link) - llvm_cmd.add(command) - llvm_cmd.add("-v") - llvm_cmd.add("-o") - llvm_cmd.add(bundle_output.as_output()) + command = at_argfile( + actions = ctx.actions, + name = name + ".argsfile", + args = args, + allow_args = True, + ) + llvm_cmd = cmd_args( + llvm_link, + command, + "-v", + "-o", + bundle_output.as_output(), + ) ctx.actions.run(llvm_cmd, category = "bitcode_bundle", identifier = name, prefer_local = prefer_local) return bundle_output @@ -70,7 +76,7 @@ def make_bitcode_bundle( if override and len(objects) > 1: args.add(objects[0]) overrides = cmd_args(objects[1:], format = "--override={}") - args.add(overrides).hidden(objects) + args.add(overrides) else: args.add(objects) diff --git a/prelude/cxx/comp_db.bzl b/prelude/cxx/comp_db.bzl index a1daff0e57..4d13bf46b2 100644 --- a/prelude/cxx/comp_db.bzl +++ b/prelude/cxx/comp_db.bzl @@ -7,6 +7,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load( ":compile.bzl", "CxxSrcCompileCommand", # @unused Used as a type @@ -20,6 +21,11 @@ CxxCompilationDbInfo = provider(fields = { "toolchain": provider_field(typing.Any, default = None), # toolchain for this compilation database }) +# Provider that exposes the .gcno files produced during compilation +GcnoFilesInfo = provider(fields = { + "gcno_files": provider_field(list[Artifact]), +}) + def make_compilation_db_info(src_compile_cmds: list[CxxSrcCompileCommand], toolchainInfo: CxxToolchainInfo, platformInfo: CxxPlatformInfo) -> CxxCompilationDbInfo: info = {} for src_compile_cmd in src_compile_cmds: @@ -31,7 +37,10 @@ def create_compilation_database( ctx: AnalysisContext, src_compile_cmds: list[CxxSrcCompileCommand], identifier: str) -> DefaultInfo: - mk_comp_db = get_cxx_toolchain_info(ctx).mk_comp_db[RunInfo] + mk_comp_db = get_cxx_toolchain_info(ctx).mk_comp_db + if mk_comp_db == None: + return DefaultInfo() + mk_comp_db = mk_comp_db[RunInfo] # Generate the per-source compilation DB entries. entries = {} @@ -46,7 +55,7 @@ def create_compilation_database( "gen", cmd_args(entry.as_output(), format = "--output={}"), src_compile_cmd.src.basename, - cmd_args(src_compile_cmd.src).parent(), + cmd_args(src_compile_cmd.src, parent = 1), "--", src_compile_cmd.cxx_compile_cmd.base_compile_cmd, src_compile_cmd.cxx_compile_cmd.argsfile.cmd_form, @@ -59,18 +68,17 @@ def create_compilation_database( other_outputs.append(cmd) entries[cdb_path] = entry - content = cmd_args(*entries.values()) - - argfile = ctx.actions.declare_output(paths.join(identifier, "comp_db.argsfile")) - ctx.actions.write(argfile.as_output(), content) - # Merge all entries into the actual compilation DB. 
db = ctx.actions.declare_output(paths.join(identifier, "compile_commands.json")) cmd = cmd_args(mk_comp_db) cmd.add("merge") cmd.add(cmd_args(db.as_output(), format = "--output={}")) - cmd.add(cmd_args(argfile, format = "@{}")) - cmd.hidden(entries.values()) + cmd.add(at_argfile( + actions = ctx.actions, + name = paths.join(identifier, "comp_db.argsfile"), + args = entries.values(), + )) + ctx.actions.run(cmd, category = "cxx_compilation_database_merge", identifier = identifier) return DefaultInfo(default_output = db, other_outputs = other_outputs) diff --git a/prelude/cxx/compilation_database_labels.bzl b/prelude/cxx/compilation_database_labels.bzl new file mode 100644 index 0000000000..ee064331e5 --- /dev/null +++ b/prelude/cxx/compilation_database_labels.bzl @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Handles labels used to provide compilation database information for filegroup() and genrule() targets. + +Our language services need to know how to compile files owned solely by filegroup() or genrule() targets like: +* Regular generated sources, that then end up being compiled by regular cxx_ or apple_ targets. +* Manually declared mixin files, that are always compiled by multiple other targets spread across the codebase. +* Files built by external build systems wrapped in genrules(), where compile_commands.json is produced by yet another genrule(). + +The prior approach for the former two cases was to run rdeps() queries to find a compilable target that would have a compile_commands.json entry for the file. +It suffered from reliability and performance issues, as the universe for rdeps() queries had to be quite broad, with no guarantee that there isn't even a single broken target within it. +And for external build system wrappers where there is no compilable target, we could define a rule that would effectively wrap two genrules and expose one of them as a [compilation-database] subtarget, +but that wouldn't solve the problem with mixins, which is still relevant with external build systems, and would put us in the same suboptimal spot in terms of performance and reliability. + +As the IDE needs to operate in O(changes) instead of O(repo), and open files even if some other corner of the repo is broken, +we need to make things both reliable and performant in an ever-growing codebase with a CI that explicitly cannot guarantee that the entire repo is green, and where rdeps() queries are thus flaky and slow. + +And as the IDE needs to react to any local changes and act consistently with the local checkout, we cannot simply use a remote cache for rdeps() queries that are slow and flaky. + +So the solution is instead to localize the required information within the target, and directly point to the build system rules that provide compile_commands.json for the target. +""" + +def compilation_database_rules(source_mapping: dict[str, list[str]] | list[str]) -> list[str]: + """ + Takes a mapping from sources to the rules to be used to build compilation databases for those sources. + + Tooling like IDEs needs to obtain compile commands for source files that are exported by filegroup() to be built as part of another target, or are built with an external build system wrapped in a genrule().
+ Labels provide a convenient way to link the non-compilable target with a rule that produces a compilation database for its sources: + ``` + load("@prelude//cxx:compilation_database_labels.bzl", "compilation_database_rules") + + # The shorthand way for most cases: + export_file( + name = "gadget_to_be_compiled_as_part_of_another_target.cpp", + labels = compilation_database_rules([ + "//path/to/some/dependent:target", + "//path/to/another/dependent:target", + ]) + ) + + # A per-source mapping for cases when the generated files from one genrule() are compiled in different targets and never together: + genrule( + name = "multiple_gadgets_for_different_purposes", + labels = compilation_database_rules({ + "server_gen.cpp": ["//path/to/dependent/module:server"], + "client_gen.cpp": ["//path/to/dependent/module:client"], + }) + ) + ``` + The tooling can use a BXL script to check the target kind and extract the compilation database rule from its labels, then iterate over the resulting compilation database and resolve the symlinks in 'file' entries to find the matching entry for the original source. + + :param dict[str,list[str]]|list[str] source_mapping: A mapping with a source file name regex as key and a list of targets as value. Each target has to be either a target with a [compilation-database] subtarget, or a genrule that produces compile_commands.json (for wrapping external build systems). + """ + if not isinstance(source_mapping, dict): + source_mapping = {".*": source_mapping} + return ["compilation_database_rules=" + json.encode(source_mapping)] + +def get_compilation_database_rules(labels: list[str]) -> dict[str, list[str]] | None: + """ + Retrieves and decodes compilation database targets from target labels, if any. + """ + for label in labels: + value = label.removeprefix("compilation_database_rules=") + if value != label: + return json.decode(value) + return None diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 025f099190..b2afdc5eb9 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -7,6 +7,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:lto.bzl", "LtoMode") load("@prelude//utils:set.bzl", "set") load( @@ -24,7 +25,9 @@ load( "get_pic_flags", ) load(":cxx_context.bzl", "get_cxx_toolchain_info") +load(":cxx_sources.bzl", "CxxSrcWithFlags") load(":cxx_toolchain_types.bzl", "CxxObjectFormat", "DepTrackingMode") +load(":cxx_types.bzl", "CxxRuleConstructorParams") load(":debug.bzl", "SplitDebugMode") load( ":headers.bzl", @@ -35,11 +38,19 @@ load( ":preprocessor.bzl", "CPreprocessor", # @unused Used as a type "CPreprocessorInfo", # @unused Used as a type - "cxx_attr_preprocessor_flags", "cxx_merge_cpreprocessors", "get_flags_for_compiler_type", ) +# Supported assembly extensions +AsmExtensions = enum( + ".s", + ".sx", + ".S", + ".asm", + ".asmpp", +) + # Supported Cxx file extensions CxxExtension = enum( ".cpp", @@ -47,19 +58,17 @@ CxxExtension = enum( ".cxx", ".c++", ".c", - ".s", - ".S", ".m", ".mm", ".cu", ".hip", - ".asm", - ".asmpp", ".h", ".hpp", ".hh", ".h++", ".hxx", + ".bc", + *AsmExtensions.values() ) # Header files included in compilation databases @@ -100,10 +109,13 @@ _CxxCompileCommand = record( base_compile_cmd = field(cmd_args), # The argsfile of arguments from the rule and its dependencies. argsfile = field(CompileArgsfile), + # The argsfile to use for Xcode integration.
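To make the label round trip concrete, here is a small sketch using only the two functions defined in the new file above; the target label is a placeholder:

```
load(
    "@prelude//cxx:compilation_database_labels.bzl",
    "compilation_database_rules",
    "get_compilation_database_rules",
)

# A plain list is normalized to a catch-all {".*": ...} mapping and encoded:
labels = compilation_database_rules(["//path/to/dependent:target"])
# labels == ['compilation_database_rules={".*":["//path/to/dependent:target"]}']

# A consumer (e.g. a BXL script) recovers the mapping from a target's labels:
rules = get_compilation_database_rules(labels)
# rules == {".*": ["//path/to/dependent:target"]}
```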
+ xcode_argsfile = field(CompileArgsfile), headers_dep_files = field([_HeadersDepFiles, None]), compiler_type = field(str), # The action category category = field(str), + allow_cache_upload = field(bool), ) # Information about how to compile a source file. @@ -131,17 +143,6 @@ CxxCompileCommandOutput = record( comp_db_compile_cmds = field(list[CxxSrcCompileCommand], default = []), ) -# An input to cxx compilation, consisting of a file to compile and optional -# file specific flags to compile with. -CxxSrcWithFlags = record( - file = field(Artifact), - flags = field(list[ResolvedStringWithMacros], []), - # If we have multiple source entries with same files but different flags, - # specify an index so we can differentiate them. Otherwise, use None. - index = field([int, None], None), - is_header = field(bool, False), -) - CxxCompileOutput = record( # The compiled `.o` file. object = field(Artifact), @@ -150,12 +151,13 @@ CxxCompileOutput = record( # Externally referenced debug info, which doesn't get linked with the # object (e.g. the above `.o` when using `-gsplit-dwarf=single` or the # the `.dwo` when using `-gsplit-dwarf=split`). - external_debug_info = field([Artifact, None], None), - clang_remarks = field([Artifact, None], None), - clang_trace = field([Artifact, None], None), + external_debug_info = field(Artifact | None, None), + clang_remarks = field(Artifact | None, None), + clang_trace = field(Artifact | None, None), + gcno_file = field(Artifact | None, None), ) -_ABSOLUTE_ARGSFILE_SUBSTITUTIONS = [ +_XCODE_ARG_SUBSTITUTION = [ (regex("-filter-error=.+"), "-fcolor-diagnostics"), (regex("-filter-ignore=.+"), "-fcolor-diagnostics"), (regex("-filter-warning=.+"), "-fcolor-diagnostics"), @@ -163,64 +165,94 @@ _ABSOLUTE_ARGSFILE_SUBSTITUTIONS = [ # @oss-disable: (regex("-fpika-runtime-checks"), "-fcolor-diagnostics"), ] -def get_extension_for_header(header_extension: str) -> str | None: +def get_source_extension_for_header(header_extension: str, default: CxxExtension) -> CxxExtension: """ Which source file extension to use to get compiler flags for the header. """ if header_extension in (".hpp", ".hh", ".h++", ".hxx"): - return ".cpp" + return CxxExtension(".cpp") elif header_extension == ".cuh": - return ".cu" + return CxxExtension(".cu") elif header_extension not in HeaderExtension.values(): - return header_extension # a file in `headers` has a source extension + return CxxExtension(header_extension) # a file in `headers` has a source extension else: - return None + return default -def get_extension_for_plain_headers(srcs: list[CxxSrcWithFlags]) -> str | None: +def get_source_extension(src: CxxSrcWithFlags, default_for_headers: CxxExtension) -> CxxExtension: """ - For a given list source files determine which source file extension - to use to get compiler flags for plain .h headers. + Which source file extension to use for a source or a header file. We want + headers to appear as though they are source files. + """ + if src.is_header: + return get_source_extension_for_header(src.file.extension, default_for_headers) + else: + return CxxExtension(src.file.extension) + +def collect_extensions(srcs: list[CxxSrcWithFlags]) -> list[CxxExtension]: """ + Collect extensions of source files while doing light normalization.
+ """ + duplicates = { ".c++": ".cpp", ".cc": ".cpp", ".cxx": ".cpp", } - extensions = set([duplicates.get(src.file.extension, src.file.extension) for src in srcs]) + extensions = set([CxxExtension(duplicates.get(src.file.extension, src.file.extension)) for src in srcs]) + return extensions.list() - # Assembly doesn't need any special handling as included files tend to have .asm extension themselves. - # And the presence of assembly in the target doesn't tell us anything about the language of .h files. - for asm_ext in [".s", ".S", ".asm", ".asmpp"]: - extensions.remove(asm_ext) - - if extensions.size() == 0: - return None - if extensions.size() == 1: - return extensions.list()[0] - if extensions.contains(".hip"): - return ".hip" - if extensions.contains(".cu"): - return ".cu" - if extensions.contains(".mm"): - return ".mm" - if extensions.contains(".cpp") and extensions.contains(".m"): - return ".mm" - if extensions.contains(".cpp"): - return ".cpp" - if extensions.contains(".m"): - return ".m" - return ".c" - -def get_default_extension_for_plain_header(rule_type: str) -> str: +def default_source_extension_for_plain_header(rule_type: str) -> CxxExtension: """ Returns default source file extension to use to get get compiler flags for plain .h headers. """ # Default to (Objective-)C++ instead of plain (Objective-)C as it is more likely to be compatible with both. - return ".mm" if rule_type.startswith("apple_") else ".cpp" + return CxxExtension(".mm") if rule_type.startswith("apple_") else CxxExtension(".cpp") + +def detect_source_extension_for_plain_headers(exts: list[CxxExtension], rule_type: str) -> CxxExtension: + """ + For a given list source files determine which source file extension + to use to get compiler flags for plain .h headers. + """ + + exts = set(exts) + + # Assembly doesn't need any special handling as included files tend to have .asm extension themselves. + # And the presence of assembly in the target doesn't tell us anything about the language of .h files. + for asm_ext in AsmExtensions: + exts.remove(asm_ext) + + if exts.size() == 0: + return default_source_extension_for_plain_header(rule_type) + + if exts.size() == 1: + return exts.list()[0] + if exts.contains(CxxExtension(".hip")): + return CxxExtension(".hip") + if exts.contains(CxxExtension(".cu")): + return CxxExtension(".cu") + if exts.contains(CxxExtension(".mm")): + return CxxExtension(".mm") + if exts.contains(CxxExtension(".cpp")) and exts.contains(CxxExtension(".m")): + return CxxExtension(".mm") + if exts.contains(CxxExtension(".cpp")): + return CxxExtension(".cpp") + if exts.contains(CxxExtension(".m")): + return CxxExtension(".m") + return CxxExtension(".c") + +def collect_source_extensions( + srcs: list[CxxSrcWithFlags], + default_for_headers: CxxExtension) -> list[CxxExtension]: + """ + Return unique source extensions from a list of source and header files where + header extensions are mapped to corresponding source extensions. + """ + source_extensions = set([get_source_extension(src, default_for_headers) for src in srcs]) + return source_extensions.list() -def get_header_language_mode(source_extension: str) -> str | None: +def get_header_language_mode(source_extension: CxxExtension) -> str | None: """ Returns the header mode to use for plain .h headers based on the source file extension used to obtain the compiler flags for them. 
@@ -228,18 +260,15 @@ def get_header_language_mode(source_extension: str) -> str | None: # Note: CUDA doesn't have its own header language mode, but the headers have distinct .cuh extension. modes = { - ".cpp": "c++-header", - ".m": "objective-c-header", - ".mm": "objective-c++-header", + CxxExtension(".cpp"): "c++-header", + CxxExtension(".m"): "objective-c-header", + CxxExtension(".mm"): "objective-c++-header", } return modes.get(source_extension) def create_compile_cmds( ctx: AnalysisContext, - # TODO(nga): this is `CxxRuleConstructorParams`, - # but there's dependency cycle between `compile.bzl` (this file) - # and `cxx_types.bzl` (where `CxxRuleConstructorParams` is defined). - impl_params: typing.Any, + impl_params: CxxRuleConstructorParams, own_preprocessors: list[CPreprocessor], inherited_preprocessor_infos: list[CPreprocessorInfo]) -> CxxCompileCommandOutput: """ @@ -248,6 +277,9 @@ def create_compile_cmds( of the generated compile commands and argsfile output. """ + srcs_extensions = collect_extensions(impl_params.srcs) + extension_for_plain_headers = detect_source_extension_for_plain_headers(srcs_extensions, impl_params.rule_type) + srcs_with_flags = [] # type: [CxxSrcWithFlags] for src in impl_params.srcs: @@ -282,62 +314,39 @@ def create_compile_cmds( ) headers_tag = ctx.actions.artifact_tag() - abs_headers_tag = ctx.actions.artifact_tag() # This headers tag is just for convenience use in _mk_argsfile and is otherwise unused. src_compile_cmds = [] hdr_compile_cmds = [] - cxx_compile_cmd_by_ext = {} - argsfile_by_ext = {} - abs_argsfile_by_ext = {} + cxx_compile_cmd_by_ext = {} # type: dict[CxxExtension, _CxxCompileCommand] + argsfile_by_ext = {} # type: dict[str, CompileArgsfile] + xcode_argsfile_by_ext = {} # type: dict[str, CompileArgsfile] - extension_for_plain_headers = get_extension_for_plain_headers(impl_params.srcs) - extension_for_plain_headers = extension_for_plain_headers or get_default_extension_for_plain_header(impl_params.rule_type) - for src in srcs_with_flags: - # We want headers to appear as though they are source files. - extension_for_header = get_extension_for_header(src.file.extension) or extension_for_plain_headers - ext = CxxExtension(extension_for_header if src.is_header else src.file.extension) - - # Deduplicate shared arguments to save memory. If we compile multiple files - # of the same extension they will have some of the same flags. Save on - # allocations by caching and reusing these objects. 
- if not ext in cxx_compile_cmd_by_ext: - toolchain = get_cxx_toolchain_info(ctx) - compiler_info = _get_compiler_info(toolchain, ext) - base_compile_cmd = _get_compile_base(compiler_info) - category = _get_category(ext) - - headers_dep_files = None - dep_file_file_type_hint = _dep_file_type(ext) - if dep_file_file_type_hint != None and toolchain.use_dep_files: - tracking_mode = _get_dep_tracking_mode(toolchain, dep_file_file_type_hint) - mk_dep_files_flags = get_headers_dep_files_flags_factory(tracking_mode) - if mk_dep_files_flags: - headers_dep_files = _HeadersDepFiles( - processor = cmd_args(compiler_info.dep_files_processor), - mk_flags = mk_dep_files_flags, - tag = headers_tag, - dep_tracking_mode = tracking_mode, - ) - - argsfile_by_ext[ext.value] = _mk_argsfile(ctx, compiler_info, pre, ext, headers_tag, False) - abs_argsfile_by_ext[ext.value] = _mk_argsfile(ctx, compiler_info, pre, ext, abs_headers_tag, True) - - cxx_compile_cmd_by_ext[ext] = _CxxCompileCommand( - base_compile_cmd = base_compile_cmd, - argsfile = argsfile_by_ext[ext.value], - headers_dep_files = headers_dep_files, - compiler_type = compiler_info.compiler_type, - category = category, - ) + src_extensions = collect_source_extensions(srcs_with_flags, extension_for_plain_headers) - cxx_compile_cmd = cxx_compile_cmd_by_ext[ext] + # Deduplicate shared arguments to save memory. If we compile multiple files + # of the same extension they will have some of the same flags. Save on + # allocations by caching and reusing these objects. + for ext in src_extensions: + cmd = _generate_base_compile_command(ctx, impl_params, pre, headers_tag, ext) + cxx_compile_cmd_by_ext[ext] = cmd + argsfile_by_ext[ext.value] = cmd.argsfile + xcode_argsfile_by_ext[ext.value] = cmd.xcode_argsfile + for src in srcs_with_flags: src_args = [] src_args.extend(src.flags) + + ext = get_source_extension(src, extension_for_plain_headers) + if src.is_header: - language_mode = get_header_language_mode(extension_for_header) + language_mode = get_header_language_mode(ext) src_args.extend(["-x", language_mode] if language_mode else []) - src_args.extend(["-c", src.file]) + + cxx_compile_cmd = cxx_compile_cmd_by_ext[ext] + + if cxx_compile_cmd.compiler_type != "nasm": + src_args.append("-c") + src_args.append(src.file) src_compile_command = CxxSrcCompileCommand(src = src.file, cxx_compile_cmd = cxx_compile_cmd, args = src_args, index = src.index, is_header = src.is_header) if src.is_header: @@ -346,17 +355,143 @@ def create_compile_cmds( src_compile_cmds.append(src_compile_command) argsfile_by_ext.update(impl_params.additional.argsfiles.relative) - abs_argsfile_by_ext.update(impl_params.additional.argsfiles.absolute) + xcode_argsfile_by_ext.update(impl_params.additional.argsfiles.xcode) return CxxCompileCommandOutput( src_compile_cmds = src_compile_cmds, argsfiles = CompileArgsfiles( relative = argsfile_by_ext, - absolute = abs_argsfile_by_ext, + xcode = xcode_argsfile_by_ext, ), comp_db_compile_cmds = src_compile_cmds + hdr_compile_cmds, ) +def _compile_single_cxx( + ctx: AnalysisContext, + toolchain: CxxToolchainInfo, + default_object_format: CxxObjectFormat, + bitcode_args: cmd_args, + src_compile_cmd: CxxSrcCompileCommand, + pic: bool) -> CxxCompileOutput: + """ + Construct a final compile command for a single CXX source based on + `src_compile_command` and other compilation options. 
+ """ + + identifier = src_compile_cmd.src.short_path + if src_compile_cmd.index != None: + # Add a unique postfix if we have duplicate source files with different flags + identifier = identifier + "_" + str(src_compile_cmd.index) + + filename_base = identifier + (".pic" if pic else "") + object = ctx.actions.declare_output( + "__objects__", + "{}.{}".format(filename_base, toolchain.linker_info.object_file_extension), + ) + + cmd = cmd_args(src_compile_cmd.cxx_compile_cmd.base_compile_cmd) + + compiler_type = src_compile_cmd.cxx_compile_cmd.compiler_type + cmd.add(get_output_flags(compiler_type, object)) + + args = cmd_args() + + if pic: + args.add(get_pic_flags(compiler_type)) + + args.add(src_compile_cmd.cxx_compile_cmd.argsfile.cmd_form) + args.add(src_compile_cmd.args) + + cmd.add(args) + cmd.add(bitcode_args) + + action_dep_files = {} + + headers_dep_files = src_compile_cmd.cxx_compile_cmd.headers_dep_files + if headers_dep_files: + dep_file = ctx.actions.declare_output( + paths.join("__dep_files__", filename_base), + ).as_output() + + processor_flags, compiler_flags = headers_dep_files.mk_flags(ctx.actions, filename_base, src_compile_cmd.src) + cmd.add(compiler_flags) + + # API: First argument is the dep file source path, second is the + # dep file destination path, other arguments are the actual compile + # command. + cmd = cmd_args([ + headers_dep_files.processor, + headers_dep_files.dep_tracking_mode.value, + processor_flags, + headers_dep_files.tag.tag_artifacts(dep_file), + cmd, + ]) + + action_dep_files["headers"] = headers_dep_files.tag + + if pic: + identifier += " (pic)" + + clang_remarks = None + if toolchain.clang_remarks and compiler_type == "clang": + args.add(["-fsave-optimization-record", "-fdiagnostics-show-hotness", "-foptimization-record-passes=" + toolchain.clang_remarks]) + clang_remarks = ctx.actions.declare_output( + paths.join("__objects__", "{}.opt.yaml".format(filename_base)), + ) + cmd.add(cmd_args(hidden = clang_remarks.as_output())) + + clang_trace = None + if toolchain.clang_trace and compiler_type == "clang": + args.add(["-ftime-trace"]) + clang_trace = ctx.actions.declare_output( + paths.join("__objects__", "{}.json".format(filename_base)), + ) + cmd.add(cmd_args(hidden = clang_trace.as_output())) + + gcno_file = None + if toolchain.gcno_files and src_compile_cmd.src.extension not in (".S", ".sx"): + args.add(["--coverage"]) + gcno_file = ctx.actions.declare_output( + paths.join("__objects__", "{}.gcno".format(filename_base)), + ) + cmd.hidden(gcno_file.as_output()) + ctx.actions.run( + cmd, + category = src_compile_cmd.cxx_compile_cmd.category, + identifier = identifier, + dep_files = action_dep_files, + allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, + allow_dep_file_cache_upload = False, + ) + + # If we're building with split debugging, where the debug info is in the + # original object, then add the object as external debug info + # FIXME: ThinLTO generates debug info in a separate dwo dir, but we still + # need to track object files if the object file is not compiled to bitcode. + # We should track whether ThinLTO is used on a per-object basis rather than + # globally on a toolchain level. + object_has_external_debug_info = ( + toolchain.split_debug_mode == SplitDebugMode("single") + ) + + # .S extension is native assembly code (machine level, processor specific) + # and clang will happily compile them to .o files, but the object are always + # native even if we ask for bitcode. 
If we don't mark the output format, + # other tools would try and parse the .o file as LLVM-IR and fail. + if src_compile_cmd.src.extension in [".S", ".s"]: + object_format = CxxObjectFormat("native") + else: + object_format = default_object_format + + return CxxCompileOutput( + object = object, + object_format = object_format, + object_has_external_debug_info = object_has_external_debug_info, + clang_remarks = clang_remarks, + clang_trace = clang_trace, + gcno_file = gcno_file, + ) + def compile_cxx( ctx: AnalysisContext, src_compile_cmds: list[CxxSrcCompileCommand], @@ -386,112 +521,32 @@ def compile_cxx( objects = [] for src_compile_cmd in src_compile_cmds: - identifier = src_compile_cmd.src.short_path - if src_compile_cmd.index != None: - # Add a unique postfix if we have duplicate source files with different flags - identifier = identifier + "_" + str(src_compile_cmd.index) - - filename_base = identifier + (".pic" if pic else "") - object = ctx.actions.declare_output( - "__objects__", - "{}.{}".format(filename_base, linker_info.object_file_extension), + cxx_compile_output = _compile_single_cxx( + ctx, + toolchain, + default_object_format, + bitcode_args, + src_compile_cmd, + pic, ) - - cmd = cmd_args(src_compile_cmd.cxx_compile_cmd.base_compile_cmd) - - compiler_type = src_compile_cmd.cxx_compile_cmd.compiler_type - cmd.add(get_output_flags(compiler_type, object)) - - args = cmd_args() - - if pic: - args.add(get_pic_flags(compiler_type)) - - args.add(src_compile_cmd.cxx_compile_cmd.argsfile.cmd_form) - args.add(src_compile_cmd.args) - - cmd.add(args) - cmd.add(bitcode_args) - - action_dep_files = {} - - headers_dep_files = src_compile_cmd.cxx_compile_cmd.headers_dep_files - if headers_dep_files: - dep_file = ctx.actions.declare_output( - paths.join("__dep_files__", filename_base), - ).as_output() - - processor_flags, compiler_flags = headers_dep_files.mk_flags(ctx.actions, filename_base, src_compile_cmd.src) - cmd.add(compiler_flags) - - # API: First argument is the dep file source path, second is the - # dep file destination path, other arguments are the actual compile - # command. - cmd = cmd_args([ - headers_dep_files.processor, - headers_dep_files.dep_tracking_mode.value, - processor_flags, - headers_dep_files.tag.tag_artifacts(dep_file), - cmd, - ]) - - action_dep_files["headers"] = headers_dep_files.tag - - if pic: - identifier += " (pic)" - - clang_remarks = None - if toolchain.clang_remarks and compiler_type == "clang": - args.add(["-fsave-optimization-record", "-fdiagnostics-show-hotness", "-foptimization-record-passes=" + toolchain.clang_remarks]) - clang_remarks = ctx.actions.declare_output( - paths.join("__objects__", "{}.opt.yaml".format(filename_base)), - ) - cmd.hidden(clang_remarks.as_output()) - - clang_trace = None - if toolchain.clang_trace and compiler_type == "clang": - args.add(["-ftime-trace"]) - clang_trace = ctx.actions.declare_output( - paths.join("__objects__", "{}.json".format(filename_base)), - ) - cmd.hidden(clang_trace.as_output()) - - ctx.actions.run( - cmd, - category = src_compile_cmd.cxx_compile_cmd.category, - identifier = identifier, - dep_files = action_dep_files, - ) - - # If we're building with split debugging, where the debug info is in the - # original object, then add the object as external debug info - # FIXME: ThinLTO generates debug info in a separate dwo dir, but we still - # need to track object files if the object file is not compiled to bitcode. 
- # We should track whether ThinLTO is used on a per-object basis rather than - # globally on a toolchain level. - object_has_external_debug_info = ( - toolchain.split_debug_mode == SplitDebugMode("single") - ) - - # .S extension is native assembly code (machine level, processor specific) - # and clang will happily compile them to .o files, but the object are always - # native even if we ask for bitcode. If we don't mark the output format, - # other tools would try and parse the .o file as LLVM-IR and fail. - if src_compile_cmd.src.extension in [".S", ".s"]: - object_format = CxxObjectFormat("native") - else: - object_format = default_object_format - - objects.append(CxxCompileOutput( - object = object, - object_format = object_format, - object_has_external_debug_info = object_has_external_debug_info, - clang_remarks = clang_remarks, - clang_trace = clang_trace, - )) + objects.append(cxx_compile_output) return objects +def cxx_objects_sub_targets(outs: list[CxxCompileOutput]) -> dict[str, list[Provider]]: + objects_sub_targets = {} + for obj in outs: + sub_targets = {} + if obj.clang_trace: + sub_targets["clang-trace"] = [DefaultInfo(obj.clang_trace)] + if obj.clang_remarks: + sub_targets["clang-remarks"] = [DefaultInfo(obj.clang_remarks)] + objects_sub_targets[obj.object.short_path] = [DefaultInfo( + obj.object, + sub_targets = sub_targets, + )] + return objects_sub_targets + def _validate_target_headers(ctx: AnalysisContext, preprocessor: list[CPreprocessor]): path_to_artifact = {} all_headers = flatten([x.headers for x in preprocessor]) @@ -506,11 +561,11 @@ def _validate_target_headers(ctx: AnalysisContext, preprocessor: list[CPreproces def _get_compiler_info(toolchain: CxxToolchainInfo, ext: CxxExtension) -> typing.Any: compiler_info = None - if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): + if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx", ".bc"): compiler_info = toolchain.cxx_compiler_info elif ext.value in (".c", ".m"): compiler_info = toolchain.c_compiler_info - elif ext.value in (".s", ".S"): + elif ext.value in (".s", ".sx", ".S"): compiler_info = toolchain.as_compiler_info elif ext.value == ".cu": compiler_info = toolchain.cuda_compiler_info @@ -536,12 +591,14 @@ def _get_category(ext: CxxExtension) -> str: return "objc_compile" if ext.value == ".mm": return "objcxx_compile" - elif ext.value in (".s", ".S", ".asm", ".asmpp"): + elif ext.value in (".s", ".sx", ".S", ".asm", ".asmpp"): return "asm_compile" elif ext.value == ".cu": return "cuda_compile" elif ext.value == ".hip": return "hip_compile" + elif ext.value == ".bc": + return "bitcode_compile" else: # This should be unreachable as long as we handle all enum values fail("Unknown extension: " + ext.value) @@ -557,111 +614,126 @@ def _get_compile_base(compiler_info: typing.Any) -> cmd_args: def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # Raw assembly doesn't make sense to capture dep files for. + # .S is preprocessed assembly, but some builds use it with + # assemblers that don't support -MF, so leave depfiles off. if ext.value in (".s", ".S", ".asm"): return None elif ext.value == ".hip": # TODO (T118797886): HipCompilerInfo doesn't have dep files processor. # Should it? 
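The new cxx_objects_sub_targets() helper above makes each compiled object addressable as a sub-target, with clang-trace and clang-remarks children when those outputs exist. A hypothetical wrapper showing how a rule could surface it; the "objects" sub-target name and the `lib` parameter are assumptions for illustration:

```
def _object_providers(outs: list[CxxCompileOutput], lib: Artifact) -> list[Provider]:
    # Expose per-object outputs under an "objects" sub-target.
    return [DefaultInfo(
        default_output = lib,
        sub_targets = {
            "objects": [DefaultInfo(sub_targets = cxx_objects_sub_targets(outs))],
        },
    )]
```

With toolchain.clang_trace set, something like `buck2 build //:lib[objects][foo.cpp.o][clang-trace]` would then materialize the -ftime-trace JSON for foo.cpp.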
return None + elif ext.value == ".bc": + # Bitcode doesn't have depfiles + return None - # Return the file type aswell + # Return the file type as well if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): return DepFileType("cpp") elif ext.value in (".c", ".m"): return DepFileType("c") elif ext.value == ".cu": return DepFileType("cuda") - elif ext.value in (".asmpp"): + elif ext.value in (".asmpp", ".sx"): return DepFileType("asm") else: # This should be unreachable as long as we handle all enum values fail("Unknown C++ extension: " + ext.value) -def _add_compiler_info_flags(ctx: AnalysisContext, compiler_info: typing.Any, ext: CxxExtension, cmd: cmd_args): - cmd.add(compiler_info.preprocessor_flags or []) - cmd.add(compiler_info.compiler_flags or []) - cmd.add(get_flags_for_reproducible_build(ctx, compiler_info.compiler_type)) +def _add_compiler_info_flags(ctx: AnalysisContext, compiler_info: typing.Any, ext: CxxExtension) -> list: + cmd = [] + cmd.append(compiler_info.preprocessor_flags or []) + cmd.append(compiler_info.compiler_flags or []) + cmd.append(get_flags_for_reproducible_build(ctx, compiler_info.compiler_type)) if ext.value not in (".asm", ".asmpp"): # Clang's asm compiler doesn't support colorful output, so we skip this there. - cmd.add(get_flags_for_colorful_output(compiler_info.compiler_type)) + cmd.append(get_flags_for_colorful_output(compiler_info.compiler_type)) + + return cmd def _mk_argsfile( ctx: AnalysisContext, + impl_params: CxxRuleConstructorParams, compiler_info: typing.Any, preprocessor: CPreprocessorInfo, ext: CxxExtension, headers_tag: ArtifactTag, - use_absolute_paths: bool) -> CompileArgsfile: + is_xcode_argsfile: bool) -> CompileArgsfile: """ Generate and return an {ext}.argsfile artifact and command args that utilize the argsfile. """ - args = cmd_args() + args_list = [] - _add_compiler_info_flags(ctx, compiler_info, ext, args) + args_list.append(_add_compiler_info_flags(ctx, compiler_info, ext)) - if use_absolute_paths: - args.add(preprocessor.set.project_as_args("abs_args")) - else: - args.add(headers_tag.tag_artifacts(preprocessor.set.project_as_args("args"))) + args_list.append(headers_tag.tag_artifacts(preprocessor.set.project_as_args("args"))) # Different preprocessors will contain whether to use modules, # and the modulemap to use, so we need to get the final outcome. if preprocessor.set.reduce("uses_modules"): - args.add(headers_tag.tag_artifacts(preprocessor.set.project_as_args("modular_args"))) + args_list.append(headers_tag.tag_artifacts(preprocessor.set.project_as_args("modular_args"))) - args.add(cxx_attr_preprocessor_flags(ctx, ext.value)) - args.add(get_flags_for_compiler_type(compiler_info.compiler_type)) - args.add(_attr_compiler_flags(ctx, ext.value)) - args.add(headers_tag.tag_artifacts(preprocessor.set.project_as_args("include_dirs"))) + args_list.append(_preprocessor_flags(ctx, impl_params, ext.value)) + args_list.append(get_flags_for_compiler_type(compiler_info.compiler_type)) + args_list.append(_compiler_flags(ctx, impl_params, ext.value)) + args_list.append(headers_tag.tag_artifacts(preprocessor.set.project_as_args("include_dirs"))) # Workaround: this isn't precompiled, but it works just like a prefix header. # Also, this is clang-specific and should be generalized.
- if ctx.attrs.precompiled_header != None: - args.add(["-include", headers_tag.tag_artifacts(ctx.attrs.precompiled_header[CPrecompiledHeaderInfo].header)]) - if ctx.attrs.prefix_header != None: - args.add(["-include", headers_tag.tag_artifacts(ctx.attrs.prefix_header)]) + if hasattr(ctx.attrs, "precompiled_header") and ctx.attrs.precompiled_header != None: + args_list.append(["-include", headers_tag.tag_artifacts(ctx.attrs.precompiled_header[CPrecompiledHeaderInfo].header)]) + if hasattr(ctx.attrs, "prefix_header") and ctx.attrs.prefix_header != None: + args_list.append(["-include", headers_tag.tag_artifacts(ctx.attrs.prefix_header)]) # Create a copy of the args so that we can continue to modify it later. - args_without_file_prefix_args = cmd_args(args) + args_without_file_prefix_args = cmd_args(args_list) # Put file_prefix_args in argsfile directly, make sure they do not appear when evaluating $(cxxppflags) # to avoid "argument too long" errors - if use_absolute_paths: - args.add(cmd_args(preprocessor.set.project_as_args("abs_file_prefix_args"))) - - # HACK: Replace Xcode clang incompatible flags with compatible ones. - # TODO: Refactor this to be a true Xcode argsfile generating flow. - for re, sub in _ABSOLUTE_ARGSFILE_SUBSTITUTIONS: - args.replace_regex(re, sub) + args_list.append(headers_tag.tag_artifacts(cmd_args(preprocessor.set.project_as_args("file_prefix_args")))) + + if is_xcode_argsfile: + replace_regex = [] + for re, sub in _XCODE_ARG_SUBSTITUTION: + replace_regex.append((re, sub)) + args = cmd_args(args_list, replace_regex = replace_regex) + file_args = args else: - args.add(headers_tag.tag_artifacts(cmd_args(preprocessor.set.project_as_args("file_prefix_args")))) + args = cmd_args(args_list) + file_args = cmd_args(args) if compiler_info.compiler_type == "nasm" else cmd_args(args, quote = "shell") - shell_quoted_args = cmd_args(args, quote = "shell") - - file_name = ext.value + ("-abs.argsfile" if use_absolute_paths else ".argsfile") - argsfile, _ = ctx.actions.write(file_name, shell_quoted_args, allow_args = True, absolute = use_absolute_paths) + file_name = ext.value + ("-xcode.argsfile" if is_xcode_argsfile else ".argsfile") + argsfile, _ = ctx.actions.write(file_name, file_args, allow_args = True) input_args = [args] - cmd_form = cmd_args(argsfile, format = "@{}").hidden(input_args) + format = "-@{}" if compiler_info.compiler_type == "nasm" else "@{}" + cmd_form = cmd_args(argsfile, format = format, hidden = input_args) return CompileArgsfile( file = argsfile, cmd_form = cmd_form, input_args = input_args, - args = shell_quoted_args, + args = file_args, args_without_file_prefix_args = args_without_file_prefix_args, ) -def _attr_compiler_flags(ctx: AnalysisContext, ext: str) -> list[typing.Any]: +def _compiler_flags(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ext: str) -> list[typing.Any]: return ( - cxx_by_language_ext(ctx.attrs.lang_compiler_flags, ext) + - flatten(cxx_by_platform(ctx, ctx.attrs.platform_compiler_flags)) + - flatten(cxx_by_platform(ctx, cxx_by_language_ext(ctx.attrs.lang_platform_compiler_flags, ext))) + + cxx_by_language_ext(impl_params.lang_compiler_flags, ext) + + flatten(cxx_by_platform(ctx, impl_params.platform_compiler_flags)) + + flatten(cxx_by_platform(ctx, cxx_by_language_ext(impl_params.lang_platform_compiler_flags, ext))) + # ctx.attrs.compiler_flags need to come last to preserve buck1 ordering, this prevents compiler # flags ordering-dependent build errors - ctx.attrs.compiler_flags + impl_params.compiler_flags + ) + +def 
_preprocessor_flags(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ext: str) -> list[typing.Any]: + return ( + impl_params.preprocessor_flags + + cxx_by_language_ext(impl_params.lang_preprocessor_flags, ext) + + flatten(cxx_by_platform(ctx, impl_params.platform_preprocessor_flags)) + + flatten(cxx_by_platform(ctx, cxx_by_language_ext(impl_params.lang_platform_preprocessor_flags, ext))) ) def _get_dep_tracking_mode(toolchain: Provider, file_type: DepFileType) -> DepTrackingMode: @@ -671,3 +743,45 @@ def _get_dep_tracking_mode(toolchain: Provider, file_type: DepFileType) -> DepTr return toolchain.cuda_dep_tracking_mode else: return DepTrackingMode("makefile") + +def _generate_base_compile_command( + ctx: AnalysisContext, + impl_params: CxxRuleConstructorParams, + pre: CPreprocessorInfo, + headers_tag: ArtifactTag, + ext: CxxExtension) -> _CxxCompileCommand: + """ + Generate a common part of a compile command that is shared by all sources + with a given extension. + """ + toolchain = get_cxx_toolchain_info(ctx) + compiler_info = _get_compiler_info(toolchain, ext) + base_compile_cmd = _get_compile_base(compiler_info) + category = _get_category(ext) + + headers_dep_files = None + dep_file_file_type_hint = _dep_file_type(ext) + if dep_file_file_type_hint != None and toolchain.use_dep_files: + tracking_mode = _get_dep_tracking_mode(toolchain, dep_file_file_type_hint) + mk_dep_files_flags = get_headers_dep_files_flags_factory(tracking_mode) + if mk_dep_files_flags: + headers_dep_files = _HeadersDepFiles( + processor = cmd_args(compiler_info.dep_files_processor), + mk_flags = mk_dep_files_flags, + tag = headers_tag, + dep_tracking_mode = tracking_mode, + ) + + argsfile = _mk_argsfile(ctx, impl_params, compiler_info, pre, ext, headers_tag, False) + xcode_argsfile = _mk_argsfile(ctx, impl_params, compiler_info, pre, ext, headers_tag, True) + + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs, default = compiler_info.allow_cache_upload) + return _CxxCompileCommand( + base_compile_cmd = base_compile_cmd, + argsfile = argsfile, + xcode_argsfile = xcode_argsfile, + headers_dep_files = headers_dep_files, + compiler_type = compiler_info.compiler_type, + category = category, + allow_cache_upload = allow_cache_upload, + ) diff --git a/prelude/cxx/compiler.bzl b/prelude/cxx/compiler.bzl index 37967f4e2d..cad51cf3a1 100644 --- a/prelude/cxx/compiler.bzl +++ b/prelude/cxx/compiler.bzl @@ -58,7 +58,9 @@ def tree_style_cc_dep_files( _actions: AnalysisActions, _filename_base: str, input_file: Artifact) -> (cmd_args, cmd_args): - return (cmd_args(input_file), cmd_args(["-H"])) + # If we use color diagnostics, then error messages come through in color, which messes up parsing of the + # -H output in `show_headers_to_dep_file.py`. So make sure to pass -fno-color-diagnostics. 
+ return (cmd_args(input_file), cmd_args(["-H", "-fno-color-diagnostics"])) def windows_cc_dep_files( _actions: AnalysisActions, diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 1bd3780341..22763f6337 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -11,11 +11,12 @@ load( "merge_android_packageable_info", ) load("@prelude//apple:resource_groups.bzl", "create_resource_graph") +load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( - "@prelude//apple:xcode.bzl", - "get_project_root_file", + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type ) -load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -25,13 +26,13 @@ load( "@prelude//linking:link_info.bzl", "Archive", "ArchiveLinkable", + "CxxSanitizerRuntimeInfo", "LibOutputStyle", "LinkArgs", "LinkCommandDebugOutputInfo", "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "create_merged_link_info", @@ -54,12 +55,21 @@ load( "@prelude//linking:linkables.bzl", "linkables", ) -load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", "merge_shared_libraries") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraries", + "SharedLibraryInfo", + "create_shlib_from_ctx", + "extract_soname_from_shlib", + "merge_shared_libraries", + "to_soname", +) load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") load( "@prelude//tests:re_utils.bzl", - "get_re_executor_from_props", + "get_re_executors_from_props", ) load("@prelude//utils:expect.bzl", "expect") load( @@ -92,8 +102,8 @@ load( "CxxRuleSubTargetParams", ) load( - ":groups.bzl", - "Group", # @unused Used as a type + ":groups_types.bzl", + "Group", "MATCH_ALL_LABEL", "NO_MATCH_LABEL", ) @@ -108,7 +118,6 @@ load( ) load( ":link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", "get_link_group_info", ) @@ -186,6 +195,14 @@ def cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: output_style_sub_targets_and_providers_factory = _get_shared_link_style_sub_targets_and_providers, generate_sub_targets = sub_target_params, generate_providers = provider_params, + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) output = cxx_library_parameterized(ctx, params) return output.providers @@ -235,14 +252,24 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_group_info = link_group_info, auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info), prefer_stripped_objects = ctx.attrs.prefer_stripped_objects, - exe_allow_cache_upload = ctx.attrs.allow_cache_upload, + exe_allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), extra_link_roots = linkables(ctx.attrs.link_group_deps), + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = 
ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) output = cxx_executable(ctx, params) extra_providers = [] if output.link_command_debug_output: extra_providers.append(LinkCommandDebugOutputInfo(debug_outputs = [output.link_command_debug_output])) + if output.sanitizer_runtime_files: + extra_providers.append(CxxSanitizerRuntimeInfo(runtime_files = output.sanitizer_runtime_files)) # When an executable is the output of a build, also materialize all the # unpacked external debuginfo that goes with it. This makes `buck2 build @@ -268,7 +295,7 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: other_outputs = other_outputs, sub_targets = output.sub_targets, ), - RunInfo(args = cmd_args(output.binary).hidden(output.runtime_files)), + RunInfo(args = cmd_args(output.binary, hidden = output.runtime_files)), output.compilation_db, output.xcode_data, output.dist_info, @@ -368,18 +395,23 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs.soname != None: soname = get_shared_library_name_for_param(linker_info, ctx.attrs.soname) + elif shared_lib != None and ctx.attrs.extract_soname: + soname = extract_soname_from_shlib( + actions = ctx.actions, + name = "__soname__.txt", + shared_lib = shared_lib, + ) else: soname = get_shared_library_name(linker_info, ctx.label.name, apply_default_prefix = True) + soname = to_soname(soname) # Use ctx.attrs.deps instead of cxx_attr_deps, since prebuilt rules don't have platform_deps. first_order_deps = ctx.attrs.deps exported_first_order_deps = cxx_attr_exported_deps(ctx) - project_root_file = get_project_root_file(ctx) - # Exported preprocessor info. inherited_pp_infos = cxx_inherited_preprocessor_infos(exported_first_order_deps) - generic_exported_pre = cxx_exported_preprocessor_info(ctx, cxx_get_regular_cxx_headers_layout(ctx), project_root_file, []) + generic_exported_pre = cxx_exported_preprocessor_info(ctx, cxx_get_regular_cxx_headers_layout(ctx), []) args = [] compiler_type = get_cxx_toolchain_info(ctx).cxx_compiler_info.compiler_type if header_dirs != None: @@ -387,7 +419,7 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: args.append(format_system_include_arg(cmd_args(x), compiler_type)) exported_items = [generic_exported_pre] if args: - exported_items.append(CPreprocessor(relative_args = CPreprocessorArgs(args = args))) + exported_items.append(CPreprocessor(args = CPreprocessorArgs(args = args))) providers.append(cxx_merge_cpreprocessors( ctx, exported_items, @@ -402,7 +434,7 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # Gather link infos, outputs, and shared libs for effective link style. 
outputs = {} libraries = {} - solibs = {} + solibs = [] sub_targets = {} for output_style in get_output_styles_for_linkage(preferred_linkage): out = None @@ -445,8 +477,8 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: shlink_args.extend(get_link_whole_args(linker_type, [lib])) link_result = cxx_link_shared_library( ctx = ctx, - output = soname, - name = soname, + output = soname.ensure_str(), + name = soname.ensure_str(), opts = link_options( links = [ LinkArgs(flags = shlink_args), @@ -487,12 +519,18 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # Provided means something external to the build will provide # the libraries, so we don't need to propagate anything. if not ctx.attrs.provided: - solibs[soname] = shared_lib + solibs.append( + create_shlib_from_ctx( + ctx = ctx, + lib = shared_lib, + soname = soname, + ), + ) # Provide a sub-target that always provides the shared lib # using the soname. - if soname and shared_lib.output.basename != paths.basename(soname): - soname_lib = ctx.actions.copy_file(soname, shared_lib.output) + if soname and soname.is_str() and shared_lib.output.basename != paths.basename(soname.ensure_str()): + soname_lib = ctx.actions.copy_file(soname.ensure_str(), shared_lib.output) else: soname_lib = shared_lib.output sub_targets["soname-lib"] = [DefaultInfo(default_output = soname_lib)] @@ -535,11 +573,6 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets = sub_targets, )) - # TODO(cjhopman): This is preserving existing behavior, but it doesn't make sense. These lists can be passed - unmerged to create_merged_link_info below. Potentially that could change link order, so needs to be done more carefully. - merged_inherited_link = create_merged_link_info_for_propagation(ctx, inherited_link) - merged_inherited_exported_link = create_merged_link_info_for_propagation(ctx, inherited_exported_link) - # Propagate link info provider. providers.append(create_merged_link_info( ctx, @@ -548,25 +581,27 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: libraries, preferred_linkage = preferred_linkage, # Export link info from non-exported deps (when necessary). - deps = [merged_inherited_link], + deps = inherited_link, # Export link info from our (exported) deps. - exported_deps = [merged_inherited_exported_link], + exported_deps = inherited_exported_link, )) # Propagate shared libraries up the tree. + shared_libs = SharedLibraries(libraries = solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in exported_first_order_deps]), )) # Omnibus root provider.
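prebuilt_cxx_library_impl now accumulates structured shared-library records in a list rather than a {soname: artifact} dict. A hedged sketch of the new propagation flow, assuming the shared_libraries.bzl helpers behave as used above; the soname string and the `deps_infos` parameter are placeholders:

```
def _propagate_shlibs(
        ctx: AnalysisContext,
        shared_lib: LinkedObject,
        deps_infos: list[SharedLibraryInfo]) -> Provider:
    # to_soname() wraps the plain string previously used as a dict key into
    # the structured soname value that create_shlib_from_ctx() expects.
    soname = to_soname("libexample.so")  # placeholder name
    shared_libs = SharedLibraries(libraries = [
        create_shlib_from_ctx(ctx = ctx, lib = shared_lib, soname = soname),
    ])
    return merge_shared_libraries(ctx.actions, shared_libs, deps_infos)
```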
- if LibOutputStyle("pic_archive") in libraries and (static_pic_lib or static_lib) and not ctx.attrs.header_only: + if LibOutputStyle("pic_archive") in libraries and (static_pic_lib or static_lib) and not ctx.attrs.header_only and soname.is_str(): # TODO(cjhopman): This doesn't support thin archives linkable_root = create_linkable_root( - name = soname, + label = ctx.label, + name = soname.ensure_str(), link_infos = LinkInfos(default = LinkInfo( - name = soname, + name = soname.ensure_str(), pre_flags = ( linker_flags.exported_flags + linker_flags.flags @@ -594,13 +629,14 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx, linkable_node = create_linkable_node( ctx = ctx, - default_soname = soname, + default_soname = soname.as_str(), preferred_linkage = preferred_linkage, + default_link_strategy = to_link_strategy(cxx_toolchain.linker_info.link_style), exported_deps = exported_first_order_deps, # If we don't have link input for this link style, we pass in `None` so # that omnibus knows to avoid it. link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), @@ -652,13 +688,21 @@ def cxx_test_impl(ctx: AnalysisContext) -> list[Provider]: auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info), prefer_stripped_objects = ctx.attrs.prefer_stripped_objects, extra_link_roots = linkables(ctx.attrs.link_group_deps), + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) output = cxx_executable(ctx, params, is_cxx_test = True) - command = [cmd_args(output.binary).hidden(output.runtime_files)] + ctx.attrs.args + command = [cmd_args(output.binary, hidden = output.runtime_files)] + ctx.attrs.args - # Setup a RE executor based on the `remote_execution` param. - re_executor = get_re_executor_from_props(ctx) + # Setup RE executors based on the `remote_execution` param. + re_executor, executor_overrides = get_re_executors_from_props(ctx) return inject_test_run_info( ctx, @@ -669,6 +713,7 @@ def cxx_test_impl(ctx: AnalysisContext) -> list[Provider]: labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, default_executor = re_executor, + executor_overrides = executor_overrides, # We implicitly make this test run via the project root, instead of # the cell root (e.g. fbcode root).
run_from_project_root = ( @@ -678,9 +723,14 @@ def cxx_test_impl(ctx: AnalysisContext) -> list[Provider]: use_project_relative_paths = re_executor != None, ), ) + [ - DefaultInfo(default_output = output.binary, other_outputs = output.runtime_files, sub_targets = output.sub_targets), + DefaultInfo( + default_output = output.binary, + other_outputs = output.runtime_files + output.external_debug_info_artifacts, + sub_targets = output.sub_targets, + ), output.compilation_db, output.xcode_data, + output.dist_info, ] def _get_params_for_android_binary_cxx_library() -> (CxxRuleSubTargetParams, CxxRuleProviderParams): diff --git a/prelude/cxx/cxx_bolt.bzl b/prelude/cxx/cxx_bolt.bzl index 271ab5e804..8b5d2bb82a 100644 --- a/prelude/cxx/cxx_bolt.bzl +++ b/prelude/cxx/cxx_bolt.bzl @@ -8,30 +8,37 @@ # BOLT (Binary Optimization Layout Tool) is a post-link profile-guided optimizer used for # performance-critical services in fbcode: https://www.internalfb.com/intern/wiki/HHVM-BOLT/ +load( + "@prelude//:artifact_tset.bzl", + "ArtifactTSet", + "project_artifacts", +) load(":cxx_context.bzl", "get_cxx_toolchain_info") def cxx_use_bolt(ctx: AnalysisContext) -> bool: cxx_toolchain_info = get_cxx_toolchain_info(ctx) return cxx_toolchain_info.bolt_enabled and ctx.attrs.bolt_profile != None -def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None]) -> Artifact: +def bolt(ctx: AnalysisContext, prebolt_output: Artifact, external_debug_info: ArtifactTSet, identifier: [str, None]) -> Artifact: output_name = prebolt_output.short_path.removesuffix("-wrapper") postbolt_output = ctx.actions.declare_output(output_name) bolt_msdk = get_cxx_toolchain_info(ctx).binary_utilities_info.bolt_msdk if not bolt_msdk or not cxx_use_bolt(ctx): fail("Cannot use bolt if bolt_msdk is not available or bolt profile is not available") - args = cmd_args() + + materialized_external_debug_info = project_artifacts(ctx.actions, [external_debug_info]) # bolt command format: # {llvm_bolt} {input_bin} -o $OUT -data={fdata} {args} - args.add( + args = cmd_args( cmd_args(bolt_msdk, format = "{}/bin/llvm-bolt"), prebolt_output, "-o", postbolt_output.as_output(), cmd_args(ctx.attrs.bolt_profile, format = "-data={}"), ctx.attrs.bolt_flags, + hidden = materialized_external_debug_info, ) ctx.actions.run( @@ -41,4 +48,24 @@ def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None] local_only = get_cxx_toolchain_info(ctx).linker_info.link_binaries_locally, ) - return postbolt_output + output = postbolt_output + + if hasattr(ctx.attrs, "strip_stapsdt") and ctx.attrs.strip_stapsdt: + stripped_postbolt_output = ctx.actions.declare_output(output_name + "-nostapsdt") + ctx.actions.run( + # We --rename-section instead of --remove-section because objcopy's processing + # of --remove-section results in an invalid ELF file + cmd_args([ + get_cxx_toolchain_info(ctx).binary_utilities_info.objcopy, + "--rename-section", + ".stapsdt.base=.deleted_stapsdt_base_section", + postbolt_output, + stripped_postbolt_output.as_output(), + ]), + category = "bolt_strip_stapsdt", + identifier = identifier, + local_only = get_cxx_toolchain_info(ctx).linker_info.link_binaries_locally, + ) + output = stripped_postbolt_output + + return output diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index bf99f13464..bae20782f9 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -23,20 +23,22 @@ load( "apple_create_frameworks_linkable", "apple_get_link_info_by_deduping_link_infos", ) -load( - 
"@prelude//apple:xcode.bzl", - "get_project_root_file", -) load( "@prelude//cxx:cxx_bolt.bzl", "cxx_use_bolt", ) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupsDebugLinkInfo", + "LinkGroupsDebugLinkableItem", +) load( "@prelude//dist:dist_info.bzl", "DistInfo", ) load( "@prelude//ide_integrations:xcode.bzl", + "XCODE_ARGSFILES_SUB_TARGET", "XCODE_DATA_SUB_TARGET", "XcodeDataInfo", "generate_xcode_data", @@ -69,6 +71,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type "merge_shared_libraries", "traverse_shared_library_info", ) @@ -81,7 +84,6 @@ load( ) load( ":argsfiles.bzl", - "ABS_ARGSFILES_SUBTARGET", "ARGSFILES_SUBTARGET", "get_argsfiles_output", ) @@ -95,6 +97,7 @@ load( ":compile.bzl", "compile_cxx", "create_compile_cmds", + "cxx_objects_sub_targets", ) load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") load( @@ -105,7 +108,6 @@ load( "cxx_attr_linker_flags", "cxx_attr_resources", "cxx_is_gnu", - "cxx_objects_sub_targets", ) load( ":cxx_link_utility.bzl", @@ -115,6 +117,7 @@ load( ":cxx_types.bzl", "CxxRuleConstructorParams", # @unused Used as a type ) +load(":groups.bzl", "get_dedupped_roots_from_groups") load( ":link.bzl", "CxxLinkerMapData", @@ -126,6 +129,7 @@ load( "LINK_GROUP_MAPPINGS_SUB_TARGET", "LINK_GROUP_MAP_DATABASE_SUB_TARGET", "LinkGroupContext", + "create_debug_linkable_entries", "create_link_groups", "find_relevant_roots", "get_filtered_labels_to_links_map", @@ -134,6 +138,7 @@ load( "get_link_group", "get_link_group_map_json", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", "get_transitive_deps_matching_labels", "is_link_group_shlib", ) @@ -160,11 +165,11 @@ load( CxxExecutableOutput = record( binary = Artifact, unstripped_binary = Artifact, - bitcode_bundle = field([Artifact, None], None), - dwp = field([Artifact, None]), + bitcode_bundle = field(Artifact | None, None), + dwp = field(Artifact | None), # Files that must be present for the executable to run successfully. These # are always materialized, whether the executable is the output of a build - # or executed as a host tool. They become .hidden() arguments when executing + # or executed as a host tool. They become hidden= arguments when executing # the executable via RunInfo(). runtime_files = list[ArgLike], sub_targets = dict[str, list[DefaultInfo]], @@ -176,7 +181,7 @@ CxxExecutableOutput = record( # materialized when this executable is the output of a build, not when it is # used by other rules. They become other_outputs on DefaultInfo. external_debug_info_artifacts = list[TransitiveSetArgsProjection], - shared_libs = dict[str, LinkedObject], + shared_libs = list[SharedLibrary], # All link group links that were generated in the executable. auto_link_groups = field(dict[str, LinkedObject], {}), compilation_db = CxxCompilationDbInfo, @@ -184,17 +189,15 @@ CxxExecutableOutput = record( linker_map_data = [CxxLinkerMapData, None], link_command_debug_output = field([LinkCommandDebugOutput, None], None), dist_info = DistInfo, + sanitizer_runtime_files = field(list[Artifact], []), ) def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, is_cxx_test: bool = False) -> CxxExecutableOutput: - project_root_file = get_project_root_file(ctx) - # Gather preprocessor inputs. 
preprocessor_deps = cxx_attr_deps(ctx) + filter(None, [ctx.attrs.precompiled_header]) (own_preprocessor_info, test_preprocessor_infos) = cxx_private_preprocessor_info( ctx, impl_params.headers_layout, - project_root_file = project_root_file, raw_headers = ctx.attrs.raw_headers, extra_preprocessors = impl_params.extra_preprocessors, non_exported_deps = preprocessor_deps, @@ -216,8 +219,8 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ) cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = link_strategy != LinkStrategy("static")) - sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.relative, "argsfiles")] - sub_targets[ABS_ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.absolute, "abs-argsfiles")] + sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.relative, ARGSFILES_SUBTARGET)] + sub_targets[XCODE_ARGSFILES_SUB_TARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.xcode, XCODE_ARGSFILES_SUB_TARGET)] sub_targets[OBJECTS_SUBTARGET] = [DefaultInfo(sub_targets = cxx_objects_sub_targets(cxx_outs))] # Compilation DB. @@ -311,6 +314,14 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # If we're using auto-link-groups, where we generate the link group links # in the prelude, the link group map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from our deps. + + public_link_group_nodes = get_public_link_group_nodes( linkable_graph_node_map, link_group_mappings, exec_dep_roots + link_group_extra_link_roots, link_group, ) + link_group_libs_debug_info = {} if impl_params.auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, @@ -320,12 +331,14 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, executable_deps = exec_dep_roots, linker_flags = own_link_flags, link_group_specs = impl_params.auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = link_group_extra_link_roots, prefer_stripped_objects = impl_params.prefer_stripped_objects, anonymous = ctx.attrs.anonymous_link_groups, + allow_cache_upload = impl_params.exe_allow_cache_upload, + public_nodes = public_link_group_nodes, ) + link_group_libs_debug_info = linked_link_groups.libs_debug_info for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact if linked_link_group.library != None: @@ -346,6 +359,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # scenarios for which we need to propagate up link info and simplify this logic. For now # we base which links to use on whether link groups are defined.
labels_to_links_map = get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, @@ -371,10 +385,24 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, force_static_follows_dependents = impl_params.link_groups_force_static_follows_dependents, ) + link_groups_debug_info = LinkGroupsDebugLinkInfo( + binary = LinkGroupsDebugLinkableItem( + ordered_linkables = create_debug_linkable_entries(labels_to_links_map), + ), + libs = link_group_libs_debug_info, + ) + sub_targets["link-groups-info"] = [DefaultInfo( + default_output = ctx.actions.write_json( + ctx.label.name + ".link-groups-info.json", + link_groups_debug_info, + ), + )] + if is_cxx_test and link_group != None: # if a cpp_unittest is part of the link group, we need to traverse through all deps # from the root again to ensure we link in gtest deps labels_to_links_map = labels_to_links_map | get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, None, link_groups, @@ -396,12 +424,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, public_nodes = get_transitive_deps_matching_labels( linkable_graph_node_map = linkable_graph_node_map, label = ctx.attrs.link_group_public_deps_label, - roots = [ - mapping.root - for group in link_group_info.groups.values() - for mapping in group.mappings - if mapping.root != None - ], + roots = get_dedupped_roots_from_groups(link_group_info.groups.values()), ) filtered_links = get_filtered_links(labels_to_links_map, set(public_nodes)) @@ -420,11 +443,10 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] # Set up shared libraries symlink tree only when needed - shared_libs = {} + shared_libs = [] # Add in extra, rule-specific shared libs. - for name, shlib in impl_params.extra_shared_libs.items(): - shared_libs[name] = shlib.lib + shared_libs.extend(impl_params.extra_shared_libs) # Only setup a shared library symlink tree when shared linkage or link_groups is used gnu_use_link_groups = cxx_is_gnu(ctx) and link_group_mappings @@ -444,17 +466,15 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, labels_to_links_map = labels_to_links_map, ) - def shlib_filter(_name, shared_lib): - return not gnu_use_link_groups or is_link_group_shlib(shared_lib.label, link_group_ctx) - - for name, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[name] = shared_lib.lib + for shlib in traverse_shared_library_info(shlib_info): + if not gnu_use_link_groups or is_link_group_shlib(shlib.label, link_group_ctx): + shared_libs.append(shlib) if gnu_use_link_groups: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + shared_libs.extend(link_group_lib.shared_libs.libraries) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info @@ -485,14 +505,14 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # hidden link args. 
if impl_params.extra_hidden: links.append( - LinkArgs(flags = cmd_args().hidden(impl_params.extra_hidden)), + LinkArgs(flags = cmd_args(hidden = impl_params.extra_hidden)), ) link_result = _link_into_executable( ctx, # If shlib lib tree generation is enabled, pass in the shared libs (which # will trigger the necessary link tree and link args). - shared_libs if impl_params.exe_shared_libs_link_tree else {}, + shared_libs if impl_params.exe_shared_libs_link_tree else [], impl_params.executable_name, linker_info.binary_extension, link_options( @@ -519,7 +539,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, output = binary.output, populate_rule_specific_attributes_func = impl_params.cxx_populate_xcode_attributes_func, srcs = impl_params.srcs + impl_params.additional.srcs, - argsfiles = compile_cmd_output.argsfiles.absolute, + argsfiles = compile_cmd_output.argsfiles.xcode, product_name = get_cxx_executable_product_name(ctx), ) sub_targets[XCODE_DATA_SUB_TARGET] = xcode_data_default_info @@ -532,31 +552,48 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets["rpath-tree"] = [DefaultInfo( default_output = shared_libs_symlink_tree, other_outputs = [ - lib.output - for lib in shared_libs.values() + shlib.lib.output + for shlib in shared_libs ] + [ - lib.dwp - for lib in shared_libs.values() - if lib.dwp + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp ], )] + + # TODO(agallagher) There appear to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. + str_soname_shlibs = { + shlib.soname.ensure_str(): shlib + for shlib in shared_libs + if shlib.soname.is_str() + } sub_targets["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( binary.output.basename + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], + "libraries": [ + "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) + for soname in str_soname_shlibs + ], + "librariesdwp": [ + "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) + for soname, shlib in str_soname_shlibs.items() + if shlib.lib.dwp + ], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], }, ), sub_targets = { - name: [DefaultInfo( - default_output = lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, + soname: [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for name, lib in shared_libs.items() + for soname, shlib in str_soname_shlibs.items() }, )] + if link_group_mappings: readable_mappings = {} for node, group in link_group_mappings.items(): @@ -642,7 +679,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, actions = ctx.actions, children = ( [binary.external_debug_info] + - [s.external_debug_info for s in shared_libs.values()] + + [s.lib.external_debug_info for s in shared_libs] + impl_params.additional.static_external_debug_info ), ) @@ -655,6 +692,14 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, 
sub_targets["debuginfo"] = [DefaultInfo( default_output = materialize_external_debug_info, )] + sub_targets["debug_coverage_instrumentation"] = [DefaultInfo( + default_output = materialize_external_debug_info, + )] + + sub_targets["exe"] = [DefaultInfo( + default_output = binary.output, + other_outputs = runtime_files, + )] for additional_subtarget, subtarget_providers in impl_params.additional.subtargets.items(): sub_targets[additional_subtarget] = subtarget_providers @@ -678,6 +723,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, shared_libs = shlib_info.set, nondebug_runtime_files = runtime_files, ), + sanitizer_runtime_files = link_result.sanitizer_runtime_files, ) _CxxLinkExecutableResult = record( @@ -694,11 +740,12 @@ _CxxLinkExecutableResult = record( # Optional shared libs symlink tree symlinked_dir action shared_libs_symlink_tree = [list[Artifact], Artifact, None], linker_map_data = [CxxLinkerMapData, None], + sanitizer_runtime_files = list[Artifact], ) def _link_into_executable( ctx: AnalysisContext, - shared_libs: dict[str, LinkedObject], + shared_libs: list[SharedLibrary], executable_name: [str, None], binary_extension: str, opts: LinkOptions) -> _CxxLinkExecutableResult: @@ -709,7 +756,7 @@ def _link_into_executable( output_name = "{}{}".format(executable_name if executable_name else get_cxx_executable_product_name(ctx), "." + binary_extension if binary_extension else "") output = ctx.actions.declare_output(output_name) executable_args = executable_shared_lib_arguments( - ctx.actions, + ctx, get_cxx_toolchain_info(ctx), output, shared_libs, @@ -727,10 +774,11 @@ def _link_into_executable( return _CxxLinkExecutableResult( exe = link_result.linked_object, - runtime_files = executable_args.runtime_files, + runtime_files = executable_args.runtime_files + link_result.sanitizer_runtime_files, external_debug_info = executable_args.external_debug_info, shared_libs_symlink_tree = executable_args.shared_libs_symlink_tree, linker_map_data = link_result.linker_map_data, + sanitizer_runtime_files = link_result.sanitizer_runtime_files, ) def get_cxx_executable_product_name(ctx: AnalysisContext) -> str: diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index bab9ae429c..e5843293fa 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -27,16 +27,14 @@ load( "apple_get_link_info_by_deduping_link_infos", ) load("@prelude//apple:resource_groups.bzl", "create_resource_graph") -load( - "@prelude//apple:xcode.bzl", - "get_project_root_file", -) load( "@prelude//apple/swift:swift_runtime.bzl", "create_swift_runtime_linkable", ) +load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers") load( "@prelude//ide_integrations:xcode.bzl", + "XCODE_ARGSFILES_SUB_TARGET", "XCODE_DATA_SUB_TARGET", "XcodeDataInfo", "generate_xcode_data", @@ -64,7 +62,6 @@ load( "LinkInfos", "LinkOrdering", "LinkStrategy", - "Linkage", "LinkedObject", # @unused Used as a type "ObjectsLinkable", "SharedLibLinkable", @@ -72,7 +69,6 @@ load( "SwiftmoduleLinkable", # @unused Used as a type "UnstrippedLinkOutputInfo", "create_merged_link_info", - "create_merged_link_info_for_propagation", "get_lib_output_style", "get_link_args_for_strategy", "get_output_styles_for_linkage", @@ -96,6 +92,7 @@ load( ) load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", "merge_shared_libraries") load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") 
load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") @@ -112,7 +109,6 @@ load( load(":archive.bzl", "make_archive") load( ":argsfiles.bzl", - "ABS_ARGSFILES_SUBTARGET", "ARGSFILES_SUBTARGET", "get_argsfiles_output", ) @@ -120,6 +116,7 @@ load(":bitcode.bzl", "BitcodeBundle", "BitcodeBundleInfo", "BitcodeTSet", "make_ load( ":comp_db.bzl", "CxxCompilationDbInfo", + "GcnoFilesInfo", "create_compilation_database", "make_compilation_db_info", ) @@ -129,6 +126,7 @@ load( "CxxCompileOutput", # @unused Used as a type "compile_cxx", "create_compile_cmds", + "cxx_objects_sub_targets", ) load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") load( @@ -136,17 +134,18 @@ load( "OBJECTS_SUBTARGET", "cxx_attr_deps", "cxx_attr_exported_deps", + "cxx_attr_link_strategy", "cxx_attr_link_style", "cxx_attr_linker_flags_all", "cxx_attr_preferred_linkage", "cxx_attr_resources", "cxx_inherited_link_info", "cxx_is_gnu", - "cxx_objects_sub_targets", "cxx_platform_supported", "cxx_use_shlib_intfs", + "cxx_use_shlib_intfs_mode", ) -load(":cxx_toolchain_types.bzl", "is_bitcode_format") +load(":cxx_toolchain_types.bzl", "ShlibInterfacesMode", "is_bitcode_format") load( ":cxx_types.bzl", "CxxRuleConstructorParams", # @unused Used as a type @@ -195,6 +194,11 @@ load( ) load( ":shared_library_interface.bzl", + "SharedInterfaceInfo", # @unused Used as a type + "create_shared_interface_info", + "create_shared_interface_info_with_children", + "generate_exported_symbols", + "generate_tbd_with_symbols", "shared_library_interface", ) @@ -224,13 +228,13 @@ CxxLibraryOutput = record( # its corresponding DWARF debug info. # May be None when Split DWARF is disabled, for static/static-pic libraries, # for some types of synthetic link objects or for pre-built shared libraries. - dwp = field([Artifact, None], None), + dwp = field(Artifact | None, None), # A shared shared library may have an associated PDB file with # its corresponding Windows debug info. - pdb = field([Artifact, None], None), + pdb = field(Artifact | None, None), # The import library is the linkable output of a Windows shared library build. - implib = field([Artifact, None], None), + implib = field(Artifact | None, None), # Data about the linker map, only available on shared libraries # TODO(cjhopman): always available? when is it/is it not available? linker_map = field([CxxLinkerMapData, None], None), @@ -261,6 +265,7 @@ _CxxAllLibraryOutputs = record( providers = field(list[Provider], default = []), # Shared object name to shared library mapping if this target produces a shared library. solib = field([(str, LinkedObject), None]), + sanitizer_runtime_files = field(list[Artifact], []), ) _CxxLibraryCompileOutput = record( @@ -272,6 +277,7 @@ _CxxLibraryCompileOutput = record( bitcode_objects = field([list[Artifact], None]), # yaml file with optimization remarks about clang compilation clang_remarks = field([list[Artifact], None]), + gcno_files = field([list[Artifact], None]), # json file with trace information about clang compilation clang_traces = field(list[Artifact]), # Externally referenced debug info, which doesn't get linked with the @@ -317,6 +323,8 @@ _CxxLibraryParameterizedOutput = record( cxx_compilationdb_info = field([CxxCompilationDbInfo, None], None), # LinkableRootInfo provider, same as above. 
linkable_root = field([LinkableRootInfo, None], None), + # List of shared libraries for the sanitizer runtime linked into the library + sanitizer_runtime_files = field(list[Artifact], []), ) def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams) -> _CxxLibraryParameterizedOutput: @@ -344,8 +352,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # TODO(T110378095) right now we implement reexport of exported_* flags manually, we should improve/automate that in the macro layer - project_root_file = get_project_root_file(ctx) - # Gather preprocessor inputs. (own_non_exported_preprocessor_info, test_preprocessor_infos) = cxx_private_preprocessor_info( ctx = ctx, @@ -353,9 +359,8 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc extra_preprocessors = impl_params.extra_preprocessors, non_exported_deps = non_exported_deps, is_test = impl_params.is_test, - project_root_file = project_root_file, ) - own_exported_preprocessor_info = cxx_exported_preprocessor_info(ctx, impl_params.headers_layout, project_root_file, impl_params.extra_exported_preprocessors) + own_exported_preprocessor_info = cxx_exported_preprocessor_info(ctx, impl_params.headers_layout, impl_params.extra_exported_preprocessors) own_preprocessors = [own_non_exported_preprocessor_info, own_exported_preprocessor_info] + test_preprocessor_infos inherited_non_exported_preprocessor_infos = cxx_inherited_preprocessor_infos( @@ -386,8 +391,8 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc ) if impl_params.generate_sub_targets.argsfiles: - sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.relative, "argsfiles")] - sub_targets[ABS_ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.absolute, "abs-argsfiles")] + sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.relative, ARGSFILES_SUBTARGET)] + sub_targets[XCODE_ARGSFILES_SUB_TARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.xcode, XCODE_ARGSFILES_SUB_TARGET)] if impl_params.generate_sub_targets.clang_remarks: if compiled_srcs.non_pic and compiled_srcs.non_pic.clang_remarks: @@ -432,6 +437,24 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc comp_db_info = make_compilation_db_info(compiled_srcs.compile_cmds.comp_db_compile_cmds, get_cxx_toolchain_info(ctx), get_cxx_platform_info(ctx)) providers.append(comp_db_info) + # Shared library interfaces are partial lists of exported symbols that are merged at link time. 
+ exported_symbol_outputs = impl_params.extra_shared_library_interfaces if impl_params.extra_shared_library_interfaces else [] + if impl_params.shared_library_interface_target and \ + cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): + transitive_pp = inherited_exported_preprocessor_infos + if _attr_reexport_all_header_dependencies(ctx): + transitive_pp += inherited_non_exported_preprocessor_infos + + cxx_exported_symbols = generate_exported_symbols( + ctx, + cxx_attr_exported_headers(ctx, impl_params.headers_layout), + own_exported_preprocessor_info, + transitive_pp, + impl_params.shared_library_interface_target, + ) + exported_symbol_outputs.append(cxx_exported_symbols) + sub_targets["exported-symbols"] = [DefaultInfo(default_outputs = exported_symbol_outputs)] + # Link Groups link_group = get_link_group(ctx) link_group_info = get_link_group_info(ctx) @@ -461,7 +484,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc frameworks_linkable = apple_create_frameworks_linkable(ctx) swiftmodule_linkable = impl_params.swiftmodule_linkable swift_runtime_linkable = create_swift_runtime_linkable(ctx) - dep_infos, link_group_map, link_execution_preference = _get_shared_library_links( + dep_infos, link_group_map, link_execution_preference, shared_interface_info = _get_shared_library_links( ctx, get_linkable_graph_node_map_func(deps_linkable_graph), link_group, @@ -475,6 +498,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc swiftmodule_linkable, force_static_follows_dependents = impl_params.link_groups_force_static_follows_dependents, swift_runtime_linkable = swift_runtime_linkable, + exported_symbol_outputs = exported_symbol_outputs, ) if impl_params.generate_sub_targets.link_group_map and link_group_map: sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [link_group_map] @@ -496,8 +520,10 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc extra_static_linkables = extra_static_linkables, gnu_use_link_groups = cxx_is_gnu(ctx) and bool(link_group_mappings), link_execution_preference = link_execution_preference, + shared_interface_info = shared_interface_info, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} + shared_libs = create_shared_libraries(ctx, solib_as_dict) for _, link_style_output in library_outputs.outputs.items(): for key in link_style_output.sub_targets.keys(): @@ -530,6 +556,11 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # Add any subtargets for this output style. output_style_sub_targets.update(output.sub_targets) + # TBD outputs are collected for each link unit, so propagate whenever + # a library is being linked statically. 
+ if output_style != LibOutputStyle("shared_lib") and shared_interface_info != None: + output_style_providers.append(shared_interface_info) + if impl_params.generate_sub_targets.link_style_outputs: if output: sub_targets[subtarget_for_output_style(output_style)] = [DefaultInfo( @@ -568,7 +599,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc output = default_output.default if default_output else None, populate_rule_specific_attributes_func = impl_params.cxx_populate_xcode_attributes_func, srcs = impl_params.srcs + impl_params.additional.srcs, - argsfiles = compiled_srcs.compile_cmds.argsfiles.absolute, + argsfiles = compiled_srcs.compile_cmds.argsfiles.xcode, product_name = get_default_cxx_library_product_name(ctx, impl_params), ) sub_targets[XCODE_DATA_SUB_TARGET] = xcode_data_default_info @@ -580,15 +611,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc inherited_non_exported_link = cxx_inherited_link_info(non_exported_deps) inherited_exported_link = cxx_inherited_link_info(exported_deps) - # TODO(cjhopman): This is strange that we construct this intermediate MergedLinkInfo rather than just - # passing the full list of deps below, but I'm keeping it to preserve existing behavior with a refactor. - # I intend to change completely how MergedLinkInfo works, so this should go away then. We cannot just - # pass these to create_merged_link_info because the for_propagation one is used to filter out deps for - # individual link strategies where that dep doesn't provide a linkinfo (which may itself be a bug, but not - # sure). - inherited_non_exported_link = create_merged_link_info_for_propagation(ctx, inherited_non_exported_link) - inherited_exported_link = create_merged_link_info_for_propagation(ctx, inherited_exported_link) - merged_native_link_info = create_merged_link_info( ctx, pic_behavior, @@ -596,9 +618,9 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc library_outputs.link_infos, preferred_linkage = preferred_linkage, # Export link info from non-exported deps (when necessary). - deps = [inherited_non_exported_link], + deps = inherited_non_exported_link, # Export link info from our (exported) deps. 
- exported_deps = [inherited_exported_link], + exported_deps = inherited_exported_link, frameworks_linkable = frameworks_linkable, swiftmodule_linkable = swiftmodule_linkable, swift_runtime_linkable = swift_runtime_linkable, @@ -614,7 +636,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc if impl_params.generate_providers.shared_libraries: providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solib_as_dict), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in non_exported_deps]) + filter(None, [x.get(SharedLibraryInfo) for x in exported_deps]), )) @@ -668,6 +690,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc soname = None linker_type = get_cxx_toolchain_info(ctx).linker_info.type linkable_root = create_linkable_root( + label = ctx.label, name = soname, link_infos = LinkInfos( default = LinkInfo( @@ -716,15 +739,18 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc ctx = ctx, default_soname = _soname(ctx, impl_params), preferred_linkage = preferred_linkage, + default_link_strategy = cxx_attr_link_strategy(ctx.attrs), deps = non_exported_deps, exported_deps = exported_deps, # If we don't have link input for this link style, we pass in `None` so # that omnibus knows to avoid it. include_in_android_mergemap = getattr(ctx.attrs, "include_in_android_merge_map_output", True) and default_output != None, link_infos = library_outputs.link_infos, - shared_libs = solib_as_dict, + shared_libs = shared_libs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, + # We don't want to propagate shared interfaces across shared library boundaries. + shared_interface_info = None if preferred_linkage == Linkage("shared") else create_shared_interface_info(ctx, exported_symbol_outputs, []), ), excluded = {ctx.label: None} if not value_or(ctx.attrs.supports_merged_linking, True) else {}, ), @@ -757,26 +783,28 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # Some rules, e.g. fbcode//thrift/lib/cpp:thrift-core-module # define preprocessor flags as things like: -DTHRIFT_PLATFORM_CONFIG= # and unless they get quoted, they break shell syntax. 
- cxx_preprocessor_flags = cmd_args() cxx_compiler_info = get_cxx_toolchain_info(ctx).cxx_compiler_info - cxx_preprocessor_flags.add(cmd_args(cxx_compiler_info.preprocessor_flags or [], quote = "shell")) - cxx_preprocessor_flags.add(cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell")) - cxx_preprocessor_flags.add(propagated_preprocessor.set.project_as_args("include_dirs")) + cxx_preprocessor_flags = cmd_args( + cmd_args(cxx_compiler_info.preprocessor_flags or [], quote = "shell"), + cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"), + propagated_preprocessor.set.project_as_args("include_dirs"), + ) templ_vars["cxxppflags"] = cxx_preprocessor_flags - c_preprocessor_flags = cmd_args() c_compiler_info = get_cxx_toolchain_info(ctx).c_compiler_info - c_preprocessor_flags.add(cmd_args(c_compiler_info.preprocessor_flags or [], quote = "shell")) - c_preprocessor_flags.add(cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell")) - c_preprocessor_flags.add(propagated_preprocessor.set.project_as_args("include_dirs")) + c_preprocessor_flags = cmd_args( + cmd_args(c_compiler_info.preprocessor_flags or [], quote = "shell"), + cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"), + propagated_preprocessor.set.project_as_args("include_dirs"), + ) templ_vars["cppflags"] = c_preprocessor_flags # Add in ldflag macros. for link_strategy in (LinkStrategy("static"), LinkStrategy("static_pic")): name = "ldflags-" + link_strategy.value.replace("_", "-") - args = cmd_args() + args = [] linker_info = get_cxx_toolchain_info(ctx).linker_info - args.add(linker_info.linker_flags or []) + args.append(linker_info.linker_flags or []) # Normally, we call get_link_args_for_strategy for getting the args for our own link from our # deps. This case is a bit different as we are effectively trying to get the args for how this library @@ -786,8 +814,8 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc [merged_native_link_info], link_strategy, ) - args.add(unpack_link_args(link_args)) - templ_vars[name] = args + args.append(unpack_link_args(link_args)) + templ_vars[name] = cmd_args(args) # TODO(T110378127): To implement `$(ldflags-shared ...)` properly, we'd need # to set up a symlink tree rule for all transitive shared libs.
Since this @@ -822,6 +850,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc pass default_output = unknown() + default_info = DefaultInfo( default_output = default_output.default if default_output != None else None, other_outputs = default_output.other if default_output != None else [], @@ -836,7 +865,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc merge_link_group_lib_info( label = ctx.label, name = link_group, - shared_libs = solib_as_dict, + shared_libs = shared_libs, shared_link_infos = library_outputs.link_infos.get(LibOutputStyle("shared_lib")), deps = exported_deps + non_exported_deps, ), @@ -851,6 +880,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc xcode_data_info = xcode_data_info, cxx_compilationdb_info = comp_db_info, linkable_root = linkable_root, + sanitizer_runtime_files = library_outputs.sanitizer_runtime_files, ) def get_default_cxx_library_product_name(ctx, impl_params) -> str: @@ -889,6 +919,7 @@ def _get_library_compile_output(ctx, outs: list[CxxCompileOutput], extra_link_in bitcode_objects = bitcode_objects, clang_traces = [out.clang_trace for out in outs if out.clang_trace != None], clang_remarks = [out.clang_remarks for out in outs if out.clang_remarks != None], + gcno_files = [out.gcno_file for out in outs if out.gcno_file != None], external_debug_info = [out.external_debug_info for out in outs if out.external_debug_info != None], objects_have_external_debug_info = lazy.is_any(lambda out: out.object_has_external_debug_info, outs), objects_sub_targets = objects_sub_targets, @@ -936,12 +967,15 @@ def _form_library_outputs( dep_infos: LinkArgs, extra_static_linkables: list[[FrameworksLinkable, SwiftmoduleLinkable, SwiftRuntimeLinkable]], gnu_use_link_groups: bool, - link_execution_preference: LinkExecutionPreference) -> _CxxAllLibraryOutputs: + link_execution_preference: LinkExecutionPreference, + shared_interface_info: [SharedInterfaceInfo, None]) -> _CxxAllLibraryOutputs: # Build static/shared libs and the link info we use to export them to dependents. 
outputs = {} solib = None link_infos = {} providers = [] + sanitizer_runtime_files = [] + gcno_files = [] linker_flags = cxx_attr_linker_flags_all(ctx) @@ -969,6 +1003,8 @@ def _form_library_outputs( if not lib_compile_output: fail("output_style {} requires non_pic compiled srcs, but didn't have any in {}".format(output_style, compiled_srcs)) + gcno_files += lib_compile_output.gcno_files + # Only generate an archive if we have objects to include if lib_compile_output.objects: output, info = _static_library( @@ -1019,17 +1055,21 @@ def _form_library_outputs( children = impl_params.additional.shared_external_debug_info, ) + gcno_files += compiled_srcs.pic.gcno_files + extra_linker_flags, extra_linker_outputs = impl_params.extra_linker_outputs_factory(ctx) + result = _shared_library( - ctx, - impl_params, - compiled_srcs.pic.objects, - external_debug_info, - dep_infos, - gnu_use_link_groups, + ctx = ctx, + impl_params = impl_params, + objects = compiled_srcs.pic.objects, + external_debug_info = external_debug_info, + dep_infos = dep_infos, + gnu_use_link_groups = gnu_use_link_groups, extra_linker_flags = extra_linker_flags, link_ordering = map_val(LinkOrdering, ctx.attrs.link_ordering), link_execution_preference = link_execution_preference, + shared_interface_info = shared_interface_info, ) shlib = result.link_result.linked_object info = result.info @@ -1070,6 +1110,12 @@ def _form_library_outputs( providers.append(result.link_result.link_execution_preference_info) + link_sanitizer_runtime_files = result.link_result.sanitizer_runtime_files + if link_sanitizer_runtime_files: + if sanitizer_runtime_files: + fail("Cannot specify sanitizer runtime files multiple times") + sanitizer_runtime_files = link_sanitizer_runtime_files + # you cannot link against header only libraries so create an empty link info info = info if info != None else LinkInfo() if output: @@ -1079,11 +1125,22 @@ def _form_library_outputs( stripped = ldflags(stripped) if stripped != None else None, ) + if get_cxx_toolchain_info(ctx).gcno_files: + deps_gcno_files = [ + x[GcnoFilesInfo].gcno_files + for x in ctx.attrs.deps + ctx.attrs.exported_deps + if GcnoFilesInfo in x + ] + providers.append(GcnoFilesInfo( + gcno_files = dedupe(flatten(deps_gcno_files) + gcno_files), + )) + return _CxxAllLibraryOutputs( outputs = outputs, link_infos = link_infos, providers = providers, solib = solib, + sanitizer_runtime_files = sanitizer_runtime_files, ) def _strip_objects(ctx: AnalysisContext, objects: list[Artifact]) -> list[Artifact]: @@ -1091,11 +1148,18 @@ def _strip_objects(ctx: AnalysisContext, objects: list[Artifact]) -> list[Artifa Return new objects with debug info stripped. """ + cxx_toolchain_info = get_cxx_toolchain_info(ctx) + # Stripping is not supported on Windows - linker_type = get_cxx_toolchain_info(ctx).linker_info.type + linker_type = cxx_toolchain_info.linker_info.type if linker_type == "windows": return objects + # Disable stripping if no `strip` binary was provided by the toolchain. 
+ if cxx_toolchain_info.binary_utilities_info == None or \ + cxx_toolchain_info.binary_utilities_info.strip == None: + return objects + outs = [] for obj in objects: @@ -1117,8 +1181,9 @@ def _get_shared_library_links( force_link_group_linking, frameworks_linkable: [FrameworksLinkable, None], swiftmodule_linkable: [SwiftmoduleLinkable, None], + exported_symbol_outputs: list[Artifact], force_static_follows_dependents: bool = True, - swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> (LinkArgs, [DefaultInfo, None], LinkExecutionPreference): + swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> (LinkArgs, [DefaultInfo, None], LinkExecutionPreference, [SharedInterfaceInfo, None]): """ Returns LinkArgs with the content to link, and a link group map json output if applicable. @@ -1133,21 +1198,10 @@ def _get_shared_library_links( # If we're not filtering for link groups, link against the shared dependencies if not link_group_mappings and not force_link_group_linking: - deps_merged_link_infos = cxx_inherited_link_info(dedupe(flatten([non_exported_deps, exported_deps]))) - - # Even though we're returning the shared library links, we must still - # respect the `link_style` attribute of the target which controls how - # all deps get linked. For example, you could be building the shared - # output of a library which has `link_style = "static"`. - # - # The fallback equivalent code in Buck v1 is in CxxLibraryFactor::createBuildRule() - # where link style is determined using the `linkableDepType` variable. - link_strategy_value = ctx.attrs.link_style if ctx.attrs.link_style != None else "shared" - - # Note if `static` link style is requested, we assume `static_pic` - # instead, so that code in the shared library can be correctly - # loaded in the address space of any process at any address. - link_strategy_value = "static_pic" if link_strategy_value == "static" else link_strategy_value + deps = dedupe(flatten([non_exported_deps, exported_deps])) + deps_merged_link_infos = cxx_inherited_link_info(deps) + + link_strategy = cxx_attr_link_strategy(ctx.attrs) # We cannot support deriving link execution preference off the included links, as we've already # lost the information on what is in the link. @@ -1155,6 +1209,10 @@ def _get_shared_library_links( # Not all rules calling `cxx_library_parameterized` have `link_execution_preference`. Notably `cxx_python_extension`. link_execution_preference = get_link_execution_preference(ctx, []) if hasattr(ctx.attrs, "link_execution_preference") else LinkExecutionPreference("any") + # Collect the shared interface providers for this link unit and strategy. + # These are merged when linking shared library output. + shared_interface_info = create_shared_interface_info(ctx, exported_symbol_outputs, deps) + return apple_build_link_args_with_deduped_flags( ctx, deps_merged_link_infos, @@ -1163,10 +1221,10 @@ def _get_shared_library_links( # To get the link_strategy, we have to check the link_strategy against the toolchain's pic_behavior. # # For more info, check the PicBehavior docs. 
- process_link_strategy_for_pic_behavior(LinkStrategy(link_strategy_value), pic_behavior), + process_link_strategy_for_pic_behavior(link_strategy, pic_behavior), swiftmodule_linkable, swift_runtime_linkable = swift_runtime_linkable, - ), None, link_execution_preference + ), None, link_execution_preference, shared_interface_info # Else get filtered link group links prefer_stripped = cxx_is_gnu(ctx) and ctx.attrs.prefer_stripped_objects @@ -1176,8 +1234,11 @@ if link_strategy == LinkStrategy("static"): link_strategy = LinkStrategy("static_pic") link_strategy = process_link_strategy_for_pic_behavior(link_strategy, pic_behavior) + linkable_graph_label_to_node_map = linkable_graph_node_map_func() + filtered_labels_to_links_map = get_filtered_labels_to_links_map( - linkable_graph_node_map_func(), + None, + linkable_graph_label_to_node_map, link_group, {}, link_group_mappings, @@ -1203,7 +1264,18 @@ if additional_links: filtered_links.append(additional_links) - return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference + # Collect the interface providers from the targets in this link group; these will + # be merged when linking shared library output. If this library has no + # interface output then interface generation is disabled and we can skip collection. + shared_interface_infos = [] + if len(exported_symbol_outputs) > 0: + for label in filtered_labels_to_links_map.keys(): + linkable_node = linkable_graph_label_to_node_map[label] + if linkable_node.shared_interface_info != None: + shared_interface_infos.append(linkable_node.shared_interface_info) + + shared_interface_info = create_shared_interface_info_with_children(ctx, exported_symbol_outputs, shared_interface_infos) + return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference, shared_interface_info def _use_pic(output_style: LibOutputStyle) -> bool: """ @@ -1241,9 +1313,10 @@ def _static_library( # If we have extra hidden deps of this target, add them to the archive action # so they are forced to build for static library output. - archive_args = cmd_args(objects) - if impl_params.extra_hidden: - archive_args.hidden(impl_params.extra_hidden) + archive_args = cmd_args( + objects, + hidden = impl_params.extra_hidden or [], + ) archive = make_archive(ctx, name, objects, archive_args) @@ -1341,7 +1414,7 @@ _CxxSharedLibraryResult = record( link_result = CxxLinkResult, # Shared library name (e.g. SONAME) soname = str, - objects_bitcode_bundle = [Artifact, None], + objects_bitcode_bundle = Artifact | None, # `LinkInfo` used to link against the shared library. info = LinkInfo, ) @@ -1355,7 +1428,8 @@ def _shared_library( gnu_use_link_groups: bool, extra_linker_flags: list[ArgLike], link_execution_preference: LinkExecutionPreference, - link_ordering: [LinkOrdering, None] = None) -> _CxxSharedLibraryResult: + link_ordering: [LinkOrdering, None], + shared_interface_info: [SharedInterfaceInfo, None]) -> _CxxSharedLibraryResult: """ Generate a shared library and the associated native link info used by dependents to link against it. 
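# (Editorial sketch, not part of the patch: the inline link_style handling
# deleted above now lives in cxx_attr_link_strategy, added to
# cxx_library_utility.bzl further down. Assuming only the attrs shape used
# here, the mapping it performs is roughly the following; the function name
# below is hypothetical.)
def _link_strategy_sketch(attrs):
    # Default to linking against shared deps when no link_style is set.
    value = attrs.link_style if attrs.link_style != None else "shared"

    # Promote `static` to `static_pic` so code in the resulting shared
    # library can be loaded at any address in any process.
    if value == "static":
        value = "static_pic"
    return LinkStrategy(value)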
@@ -1400,13 +1474,14 @@ def _shared_library( links = [LinkArgs(infos = [link_info]), dep_infos] if impl_params.extra_hidden: links.append( - LinkArgs(flags = cmd_args().hidden(impl_params.extra_hidden)), + LinkArgs(flags = cmd_args(hidden = impl_params.extra_hidden)), ) link_result = cxx_link_shared_library( ctx = ctx, output = soname, opts = link_options( + enable_distributed_thinlto = getattr(ctx.attrs, "enable_distributed_thinlto", False), links = links, identifier = soname, link_ordering = link_ordering, @@ -1422,8 +1497,26 @@ def _shared_library( # If shared library interfaces are enabled, link that and use it as # the shared lib that dependents will link against. if cxx_use_shlib_intfs(ctx): - if not linker_info.produce_interface_from_stub_shared_library: - shlib_for_interface = exported_shlib + mode = get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces + if mode == ShlibInterfacesMode("stub_from_library"): + # Generate a library interface from the linked library output. + # This will prevent relinking rdeps when changes do not affect + # the library symbols. + exported_shlib = shared_library_interface( + ctx = ctx, + shared_lib = exported_shlib, + ) + elif mode == ShlibInterfacesMode("stub_from_headers"): + # Generate a library interface from its deps exported_headers. + # This will allow for linker parallelisation as we do not have + # to wait for dependent libraries to link. + # If the provider is missing this is a non apple_library target, + # so skip producing the interface. + if shared_interface_info != None and impl_params.shared_library_interface_target != None: + # collect the linker args which are required + # to correctly set symbol visibility. + link_args = [unpack_link_args(link) for link in links] + exported_shlib = generate_tbd_with_symbols(ctx, soname, shared_interface_info.interfaces, link_args, impl_params.shared_library_interface_target) elif not gnu_use_link_groups: # TODO(agallagher): There's a bug in shlib intfs interacting with link # groups, where we don't include the symbols we're meant to export from @@ -1455,19 +1548,11 @@ def _shared_library( ), name = soname, ) - shlib_for_interface = intf_link_result.linked_object.output - else: - shlib_for_interface = None - - if shlib_for_interface: - # Convert the shared library into an interface. - shlib_interface = shared_library_interface( + exported_shlib = shared_library_interface( ctx = ctx, - shared_lib = shlib_for_interface, + shared_lib = intf_link_result.linked_object.output, ) - exported_shlib = shlib_interface - # Link against import library on Windows. 
if link_result.linked_object.import_library: exported_shlib = link_result.linked_object.import_library diff --git a/prelude/cxx/cxx_library_utility.bzl b/prelude/cxx/cxx_library_utility.bzl index 97738c1e7b..a84a5615ab 100644 --- a/prelude/cxx/cxx_library_utility.bzl +++ b/prelude/cxx/cxx_library_utility.bzl @@ -13,20 +13,17 @@ load( load("@prelude//:paths.bzl", "paths") load( "@prelude//linking:link_info.bzl", + "LinkStrategy", "LinkStyle", - "Linkage", "LinkerFlags", "MergedLinkInfo", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:utils.bzl", "flatten", "from_named_set", ) -load( - ":compile.bzl", - "CxxCompileOutput", # @unused Used as a type -) load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") load(":cxx_toolchain_types.bzl", "ShlibInterfacesMode") load( @@ -92,6 +89,23 @@ def cxx_attr_linker_flags(ctx: AnalysisContext) -> list[typing.Any]: (flatten(cxx_by_platform(ctx, ctx.attrs.platform_linker_flags)) if hasattr(ctx.attrs, "platform_linker_flags") else []) ) +# Even though we're returning the shared library links, we must still +# respect the `link_style` attribute of the target which controls how +# all deps get linked. For example, you could be building the shared +# output of a library which has `link_style = "static"`. +# +# The fallback equivalent code in Buck v1 is in CxxLibraryFactory::createBuildRule() +# where link style is determined using the `linkableDepType` variable. + +# Note if `static` link style is requested, we assume `static_pic` +# instead, so that code in the shared library can be correctly +# loaded in the address space of any process at any address. +def cxx_attr_link_strategy(attrs: typing.Any) -> LinkStrategy: + value = attrs.link_style if attrs.link_style != None else "shared" + if value == "static": + value = "static_pic" + return LinkStrategy(value) + def cxx_attr_link_style(ctx: AnalysisContext) -> LinkStyle: if ctx.attrs.link_style != None: return LinkStyle(ctx.attrs.link_style) @@ -143,6 +157,12 @@ def cxx_use_shlib_intfs(ctx: AnalysisContext) -> bool: linker_info = get_cxx_toolchain_info(ctx).linker_info return linker_info.shlib_interfaces != ShlibInterfacesMode("disabled") +def cxx_use_shlib_intfs_mode(ctx: AnalysisContext, mode: ShlibInterfacesMode) -> bool: + """ + Verify we are using a specific shared library interface mode. 
+ """ + return cxx_use_shlib_intfs(ctx) and get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces == mode + def cxx_platform_supported(ctx: AnalysisContext) -> bool: """ Return whether this rule's `supported_platforms_regex` matches the current @@ -156,17 +176,3 @@ def cxx_platform_supported(ctx: AnalysisContext) -> bool: ctx.attrs.supported_platforms_regex, get_cxx_platform_info(ctx).name, ) - -def cxx_objects_sub_targets(outs: list[CxxCompileOutput]) -> dict[str, list[Provider]]: - objects_sub_targets = {} - for obj in outs: - sub_targets = {} - if obj.clang_trace: - sub_targets["clang-trace"] = [DefaultInfo(obj.clang_trace)] - if obj.clang_remarks: - sub_targets["clang-remarks"] = [DefaultInfo(obj.clang_remarks)] - objects_sub_targets[obj.object.short_path] = [DefaultInfo( - obj.object, - sub_targets = sub_targets, - )] - return objects_sub_targets diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 77ab0424d1..ae8a97d30e 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -14,11 +14,16 @@ load( "@prelude//linking:link_info.bzl", "LinkArgs", "LinkOrdering", # @unused Used as a type - "LinkedObject", # @unused Used as a type "unpack_link_args", + "unpack_link_args_excluding_filelist", "unpack_link_args_filelist", ) load("@prelude//linking:lto.bzl", "LtoMode") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type + "create_shlib_symlink_tree", +) load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type def generates_split_debug(toolchain: CxxToolchainInfo): @@ -57,13 +62,13 @@ def linker_map_args(toolchain: CxxToolchainInfo, linker_map) -> LinkArgs: LinkArgsOutput = record( link_args = ArgLike, hidden = list[typing.Any], - pdb_artifact = [Artifact, None], + pdb_artifact = Artifact | None, # The filelist artifact which contains the list of all object files. # Only present for Darwin linkers. Note that object files referenced # _inside_ the filelist are _not_ part of the `hidden` field above. # That's by design - we do not want to materialise _all_ object files # to inspect the filelist. Intended to be used for debugging. - filelist = [Artifact, None], + filelist = Artifact | None, ) def make_link_args( @@ -72,7 +77,6 @@ def make_link_args( links: list[LinkArgs], suffix = None, output_short_path: [str, None] = None, - is_shared: [bool, None] = None, link_ordering: [LinkOrdering, None] = None) -> LinkArgsOutput: """ Merges LinkArgs. Returns the args, files that must be present for those @@ -115,23 +119,26 @@ def make_link_args( pdb_artifact = actions.declare_output(pdb_filename) hidden.append(pdb_artifact.as_output()) + filelists = None + if linker_type == "darwin": + filelists = filter(None, [unpack_link_args_filelist(link) for link in links]) + hidden.extend(filelists) + for link in links: - args.add(unpack_link_args(link, is_shared, link_ordering = link_ordering)) + if filelists: + # If we are using a filelist, only add argument that aren't already in the + # filelist. This is to avoid duplicate inputs in the link command. + args.add(unpack_link_args_excluding_filelist(link, link_ordering = link_ordering)) + else: + args.add(unpack_link_args(link, link_ordering = link_ordering)) - filelists = filter(None, [unpack_link_args_filelist(link) for link in links]) - hidden.extend(filelists) + # On Darwin, filelist args _must_ come last as the order can affect symbol + # resolution and result in binary size increases. 
filelist_file = None if filelists: - if linker_type == "gnu": - fail("filelist populated for gnu linker") - elif linker_type == "darwin": - # On Darwin, filelist args _must_ come last as there's semantical difference - # of the position. - path = actions.write("filelist%s.txt" % suffix, filelists) - args.add(["-Xlinker", "-filelist", "-Xlinker", path]) - filelist_file = path - else: - fail("Linker type {} not supported".format(linker_type)) + path = actions.write("filelist%s.txt" % suffix, filelists) + args.add(cmd_args(["-Xlinker", "-filelist", "-Xlinker", path])) + filelist_file = path return LinkArgsOutput( link_args = args, @@ -157,11 +164,63 @@ ExecutableSharedLibArguments = record( shared_libs_symlink_tree = field(list[Artifact] | Artifact | None, None), ) +CxxSanitizerRuntimeArguments = record( + extra_link_args = field(list[ArgLike], []), + sanitizer_runtime_files = field(list[Artifact], []), +) + +# @executable_path/Frameworks + +def cxx_sanitizer_runtime_arguments( + ctx: AnalysisContext, + cxx_toolchain: CxxToolchainInfo, + output: Artifact) -> CxxSanitizerRuntimeArguments: + linker_info = cxx_toolchain.linker_info + target_sanitizer_runtime_enabled = ctx.attrs.sanitizer_runtime_enabled if hasattr(ctx.attrs, "sanitizer_runtime_enabled") else None + sanitizer_runtime_enabled = target_sanitizer_runtime_enabled if target_sanitizer_runtime_enabled != None else linker_info.sanitizer_runtime_enabled + if not sanitizer_runtime_enabled: + return CxxSanitizerRuntimeArguments() + + if not linker_info.sanitizer_runtime_files: + fail("C++ sanitizer runtime enabled but there are no runtime files") + + if linker_info.type == "darwin": + # ignore_artifacts as the runtime directory is not required at _link_ time + runtime_rpath = cmd_args(ignore_artifacts = True) + runtime_files = linker_info.sanitizer_runtime_files + for runtime_shared_lib in runtime_files: + # Rpath-relative dylibs have an install name of `@rpath/libName.dylib`, + # which means we need to add the parent dir of the dylib as an rpath. + runtime_shared_lib_dir = cmd_args(runtime_shared_lib, parent = 1) + + # The parent dir of the runtime shared lib must appear as a path + # relative to the parent dir of the binary. `@executable_path` + # represents the parent dir of the binary, not the binary itself. + runtime_shared_lib_rpath = cmd_args(runtime_shared_lib_dir, format = "-Wl,-rpath,@executable_path/{}").relative_to(output, parent = 1) + runtime_rpath.add(runtime_shared_lib_rpath) + + return CxxSanitizerRuntimeArguments( + extra_link_args = [ + runtime_rpath, + # Add rpaths in case the binary gets bundled and the app bundle is expected to be standalone. + # Not all transitive callers have `CxxPlatformInfo`, so just add both iOS and macOS rpaths. + # There are no downsides to having both, except dyld would check in both locations (and it won't + # find anything for the non-current platform).
+ "-Wl,-rpath,@loader_path/Frameworks", # iOS + "-Wl,-rpath,@executable_path/Frameworks", # iOS + "-Wl,-rpath,@loader_path/../Frameworks", # macOS + "-Wl,-rpath,@executable_path/../Frameworks", # macOS + ], + sanitizer_runtime_files = runtime_files, + ) + + return CxxSanitizerRuntimeArguments() + def executable_shared_lib_arguments( - actions: AnalysisActions, + ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact, - shared_libs: dict[str, LinkedObject]) -> ExecutableSharedLibArguments: + shared_libs: list[SharedLibrary]) -> ExecutableSharedLibArguments: extra_link_args = [] runtime_files = [] shared_libs_symlink_tree = None @@ -169,31 +228,32 @@ def executable_shared_lib_arguments( # External debug info is materialized only when the executable is the output # of a build. Do not add to runtime_files. external_debug_info = project_artifacts( - actions = actions, - tsets = [shlib.external_debug_info for shlib in shared_libs.values()], + actions = ctx.actions, + tsets = [shlib.lib.external_debug_info for shlib in shared_libs], ) linker_type = cxx_toolchain.linker_info.type if len(shared_libs) > 0: if linker_type == "windows": - shared_libs_symlink_tree = [actions.symlink_file( - shlib.output.basename, - shlib.output, - ) for _, shlib in shared_libs.items()] + shared_libs_symlink_tree = [ctx.actions.symlink_file( + shlib.lib.output.basename, + shlib.lib.output, + ) for shlib in shared_libs] runtime_files.extend(shared_libs_symlink_tree) # Windows doesn't support rpath. else: - shared_libs_symlink_tree = actions.symlinked_dir( - shared_libs_symlink_tree_name(output), - {name: shlib.output for name, shlib in shared_libs.items()}, + shared_libs_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = shared_libs_symlink_tree_name(output), + shared_libs = shared_libs, ) runtime_files.append(shared_libs_symlink_tree) rpath_reference = get_rpath_origin(linker_type) - # We ignore_artifacts() here since we don't want the symlink tree to actually be there for the link. - rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference)).relative_to(output, parent = 1).ignore_artifacts() + # We ignore_artifacts here since we don't want the symlink tree to actually be there for the link. 
+ rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference), ignore_artifacts = True).relative_to(output, parent = 1) extra_link_args.append(rpath_arg) return ExecutableSharedLibArguments( @@ -203,20 +263,35 @@ def executable_shared_lib_arguments( shared_libs_symlink_tree = shared_libs_symlink_tree, ) -def cxx_link_cmd_parts(toolchain: CxxToolchainInfo) -> ((RunInfo | cmd_args), cmd_args): +LinkCmdParts = record( + linker = [RunInfo, cmd_args], + linker_flags = cmd_args, + post_linker_flags = cmd_args, + # linker + linker_flags, for convenience + link_cmd = cmd_args, +) + +def cxx_link_cmd_parts(toolchain: CxxToolchainInfo) -> LinkCmdParts: # `toolchain_linker_flags` can either be a list of strings, `cmd_args` or `None`, # so we need to do a bit more work to satisfy the type checker toolchain_linker_flags = toolchain.linker_info.linker_flags + toolchain_post_linker_flags = toolchain.linker_info.post_linker_flags if toolchain_linker_flags == None: toolchain_linker_flags = cmd_args() elif not type(toolchain_linker_flags) == "cmd_args": toolchain_linker_flags = cmd_args(toolchain_linker_flags) - return toolchain.linker_info.linker, toolchain_linker_flags + if toolchain_post_linker_flags == None: + toolchain_post_linker_flags = cmd_args() + elif not type(toolchain_post_linker_flags) == "cmd_args": + toolchain_post_linker_flags = cmd_args(toolchain_post_linker_flags) -# The command line for linking with C++ -def cxx_link_cmd(toolchain: CxxToolchainInfo) -> cmd_args: - linker, toolchain_linker_flags = cxx_link_cmd_parts(toolchain) - command = cmd_args(linker) - command.add(toolchain_linker_flags) - return command + link_cmd = cmd_args(toolchain.linker_info.linker) + link_cmd.add(toolchain_linker_flags) + + return LinkCmdParts( + linker = toolchain.linker_info.linker, + linker_flags = toolchain_linker_flags, + post_linker_flags = toolchain_post_linker_flags, + link_cmd = link_cmd, + ) diff --git a/prelude/cxx/cxx_sources.bzl b/prelude/cxx/cxx_sources.bzl index ece339680f..ffbc433b31 100644 --- a/prelude/cxx/cxx_sources.bzl +++ b/prelude/cxx/cxx_sources.bzl @@ -9,15 +9,22 @@ load( "@prelude//utils:utils.bzl", "flatten", ) -load( - ":compile.bzl", - "CxxSrcWithFlags", -) load(":platform.bzl", "cxx_by_platform") +# An input to cxx compilation, consisting of a file to compile and optional +# file specific flags to compile with. +CxxSrcWithFlags = record( + file = field(Artifact), + flags = field(list[ResolvedStringWithMacros], []), + # If we have multiple source entries with same files but different flags, + # specify an index so we can differentiate them. Otherwise, use None. + index = field([int, None], None), + is_header = field(bool, False), +) + # The source files -def get_srcs_with_flags(ctx: AnalysisContext) -> list[CxxSrcWithFlags]: - all_srcs = ctx.attrs.srcs + flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs)) +def get_srcs_with_flags(ctx: AnalysisContext, additional_srcs: list = []) -> list[CxxSrcWithFlags]: + all_srcs = ctx.attrs.srcs + flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs)) + additional_srcs # src -> flags_hash -> flags flags_sets_by_src = {} diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index 0be0ff960e..77ee6d6c11 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -6,13 +6,15 @@ # of this source tree. 
load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") -load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CudaCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "DepTrackingMode", "DistLtoToolsInfo", "HipCompilerInfo", "LinkerInfo", "PicBehavior", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CudaCompilerInfo", "CvtresCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "DepTrackingMode", "DistLtoToolsInfo", "HipCompilerInfo", "LinkerInfo", "PicBehavior", "RcCompilerInfo", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode", "HeadersAsRawHeadersMode") load("@prelude//cxx:linker.bzl", "LINKERS", "is_pdb_generated") +load("@prelude//cxx:target_sdk_version.bzl", "get_target_sdk_version") load("@prelude//linking:link_info.bzl", "LinkOrdering", "LinkStyle") load("@prelude//linking:lto.bzl", "LtoMode", "lto_compiler_flags") -load("@prelude//utils:utils.bzl", "value_or") +load("@prelude//utils:utils.bzl", "flatten", "value_or") load("@prelude//decls/cxx_rules.bzl", "cxx_rules") def cxx_toolchain_impl(ctx): @@ -29,19 +31,21 @@ def cxx_toolchain_impl(ctx): c_info = CCompilerInfo( compiler = c_compiler, compiler_type = ctx.attrs.c_compiler_type or ctx.attrs.compiler_type, - compiler_flags = cmd_args(ctx.attrs.c_compiler_flags).add(c_lto_flags), + compiler_flags = cmd_args(ctx.attrs.c_compiler_flags, c_lto_flags), preprocessor = c_compiler, preprocessor_flags = cmd_args(ctx.attrs.c_preprocessor_flags), dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], + allow_cache_upload = ctx.attrs.c_compiler_allow_cache_upload, ) cxx_compiler = _get_maybe_wrapped_msvc(ctx.attrs.cxx_compiler[RunInfo], ctx.attrs.cxx_compiler_type or ctx.attrs.compiler_type, ctx.attrs._msvc_hermetic_exec[RunInfo]) cxx_info = CxxCompilerInfo( compiler = cxx_compiler, compiler_type = ctx.attrs.cxx_compiler_type or ctx.attrs.compiler_type, - compiler_flags = cmd_args(ctx.attrs.cxx_compiler_flags).add(c_lto_flags), + compiler_flags = cmd_args(ctx.attrs.cxx_compiler_flags, c_lto_flags), preprocessor = cxx_compiler, preprocessor_flags = cmd_args(ctx.attrs.cxx_preprocessor_flags), dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], + allow_cache_upload = ctx.attrs.cxx_compiler_allow_cache_upload, ) asm_info = AsmCompilerInfo( compiler = ctx.attrs.asm_compiler[RunInfo], @@ -70,6 +74,18 @@ def cxx_toolchain_impl(ctx): compiler_flags = cmd_args(ctx.attrs.hip_compiler_flags), preprocessor_flags = cmd_args(ctx.attrs.hip_preprocessor_flags), ) if ctx.attrs.hip_compiler else None + cvtres_info = CvtresCompilerInfo( + compiler = ctx.attrs.cvtres_compiler[RunInfo], + compiler_type = ctx.attrs.cvtres_compiler_type or ctx.attrs.compiler_type, + compiler_flags = cmd_args(ctx.attrs.cvtres_compiler_flags), + preprocessor_flags = cmd_args(ctx.attrs.cvtres_preprocessor_flags), + ) if ctx.attrs.cvtres_compiler else None + rc_info = RcCompilerInfo( + compiler = ctx.attrs.rc_compiler[RunInfo], + compiler_type = ctx.attrs.rc_compiler_type or ctx.attrs.compiler_type, + compiler_flags = cmd_args(ctx.attrs.rc_compiler_flags), + preprocessor_flags = cmd_args(ctx.attrs.rc_preprocessor_flags), + ) if ctx.attrs.rc_compiler else None 
linker_info = LinkerInfo( archiver = ctx.attrs.archiver[RunInfo], @@ -83,11 +99,12 @@ def cxx_toolchain_impl(ctx): is_pdb_generated = is_pdb_generated(ctx.attrs.linker_type, ctx.attrs.linker_flags), link_binaries_locally = not value_or(ctx.attrs.cache_links, True), link_libraries_locally = False, - link_style = LinkStyle("static"), - link_weight = 1, + link_style = LinkStyle(ctx.attrs.link_style), + link_weight = ctx.attrs.link_weight, link_ordering = ctx.attrs.link_ordering, linker = ctx.attrs.linker[RunInfo], - linker_flags = cmd_args(ctx.attrs.linker_flags).add(c_lto_flags), + linker_flags = cmd_args(ctx.attrs.linker_flags, c_lto_flags), + post_linker_flags = cmd_args(ctx.attrs.post_linker_flags), lto_mode = lto_mode, mk_shlib_intf = ctx.attrs.shared_library_interface_producer, object_file_extension = ctx.attrs.object_file_extension or "o", @@ -95,6 +112,8 @@ def cxx_toolchain_impl(ctx): independent_shlib_interface_linker_flags = ctx.attrs.shared_library_interface_flags, requires_archives = value_or(ctx.attrs.requires_archives, True), requires_objects = value_or(ctx.attrs.requires_objects, False), + sanitizer_runtime_enabled = ctx.attrs.sanitizer_runtime_enabled, + sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]), supports_distributed_thinlto = ctx.attrs.supports_distributed_thinlto, shared_dep_runtime_ld_flags = ctx.attrs.shared_dep_runtime_ld_flags, shared_library_name_default_prefix = _get_shared_library_name_default_prefix(ctx), @@ -105,12 +124,12 @@ def cxx_toolchain_impl(ctx): static_pic_dep_runtime_ld_flags = ctx.attrs.static_pic_dep_runtime_ld_flags, type = ctx.attrs.linker_type, use_archiver_flags = ctx.attrs.use_archiver_flags, - produce_interface_from_stub_shared_library = ctx.attrs.produce_interface_from_stub_shared_library, ) utilities_info = BinaryUtilitiesInfo( nm = ctx.attrs.nm[RunInfo], objcopy = ctx.attrs.objcopy_for_shared_library_interface[RunInfo], + objdump = ctx.attrs.objdump[RunInfo] if ctx.attrs.objdump else None, ranlib = ctx.attrs.ranlib[RunInfo] if ctx.attrs.ranlib else None, strip = ctx.attrs.strip[RunInfo], dwp = None, @@ -137,6 +156,8 @@ def cxx_toolchain_impl(ctx): as_compiler_info = as_info, cuda_compiler_info = cuda_info, hip_compiler_info = hip_info, + cvtres_compiler_info = cvtres_info, + rc_compiler_info = rc_info, header_mode = _get_header_mode(ctx), llvm_link = ctx.attrs.llvm_link[RunInfo] if ctx.attrs.llvm_link else None, object_format = CxxObjectFormat(object_format), @@ -150,10 +171,13 @@ def cxx_toolchain_impl(ctx): # TODO(T138705365): Turn on dep files by default use_dep_files = value_or(ctx.attrs.use_dep_files, _get_default_use_dep_files(platform_name)), clang_remarks = ctx.attrs.clang_remarks, + gcno_files = value_or(ctx.attrs.gcno_files, False), clang_trace = value_or(ctx.attrs.clang_trace, False), cpp_dep_tracking_mode = DepTrackingMode(ctx.attrs.cpp_dep_tracking_mode), cuda_dep_tracking_mode = DepTrackingMode(ctx.attrs.cuda_dep_tracking_mode), dumpbin_toolchain_path = ctx.attrs._dumpbin_toolchain_path[DefaultInfo].default_outputs[0] if ctx.attrs._dumpbin_toolchain_path else None, + target_sdk_version = get_target_sdk_version(ctx), + dist_lto_tools_info = ctx.attrs.dist_lto_tools[DistLtoToolsInfo], ) def cxx_toolchain_extra_attributes(is_toolchain_rule): @@ -172,15 +196,22 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "cpp_dep_tracking_mode": attrs.enum(DepTrackingMode.values(), default = "makefile"), "cuda_compiler": 
attrs.option(dep_type(providers = [RunInfo]), default = None), "cuda_dep_tracking_mode": attrs.enum(DepTrackingMode.values(), default = "makefile"), + "cvtres_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "cxx_compiler": dep_type(providers = [RunInfo]), + "dist_lto_tools": dep_type(providers = [DistLtoToolsInfo], default = "prelude//cxx/dist_lto/tools:dist_lto_tools"), + "gcno_files": attrs.bool(default = False), "generate_linker_maps": attrs.bool(default = False), "hip_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "link_ordering": attrs.enum(LinkOrdering.values(), default = "preorder"), + "link_weight": attrs.int(default = 1), "linker": dep_type(providers = [RunInfo]), "llvm_link": attrs.option(dep_type(providers = [RunInfo]), default = None), "lto_mode": attrs.enum(LtoMode.values(), default = "none"), + # Darwin only: the minimum deployment target supported + "min_sdk_version": attrs.option(attrs.string(), default = None), "nm": dep_type(providers = [RunInfo]), "objcopy_for_shared_library_interface": dep_type(providers = [RunInfo]), + "objdump": attrs.option(dep_type(providers = [RunInfo]), default = None), "object_format": attrs.enum(CxxObjectFormat.values(), default = "native"), "pic_behavior": attrs.enum(PicBehavior.values(), default = "supported"), # A placeholder tool that can be used to set up toolchain constraints. @@ -190,19 +221,22 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): # Used for resolving any 'platform_*' attributes. "platform_name": attrs.option(attrs.string(), default = None), "private_headers_symlinks_enabled": attrs.bool(default = True), - "produce_interface_from_stub_shared_library": attrs.bool(default = True), "public_headers_symlinks_enabled": attrs.bool(default = True), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), + "rc_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "requires_objects": attrs.bool(default = False), + "sanitizer_runtime_enabled": attrs.bool(default = False), + "sanitizer_runtime_files": attrs.set(attrs.dep(), sorted = True, default = []), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.enum(ShlibInterfacesMode.values(), default = "disabled"), "shared_library_interface_producer": attrs.option(dep_type(providers = [RunInfo]), default = None), "split_debug_mode": attrs.enum(SplitDebugMode.values(), default = "none"), "strip": dep_type(providers = [RunInfo]), "supports_distributed_thinlto": attrs.bool(default = False), + # Darwin only: the deployment target to use for this build + "target_sdk_version": attrs.option(attrs.string(), default = None), "use_archiver_flags": attrs.bool(default = True), "use_dep_files": attrs.option(attrs.bool(), default = None), "_dep_files_processor": dep_type(providers = [RunInfo], default = "prelude//cxx/tools:dep_file_processor"), - "_dist_lto_tools": attrs.default_only(dep_type(providers = [DistLtoToolsInfo], default = "prelude//cxx/dist_lto/tools:dist_lto_tools")), # TODO(scottcao): Figure out a slightly better way to integrate this. In theory, this is only needed for clang toolchain. # If we were using msvc, we should be able to use dumpbin directly. 
"_dumpbin_toolchain_path": attrs.default_only(attrs.option(dep_type(providers = [DefaultInfo]), default = select({ @@ -220,7 +254,7 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): # FIXME: prelude// should be standalone (not refer to fbsource//) "_mk_hmap": attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//cxx/tools:hmap_wrapper")), "_msvc_hermetic_exec": attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//windows/tools:msvc_hermetic_exec")), - } + } | cxx_toolchain_allow_cache_upload_args() def _cxx_toolchain_inheriting_target_platform_attrs(): attrs = dict(cxx_rules.cxx_toolchain.attrs) diff --git a/prelude/cxx/cxx_toolchain_macro_layer.bzl b/prelude/cxx/cxx_toolchain_macro_layer.bzl index 4b38584ad9..adaeada5a3 100644 --- a/prelude/cxx/cxx_toolchain_macro_layer.bzl +++ b/prelude/cxx/cxx_toolchain_macro_layer.bzl @@ -7,12 +7,9 @@ def cxx_toolchain_macro_impl(cxx_toolchain_rule = None, **kwargs): # `cxx.linker_map_enabled` overrides toolchain behavior - linker_map_enabled = read_root_config("cxx", "linker_map_enabled") - if linker_map_enabled != None: - if linker_map_enabled.lower() == "true": - kwargs["generate_linker_maps"] = True - else: - kwargs["generate_linker_maps"] = False + if "generate_linker_maps" not in kwargs: + linker_map_enabled = read_root_config("cxx", "linker_map_enabled", "") + kwargs["generate_linker_maps"] = linker_map_enabled.lower() == "true" bitcode = read_root_config("cxx", "bitcode") if bitcode != None: diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index 932179e340..7652032651 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -7,15 +7,9 @@ load("@prelude//cxx:debug.bzl", "SplitDebugMode") -# For cases where our `ld` dependency provides more than an executable and -# would like to give us flags too. We use this to place the flags in the proper -# field (linker_flags), so that things that want ldflags without the linker -# executable can access those. -RichLinkerRunInfo = provider(fields = {"exe": provider_field(typing.Any, default = None), "flags": provider_field(typing.Any, default = None)}) - LinkerType = ["gnu", "darwin", "windows", "wasm"] -ShlibInterfacesMode = enum("disabled", "enabled", "defined_only") +ShlibInterfacesMode = enum("disabled", "enabled", "defined_only", "stub_from_library", "stub_from_headers") # TODO(T110378149): Consider whether it makes sense to move these things to # configurations/constraints rather than part of the toolchain. @@ -41,14 +35,17 @@ LinkerInfo = provider( # GiBs of object files (which can also lead to RE errors/timesouts etc). 
"link_libraries_locally": provider_field(typing.Any, default = None), "link_style": provider_field(typing.Any, default = None), # LinkStyle - "link_weight": provider_field(typing.Any, default = None), # int + "link_weight": provider_field(int, default = 1), # int "link_ordering": provider_field(typing.Any, default = None), # LinkOrdering "linker": provider_field(typing.Any, default = None), "linker_flags": provider_field(typing.Any, default = None), + "post_linker_flags": provider_field(typing.Any, default = None), "lto_mode": provider_field(typing.Any, default = None), "mk_shlib_intf": provider_field(typing.Any, default = None), # "o" on Unix, "obj" on Windows "object_file_extension": provider_field(typing.Any, default = None), # str + "sanitizer_runtime_enabled": provider_field(bool, default = False), + "sanitizer_runtime_files": provider_field(list[Artifact], default = []), "shlib_interfaces": provider_field(ShlibInterfacesMode), "shared_dep_runtime_ld_flags": provider_field(typing.Any, default = None), # "lib" on Linux/Mac/Android, "" on Windows. @@ -68,7 +65,6 @@ LinkerInfo = provider( "use_archiver_flags": provider_field(typing.Any, default = None), "force_full_hybrid_if_capable": provider_field(typing.Any, default = None), "is_pdb_generated": provider_field(typing.Any, default = None), # bool - "produce_interface_from_stub_shared_library": provider_field(typing.Any, default = None), # bool }, ) @@ -77,6 +73,7 @@ BinaryUtilitiesInfo = provider(fields = { "dwp": provider_field(typing.Any, default = None), "nm": provider_field(typing.Any, default = None), "objcopy": provider_field(typing.Any, default = None), + "objdump": provider_field(typing.Any, default = None), "ranlib": provider_field(typing.Any, default = None), "strip": provider_field(typing.Any, default = None), }) @@ -122,10 +119,14 @@ _compiler_fields = [ "preprocessor_type", "preprocessor_flags", "dep_files_processor", + # Controls cache upload for object files + "allow_cache_upload", ] HipCompilerInfo = provider(fields = _compiler_fields) CudaCompilerInfo = provider(fields = _compiler_fields) +CvtresCompilerInfo = provider(fields = _compiler_fields) +RcCompilerInfo = provider(fields = _compiler_fields) CCompilerInfo = provider(fields = _compiler_fields) CxxCompilerInfo = provider(fields = _compiler_fields) AsmCompilerInfo = provider(fields = _compiler_fields) @@ -184,12 +185,15 @@ CxxToolchainInfo = provider( "as_compiler_info": provider_field(typing.Any, default = None), "hip_compiler_info": provider_field(typing.Any, default = None), "cuda_compiler_info": provider_field(typing.Any, default = None), + "cvtres_compiler_info": provider_field(typing.Any, default = None), + "rc_compiler_info": provider_field(typing.Any, default = None), "mk_comp_db": provider_field(typing.Any, default = None), "mk_hmap": provider_field(typing.Any, default = None), "llvm_link": provider_field(typing.Any, default = None), "dist_lto_tools_info": provider_field(typing.Any, default = None), "use_dep_files": provider_field(typing.Any, default = None), "clang_remarks": provider_field(typing.Any, default = None), + "gcno_files": provider_field(typing.Any, default = None), "clang_trace": provider_field(typing.Any, default = None), "cpp_dep_tracking_mode": provider_field(typing.Any, default = None), "cuda_dep_tracking_mode": provider_field(typing.Any, default = None), @@ -198,6 +202,7 @@ CxxToolchainInfo = provider( "bolt_enabled": provider_field(typing.Any, default = None), "pic_behavior": provider_field(typing.Any, default = None), 
"dumpbin_toolchain_path": provider_field(typing.Any, default = None), + "target_sdk_version": provider_field([str, None], default = None), }, ) @@ -215,9 +220,6 @@ def _validate_linker_info(info: LinkerInfo): if info.requires_archives and info.requires_objects: fail("only one of `requires_archives` and `requires_objects` can be enabled") - if info.supports_distributed_thinlto and not info.requires_objects: - fail("distributed thinlto requires enabling `requires_objects`") - def is_bitcode_format(format: CxxObjectFormat) -> bool: return format in [CxxObjectFormat("bitcode"), CxxObjectFormat("embedded-bitcode")] @@ -234,12 +236,15 @@ def cxx_toolchain_infos( as_compiler_info = None, hip_compiler_info = None, cuda_compiler_info = None, + cvtres_compiler_info = None, + rc_compiler_info = None, object_format = CxxObjectFormat("native"), mk_comp_db = None, mk_hmap = None, use_distributed_thinlto = False, use_dep_files = False, clang_remarks = None, + gcno_files = None, clang_trace = False, cpp_dep_tracking_mode = DepTrackingMode("none"), cuda_dep_tracking_mode = DepTrackingMode("none"), @@ -250,7 +255,8 @@ def cxx_toolchain_infos( llvm_link = None, platform_deps_aliases = [], pic_behavior = PicBehavior("supported"), - dumpbin_toolchain_path = None): + dumpbin_toolchain_path = None, + target_sdk_version = None): """ Creates the collection of cxx-toolchain Infos for a cxx toolchain. @@ -275,6 +281,8 @@ def cxx_toolchain_infos( as_compiler_info = as_compiler_info, hip_compiler_info = hip_compiler_info, cuda_compiler_info = cuda_compiler_info, + cvtres_compiler_info = cvtres_compiler_info, + rc_compiler_info = rc_compiler_info, mk_comp_db = mk_comp_db, mk_hmap = mk_hmap, object_format = object_format, @@ -282,6 +290,7 @@ def cxx_toolchain_infos( use_distributed_thinlto = use_distributed_thinlto, use_dep_files = use_dep_files, clang_remarks = clang_remarks, + gcno_files = gcno_files, clang_trace = clang_trace, cpp_dep_tracking_mode = cpp_dep_tracking_mode, cuda_dep_tracking_mode = cuda_dep_tracking_mode, @@ -290,6 +299,7 @@ def cxx_toolchain_infos( bolt_enabled = bolt_enabled, pic_behavior = pic_behavior, dumpbin_toolchain_path = dumpbin_toolchain_path, + target_sdk_version = target_sdk_version, ) # Provide placeholder mappings, used primarily by cxx_genrule. @@ -309,9 +319,10 @@ def cxx_toolchain_infos( # NOTE(agallagher): The arg-less variants of the ldflags macro are # identical, and are just separate to match v1's behavior (ideally, # we just have a single `ldflags` macro for this case). - "ldflags-shared": _shell_quote(linker_info.linker_flags), - "ldflags-static": _shell_quote(linker_info.linker_flags), - "ldflags-static-pic": _shell_quote(linker_info.linker_flags), + "ldflags-shared": _shell_quote(linker_info.linker_flags or []), + "ldflags-static": _shell_quote(linker_info.linker_flags or []), + "ldflags-static-pic": _shell_quote(linker_info.linker_flags or []), + "objcopy": binary_utilities_info.objcopy, # TODO(T110378148): $(platform-name) is almost unusued. Should we remove it? "platform-name": platform_name, } diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index 62cd70bb3f..56bc22b1f1 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -6,6 +6,10 @@ # of this source tree. 
load("@prelude//:artifact_tset.bzl", "ArtifactTSet") # @unused Used as a type +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load( "@prelude//linking:link_info.bzl", "LinkArgs", @@ -21,7 +25,7 @@ load( ) load(":argsfiles.bzl", "CompileArgsfiles") load( - ":compile.bzl", + ":cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) load( @@ -30,7 +34,6 @@ load( ) load( ":link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", # @unused Used as a type ) load( @@ -175,7 +178,7 @@ CxxRuleConstructorParams = record( # shared libs to include in the symlink tree). extra_link_roots = field(list[LinkableProviders], []), # Additional shared libs to "package". - extra_shared_libs = field(dict[str, SharedLibrary], {}), + extra_shared_libs = field(list[SharedLibrary], []), auto_link_group_specs = field([list[LinkGroupLibSpec], None], None), link_group_info = field([LinkGroupInfo, None], None), # Whether to use pre-stripped objects when linking. @@ -189,4 +192,22 @@ CxxRuleConstructorParams = record( extra_linker_outputs_factory = field(typing.Callable, lambda _context: ([], {})), # Whether to allow cache uploads for locally-linked executables. exe_allow_cache_upload = field(bool, False), + # The target triple to use when generating shared library interfaces + shared_library_interface_target = field([str, None], None), + # Extra shared library interfaces to propagate, eg from mixed Swift libraries. + extra_shared_library_interfaces = field([list[Artifact], None], None), + # Compiler flags + compiler_flags = field(list[typing.Any], []), + lang_compiler_flags = field(dict[typing.Any, typing.Any], {}), + # Platform compiler flags + platform_compiler_flags = field(list[(str, typing.Any)], []), + lang_platform_compiler_flags = field(dict[typing.Any, typing.Any], {}), + # Preprocessor flags + preprocessor_flags = field(list[typing.Any], []), + lang_preprocessor_flags = field(dict[typing.Any, typing.Any], {}), + # Platform preprocessor flags + platform_preprocessor_flags = field(list[(str, typing.Any)], []), + lang_platform_preprocessor_flags = field(dict[typing.Any, typing.Any], {}), + # modulename-Swift.h header for building objc targets that rely on this swift dep + swift_objc_header = field([Artifact, None], None), ) diff --git a/prelude/cxx/cxx_utility.bzl b/prelude/cxx/cxx_utility.bzl new file mode 100644 index 0000000000..a221419d8f --- /dev/null +++ b/prelude/cxx/cxx_utility.bzl @@ -0,0 +1,32 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def cxx_attrs_get_allow_cache_upload(attrs: struct, default: [None, bool] = None) -> bool: + default_value = default if default != None else False + if not hasattr(attrs, "allow_cache_upload"): + return default_value + value = attrs.allow_cache_upload + return value if value != None else default_value + +def cxx_toolchain_allow_cache_upload_args(): + doc = """ + Whether to allow uploading of object files to cache when the compile + action is executed locally and the configuration allows uploads (i.e., + there is a cache configured and the client has permission to write to it). 
+ """ + return { + "c_compiler_allow_cache_upload": attrs.option( + attrs.bool(), + default = None, + doc = doc, + ), + "cxx_compiler_allow_cache_upload": attrs.option( + attrs.bool(), + default = None, + doc = doc, + ), + } diff --git a/prelude/cxx/dist_lto/README.md b/prelude/cxx/dist_lto/README.md index 1102134a2c..88a4b80a75 100644 --- a/prelude/cxx/dist_lto/README.md +++ b/prelude/cxx/dist_lto/README.md @@ -1,23 +1,27 @@ # Distributed ThinLTO in Buck2 + Sean Gillespie, April 2022 -This document is a technical overview into Buck2's implementation of a distributed ThinLTO. -Like all rules in Buck2, this implementation is written entirely in Starlark, contained in -`dist_lto.bzl` (in this same directory). +This document is a technical overview into Buck2's implementation of a +distributed ThinLTO. Like all rules in Buck2, this implementation is written +entirely in Starlark, contained in `dist_lto.bzl` (in this same directory). ## Motivation -First, I highly recommend watching [Teresa Johnson's CppCon2017 talk about ThinLTO](https://www.youtube.com/watch?v=p9nH2vZ2mNo), +First, I highly recommend watching +[Teresa Johnson's CppCon2017 talk about ThinLTO](https://www.youtube.com/watch?v=p9nH2vZ2mNo), which covers the topics in this section in much greater detail than I can. -C and C++ have long enjoyed significant optimizations at the hands of compilers. However, they have also -long suffered a fundamental limitation; a C or C++ compiler can only optimize code that it sees in a single -translation unit. For a language like C or C++, this means in practice that only code that is included via -the preprocessor or specified in the translation unit can be optimized as a single unit. C and C++ compilers -are unable to inline functions that are defined in different translation units. However, a crucial advantage -of this compilation model is that all C and C++ compiler invocations are *completely parallelizable*; despite -sacrificing some code quality, C and C++ compilation turns into a massively parallel problem with a serial -link step at the very end. +C and C++ have long enjoyed significant optimizations at the hands of compilers. +However, they have also long suffered a fundamental limitation; a C or C++ +compiler can only optimize code that it sees in a single translation unit. For a +language like C or C++, this means in practice that only code that is included +via the preprocessor or specified in the translation unit can be optimized as a +single unit. C and C++ compilers are unable to inline functions that are defined +in different translation units. However, a crucial advantage of this compilation +model is that all C and C++ compiler invocations are _completely +parallelizable_; despite sacrificing some code quality, C and C++ compilation +turns into a massively parallel problem with a serial link step at the very end. ``` flowchart LR; @@ -36,20 +40,25 @@ flowchart LR; c.o --> main; ``` -([Rendered](https://fburl.com/mermaid/rzup8o32). Compilation and optimization of a, b, and c can proceed in parallel.) - - -In cases where absolute performance is required, though, the inability to perform cross-translation-unit -(or "cross-module", in LLVM parlance) optimizations becomes more of a problem. To solve this, a new compilation -paradigm was designed, dubbed "Link-Time Optimization" (LTO). In this scheme, a compiler will not produce machine code -when processing a translation unit; rather, it will output the compiler's intermediate representation (e.g. LLVM bitcode). 
-Later on, when it is time for the linker to run, it will load all of the compiler IR into one giant module, run -optimization passes on the mega-module, and produce a final binary from that. - -This works quite well, if all that you're looking for is run-time performance. A major drawback of the LTO approach is -that all of the parallelism gained from optimizing translation units individually is now completely lost; instead, the -linker (using a plugin) will do a single-threaded pass of *all code* produced by compilation steps. This is extremely -slow, memory-intensive, and unable to be run incrementally. There are targets at Meta that simply can't be LTO-compiled +([Rendered](https://fburl.com/mermaid/rzup8o32). Compilation and optimization of +a, b, and c can proceed in parallel.) + +In cases where absolute performance is required, though, the inability to +perform cross-translation-unit (or "cross-module", in LLVM parlance) +optimizations becomes more of a problem. To solve this, a new compilation +paradigm was designed, dubbed "Link-Time Optimization" (LTO). In this scheme, a +compiler will not produce machine code when processing a translation unit; +rather, it will output the compiler's intermediate representation (e.g. LLVM +bitcode). Later on, when it is time for the linker to run, it will load all of +the compiler IR into one giant module, run optimization passes on the +mega-module, and produce a final binary from that. + +This works quite well, if all that you're looking for is run-time performance. A +major drawback of the LTO approach is that all of the parallelism gained from +optimizing translation units individually is now completely lost; instead, the +linker (using a plugin) will do a single-threaded pass of _all code_ produced by +compilation steps. This is extremely slow, memory-intensive, and unable to be +run incrementally. There are targets at Meta that simply can't be LTO-compiled because of their size. ``` @@ -74,15 +83,21 @@ flowchart LR; main.o --> |ld| main ``` -([Rendered](https://fburl.com/mermaid/kid35io9). `a.bc`, `b.bc`, and `c.bc` are LLVM bitcode; they are all merged -together into a single module, `a_b_c_optimized.bc`, which is then optimized and codegen'd into a final binary.) -The idea of ThinLTO comes from a desire to maintain the ability to optimize modules in parallel while still -allowing for profitable cross-module optimizations. The idea is this: +([Rendered](https://fburl.com/mermaid/kid35io9). `a.bc`, `b.bc`, and `c.bc` are +LLVM bitcode; they are all merged together into a single module, +`a_b_c_optimized.bc`, which is then optimized and codegen'd into a final +binary.) -1. Just like regular LTO, the compiler emits bitcode instead of machine code. However, it also contains some light -metadata such as a call graph of symbols within the module. -2. The monolithic LTO link is split into three steps: `index`, `opt`, and `link`. +The idea of ThinLTO comes from a desire to maintain the ability to optimize +modules in parallel while still allowing for profitable cross-module +optimizations. The idea is this: + +1. Just like regular LTO, the compiler emits bitcode instead of machine code. + However, it also contains some light metadata such as a call graph of symbols + within the module. +2. The monolithic LTO link is split into three steps: `index`, `opt`, and + `link`. ``` flowchart LR; @@ -117,137 +132,192 @@ flowchart LR; ([Rendered](https://fburl.com/mermaid/56oc99t5)) -The `index` step looks like a link step. 
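For readers who want to see the shape of these three steps outside of a build system, the sketch below renders one plausible sequence of toolchain invocations as Python argv lists. The `-Wl,--thinlto-index-only` and `-Wl,--thinlto-emit-imports-files` flags are the same ones `dist_lto.bzl` passes to the indexer later in this diff; the file names, the `-O2` level, and the use of `clang` as the link driver are illustrative assumptions, not part of the Buck2 implementation.

```python
# Illustrative rendering of the index/opt/link phases for three bitcode
# objects compiled with -flto=thin. File names here are hypothetical.
import subprocess

objects = ["a.o", "b.o", "c.o"]

# index: a link-like invocation that produces no binary; instead it writes,
# for every object, a .thinlto.bc index plus an imports file.
subprocess.run(
    ["clang", "-flto=thin", *objects, "-o", "main",
     "-Wl,--thinlto-index-only",
     "-Wl,--thinlto-emit-imports-files"],
    check=True,
)

# opt: one independent (hence parallelizable and distributable) backend
# compile per object file, guided by its per-object index.
for obj in objects:
    subprocess.run(
        ["clang", "-O2", "-x", "ir", obj,
         "-fthinlto-index=" + obj + ".thinlto.bc",
         "-c", "-o", obj + ".opt.o"],
        check=True,
    )

# link: an ordinary native link over the optimized objects.
subprocess.run(
    ["clang", *[obj + ".opt.o" for obj in objects], "-o", "main"],
    check=True,
)
```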
However, it does not produce a final binary; instead, it looks at every -compiler IR input file that it receives and heuristically determines which other IR modules it should be optimized -with in order to achieve profitable optimizations. These modules might include functions that the index step thinks -probably will get inlined, or globals that are read in the target IR input file. The output of the index step is a -series of files on disk that indicate which sibling object files should be present when optimizing a particular object -file, for each object file in the linker command-line. - -The `opt` step runs in parallel for every object file. Each object file will be optimized using the compiler's -optimizer (e.g. `opt`, for LLVM). The optimizer will combine the objects that were referenced as part of the index -step as potentially profitable to include and optimize them all together. - -The `link` step takes the outputs of `opt` and links them together, like a normal linker. - -In practice, ThinLTO manages to recapture the inherent parallelism of C/C++ compilation by pushing the majority of work -to the parallel `opt` phase of execution. When LLVM performs ThinLTO by default, it will launch a thread pool and process -independent modules in parallel. ThinLTO does not produce as performant a binary as a monolithic LTO; however, in practice, -ThinLTO binaries [paired with AutoFDO](https://fburl.com/wiki/q480euco) perform comparably to monolithic LTO. Furthermore, -ThinLTO's greater efficiency allows for more expensive optimization passes to be run, which can further improve code quality +The `index` step looks like a link step. However, it does not produce a final +binary; instead, it looks at every compiler IR input file that it receives and +heuristically determines which other IR modules it should be optimized with in +order to achieve profitable optimizations. These modules might include functions +that the index step thinks probably will get inlined, or globals that are read +in the target IR input file. The output of the index step is a series of files +on disk that indicate which sibling object files should be present when +optimizing a particular object file, for each object file in the linker +command-line. + +The `opt` step runs in parallel for every object file. Each object file will be +optimized using the compiler's optimizer (e.g. `opt`, for LLVM). The optimizer +will combine the objects that were referenced as part of the index step as +potentially profitable to include and optimize them all together. + +The `link` step takes the outputs of `opt` and links them together, like a +normal linker. + +In practice, ThinLTO manages to recapture the inherent parallelism of C/C++ +compilation by pushing the majority of work to the parallel `opt` phase of +execution. When LLVM performs ThinLTO by default, it will launch a thread pool +and process independent modules in parallel. ThinLTO does not produce as +performant a binary as a monolithic LTO; however, in practice, ThinLTO binaries +[paired with AutoFDO](https://fburl.com/wiki/q480euco) perform comparably to +monolithic LTO. Furthermore, ThinLTO's greater efficiency allows for more +expensive optimization passes to be run, which can further improve code quality near that of a monolithic LTO. -This is all great, and ThinLTO has been in use at Meta for some time. 
However, Buck2 has the ability to take a step -further than Buck1 could ever have - Buck2 can distribute parallel `opt` actions across many machines via Remote Execution -to achieve drastic speedups in ThinLTO wall clock time, memory usage, and incrementality. +This is all great, and ThinLTO has been in use at Meta for some time. However, +Buck2 has the ability to take a step further than Buck1 could ever have - Buck2 +can distribute parallel `opt` actions across many machines via Remote Execution +to achieve drastic speedups in ThinLTO wall clock time, memory usage, and +incrementality. ## Buck2's Implementation -Buck2's role in a distributed ThinLTO compilation is to construct a graph of actions that directly mirrors the graph -that the `index` step outputs. The graph that the `index` step outputs is entirely dynamic and, as such, the build -system is only aware of what the graph could be after the `index` step is complete. Unlike Buck1 (or even Blaze/Bazel), -Buck2 has explicit support for this paradigm [("dynamic dependencies")](https://fburl.com/gdoc/zklwhkll). Therefore, for Buck2, the basic strategy looks like: - -1. Invoke `clang` to act as `index`. `index` will output a file for every object file that indicates what other modules -need to be present when running `opt` on the object file (an "imports file"). -2. Read imports files and construct a graph of dynamic `opt` actions whose dependencies mirror the contents of the imports files. -3. Collect the outputs from the `opt` actions and invoke the linker to produce a final binary. - -Action `2` is inherently dynamic, since it must read the contents of files produced as part of action `1`. Furthermore, -Buck2's support of `1` is complicated by the fact that certain Buck2 rules can produce an archive of object files as -an output (namely, the Rust compiler). As a result, Buck2's implementation of Distributed ThinLTO is highly dynamic. +Buck2's role in a distributed ThinLTO compilation is to construct a graph of +actions that directly mirrors the graph that the `index` step outputs. The graph +that the `index` step outputs is entirely dynamic and, as such, the build system +is only aware of what the graph could be after the `index` step is complete. +Unlike Buck1 (or even Blaze/Bazel), Buck2 has explicit support for this paradigm +[("dynamic dependencies")](https://fburl.com/gdoc/zklwhkll). Therefore, for +Buck2, the basic strategy looks like: + +1. Invoke `clang` to act as `index`. `index` will output a file for every object + file that indicates what other modules need to be present when running `opt` + on the object file (an "imports file"). +2. Read imports files and construct a graph of dynamic `opt` actions whose + dependencies mirror the contents of the imports files. +3. Collect the outputs from the `opt` actions and invoke the linker to produce a + final binary. + +Action `2` is inherently dynamic, since it must read the contents of files +produced as part of action `1`. Furthermore, Buck2's support of `1` is +complicated by the fact that certain Buck2 rules can produce an archive of +object files as an output (namely, the Rust compiler). As a result, Buck2's +implementation of Distributed ThinLTO is highly dynamic. Buck2's implementation contains four phases of actions: -1. `thin_lto_prepare`, which specifically handles archives containing LLVM IR and prepares them to be inputs to `thin_lto_index`, -2. `thin_lto_index`, which invokes LLVM's ThinLTO indexer to produce a imports list for every object file to be optimized, -3. 
`thin_lto_opt`, which optimizes each object file in parallel with its imports present, +1. `thin_lto_prepare`, which specifically handles archives containing LLVM IR + and prepares them to be inputs to `thin_lto_index`, +2. `thin_lto_index`, which invokes LLVM's ThinLTO indexer to produce an imports + list for every object file to be optimized, +3. `thin_lto_opt`, which optimizes each object file in parallel with its imports + present, 4. `thin_lto_link`, which links together the optimized code into a final binary. ### thin_lto_prepare -It is a reality of Buck2 today that some rules don't produce a statically-known list of object files. The list of object -files is known *a priori* during C/C++ compilation, since they have a one-to-one correspondence to source files; however, -the Rust compiler emits an archive of object files; without inspecting the archive, Buck2 has no way of knowing what -the contents of the archive are, or even if they contain bitcode at all. +It is a reality of Buck2 today that some rules don't produce a statically-known +list of object files. The list of object files is known _a priori_ during C/C++ +compilation, since they have a one-to-one correspondence to source files; +however, the Rust compiler emits an archive of object files; without inspecting +the archive, Buck2 has no way of knowing what the contents of the archive are, +or even if they contain bitcode at all. -Future steps (particularly `thin_lto_index`) are defined to only operate on a list of object files - a limitation [inherited from LLVM](https://lists.llvm.org/pipermail/llvm-dev/2019-June/133145.html). Therefore, it is the job of `thin_lto_prepare` to turn an archive into a list of objects - namely, by extracting the archive into a directory. +Future steps (particularly `thin_lto_index`) are defined to only operate on a +list of object files - a limitation +[inherited from LLVM](https://lists.llvm.org/pipermail/llvm-dev/2019-June/133145.html). +Therefore, it is the job of `thin_lto_prepare` to turn an archive into a list of +objects - namely, by extracting the archive into a directory. -Buck2 dispatches a `thin_lto_prepare` action for every archive. Each prepare action has two outputs: +Buck2 dispatches a `thin_lto_prepare` action for every archive. Each prepare +action has two outputs: -1. An **output directory** (called `objects` in the code), a directory that contains the unextracted contents of the archive. -2. A **archive manifest**, a JSON document containing a list of object files that are contained in the output directory. +1. An **output directory** (called `objects` in the code), a directory that + contains the extracted contents of the archive. +2. An **archive manifest**, a JSON document containing a list of object files + that are contained in the output directory. -The core logic of this action is implemented in the Python script `dist_lto_prepare.py`, contained in the `tools` directory. In addition to unpacking each archive, Buck2 -keeps track of the list of archives as a Starlark array that will be referenced by index -in later steps. +The core logic of this action is implemented in the Python script +`dist_lto_prepare.py`, contained in the `tools` directory. In addition to +unpacking each archive, Buck2 keeps track of the list of archives as a Starlark +array that will be referenced by index in later steps. Both outputs are +sketched below.
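To make the two outputs concrete, here is a minimal, illustrative sketch of the extract-and-manifest idea in Python. This is not the real `dist_lto_prepare.py`: the argument order and the `{"objects": [...]}` manifest shape are assumptions for illustration only.

```python
# Hypothetical sketch of the thin_lto_prepare idea (not the actual
# dist_lto_prepare.py): extract an archive into an output directory and
# write a JSON manifest listing the extracted object files.
import json
import pathlib
import subprocess
import sys

def prepare(archive: str, objects_dir: str, manifest_path: str) -> None:
    out = pathlib.Path(objects_dir)
    out.mkdir(parents=True, exist_ok=True)
    # Extract every member of the archive into the "objects" directory.
    subprocess.run(
        ["llvm-ar", "x", str(pathlib.Path(archive).resolve())],
        cwd=objects_dir,
        check=True,
    )
    # The archive manifest: a JSON list of the extracted object files.
    manifest = {"objects": sorted(str(p) for p in out.iterdir())}
    pathlib.Path(manifest_path).write_text(json.dumps(manifest, indent=2))

if __name__ == "__main__":
    prepare(sys.argv[1], sys.argv[2], sys.argv[3])
```

### thin_lto_index -With all archives prepared, the next step is to invoke LLVM's ThinLTO indexer.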
For the purposes of Buck2, the indexer -looks like a linker; because of this, Buck2 must construct a reasonable link line. Buck2 does this by iterating over the -list of linkables that it has been given and constructing a link line from them. Uniquely for distributed ThinLTO, Buck2 -must wrap all objects that were derived from `thin_lto_prepare` (i.e. were extracted from archives) with `-Wl,--start-lib` -and `-Wl,--end-lib` to ensure that they are still treated as if they were archives by the indexer. - -Invoking the indexer is relatively straightforward in that Buck2 invokes it like it would any other linker. However, -once the indexer returns, Buck2 must post-process its output into a format that Buck2's Starlark can understand and -translate into a graph of dynamic `opt` actions. The first thing that Buck2 is write a "meta file" to disk, which -communicates inputs and outputs of `thin_lto_index` to a Python script, `dist_lto_planner.py`. The meta file contains -a list of 7-tuples, whose members are: - -1. The path to the source bitcode file. This is used as an index into - a dictionary that records much of the metadata coming - from these lines. +With all archives prepared, the next step is to invoke LLVM's ThinLTO indexer. +For the purposes of Buck2, the indexer looks like a linker; because of this, +Buck2 must construct a reasonable link line. Buck2 does this by iterating over +the list of linkables that it has been given and constructing a link line from +them. Uniquely for distributed ThinLTO, Buck2 must wrap all objects that were +derived from `thin_lto_prepare` (i.e. were extracted from archives) with +`-Wl,--start-lib` and `-Wl,--end-lib` to ensure that they are still treated as +if they were archives by the indexer. + +Invoking the indexer is relatively straightforward in that Buck2 invokes it like +it would any other linker. However, once the indexer returns, Buck2 must +post-process its output into a format that Buck2's Starlark can understand and +translate into a graph of dynamic `opt` actions. The first thing that Buck2 does +is write a "meta file" to disk, which communicates inputs and outputs of +`thin_lto_index` to a Python script, `dist_lto_planner.py`. The meta file +contains a list of 7-tuples, whose members are: + +1. The path to the source bitcode file. This is used as an index into a + dictionary that records much of the metadata coming from these lines. 2. The path to an output file. `dist_lto_planner.py` is expected to place a - ThinLTO index file at this location (suffixed `.thinlto.bc`). -3. The path to an output plan. This script is expected to place a link - plan here (a JSON document indicating which other object files this) - object file depends on, among other things. -4. If this object file came from an archive, the index of the archive in - the Starlark archives array. + ThinLTO index file at this location (suffixed `.thinlto.bc`). +3. The path to an output plan. This script is expected to place a link plan here + (a JSON document indicating which other object files this object file + depends on, among other things). +4. If this object file came from an archive, the index of the archive in the + Starlark archives array. 5. If this object file came from an archive, the name of the archive. -6. If this object file came from an archive, the path to an output plan.
- This script is expected to produce an archive link plan here (a JSON) - document similar to the object link plan, except containing link - information for every file in the archive from which this object - came. 7. If this object file came from an archive, the indexes directory of that - archive. This script is expected to place all ThinLTO indexes derived - from object files originating from this archive in that directory. +6. If this object file came from an archive, the path to an output plan. This + script is expected to produce an archive link plan here (a JSON document + similar to the object link plan, except containing link information for every + file in the archive from which this object came). 7. If this object file came from an archive, the indexes directory of that + archive. This script is expected to place all ThinLTO indexes derived from + object files originating from this archive in that directory. -There are two indices that are derived from this meta file: the object -index (`mapping["index"]`) and the archive index (`mapping["archive_index"]`). -These indices are indices into Starlark arrays for all objects and archive -linkables, respectively. `dist_lto_planner.py` script does not inspect them; rather, -it is expected to communicate these indices back to Starlark by writing them to the +There are two indices that are derived from this meta file: the object index +(`mapping["index"]`) and the archive index (`mapping["archive_index"]`). These +indices are indices into Starlark arrays for all objects and archive linkables, +respectively. The `dist_lto_planner.py` script does not inspect them; rather, it is +expected to communicate these indices back to Starlark by writing them to the link plan. -`dist_lto_planner.py` reads the index and imports file produced by LLVM and derives -a number of artifacts: +`dist_lto_planner.py` reads the index and imports file produced by LLVM and +derives a number of artifacts: -1. For each object file, a `thinlto.bc` file (`bitcode_file`). This file is the same as the input bitcode file, except that LLVM has inserted a number of module imports to refer to the other modules that will be present when the object file is optimized. -2. For each object file, an optimization plan (`plan`). The optimization plan is a JSON document indicating how to construct an `opt` action for this object file. This plan includes -this object file's module imports, whether or not this file contains bitcode at all, a location to place the optimized object file, and a list of archives that this object file imported. -3. For each archive, an optimization plan (`archive_plan`), which contains optimization plans for all of the object files contained within the archive. -This action is a dynamic action because, in the case that there are archives that needed to be preprocessed by `thin_lto_prepare`, this action must read the archive manifest. +1. For each object file, a `thinlto.bc` file (`bitcode_file`). This file is the + same as the input bitcode file, except that LLVM has inserted a number of + module imports to refer to the other modules that will be present when the + object file is optimized. +2. For each object file, an optimization plan (`plan`). The optimization plan is + a JSON document indicating how to construct an `opt` action for this object + file. This plan includes this object file's module imports, whether or not + this file contains bitcode at all, a location to place the optimized object + file, and a list of archives that this object file imported. +3.
For each archive, an optimization plan (`archive_plan`), which contains + optimization plans for all of the object files contained within the archive. + +This action is a dynamic action because, in the case that there are archives +that needed to be preprocessed by `thin_lto_prepare`, this action must read the +archive manifest. ### thin_lto_opt -After `thin_lto_index` completes, Buck2 launches `thin_lto_opt` actions for every object file and for every archive. For each object file, Buck2 reads that object file's optimization plan. -At this phase, it is Buck2's responsibility to declare dependencies on every object file referenced by that object's compilation plan; it does so here by adding `hidden` dependencies -on every object file and archive that the archive plan says that this object depends on. +After `thin_lto_index` completes, Buck2 launches `thin_lto_opt` actions for +every object file and for every archive. For each object file, Buck2 reads that +object file's optimization plan. At this phase, it is Buck2's responsibility to +declare dependencies on every object file referenced by that object's +compilation plan; it does so here by adding `hidden` dependencies on every +object file and archive that the archive plan says that this object depends on. -`thin_lto_opt` uses a Python wrapper around LLVM because of a bug (T116695431) where LTO fatal errors don't prevent `clang` from returning an exit code of zero. The Python script wraps -`clang` and exits with a non-zero exit code if `clang` produced an empty object file. +`thin_lto_opt` uses a Python wrapper around LLVM because of a bug (T116695431) +where LTO fatal errors don't prevent `clang` from returning an exit code of +zero. The Python script wraps `clang` and exits with a non-zero exit code if +`clang` produced an empty object file; a sketch of this guard appears at the end +of this section. -For each archive, Buck2 reads the archive's optimization plan and constructs additional `thin_lto_opt` actions for each object file contained in the archive. Buck2 creates a directory of -symlinks (`opt_objects`) that either contains symlinks to optimized object files (if the object file contained bitcode) or the original object file (if it didn't). The purpose of this symlink directory is to allow the final link to consume object files directly -from this directory without having to know whether they were optimized or not. Paths to these files are passed to the link step -via the optimization manifest (`opt_manifest`). +For each archive, Buck2 reads the archive's optimization plan and constructs +additional `thin_lto_opt` actions for each object file contained in the archive. +Buck2 creates a directory of symlinks (`opt_objects`) that either contains +symlinks to optimized object files (if the object file contained bitcode) or the +original object file (if it didn't). The purpose of this symlink directory is to +allow the final link to consume object files directly from this directory +without having to know whether they were optimized or not. Paths to these files +are passed to the link step via the optimization manifest (`opt_manifest`).
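Because that guard is small, here is an illustrative Python sketch of it. This is not the actual wrapper script from the prelude; in particular, the assumption that the compile command carries a conventional `-o <output>` pair is made here only for illustration.

```python
# Hypothetical sketch of the thin_lto_opt clang wrapper (not the real
# script): run clang, then treat an empty output object as a failure,
# since an LTO fatal error may not be reflected in clang's exit code.
import os
import subprocess
import sys

def main(argv: list[str]) -> int:
    # Assumption: the wrapped command uses a conventional "-o <output>" pair.
    output = argv[argv.index("-o") + 1]
    returncode = subprocess.run(["clang"] + argv).returncode
    if returncode != 0:
        return returncode
    if not os.path.exists(output) or os.path.getsize(output) == 0:
        sys.stderr.write("fatal: %s is empty despite a zero exit code\n" % output)
        return 1
    return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
```

### thin_lto_link -The final link step.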
Similar to `thin_lto_index`, this involves creating a link +line to feed to the linker that uses the optimized artifacts that we just +calculated. In cases where Buck2 would put an archive on the link line, it +instead inserts `-Wl,--start-lib`, `-Wl,--end-lib`, and references to the +objects in `opt_objects`. diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index c2a9450319..c6f3f35bb8 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -16,12 +16,12 @@ load( "bolt", "cxx_use_bolt", ) +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load( "@prelude//cxx:cxx_link_utility.bzl", - "cxx_link_cmd", + "cxx_link_cmd_parts", "linker_map_args", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load( "@prelude//cxx:dwp.bzl", @@ -36,10 +36,13 @@ load( "LinkedObject", "ObjectsLinkable", "SharedLibLinkable", # @unused Used as a type + "SwiftRuntimeLinkable", # @unused Used as a type + "SwiftmoduleLinkable", # @unused Used as a type "append_linkable_args", "map_to_link_infos", "unpack_external_debug_info", ) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:lazy.bzl", "lazy") _BitcodeLinkData = record( @@ -84,7 +87,7 @@ def cxx_dist_link( links: list[LinkArgs], # The destination for the link output. output: Artifact, - linker_map: [Artifact, None] = None, + linker_map: Artifact | None = None, # A category suffix that will be added to the category of the link action that is generated. category_suffix: [str, None] = None, # An identifier that will uniquely name this link action in the context of a category. Useful for @@ -149,7 +152,7 @@ def cxx_dist_link( link_infos = map_to_link_infos(links) - cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] + cxx_toolchain = get_cxx_toolchain_info(ctx) lto_planner = cxx_toolchain.dist_lto_tools_info.planner lto_opt = cxx_toolchain.dist_lto_tools_info.opt lto_prepare = cxx_toolchain.dist_lto_tools_info.prepare @@ -176,7 +179,7 @@ def cxx_dist_link( pre_post_flags = {} # buildifier: disable=uninitialized - def add_linkable(idx: int, linkable: [ArchiveLinkable, SharedLibLinkable, ObjectsLinkable, FrameworksLinkable]): + def add_linkable(idx: int, linkable: [ArchiveLinkable, SharedLibLinkable, SwiftmoduleLinkable, SwiftRuntimeLinkable, ObjectsLinkable, FrameworksLinkable]): if idx not in linkables_index: linkables_index[idx] = [linkable] else: @@ -227,7 +230,7 @@ def cxx_dist_link( ), ) index_link_data.append(data) - plan_outputs.extend([bc_output, plan_output]) + plan_outputs.extend([bc_output.as_output(), plan_output.as_output()]) elif isinstance(linkable, ArchiveLinkable) and linkable.supports_lto: # Our implementation of Distributed ThinLTO operates on individual objects, not archives. 
Since these # archives might still contain LTO-able bitcode, we first extract the objects within the archive into @@ -275,7 +278,7 @@ def cxx_dist_link( index_link_data.append(data) archive_opt_manifests.append(archive_opt_manifest) plan_inputs.extend([archive_manifest, archive_objects]) - plan_outputs.extend([archive_indexes, archive_plan]) + plan_outputs.extend([archive_indexes.as_output(), archive_plan.as_output()]) else: add_linkable(idx, linkable) index_link_data.append(None) @@ -335,7 +338,7 @@ def cxx_dist_link( archive_args = prepend_index_args if link_data.prepend else index_args - archive_args.hidden(link_data.objects_dir) + archive_args.add(cmd_args(hidden = link_data.objects_dir)) if not link_data.link_whole: archive_args.add("-Wl,--start-lib") @@ -347,8 +350,6 @@ def cxx_dist_link( if not link_data.link_whole: archive_args.add("-Wl,--end-lib") - archive_args.hidden(link_data.objects_dir) - add_post_flags(idx) index_argfile, _ = ctx.actions.write( @@ -359,18 +360,20 @@ def cxx_dist_link( index_cat = make_cat("thin_lto_index") index_file_out = ctx.actions.declare_output(make_id(index_cat) + "/index") - index_out_dir = cmd_args(index_file_out.as_output()).parent() + index_out_dir = cmd_args(index_file_out.as_output(), parent = 1) + + index_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) - index_cmd = cxx_link_cmd(cxx_toolchain) + index_cmd = index_cmd_parts.link_cmd index_cmd.add(cmd_args(index_argfile, format = "@{}")) - output_as_string = cmd_args(output) - output_as_string.ignore_artifacts() + output_as_string = cmd_args(output, ignore_artifacts = True) index_cmd.add("-o", output_as_string) index_cmd.add(cmd_args(index_file_out.as_output(), format = "-Wl,--thinlto-index-only={}")) index_cmd.add("-Wl,--thinlto-emit-imports-files") index_cmd.add("-Wl,--thinlto-full-index") index_cmd.add(cmd_args(index_out_dir, format = "-Wl,--thinlto-prefix-replace=;{}/")) + index_cmd.add(index_cmd_parts.post_linker_flags) # Terminate the index file with a newline. index_meta.add("") @@ -382,10 +385,10 @@ def cxx_dist_link( plan_cmd = cmd_args([lto_planner, "--meta", index_meta_file, "--index", index_out_dir, "--link-plan", outputs[link_plan].as_output(), "--final-link-index", outputs[final_link_index].as_output(), "--"]) plan_cmd.add(index_cmd) - plan_extra_inputs = cmd_args() - plan_extra_inputs.add(index_meta) - plan_extra_inputs.add(index_args) - plan_cmd.hidden(plan_extra_inputs) + plan_cmd.add(cmd_args(hidden = [ + index_meta, + index_args, + ])) ctx.actions.run(plan_cmd, category = index_cat, identifier = identifier, local_only = True) @@ -396,20 +399,22 @@ def cxx_dist_link( # directly, since it uses `ctx.outputs` to bind its outputs. Instead of doing Starlark hacks to work around # the lack of `ctx.outputs`, we declare an empty file as a dynamic input. 
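(For orientation on the plan files these dynamic actions consume: further below, `optimize_object` reads `plan_json["imports"]` and `plan_json["archive_imports"]`, which are indices back into the Starlark-side `index_link_data` array. A minimal Python sketch of that resolution step follows; the JSON layout beyond those two fields is an assumption for illustration, not the planner's actual schema.)

```python
import json


def resolve_plan(plan_path, initial_objects, archive_dirs):
    """Resolve a per-object optimization plan (illustrative sketch).

    `initial_objects` and `archive_dirs` stand in for the artifacts held in
    the Starlark `index_link_data` array that the plan's index lists refer to.
    """
    with open(plan_path) as f:
        plan = json.load(f)

    # Each entry is an index back into the Starlark array, which is why the
    # opt action must declare the referenced artifacts as hidden inputs.
    imports = [initial_objects[i] for i in plan.get("imports", [])]
    archives = [archive_dirs[i] for i in plan.get("archive_imports", [])]
    return imports, archives
```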
plan_inputs.append(ctx.actions.write(output.basename + ".plan_hack.txt", "")) - plan_outputs.extend([link_plan, index_argsfile_out, final_link_index]) + plan_outputs.extend([link_plan.as_output(), index_argsfile_out.as_output(), final_link_index.as_output()]) ctx.actions.dynamic_output(dynamic = plan_inputs, inputs = [], outputs = plan_outputs, f = plan) link_plan_out = ctx.actions.declare_output(output.basename + ".link-plan.json") dynamic_plan(link_plan = link_plan_out, index_argsfile_out = index_argsfile_out, final_link_index = final_link_index) def prepare_opt_flags(link_infos: list[LinkInfo]) -> cmd_args: - opt_args = cmd_args() - opt_args.add(cxx_link_cmd(cxx_toolchain)) + opt_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) + opt_args = opt_cmd_parts.link_cmd # buildifier: disable=uninitialized for link in link_infos: for raw_flag in link.pre_flags + link.post_flags: opt_args.add(raw_flag) + + opt_args.add(opt_cmd_parts.post_linker_flags) return opt_args opt_common_flags = prepare_opt_flags(link_infos) @@ -451,7 +456,7 @@ def cxx_dist_link( # Create an argsfile and dump all the flags to be processed later. opt_argsfile = ctx.actions.declare_output(outputs[opt_object].basename + ".opt.argsfile") ctx.actions.write(opt_argsfile.as_output(), opt_common_flags, allow_args = True) - opt_cmd.hidden(opt_common_flags) + opt_cmd.add(cmd_args(hidden = opt_common_flags)) opt_cmd.add("--args", opt_argsfile) opt_cmd.add("--") @@ -459,11 +464,10 @@ def cxx_dist_link( imports = [index_link_data[idx].link_data.initial_object for idx in plan_json["imports"]] archives = [index_link_data[idx].link_data.objects_dir for idx in plan_json["archive_imports"]] - opt_cmd.hidden(imports) - opt_cmd.hidden(archives) + opt_cmd.add(cmd_args(hidden = imports + archives)) ctx.actions.run(opt_cmd, category = make_cat("thin_lto_opt_object"), identifier = name) - ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object], f = optimize_object) + ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object.as_output()], f = optimize_object) def dynamic_optimize_archive(archive: _ArchiveLinkData): def optimize_archive(ctx: AnalysisContext, artifacts, outputs): @@ -488,9 +492,7 @@ def cxx_dist_link( opt_object.as_output(), "--from", entry["path"], - ]) - - copy_cmd.hidden(archive.objects_dir) + ], hidden = archive.objects_dir) ctx.actions.run(copy_cmd, category = make_cat("thin_lto_opt_copy"), identifier = source_path) output_dir[source_path] = opt_object continue @@ -503,14 +505,14 @@ def cxx_dist_link( opt_cmd.add("--input", entry["path"]) opt_cmd.add("--index", entry["bitcode_file"]) - if cxx_toolchain.split_debug_mode == SplitDebugMode("none") or ctx.attrs.distributed_thinlto_partial_split_dwarf: + if cxx_toolchain.split_debug_mode == SplitDebugMode("none") or getattr(ctx.attrs, "distributed_thinlto_partial_split_dwarf", False): opt_cmd.add("--split-dwarf=none") elif cxx_toolchain.split_debug_mode == SplitDebugMode("single"): opt_cmd.add("--split-dwarf=single") opt_argsfile = ctx.actions.declare_output(opt_object.basename + ".opt.argsfile") ctx.actions.write(opt_argsfile.as_output(), opt_common_flags, allow_args = True) - opt_cmd.hidden(opt_common_flags) + opt_cmd.add(cmd_args(hidden = opt_common_flags)) opt_cmd.add("--args", opt_argsfile) opt_cmd.add("--") @@ -518,17 +520,16 @@ def cxx_dist_link( imports = [index_link_data[idx].link_data.initial_object for idx in entry["imports"]] archives = [index_link_data[idx].link_data.objects_dir for idx in entry["archive_imports"]] - 
opt_cmd.hidden(imports) - opt_cmd.hidden(archives) - opt_cmd.hidden(archive.indexes_dir) - opt_cmd.hidden(archive.objects_dir) + opt_cmd.add(cmd_args( + hidden = imports + archives + [archive.indexes_dir, archive.objects_dir], + )) ctx.actions.run(opt_cmd, category = make_cat("thin_lto_opt_archive"), identifier = source_path) ctx.actions.symlinked_dir(outputs[archive.opt_objects_dir], output_dir) ctx.actions.write(outputs[archive.opt_manifest], output_manifest, allow_args = True) archive_opt_inputs = [archive.plan] - archive_opt_outputs = [archive.opt_objects_dir, archive.opt_manifest] + archive_opt_outputs = [archive.opt_objects_dir.as_output(), archive.opt_manifest.as_output()] ctx.actions.dynamic_output(dynamic = archive_opt_inputs, inputs = [], outputs = archive_opt_outputs, f = optimize_archive) for artifact in index_link_data: @@ -576,28 +577,31 @@ def cxx_dist_link( current_index += 1 link_args.add(link.post_flags) - link_cmd = cxx_link_cmd(cxx_toolchain) - final_link_argfile, final_link_inputs = ctx.actions.write( - outputs[linker_argsfile_out].as_output(), - link_args, - allow_args = True, - ) + link_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) + link_cmd = link_cmd_parts.link_cmd + link_cmd_hidden = [] # buildifier: disable=uninitialized for artifact in index_link_data: if artifact != None and artifact.data_type == _DataType("archive"): - link_cmd.hidden(artifact.link_data.opt_objects_dir) - link_cmd.add(cmd_args(final_link_argfile, format = "@{}")) + link_cmd_hidden.append(artifact.link_data.opt_objects_dir) + link_cmd.add(at_argfile( + actions = ctx.actions, + name = outputs[linker_argsfile_out], + args = link_args, + allow_args = True, + )) link_cmd.add(cmd_args(final_link_index, format = "@{}")) link_cmd.add("-o", outputs[output].as_output()) if linker_map: link_cmd.add(linker_map_args(cxx_toolchain, outputs[linker_map].as_output()).flags) - link_cmd_extra_inputs = cmd_args() - link_cmd_extra_inputs.add(final_link_inputs) - link_cmd.hidden(link_cmd_extra_inputs) - link_cmd.hidden(link_args) - link_cmd.hidden(opt_objects) - link_cmd.hidden(archives) + link_cmd_hidden.extend([ + link_args, + opt_objects, + archives, + ]) + link_cmd.add(link_cmd_parts.post_linker_flags) + link_cmd.add(cmd_args(hidden = link_cmd_hidden)) ctx.actions.run(link_cmd, category = make_cat("thin_lto_link"), identifier = identifier, local_only = True) @@ -605,7 +609,7 @@ def cxx_dist_link( ctx.actions.dynamic_output( dynamic = final_link_inputs, inputs = [], - outputs = [output] + ([linker_map] if linker_map else []) + [linker_argsfile_out], + outputs = [output.as_output()] + ([linker_map.as_output()] if linker_map else []) + [linker_argsfile_out.as_output()], f = thin_lto_final_link, ) @@ -617,7 +621,7 @@ def cxx_dist_link( ], ) - final_output = output if not (executable_link and cxx_use_bolt(ctx)) else bolt(ctx, output, identifier) + final_output = output if not (executable_link and cxx_use_bolt(ctx)) else bolt(ctx, output, external_debug_info, identifier) dwp_output = ctx.actions.declare_output(output.short_path.removesuffix("-wrapper") + ".dwp") if generate_dwp else None if generate_dwp: diff --git a/prelude/cxx/dist_lto/tools/BUCK.v2 b/prelude/cxx/dist_lto/tools/BUCK.v2 index 3abffa2823..3b9664e240 100644 --- a/prelude/cxx/dist_lto/tools/BUCK.v2 +++ b/prelude/cxx/dist_lto/tools/BUCK.v2 @@ -1,4 +1,9 @@ load("@prelude//cxx/dist_lto:tools.bzl", "dist_lto_tools") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() prelude = native @@ -14,6 +19,12 
@@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "dist_lto_opt_darwin", + main = "dist_lto_opt_darwin.py", + visibility = ["PUBLIC"], +) + prelude.python_bootstrap_binary( name = "dist_lto_prepare", main = "dist_lto_prepare.py", @@ -27,18 +38,27 @@ prelude.python_bootstrap_binary( ) dist_lto_tools( - name = "dist_lto_tools", + name = "dist_lto_tools_darwin", + copy = ":dist_lto_copy", + opt = ":dist_lto_opt_darwin", planner = ":dist_lto_planner", - opt = ":dist_lto_opt", prepare = ":dist_lto_prepare", + visibility = ["PUBLIC"], +) + +dist_lto_tools( + name = "dist_lto_tools", copy = ":dist_lto_copy", + opt = ":dist_lto_opt", + planner = ":dist_lto_planner", + prepare = ":dist_lto_prepare", visibility = ["PUBLIC"], ) prelude.python_test( name = "test_dist_lto_opt", srcs = [ - "tests/test_dist_lto_opt.py", "dist_lto_opt.py", + "tests/test_dist_lto_opt.py", ], ) diff --git a/prelude/cxx/dist_lto/tools/dist_lto_opt.py b/prelude/cxx/dist_lto/tools/dist_lto_opt.py index 183b24c548..e6d34d3b6f 100644 --- a/prelude/cxx/dist_lto/tools/dist_lto_opt.py +++ b/prelude/cxx/dist_lto/tools/dist_lto_opt.py @@ -24,6 +24,7 @@ EXIT_SUCCESS, EXIT_FAILURE = 0, 1 + # Filter opt related flags def _filter_flags(clang_flags: List[str]) -> List[str]: # noqa: C901 # List of llvm flags to be ignored. @@ -46,8 +47,6 @@ def _filter_flags(clang_flags: List[str]) -> List[str]: # noqa: C901 # this setting matches current llvm implementation: # https://github.com/llvm/llvm-project/blob/main/llvm/include/llvm/LTO/Config.h#L57 "-O2", - # TODO(T139459170): Remove after clang-15. NPM is the default. - "-fexperimental-new-pass-manager", "-ffunction-sections", "-fdata-sections", ] @@ -238,7 +237,7 @@ def main(argv: List[str]) -> int: # 1. a spliter "--", it's not used anywhere; # 2. the fbcc wrapper script path # 3. the "-cc" arg pointing to the compiler we use - # EXAMPLE: ['--', 'buck-out/v2/gen/fbcode/8e3db19fe005003a/tools/build/buck/wrappers/__fbcc__/fbcc', '--cc=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/clang++', '--target=x86_64-redhat-linux-gnu', ...] + # EXAMPLE: ['--', 'buck-out/v2/gen/fbcode/8e3db19fe005003a/tools/build/buck/wrappers/__fbcc__/fbcc', '--cc=fbcode/third-party-buck/platform010/build/llvm-fb//bin/clang++', '--target=x86_64-redhat-linux-gnu', ...] clang_cc1_flags = _cleanup_flags(args.opt_args[2:] + clang_opt_flags) if clang_cc1_flags is None: return EXIT_FAILURE diff --git a/prelude/cxx/dist_lto/tools/dist_lto_opt_darwin.py b/prelude/cxx/dist_lto/tools/dist_lto_opt_darwin.py new file mode 100644 index 0000000000..429663fcda --- /dev/null +++ b/prelude/cxx/dist_lto/tools/dist_lto_opt_darwin.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Python wrapper around clang intended to optimize and codegen bitcode files +to native object files for distributed thin lto. This script munges compiler +flags to prepare a suitable clang invocation. 
+""" + +import argparse +import subprocess +import sys + +from typing import List + + +def main(argv: List[str]) -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--out", help="The output native object file.") + parser.add_argument("--input", help="The input bitcode object file.") + parser.add_argument("--index", help="The thinlto index file.") + # Split dwarf isn't applicable to Darwin, ignore the flag + parser.add_argument("--split-dwarf", required=False, help="Split dwarf option.") + parser.add_argument( + "--args", help="The argsfile containing unfiltered and unprocessed flags." + ) + parser.add_argument("opt_args", nargs=argparse.REMAINDER) + args = parser.parse_args(argv[1:]) + + with open(args.args, "r") as argsfile: + clang_opt_flags = argsfile.read().splitlines() + + clang_opt_flags.extend( + [ + "-o", + args.out, + "-x", + "ir", # Without this the input file type is incorrectly inferred. + "-c", + args.input, + f"-fthinlto-index={args.index}", + # When lto_mode=thin/full all compile actions are passed `-flto=thin/full`. We + # want to generate a native object file here. + "-fno-lto", + ] + ) + + # TODO(T187767988) - Check if returning the subprocesses exit code is sufficient. Server LLVM created such a wrapper + # script in the first place because of a bug in Clang where it fails but does not set a non-zero exit code (T116695431). Fbcode's + # version of this script measure the size of the output file to determine success. The task is closed, but if the bug + # still persists, we may need to do the same. + result = subprocess.run(clang_opt_flags) + return result.returncode + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/prelude/cxx/dist_lto/tools/dist_lto_prepare.py b/prelude/cxx/dist_lto/tools/dist_lto_prepare.py index 69f7fce54c..3fe9983963 100644 --- a/prelude/cxx/dist_lto/tools/dist_lto_prepare.py +++ b/prelude/cxx/dist_lto/tools/dist_lto_prepare.py @@ -15,8 +15,10 @@ import enum import json import os +import shutil import subprocess import sys +import tempfile from typing import List, Tuple @@ -80,67 +82,58 @@ def main(argv: List[str]) -> int: # a long time, llvm-ar does not support --output and the change in llvm-ar # looks like it has stalled for years (https://reviews.llvm.org/D69418) # So, we need to invoke ar in the directory that we want it to extract into, and so - # need to adjust some paths. - ar_path = os.path.relpath(args.ar, start=objects_path) - archive_path = os.path.relpath(args.archive, start=objects_path) + # need absolute paths. + ar_path = os.path.abspath(args.ar) + archive_path = os.path.abspath(args.archive) output = subprocess.check_output( [ar_path, "t", archive_path], cwd=objects_path ).decode() member_list = [member for member in output.split("\n") if member] - # no duplicated filename + # This will extract all the members of the archive, including duplicates + # replacing existing duplicates. That is if first/foo.txt and second/foo.txt + # are placed in an archive in that order, this will leave second/foo.txt + # in the objects_path. output = subprocess.check_output( [ar_path, "xv", archive_path], cwd=objects_path ).decode() - for line in output.splitlines(): - assert line.startswith("x - ") - obj = line[4:] - known_objects.append(_gen_path(objects_path, obj)) # Count all members of the same name. 
counter = {} for member in member_list: counter.setdefault(member, 0) counter[member] += 1 - - for member, count in counter.items(): - if count <= 1: - continue - for current in range(1, count + 1): - if current == 1: - # just extract the file - output = subprocess.check_output( - [ar_path, "xN", str(current), archive_path, member], - cwd=objects_path, - ).decode() - assert not output - # We've already added this above. - else: - # llvm doesn't allow --output so we need this clumsiness - tmp_filename = "tmp" - current_file = _gen_filename(member, current) - # rename current 'member' file to tmp - output = subprocess.check_output( - ["mv", member, tmp_filename], cwd=objects_path - ).decode() - assert not output + # Insert all objects at most once into the list of known objects + if counter[member] == 1: + known_objects.append(_gen_path(objects_path, member)) + + with tempfile.TemporaryDirectory() as temp_dir: + # For each duplicate member, rename and extract duplicates 1 through N + # inclusive. While N was already extracted above, we don't want to rely + # upon this implementation detail of llvm-ar. + for member, count in counter.items(): + if count <= 1: + continue + for current in range(1, count + 1): # extract the file from archive output = subprocess.check_output( - [ar_path, "xN", str(current), archive_path, member], - cwd=objects_path, - ).decode() - assert not output - # rename the newly extracted file - output = subprocess.check_output( - ["mv", member, current_file], cwd=objects_path - ).decode() - assert not output - # rename the tmp file back to 'member' - output = subprocess.check_output( - ["mv", tmp_filename, member], cwd=objects_path + [ + ar_path, + "xN", + str(current), + archive_path, + member, + ], + cwd=temp_dir, ).decode() - assert not output - known_objects.append(_gen_path(objects_path, current_file)) + unique_name = _gen_filename(member, current) + # rename and move the newly extracted file to objects_path + shutil.move( + os.path.join(temp_dir, member), + os.path.join(os.path.abspath(objects_path), unique_name), + ) + if current > 1: + known_objects.append(_gen_path(objects_path, unique_name)) elif file_type == ArchiveKind.THIN_ARCHIVE: output = subprocess.check_output([args.ar, "t", args.archive]).decode() diff --git a/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py b/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py index 83cda1e6d9..db1f53b5b3 100644 --- a/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py +++ b/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py @@ -30,7 +30,6 @@ def test_filter_flags(self): flags, [ "-O2", - "-fexperimental-new-pass-manager", "-ffunction-sections", "-fdata-sections", "-mllvm", @@ -52,16 +51,13 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): "--target=x86_64-redhat-linux-gnu", "-nostdinc", "-resource-dir", - "fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable", "-idirafter", - "fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable/include", "-idirafter", "fbcode/third-party-buck/platform010/build/glibc/include", "-idirafter", "fbcode/third-party-buck/platform010/build/kernel-headers/include", "-Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin", "--cflag=--target=x86_64-redhat-linux-gnu", - "--ar=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/llvm-ar", "-Bfbcode/third-party-buck/platform010/build/glibc/lib", "-Bfbcode/third-party-buck/platform010/tools/gcc/lib/gcc/x86_64-redhat-linux-gnu/trunk", 
"-Lfbcode/third-party-buck/platform010/build/libgcc/lib/gcc/x86_64-facebook-linux/trunk", @@ -69,7 +65,6 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): "-Wl,--dynamic-linker,/usr/local/fbcode/platform010/lib/ld.so", "-Wl,--disable-new-dtags", "-Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin", - "-Bbuck-out/v2/gen/fbcode/8e3db19fe005003a/third-party-buck/platform010/build/llvm-fb/12/__lld_path__/lld_path/bin", "-Wl,--no-mmap-output-file", "-nodefaultlibs", "--target=x86_64-redhat-linux-gnu", @@ -92,7 +87,6 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): "-Wl,-mllvm,-hot-callsite-threshold=12000", "-Wl,--lto-whole-program-visibility", "-fwhole-program-vtables", - "-fexperimental-new-pass-manager", "-Wl,--no-discard-section=.nv_fatbin", "-Wl,--no-discard-section=.nvFatBinSegment", "fbcode/tools/build/move_gpu_sections_implicit_linker_script.txt", @@ -143,7 +137,6 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): flags, [ "-O2", - "-fexperimental-new-pass-manager", "-ffunction-sections", "-fdata-sections", "-mllvm", @@ -159,10 +152,7 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): def test_filter_flags_unicorn_case_rev_0f8618f31(self): inputs = [ - "--ld=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/clang++", - "--cc=buck-out/v2/gen/fbcode/8e3db19fe005003a/tools/build/buck/wrappers/__fbcc__/fbcc --cc=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/clang --target=x86_64-redhat-linux-gnu -nostdinc -resource-dir fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable -idirafter fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable/include -idirafter fbcode/third-party-buck/platform010/build/glibc/include -idirafter fbcode/third-party-buck/platform010/build/kernel-headers/include -Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin", "--cflag=--target=x86_64-redhat-linux-gnu", - "--ar=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/llvm-ar", "-Bfbcode/third-party-buck/platform010/build/glibc/lib", "-Bfbcode/third-party-buck/platform010/tools/gcc/lib/gcc/x86_64-redhat-linux-gnu/trunk", "-Lfbcode/third-party-buck/platform010/build/libgcc/lib/gcc/x86_64-facebook-linux/trunk", @@ -170,7 +160,6 @@ def test_filter_flags_unicorn_case_rev_0f8618f31(self): "-Wl,--dynamic-linker,/usr/local/fbcode/platform010/lib/ld.so", "-Wl,--disable-new-dtags", "-Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin", - "-Bbuck-out/v2/gen/fbcode/8e3db19fe005003a/third-party-buck/platform010/build/llvm-fb/12/__lld_path__/lld_path/bin", "-Wl,--no-mmap-output-file", "-nodefaultlibs", "--target=x86_64-redhat-linux-gnu", @@ -190,7 +179,6 @@ def test_filter_flags_unicorn_case_rev_0f8618f31(self): "-Wl,--discard-section=.rela.debug_types", "-Wl,-O1", "-Wl,--build-id=sha1", - "-fexperimental-new-pass-manager", "-Xlinker", "-znow", "-Xlinker", @@ -262,7 +250,6 @@ def test_filter_flags_unicorn_case_rev_0f8618f31(self): flags, [ "-O2", - "-fexperimental-new-pass-manager", "-ffunction-sections", "-fdata-sections", "-fprofile-sample-use=buck-out/v2/gen/fbcode/40fc99293b37c503/fdo/autofdo/default_profile/__autofdo__/out/profile", diff --git a/prelude/cxx/dwp.bzl b/prelude/cxx/dwp.bzl index c3cafa7ba8..76e6e3c4e5 100644 --- a/prelude/cxx/dwp.bzl +++ b/prelude/cxx/dwp.bzl @@ -24,17 +24,17 @@ def run_dwp_action( referenced_objects: [ArgLike, list[Artifact]], dwp_output: Artifact, local_only: bool): - args = cmd_args() dwp = toolchain.binary_utilities_info.dwp - # llvm 
trunk now supports 64-bit debug cu indedx, add --continue-on-cu-index-overflow by default
-    # to suppress dwp file overflow warning
-    args.add("/bin/sh", "-c", '"$1" --continue-on-cu-index-overflow -o "$2" -e "$3" && touch "$2"', "")
-    args.add(dwp, dwp_output.as_output(), obj)
-
-    # All object/dwo files referenced in the library/executable are implicitly
-    # processed by dwp.
-    args.hidden(referenced_objects)
+    args = cmd_args(
+        # llvm trunk now supports 64-bit debug cu index, add --continue-on-cu-index-overflow by default
+        # to suppress dwp file overflow warning
+        ["/bin/sh", "-c", '"$1" --continue-on-cu-index-overflow -o "$2" -e "$3" && touch "$2"', ""] +
+        [dwp, dwp_output.as_output(), obj],
+        # All object/dwo files referenced in the library/executable are implicitly
+        # processed by dwp.
+        hidden = referenced_objects,
+    )
 
     category = "dwp"
     if category_suffix != None:
diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl
index 8187869919..046b78a164 100644
--- a/prelude/cxx/groups.bzl
+++ b/prelude/cxx/groups.bzl
@@ -5,10 +5,8 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
 
-load(
-    "@prelude//linking:link_info.bzl",
-    "Linkage",
-)
+load("@prelude//cxx:groups_types.bzl", "Traversal")
+load("@prelude//linking:types.bzl", "Linkage")
 load(
     "@prelude//utils:build_target_pattern.bzl",
     "BuildTargetPattern",
@@ -16,7 +14,7 @@ load(
 )
 load(
     "@prelude//utils:graph_utils.bzl",
-    "breadth_first_traversal_by",
+    "depth_first_traversal_by",
 )
 load(
     "@prelude//utils:strings.bzl",
@@ -24,55 +22,18 @@
 )
 load(
     "@prelude//utils:utils.bzl",
-    "map_val",
     "value_or",
 )
-
-# Types of group traversal
-Traversal = enum(
-    # Includes the target and all of it's transitive dependencies in the group.
-    "tree",
-    # Includes only the target in the group.
-    "node",
-    # Uses pattern and separates all targets by full folder path.
-    "subfolders",
-)
-
-# Optional type of filtering
-FilterType = enum(
-    # Filters for targets with labels matching the regex pattern defined after `label:`.
-    "label",
-    # Filters for targets for the build target pattern defined after "pattern:".
-    "pattern",
-)
-
-BuildTargetFilter = record(
-    pattern = field(BuildTargetPattern),
-    _type = field(FilterType, FilterType("pattern")),
-)
-
-LabelFilter = record(
-    regex = regex,
-    _type = field(FilterType, FilterType("label")),
-)
-
-# Label for special group mapping which makes every target associated with it to be included in all groups
-MATCH_ALL_LABEL = "MATCH_ALL"
-
-# Label for special group mapping which makes every target associated with it to be linked directly
-# against the final binary
-NO_MATCH_LABEL = "NO_MATCH"
-
-# Representation of a parsed group mapping
-GroupMapping = record(
-    # The root to apply this mapping to.
-    root = field([Label, None], None),
-    # The type of traversal to use.
-    traversal = field(Traversal, Traversal("tree")),
-    # Optional filter type to apply to the traversal.
-    filters = field(list[[BuildTargetFilter, LabelFilter]], []),
-    # Preferred linkage for this target when added to a link group. 
-    preferred_linkage = field([Linkage, None], None),
+load(
+    ":groups_types.bzl",
+    "BuildTargetFilter",
+    "FilterType",
+    "Group",
+    "GroupAttrs",
+    "GroupDefinition",
+    "GroupMapping",
+    "LabelFilter",
+    "TargetRegexFilter",
 )
 
 _VALID_ATTRS = [
@@ -82,46 +43,28 @@
     "discard_group",
     "linker_flags",
     "requires_root_node_exists",
+    "prohibit_file_duplicates",
 ]
 
-# Representation of group attributes
-GroupAttrs = record(
-    # Use distributed thinlto to build the link group shared library.
-    enable_distributed_thinlto = field(bool, False),
-    # Enable this link group if the binary's node count exceeds the given threshold
-    enable_if_node_count_exceeds = field([int, None], None),
-    # Discard all dependencies in the link group, useful for dropping unused dependencies
-    # from the build graph.
-    discard_group = field(bool, False),
-    # Adds additional linker flags used to link the link group shared object.
-    linker_flags = field(list, []),
-    # Adds additional linker flags to apply to dependents that link against the
-    # link group's shared object.
-    exported_linker_flags = field(list, []),
-    # Requires root nodes in specs to always exist in dependency graph.
-    # Otherwise fails.
-    requires_root_node_exists = field(bool, True),
-)
-
-# Types of group traversal
-GroupDefinition = enum(
-    # Group is explicitly defined in mapping provided by user.
-    # That is the default behavior.
-    "explicit",
-    # Group is implicitly created during mapping computations.
-    # For example, group can be created for "subfolders" traversal.
-    "implicit",
-)
-
-# Representation of a parsed group
-Group = record(
-    # The name for this group.
-    name = str,
-    # The mappings that are part of this group.
-    mappings = list[GroupMapping],
-    attrs = GroupAttrs,
-    definition_type = field(GroupDefinition, GroupDefinition("explicit")),
-)
+# Traversal types in this list will only assign the node itself
+# to the group (as opposed to the transitive deps of the node's tree).
+_TRAVERSALS_TO_ASSIGN_NODE = [
+    Traversal("node"),
+    Traversal("subfolders"),
+    # TODO (dust): Possible perf optimization:
+    # When intersecting configured targets, it's not possible to intersect
+    # a parent without also intersecting its children.
+    #
+    # As a result, there's a possible perf optimization to assign 'tree'
+    # to intersected targets instead, and leverage that to avoid traversing
+    # the entire tree of every root.
+    #
+    # For example:
+    # If iterating the tree of 'root2' we find a node which
+    # was also present in 'root1', we can skip traversing the subtree
+    # because it's inevitable that everything is going to match there too. 
+ Traversal("intersect_any_roots"), +] # Creates a group from an existing group, overwriting any properties provided def create_group( @@ -137,6 +80,10 @@ def create_group( definition_type = value_or(definition_type, group.definition_type), ) +def get_roots_from_mapping(mapping): + deps = mapping[0] if type(mapping[0]) == "list" else [mapping[0]] + return filter(None, deps) + def parse_groups_definitions( map: list, # Function to parse a root label from the input type, allowing different @@ -158,17 +105,24 @@ def parse_groups_definitions( discard_group = attrs.get("discard_group", False), linker_flags = attrs.get("linker_flags", []), requires_root_node_exists = attrs.get("requires_root_node_exists", True), + prohibit_file_duplicates = attrs.get("prohibit_file_duplicates", False), ) parsed_mappings = [] for entry in mappings: traversal = _parse_traversal_from_mapping(entry[1]) mapping = GroupMapping( - root = map_val(parse_root, entry[0]), + roots = filter(None, [parse_root(root) for root in get_roots_from_mapping(entry)]), traversal = traversal, filters = _parse_filter_from_mapping(entry[2]), preferred_linkage = Linkage(entry[3]) if len(entry) > 3 and entry[3] else None, ) + num_roots = len(mapping.roots) if mapping.roots else 0 + if num_roots > 1 and mapping.traversal != Traversal("intersect_any_roots"): + fail("Invariant. A link_group mapping with traversal type: {} can only have 1 root node. {} found.".format(mapping.traversal, mapping.roots)) + elif mapping.traversal == Traversal("intersect_any_roots") and num_roots < 2: + fail("Invariant. A link_group mapping with traversal type 'intersect' must have at least 2 root nodes. {} found.".format(mapping.roots)) + parsed_mappings.append(mapping) group = Group( @@ -188,10 +142,12 @@ def _parse_traversal_from_mapping(entry: str) -> Traversal: return Traversal("node") elif entry == "subfolders": return Traversal("subfolders") + elif entry == "intersect_any_roots": + return Traversal("intersect_any_roots") else: fail("Unrecognized group traversal type: " + entry) -def _parse_filter(entry: str) -> [BuildTargetFilter, LabelFilter]: +def _parse_filter(entry: str) -> [BuildTargetFilter, LabelFilter, TargetRegexFilter]: for prefix in ("label:", "tag:"): label_regex = strip_prefix(prefix, entry) if label_regex != None: @@ -203,15 +159,19 @@ def _parse_filter(entry: str) -> [BuildTargetFilter, LabelFilter]: regex = regex("^{}$".format(label_regex), fancy = True), ) + target_regex = strip_prefix("target_regex:", entry) + if target_regex != None: + return TargetRegexFilter(regex = regex("^{}$".format(target_regex), fancy = True)) + pattern = strip_prefix("pattern:", entry) if pattern != None: return BuildTargetFilter( pattern = parse_build_target_pattern(pattern), ) - fail("Invalid group mapping filter: {}\nFilter must begin with `label:`, `tag:`, or `pattern:`.".format(entry)) + fail("Invalid group mapping filter: {}\nFilter must begin with `label:`, `tag:`, `target_regex` or `pattern:`.".format(entry)) -def _parse_filter_from_mapping(entry: [list[str], str, None]) -> list[[BuildTargetFilter, LabelFilter]]: +def _parse_filter_from_mapping(entry: [list[str], str, None]) -> list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]]: if type(entry) == type([]): return [_parse_filter(e) for e in entry] if type(entry) == type(""): @@ -239,14 +199,27 @@ def compute_mappings(groups_map: dict[str, Group], graph_map: dict[Label, typing return target_to_group_map +def get_dedupped_roots_from_groups(groups: list[Group]) -> list[Label]: + roots = {} + for group in 
groups: + for mapping in group.mappings: + if not mapping.roots: + continue + + for root in mapping.roots: + roots[root] = True + + return list(roots.keys()) + def _find_targets_in_mapping( graph_map: dict[Label, typing.Any], mapping: GroupMapping) -> list[Label]: # If we have no filtering, we don't need to do any traversal to find targets to include. if not mapping.filters: - if mapping.root == None: + if not mapping.roots: fail("no filter or explicit root given: {}", mapping) - return [mapping.root] + elif mapping.traversal != Traversal("intersect_any_roots"): + return mapping.roots # Else find all dependencies that match the filter. matching_targets = {} @@ -266,11 +239,14 @@ def _find_targets_in_mapping( if filter._type == FilterType("label"): if not any_labels_match(filter.regex, labels): return False + elif filter._type == FilterType("target_regex"): + target_str = str(target.raw_target()) + return filter.regex.match(target_str) elif not filter.pattern.matches(target): return False return True - def find_matching_targets(node): # Label -> [Label]: + def populate_matching_targets(node): # Label -> bool: graph_node = graph_map[node] if matches_target(node, graph_node.labels): matching_targets[node] = None @@ -278,17 +254,77 @@ def _find_targets_in_mapping( # We can stop traversing the tree at this point because we've added the # build target to the list of all targets that will be traversed by the # algorithm that applies the groups. - return [] - return graph_node.deps + graph_node.exported_deps + return False + return True + + def populate_matching_targets_bfs_wrapper(node): # (Label) -> list + if populate_matching_targets(node): + graph_node = graph_map[node] + return graph_node.deps + graph_node.exported_deps + return [] - if mapping.root == None: + if not mapping.roots: for node in graph_map: - find_matching_targets(node) + populate_matching_targets(node) + elif mapping.traversal == Traversal("intersect_any_roots"): + targets_to_counter = {} + for root in mapping.roots: + # This is a captured variable inside `populate_matching_targets`. + # We reset it for each root we visit so that we don't have results + # from other roots. + matching_targets = {} + depth_first_traversal_by(graph_map, [root], populate_matching_targets_bfs_wrapper) + for t in matching_targets: + targets_to_counter[t] = targets_to_counter.get(t, 0) + 1 + + return [ + t + for t, count in targets_to_counter.items() + if count > 1 + ] else: - breadth_first_traversal_by(graph_map, [mapping.root], find_matching_targets) + depth_first_traversal_by(graph_map, mapping.roots, populate_matching_targets_bfs_wrapper) return matching_targets.keys() +# Extracted from `_update_target_to_group_mapping` to avoid function allocations inside the loop +def _assign_target_to_group( + target_to_group_map, #: {"label": str} + node_traversed_targets, #: {"label": None} + group, # Group, + groups_map, # {str: Group} + mapping, # GroupMapping + target, # Label + node_traversal): # bool + # If the target hasn't already been assigned to a group, assign it to the + # first group claiming the target. Return whether the target was already assigned. 
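(A note on the shape of this refactor before `_assign_target_to_group`'s body continues below: hoisting the nested helpers to module scope and re-binding their captured state with `partial` means the hot loop no longer compiles a fresh closure on every call. Starlark's `partial` behaves like Python's `functools.partial`; a minimal Python analogy, with hypothetical names:)

```python
from functools import partial


def _assign(group_map, group_name, target):
    # Module-level worker: defined once at load time, never re-created.
    return group_map.setdefault(target, group_name)


def update_mapping(group_map, group_name, targets):
    # One small bound-arguments object per call site, instead of compiling
    # a fresh nested function (and its closure) on every invocation.
    assign = partial(_assign, group_map, group_name)
    for target in targets:
        assign(target)
```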
+ if target not in target_to_group_map: + if mapping.traversal == Traversal("subfolders"): + generated_group_name = _generate_group_subfolder_name(group.name, target.package) + _add_to_implicit_link_group(generated_group_name, group, groups_map, target_to_group_map, target) + else: + target_to_group_map[target] = group.name + + if node_traversal: + node_traversed_targets[target] = None + return False + else: + return True + +# Extracted from `_update_target_to_group_mapping` to avoid function allocations inside the loop +def _transitively_add_targets_to_group_mapping( + assign_target_to_group, # (Label, bool) -> bool + node_traversed_targets, #: {"label": None} + graph_map, # {"label": "_b"} + node): # ([Label]) -> None + previously_processed = assign_target_to_group(node, False) + + # If the node has been previously processed, and it was via tree (not node), all child nodes have been assigned + if previously_processed and node not in node_traversed_targets: + return None + graph_node = graph_map[node] + return graph_node.deps + graph_node.exported_deps + # Types removed to avoid unnecessary type checking which degrades performance. def _update_target_to_group_mapping( graph_map, # {"label": "_b"} @@ -298,37 +334,13 @@ def _update_target_to_group_mapping( groups_map, # {str: Group} mapping, # GroupMapping target): # Label - def assign_target_to_group( - target: Label, - node_traversal: bool) -> bool: - # If the target hasn't already been assigned to a group, assign it to the - # first group claiming the target. Return whether the target was already assigned. - if target not in target_to_group_map: - if mapping.traversal == Traversal("subfolders"): - generated_group_name = _generate_group_subfolder_name(group.name, target.package) - _add_to_implicit_link_group(generated_group_name, group, groups_map, target_to_group_map, target) - else: - target_to_group_map[target] = group.name - - if node_traversal: - node_traversed_targets[target] = None - return False - else: - return True - - def transitively_add_targets_to_group_mapping(node: Label) -> list[Label]: - previously_processed = assign_target_to_group(target = node, node_traversal = False) - - # If the node has been previously processed, and it was via tree (not node), all child nodes have been assigned - if previously_processed and node not in node_traversed_targets: - return [] - graph_node = graph_map[node] - return graph_node.deps + graph_node.exported_deps + assign_target_to_group = partial(_assign_target_to_group, target_to_group_map, node_traversed_targets, group, groups_map, mapping) # (Label, bool) -> bool + transitively_add_targets_to_group_mapping = partial(_transitively_add_targets_to_group_mapping, assign_target_to_group, node_traversed_targets, graph_map) # (Label) -> list[Label] - if mapping.traversal == Traversal("node") or mapping.traversal == Traversal("subfolders"): - assign_target_to_group(target = target, node_traversal = True) + if mapping.traversal in _TRAVERSALS_TO_ASSIGN_NODE: + assign_target_to_group(target, True) else: # tree - breadth_first_traversal_by(graph_map, [target], transitively_add_targets_to_group_mapping) + depth_first_traversal_by(graph_map, [target], transitively_add_targets_to_group_mapping) def _add_to_implicit_link_group( generated_group_name, # str @@ -366,3 +378,47 @@ def _hash_group_name(prefix: str, name: str) -> str: Hash algorithm is stable in starlark: https://fburl.com/code/ptegkov6 """ return "{}_{}".format(prefix, str(hash(name))) + +def 
_make_json_info_for_build_target_pattern(build_target_pattern: BuildTargetPattern) -> dict[str, typing.Any]:
+    # `BuildTargetPattern` contains lambdas which are not serializable, so we
+    # have to generate the JSON representation
+    return {
+        "cell": build_target_pattern.cell,
+        "kind": build_target_pattern.kind,
+        "name": build_target_pattern.name,
+        "path": build_target_pattern.path,
+    }
+
+def _make_json_info_for_group_mapping_filters(filters: list[[BuildTargetFilter, LabelFilter]]) -> list[dict[str, typing.Any]]:
+    json_filters = []
+    for filter in filters:
+        if filter._type == FilterType("label"):
+            json_filters += [{"regex": str(filter.regex)}]
+        elif filter._type == FilterType("pattern"):
+            json_filters += [_make_json_info_for_build_target_pattern(filter.pattern)]
+        else:
+            fail("Unknown filter type: {}".format(filter))
+    return json_filters
+
+def _make_json_info_for_group_mapping(group_mapping: GroupMapping) -> dict[str, typing.Any]:
+    return {
+        "filters": _make_json_info_for_group_mapping_filters(group_mapping.filters),
+        "preferred_linkage": group_mapping.preferred_linkage,
+        "roots": group_mapping.roots,
+        "traversal": group_mapping.traversal,
+    }
+
+def _make_json_info_for_group(group: Group) -> dict[str, typing.Any]:
+    return {
+        "attrs": group.attrs,
+        "mappings": [_make_json_info_for_group_mapping(mapping) for mapping in group.mappings],
+        "name": group.name,
+    }
+
+def make_info_subtarget_providers(ctx: AnalysisContext, groups: list[Group], mappings: dict[Label, str]) -> list[Provider]:
+    info_json = {
+        "groups": {group.name: _make_json_info_for_group(group) for group in groups},
+        "mappings": mappings,
+    }
+    json_output = ctx.actions.write_json("link_group_map_info.json", info_json)
+    return [DefaultInfo(default_output = json_output)]
diff --git a/prelude/cxx/groups_types.bzl b/prelude/cxx/groups_types.bzl
new file mode 100644
index 0000000000..779729aac0
--- /dev/null
+++ b/prelude/cxx/groups_types.bzl
@@ -0,0 +1,111 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//linking:types.bzl", "Linkage")
+load(
+    "@prelude//utils:build_target_pattern.bzl",
+    "BuildTargetPattern",
+)
+
+# Label for special group mapping which causes every target associated with it to be included in all groups
+MATCH_ALL_LABEL = "MATCH_ALL"
+
+# Label for special group mapping which causes every target associated with it to be linked directly
+# against the final binary
+NO_MATCH_LABEL = "NO_MATCH"
+
+Traversal = enum(
+    # Includes the target and all of its transitive dependencies in the group.
+    "tree",
+    # Includes only the target in the group.
+    "node",
+    # Uses pattern and separates all targets by full folder path.
+    "subfolders",
+    # Includes targets found in the transitive deps of *any* roots.
+    # Filters for these mappings will be applied to the intersected deps.
+    "intersect_any_roots",
+)
+
+# Optional type of filtering
+FilterType = enum(
+    # Filters for targets with labels matching the regex pattern defined after `label:`.
+    "label",
+    # Filters for targets for the build target pattern defined after "pattern:".
+    "pattern",
+    # Filters for targets matching the regex pattern defined after "target_regex:". 
+ "target_regex", +) + +BuildTargetFilter = record( + pattern = field(BuildTargetPattern), + _type = field(FilterType, FilterType("pattern")), +) + +LabelFilter = record( + regex = regex, + _type = field(FilterType, FilterType("label")), +) + +TargetRegexFilter = record( + regex = regex, + _type = field(FilterType, FilterType("target_regex")), +) + +# Representation of a parsed group mapping +GroupMapping = record( + # The root to apply this mapping to. + roots = field(list[Label], []), + # The type of traversal to use. + traversal = field(Traversal, Traversal("tree")), + # Optional filter type to apply to the traversal. + filters = field(list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]], []), + # Preferred linkage for this target when added to a link group. + preferred_linkage = field([Linkage, None], None), +) + +# Representation of group attributes +GroupAttrs = record( + # Use distributed thinlto to build the link group shared library. + enable_distributed_thinlto = field(bool, False), + # Enable this link group if the binary's node count exceeds the given threshold + enable_if_node_count_exceeds = field([int, None], None), + # Discard all dependencies in the link group, useful for dropping unused dependencies + # from the build graph. + discard_group = field(bool, False), + # Adds additional linker flags used to link the link group shared object. + linker_flags = field(list, []), + # Adds additional linker flags to apply to dependents that link against the + # link group's shared object. + exported_linker_flags = field(list, []), + # Requires root nodes in specs to always exist in dependency graph. + # Otherwise fails. + requires_root_node_exists = field(bool, True), + # For certain wide-scale generic link groups we want to enable + # initial duplicate analysis. This is useful for detecting dduplicated symbols problem early + # for automatoc link groups that we not aware about (e.g. evicting whole root package folder into link group) + prohibit_file_duplicates = field(bool, False), +) + +# Types of group traversal +GroupDefinition = enum( + # Group is explicitly defined in mapping provided by user. + # That is the default behavior. + "explicit", + # Group is implicitly created during mapping computations. + # For example, group can be created for "subfolders" traversal. + "implicit", +) + +# Representation of a parsed group +Group = record( + # The name for this group. + name = str, + # The mappings that are part of this group. + mappings = list[GroupMapping], + attrs = GroupAttrs, + definition_type = field(GroupDefinition, GroupDefinition("explicit")), +) diff --git a/prelude/cxx/headers.bzl b/prelude/cxx/headers.bzl index 150920d2d3..e102dbf687 100644 --- a/prelude/cxx/headers.bzl +++ b/prelude/cxx/headers.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") load("@prelude//utils:utils.bzl", "from_named_set", "map_val", "value_or") @@ -66,7 +67,7 @@ HeaderStyle = enum( Headers = record( include_path = field(cmd_args), # NOTE(agallagher): Used for module hack replacement. 
- symlink_tree = field([Artifact, None], None), + symlink_tree = field(Artifact | None, None), # args that map symlinked private headers to source path file_prefix_args = field([cmd_args, None], None), ) @@ -110,15 +111,16 @@ CPrecompiledHeaderInfo = provider(fields = { def cxx_attr_header_namespace(ctx: AnalysisContext) -> str: return value_or(ctx.attrs.header_namespace, ctx.label.package) -def cxx_attr_exported_headers(ctx: AnalysisContext, headers_layout: CxxHeadersLayout) -> list[CHeader]: - headers = _get_attr_headers(ctx.attrs.exported_headers, headers_layout.namespace, headers_layout.naming) - platform_headers = _get_attr_headers(_headers_by_platform(ctx, ctx.attrs.exported_platform_headers), headers_layout.namespace, headers_layout.naming) +def cxx_attr_headers_list(ctx: AnalysisContext, headers: typing.Any, platform_headers: typing.Any, headers_layout: CxxHeadersLayout) -> list[CHeader]: + headers = _get_attr_headers(headers, headers_layout.namespace, headers_layout.naming) + platform_headers = _get_attr_headers(_headers_by_platform(ctx, platform_headers), headers_layout.namespace, headers_layout.naming) return headers + platform_headers +def cxx_attr_exported_headers(ctx: AnalysisContext, headers_layout: CxxHeadersLayout) -> list[CHeader]: + return cxx_attr_headers_list(ctx, ctx.attrs.exported_headers, ctx.attrs.exported_platform_headers, headers_layout) + def cxx_attr_headers(ctx: AnalysisContext, headers_layout: CxxHeadersLayout) -> list[CHeader]: - headers = _get_attr_headers(ctx.attrs.headers, headers_layout.namespace, headers_layout.naming) - platform_headers = _get_attr_headers(_headers_by_platform(ctx, ctx.attrs.platform_headers), headers_layout.namespace, headers_layout.naming) - return headers + platform_headers + return cxx_attr_headers_list(ctx, ctx.attrs.headers, ctx.attrs.platform_headers, headers_layout) def cxx_get_regular_cxx_headers_layout(ctx: AnalysisContext) -> CxxHeadersLayout: namespace = cxx_attr_header_namespace(ctx) @@ -181,7 +183,7 @@ def _header_mode(ctx: AnalysisContext) -> HeaderMode: return toolchain_header_mode -def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str, project_root_file: [Artifact, None]) -> [Headers, None]: +def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str) -> [Headers, None]: """ Prepare all the headers we want to use, depending on the header_mode set on the target's toolchain. 
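(For intuition about the header-map setup reworked in the next hunk: `_mk_hmap` is fed an argsfile pairing each include name with the path it should resolve to, here inside the symlink tree, mirroring the `"{}/" + h` format string. A small Python sketch follows; the one-entry-per-line pairing is an assumption for illustration, not mk_hmap's documented format.)

```python
def hmap_argsfile_lines(header_names, symlink_dir):
    """Pair include names with their symlink-tree paths (sketch)."""
    lines = []
    for name in header_names:
        lines.append(name)  # the name used in #include directives...
        lines.append("{}/{}".format(symlink_dir, name))  # ...and where it lives
    return lines


# hmap_argsfile_lines(["lib/foo.h"], "out/headers")
# -> ["lib/foo.h", "out/headers/lib/foo.h"]
```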
@@ -203,23 +205,23 @@ def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str, lazy.is_any(lambda n: paths.basename(n) == "module.modulemap", srcs.keys())): header_mode = HeaderMode("symlink_tree_only") - output_name = name + "-abs" if project_root_file else name + output_name = name if header_mode == HeaderMode("header_map_only"): headers = {h: (a, "{}") for h, a in srcs.items()} - hmap = _mk_hmap(ctx, output_name, headers, project_root_file) + hmap = _mk_hmap(ctx, output_name, headers) return Headers( - include_path = cmd_args(hmap).hidden(srcs.values()), + include_path = cmd_args(hmap, hidden = srcs.values()), ) symlink_dir = ctx.actions.symlinked_dir(output_name, _normalize_header_srcs(srcs)) if header_mode == HeaderMode("symlink_tree_only"): return Headers(include_path = cmd_args(symlink_dir), symlink_tree = symlink_dir) if header_mode == HeaderMode("symlink_tree_with_header_map"): headers = {h: (symlink_dir, "{}/" + h) for h in srcs} - hmap = _mk_hmap(ctx, output_name, headers, project_root_file) + hmap = _mk_hmap(ctx, output_name, headers) file_prefix_args = _get_debug_prefix_args(ctx, symlink_dir) return Headers( - include_path = cmd_args(hmap).hidden(symlink_dir), + include_path = cmd_args(hmap, hidden = symlink_dir), symlink_tree = symlink_dir, file_prefix_args = file_prefix_args, ) @@ -335,28 +337,28 @@ def _get_debug_prefix_args(ctx: AnalysisContext, header_dir: Artifact) -> [cmd_a if get_cxx_toolchain_info(ctx).linker_info.type != "gnu": return None - debug_prefix_args = cmd_args() fmt = "-fdebug-prefix-map={}=" + value_or(header_dir.owner.cell, ".") - debug_prefix_args.add( + return cmd_args( cmd_args(header_dir, format = fmt), ) - return debug_prefix_args -def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str)], project_root_file: [Artifact, None]) -> Artifact: +def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str)]) -> Artifact: output = ctx.actions.declare_output(name + ".hmap") - cmd = cmd_args(get_cxx_toolchain_info(ctx).mk_hmap) - cmd.add(["--output", output.as_output()]) header_args = cmd_args() for n, (path, fmt) in headers.items(): header_args.add(n) # We don't care about the header contents -- just their names. 
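(The pair of lines below swaps `.ignore_artifacts()` method chaining for the `ignore_artifacts = True` constructor form, one instance of this diff's broader migration of `cmd_args` mutation to constructor keywords. As a rough mental model of the two dependency-related knobs involved, a toy sketch, not Buck2's real `cmd_args` implementation:)

```python
class ToyCmdArgs:
    """Toy model of cmd_args dependency semantics; not Buck2's real API."""

    def __init__(self, *args, hidden=(), ignore_artifacts=False):
        self.args = list(args)      # values rendered on the command line
        self.hidden = list(hidden)  # inputs tracked but never rendered
        self.ignore_artifacts = ignore_artifacts

    def render(self):
        return [str(a) for a in self.args]

    def inputs(self):
        # ignore_artifacts: paths still render, but contribute no dependency
        # edges; hidden is the mirror image (edges without rendering).
        rendered_deps = [] if self.ignore_artifacts else list(self.args)
        return rendered_deps + self.hidden
```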
- header_args.add(cmd_args(path, format = fmt).ignore_artifacts()) + header_args.add(cmd_args(path, format = fmt, ignore_artifacts = True)) hmap_args_file = ctx.actions.write(output.basename + ".argsfile", cmd_args(header_args, quote = "shell")) - cmd.add(["--mappings-file", hmap_args_file]).hidden(header_args) - if project_root_file: - cmd.add(["--project-root-file", project_root_file]) - ctx.actions.run(cmd, category = "generate_hmap", identifier = name) + + cmd = cmd_args( + [get_cxx_toolchain_info(ctx).mk_hmap] + + ["--output", output.as_output()] + + ["--mappings-file", hmap_args_file], + hidden = header_args, + ) + ctx.actions.run(cmd, category = "generate_hmap", identifier = name, allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs)) return output diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index 1d46e01f9c..8dd1d8ce9a 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -34,6 +34,7 @@ load( ) load( "@prelude//linking:lto.bzl", + "LtoMode", "get_split_debug_lto_info", ) load("@prelude//linking:strip.bzl", "strip_object") @@ -50,6 +51,7 @@ load(":cxx_context.bzl", "get_cxx_toolchain_info") load( ":cxx_link_utility.bzl", "cxx_link_cmd_parts", + "cxx_sanitizer_runtime_arguments", "generates_split_debug", "linker_map_args", "make_link_args", @@ -75,13 +77,15 @@ CxxLinkResult = record( linked_object = LinkedObject, linker_map_data = [CxxLinkerMapData, None], link_execution_preference_info = LinkExecutionPreferenceInfo, + # A list of runtime shared libraries + sanitizer_runtime_files = field(list[Artifact]), ) def link_external_debug_info( ctx: AnalysisContext, links: list[LinkArgs], - split_debug_output: [Artifact, None] = None, - pdb: [Artifact, None] = None) -> ArtifactTSet: + split_debug_output: Artifact | None = None, + pdb: Artifact | None = None) -> ArtifactTSet: external_debug_artifacts = [] # When using LTO+split-dwarf, the link step will generate externally @@ -130,8 +134,11 @@ def cxx_link_into( linker_map_data = None if linker_info.supports_distributed_thinlto and opts.enable_distributed_thinlto: - if not linker_info.requires_objects: - fail("Cannot use distributed thinlto if the cxx toolchain doesn't require_objects") + if not linker_info.lto_mode == LtoMode("thin"): + fail("Cannot use distributed thinlto if the cxx toolchain doesn't use thin-lto lto_mode") + sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output) + if sanitizer_runtime_args.extra_link_args or sanitizer_runtime_args.sanitizer_runtime_files: + fail("Cannot use distributed thinlto with sanitizer runtime") exe = cxx_dist_link( ctx, opts.links, @@ -148,6 +155,7 @@ def cxx_link_into( link_execution_preference_info = LinkExecutionPreferenceInfo( preference = opts.link_execution_preference, ), + sanitizer_runtime_files = [], ) if linker_info.generate_linker_maps: @@ -155,8 +163,8 @@ def cxx_link_into( else: links_with_linker_map = opts.links - linker, toolchain_linker_flags = cxx_link_cmd_parts(cxx_toolchain_info) - all_link_args = cmd_args(toolchain_linker_flags) + link_cmd_parts = cxx_link_cmd_parts(cxx_toolchain_info) + all_link_args = cmd_args(link_cmd_parts.linker_flags) all_link_args.add(get_output_flags(linker_info.type, output)) # Darwin LTO requires extra link outputs to preserve debug info @@ -181,7 +189,6 @@ def cxx_link_into( links_with_linker_map, suffix = link_args_suffix, output_short_path = output.short_path, - is_shared = result_type.value == "shared_library", link_ordering = value_or( opts.link_ordering, # Fallback to toolchain 
default. @@ -190,6 +197,12 @@ def cxx_link_into( ) all_link_args.add(link_args_output.link_args) + # Sanitizer runtime args must appear at the end because they can affect + # behavior of Swift runtime loading when the app also has an embedded + # Swift runtime. + sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output) + all_link_args.add(sanitizer_runtime_args.extra_link_args) + bitcode_linkables = [] for link_item in opts.links: if link_item.infos == None: @@ -212,6 +225,8 @@ def cxx_link_into( pdb = link_args_output.pdb_artifact, ) + all_link_args.add(link_cmd_parts.post_linker_flags) + if linker_info.type == "windows": shell_quoted_args = cmd_args(all_link_args) else: @@ -223,10 +238,14 @@ def cxx_link_into( allow_args = True, ) - command = cmd_args(linker) - command.add(cmd_args(argfile, format = "@{}")) - command.hidden(link_args_output.hidden) - command.hidden(shell_quoted_args) + command = cmd_args( + link_cmd_parts.linker, + cmd_args(argfile, format = "@{}"), + hidden = [ + link_args_output.hidden, + shell_quoted_args, + ], + ) category = "cxx_link" if opts.category_suffix != None: category += "_" + opts.category_suffix @@ -235,11 +254,13 @@ def cxx_link_into( # generate a DWO directory, so make sure we at least `mkdir` and empty # one to make v2/RE happy. if split_debug_output != None: - cmd = cmd_args(["/bin/sh", "-c"]) - cmd.add(cmd_args(split_debug_output.as_output(), format = 'mkdir -p {}; "$@"')) - cmd.add('""').add(command) - cmd.hidden(command) - command = cmd + command = cmd_args( + "/bin/sh", + "-c", + cmd_args(split_debug_output.as_output(), format = 'mkdir -p {}; "$@"'), + '""', + command, + ) link_execution_preference_info = LinkExecutionPreferenceInfo( preference = opts.link_execution_preference, @@ -264,7 +285,7 @@ def cxx_link_into( strip_args = opts.strip_args_factory(ctx) if opts.strip_args_factory else cmd_args() output = strip_object(ctx, cxx_toolchain_info, output, strip_args, opts.category_suffix) - final_output = output if not (is_result_executable and cxx_use_bolt(ctx)) else bolt(ctx, output, opts.identifier) + final_output = output if not (is_result_executable and cxx_use_bolt(ctx)) else bolt(ctx, output, external_debug_info, opts.identifier) dwp_artifact = None if should_generate_dwp: # TODO(T110378144): Once we track split dwarf from compiles, we should @@ -290,6 +311,7 @@ def cxx_link_into( linked_object = LinkedObject( output = final_output, + link_args = opts.links, bitcode_bundle = bitcode_artifact.artifact if bitcode_artifact else None, prebolt_output = output, unstripped_output = unstripped_output, @@ -307,6 +329,7 @@ def cxx_link_into( linked_object = linked_object, linker_map_data = linker_map_data, link_execution_preference_info = link_execution_preference_info, + sanitizer_runtime_files = sanitizer_runtime_args.sanitizer_runtime_files, ) _AnonLinkInfo = provider(fields = { @@ -393,6 +416,10 @@ def _anon_cxx_link( split_debug_output = split_debug_output, ) + # The anon target API doesn't allow us to return the list of artifacts for + # the sanitizer runtime, so it has to be computed here + sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain, output) + return CxxLinkResult( linked_object = LinkedObject( output = output, @@ -404,6 +431,7 @@ def _anon_cxx_link( link_execution_preference_info = LinkExecutionPreferenceInfo( preference = LinkExecutionPreference("any"), ), + sanitizer_runtime_files = sanitizer_runtime_args.sanitizer_runtime_files, ) def cxx_link( diff --git a/prelude/cxx/link_groups.bzl
b/prelude/cxx/link_groups.bzl index fa353e20a6..af4e9a30f1 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -6,6 +6,16 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load( + "@prelude//cxx:groups_types.bzl", + "Group", # @unused Used as a type +) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", + "LinkGroupsDebugLinkableEntry", + "LinkGroupsDebugLinkableItem", +) load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -18,7 +28,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", # @unused Used as a type "SharedLibLinkable", "get_lib_output_style", @@ -38,11 +47,17 @@ load( "get_linkable_graph_node_map_func", "get_transitive_deps", ) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraries", + "create_shlib", +) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", ) load( "@prelude//utils:set.bzl", @@ -61,12 +76,14 @@ load( load(":cxx_toolchain_types.bzl", "PicBehavior") load( ":groups.bzl", - "Group", # @unused Used as a type - "MATCH_ALL_LABEL", - "NO_MATCH_LABEL", "compute_mappings", "parse_groups_definitions", ) +load( + ":groups_types.bzl", + "MATCH_ALL_LABEL", + "NO_MATCH_LABEL", +) load( ":link.bzl", "cxx_link_shared_library", @@ -107,25 +124,13 @@ LINK_GROUP_MAP_DATABASE_FILENAME = "link_group_map_database.json" LINK_GROUP_MAPPINGS_SUB_TARGET = "link-group-mappings" LINK_GROUP_MAPPINGS_FILENAME_SUFFIX = ".link_group_map.json" -LinkGroupInfo = provider( - # @unsorted-dict-items - fields = { - "groups": provider_field(typing.Any, default = None), # dict[str, Group] - "groups_hash": provider_field(typing.Any, default = None), # str - "mappings": provider_field(typing.Any, default = None), # dict[ConfiguredProvidersLabel, str] - # Additional graphs needed to cover labels referenced by the groups above. - # This is useful in cases where the consumer of this provider won't already - # have deps covering these. - # NOTE(agallagher): We do this to maintain existing behavior w/ the - # standalone `link_group_map()` rule, but it's not clear if it's actually - # desirable behavior. - "graph": provider_field(typing.Any, default = None), # LinkableGraph - }, -) - LinkGroupLinkInfo = record( link_info = field(LinkInfo), output_style = field(LibOutputStyle), + + # Where this link info originated from: + # either a target label or a link group name. + link_name = field(Label | str), ) LinkGroupLibSpec = record( @@ -142,6 +147,7 @@ LinkGroupLibSpec = record( root = field([LinkableRootInfo, None], None), # The link group to link.
group = field(Group), + label = field(Label | None, None), ) _LinkedLinkGroup = record( @@ -152,6 +158,7 @@ _LinkedLinkGroups = record( libs = field(dict[str, _LinkedLinkGroup]), symbol_ldflags = field(list[typing.Any], []), + libs_debug_info = field(dict[typing.Any, typing.Any]), ) def get_link_group(ctx: AnalysisContext) -> [str, None]: @@ -220,12 +227,22 @@ def get_link_group_info( ) def get_link_group_preferred_linkage(link_groups: list[Group]) -> dict[Label, Linkage]: - return { - mapping.root: mapping.preferred_linkage - for group in link_groups - for mapping in group.mappings - if mapping.root != None and mapping.preferred_linkage != None - } + root_to_linkage = {} + for group in link_groups: + for mapping in group.mappings: + if not mapping.roots: + continue + + if not mapping.preferred_linkage: + continue + + for root in mapping.roots: + # TODO: There might be a bug here - if the same root is listed in + # two different link_group_map entries, we'll only use the preferred_linkage + # of the last entry passed. + root_to_linkage[root] = mapping.preferred_linkage + + return root_to_linkage LinkGroupContext = record( link_group_mappings = field([dict[Label, str], None]), @@ -263,7 +280,8 @@ def _transitively_update_shared_linkage( link_strategy: LinkStrategy, link_group_preferred_linkage: dict[Label, Linkage], link_group_roots: dict[Label, str], - pic_behavior: PicBehavior): + pic_behavior: PicBehavior, + link_group_mappings: [dict[Label, str], None]): # Identify targets whose shared linkage style may be propagated to # dependencies. Implicitly created root libraries are skipped. shared_lib_roots = [] @@ -279,19 +297,35 @@ def _transitively_update_shared_linkage( shared_lib_roots.append(target) # buildifier: disable=uninitialized - def process_dependency(node: Label) -> list[Label]: + def process_dependency(node: Label) -> list[Label] | None: + if link_group_mappings and link_group_mappings.get(node) == NO_MATCH_LABEL: + # Do not propagate shared linkage via nodes that are excluded from link groups. + return None linkable_node = linkable_graph_node_map[node] if linkable_node.preferred_linkage == Linkage("any"): link_group_preferred_linkage[node] = Linkage("shared") return get_deps_for_link(linkable_node, link_strategy, pic_behavior) - breadth_first_traversal_by( + depth_first_traversal_by( linkable_graph_node_map, shared_lib_roots, process_dependency, ) +def create_debug_linkable_entries( + labels_to_links_map: dict[Label, LinkGroupLinkInfo]) -> list[LinkGroupsDebugLinkableEntry]: + entries = [] + for link_info in labels_to_links_map.values(): + link_groups_linkable_info = LinkGroupsDebugLinkableEntry( + name = link_info.link_name, + output_style = link_info.output_style, + ) + entries.append(link_groups_linkable_info) + + return entries + def get_filtered_labels_to_links_map( + public_nodes: [set_record, None], linkable_graph_node_map: dict[Label, LinkableNode], link_group: [str, None], link_groups: dict[str, Group], @@ -312,29 +346,18 @@ def get_filtered_labels_to_links_map( """ def get_potential_linkables(node: Label) -> list[Label]: - linkable_node = linkable_graph_node_map[node] # buildifier: disable=uninitialized - - # Always link against exported deps - node_linkables = list(linkable_node.exported_deps) + linkable_node = linkable_graph_node_map[node] # If the preferred linkage is `static` or `any` we need to link against the deps too.
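The new process_dependency callback above returns None to stop propagation at nodes excluded from link groups. A hedged sketch of that pruning contract for depth_first_traversal_by, with graph, roots, and should_prune as illustrative placeholders rather than names from this diff:

    def _visit(node: Label) -> list[Label] | None:
        # Returning None tells the traversal not to descend through this
        # node; returning a list of labels continues the walk into them.
        if should_prune(node):
            return None
        return graph[node].deps

    # The traversal returns the list of nodes it visited.
    visited = depth_first_traversal_by(graph, roots, _visit)

The same callback shape recurs below in get_potential_linkables and collect_and_traverse_roots.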
- # TODO(cjhopman): This code originally was as commented out below and the comment indicated that the - # intent was to not traverse in the second case if link style was shared, but at this point idk which - # behavior we actually want. - should_traverse_private_deps = False - if linkable_node.preferred_linkage == Linkage("static"): - should_traverse_private_deps = True - elif linkable_node.preferred_linkage == Linkage("any"): - should_traverse_private_deps = True - # should_traverse = link_style != Linkage("shared") + should_traverse_private_deps = linkable_node.preferred_linkage == Linkage("static") or linkable_node.preferred_linkage == Linkage("any") if should_traverse_private_deps: - node_linkables += linkable_node.deps - - return node_linkables + return linkable_node.all_deps + else: + return linkable_node.exported_deps # Get all potential linkable targets - linkables = breadth_first_traversal_by( + linkables = depth_first_traversal_by( linkable_graph_node_map, roots, get_potential_linkables, @@ -357,6 +380,7 @@ def get_filtered_labels_to_links_map( link_group_preferred_linkage, link_group_roots, pic_behavior, + link_group_mappings, ) linkable_map = {} @@ -365,15 +389,36 @@ def get_filtered_labels_to_links_map( # already. This avoids use adding the same link group lib multiple times, # for each of the possible multiple nodes that maps to it. link_group_added = {} + group_srcs = {} def add_link(target: Label, output_style: LibOutputStyle): linkable_map[target] = LinkGroupLinkInfo( link_info = get_link_info(linkable_graph_node_map[target], output_style, prefer_stripped), output_style = output_style, - ) # buildifier: disable=uninitialized + link_name = target, + ) def add_link_group(target: Label, target_group: str): # If we've already added this link group to the link line, we're done. + + link_group_spec = link_groups.get(target_group, None) + if link_group_spec and link_group_spec.attrs.prohibit_file_duplicates and public_nodes and public_nodes.contains(target): + if target_group not in group_srcs: + group_srcs[target_group] = {} + target_group_srcs = group_srcs[target_group] + for src in linkable_graph_node_map[target].srcs: + if not isinstance(src, Artifact): + # "src" is either a source file or a source file paired with a list of compilation flags. + # We do not handle the case where compilation flags are attached to source files, + # because we don't know whether the link is going to fail or not, so we let the user deal with any linker errors. + continue + + previous_target = target_group_srcs.get(src, None) + if previous_target and previous_target != target: + fail("'{}' artifact included multiple times into '{}' link group. From '{}:{}' and '{}:{}'".format(src, target_group, target.package, target.name, previous_target.package, previous_target.name)) + else: + target_group_srcs[src] = target + if target_group in link_group_added: return @@ -392,7 +437,8 @@ def get_filtered_labels_to_links_map( linkable_map[target] = LinkGroupLinkInfo( link_info = get_link_info_from_link_infos(shared_link_infos), output_style = LibOutputStyle("shared_lib"), - ) # buildifier: disable=uninitialized + link_name = target_group, + ) filtered_groups = [None, NO_MATCH_LABEL, MATCH_ALL_LABEL] @@ -419,7 +465,9 @@ def get_filtered_labels_to_links_map( target_link_group = link_group_mappings.get(target) # Always add force-static libs to the link.
- if force_static_follows_dependents and node.preferred_linkage == Linkage("static"): + if (force_static_follows_dependents and + node.preferred_linkage == Linkage("static") and + not node.ignore_force_static_follows_dependents): add_link(target, output_style) elif not target_link_group and not link_group: # Ungrouped linkable targets belong to the unlabeled executable @@ -496,7 +544,7 @@ def get_public_link_group_nodes( external_link_group_nodes.update( # get transitive exported deps - breadth_first_traversal_by( + depth_first_traversal_by( linkable_graph_node_map, external_link_group_nodes.list(), discover_link_group_linkables, @@ -526,6 +574,46 @@ def get_link_group_map_json(ctx: AnalysisContext, targets: list[TargetLabel]) -> json_map = ctx.actions.write_json(LINK_GROUP_MAP_DATABASE_FILENAME, sorted(targets)) return DefaultInfo(default_output = json_map) +def _find_all_relevant_roots( + specs: list[LinkGroupLibSpec], + link_group_mappings: dict[Label, str], # target label to link group name + roots: list[Label], + linkable_graph_node_map: dict[Label, LinkableNode]) -> dict[str, list[Label]]: + relevant_roots = {} + link_groups_for_full_traversal = set() # set[str] + + for spec in specs: + if spec.root != None: + relevant_roots[spec.group.name] = spec.root.deps + else: + roots_from_mappings, has_empty_root = _get_roots_from_mappings(spec, linkable_graph_node_map) + relevant_roots[spec.group.name] = roots_from_mappings + if has_empty_root: + link_groups_for_full_traversal.add(spec.group.name) + + def collect_and_traverse_roots(node_target: Label) -> list[Label]: + node = linkable_graph_node_map.get(node_target) + if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents: + return node.all_deps + + node_link_group = link_group_mappings.get(node_target) + + if node_link_group == MATCH_ALL_LABEL: + # Add node into the list of roots for all link groups + for link_group in relevant_roots.keys(): + relevant_roots[link_group].append(node_target) + elif link_groups_for_full_traversal.contains(node_link_group) and node_link_group != NO_MATCH_LABEL: + relevant_roots[node_link_group].append(node_target) + return node.all_deps + + depth_first_traversal_by( + linkable_graph_node_map, + roots, + collect_and_traverse_roots, + ) + + return relevant_roots + def find_relevant_roots( link_group: [str, None] = None, linkable_graph_node_map: dict[Label, LinkableNode] = {}, @@ -533,22 +621,26 @@ def find_relevant_roots( roots: list[Label] = []): # Walk through roots looking for the first node which maps to the current # link group.
- def collect_and_traverse_roots(roots, node_target): + + def collect_and_traverse_roots(roots, node_target: Label) -> list[Label] | None: node = linkable_graph_node_map.get(node_target) - if node.preferred_linkage == Linkage("static"): - return node.deps + node.exported_deps + if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents: + return node.all_deps + node_link_group = link_group_mappings.get(node_target) + if node_link_group == MATCH_ALL_LABEL: roots.append(node_target) - return [] - if node_link_group == link_group: + elif node_link_group == link_group: roots.append(node_target) - return [] - return node.deps + node.exported_deps + else: + return node.all_deps + + return None relevant_roots = [] - breadth_first_traversal_by( + depth_first_traversal_by( linkable_graph_node_map, roots, partial(collect_and_traverse_roots, relevant_roots), @@ -556,13 +648,33 @@ def find_relevant_roots( return relevant_roots +def _get_roots_from_mappings( + spec: LinkGroupLibSpec, + linkable_graph_node_map: dict[Label, LinkableNode]) -> (list[Label], bool): + roots = [] + has_empty_root = False + for mapping in spec.group.mappings: + # If there's no explicit root, this means we need to search the entire + # graph to find candidate nodes. + if not mapping.roots: + has_empty_root = True + elif spec.group.attrs.requires_root_node_exists: + # If the spec requires the root to always exist (default True), always include it in the + # traversal so we fail hard if it is not in the deps. + # Otherwise, add it to the traversal only if we are sure it is in the deps graph. + roots.extend(mapping.roots) + else: + roots.extend([root for root in mapping.roots if root in linkable_graph_node_map]) + return (roots, has_empty_root) + +_CreatedLinkGroup = record( + linked_object = field(LinkedObject), + labels_to_links_map = field(dict[Label, LinkGroupLinkInfo] | None), +) + def _create_link_group( ctx: AnalysisContext, spec: LinkGroupLibSpec, - # The deps of the top-level executable. - executable_deps: list[Label] = [], - # Additional roots involved in the link. - other_roots: list[Label] = [], + roots: list[Label], public_nodes: set_record = set(), linkable_graph_node_map: dict[Label, LinkableNode] = {}, linker_flags: list[typing.Any] = [], @@ -573,7 +685,8 @@ def _create_link_group( link_group_libs: dict[str, ([Label, None], LinkInfos)] = {}, prefer_stripped_objects: bool = False, category_suffix: [str, None] = None, - anonymous: bool = False) -> [LinkedObject, None]: + anonymous: bool = False, + allow_cache_upload = False) -> _CreatedLinkGroup | None: """ Link a link group library, described by a `LinkGroupLibSpec`. This is intended to handle regular shared libs and e.g. Python extensions. @@ -593,10 +706,6 @@ def _create_link_group( get_ignore_undefined_symbols_flags(linker_type), )) - # Get roots to begin the linkable search. - # TODO(agallagher): We should use the groups "public" nodes as the roots. - roots = [] - has_empty_root = False if spec.root != None: # If there's a linkable root attached to the spec, use that to guide # linking, as that will contain things like private linker flags that @@ -605,33 +714,10 @@ def _create_link_group( spec.root.link_infos, prefer_stripped = prefer_stripped_objects, )) - roots.extend(spec.root.deps) - else: - for mapping in spec.group.mappings: - # If there's no explicit root, this means we need to search the entire - # graph to find candidate nodes.
- if mapping.root == None: - has_empty_root = True - elif spec.group.attrs.requires_root_node_exists or mapping.root in linkable_graph_node_map: - # If spec requires root to always exist (default True), always include to traversal to fail hard if it is not in deps. - # Otherwise add to traversal only if we sure it is in deps graph. - roots.append(mapping.root) - - # If this link group has an empty mapping, we need to search everything - # -- even the additional roots -- to find potential nodes in the link - # group. - if has_empty_root: - roots.extend( - find_relevant_roots( - link_group = spec.group.name, - linkable_graph_node_map = linkable_graph_node_map, - link_group_mappings = link_group_mappings, - roots = executable_deps + other_roots, - ), - ) # Add roots... filtered_labels_to_links_map = get_filtered_labels_to_links_map( + public_nodes, linkable_graph_node_map, spec.group.name, link_groups, @@ -662,10 +748,14 @@ def _create_link_group( # TODO: anonymous targets cannot be used with dynamic output yet enable_distributed_thinlto = False if anonymous else spec.group.attrs.enable_distributed_thinlto, link_execution_preference = LinkExecutionPreference("any"), + allow_cache_upload = allow_cache_upload, ), anonymous = anonymous, ) - return link_result.linked_object + return _CreatedLinkGroup( + linked_object = link_result.linked_object, + labels_to_links_map = filtered_labels_to_links_map, + ) def _stub_library( ctx: AnalysisContext, @@ -772,17 +862,18 @@ def _symbol_flags_for_link_groups( def create_link_groups( ctx: AnalysisContext, + public_nodes: set_record, link_groups: dict[str, Group] = {}, link_group_specs: list[LinkGroupLibSpec] = [], executable_deps: list[Label] = [], other_roots: list[Label] = [], - root_link_group: [str, None] = None, linker_flags: list[typing.Any] = [], prefer_stripped_objects: bool = False, linkable_graph_node_map: dict[Label, LinkableNode] = {}, link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, - anonymous: bool = False) -> _LinkedLinkGroups: + anonymous: bool = False, + allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. link_group_shared_links = {} specs = [] @@ -802,25 +893,24 @@ def create_link_groups( ) linked_link_groups = {} + link_groups_debug_info = {} undefined_symfiles = [] global_symfiles = [] - - public_nodes = get_public_link_group_nodes( - linkable_graph_node_map, + roots = _find_all_relevant_roots( + specs, link_group_mappings, executable_deps + other_roots, - root_link_group, + linkable_graph_node_map, ) for link_group_spec in specs: # NOTE(agallagher): It might make sense to move this down to be # done when we generated the links for the executable, so we can # handle the case when a link group can depend on the executable. 
- link_group_lib = _create_link_group( + created_link_group = _create_link_group( ctx = ctx, spec = link_group_spec, - executable_deps = executable_deps, - other_roots = other_roots, + roots = roots[link_group_spec.group.name], linkable_graph_node_map = linkable_graph_node_map, public_nodes = public_nodes, linker_flags = ( @@ -840,12 +930,20 @@ def create_link_groups( prefer_stripped_objects = prefer_stripped_objects, category_suffix = "link_group", anonymous = anonymous, + allow_cache_upload = allow_cache_upload, ) - if link_group_lib == None: + if created_link_group == None: # the link group did not match anything, don't create shlib interface continue + link_group_lib = created_link_group.linked_object + + if created_link_group.labels_to_links_map: + link_groups_debug_info[link_group_spec.name] = LinkGroupsDebugLinkableItem( + ordered_linkables = create_debug_linkable_entries(created_link_group.labels_to_links_map), + ) + # On GNU, use shlib interfaces. if cxx_is_gnu(ctx): shlib_for_link = shared_library_interface( @@ -862,7 +960,15 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = {link_group_spec.name: link_group_lib}, + shared_libs = SharedLibraries( + libraries = [ + create_shlib( + label = link_group_spec.label or ctx.label, + soname = link_group_spec.name, + lib = link_group_lib, + ), + ], + ), shared_link_infos = LinkInfos( default = wrap_link_info( link_info, @@ -898,6 +1004,7 @@ def create_link_groups( return _LinkedLinkGroups( libs = linked_link_groups, symbol_ldflags = symbol_ldflags, + libs_debug_info = link_groups_debug_info, ) def get_transitive_deps_matching_labels( diff --git a/prelude/cxx/link_groups_types.bzl b/prelude/cxx/link_groups_types.bzl new file mode 100644 index 0000000000..7c0037e5e5 --- /dev/null +++ b/prelude/cxx/link_groups_types.bzl @@ -0,0 +1,87 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load( + "@prelude//linking:link_info.bzl", + "LibOutputStyle", +) +load("@prelude//linking:types.bzl", "Linkage") +load(":groups_types.bzl", "Group", "Traversal") + +# These are targets or link groups that will be added to .linker.argsfile +# Targets will be expanded to .o files, link groups will be added to NEEDS +LinkGroupsDebugLinkableEntry = record( + name = field(Label | str), + output_style = field(LibOutputStyle), +) + +# This is info about a single output unit. It is either a final binary or +# one of the link groups. +LinkGroupsDebugLinkableItem = record( + ordered_linkables = field(list[LinkGroupsDebugLinkableEntry]), +) + +LinkGroupsDebugLinkInfo = record( + binary = field(LinkGroupsDebugLinkableItem), + libs = field(dict[str, LinkGroupsDebugLinkableItem]), +) + +LinkGroupInfo = provider( + fields = { + # Additional graphs needed to cover labels referenced by the groups above. + # This is useful in cases where the consumer of this provider won't already + # have deps covering these. + # NOTE(agallagher): We do this to maintain existing behavior w/ the + # standalone `link_group_map()` rule, but it's not clear if it's actually + # desirable behavior.
+ "graph": provider_field(typing.Any, default = None), # LinkableGraph + "groups": provider_field(dict[str, Group]), + "groups_hash": provider_field(int), + "mappings": provider_field(dict[Label, str]), + }, +) + +def link_group_inlined_map_attr(root_attr): + return attrs.list( + attrs.tuple( + # name + attrs.string(), + # list of mappings + attrs.list( + # a single mapping + attrs.tuple( + # root node + attrs.one_of(root_attr, attrs.list(root_attr)), + # traversal + attrs.enum(Traversal.values()), + # filters, either `None`, a single filter, or a list of filters + # (which must all match). + attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())), + # linkage + attrs.option(attrs.enum(Linkage.values())), + ), + ), + # attributes + attrs.option( + attrs.dict(key = attrs.string(), value = attrs.any(), sorted = False), + ), + ), + ) + +LINK_GROUP_MAP_ATTR = attrs.option( + attrs.one_of( + attrs.dep(providers = [LinkGroupInfo]), + link_group_inlined_map_attr( + # Inlined `link_group_map` will parse roots as `label`s, to avoid + # bloating deps w/ unrelated mappings (e.g. it's common to use + # a default mapping for all rules, which would otherwise add + # unrelated deps to them). + root_attr = attrs.option(attrs.label()), + ), + ), + default = None, +) diff --git a/prelude/cxx/link_types.bzl b/prelude/cxx/link_types.bzl index ddcac7c52b..677e32e9d8 100644 --- a/prelude/cxx/link_types.bzl +++ b/prelude/cxx/link_types.bzl @@ -32,7 +32,7 @@ LinkOptions = record( strip = bool, # A function/lambda which will generate the strip args using the ctx. strip_args_factory = [typing.Callable, None], - import_library = [Artifact, None], + import_library = Artifact | None, allow_cache_upload = bool, cxx_toolchain = [CxxToolchainInfo, None], # Force callers to use link_options() or merge_link_options() to create. @@ -49,7 +49,7 @@ def link_options( identifier: [str, None] = None, strip: bool = False, strip_args_factory = None, - import_library: [Artifact, None] = None, + import_library: Artifact | None = None, allow_cache_upload: bool = False, cxx_toolchain: [CxxToolchainInfo, None] = None) -> LinkOptions: """ @@ -72,7 +72,7 @@ def link_options( __private_use_link_options_function_to_construct = None, ) -# A marker instance to differentiate explicitly-passed None and a field tha +# A marker instance to differentiate explicitly-passed None and a field that # isn't provided in merge_link_options. 
_NotProvided = record() _NOT_PROVIDED = _NotProvided() diff --git a/prelude/cxx/linker.bzl b/prelude/cxx/linker.bzl index 3bf86d3be5..be2c3c2b1f 100644 --- a/prelude/cxx/linker.bzl +++ b/prelude/cxx/linker.bzl @@ -198,7 +198,7 @@ def get_objects_as_library_args(linker_type: str, objects: list[Artifact]) -> li args.append("-Wl,--start-lib") args.extend(objects) args.append("-Wl,--end-lib") - elif linker_type == "windows": + elif linker_type == "darwin" or linker_type == "windows": args.extend(objects) else: fail("Linker type {} not supported".format(linker_type)) @@ -216,7 +216,7 @@ def get_ignore_undefined_symbols_flags(linker_type: str) -> list[str]: args.append("-Wl,--allow-shlib-undefined") args.append("-Wl,--unresolved-symbols=ignore-all") elif linker_type == "darwin": - args.append("-Wl,-flat_namespace,-undefined,suppress") + args.append("-Wl,-undefined,dynamic_lookup") else: fail("Linker type {} not supported".format(linker_type)) @@ -248,7 +248,7 @@ def get_output_flags(linker_type: str, output: Artifact) -> list[ArgLike]: def get_import_library( ctx: AnalysisContext, linker_type: str, - output_short_path: str) -> ([Artifact, None], list[ArgLike]): + output_short_path: str) -> (Artifact | None, list[ArgLike]): if linker_type == "windows": import_library = ctx.actions.declare_output(output_short_path + ".imp.lib") return import_library, [cmd_args(import_library.as_output(), format = "/IMPLIB:{}")] diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 801f921427..12f4f696ad 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -20,7 +20,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "get_lib_output_style", @@ -38,10 +37,16 @@ load( "linkable_deps", "linkable_graph", ) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type + "create_shlib", +) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", "post_order_traversal", ) load("@prelude//utils:utils.bzl", "flatten", "value_or") @@ -118,7 +123,7 @@ OmnibusRootProduct = record( # The result of the omnibus link. OmnibusSharedLibraries = record( omnibus = field([CxxLinkResult, None], None), - libraries = field(dict[str, LinkedObject], {}), + libraries = field(list[SharedLibrary], []), roots = field(dict[Label, OmnibusRootProduct], {}), exclusion_roots = field(list[Label]), excluded = field(list[Label]), @@ -139,7 +144,8 @@ def get_roots(deps: list[Dependency]) -> dict[Label, LinkableRootInfo]: roots = {} for dep in deps: if LinkableRootInfo in dep: - roots[dep.label] = dep[LinkableRootInfo] + root = dep[LinkableRootInfo] + roots[root.label] = root return roots def get_excluded(deps: list[Dependency] = []) -> dict[Label, None]: @@ -151,11 +157,13 @@ def get_excluded(deps: list[Dependency] = []) -> dict[Label, None]: return excluded_nodes def create_linkable_root( + label: Label, link_infos: LinkInfos, name: [str, None] = None, - deps: list[Dependency] = []) -> LinkableRootInfo: + deps: list[LinkableGraph | Dependency] = []) -> LinkableRootInfo: # Only include dependencies that are linkable. 
return LinkableRootInfo( + label = label, name = name, link_infos = link_infos, deps = linkable_deps(deps), @@ -192,7 +200,7 @@ def _link_deps( def find_deps(node: Label): return get_deps_for_link(link_infos[node], LinkStrategy("shared"), pic_behavior) - return breadth_first_traversal_by(link_infos, deps, find_deps) + return depth_first_traversal_by(link_infos, deps, find_deps) def _create_root( ctx: AnalysisContext, @@ -512,9 +520,9 @@ def _create_omnibus( root_products.values(), # ... and the shared libs from excluded nodes. [ - shared_lib.output + shared_lib.lib.output for label in spec.excluded - for shared_lib in spec.link_infos[label].shared_libs.values() + for shared_lib in spec.link_infos[label].shared_libs.libraries ], # Extract explicit global symbol names from flags in all body link args. global_symbols_link_args, @@ -587,11 +595,18 @@ def _build_omnibus_spec( if label not in excluded } - # Find the deps of the root nodes. These form the roots of the nodes - # included in the omnibus link. + # Find the deps of the root nodes that should be linked into + # 'libomnibus.so'. + # + # If a dep indicates preferred linkage static, it is linked directly into + # this omnibus root and therefore not added to `first_order_root_deps` and + # thereby will not be linked into 'libomnibus.so'. If the dep does not + # indicate preferred linkage static, then it is added to + # `first_order_root_deps` and thereby will be linked into 'libomnibus.so'. first_order_root_deps = [] for label in _link_deps(graph.nodes, flatten([r.deps for r in roots.values()]), get_cxx_toolchain_info(ctx).pic_behavior): - # We only consider deps which aren't *only* statically linked. + # Per the comment above, only consider deps which aren't *only* + # statically linked. if _is_static_only(graph.nodes[label]): continue @@ -647,9 +662,10 @@ def _ordered_roots( """ # Calculate all deps each root node needs to link against. - link_deps = {} - for label, root in spec.roots.items(): - link_deps[label] = _link_deps(spec.link_infos, root.deps, pic_behavior) + link_deps = { + label: _link_deps(spec.link_infos, root.deps, pic_behavior) + for label, root in spec.roots.items() + } # Used the link deps to create the graph of root nodes. root_graph = { @@ -657,14 +673,12 @@ def _ordered_roots( for node, deps in link_deps.items() } - ordered_roots = [] - # Emit the root link info in post-order, so that we generate root link rules # for dependencies before their dependents. - for label in post_order_traversal(root_graph): - root = spec.roots[label] - deps = link_deps[label] - ordered_roots.append((label, root, deps)) + ordered_roots = [ + (label, spec.roots[label], link_deps[label]) + for label in post_order_traversal(root_graph) + ] return ordered_roots @@ -679,7 +693,7 @@ def create_omnibus_libraries( # Create dummy omnibus dummy_omnibus = create_dummy_omnibus(ctx, extra_ldflags) - libraries = {} + libraries = [] root_products = {} # Link all root nodes against the dummy libomnibus lib. @@ -698,7 +712,13 @@ def create_omnibus_libraries( allow_cache_upload = True, ) if root.name != None: - libraries[root.name] = product.shared_library + libraries.append( + create_shlib( + soname = root.name, + lib = product.shared_library, + label = label, + ), + ) root_products[label] = product # If we have body nodes, then link them into the monolithic libomnibus.so.
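As in these omnibus hunks and the link_groups.bzl change above, shared libraries are now accumulated as a list of SharedLibrary records built with create_shlib instead of a dict keyed by soname. A hedged before/after sketch for consumers; use() is a placeholder, and reading shlib.soname is an assumption based on create_shlib's arguments rather than a usage shown in this diff (the diff itself only reads shlib.lib.output):

    # Before: dict[str, LinkedObject], keyed by soname.
    for soname, lib in libraries.items():
        use(soname, lib.output)

    # After: list[SharedLibrary]; each entry carries its soname, the
    # LinkedObject, and the label of the target that produced it.
    libraries.append(create_shlib(
        soname = "libroot.so",  # illustrative
        lib = product.shared_library,
        label = label,
    ))
    for shlib in libraries:
        use(shlib.soname, shlib.lib.output)

Carrying the label alongside the soname lets later consumers attribute each shared library to the rule that linked it.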
@@ -713,12 +733,17 @@ def create_omnibus_libraries( prefer_stripped_objects, allow_cache_upload = True, ) - libraries[_omnibus_soname(ctx)] = omnibus.linked_object + libraries.append( + create_shlib( + soname = _omnibus_soname(ctx), + lib = omnibus.linked_object, + label = ctx.label, + ), + ) # For all excluded nodes, just add their regular shared libs. for label in spec.excluded: - for name, lib in spec.link_infos[label].shared_libs.items(): - libraries[name] = lib + libraries.extend(spec.link_infos[label].shared_libs.libraries) return OmnibusSharedLibraries( omnibus = omnibus, diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl index 3235dd4093..75001d64a3 100644 --- a/prelude/cxx/prebuilt_cxx_library_group.bzl +++ b/prelude/cxx/prebuilt_cxx_library_group.bzl @@ -25,11 +25,9 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "create_merged_link_info", - "create_merged_link_info_for_propagation", "get_lib_output_style", "get_output_styles_for_linkage", ) @@ -46,6 +44,7 @@ load( "merge_shared_libraries", ) load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten_dict") load(":cxx_context.bzl", "get_cxx_toolchain_info") @@ -268,7 +267,7 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: args.extend(ctx.attrs.exported_preprocessor_flags) for inc_dir in ctx.attrs.include_dirs: args += ["-isystem", inc_dir] - preprocessor = CPreprocessor(relative_args = CPreprocessorArgs(args = args)) + preprocessor = CPreprocessor(args = CPreprocessorArgs(args = args)) inherited_pp_info = cxx_inherited_preprocessor_infos(exported_deps) providers.append(cxx_merge_cpreprocessors(ctx, [preprocessor], inherited_pp_info)) @@ -322,11 +321,6 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: static_output_style = get_lib_output_style(LinkStrategy("static"), preferred_linkage, pic_behavior) providers.append(DefaultInfo(default_outputs = outputs[static_output_style])) - # TODO(cjhopman): This is preserving existing behavior, but it doesn't make sense. These lists can be passed - # unmerged to create_merged_link_info below. Potentially that could change link order, so needs to be done more carefully. - merged_inherited_non_exported_link = create_merged_link_info_for_propagation(ctx, inherited_non_exported_link) - merged_inherited_exported_link = create_merged_link_info_for_propagation(ctx, inherited_exported_link) - # Provider for native link. providers.append(create_merged_link_info( ctx, @@ -335,15 +329,16 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = preferred_linkage, # Export link info from our (non-exported) deps (e.g. when we're linking # statically). - deps = [merged_inherited_non_exported_link], + deps = inherited_non_exported_link, # Export link info from our (exported) deps. - exported_deps = [merged_inherited_exported_link], + exported_deps = inherited_exported_link, )) # Propagate shared libraries up the tree. 
+ shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in deps + exported_deps]), )) @@ -358,7 +353,7 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: exported_deps = exported_deps, preferred_linkage = preferred_linkage, link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, # TODO(cjhopman): this should be set to non-None default_soname = None, diff --git a/prelude/cxx/preprocessor.bzl b/prelude/cxx/preprocessor.bzl index 00cd190a67..eb292337b5 100644 --- a/prelude/cxx/preprocessor.bzl +++ b/prelude/cxx/preprocessor.bzl @@ -32,7 +32,7 @@ SystemIncludeDirs = record( # Compiler type to infer correct include flags compiler_type = field(str), # Directories to be included via [-isystem | /external:I] [arglike things] - include_dirs = field(list["label_relative_path"]), + include_dirs = field(list[CellPath]), ) CPreprocessorArgs = record( @@ -45,15 +45,13 @@ CPreprocessorArgs = record( # Note: Any generic attributes are assumed to be relative. CPreprocessor = record( # Relative path args to be used for build operations. - relative_args = field(CPreprocessorArgs, CPreprocessorArgs()), - # Absolute path args used to generate extra user-specific outputs. - absolute_args = field(CPreprocessorArgs, CPreprocessorArgs()), + args = field(CPreprocessorArgs, CPreprocessorArgs()), # Header specs headers = field(list[CHeader], []), # Those should be mutually exclusive with normal headers as per documentation raw_headers = field(list[Artifact], []), # Directories to be included via -I, [arglike things] - include_dirs = field(list["label_relative_path"], []), + include_dirs = field(list[CellPath], []), # Directories to be included via -isystem, [arglike things] system_include_dirs = field([SystemIncludeDirs, None], None), # Whether to compile with modules support @@ -68,13 +66,7 @@ CPreprocessor = record( def _cpreprocessor_args(pres: list[CPreprocessor]): args = cmd_args() for pre in pres: - args.add(pre.relative_args.args) - return args - -def _cpreprocessor_abs_args(pres: list[CPreprocessor]): - args = cmd_args() - for pre in pres: - args.add(pre.absolute_args.args) + args.add(pre.args.args) return args def _cpreprocessor_modular_args(pres: list[CPreprocessor]): @@ -86,13 +78,7 @@ def _cpreprocessor_modular_args(pres: list[CPreprocessor]): def _cpreprocessor_file_prefix_args(pres: list[CPreprocessor]): args = cmd_args() for pre in pres: - args.add(pre.relative_args.file_prefix_args) - return args - -def _cpreprocessor_abs_file_prefix_args(pres: list[CPreprocessor]): - args = cmd_args() - for pre in pres: - args.add(pre.absolute_args.file_prefix_args) + args.add(pre.args.file_prefix_args) return args def _cpreprocessor_include_dirs(pres: list[CPreprocessor]): @@ -118,8 +104,6 @@ def _cpreprocessor_uses_modules(children: list[bool], pres: [list[CPreprocessor] # exported pp info and one for not-exported). 
CPreprocessorTSet = transitive_set( args_projections = { - "abs_args": _cpreprocessor_abs_args, - "abs_file_prefix_args": _cpreprocessor_abs_file_prefix_args, "args": _cpreprocessor_args, "file_prefix_args": _cpreprocessor_file_prefix_args, "include_dirs": _cpreprocessor_include_dirs, @@ -149,15 +133,6 @@ CPreprocessorForTestsInfo = provider( }, ) -# Preprocessor flags -def cxx_attr_preprocessor_flags(ctx: AnalysisContext, ext: str) -> list[typing.Any]: - return ( - ctx.attrs.preprocessor_flags + - cxx_by_language_ext(ctx.attrs.lang_preprocessor_flags, ext) + - flatten(cxx_by_platform(ctx, ctx.attrs.platform_preprocessor_flags)) + - flatten(cxx_by_platform(ctx, cxx_by_language_ext(ctx.attrs.lang_platform_preprocessor_flags, ext))) - ) - def cxx_attr_exported_preprocessor_flags(ctx: AnalysisContext) -> list[typing.Any]: return ( ctx.attrs.exported_preprocessor_flags + @@ -192,7 +167,7 @@ def format_system_include_arg(path: cmd_args, compiler_type: str) -> list[cmd_ar else: return [cmd_args("-isystem"), path] -def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHeadersLayout, project_root_file: Artifact, extra_preprocessors: list[CPreprocessor] = []) -> CPreprocessor: +def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHeadersLayout, extra_preprocessors: list[CPreprocessor] = []) -> CPreprocessor: """ This rule's preprocessor info which is both applied to the compilation of its source and propagated to the compilation of dependent's sources. @@ -237,16 +212,14 @@ def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHead include_dirs.extend([ctx.label.path.add(x) for x in ctx.attrs.public_include_directories]) system_include_dirs.extend([ctx.label.path.add(x) for x in ctx.attrs.public_system_include_directories]) - relative_args = _get_exported_preprocessor_args(ctx, exported_header_map, style, compiler_type, raw_headers, extra_preprocessors, None) - absolute_args = _get_exported_preprocessor_args(ctx, exported_header_map, style, compiler_type, raw_headers, extra_preprocessors, project_root_file) + args = _get_exported_preprocessor_args(ctx, exported_header_map, style, compiler_type, raw_headers, extra_preprocessors) modular_args = [] for pre in extra_preprocessors: modular_args.extend(pre.modular_args) return CPreprocessor( - relative_args = CPreprocessorArgs(args = relative_args.args, file_prefix_args = relative_args.file_prefix_args), - absolute_args = CPreprocessorArgs(args = absolute_args.args, file_prefix_args = absolute_args.file_prefix_args), + args = CPreprocessorArgs(args = args.args, file_prefix_args = args.file_prefix_args), headers = exported_headers, raw_headers = raw_headers, include_dirs = include_dirs, @@ -254,8 +227,8 @@ def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHead modular_args = modular_args, ) -def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], style: HeaderStyle, compiler_type: str, raw_headers: list[Artifact], extra_preprocessors: list[CPreprocessor], project_root_file: [Artifact, None]) -> CPreprocessorArgs: - header_root = prepare_headers(ctx, headers, "buck-headers", project_root_file) +def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], style: HeaderStyle, compiler_type: str, raw_headers: list[Artifact], extra_preprocessors: list[CPreprocessor]) -> CPreprocessorArgs: + header_root = prepare_headers(ctx, headers, "buck-headers") # Process args to handle the `$(cxx-header-tree)` macro. 
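Stepping back from this hunk: with the project_root_file plumbing gone, CPreprocessor no longer tracks parallel relative and absolute argument sets, and the abs_args/abs_file_prefix_args projections disappear from CPreprocessorTSet. A minimal before/after sketch of constructing the record; rel_flags, abs_flags, and flags are illustrative names, not from this diff:

    # Before: two argument tracks, one rendered with relative paths for
    # builds and one with absolute paths for user-facing outputs.
    pre = CPreprocessor(
        relative_args = CPreprocessorArgs(args = rel_flags),
        absolute_args = CPreprocessorArgs(args = abs_flags),
    )

    # After: a single track; every consumer reads the same args.
    pre = CPreprocessor(
        args = CPreprocessorArgs(args = flags),
    )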
args = [] @@ -278,23 +251,22 @@ def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Art if raw_headers: # NOTE(agallagher): It's a bit weird adding an "empty" arg, but this # appears to do the job (and not e.g. expand to `""`). - args.append(cmd_args().hidden(raw_headers)) + args.append(cmd_args(hidden = raw_headers)) # Append any extra preprocessor info passed in via the constructor params for pre in extra_preprocessors: - args.extend(pre.absolute_args.args if project_root_file else pre.relative_args.args) + args.extend(pre.args.args) return CPreprocessorArgs(args = args, file_prefix_args = file_prefix_args) def cxx_private_preprocessor_info( ctx: AnalysisContext, headers_layout: CxxHeadersLayout, - project_root_file: [Artifact, None], raw_headers: list[Artifact] = [], extra_preprocessors: list[CPreprocessor] = [], non_exported_deps: list[Dependency] = [], is_test: bool = False) -> (CPreprocessor, list[CPreprocessor]): - private_preprocessor = _cxx_private_preprocessor_info(ctx, headers_layout, raw_headers, extra_preprocessors, project_root_file) + private_preprocessor = _cxx_private_preprocessor_info(ctx, headers_layout, raw_headers, extra_preprocessors) test_preprocessors = [] if is_test: @@ -309,8 +281,7 @@ def _cxx_private_preprocessor_info( ctx: AnalysisContext, headers_layout: CxxHeadersLayout, raw_headers: list[Artifact], - extra_preprocessors: list[CPreprocessor], - project_root_file: [Artifact, None]) -> CPreprocessor: + extra_preprocessors: list[CPreprocessor]) -> CPreprocessor: """ This rule's preprocessor info which is only applied to the compilation of its source, and not propagated to dependents. @@ -352,23 +323,21 @@ def _cxx_private_preprocessor_info( all_raw_headers.extend(raw_headers) include_dirs.extend([ctx.label.path.add(x) for x in ctx.attrs.include_directories]) - relative_args = _get_private_preprocessor_args(ctx, header_map, compiler_type, all_raw_headers, None) - absolute_args = _get_private_preprocessor_args(ctx, header_map, compiler_type, all_raw_headers, project_root_file) + args = _get_private_preprocessor_args(ctx, header_map, compiler_type, all_raw_headers) return CPreprocessor( - relative_args = CPreprocessorArgs(args = relative_args.args, file_prefix_args = relative_args.file_prefix_args), - absolute_args = CPreprocessorArgs(args = absolute_args.args, file_prefix_args = absolute_args.file_prefix_args), + args = CPreprocessorArgs(args = args.args, file_prefix_args = args.file_prefix_args), headers = headers, raw_headers = all_raw_headers, include_dirs = include_dirs, uses_modules = uses_modules, ) -def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], compiler_type: str, all_raw_headers: list[Artifact], project_root_file: [Artifact, None]) -> CPreprocessorArgs: +def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], compiler_type: str, all_raw_headers: list[Artifact]) -> CPreprocessorArgs: # Create private header tree and propagate via args. 
args = [] file_prefix_args = [] - header_root = prepare_headers(ctx, headers, "buck-private-headers", project_root_file) + header_root = prepare_headers(ctx, headers, "buck-private-headers") if header_root != None: args.extend(_format_include_arg("-I", header_root.include_path, compiler_type)) if header_root.file_prefix_args != None: @@ -379,7 +348,7 @@ def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Arti if all_raw_headers: # NOTE(agallagher): It's a bit weird adding an "empty" arg, but this # appears to do the job (and not e.g. expand to `""`). - args.append(cmd_args().hidden(all_raw_headers)) + args.append(cmd_args(hidden = all_raw_headers)) return CPreprocessorArgs(args = args, file_prefix_args = file_prefix_args) diff --git a/prelude/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl index 3ac819b044..d93f729f86 100644 --- a/prelude/cxx/shared_library_interface.bzl +++ b/prelude/cxx/shared_library_interface.bzl @@ -5,9 +5,20 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:artifact_tset.bzl", "ArtifactTSet", "make_artifact_tset", "project_artifacts") load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:preprocessor.bzl", "CPreprocessor", "CPreprocessorInfo") +load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type +load("@prelude//utils:lazy.bzl", "lazy") load(":cxx_context.bzl", "get_cxx_toolchain_info") load(":cxx_toolchain_types.bzl", "CxxToolchainInfo") +load(":headers.bzl", "CHeader") + +# The transitive artifacts of partial shared interface for a library. +# These need to be collected and merged to produce the final shared interface. +SharedInterfaceInfo = provider(fields = { + "interfaces": provider_field(ArtifactTSet), +}) def _shared_library_interface( ctx: AnalysisContext, @@ -80,3 +91,136 @@ def shared_library_interface( shared_lib = shared_lib, identifier = shared_lib.short_path, ) + +def generate_exported_symbols(ctx: AnalysisContext, exported_headers: list[CHeader], exported_preprocessor: CPreprocessor, transitive_preprocessor: list[CPreprocessorInfo], target: str) -> Artifact: + # Use the c++ compiler to correctly generate c++ symbols. + compiler_info = get_cxx_toolchain_info(ctx).cxx_compiler_info + + # Collect the exported headers for this library and create a filelist for them. + # The exported headers are possibly hidden behind a modulemap, + # so cannot be fetched directly from exported_preprocessor. 
+ filelist_headers = [] + for h in exported_headers: + filelist_headers.append({ + "path": h.artifact, + "type": "public", + }) + + # We need to collect all raw_headers that belong in a public include dir + include_dirs = ctx.attrs.public_include_directories + ctx.attrs.public_system_include_directories + include_dirs = [d if d.endswith("/") else d + "/" for d in include_dirs] + if len(include_dirs) > 0: + filelist_headers.extend([ + { + "path": h, + "type": "public", + } + for h in exported_preprocessor.raw_headers + if lazy.is_any(lambda d: h.short_path.startswith(d), include_dirs) + ]) + + filelist_contents = { + "headers": filelist_headers, + "version": "2", + } + filelist = ctx.actions.write_json( + paths.join("__tbd__", ctx.attrs.name + "_exported_headers.json"), + filelist_contents, + with_inputs = True, + ) + + # Run the shlib interface tool with the filelist and required args + output_file = ctx.actions.declare_output( + paths.join("__tbd__", ctx.attrs.name + ".exported_symbols.txt"), + ) + args = cmd_args(get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo]) + args.add([ + "installapi", + "--filelist", + filelist, + "-o", + output_file.as_output(), + "--target", + target, + ]) + args.add(cmd_args(compiler_info.preprocessor_flags, prepend = "-Xparser")) + args.add(cmd_args(compiler_info.compiler_flags, prepend = "-Xparser")) + args.add(cmd_args(exported_preprocessor.args.args, prepend = "-Xparser")) + for ppinfo in transitive_preprocessor: + args.add(cmd_args(ppinfo.set.project_as_args("args"), prepend = "-Xparser")) + args.add(cmd_args(ppinfo.set.project_as_args("include_dirs"), prepend = "-Xparser")) + + # We need the target's compiler flags to pick up base flags that are applied + # in the macros instead of the toolchain for historical reasons. + args.add(cmd_args(ctx.attrs.compiler_flags, prepend = "-Xparser")) + + ctx.actions.run( + args, + category = "exported_symbols", + identifier = ctx.attrs.name, + ) + + return output_file + +def generate_tbd_with_symbols(ctx: AnalysisContext, soname: str, exported_symbol_inputs: ArtifactTSet, links: list[ArgLike], target: str) -> Artifact: + # Use arglists for the inputs, otherwise we will overflow ARGMAX + symbol_args = project_artifacts(ctx.actions, [exported_symbol_inputs]) + input_argfile, _ = ctx.actions.write("__tbd__/" + ctx.attrs.name + ".symbols.filelist", symbol_args, allow_args = True) + + # Run the shlib interface tool with the merge command + tbd_file = ctx.actions.declare_output( + paths.join("__tbd__", ctx.attrs.name + ".merged.tbd"), + ) + args = cmd_args( + get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo], + "merge", + "-install_name", + "@rpath/" + soname, + "--symbols-filelist", + input_argfile, + "--target", + target, + "-o", + tbd_file.as_output(), + hidden = symbol_args, + ) + + # Pass through the linker args as we need to honour any flags + # related to exported or unexported symbols.
+ for link_args in links: + args.add(cmd_args(link_args, prepend = "-Xparser")) + + ctx.actions.run( + args, + category = "generate_tbd", + identifier = ctx.attrs.name, + ) + return tbd_file + +def create_shared_interface_info(ctx: AnalysisContext, symbol_artifacts: list[Artifact], deps: list[Dependency]) -> [SharedInterfaceInfo, None]: + children = [d[SharedInterfaceInfo].interfaces for d in deps if SharedInterfaceInfo in d] + if len(symbol_artifacts) == 0 and len(children) == 0: + return None + + return SharedInterfaceInfo( + interfaces = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = symbol_artifacts, + children = children, + ), + ) + +def create_shared_interface_info_with_children(ctx: AnalysisContext, symbol_artifacts: list[Artifact], children: list[SharedInterfaceInfo]) -> [SharedInterfaceInfo, None]: + children = [d.interfaces for d in children] + if len(symbol_artifacts) == 0 and len(children) == 0: + return None + + return SharedInterfaceInfo( + interfaces = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = symbol_artifacts, + children = children, + ), + ) diff --git a/prelude/cxx/symbols.bzl b/prelude/cxx/symbols.bzl index 0e93f1b638..def85ee09d 100644 --- a/prelude/cxx/symbols.bzl +++ b/prelude/cxx/symbols.bzl @@ -7,6 +7,8 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") +load("@prelude//os_lookup:defs.bzl", "OsLookup") def _extract_symbol_names( ctx: AnalysisContext, @@ -45,26 +47,58 @@ def _extract_symbol_names( if dynamic and cxx_toolchain.linker_info.type != "darwin": nm_flags += "D" - script = ( - "set -euo pipefail; " + - '"$1" {} "${{@:2}}"'.format(nm_flags) + - # Grab only the symbol name field. - ' | cut -d" " -f2 ' + - # Strip off ABI Version (@...) when using llvm-nm to keep compat with buck1 - " | cut -d@ -f1 " + - # Sort and dedup symbols. Use the `C` locale and do it in-memory to - # make it significantly faster. CAUTION: if ten of these processes - # run in parallel, they'll have cumulative allocations larger than RAM. - " | LC_ALL=C sort -S 10% -u > {}" - ) + is_windows = hasattr(ctx.attrs, "_exec_os_type") and ctx.attrs._exec_os_type[OsLookup].platform == "windows" - ctx.actions.run( - [ + if is_windows: + script = ( + """& {{ + $result = & $args[0] {} $($args[1..($args.Length-1)] -join " ") + $lines = $result -split '`n' + $lines = $lines | ForEach-Object {{ ($_ -split ' ')[1] }} + $lines = $lines | ForEach-Object {{ ($_ -split '@')[0] }} + $lines = $lines | Where-Object {{ $_ -notmatch '__odr_asan_gen_.*' }} + $lines = $lines | Sort-Object -Unique + [IO.File]::WriteAllLines('{{}}', $lines) + }}""".format(nm_flags) + ) + symbol_extraction_args = [ + "powershell", + "-Command", + cmd_args(output.as_output(), format = script), + ] + else: + script = ( + "set -euo pipefail; " + + '"$1" {} "${{@:2}}"'.format(nm_flags) + + # Grab only the symbol name field. + ' | cut -d" " -f2 ' + + # Strip off ABI Version (@...) when using llvm-nm to keep compat with buck1 + " | cut -d@ -f1 " + + # Remove ASAN ODR generated symbols: __odr_asan_gen_*. They are + # handled by a separate asan_dynamic_list.txt list of asan patterns. + # BUT MORE IMPORTANTLY, symbols like __odr_asan_XXX[abi:cxx11] force + # lld into a code path that repeatedly does a linear scan of all + # symbols for O(num_patterns_with_bracket * num_symbols). This + # totally tanks link time for builds with sanitizers! 
Anecdotally, + # a binary with 3.7M symbols and 2K __odr_asan_XXX[abi:cxx11] can + # spend 6 mins processing patterns and 10s actually linking. + " | grep -v -E '__odr_asan_gen_.*'" + # Sort and dedup symbols. Use the `C` locale and do it in-memory to + # make it significantly faster. CAUTION: if ten of these processes + # run in parallel, they'll have cumulative allocations larger than RAM. + " | LC_ALL=C sort -S 10% -u > {}" + ) + symbol_extraction_args = [ "/usr/bin/env", "bash", "-c", cmd_args(output.as_output(), format = script), "", + ] + + ctx.actions.run( + symbol_extraction_args + + [ nm, ] + objects, @@ -75,6 +109,7 @@ def _extract_symbol_names( weight_percentage = 15, # 10% + a little padding allow_cache_upload = allow_cache_upload, ) + return output _SymbolsInfo = provider(fields = { @@ -94,7 +129,7 @@ def _anon_extract_symbol_names_impl(ctx): objects = ctx.attrs.objects, prefer_local = ctx.attrs.prefer_local, undefined_only = ctx.attrs.undefined_only, - allow_cache_upload = ctx.attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), ) return [DefaultInfo(), _SymbolsInfo(artifact = output)] @@ -212,7 +247,7 @@ def _create_symbols_file_from_script( """ all_symbol_files = actions.write(name + ".symbols", symbol_files) - all_symbol_files = cmd_args(all_symbol_files).hidden(symbol_files) + all_symbol_files = cmd_args(all_symbol_files, hidden = symbol_files) output = actions.declare_output(name) cmd = [ "/usr/bin/env", diff --git a/prelude/cxx/target_sdk_version.bzl b/prelude/cxx/target_sdk_version.bzl new file mode 100644 index 0000000000..81d7b88f95 --- /dev/null +++ b/prelude/cxx/target_sdk_version.bzl @@ -0,0 +1,37 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def _version_is_greater(left: str, right: str) -> bool: + # Assumes version strings are in dotted format, e.g. 1.2.4. + # After comparing components pairwise, the version with more + # remaining components is considered larger.
+ left_components = left.split(".") + right_components = right.split(".") + for pair in zip(left_components, right_components): + x = int(pair[0]) + y = int(pair[1]) + if x < y: + return False + elif x > y: + return True + + return len(left_components) > len(right_components) + +def get_target_sdk_version(ctx: AnalysisContext) -> [None, str]: + min_version = ctx.attrs.min_sdk_version + target_version = ctx.attrs.target_sdk_version + if min_version == None and target_version == None: + return None + elif min_version != None and target_version == None: + return min_version + elif min_version == None and target_version != None: + fail("Cannot set target_sdk_version without min_sdk_version") + elif _version_is_greater(min_version, target_version): + warning("Target SDK version {} is less than minimum supported version {}".format(target_version, min_version)) + return min_version + else: + return target_version diff --git a/prelude/cxx/tools/BUCK.v2 b/prelude/cxx/tools/BUCK.v2 index 774d717171..8ca3c303be 100644 --- a/prelude/cxx/tools/BUCK.v2 +++ b/prelude/cxx/tools/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load(":defs.bzl", "cxx_hacks") +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( @@ -26,19 +31,19 @@ prelude.python_bootstrap_binary( prelude.python_bootstrap_binary( name = "dep_file_processor", main = "dep_file_processor.py", + visibility = ["PUBLIC"], deps = [ ":dep_file_processors", ], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_library( name = "dep_file_processors", srcs = [ + "dep_file_utils.py", "makefile_to_dep_file.py", "show_headers_to_dep_file.py", "show_includes_to_dep_file.py", - "dep_file_utils.py", ], visibility = ["PUBLIC"], ) diff --git a/prelude/cxx/tools/hmap_wrapper.py b/prelude/cxx/tools/hmap_wrapper.py index e2c08254c2..3e04a7da0e 100755 --- a/prelude/cxx/tools/hmap_wrapper.py +++ b/prelude/cxx/tools/hmap_wrapper.py @@ -30,16 +30,9 @@ def main(argv): if len(mapping_args) % 2 != 0: parser.error("mappings must be dest-source pairs") - project_root = None - if args.project_root_file: - with open(args.project_root_file) as file: - project_root = file.read().strip() - # Convert the hmap mappings passed on the command line to a dict. mappings = {} for src, dst in itertools.zip_longest(*([iter(mapping_args)] * 2)): - if project_root: - dst = f"{project_root}/{dst}" mappings[src] = dst # NOTE(agallagher): Add a mapping from the mapped path to itself. If diff --git a/prelude/cxx/tools/makefile_to_dep_file.py b/prelude/cxx/tools/makefile_to_dep_file.py index ec173cac32..1049a78765 100755 --- a/prelude/cxx/tools/makefile_to_dep_file.py +++ b/prelude/cxx/tools/makefile_to_dep_file.py @@ -88,7 +88,7 @@ def process_dep_file(args): Expects the src dep file to be the first argument, dst dep file to be the second argument, and the command to follow. 
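
    For example (illustrative invocation): args could be
    ["src.d", "out.dep", "clang", "-c", "foo.cpp", "-MD", "-MF", "src.d"].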
""" - ret = subprocess.call(args[2:]) + ret = subprocess.call(args[2:], stdin=subprocess.DEVNULL) if ret == 0: rewrite_dep_file(args[0], args[1]) sys.exit(ret) diff --git a/prelude/cxx/tools/show_headers_to_dep_file.py b/prelude/cxx/tools/show_headers_to_dep_file.py index b2bf4900e8..037eb24937 100644 --- a/prelude/cxx/tools/show_headers_to_dep_file.py +++ b/prelude/cxx/tools/show_headers_to_dep_file.py @@ -8,12 +8,14 @@ # pyre-unsafe +import re import sys from subprocess import PIPE, run import dep_file_utils + # output_path -> path to write the dep file to # cmd_args -> command to be run to get dependencies from compiler # input_file -> Path to the file we're generating the dep file for. We need this since @@ -21,12 +23,11 @@ # the file itself, so we need the path to add it manually def process_show_headers_dep_file(output_path, cmd_args, input_file): ret = run(cmd_args, stderr=PIPE, encoding="utf-8") - if ret.returncode == 0: - parse_into_dep_file(ret.stderr, output_path, input_file) + parse_into_dep_file(ret.stderr, output_path, input_file, ret.returncode) sys.exit(ret.returncode) -def parse_into_dep_file(output, dst_path, input_file): +def parse_into_dep_file(output, dst_path, input_file, returncode): """ Convert stderr generated by clang to dep file. This will be a mix of output like: @@ -45,17 +46,24 @@ def parse_into_dep_file(output, dst_path, input_file): lines = output.splitlines() - deps = [] - for line in lines: - if line.startswith("."): - path = remove_leading_dots(line.replace(" ", "")) - if len(path) > 0: - deps.append(path.strip()) + if returncode == 0: + deps = [] + for line in lines: + if line.startswith("."): + path = remove_leading_dots(line.replace(" ", "")) + if len(path) > 0: + deps.append(path.strip()) + continue + print(line, file=sys.stderr) # This was a warning/error + + deps.append(input_file) + dep_file_utils.normalize_and_write_deps(deps, dst_path) + else: + for line in lines: + if re.match(r"^\.+ ", line): continue - print(line, file=sys.stderr) # This was a warning/error - deps.append(input_file) - dep_file_utils.normalize_and_write_deps(deps, dst_path) + print(line, file=sys.stderr) def remove_leading_dots(s): diff --git a/prelude/cxx/tools/show_includes_to_dep_file.py b/prelude/cxx/tools/show_includes_to_dep_file.py index a525789a98..ff25b33276 100644 --- a/prelude/cxx/tools/show_includes_to_dep_file.py +++ b/prelude/cxx/tools/show_includes_to_dep_file.py @@ -11,6 +11,8 @@ import dep_file_utils DEP_PREFIX = "Note: including file:" + + # output_path -> path to write the dep field to # cmd_args -> command to be run to get dependencies from compiler # source_file -> Path to the file we're generating the dep file for. We need this since diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index 1cc4cd7262..f885205223 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -6,6 +6,7 @@ # of this source tree. 
load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", "LinkerType", "PicBehavior", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode") load("@prelude//cxx:linker.bzl", "is_pdb_generated") @@ -15,8 +16,8 @@ load( ) load("@prelude//linking:lto.bzl", "LtoMode") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//utils:pick.bzl", _pick = "pick", _pick_and_add = "pick_and_add", _pick_bin = "pick_bin", _pick_dep = "pick_dep") -load("@prelude//utils:utils.bzl", "map_val", "value_or") +load("@prelude//utils:pick.bzl", _pick = "pick", _pick_and_add = "pick_and_add", _pick_bin = "pick_bin", _pick_dep = "pick_dep", _pick_raw = "pick_raw") +load("@prelude//utils:utils.bzl", "flatten", "map_val", "value_or") def _cxx_toolchain_override(ctx): base_toolchain = ctx.attrs.base[CxxToolchainInfo] @@ -36,7 +37,7 @@ def _cxx_toolchain_override(ctx): if asm_info != None: asm_info = AsmCompilerInfo( compiler = _pick_bin(ctx.attrs.asm_compiler, asm_info.compiler), - compiler_type = asm_info.compiler_type, + compiler_type = _pick_raw(ctx.attrs.asm_compiler_type, asm_info.compiler_type), compiler_flags = _pick(ctx.attrs.asm_compiler_flags, asm_info.compiler_flags), preprocessor = _pick_bin(ctx.attrs.asm_compiler, asm_info.preprocessor), preprocessor_type = asm_info.preprocessor_type, @@ -52,6 +53,7 @@ def _cxx_toolchain_override(ctx): preprocessor_type = base_c_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.c_preprocessor_flags, base_c_info.preprocessor_flags), dep_files_processor = base_c_info.dep_files_processor, + allow_cache_upload = _pick_raw(ctx.attrs.c_compiler_allow_cache_upload, base_c_info.allow_cache_upload), ) base_cxx_info = base_toolchain.cxx_compiler_info cxx_info = CxxCompilerInfo( @@ -62,12 +64,13 @@ def _cxx_toolchain_override(ctx): preprocessor_type = base_cxx_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.cxx_preprocessor_flags, base_cxx_info.preprocessor_flags), dep_files_processor = base_cxx_info.dep_files_processor, + allow_cache_upload = _pick_raw(ctx.attrs.cxx_compiler_allow_cache_upload, base_cxx_info.allow_cache_upload), ) base_linker_info = base_toolchain.linker_info linker_type = ctx.attrs.linker_type if ctx.attrs.linker_type != None else base_linker_info.type pdb_expected = is_pdb_generated(linker_type, ctx.attrs.linker_flags) if ctx.attrs.linker_flags != None else base_linker_info.is_pdb_generated - # This handles case when linker type is overriden to non-windows from + # This handles case when linker type is overridden to non-windows from # windows but linker flags are inherited. # When it's changed from non-windows to windows but flags are not changed, # we can't inspect base linker flags and disable PDB subtargets. @@ -75,6 +78,7 @@ def _cxx_toolchain_override(ctx): # linker flags should be changed as well. 
pdb_expected = linker_type == "windows" and pdb_expected shlib_interfaces = ShlibInterfacesMode(ctx.attrs.shared_library_interface_mode) if ctx.attrs.shared_library_interface_mode else None + sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]) if ctx.attrs.sanitizer_runtime_files != None else None linker_info = LinkerInfo( archiver = _pick_bin(ctx.attrs.archiver, base_linker_info.archiver), archiver_type = base_linker_info.archiver_type, @@ -90,6 +94,7 @@ def _cxx_toolchain_override(ctx): link_ordering = base_linker_info.link_ordering, linker = _pick_bin(ctx.attrs.linker, base_linker_info.linker), linker_flags = _pick(ctx.attrs.linker_flags, base_linker_info.linker_flags), + post_linker_flags = _pick(ctx.attrs.post_linker_flags, base_linker_info.post_linker_flags), lto_mode = value_or(map_val(LtoMode, ctx.attrs.lto_mode), base_linker_info.lto_mode), object_file_extension = base_linker_info.object_file_extension, shlib_interfaces = value_or(shlib_interfaces, base_linker_info.shlib_interfaces), @@ -98,6 +103,8 @@ def _cxx_toolchain_override(ctx): requires_objects = base_linker_info.requires_objects, supports_distributed_thinlto = base_linker_info.supports_distributed_thinlto, independent_shlib_interface_linker_flags = base_linker_info.independent_shlib_interface_linker_flags, + sanitizer_runtime_enabled = value_or(ctx.attrs.sanitizer_runtime_enabled, base_linker_info.sanitizer_runtime_enabled), + sanitizer_runtime_files = value_or(sanitizer_runtime_files, base_linker_info.sanitizer_runtime_files), shared_dep_runtime_ld_flags = [], shared_library_name_default_prefix = ctx.attrs.shared_library_name_default_prefix if ctx.attrs.shared_library_name_default_prefix != None else base_linker_info.shared_library_name_default_prefix, shared_library_name_format = ctx.attrs.shared_library_name_format if ctx.attrs.shared_library_name_format != None else base_linker_info.shared_library_name_format, @@ -109,13 +116,13 @@ def _cxx_toolchain_override(ctx): use_archiver_flags = value_or(ctx.attrs.use_archiver_flags, base_linker_info.use_archiver_flags), force_full_hybrid_if_capable = value_or(ctx.attrs.force_full_hybrid_if_capable, base_linker_info.force_full_hybrid_if_capable), is_pdb_generated = pdb_expected, - produce_interface_from_stub_shared_library = value_or(ctx.attrs.produce_interface_from_stub_shared_library, base_linker_info.produce_interface_from_stub_shared_library), ) base_binary_utilities_info = base_toolchain.binary_utilities_info binary_utilities_info = BinaryUtilitiesInfo( nm = _pick_bin(ctx.attrs.nm, base_binary_utilities_info.nm), objcopy = _pick_bin(ctx.attrs.objcopy, base_binary_utilities_info.objcopy), + objdump = _pick_bin(ctx.attrs.objdump, base_binary_utilities_info.objdump), ranlib = _pick_bin(ctx.attrs.ranlib, base_binary_utilities_info.ranlib), strip = _pick_bin(ctx.attrs.strip, base_binary_utilities_info.strip), dwp = base_binary_utilities_info.dwp, @@ -123,11 +130,14 @@ def _cxx_toolchain_override(ctx): ) base_strip_flags_info = base_toolchain.strip_flags_info - strip_flags_info = StripFlagsInfo( - strip_debug_flags = _pick(ctx.attrs.strip_debug_flags, base_strip_flags_info.strip_debug_flags), - strip_non_global_flags = _pick(ctx.attrs.strip_non_global_flags, base_strip_flags_info.strip_non_global_flags), - strip_all_flags = _pick(ctx.attrs.strip_all_flags, base_strip_flags_info.strip_all_flags), - ) + if base_strip_flags_info: + strip_flags_info = StripFlagsInfo( + strip_debug_flags = 
_pick(ctx.attrs.strip_debug_flags, base_strip_flags_info.strip_debug_flags), + strip_non_global_flags = _pick(ctx.attrs.strip_non_global_flags, base_strip_flags_info.strip_non_global_flags), + strip_all_flags = _pick(ctx.attrs.strip_all_flags, base_strip_flags_info.strip_all_flags), + ) + else: + strip_flags_info = None return [ DefaultInfo(), @@ -152,35 +162,38 @@ def _cxx_toolchain_override(ctx): dist_lto_tools_info = base_toolchain.dist_lto_tools_info, use_dep_files = base_toolchain.use_dep_files, clang_remarks = base_toolchain.clang_remarks, + gcno_files = base_toolchain.gcno_files, clang_trace = base_toolchain.clang_trace, object_format = CxxObjectFormat(ctx.attrs.object_format) if ctx.attrs.object_format != None else base_toolchain.object_format, conflicting_header_basename_allowlist = base_toolchain.conflicting_header_basename_allowlist, strip_flags_info = strip_flags_info, pic_behavior = PicBehavior(ctx.attrs.pic_behavior) if ctx.attrs.pic_behavior != None else base_toolchain.pic_behavior.value, split_debug_mode = SplitDebugMode(value_or(ctx.attrs.split_debug_mode, base_toolchain.split_debug_mode.value)), + target_sdk_version = base_toolchain.target_sdk_version, ) -def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): - dep_type = attrs.exec_dep if is_toolchain_rule else attrs.dep - base_dep_type = attrs.toolchain_dep if is_toolchain_rule else attrs.dep - return { +cxx_toolchain_override_registration_spec = RuleRegistrationSpec( + name = "cxx_toolchain_override", + impl = _cxx_toolchain_override, + attrs = { "additional_c_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "additional_cxx_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "archive_objects_locally": attrs.option(attrs.bool(), default = None), - "archiver": attrs.option(dep_type(providers = [RunInfo]), default = None), + "archiver": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "archiver_supports_argfiles": attrs.option(attrs.bool(), default = None), - "as_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "as_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "as_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "as_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "asm_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "asm_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "asm_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), + "asm_compiler_type": attrs.option(attrs.string(), default = None), "asm_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "base": base_dep_type(providers = [CxxToolchainInfo]), + "base": attrs.toolchain_dep(providers = [CxxToolchainInfo]), "bolt_enabled": attrs.option(attrs.bool(), default = None), - "c_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "c_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "c_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "c_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "cxx_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "cxx_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "cxx_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "cxx_preprocessor_flags": 
attrs.option(attrs.list(attrs.arg()), default = None), "force_full_hybrid_if_capable": attrs.option(attrs.bool(), default = None), @@ -190,43 +203,35 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "link_libraries_locally": attrs.option(attrs.bool(), default = None), "link_style": attrs.option(attrs.enum(LinkStyle.values()), default = None), "link_weight": attrs.option(attrs.int(), default = None), - "linker": attrs.option(dep_type(providers = [RunInfo]), default = None), + "linker": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), "linker_type": attrs.option(attrs.enum(LinkerType), default = None), - "llvm_link": attrs.option(dep_type(providers = [RunInfo]), default = None), + "llvm_link": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "lto_mode": attrs.option(attrs.enum(LtoMode.values()), default = None), - "mk_comp_db": attrs.option(dep_type(providers = [RunInfo]), default = None), - "mk_hmap": attrs.option(dep_type(providers = [RunInfo]), default = None), - "mk_shlib_intf": attrs.option(dep_type(providers = [RunInfo]), default = None), - "nm": attrs.option(dep_type(providers = [RunInfo]), default = None), - "objcopy": attrs.option(dep_type(providers = [RunInfo]), default = None), + "mk_comp_db": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "mk_hmap": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "mk_shlib_intf": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "nm": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "objcopy": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "objdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "object_format": attrs.enum(CxxObjectFormat.values(), default = "native"), "pic_behavior": attrs.enum(PicBehavior.values(), default = "supported"), "platform_deps_aliases": attrs.option(attrs.list(attrs.string()), default = None), "platform_name": attrs.option(attrs.string(), default = None), - "produce_interface_from_stub_shared_library": attrs.option(attrs.bool(), default = None), - "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), + "post_linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), + "ranlib": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "sanitizer_runtime_enabled": attrs.bool(default = False), + "sanitizer_runtime_files": attrs.option(attrs.set(attrs.dep(), sorted = True, default = []), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.option(attrs.enum(ShlibInterfacesMode.values()), default = None), "shared_library_name_default_prefix": attrs.option(attrs.string(), default = None), "shared_library_name_format": attrs.option(attrs.string(), default = None), "shared_library_versioned_name_format": attrs.option(attrs.string(), default = None), "split_debug_mode": attrs.option(attrs.enum(SplitDebugMode.values()), default = None), - "strip": attrs.option(dep_type(providers = [RunInfo]), default = None), + "strip": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "strip_all_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_debug_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_non_global_flags": attrs.option(attrs.list(attrs.arg()), default = None), 
"use_archiver_flags": attrs.option(attrs.bool(), default = None), - } - -cxx_toolchain_override_registration_spec = RuleRegistrationSpec( - name = "cxx_toolchain_override", - impl = _cxx_toolchain_override, - attrs = _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule = False), -) - -cxx_toolchain_override_inheriting_target_platform_registration_spec = RuleRegistrationSpec( - name = "cxx_toolchain_override_inheriting_target_platform", - impl = _cxx_toolchain_override, - attrs = _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule = True), + } | cxx_toolchain_allow_cache_upload_args(), is_toolchain_rule = True, ) diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 93ee74c706..d3e55c87bb 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -7,18 +7,18 @@ load( "@prelude//cxx:groups.bzl", - "BuildTargetFilter", # @unused Used as a type - "FilterType", - "Group", # @unused Used as a type - "GroupMapping", # @unused Used as a type - "LabelFilter", # @unused Used as a type + "get_roots_from_mapping", + "make_info_subtarget_providers", "parse_groups_definitions", ) load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", "build_link_group_info", ) +load( + "@prelude//cxx:link_groups_types.bzl", + "link_group_inlined_map_attr", +) load( "@prelude//linking:link_groups.bzl", "LinkGroupLibInfo", @@ -37,132 +37,26 @@ load( "SharedLibraryInfo", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load( - "@prelude//utils:build_target_pattern.bzl", - "BuildTargetPattern", # @unused Used as a type -) -load("@prelude//decls/common.bzl", "Linkage", "Traversal") - -def _v1_attrs( - optional_root: bool = False, - # Whether we should parse `root` fields as a `dependency`, instead of a `label`. - root_is_dep: bool = True): - if root_is_dep: - attrs_root = attrs.dep(providers = [ - LinkGroupLibInfo, - LinkableGraph, - MergedLinkInfo, - SharedLibraryInfo, - ]) - else: - attrs_root = attrs.label() - - if optional_root: - attrs_root = attrs.option(attrs_root) - - return attrs.list( - attrs.tuple( - # name - attrs.string(), - # list of mappings - attrs.list( - # a single mapping - attrs.tuple( - # root node - attrs_root, - # traversal - attrs.enum(Traversal), - # filters, either `None`, a single filter, or a list of filters - # (which must all match). - attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())), - # linkage - attrs.option(attrs.enum(Linkage)), - ), - ), - # attributes - attrs.option( - attrs.dict(key = attrs.string(), value = attrs.any(), sorted = False), - ), - ), - ) - -def link_group_map_attr(): - v2_attrs = attrs.dep(providers = [LinkGroupInfo]) - return attrs.option( - attrs.one_of( - v2_attrs, - _v1_attrs( - optional_root = True, - # Inlined `link_group_map` will parse roots as `label`s, to avoid - # bloating deps w/ unrelated mappings (e.g. it's common to use - # a default mapping for all rules, which would otherwise add - # unrelated deps to them). 
- root_is_dep = False, - ), - ), - default = None, - ) - -def _make_json_info_for_build_target_pattern(build_target_pattern: BuildTargetPattern) -> dict[str, typing.Any]: - # `BuildTargetPattern` contains lambdas which are not serializable, so - # have to generate the JSON representation - return { - "cell": build_target_pattern.cell, - "kind": build_target_pattern.kind, - "name": build_target_pattern.name, - "path": build_target_pattern.path, - } - -def _make_json_info_for_group_mapping_filters(filters: list[[BuildTargetFilter, LabelFilter]]) -> list[dict[str, typing.Any]]: - json_filters = [] - for filter in filters: - if filter._type == FilterType("label"): - json_filters += [{"regex": str(filter.regex)}] - elif filter._type == FilterType("pattern"): - json_filters += [_make_json_info_for_build_target_pattern(filter.pattern)] - else: - fail("Unknown filter type: " + filter) - return json_filters - -def _make_json_info_for_group_mapping(group_mapping: GroupMapping) -> dict[str, typing.Any]: - return { - "filters": _make_json_info_for_group_mapping_filters(group_mapping.filters), - "preferred_linkage": group_mapping.preferred_linkage, - "root": group_mapping.root, - "traversal": group_mapping.traversal, - } - -def _make_json_info_for_group(group: Group) -> dict[str, typing.Any]: - return { - "attrs": group.attrs, - "mappings": [_make_json_info_for_group_mapping(mapping) for mapping in group.mappings], - "name": group.name, - } - -def _make_info_subtarget_providers(ctx: AnalysisContext, link_group_info: LinkGroupInfo) -> list[Provider]: - info_json = { - "groups": {name: _make_json_info_for_group(group) for name, group in link_group_info.groups.items()}, - "mappings": link_group_info.mappings, - } - json_output = ctx.actions.write_json("link_group_map_info.json", info_json) - return [DefaultInfo(default_output = json_output)] +load("@prelude//utils:utils.bzl", "flatten") def _impl(ctx: AnalysisContext) -> list[Provider]: # Extract graphs from the roots via the raw attrs, as `parse_groups_definitions` # parses them as labels. + + deps = flatten([ + get_roots_from_mapping(mapping) + for entry in ctx.attrs.map + for mapping in entry[1] + ]) linkable_graph = create_linkable_graph( ctx, - deps = [ - mapping[0][LinkableGraph] - for entry in ctx.attrs.map - for mapping in entry[1] - ], + deps = [dep[LinkableGraph] for dep in deps], ) link_groups = parse_groups_definitions(ctx.attrs.map, lambda root: root.label) link_group_info = build_link_group_info(linkable_graph, link_groups) return [ DefaultInfo(sub_targets = { - "info": _make_info_subtarget_providers(ctx, link_group_info), + "info": make_info_subtarget_providers(ctx, link_group_info.groups.values(), link_group_info.mappings), }), link_group_info, ] @@ -171,6 +65,15 @@ registration_spec = RuleRegistrationSpec( name = "link_group_map", impl = _impl, attrs = { - "map": _v1_attrs(), + "map": link_group_inlined_map_attr( + root_attr = attrs.dep( + providers = [ + LinkGroupLibInfo, + LinkableGraph, + MergedLinkInfo, + SharedLibraryInfo, + ], + ), + ), }, ) diff --git a/prelude/cxx/windows_resource.bzl b/prelude/cxx/windows_resource.bzl new file mode 100644 index 0000000000..17071680dc --- /dev/null +++ b/prelude/cxx/windows_resource.bzl @@ -0,0 +1,95 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout") +load("@prelude//cxx:preprocessor.bzl", "cxx_merge_cpreprocessors", "cxx_private_preprocessor_info") +load("@prelude//linking:link_groups.bzl", "LinkGroupLibInfo") +load("@prelude//linking:link_info.bzl", "LibOutputStyle", "LinkInfo", "LinkInfos", "ObjectsLinkable", "create_merged_link_info") +load("@prelude//linking:linkable_graph.bzl", "create_linkable_graph") +load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo") + +def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: + (own_non_exported_preprocessor_info, _) = cxx_private_preprocessor_info( + ctx = ctx, + headers_layout = cxx_get_regular_cxx_headers_layout(ctx), + raw_headers = ctx.attrs.raw_headers, + extra_preprocessors = [], + non_exported_deps = [], + is_test = False, + ) + + preprocessor = cxx_merge_cpreprocessors( + ctx, + [own_non_exported_preprocessor_info], + [], + ) + + headers_tag = ctx.actions.artifact_tag() + + objects = [] + + toolchain = get_cxx_toolchain_info(ctx) + for src in ctx.attrs.srcs: + rc_output = ctx.actions.declare_output( + "__objects__", + "{}.res".format(src.short_path), + ) + rc_cmd = cmd_args( + toolchain.rc_compiler_info.compiler, + toolchain.rc_compiler_info.compiler_flags, + cmd_args(rc_output.as_output(), format = "/fo{}"), + headers_tag.tag_artifacts(preprocessor.set.project_as_args("args")), + headers_tag.tag_artifacts(preprocessor.set.project_as_args("include_dirs")), + src, + ) + + ctx.actions.run( + rc_cmd, + category = "rc_compile", + ) + + cvtres_output = ctx.actions.declare_output( + "__objects__", + "{}.obj".format(src.short_path), + ) + cvtres_cmd = cmd_args( + toolchain.cvtres_compiler_info.compiler, + toolchain.cvtres_compiler_info.compiler_flags, + cmd_args(cvtres_output.as_output(), format = "/OUT:{}"), + rc_output, + ) + + ctx.actions.run( + cvtres_cmd, + category = "cvtres_compile", + ) + + objects.append(cvtres_output) + + link = LinkInfo( + name = ctx.attrs.name, + linkables = [ObjectsLinkable( + objects = objects, + linker_type = toolchain.linker_info.type, + link_whole = True, + )], + ) + + providers = [ + DefaultInfo(default_output = None), + SharedLibraryInfo(set = None), + LinkGroupLibInfo(libs = {}), + create_linkable_graph(ctx), + create_merged_link_info( + ctx, + toolchain.pic_behavior, + {output_style: LinkInfos(default = link) for output_style in LibOutputStyle}, + ), + ] + + return providers diff --git a/prelude/cxx/xcode.bzl b/prelude/cxx/xcode.bzl index 07c98c9122..5f03486801 100644 --- a/prelude/cxx/xcode.bzl +++ b/prelude/cxx/xcode.bzl @@ -10,7 +10,7 @@ load( "CompileArgsfile", # @unused Used as a type ) load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) diff --git a/prelude/debugging/common.bzl b/prelude/debugging/common.bzl index e115313752..b484d780ee 100644 --- a/prelude/debugging/common.bzl +++ b/prelude/debugging/common.bzl @@ -19,6 +19,6 @@ def create_target_info(target: bxl.ConfiguredTargetNode) -> TargetInfo: attrs = target.attrs_lazy() return TargetInfo( target = target_name(target), - target_type = rule_type(target), + target_type = 
rule_type(target).removeprefix("prelude//rules.bzl:"), labels = attrs.get("labels").value() if attrs.get("labels") != None else [], ) diff --git a/prelude/debugging/inspect_dbg_exec.bzl b/prelude/debugging/inspect_dbg_exec.bzl index 416f5b4869..33dfafb01f 100644 --- a/prelude/debugging/inspect_dbg_exec.bzl +++ b/prelude/debugging/inspect_dbg_exec.bzl @@ -18,8 +18,7 @@ def inspect_dbg_exec(ctx: bxl.Context, actions: AnalysisActions, target: bxl.Con providers = ctx.analysis(fbsource_alias_target).providers() fdb_helper = providers[RunInfo] fdb_helper_out = actions.declare_output("fdb_helper.json") - cmd = cmd_args(fdb_helper) - cmd.add(settings.args) + cmd = cmd_args(fdb_helper, settings.args) actions.run(cmd, category = "fdb_helper", env = {"FDB_OUTPUT_FILE": fdb_helper_out.as_output()}, local_only = True) result = actions.declare_output("final_out.json") @@ -47,7 +46,7 @@ def inspect_dbg_exec(ctx: bxl.Context, actions: AnalysisActions, target: bxl.Con actions.dynamic_output( dynamic = [fdb_helper_out], inputs = [], - outputs = [result], + outputs = [result.as_output()], f = build_exec_info, ) return result diff --git a/prelude/debugging/labels.bzl b/prelude/debugging/labels.bzl index adc5589dc7..94f99b10c6 100644 --- a/prelude/debugging/labels.bzl +++ b/prelude/debugging/labels.bzl @@ -12,7 +12,7 @@ # For example: # Running "buck run //another:target" (or via using [RunInfo]) should produce `ExecInfo` as its stdout -# 3. If target has a label `dbg:info:ref=//another:target` we assume a presense of //another:target which we can inspect for the presense of relevant providers (see fdb.bxl) +# 3. If target has a label `dbg:info:ref=//another:target` we assume a presence of //another:target which we can inspect for the presence of relevant providers (see fdb.bxl) # This label indicates where to locate "[RunInfo]" which would output `ExecInfo` -compatible output DBG_INFO_EXEC = "dbg:info:exec" @@ -34,13 +34,6 @@ def get_info_ref(labels: list[str]) -> [str, None]: return result return None -def get_info_exec(labels: list[str]) -> [str, None]: - for label in labels: - result = _get_value_by_mark(DBG_INFO_EXEC, label) - if result: - return result - return None - def get_label_or_mark(label: str) -> str: for mark in [DBG_INFO_EXEC, DBG_INFO_REF]: if label.startswith(mark): diff --git a/prelude/debugging/types.bzl b/prelude/debugging/types.bzl index bfc9ea4e70..d6560eecba 100644 --- a/prelude/debugging/types.bzl +++ b/prelude/debugging/types.bzl @@ -60,7 +60,7 @@ Custom = record( # Java DAP server requires this file in order to correctly locate classes in the source files # The integration with a tool is available as a part of "JVM" rules. 
(java/kotlin_library/binary/test) JavaInfo = record( - classmap_file = field([Artifact, None]), + classmap_file = field(Artifact | None), ) # Customizations that are understood by debugging tool diff --git a/prelude/decls/android_rules.bzl b/prelude/decls/android_rules.bzl index fe4b1b1bfb..2fb57df3c1 100644 --- a/prelude/decls/android_rules.bzl +++ b/prelude/decls/android_rules.bzl @@ -116,9 +116,9 @@ android_aar = prelude_rule( "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "enable_relinker": attrs.bool(default = False), + "excluded_java_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), "extra_kotlinc_arguments": attrs.list(attrs.string(), default = []), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "friend_paths": attrs.list(attrs.dep(), default = []), "java_version": attrs.option(attrs.string(), default = None), "javac": attrs.option(attrs.source(), default = None), @@ -129,7 +129,7 @@ android_aar = prelude_rule( "manifest": attrs.option(attrs.source(), default = None), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), - "native_library_merge_code_generator": attrs.option(attrs.dep(), default = None), + "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.dep(), default = None), "native_library_merge_localized_symbols": attrs.option(attrs.set(attrs.string(), sorted = True), default = None), "native_library_merge_map": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.regex()), sorted = False), default = None), @@ -151,6 +151,7 @@ android_aar = prelude_rule( "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -215,6 +216,7 @@ android_binary = prelude_rule( "duplicate_resource_whitelist": attrs.option(attrs.source(), default = None), "enable_relinker": attrs.bool(default = False), "exopackage_modes": attrs.list(attrs.enum(ExopackageMode), default = []), + "extra_no_compress_asset_extensions": attrs.list(attrs.string(), default = []), "extra_filtered_resources": attrs.list(attrs.string(), default = []), "field_ref_count_buffer_space": attrs.int(default = 0), "ignore_aapt_proguard_config": attrs.bool(default = False), @@ -246,6 +248,7 @@ android_binary = prelude_rule( "package_asset_libraries": attrs.bool(default = False), "package_type": attrs.enum(PackageType, default = "debug"), "packaged_locales": attrs.list(attrs.string(), default = []), + "packaging_options": attrs.dict(key = attrs.string(), value = attrs.list(attrs.string()), default = {}), "post_filter_resources_cmd": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_bash": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_cmd": attrs.option(attrs.arg(), default = None), @@ -261,6 +264,7 @@ android_binary = prelude_rule( "secondary_dex_weight_limit": attrs.option(attrs.int(), default = None), "skip_crunch_pngs": attrs.option(attrs.bool(), default = None), "skip_proguard": attrs.bool(default = False), + "strip_libraries": attrs.bool(default = True), "trim_resource_ids": attrs.bool(default = False), 
"use_split_dex": attrs.bool(default = False), "xz_compression_level": attrs.int(default = 4), @@ -446,6 +450,7 @@ android_bundle = prelude_rule( "duplicate_resource_whitelist": attrs.option(attrs.source(), default = None), "enable_relinker": attrs.bool(default = False), "exopackage_modes": attrs.list(attrs.enum(ExopackageMode), default = []), + "extra_no_compress_asset_extensions": attrs.list(attrs.string(), default = []), "extra_filtered_resources": attrs.list(attrs.string(), default = []), "field_ref_count_buffer_space": attrs.int(default = 0), "ignore_aapt_proguard_config": attrs.bool(default = False), @@ -477,6 +482,7 @@ android_bundle = prelude_rule( "package_asset_libraries": attrs.bool(default = False), "package_type": attrs.enum(PackageType, default = "debug"), "packaged_locales": attrs.list(attrs.string(), default = []), + "packaging_options": attrs.dict(key = attrs.string(), value = attrs.list(attrs.string()), default = {}), "post_filter_resources_cmd": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_bash": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_cmd": attrs.option(attrs.arg(), default = None), @@ -574,6 +580,9 @@ android_instrumentation_apk = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "use_split_dex": attrs.option(attrs.bool(), default = None), "primary_dex_patterns": attrs.list(attrs.string(), default = []), + "preprocess_java_classes_bash": attrs.option(attrs.arg(), default = None), + "preprocess_java_classes_cmd": attrs.option(attrs.arg(), default = None), + "preprocess_java_classes_deps": attrs.list(attrs.dep(), default = []), } ), ) @@ -745,9 +754,9 @@ android_library = prelude_rule( "annotation_processors": attrs.list(attrs.string(), default = []), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "friend_paths": attrs.list(attrs.dep(), default = []), "java_version": attrs.option(attrs.string(), default = None), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), "language": attrs.option(attrs.enum(JvmLanguage), default = None), @@ -763,6 +772,7 @@ android_library = prelude_rule( "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -912,6 +922,8 @@ android_prebuilt_aar = prelude_rule( "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "deps": attrs.list(attrs.dep(), default = []), + "desugar_deps": attrs.list(attrs.dep(), default = []), + "for_primary_apk": attrs.bool(default = False), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "maven_coords": attrs.option(attrs.string(), default = None), @@ -1051,8 +1063,10 @@ apk_genrule = prelude_rule( { "apk": attrs.option(attrs.dep(), default = None, doc = """ The input `android_binary()` rule. 
                The path to the APK can be
-                accessed with the `$APK` shell variable.
+                accessed with the `$APK` shell variable. Only one of `apk` or
+                `aab` can be provided.
             """),
+            "keystore": attrs.option(attrs.dep(), default = None),
         } |
         genrule_common.srcs_arg() |
         genrule_common.cmd_arg() |
@@ -1061,13 +1075,21 @@
         genrule_common.type_arg() |
         {
             "out": attrs.option(attrs.string(), default = None, doc = """
-                This argument only exists for historical reasons and it does not have any
-                effect. It will be deprecated and removed in the future.
+                The name of the output file or directory. The complete path to this
+                argument is provided to the shell command through
+                the `OUT` environment variable. Only one of `out`
+                or `outs` may be present.
+
+                For an apk_genrule, the output should be a '.apk' or '.aab' file.
             """),
         } |
         genrule_common.environment_expansion_separator() |
         {
-            "aab": attrs.option(attrs.dep(), default = None),
+            "aab": attrs.option(attrs.dep(), default = None, doc = """
+                The input `android_binary()` rule. The path to the AAB can be
+                accessed with the `$AAB` shell variable. Only one of `apk` or
+                `aab` can be provided.
+            """),
             "cacheable": attrs.option(attrs.bool(), default = None),
             "contacts": attrs.list(attrs.string(), default = []),
             "default_host_platform": attrs.option(attrs.configuration_label(), default = None),
@@ -1290,6 +1312,7 @@ prebuilt_jar = prelude_rule(
                 `binary_jar` is already built, there should be nothing to build, so
                 this should be empty.
             """),
+            "desugar_deps": attrs.list(attrs.dep(), default = []),
             "contacts": attrs.list(attrs.string(), default = []),
             "default_host_platform": attrs.option(attrs.configuration_label(), default = None),
             "generate_abi": attrs.bool(default = False),
@@ -1399,9 +1422,9 @@ robolectric_test = prelude_rule(
             "exported_deps": attrs.list(attrs.dep(), default = []),
             "exported_provided_deps": attrs.list(attrs.dep(), default = []),
             "extra_arguments": attrs.list(attrs.string(), default = []),
-            "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []),
             "fork_mode": attrs.enum(ForkMode, default = "none"),
             "friend_paths": attrs.list(attrs.dep(), default = []),
+            "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None),
             "java_version": attrs.option(attrs.string(), default = None),
             "java": attrs.option(attrs.dep(), default = None),
             "javac": attrs.option(attrs.source(), default = None),
@@ -1443,8 +1466,10 @@ robolectric_test = prelude_rule(
             "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None),
             "use_cxx_libraries": attrs.option(attrs.bool(), default = None),
             "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None),
+            "used_as_dependency_deprecated_do_not_use": attrs.bool(default = False),
             "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None),
             "vm_args": attrs.list(attrs.arg(), default = []),
+            "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}),
         } |
         jvm_common.k2() |
         re_test_common.test_args()
     ),
diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl
index 64b9552dc6..4a6e581381 100644
--- a/prelude/decls/common.bzl
+++ b/prelude/decls/common.bzl
@@ -39,8 +39,6 @@ IncludeType = ["local", "system", "raw"]

 LinkableDepType = ["static", "static_pic", "shared"]

-Linkage = ["any", "static", "shared"]
-
 LogLevel = ["off", "severe", "warning", "info", "config", "fine", "finer", "finest", "all"]

 OnDuplicateEntry = ["fail", "overwrite", "append"]
@@ -49,8 +47,6 @@
 SourceAbiVerificationMode = ["off", "log", "fail"]

 TestType = ["junit", "junit5", "testng"]

-Traversal = ["tree", "node", "subfolders"]
-
 UnusedDependenciesAction = ["unknown", "fail", "warn", "ignore", "unrecognized"]

 def _name_arg(name_type):
@@ -205,8 +201,9 @@ def _exec_os_type_arg() -> Attr:

 def _allow_cache_upload_arg():
     return {
-        "allow_cache_upload": attrs.bool(
-            default = False,
+        "allow_cache_upload": attrs.option(
+            attrs.bool(),
+            default = None,
             doc = """
                 Whether to allow the output of this rule to be uploaded to the cache
                 when the action is executed locally, if the configuration
diff --git a/prelude/decls/core_rules.bzl b/prelude/decls/core_rules.bzl
index c2700bbde4..83e387b573 100644
--- a/prelude/decls/core_rules.bzl
+++ b/prelude/decls/core_rules.bzl
@@ -158,9 +158,12 @@ command_alias = prelude_rule(
     attrs = (
         # @unsorted-dict-items
         {
-            "exe": attrs.option(attrs.dep(), default = None, doc = """
-                A `build target`for a rule that outputs
-                an executable, such as an `sh\\_binary()`.
+            # Match `dep` before `source` so that we can extract its `RunInfo`
+            # provider, if one exists.
+            "exe": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None, doc = """
+                A `build target` for a rule that outputs
+                an executable, such as an `sh\\_binary()`,
+                or an executable source file.
             """),
             "platform_exe": attrs.dict(key = attrs.enum(Platform), value = attrs.dep(), sorted = False, default = {}, doc = """
                 A mapping from platforms to `build target`.
@@ -221,6 +224,23 @@ config_setting = prelude_rule(
     ),
 )

+configuration_alias = prelude_rule(
+    name = "configuration_alias",
+    docs = "",
+    examples = None,
+    further = None,
+    attrs = (
+        # @unsorted-dict-items
+        {
+            # configuration_alias acts like alias but for configuration rules.
+
+            # The configuration_alias itself is a configuration rule and the `actual` argument is
+            # expected to be a configuration rule as well.
+            "actual": attrs.dep(pulls_and_pushes_plugins = plugins.All),
+        }
+    ),
+)
+
 configured_alias = prelude_rule(
     name = "configured_alias",
     docs = "",
@@ -280,10 +300,6 @@ constraint_value = prelude_rule(
 export_file = prelude_rule(
     name = "export_file",
     docs = """
-        **Warning:** this build rule is deprecated for folders.
-        Use `filegroup()`instead. It is still supported for individual files.
-
-
         An `export_file()` takes a single file or folder and exposes it so other rules can
         use it.
     """,
@@ -362,7 +378,7 @@ export_file = prelude_rule(

         genrule(
           name = 'demo',
-          out = 'result.html'
+          out = 'result.html',
           cmd = 'cp $(location :example) $OUT',
         )

@@ -735,7 +751,7 @@ http_archive = prelude_rule(
                 * foo\\_prime/bar-0.1.2

                 Only `data.dat` will be extracted, and it will be extracted into the output
-                directory specified in\302\240`http\\_archive()out`.
+                directory specified in `out`.
             """),
             "excludes": attrs.list(attrs.regex(), default = [], doc = """
                 An optional list of regex patterns. All file paths in the extracted archive which match
@@ -1098,6 +1114,19 @@ test_suite = prelude_rule(
     ),
 )

+toolchain_alias = prelude_rule(
+    name = "toolchain_alias",
+    docs = """
+toolchain_alias acts like alias but for toolchain rules.
+
+The toolchain_alias itself is a toolchain rule and the `actual` argument is
+expected to be a toolchain rule as well.
+    """,
+    examples = None,
+    further = None,
+    attrs = {"actual": attrs.toolchain_dep(doc = "The actual toolchain that is being aliased.
This should be a toolchain rule.")}, +) + versioned_alias = prelude_rule( name = "versioned_alias", docs = "", @@ -1473,6 +1502,7 @@ core_rules = struct( alias = alias, command_alias = command_alias, config_setting = config_setting, + configuration_alias = configuration_alias, configured_alias = configured_alias, constraint_setting = constraint_setting, constraint_value = constraint_value, @@ -1485,6 +1515,7 @@ core_rules = struct( platform = platform, remote_file = remote_file, test_suite = test_suite, + toolchain_alias = toolchain_alias, versioned_alias = versioned_alias, worker_tool = worker_tool, zip_file = zip_file, diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 531f89810f..3834835e51 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -10,8 +10,10 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Linkage", "Traversal", "buck", "prelude_rule") +load("@prelude//apple:apple_common.bzl", "apple_common") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//linking:types.bzl", "Linkage") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":genrule_common.bzl", "genrule_common") load(":native_common.bzl", "native_common") @@ -117,7 +119,7 @@ cxx_binary = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), @@ -136,8 +138,8 @@ cxx_genrule = prelude_rule( name = "cxx_genrule", docs = """ A `cxx_genrule()` enables you to run shell commands as part - of the Buck build process. A `cxx_genrule()` exposes\342\200\224through - a set of string parameter macros and variables\342\200\224information about the + of the Buck build process. A `cxx_genrule()` exposes - through + a set of string parameter macros and variables - information about the tools and configuration options used by the Buck environment, specifically those related to the C/C++ toolchain. @@ -517,18 +519,12 @@ cxx_library = prelude_rule( cxx_common.exported_post_platform_linker_flags_arg() | native_common.link_style() | native_common.link_whole(link_whole_type = attrs.option(attrs.bool(), default = None)) | + native_common.soname() | cxx_common.raw_headers_arg() | cxx_common.include_directories_arg() | cxx_common.public_include_directories_arg() | cxx_common.public_system_include_directories_arg() | { - "soname": attrs.option(attrs.string(), default = None, doc = """ - Sets the soname ("shared object name") of any shared library produced from this rule. - The default value is based on the full rule name. 
-                The macro `$(ext)` will be replaced with a platform-appropriate extension.
-                An argument can be provided, which is a library version.
-                For example `soname = 'libfoo.$(ext 2.3)'` will be `libfoo.2.3.dylib` on Mac and `libfoo.so.2.3` on Linux.
-            """),
             "used_by_wrap_script": attrs.bool(default = False, doc = """
                 When using an exopackage
                 Android, if this parameter is set to `True`, then the library is
@@ -542,7 +538,7 @@ cxx_library = prelude_rule(
         } |
         cxx_common.supported_platforms_regex_arg() |
         cxx_common.force_static(force_static_type = attrs.option(attrs.bool(), default = None)) |
-        native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) |
+        native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) |
         cxx_common.reexport_all_header_dependencies_arg() |
         cxx_common.exported_deps_arg() |
         cxx_common.exported_platform_deps_arg() |
@@ -570,7 +566,7 @@ cxx_library = prelude_rule(
             "libraries": attrs.list(attrs.string(), default = []),
             "licenses": attrs.list(attrs.source(), default = []),
             "link_group": attrs.option(attrs.string(), default = None),
-            "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None),
+            "link_group_map": LINK_GROUP_MAP_ATTR,
             "module_name": attrs.option(attrs.string(), default = None),
             "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []),
             "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []),
@@ -586,9 +582,8 @@ cxx_library = prelude_rule(
             "uses_explicit_modules": attrs.bool(default = False),
             "version_universe": attrs.option(attrs.string(), default = None),
             "weak_framework_names": attrs.list(attrs.string(), default = []),
-            "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None),
-            "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None),
-        }
+        } |
+        buck.allow_cache_upload_arg()
     ),
 )

@@ -729,6 +724,56 @@ cxx_precompiled_header = prelude_rule(
     ),
 )

+windows_resource = prelude_rule(
+    name = "windows_resource",
+    docs = """
+        A `windows_resource()` rule specifies a set of Windows resource files (.rc) that
+        are compiled into object files.
+
+        The files are compiled into .res files using rc.exe and then compiled into object files
+        using cvtres.exe.
+        They are not part of cxx_library because Microsoft's linker ignores resources unless
+        they are specified as object files, which makes including them in a (possibly static)
+        library unintuitive.
+    """,
+    examples = """
+        ```
+
+        # A rule that includes a single .rc file and compiles it into an object file.
+        windows_resource(
+            name = "resources",
+            srcs = [
+                "resources.rc",
+            ],
+        )
+
+        # A rule that links against the above windows_resource rule.
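+        # The resource objects are linked whole (the implementation uses
+        # link_whole = True), so the linker keeps them even though nothing
+        # in main.cpp references them by symbol.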
+ cxx_binary( + name = "app", + srcs = [ + "main.cpp", + ], + deps = [ + ":resources" + ], + ) + + ``` + """, + further = None, + attrs = ( + cxx_common.srcs_arg() | + cxx_common.headers_arg() | + cxx_common.platform_headers_arg() | + cxx_common.header_namespace_arg() | + cxx_common.raw_headers_arg() | + cxx_common.include_directories_arg() | + { + "labels": attrs.list(attrs.string(), default = []), + } + ), +) + cxx_test = prelude_rule( name = "cxx_test", docs = """ @@ -832,7 +877,7 @@ cxx_test = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_compiler_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), @@ -849,7 +894,8 @@ cxx_test = prelude_rule( "use_default_test_main": attrs.option(attrs.bool(), default = None), "version_universe": attrs.option(attrs.string(), default = None), "weak_framework_names": attrs.list(attrs.string(), default = []), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -889,6 +935,10 @@ cxx_toolchain = prelude_rule( "cuda_compiler_flags": attrs.list(attrs.arg(), default = []), "cuda_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), "cuda_preprocessor_flags": attrs.list(attrs.arg(), default = []), + "cvtres_compiler": attrs.option(attrs.source(), default = None), + "cvtres_compiler_flags": attrs.list(attrs.arg(), default = []), + "cvtres_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), + "cvtres_preprocessor_flags": attrs.list(attrs.arg(), default = []), "cxx_compiler": attrs.source(), "cxx_compiler_flags": attrs.list(attrs.arg(), default = []), "cxx_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), @@ -906,18 +956,25 @@ cxx_toolchain = prelude_rule( "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_path_normalization_args_enabled": attrs.bool(default = False), + "link_style": attrs.string(default = "static"), "linker": attrs.source(), "linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "linker_type": attrs.enum(LinkerProviderType), "nm": attrs.source(), "objcopy_for_shared_library_interface": attrs.source(), "objcopy_recalculates_layout": attrs.bool(default = False), + "objdump": attrs.option(attrs.source(), default = None), "object_file_extension": attrs.string(default = ""), "pic_type_for_shared_linking": attrs.enum(PicType, default = "pic"), + "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "private_headers_symlinks_enabled": attrs.bool(default = False), "public_headers_symlinks_enabled": attrs.bool(default = False), "ranlib": attrs.option(attrs.source(), default = None), "ranlib_flags": attrs.list(attrs.arg(), default = []), + "rc_compiler": attrs.option(attrs.source(), default = None), + "rc_compiler_flags": attrs.list(attrs.arg(), default = []), + "rc_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), + "rc_preprocessor_flags": attrs.list(attrs.arg(), default = []), 
"requires_archives": attrs.bool(default = False), "shared_dep_runtime_ld_flags": attrs.list(attrs.arg(), default = []), "shared_library_extension": attrs.string(default = ""), @@ -1055,7 +1112,7 @@ prebuilt_cxx_library = prelude_rule( cxx_common.exported_platform_preprocessor_flags_arg() | cxx_common.exported_linker_flags_arg() | cxx_common.force_static(force_static_type = attrs.bool(default = False)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | cxx_common.exported_deps_arg() | cxx_common.exported_platform_deps_arg() | cxx_common.supports_merged_linking() | @@ -1070,6 +1127,7 @@ prebuilt_cxx_library = prelude_rule( "exported_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), "exported_post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "exported_post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), + "extract_soname": attrs.bool(default = False), "frameworks": attrs.list(attrs.string(), default = []), "import_lib": attrs.option(attrs.source(), default = None), "include_in_android_merge_map_output": attrs.bool(default = True), @@ -1092,7 +1150,8 @@ prebuilt_cxx_library = prelude_rule( "versioned_soname": attrs.option(attrs.versioned(attrs.string()), default = None), "versioned_static_lib": attrs.option(attrs.versioned(attrs.source()), default = None), "versioned_static_pic_lib": attrs.option(attrs.versioned(attrs.source()), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -1239,6 +1298,7 @@ cxx_rules = struct( cxx_genrule = cxx_genrule, cxx_library = cxx_library, cxx_precompiled_header = cxx_precompiled_header, + windows_resource = windows_resource, cxx_test = cxx_test, cxx_toolchain = cxx_toolchain, prebuilt_cxx_library = prebuilt_cxx_library, diff --git a/prelude/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl index f568793789..e62cc5ba4d 100644 --- a/prelude/decls/erlang_rules.bzl +++ b/prelude/decls/erlang_rules.bzl @@ -88,7 +88,7 @@ rules_attributes = { This attribute controls if the output of the builds also create edoc chunks. """), "env": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None, doc = """ - The `env` field allows to set the application env variables. The key value pairs will materialise in tha applications `.app` + The `env` field allows to set the application env variables. The key value pairs will materialise in the application's `.app` file and can then be accessed by [`application:get_env/2`](https://www.erlang.org/doc/man/application.html#get_env-2). """), "erl_opts": attrs.option(attrs.list(attrs.string()), default = None, doc = """ @@ -121,6 +121,9 @@ rules_attributes = { [application_opt()](https://www.erlang.org/doc/man/application.html#load-2). The key-value pair will be stored in the applications `.app` file and can be accessed by `file:consult/1`. """), + "include_src": attrs.bool(default = True, doc = """ + This field controls if the generated application directory contains a src/ directory with the Erlang code or not. + """), "includes": attrs.list(attrs.source(), default = [], doc = """ The public header files accessible via `-include_lib("appname/include/header.hrl")` from other erlang files. 
"""), @@ -129,6 +132,11 @@ rules_attributes = { difference, that the module name, and the individual start arguments need to be given as the string representation of the corresponding Erlang terms. """), + "peek_private_includes": attrs.bool(default = False, doc = """ + This attribute allows you to use the private includes of the application's dependencies. This can be useful for + test applications, to create shared abstractions for tests. It's not advisable to use this attribute for prodution + code. All private includes transitively must be non-ambiguous. + """), "resources": attrs.list(attrs.dep(), default = [], doc = """ The `resources` field specifies targets whose default output are placed in the applications `priv/` directory. For regular files this field is typically combined with `export_file`, `filegroup`, or similar targets. However, it @@ -152,6 +160,12 @@ rules_attributes = { "_toolchain": attrs.toolchain_dep(default = "toolchains//:erlang-default"), }, "erlang_escript": { + "bundled": attrs.bool(default = True, doc = """ + Setting bundled to `True` does generate a folder structure and escript trampoline instead of an archive. + """), + "configs": attrs.list(attrs.dep(), default = [], doc = """ + This attribute allows to set config files for the escript. The dependencies that are typically used + here are `export_file` targets."""), "deps": attrs.list(attrs.dep(), doc = """ List of Erlang applications that are bundled in the escript. This includes all transitive dependencies as well. """), @@ -165,7 +179,7 @@ rules_attributes = { `resources` field, the `priv` folders files can then be accessed by `escript"extract/2`. """), "main_module": attrs.option(attrs.string(), default = None, doc = """ - Overrides the default main module. Instead of defering the main module from the scripts filename, the specified module + Overrides the default main module. Instead of deferring the main module from the scripts filename, the specified module is used. That module needs to export a `main/1` function that is called as entry point. """), "resources": attrs.list(attrs.dep(), default = [], doc = """ @@ -198,11 +212,11 @@ rules_attributes = { [`permanent`](https://www.erlang.org/doc/man/application.html#type-restart_type). """), "include_erts": attrs.bool(default = False, doc = """ - This field controls wether OTP applications and the Erlang runtime system should be included as part of the release. + This field controls whether OTP applications and the Erlang runtime system should be included as part of the release. Please note, that at the moment the erts folder is just `erts/`. """), "multi_toolchain": attrs.option(attrs.list(attrs.dep()), default = None, doc = """ - This field controls wether the release should be built with a single toolchain, or multiple toolchains. In the + This field controls whether the release should be built with a single toolchain, or multiple toolchains. In the latter case, all output paths are prefixed with the toolchain name. """), "overlays": attrs.dict(key = attrs.string(), value = attrs.list(attrs.dep()), default = {}, doc = """ @@ -219,6 +233,9 @@ rules_attributes = { "_toolchain": attrs.toolchain_dep(default = "toolchains//:erlang-default"), }, "erlang_test": { + "common_app_env": attrs.dict(key = attrs.string(), value = attrs.string(), default = {}, doc = """ + Application environment variables for the `common` application. 
+ """), "config_files": attrs.list(attrs.dep(), default = [], doc = """ Will specify what config files the erlang beam machine running test with should load, for reference look at [OTP documentation](https://www.erlang.org/doc/man/config.html). These ones should consist of default_output of @@ -239,6 +256,10 @@ rules_attributes = { "extra_ct_hooks": attrs.list(attrs.string(), default = [], doc = """ List of additional Common Test hooks. The strings are interpreted as Erlang terms. """), + "extra_erl_flags": attrs.list(attrs.string(), default = [], doc = """ + List of additional command line arguments given to the erl command invocation. These + arguments are added to the front of the argument list. + """), "preamble": attrs.string(default = read_root_config("erlang", "erlang_test_preamble", "test:info(),test:ensure_initialized(),test:start_shell()."), doc = """ """), "property_tests": attrs.list(attrs.dep(), default = [], doc = """ @@ -254,13 +275,13 @@ rules_attributes = { "suite": attrs.source(doc = """ The source file for the test suite. If you are using the macro, you should use the `suites` attribute instead. - The suites attribtue specify which erlang_test targets should be generated. For each suite "path_to_suite/suite_SUITE.erl" an + The suites attribute specifies which erlang_test targets should be generated. For each suite "path_to_suite/suite_SUITE.erl" an implicit 'erlang_test' target suite_SUITE will be generated. """), - "_artifact_annotation_mfa": attrs.string(), + "_artifact_annotation_mfa": attrs.string(default = "artifact_annotations:default_annotation/1"), "_cli_lib": attrs.dep(default = "prelude//erlang/common_test/test_cli_lib:test_cli_lib"), "_ct_opts": attrs.string(default = read_root_config("erlang", "erlang_test_ct_opts", "")), - "_providers": attrs.string(), + "_providers": attrs.string(default = ""), "_test_binary": attrs.dep(default = "prelude//erlang/common_test/test_binary:escript"), "_test_binary_lib": attrs.dep(default = "prelude//erlang/common_test/test_binary:test_binary"), "_toolchain": attrs.toolchain_dep(default = "toolchains//:erlang-default"), @@ -510,7 +531,7 @@ erlang_test = prelude_rule( For each suite `_SUITE.erl`, if a data_dir `_SUITE_data` is present along the suite, (as per [the data_dir naming scheme for ct](https://www.erlang.org/doc/apps/common_test/write_test_chapter.html#data-and-private-directories)), - it will automatically adds the coresponding resource target to the generated test target of the suite. + it will automatically adds the corresponding resource target to the generated test target of the suite. Resources will be placed in the [Data directory (data_dir)](https://www.erlang.org/doc/apps/common_test/write_test_chapter.html#data_priv_dir) of each of the suite. @@ -521,16 +542,12 @@ erlang_test = prelude_rule( The `erlang_tests` macro forwards all attributes to the `erlang_test`. It defines some attributes that control how the targets get generated: - - `use_default_configs` (bool): Parameter that controls if the config files specified by the global config variable - `erlang.erlang_tests_default_config` should be used, default to True. - - `use_default_deps` (bool): Parameter that controls if the dependencies specified by the global config variable - `erlang.erlang_tests_default_apps` should be pulled, default to True. - `srcs` ([source]): Set of files that the suites might depend on and that are not part of any specific application. 
A "meta" application having those files as sources will automatically be created, and included in the dependencies of the tests. - Ene can call - - `buck2 build //my_app:test_SUITE` to compile the test files together with its depedencies. + One can call + - `buck2 build //my_app:test_SUITE` to compile the test files together with its dependencies. - `buck2 test //my_app:other_test_SUITE` to run the test. - `buck2 run //my_app:other_test_SUITE` to open an interactive test shell, where tests can be run iteratively. diff --git a/prelude/decls/genrule_common.bzl b/prelude/decls/genrule_common.bzl index 0538c1ec46..53a98a666b 100644 --- a/prelude/decls/genrule_common.bzl +++ b/prelude/decls/genrule_common.bzl @@ -49,13 +49,13 @@ def _cmd_arg(): A string expansion of the `srcs` argument delimited by the `environment_expansion_separator` argument where each element of `srcs` will be translated - into an absolute path. + into a relative path. `${SRCDIR}` - The absolute path to a directory to which sources are copied + The relative path to a directory to which sources are copied prior to running the command. @@ -72,7 +72,9 @@ def _cmd_arg(): command determine whether this filepath is treated as a file or a directory. If the filepath is a directory, then the shell command needs to create it if not using named outputs. Otherwise, it will - be automatically created. + be automatically created. All outputs (directories and files) must + be readable, writable, and (in the case of directories) executable + by the current user. The file or directory specified by this variable must always @@ -86,71 +88,6 @@ def _cmd_arg(): A temporary directory which can be used for intermediate results and will not be bundled into the output. - - - ##### String parameter macros - - It is also possible to expand references to other rules within the - `cmd`, using builtin `string parameter macros` - . - All build rules expanded in the command are automatically considered - to be dependencies of the `genrule()`. - - - Note that the paths returned by these macros are *absolute* paths. You should convert these paths to be relative paths before - embedding them in, for example, a shell script or batch file. Using - relative paths ensures that your builds are *hermetic*, that - is, they are reproducible across different machine environments. - - - Additionally, if you embed these paths in a shell script, you should - execute that script using the `sh\\_binary()`rule and include - the targets for these paths in the `resources` argument of - that `sh_binary` rule. These are the same targets that you - pass to the string parameter macros. - - - `$(classpath //path/to:target)` - - - Expands to the transitive classpath of the specified build - rule, provided that the rule has a Java classpath. If the rule - does not have (or contribute to) a classpath, then an - exception is thrown and the build breaks. - - - `$(exe //path/to:target)` - - - Expands a build rule that results in an executable to the - commands necessary to run that executable. For example, - a `java_binary()` might expand to a call - to `java -jar path/to/target.jar` . Files that are - executable (perhaps generated by a `genrule()`) - are also expanded. If the build rule does not generate an - executable output, then an exception is thrown and the build - breaks. - - - `$(location //path/to:target)` - - - Expands to the location of the output of the specified build - rule. 
This means that you can refer to the output without
- needing to be aware of how Buck is storing data on the disk
- mid-build.
-
-
- `$(maven_coords //path/to:target)`
-
-
- Expands to the Maven coordinates for the specified build rule.
- This allows you to access the Maven coordinates for
- Maven-aware build rules. The format of the expansion is:
-
- ```
-
- ```
"""),
}
diff --git a/prelude/decls/go_common.bzl b/prelude/decls/go_common.bzl
index 845f3861bc..91d3cd1d56 100644
--- a/prelude/decls/go_common.bzl
+++ b/prelude/decls/go_common.bzl
@@ -30,6 +30,16 @@ def _srcs_arg():
"""),
}
+def _package_root_arg():
+ return {
+ "package_root": attrs.option(attrs.string(), default = None, doc = """
+ Sets the Go package directory (relative to the BUCK file).
+ By default (or if None is passed) the package_root is detected automatically.
+ Use an empty string if the Go package is on the same level as the BUCK file, otherwise the subdirectory name.
+ Example: for srcs = ["foo/bar.go"], package_root = "foo"
+"""),
+ }
+
def _link_style_arg():
return {
"link_style": attrs.option(attrs.enum(LinkableDepType), default = None, doc = """
@@ -124,9 +134,39 @@ def _embedcfg_arg():
"""),
}
+def _cgo_enabled_arg():
+ return {
+ "cgo_enabled": attrs.option(attrs.bool(), default = None, doc = """
+ Experimental: Analog of the CGO_ENABLED environment variable.
+ None will be converted to True if a cxx_toolchain is available for the current configuration, otherwise False.
+"""),
+ }
+
+def _race_arg():
+ return {
+ "race": attrs.bool(default = False, doc = """
+ If true, enable data race detection.
+"""),
+ }
+
+def _asan_arg():
+ return {
+ "asan": attrs.bool(default = False, doc = """
+ If true, enable ASAN.
+"""),
+ }
+
+def _tags_arg():
+ return {
+ "tags": attrs.list(attrs.string(), default = [], doc = """
+ Build tags to apply to this target and its dependencies.
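Editorial aside: two sketches tying the changes above together. The first shows a genrule `cmd` relying on `${SRCS}` and `${OUT}`, which per the change above now expand to relative paths; the second exercises the new `go_common` attributes (`package_root`, `cgo_enabled`, `race`, `tags`). All names and flag values are hypothetical.

```
genrule(
    name = "manifest",
    srcs = ["data/a.txt", "data/b.txt"],
    out = "manifest.txt",
    # ${SRCS} and ${OUT} expand to relative paths, keeping the command hermetic
    cmd = "cat ${SRCS} > ${OUT}",
)

go_binary(
    name = "server",
    srcs = ["cmd/server/main.go"],
    package_root = "cmd/server",  # explicit, instead of auto-detection
    cgo_enabled = False,
    race = True,  # build with the data race detector
    tags = ["netgo"],  # build tags for this target and its deps
)
```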
+"""), + } + go_common = struct( deps_arg = _deps_arg, srcs_arg = _srcs_arg, + package_root_arg = _package_root_arg, link_style_arg = _link_style_arg, link_mode_arg = _link_mode_arg, cgo_compiler_flags_arg = _cgo_compiler_flags_arg, @@ -136,4 +176,8 @@ go_common = struct( linker_flags_arg = _linker_flags_arg, external_linker_flags_arg = _external_linker_flags_arg, embedcfg_arg = _embedcfg_arg, + cgo_enabled_arg = _cgo_enabled_arg, + race_arg = _race_arg, + asan_arg = _asan_arg, + tags_arg = _tags_arg, ) diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 2ec5f5ebdd..8e099e75ef 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -10,10 +10,12 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "Traversal", "buck", "prelude_rule") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":go_common.bzl", "go_common") load(":native_common.bzl", "native_common") +load(":re_test_common.bzl", "re_test_common") BuildMode = ["executable", "c_shared", "c_archive"] @@ -74,6 +76,7 @@ cgo_library = prelude_rule( cxx_common.platform_preprocessor_flags_arg() | go_common.cgo_compiler_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | cxx_common.compiler_flags_arg() | cxx_common.platform_compiler_flags_arg() | cxx_common.linker_extra_outputs_arg() | @@ -113,7 +116,7 @@ cgo_library = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "platform_headers": attrs.list(attrs.tuple(attrs.regex(), attrs.named_set(attrs.source(), sorted = True)), default = []), "platform_srcs": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.one_of(attrs.source(), attrs.tuple(attrs.source(), attrs.list(attrs.arg()))), sorted = True)), default = []), @@ -125,7 +128,8 @@ cgo_library = prelude_rule( "thin_lto": attrs.bool(default = False), "version_universe": attrs.option(attrs.string(), default = None), "weak_framework_names": attrs.list(attrs.string(), default = []), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -182,6 +186,11 @@ go_binary = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | + go_common.cgo_enabled_arg() | + go_common.race_arg() | + go_common.asan_arg() | + go_common.tags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. 
You can access these in your @@ -266,6 +275,11 @@ go_exported_library = prelude_rule( go_common.assembler_flags_arg() | go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | + go_common.package_root_arg() | + go_common.cgo_enabled_arg() | + go_common.race_arg() | + go_common.asan_arg() | + go_common.tags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. You can access these in your @@ -315,6 +329,7 @@ go_library = prelude_rule( go_common.compiler_flags_arg() | go_common.assembler_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | { "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -415,6 +430,11 @@ go_test = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | + go_common.cgo_enabled_arg() | + go_common.race_arg() | + go_common.asan_arg() | + go_common.tags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files that are symlinked into the working directory of the @@ -437,7 +457,8 @@ go_test = prelude_rule( "platform": attrs.option(attrs.string(), default = None), "runner": attrs.option(attrs.dep(), default = None), "specs": attrs.option(attrs.arg(json = True), default = None), - } + } | + re_test_common.test_args() ), ) diff --git a/prelude/decls/groovy_rules.bzl b/prelude/decls/groovy_rules.bzl index 0e5aef98b3..91f71860cc 100644 --- a/prelude/decls/groovy_rules.bzl +++ b/prelude/decls/groovy_rules.bzl @@ -131,6 +131,7 @@ groovy_library = prelude_rule( "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), "source_only_abi_deps": attrs.list(attrs.dep(), default = []), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -188,6 +189,7 @@ groovy_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/halide_rules.bzl b/prelude/decls/halide_rules.bzl index 52db23f108..7ddda0ec7c 100644 --- a/prelude/decls/halide_rules.bzl +++ b/prelude/decls/halide_rules.bzl @@ -10,7 +10,8 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Traversal", "prelude_rule") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "prelude_rule") load(":cxx_common.bzl", "cxx_common") halide_library = prelude_rule( @@ -98,7 +99,7 @@ halide_library = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, 
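Editorial aside: `LINK_GROUP_MAP_ATTR` replaces the inline tuple type deleted in several of the rules above. Judging from that old type, the literal list form of a `link_group_map` looks roughly like the sketch below (group name, then (root target, traversal, optional label filter) entries); the authoritative shapes are defined in `prelude//cxx:link_groups_types.bzl`, and the targets here are invented.

```
link_group_map = [
    # ("group name", [(root target, traversal, optional label filter)])
    ("runtime", [("//libs:base", "tree", None)]),
    ("binary", [("//app:main", "node", None)]),
]
```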
"link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), diff --git a/prelude/decls/haskell_rules.bzl b/prelude/decls/haskell_rules.bzl index e6ddcdd469..22aff27d81 100644 --- a/prelude/decls/haskell_rules.bzl +++ b/prelude/decls/haskell_rules.bzl @@ -10,7 +10,8 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "LinkableDepType", "Linkage", "buck", "prelude_rule") +load("@prelude//linking:types.bzl", "Linkage") +load(":common.bzl", "LinkableDepType", "buck", "prelude_rule") load(":haskell_common.bzl", "haskell_common") load(":native_common.bzl", "native_common") @@ -106,10 +107,11 @@ haskell_haddock = prelude_rule( "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "deps": attrs.list(attrs.dep(), default = []), "deps_query": attrs.option(attrs.query(), default = None), - "haddock_flags": attrs.list(attrs.string(), default = []), + "haddock_flags": attrs.list(attrs.arg(), default = []), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "platform": attrs.option(attrs.string(), default = None), + "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), } ), ) @@ -165,14 +167,14 @@ haskell_library = prelude_rule( haskell_common.deps_arg() | buck.platform_deps_arg() | native_common.link_whole(link_whole_type = attrs.bool(default = False)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values())) | { "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "enable_profiling": attrs.bool(default = False), "ghci_platform_preload_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "ghci_preload_deps": attrs.set(attrs.dep(), sorted = True, default = []), - "haddock_flags": attrs.list(attrs.string(), default = []), + "haddock_flags": attrs.list(attrs.arg(), default = []), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "linker_flags": attrs.list(attrs.arg(), default = []), diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index a42dbfd9bf..9e9e73bd2e 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -10,8 +10,11 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "Linkage", "Traversal", "buck", "prelude_rule") +load("@prelude//apple:apple_common.bzl", "apple_common") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//linking:types.bzl", "Linkage") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":native_common.bzl", "native_common") @@ -83,7 +86,7 @@ apple_asset_catalog = prelude_rule( apple_binary = prelude_rule( 
name = "apple_binary", docs = """ - An `apple_binary()` rule builds a native executable\342\200\224such as an iOS or OSX app\342\200\224from + An `apple_binary()` rule builds a native executable - such as an iOS or OSX app - from the supplied set of Objective-C/C++ source files and dependencies. It is similar to a `cxx\\_binary()`rule with which it shares many attributes. In addition to those common attributes, `apple_binary()` has a some additional attributes @@ -155,7 +158,6 @@ apple_binary = prelude_rule( { "bridging_header": attrs.option(attrs.source(), default = None), "can_be_asset": attrs.option(attrs.bool(), default = None), - "configs": attrs.dict(key = attrs.string(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "contacts": attrs.list(attrs.string(), default = []), "cxx_runtime_type": attrs.option(attrs.enum(CxxRuntimeType), default = None), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -193,7 +195,7 @@ apple_binary = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_whole": attrs.option(attrs.bool(), default = None), "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), @@ -204,7 +206,7 @@ apple_binary = prelude_rule( "post_linker_flags": attrs.list(attrs.arg(), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "precompiled_header": attrs.option(attrs.source(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), default = None), "prefix_header": attrs.option(attrs.source(), default = None), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), @@ -223,9 +225,8 @@ apple_binary = prelude_rule( "uses_cxx_explicit_modules": attrs.bool(default = False), "uses_explicit_modules": attrs.bool(default = False), "uses_modules": attrs.bool(default = False), - "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), - "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -342,10 +343,12 @@ apple_bundle = prelude_rule( } | apple_common.info_plist_arg() | apple_common.info_plist_substitutions_arg() | + apple_common.privacy_manifest_arg() | { "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}, doc = """ A dict holding parameters for asset catalogs compiler (actool). 
Its options include: - * `notices` (defaults to `True`) + + * `notices` (defaults to `True`) * `warnings` (defaults to `True`) * `errors` (defaults to `True`) * `compress_pngs` (defaults to `True`) @@ -367,7 +370,7 @@ apple_bundle = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "resource_group_map": attrs.option(RESOURCE_GROUP_MAP_ATTR, default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), @@ -438,7 +441,7 @@ apple_library = prelude_rule( cxx_common.exported_linker_flags_arg() | cxx_common.exported_platform_linker_flags_arg() | apple_common.target_sdk_version() | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | native_common.link_style() | native_common.link_whole(link_whole_type = attrs.option(attrs.bool(), default = None)) | cxx_common.reexport_all_header_dependencies_arg() | @@ -448,7 +451,6 @@ apple_library = prelude_rule( { "bridging_header": attrs.option(attrs.source(), default = None), "can_be_asset": attrs.option(attrs.bool(), default = None), - "configs": attrs.dict(key = attrs.string(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "contacts": attrs.list(attrs.string(), default = []), "cxx_runtime_type": attrs.option(attrs.enum(CxxRuntimeType), default = None), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -483,7 +485,7 @@ apple_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), "module_requires_cxx": attrs.bool(default = False), @@ -495,6 +497,7 @@ apple_library = prelude_rule( "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "precompiled_header": attrs.option(attrs.source(), default = None), "prefix_header": attrs.option(attrs.source(), default = None), + "public_framework_headers": attrs.named_set(attrs.source(), sorted = True, default = []), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "raw_headers": attrs.set(attrs.source(), sorted = True, default = []), @@ -511,9 +514,8 @@ apple_library = prelude_rule( "uses_cxx_explicit_modules": attrs.bool(default = False), "uses_explicit_modules": attrs.bool(default = False), "uses_modules": attrs.bool(default = False), - "xcode_private_headers_symlinks": 
attrs.option(attrs.bool(), default = None), - "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -696,7 +698,6 @@ apple_test = prelude_rule( "can_be_asset": attrs.option(attrs.bool(), default = None), "codesign_flags": attrs.list(attrs.string(), default = []), "codesign_identity": attrs.option(attrs.string(), default = None), - "configs": attrs.dict(key = attrs.string(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "contacts": attrs.list(attrs.string(), default = []), "cxx_runtime_type": attrs.option(attrs.enum(CxxRuntimeType), default = None), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -739,7 +740,7 @@ apple_test = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_whole": attrs.option(attrs.bool(), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "modular": attrs.bool(default = False), @@ -752,7 +753,7 @@ apple_test = prelude_rule( "post_linker_flags": attrs.list(attrs.arg(), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "precompiled_header": attrs.option(attrs.source(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), default = None), "prefix_header": attrs.option(attrs.source(), default = None), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), @@ -778,10 +779,9 @@ apple_test = prelude_rule( "uses_cxx_explicit_modules": attrs.bool(default = False), "uses_explicit_modules": attrs.bool(default = False), "uses_modules": attrs.bool(default = False), - "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), - "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -810,6 +810,7 @@ apple_toolchain = prelude_rule( "libtool": attrs.source(), "licenses": attrs.list(attrs.source(), default = []), "lipo": attrs.source(), + "mapc": attrs.option(attrs.source(), default = None), "min_version": attrs.string(default = ""), "momc": attrs.source(), "platform_path": attrs.source(), @@ -900,7 +901,7 @@ prebuilt_apple_framework = prelude_rule( attrs = ( # @unsorted-dict-items { - "preferred_linkage": attrs.enum(Linkage, doc = """ + "preferred_linkage": attrs.enum(Linkage.values(), doc = """ How to link to a binary: use `dynamic` for a dynamic framework, and `static` for old universal static frameworks manually lipo-ed together. 
`dynamic` will @@ -962,7 +963,7 @@ swift_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "module_name": attrs.option(attrs.string(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), default = None), "sdk_modules": attrs.list(attrs.string(), default = []), "serialize_debugging_options": attrs.bool(default = False), "soname": attrs.option(attrs.string(), default = None), @@ -1010,81 +1011,6 @@ swift_toolchain = prelude_rule( ), ) -xcode_postbuild_script = prelude_rule( - name = "xcode_postbuild_script", - docs = "", - examples = None, - further = None, - attrs = ( - # @unsorted-dict-items - { - "cmd": attrs.string(default = ""), - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "input_file_lists": attrs.list(attrs.string(), default = []), - "inputs": attrs.list(attrs.string(), default = []), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), - "output_file_lists": attrs.list(attrs.string(), default = []), - "outputs": attrs.list(attrs.string(), default = []), - "srcs": attrs.list(attrs.source(), default = []), - } - ), -) - -xcode_prebuild_script = prelude_rule( - name = "xcode_prebuild_script", - docs = "", - examples = None, - further = None, - attrs = ( - # @unsorted-dict-items - { - "cmd": attrs.string(default = ""), - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "input_file_lists": attrs.list(attrs.string(), default = []), - "inputs": attrs.list(attrs.string(), default = []), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), - "output_file_lists": attrs.list(attrs.string(), default = []), - "outputs": attrs.list(attrs.string(), default = []), - "srcs": attrs.list(attrs.source(), default = []), - } - ), -) - -xcode_workspace_config = prelude_rule( - name = "xcode_workspace_config", - docs = "", - examples = None, - further = None, - attrs = ( - # @unsorted-dict-items - { - "action_config_names": attrs.dict(key = attrs.enum(SchemeActionType), value = attrs.string(), sorted = False, default = {}), - "additional_scheme_actions": attrs.option(attrs.dict(key = attrs.enum(SchemeActionType), value = attrs.dict(key = attrs.enum(AdditionalActions), value = attrs.list(attrs.string()), sorted = False), sorted = False), default = None), - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "environment_variables": attrs.option(attrs.dict(key = attrs.enum(SchemeActionType), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False), default = None), - "explicit_runnable_path": attrs.option(attrs.string(), default = None), - "extra_schemes": attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}), - "extra_shallow_targets": attrs.list(attrs.dep(), default = []), - "extra_targets": attrs.list(attrs.dep(), default = []), - "extra_tests": attrs.list(attrs.dep(), default = []), - "is_remote_runnable": attrs.option(attrs.bool(), default = None), - "labels": attrs.list(attrs.string(), default = []), - "launch_style": attrs.option(attrs.enum(LaunchStyle), 
default = None), - "licenses": attrs.list(attrs.source(), default = []), - "notification_payload_file": attrs.option(attrs.string(), default = None), - "src_target": attrs.option(attrs.dep(), default = None), - "was_created_for_app_extension": attrs.option(attrs.bool(), default = None), - "watch_interface": attrs.option(attrs.enum(WatchInterface), default = None), - "workspace_name": attrs.option(attrs.string(), default = None), - } - ), -) - ios_rules = struct( apple_asset_catalog = apple_asset_catalog, apple_binary = apple_binary, @@ -1100,7 +1026,4 @@ ios_rules = struct( scene_kit_assets = scene_kit_assets, swift_library = swift_library, swift_toolchain = swift_toolchain, - xcode_postbuild_script = xcode_postbuild_script, - xcode_prebuild_script = xcode_prebuild_script, - xcode_workspace_config = xcode_workspace_config, ) diff --git a/prelude/decls/java_rules.bzl b/prelude/decls/java_rules.bzl index abc89185e5..53379d9252 100644 --- a/prelude/decls/java_rules.bzl +++ b/prelude/decls/java_rules.bzl @@ -281,6 +281,7 @@ java_library = prelude_rule( "annotation_processors": attrs.list(attrs.string(), default = []), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), @@ -290,6 +291,7 @@ java_library = prelude_rule( "proguard_config": attrs.option(attrs.source(), default = None), "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -401,6 +403,7 @@ java_test = prelude_rule( "exported_deps": attrs.list(attrs.dep(), default = []), "exported_provided_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "java_version": attrs.option(attrs.string(), default = None), "java": attrs.option(attrs.dep(), default = None), "javac": attrs.option(attrs.source(), default = None), @@ -423,6 +426,7 @@ java_test = prelude_rule( "test_case_timeout_ms": attrs.option(attrs.int(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -467,6 +471,7 @@ java_test_runner = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/kotlin_rules.bzl b/prelude/decls/kotlin_rules.bzl index b8dce5a80f..4a7485bfb9 100644 --- a/prelude/decls/kotlin_rules.bzl +++ b/prelude/decls/kotlin_rules.bzl @@ -178,9 +178,9 @@ kotlin_library = prelude_rule( "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "extra_arguments": attrs.list(attrs.string(), default = []), - 
"extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "java_version": attrs.option(attrs.string(), default = None), "javac": attrs.option(attrs.source(), default = None), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), @@ -195,6 +195,7 @@ kotlin_library = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "target": attrs.option(attrs.string(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -273,7 +274,6 @@ kotlin_test = prelude_rule( "exported_provided_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), "extra_kotlinc_arguments": attrs.list(attrs.string(), default = []), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "friend_paths": attrs.list(attrs.dep(), default = []), "java_version": attrs.option(attrs.string(), default = None), "java": attrs.option(attrs.dep(), default = None), @@ -300,6 +300,7 @@ kotlin_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/native_common.bzl b/prelude/decls/native_common.bzl index 929c75602b..acc501a972 100644 --- a/prelude/decls/native_common.bzl +++ b/prelude/decls/native_common.bzl @@ -46,10 +46,22 @@ def _link_group_public_deps_label(): """), } +def _soname(): + return { + "soname": attrs.option(attrs.string(), default = None, doc = """ + Sets the soname ("shared object name") of any shared library produced from this rule. + The default value is based on the full rule name. + The macro `$(ext)` will be replaced with a platform-appropriate extension. + An argument can be provided, which is a library version. + For example `soname = 'libfoo.$(ext 2.3)'` will be `libfoo.2.3.dylib` on Mac and `libfoo.so.2.3` on Linux. +"""), + } + native_common = struct( link_group_deps = _link_group_deps, link_group_public_deps_label = _link_group_public_deps_label, link_style = _link_style, link_whole = _link_whole, preferred_linkage = _preferred_linkage, + soname = _soname, ) diff --git a/prelude/decls/python_rules.bzl b/prelude/decls/python_rules.bzl index e84a135a79..53721a538b 100644 --- a/prelude/decls/python_rules.bzl +++ b/prelude/decls/python_rules.bzl @@ -12,6 +12,18 @@ load(":python_common.bzl", "python_common") NativeLinkStrategy = ["separate", "merged"] +def _typing_arg(): + return { + "py_version_for_type_checking": attrs.option(attrs.string(), default = None, doc = """ + This option will force the type checker to perform checking under a specific version of Python interpreter. +"""), + # NOTE(grievejia): Setting default to True here may have non-trivial impact on build memory + # usage (see S395002) + "typing": attrs.bool(default = False, doc = """ + Determines whether to perform type checking on the given target. Default is False. 
+"""), + } + cxx_python_extension = prelude_rule( name = "cxx_python_extension", docs = """ @@ -270,7 +282,8 @@ python_binary = prelude_rule( "version_universe": attrs.option(attrs.string(), default = None), "zip_safe": attrs.option(attrs.bool(), default = None), } | - buck.allow_cache_upload_arg() + buck.allow_cache_upload_arg() | + _typing_arg() ), ) @@ -339,7 +352,8 @@ python_library = prelude_rule( "versioned_resources": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "versioned_srcs": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "zip_safe": attrs.option(attrs.bool(), default = None), - } + } | + _typing_arg() ), ) @@ -449,7 +463,8 @@ python_test = prelude_rule( "versioned_resources": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "versioned_srcs": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "zip_safe": attrs.option(attrs.bool(), default = None), - } + } | + _typing_arg() ), ) diff --git a/prelude/decls/re_test_common.bzl b/prelude/decls/re_test_common.bzl index 6875f4fa44..cfc5dee7b3 100644 --- a/prelude/decls/re_test_common.bzl +++ b/prelude/decls/re_test_common.bzl @@ -14,8 +14,11 @@ def _opts_for_tests_arg() -> Attr: # The expected shape of re_opts is: # { # "capabilities": Dict | None + # "listing_capabilities": Dict | None # "use_case": str | None # "remote_cache_enabled": bool | None + # "dependencies": list> | [] + # "resource_units": int | None # } return attrs.dict( key = attrs.string(), @@ -28,6 +31,8 @@ def _opts_for_tests_arg() -> Attr: ), attrs.string(), attrs.bool(), + attrs.list(attrs.dict(key = attrs.string(), value = attrs.string()), default = []), + attrs.int(), ), # TODO(cjhopman): I think this default does nothing, it should be deleted default = None, @@ -37,7 +42,8 @@ def _opts_for_tests_arg() -> Attr: def _action_key_provider_arg() -> Attr: if is_full_meta_repo(): - return attrs.dep(providers = [BuildModeInfo], default = "fbcode//buck2/platform/build_mode:build_mode") + default_build_mode = read_root_config("fb", "remote_execution_test_build_mode", "fbcode//buck2/platform/build_mode:build_mode") + return attrs.dep(providers = [BuildModeInfo], default = default_build_mode) else: return attrs.option(attrs.dep(providers = [BuildModeInfo]), default = None) diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 331cf99484..097758d360 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -5,70 +5,20 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//rust:clippy_configuration.bzl", "ClippyConfiguration") load("@prelude//rust:link_info.bzl", "RustProcMacroPlugin") load("@prelude//rust:rust_binary.bzl", "rust_binary_impl", "rust_test_impl") -load("@prelude//rust:rust_library.bzl", "prebuilt_rust_library_impl", "rust_library_impl") -load(":common.bzl", "LinkableDepType", "Linkage", "buck", "prelude_rule") +load("@prelude//rust:rust_library.bzl", "rust_library_impl") +load(":common.bzl", "buck", "prelude_rule") load(":native_common.bzl", "native_common") load(":re_test_common.bzl", "re_test_common") load(":rust_common.bzl", "rust_common", "rust_target_dep") -prebuilt_rust_library = prelude_rule( - name = "prebuilt_rust_library", - impl = prebuilt_rust_library_impl, - docs = """ - A prebuilt\\_rust\\_library() specifies a pre-built Rust crate, and any dependencies - it may have on other crates (typically also prebuilt). - - - Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. - """, - examples = """ - ``` - - prebuilt_rust_library( - name = 'dailygreet', - rlib = 'libdailygreet.rlib', - deps = [ - ':jinsy', - ], - ) - - prebuilt_rust_library( - name = 'jinsy', - rlib = 'libarbiter-6337e9cb899bd295.rlib', - ) - - ``` - """, - further = None, - attrs = ( - # @unsorted-dict-items - { - "rlib": attrs.source(doc = """ - Path to the precompiled Rust crate - typically of the form 'libfoo.rlib', or - 'libfoo-abc123def456.rlib' if it has symbol versioning metadata. - """), - } | - rust_common.crate(crate_type = attrs.string(default = "")) | - rust_common.deps_arg(is_binary = False) | - { - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), - "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), - "proc_macro": attrs.bool(default = False), - } | - rust_common.cxx_toolchain_arg() | - rust_common.rust_toolchain_arg() - ), - uses_plugins = [RustProcMacroPlugin], -) - def _rust_common_attributes(is_binary: bool): return { + "clippy_configuration": attrs.option(attrs.dep(providers = [ClippyConfiguration]), default = None), "contacts": attrs.list(attrs.string(), default = []), "coverage": attrs.bool(default = False), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -100,8 +50,9 @@ _RUST_EXECUTABLE_ATTRIBUTES = { "auto_link_groups": attrs.bool(default = True), # TODO: enable distributed thinlto "enable_distributed_thinlto": attrs.bool(default = False), - "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": link_group_map_attr(), + # Required by the rules but not supported, since Rust is auto-link groups only + "link_group": attrs.default_only(attrs.option(attrs.string(), default = None)), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "rpath": attrs.bool(default = False, doc = """ Set the "rpath" in the executable when using a shared link style. @@ -119,9 +70,7 @@ rust_binary = prelude_rule( If you invoke a build with the `check` flavor, then Buck will invoke rustc to check the code (typecheck, produce warnings, etc), but won't generate an executable code. 
When applied to binaries it produces no output; for libraries it produces metadata for - consumers of the library. When building with `check`, extra compiler flags from - the `rust.rustc_check_flags` are added to the compiler's command line options, - to allow for extra warnings, etc. + consumers of the library. Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. @@ -193,9 +142,7 @@ rust_library = prelude_rule( If you invoke a build with the `check` flavor, then Buck will invoke rustc to check the code (typecheck, produce warnings, etc), but won't generate an executable code. When applied to binaries it produces no output; for libraries it produces metadata for - consumers of the library. When building with `check`, extra compiler flags from - the `rust.rustc_check_flags` are added to the compiler's command line options, - to allow for extra warnings, etc. + consumers of the library. Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. @@ -238,7 +185,9 @@ rust_library = prelude_rule( rust_common.env_arg() | rust_common.crate(crate_type = attrs.option(attrs.string(), default = None)) | rust_common.crate_root() | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage, default = "any")) | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values(), default = "any")) | + native_common.soname() | + native_common.link_style() | _rust_common_attributes(is_binary = False) | { "crate_dynamic": attrs.option(attrs.dep(), default = None), @@ -328,7 +277,6 @@ rust_test = prelude_rule( ) rust_rules = struct( - prebuilt_rust_library = prebuilt_rust_library, rust_binary = rust_binary, rust_library = rust_library, rust_test = rust_test, diff --git a/prelude/decls/scala_rules.bzl b/prelude/decls/scala_rules.bzl index 093ac8e5a2..80c95edda6 100644 --- a/prelude/decls/scala_rules.bzl +++ b/prelude/decls/scala_rules.bzl @@ -46,6 +46,7 @@ scala_library = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -103,6 +104,7 @@ scala_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/toolchains_common.bzl b/prelude/decls/toolchains_common.bzl index 30dc9d475f..aaf4bf77c8 100644 --- a/prelude/decls/toolchains_common.bzl +++ b/prelude/decls/toolchains_common.bzl @@ -9,7 +9,7 @@ load("@prelude//android:android_toolchain.bzl", "AndroidPlatformInfo", "AndroidT load("@prelude//csharp:toolchain.bzl", "CSharpToolchainInfo") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") load("@prelude//go:toolchain.bzl", "GoToolchainInfo") -load("@prelude//haskell:haskell.bzl", "HaskellPlatformInfo", "HaskellToolchainInfo") +load("@prelude//haskell:toolchain.bzl", "HaskellPlatformInfo", "HaskellToolchainInfo") load("@prelude//java:dex_toolchain.bzl", "DexToolchainInfo") load( "@prelude//java:java_toolchain.bzl", @@ -28,14 +28,7 @@ load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") load("@prelude//tests:remote_test_execution_toolchain.bzl", 
"RemoteTestExecutionToolchainInfo") load("@prelude//zip_file:zip_file_toolchain.bzl", "ZipFileToolchainInfo") -def _toolchain(lang: str, providers: list[typing.Any], default_only = True) -> Attr: - toolchain = attrs.toolchain_dep(default = "toolchains//:" + lang, providers = providers) - if default_only: - return attrs.default_only(toolchain) - else: - return toolchain - -def _toolchain_with_override(lang: str, providers: list[typing.Any]) -> Attr: +def _toolchain(lang: str, providers: list[typing.Any]) -> Attr: return attrs.toolchain_dep(default = "toolchains//:" + lang, providers = providers) def _android_toolchain(): @@ -48,7 +41,7 @@ def _cxx_toolchain(): return _toolchain("cxx", [CxxToolchainInfo, CxxPlatformInfo]) def _dex_toolchain(): - return _toolchain_with_override("dex", [DexToolchainInfo]) + return _toolchain("dex", [DexToolchainInfo]) def _go_toolchain(): return _toolchain("go", [GoToolchainInfo]) @@ -57,7 +50,7 @@ def _haskell_toolchain(): return _toolchain("haskell", [HaskellToolchainInfo, HaskellPlatformInfo]) def _java_toolchain(): - return _toolchain_with_override("java", [JavaToolchainInfo, JavaPlatformInfo]) + return _toolchain("java", [JavaToolchainInfo, JavaPlatformInfo]) def _java_for_android_toolchain(): return _toolchain("java_for_android", [JavaToolchainInfo, JavaPlatformInfo]) @@ -69,11 +62,11 @@ def _java_test_toolchain(): return _toolchain("java_test", [JavaTestToolchainInfo]) def _kotlin_toolchain(): - return _toolchain_with_override("kotlin", [KotlinToolchainInfo]) + return _toolchain("kotlin", [KotlinToolchainInfo]) def _prebuilt_jar_toolchain(): # Override is allowed for bootstrapping prebuilt jar toolchains - return _toolchain_with_override("prebuilt_jar", [PrebuiltJarToolchainInfo]) + return _toolchain("prebuilt_jar", [PrebuiltJarToolchainInfo]) def _python_toolchain(): return _toolchain("python", [PythonToolchainInfo, PythonPlatformInfo]) @@ -82,7 +75,7 @@ def _python_bootstrap_toolchain(): return _toolchain("python_bootstrap", [PythonBootstrapToolchainInfo]) def _rust_toolchain(): - return _toolchain("rust", [RustToolchainInfo], default_only = False) + return _toolchain("rust", [RustToolchainInfo]) def _zip_file_toolchain(): return _toolchain("zip_file", [ZipFileToolchainInfo]) diff --git a/prelude/erlang/applications/BUCK.v2 b/prelude/erlang/applications/BUCK.v2 index 1148a21894..b476ddc259 100644 --- a/prelude/erlang/applications/BUCK.v2 +++ b/prelude/erlang/applications/BUCK.v2 @@ -1,3 +1,8 @@ load("@prelude//erlang:erlang_otp_application.bzl", "gen_otp_applications") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() gen_otp_applications() diff --git a/prelude/erlang/common_test/common/BUCK.v2 b/prelude/erlang/common_test/common/BUCK.v2 index 7bd533f7a3..fbd20f2c1a 100644 --- a/prelude/erlang/common_test/common/BUCK.v2 +++ b/prelude/erlang/common_test/common/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "common", srcs = glob([ diff --git a/prelude/erlang/common_test/common/include/buck_ct_records.hrl b/prelude/erlang/common_test/common/include/buck_ct_records.hrl index f73aa93547..1f1ebdd3ef 100644 --- a/prelude/erlang/common_test/common/include/buck_ct_records.hrl +++ b/prelude/erlang/common_test/common/include/buck_ct_records.hrl @@ -14,6 +14,8 @@ providers :: [{atom(), [term()]}], ct_opts :: [term()], erl_cmd :: string(), + extra_flags :: [string()], + common_app_env :: 
#{string() => string()}, artifact_annotation_mfa :: artifact_annotations:annotation_function() }). @@ -28,7 +30,9 @@ config_files :: [file:filename_all()], providers :: [{module(), [term()]}], ct_opts :: [term()], + common_app_env :: #{string() => string()}, erl_cmd :: string(), + extra_flags :: [string()], artifact_annotation_mfa :: artifact_annotations:annotation_function() }). diff --git a/prelude/erlang/common_test/common/src/buck_ct_parser.erl b/prelude/erlang/common_test/common/src/buck_ct_parser.erl index ddf6690b73..879ec1fff3 100644 --- a/prelude/erlang/common_test/common/src/buck_ct_parser.erl +++ b/prelude/erlang/common_test/common/src/buck_ct_parser.erl @@ -22,6 +22,19 @@ parse_str("") -> []; parse_str(StrArgs) -> - {ok, Tokens, _} = erl_scan:string(StrArgs ++ "."), - {ok, Term} = erl_parse:parse_term(Tokens), - Term. + try + {ok, Tokens, _} = erl_scan:string(StrArgs ++ "."), + erl_parse:parse_term(Tokens) + of + {ok, Term} -> + Term; + {error, Reason} -> + error(lists:flatten(io_lib:format("Error parsing StrArgs ~p, Reason: ~p", [StrArgs, Reason]))) + catch + E:R:S -> + error( + lists:flatten( + io_lib:format("Error parsing StrArgs ~p, error ~p", [StrArgs, erl_error:format_exception(E, R, S)]) + ) + ) + end. diff --git a/prelude/erlang/common_test/common/src/buck_ct_provider.erl b/prelude/erlang/common_test/common/src/buck_ct_provider.erl index 2d04eb4dd5..3807f11230 100644 --- a/prelude/erlang/common_test/common/src/buck_ct_provider.erl +++ b/prelude/erlang/common_test/common/src/buck_ct_provider.erl @@ -115,7 +115,7 @@ execute_method_on_provider(Method, ProviderName, ProviderState, Args) -> {error, Reason} -> ErrorMsg = unicode:characters_to_list( io_lib:format( - "Method ~p on provider ~p with sate ~p ~n returned with error ~p ~n", [ + "Method ~p on provider ~p with state ~p ~n returned with error ~p ~n", [ Method, ProviderName, ProviderState, Reason ] ) @@ -138,7 +138,7 @@ execute_method_on_provider(Method, ProviderName, ProviderState, Args) -> catch Class:Reason:StackTrace -> ErrorMsg = unicode:characters_to_list( - io_lib:format("Method ~p on provider ~p with sate ~p ~n ~s ~n", [ + io_lib:format("Method ~p on provider ~p with state ~p ~n ~s ~n", [ Method, ProviderName, ProviderState, diff --git a/prelude/erlang/common_test/common/src/ct_error_printer.erl b/prelude/erlang/common_test/common/src/ct_error_printer.erl index 52cf17dbee..2a4b59961b 100644 --- a/prelude/erlang/common_test/common/src/ct_error_printer.erl +++ b/prelude/erlang/common_test/common/src/ct_error_printer.erl @@ -50,6 +50,7 @@ format_error(ErrType, Reason, FormatStackTrace) -> -spec format_reason(term()) -> {ok, unicode:chardata()}. format_reason(Reason) -> + blame_reason(Reason), lists:foldl( fun (_Formatter, Acc = {ok, _Formatted}) -> Acc; @@ -63,6 +64,22 @@ format_reason(Reason) -> ] ). +-spec blame_reason(term()) -> ok. +blame_reason({Reason, StackTrace}) -> + try + case application:get_env(common, exception_blame) of + undefined -> + ok; + {ok, Blame} when is_atom(Blame) -> + Blame:format_blame(Reason, StackTrace) + end + catch + C:E:S -> + io:format("Error: ~ts", [erl_error:format_exception(C, E, S)]) + end; +blame_reason(_Reason) -> + ok. + -spec maybe_custom_format(term()) -> unrecognized_error | {ok, [unicode:chardata()]}. 
maybe_custom_format({{Type, Props}, StackTrace}) when is_atom(Type), is_list(Props) -> try proplists:to_map(Props) of diff --git a/prelude/erlang/common_test/cth_hooks/BUCK.v2 b/prelude/erlang/common_test/cth_hooks/BUCK.v2 index 631e312d51..cca3d1d4d4 100644 --- a/prelude/erlang/common_test/cth_hooks/BUCK.v2 +++ b/prelude/erlang/common_test/cth_hooks/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "cth_hooks", srcs = glob([ diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl index fe7be1216c..ce4479c590 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl @@ -489,7 +489,7 @@ add_result( case Truncated of true -> StdOutLocation = - case os:getenv("SANDCASTLE") of + case is_running_in_sandcastle() of true -> "tab Diagnostics: Artifacts/ct_executor.stdout.txt"; _ -> @@ -497,13 +497,15 @@ add_result( filename:dirname(OutputFile), "ct_executor.stdout.txt" ) end, - Io ++ + [ io_lib:format( - "\n The std_out has been truncated, see ~s for the full suite std_out.", + "The stdout logs have been truncated, see ~s for the full suite stdout. Showing tail below\n", [ StdOutLocation ] - ); + ) + | Io + ]; false -> Io end @@ -672,3 +674,15 @@ modify_shared_state(HookState, Caller, Action) -> {ok, Action(State)} end), NewHookState. + +-spec is_running_in_sandcastle() -> boolean(). +is_running_in_sandcastle() -> + case os:getenv("SANDCASTLE_DIFF_ID") of + [$D | _] -> + true; + _ -> + case os:getenv("SANDCASTLE") of + false -> false; + _ -> true + end + end. diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl index fadc41f384..43915817df 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl @@ -1,3 +1,10 @@ +%%% Copyright (c) Meta Platforms, Inc. and affiliates. +%%% +%%% This source code is licensed under both the MIT license found in the +%%% LICENSE-MIT file in the root directory of this source tree and the Apache +%%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%%% of this source tree. + -module(cth_tpx_role). % -------- What are cth_tpx roles?? --------------- diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl index f719e0807c..90db84fb1f 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl @@ -1,3 +1,10 @@ +%%% Copyright (c) Meta Platforms, Inc. and affiliates. +%%% +%%% This source code is licensed under both the MIT license found in the +%%% LICENSE-MIT file in the root directory of this source tree and the Apache +%%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%%% of this source tree. + -module(cth_tpx_server). -behaviour(gen_server). @@ -31,11 +38,11 @@ start_link(InitialState) -> -spec get(Handle :: handle()) -> CurrentState :: term(). get(Handle) -> - gen_server:call(Handle, get). + gen_server:call(Handle, get, 6000). -spec modify(Handle :: handle(), Fun :: fun((State) -> {A, State})) -> A. modify(Handle, Fun) -> - gen_server:call(Handle, {modify, Fun}). + gen_server:call(Handle, {modify, Fun}, 6000). 
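%% The explicit third argument to gen_server:call/3 above replaces the OTP
%% default call timeout of 5000 ms with 6000 ms; if the server does not
%% reply in time the caller exits with a timeout error. A minimal usage
%% sketch (hypothetical values, not part of this change):
%%
%%   {ok, Handle} = cth_tpx_server:start_link(#{}),
%%   State = cth_tpx_server:get(Handle),
%%   Old = cth_tpx_server:modify(Handle, fun(S) -> {S, S#{seen => true}} end).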
%% ---- gen_server callbacks ---------- diff --git a/prelude/erlang/common_test/test_binary/BUCK.v2 b/prelude/erlang/common_test/test_binary/BUCK.v2 index 6e0b3e10aa..5737c0dccf 100644 --- a/prelude/erlang/common_test/test_binary/BUCK.v2 +++ b/prelude/erlang/common_test/test_binary/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_escript( name = "escript", emu_args = [ diff --git a/prelude/erlang/common_test/test_binary/src/test_binary.erl b/prelude/erlang/common_test/test_binary/src/test_binary.erl index e11f4f697b..d5dd4ed53f 100644 --- a/prelude/erlang/common_test/test_binary/src/test_binary.erl +++ b/prelude/erlang/common_test/test_binary/src/test_binary.erl @@ -31,16 +31,7 @@ main([TestInfoFile, "list", OutputDir]) -> after test_logger:flush() end, - init:stop(ExitCode), - receive - after ?INIT_STOP_TIMEOUT -> - ?LOG_ERROR( - io_lib:format("~p failed to terminate within ~c millisecond", [ - ?MODULE, ?INIT_STOP_TIMEOUT - ]) - ), - erlang:halt(ExitCode) - end; + erlang:halt(ExitCode); main([TestInfoFile, "run", OutputDir | Tests]) -> test_logger:set_up_logger(OutputDir, test_runner), ExitCode = @@ -55,16 +46,7 @@ main([TestInfoFile, "run", OutputDir | Tests]) -> after test_logger:flush() end, - init:stop(ExitCode), - receive - after ?INIT_STOP_TIMEOUT -> - ?LOG_ERROR( - io_lib:format("~p failed to terminate within ~c millisecond", [ - ?MODULE, ?INIT_STOP_TIMEOUT - ]) - ), - erlang:halt(ExitCode) - end; + erlang:halt(ExitCode); main([TestInfoFile]) -> %% without test runner support we run all tests and need to create our own test dir OutputDir = string:trim(os:cmd("mktemp -d")), @@ -105,7 +87,9 @@ load_test_info(TestInfoFile) -> "ct_opts" := CtOpts, "extra_ct_hooks" := ExtraCtHooks, "erl_cmd" := ErlCmd, - "artifact_annotation_mfa" := ArtifactAnnotationMFA + "extra_flags" := ExtraFlags, + "artifact_annotation_mfa" := ArtifactAnnotationMFA, + "common_app_env" := CommonAppEnv } ]} = file:consult(TestInfoFile), Providers1 = buck_ct_parser:parse_str(Providers), @@ -120,10 +104,12 @@ load_test_info(TestInfoFile) -> providers = Providers1, artifact_annotation_mfa = parse_mfa(ArtifactAnnotationMFA), ct_opts = CtOpts1, - erl_cmd = ErlCmd + erl_cmd = ErlCmd, + extra_flags = ExtraFlags, + common_app_env = CommonAppEnv }. --spec parse_mfa(string()) -> {ok, artifact_annotations:annotation_function()} | {error, term()}. +-spec parse_mfa(string()) -> artifact_annotations:annotation_function() | {error, term()}. parse_mfa(MFA) -> case erl_scan:string(MFA) of {ok, diff --git a/prelude/erlang/common_test/test_binary/src/test_runner.erl b/prelude/erlang/common_test/test_binary/src/test_runner.erl index 809dc989ec..6a8233f382 100644 --- a/prelude/erlang/common_test/test_binary/src/test_runner.erl +++ b/prelude/erlang/common_test/test_binary/src/test_runner.erl @@ -39,7 +39,9 @@ run_tests(Tests, #test_info{} = TestInfo, OutputDir, Listing) -> config_files = TestInfo#test_info.config_files, providers = TestInfo#test_info.providers, ct_opts = TestInfo#test_info.ct_opts, + common_app_env = TestInfo#test_info.common_app_env, erl_cmd = TestInfo#test_info.erl_cmd, + extra_flags = TestInfo#test_info.extra_flags, artifact_annotation_mfa = TestInfo#test_info.artifact_annotation_mfa }) end. @@ -170,7 +172,7 @@ provide_output_file( Tests, Suite, "test binary internal crash", ResultExec, OutLog ); Other when Other =:= passed orelse Other =:= timeout -> - % Here we either pased or timeout. + % Here we either passed or timed out.
case file:read_file(ResultsFile) of {ok, JsonFile} -> TreeResults = binary_to_term(JsonFile), diff --git a/prelude/erlang/common_test/test_cli_lib/BUCK.v2 b/prelude/erlang/common_test/test_cli_lib/BUCK.v2 index 439cc1c4d6..63e3cfdd9d 100644 --- a/prelude/erlang/common_test/test_cli_lib/BUCK.v2 +++ b/prelude/erlang/common_test/test_cli_lib/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "test_cli_lib", srcs = glob(["src/*.erl"]), @@ -13,3 +19,21 @@ erlang_application( ], visibility = ["PUBLIC"], ) + +erlang_tests( + contacts = ["whatsapp_testing_infra"], + labels = ["e2e"], + suites = ["test/test_cli_e2e_SUITE.erl"], + deps = [ + "stdlib", + ":test_cli_e2e_SUITE_fixtures", + ":test_cli_lib", + ], +) + +erlang_application( + name = "test_cli_e2e_SUITE_fixtures", + srcs = glob(["test/test_cli_e2e_SUITE_data/*.erl"]), + applications = [], + labels = ["test_application"], +) diff --git a/prelude/erlang/common_test/test_cli_lib/src/test.erl b/prelude/erlang/common_test/test_cli_lib/src/test.erl index 88786b1d11..fa94929477 100644 --- a/prelude/erlang/common_test/test_cli_lib/src/test.erl +++ b/prelude/erlang/common_test/test_cli_lib/src/test.erl @@ -24,7 +24,8 @@ list/0, list/1, rerun/1, run/0, run/1, - reset/0 + reset/0, + logs/0 ]). %% init @@ -34,7 +35,14 @@ start_shell/0 ]). --type run_spec() :: string() | non_neg_integer() | [#{name := string(), suite := string()}]. +%% Test functions +-export([ + list_impl/1 +]). + +-type test_id() :: string() | non_neg_integer(). +-type test_info() :: #{name := string(), suite := atom()}. +-type run_spec() :: test_id() | [test_info()]. -type run_result() :: {non_neg_integer(), non_neg_integer()}. -spec start() -> ok. @@ -69,13 +77,13 @@ help() -> [ print_help(F, A) || {F, A} <- ?MODULE:module_info(exports), - not lists:member(F, [module_info, ensure_initialized, start, start_shell]) + not lists:member(F, [module_info, ensure_initialized, start, start_shell, list_impl]) ], io:format("~n"), io:format("For more information, use the built in help, e.g. h(test, help)~n"), ok. --spec print_help(function(), arity()) -> ok. +-spec print_help(Fun :: atom(), arity()) -> ok. print_help(Fun, Arity) -> #{args := Args, desc := [DescFirst | DescRest]} = command_description(Fun, Arity), FunSig = string:pad( @@ -83,9 +91,10 @@ print_help(Fun, Arity) -> ), io:format("~s -- ~s~n", [FunSig, DescFirst]), Padding = string:pad("", 34), - [io:format("~s~s~n", [Padding, DescLine]) || DescLine <- DescRest]. + [io:format("~s~s~n", [Padding, DescLine]) || DescLine <- DescRest], + ok. --spec command_description(module(), arity()) -> #{args := [string()], desc := string()}. +-spec command_description(Fun :: atom(), arity()) -> #{args := [string()], desc := [string()]}. command_description(help, 0) -> #{args => [], desc => ["print help"]}; command_description(info, 0) -> @@ -125,12 +134,14 @@ command_description(run, 1) -> }; command_description(reset, 0) -> #{args => [], desc => ["restarts the test node, enabling a clean test state"]}; +command_description(logs, 0) -> + #{args => [], desc => ["print log files of the currently running test suites"]}; command_description(F, A) -> error({help_is_missing, {F, A}}). %% @doc List all available tests %% @equiv test:list("") --spec list() -> non_neg_integer(). +-spec list() -> ok | {error, term()}. list() -> list(""). 
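%% With this change test:list/1 delegates to list_impl/1 (introduced in the
%% next hunk), which returns the rendered listing instead of printing it, so
%% the new e2e suite can assert on the output. Contract sketch (suite name
%% hypothetical):
%%
%%   {ok, Listing} = test:list_impl("my_SUITE"),
%%   {error, {invalid_regex, _}} = test:list_impl("^[a").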
@@ -138,26 +149,26 @@ list() -> %% [https://www.erlang.org/doc/man/re.html#regexp_syntax] for the supported %% regular expression syntax. If a module is given as argument, list all %% tests from that module instead --spec list(RegExOrModule :: module() | string()) -> non_neg_integer(). +-spec list(RegExOrModule :: module() | string()) -> ok | {error, term()}. list(RegEx) when is_list(RegEx) -> - ensure_initialized(), - Tests = ct_daemon:list(RegEx), - print_tests(Tests). + case list_impl(RegEx) of + {ok, TestsString} -> io:format("~s", [TestsString]); + Error -> Error + end. %% @doc Run a test given by either the test id from the last list() command, or %% a regex that matches exactly one test. Tests are run with the shortest possible %% setup. This call does not recompile the test suite and its dependencies, but %% runs them as is. You can manually recompile code with c(Module). %% To reset the test state use reset(). --spec rerun(string() | non_neg_integer() | [#{name := string(), suite := string()}]) -> - run_result(). +-spec rerun(run_spec()) -> run_result(). rerun(Spec) -> ensure_initialized(), do_plain_test_run(Spec). %% @doc update code and run all tests %% @equiv run("") --spec run() -> ok | error. +-spec run() -> run_result() | error. run() -> run(""). @@ -177,8 +188,15 @@ run(RegExOrId) -> ok -> io:format("Reloading all changed modules... "), Loaded = ct_daemon:load_changed(), - io:format("reloaded ~p modules ~P~n", [erlang:length(Loaded), Loaded, 10]), - rerun(ToRun); + case erlang:length(Loaded) of + 0 -> + do_plain_test_run(ToRun); + ChangedCount -> + io:format("reloaded ~p modules ~P~n", [ChangedCount, Loaded, 10]), + % There were some changes, so list the tests again, then run but without recompiling changes + % Note that if called with the RegEx instead of ToRun test list like above, do_plain_test_run/1 will list the tests again + do_plain_test_run(RegExOrId) + end; Error -> Error end @@ -198,7 +216,27 @@ reset() -> }) end. +%% @doc Print all the logs of the currently running test suites +-spec logs() -> ok. +logs() -> + ensure_initialized(), + case logs_impl() of + {ok, Logs} -> + lists:foreach(fun(LogPath) -> io:format("~s~n", [LogPath]) end, Logs), + io:format("~n"); + {error, not_found} -> + io:format("no logs found~n") + end. + %% internal +-spec list_impl(RegEx :: string()) -> {ok, string()} | {error, term()}. +list_impl(RegEx) -> + ensure_initialized(), + case ct_daemon:list(RegEx) of + {invalid_regex, _} = Err -> {error, Err}; + Tests -> {ok, print_tests(Tests)} + end. + ensure_initialized() -> PrintInit = lists:foldl( fun(Fun, Acc) -> Fun() orelse Acc end, @@ -216,6 +254,7 @@ ensure_initialized() -> ok end. +-spec init_utility_apps() -> boolean(). init_utility_apps() -> RunningApps = proplists:get_value(running, application:info()), case proplists:is_defined(test_cli_lib, RunningApps) of @@ -233,6 +272,7 @@ init_utility_apps() -> end end. +-spec init_node() -> boolean(). init_node() -> case ct_daemon:alive() of true -> @@ -259,6 +299,7 @@ init_node() -> true end. +-spec watchdog() -> no_return(). watchdog() -> Node = ct_daemon_node:get_node(), true = erlang:monitor_node(Node, true), @@ -272,6 +313,7 @@ watchdog() -> erlang:halt() end. +-spec init_group_leader() -> boolean(). init_group_leader() -> %% set the group leader unconditionally, we need to do this since %% during init, the group leader is different then the one from the @@ -279,23 +321,29 @@ init_group_leader() -> ct_daemon:set_gl(), false. 
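%% print_tests/1 below now returns the rendered listing as a flat string
%% rather than printing it. For the hypothetical input
%% [{my_SUITE, [{1, "test_a"}, {2, "test_b"}]}] the rendered form would be
%% "my_SUITE:\n\t1 - test_a\n\t2 - test_b\n".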
+-spec print_tests([{module(), [{non_neg_integer(), string()}]}]) -> string(). print_tests([]) -> - io:format("no tests found~n"); + lists:flatten(io_lib:format("no tests found~n")); print_tests(Tests) -> - print_tests_impl(lists:reverse(Tests)). + lists:flatten(print_tests_impl(lists:reverse(Tests))). +-spec print_tests_impl([{module(), [{non_neg_integer(), string()}]}]) -> io_lib:chars(). print_tests_impl([]) -> - ok; + ""; print_tests_impl([{Suite, SuiteTests} | Rest]) -> - io:format("~s:~n", [Suite]), - [io:format("\t~b - ~s~n", [Id, Test]) || {Id, Test} <- SuiteTests], - print_tests_impl(Rest). + SuiteString = io_lib:format("~s:~n", [Suite]), + TestsString = [io_lib:format("\t~b - ~s~n", [Id, Test]) || {Id, Test} <- SuiteTests], + RestString = print_tests_impl(Rest), + SuiteString ++ TestsString ++ RestString. -spec is_debug_session() -> boolean(). is_debug_session() -> - application:get_env(test_cli_lib, debugger_mode, false). + case application:get_env(test_cli_lib, debugger_mode, false) of + Value when is_boolean(Value) -> + Value + end. --spec collect_results(#{module => [string()]}) -> #{string => ct_daemon_core:run_result()}. +-spec collect_results(#{module => [string()]}) -> #{string() => ct_daemon_core:run_result()}. collect_results(PerSuite) -> maps:fold( fun(Suite, Tests, Acc) -> @@ -330,7 +378,7 @@ ensure_per_suite_encapsulation(Suite) -> end end. --spec discover(string() | non_neg_integer()) -> [#{name := string(), suite := string()}]. +-spec discover(string() | non_neg_integer()) -> [test_info()]. discover(RegExOrId) -> case ct_daemon:discover(RegExOrId) of {error, not_listed_yet} -> @@ -375,11 +423,26 @@ do_plain_test_run(RegExOrId) -> ToRun -> do_plain_test_run(ToRun) end. --spec start_shell() -> no_return(). +-spec start_shell() -> ok | {error, term()}. start_shell() -> case string:to_integer(erlang:system_info(otp_release)) of {Version, _} when Version >= 26 -> shell:start_interactive(); _ -> - user_drv:start() + user_drv:start(), + ok + end. + +-spec logs_impl() -> {ok, [file:filename_all()]} | {error, not_found}. +logs_impl() -> + case ct_daemon:priv_dir() of + undefined -> + {error, not_found}; + PrivDir -> + PatternLog = filename:join(PrivDir, "*.log"), + LogPaths = filelib:wildcard(PatternLog), + PatternLogJson = filename:join(PrivDir, "*.log.json"), + LogJsonPaths = filelib:wildcard(PatternLogJson), + AllLogs = lists:sort(LogPaths ++ LogJsonPaths), + {ok, AllLogs} end. diff --git a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl new file mode 100644 index 0000000000..9cb1778f63 --- /dev/null +++ b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl @@ -0,0 +1,33 @@ +%% Copyright (c) Meta Platforms, Inc. and affiliates. +%% This source code is licensed under both the MIT license found in the +%% LICENSE-MIT file in the root directory of this source tree and the Apache +%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%% of this source tree. +%%% % @format +-module(test_cli_e2e_SUITE). + +-include_lib("stdlib/include/assert.hrl"). + +-export([all/0]). + +-export([ + test_list/1 +]). + +all() -> + [test_list]. 
+ +test_list(_Config) -> + Expected = + "test_cli_e2e_SUITE:\n" + "test_list_SUITE:\n" + "\t1 - test_list_SUITE - .test_pass\n" + "\t2 - test_list_SUITE - default.test_fail\n", + ?assertEqual({ok, Expected}, test:list_impl("test_list_SUITE")), + + ?assertMatch({error, {invalid_regex, _}}, test:list_impl("^[a")), + + EmptyExpected = + "test_cli_e2e_SUITE:\n" + "test_list_SUITE:\n", + ?assertEqual({ok, EmptyExpected}, test:list_impl("does_not_exist_SUITE")). diff --git a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl new file mode 100644 index 0000000000..c28e4b0e93 --- /dev/null +++ b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl @@ -0,0 +1,28 @@ +%% Copyright (c) Meta Platforms, Inc. and affiliates. +%% This source code is licensed under both the MIT license found in the +%% LICENSE-MIT file in the root directory of this source tree and the Apache +%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%% of this source tree. +%%% % @format +-module(test_list_SUITE). + +-include_lib("stdlib/include/assert.hrl"). + +-export([all/0, groups/0]). + +-export([ + test_pass/1, + test_fail/1 +]). + +all() -> + [test_pass, {group, default}]. + +groups() -> + [{default, [], [test_fail]}]. + +test_pass(_Config) -> + ?assert(true). + +test_fail(_Config) -> + ?assert(false). diff --git a/prelude/erlang/common_test/test_exec/BUCK.v2 b/prelude/erlang/common_test/test_exec/BUCK.v2 index 0ea358626c..1cb78df304 100644 --- a/prelude/erlang/common_test/test_exec/BUCK.v2 +++ b/prelude/erlang/common_test/test_exec/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "test_exec", srcs = glob([ @@ -23,3 +29,13 @@ erlang_application( use_global_parse_transforms = False, visibility = ["PUBLIC"], ) + +erlang_tests( + contacts = ["whatsapp_testing_infra"], + labels = ["unit"], + suites = ["test/ct_executor_SUITE.erl"], + deps = [ + "stdlib", + ":test_exec", + ], +) diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon.erl index b5a4fb7fe7..97e993d454 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon.erl @@ -26,7 +26,8 @@ discover/1, load_changed/0, setup_state/0, - output_dir/0 + output_dir/0, + priv_dir/0 ]). %% @doc start a test-node with random name and shortname @@ -89,7 +90,7 @@ list(RegEx) -> end. -spec discover(pos_integer() | string()) -> - #{suite := module(), name := string()} + [#{suite := module(), name := string()}] | ct_daemon_runner:discover_error(). discover(RegExOrId) -> do_call({discover, RegExOrId}). @@ -107,6 +108,10 @@ setup_state() -> output_dir() -> do_call(output_dir). +-spec priv_dir() -> file:filename_all() | undefined. +priv_dir() -> + do_call(priv_dir). + -spec push_paths(Paths :: [file:filename_all()]) -> ok. 
push_paths(Paths) -> case alive() of diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl index b38b0a9e84..dd3c560f2f 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl @@ -235,8 +235,8 @@ test_part(Config, Suite, Test, Path) -> InitResult = case safe_call(wrap_ct_hook(init_per_testcase, Path, fun Suite:init_per_testcase/2), [Config]) of {error, not_exported} -> Config; - {skipped, Reason} -> {error, {skip, init_per_testcase, Reason}}; - {failed, InitErrReason} -> {error, {skip, init_per_testcase, InitErrReason}}; + {skip, Reason} -> {error, {skip, init_per_testcase, Reason}}; + {fail, InitErrReason} -> {error, {skip, init_per_testcase, InitErrReason}}; {error, InitErrReason} -> {error, {skip, init_per_testcase, InitErrReason}}; InitOutConfig -> InitOutConfig end, @@ -328,10 +328,10 @@ do_part_safe(Id, Fun, Config, TimeTrap) -> end, {name, FunName} = erlang:fun_info(Fun, name), try Fun(Config) of - {skipped, Reason} -> + {skip, Reason} -> ?LOG_DEBUG("got skip for ~p because of: ~p", [Id, Reason]), ParentPid ! {RspRef, {skip, {FunName, Id}, Reason}}; - {failed, Reason} -> + {fail, Reason} -> ?LOG_DEBUG("got fail for ~p because of: ~p", [Id, Reason]), ParentPid ! {RspRef, {fail, {FunName, Id}, Reason}}; {skip_and_save, Reason, _} -> diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl index 8a8ef43993..913edb8eef 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl @@ -7,12 +7,13 @@ %%%------------------------------------------------------------------- %%% @doc -%%% Implementation of hooks functionality +%%% Implementation of hooks functionality. We mimic the behaviour of +%%% common test hooks so that they can run in test shell %%% @end %%% % @format -module(ct_daemon_hooks). --compile(warn_missing_spec). +-compile(warn_missing_spec_all). -behaviour(gen_server). @@ -63,16 +64,50 @@ | on_tc_fail | on_tc_skip. +-type post_hook_call() :: + post_init_per_suite + | post_init_per_group + | post_init_per_testcase + | post_end_per_suite + | post_end_per_group + | post_end_per_testcase. + +-type pre_hook_call() :: + pre_init_per_suite + | pre_init_per_group + | pre_init_per_testcase + | pre_end_per_suite + | pre_end_per_group + | pre_end_per_testcase. + +-type hook_level() :: + suite + | group + | testcase. + +-type hook_response() :: + [config()] + | {skip, term()} + | {fail, term()}. + +-type hook_config() :: + module() + | {module(), Options :: [term()]} + | {module(), Options :: [term()], Priority :: integer()}. + %%-------------------------------------------------------------------- %%% API -spec set_state(id(), hook_state()) -> ok. set_state(Id, State) -> - gen_server:call(?MODULE, {set_state, Id, State}). + ok = gen_server:call(?MODULE, {set_state, Id, State}). --spec get_state(id()) -> {ok, hook_state()} | {error, not_found}. +-spec get_state(id()) -> {ok, hook_state()} | {error, {not_found, list()}}. get_state(Id) -> - gen_server:call(?MODULE, {get_state, Id}). + case gen_server:call(?MODULE, {get_state, Id}) of + {ok, State} -> {ok, State}; + Error = {error, {not_found, Details}} when is_list(Details) -> Error + end. -spec wrap(part(), [atom()], fun()) -> fun(). 
wrap(Part, Path, Fun) -> @@ -86,7 +121,7 @@ get_hooks() -> %% @doc %% Starts the server within supervision tree --spec start_monitor() -> gen_server:start_ret(). +-spec start_monitor() -> gen_server:start_mon_ret(). start_monitor() -> gen_server:start_monitor({local, ?MODULE}, ?MODULE, [], []). @@ -97,12 +132,13 @@ start_monitor() -> init([]) -> {ok, initialize_hooks()}. --spec handle_call(Request :: term(), From :: gen_server:from(), State :: state()) -> - no_return(). +-spec handle_call({get_state, id()}, gen_server:from(), state()) -> {reply, {ok, hook_state()}, state()} | {error, {not_found, list()}}; + ({set_state, id(), hook_state()}, gen_server:from(), state()) -> {reply, ok, state()}; + ({wrap, part(), fun()}, gen_server:from(), state()) -> {reply, fun(([atom() | config()]) -> term()), state()}. handle_call({get_state, Id}, _From, State = #{states := HookStates}) -> case HookStates of #{Id := HookState} -> {reply, {ok, HookState}, State}; - _ -> {error, not_found, [{state, State}, {id, Id}]} + _ -> {error, {not_found, [{state, State}, {id, Id}]}} end; handle_call({set_state, Id, HookState}, _From, State = #{states := HookStates}) -> {reply, ok, State#{states => HookStates#{Id => HookState}}}; @@ -131,7 +167,7 @@ initialize_hooks() -> end || {Mod, Opts, Prio} <- NormalizedConfiguredHooks ], - %% according to documentation, if two hooks have the same ID, the latter one get's dropped + %% according to documentation, if two hooks have the same ID, the latter one gets dropped PreInitHooks0 = lists:ukeysort(2, HooksWithId), %% now sort with configured prio the inits (default prio being 0) PreInitHooks1 = lists:keysort(1, PreInitHooks0), @@ -156,6 +192,7 @@ initialize_hooks() -> hooks => [Hook || {_Priority, Hook} <- SortedHooks] }. +-spec get_hooks_config() -> [hook_config()]. get_hooks_config() -> application:get_env(test_exec, ct_daemon_hooks, []) ++ proplists:get_value(ct_hooks, application:get_env(test_exec, daemon_options, []), []). @@ -164,6 +201,7 @@ get_hooks_config() -> wrap_part(Part, Fun, State) -> wrap_init_end(Part, Fun, State). +-spec wrap_init_end(part(), fun(), state()) -> fun(([atom() | config()]) -> term()). 
wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> %% NOTE ON EXECUTION ORDER: %% @@ -198,9 +236,9 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> end, case call_if_exists_with_fallback_store_state(Hook, pre(Part), PathArg ++ [ConfigArg0], ok) of {skip, SkipReason} -> - {skipped, SkipReason}; + {skip, SkipReason}; {fail, FailReason} -> - {failed, FailReason}; + {fail, FailReason}; HookCallbackResult -> ConfigArg1 = case is_list(HookCallbackResult) of @@ -219,12 +257,12 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> {tc_status, {skipped, SkipReason}} | lists:keydelete(tc_status, 1, ConfigArg1) ], - {skipped, SkipReason} + {skip, SkipReason} }; {fail, FailReason} -> { [{tc_status, {failed, FailReason}} | lists:keydelete(tc_status, 1, ConfigArg1)], - {failed, FailReason} + {fail, FailReason} }; OkResult -> ConfigArg2 = @@ -256,15 +294,10 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> [Suite | _] = PathArg, Result = try WrappedWithPreAndPost(PathArg, ConfigArg) of - Skip = {skipped, _Reason} -> - Skip; - Fail = {failed, _Reason} -> - Fail; - %% if we don't have a hook setup, we still need to do the conversion from skip/fail to skipped/failed {skip, SkipReason} -> - {skipped, SkipReason}; + {skip, SkipReason}; {fail, FailReason} -> - {failed, FailReason}; + {fail, FailReason}; MaybeConfig -> case init_or_end(Part) of 'end' -> @@ -279,50 +312,51 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> end end catch - Class:Reason:Stacktrace -> {failed, {'EXIT', {{Class, Reason}, Stacktrace}}} + Class:Reason:Stacktrace -> {fail, {'EXIT', {{Class, Reason}, Stacktrace}}} end, handle_post_result(HooksInInstallationOrder, build_test_name(Part, PathArg), Suite, Result) end. +-spec handle_post_result([hook()], test_name(), module(), {ok, [config()]} | {skip, term()} | {fail, term()}) -> hook_response(). handle_post_result(Hooks, TestName, Suite, Result) -> ReverseHooks = lists:reverse(Hooks), case Result of - SkipResult = {skipped, _} -> + {skip, SkipReason} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_skip, [Suite, TestName, SkipResult], ok + Hook, on_tc_skip, [Suite, TestName, {tc_user_skip, SkipReason}], ok ) || Hook <- ReverseHooks ], - SkipResult; - FailResult = {failed, _} -> + {skip, SkipReason}; + {fail, FailReason} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_fail, [Suite, TestName, FailResult], ok + Hook, on_tc_fail, [Suite, TestName, FailReason], ok ) || Hook <- ReverseHooks ], - FailResult; + {fail, FailReason}; {ok, Config} -> case lists:keyfind(tc_status, 1, Config) of false -> Config; - {tc_status, SkipResult = {skipped, _}} -> + {tc_status, {skipped, SkipReason}} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_skip, [Suite, TestName, SkipResult], ok + Hook, on_tc_skip, [Suite, TestName, {tc_user_skip, SkipReason}], ok ) || Hook <- ReverseHooks ], - SkipResult; - {tc_status, FailResult = {failed, _}} -> + {skip, SkipReason}; + {tc_status, {failed, FailReason}} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_fail, [Suite, TestName, FailResult], ok + Hook, on_tc_fail, [Suite, TestName, FailReason], ok ) || Hook <- ReverseHooks ], - FailResult + {fail, FailReason} end end. @@ -359,22 +393,21 @@ build_test_name(end_per_testcase, Path) -> [Test, Group | _] = lists:reverse(Path), {Group, Test}. --spec get_hook_module(module() | {module(), Options} | {module(), Options, Priority}) -> module() when - Options :: list(), Priority :: integer(). 
+-spec get_hook_module(hook_config()) -> module(). get_hook_module({Mod, _, _}) -> Mod; get_hook_module({Mod, _}) -> Mod; get_hook_module(Mod) -> Mod. --spec get_hook_opts(module() | {module(), Options} | {module(), Options, Priority}) -> Options when - Options :: list(), Priority :: integer(). + +-spec get_hook_opts(hook_config()) -> [term()]. get_hook_opts({_, Opts, _}) -> Opts; get_hook_opts({_, Opts}) -> Opts; get_hook_opts(_) -> []. --spec get_hook_priority(module() | {module(), Options} | {module(), Options, Priority}) -> Priority when - Options :: list(), Priority :: integer(). +-spec get_hook_priority(hook_config()) -> integer() | undefined. get_hook_priority({_, _, Prio}) -> Prio; get_hook_priority(_) -> undefined. +-spec normalize_part(part(), fun()) -> fun(). normalize_part(Part, Fun) -> SafeFun = get_safe_part(Part, Fun), case level(Part) of @@ -384,21 +417,24 @@ normalize_part(Part, Fun) -> end. %% wrappers because most calls are optional +-spec call_if_exists(module(), atom(), [term()], Default :: {'$lazy', LazyFun :: fun(() -> term())} | term()) -> term(). call_if_exists(Mod, Fun, Args, Default) -> case erlang:function_exported(Mod, Fun, erlang:length(Args)) of true -> erlang:apply(Mod, Fun, Args); false -> case Default of - {'$lazy', LazyFun} -> LazyFun(); + {'$lazy', LazyFun} when is_function(LazyFun, 0) -> LazyFun(); _ -> Default end end. +-spec call_if_exists_with_fallback(module(), atom(), [term()], term()) -> term(). call_if_exists_with_fallback(Mod, Fun, Args, ReturnDefault) -> [_ | FallbackArgs] = Args, call_if_exists(Mod, Fun, Args, {'$lazy', fun() -> call_if_exists(Mod, Fun, FallbackArgs, ReturnDefault) end}). +-spec call_if_exists_with_fallback_store_state({module(), term()}, atom(), [term()], term()) -> term(). call_if_exists_with_fallback_store_state({Mod, Id}, Fun, Args, ReturnDefault) -> {ok, State} = get_state(Id), Default = @@ -436,6 +472,7 @@ wrapped_init({Mod, Id}, Opts, ConfiguredPriority) -> _ -> {ConfiguredPriority, InitState} end. +-spec pre(part()) -> pre_hook_call(). pre(init_per_suite) -> pre_init_per_suite; pre(init_per_group) -> pre_init_per_group; pre(init_per_testcase) -> pre_init_per_testcase; @@ -443,6 +480,7 @@ pre(end_per_suite) -> pre_end_per_suite; pre(end_per_group) -> pre_end_per_group; pre(end_per_testcase) -> pre_end_per_testcase. +-spec post(part()) -> post_hook_call(). post(init_per_suite) -> post_init_per_suite; post(init_per_group) -> post_init_per_group; post(init_per_testcase) -> post_init_per_testcase; @@ -450,6 +488,7 @@ post(end_per_suite) -> post_end_per_suite; post(end_per_group) -> post_end_per_group; post(end_per_testcase) -> post_end_per_testcase. +-spec level(part()) -> hook_level(). level(init_per_suite) -> suite; level(init_per_group) -> group; level(init_per_testcase) -> testcase; @@ -457,6 +496,7 @@ level(end_per_suite) -> suite; level(end_per_group) -> group; level(end_per_testcase) -> testcase. +-spec init_or_end(part()) -> init | 'end'. init_or_end(init_per_suite) -> init; init_or_end(init_per_group) -> init; init_or_end(init_per_testcase) -> init; @@ -464,12 +504,14 @@ init_or_end(end_per_suite) -> 'end'; init_or_end(end_per_group) -> 'end'; init_or_end(end_per_testcase) -> 'end'. +-spec get_safe_part(part(), fun()) -> fun(). get_safe_part(Part, Fun) -> case is_exported(Fun) of true -> Fun; false -> dummy(Part) end. +-spec dummy(part()) -> fun(). 
dummy(init_per_suite) -> fun(Config) -> Config end; dummy(init_per_group) -> fun(_, Config) -> Config end; dummy(init_per_testcase) -> fun(_, Config) -> Config end; @@ -477,6 +519,7 @@ dummy(end_per_suite) -> fun(_) -> ok end; dummy(end_per_group) -> fun(_, _) -> ok end; dummy(end_per_testcase) -> fun(_, _) -> ok end. +-spec is_exported(fun()) -> boolean(). is_exported(Fun) -> case maps:from_list(erlang:fun_info(Fun)) of #{ diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl index ab147833b5..74badb9db5 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl @@ -7,7 +7,12 @@ %%%------------------------------------------------------------------- %%% @doc -%%% Setup functions for logger and CT printing facilities +%%% CT handles logging and printing by sending a message to the ct_logs +%%% process. For the test shell we intercept those messages by registering +%%% a gen_server under the ct_logs name that prints them to the shell. We +%%% do this instead of using the real ct_logs process to have more control +%%% over the output and to avoid starting CT processes that might +%%% interfere with the test shell's functionality. %%% @end %%% % @format @@ -15,67 +20,60 @@ -include_lib("kernel/include/logger.hrl"). +-behaviour(gen_server). + %% Public API --export([setup/2]). +-export([start/1]). + +%% gen_server callbacks +-export([init/1, handle_info/2, handle_call/3, handle_cast/2]). + +-type state() :: #{}. + +-spec init(Args) -> Result when + Args :: term(), + Result :: {ok, state()}. +init(_) -> {ok, #{}}. + +-spec handle_info(Info, State) -> {noreply, State} when + Info :: term(), + State :: state(). +handle_info({log, _SyncOrAsync, _FromPid, _GL, _Category, _Importance, Content, _EscChars} = _Info, State) when + is_list(Content) +-> + % Mimics behaviour from the logger_loop function in ct_logs.erl + IoList = lists:foldl( + fun + ({Format, Args}, IoList) when is_list(Format), is_list(Args) -> + [io_lib:format(Format, Args), "\n", IoList]; + (_, IoList) -> + IoList + end, + [], + Content + ), + io:format("~ts~n", [IoList]), + {noreply, State}; +handle_info(_Info, State) -> + % ignore + {noreply, State}. + +-spec handle_call(Request, From, State) -> {noreply, State} when + Request :: term(), + From :: gen_server:from(), + State :: state(). +handle_call(_Info, _From, State) -> {noreply, State}. + +-spec handle_cast(Request, State) -> {noreply, State} when + Request :: term(), + State :: state(). +handle_cast(_Info, State) -> {noreply, State}. %% @doc mocks for ct_logs functions --spec setup(file:filename_all(), boolean()) -> ok. -setup(OutputDir, InstrumentCTLogs) -> +-spec start(file:filename_all()) -> ok.
+start(OutputDir) -> LogFile = test_logger:get_log_file(OutputDir, ct_daemon), ok = test_logger:configure_logger(LogFile), - %% check is we need to instrument ct_logs - %% this somehow crashes the node startup if CT runs on the - %% controlling node - case InstrumentCTLogs of - true -> - meck:new(ct_logs, [passthrough, no_link]), - meck:expect(ct_logs, tc_log, fun tc_log/3), - meck:expect(ct_logs, tc_log, fun tc_log/4), - meck:expect(ct_logs, tc_log, fun tc_log/5), - meck:expect(ct_logs, tc_print, fun tc_print/3), - meck:expect(ct_logs, tc_print, fun tc_print/4), - meck:expect(ct_logs, tc_print, fun tc_print/5), - meck:expect(ct_logs, tc_pal, fun tc_pal/3), - meck:expect(ct_logs, tc_pal, fun tc_pal/4), - meck:expect(ct_logs, tc_pal, fun tc_pal/5); - _ -> - ok - end, + {ok, _} = gen_server:start_link({local, ct_logs}, ?MODULE, #{}, []), ok. - -tc_log(Category, Format, Args) -> - tc_print(Category, 1000, Format, Args). - -tc_log(Category, Importance, Format, Args) -> - tc_print(Category, Importance, Format, Args, []). - -tc_log(Category, Importance, Format, Args, _Opts) -> - LogMessage = lists:flatten( - io_lib:format("[ct_logs][~p][~p] ~s", [Category, Importance, Format]) - ), - ?LOG_INFO(LogMessage, Args). - -tc_print(Category, Format, Args) -> - tc_print(Category, 1000, Format, Args). - -tc_print(Category, Importance, Format, Args) -> - tc_print(Category, Importance, Format, Args, []). - -tc_print(_Category, _Importance, Format, Args, _Opts) -> - FormatWithTimesStamp = io_lib:format("[~s] ~s\n", [timestamp(), Format]), - FinalFormat = lists:flatten(FormatWithTimesStamp), - io:format(FinalFormat, Args). - -tc_pal(Category, Format, Args) -> - tc_print(Category, 1000, Format, Args). - -tc_pal(Category, Importance, Format, Args) -> - tc_print(Category, Importance, Format, Args, []). - -tc_pal(Category, Importance, Format, Args, Opts) -> - ct_logs:tc_log(Category, Importance, Format, Args, [no_css | Opts]), - tc_print(Category, Importance, Format, Args, Opts). - -timestamp() -> - calendar:system_time_to_rfc3339(erlang:system_time(second)). diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl index d260bc2492..97f9b918ea 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl @@ -14,6 +14,8 @@ -module(ct_daemon_node). +-compile(warn_missing_spec_all). + -include_lib("kernel/include/logger.hrl"). %% Public API @@ -73,6 +75,7 @@ start( % see T129435667 Port = ct_runner:start_test_node( os:find_executable("erl"), + [], CodePaths, ConfigFiles, OutputDir, @@ -83,6 +86,7 @@ start( true = erlang:register(?MODULE, self()), port_loop(Port, []). +-spec port_loop(port(), list()) -> ok | {error, {crash_on_startup, integer()}}. port_loop(Port, Acc) -> receive {Port, {data, {eol, Line}}} -> @@ -105,6 +109,7 @@ stop() -> %% monitore node true = erlang:monitor_node(Node, true), %% kill node + %% elp:ignore W0014 _Pid = erlang:spawn(Node, fun() -> erlang:halt() end), %% wait for node to come down receive @@ -125,7 +130,7 @@ alive() -> %% @doc node main entry point -spec node_main([node()]) -> no_return(). 
-node_main([Parent, OutputDirAtom, InstrumentCTLogs]) -> +node_main([Parent, OutputDirAtom]) -> ok = application:load(test_exec), OutputDir = erlang:atom_to_list(OutputDirAtom), @@ -133,7 +138,7 @@ node_main([Parent, OutputDirAtom, InstrumentCTLogs]) -> erlang:system_flag(backtrace_depth, 20), %% setup logger and prepare IO - ok = ct_daemon_logger:setup(OutputDir, InstrumentCTLogs), + ok = ct_daemon_logger:start(OutputDir), true = net_kernel:connect_node(Parent), @@ -189,7 +194,6 @@ build_daemon_args(Type, Node, Cookie, Options, OutputDir) -> longnames -> "-name"; shortnames -> "-sname" end, - InstrumentCTLogs = erlang:whereis(ct_logs) =:= undefined, [ DistArg, convert_atom_arg(Node), @@ -202,8 +206,7 @@ build_daemon_args(Type, Node, Cookie, Options, OutputDir) -> convert_atom_arg(?MODULE), "node_main", convert_atom_arg(erlang:node()), - OutputDir, - convert_atom_arg(InstrumentCTLogs) + OutputDir ]. -spec convert_atom_arg(atom()) -> string(). diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl index dd81f8cbde..dc8f945d9c 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl @@ -55,6 +55,10 @@ print_result(Name, {error, {_TestId, {'ct_daemon_core$sentinel_crash', Info}}}) io:format("~ts ~ts~n", [?CROSS_MARK, Name]), io:format("Test process received EXIT signal with reason: ~p~n", [Info]), fail; +print_result(Name, {error, {_TestId, {timetrap, TimeoutValue}}}) -> + io:format("~ts ~ts~n", [?CROSS_MARK, Name]), + io:format("Test timed out after ~p ms~n", [TimeoutValue]), + fail; print_result(Name, Unstructured) -> io:format("~ts ~ts~n", [?CROSS_MARK, Name]), io:format("unable to format failure reason, please report.~n"), diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl index 6f583ff8c0..831dbfdb48 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl @@ -113,6 +113,15 @@ handle_call(setup, _From, State) -> handle_call(output_dir, _From, State) -> DaemonOptions = application:get_env(test_exec, daemon_options, []), {reply, proplists:get_value(output_dir, DaemonOptions), State}; +handle_call(priv_dir, _From, State) -> + Response = + case State of + #{setup := #{config := Config}} -> + proplists:get_value(priv_dir, Config); + _ -> + undefined + end, + {reply, Response, State}; handle_call(Request, _From, State) -> {reply, Request, State}. diff --git a/prelude/erlang/common_test/test_exec/src/ct_executor.erl b/prelude/erlang/common_test/test_exec/src/ct_executor.erl index f10db6f1d3..e7c8e0f2df 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_executor.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_executor.erl @@ -11,15 +11,25 @@ %% Notably allows us to call post/pre method on the node if needed, e.g for coverage. -module(ct_executor). - -include_lib("kernel/include/logger.hrl"). -include_lib("common/include/buck_ct_records.hrl"). +-compile(warn_missing_spec_all). -export([run/1]). -% Time we give the beam to close off, in ms. --define(INIT_STOP_TIMEOUT, 5000). +%% `ct_run_arg()` represents an option accepted by ct:run_test/1, such as +%% `multiply_timetraps` or `ct_hooks`. +%% For all the options, see https://www.erlang.org/doc/man/ct#run_test-1 +-type ct_run_arg() :: {atom(), term()}. 
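%% Example ct_run_arg() values (illustrative only; any option accepted by
%% ct:run_test/1 fits this shape):
%%   {spec, "path/to/test.spec"}
%%   {multiply_timetraps, 10}
%%   {ct_hooks, [cth_tpx]}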
+-type ct_exec_arg() :: {output_dir | suite | providers, term()}. + +% For testing +-export([split_args/1]). + +-define(STDOUT_MAX_LINES, 1000). +-define(STDOUT_MAX_LINE_LENGTH, 10000). +-spec run([string()]) -> no_return(). run(Args) when is_list(Args) -> ExitCode = try @@ -37,6 +47,9 @@ run(Args) when is_list(Args) -> % Therefore we used io:format to forward information to the % process calling it (ct_runner). try + % We need to load the 'common' application to be able to configure + % it via the `common_app_env` arguments + application:load(common), % We consult all the .app files to load the atoms. % This solution is less than optimal and should be addressed % T120903856 @@ -61,7 +74,9 @@ run(Args) when is_list(Args) -> erlang:system_flag(backtrace_depth, 20), ?LOG_DEBUG("ct_run called with arguments ~p ~n", [CtRunArgs]), Providers1 = [buck_ct_provider:do_pre_running(Provider) || Provider <- Providers0], - {ok, IoBuffer} = io_buffer:start_link(), + {ok, IoBuffer} = io_buffer:start_link(#{ + passthrough => true, max_elements => ?STDOUT_MAX_LINES, max_length => ?STDOUT_MAX_LINE_LENGTH + }), register(cth_tpx_io_buffer, IoBuffer), %% set global timeout Result = ct:run_test(CtRunArgs), @@ -85,23 +100,9 @@ run(Args) when is_list(Args) -> io:format("~ts\n", [erl_error:format_exception(Class1, Reason1, Stack1)]), 1 end, - case ExitCode of - 0 -> - init:stop(0), - receive - after ?INIT_STOP_TIMEOUT -> - ?LOG_ERROR( - io_lib:format("~p failed to terminate within ~c millisecond", [ - ?MODULE, ?INIT_STOP_TIMEOUT - ]) - ), - erlang:halt(0) - end; - _ -> - erlang:halt(ExitCode) - end. + erlang:halt(ExitCode). --spec parse_arguments([string()]) -> {proplists:proplist(), [term()]}. +-spec parse_arguments([string()]) -> {[ct_exec_arg()], [ct_run_arg()]}. parse_arguments(Args) -> % The logger is not set up yet. % This will be sent to the program executing it (ct_runner), @@ -120,14 +121,27 @@ parse_arguments(Args) -> split_args(ParsedArgs). % @doc Splits the argument before those that happens -% before ct_args (the executor args) amd those after -% (the args for ct_run). -split_args(Args) -> split_args(Args, [], []). +% before ct_args (the executor args) and those after +% (the args for ct_run). ct_args will always be +% present in the list +-spec split_args([term()]) -> {[ct_exec_arg()], [ct_run_arg()]}. +split_args(Args) -> + {CtExecutorArgs, [ct_args | CtRunArgs]} = lists:splitwith(fun(Arg) -> Arg =/= ct_args end, Args), + {parse_ct_exec_args(CtExecutorArgs), parse_ct_run_args(CtRunArgs)}. + +-spec parse_ct_run_args([term()]) -> [ct_run_arg()]. +parse_ct_run_args([]) -> + []; +parse_ct_run_args([{Key, _Value} = Arg | Args]) when is_atom(Key) -> + [Arg | parse_ct_run_args(Args)]. -split_args([ct_args | Args], CtExecutorArgs, []) -> {lists:reverse(CtExecutorArgs), Args}; -split_args([Arg | Args], CtExecutorArgs, []) -> split_args(Args, [Arg | CtExecutorArgs], []); -split_args([], CtExecutorArgs, []) -> {lists:reverse(CtExecutorArgs), []}. +-spec parse_ct_exec_args([term()]) -> [ct_exec_arg()]. +parse_ct_exec_args([]) -> + []; +parse_ct_exec_args([{Key, _Value} = Arg | Args]) when Key =:= output_dir; Key =:= suite; Key =:= providers -> + [Arg | parse_ct_exec_args(Args)]. +-spec debug_print(string(), [term()]) -> ok. 
debug_print(Fmt, Args) -> case os:getenv("ERLANG_BUCK_DEBUG_PRINT") of false -> io:format(Fmt, Args); diff --git a/prelude/erlang/common_test/test_exec/src/ct_runner.erl b/prelude/erlang/common_test/test_exec/src/ct_runner.erl index a56928de6e..5240f28b1c 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_runner.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_runner.erl @@ -28,8 +28,8 @@ ]). -export([ - start_test_node/5, start_test_node/6, + start_test_node/7, cookie/0, generate_arg_tuple/2, project_root/0 @@ -143,7 +143,9 @@ run_test( suite_path = SuitePath, providers = Providers, suite = Suite, - erl_cmd = ErlCmd + erl_cmd = ErlCmd, + extra_flags = ExtraFlags, + common_app_env = CommonAppEnv } = _TestEnv, PortEpmd ) -> @@ -152,12 +154,13 @@ run_test( SuiteFolder = filename:dirname(filename:absname(SuitePath)), CodePath = [SuiteFolder | Dependencies], - Args = build_run_args(OutputDir, Providers, Suite, TestSpecFile), + Args = build_run_args(OutputDir, Providers, Suite, TestSpecFile, CommonAppEnv), {ok, ProjectRoot} = file:get_cwd(), start_test_node( ErlCmd, + ExtraFlags, CodePath, ConfigFiles, OutputDir, @@ -186,39 +189,72 @@ build_common_args(CodePath, ConfigFiles) -> OutputDir :: file:filename_all(), Providers :: [{module(), [term()]}], Suite :: module(), - TestSpecFile :: file:filename_all() + TestSpecFile :: file:filename_all(), + CommonAppEnv :: #{string() => string()} ) -> [string()]. -build_run_args(OutputDir, Providers, Suite, TestSpecFile) -> - lists:concat([ - ["-run", "ct_executor", "run"], - generate_arg_tuple(output_dir, OutputDir), - generate_arg_tuple(providers, Providers), - generate_arg_tuple(suite, Suite), - ["ct_args"], - generate_arg_tuple(spec, TestSpecFile) - ]). +build_run_args(OutputDir, Providers, Suite, TestSpecFile, CommonAppEnv) -> + lists:append( + [ + ["-run", "ct_executor", "run"], + generate_arg_tuple(output_dir, OutputDir), + generate_arg_tuple(providers, Providers), + generate_arg_tuple(suite, Suite), + ["ct_args"], + generate_arg_tuple(spec, TestSpecFile), + common_app_env_args(CommonAppEnv) + ] + ). + +-spec common_app_env_args(Env :: #{string() => string()}) -> [string()]. +common_app_env_args(Env) -> + lists:append([["-common", Key, Value] || {Key, Value} <- maps:to_list(Env)]). -spec start_test_node( Erl :: string(), + ExtraFlags :: [string()], CodePath :: [file:filename_all()], ConfigFiles :: [file:filename_all()], OutputDir :: file:filename_all(), PortSettings :: port_settings() ) -> port(). -start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0) -> - start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0, false). +start_test_node( + ErlCmd, + ExtraFlags, + CodePath, + ConfigFiles, + OutputDir, + PortSettings0 +) -> + start_test_node( + ErlCmd, + ExtraFlags, + CodePath, + ConfigFiles, + OutputDir, + PortSettings0, + false + ). -spec start_test_node( Erl :: string(), + ExtraFlags :: [string()], CodePath :: [file:filename_all()], ConfigFiles :: [file:filename_all()], OutputDir :: file:filename_all(), PortSettings :: port_settings(), ReplayIo :: boolean() ) -> port(). 
-start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0, ReplayIo) -> +start_test_node( + ErlCmd, + ExtraFlags, + CodePath, + ConfigFiles, + OutputDir, + PortSettings0, + ReplayIo +) -> % split of args from Erl which can contain emulator flags - [_Executable | ExtraFlags] = string:split(ErlCmd, " ", all), + [_Executable | Flags] = string:split(ErlCmd, " ", all), % we ignore the executable we got, and use the erl command from the % toolchain that executes this code ErlExecutable = os:find_executable("erl"), @@ -228,7 +264,7 @@ start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0, ReplayI %% merge args, enc, cd settings LaunchArgs = - ExtraFlags ++ + Flags ++ ExtraFlags ++ build_common_args(CodePath, ConfigFiles) ++ proplists:get_value(args, PortSettings0, []), diff --git a/prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl b/prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl new file mode 100644 index 0000000000..bcf4d0b866 --- /dev/null +++ b/prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl @@ -0,0 +1,41 @@ +%% Copyright (c) Meta Platforms, Inc. and affiliates. +%% This source code is licensed under both the MIT license found in the +%% LICENSE-MIT file in the root directory of this source tree and the Apache +%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%% of this source tree. +%%% % @format +-module(ct_executor_SUITE). + +-include_lib("stdlib/include/assert.hrl"). + +-export([all/0]). + +-export([ + test_split_args/1 +]). + +all() -> + [test_split_args]. + +test_split_args(_Config) -> + ?assertEqual( + {[{output_dir, ""}, {providers, [something]}, {suite, a_suite}], [{dir, ""}, {suite, a_suite}, {group, a_group}]}, + ct_executor:split_args([ + {output_dir, ""}, + {providers, [something]}, + {suite, a_suite}, + ct_args, + {dir, ""}, + {suite, a_suite}, + {group, a_group} + ]) + ), + ?assertEqual( + {[{output_dir, ""}, {providers, [something]}, {suite, a_suite}], []}, + ct_executor:split_args([{output_dir, ""}, {providers, [something]}, {suite, a_suite}, ct_args]) + ), + ?assertEqual( + {[], [{dir, ""}, {suite, a_suite}, {group, a_group}]}, + ct_executor:split_args([ct_args, {dir, ""}, {suite, a_suite}, {group, a_group}]) + ), + ?assertEqual({[], []}, ct_executor:split_args([ct_args])). diff --git a/prelude/erlang/elp.bxl b/prelude/erlang/elp.bxl new file mode 100644 index 0000000000..1ea73b78f7 --- /dev/null +++ b/prelude/erlang/elp.bxl @@ -0,0 +1,150 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Provide information so that ELP is able to load a BUCK project + +load("@prelude//:paths.bzl", "paths") + +# ------------------ IMPL ------------------ + +def _clean_up_includes(includes): + # - Strip to just dir, not file name + # - Remove duplicates + # Note: Sometimes the buck rule generating the includes has an excludes glob for a directory. + # This flattening will potentially expose excluded files in the directory. + # But we do it, because otherwise the performance in erlang_service parsing is too poor. 
+ include_paths = [_as_path(paths.dirname(p["value"])) for p in includes if p["type"] == "path"] + targets = [t for t in includes if t["type"] == "target"] + return include_paths + targets + +def _get_includes(ctx, includes_target): + return _clean_up_includes([_get_absolute_path(ctx, inc) for inc in includes_target.value()]) + +def _dep_includes(ctx, dep, target_universe): + lookup_val = "{}:{}".format(dep.path, dep.name) + dep_target = target_universe.lookup(lookup_val) + if not dep_target: + return [] + dep_target = dep_target[0] + includes = dep_target.attrs_lazy().get("includes") + + if not includes: + return [] + else: + return _get_includes(ctx, includes) + +def _get_absolute_path(ctx, src) -> dict[str, str]: + """ + Get the absolute path of the thing passed in, which is either an artifact or a target label. + """ + if type(src) == "label": + return _as_target(str(src.raw_target())) + else: + return _as_path(get_path_without_materialization(src, ctx, abs = True)) + +def _elp_config(ctx): + cells = { + cell: cell_path + for cell, cell_path in ctx.audit().cell(aliases = True).items() + } + + included_targets = ctx.cli_args.included_targets + target_universe = ctx.target_universe(included_targets) + + all = ctx.cquery().kind("^(erlang_app|erlang_test)$", ctx.configured_targets(included_targets)) + if ctx.cli_args.deps_target: + all += ctx.cquery().kind("^erlang_app$", ctx.configured_targets(ctx.cli_args.deps_target)) + result = {} + dep_includes_cache = {} # Cache of includes per dependency + for target in all: + label = target.label + label_name = label.raw_target() + deps = target.attrs_lazy().get("deps") + + includes = target.attrs_lazy().get("includes") + if not includes: + includes = [] + else: + includes = _get_includes(ctx, includes) + + if ctx.cli_args.deps_includes: + if deps: + for dep in deps.value(): + if dep in dep_includes_cache: + dep_includes = dep_includes_cache[dep] + else: + dep_includes = _dep_includes(ctx, dep, target_universe) + dep_includes_cache[dep] = dep_includes + includes = includes + dep_includes + apps = target.attrs_lazy().get("applications") + + if apps: + for app in apps.value(): + includes.append(_as_path(str(_file_node_to_path(cells, app.path)))) + + srcs = target.attrs_lazy().get("srcs") + if not srcs: + srcs = [] + else: + srcs = [_get_absolute_path(ctx, src) for src in srcs.value()] + + suite = target.attrs_lazy().get("suite") + if not suite: + suite = None + elif suite.value() == None: + suite = None + else: + suite_info = _get_absolute_path(ctx, suite.value()) + if suite_info["type"] == "path": + suite = suite_info["value"] + else: + suite = None + + includes = _build_output(includes) + srcs = _build_output(srcs) + result[label_name] = dict( + name = target.attrs_lazy().get("name"), + suite = suite, + srcs = srcs, + includes = includes, + labels = target.attrs_lazy().get("labels"), + ) + ctx.output.print_json(result) + +def _file_node_to_path(cells: dict[str, str], file_node) -> str: + cell, path = str(file_node).split("//", 1) + return paths.join(cells[cell], path) + +def _build_output(items: list[dict[str, str]]) -> list[str]: + # we completely ignore targets, since we don't have support for generated files in ELP + paths = _list_dedupe([p["value"] for p in items if p["type"] == "path"]) + return paths + +def _list_dedupe(xs: list[str]) -> list[str]: + return {x: True for x in xs}.keys() + +def _as_path(src): + return {"type": "path", "value": src} + +def _as_target(src): + return {"type": "target", "value": src} + +# ------------------ 
INTERFACE ------------------ + +elp_config = bxl_main( + impl = _elp_config, + cli_args = { + "deps_includes": cli_args.bool(False, doc = "Use include paths from the dependencies too."), + "deps_target": cli_args.option(cli_args.string(), doc = "Target to include deps from, if specified. See corresponding field in .elp.toml"), + "included_targets": cli_args.list(cli_args.string(), doc = "Targets to include in the query. See corresponding field in .elp.toml"), + }, +) + +# Run with `buck2 bxl prelude//erlang/elp.bxl:elp_config` +# e.g. +# buck2 bxl prelude//erlang/elp.bxl:elp_config -- --included_targets cell//... +# buck2 bxl prelude//erlang/elp.bxl:elp_config -- --included_targets cell//... --deps_includes true diff --git a/prelude/erlang/erlang.bzl b/prelude/erlang/erlang.bzl index 3ef89fdb18..01d0c5b123 100644 --- a/prelude/erlang/erlang.bzl +++ b/prelude/erlang/erlang.bzl @@ -89,8 +89,7 @@ def erlang_tests( srcs: list[str] = [], property_tests: list[str] = [], config_files: list[str] = [], - use_default_configs: bool = True, - use_default_deps: bool = True, + common_app_env: dict[str, str] = {}, **common_attributes): """ Generate multiple erlang_test targets based on the `suites` field. @@ -104,7 +103,6 @@ def erlang_tests( srcs = srcs, property_tests = property_tests, config_files = config_files, - use_default_configs = use_default_configs, - use_default_deps = use_default_deps, + common_app_env = common_app_env, **common_attributes ) diff --git a/prelude/erlang/erlang_application.bzl b/prelude/erlang/erlang_application.bzl index e9d3a134bf..41ea6be1f6 100644 --- a/prelude/erlang/erlang_application.bzl +++ b/prelude/erlang/erlang_application.bzl @@ -37,7 +37,6 @@ load( "multidict_projection", "multidict_projection_key", "normalise_metadata", - "str_to_bool", "to_term_args", ) @@ -161,6 +160,14 @@ def _build_erlang_application(ctx: AnalysisContext, toolchain: Toolchain, depend is_private = True, ) + # maybe peek private includes + build_environment = erlang_build.utils.peek_private_includes( + ctx, + toolchain, + build_environment, + dependencies, + ) + # beams build_environment = erlang_build.build_steps.generate_beam_artifacts( ctx, @@ -252,11 +259,8 @@ def _generate_app_file( script, app_info_file, ], + hidden = [output.as_output(), srcs] + ([ctx.attrs.app_src] if ctx.attrs.app_src else []), ) - app_build_cmd.hidden(output.as_output()) - app_build_cmd.hidden(srcs) - if ctx.attrs.app_src: - app_build_cmd.hidden(ctx.attrs.app_src) erlang_build.utils.run_with_env( ctx, toolchain, @@ -291,8 +295,7 @@ def _app_info_content( srcs: list[Artifact], output: Artifact) -> Artifact: """build an app_info.term file that contains the meta information for building the .app file""" - sources_args = convert(srcs) - sources_args.ignore_artifacts() + sources_args = convert(srcs, ignore_artifacts = True) data = { "applications": [ app[ErlangAppInfo].name @@ -366,7 +369,7 @@ def link_output( def _link_srcs_folder(ctx: AnalysisContext) -> dict[str, Artifact]: """Build mapping for the src folder if erlang.include_src is set""" - if not str_to_bool(read_root_config("erlang", "include_src", "False")): + if not ctx.attrs.include_src: return {} srcs = { paths.join("src", src_file.basename): src_file diff --git a/prelude/erlang/erlang_build.bzl b/prelude/erlang/erlang_build.bzl index ad183aba24..5e6bf1cd73 100644 --- a/prelude/erlang/erlang_build.bzl +++ b/prelude/erlang/erlang_build.bzl @@ -264,8 +264,6 @@ def _generate_beam_artifacts( for src in src_artifacts } - _check_beam_uniqueness(beam_mapping, 
build_environment.beams) - # dep files beam_deps = _get_deps_files(ctx, toolchain, anchor, src_artifacts, output_mapping) @@ -287,7 +285,7 @@ def _generate_beam_artifacts( input_mapping = build_environment.input_mapping, ) - dep_info_content = to_term_args({paths.basename(artifact): {"dep_file": dep_file, "path": artifact} for artifact, dep_file in updated_build_environment.deps_files.items()}) + dep_info_content = to_term_args(_build_dep_info_data(updated_build_environment)) dep_info_file = ctx.actions.write(_dep_info_name(toolchain), dep_info_content) for erl in src_artifacts: @@ -295,13 +293,17 @@ def _generate_beam_artifacts( return updated_build_environment -def _check_beam_uniqueness( - local_beams: ModuleArtifactMapping, - global_beams: ModuleArtifactMapping) -> None: - for module in local_beams: - if module in global_beams: - fail("duplicated modules found in build: {}".format([module])) - return None +def _build_dep_info_data(build_environment: BuildEnvironment) -> dict[str, dict[str, Artifact | str]]: + """build input for dependency finalizer, this implements uniqueness checks for headers and beams""" + seen = {} + data = {} + for artifact, dep_file in build_environment.deps_files.items(): + if paths.basename(artifact) in seen: + fail("conflicting artifacts found in build: {} and {}".format(seen[paths.basename(artifact)], artifact)) + else: + seen[paths.basename(artifact)] = artifact + data[paths.basename(artifact)] = {"dep_file": dep_file, "path": artifact} + return data def _generate_chunk_artifacts( ctx: AnalysisContext, @@ -334,7 +336,7 @@ def _generate_chunk_artifacts( input_mapping = build_environment.input_mapping, ) - preprocess_modules = read_root_config("erlang", "edoc_preprocess", "").split() + preprocess_modules = toolchain.edoc_preprocess preprocess_all = "__all__" in preprocess_modules for erl in src_artifacts: @@ -374,11 +376,22 @@ def _deps_key(anchor: Artifact, src: Artifact) -> str: def _get_deps_file(ctx: AnalysisContext, toolchain: Toolchain, src: Artifact) -> Artifact: dependency_analyzer = toolchain.dependency_analyzer dependency_json = ctx.actions.declare_output(_dep_file_name(toolchain, src)) - escript = toolchain.otp_binaries.escript + erl = toolchain.otp_binaries.erl dependency_analyzer_cmd = cmd_args( [ - escript, + erl, + "+A0", + "+S1:1", + "+sbtu", + "-mode", + "minimal", + "-noinput", + "-noshell", + "-run", + "escript", + "start", + "--", dependency_analyzer, src, dependency_json.as_output(), @@ -412,7 +425,7 @@ def _build_xyrl( [ erlc, "-o", - cmd_args(output.as_output()).parent(), + cmd_args(output.as_output(), parent = 1), xyrl, ], ) @@ -439,13 +452,24 @@ def _build_erl( final_dep_file = ctx.actions.declare_output(_dep_final_name(toolchain, src)) finalize_deps_cmd = cmd_args( - toolchain.otp_binaries.escript, + toolchain.otp_binaries.erl, + "+A0", + "+S1:1", + "+sbtu", + "-mode", + "minimal", + "-noinput", + "-noshell", + "-run", + "escript", + "start", + "--", toolchain.dependency_finalizer, src, dep_info_file, final_dep_file.as_output(), + hidden = build_environment.deps_files.values(), ) - finalize_deps_cmd.hidden(build_environment.deps_files.values()) ctx.actions.run( finalize_deps_cmd, category = "dependency_finalizer", @@ -464,12 +488,14 @@ def _build_erl( _dependency_code_paths(build_environment), ), "-o", - cmd_args(outputs[output].as_output()).parent(), + cmd_args(outputs[output].as_output(), parent = 1), src, ], ) - erlc_cmd, mapping = _add_dependencies_to_args(artifacts, final_dep_file, erlc_cmd, build_environment) - erlc_cmd = 
_add_full_dependencies(erlc_cmd, build_environment) + deps_args, mapping = _dependencies_to_args(artifacts, final_dep_file, build_environment) + erlc_cmd.add(deps_args) + full_deps_args = _full_dependencies(build_environment) + erlc_cmd.add(full_deps_args) _run_with_env( ctx, toolchain, @@ -480,7 +506,7 @@ def _build_erl( always_print_stderr = True, ) - ctx.actions.dynamic_output(dynamic = [final_dep_file], inputs = [src], outputs = [output], f = dynamic_lambda) + ctx.actions.dynamic_output(dynamic = [final_dep_file], inputs = [src], outputs = [output.as_output()], f = dynamic_lambda) return None def _build_edoc( @@ -504,7 +530,7 @@ def _build_edoc( "-pa", toolchain.utility_modules, "-o", - cmd_args(output.as_output()).parent(2), + cmd_args(output.as_output(), parent = 2), ], ) @@ -514,11 +540,14 @@ def _build_edoc( args = _erlc_dependency_args(_dependency_include_dirs(build_environment), [], False) eval_cmd.add(args) + eval_cmd_hidden = [] for include in build_environment.includes.values(): - eval_cmd.hidden(include) + eval_cmd_hidden.append(include) for include in build_environment.private_includes.values(): - eval_cmd.hidden(include) + eval_cmd_hidden.append(include) + + eval_cmd.add(cmd_args(hidden = eval_cmd_hidden)) _run_with_env( ctx, @@ -530,13 +559,14 @@ def _build_edoc( ) return None -def _add_dependencies_to_args( +def _dependencies_to_args( artifacts, final_dep_file: Artifact, - args: cmd_args, build_environment: BuildEnvironment) -> (cmd_args, dict[str, (bool, [str, Artifact])]): """Add the transitive closure of all per-file Erlang dependencies as specified in the deps files to the `args` with .hidden. """ + args_hidden = [] + input_mapping = {} deps = artifacts[final_dep_file].read_json() @@ -588,30 +618,31 @@ def _add_dependencies_to_args( else: fail("unrecognized dependency type %s", (dep["type"])) - args.hidden(artifact) + args_hidden.append(artifact) - return args, input_mapping + return cmd_args(hidden = args_hidden), input_mapping -def _add_full_dependencies(erlc_cmd: cmd_args, build_environment: BuildEnvironment) -> cmd_args: +def _full_dependencies(build_environment: BuildEnvironment) -> cmd_args: + erlc_cmd_hidden = [] for artifact in build_environment.full_dependencies: - erlc_cmd.hidden(artifact) - return erlc_cmd + erlc_cmd_hidden.append(artifact) + return cmd_args(hidden = erlc_cmd_hidden) def _dependency_include_dirs(build_environment: BuildEnvironment) -> list[cmd_args]: includes = [ - cmd_args(include_dir_anchor).parent() + cmd_args(include_dir_anchor, parent = 1) for include_dir_anchor in build_environment.private_include_dir ] for include_dir_anchor in build_environment.include_dirs.values(): - includes.append(cmd_args(include_dir_anchor).parent(3)) - includes.append(cmd_args(include_dir_anchor).parent()) + includes.append(cmd_args(include_dir_anchor, parent = 3)) + includes.append(cmd_args(include_dir_anchor, parent = 1)) return includes def _dependency_code_paths(build_environment: BuildEnvironment) -> list[cmd_args]: return [ - cmd_args(ebin_dir_anchor).parent() + cmd_args(ebin_dir_anchor, parent = 1) for ebin_dir_anchor in build_environment.ebin_dirs.values() ] @@ -624,7 +655,7 @@ def _erlc_dependency_args( # A: the whole string would get passed as a single argument, as if it was quoted in CLI e.g. '-I include_path' # ...which the escript cannot parse, as it expects two separate arguments, e.g. 
'-I' 'include_path' - args = cmd_args([]) + args = cmd_args([], ignore_artifacts = True) # build -I options if path_in_arg: @@ -644,8 +675,6 @@ def _erlc_dependency_args( args.add("-pa") args.add(code_path) - args.ignore_artifacts() - return args def _get_erl_opts( @@ -681,9 +710,9 @@ def _get_erl_opts( for parse_transform, (beam, resource_folder) in parse_transforms.items(): args.add( "+{parse_transform, %s}" % (parse_transform,), - cmd_args(beam, format = "-pa{}").parent(), + cmd_args(beam, format = "-pa{}", parent = 1), ) - args.hidden(resource_folder) + args.add(cmd_args(hidden = resource_folder)) # add relevant compile_info manually args.add(cmd_args( @@ -802,6 +831,43 @@ def _run_with_env(ctx: AnalysisContext, toolchain: Toolchain, *args, **kwargs): kwargs["env"] = env ctx.actions.run(*args, **kwargs) +def _peek_private_includes( + ctx: AnalysisContext, + toolchain: Toolchain, + build_environment: BuildEnvironment, + dependencies: ErlAppDependencies, + force_peek: bool = False) -> BuildEnvironment: + # get mutable dict for private includes + new_private_includes = dict(build_environment.private_includes) + new_private_include_dir = list(build_environment.private_include_dir) + + # get private deps from dependencies + for dep in dependencies.values(): + if ErlangAppInfo in dep: + if dep[ErlangAppInfo].private_include_dir: + new_private_include_dir = new_private_include_dir + dep[ErlangAppInfo].private_include_dir[toolchain.name] + new_private_includes.update(dep[ErlangAppInfo].private_includes[toolchain.name]) + if force_peek or ctx.attrs.peek_private_includes: + return BuildEnvironment( + private_includes = new_private_includes, + private_include_dir = new_private_include_dir, + # copied fields + includes = build_environment.includes, + beams = build_environment.beams, + priv_dirs = build_environment.priv_dirs, + include_dirs = build_environment.include_dirs, + ebin_dirs = build_environment.ebin_dirs, + deps_files = build_environment.deps_files, + app_files = build_environment.app_files, + full_dependencies = build_environment.full_dependencies, + app_includes = build_environment.app_includes, + app_beams = build_environment.app_beams, + app_chunks = build_environment.app_chunks, + input_mapping = build_environment.input_mapping, + ) + else: + return build_environment + # export erlang_build = struct( @@ -822,5 +888,6 @@ erlang_build = struct( make_dir_anchor = _make_dir_anchor, build_dir = _build_dir, run_with_env = _run_with_env, + peek_private_includes = _peek_private_includes, ), ) diff --git a/prelude/erlang/erlang_escript.bzl b/prelude/erlang/erlang_escript.bzl index ef3cf834d8..3550b02487 100644 --- a/prelude/erlang/erlang_escript.bzl +++ b/prelude/erlang/erlang_escript.bzl @@ -7,8 +7,9 @@ load("@prelude//:paths.bzl", "paths") load(":erlang_build.bzl", "erlang_build") -load(":erlang_dependencies.bzl", "check_dependencies", "flatten_dependencies") +load(":erlang_dependencies.bzl", "ErlAppDependencies", "check_dependencies", "flatten_dependencies") load(":erlang_info.bzl", "ErlangAppInfo") +load(":erlang_release.bzl", "build_lib_dir") load( ":erlang_toolchain.bzl", "Toolchain", # @unused Used as type @@ -17,43 +18,60 @@ load( ) load(":erlang_utils.bzl", "action_identifier", "to_term_args") -def create_escript( - ctx: AnalysisContext, - spec_file: Artifact, - toolchain: Toolchain, - files: list[Artifact], - output: Artifact, - escript_name: str) -> None: - """ build the escript with the escript builder tool - """ - script = toolchain.escript_builder - - escript_build_cmd = 
cmd_args( - [ - toolchain.otp_binaries.escript, - script, - spec_file, - ], - ) - escript_build_cmd.hidden(output.as_output()) - escript_build_cmd.hidden(files) - erlang_build.utils.run_with_env( - ctx, - toolchain, - escript_build_cmd, - category = "escript", - identifier = action_identifier(toolchain, escript_name), - ) - return None - def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: # select the correct tools from the toolchain - toolchain_name = get_primary(ctx) toolchain = select_toolchains(ctx)[get_primary(ctx)] # collect all dependencies dependencies = flatten_dependencies(ctx, check_dependencies(ctx.attrs.deps, [ErlangAppInfo])) + if ctx.attrs.bundled: + return _bundled_escript_impl(ctx, dependencies, toolchain) + else: + return _unbundled_escript_impl(ctx, dependencies, toolchain) + +def _unbundled_escript_impl(ctx: AnalysisContext, dependencies: ErlAppDependencies, toolchain: Toolchain) -> list[Provider]: + if ctx.attrs.resources: + fail("resources are not supported with unbundled escripts, add them to an applications priv/ directory instead") + + escript_name = _escript_name(ctx) + + lib_dir = build_lib_dir( + ctx, + toolchain, + escript_name, + dependencies, + ) + + config_files = _escript_config_files(ctx) + escript_trampoline = build_escript_unbundled_trampoline(ctx, toolchain, config_files) + + trampoline = { + "run.escript": escript_trampoline, + } + + all_outputs = {} + for outputs in [lib_dir, trampoline]: + all_outputs.update(outputs) + + for config_file in config_files: + all_outputs[config_file.short_path] = config_file + + output = ctx.actions.symlinked_dir( + escript_name, + all_outputs, + ) + + cmd = cmd_args([ + toolchain.escript_trampoline, + output, + toolchain.otp_binaries.escript, + ]) + + return [DefaultInfo(default_output = output), RunInfo(cmd)] + +def _bundled_escript_impl(ctx: AnalysisContext, dependencies: ErlAppDependencies, toolchain: Toolchain) -> list[Provider]: + toolchain_name = get_primary(ctx) artifacts = {} for dep in dependencies.values(): @@ -81,15 +99,19 @@ def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: fail("multiple artifacts defined for path %s", (artifact.short_path)) artifacts[artifact.short_path] = artifact - if ctx.attrs.script_name: - escript_name = ctx.attrs.script_name - else: - escript_name = ctx.attrs.name + ".escript" + escript_name = _escript_name(ctx) output = ctx.actions.declare_output(escript_name) args = ctx.attrs.emu_args - if ctx.attrs.main_module: - args += ["-escript", "main", ctx.attrs.main_module] + + config_files = _escript_config_files(ctx) + for config_file in config_files: + artifacts[config_file.short_path] = config_file + + escript_trampoline = build_escript_bundled_trampoline(ctx, toolchain, config_files) + artifacts[escript_trampoline.basename] = escript_trampoline + + args += ["-escript", "main", "erlang_escript_trampoline"] escript_build_spec = { "artifacts": artifacts, @@ -116,8 +138,136 @@ def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: RunInfo(escript_cmd), ] +def create_escript( + ctx: AnalysisContext, + spec_file: Artifact, + toolchain: Toolchain, + files: list[Artifact], + output: Artifact, + escript_name: str) -> None: + """ build the escript with the escript builder tool + """ + script = toolchain.escript_builder + + escript_build_cmd = cmd_args( + [ + toolchain.otp_binaries.escript, + script, + spec_file, + ], + hidden = [ + output.as_output(), + files, + ], + ) + + erlang_build.utils.run_with_env( + ctx, + toolchain, + escript_build_cmd, + category = 
"escript", + identifier = action_identifier(toolchain, escript_name), + ) + return None + +def _escript_name(ctx: AnalysisContext) -> str: + if ctx.attrs.script_name: + return ctx.attrs.script_name + else: + return ctx.attrs.name + ".escript" + +def _main_module(ctx: AnalysisContext) -> str: + if ctx.attrs.main_module: + return ctx.attrs.main_module + else: + return ctx.attrs.name + +def build_escript_unbundled_trampoline(ctx: AnalysisContext, toolchain, config_files: list[Artifact]) -> Artifact: + data = cmd_args() + + data.add("#!/usr/bin/env escript") + data.add("%% -*- erlang -*-") + data.add("%%! {}".format(" ".join(ctx.attrs.emu_args))) + + data.add("-module('{}').".format(_escript_name(ctx))) + data.add("-export([main/1]).") + data.add("main(Args) ->") + data.add("EscriptDir = filename:dirname(escript:script_name()),") + data.add(_config_files_code_to_erl(config_files)) + data.add(' EBinDirs = filelib:wildcard(filename:join([EscriptDir, "lib", "*", "ebin"])),') + data.add(" code:add_paths(EBinDirs),") + data.add(" {}:main(Args).".format(_main_module(ctx))) + data.add(_parse_bin()) + + return ctx.actions.write( + paths.join(erlang_build.utils.build_dir(toolchain), "run.escript"), + data, + is_executable = True, + ) + +def build_escript_bundled_trampoline(ctx: AnalysisContext, toolchain, config_files: list[Artifact]) -> Artifact: + data = cmd_args() + + data.add("-module('erlang_escript_trampoline').") + data.add("-export([main/1]).") + data.add("main(Args) ->") + data.add("EscriptDir = escript:script_name(),") + data.add(_config_files_code_to_erl(config_files)) + data.add(" {}:main(Args).".format(_main_module(ctx))) + data.add(_parse_bin()) + escript_trampoline_erl = ctx.actions.write( + paths.join(erlang_build.utils.build_dir(toolchain), "erlang_escript_trampoline.erl"), + data, + ) + my_output = ctx.actions.declare_output("erlang_escript_trampoline.beam") + + ctx.actions.run( + cmd_args( + toolchain.otp_binaries.erlc, + "-o", + cmd_args(my_output.as_output(), parent = 1), + escript_trampoline_erl, + ), + category = "erlc_escript_trampoline", + ) + + return my_output + def _ebin_path(file: Artifact, app_name: str) -> str: return paths.join(app_name, "ebin", file.basename) def _priv_path(app_name: str) -> str: return paths.join(app_name, "priv") + +def _escript_config_files(ctx: AnalysisContext) -> list[Artifact]: + config_files = [] + for config_dep in ctx.attrs.configs: + for artifact in config_dep[DefaultInfo].default_outputs + config_dep[DefaultInfo].other_outputs: + (_, ext) = paths.split_extension(artifact.short_path) + if ext == ".config": + config_files.append(artifact) + return config_files + +def _config_files_code_to_erl(config_files: list[Artifact]) -> list[str]: + cmd = [] + cmd.append("ConfigFiles = [") + for i in range(0, len(config_files)): + cmd.append(cmd_args("\"", config_files[i].short_path, "\"", delimiter = "")) + if i < len(config_files) - 1: + cmd.append(",") + cmd.append("],") + cmd.append("[begin ") + cmd.append("{ok, AppConfigBin, _FullName} = erl_prim_loader:get_file(filename:join(EscriptDir, ConfigFile)),") + cmd.append("{ok, AppConfig} = parse_bin(AppConfigBin), ") + cmd.append(" ok = application:set_env(AppConfig, [{persistent, true}])") + cmd.append("end || ConfigFile <- ConfigFiles],") + return cmd + +def _parse_bin() -> str: + return """ +parse_bin(<<"">>) -> + []; +parse_bin(Bin) -> + {ok, Tokens, _} = erl_scan:string(binary_to_list(Bin)), + erl_parse:parse_term(Tokens). 
+ """ diff --git a/prelude/erlang/erlang_info.bzl b/prelude/erlang/erlang_info.bzl index 026ce179f6..f1fab0250f 100644 --- a/prelude/erlang/erlang_info.bzl +++ b/prelude/erlang/erlang_info.bzl @@ -91,6 +91,7 @@ ErlangToolchainInfo = provider( "dependency_finalizer": provider_field(typing.Any, default = None), # trampoline rerouting stdout to stderr "erlc_trampoline": provider_field(typing.Any, default = None), + "escript_trampoline": provider_field(typing.Any, default = None), # name to parse_transform artifacts mapping for core parse_transforms (that are always used) and # user defines ones "core_parse_transforms": provider_field(typing.Any, default = None), @@ -106,6 +107,7 @@ ErlangToolchainInfo = provider( # edoc-generating escript "edoc": provider_field(typing.Any, default = None), "edoc_options": provider_field(typing.Any, default = None), + "edoc_preprocess": provider_field(list[str], default = []), # beams we need for various reasons "utility_modules": provider_field(typing.Any, default = None), # env to be set for toolchain invocations diff --git a/prelude/erlang/erlang_release.bzl b/prelude/erlang/erlang_release.bzl index 1da7d2c3ef..d50ae8f2ff 100644 --- a/prelude/erlang/erlang_release.bzl +++ b/prelude/erlang/erlang_release.bzl @@ -75,7 +75,7 @@ def _build_primary_release(ctx: AnalysisContext, apps: ErlAppDependencies) -> li def _build_release(ctx: AnalysisContext, toolchain: Toolchain, apps: ErlAppDependencies) -> dict[str, Artifact]: # OTP base structure - lib_dir = _build_lib_dir(ctx, toolchain, apps) + lib_dir = build_lib_dir(ctx, toolchain, _relname(ctx), apps) boot_scripts = _build_boot_script(ctx, toolchain, lib_dir["lib"]) # release specific variables in bin/release_variables @@ -100,12 +100,15 @@ def _build_release(ctx: AnalysisContext, toolchain: Toolchain, apps: ErlAppDepen return all_outputs -def _build_lib_dir(ctx: AnalysisContext, toolchain: Toolchain, all_apps: ErlAppDependencies) -> dict[str, Artifact]: +def build_lib_dir( + ctx: AnalysisContext, + toolchain: Toolchain, + release_name: str, + all_apps: ErlAppDependencies) -> dict[str, Artifact]: """Build lib dir according to OTP specifications. .. 
seealso:: `OTP Design Principles Release Structure `_ """ - release_name = _relname(ctx) build_dir = erlang_build.utils.build_dir(toolchain) link_spec = { @@ -195,12 +198,14 @@ def _build_boot_script( toolchain.otp_binaries.escript, script, spec_file, - cmd_args(release_resource.as_output()).parent(), + cmd_args(release_resource.as_output(), parent = 1), + ], + hidden = [ + start_script.as_output(), + boot_script.as_output(), + lib_dir, ], ) - boot_script_build_cmd.hidden(start_script.as_output()) - boot_script_build_cmd.hidden(boot_script.as_output()) - boot_script_build_cmd.hidden(lib_dir) erlang_build.utils.run_with_env( ctx, diff --git a/prelude/erlang/erlang_shell.bzl b/prelude/erlang/erlang_shell.bzl index 26e8428238..4d2209b2c3 100644 --- a/prelude/erlang/erlang_shell.bzl +++ b/prelude/erlang/erlang_shell.bzl @@ -56,11 +56,11 @@ def _build_run_info( content.add("") shell_script = ctx.actions.write("start_shell.sh", content) - shell_cmd = cmd_args(["/usr/bin/env", "bash", shell_script]) - - # depend on input paths - for code_path in app_paths + additional_paths: - shell_cmd.hidden(code_path) + shell_cmd = cmd_args( + ["/usr/bin/env", "bash", shell_script], + # depend on input paths + hidden = app_paths + additional_paths, + ) return RunInfo(shell_cmd) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index c00070932e..1e681b340f 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -41,11 +41,10 @@ def erlang_tests_macro( deps: list[str] = [], resources: list[str] = [], property_tests: list[str] = [], - config_files: list[str] = [], srcs: list[str] = [], - use_default_configs: bool = True, - use_default_deps: bool = True, - **common_attributes: dict) -> None: + prefix: str | None = None, + generated_app_labels: list[str] = [], + **common_attributes) -> None: """ Generate multiple erlang_test targets based on the `suites` field. Also adds the default 'config' and 'deps' from the buck2 config. @@ -53,7 +52,6 @@ def erlang_tests_macro( resource targets for files in the suite associated _data folder. 
""" deps = [normalize_application(dep) for dep in deps] - config_files = list(config_files) if not suites: return @@ -67,26 +65,11 @@ def erlang_tests_macro( erlang_app_rule( name = srcs_app, srcs = srcs, - labels = ["generated", "test_application", "test_utils"], + labels = generated_app_labels, applications = app_deps, ) deps.append(":" + srcs_app) - # add default apps - - default_deps = read_root_config("erlang", "erlang_tests_default_apps", None) if use_default_deps else None - default_config_files = read_root_config("erlang", "erlang_tests_default_config", None) if use_default_configs else None - trampoline = read_root_config("erlang", "erlang_tests_trampoline", None) if use_default_configs else None - providers = read_root_config("erlang", "erlang_test_providers", "") if use_default_configs else "" - defaultAnnotationMFA = "artifact_annotations:default_annotation/1" - annotationsMFA = read_root_config("erlang", "test_artifacts_annotation_mfa", defaultAnnotationMFA) if use_default_configs else defaultAnnotationMFA - - if default_config_files: - config_files += default_config_files.split() - - if default_deps != None: - deps += default_deps.split() - target_resources = list(resources) if not property_tests: @@ -95,11 +78,7 @@ def erlang_tests_macro( if prop_target: property_tests = [prop_target] - common_attributes["labels"] = common_attributes.get("labels", []) + ["tpx-enable-artifact-reporting", "test-framework=39:erlang_common_test"] - - additional_labels = read_config("erlang", "test_labels", None) - if additional_labels != None: - common_attributes["labels"] += additional_labels.split() + common_attributes["labels"] = common_attributes.get("labels", []) common_attributes["labels"] = list_dedupe(common_attributes["labels"]) @@ -117,17 +96,16 @@ def erlang_tests_macro( suite_resource = [target for target in target_resources] suite_resource.append(data_target) + if prefix != None: + suite_name = "{}_{}".format(prefix, suite_name) + # forward resources and deps fields and generate erlang_test target erlang_test_rule( name = suite_name, suite = suite, deps = deps, resources = suite_resource, - config_files = config_files, property_tests = property_tests, - _trampoline = trampoline, - _providers = providers, - _artifact_annotation_mfa = annotationsMFA, **common_attributes ) @@ -145,26 +123,22 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: # prepare build environment pre_build_environment = erlang_build.prepare_build_environment(ctx, primary_toolchain, dependencies) - new_private_include_dir = pre_build_environment.private_include_dir - - # pre_build_environment.private_includes is immutable, that's how we change that. - new_private_includes = {a: b for (a, b) in pre_build_environment.private_includes.items()} - - #Pull private deps from dependencies - for dep in dependencies.values(): - if ErlangAppInfo in dep: - if dep[ErlangAppInfo].private_include_dir: - new_private_include_dir = new_private_include_dir + dep[ErlangAppInfo].private_include_dir[primary_toolchain_name] - new_private_includes.update(dep[ErlangAppInfo].private_includes[primary_toolchain_name]) + pre_build_environment = erlang_build.utils.peek_private_includes( + ctx, + primary_toolchain, + pre_build_environment, + dependencies, + force_peek = True, + ) # Records are immutable, hence we need to create a new record from the previous one. 
build_environment = BuildEnvironment( includes = pre_build_environment.includes, - private_includes = new_private_includes, + private_includes = pre_build_environment.private_includes, beams = pre_build_environment.beams, priv_dirs = pre_build_environment.priv_dirs, include_dirs = pre_build_environment.include_dirs, - private_include_dir = new_private_include_dir, + private_include_dir = pre_build_environment.private_include_dir, ebin_dirs = pre_build_environment.ebin_dirs, deps_files = pre_build_environment.deps_files, app_files = pre_build_environment.app_files, @@ -180,15 +154,14 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: # Config files for ct config_files = [config_file[DefaultInfo].default_outputs[0] for config_file in ctx.attrs.config_files] - test_binary = ctx.attrs._test_binary[DefaultInfo].default_outputs + test_binary_cmd_args = ctx.attrs._test_binary[RunInfo] trampoline = ctx.attrs._trampoline cmd = cmd_args([]) if trampoline: cmd.add(trampoline[RunInfo]) - cmd.add(primary_toolchain.otp_binaries.escript) - cmd.add(test_binary) + cmd.add(test_binary_cmd_args) suite = ctx.attrs.suite suite_name = module_name(suite) @@ -218,13 +191,16 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: ) cmd.add(test_info_file) + hidden_args = [] + default_info = _build_default_info(dependencies, output_dir) for output_artifact in default_info.other_outputs: - cmd.hidden(output_artifact) + hidden_args.append(output_artifact) for config_file in config_files: - cmd.hidden(config_file) + hidden_args.append(config_file) - cmd.hidden(output_dir) + hidden_args.append(output_dir) + cmd.add(cmd_args(hidden = hidden_args)) # prepare shell dependencies additional_paths = [ @@ -258,7 +234,7 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: type = "erlang_test", command = [cmd], env = ctx.attrs.env, - labels = ["tpx-fb-test-type=16"] + ctx.attrs.labels, + labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, run_from_project_root = True, use_project_relative_paths = True, @@ -293,11 +269,13 @@ def _write_test_info_file( erl_cmd: [cmd_args, Artifact]) -> Artifact: tests_info = { "artifact_annotation_mfa": ctx.attrs._artifact_annotation_mfa, + "common_app_env": ctx.attrs.common_app_env, "config_files": config_files, "ct_opts": ctx.attrs._ct_opts, "dependencies": _list_code_paths(dependencies), "erl_cmd": cmd_args(['"', cmd_args(erl_cmd, delimiter = " "), '"'], delimiter = ""), "extra_ct_hooks": ctx.attrs.extra_ct_hooks, + "extra_flags": ctx.attrs.extra_erl_flags, "providers": ctx.attrs._providers, "test_dir": test_dir, "test_suite": test_suite, diff --git a/prelude/erlang/erlang_toolchain.bzl b/prelude/erlang/erlang_toolchain.bzl index 3bf9b2d796..d1434f7597 100644 --- a/prelude/erlang/erlang_toolchain.bzl +++ b/prelude/erlang/erlang_toolchain.bzl @@ -41,6 +41,7 @@ Toolchain = record( dependency_analyzer = field(Artifact), dependency_finalizer = field(Artifact), erlc_trampoline = field(Artifact), + escript_trampoline = field(Artifact), escript_builder = field(Artifact), otp_binaries = field(Tools), release_variables_builder = field(Artifact), @@ -50,6 +51,7 @@ Toolchain = record( parse_transforms_filters = field(dict[str, list[str]]), edoc = field(Artifact), edoc_options = field(list[str]), + edoc_preprocess = field(list[str]), utility_modules = field(Artifact), env = field(dict[str, str]), ) @@ -64,6 +66,7 @@ ToolchainUtillInfo = provider( "dependency_finalizer": provider_field(typing.Any, default = None), "edoc": provider_field(typing.Any, default = None), 
"erlc_trampoline": provider_field(typing.Any, default = None), + "escript_trampoline": provider_field(typing.Any, default = None), "escript_builder": provider_field(typing.Any, default = None), "release_variables_builder": provider_field(typing.Any, default = None), "include_erts": provider_field(typing.Any, default = None), @@ -96,6 +99,7 @@ def _multi_version_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: dependency_finalizer = toolchain_info.dependency_finalizer, erl_opts = toolchain_info.erl_opts, erlc_trampoline = toolchain_info.erlc_trampoline, + escript_trampoline = toolchain_info.escript_trampoline, escript_builder = toolchain_info.escript_builder, otp_binaries = toolchain_info.otp_binaries, release_variables_builder = toolchain_info.release_variables_builder, @@ -105,6 +109,7 @@ def _multi_version_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: parse_transforms_filters = toolchain_info.parse_transforms_filters, edoc = toolchain_info.edoc, edoc_options = toolchain_info.edoc_options, + edoc_preprocess = toolchain_info.edoc_preprocess, utility_modules = toolchain_info.utility_modules, env = toolchain_info.env, ) @@ -124,9 +129,6 @@ multi_version_toolchain_rule = rule( is_toolchain_rule = True, ) -def as_target(name: str) -> str: - return ":" + name - def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: """ rule for erlang toolchain """ @@ -135,14 +137,13 @@ def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: erl_opts = ctx.attrs.erl_opts.split() emu_flags = ctx.attrs.emu_flags.split() edoc_options = ctx.attrs.edoc_options.split() + edoc_preprocess = ctx.attrs.edoc_preprocess.split() # get otp binaries binaries_info = ctx.attrs.otp_binaries[ErlangOTPBinariesInfo] erl = cmd_args([binaries_info.erl] + emu_flags) - erlc = cmd_args(binaries_info.erlc) - escript = cmd_args(binaries_info.escript) - erlc.hidden(binaries_info.erl) - escript.hidden(binaries_info.erl) + erlc = cmd_args(binaries_info.erlc, hidden = binaries_info.erl) + escript = cmd_args(binaries_info.escript, hidden = binaries_info.erl) tools_binaries = ToolsBinaries( erl = binaries_info.erl, erlc = binaries_info.erl, @@ -188,6 +189,7 @@ def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: env = ctx.attrs.env, emu_flags = emu_flags, erlc_trampoline = utils.erlc_trampoline, + escript_trampoline = utils.escript_trampoline, escript_builder = utils.escript_builder, otp_binaries = otp_binaries, release_variables_builder = utils.release_variables_builder, @@ -197,6 +199,7 @@ def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: parse_transforms_filters = ctx.attrs.parse_transforms_filters, edoc = utils.edoc, edoc_options = edoc_options, + edoc_preprocess = edoc_preprocess, utility_modules = utility_modules, ), ] @@ -266,7 +269,7 @@ def _gen_parse_transform_beam( erlc, "+deterministic", "-o", - cmd_args(output.as_output()).parent(), + cmd_args(output.as_output(), parent = 1), src, ]) ctx.actions.run(cmd, category = "erlc", identifier = src.short_path) @@ -277,6 +280,7 @@ config_erlang_toolchain_rule = rule( attrs = { "core_parse_transforms": attrs.list(attrs.dep(), default = ["@prelude//erlang/toolchain:transform_project_root"]), "edoc_options": attrs.string(default = ""), + "edoc_preprocess": attrs.string(default = ""), "emu_flags": attrs.string(default = ""), "env": attrs.dict(key = attrs.string(), value = attrs.string(), default = {}), "erl_opts": attrs.string(default = ""), @@ -302,7 +306,7 @@ def _gen_util_beams( erlc, 
"+deterministic", "-o", - cmd_args(output.as_output()).parent(), + cmd_args(output.as_output(), parent = 1), src, ], category = "erlc", @@ -357,6 +361,7 @@ def _toolchain_utils(ctx: AnalysisContext) -> list[Provider]: dependency_finalizer = ctx.attrs.dependency_finalizer, edoc = ctx.attrs.edoc, erlc_trampoline = ctx.attrs.erlc_trampoline, + escript_trampoline = ctx.attrs.escript_trampoline, escript_builder = ctx.attrs.escript_builder, release_variables_builder = ctx.attrs.release_variables_builder, include_erts = ctx.attrs.include_erts, @@ -375,6 +380,7 @@ toolchain_utilities = rule( "edoc": attrs.source(), "erlc_trampoline": attrs.source(), "escript_builder": attrs.source(), + "escript_trampoline": attrs.source(), "include_erts": attrs.source(), "release_variables_builder": attrs.source(), "utility_modules": attrs.list(attrs.source()), diff --git a/prelude/erlang/erlang_utils.bzl b/prelude/erlang/erlang_utils.bzl index dcb20b3dfe..6de4ed43dd 100644 --- a/prelude/erlang/erlang_utils.bzl +++ b/prelude/erlang/erlang_utils.bzl @@ -20,13 +20,14 @@ def normalise_metadata(data: [str, list[str]]) -> [cmd_args, list[cmd_args]]: def to_term_args(data: typing.Any) -> cmd_args: """ convert nested lists/tuple/map data structure to Erlang Term cmd_args """ - args = cmd_args([]) - args.add(cmd_args([ - convert(data), - ".", - ], delimiter = "")) - args.add("") - return args + + return cmd_args( + cmd_args([ + convert(data), + ".", + ], delimiter = ""), + "", + ) # paths def app_file(ctx: AnalysisContext) -> str: @@ -48,7 +49,7 @@ build_paths = struct( linktree = linktree, ) -def convert(data: typing.Any) -> cmd_args: +def convert(data: typing.Any, ignore_artifacts: bool = False) -> cmd_args: """ converts a lists/tuple/map data structure to a sub-term that can be embedded in another to_term_args or convert """ if type(data) == "list": @@ -64,57 +65,50 @@ def convert(data: typing.Any) -> cmd_args: elif type(data) == "bool": return convert_bool(data) - args = cmd_args([]) - args.add(cmd_args(["\"", data, "\""], delimiter = "")) - return args + return cmd_args( + cmd_args(["\"", data, "\""], delimiter = ""), + ignore_artifacts = ignore_artifacts, + ) # internal def convert_list(ls: list, ob: str = "[", cb: str = "]") -> cmd_args: - args = cmd_args([]) - args.add(ob) + args = [] + args.append(ob) if len(ls) >= 1: - args.add(cmd_args([ + args.append(cmd_args([ convert(ls[0]), ], delimiter = "")) for item in ls[1:]: - args.add(cmd_args([ + args.append(cmd_args([ ",", convert(item), ], delimiter = "")) - args.add(cb) - return args + args.append(cb) + return cmd_args(args) def convert_dict(dt: dict) -> cmd_args: - args = cmd_args([]) - args.add("#{") + args = [] + args.append("#{") items = list(dt.items()) if len(items) >= 1: k, v = items[0] - args.add(cmd_args([ + args.append(cmd_args([ convert(k), "=>", convert(v), ], delimiter = "")) for k, v in items[1:]: - args.add(cmd_args([ + args.append(cmd_args([ ",", convert(k), "=>", convert(v), ], delimiter = "")) - args.add("}") - return args - -def convert_args(data: cmd_args) -> cmd_args: - args = cmd_args() - args.add("\"") - args.add(cmd_args(data, delimiter = " ")) - args.add("\"") - return args + args.append("}") + return cmd_args(args) def convert_string(st: str) -> cmd_args: - args = cmd_args() - return args.add(cmd_args(["\"", st.replace("\"", "\\\""), "\""], delimiter = "")) + return cmd_args(cmd_args(["\"", st.replace("\"", "\\\""), "\""], delimiter = "")) def convert_bool(bl: bool) -> cmd_args: if bl: @@ -141,15 +135,6 @@ def action_identifier(toolchain: 
Toolchain, name: str) -> str: """builds an action identifier parameterized by the toolchain""" return "%s(%s)" % (name, toolchain.name) -def str_to_bool(value: str) -> bool: - """convert string representation of bool to bool""" - if value == "True": - return True - elif value == "False": - return False - else: - fail("{} is not a valid boolean value") - def preserve_structure(path: str) -> dict[str, list[str]]: """Return a mapping from a path that preserves the filestructure relative to the path.""" all_files = glob([paths.join(path, "**")]) diff --git a/prelude/erlang/shell/BUCK.v2 b/prelude/erlang/shell/BUCK.v2 index 5f86cd6414..7d018bbe2b 100644 --- a/prelude/erlang/shell/BUCK.v2 +++ b/prelude/erlang/shell/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "buck2_shell_utils", srcs = glob(["src/*.erl"]), diff --git a/prelude/erlang/toolchain/BUCK.v2 b/prelude/erlang/toolchain/BUCK.v2 index 3e0d26ce0b..bb177013ac 100644 --- a/prelude/erlang/toolchain/BUCK.v2 +++ b/prelude/erlang/toolchain/BUCK.v2 @@ -1,4 +1,9 @@ load("@prelude//erlang:erlang_toolchain.bzl", "erlang_parse_transform", "toolchain_resources_internal", "toolchain_utilities") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() erlang_parse_transform( name = "transform_project_root", @@ -27,11 +32,13 @@ toolchain_utilities( edoc = "edoc_cli.escript", erlc_trampoline = "erlc_trampoline.sh", escript_builder = "escript_builder.escript", + escript_trampoline = "escript_trampoline.sh", include_erts = "include_erts.escript", release_variables_builder = "release_variables_builder.escript", utility_modules = [ "edoc_doclet_chunks.erl", "edoc_report.erl", + "epp_dodger.erl", ], visibility = ["PUBLIC"], ) diff --git a/prelude/erlang/toolchain/boot_script_builder.escript b/prelude/erlang/toolchain/boot_script_builder.escript index 06f00be014..5d853c13c5 100644 --- a/prelude/erlang/toolchain/boot_script_builder.escript +++ b/prelude/erlang/toolchain/boot_script_builder.escript @@ -24,8 +24,6 @@ -export([main/1]). --mode(compile). - -define(EXITSUCCESS, 0). -define(EXITERROR, 1). diff --git a/prelude/erlang/toolchain/dependency_finalizer.escript b/prelude/erlang/toolchain/dependency_finalizer.escript index 2f10861302..0a10dcc64d 100644 --- a/prelude/erlang/toolchain/dependency_finalizer.escript +++ b/prelude/erlang/toolchain/dependency_finalizer.escript @@ -1,10 +1,14 @@ %%% % @format +%%% Copyright (c) Meta Platforms, Inc. and affiliates. +%%% +%%% This source code is licensed under both the MIT license found in the +%%% LICENSE-MIT file in the root directory of this source tree and the Apache +%%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%%% of this source tree. -module(dependency_finalizer). -author("loscher@meta.com"). --mode(compile). - -spec main([string()]) -> ok | no_return(). 
main([Source, InFile]) -> do(Source, InFile, stdout); @@ -45,7 +49,6 @@ collect_dependencies([], _, _, Acc) -> collect_dependencies([Key | Rest], DepFiles, Visited, Acc) -> case DepFiles of #{Key := #{"dep_file" := DepFile}} -> - io:format("~p~n", [file:consult(DepFile)]), {ok, [Dependencies]} = file:consult(DepFile), {NextKeys, NextVisited, NextAcc} = lists:foldl( fun(#{"file" := File} = Dep, {KeysAcc, VisitedAcc, DepAcc}) -> diff --git a/prelude/erlang/toolchain/edoc_cli.escript b/prelude/erlang/toolchain/edoc_cli.escript index ee01eac47a..ab7cf6cdd2 100644 --- a/prelude/erlang/toolchain/edoc_cli.escript +++ b/prelude/erlang/toolchain/edoc_cli.escript @@ -21,8 +21,6 @@ -module(edoc_cli). -export([main/1]). --mode(compile). - main([]) -> print(usage()); main(Args) -> @@ -87,7 +85,7 @@ verify_files_exist(#{files := Files, out_dir := OutputDir}) -> true -> true; false -> - io:format(standard_error, "error: coudn't generate ~s~n", [ChunkPath]), + io:format(standard_error, "error: couldn't generate ~s~n", [ChunkPath]), false end end, diff --git a/prelude/erlang/toolchain/epp_dodger.erl b/prelude/erlang/toolchain/epp_dodger.erl new file mode 100644 index 0000000000..52193e10bc --- /dev/null +++ b/prelude/erlang/toolchain/epp_dodger.erl @@ -0,0 +1,944 @@ +%% A temporary port of the official OTP epp_dodger from OTP 27, +%% so that EDoc can also be computed for OTP 26 in presence of the +%% maybe operator. See https://github.com/erlang/otp/issues/7266 +%% ===================================================================== +%% Licensed under the Apache License, Version 2.0 (the "License"); you may +%% not use this file except in compliance with the License. You may obtain +%% a copy of the License at +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% Alternatively, you may use this file under the terms of the GNU Lesser +%% General Public License (the "LGPL") as published by the Free Software +%% Foundation; either version 2.1, or (at your option) any later version. +%% If you wish to allow use of your version of this file only under the +%% terms of the LGPL, you should delete the provisions above and replace +%% them with the notice and other provisions required by the LGPL; see +%% . If you do not delete the provisions +%% above, a recipient may use your version of this file under the terms of +%% either the Apache License or the LGPL. +%% +%% @copyright 2001-2006 Richard Carlsson +%% @author Richard Carlsson +%% @end +%% ===================================================================== + +%% @doc `epp_dodger' - bypasses the Erlang preprocessor. +%% +%%

<p>This module tokenises and parses most Erlang source code without
+%% expanding preprocessor directives and macro applications, as long as
+%% these are syntactically "well-behaved". Because the normal parse
+%% trees of the `erl_parse' module cannot represent these things
+%% (normally, they are expanded by the Erlang preprocessor {@link
+%% //stdlib/epp} before the parser sees them), an extended syntax tree
+%% is created, using the {@link erl_syntax} module.</p>

+ + +%% NOTES: +%% +%% * It's OK if the result does not parse - then at least nothing +%% strange happens, and the user can resort to full preprocessing. +%% However, we must avoid generating a token stream that is accepted by +%% the parser, but has a different meaning than the intended. A typical +%% example is when someone uses token-level string concatenation with +%% macros, as in `"foo" ?bar' (where `?bar' expands to a string). If we +%% replace the tokens `? bar' with `( ... )', to preserve precedence, +%% the result will be parsed as an application `"foo" ( ... )' and cause +%% trouble later on. We must detect such cases and report an error. +%% +%% * It is pointless to add a mechanism for tracking which macros are +%% known to take arguments, and which are known to take no arguments, +%% since a lot of the time we will not have seen the macro definition +%% anyway (it's usually in a header file). Hence, we try to use +%% heuristics instead. In most cases, the token sequence `? foo (' +%% indicates that it is a call of a macro that is supposed to take +%% arguments, but e.g., in the context `: ? foo (', the argument list +%% typically belongs to a remote function call, as in `m:?f(...)' and +%% should be parsed as `m:(?f)(...)' unless it is actually a try-clause +%% pattern such as `throw:?f(...) ->'. +%% +%% * We do our best to make macros without arguments pass the parsing +%% stage transparently. Atoms are accepted in most contexts, but +%% variables are not, so we use only atoms to encode these macros. +%% Sadly, the parsing sometimes discards even the location info from +%% atom tokens, so we can only use the actual characters for this. +%% +%% * We recognize `?m(...' at the start of a form and prevent this from +%% being interpreted as a macro with arguments, since it is probably a +%% function definition. Likewise with attributes `-?m(...'. + +-module(epp_dodger). + +-export([parse_file/1, quick_parse_file/1, parse_file/2, + quick_parse_file/2, parse/1, quick_parse/1, parse/2, + quick_parse/2, parse/3, quick_parse/3, parse_form/2, + parse_form/3, quick_parse_form/2, quick_parse_form/3, + format_error/1, tokens_to_string/1]). + + +%% The following should be: 1) pseudo-uniquely identifiable, and 2) +%% cause nice looking error messages when the parser has to give up. + +-define(macro_call, '? ('). +-define(atom_prefix, "? "). +-define(var_prefix, "?,"). +-define(pp_form, '?preprocessor declaration?'). + + +%% @type errorinfo() = //stdlib/erl_scan:error_info(). +%% +%% This is a so-called Erlang I/O ErrorInfo structure; see the {@link +%% //stdlib/io} module for details. + +-type errorinfo() :: erl_scan:error_info(). + +-type option() :: atom() | {atom(), term()}. + +%% ===================================================================== +%% @spec parse_file(File) -> {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv parse_file(File, []) + +-spec parse_file(file:filename()) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +parse_file(File) -> + parse_file(File, []). + +%% @spec parse_file(File, Options) -> {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Reads and parses a file. If successful, `{ok, Forms}' +%% is returned, where `Forms' is a list of abstract syntax +%% trees representing the "program forms" of the file (cf. +%% `erl_syntax:is_form/1'). 
Otherwise, `{error, errorinfo()}' is
+%% returned, typically if the file could not be opened. Note that
+%% parse errors show up as error markers in the returned list of
+%% forms; they do not cause this function to fail or return
+%% `{error, errorinfo()}'.
+%%
+%% Options:
+%% <dl>
+%%   <dt>{@type {no_fail, boolean()@}}</dt>
+%%   <dd>If `true', this makes `epp_dodger' replace any program forms
+%%   that could not be parsed with nodes of type `text' (see {@link
+%%   erl_syntax:text/1}), representing the raw token sequence of the
+%%   form, instead of reporting a parse error. The default value is
+%%   `false'.</dd>
+%%   <dt>{@type {clever, boolean()@}}</dt>
+%%   <dd>If set to `true', this makes `epp_dodger' try to repair the
+%%   source code as it seems fit, in certain cases where parsing would
+%%   otherwise fail. Currently, it inserts `++'-operators between string
+%%   literals and macros where it looks like concatenation was intended.
+%%   The default value is `false'.</dd>
+%% </dl>
+%% +%% @see parse/2 +%% @see quick_parse_file/1 +%% @see erl_syntax:is_form/1 + +-spec parse_file(file:filename(), [option()]) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +parse_file(File, Options) -> + parse_file(File, fun parse/3, Options). + +%% @spec quick_parse_file(File) -> {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv quick_parse_file(File, []) + +-spec quick_parse_file(file:filename()) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +quick_parse_file(File) -> + quick_parse_file(File, []). + +%% @spec quick_parse_file(File, Options) -> +%% {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Similar to {@link parse_file/2}, but does a more quick-and-dirty +%% processing of the code. Macro definitions and other preprocessor +%% directives are discarded, and all macro calls are replaced with +%% atoms. This is useful when only the main structure of the code is of +%% interest, and not the details. Furthermore, the quick-parse method +%% can usually handle more strange cases than the normal, more exact +%% parsing. +%% +%% Options: see {@link parse_file/2}. Note however that for +%% `quick_parse_file/2', the option `no_fail' is `true' by default. +%% +%% @see quick_parse/2 +%% @see parse_file/2 + +-spec quick_parse_file(file:filename(), [option()]) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +quick_parse_file(File, Options) -> + parse_file(File, fun quick_parse/3, Options ++ [no_fail]). + +parse_file(File, Parser, Options) -> + case do_parse_file(utf8, File, Parser, Options) of + {ok, Forms}=Ret -> + case find_invalid_unicode(Forms) of + none -> + Ret; + invalid_unicode -> + case epp:read_encoding(File) of + utf8 -> + Ret; + _ -> + do_parse_file(latin1, File, Parser, Options) + end + end; + Else -> + Else + end. + +do_parse_file(DefEncoding, File, Parser, Options) -> + case file:open(File, [read]) of + {ok, Dev} -> + _ = epp:set_encoding(Dev, DefEncoding), + try Parser(Dev, 1, Options) + after ok = file:close(Dev) + end; + {error, Error} -> + {error, {0, file, Error}} % defer to file:format_error/1 + end. + +find_invalid_unicode([H|T]) -> + case H of + {error, {_Location, file_io_server, invalid_unicode}} -> + invalid_unicode; + _Other -> + find_invalid_unicode(T) + end; +find_invalid_unicode([]) -> none. + +%% ===================================================================== +%% @spec parse(IODevice) -> {ok, Forms} | {error, errorinfo()} +%% @equiv parse(IODevice, 1) + +-spec parse(file:io_device()) -> {'ok', erl_syntax:forms()}. + +parse(Dev) -> + parse(Dev, 1). + +%% @spec parse(IODevice, StartLocation) -> {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv parse(IODevice, StartLocation, []) +%% @see parse/1 + +-spec parse(file:io_device(), erl_anno:location()) -> {'ok', erl_syntax:forms()}. + +parse(Dev, L) -> + parse(Dev, L, []). + +%% @spec parse(IODevice, StartLocation, Options) -> +%% {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Reads and parses program text from an I/O stream. Characters are +%% read from `IODevice' until end-of-file; apart from this, the +%% behaviour is the same as for {@link parse_file/2}. 
`StartLocation' is the +%% initial location. +%% +%% @see parse/2 +%% @see parse_file/2 +%% @see parse_form/2 +%% @see quick_parse/3 + +-spec parse(file:io_device(), erl_anno:location(), [option()]) -> + {'ok', erl_syntax:forms()}. + +parse(Dev, L0, Options) -> + parse(Dev, L0, fun parse_form/3, Options). + +%% @spec quick_parse(IODevice) -> {ok, Forms} | {error, errorinfo()} +%% @equiv quick_parse(IODevice, 1) + +-spec quick_parse(file:io_device()) -> + {'ok', erl_syntax:forms()}. + +quick_parse(Dev) -> + quick_parse(Dev, 1). + +%% @spec quick_parse(IODevice, StartLocation) -> +%% {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv quick_parse(IODevice, StartLocation, []) +%% @see quick_parse/1 + +-spec quick_parse(file:io_device(), erl_anno:location()) -> + {'ok', erl_syntax:forms()}. + +quick_parse(Dev, L) -> + quick_parse(Dev, L, []). + +%% @spec (IODevice, StartLocation, Options) -> +%% {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Similar to {@link parse/3}, but does a more quick-and-dirty +%% processing of the code. See {@link quick_parse_file/2} for details. +%% +%% @see quick_parse/2 +%% @see quick_parse_file/2 +%% @see quick_parse_form/2 +%% @see parse/3 + +-spec quick_parse(file:io_device(), erl_anno:location(), [option()]) -> + {'ok', erl_syntax:forms()}. + +quick_parse(Dev, L0, Options) -> + parse(Dev, L0, fun quick_parse_form/3, Options). + +parse(Dev, L0, Parser, Options) -> + parse(Dev, L0, [], Parser, Options). + +parse(Dev, L0, Fs, Parser, Options) -> + case Parser(Dev, L0, Options) of + {ok, none, L1} -> + parse(Dev, L1, Fs, Parser, Options); + {ok, F, L1} -> + parse(Dev, L1, [F | Fs], Parser, Options); + {error, IoErr, L1} -> + parse(Dev, L1, [{error, IoErr} | Fs], Parser, Options); + {eof, _L1} -> + {ok, lists:reverse(Fs)} + end. + + +%% ===================================================================== +%% @spec parse_form(IODevice, StartLocation) -> {ok, Form, Location} +%% | {eof, Location} +%% | {error, errorinfo(), Location} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Form = erl_syntax:syntaxTree() +%% Location = //stdlib/erl_anno:location() +%% +%% @equiv parse_form(IODevice, StartLocation, []) +%% +%% @see quick_parse_form/2 + +-spec parse_form(file:io_device(), erl_anno:location()) -> + {'ok', erl_syntax:forms(), erl_anno:location()} + | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}. + +parse_form(Dev, L0) -> + parse_form(Dev, L0, []). + +%% @spec parse_form(IODevice, StartLocation, Options) -> +%% {ok, Form, Location} +%% | {eof, Location} +%% | {error, errorinfo(), Location} +%% +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Options = [term()] +%% Form = erl_syntax:syntaxTree() +%% Location = //stdlib/erl_anno:location() +%% +%% @doc Reads and parses a single program form from an I/O stream. +%% Characters are read from `IODevice' until an end-of-form +%% marker is found (a period character followed by whitespace), or until +%% end-of-file; apart from this, the behaviour is similar to that of +%% `parse/3', except that the return values also contain the +%% final location given that `StartLocation' is the initial +%% location, and that `{eof, Location}' may be returned. 
+%%
+%% @see parse/3
+%% @see parse_form/2
+%% @see quick_parse_form/3
+
+-spec parse_form(file:io_device(), erl_anno:location(), [option()]) ->
+        {'ok', erl_syntax:forms(), erl_anno:location()}
+      | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}.
+
+parse_form(Dev, L0, Options) ->
+    parse_form(Dev, L0, fun normal_parser/2, Options).
+
+%% @spec quick_parse_form(IODevice, StartLocation) ->
+%%           {ok, Form, Location}
+%%         | {eof, Location}
+%%         | {error, errorinfo(), Location}
+%%   IODevice = pid()
+%%   StartLocation = //stdlib/erl_anno:location()
+%%   Form = erl_syntax:syntaxTree() | none
+%%   Location = //stdlib/erl_anno:location()
+%%
+%% @equiv quick_parse_form(IODevice, StartLocation, [])
+%%
+%% @see parse_form/2
+
+-spec quick_parse_form(file:io_device(), erl_anno:location()) ->
+        {'ok', erl_syntax:forms(), erl_anno:location()}
+      | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}.
+
+quick_parse_form(Dev, L0) ->
+    quick_parse_form(Dev, L0, []).
+
+%% @spec quick_parse_form(IODevice, StartLocation, Options) ->
+%%           {ok, Form, Location}
+%%         | {eof, Location}
+%%         | {error, errorinfo(), Location}
+%%
+%%   IODevice = pid()
+%%   StartLocation = //stdlib/erl_anno:location()
+%%   Options = [term()]
+%%   Form = erl_syntax:syntaxTree()
+%%   Location = //stdlib/erl_anno:location()
+%%
+%% @doc Similar to {@link parse_form/3}, but does a more quick-and-dirty
+%% processing of the code. See {@link quick_parse_file/2} for details.
+%%
+%% @see parse/3
+%% @see quick_parse_form/2
+%% @see parse_form/3
+
+-spec quick_parse_form(file:io_device(), erl_anno:location(), [option()]) ->
+        {'ok', erl_syntax:forms(), erl_anno:location()}
+      | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}.
+
+quick_parse_form(Dev, L0, Options) ->
+    parse_form(Dev, L0, fun quick_parser/2, Options).
+
+-record(opt, {clever = false :: boolean()}).
+
+parse_form(Dev, L0, Parser, Options) ->
+    NoFail = proplists:get_bool(no_fail, Options),
+    Opt = #opt{clever = proplists:get_bool(clever, Options)},
+
+    %% This has the *potential* to read options for enabling/disabling
+    %% features for the parsing of the file.
+    {ok, {_Ftrs, ResWordFun}} =
+        erl_features:keyword_fun(Options, fun reserved_word/1),
+
+    case io:scan_erl_form(Dev, "", L0, [{reserved_word_fun,ResWordFun}]) of
+        {ok, Ts, L1} ->
+            case catch {ok, Parser(Ts, Opt)} of
+                {'EXIT', Term} ->
+                    {error, io_error(L1, {unknown, Term}), L1};
+                {error, Term} ->
+                    IoErr = io_error(L1, Term),
+                    {error, IoErr, L1};
+                {parse_error, _IoErr} when NoFail ->
+                    {ok, erl_syntax:set_pos(
+                           erl_syntax:text(tokens_to_string(Ts)),
+                           erl_anno:new(start_pos(Ts, L1))),
+                     L1};
+                {parse_error, IoErr} ->
+                    {error, IoErr, L1};
+                {ok, F} ->
+                    {ok, F, L1}
+            end;
+        {error, _IoErr, _L1} = Err -> Err;
+        {error, _Reason} -> {eof, L0}; % This is probably an encoding problem
+        {eof, _L1} = Eof -> Eof
+    end.
+
+io_error(L, Desc) ->
+    {L, ?MODULE, Desc}.
+
+start_pos([T | _Ts], _L) ->
+    erl_anno:location(element(2, T));
+start_pos([], L) ->
+    L.
+
+%% Exception-throwing wrapper for the standard Erlang parser stage
+
+parse_tokens(Ts) ->
+    parse_tokens(Ts, fun fix_form/1).
+
+parse_tokens(Ts, Fix) ->
+    case erl_parse:parse_form(Ts) of
+        {ok, Form} ->
+            Form;
+        {error, IoErr} ->
+            case Fix(Ts) of
+                {form, Form} ->
+                    Form;
+                {retry, Ts1, Fix1} ->
+                    parse_tokens(Ts1, Fix1);
+                error ->
+                    throw({parse_error, IoErr})
+            end
+    end.
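+
+%% The parse_form/quick_parse_form entry points above are the
+%% incremental interface: each call consumes one form from the device.
+%% A brief usage sketch (illustrative only, not part of the ported
+%% module; "src/foo.erl" is a hypothetical file):
+%%
+%%   1> {ok, Dev} = file:open("src/foo.erl", [read]).
+%%   2> epp_dodger:parse_form(Dev, 1).
+%%   {ok, Form, Location}
+%%
+%% Parse errors come back as {error, ErrorInfo, Location} rather than
+%% being raised, so a driver loop can record them and continue reading.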
+ +%% --------------------------------------------------------------------- +%% Quick scanning/parsing - deletes macro definitions and other +%% preprocessor directives, and replaces all macro calls with atoms. + +quick_parser(Ts, _Opt) -> + filter_form(parse_tokens(quickscan_form(Ts))). + +quickscan_form([{'-', _Anno}, {atom, AnnoA, define} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, undef} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, include} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, include_lib} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, ifdef} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, ifndef} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {'if', AnnoA} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, elif} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, 'else'} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {'else', AnnoA} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, endif} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, feature} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', Anno}, {'?', _}, {Type, _, _}=N | [{'(', _} | _]=Ts]) + when Type =:= atom; Type =:= var -> + %% minus, macro and open parenthesis at start of form - assume that + %% the macro takes no arguments; e.g. `-?foo(...).' + quickscan_macros_1(N, Ts, [{'-', Anno}]); +quickscan_form([{'?', _Anno}, {Type, _, _}=N | [{'(', _} | _]=Ts]) + when Type =:= atom; Type =:= var -> + %% macro and open parenthesis at start of form - assume that the + %% macro takes no arguments (see scan_macros for details) + quickscan_macros_1(N, Ts, []); +quickscan_form(Ts) -> + quickscan_macros(Ts). + +kill_form(A) -> + [{atom, A, ?pp_form}, {'(', A}, {')', A}, {'->', A}, {atom, A, kill}, + {dot, A}]. + +quickscan_macros(Ts) -> + quickscan_macros(Ts, []). + +quickscan_macros([{'?',_}, {Type, _, A} | Ts], [{string, AnnoS, S} | As]) + when Type =:= atom; Type =:= var -> + %% macro after a string literal: change to a single string + {_, Ts1} = skip_macro_args(Ts), + S1 = S ++ quick_macro_string(A), + quickscan_macros(Ts1, [{string, AnnoS, S1} | As]); +quickscan_macros([{'?',_}, {Type, _, _}=N | [{'(',_}|_]=Ts], + [{':',_}|_]=As) + when Type =:= atom; Type =:= var -> + %% macro and open parenthesis after colon - check the token + %% following the arguments (see scan_macros for details) + Ts1 = case skip_macro_args(Ts) of + {_, [{'->',_} | _] = Ts2} -> Ts2; + {_, [{'when',_} | _] = Ts2} -> Ts2; + {_, [{':',_} | _] = Ts2} -> Ts2; + _ -> Ts %% assume macro without arguments + end, + quickscan_macros_1(N, Ts1, As); +quickscan_macros([{'?',_}, {Type, _, _}=N | Ts], As) + when Type =:= atom; Type =:= var -> + %% macro with or without arguments + {_, Ts1} = skip_macro_args(Ts), + quickscan_macros_1(N, Ts1, As); +quickscan_macros([T | Ts], As) -> + quickscan_macros(Ts, [T | As]); +quickscan_macros([], As) -> + lists:reverse(As). 
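+
+%% Illustrative sketch of the effect (not part of the module): under
+%% quick parsing, a macro call is collapsed to a single atom whose name
+%% keeps the `?' prefix, so a form such as
+%%
+%%   f() -> ?VERSION(1, 2).
+%%
+%% is scanned as if it had been written
+%%
+%%   f() -> '?VERSION'.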
+ +%% (after a macro has been found and the arglist skipped, if any) +quickscan_macros_1({_Type, _, A}, [{string, AnnoS, S} | Ts], As) -> + %% string literal following macro: change to single string + S1 = quick_macro_string(A) ++ S, + quickscan_macros(Ts, [{string, AnnoS, S1} | As]); +quickscan_macros_1({_Type, AnnoA, A}, Ts, As) -> + %% normal case - just replace the macro with an atom + quickscan_macros(Ts, [{atom, AnnoA, quick_macro_atom(A)} | As]). + +quick_macro_atom(A) -> + list_to_atom("?" ++ atom_to_list(A)). + +quick_macro_string(A) -> + "(?" ++ atom_to_list(A) ++ ")". + +%% Skipping to the end of a macro call, tracking open/close constructs. +%% @spec (Tokens) -> {Skipped, Rest} + +skip_macro_args([{'(',_}=T | Ts]) -> + skip_macro_args(Ts, [')'], [T]); +skip_macro_args(Ts) -> + {[], Ts}. + +skip_macro_args([{'(',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, [')' | Es], [T | As]); +skip_macro_args([{'{',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['}' | Es], [T | As]); +skip_macro_args([{'[',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, [']' | Es], [T | As]); +skip_macro_args([{'<<',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['>>' | Es], [T | As]); +skip_macro_args([{'begin',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'if',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'case',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'receive',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'try',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{E,_}=T | Ts], [E], As) -> %final close + {lists:reverse([T | As]), Ts}; +skip_macro_args([{E,_}=T | Ts], [E | Es], As) -> %matching close + skip_macro_args(Ts, Es, [T | As]); +skip_macro_args([T | Ts], Es, As) -> + skip_macro_args(Ts, Es, [T | As]); +skip_macro_args([], _Es, _As) -> + throw({error, macro_args}). + +filter_form({function, _, ?pp_form, _, + [{clause, _, [], [], [{atom, _, kill}]}]}) -> + none; +filter_form(T) -> + T. + + +%% --------------------------------------------------------------------- +%% Normal parsing - try to preserve all information + +normal_parser(Ts0, Opt) -> + case scan_form(Ts0, Opt) of + Ts when is_list(Ts) -> + rewrite_form(parse_tokens(Ts)); + Node -> + Node + end. 
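+
+%% Illustrative sketch for `skip_macro_args/1' above (not part of the
+%% module): it splits a token list after a balanced argument list, e.g.
+%%
+%%   {ok, Ts, _} = erl_scan:string("(a, [b, c]) -> ok"),
+%%   {Args, Rest} = skip_macro_args(Ts),
+%%
+%% leaves `Args' covering `(a, [b, c])' and `Rest' starting at `->'.
+%% Unbalanced input causes it to throw `{error, macro_args}'.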
+ +scan_form([{'-', _Anno}, {atom, AnnoA, define} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, define} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, undef} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, undef} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, include} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, include} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, include_lib} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, include_lib} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, ifdef} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, ifdef} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, ifndef} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, ifndef} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {'if', AnnoA} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'if'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, elif} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'elif'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, 'else'} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'else'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {'else', AnnoA} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'else'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, endif} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, endif} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, error} | Ts], _Opt) -> + Desc = build_info_string("-error", Ts), + ErrorInfo = {erl_anno:location(AnnoA), ?MODULE, {error, Desc}}, + erl_syntax:error_marker(ErrorInfo); +scan_form([{'-', _Anno}, {atom, AnnoA, warning} | Ts], _Opt) -> + Desc = build_info_string("-warning", Ts), + ErrorInfo = {erl_anno:location(AnnoA), ?MODULE, {warning, Desc}}, + erl_syntax:error_marker(ErrorInfo); +scan_form([{'-', A}, {'?', A1}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt) + when Type =:= atom; Type =:= var -> + %% minus, macro and open parenthesis at start of form - assume that + %% the macro takes no arguments; e.g. `-?foo(...).' + macro(A1, N, Ts, [{'-', A}], Opt); +scan_form([{'?', A}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt) + when Type =:= atom; Type =:= var -> + %% macro and open parenthesis at start of form - assume that the + %% macro takes no arguments; probably a function declaration on the + %% form `?m(...) -> ...', which will not parse if it is rewritten as + %% `(?m(...)) -> ...', so it must be handled as `(?m)(...) -> ...' + macro(A, N, Ts, [], Opt); +scan_form(Ts, Opt) -> + scan_macros(Ts, Opt). + +build_info_string(Prefix, Ts0) -> + Ts = lists:droplast(Ts0), + String = lists:droplast(tokens_to_string(Ts)), + Prefix ++ " " ++ String ++ ".". + +scan_macros(Ts, Opt) -> + scan_macros(Ts, [], Opt). 
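+
+%% Illustrative sketch (not part of the module): `scan_form/2' above
+%% hides a preprocessor directive inside a dummy function so that
+%% `erl_parse' will accept it. For example, the tokens of
+%%
+%%   -ifdef(DEBUG).
+%%
+%% are rewritten to parse as the pseudo-function
+%%
+%%   ?pp_form() -> ifdef(DEBUG).
+%%
+%% and `rewrite_form/1' below turns the clause body back into an
+%% `-ifdef(DEBUG).' attribute node.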
+ +scan_macros([{'?', _}=M, {Type, _, _}=N | Ts], [{string, AnnoS, _}=S | As], + #opt{clever = true}=Opt) + when Type =:= atom; Type =:= var -> + %% macro after a string literal: be clever and insert ++ + scan_macros([M, N | Ts], [{'++', AnnoS}, S | As], Opt); +scan_macros([{'?', Anno}, {Type, _, _}=N | [{'(',_}|_]=Ts], + [{':',_}|_]=As, Opt) + when Type =:= atom; Type =:= var -> + %% macro and open parentheses after colon - probably a call + %% `m:?F(...)' so the argument list might belong to the call, not + %% the macro - but it could also be a try-clause pattern + %% `...:?T(...) ->' - we need to check the token following the + %% arguments to decide + {Args, Rest} = skip_macro_args(Ts), + case Rest of + [{'->',_} | _] -> + macro_call(Args, Anno, N, Rest, As, Opt); + [{'when',_} | _] -> + macro_call(Args, Anno, N, Rest, As, Opt); + [{':',_} | _] -> + macro_call(Args, Anno, N, Rest, As, Opt); + _ -> + macro(Anno, N, Ts, As, Opt) + end; +scan_macros([{'?', Anno}, {Type, _, _}=N | [{'(',_}|_]=Ts], As, Opt) + when Type =:= atom; Type =:= var -> + %% macro with arguments + {Args, Rest} = skip_macro_args(Ts), + macro_call(Args, Anno, N, Rest, As, Opt); +scan_macros([{'?', Anno }, {Type, _, _}=N | Ts], As, Opt) + when Type =:= atom; Type =:= var -> + %% macro without arguments + macro(Anno, N, Ts, As, Opt); +scan_macros([T | Ts], As, Opt) -> + scan_macros(Ts, [T | As], Opt); +scan_macros([], As, _Opt) -> + lists:reverse(As). + +%% Rewriting to a tuple which will be recognized by the post-parse pass +%% (we insert parentheses to preserve the precedences when parsing). + +macro(Anno, {Type, _, A}, Rest, As, Opt) -> + scan_macros_1([], Rest, [{atom,Anno,macro_atom(Type,A)} | As], Opt). + +macro_call([{'(',_}, {')',_}], Anno, {_, AnnoN, _}=N, Rest, As, Opt) -> + {Open, Close} = parentheses(As), + scan_macros_1([], Rest, + %% {'?macro_call', N } + lists:reverse(Open ++ [{'{', Anno}, + {atom, Anno, ?macro_call}, + {',', Anno}, + N, + {'}', AnnoN}] ++ Close, + As), Opt); +macro_call([{'(',_} | Args], Anno, {_, AnnoN, _}=N, Rest, As, Opt) -> + {Open, Close} = parentheses(As), + %% drop closing parenthesis + {')', _} = lists:last(Args), %% assert + Args1 = lists:droplast(Args), + %% note that we must scan the argument list; it may not be skipped + scan_macros_1(Args1 ++ [{'}', AnnoN} | Close], + Rest, + %% {'?macro_call', N, Arg1, ... } + lists:reverse(Open ++ [{'{', Anno}, + {atom, Anno, ?macro_call}, + {',', Anno}, + N, + {',', AnnoN}], + As), Opt). + +macro_atom(atom, A) -> + list_to_atom(?atom_prefix ++ atom_to_list(A)); +macro_atom(var, A) -> + list_to_atom(?var_prefix ++ atom_to_list(A)). + +%% don't insert parentheses after a string token, to avoid turning +%% `"string" ?macro' into a "function application" `"string"(...)' +%% (see note at top of file) +parentheses([{string, _, _} | _]) -> + {[], []}; +parentheses(_) -> + {[{'(',0}], [{')',0}]}. + +%% (after a macro has been found and the arglist skipped, if any) +scan_macros_1(Args, [{string, AnnoS, _} | _]=Rest, As, + #opt{clever = true}=Opt) -> + %% string literal following macro: be clever and insert ++ + scan_macros(Args ++ [{'++', AnnoS} | Rest], As, Opt); +scan_macros_1(Args, Rest, As, Opt) -> + %% normal case - continue scanning + scan_macros(Args ++ Rest, As, Opt). 
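+
+%% Illustrative sketch (not part of the module): a macro call such as
+%% `?F(A, B)' is re-tokenized before parsing as the parenthesized tuple
+%%
+%%   ({'?macro_call', F, A, B})
+%%
+%% which `rewrite/1' below recognizes by its `?macro_call' marker and
+%% converts back into the syntax tree `erl_syntax:macro(F, [A, B])'.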
+ +rewrite_form({function, Anno, ?pp_form, _, + [{clause, _, [], [], [{call, _, A, As}]}]}) -> + erl_syntax:set_pos(erl_syntax:attribute(A, rewrite_list(As)), Anno); +rewrite_form({function, Anno, ?pp_form, _, [{clause, _, [], [], [A]}]}) -> + erl_syntax:set_pos(erl_syntax:attribute(A), Anno); +rewrite_form(T) -> + rewrite(T). + +rewrite_list([T | Ts]) -> + [rewrite(T) | rewrite_list(Ts)]; +rewrite_list([]) -> + []. + +%% Note: as soon as we start using erl_syntax:subtrees/1 and similar +%% functions, we cannot assume that we know the exact representation of +%% the syntax tree anymore - we must use erl_syntax functions to analyze +%% and decompose the data. + +rewrite(Node) -> + case erl_syntax:type(Node) of + atom -> + case atom_to_list(erl_syntax:atom_value(Node)) of + ?atom_prefix ++ As -> + A1 = list_to_atom(As), + N = erl_syntax:copy_pos(Node, erl_syntax:atom(A1)), + erl_syntax:copy_pos(Node, erl_syntax:macro(N)); + ?var_prefix ++ As -> + A1 = list_to_atom(As), + N = erl_syntax:copy_pos(Node, erl_syntax:variable(A1)), + erl_syntax:copy_pos(Node, erl_syntax:macro(N)); + _ -> + Node + end; + tuple -> + case erl_syntax:tuple_elements(Node) of + [MagicWord, A | As] -> + case erl_syntax:type(MagicWord) of + atom -> + case erl_syntax:atom_value(MagicWord) of + ?macro_call -> + M = erl_syntax:macro(A, rewrite_list(As)), + erl_syntax:copy_pos(Node, M); + _ -> + rewrite_1(Node) + end; + _ -> + rewrite_1(Node) + end; + _ -> + rewrite_1(Node) + end; + _ -> + rewrite_1(Node) + end. + +rewrite_1(Node) -> + case erl_syntax:subtrees(Node) of + [] -> + Node; + Gs -> + Node1 = erl_syntax:make_tree(erl_syntax:type(Node), + [[rewrite(T) || T <- Ts] + || Ts <- Gs]), + erl_syntax:copy_pos(Node, Node1) + end. + +%% attempting a rescue operation on a token sequence for a single form +%% if it could not be parsed after the normal treatment + +fix_form([{atom, _, ?pp_form}, {'(', _}, {')', _}, {'->', _}, + {atom, _, define}, {'(', _} | _]=Ts) -> + case lists:reverse(Ts) of + [{dot, _}, {')', _} | _] -> + {retry, Ts, fun fix_define/1}; + [{dot, Anno} | Ts1] -> + Ts2 = lists:reverse([{dot, Anno}, {')', Anno} | Ts1]), + {retry, Ts2, fun fix_define/1}; + _ -> + error + end; +fix_form(_Ts) -> + error. + +fix_define([{atom, Anno, ?pp_form}, {'(', _}, {')', _}, {'->', _}, + {atom, AnnoA, define}, {'(', _}, N, {',', _} | Ts]) -> + [{dot, _}, {')', _} | Ts1] = lists:reverse(Ts), + S = tokens_to_string(lists:reverse(Ts1)), + A = erl_syntax:set_pos(erl_syntax:atom(define), AnnoA), + Txt = erl_syntax:set_pos(erl_syntax:text(S), AnnoA), + {form, erl_syntax:set_pos(erl_syntax:attribute(A, [N, Txt]), Anno)}; +fix_define(_Ts) -> + error. + +%% @spec tokens_to_string(Tokens::[term()]) -> string() +%% +%% @doc Generates a string corresponding to the given token sequence. +%% The string can be re-tokenized to yield the same token list again. + +-spec tokens_to_string([term()]) -> string(). 
+ +tokens_to_string([{atom,_,A} | Ts]) -> + io_lib:write_atom(A) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{string, _, S} | Ts]) -> + io_lib:write_string(S) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{char, _, C} | Ts]) -> + io_lib:write_char(C) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{float, _, F} | Ts]) -> + float_to_list(F) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{integer, _, N} | Ts]) -> + integer_to_list(N) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{var, _, A} | Ts]) -> + atom_to_list(A) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{dot, _} | Ts]) -> + ".\n" ++ tokens_to_string(Ts); +tokens_to_string([{A, _} | Ts]) -> + atom_to_list(A) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([]) -> + "". + + +%% @spec format_error(Descriptor::term()) -> string() +%% @hidden +%% @doc Callback function for formatting error descriptors. Not for +%% normal use. + +-spec format_error(term()) -> string(). + +format_error(macro_args) -> + errormsg("macro call missing end parenthesis"); +format_error({error, Error}) -> + Error; +format_error({warning, Error}) -> + Error; +format_error({unknown, Reason}) -> + errormsg(io_lib:format("unknown error: ~tP", [Reason, 15])). + +errormsg(String) -> + io_lib:format("~s: ~ts", [?MODULE, String]). + + +%% ===================================================================== + +%% See #7266: The dodger currently does not process feature attributes +%% correctly, so temporarily consider the `else` and `maybe` atoms +%% always as keywords +-spec reserved_word(Atom :: atom()) -> boolean(). +reserved_word('else') -> true; +reserved_word('maybe') -> true; +reserved_word(Atom) -> erl_scan:f_reserved_word(Atom). diff --git a/prelude/erlang/toolchain/escript_builder.escript b/prelude/erlang/toolchain/escript_builder.escript index 744a56d3d9..54f3ff3b5c 100644 --- a/prelude/erlang/toolchain/escript_builder.escript +++ b/prelude/erlang/toolchain/escript_builder.escript @@ -26,8 +26,6 @@ -include_lib("kernel/include/file.hrl"). --mode(compile). - -type escript_artifact_spec() :: #{ ArchivePath :: file:filename() => FileSystemPath :: file:filename() }. diff --git a/prelude/apple/xcode_postbuild_script.bzl b/prelude/erlang/toolchain/escript_trampoline.sh old mode 100644 new mode 100755 similarity index 75% rename from prelude/apple/xcode_postbuild_script.bzl rename to prelude/erlang/toolchain/escript_trampoline.sh index 3c425a48e3..dbf39844bf --- a/prelude/apple/xcode_postbuild_script.bzl +++ b/prelude/erlang/toolchain/escript_trampoline.sh @@ -1,3 +1,4 @@ +#! /usr/bin/env bash # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under both the MIT license found in the @@ -5,5 +6,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -def xcode_postbuild_script_impl(_ctx: AnalysisContext) -> list[Provider]: - return [DefaultInfo()] +cmd=("$2" "$1/run.escript" "${@:3}") + +"${cmd[@]}" diff --git a/prelude/erlang/toolchain/include_erts.escript b/prelude/erlang/toolchain/include_erts.escript index ad81e86b7d..90d72d4c18 100644 --- a/prelude/erlang/toolchain/include_erts.escript +++ b/prelude/erlang/toolchain/include_erts.escript @@ -23,8 +23,6 @@ -export([main/1]). --mode(compile). - -spec main([string()]) -> ok. 
main([TargetPath]) -> case filelib:wildcard(filename:join(code:root_dir(), "erts-*")) of diff --git a/prelude/erlang/toolchain/release_variables_builder.escript b/prelude/erlang/toolchain/release_variables_builder.escript index da9556823f..53fb7e12ee 100644 --- a/prelude/erlang/toolchain/release_variables_builder.escript +++ b/prelude/erlang/toolchain/release_variables_builder.escript @@ -28,8 +28,6 @@ -export([main/1]). --mode(compile). - -define(EXITSUCCESS, 0). -define(EXITERROR, 1). diff --git a/prelude/export_exe.bzl b/prelude/export_exe.bzl index af37ff6b0d..d1134c29f0 100644 --- a/prelude/export_exe.bzl +++ b/prelude/export_exe.bzl @@ -45,7 +45,7 @@ export_exe = rule( src = "bin/script.sh", ) - The latter form allows executing checked in binaries with required resouces (eg. runtime shared libraries) + The latter form allows executing checked in binaries with required resources (eg. runtime shared libraries) without unnecessary indirection via another rule which allows args, like command_alias. Eg. instead of export_file( diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index ea63c30bd0..16622fdccb 100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -10,11 +10,9 @@ load("@prelude//:cache_mode.bzl", "CacheModeInfo") load("@prelude//:genrule_local_labels.bzl", "genrule_labels_require_local") load("@prelude//:genrule_toolchain.bzl", "GenruleToolchainInfo") -load("@prelude//:genrule_types.bzl", "GENRULE_MARKER_SUBTARGET_NAME", "GenruleMarkerInfo") load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//os_lookup:defs.bzl", "OsLookup") -load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten", "value_or") GENRULE_OUT_DIR = "out" @@ -41,6 +39,8 @@ _BUILD_ROOT_LABELS = {label: True for label in [ "app_modules_genrule", # produces JSON containing file paths that are read from the root dir. "android_langpack_strings", # produces JSON containing file paths that are read from the root dir. "windows_long_path_issue", # Windows: relative path length exceeds PATH_MAX, program cannot access file + "flowtype_ota_safety_target", # produces JSON containing file paths that are project-relative + "ctrlr_setting_paths", ]} # In Buck1 the SRCS environment variable is only set if the substring SRCS is on the command line. @@ -82,6 +82,7 @@ def genrule_attributes() -> dict[str, Attr]: "metadata_env_var": attrs.option(attrs.string(), default = None), "metadata_path": attrs.option(attrs.string(), default = None), "no_outputs_cleanup": attrs.bool(default = False), + "remote_execution_dependencies": attrs.list(attrs.dict(key = attrs.string(), value = attrs.string()), default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_genrule_toolchain": attrs.default_only(attrs.toolchain_dep(default = "toolchains//:genrule", providers = [GenruleToolchainInfo])), } @@ -128,7 +129,8 @@ def process_genrule( out_attr: [str, None], outs_attr: [dict, None], extra_env_vars: dict = {}, - identifier: [str, None] = None) -> list[Provider]: + identifier: [str, None] = None, + other_outputs: list[Artifact] = []) -> list[Provider]: if (out_attr != None) and (outs_attr != None): fail("Only one of `out` and `outs` should be set. 
Got out=`%s`, outs=`%s`" % (repr(out_attr), repr(outs_attr))) @@ -174,18 +176,20 @@ def process_genrule( cmd = ctx.attrs.bash if ctx.attrs.bash != None else ctx.attrs.cmd if cmd == None: fail("One of `cmd` or `bash` should be set.") - cmd = cmd_args(cmd) + + replace_regex = [] # For backwards compatibility with Buck1. if is_windows: for re, sub in _WINDOWS_ENV_SUBSTITUTIONS: - cmd.replace_regex(re, sub) + replace_regex.append((re, sub)) for extra_env_var in extra_env_vars: - cmd.replace_regex(regex("\\$(%s\\b|\\{%s\\})" % (extra_env_var, extra_env_var)), "%%%s%%" % extra_env_var) + replace_regex.append( + (regex("\\$(%s\\b|\\{%s\\})" % (extra_env_var, extra_env_var)), "%%%s%%" % extra_env_var), + ) - if _ignore_artifacts(ctx): - cmd = cmd.ignore_artifacts() + cmd = cmd_args(cmd, ignore_artifacts = _ignore_artifacts(ctx), replace_regex = replace_regex) if type(ctx.attrs.srcs) == type([]): # FIXME: We should always use the short_path, but currently that is sometimes blank. @@ -329,13 +333,15 @@ def process_genrule( metadata_args["metadata_env_var"] = ctx.attrs.metadata_env_var if ctx.attrs.metadata_path: metadata_args["metadata_path"] = ctx.attrs.metadata_path + if ctx.attrs.remote_execution_dependencies: + metadata_args["remote_execution_dependencies"] = ctx.attrs.remote_execution_dependencies category = "genrule" if ctx.attrs.type != None: # As of 09/2021, all genrule types were legal snake case if their dashes and periods were replaced with underscores. category += "_" + ctx.attrs.type.replace("-", "_").replace(".", "_") ctx.actions.run( - cmd_args(script_args).hidden([cmd, srcs_artifact, out_artifact.as_output()] + hidden), + cmd_args(script_args, hidden = [cmd, srcs_artifact, out_artifact.as_output()] + hidden), env = env_vars, local_only = local_only, allow_cache_upload = cacheable, @@ -346,17 +352,11 @@ def process_genrule( **metadata_args ) - # Use a subtarget to insert a marker, as callsites make assumptions about - # the providers of `process_genrule()`. We want to have the marker in - # `DefaultInfo` rather than in `genrule_impl()` because we want to identify - # all classes of genrule-like rules. sub_targets = {k: [DefaultInfo(default_outputs = v)] for (k, v) in named_outputs.items()} - expect(GENRULE_MARKER_SUBTARGET_NAME not in sub_targets, "Conflicting private `{}` subtarget and named output".format(GENRULE_MARKER_SUBTARGET_NAME)) - sub_targets[GENRULE_MARKER_SUBTARGET_NAME] = [GenruleMarkerInfo()] - providers = [DefaultInfo( default_outputs = default_outputs, sub_targets = sub_targets, + other_outputs = other_outputs, )] # The cxx_genrule also forwards here, and that doesn't have .executable, so use getattr diff --git a/prelude/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl index 13cb97fd20..139eacf316 100644 --- a/prelude/genrule_local_labels.bzl +++ b/prelude/genrule_local_labels.bzl @@ -94,6 +94,12 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # (https://fb.workplace.com/groups/1042353022615812/posts/1849505965233843/). "uses_php", + # Uses the `libX11-devel` package which is not available on RE. + "uses_x11", + + # Unity license client needs to be set up on RE workers for this to work, and maybe further debugging. 
+    "uses_unity",
+
     # mksquashfs isn't available in RE, so run these locally
     # (https://fb.workplace.com/groups/buck2users/permalink/3023630007893360/)
     "uses_mksquashfs",
@@ -170,10 +176,12 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [
 
     # Some Qt genrules don't support RE yet
     "qt_moc",
-    "qt_qrc_gen",
+    "qt_qmlcachegen",
     "qt_qrc_compile",
+    "qt_qrc_gen",
     "qt_qsb_gen",
-    "qt_qmlcachegen",
+    "qt_rcc",
+    "qt_uic",
 
     # use local jar
     "uses_jar",
@@ -201,6 +209,14 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [
 
     # Uses Apple's codesign command which might not be in RE
     "uses_codesign",
+
+    # Uses jf which is not on RE
+    "uses_jf",
+
+    # On Messenger Desktop, a few targets are massive and take much longer to
+    # build on RE than locally on Windows. This is a mitigation until we can
+    # break down these targets.
+    "zeratul_windows_capacity_hog",
 ]}
 
 def genrule_labels_require_local(labels):
diff --git a/prelude/git/tools/BUCK.v2 b/prelude/git/tools/BUCK.v2
index 9135477da0..ce7dcb83cc 100644
--- a/prelude/git/tools/BUCK.v2
+++ b/prelude/git/tools/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
 prelude = native
 
 prelude.python_bootstrap_binary(
diff --git a/prelude/go/cgo_builder.bzl b/prelude/go/cgo_builder.bzl
new file mode 100644
index 0000000000..0ae78f8cc4
--- /dev/null
+++ b/prelude/go/cgo_builder.bzl
@@ -0,0 +1,177 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//:paths.bzl", "paths")
+load("@prelude//cxx:cxx_library.bzl", "cxx_compile_srcs")
+load(
+    "@prelude//cxx:cxx_sources.bzl",
+    "CxxSrcWithFlags",
+)
+load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
+load(
+    "@prelude//cxx:cxx_types.bzl",
+    "CxxRuleConstructorParams",  # @unused Used as a type
+)
+load("@prelude//cxx:headers.bzl", "cxx_attr_header_namespace", "cxx_get_regular_cxx_headers_layout", "prepare_headers")
+load(
+    "@prelude//cxx:preprocessor.bzl",
+    "CPreprocessor",
+    "CPreprocessorArgs",
+    "CPreprocessorInfo",
+    "cxx_inherited_preprocessor_infos",
+    "cxx_merge_cpreprocessors",
+)
+load(
+    "@prelude//linking:link_info.bzl",
+    "LinkStyle",
+)
+load("@prelude//linking:types.bzl", "Linkage")
+load("@prelude//os_lookup:defs.bzl", "OsLookup")
+load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script")
+load("@prelude//utils:expect.bzl", "expect")
+load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars")
+
+# A map of expected linkages for provided link style
+_LINKAGE_FOR_LINK_STYLE = {
+    LinkStyle("static"): Linkage("static"),
+    LinkStyle("static_pic"): Linkage("static"),
+    LinkStyle("shared"): Linkage("shared"),
+}
+
+def _cgo(
+        ctx: AnalysisContext,
+        srcs: list[Artifact],
+        own_pre: list[CPreprocessor],
+        inherited_pre: list[CPreprocessorInfo],
+        c_flags: list[str],
+        cpp_flags: list[str]) -> (list[Artifact], list[Artifact], list[Artifact], Artifact):
+    """
+    Run `cgo` on `.go` sources to generate Go, C, and C-Header sources.
+    """
+    gen_dir = ctx.actions.declare_output("cgo_gen_tmp", dir = True)
+
+    go_srcs = []
+    c_headers = []
+    c_srcs = []
+    go_srcs.append(gen_dir.project("_cgo_gotypes.go"))
+    c_srcs.append(gen_dir.project("_cgo_export.c"))
+    c_headers.append(gen_dir.project("_cgo_export.h"))
+    for src in srcs:
+        go_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo1.go")))
+        c_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo2.c")))
+
+    # Run `cgo` via its wrapper to generate sources into `gen_dir`.
+    go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo]
+
+    cmd = cmd_args(
+        go_toolchain.cgo_wrapper,
+        cmd_args(go_toolchain.cgo, format = "--cgo={}"),
+        cmd_args(gen_dir.as_output(), format = "--output={}"),
+        "--",
+        c_flags + cpp_flags,
+        ctx.attrs.compiler_flags,
+        srcs,
+    )
+
+    env = get_toolchain_env_vars(go_toolchain)
+    env["CC"] = _cxx_wrapper(ctx, own_pre, inherited_pre)
+
+    ctx.actions.run(cmd, env = env, category = "cgo")
+
+    return go_srcs, c_headers, c_srcs, gen_dir
+
+def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_pre: list[CPreprocessorInfo]) -> cmd_args:
+    pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre)
+    pre_args = pre.set.project_as_args("args")
+    pre_include_dirs = pre.set.project_as_args("include_dirs")
+
+    go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo]
+    expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain)
+    cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo]
+
+    c_compiler = cxx_toolchain.c_compiler_info
+
+    # Construct the full C/C++ command needed to preprocess/compile sources.
+    cxx_cmd = cmd_args(
+        c_compiler.compiler,
+        c_compiler.preprocessor_flags,
+        c_compiler.compiler_flags,
+        pre_args,
+        pre_include_dirs,
+        go_toolchain.c_compiler_flags,
+    )
+
+    # Wrap the C/C++ command in a wrapper script to avoid arg length limits.
+    return cmd_script(
+        ctx = ctx,
+        name = "cxx_wrapper",
+        cmd = cxx_cmd,
+        os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"),
+    )
+
+# Build a CPreprocessor, similar to what cxx_private_preprocessor_info does, but with our filtered headers
+def _own_pre(ctx: AnalysisContext, h_files: list[Artifact]) -> CPreprocessor:
+    namespace = cxx_attr_header_namespace(ctx)
+    header_map = {paths.join(namespace, h.short_path): h for h in h_files}
+    header_root = prepare_headers(ctx, header_map, "h_files-private-headers")
+
+    return CPreprocessor(
+        args = CPreprocessorArgs(args = ["-I", header_root.include_path] if header_root != None else []),
+    )
+
+def build_cgo(ctx: AnalysisContext, cgo_files: list[Artifact], h_files: list[Artifact], c_files: list[Artifact], c_flags: list[str], cpp_flags: list[str]) -> (list[Artifact], list[Artifact], Artifact):
+    if len(cgo_files) == 0:
+        return [], [], ctx.actions.copied_dir("cgo_gen_tmp", {})
+
+    # Gather preprocessor inputs.
+    own_pre = _own_pre(ctx, h_files)
+    inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps)
+
+    # Generate Go and C sources from the CGO sources.
+    go_gen_srcs, c_gen_headers, c_gen_srcs, gen_dir = _cgo(ctx, cgo_files, [own_pre], inherited_pre, c_flags, cpp_flags)
+
+    # Wrap the generated CGO C headers in a CPreprocessor object for compiling.
+    cgo_headers_pre = CPreprocessor(args = CPreprocessorArgs(args = [
+        "-I",
+        prepare_headers(
+            ctx,
+            {h.basename: h for h in c_gen_headers},
+            "cgo-private-headers",
+        ).include_path,
+    ]))
+
+    link_style = ctx.attrs.link_style
+    if link_style == None:
+        link_style = "static"
+    linkage = _LINKAGE_FOR_LINK_STYLE[LinkStyle(link_style)]
+
+    # Compile C++ sources into object files.
+    c_compile_cmds = cxx_compile_srcs(
+        ctx,
+        CxxRuleConstructorParams(
+            rule_type = "cgo_library",
+            headers_layout = cxx_get_regular_cxx_headers_layout(ctx),
+            srcs = [CxxSrcWithFlags(file = src) for src in c_files + c_gen_srcs],
+            compiler_flags = c_flags + ctx.attrs.compiler_flags,
+            lang_compiler_flags = ctx.attrs.lang_compiler_flags,
+            platform_compiler_flags = ctx.attrs.platform_compiler_flags,
+            lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags,
+            preprocessor_flags = cpp_flags + ctx.attrs.preprocessor_flags,
+            lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags,
+            platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags,
+            lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags,
+        ),
+        # Create private header tree and propagate via args.
+        [own_pre, cgo_headers_pre],
+        inherited_pre,
+        [],
+        linkage,
+    )
+
+    compiled_objects = c_compile_cmds.pic.objects
+
+    return go_gen_srcs, compiled_objects, gen_dir
diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl
index 96da87a30d..c691c14f3b 100644
--- a/prelude/go/cgo_library.bzl
+++ b/prelude/go/cgo_library.bzl
@@ -5,35 +5,8 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
 
-load("@prelude//:paths.bzl", "paths")
-load(
-    "@prelude//apple:xcode.bzl",
-    "get_project_root_file",
-)
-load(
-    "@prelude//cxx:compile.bzl",
-    "CxxSrcWithFlags",  # @unused Used as a type
-)
-load("@prelude//cxx:cxx_library.bzl", "cxx_compile_srcs")
-load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
-load(
-    "@prelude//cxx:cxx_types.bzl",
-    "CxxRuleConstructorParams",  # @unused Used as a type
-)
-load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout", "prepare_headers")
-load(
-    "@prelude//cxx:preprocessor.bzl",
-    "CPreprocessor",
-    "CPreprocessorArgs",
-    "CPreprocessorInfo",
-    "cxx_inherited_preprocessor_infos",
-    "cxx_merge_cpreprocessors",
-    "cxx_private_preprocessor_info",
-)
 load(
     "@prelude//linking:link_info.bzl",
-    "LinkStyle",
-    "Linkage",
     "MergedLinkInfo",
     "create_merged_link_info_for_propagation",
 )
@@ -42,210 +15,40 @@ load(
     "SharedLibraryInfo",
     "merge_shared_libraries",
 )
-load("@prelude//os_lookup:defs.bzl", "OsLookup")
-load("@prelude//utils:expect.bzl", "expect")
 load(
     "@prelude//utils:utils.bzl",
     "map_idx",
 )
-load(":compile.bzl", "GoPkgCompileInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs")
+load(":compile.bzl", "GoPkgCompileInfo", "get_inherited_compile_pkgs")
+load(":coverage.bzl", "GoCoverageMode")
 load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs")
-load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs")
-load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args")
-
-# A map of expected linkages for provided link style
-_LINKAGE_FOR_LINK_STYLE = {
-    LinkStyle("static"): Linkage("static"),
-    LinkStyle("static_pic"): Linkage("static"),
-    LinkStyle("shared"): Linkage("shared"),
-}
-
-def _cgo(
-        ctx: AnalysisContext,
-        srcs: list[Artifact],
-        own_pre: list[CPreprocessor],
-        inherited_pre: list[CPreprocessorInfo]) -> (list[Artifact], list[Artifact],
list[Artifact]): - """ - Run `cgo` on `.go` sources to generate Go, C, and C-Header sources. - """ - - pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre) - pre_args = pre.set.project_as_args("args") - pre_include_dirs = pre.set.project_as_args("include_dirs") - - # If you change this dir or naming convention, please - # update the corresponding logic in `fbgolist`. - # Otherwise editing and linting for Go will break. - gen_dir = "cgo_gen" - - go_srcs = [] - c_headers = [] - c_srcs = [] - go_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_gotypes.go"))) - c_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_export.c"))) - c_headers.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_export.h"))) - for src in srcs: - go_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, paths.replace_extension(src.basename, ".cgo1.go")))) - c_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, paths.replace_extension(src.basename, ".cgo2.c")))) - - # Return a `cmd_args` to use as the generated sources. - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - expect(go_toolchain.cgo != None) - expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) - cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - - cmd = get_toolchain_cmd_args(go_toolchain, go_root = False) - cmd.add(go_toolchain.cgo_wrapper[RunInfo]) - - args = cmd_args() - args.add(cmd_args(go_toolchain.cgo, format = "--cgo={}")) - - c_compiler = cxx_toolchain.c_compiler_info - # linker = cxx_toolchain.linker_info - - # Passing fbcode-platform ldflags may create S365277, so I would - # comment this change until we really need to do it. - # ldflags = cmd_args( - # linker.linker_flags, - # go_toolchain.external_linker_flags, - # ) - - # Construct the full C/C++ command needed to preprocess/compile sources. - cxx_cmd = cmd_args() - cxx_cmd.add(c_compiler.compiler) - cxx_cmd.add(c_compiler.preprocessor_flags) - cxx_cmd.add(c_compiler.compiler_flags) - cxx_cmd.add(pre_args) - cxx_cmd.add(pre_include_dirs) - - # Passing the same value as go-build, because our -g flags break cgo - # in some buck modes - cxx_cmd.add("-g") - - # Wrap the C/C++ command in a wrapper script to avoid arg length limits. - is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" - cxx_sh = cmd_args( - [ - cmd_args(cxx_cmd, quote = "shell"), - "%*" if is_win else "\"$@\"", - ], - delimiter = " ", - ) - cxx_wrapper, _ = ctx.actions.write( - "__{}_cxx__.{}".format(ctx.label.name, "bat" if is_win else "sh"), - ([] if is_win else ["#!/bin/sh"]) + [cxx_sh], - allow_args = True, - is_executable = True, - ) - args.add(cmd_args(cxx_wrapper, format = "--env-cc={}")) - args.hidden(cxx_cmd) - - # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not - # sure how to pass in an output dir *and* enumerate the sources we know will - # generated w/o v2 complaining that the output dir conflicts with the nested - # artifacts. 
- args.add(cmd_args(go_srcs[0].as_output(), format = "--output={}/..")) - - args.add(srcs) - - argsfile = ctx.actions.declare_output(paths.join(gen_dir, ".cgo.argsfile")) - ctx.actions.write(argsfile.as_output(), args, allow_args = True) - - cmd.add(cmd_args(argsfile, format = "@{}").hidden([args])) - - for src in go_srcs + c_headers + c_srcs: - cmd.hidden(src.as_output()) - ctx.actions.run(cmd, category = "cgo") - - return go_srcs, c_headers, c_srcs +load(":package_builder.bzl", "build_package") +load(":packages.bzl", "go_attr_pkg_name", "merge_pkgs") def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = go_attr_pkg_name(ctx) - project_root_file = get_project_root_file(ctx) - - # Gather preprocessor inputs. - (own_pre, _) = cxx_private_preprocessor_info( - ctx, - cxx_get_regular_cxx_headers_layout(ctx), - project_root_file = project_root_file, - ) - inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - - # Separate sources into C++ and CGO sources. - cgo_srcs = [] - cxx_srcs = [] - for src in ctx.attrs.srcs: - if src.extension == ".go": - cgo_srcs.append(src) - elif src.extension in (".c", ".cpp"): - cxx_srcs.append(src) - else: - fail("unexpected extension: {}".format(src)) - - # Generate CGO and C sources. - go_srcs, c_headers, c_srcs = _cgo(ctx, cgo_srcs, [own_pre], inherited_pre) - cxx_srcs.extend(c_srcs) - - # Wrap the generated CGO C headers in a CPreprocessor object for compiling. - cgo_headers_pre = CPreprocessor(relative_args = CPreprocessorArgs(args = [ - "-I", - prepare_headers( - ctx, - {h.basename: h for h in c_headers}, - "cgo-private-headers", - None, - ).include_path, - ])) - - link_style = ctx.attrs.link_style - if link_style == None: - link_style = "static" - linkage = _LINKAGE_FOR_LINK_STYLE[LinkStyle(link_style)] - - # Copmile C++ sources into object files. - c_compile_cmds = cxx_compile_srcs( - ctx, - CxxRuleConstructorParams( - rule_type = "cgo_library", - headers_layout = cxx_get_regular_cxx_headers_layout(ctx), - srcs = [CxxSrcWithFlags(file = src) for src in cxx_srcs], - ), - # Create private header tree and propagate via args. - [own_pre, cgo_headers_pre], - inherited_pre, - [], - linkage, - ) - - compiled_objects = c_compile_cmds.pic.objects - - # Merge all sources together to pass to the Go compile step. - all_srcs = cmd_args(go_srcs + compiled_objects) - if ctx.attrs.go_srcs: - all_srcs.add(get_filtered_srcs(ctx, ctx.attrs.go_srcs)) + shared = ctx.attrs._compile_shared + race = ctx.attrs._race + asan = ctx.attrs._asan + coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None # Build Go library. - static_pkg = compile( + compiled_pkg = build_package( ctx, pkg_name, - all_srcs, + ctx.attrs.go_srcs + ctx.attrs.srcs + ctx.attrs.headers, + package_root = ctx.attrs.package_root, deps = ctx.attrs.deps + ctx.attrs.exported_deps, - shared = False, - ) - shared_pkg = compile( - ctx, - pkg_name, - all_srcs, - deps = ctx.attrs.deps + ctx.attrs.exported_deps, - shared = True, + shared = shared, + race = race, + asan = asan, + coverage_mode = coverage_mode, + embedcfg = ctx.attrs.embedcfg, ) + pkgs = { - pkg_name: GoPkg( - shared = shared_pkg, - static = static_pkg, - cgo = True, - ), + pkg_name: compiled_pkg, } # We need to keep pre-processed cgo source files, @@ -253,7 +56,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: # to work with cgo. And when nearly every FB service client is cgo, # we need to support it well. 
return [ - DefaultInfo(default_output = static_pkg, other_outputs = go_srcs), + DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = [compiled_pkg.cgo_gen_dir]), GoPkgCompileInfo(pkgs = merge_pkgs([ pkgs, get_inherited_compile_pkgs(ctx.attrs.exported_deps), diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index ffb4f25cd7..e3038756c0 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -10,10 +10,7 @@ load( ":packages.bzl", "GoPkg", # @Unused used as type "merge_pkgs", - "pkg_artifacts", - "stdlib_pkg_artifacts", ) -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") # Provider wrapping packages used for compiling. GoPkgCompileInfo = provider(fields = { @@ -31,135 +28,18 @@ GoTestInfo = provider( }, ) -def _out_root(shared: bool = False): - return "__shared__" if shared else "__static__" - def get_inherited_compile_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgCompileInfo].pkgs for d in deps if GoPkgCompileInfo in d]) -def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = False, force_disable_cgo: bool = False) -> cmd_args: - """ - Filter the input sources based on build pragma - """ - - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - - # Delegate to `go list` to filter out srcs with incompatible `// +build` - # pragmas. - filtered_srcs = ctx.actions.declare_output("__filtered_srcs__.txt") - srcs_dir = ctx.actions.symlinked_dir( - "__srcs__", - {src.short_path: src for src in srcs}, - ) - filter_cmd = get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) - filter_cmd.add(go_toolchain.filter_srcs[RunInfo]) - filter_cmd.add(cmd_args(go_toolchain.go, format = "--go={}")) - if tests: - filter_cmd.add("--tests") - filter_cmd.add(cmd_args(",".join(go_toolchain.tags), format = "--tags={}")) - filter_cmd.add(cmd_args(filtered_srcs.as_output(), format = "--output={}")) - filter_cmd.add(srcs_dir) - ctx.actions.run(filter_cmd, category = "go_filter_srcs") - - # Add filtered srcs to compile command. - return cmd_args(filtered_srcs, format = "@{}").hidden(srcs).hidden(srcs_dir) - -def _assemble_cmd( - ctx: AnalysisContext, - pkg_name: str, - flags: list[str] = [], - shared: bool = False) -> cmd_args: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - cmd = cmd_args() - cmd.add(go_toolchain.assembler) - cmd.add(flags) - cmd.add("-p", pkg_name) - if shared: - cmd.add("-shared") - - return cmd - -def _compile_cmd( - ctx: AnalysisContext, - pkg_name: str, - pkgs: dict[str, Artifact] = {}, - deps: list[Dependency] = [], - flags: list[str] = [], - shared: bool = False) -> cmd_args: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - - cmd = cmd_args() - cmd.add(go_toolchain.compiler) - cmd.add("-p", pkg_name) - cmd.add("-pack") - cmd.add("-nolocalimports") - cmd.add(flags) - cmd.add("-buildid=") - - # Add shared/static flags. - if shared: - cmd.add("-shared") - cmd.add(go_toolchain.compiler_flags_shared) - else: - cmd.add(go_toolchain.compiler_flags_static) - - # Add Go pkgs inherited from deps to compiler search path. - all_pkgs = merge_pkgs([ - pkgs, - pkg_artifacts(get_inherited_compile_pkgs(deps), shared = shared), - stdlib_pkg_artifacts(go_toolchain, shared = shared), - ]) - - importcfg_content = [] - for name_, pkg_ in all_pkgs.items(): - # Hack: we use cmd_args get "artifact" valid path and write it to a file. 
- importcfg_content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) - - # Future work: support importmap in buck rules insted of hacking here. - if name_.startswith("third-party-source/go/"): - real_name_ = name_.removeprefix("third-party-source/go/") - importcfg_content.append(cmd_args("importmap ", real_name_, "=", name_, delimiter = "")) - - root = _out_root(shared) - importcfg = ctx.actions.declare_output(root, paths.basename(pkg_name) + "-importcfg") - ctx.actions.write(importcfg.as_output(), importcfg_content) - - cmd.add("-importcfg", importcfg) - cmd.hidden(all_pkgs.values()) - - return cmd - -def compile( - ctx: AnalysisContext, - pkg_name: str, - srcs: cmd_args, - pkgs: dict[str, Artifact] = {}, - deps: list[Dependency] = [], - compile_flags: list[str] = [], - assemble_flags: list[str] = [], - shared: bool = False) -> Artifact: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - root = _out_root(shared) - output = ctx.actions.declare_output(root, paths.basename(pkg_name) + ".a") - - cmd = get_toolchain_cmd_args(go_toolchain) - cmd.add(go_toolchain.compile_wrapper[RunInfo]) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, pkg_name, pkgs, deps, compile_flags, shared = shared), format = "--compiler={}")) - cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) - cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) - if ctx.attrs.embedcfg: - cmd.add(cmd_args(ctx.attrs.embedcfg, format = "--embedcfg={}")) - - argsfile = ctx.actions.declare_output(root, pkg_name + ".go.argsfile") - srcs_args = cmd_args(srcs) - ctx.actions.write(argsfile.as_output(), srcs_args, allow_args = True) - - cmd.add(cmd_args(argsfile, format = "@{}").hidden([srcs_args])) - - identifier = paths.basename(pkg_name) - if shared: - identifier += "[shared]" - ctx.actions.run(cmd, category = "go_compile", identifier = identifier) - - return output +def infer_package_root(srcs: list[Artifact]) -> str: + go_sources = [s for s in srcs if s.extension == ".go"] + if len(go_sources) == 0: + return "" + dir_set = {paths.dirname(s.short_path): None for s in go_sources} + if len(dir_set) > 1: + fail("Provide `package_root` target attribute. Can't infer it when there are multiple directories containing .go files: {}. 
Sources: {}".format(
+            dir_set.keys(),
+            [s.short_path for s in go_sources],
+        ))
+
+    return dir_set.keys()[0]
diff --git a/prelude/go/constraints/BUCK.v2 b/prelude/go/constraints/BUCK.v2
new file mode 100644
index 0000000000..a49e566f0d
--- /dev/null
+++ b/prelude/go/constraints/BUCK.v2
@@ -0,0 +1,105 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+load(":defs.bzl", "generate_tag_constraints")
+
+oncall("build_infra")
+
+source_listing()
+
+constraint_setting(
+    name = "cgo_enabled",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "cgo_enabled_auto",
+    constraint_setting = ":cgo_enabled",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "cgo_enabled_true",
+    constraint_setting = ":cgo_enabled",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "cgo_enabled_false",
+    constraint_setting = ":cgo_enabled",
+    visibility = ["PUBLIC"],
+)
+
+constraint_setting(
+    name = "compile_shared",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "compile_shared_false",
+    constraint_setting = ":compile_shared",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "compile_shared_true",
+    constraint_setting = ":compile_shared",
+    visibility = ["PUBLIC"],
+)
+
+constraint_setting(
+    name = "race",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "race_false",
+    constraint_setting = ":race",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "race_true",
+    constraint_setting = ":race",
+    visibility = ["PUBLIC"],
+)
+
+constraint_setting(
+    name = "asan",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "asan_false",
+    constraint_setting = ":asan",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "asan_true",
+    constraint_setting = ":asan",
+    visibility = ["PUBLIC"],
+)
+
+constraint_setting(
+    name = "coverage_mode",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "coverage_mode_set",
+    constraint_setting = ":coverage_mode",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "coverage_mode_count",
+    constraint_setting = ":coverage_mode",
+    visibility = ["PUBLIC"],
+)
+
+constraint_value(
+    name = "coverage_mode_atomic",
+    constraint_setting = ":coverage_mode",
+    visibility = ["PUBLIC"],
+)
+
+generate_tag_constraints()
diff --git a/prelude/go/constraints/defs.bzl b/prelude/go/constraints/defs.bzl
new file mode 100644
index 0000000000..8da0b3e810
--- /dev/null
+++ b/prelude/go/constraints/defs.bzl
@@ -0,0 +1,23 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//:native.bzl", "native")
+load("@prelude//go/transitions:tags_helper.bzl", "allowed_tags")
+
+def generate_tag_constraints():
+    for tag in allowed_tags:
+        setting_name = "tag_" + tag
+        native.constraint_setting(
+            name = setting_name,
+            visibility = ["PUBLIC"],
+        )
+
+        native.constraint_value(
+            name = setting_name + "__value",
+            constraint_setting = ":" + setting_name,
+            visibility = ["PUBLIC"],
+        )
diff --git a/prelude/go/coverage.bzl b/prelude/go/coverage.bzl
index b85f684538..cf6ab2808d 100644
--- a/prelude/go/coverage.bzl
+++ b/prelude/go/coverage.bzl
@@ -23,14 +23,17 @@ GoCoverResult = record(
     variables = field(cmd_args),
 )
 
-def cover_srcs(ctx: AnalysisContext, pkg_name: str, mode: GoCoverageMode, srcs: cmd_args) -> GoCoverResult:
-    out_covered_src_dir = ctx.actions.declare_output("__covered_srcs__", dir = True)
-    out_srcs_argsfile = ctx.actions.declare_output("covered_srcs.txt")
-    out_coverage_vars_argsfile = ctx.actions.declare_output("coverage_vars.txt")
+def cover_srcs(ctx: AnalysisContext, pkg_name: str, mode: GoCoverageMode, srcs: cmd_args, shared: bool) -> GoCoverResult:
+    path = pkg_name + "_static_" + mode.value
+    if shared:
+        path = pkg_name + "_shared_" + mode.value
+    out_covered_src_dir = ctx.actions.declare_output("__covered_" + path + "_srcs__", dir = True)
+    out_srcs_argsfile = ctx.actions.declare_output("covered_" + path + "_srcs.txt")
+    out_coverage_vars_argsfile = ctx.actions.declare_output("coverage_" + path + "_vars.txt")
     go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo]
     cmd = cmd_args()
-    cmd.add(go_toolchain.cover_srcs[RunInfo])
+    cmd.add(go_toolchain.cover_srcs)
     cmd.add("--cover", go_toolchain.cover)
     cmd.add("--coverage-mode", mode.value)
     cmd.add("--coverage-var-argsfile", out_coverage_vars_argsfile.as_output())
@@ -38,9 +41,9 @@ def cover_srcs(ctx: AnalysisContext, pkg_name: str, mode: GoCoverageMode, srcs:
     cmd.add("--out-srcs-argsfile", out_srcs_argsfile.as_output())
     cmd.add("--pkg-name", pkg_name)
     cmd.add(srcs)
-    ctx.actions.run(cmd, category = "go_cover")
+    ctx.actions.run(cmd, category = "go_cover", identifier = path)
     return GoCoverResult(
-        srcs = cmd_args(out_srcs_argsfile, format = "@{}").hidden(out_covered_src_dir).hidden(srcs),
+        srcs = cmd_args(out_srcs_argsfile, format = "@{}", hidden = [out_covered_src_dir, srcs]),
         variables = cmd_args(out_coverage_vars_argsfile, format = "@{}"),
     )
diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl
index 2ce58d9173..4613299911 100644
--- a/prelude/go/go_binary.bzl
+++ b/prelude/go/go_binary.bzl
@@ -16,24 +16,33 @@ load(
     "map_val",
     "value_or",
 )
-load(":compile.bzl", "compile", "get_filtered_srcs")
 load(":link.bzl", "link")
+load(":package_builder.bzl", "build_package")
 
 def go_binary_impl(ctx: AnalysisContext) -> list[Provider]:
-    lib = compile(
+    lib = build_package(
         ctx,
         "main",
-        get_filtered_srcs(ctx, ctx.attrs.srcs),
+        ctx.attrs.srcs,
+        package_root = ctx.attrs.package_root,
         deps = ctx.attrs.deps,
-        compile_flags = ctx.attrs.compiler_flags,
+        compiler_flags = ctx.attrs.compiler_flags,
+        race = ctx.attrs._race,
+        asan = ctx.attrs._asan,
+        embedcfg = ctx.attrs.embedcfg,
+        # We need to set CGO_ENABLED=0 for "pure" Go libraries, otherwise CGo files may be selected for compilation.
+        force_disable_cgo = True,
     )
     (bin, runtime_files, external_debug_info) = link(
         ctx,
-        lib,
+        lib.pkg,
         deps = ctx.attrs.deps,
         link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")),
         linker_flags = ctx.attrs.linker_flags,
         link_mode = ctx.attrs.link_mode,
+        race = ctx.attrs._race,
+        asan = ctx.attrs._asan,
+        external_linker_flags = ctx.attrs.external_linker_flags,
     )
 
     # runtime_files are all the artifacts that must be present in order for this
@@ -60,6 +69,6 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]:
             default_output = bin,
             other_outputs = other_outputs,
         ),
-        RunInfo(args = cmd_args(bin).hidden(other_outputs)),
+        RunInfo(args = cmd_args(bin, hidden = other_outputs)),
         DistInfo(nondebug_runtime_files = runtime_files),
     ]
diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl
index 28101cf52f..2b5c381147 100644
--- a/prelude/go/go_exported_library.bzl
+++ b/prelude/go/go_exported_library.bzl
@@ -14,27 +14,35 @@ load(
     "map_val",
     "value_or",
 )
-load(":compile.bzl", "compile", "get_filtered_srcs")
 load(":link.bzl", "GoBuildMode", "link")
+load(":package_builder.bzl", "build_package")
 
 def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]:
-    lib = compile(
+    lib = build_package(
         ctx,
         "main",
-        get_filtered_srcs(ctx, ctx.attrs.srcs),
+        ctx.attrs.srcs,
+        package_root = ctx.attrs.package_root,
         deps = ctx.attrs.deps,
-        compile_flags = ctx.attrs.compiler_flags,
+        compiler_flags = ctx.attrs.compiler_flags,
         shared = True,
+        race = ctx.attrs._race,
+        asan = ctx.attrs._asan,
+        embedcfg = ctx.attrs.embedcfg,
+        # We need to set CGO_ENABLED=0 for "pure" Go libraries, otherwise CGo files may be selected for compilation.
+        force_disable_cgo = True,
     )
     (bin, runtime_files, _external_debug_info) = link(
         ctx,
-        lib,
+        lib.pkg,
         deps = ctx.attrs.deps,
         build_mode = GoBuildMode(ctx.attrs.build_mode),
         link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static_pic")),
         linker_flags = ctx.attrs.linker_flags,
         external_linker_flags = ctx.attrs.external_linker_flags,
         shared = True,
+        race = ctx.attrs._race,
+        asan = ctx.attrs._asan,
     )
     return [
         DefaultInfo(
diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl
index 2bc66e990c..823b9246c4 100644
--- a/prelude/go/go_library.bzl
+++ b/prelude/go/go_library.bzl
@@ -23,9 +23,11 @@ load(
     "@prelude//utils:utils.bzl",
     "map_idx",
 )
-load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs")
+load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo", "get_inherited_compile_pkgs")
+load(":coverage.bzl", "GoCoverageMode")
 load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs")
-load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs")
+load(":package_builder.bzl", "build_package")
+load(":packages.bzl", "go_attr_pkg_name", "merge_pkgs")
 
 def go_library_impl(ctx: AnalysisContext) -> list[Provider]:
     pkgs = {}
@@ -34,34 +36,30 @@
     if ctx.attrs.srcs:
         pkg_name = go_attr_pkg_name(ctx)
 
-        # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation.
-        srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True)
+        shared = ctx.attrs._compile_shared
+        race = ctx.attrs._race
+        asan = ctx.attrs._asan
+        coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None
 
-        static_pkg = compile(
+        pkg = build_package(
             ctx,
             pkg_name,
-            srcs = srcs,
-            deps = ctx.attrs.deps + ctx.attrs.exported_deps,
-            compile_flags = ctx.attrs.compiler_flags,
-            assemble_flags = ctx.attrs.assembler_flags,
-            shared = False,
-        )
-
-        shared_pkg = compile(
-            ctx,
-            pkg_name,
-            srcs = srcs,
+            srcs = ctx.attrs.srcs,
+            package_root = ctx.attrs.package_root,
             deps = ctx.attrs.deps + ctx.attrs.exported_deps,
-            compile_flags = ctx.attrs.compiler_flags,
-            assemble_flags = ctx.attrs.assembler_flags,
-            shared = True,
+            compiler_flags = ctx.attrs.compiler_flags,
+            assembler_flags = ctx.attrs.assembler_flags,
+            shared = shared,
+            race = race,
+            asan = asan,
+            coverage_mode = coverage_mode,
+            embedcfg = ctx.attrs.embedcfg,
+            # We need to set CGO_ENABLED=0 for "pure" Go libraries, otherwise CGo files may be selected for compilation.
+            force_disable_cgo = True,
         )
 
-        default_output = static_pkg
-        pkgs[pkg_name] = GoPkg(
-            shared = shared_pkg,
-            static = static_pkg,
-        )
+        default_output = pkg.pkg
+        pkgs[pkg_name] = pkg
 
     return [
         DefaultInfo(default_output = default_output),
diff --git a/prelude/go/go_list.bzl b/prelude/go/go_list.bzl
new file mode 100644
index 0000000000..cbf1bf3b85
--- /dev/null
+++ b/prelude/go/go_list.bzl
@@ -0,0 +1,111 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+ +load("@prelude//:paths.bzl", "paths") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") + +GoListOut = record( + go_files = field(list[Artifact], default = []), + h_files = field(list[Artifact], default = []), + c_files = field(list[Artifact], default = []), + cxx_files = field(list[Artifact], default = []), + cgo_files = field(list[Artifact], default = []), + s_files = field(list[Artifact], default = []), + test_go_files = field(list[Artifact], default = []), + x_test_go_files = field(list[Artifact], default = []), + embed_files = field(list[Artifact], default = []), + cgo_cflags = field(list[str], default = []), + cgo_cppflags = field(list[str], default = []), +) + +def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_root: str, force_disable_cgo: bool, with_tests: bool, asan: bool) -> Artifact: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain, force_disable_cgo = force_disable_cgo) + env["GO111MODULE"] = "off" + + go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") + + # Create file structure that `go list` can recognize + # Use copied_dir, because embed doesn't work with symlinks + srcs_dir = ctx.actions.copied_dir( + "__{}_srcs_dir__".format(paths.basename(pkg_name)), + {src.short_path.removeprefix(package_root).lstrip("/"): src for src in srcs}, + ) + tags = go_toolchain.tags + ctx.attrs._tags + if asan: + tags.append("asan") + + required_felds = "GoFiles,CgoFiles,HFiles,CFiles,CXXFiles,SFiles,EmbedFiles,CgoCFLAGS,CgoCPPFLAGS" + if with_tests: + required_felds += ",TestGoFiles,XTestGoFiles" + + go_list_args = [ + go_toolchain.go_wrapper, + go_toolchain.go, + ["--workdir", srcs_dir], + ["--output", go_list_out.as_output()], + "list", + "-e", + "-json=" + required_felds, + ["-tags", ",".join(tags) if tags else []], + ".", + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(go_list_args, env = env, category = "go_list", identifier = identifier) + + return go_list_out + +def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: ArtifactValue) -> GoListOut: + go_list = go_list_out.read_json() + go_files, cgo_files, h_files, c_files, cxx_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [], [], [], [] + + for src in srcs: + # remove package_root prefix from src artifact path to match `go list` output format + src_path = src.short_path.removeprefix(package_root).lstrip("/") + if src_path in go_list.get("GoFiles", []): + go_files.append(src) + if src_path in go_list.get("CgoFiles", []): + cgo_files.append(src) + if src_path in go_list.get("HFiles", []): + h_files.append(src) + if src_path in go_list.get("CFiles", []): + c_files.append(src) + if src_path in go_list.get("CXXFiles", []): + cxx_files.append(src) + if src_path in go_list.get("SFiles", []): + s_files.append(src) + if src_path in go_list.get("TestGoFiles", []): + test_go_files.append(src) + if src_path in go_list.get("XTestGoFiles", []): + x_test_go_files.append(src) + if _any_starts_with(go_list.get("EmbedFiles", []), src_path): + embed_files.append(src) + + cgo_cflags = go_list.get("CgoCFLAGS", []) + cgo_cppflags = go_list.get("CgoCPPFLAGS", []) + + return GoListOut( + go_files = go_files, + h_files = h_files, + c_files = c_files, + cxx_files = cxx_files, + cgo_files = cgo_files, + s_files = s_files, + test_go_files = test_go_files, + x_test_go_files = x_test_go_files, + embed_files = embed_files, + cgo_cflags = cgo_cflags, + 
cgo_cppflags = cgo_cppflags, + ) + +def _any_starts_with(files: list[str], path: str): + for file in files: + if paths.starts_with(file, path): + return True + + return False diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl new file mode 100644 index 0000000000..31fbb62f8b --- /dev/null +++ b/prelude/go/go_stdlib.bzl @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":packages.bzl", "GoStdlib") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled", "get_toolchain_env_vars") + +def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + stdlib_pkgdir = ctx.actions.declare_output("stdlib_pkgdir", dir = True) + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs._cgo_enabled) + tags = [] + go_toolchain.tags + linker_flags = [] + go_toolchain.linker_flags + assembler_flags = [] + go_toolchain.assembler_flags + compiler_flags = [] + go_toolchain.compiler_flags + compiler_flags += ["-buildid="] # Make builds reproducible. + if ctx.attrs._compile_shared: + assembler_flags += ["-shared"] + compiler_flags += ["-shared"] + + if ctx.attrs._asan: + compiler_flags += ["-asan"] + tags += ["asan"] + + env = get_toolchain_env_vars(go_toolchain) + env["GODEBUG"] = "installgoroot=all" + env["CGO_ENABLED"] = "1" if cgo_enabled else "0" + + cxx_toolchain = go_toolchain.cxx_toolchain_for_linking + if cxx_toolchain != None: + c_compiler = cxx_toolchain.c_compiler_info + + env["CC"] = cmd_args(c_compiler.compiler, delimiter = " ", quote = "shell", absolute_prefix = "%cwd%/") + env["CGO_CFLAGS"] = cmd_args(c_compiler.compiler_flags, delimiter = " ", quote = "shell", absolute_prefix = "%cwd%/") + env["CGO_CPPFLAGS"] = cmd_args(c_compiler.preprocessor_flags, delimiter = " ", quote = "shell", absolute_prefix = "%cwd%/") + + cmd = cmd_args([ + go_toolchain.go_wrapper, + go_toolchain.go, + "install", + "-pkgdir", + stdlib_pkgdir.as_output(), + cmd_args(["-asmflags=", cmd_args(assembler_flags, delimiter = " ")], delimiter = "") if assembler_flags else [], + cmd_args(["-gcflags=", cmd_args(compiler_flags, delimiter = " ")], delimiter = "") if compiler_flags else [], + cmd_args(["-ldflags=", cmd_args(linker_flags, delimiter = " ")], delimiter = "") if linker_flags else [], + ["-tags", ",".join(tags)] if tags else [], + ["-race"] if ctx.attrs._race else [], + "std", + ]) + + ctx.actions.run(cmd, env = env, category = "go_build_stdlib", identifier = "go_build_stdlib") + + importcfg = ctx.actions.declare_output("stdlib.importcfg") + ctx.actions.run( + [ + go_toolchain.gen_stdlib_importcfg, + "--stdlib", + stdlib_pkgdir, + "--output", + importcfg.as_output(), + ], + category = "go_gen_stdlib_importcfg", + identifier = "go_gen_stdlib_importcfg", + ) + + return [ + DefaultInfo(default_output = stdlib_pkgdir), + GoStdlib(pkgdir = stdlib_pkgdir, importcfg = importcfg), + ] diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 3ee295defe..4bb088956f 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -9,41 +9,50 @@ load( "@prelude//linking:link_info.bzl", "LinkStyle", ) +load( + "@prelude//tests:re_utils.bzl", + "get_re_executors_from_props", +) load( "@prelude//utils:utils.bzl", "map_val", "value_or", ) 
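+# The `remote_execution` attribute is translated into TestInfo executor settings +# via get_re_executors_from_props (see go_test_impl below).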
load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") -load(":compile.bzl", "GoTestInfo", "compile", "get_filtered_srcs") -load(":coverage.bzl", "GoCoverageMode", "cover_srcs") +load(":compile.bzl", "GoTestInfo", "get_inherited_compile_pkgs") +load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "link") +load(":package_builder.bzl", "build_package") load(":packages.bzl", "go_attr_pkg_name") def _gen_test_main( ctx: AnalysisContext, pkg_name: str, coverage_mode: [GoCoverageMode, None], - coverage_vars: [cmd_args, None], + coverage_vars: dict[str, cmd_args], srcs: cmd_args) -> Artifact: """ Generate a `main.go` which calls tests from the given sources. """ output = ctx.actions.declare_output("main.go") - cmd = cmd_args() - cmd.add(ctx.attrs._testmaingen[RunInfo]) - if ctx.attrs.coverage_mode: - cmd.add(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(pkg_name, format = "--import-path={}")) + cmd = [] + cmd.append(ctx.attrs._testmaingen[RunInfo]) + + # if ctx.attrs.coverage_mode: + # cmd.append(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) + cmd.append(cmd_args(output.as_output(), format = "--output={}")) + cmd.append(cmd_args(pkg_name, format = "--import-path={}")) if coverage_mode != None: - cmd.add("--cover-mode", coverage_mode.value) - if coverage_vars != None: - cmd.add(coverage_vars) - cmd.add(srcs) - ctx.actions.run(cmd, category = "go_test_main_gen") + cmd.extend(["--cover-mode", coverage_mode.value]) + for _, vars in coverage_vars.items(): + cmd.append(vars) + cmd.append(srcs) + ctx.actions.run(cmd_args(cmd), category = "go_test_main_gen") return output +def is_subpackage_of(other_pkg_name: str, pkg_name: str) -> bool: + return pkg_name == other_pkg_name or other_pkg_name.startswith(pkg_name + "/") + def go_test_impl(ctx: AnalysisContext) -> list[Provider]: deps = ctx.attrs.deps srcs = ctx.attrs.srcs @@ -59,47 +68,68 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # TODO: should we assert that pkg_name != None here? pkg_name = lib.pkg_name - srcs = get_filtered_srcs(ctx, srcs, tests = True) - # If coverage is enabled for this test, we need to preprocess the sources # with the Go cover tool. - coverage_mode = None - coverage_vars = None - if ctx.attrs.coverage_mode != None: - coverage_mode = GoCoverageMode(ctx.attrs.coverage_mode) - cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs) - srcs = cov_res.srcs - coverage_vars = cov_res.variables + coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None + coverage_vars = {} + pkgs = {} # Compile all tests into a package. - tests = compile( + tests = build_package( ctx, pkg_name, - srcs, + srcs = srcs, + package_root = ctx.attrs.package_root, deps = deps, - compile_flags = ctx.attrs.compiler_flags, + pkgs = pkgs, + compiler_flags = ctx.attrs.compiler_flags, + coverage_mode = coverage_mode, + race = ctx.attrs._race, + asan = ctx.attrs._asan, + embedcfg = ctx.attrs.embedcfg, + tests = True, + # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. + force_disable_cgo = True, ) + if coverage_mode != None: + coverage_vars[pkg_name] = tests.coverage_vars + + # Get all packages that are linked to the test (i.e. 
the entire dependency tree) + for name, pkg in get_inherited_compile_pkgs(deps).items(): + if ctx.label != None and is_subpackage_of(name, ctx.label.package): + coverage_vars[name] = pkg.coverage_vars + pkgs[name] = pkg.pkg + + pkgs[pkg_name] = tests.pkg + # Generate a main function which runs the tests and build that into another # package. - gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) - main = compile(ctx, "main", cmd_args(gen_main), pkgs = {pkg_name: tests}) + gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, tests.srcs_list) + main = build_package(ctx, "main", [gen_main], package_root = "", pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race, asan = ctx.attrs._asan, cgo_gen_dir_name = "cgo_gen_test_main") # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( ctx = ctx, - main = main, - pkgs = {pkg_name: tests}, + main = main.pkg, + pkgs = pkgs, deps = deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, + shared = False, + race = ctx.attrs._race, + asan = ctx.attrs._asan, ) - run_cmd = cmd_args(bin).hidden(runtime_files, external_debug_info) - # As per v1, copy in resources next to binary. + copied_resources = [] for resource in ctx.attrs.resources: - run_cmd.hidden(ctx.actions.copy_file(resource.short_path, resource)) + copied_resources.append(ctx.actions.copy_file(resource.short_path, resource)) + + run_cmd = cmd_args(bin, hidden = [runtime_files, external_debug_info] + copied_resources) + + # Setup RE executors based on the `remote_execution` param. + re_executor, executor_overrides = get_re_executors_from_props(ctx) return inject_test_run_info( ctx, @@ -109,8 +139,11 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: env = ctx.attrs.env, labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, + default_executor = re_executor, + executor_overrides = executor_overrides, # FIXME: Consider setting to true - run_from_project_root = False, + run_from_project_root = re_executor != None, + use_project_relative_paths = re_executor != None, ), ) + [ DefaultInfo( diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index bb7957f995..5deae5b0a4 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -32,11 +32,11 @@ load( load( ":packages.bzl", "GoPkg", # @Unused used as type + "make_importcfg", "merge_pkgs", "pkg_artifacts", - "stdlib_pkg_artifacts", ) -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") # Provider wrapping packages used for linking. GoPkgLinkInfo = provider(fields = { @@ -58,14 +58,6 @@ def _build_mode_param(mode: GoBuildMode) -> str: def get_inherited_link_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgLinkInfo].pkgs for d in deps if GoPkgLinkInfo in d]) -def is_any_dep_cgo(deps: list[Dependency]) -> bool: - for d in deps: - if GoPkgLinkInfo in d: - for pkg in d[GoPkgLinkInfo].pkgs.values(): - if pkg.cgo: - return True - return False - # TODO(cjhopman): Is link_style a LibOutputStyle or a LinkStrategy here? 
Based # on returning an empty thing for link_style != shared, it seems likely its # intended to be LibOutputStyle, but it's called in places that are passing what @@ -87,12 +79,10 @@ def _process_shared_dependencies( ctx.actions, deps = filter(None, map_idx(SharedLibraryInfo, deps)), ) - shared_libs = {} - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[name] = shared_lib.lib + shared_libs = traverse_shared_library_info(shlib_info) return executable_shared_lib_arguments( - ctx.actions, + ctx, ctx.attrs._go_toolchain[GoToolchainInfo].cxx_toolchain_for_linking, artifact, shared_libs, @@ -108,7 +98,9 @@ def link( link_style: LinkStyle = LinkStyle("static"), linker_flags: list[typing.Any] = [], external_linker_flags: list[typing.Any] = [], - shared: bool = False): + shared: bool = False, + race: bool = False, + asan: bool = False): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] if go_toolchain.env_go_os == "windows": executable_extension = ".exe" @@ -119,35 +111,30 @@ def link( file_extension = shared_extension if build_mode == GoBuildMode("c_shared") else executable_extension output = ctx.actions.declare_output(ctx.label.name + file_extension) - cmd = get_toolchain_cmd_args(go_toolchain) + cmd = cmd_args() cmd.add(go_toolchain.linker) - if shared: - cmd.add(go_toolchain.linker_flags_shared) - else: - cmd.add(go_toolchain.linker_flags_static) + cmd.add(go_toolchain.linker_flags) cmd.add("-o", output.as_output()) cmd.add("-buildmode=" + _build_mode_param(build_mode)) cmd.add("-buildid=") # Setting to a static buildid helps make the binary reproducible. + if race: + cmd.add("-race") + + if asan: + cmd.add("-asan") + # Add inherited Go pkgs to library search path. all_pkgs = merge_pkgs([ pkgs, - pkg_artifacts(get_inherited_link_pkgs(deps), shared = shared), - stdlib_pkg_artifacts(go_toolchain, shared = shared), + pkg_artifacts(get_inherited_link_pkgs(deps)), ]) - importcfg_content = [] - for name_, pkg_ in all_pkgs.items(): - # Hack: we use cmd_args get "artifact" valid path and write it to a file. - importcfg_content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) - - importcfg = ctx.actions.declare_output("importcfg") - ctx.actions.write(importcfg.as_output(), importcfg_content) + importcfg = make_importcfg(ctx, "", all_pkgs, with_importmap = False) cmd.add("-importcfg", importcfg) - cmd.hidden(all_pkgs.values()) executable_args = _process_shared_dependencies(ctx, output, deps, link_style) @@ -156,15 +143,15 @@ def link( link_mode = "external" elif shared: link_mode = "external" - elif is_any_dep_cgo(deps): - link_mode = "external" - else: - link_mode = "internal" - cmd.add("-linkmode", link_mode) - if link_mode == "external": + if link_mode != None: + cmd.add("-linkmode", link_mode) + + cxx_toolchain = go_toolchain.cxx_toolchain_for_linking + if cxx_toolchain == None and link_mode == "external": + fail("cxx_toolchain required for link_mode='external'") + if cxx_toolchain != None: is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" - cxx_toolchain = go_toolchain.cxx_toolchain_for_linking # Gather external link args from deps. 
ext_links = get_link_args_for_strategy(ctx, cxx_inherited_link_info(deps), to_link_strategy(link_style)) @@ -173,21 +160,18 @@ def link( cxx_toolchain, [ext_links], ) - ext_link_args = cmd_args() + ext_link_args = cmd_args(hidden = ext_link_args_output.hidden) ext_link_args.add(cmd_args(executable_args.extra_link_args, quote = "shell")) ext_link_args.add(external_linker_flags) ext_link_args.add(ext_link_args_output.link_args) - ext_link_args.hidden(ext_link_args_output.hidden) # Delegate to C++ linker... # TODO: It feels a bit inefficient to generate a wrapper file for every # link. Is there some way to etract the first arg of `RunInfo`? Or maybe - # we can generate te platform-specific stuff once and re-use? + # we can generate the platform-specific stuff once and re-use? cxx_link_cmd = cmd_args( [ cxx_toolchain.linker_info.linker, - cxx_toolchain.linker_info.linker_flags, - go_toolchain.external_linker_flags, ext_link_args, "%*" if is_win else "\"$@\"", ], @@ -199,12 +183,20 @@ def link( allow_args = True, is_executable = True, ) - cmd.add("-extld", linker_wrapper).hidden(cxx_link_cmd) + cmd.add("-extld", linker_wrapper, cmd_args(hidden = cxx_link_cmd)) + cmd.add("-extldflags", cmd_args( + cxx_toolchain.linker_info.linker_flags, + go_toolchain.external_linker_flags, + delimiter = " ", + quote = "shell", + )) cmd.add(linker_flags) cmd.add(main) - ctx.actions.run(cmd, category = "go_link") + env = get_toolchain_env_vars(go_toolchain) + + ctx.actions.run(cmd, env = env, category = "go_link") return (output, executable_args.runtime_files, executable_args.external_debug_info) diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl new file mode 100644 index 0000000000..040a3178cc --- /dev/null +++ b/prelude/go/package_builder.bzl @@ -0,0 +1,281 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load("@prelude//utils:utils.bzl", "dedupe_by_value") +load(":cgo_builder.bzl", "build_cgo") +load(":compile.bzl", "get_inherited_compile_pkgs", "infer_package_root") +load( + ":coverage.bzl", + "GoCoverageMode", # @Unused used as type +) +load(":go_list.bzl", "go_list", "parse_go_list_out") +load(":packages.bzl", "GoPkg", "make_importcfg", "merge_pkgs", "pkg_artifacts") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") + +def build_package( + ctx: AnalysisContext, + pkg_name: str, + srcs: list[Artifact], + package_root: str | None, + pkgs: dict[str, Artifact] = {}, + deps: list[Dependency] = [], + compiler_flags: list[str] = [], + assembler_flags: list[str] = [], + shared: bool = False, + race: bool = False, + asan: bool = False, + coverage_mode: GoCoverageMode | None = None, + embedcfg: Artifact | None = None, + tests: bool = False, + force_disable_cgo: bool = False, + # If you change this dir or naming convention, please + # update the corresponding logic in `fbgolist`. + # Otherwise editing and linting for Go will break. 
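+        # go_test builds its generated "main" package with +        # cgo_gen_dir_name = "cgo_gen_test_main" so the two outputs don't clash.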
+ cgo_gen_dir_name: str = "cgo_gen") -> GoPkg: + if race and coverage_mode not in [None, GoCoverageMode("atomic")]: + fail("`coverage_mode` must be `atomic` when `race=True`") + + out = ctx.actions.declare_output(paths.basename(pkg_name) + ".a") + + cgo_gen_dir = ctx.actions.declare_output(cgo_gen_dir_name, dir = True) + + srcs = dedupe_by_value(srcs) + + has_go_files = False + for src in srcs: + if src.extension == ".go": + has_go_files = True + break + + if not has_go_files: + return GoPkg( + pkg = ctx.actions.write(out.as_output(), ""), + coverage_vars = cmd_args(), + srcs_list = cmd_args(), + cgo_gen_dir = ctx.actions.copied_dir(cgo_gen_dir.as_output(), {}), + ) + + package_root = package_root if package_root != None else infer_package_root(srcs) + + go_list_out = go_list(ctx, pkg_name, srcs, package_root, force_disable_cgo, with_tests = tests, asan = asan) + + srcs_list_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_srcs_list.argsfile") + coverage_vars_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_coverage_vars.argsfile") + dynamic_outputs = [out, srcs_list_argsfile, coverage_vars_argsfile, cgo_gen_dir] + + all_pkgs = merge_pkgs([ + pkgs, + pkg_artifacts(get_inherited_compile_pkgs(deps)), + ]) + importcfg = make_importcfg(ctx, pkg_name, all_pkgs, with_importmap = True) + + def f(ctx: AnalysisContext, artifacts, outputs, go_list_out = go_list_out): + go_list = parse_go_list_out(srcs, package_root, artifacts[go_list_out]) + + symabis = _symabis(ctx, pkg_name, go_list.s_files, assembler_flags, shared) + + # Generate CGO and C sources. + cgo_go_files, cgo_o_files, cgo_gen_tmp_dir = build_cgo(ctx, go_list.cgo_files, go_list.h_files, go_list.c_files + go_list.cxx_files, go_list.cgo_cflags, go_list.cgo_cppflags) + ctx.actions.copy_dir(outputs[cgo_gen_dir], cgo_gen_tmp_dir) + + go_files = go_list.go_files + cgo_go_files + + src_list_for_argsfile = go_files + (go_list.test_go_files + go_list.x_test_go_files if tests else []) + ctx.actions.write(outputs[srcs_list_argsfile], cmd_args(src_list_for_argsfile, "")) + + covered_go_files, coverage_vars_out = _cover(ctx, pkg_name, go_files, coverage_mode) + ctx.actions.write(outputs[coverage_vars_argsfile], coverage_vars_out) + + go_files_to_compile = covered_go_files + ((go_list.test_go_files + go_list.x_test_go_files) if tests else []) + go_a_file, asmhdr = _compile(ctx, pkg_name, go_files_to_compile, importcfg, compiler_flags, shared, race, asan, embedcfg, go_list.embed_files, symabis, len(go_list.s_files) > 0) + + asm_o_files = _assembly(ctx, pkg_name, go_list.s_files, asmhdr, assembler_flags, shared) + + pkg_file = _pack(ctx, pkg_name, go_a_file, cgo_o_files + asm_o_files) + + ctx.actions.copy_file(outputs[out], pkg_file) + + ctx.actions.dynamic_output(dynamic = [go_list_out], inputs = [], outputs = [o.as_output() for o in dynamic_outputs], f = f) + + return GoPkg( + pkg = out, + coverage_vars = cmd_args(coverage_vars_argsfile, format = "@{}"), + srcs_list = cmd_args(srcs_list_argsfile, format = "@{}", hidden = srcs), + cgo_gen_dir = cgo_gen_dir, + ) + +def _compile( + ctx: AnalysisContext, + pkg_name: str, + go_srcs: list[Artifact], + importcfg: cmd_args, + compiler_flags: list[str], + shared: bool, + race: bool, + asan: bool, + embedcfg: Artifact | None = None, + embed_files: list[Artifact] = [], + symabis: Artifact | None = None, + gen_asmhdr: bool = False) -> (Artifact, Artifact | None): + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + + env = get_toolchain_env_vars(go_toolchain) + out =
ctx.actions.declare_output("go_compile_out.a") + + if len(go_srcs) == 0: + ctx.actions.write(out.as_output(), "") + return out, None + + asmhdr = ctx.actions.declare_output("__asmhdr__/go_asm.h") if gen_asmhdr else None + + compile_cmd = cmd_args( + [ + go_toolchain.compiler, + go_toolchain.compiler_flags, + compiler_flags, + "-buildid=", + "-nolocalimports", + ["-p", pkg_name], + ["-importcfg", importcfg], + ["-o", out.as_output()], + ["-race"] if race else [], + ["-asan"] if asan else [], + ["-shared"] if shared else [], + ["-embedcfg", embedcfg] if embedcfg else [], + ["-symabis", symabis] if symabis else [], + ["-asmhdr", asmhdr.as_output()] if asmhdr else [], + go_srcs, + ], + hidden = embed_files, # files and directories should be available for embedding + ) + + identifier = paths.basename(pkg_name) + ctx.actions.run(compile_cmd, env = env, category = "go_compile", identifier = identifier) + + return (out, asmhdr) + +def _symabis(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], assembler_flags: list[str], shared: bool) -> Artifact | None: + if len(s_files) == 0: + return None + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + + # we have to supply "go_asm.h" with some content to make the asm tool happy; + # its content doesn't matter as long as -gensymabis is provided + # https://github.com/golang/go/blob/3f8f929d60a90c4e4e2b07c8d1972166c1a783b1/src/cmd/go/internal/work/gc.go#L441-L443 + fake_asmhdr = ctx.actions.write("__fake_asmhdr__/go_asm.h", "") + symabis = ctx.actions.declare_output("symabis") + asm_cmd = [ + go_toolchain.assembler, + go_toolchain.assembler_flags, + assembler_flags, + _asm_args(ctx, pkg_name, shared), + "-gensymabis", + ["-o", symabis.as_output()], + ["-I", cmd_args(fake_asmhdr, parent = 1)], + s_files, + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(asm_cmd, env = env, category = "go_symabis", identifier = identifier) + + return symabis + +def _assembly(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], asmhdr: Artifact | None, assembler_flags: list[str], shared: bool) -> list[Artifact]: + if len(s_files) == 0: + return [] + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + + o_files = [] + identifier = paths.basename(pkg_name) + for s_file in s_files: + o_file = ctx.actions.declare_output(s_file.short_path + ".o") + o_files.append(o_file) + + asm_cmd = [ + go_toolchain.assembler, + go_toolchain.assembler_flags, + assembler_flags, + _asm_args(ctx, pkg_name, shared), + ["-o", o_file.as_output()], + ["-I", cmd_args(asmhdr, parent = 1)] if asmhdr else [], # can it actually be None?
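+            # (it can: _compile returns asmhdr = None when no Go files get +            # compiled, e.g. when build tags filter them all out)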
+ s_file, + ] + + ctx.actions.run(asm_cmd, env = env, category = "go_assembly", identifier = identifier + "/" + s_file.short_path) + + return o_files + +def _pack(ctx: AnalysisContext, pkg_name: str, a_file: Artifact, o_files: list[Artifact]) -> Artifact: + if len(o_files) == 0: + # no need to repack .a file, if there are no .o files + return a_file + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + + pkg_file = ctx.actions.declare_output("pkg.a") + + pack_cmd = [ + go_toolchain.packer, + "c", + pkg_file.as_output(), + a_file, + o_files, + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(pack_cmd, env = env, category = "go_pack", identifier = identifier) + + return pkg_file + +def _asm_args(ctx: AnalysisContext, pkg_name: str, shared: bool): + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + return [ + ["-p", pkg_name], + ["-I", cmd_args(go_toolchain.env_go_root, absolute_suffix = "/pkg/include")], + ["-D", "GOOS_" + go_toolchain.env_go_os] if go_toolchain.env_go_os else [], + ["-D", "GOARCH_" + go_toolchain.env_go_arch] if go_toolchain.env_go_arch else [], + ["-shared"] if shared else [], + ] + +def _cover(ctx: AnalysisContext, pkg_name: str, go_files: list[Artifact], coverage_mode: GoCoverageMode | None) -> (list[Artifact], str | cmd_args): + if coverage_mode == None: + return go_files, "" + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + covered_files = [] + coverage_vars = {} + for go_file in go_files: + covered_file = ctx.actions.declare_output("with_coverage", go_file.short_path) + covered_files.append(covered_file) + + var = "Var_" + sha256(pkg_name + "::" + go_file.short_path) + coverage_vars[var] = go_file.short_path + + cover_cmd = [ + go_toolchain.cover, + ["-mode", coverage_mode.value], + ["-var", var], + ["-o", covered_file.as_output()], + go_file, + ] + + ctx.actions.run(cover_cmd, env = env, category = "go_cover", identifier = paths.basename(pkg_name) + "/" + go_file.short_path) + + coverage_vars_out = "" + if len(coverage_vars) > 0: + # convert coverage_vars to argsfile for compatibility with python implementation + cover_pkg = "{}:{}".format(pkg_name, ",".join(["{}={}".format(var, name) for var, name in coverage_vars.items()])) + coverage_vars_out = cmd_args("--cover-pkgs", cover_pkg) + + return covered_files, coverage_vars_out diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 26e90b548b..7d3baa6bff 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -5,16 +5,21 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:artifacts.bzl", "ArtifactGroupInfo") load("@prelude//go:toolchain.bzl", "GoToolchainInfo") load("@prelude//utils:utils.bzl", "value_or") GoPkg = record( - # Built w/ `-shared`. - shared = field(Artifact), - # Built w/o `-shared`. 
- static = field(Artifact), - cgo = field(bool, default = False), + pkg = field(Artifact), + coverage_vars = field(cmd_args | None, default = None), + srcs_list = field(cmd_args | None, default = None), + cgo_gen_dir = field(Artifact), +) + +GoStdlib = provider( + fields = { + "importcfg": provider_field(Artifact), + "pkgdir": provider_field(Artifact), + }, ) def go_attr_pkg_name(ctx: AnalysisContext) -> str: @@ -39,35 +44,50 @@ def merge_pkgs(pkgss: list[dict[str, typing.Any]]) -> dict[str, typing.Any]: return all_pkgs -def pkg_artifacts(pkgs: dict[str, GoPkg], shared: bool = False) -> dict[str, Artifact]: +def pkg_artifacts(pkgs: dict[str, GoPkg]) -> dict[str, Artifact]: """ Return a map package name to a `shared` or `static` package artifact. """ return { - name: pkg.shared if shared else pkg.static + name: pkg.pkg for name, pkg in pkgs.items() } -def stdlib_pkg_artifacts(toolchain: GoToolchainInfo, shared: bool = False) -> dict[str, Artifact]: - """ - Return a map package name to a `shared` or `static` package artifact of stdlib. - """ +def make_importcfg( + ctx: AnalysisContext, + pkg_name: str, + own_pkgs: dict[str, typing.Any], + with_importmap: bool) -> cmd_args: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + stdlib = ctx.attrs._go_stdlib[GoStdlib] - prebuilt_stdlib = toolchain.prebuilt_stdlib_shared if shared else toolchain.prebuilt_stdlib - stdlib_pkgs = prebuilt_stdlib[ArtifactGroupInfo].artifacts + content = [] + for name_, pkg_ in own_pkgs.items(): + # Hack: we use cmd_args to get a valid "artifact" path and write it to a file. + content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) - if len(stdlib_pkgs) == 0: - fail("Stdlib for current platfrom is missing from toolchain.") + # Note: this matters for packages which do not specify package_name. + # Future work: support importmap in buck rules instead of hacking here. + # BUG: Should use go.vendor_path instead of hard-coding values. + for vendor_prefix in ["third-party-source/go/", "third-party-go/vendor/"]: + if with_importmap and name_.startswith(vendor_prefix): + real_name_ = name_.removeprefix(vendor_prefix) + content.append(cmd_args("importmap ", real_name_, "=", name_, delimiter = "")) - pkgs = {} - for pkg in stdlib_pkgs: - # remove first directory like `pgk` - _, _, temp_path = pkg.short_path.partition("/") + own_importcfg = ctx.actions.declare_output("{}.importcfg".format(pkg_name)) + ctx.actions.write(own_importcfg, content) - # remove second directory like `darwin_amd64` - # now we have name like `net/http.a` - _, _, pkg_relpath = temp_path.partition("/") - name = pkg_relpath.removesuffix(".a") # like `net/http` - pkgs[name] = pkg + final_importcfg = ctx.actions.declare_output("{}.final.importcfg".format(pkg_name)) + ctx.actions.run( + [ + go_toolchain.concat_files, + "--output", + final_importcfg.as_output(), + stdlib.importcfg, + own_importcfg, + ], + category = "concat_importcfgs", + identifier = pkg_name, + ) - return pkgs + return cmd_args(final_importcfg, hidden = [stdlib.pkgdir, own_pkgs.values()]) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 3f00630e3b..eb9aeb466e 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -5,58 +5,74 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
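+# CxxToolchainInfo types the `cxx_toolchain_for_linking` field below; whether that +# field is set also drives the CGO_ENABLED default (see evaluate_cgo_enabled).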
+load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") + GoToolchainInfo = provider( # @unsorted-dict-items fields = { - "assembler": provider_field(typing.Any, default = None), - "cgo": provider_field(typing.Any, default = None), - "cgo_wrapper": provider_field(typing.Any, default = None), - "compile_wrapper": provider_field(typing.Any, default = None), - "compiler": provider_field(typing.Any, default = None), - "compiler_flags_shared": provider_field(typing.Any, default = None), - "compiler_flags_static": provider_field(typing.Any, default = None), - "cover": provider_field(typing.Any, default = None), - "cover_srcs": provider_field(typing.Any, default = None), - "cxx_toolchain_for_linking": provider_field(typing.Any, default = None), - "env_go_arch": provider_field(typing.Any, default = None), - "env_go_os": provider_field(typing.Any, default = None), - "env_go_arm": provider_field(typing.Any, default = None), + "assembler": provider_field(RunInfo), + "assembler_flags": provider_field(typing.Any, default = None), + "c_compiler_flags": provider_field(typing.Any, default = None), + "cgo": provider_field(RunInfo), + "cgo_wrapper": provider_field(RunInfo), + "gen_stdlib_importcfg": provider_field(RunInfo), + "go_wrapper": provider_field(RunInfo), + "compiler": provider_field(RunInfo), + "compiler_flags": provider_field(typing.Any, default = None), + "concat_files": provider_field(RunInfo), + "cover": provider_field(RunInfo), + "cxx_toolchain_for_linking": provider_field(CxxToolchainInfo | None, default = None), + "env_go_arch": provider_field(str), + "env_go_os": provider_field(str), + "env_go_arm": provider_field(str | None, default = None), "env_go_root": provider_field(typing.Any, default = None), + "env_go_debug": provider_field(dict[str, str], default = {}), "external_linker_flags": provider_field(typing.Any, default = None), - "filter_srcs": provider_field(typing.Any, default = None), - "go": provider_field(typing.Any, default = None), - "linker": provider_field(typing.Any, default = None), - "linker_flags_shared": provider_field(typing.Any, default = None), - "linker_flags_static": provider_field(typing.Any, default = None), - "packer": provider_field(typing.Any, default = None), - "prebuilt_stdlib": provider_field(typing.Any, default = None), - "prebuilt_stdlib_shared": provider_field(typing.Any, default = None), - "tags": provider_field(typing.Any, default = None), + "go": provider_field(RunInfo), + "linker": provider_field(RunInfo), + "linker_flags": provider_field(typing.Any, default = None), + "packer": provider_field(RunInfo), + "tags": provider_field(list[str], default = []), }, ) -def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_disable_cgo = False) -> cmd_args: - cmd = cmd_args("env") - - # opt-out from Go1.20 coverage redisign - cmd.add("GOEXPERIMENT=nocoverageredesign") +def get_toolchain_env_vars(toolchain: GoToolchainInfo, force_disable_cgo = False) -> dict[str, str | cmd_args]: + env = { + "GOARCH": toolchain.env_go_arch, + # opt-out from Go1.20 coverage redesign + "GOEXPERIMENT": "nocoverageredesign", + "GOOS": toolchain.env_go_os, + } - if toolchain.env_go_arch != None: - cmd.add("GOARCH={}".format(toolchain.env_go_arch)) - if toolchain.env_go_os != None: - cmd.add("GOOS={}".format(toolchain.env_go_os)) if toolchain.env_go_arm != None: - cmd.add("GOARM={}".format(toolchain.env_go_arm)) - if go_root and toolchain.env_go_root != None: - cmd.add(cmd_args(toolchain.env_go_root, format = "GOROOT={}")) + env["GOARM"] = 
toolchain.env_go_arm + if toolchain.env_go_root != None: + env["GOROOT"] = toolchain.env_go_root + if toolchain.env_go_debug: + godebug = ",".join(["{}={}".format(k, v) for k, v in toolchain.env_go_debug.items()]) + env["GODEBUG"] = godebug if force_disable_cgo: - cmd.add("CGO_ENABLED=0") + env["CGO_ENABLED"] = "0" else: # CGO is enabled by default for native compilation, but we need to set it # explicitly for cross-builds: # https://go-review.googlesource.com/c/go/+/12603/2/src/cmd/cgo/doc.go - if toolchain.cgo != None: - cmd.add("CGO_ENABLED=1") + cxx_toolchain_available = toolchain.cxx_toolchain_for_linking != None + if cxx_toolchain_available: + env["CGO_ENABLED"] = "1" + + return env + +# Sets the default value of the cgo_enabled attribute based on the presence of a C++ toolchain. +def evaluate_cgo_enabled(toolchain: GoToolchainInfo, cgo_enabled: [bool, None]) -> bool: + cxx_toolchain_available = toolchain.cxx_toolchain_for_linking != None + + if cgo_enabled and not cxx_toolchain_available: + fail("Cgo requires a C++ toolchain. Set cgo_enabled=None|False.") + + if cgo_enabled != None: + return cgo_enabled - return cmd + # Return True if a cxx_toolchain is available for the current configuration, False otherwise. + return cxx_toolchain_available diff --git a/prelude/go/tools/BUCK.v2 b/prelude/go/tools/BUCK.v2 index b7499e98f9..4c31770f48 100644 --- a/prelude/go/tools/BUCK.v2 +++ b/prelude/go/tools/BUCK.v2 @@ -1,26 +1,32 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( - name = "compile_wrapper", - main = "compile_wrapper.py", + name = "concat_files", + main = "concat_files.py", visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( - name = "cover_srcs", - main = "cover_srcs.py", + name = "cgo_wrapper", + main = "cgo_wrapper.py", visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( - name = "filter_srcs", - main = "filter_srcs.py", + name = "gen_stdlib_importcfg", + main = "gen_stdlib_importcfg.py", visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( - name = "cgo_wrapper", - main = "cgo_wrapper.py", + name = "go_wrapper", + main = "go_wrapper.py", visibility = ["PUBLIC"], ) @@ -33,3 +39,8 @@ prelude.go_binary( "PUBLIC", ], ) + +prelude.go_stdlib( + name = "stdlib", + visibility = ["PUBLIC"], +) diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py index 38e4b845fb..44f98ab635 100644 --- a/prelude/go/tools/cgo_wrapper.py +++ b/prelude/go/tools/cgo_wrapper.py @@ -12,7 +12,6 @@ import os import subprocess import sys -import tempfile from pathlib import Path @@ -20,34 +19,20 @@ def main(argv): parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("--cgo", action="append", default=[]) parser.add_argument("--output", required=True, type=Path) - parser.add_argument("--cpp", action="append", default=[]) - parser.add_argument("--env-cc", action="append", default=[]) - parser.add_argument("--env-ldflags", action="append", default=[]) parser.add_argument("srcs", type=Path, nargs="*") args = parser.parse_args(argv[1:]) output = args.output.resolve(strict=False) + # the only reason we need this wrapper is to create `-objdir`, + # because neither `go tool cgo` nor buck can create it.
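+    # A sketch of the resulting invocation (paths illustrative): +    #   cgo -objdir=buck-out/.../cgo_gen -- foo.cgo.go bar.cgo.go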
os.makedirs(output, exist_ok=True) env = os.environ.copy() - env["CC"] = " ".join(args.env_cc) - env["CGO_LDFLAGS"] = " ".join(args.env_ldflags) cmd = [] cmd.extend(args.cgo) - # cmd.append("-importpath={}") - # cmd.append("-srcdir={}") cmd.append(f"-objdir={output}") - # cmd.append(cgoCompilerFlags) cmd.append("--") - # cmd.append(cxxCompilerFlags) - - if args.cpp: - with tempfile.NamedTemporaryFile("w", delete=False) as argsfile: - for arg in args.cpp: - print(arg, file=argsfile) - argsfile.flush() - cmd.append("@" + argsfile.name) cmd.extend(args.srcs) return subprocess.call(cmd, env=env) diff --git a/prelude/go/tools/compile_wrapper.py b/prelude/go/tools/compile_wrapper.py deleted file mode 100755 index b0ae431119..0000000000 --- a/prelude/go/tools/compile_wrapper.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Compile the given Go sources into a Go package. - -Example: - - $ ./compile_wrapper.py \ - --compiler compile \ - --assembler assemble \ - --output srcs.txt src/dir/ - -""" - -# pyre-unsafe - -import argparse -import contextlib -import os -import subprocess -import sys -import tempfile -from pathlib import Path -from typing import List - - -def _call_or_exit(cmd: List[str]): - ret = subprocess.call(cmd) - if ret != 0: - sys.exit(ret) - - -def _compile(compile_prefix: List[str], output: Path, srcs: List[Path]): - cmd = [] - cmd.extend(compile_prefix) - cmd.append("-trimpath={}".format(os.getcwd())) - cmd.append("-o") - cmd.append(output) - cmd.extend(srcs) - _call_or_exit(cmd) - - -def _pack(pack_prefix: List[str], output: Path, items: List[Path]): - cmd = [] - cmd.extend(pack_prefix) - cmd.append("r") - cmd.append(output) - cmd.extend(items) - _call_or_exit(cmd) - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--compiler", action="append", default=[]) - parser.add_argument("--assembler", action="append", default=[]) - parser.add_argument("--packer", action="append", default=[]) - parser.add_argument("--embedcfg", type=Path, default=None) - parser.add_argument("--output", required=True, type=Path) - parser.add_argument("srcs", type=Path, nargs="*") - args = parser.parse_args(argv[1:]) - - # If there's no srcs, just leave an empty file. - if not args.srcs: - args.output.touch() - return - - # go:embed does not parse symlinks, so following the links to the real paths - real_srcs = [s.resolve() for s in args.srcs] - - go_files = [s for s in real_srcs if s.suffix == ".go"] - s_files = [s for s in real_srcs if s.suffix == ".s"] - o_files = [s for s in real_srcs if s.suffix in (".o", ".obj")] - - with contextlib.ExitStack() as stack: - - asmhdr_dir = None - - assemble_prefix = [] - assemble_prefix.extend(args.assembler) - - if go_files: - compile_prefix = [] - compile_prefix.extend(args.compiler) - - # If we have assembly files, generate the symabi file to compile - # against, and the asm header. 
- if s_files: - asmhdr_dir = stack.push(tempfile.TemporaryDirectory()) - - asmhdr = Path(asmhdr_dir.name) / "go_asm.h" - asmhdr.touch() - compile_prefix.extend(["-asmhdr", asmhdr]) - assemble_prefix.extend(["-I", asmhdr_dir.name]) - assemble_prefix.extend( - ["-I", os.path.join(os.environ["GOROOT"], "pkg", "include")] - ) - assemble_prefix.extend(["-D", f"GOOS_{os.environ['GOOS']}"]) - assemble_prefix.extend(["-D", f"GOARCH_{os.environ['GOARCH']}"]) - if "GOAMD64" in os.environ and os.environ["GOARCH"] == "amd64": - assemble_prefix.extend(["-D", f"GOAMD64_{os.environ['GOAMD64']}"]) - - # Note: at this point go_asm.h is empty, but that's OK. As per the Go compiler: - # https://github.com/golang/go/blob/3f8f929d60a90c4e4e2b07c8d1972166c1a783b1/src/cmd/go/internal/work/gc.go#L441-L443 - symabis = args.output.with_suffix(".symabis") - _compile(assemble_prefix + ["-gensymabis"], symabis, s_files) - compile_prefix.extend(["-symabis", symabis]) - - if args.embedcfg is not None: - compile_prefix.extend( - [ - "-embedcfg", - args.embedcfg, - ] - ) - - # This will create go_asm.h - _compile(compile_prefix, args.output, go_files) - - else: - args.output.touch() - - # If there are assembly files, assemble them to an object and add into the - # output archive. - if s_files: - s_object = args.output.with_suffix(".o") - _compile(assemble_prefix, s_object, s_files) - o_files.append(s_object) - - if o_files: - _pack(args.packer, args.output, o_files) - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/concat_files.py b/prelude/go/tools/concat_files.py new file mode 100644 index 0000000000..145335a288 --- /dev/null +++ b/prelude/go/tools/concat_files.py @@ -0,0 +1,33 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import sys +from pathlib import Path + + +def main(argv): + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument("--output", required=True, type=Path) + parser.add_argument("files", type=Path, nargs="*") + args = parser.parse_args(argv[1:]) + + if len(args.files) == 0: + print( + "usage: concat_files.py --output out.txt in1.txt in2.txt", file=sys.stderr + ) + return 1 + + with open(args.output, "wb") as outfile: + for f in args.files: + with open(f, "rb") as infile: + outfile.write(infile.read()) + + return 0 + + +sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/cover_srcs.py b/prelude/go/tools/cover_srcs.py deleted file mode 100644 index 4dcaf2cc51..0000000000 --- a/prelude/go/tools/cover_srcs.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Run `go cover` on non-`_test.go` input sources. 
-""" - -# pyre-unsafe - -import argparse -import hashlib -import subprocess -import sys -from pathlib import Path - - -def _var(pkg_name, src): - return "Var_" + hashlib.md5(f"{pkg_name}::{src}".encode("utf-8")).hexdigest() - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--cover", type=Path, required=True) - parser.add_argument("--pkg-name", type=str, required=True) - parser.add_argument("--coverage-mode", type=str, required=True) - parser.add_argument("--covered-srcs-dir", type=Path, required=True) - parser.add_argument("--out-srcs-argsfile", type=Path, required=True) - parser.add_argument("--coverage-var-argsfile", type=Path, required=True) - parser.add_argument("srcs", nargs="*", type=Path) - args = parser.parse_args(argv[1:]) - - out_srcs = [] - coverage_vars = {} - - args.covered_srcs_dir.mkdir(parents=True) - - for src in args.srcs: - if src.name.endswith("_test.go"): - out_srcs.append(src) - else: - var = _var(args.pkg_name, src) - covered_src = args.covered_srcs_dir / src - covered_src.parent.mkdir(parents=True, exist_ok=True) - subprocess.check_call( - [ - args.cover, - "-mode", - args.coverage_mode, - "-var", - var, - "-o", - covered_src, - src, - ] - ) - # we need just the source name for the --cover-pkgs argument - coverage_vars[var] = src.name - out_srcs.append(covered_src) - - with open(args.out_srcs_argsfile, mode="w") as f: - for src in out_srcs: - print(src, file=f) - - with open(args.coverage_var_argsfile, mode="w") as f: - if coverage_vars: - print("--cover-pkgs", file=f) - print( - "{}:{}".format( - args.pkg_name, - ",".join([f"{var}={name}" for var, name in coverage_vars.items()]), - ), - file=f, - ) - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/filter_srcs.py b/prelude/go/tools/filter_srcs.py deleted file mode 100755 index a242e981da..0000000000 --- a/prelude/go/tools/filter_srcs.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Run on a directory of Go source files and print out a list of srcs that should -be compiled. - -Example: - - $ ./filter_srcs.py --output srcs.txt src/dir/ - -""" - -# pyre-unsafe - -import argparse -import json -import os -import subprocess -import sys -import tempfile -from pathlib import Path - - -def main(argv): - parser = argparse.ArgumentParser() - parser.add_argument("--go", default="go", type=Path) - parser.add_argument("--tests", action="store_true") - parser.add_argument("--tags", default="") - parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) - parser.add_argument("srcdir", type=Path) - args = parser.parse_args(argv[1:]) - - # Find all source sub-dirs, which we'll need to run `go list` from. - roots = set() - for root, _dirs, _files in os.walk(args.srcdir): - roots.add(root) - - # Compute absolute paths for GOROOT, to enable `go list` to use `compile/asm/etc` - goroot = os.environ.get("GOROOT", "") - if goroot: - goroot = os.path.realpath(goroot) - - # Run `go list` on all source dirs to filter input sources by build pragmas. 
- for root in roots: - with tempfile.TemporaryDirectory() as go_cache_dir: - out = subprocess.check_output( - [ - "env", - "-i", - "GOROOT={}".format(goroot), - "GOARCH={}".format(os.environ.get("GOARCH", "")), - "GOOS={}".format(os.environ.get("GOOS", "")), - "CGO_ENABLED={}".format(os.environ.get("CGO_ENABLED", "0")), - "GO111MODULE=off", - "GOCACHE=" + go_cache_dir, - args.go.resolve(), - "list", - "-e", - "-json", - "-tags", - args.tags, - ".", - ], - cwd=root, - ).decode("utf-8") - - # Parse JSON output and print out sources. - idx = 0 - decoder = json.JSONDecoder() - while idx < len(out) - 1: - # The raw_decode method fails if there are any leading spaces, e.g. " {}" fails - # so manually trim the prefix of the string - if out[idx].isspace(): - idx += 1 - continue - - obj, idx = decoder.raw_decode(out, idx) - types = ["GoFiles", "EmbedFiles"] - if args.tests: - types.extend(["TestGoFiles", "XTestGoFiles"]) - else: - types.extend(["SFiles"]) - for typ in types: - for src in obj.get(typ, []): - src = Path(obj["Dir"]) / src - # Resolve the symlink - src = Path( - os.path.normpath(str(src.parent / os.readlink(str(src)))) - ) - # Relativize to the CWD. - src = src.relative_to(os.getcwd()) - print(src, file=args.output) - - args.output.close() - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/gen_stdlib_importcfg.py b/prelude/go/tools/gen_stdlib_importcfg.py new file mode 100644 index 0000000000..ce973c0ab7 --- /dev/null +++ b/prelude/go/tools/gen_stdlib_importcfg.py @@ -0,0 +1,32 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import os +import sys +from pathlib import Path + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--stdlib", type=Path, default=None) + parser.add_argument("--output", type=Path, default=None) + + args = parser.parse_args() + + with open(args.output, "w") as f: + for root, _dirs, files in os.walk(args.stdlib): + for file in files: + pkg_path = Path(root, file) + pkg_name, _ = os.path.splitext(pkg_path.relative_to(args.stdlib)) + # package names always use unix slashes + pkg_name = pkg_name.replace("\\", "/") + f.write(f"packagefile {pkg_name}={pkg_path}\n") + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/go_wrapper.py b/prelude/go/tools/go_wrapper.py new file mode 100644 index 0000000000..06d1da4f96 --- /dev/null +++ b/prelude/go/tools/go_wrapper.py @@ -0,0 +1,90 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
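+ +# Usage sketch (arguments illustrative): +#   python3 go_wrapper.py <path/to/go> --workdir <dir> list -e -json=GoFiles . +# Flags other than --workdir/--output are passed through to the wrapped `go` +# binary; GOROOT/GOCACHE are pinned to absolute paths and any %cwd% placeholder +# in the CC/CGO_* variables is expanded before the call.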
+ +import argparse +import os +import shlex +import subprocess +import sys +from pathlib import Path + + +# A copy of "cmd/internal/quoted" translated into Python with GPT-4 +# Source: https://github.com/golang/go/blob/7e9894449e8a12157a28a4a14fc9341353a6469c/src/cmd/internal/quoted/quoted.go#L65 +def go_join(args): + buf = [] + for i, arg in enumerate(args): + if i > 0: + buf.append(" ") + saw_space, saw_single_quote, saw_double_quote = False, False, False + for c in arg: + if ord(c) > 127: + continue + elif c.isspace(): + saw_space = True + elif c == "'": + saw_single_quote = True + elif c == '"': + saw_double_quote = True + if not saw_space and not saw_single_quote and not saw_double_quote: + buf.append(arg) + elif not saw_single_quote: + buf.append("'") + buf.append(arg) + buf.append("'") + elif not saw_double_quote: + buf.append('"') + buf.append(arg) + buf.append('"') + else: + raise ValueError( + f"Argument {arg} contains both single and double quotes and cannot be quoted" + ) + return "".join(buf) + + +def main(argv): + """ + This is a wrapper script around the `go` binary. + - It fixes GOROOT and GOCACHE + """ + if len(argv) < 2: + print("usage: go_wrapper.py ", file=sys.stderr) + return 1 + + wrapped_binary = Path(argv[1]).resolve() + + parser = argparse.ArgumentParser() + parser.add_argument("--workdir", type=Path, default=None) + parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) + parsed, unknown = parser.parse_known_args(argv[2:]) + + env = os.environ.copy() + # Make paths absolute, otherwise go build will fail. + if "GOROOT" in env: + env["GOROOT"] = os.path.realpath(env["GOROOT"]) + + env["GOCACHE"] = os.path.realpath(env["BUCK_SCRATCH_PATH"]) + + cwd = os.getcwd() + for env_var in ["CC", "CGO_CFLAGS", "CGO_CPPFLAGS", "CGO_LDFLAGS"]: + if env_var in env: + # HACK: Split the value into a list of arguments then join them back. + # This is because buck encodes quoted args in a way `go` doesn't like, + # but `go_join` does it in a way that `go` expects. + var_value = go_join(shlex.split(env[env_var])) + # HACK: Replace %cwd% with the current working directory to make it work when `go` does `cd` to a tmp-dir. + env[env_var] = var_value.replace("%cwd%", cwd) + + retcode = subprocess.call( + [wrapped_binary] + unknown, env=env, cwd=parsed.workdir, stdout=parsed.output + ) + parsed.output.close() + return retcode + + +sys.exit(main(sys.argv)) diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl new file mode 100644 index 0000000000..a69f31acc2 --- /dev/null +++ b/prelude/go/transitions/defs.bzl @@ -0,0 +1,245 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
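+ +# These transitions copy rule attrs (race, asan, cgo_enabled, tags, ...) into +# constraint values on the target platform, so the whole dependency tree of a +# binary is configured consistently. A sketch of the effect (constraint labels +# as defined in prelude//go/constraints): +# +#   go_binary(name = "svc", race = True, ...) +#   # -> the configuration gains prelude//go/constraints:race_true, and the +#   #    private `_race` attr of every Go rule in the tree resolves to True +#   #    via its select() (see race_attr below).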
+ +load("@prelude//go:coverage.bzl", "GoCoverageMode") +load(":tags_helper.bzl", "selects_for_tags", "tag_to_constrant_value") + +def _cgo_enabled_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + + # Cancel transition if the value already set + # to enable using configuration modifiers for overriding this option + cgo_enabled_setting = refs.cgo_enabled_auto[ConstraintValueInfo].setting + if cgo_enabled_setting.label in constraints: + return platform + + if attrs.cgo_enabled == None: + cgo_enabled_ref = refs.cgo_enabled_auto + elif attrs.cgo_enabled == True: + cgo_enabled_ref = refs.cgo_enabled_true + else: + cgo_enabled_ref = refs.cgo_enabled_false + + cgo_enabled_value = cgo_enabled_ref[ConstraintValueInfo] + constraints[cgo_enabled_value.setting.label] = cgo_enabled_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + +def _compile_shared_transition(platform, refs, _): + compile_shared_value = refs.compile_shared_value[ConstraintValueInfo] + constraints = platform.configuration.constraints + constraints[compile_shared_value.setting.label] = compile_shared_value + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + +def _race_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + + # Cancel transition if the value already set + # to enable using configuration modifiers for overriding this option + race_setting = refs.race_false[ConstraintValueInfo].setting + if race_setting.label in constraints: + return platform + + if attrs.race == True: + race_ref = refs.race_true + else: + race_ref = refs.race_false + + race_value = race_ref[ConstraintValueInfo] + constraints[race_value.setting.label] = race_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + +def _asan_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + + # Cancel transition if the value already set + # to enable using configuration modifiers for overriding this option + asan_setting = refs.asan_false[ConstraintValueInfo].setting + if asan_setting.label in constraints: + return platform + + if attrs.asan == True: + asan_ref = refs.asan_true + else: + asan_ref = refs.asan_false + + asan_value = asan_ref[ConstraintValueInfo] + constraints[asan_value.setting.label] = asan_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + +def _coverage_mode_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + + # Cancel transition if the value already set + # to enable using configuration modifiers for overriding this option + coverage_mode_setting = refs.coverage_mode_set[ConstraintValueInfo].setting + if coverage_mode_setting.label in constraints: + return platform + + if attrs.coverage_mode == None: + return platform + elif attrs.coverage_mode == "set": + coverage_mode_ref = refs.coverage_mode_set + elif attrs.coverage_mode == "count": + coverage_mode_ref = refs.coverage_mode_count + elif attrs.coverage_mode == "atomic": + coverage_mode_ref = 
+    else:
+        fail("`coverage_mode` must be one of 'set', 'count', 'atomic', or None, got: {}".format(attrs.coverage_mode))
+
+    coverage_mode_value = coverage_mode_ref[ConstraintValueInfo]
+    constraints[coverage_mode_value.setting.label] = coverage_mode_value
+
+    new_cfg = ConfigurationInfo(
+        constraints = constraints,
+        values = platform.configuration.values,
+    )
+
+    return PlatformInfo(
+        label = platform.label,
+        configuration = new_cfg,
+    )
+
+def _tags_transition(platform, refs, attrs):
+    constraints = platform.configuration.constraints
+    for tag in attrs.tags:
+        ref_name = "tag_{}__value".format(tag)
+        if not hasattr(refs, ref_name):
+            fail("Add tag '{}' to the .buckconfig attribute `go.allowed_tags` to allow using it".format(tag))
+
+        tag_value = getattr(refs, ref_name)[ConstraintValueInfo]
+        constraints[tag_value.setting.label] = tag_value
+
+    new_cfg = ConfigurationInfo(
+        constraints = constraints,
+        values = platform.configuration.values,
+    )
+
+    return PlatformInfo(
+        label = platform.label,
+        configuration = new_cfg,
+    )
+
+def _chain_transitions(transitions):
+    def tr(platform, refs, attrs):
+        for t in transitions:
+            platform = t(platform, refs, attrs)
+        return platform
+
+    return tr
+
+_transitions = [_asan_transition, _cgo_enabled_transition, _compile_shared_transition, _race_transition, _tags_transition]
+
+_refs = {
+    "asan_false": "prelude//go/constraints:asan_false",
+    "asan_true": "prelude//go/constraints:asan_true",
+    "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto",
+    "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false",
+    "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true",
+    "race_false": "prelude//go/constraints:race_false",
+    "race_true": "prelude//go/constraints:race_true",
+} | {
+    "tag_{}__value".format(tag): constraint_value
+    for tag, constraint_value in tag_to_constraint_value().items()
+}
+
+_attrs = ["asan", "cgo_enabled", "race", "tags"]
+
+go_binary_transition = transition(
+    impl = _chain_transitions(_transitions),
+    refs = _refs | {
+        "compile_shared_value": "prelude//go/constraints:compile_shared_false",
+    },
+    attrs = _attrs,
+)
+
+go_test_transition = transition(
+    impl = _chain_transitions(_transitions + [_coverage_mode_transition]),
+    refs = _refs | {
+        "compile_shared_value": "prelude//go/constraints:compile_shared_false",
+        "coverage_mode_atomic": "prelude//go/constraints:coverage_mode_atomic",
+        "coverage_mode_count": "prelude//go/constraints:coverage_mode_count",
+        "coverage_mode_set": "prelude//go/constraints:coverage_mode_set",
+    },
+    attrs = _attrs + ["coverage_mode"],
)

+go_exported_library_transition = transition(
+    impl = _chain_transitions(_transitions),
+    refs = _refs | {
+        "compile_shared_value": "prelude//go/constraints:compile_shared_true",
+    },
+    attrs = _attrs,
+)
+
+cgo_enabled_attr = attrs.default_only(attrs.option(attrs.bool(), default = select({
+    "DEFAULT": None,
+    "prelude//go/constraints:cgo_enabled_auto": None,
+    "prelude//go/constraints:cgo_enabled_false": False,
+    "prelude//go/constraints:cgo_enabled_true": True,
+})))
+
+compile_shared_attr = attrs.default_only(attrs.bool(default = select({
+    "DEFAULT": False,
+    "prelude//go/constraints:compile_shared_false": False,
+    "prelude//go/constraints:compile_shared_true": True,
+})))
+
+race_attr = attrs.default_only(attrs.bool(default = select({
+    "DEFAULT": False,
+    "prelude//go/constraints:race_false": False,
+    "prelude//go/constraints:race_true": True,
+})))
+
+asan_attr = attrs.default_only(attrs.bool(default = select({
"DEFAULT": False, + "prelude//go/constraints:asan_false": False, + "prelude//go/constraints:asan_true": True, +}))) + +coverage_mode_attr = attrs.default_only(attrs.option(attrs.enum(GoCoverageMode.values()), default = select({ + "DEFAULT": None, + "prelude//go/constraints:coverage_mode_atomic": "atomic", + "prelude//go/constraints:coverage_mode_count": "count", + "prelude//go/constraints:coverage_mode_set": "set", +}))) + +tags_attr = attrs.default_only(attrs.list(attrs.string(), default = selects_for_tags())) diff --git a/prelude/go/transitions/tags_helper.bzl b/prelude/go/transitions/tags_helper.bzl new file mode 100644 index 0000000000..966fbb69d8 --- /dev/null +++ b/prelude/go/transitions/tags_helper.bzl @@ -0,0 +1,23 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//utils:buckconfig.bzl", "read_list") + +allowed_tags = read_list("go", "allowed_tags", default = [], root_cell = True) + +def tag_to_constrant_value(): + return {tag: "prelude//go/constraints:tag_{}__value".format(tag) for tag in allowed_tags} + +def selects_for_tags(): + selects = [] + for tag in allowed_tags: + selects += select({ + "DEFAULT": [], + "prelude//go/constraints:tag_{}__value".format(tag): [tag], + }) + + return selects diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl new file mode 100644 index 0000000000..0e1a706070 --- /dev/null +++ b/prelude/haskell/compile.bzl @@ -0,0 +1,261 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load( + "@prelude//cxx:preprocessor.bzl", + "cxx_inherited_preprocessor_infos", + "cxx_merge_cpreprocessors", +) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfoTSet", +) +load( + "@prelude//haskell:toolchain.bzl", + "HaskellToolchainInfo", +) +load( + "@prelude//haskell:util.bzl", + "attr_deps_haskell_lib_infos", + "attr_deps_haskell_link_infos", + "get_artifact_suffix", + "is_haskell_src", + "output_extensions", + "srcs_to_pairs", +) +load( + "@prelude//linking:link_info.bzl", + "LinkStyle", +) +load("@prelude//utils:argfile.bzl", "at_argfile") + +# The type of the return value of the `_compile()` function. +CompileResultInfo = record( + objects = field(Artifact), + hi = field(Artifact), + stubs = field(Artifact), + producing_indices = field(bool), +) + +CompileArgsInfo = record( + result = field(CompileResultInfo), + srcs = field(cmd_args), + args_for_cmd = field(cmd_args), + args_for_file = field(cmd_args), +) + +PackagesInfo = record( + exposed_package_args = cmd_args, + packagedb_args = cmd_args, + transitive_deps = field(HaskellLibraryInfoTSet), +) + +def _package_flag(toolchain: HaskellToolchainInfo) -> str: + if toolchain.support_expose_package: + return "-expose-package" + else: + return "-package" + +def get_packages_info( + ctx: AnalysisContext, + link_style: LinkStyle, + specify_pkg_version: bool, + enable_profiling: bool) -> PackagesInfo: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + # Collect library dependencies. 
Note that these don't need to be in a + # particular order. + direct_deps_link_info = attr_deps_haskell_link_infos(ctx) + libs = ctx.actions.tset( + HaskellLibraryInfoTSet, + children = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in direct_deps_link_info + ], + ) + + # base is special and gets exposed by default + package_flag = _package_flag(haskell_toolchain) + exposed_package_args = cmd_args([package_flag, "base"]) + + packagedb_args = cmd_args() + packagedb_set = {} + + for lib in libs.traverse(): + packagedb_set[lib.db] = None + hidden_args = cmd_args(hidden = [ + lib.import_dirs.values(), + lib.stub_dirs, + # libs of dependencies might be needed at compile time if + # we're using Template Haskell: + lib.libs, + ]) + + exposed_package_args.add(hidden_args) + + packagedb_args.add(hidden_args) + + # These we need to add for all the packages/dependencies, i.e. + # direct and transitive (e.g. `fbcode-common-hs-util-hs-array`) + packagedb_args.add([cmd_args("-package-db", x) for x in packagedb_set]) + + haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( + ctx, + link_style, + enable_profiling, + ) + + # Expose only the packages we depend on directly + for lib in haskell_direct_deps_lib_infos: + pkg_name = lib.name + if (specify_pkg_version): + pkg_name += "-{}".format(lib.version) + + exposed_package_args.add(package_flag, pkg_name) + + return PackagesInfo( + exposed_package_args = exposed_package_args, + packagedb_args = packagedb_args, + transitive_deps = libs, + ) + +def compile_args( + ctx: AnalysisContext, + link_style: LinkStyle, + enable_profiling: bool, + pkgname = None, + suffix: str = "") -> CompileArgsInfo: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + compile_cmd = cmd_args() + compile_cmd.add(haskell_toolchain.compiler_flags) + + # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't + # be parsed when inside an argsfile. + compile_cmd.add(ctx.attrs.compiler_flags) + + compile_args = cmd_args() + compile_args.add("-no-link", "-i") + + if enable_profiling: + compile_args.add("-prof") + + if link_style == LinkStyle("shared"): + compile_args.add("-dynamic", "-fPIC") + elif link_style == LinkStyle("static_pic"): + compile_args.add("-fPIC", "-fexternal-dynamic-refs") + + osuf, hisuf = output_extensions(link_style, enable_profiling) + compile_args.add("-osuf", osuf, "-hisuf", hisuf) + + if getattr(ctx.attrs, "main", None) != None: + compile_args.add(["-main-is", ctx.attrs.main]) + + artifact_suffix = get_artifact_suffix(link_style, enable_profiling, suffix) + + objects = ctx.actions.declare_output( + "objects-" + artifact_suffix, + dir = True, + ) + hi = ctx.actions.declare_output("hi-" + artifact_suffix, dir = True) + stubs = ctx.actions.declare_output("stubs-" + artifact_suffix, dir = True) + + compile_args.add( + "-odir", + objects.as_output(), + "-hidir", + hi.as_output(), + "-hiedir", + hi.as_output(), + "-stubdir", + stubs.as_output(), + ) + + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. + packages_info = get_packages_info( + ctx, + link_style, + specify_pkg_version = False, + enable_profiling = enable_profiling, + ) + + compile_args.add(packages_info.exposed_package_args) + compile_args.add(packages_info.packagedb_args) + + # Add args from preprocess-able inputs. 
+    inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps)
+    pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre)
+    pre_args = pre.set.project_as_args("args")
+    compile_args.add(cmd_args(pre_args, format = "-optP={}"))
+
+    if pkgname:
+        compile_args.add(["-this-unit-id", pkgname])
+
+    arg_srcs = []
+    hidden_srcs = []
+    for (path, src) in srcs_to_pairs(ctx.attrs.srcs):
+        # hs-boot files aren't expected to be arguments to the compiler, but they
+        # do need to be included in the directory of the associated src file
+        if is_haskell_src(path):
+            arg_srcs.append(src)
+        else:
+            hidden_srcs.append(src)
+    srcs = cmd_args(
+        arg_srcs,
+        hidden = hidden_srcs,
+    )
+
+    producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags
+
+    return CompileArgsInfo(
+        result = CompileResultInfo(
+            objects = objects,
+            hi = hi,
+            stubs = stubs,
+            producing_indices = producing_indices,
+        ),
+        srcs = srcs,
+        args_for_cmd = compile_cmd,
+        args_for_file = compile_args,
+    )
+
+# Compile all the context's sources.
+def compile(
+        ctx: AnalysisContext,
+        link_style: LinkStyle,
+        enable_profiling: bool,
+        pkgname: str | None = None) -> CompileResultInfo:
+    haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo]
+    compile_cmd = cmd_args(haskell_toolchain.compiler)
+
+    args = compile_args(ctx, link_style, enable_profiling, pkgname)
+
+    compile_cmd.add(args.args_for_cmd)
+
+    artifact_suffix = get_artifact_suffix(link_style, enable_profiling)
+
+    if args.args_for_file:
+        if haskell_toolchain.use_argsfile:
+            compile_cmd.add(at_argfile(
+                actions = ctx.actions,
+                name = "haskell_compile_" + artifact_suffix + ".argsfile",
+                args = [args.args_for_file, args.srcs],
+                allow_args = True,
+            ))
+        else:
+            compile_cmd.add(args.args_for_file)
+            compile_cmd.add(args.srcs)
+
+    artifact_suffix = get_artifact_suffix(link_style, enable_profiling)
+    ctx.actions.run(
+        compile_cmd,
+        category = "haskell_compile_" + artifact_suffix.replace("-", "_"),
+        no_outputs_cleanup = True,
+    )
+
+    return args.result
diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl
index 8c8d92bcd8..ecf1a585d7 100644
--- a/prelude/haskell/haskell.bzl
+++ b/prelude/haskell/haskell.bzl
@@ -13,12 +13,16 @@ load(
     "@prelude//cxx:cxx.bzl",
     "get_auto_link_group_specs",
 )
+load(
+    "@prelude//cxx:cxx_context.bzl",
+    "get_cxx_toolchain_info",
+)
 load(
     "@prelude//cxx:cxx_toolchain_types.bzl",
-    "CxxPlatformInfo",
     "CxxToolchainInfo",
     "PicBehavior",
 )
+load("@prelude//cxx:groups.bzl", "get_dedupped_roots_from_groups")
 load(
     "@prelude//cxx:link_groups.bzl",
     "LinkGroupContext",
@@ -28,9 +32,16 @@ load(
     "get_filtered_links",
     "get_link_group_info",
     "get_link_group_preferred_linkage",
+    "get_public_link_group_nodes",
     "get_transitive_deps_matching_labels",
     "is_link_group_shlib",
 )
+load(
+    "@prelude//cxx:linker.bzl",
+    "LINKERS",
+    "get_rpath_origin",
+    "get_shared_library_flags",
+)
 load(
     "@prelude//cxx:preprocessor.bzl",
     "CPreprocessor",
@@ -38,6 +49,45 @@ load(
     "cxx_inherited_preprocessor_infos",
     "cxx_merge_cpreprocessors",
 )
+load(
+    "@prelude//haskell:compile.bzl",
+    "CompileResultInfo",
+    "compile",
+)
+load(
+    "@prelude//haskell:haskell_haddock.bzl",
+    "haskell_haddock_lib",
+)
+load(
+    "@prelude//haskell:library_info.bzl",
+    "HaskellLibraryInfo",
+    "HaskellLibraryInfoTSet",
+    "HaskellLibraryProvider",
+)
+load(
+    "@prelude//haskell:link_info.bzl",
+    "HaskellLinkInfo",
+    "HaskellProfLinkInfo",
+    "attr_link_style",
+    "cxx_toolchain_link_style",
+)
+load(
+    "@prelude//haskell:toolchain.bzl",
+    "HaskellToolchainInfo",
+)
+load(
+
"@prelude//haskell:util.bzl", + "attr_deps", + "attr_deps_haskell_link_infos_sans_template_deps", + "attr_deps_merged_link_infos", + "attr_deps_profiling_link_infos", + "attr_deps_shared_library_infos", + "get_artifact_suffix", + "is_haskell_src", + "output_extensions", + "src_to_module_name", + "srcs_to_pairs", +) load( "@prelude//linking:link_groups.bzl", "gather_link_group_libs", @@ -52,7 +102,6 @@ load( "LinkInfo", "LinkInfos", "LinkStyle", - "Linkage", "LinkedObject", "MergedLinkInfo", "SharedLibLinkable", @@ -81,70 +130,19 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", + "create_shlib_symlink_tree", "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//python:python.bzl", "PythonLibraryInfo", ) -load("@prelude//utils:platform_flavors_util.bzl", "by_platform") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:set.bzl", "set") load("@prelude//utils:utils.bzl", "filter_and_map_idx", "flatten") -_HASKELL_EXTENSIONS = [ - ".hs", - ".lhs", - ".hsc", - ".chs", - ".x", - ".y", -] - -HaskellPlatformInfo = provider(fields = { - "name": provider_field(typing.Any, default = None), -}) - -HaskellToolchainInfo = provider( - # @unsorted-dict-items - fields = { - "compiler": provider_field(typing.Any, default = None), - "compiler_flags": provider_field(typing.Any, default = None), - "linker": provider_field(typing.Any, default = None), - "linker_flags": provider_field(typing.Any, default = None), - "haddock": provider_field(typing.Any, default = None), - "compiler_major_version": provider_field(typing.Any, default = None), - "package_name_prefix": provider_field(typing.Any, default = None), - "packager": provider_field(typing.Any, default = None), - "use_argsfile": provider_field(typing.Any, default = None), - "support_expose_package": provider_field(bool, default = False), - "archive_contents": provider_field(typing.Any, default = None), - "ghci_script_template": provider_field(typing.Any, default = None), - "ghci_iserv_template": provider_field(typing.Any, default = None), - "ide_script_template": provider_field(typing.Any, default = None), - "ghci_binutils_path": provider_field(typing.Any, default = None), - "ghci_lib_path": provider_field(typing.Any, default = None), - "ghci_ghc_path": provider_field(typing.Any, default = None), - "ghci_iserv_path": provider_field(typing.Any, default = None), - "ghci_iserv_prof_path": provider_field(typing.Any, default = None), - "ghci_cxx_path": provider_field(typing.Any, default = None), - "ghci_cc_path": provider_field(typing.Any, default = None), - "ghci_cpp_path": provider_field(typing.Any, default = None), - "ghci_packager": provider_field(typing.Any, default = None), - "cache_links": provider_field(typing.Any, default = None), - "script_template_processor": provider_field(typing.Any, default = None), - }, -) - -# A list of `HaskellLibraryInfo`s. -HaskellLinkInfo = provider( - # Contains a list of HaskellLibraryInfo records. - fields = { - "info": provider_field(typing.Any, default = None), # dict[LinkStyle, list[HaskellLibraryInfo]] # TODO use a tset - "prof_info": provider_field(typing.Any, default = None), # dict[LinkStyle, list[HaskellLibraryInfo]] # TODO use a tset - }, -) - HaskellIndexingTSet = transitive_set() # A list of hie dirs @@ -154,101 +152,10 @@ HaskellIndexInfo = provider( }, ) -# If the target is a haskell library, the HaskellLibraryProvider -# contains its HaskellLibraryInfo. 
(in contrast to a HaskellLinkInfo, -# which contains the HaskellLibraryInfo for all the transitive -# dependencies). Direct dependencies are treated differently from -# indirect dependencies for the purposes of module visibility. -HaskellLibraryProvider = provider( - fields = { - "lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] - "prof_lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] - }, -) - -# HaskellProfLinkInfo exposes the MergedLinkInfo of a target and all of its -# dependencies built for profiling. This allows top-level targets (e.g. -# `haskell_binary`) to be defined with profiling enabled by default. -HaskellProfLinkInfo = provider( - fields = { - "prof_infos": provider_field(typing.Any, default = None), # MergedLinkInfo - }, -) - -# A record of a Haskell library. -HaskellLibraryInfo = record( - # The library target name: e.g. "rts" - name = str, - # package config database: e.g. platform009/build/ghc/lib/package.conf.d - db = Artifact, - # e.g. "base-4.13.0.0" - id = str, - # Import dirs indexed by profiling enabled/disabled - import_dirs = dict[bool, Artifact], - stub_dirs = list[Artifact], - - # This field is only used as hidden inputs to compilation, to - # support Template Haskell which may need access to the libraries - # at compile time. The real library flags are propagated up the - # dependency graph via MergedLinkInfo. - libs = field(list[Artifact], []), - # Package version, used to specify the full package when exposing it, - # e.g. filepath-1.4.2.1, deepseq-1.4.4.0. - # Internal packages default to 1.0.0, e.g. `fbcode-dsi-logger-hs-types-1.0.0`. - version = str, - is_prebuilt = bool, - profiling_enabled = bool, -) - -# -- - -def _by_platform(ctx: AnalysisContext, xs: list[(str, list[typing.Any])]) -> list[typing.Any]: - platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo].name - return flatten(by_platform([platform], xs)) - -def attr_deps(ctx: AnalysisContext) -> list[Dependency]: - return ctx.attrs.deps + _by_platform(ctx, ctx.attrs.platform_deps) - -# Disable until we have a need to call this. 
-# def _attr_deps_merged_link_infos(ctx: AnalysisContext) -> [MergedLinkInfo]: -# return filter(None, [d[MergedLinkInfo] for d in attr_deps(ctx)]) - # This conversion is non-standard, see TODO about link style below def _to_lib_output_style(link_style: LinkStyle) -> LibOutputStyle: return default_output_style_for_link_strategy(to_link_strategy(link_style)) -def _attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: - return filter( - None, - [ - d.get(HaskellLinkInfo) - for d in attr_deps(ctx) + ctx.attrs.template_deps - ], - ) - -def _attr_deps_haskell_lib_infos( - ctx: AnalysisContext, - link_style: LinkStyle, - enable_profiling: bool) -> list[HaskellLibraryInfo]: - if enable_profiling and link_style == LinkStyle("shared"): - fail("Profiling isn't supported when using dynamic linking") - return [ - x.prof_lib[link_style] if enable_profiling else x.lib[link_style] - for x in filter(None, [ - d.get(HaskellLibraryProvider) - for d in attr_deps(ctx) + ctx.attrs.template_deps - ]) - ] - -def _cxx_toolchain_link_style(ctx: AnalysisContext) -> LinkStyle: - return ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info.link_style - -def _attr_link_style(ctx: AnalysisContext) -> LinkStyle: - if ctx.attrs.link_style != None: - return LinkStyle(ctx.attrs.link_style) - else: - return _cxx_toolchain_link_style(ctx) - def _attr_preferred_linkage(ctx: AnalysisContext) -> Linkage: preferred_linkage = ctx.attrs.preferred_linkage @@ -260,14 +167,6 @@ def _attr_preferred_linkage(ctx: AnalysisContext) -> Linkage: # -- -def _is_haskell_src(x: str) -> bool: - _, ext = paths.split_extension(x) - return ext in _HASKELL_EXTENSIONS - -def _src_to_module_name(x: str) -> str: - base, _ext = paths.split_extension(x) - return base.replace("/", ".") - def _get_haskell_prebuilt_libs( ctx, link_style: LinkStyle, @@ -371,9 +270,17 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ] hlibinfos[link_style] = hlibinfo - hlinkinfos[link_style] = [hlibinfo] + hlinkinfos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = hlibinfo, + children = [lib.info[link_style] for lib in haskell_infos], + ) prof_hlibinfos[link_style] = prof_hlibinfo - prof_hlinkinfos[link_style] = [prof_hlibinfo] + prof_hlinkinfos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = prof_hlibinfo, + children = [lib.prof_info[link_style] for lib in haskell_infos], + ) link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = ctx.attrs.exported_linker_flags, @@ -422,6 +329,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: solibs = {} for soname, lib in ctx.attrs.shared_libs.items(): solibs[soname] = LinkedObject(output = lib, unstripped_output = lib) + shared_libs = create_shared_libraries(ctx, solibs) linkable_graph = create_linkable_graph( ctx, @@ -431,7 +339,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = ctx, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = None, ), ), @@ -440,7 +348,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: inherited_pp_info = cxx_inherited_preprocessor_infos(ctx.attrs.deps) own_pp_info = CPreprocessor( - relative_args = CPreprocessorArgs(args = flatten([["-isystem", d] for d in ctx.attrs.cxx_header_dirs])), + args = CPreprocessorArgs(args = flatten([["-isystem", d] for d in ctx.attrs.cxx_header_dirs])), ) return [ @@ 
-449,11 +357,11 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, [own_pp_info], inherited_pp_info), merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, shared_library_infos, ), merge_link_group_lib_info(deps = ctx.attrs.deps), - merge_haskell_link_infos(haskell_infos + [haskell_link_infos]), + haskell_link_infos, merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, @@ -461,254 +369,17 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: linkable_graph, ] -def merge_haskell_link_infos(deps: list[HaskellLinkInfo]) -> HaskellLinkInfo: - merged = {} - prof_merged = {} - for link_style in LinkStyle: - children = [] - prof_children = [] - for dep in deps: - if link_style in dep.info: - children.extend(dep.info[link_style]) - - if link_style in dep.prof_info: - prof_children.extend(dep.prof_info[link_style]) - - merged[link_style] = dedupe(children) - prof_merged[link_style] = dedupe(prof_children) - - return HaskellLinkInfo(info = merged, prof_info = prof_merged) - -PackagesInfo = record( - exposed_package_args = cmd_args, - packagedb_args = cmd_args, - transitive_deps = field(list[HaskellLibraryInfo]), -) - -def _package_flag(toolchain: HaskellToolchainInfo) -> str: - if toolchain.support_expose_package: - return "-expose-package" - else: - return "-package" - -def get_packages_info( - ctx: AnalysisContext, - link_style: LinkStyle, - specify_pkg_version: bool, - enable_profiling: bool) -> PackagesInfo: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - - # Collect library dependencies. Note that these don't need to be in a - # particular order and we really want to remove duplicates (there - # are a *lot* of duplicates). - libs = {} - direct_deps_link_info = _attr_deps_haskell_link_infos(ctx) - merged_hs_link_info = merge_haskell_link_infos(direct_deps_link_info) - - hs_link_info = merged_hs_link_info.prof_info if enable_profiling else merged_hs_link_info.info - - for lib in hs_link_info[link_style]: - libs[lib.db] = lib # lib.db is a good enough unique key - - # base is special and gets exposed by default - package_flag = _package_flag(haskell_toolchain) - exposed_package_args = cmd_args([package_flag, "base"]) - - packagedb_args = cmd_args() - - for lib in libs.values(): - exposed_package_args.hidden(lib.import_dirs.values()) - exposed_package_args.hidden(lib.stub_dirs) - - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: - exposed_package_args.hidden(lib.libs) - - packagedb_args.hidden(lib.import_dirs.values()) - packagedb_args.hidden(lib.stub_dirs) - packagedb_args.hidden(lib.libs) - - for lib in libs.values(): - # These we need to add for all the packages/dependencies, i.e. - # direct and transitive (e.g. `fbcode-common-hs-util-hs-array`) - packagedb_args.add("-package-db", lib.db) - - haskell_direct_deps_lib_infos = _attr_deps_haskell_lib_infos( - ctx, - link_style, - enable_profiling, - ) - - # Expose only the packages we depend on directly - for lib in haskell_direct_deps_lib_infos: - pkg_name = lib.name - if (specify_pkg_version): - pkg_name += "-{}".format(lib.version) - - exposed_package_args.add(package_flag, pkg_name) - - return PackagesInfo( - exposed_package_args = exposed_package_args, - packagedb_args = packagedb_args, - transitive_deps = libs.values(), - ) - -# The type of the return value of the `_compile()` function. 
-CompileResultInfo = record( - objects = field(Artifact), - hi = field(Artifact), - stubs = field(Artifact), - producing_indices = field(bool), -) - -def _link_style_extensions(link_style: LinkStyle) -> (str, str): - if link_style == LinkStyle("shared"): - return ("dyn_o", "dyn_hi") - elif link_style == LinkStyle("static_pic"): - return ("o", "hi") # is this right? - elif link_style == LinkStyle("static"): - return ("o", "hi") - fail("unknown LinkStyle") - -def _output_extensions( - link_style: LinkStyle, - profiled: bool) -> (str, str): - osuf, hisuf = _link_style_extensions(link_style) - if profiled: - return ("p_" + osuf, "p_" + hisuf) - else: - return (osuf, hisuf) - def _srcs_to_objfiles( ctx: AnalysisContext, odir: Artifact, osuf: str) -> cmd_args: objfiles = cmd_args() - for src, _ in _srcs_to_pairs(ctx.attrs.srcs): + for src, _ in srcs_to_pairs(ctx.attrs.srcs): # Don't link boot sources, as they're only meant to be used for compiling. - if _is_haskell_src(src): + if is_haskell_src(src): objfiles.add(cmd_args([odir, "/", paths.replace_extension(src, "." + osuf)], delimiter = "")) return objfiles -# We take a named_set for srcs, which is sometimes a list, sometimes a dict. -# In future we should only accept a list, but for now, cope with both. -def _srcs_to_pairs(srcs) -> list[(str, Artifact)]: - if type(srcs) == type({}): - return srcs.items() - else: - return [(src.short_path, src) for src in srcs] - -# Single place to build the suffix used in artifacts (e.g. package directories, -# lib names) considering attributes like link style and profiling. -def get_artifact_suffix(link_style: LinkStyle, enable_profiling: bool) -> str: - artifact_suffix = link_style.value - if enable_profiling: - artifact_suffix += "-prof" - return artifact_suffix - -# Compile all the context's sources. -def _compile( - ctx: AnalysisContext, - link_style: LinkStyle, - enable_profiling: bool, - extra_args = []) -> CompileResultInfo: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd = cmd_args(haskell_toolchain.compiler) - compile_cmd.add(haskell_toolchain.compiler_flags) - - # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't - # be parsed when inside an argsfile. - compile_cmd.add(ctx.attrs.compiler_flags) - - compile_args = cmd_args() - compile_args.add("-no-link", "-i") - - if enable_profiling: - compile_args.add("-prof") - - if link_style == LinkStyle("shared"): - compile_args.add("-dynamic", "-fPIC") - elif link_style == LinkStyle("static_pic"): - compile_args.add("-fPIC", "-fexternal-dynamic-refs") - - osuf, hisuf = _output_extensions(link_style, enable_profiling) - compile_args.add("-osuf", osuf, "-hisuf", hisuf) - - if getattr(ctx.attrs, "main", None) != None: - compile_args.add(["-main-is", ctx.attrs.main]) - - artifact_suffix = get_artifact_suffix(link_style, enable_profiling) - - objects = ctx.actions.declare_output( - "objects-" + artifact_suffix, - dir = True, - ) - hi = ctx.actions.declare_output("hi-" + artifact_suffix, dir = True) - stubs = ctx.actions.declare_output("stubs-" + artifact_suffix, dir = True) - - compile_args.add( - "-odir", - objects.as_output(), - "-hidir", - hi.as_output(), - "-hiedir", - hi.as_output(), - "-stubdir", - stubs.as_output(), - ) - - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. 
- packages_info = get_packages_info( - ctx, - link_style, - specify_pkg_version = False, - enable_profiling = enable_profiling, - ) - - compile_args.add(packages_info.exposed_package_args) - compile_args.add(packages_info.packagedb_args) - - # Add args from preprocess-able inputs. - inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) - pre_args = pre.set.project_as_args("args") - compile_args.add(cmd_args(pre_args, format = "-optP={}")) - - compile_args.add(extra_args) - - for (path, src) in _srcs_to_pairs(ctx.attrs.srcs): - # hs-boot files aren't expected to be an argument to compiler but does need - # to be included in the directory of the associated src file - if _is_haskell_src(path): - compile_args.add(src) - else: - compile_args.hidden(src) - - if haskell_toolchain.use_argsfile: - argsfile = ctx.actions.declare_output( - "haskell_compile_" + artifact_suffix + ".argsfile", - ) - ctx.actions.write(argsfile.as_output(), compile_args, allow_args = True) - compile_cmd.add(cmd_args(argsfile, format = "@{}").hidden(compile_args)) - else: - compile_cmd.add(compile_args) - - ctx.actions.run( - compile_cmd, - category = "haskell_compile_" + artifact_suffix.replace("-", "_"), - no_outputs_cleanup = True, - ) - - producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags - - return CompileResultInfo( - objects = objects, - hi = hi, - stubs = stubs, - producing_indices = producing_indices, - ) - _REGISTER_PACKAGE = """\ set -eu GHC_PKG=$1 @@ -748,11 +419,7 @@ def _make_package( artifact_suffix = get_artifact_suffix(link_style, enable_profiling) # Don't expose boot sources, as they're only meant to be used for compiling. - modules = [_src_to_module_name(x) for x, _ in _srcs_to_pairs(ctx.attrs.srcs) if _is_haskell_src(x)] - - uniq_hlis = {} - for x in hlis: - uniq_hlis[x.id] = x + modules = [src_to_module_name(x) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] if enable_profiling: # Add the `-p` suffix otherwise ghc will look for objects @@ -782,36 +449,40 @@ def _make_package( "import-dirs:" + ", ".join(import_dirs), "library-dirs:" + ", ".join(library_dirs), "extra-libraries: " + libname, - "depends: " + ", ".join(uniq_hlis), + "depends: " + ", ".join([lib.id for lib in hlis]), ] pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) db = ctx.actions.declare_output("db-" + artifact_suffix) - db_deps = {} - for x in uniq_hlis.values(): - db_deps[repr(x.db)] = x.db + # While the list of hlis is unique, there may be multiple packages in the same db. + # Cutting down the GHC_PACKAGE_PATH significantly speeds up GHC. + db_deps = {x.db: None for x in hlis}.keys() # So that ghc-pkg can find the DBs for the dependencies. We might # be able to use flags for this instead, but this works. 
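The `db_deps = {x.db: None for x in hlis}.keys()` comprehension above is an order-preserving dedup: dict keys keep first-insertion order in Starlark (as in Python 3.7+), so duplicate package dbs collapse without reshuffling the search path that the code below joins into GHC_PACKAGE_PATH. A standalone sketch of the idiom (the `Lib` shape and values are made up for illustration):

    from types import SimpleNamespace as Lib

    # Several libraries can live in the same package db.
    hlis = [Lib(db="a.db"), Lib(db="b.db"), Lib(db="a.db")]

    # Dict-key dedup keeps first-seen order: a.db, b.db.
    db_deps = list({x.db: None for x in hlis}.keys())
    print(":".join(db_deps))  # a.db:b.db -- a shorter GHC_PACKAGE_PATH
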
ghc_package_path = cmd_args( - db_deps.values(), + db_deps, delimiter = ":", ) haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] ctx.actions.run( - cmd_args([ - "sh", - "-c", - _REGISTER_PACKAGE, - "", - haskell_toolchain.packager, - db.as_output(), - pkg_conf, - ]).hidden(hi.values()).hidden(lib.values()), # needs hi, because ghc-pkg checks that the .hi files exist + cmd_args( + [ + "sh", + "-c", + _REGISTER_PACKAGE, + "", + haskell_toolchain.packager, + db.as_output(), + pkg_conf, + ], + # needs hi, because ghc-pkg checks that the .hi files exist + hidden = hi.values() + lib.values(), + ), category = "haskell_package_" + artifact_suffix.replace("-", "_"), - env = {"GHC_PACKAGE_PATH": ghc_package_path}, + env = {"GHC_PACKAGE_PATH": ghc_package_path} if db_deps else {}, ) return db @@ -824,8 +495,21 @@ HaskellLibBuildOutput = record( libs = list[Artifact], ) +def _get_haskell_shared_library_name_linker_flags(linker_type: str, soname: str) -> list[str]: + if linker_type == "gnu": + return ["-Wl,-soname,{}".format(soname)] + elif linker_type == "darwin": + # Passing `-install_name @rpath/...` or + # `-Xlinker -install_name -Xlinker @rpath/...` instead causes + # ghc-9.6.3: panic! (the 'impossible' happened) + return ["-Wl,-install_name,@rpath/{}".format(soname)] + else: + fail("Unknown linker type '{}'.".format(linker_type)) + def _build_haskell_lib( ctx, + libname: str, + pkgname: str, hlis: list[HaskellLinkInfo], # haskell link infos from all deps nlis: list[MergedLinkInfo], # native link infos from all deps link_style: LinkStyle, @@ -834,20 +518,18 @@ def _build_haskell_lib( # profiling, so it should be passed when `enable_profiling` is True. non_profiling_hlib: [HaskellLibBuildOutput, None] = None) -> HaskellLibBuildOutput: linker_info = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info - libname = repr(ctx.label.path).replace("//", "_").replace("/", "_") + "_" + ctx.label.name - pkgname = libname.replace("_", "-") # Link the objects into a library haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - osuf, _hisuf = _output_extensions(link_style, enable_profiling) + osuf, _hisuf = output_extensions(link_style, enable_profiling) # Compile the sources - compiled = _compile( + compiled = compile( ctx, link_style, enable_profiling = enable_profiling, - extra_args = ["-this-unit-id", pkgname], + pkgname = pkgname, ) solibs = {} artifact_suffix = get_artifact_suffix(link_style, enable_profiling) @@ -856,34 +538,38 @@ def _build_haskell_lib( if link_style == LinkStyle("static_pic"): libstem += "_pic" + dynamic_lib_suffix = "." 
+ LINKERS[linker_info.type].default_shared_library_extension static_lib_suffix = "_p.a" if enable_profiling else ".a" - libfile = "lib" + libstem + (".so" if link_style == LinkStyle("shared") else static_lib_suffix) + libfile = "lib" + libstem + (dynamic_lib_suffix if link_style == LinkStyle("shared") else static_lib_suffix) lib_short_path = paths.join("lib-{}".format(artifact_suffix), libfile) linfos = [x.prof_info if enable_profiling else x.info for x in hlis] - uniq_infos = dedupe(flatten([x[link_style] for x in linfos])) + + # only gather direct dependencies + uniq_infos = [x[link_style].value for x in linfos] objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf) if link_style == LinkStyle("shared"): lib = ctx.actions.declare_output(lib_short_path) - link = cmd_args(haskell_toolchain.linker) - link.add(haskell_toolchain.linker_flags) - link.add(ctx.attrs.linker_flags) - link.add("-o", lib.as_output()) - link.add( - "-shared", - "-dynamic", - "-optl", - "-Wl,-soname", - "-optl", - "-Wl," + libfile, + link = cmd_args( + [haskell_toolchain.linker] + + [haskell_toolchain.linker_flags] + + [ctx.attrs.linker_flags] + + ["-o", lib.as_output()] + + [ + get_shared_library_flags(linker_info.type), + "-dynamic", + cmd_args( + _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), + prepend = "-optl", + ), + ] + + [objfiles], + hidden = compiled.stubs, ) - link.add(objfiles) - link.hidden(compiled.stubs) - infos = get_link_args_for_strategy( ctx, nlis, @@ -980,27 +666,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = Linkage("static") # Get haskell and native link infos from all deps - hlis = [] - nlis = [] - prof_nlis = [] - shared_library_infos = [] - for lib in attr_deps(ctx): - li = lib.get(HaskellLinkInfo) - if li != None: - hlis.append(li) - li = lib.get(MergedLinkInfo) - if li != None: - nlis.append(li) - if HaskellLinkInfo not in lib: - # MergedLinkInfo from non-haskell deps should be part of the - # profiling MergedLinkInfo - prof_nlis.append(li) - li = lib.get(HaskellProfLinkInfo) - if li != None: - prof_nlis.append(li.prof_infos) - li = lib.get(SharedLibraryInfo) - if li != None: - shared_library_infos.append(li) + hlis = attr_deps_haskell_link_infos_sans_template_deps(ctx) + nlis = attr_deps_merged_link_infos(ctx) + prof_nlis = attr_deps_profiling_link_infos(ctx) + shared_library_infos = attr_deps_shared_library_infos(ctx) solibs = {} link_infos = {} @@ -1012,6 +681,9 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: indexing_tsets = {} sub_targets = {} + libname = repr(ctx.label.path).replace("//", "_").replace("/", "_") + "_" + ctx.label.name + pkgname = libname.replace("_", "-") + # The non-profiling library is also needed to build the package with # profiling enabled, so we need to keep track of it for each link style. 
non_profiling_hlib = {} @@ -1024,6 +696,8 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: hlib_build_out = _build_haskell_lib( ctx, + libname, + pkgname, hlis = hlis, nlis = nlis, link_style = link_style, @@ -1040,11 +714,19 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling: prof_hlib_infos[link_style] = hlib - prof_hlink_infos[link_style] = [hlib] + prof_hlink_infos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = hlib, + children = [li.prof_info[link_style] for li in hlis], + ) prof_link_infos[link_style] = hlib_build_out.link_infos else: hlib_infos[link_style] = hlib - hlink_infos[link_style] = [hlib] + hlink_infos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = hlib, + children = [li.info[link_style] for li in hlis], + ) link_infos[link_style] = hlib_build_out.link_infos # Build the indices and create subtargets only once, with profiling @@ -1065,12 +747,13 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: )] pic_behavior = ctx.attrs._cxx_toolchain[CxxToolchainInfo].pic_behavior - link_style = _cxx_toolchain_link_style(ctx) + link_style = cxx_toolchain_link_style(ctx) output_style = get_lib_output_style( to_link_strategy(link_style), preferred_linkage, pic_behavior, ) + shared_libs = create_shared_libraries(ctx, solibs) # TODO(cjhopman): this haskell implementation does not consistently handle LibOutputStyle # and LinkStrategy as expected and it's hard to tell what the intent of the existing code is @@ -1113,7 +796,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = preferred_linkage, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, # TODO(cjhopman): this should be set to non-None default_soname = None, ), @@ -1146,10 +829,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: lib = hlib_infos, prof_lib = prof_hlib_infos, ), - merge_haskell_link_infos(hlis + [HaskellLinkInfo( + HaskellLinkInfo( info = hlink_infos, prof_info = prof_hlink_infos, - )]), + ), merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, @@ -1158,9 +841,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, pp, inherited_pp_info), merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, shared_library_infos, ), + haskell_haddock_lib(ctx, pkgname), ] if indexing_tsets: @@ -1202,7 +886,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: def derive_indexing_tset( actions: AnalysisActions, link_style: LinkStyle, - value: [Artifact, None], + value: Artifact | None, children: list[Dependency]) -> HaskellIndexingTSet: index_children = [] for dep in children: @@ -1222,7 +906,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # Decide what kind of linking we're doing - link_style = _attr_link_style(ctx) + link_style = attr_link_style(ctx) # Link Groups link_group_info = get_link_group_info(ctx, filter_and_map_idx(LinkableGraph, attr_deps(ctx))) @@ -1231,7 +915,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling and link_style == LinkStyle("shared"): link_style = LinkStyle("static") - compiled = _compile( + compiled = compile( ctx, link_style, enable_profiling = enable_profiling, @@ -1240,17 +924,20 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: 
haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] output = ctx.actions.declare_output(ctx.attrs.name) - link = cmd_args(haskell_toolchain.compiler) - link.add("-o", output.as_output()) - link.add(haskell_toolchain.linker_flags) - link.add(ctx.attrs.linker_flags) + link = cmd_args( + [haskell_toolchain.compiler] + + ["-o", output.as_output()] + + [haskell_toolchain.linker_flags] + + [ctx.attrs.linker_flags], + hidden = compiled.stubs, + ) - link.hidden(compiled.stubs) + link_args = cmd_args() - osuf, _hisuf = _output_extensions(link_style, enable_profiling) + osuf, _hisuf = output_extensions(link_style, enable_profiling) objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf) - link.add(objfiles) + link_args.add(objfiles) indexing_tsets = {} if compiled.producing_indices: @@ -1267,7 +954,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: deps = slis, ) - sos = {} + sos = [] if link_group_info != None: own_binary_link_flags = [] @@ -1281,14 +968,22 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # in the prelude, the link group map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from out deps. auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info) + executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None] + public_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_info.mappings, + executable_deps, + None, + ) if auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, link_group_mappings = link_group_info.mappings, link_group_preferred_linkage = link_group_preferred_linkage, - executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None], + executable_deps = executable_deps, link_group_specs = auto_link_group_specs, linkable_graph_node_map = linkable_graph_node_map, + public_nodes = public_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -1307,15 +1002,11 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_group_relevant_roots = find_relevant_roots( linkable_graph_node_map = linkable_graph_node_map, link_group_mappings = link_group_info.mappings, - roots = [ - mapping.root - for group in link_group_info.groups.values() - for mapping in group.mappings - if mapping.root != None - ], + roots = get_dedupped_roots_from_groups(link_group_info.groups.values()), ) labels_to_links_map = get_filtered_labels_to_links_map( + public_nodes = public_nodes, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, @@ -1367,15 +1058,15 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: labels_to_links_map = labels_to_links_map, ) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): + for shared_lib in traverse_shared_library_info(shlib_info): label = shared_lib.label if is_link_group_shlib(label, link_group_ctx): - sos[name] = shared_lib.lib + sos.append(shared_lib) # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - sos.update(link_group_lib.shared_libs) + sos.extend(link_group_lib.shared_libs.libraries) else: nlis = [] @@ -1388,21 +1079,32 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: li = lib.get(MergedLinkInfo) if li != None: nlis.append(li) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - sos[name] = shared_lib.lib + sos.extend(traverse_shared_library_info(shlib_info)) infos = get_link_args_for_strategy(ctx, nlis, to_link_strategy(link_style)) - link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) + link_args.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) + link.add(at_argfile( + actions = ctx.actions, + name = "haskell_link.argsfile", + args = link_args, + allow_args = True, + )) ctx.actions.run(link, category = "haskell_link") - run = cmd_args(output) - if link_style == LinkStyle("shared") or link_group_info != None: sos_dir = "__{}__shared_libs_symlink_tree".format(ctx.attrs.name) - link.add("-optl", "-Wl,-rpath", "-optl", "-Wl,$ORIGIN/{}".format(sos_dir)) - symlink_dir = ctx.actions.symlinked_dir(sos_dir, {n: o.output for n, o in sos.items()}) - run.hidden(symlink_dir) + rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) + rpath_ldflag = "-Wl,{}/{}".format(rpath_ref, sos_dir) + link.add("-optl", "-Wl,-rpath", "-optl", rpath_ldflag) + symlink_dir = create_shlib_symlink_tree( + actions = ctx.actions, + out = sos_dir, + shared_libs = sos, + ) + run = cmd_args(output, hidden = symlink_dir) + else: + run = cmd_args(output) providers = [ DefaultInfo(default_output = output), diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 93f76677cc..118e11f7e1 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -17,22 +17,26 @@ load( "link_options", ) load( - "@prelude//haskell:haskell.bzl", + "@prelude//haskell:compile.bzl", + "PackagesInfo", + "get_packages_info", +) +load( + "@prelude//haskell:library_info.bzl", "HaskellLibraryInfo", "HaskellLibraryProvider", +) +load( + "@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", - "PackagesInfo", - "attr_deps", - "get_artifact_suffix", - "get_packages_info", ) +load("@prelude//haskell:util.bzl", "attr_deps", "get_artifact_suffix") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_info.bzl", "LinkArgs", "LinkInfo", "LinkStyle", - "Linkage", "get_lib_output_style", "set_linkable_link_whole", "to_link_strategy", @@ -47,12 +51,15 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", + "create_shlib_symlink_tree", "traverse_shared_library_info", + "with_unique_str_sonames", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal", - "breadth_first_traversal_by", + "depth_first_traversal", + "depth_first_traversal_by", ) load("@prelude//utils:utils.bzl", "flatten") @@ -176,7 +183,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: dep_graph[ctx.label] = all_direct_deps # Need to exclude all transitive deps of excluded deps - all_nodes_to_exclude = breadth_first_traversal( + all_nodes_to_exclude = depth_first_traversal( dep_graph, [dep.label for dep in preload_deps], ) @@ -221,7 +228,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # This is not the final set of body nodes, because it still includes # nodes that don't support omnibus (e.g. 
haskell_library nodes) - breadth_first_traversal_by( + depth_first_traversal_by( dep_graph, [ctx.label], find_deps_for_body, @@ -264,7 +271,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # Handle third-party dependencies of the omnibus SO tp_deps_shared_link_infos = {} - so_symlinks = {} + prebuilt_shlibs = [] for node_label in prebuilt_so_deps.keys(): node = graph_nodes[node_label] @@ -278,14 +285,14 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - for libname, linkObject in node.shared_libs.items(): - so_symlinks[libname] = linkObject.output + prebuilt_shlibs.extend(node.shared_libs.libraries) # Create symlinks to the TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" - so_symlinks_root = ctx.actions.symlinked_dir( - so_symlinks_root_path, - so_symlinks, + so_symlinks_root = create_shlib_symlink_tree( + actions = ctx.actions, + out = so_symlinks_root_path, + shared_libs = prebuilt_shlibs, ) linker_info = get_cxx_toolchain_info(ctx).linker_info @@ -323,10 +330,10 @@ def _replace_macros_in_script_template( script_template: Artifact, haskell_toolchain: HaskellToolchainInfo, # Optional artifacts - ghci_bin: [Artifact, None] = None, - start_ghci: [Artifact, None] = None, - iserv_script: [Artifact, None] = None, - squashed_so: [Artifact, None] = None, + ghci_bin: Artifact | None = None, + start_ghci: Artifact | None = None, + iserv_script: Artifact | None = None, + squashed_so: Artifact | None = None, # Optional cmd_args exposed_package_args: [cmd_args, None] = None, packagedb_args: [cmd_args, None] = None, @@ -472,10 +479,10 @@ def _build_preload_deps_root( if SharedLibraryInfo in preload_dep: slib_info = preload_dep[SharedLibraryInfo] - shlib = traverse_shared_library_info(slib_info).items() + shlib = traverse_shared_library_info(slib_info) - for shlib_name, shared_lib in shlib: - preload_symlinks[shlib_name] = shared_lib.lib.output + for soname, shared_lib in with_unique_str_sonames(shlib).items(): + preload_symlinks[soname] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps # TODO(T150785851): find out why the only SOs missing are the ones from @@ -628,11 +635,11 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: package_symlinks_root = ctx.label.name + ".packages" packagedb_args = cmd_args(delimiter = " ") - prebuilt_packagedb_args = cmd_args(delimiter = " ") + prebuilt_packagedb_args_set = {} - for lib in packages_info.transitive_deps: + for lib in packages_info.transitive_deps.traverse(): if lib.is_prebuilt: - prebuilt_packagedb_args.add(lib.db) + prebuilt_packagedb_args_set[lib.db] = None else: lib_symlinks_root = paths.join( package_symlinks_root, @@ -662,6 +669,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "packagedb", ), ) + prebuilt_packagedb_args = cmd_args(prebuilt_packagedb_args_set.keys(), delimiter = " ") script_templates = [] for script_template in ctx.attrs.extra_script_templates: @@ -712,7 +720,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "__{}__".format(ctx.label.name), output_artifacts, ) - run = cmd_args(final_ghci_script).hidden(outputs) + run = cmd_args(final_ghci_script, hidden = outputs) return [ DefaultInfo(default_outputs = [root_output_dir]), diff --git a/prelude/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl index 
d30b0c56cd..002388afca 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -5,5 +5,156 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -def haskell_haddock_impl(_ctx: AnalysisContext) -> list[Provider]: - return [DefaultInfo()] +load("@prelude//haskell:compile.bzl", "compile_args") +load("@prelude//haskell:link_info.bzl", "cxx_toolchain_link_style") +load( + "@prelude//haskell:toolchain.bzl", + "HaskellToolchainInfo", +) +load( + "@prelude//haskell:util.bzl", + "attr_deps", +) +load("@prelude//utils:argfile.bzl", "at_argfile") + +HaskellHaddockInfo = provider( + fields = { + "html": provider_field(typing.Any, default = None), + "interface": provider_field(typing.Any, default = None), + }, +) + +def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + iface = ctx.actions.declare_output("haddock-interface") + odir = ctx.actions.declare_output("haddock-html", dir = True) + + link_style = cxx_toolchain_link_style(ctx) + args = compile_args( + ctx, + link_style, + enable_profiling = False, + suffix = "-haddock", + pkgname = pkgname, + ) + + cmd = cmd_args(haskell_toolchain.haddock) + cmd.add(cmd_args(args.args_for_cmd, format = "--optghc={}")) + cmd.add( + "--use-index", + "doc-index.html", + "--use-contents", + "index.html", + "--html", + "--hoogle", + "--no-tmp-comp-dir", + "--no-warnings", + "--dump-interface", + iface.as_output(), + "--odir", + odir.as_output(), + "--package-name", + pkgname, + ) + + for lib in attr_deps(ctx): + hi = lib.get(HaskellHaddockInfo) + if hi != None: + cmd.add("--read-interface", hi.interface) + + cmd.add(ctx.attrs.haddock_flags) + + source_entity = read_root_config("haskell", "haddock_source_entity", None) + if source_entity: + cmd.add("--source-entity", source_entity) + + if args.args_for_file: + if haskell_toolchain.use_argsfile: + ghcargs = cmd_args(args.args_for_file, format = "--optghc={}") + cmd.add(at_argfile( + actions = ctx.actions, + name = "haskell_haddock.argsfile", + args = [ghcargs, args.srcs], + allow_args = True, + )) + else: + cmd.add(args.args_for_file) + + # Buck2 requires that the output artifacts are always produced, but Haddock only + # creates them if it needs to, so we need a wrapper script to mkdir the outputs. 
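The wrapper script assembled below implements a common pattern: pre-create every declared output so the action always produces it, then run the real tool, which may or may not write there. A minimal Python sketch of the same pattern (the command and directory names are placeholders, not the actual Haddock invocation):

    import os
    import subprocess

    def run_with_outputs(cmd, output_dirs):
        # Declared outputs must exist even if the tool decides not to write them.
        for d in output_dirs:
            os.makedirs(d, exist_ok=True)
        return subprocess.call(cmd)

    # Stand-in for the haddock command built above.
    run_with_outputs(["true"], ["objects-out", "hi-out", "stubs-out"])
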
+ script = ctx.actions.declare_output("haddock-script") + script_args = cmd_args([ + "mkdir", + "-p", + args.result.objects.as_output(), + args.result.hi.as_output(), + args.result.stubs.as_output(), + "&&", + cmd_args(cmd, quote = "shell"), + ], delimiter = " ") + ctx.actions.write( + script, + cmd_args("#!/bin/sh", script_args), + is_executable = True, + allow_args = True, + ) + + ctx.actions.run( + cmd_args(script, hidden = cmd), + category = "haskell_haddock", + no_outputs_cleanup = True, + ) + + return HaskellHaddockInfo(interface = iface, html = odir) + +def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + out = ctx.actions.declare_output("haddock-html", dir = True) + + cmd = cmd_args(haskell_toolchain.haddock) + + cmd.add( + "--gen-index", + "--gen-contents", + "-o", + out.as_output(), + ) + + dep_htmls = [] + for lib in attr_deps(ctx): + hi = lib.get(HaskellHaddockInfo) + if hi != None: + cmd.add("--read-interface", hi.interface) + dep_htmls.append(hi.html) + + cmd.add(ctx.attrs.haddock_flags) + + script = ctx.actions.declare_output("haddock-script") + script_args = cmd_args([ + "#!/bin/sh", + "set -ueo pipefail", + cmd_args(cmd, delimiter = " ", quote = "shell"), + ]) + for dir in dep_htmls: + script_args.add( + cmd_args( + ["cp", "-Rf", "--reflink=auto", cmd_args(dir, format = "{}/*"), out.as_output()], + delimiter = " ", + ), + ) + ctx.actions.write( + script, + script_args, + is_executable = True, + allow_args = True, + ) + + ctx.actions.run( + cmd_args(script, hidden = script_args), + category = "haskell_haddock", + no_outputs_cleanup = True, + ) + + return [DefaultInfo(default_outputs = [out])] diff --git a/prelude/haskell/ide/README.md b/prelude/haskell/ide/README.md index c7867a541f..4e58eed4f0 100644 --- a/prelude/haskell/ide/README.md +++ b/prelude/haskell/ide/README.md @@ -1,13 +1,14 @@ # Haskell Language Server integration This integration allows loading `haskell_binary` and `haskell_library` targets -on Haskell Language Server. This is accomplished via a BXL script that is -used to drive a hie-bios "bios" cradle. +on Haskell Language Server. This is accomplished via a BXL script that is used +to drive a hie-bios "bios" cradle. # Usage To print the list of GHC flags and targets for a Haskell source file: - buck2 bxl prelude//haskell/ide/ide.bxl -- --bios true --file +buck2 bxl prelude//haskell/ide/ide.bxl -- --bios true --file + To integrate with hie_bios, copy `hie.yaml` to your repo root diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 9a4dbda824..57fcd4c053 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -5,7 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//haskell:haskell.bzl", "HaskellLibraryProvider", "HaskellLinkInfo", "HaskellToolchainInfo") +load("@prelude//haskell:library_info.bzl", "HaskellLibraryProvider") +load("@prelude//haskell:link_info.bzl", "HaskellLinkInfo") +load("@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo") load("@prelude//linking:link_info.bzl", "LinkStyle") load("@prelude//paths.bzl", "paths") @@ -25,7 +27,7 @@ load("@prelude//paths.bzl", "paths") # 1. Finding its owner target, if the input is a file # 2. Finding the target's "project", which involves a rdeps search # 3. Computing the project solution (flags, sources and dependencies) -# 4. Outputing the solution as JSON +# 4. 
Outputting the solution as JSON _HASKELL_BIN = "prelude//rules.bzl:haskell_binary" _HASKELL_IDE = "prelude//rules.bzl:haskell_ide" @@ -60,7 +62,18 @@ def _solution_for_file(ctx, file, project_universe): target_universe = ctx.target_universe(unconfigured_owners).target_set() owners = ctx.cquery().owner(file, target_universe) if not owners or len(owners) == 0: - fail("No owner found for " + file) + return { + "external_dependencies": [], + "flags": [], + "generated_dependencies": [], + "haskell_deps": {}, + "import_dirs": [], + "owner": "No owner found for " + file, + "project": "", + "project_type": "", + "sources": [], + "targets": [], + } owner = owners[0] @@ -143,7 +156,7 @@ def _solution_for_haskell_lib(ctx, target, exclude): import_dirs = {} root = ctx.root() for key, item in resolved_attrs.srcs.items(): - # because BXL wont give you the path of an ensured artifact + # because BXL won't give you the path of an ensured artifact sp = get_path_without_materialization(item, ctx) (_, ext) = paths.split_extension(sp) diff = sp.removesuffix(paths.replace_extension(key, ext)) @@ -246,7 +259,7 @@ def _assembleSolution(ctx, linkStyle, result): for provider in result["haskell_deps"].values(): info = provider.info.get(linkStyle) if info != None: - for item in info: + for item in info.traverse(): if result["exclude_packages"].get(item.name) == None: hlis[item.name] = item for hli in hlis.values(): diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl new file mode 100644 index 0000000000..3b048f1374 --- /dev/null +++ b/prelude/haskell/library_info.bzl @@ -0,0 +1,45 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# If the target is a haskell library, the HaskellLibraryProvider +# contains its HaskellLibraryInfo. (in contrast to a HaskellLinkInfo, +# which contains the HaskellLibraryInfo for all the transitive +# dependencies). Direct dependencies are treated differently from +# indirect dependencies for the purposes of module visibility. +HaskellLibraryProvider = provider( + fields = { + "lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] + "prof_lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] + }, +) + +# A record of a Haskell library. +HaskellLibraryInfo = record( + # The library target name: e.g. "rts" + name = str, + # package config database: e.g. platform009/build/ghc/lib/package.conf.d + db = Artifact, + # e.g. "base-4.13.0.0" + id = str, + # Import dirs indexed by profiling enabled/disabled + import_dirs = dict[bool, Artifact], + stub_dirs = list[Artifact], + + # This field is only used as hidden inputs to compilation, to + # support Template Haskell which may need access to the libraries + # at compile time. The real library flags are propagated up the + # dependency graph via MergedLinkInfo. + libs = field(list[Artifact], []), + # Package version, used to specify the full package when exposing it, + # e.g. filepath-1.4.2.1, deepseq-1.4.4.0. + # Internal packages default to 1.0.0, e.g. `fbcode-dsi-logger-hs-types-1.0.0`. 
+    version = str,
+    is_prebuilt = bool,
+    profiling_enabled = bool,
+)
+
+HaskellLibraryInfoTSet = transitive_set()
diff --git a/prelude/haskell/link_info.bzl b/prelude/haskell/link_info.bzl
new file mode 100644
index 0000000000..5cdc5f5926
--- /dev/null
+++ b/prelude/haskell/link_info.bzl
@@ -0,0 +1,46 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load(
+    "@prelude//cxx:cxx_toolchain_types.bzl",
+    "CxxToolchainInfo",
+)
+load(
+    "@prelude//haskell:library_info.bzl",
+    "HaskellLibraryInfoTSet",
+)
+load(
+    "@prelude//linking:link_info.bzl",
+    "LinkStyle",
+)
+
+# Haskell library dependencies, grouped by link style.
+HaskellLinkInfo = provider(
+    # Maps each LinkStyle to a transitive set of HaskellLibraryInfo records.
+    fields = {
+        "info": provider_field(dict[LinkStyle, HaskellLibraryInfoTSet]),
+        "prof_info": provider_field(dict[LinkStyle, HaskellLibraryInfoTSet]),
+    },
+)
+
+# HaskellProfLinkInfo exposes the MergedLinkInfo of a target and all of its
+# dependencies built for profiling. This allows top-level targets (e.g.
+# `haskell_binary`) to be defined with profiling enabled by default.
+HaskellProfLinkInfo = provider(
+    fields = {
+        "prof_infos": provider_field(typing.Any, default = None),  # MergedLinkInfo
+    },
+)
+
+def cxx_toolchain_link_style(ctx: AnalysisContext) -> LinkStyle:
+    return ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info.link_style
+
+def attr_link_style(ctx: AnalysisContext) -> LinkStyle:
+    if ctx.attrs.link_style != None:
+        return LinkStyle(ctx.attrs.link_style)
+    else:
+        return cxx_toolchain_link_style(ctx)
diff --git a/prelude/haskell/toolchain.bzl b/prelude/haskell/toolchain.bzl
new file mode 100644
index 0000000000..f6c072fbf5
--- /dev/null
+++ b/prelude/haskell/toolchain.bzl
@@ -0,0 +1,41 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
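+
+# For reference, a minimal (hypothetical) toolchain rule might return these
+# providers alongside DefaultInfo; all values below are illustrative:
+#
+#   def _haskell_toolchain_impl(ctx):
+#       return [
+#           DefaultInfo(),
+#           HaskellToolchainInfo(compiler = "ghc", haddock = "haddock", packager = "ghc-pkg"),
+#           HaskellPlatformInfo(name = "x86_64"),
+#       ]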
+ +HaskellPlatformInfo = provider(fields = { + "name": provider_field(typing.Any, default = None), +}) + +HaskellToolchainInfo = provider( + # @unsorted-dict-items + fields = { + "compiler": provider_field(typing.Any, default = None), + "compiler_flags": provider_field(typing.Any, default = None), + "linker": provider_field(typing.Any, default = None), + "linker_flags": provider_field(typing.Any, default = None), + "haddock": provider_field(typing.Any, default = None), + "compiler_major_version": provider_field(typing.Any, default = None), + "package_name_prefix": provider_field(typing.Any, default = None), + "packager": provider_field(typing.Any, default = None), + "use_argsfile": provider_field(typing.Any, default = None), + "support_expose_package": provider_field(bool, default = False), + "archive_contents": provider_field(typing.Any, default = None), + "ghci_script_template": provider_field(typing.Any, default = None), + "ghci_iserv_template": provider_field(typing.Any, default = None), + "ide_script_template": provider_field(typing.Any, default = None), + "ghci_binutils_path": provider_field(typing.Any, default = None), + "ghci_lib_path": provider_field(typing.Any, default = None), + "ghci_ghc_path": provider_field(typing.Any, default = None), + "ghci_iserv_path": provider_field(typing.Any, default = None), + "ghci_iserv_prof_path": provider_field(typing.Any, default = None), + "ghci_cxx_path": provider_field(typing.Any, default = None), + "ghci_cc_path": provider_field(typing.Any, default = None), + "ghci_cpp_path": provider_field(typing.Any, default = None), + "ghci_packager": provider_field(typing.Any, default = None), + "cache_links": provider_field(typing.Any, default = None), + "script_template_processor": provider_field(typing.Any, default = None), + }, +) diff --git a/prelude/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 index 48758abb9a..3029719fcd 100644 --- a/prelude/haskell/tools/BUCK.v2 +++ b/prelude/haskell/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( diff --git a/prelude/haskell/util.bzl b/prelude/haskell/util.bzl new file mode 100644 index 0000000000..80584cd3be --- /dev/null +++ b/prelude/haskell/util.bzl @@ -0,0 +1,151 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxPlatformInfo", +) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfo", + "HaskellLibraryProvider", +) +load( + "@prelude//haskell:link_info.bzl", + "HaskellLinkInfo", + "HaskellProfLinkInfo", +) +load( + "@prelude//linking:link_info.bzl", + "LinkStyle", + "MergedLinkInfo", +) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", +) +load("@prelude//utils:platform_flavors_util.bzl", "by_platform") +load("@prelude//utils:utils.bzl", "flatten") + +HASKELL_EXTENSIONS = [ + ".hs", + ".lhs", + ".hsc", + ".chs", + ".x", + ".y", +] + +# We take a named_set for srcs, which is sometimes a list, sometimes a dict. +# In future we should only accept a list, but for now, cope with both. 
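+# For example (illustrative values), both spellings produce the same pairs:
+#
+#   srcs = ["Foo.hs", "Bar.hs"]        -> [("Foo.hs", <artifact>), ("Bar.hs", <artifact>)]
+#   srcs = {"Foo.hs": "src/Foo.hs"}    -> [("Foo.hs", <artifact>)]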
+def srcs_to_pairs(srcs) -> list[(str, Artifact)]:
+    if type(srcs) == type({}):
+        return srcs.items()
+    else:
+        return [(src.short_path, src) for src in srcs]
+
+def is_haskell_src(x: str) -> bool:
+    _, ext = paths.split_extension(x)
+    return ext in HASKELL_EXTENSIONS
+
+def src_to_module_name(x: str) -> str:
+    base, _ext = paths.split_extension(x)
+    return base.replace("/", ".")
+
+def _by_platform(ctx: AnalysisContext, xs: list[(str, list[typing.Any])]) -> list[typing.Any]:
+    platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo].name
+    return flatten(by_platform([platform], xs))
+
+def attr_deps(ctx: AnalysisContext) -> list[Dependency]:
+    return ctx.attrs.deps + _by_platform(ctx, ctx.attrs.platform_deps)
+
+def attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]:
+    return dedupe(filter(
+        None,
+        [
+            d.get(HaskellLinkInfo)
+            for d in attr_deps(ctx) + ctx.attrs.template_deps
+        ],
+    ))
+
+# DON'T CALL THIS FUNCTION, you want attr_deps_haskell_link_infos instead
+def attr_deps_haskell_link_infos_sans_template_deps(ctx: AnalysisContext) -> list[HaskellLinkInfo]:
+    return dedupe(filter(
+        None,
+        [
+            d.get(HaskellLinkInfo)
+            for d in attr_deps(ctx)
+        ],
+    ))
+
+def attr_deps_haskell_lib_infos(
+        ctx: AnalysisContext,
+        link_style: LinkStyle,
+        enable_profiling: bool) -> list[HaskellLibraryInfo]:
+    if enable_profiling and link_style == LinkStyle("shared"):
+        fail("Profiling isn't supported when using dynamic linking")
+    return [
+        x.prof_lib[link_style] if enable_profiling else x.lib[link_style]
+        for x in filter(None, [
+            d.get(HaskellLibraryProvider)
+            for d in attr_deps(ctx) + ctx.attrs.template_deps
+        ])
+    ]
+
+def attr_deps_merged_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]:
+    return dedupe(filter(
+        None,
+        [
+            d.get(MergedLinkInfo)
+            for d in attr_deps(ctx)
+        ],
+    ))
+
+def attr_deps_profiling_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]:
+    return filter(
+        None,
+        [
+            d.get(HaskellProfLinkInfo).prof_infos if d.get(HaskellProfLinkInfo) else d.get(MergedLinkInfo)
+            for d in attr_deps(ctx)
+        ],
+    )
+
+def attr_deps_shared_library_infos(ctx: AnalysisContext) -> list[SharedLibraryInfo]:
+    return filter(
+        None,
+        [
+            d.get(SharedLibraryInfo)
+            for d in attr_deps(ctx)
+        ],
+    )
+
+def _link_style_extensions(link_style: LinkStyle) -> (str, str):
+    if link_style == LinkStyle("shared"):
+        return ("dyn_o", "dyn_hi")
+    elif link_style == LinkStyle("static_pic"):
+        return ("o", "hi")  # is this right?
+    elif link_style == LinkStyle("static"):
+        return ("o", "hi")
+    fail("unknown LinkStyle")
+
+def output_extensions(
+        link_style: LinkStyle,
+        profiled: bool) -> (str, str):
+    osuf, hisuf = _link_style_extensions(link_style)
+    if profiled:
+        return ("p_" + osuf, "p_" + hisuf)
+    else:
+        return (osuf, hisuf)
+
+# Single place to build the suffix used in artifacts (e.g. package directories,
+# lib names) considering attributes like link style and profiling.
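+# e.g. (illustrative):
+#   get_artifact_suffix(LinkStyle("shared"), True) == "shared-prof"
+#   get_artifact_suffix(LinkStyle("static"), False, "-haddock") == "static-haddock"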
+def get_artifact_suffix(link_style: LinkStyle, enable_profiling: bool, suffix: str = "") -> str: + artifact_suffix = link_style.value + if enable_profiling: + artifact_suffix += "-prof" + return artifact_suffix + suffix diff --git a/prelude/http_archive/http_archive.bzl b/prelude/http_archive/http_archive.bzl index 604a2747e7..ded4cc8dd3 100644 --- a/prelude/http_archive/http_archive.bzl +++ b/prelude/http_archive/http_archive.bzl @@ -66,8 +66,9 @@ def _unarchive_cmd( archive, "--stdout", "|", - "tar", + "%WINDIR%\\System32\\tar.exe", "-x", + "-P", "-f", "-", _tar_strip_prefix_flags(strip_prefix), @@ -76,7 +77,7 @@ def _unarchive_cmd( # unzip and zip are not cli commands available on windows. however, the # bsdtar that ships with windows has builtin support for zip return cmd_args( - "tar", + "%WINDIR%\\System32\\tar.exe", "-x", "-P", "-f", @@ -200,7 +201,10 @@ def http_archive_impl(ctx: AnalysisContext) -> list[Provider]: ) ctx.actions.run( - cmd_args(interpreter + [script]).hidden(exclude_hidden + [archive, script_output.as_output()]), + cmd_args( + interpreter + [script], + hidden = exclude_hidden + [archive, script_output.as_output()], + ), category = "http_archive", prefer_local = prefer_local, ) diff --git a/prelude/http_archive/tools/BUCK.v2 b/prelude/http_archive/tools/BUCK.v2 index f08b7dcc79..b91ae412d4 100644 --- a/prelude/http_archive/tools/BUCK.v2 +++ b/prelude/http_archive/tools/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load("@prelude//http_archive/exec_deps.bzl", "http_archive_exec_deps") +oncall("build_infra") + +source_listing() + prelude = native http_archive_exec_deps( diff --git a/prelude/ide_integrations/xcode.bzl b/prelude/ide_integrations/xcode.bzl index f8f2cda0d3..e14f9fbb9d 100644 --- a/prelude/ide_integrations/xcode.bzl +++ b/prelude/ide_integrations/xcode.bzl @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+XCODE_ARGSFILES_SUB_TARGET = "xcode-argsfiles" + XCODE_DATA_SUB_TARGET = "xcode-data" _XCODE_DATA_FILE_NAME = "xcode_data.json" @@ -15,7 +17,7 @@ XcodeDataInfo = provider(fields = { def generate_xcode_data( ctx: AnalysisContext, rule_type: str, - output: [Artifact, None], + output: Artifact | None, populate_rule_specific_attributes_func: [typing.Callable, None] = None, **kwargs) -> (list[DefaultInfo], XcodeDataInfo): data = { @@ -27,5 +29,9 @@ def generate_xcode_data( if populate_rule_specific_attributes_func: data.update(populate_rule_specific_attributes_func(ctx, **kwargs)) + data["extra_xcode_files"] = [] + if hasattr(ctx.attrs, "extra_xcode_files"): + data["extra_xcode_files"] = ctx.attrs.extra_xcode_files + json_file = ctx.actions.write_json(_XCODE_DATA_FILE_NAME, data) return [DefaultInfo(default_output = json_file)], XcodeDataInfo(data = data) diff --git a/prelude/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl index d8bc8fa03c..b026964ae6 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -7,11 +7,11 @@ load( "@prelude//java:java_toolchain.bzl", - "JavaTestToolchainInfo", # @unused Used as a type "JavaToolchainInfo", # @unused Used as a type ) +load("@prelude//utils:argfile.bzl", "at_argfile") -def _class_to_src_map_args(mapping: [Artifact, None]): +def _class_to_src_map_args(mapping: Artifact | None): if mapping != None: return cmd_args(mapping) return cmd_args() @@ -34,8 +34,8 @@ JavaClassToSourceMapInfo = provider( def create_class_to_source_map_info( ctx: AnalysisContext, - mapping: [Artifact, None] = None, - mapping_debuginfo: [Artifact, None] = None, + mapping: Artifact | None = None, + mapping_debuginfo: Artifact | None = None, deps = [Dependency]) -> JavaClassToSourceMapInfo: # Only generate debuginfo if the debug info tool is available. java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] @@ -55,7 +55,7 @@ def create_class_to_source_map_info( actions = ctx.actions, java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], tset_debuginfo = tset_debuginfo, - name = ctx.attrs.name + ".debuginfo_merged.json", + name = ctx.label.name + ".debuginfo_merged.json", ) return JavaClassToSourceMapInfo( @@ -73,21 +73,28 @@ def create_class_to_source_map_from_jar( name: str, java_toolchain: JavaToolchainInfo, jar: Artifact, - srcs: list[Artifact]) -> Artifact: + srcs: list[Artifact], + sources_jar_name: [str, None] = None) -> (Artifact, Artifact | None): output = actions.declare_output(name) cmd = cmd_args(java_toolchain.gen_class_to_source_map[RunInfo]) + if java_toolchain.gen_class_to_source_map_include_sourceless_compiled_packages != None: + for item in java_toolchain.gen_class_to_source_map_include_sourceless_compiled_packages: + cmd.add("-i", item) cmd.add("-o", output.as_output()) cmd.add(jar) - for src in srcs: - cmd.add(cmd_args(src)) + cmd.add(at_argfile(actions = actions, name = "class_to_srcs_map_argsfile.txt", args = srcs)) + sources_jar = None + if sources_jar_name: + sources_jar = actions.declare_output(sources_jar_name) + cmd.add("--sources_jar", sources_jar.as_output()) actions.run(cmd, category = "class_to_srcs_map") - return output + return (output, sources_jar) def maybe_create_class_to_source_map_debuginfo( actions: AnalysisActions, name: str, java_toolchain: JavaToolchainInfo, - srcs: list[Artifact]) -> [Artifact, None]: + srcs: list[Artifact]) -> Artifact | None: # Only generate debuginfo if the debug info tool is available. 
if java_toolchain.gen_class_to_source_map_debuginfo == None: return None @@ -96,34 +103,33 @@ def maybe_create_class_to_source_map_debuginfo( cmd = cmd_args(java_toolchain.gen_class_to_source_map_debuginfo[RunInfo]) cmd.add("gen") cmd.add("-o", output.as_output()) - inputs_file = actions.write("sourcefiles.txt", srcs) - cmd.add(cmd_args(inputs_file, format = "@{}")) - cmd.hidden(srcs) + cmd.add(at_argfile(actions = actions, name = "sourcefiles.txt", args = srcs)) actions.run(cmd, category = "class_to_srcs_map_debuginfo") return output def merge_class_to_source_map_from_jar( actions: AnalysisActions, name: str, - java_test_toolchain: JavaTestToolchainInfo, - mapping: [Artifact, None] = None, - relative_to: [CellRoot, None] = None, - # TODO(nga): I think this meant to be type, not default value. - deps = [JavaClassToSourceMapInfo.type]) -> Artifact: + java_toolchain: JavaToolchainInfo, + relative_to: [CellRoot, None], + deps: list[JavaClassToSourceMapInfo]) -> Artifact: output = actions.declare_output(name) - cmd = cmd_args(java_test_toolchain.merge_class_to_source_maps[RunInfo]) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - if relative_to != None: - cmd.add(cmd_args(str(relative_to), format = "--relative-to={}")) + tset = actions.tset( JavaClassToSourceMapTset, - value = mapping, + value = None, children = [d.tset for d in deps], ) class_to_source_files = tset.project_as_args("class_to_src_map") mappings_file = actions.write("class_to_src_map.txt", class_to_source_files) - cmd.add(["--mappings", mappings_file]) - cmd.hidden(class_to_source_files) + + cmd = cmd_args( + java_toolchain.merge_class_to_source_maps[RunInfo], + cmd_args(output.as_output(), format = "--output={}"), + cmd_args(str(relative_to), format = "--relative-to={}") if relative_to != None else [], + ["--mappings", mappings_file], + hidden = class_to_source_files, + ) actions.run(cmd, category = "merge_class_to_srcs_map") return output @@ -142,8 +148,7 @@ def _create_merged_debug_info( children = [tset_debuginfo], ) input_files = tset.project_as_args("class_to_src_map") - input_list_file = actions.write("debuginfo_list.txt", input_files) - cmd.add(cmd_args(input_list_file, format = "@{}")) - cmd.hidden(input_files) + cmd.add(at_argfile(actions = actions, name = "debuginfo_list.txt", args = input_files)) + actions.run(cmd, category = "merged_debuginfo") return output diff --git a/prelude/java/dex.bzl b/prelude/java/dex.bzl index d2fd2f6ddd..16a0c76803 100644 --- a/prelude/java/dex.bzl +++ b/prelude/java/dex.bzl @@ -51,7 +51,7 @@ def get_dex_produced_from_java_library( else: desugar_deps_file = ctx.actions.write(prefix + "_desugar_deps_file.txt", desugar_deps) d8_cmd.add(["--classpath-files", desugar_deps_file]) - d8_cmd.hidden(desugar_deps) + d8_cmd.add(cmd_args(hidden = desugar_deps)) referenced_resources_file = ctx.actions.declare_output(prefix + "_referenced_resources.txt") d8_cmd.add(["--referenced-resources-path", referenced_resources_file.as_output()]) @@ -71,7 +71,7 @@ def get_dex_produced_from_java_library( identifier = "{}:{} {}".format(ctx.label.package, ctx.label.name, output_dex_file.short_path) ctx.actions.run( d8_cmd, - category = "d8", + category = "pre_dex", identifier = identifier, ) diff --git a/prelude/java/gwt_binary.bzl b/prelude/java/gwt_binary.bzl index 2c13c70a81..d5d86cc592 100644 --- a/prelude/java/gwt_binary.bzl +++ b/prelude/java/gwt_binary.bzl @@ -14,7 +14,7 @@ load( "get_all_java_packaging_deps", ) -GWT_COMPILER_CLASS = "com.google.gwt.dev.Compiler" +_GWT_COMPILER_CLASS = 
"com.google.gwt.dev.Compiler" def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: expect(ctx.attrs.local_workers > 0, "local workers must be greater than zero") @@ -35,7 +35,7 @@ def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx.attrs.vm_args, "-classpath", cmd_args(module_deps_classpath + deps_classpath, delimiter = get_path_separator_for_exec_os(ctx)), - GWT_COMPILER_CLASS, + _GWT_COMPILER_CLASS, "-war", output.as_output(), "-style", @@ -57,6 +57,8 @@ def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions.run(gwt_args, category = "gwt_binary") + sub_targets = {"deploy": [DefaultInfo(default_output = deploy_output)]} + return [ - DefaultInfo(default_output = output), + DefaultInfo(default_output = output, sub_targets = sub_targets), ] diff --git a/prelude/java/java.bzl b/prelude/java/java.bzl index 9a35285df1..87602f3c1b 100644 --- a/prelude/java/java.bzl +++ b/prelude/java/java.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//android:configuration.bzl", "is_building_android_binary_attr") load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range") @@ -64,8 +65,9 @@ extra_attributes = { }, "java_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.option(attrs.int(), default = dex_min_sdk_version()), "_dex_toolchain": toolchains_common.dex(), @@ -78,8 +80,10 @@ extra_attributes = { }, "java_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + "java_agents": attrs.list(attrs.source(), default = []), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_exec_os_type": buck.exec_os_type_arg(), diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index 96ea62f4af..34aada110f 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -22,7 +22,7 @@ load( "get_java_packaging_info", ) -def _generate_script(generate_wrapper: bool, native_libs: dict[str, SharedLibrary]) -> bool: +def _generate_script(generate_wrapper: bool, native_libs: list[SharedLibrary]) -> bool: # if `generate_wrapper` is set and no native libs then it should be a wrapper script as result, # otherwise fat jar will be generated (inner jar or script will be included inside a final fat jar) return generate_wrapper and len(native_libs) == 0 @@ -31,7 +31,7 @@ def _create_fat_jar( ctx: 
AnalysisContext, java_toolchain: JavaToolchainInfo, jars: cmd_args, - native_libs: dict[str, SharedLibrary], + native_libs: list[SharedLibrary], do_not_create_inner_jar: bool, generate_wrapper: bool) -> list[Artifact]: extension = "sh" if _generate_script(generate_wrapper, native_libs) else "jar" @@ -55,7 +55,7 @@ def _create_fat_jar( ) args += [ "--native_libs_file", - ctx.actions.write("native_libs", [cmd_args([so_name, native_lib.lib.output], delimiter = " ") for so_name, native_lib in native_libs.items()]), + ctx.actions.write("native_libs", [cmd_args([native_lib.soname.ensure_str(), native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), ] if do_not_create_inner_jar: args += [ @@ -79,9 +79,6 @@ def _create_fat_jar( main_class = ctx.attrs.main_class if main_class: - if do_not_create_inner_jar and native_libs: - fail("For performance reasons, java binaries with a main class and native libs should always generate an inner jar.\ - The reason for having inner.jar is so that we don't have to compress the native libraries, which is slow at compilation time and also at runtime (when decompressing).") args += ["--main_class", main_class] manifest_file = ctx.attrs.manifest_file @@ -109,8 +106,10 @@ def _create_fat_jar( ] outputs.append(classpath_args_output) - fat_jar_cmd = cmd_args(args) - fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs.values()]) + fat_jar_cmd = cmd_args( + args, + hidden = [jars] + [native_lib.lib.output for native_lib in native_libs], + ) ctx.actions.run( fat_jar_cmd, @@ -188,16 +187,16 @@ def java_binary_impl(ctx: AnalysisContext) -> list[Provider]: if need_to_generate_wrapper: classpath_file = outputs[1] - run_cmd.hidden( + run_cmd.add(cmd_args(hidden = [ java_toolchain.java[RunInfo], classpath_file, packaging_jar_args, - ) + ])) other_outputs = [classpath_file] + [packaging_jar_args] + _get_java_tool_artifacts(java_toolchain) sub_targets = get_classpath_subtarget(ctx.actions, packaging_info) - class_to_src_map, _ = get_class_to_source_map_info( + class_to_src_map, _, _ = get_class_to_source_map_info( ctx, outputs = None, deps = ctx.attrs.deps, diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index 45525a411a..6fd5aeb855 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -6,6 +6,7 @@ # of this source tree. 
load("@prelude//:paths.bzl", "paths") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load( "@prelude//java:java_providers.bzl", @@ -31,6 +32,7 @@ load( ) load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//java/utils:java_utils.bzl", "declare_prefixed_name", "derive_javac", "get_abi_generation_mode", "get_class_to_source_map_info", "get_default_info", "get_java_version_attributes", "to_java_version") +load("@prelude//jvm:cd_jar_creator_util.bzl", "postprocess_jar") load("@prelude//jvm:nullsafe.bzl", "get_nullsafe_info") load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo") load("@prelude//utils:expect.bzl", "expect") @@ -41,9 +43,8 @@ _SUPPORTED_ARCHIVE_SUFFIXES = [".src.zip", "-sources.jar"] def _process_classpath( actions: AnalysisActions, classpath_args: cmd_args, - cmd: cmd_args, args_file_name: str, - option_name: str): + option_name: str) -> cmd_args: # write joined classpath string into args file classpath_args_file, _ = actions.write( args_file_name, @@ -51,13 +52,15 @@ def _process_classpath( allow_args = True, ) - # mark classpath artifacts as input - cmd.hidden(classpath_args) - - # add classpath args file to cmd - cmd.add(option_name, classpath_args_file) + return cmd_args( + option_name, + # add classpath args file to cmd + classpath_args_file, + # mark classpath artifacts as input + hidden = classpath_args, + ) -def classpath_args(ctx: AnalysisContext, args): +def _classpath_args(ctx: AnalysisContext, args): return cmd_args(args, delimiter = get_path_separator_for_exec_os(ctx)) def _process_plugins( @@ -65,8 +68,8 @@ def _process_plugins( actions_identifier: [str, None], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], - javac_args: cmd_args, - cmd: cmd_args): + javac_args: cmd_args) -> cmd_args: + cmd = cmd_args() processors_classpath_tsets = [] # Process Annotation processors @@ -88,8 +91,7 @@ def _process_plugins( # Process Javac Plugins if plugin_params: - plugin = plugin_params.processors[0] - args = plugin_params.args.get(plugin, cmd_args()) + plugin, args = plugin_params.processors[0] # Produces "-Xplugin:PluginName arg1 arg2 arg3", as a single argument plugin_and_args = cmd_args(plugin) @@ -109,14 +111,15 @@ def _process_plugins( processors_classpath_tset = None if processors_classpath_tset: - processors_classpath = classpath_args(ctx, processors_classpath_tset.project_as_args("full_jar_args")) - _process_classpath( + processors_classpath = _classpath_args(ctx, processors_classpath_tset.project_as_args("full_jar_args")) + cmd.add(_process_classpath( ctx.actions, processors_classpath, - cmd, declare_prefixed_name("plugin_cp_args", actions_identifier), "--javac_processors_classpath_file", - ) + )) + + return cmd def _build_classpath(actions: AnalysisActions, deps: list[Dependency], additional_classpath_entries: list[Artifact], classpath_args_projection: str) -> [cmd_args, None]: compiling_deps_tset = derive_compiling_deps(actions, None, deps) @@ -132,12 +135,11 @@ def _build_classpath(actions: AnalysisActions, deps: list[Dependency], additiona def _build_bootclasspath(bootclasspath_entries: list[Artifact], source_level: int, java_toolchain: JavaToolchainInfo) -> list[Artifact]: bootclasspath_list = [] - if source_level in [7, 8]: + if source_level in [8]: if bootclasspath_entries: bootclasspath_list = bootclasspath_entries - elif source_level == 7: - 
bootclasspath_list = java_toolchain.bootclasspath_7 elif source_level == 8: + expect(java_toolchain.bootclasspath_8, "Must specify bootclasspath for source level 8") bootclasspath_list = java_toolchain.bootclasspath_8 return bootclasspath_list @@ -155,8 +157,8 @@ def _append_javac_params( extra_arguments: cmd_args, additional_classpath_entries: list[Artifact], bootclasspath_entries: list[Artifact], - cmd: cmd_args, - generated_sources_dir: Artifact): + generated_sources_dir: Artifact) -> cmd_args: + cmd = cmd_args() javac_args = cmd_args( "-encoding", "utf-8", @@ -168,13 +170,12 @@ def _append_javac_params( compiling_classpath = _build_classpath(ctx.actions, deps, additional_classpath_entries, "args_for_compiling") if compiling_classpath: - _process_classpath( + cmd.add(_process_classpath( ctx.actions, - classpath_args(ctx, compiling_classpath), - cmd, + _classpath_args(ctx, compiling_classpath), declare_prefixed_name("classpath_args", actions_identifier), "--javac_classpath_file", - ) + )) else: javac_args.add("-classpath ''") @@ -185,22 +186,20 @@ def _append_javac_params( bootclasspath_list = _build_bootclasspath(bootclasspath_entries, source_level, java_toolchain) if bootclasspath_list: - _process_classpath( + cmd.add(_process_classpath( ctx.actions, - classpath_args(ctx, bootclasspath_list), - cmd, + _classpath_args(ctx, bootclasspath_list), declare_prefixed_name("bootclasspath_args", actions_identifier), "--javac_bootclasspath_file", - ) + )) - _process_plugins( + cmd.add(_process_plugins( ctx, actions_identifier, annotation_processor_properties, javac_plugin_params, javac_args, - cmd, - ) + )) cmd.add("--generated_sources_dir", generated_sources_dir.as_output()) @@ -212,20 +211,22 @@ def _append_javac_params( javac_args, allow_args = True, ) - cmd.hidden(javac_args) + cmd.add(cmd_args(hidden = javac_args)) # mark plain srcs artifacts as input - cmd.hidden(plain_sources) + cmd.add(cmd_args(hidden = plain_sources)) cmd.add("--javac_args_file", args_file) if zipped_sources: cmd.add("--zipped_sources_file", ctx.actions.write(declare_prefixed_name("zipped_source_args", actions_identifier), zipped_sources)) - cmd.hidden(zipped_sources) + cmd.add(cmd_args(hidden = zipped_sources)) if remove_classes: cmd.add("--remove_classes", ctx.actions.write(declare_prefixed_name("remove_classes_args", actions_identifier), remove_classes)) + return cmd + def split_on_archives_and_plain_files( srcs: list[Artifact], plain_file_extensions: list[str]) -> (list[Artifact], list[Artifact]): @@ -275,13 +276,13 @@ def compile_to_jar( srcs: list[Artifact], *, abi_generation_mode: [AbiGenerationMode, None] = None, - output: [Artifact, None] = None, + output: Artifact | None = None, actions_identifier: [str, None] = None, javac_tool: [typing.Any, None] = None, resources: [list[Artifact], None] = None, resources_root: [str, None] = None, remove_classes: [list[str], None] = None, - manifest_file: [Artifact, None] = None, + manifest_file: Artifact | None = None, annotation_processor_properties: [AnnotationProcessorProperties, None] = None, plugin_params: [PluginParams, None] = None, source_level: [int, None] = None, @@ -291,9 +292,10 @@ def compile_to_jar( source_only_abi_deps: [list[Dependency], None] = None, extra_arguments: [cmd_args, None] = None, additional_classpath_entries: [list[Artifact], None] = None, - additional_compiled_srcs: [Artifact, None] = None, + additional_compiled_srcs: Artifact | None = None, bootclasspath_entries: [list[Artifact], None] = None, - is_creating_subtarget: bool = False) -> 
JavaCompileOutputs: + is_creating_subtarget: bool = False, + debug_port: [int, None] = None) -> JavaCompileOutputs: if not additional_classpath_entries: additional_classpath_entries = [] if not bootclasspath_entries: @@ -347,6 +349,7 @@ def compile_to_jar( bootclasspath_entries, is_building_android_binary, is_creating_subtarget, + debug_port, ) def _create_jar_artifact( @@ -355,13 +358,13 @@ def _create_jar_artifact( abi_generation_mode: [AbiGenerationMode, None], java_toolchain: JavaToolchainInfo, label: Label, - output: [Artifact, None], + output: Artifact | None, javac_tool: [typing.Any, None], srcs: list[Artifact], remove_classes: list[str], resources: list[Artifact], resources_root: [str, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], source_level: int, @@ -371,10 +374,11 @@ def _create_jar_artifact( _source_only_abi_deps: list[Dependency], extra_arguments: cmd_args, additional_classpath_entries: list[Artifact], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], _is_building_android_binary: bool, - _is_creating_subtarget: bool = False) -> JavaCompileOutputs: + _is_creating_subtarget: bool = False, + _debug_port: [int, None] = None) -> JavaCompileOutputs: """ Creates jar artifact. @@ -412,7 +416,7 @@ def _create_jar_artifact( generated_sources_dir = None if not skip_javac: generated_sources_dir = ctx.actions.declare_output(declare_prefixed_name("generated_sources", actions_identifier), dir = True) - _append_javac_params( + compile_and_package_cmd.add(_append_javac_params( ctx, actions_identifier, java_toolchain, @@ -426,16 +430,19 @@ def _create_jar_artifact( extra_arguments, additional_classpath_entries, bootclasspath_entries, - compile_and_package_cmd, generated_sources_dir, - ) + )) ctx.actions.run(compile_and_package_cmd, category = "javac_and_jar", identifier = actions_identifier) abi = None if (not srcs and not additional_compiled_srcs) or abi_generation_mode == AbiGenerationMode("none") or java_toolchain.is_bootstrap_toolchain else create_abi(ctx.actions, java_toolchain.class_abi_generator, jar_out) + has_postprocessor = hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor + final_jar = postprocess_jar(ctx.actions, ctx.attrs.jar_postprocessor[RunInfo], jar_out, actions_identifier) if has_postprocessor else jar_out + return make_compile_outputs( - full_library = jar_out, + full_library = final_jar, + preprocessed_library = jar_out, class_abi = abi, required_for_source_only_abi = required_for_source_only_abi, annotation_processor_output = generated_sources_dir, @@ -460,6 +467,10 @@ def _check_exported_deps(exported_deps: list[Dependency], attr_name: str): "Exported deps are meant to be forwarded onto the classpath for dependents, so only " + "make sense for a target that emits Java bytecode, {} in {} does not.".format(exported_dep, attr_name), ) + expect( + not exported_dep[JavaLibraryInfo].may_not_be_exported, + "{} has 'may_not_be_exported' label and should not be present in {}.".format(exported_dep.label.raw_target(), attr_name), + ) # TODO(T145137403) remove need for this def _skip_java_library_dep_checks(ctx: AnalysisContext) -> bool: @@ -499,7 +510,11 @@ def java_library_impl(ctx: AnalysisContext) -> list[Provider]: _check_dep_types(ctx.attrs.exported_provided_deps) _check_dep_types(ctx.attrs.runtime_deps) - java_providers = build_java_library(ctx, 
ctx.attrs.srcs) + java_providers = build_java_library( + ctx = ctx, + srcs = ctx.attrs.srcs, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) return to_list(java_providers) + [android_packageable_info] @@ -509,10 +524,11 @@ def build_java_library( run_annotation_processors = True, additional_classpath_entries: list[Artifact] = [], bootclasspath_entries: list[Artifact] = [], - additional_compiled_srcs: [Artifact, None] = None, + additional_compiled_srcs: Artifact | None = None, generated_sources: list[Artifact] = [], override_abi_generation_mode: [AbiGenerationMode, None] = None, - extra_sub_targets: dict = {}) -> JavaProviders: + extra_sub_targets: dict = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> JavaProviders: expect( not getattr(ctx.attrs, "_build_only_native_code", False), "Shouldn't call build_java_library if we're only building native code!", @@ -561,6 +577,7 @@ def build_java_library( "additional_compiled_srcs": additional_compiled_srcs, "annotation_processor_properties": annotation_processor_properties, "bootclasspath_entries": bootclasspath_entries, + "debug_port": getattr(ctx.attrs, "debug_port", None), "deps": first_order_deps, "javac_tool": derive_javac(ctx.attrs.javac) if ctx.attrs.javac else None, "manifest_file": manifest_file, @@ -574,10 +591,17 @@ def build_java_library( "target_level": target_level, } + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. + extra_arguments = cmd_args( + ctx.attrs.extra_arguments, + hidden = validation_deps_outputs or [], + ) + outputs = compile_to_jar( ctx, plugin_params = plugin_params, - extra_arguments = cmd_args(ctx.attrs.extra_arguments), + extra_arguments = extra_arguments, **common_compile_kwargs ) @@ -623,7 +647,8 @@ def build_java_library( ctx.actions.write("gwt_entries.txt", entries), "--output", gwt_output.as_output(), - ).hidden(entries) + hidden = entries, + ) ctx.actions.run(gwt_cmd_args, category = "gwt_module") @@ -636,6 +661,14 @@ def build_java_library( DefaultInfo(default_output = all_generated_sources[0]), ]} + class_to_src_map, sources_jar, class_to_src_map_sub_targets = get_class_to_source_map_info( + ctx, + outputs = outputs, + deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps, + generate_sources_jar = True, + ) + extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets + java_library_info, java_packaging_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, intellij_info = create_java_library_providers( ctx, library_output = outputs.classpath_entry if outputs else None, @@ -647,16 +680,11 @@ def build_java_library( needs_desugar = source_level > 7 or target_level > 7, generated_sources = all_generated_sources, has_srcs = has_srcs, + sources_jar = sources_jar, gwt_module = gwt_output, + preprocessed_library = outputs.preprocessed_library if outputs else None, ) - class_to_src_map, class_to_src_map_sub_targets = get_class_to_source_map_info( - ctx, - outputs = outputs, - deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps, - ) - extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets - default_info = get_default_info( ctx.actions, java_toolchain, diff --git a/prelude/java/java_providers.bzl b/prelude/java/java_providers.bzl index 2d2c6e6ac9..76d3fce053 100644 --- a/prelude/java/java_providers.bzl +++ b/prelude/java/java_providers.bzl @@ -90,7 +90,7 @@ JavaClasspathEntry = record( abi = field(Artifact), # 
abi_as_dir is the abi .jar unzipped into a directory. If available, it is used to provide # .class level granularity for javacd and kotlincd dep-files. - abi_as_dir = field([Artifact, None]), + abi_as_dir = field(Artifact | None), required_for_source_only_abi = field(bool), ) @@ -125,15 +125,16 @@ JavaCompilingDepsTSet = transitive_set( JavaPackagingDep = record( label = Label, - jar = [Artifact, None], + jar = Artifact | None, dex = [DexLibraryInfo, None], - gwt_module = [Artifact, None], + gwt_module = Artifact | None, is_prebuilt_jar = bool, - proguard_config = [Artifact, None], + proguard_config = Artifact | None, # An output that is used solely by the system to have an artifact bound to the target (that the core can then use to find # the right target from the given artifact). output_for_classpath_macro = Artifact, + sources_jar = Artifact | None, ) def _full_jar_args(dep: JavaPackagingDep): @@ -168,6 +169,9 @@ JavaLibraryInfo = provider( # An output of the library. If present then already included into `compiling_deps` field. "library_output": provider_field(typing.Any, default = None), # ["JavaClasspathEntry", None] + # Shows if the library can be exported or not + "may_not_be_exported": provider_field(typing.Any, default = None), + # An output that is used solely by the system to have an artifact bound to the target (that the core can then use to find # the right target from the given artifact). "output_for_classpath_macro": provider_field(typing.Any, default = None), # "artifact" @@ -175,7 +179,6 @@ JavaLibraryInfo = provider( ) JavaLibraryIntellijInfo = provider( - # @unsorted-dict-items doc = "Information about a java library that is required for Intellij project generation", fields = { # Directory containing external annotation jars @@ -183,6 +186,10 @@ JavaLibraryIntellijInfo = provider( # All the artifacts that were used in order to compile this library "compiling_classpath": provider_field(typing.Any, default = None), # ["artifact"] "generated_sources": provider_field(typing.Any, default = None), # ["artifact"] + "lint_jar": provider_field(typing.Any, default = None), # ["artifact"] + # If this library has a jar_postprocessor, this is the jar prior to post-processing. + # Otherwise, it is the same as library_output in JavaLibraryInfo. + "preprocessed_library": provider_field(typing.Any, default = None), # ["artifact", None] }, ) @@ -204,11 +211,12 @@ KeystoreInfo = provider( JavaCompileOutputs = record( full_library = Artifact, - class_abi = [Artifact, None], - source_abi = [Artifact, None], - source_only_abi = [Artifact, None], + class_abi = Artifact | None, + source_abi = Artifact | None, + source_only_abi = Artifact | None, classpath_entry = JavaClasspathEntry, - annotation_processor_output = [Artifact, None], + annotation_processor_output = Artifact | None, + preprocessed_library = Artifact, ) JavaProviders = record( @@ -242,13 +250,14 @@ def to_list(java_providers: JavaProviders) -> list[Provider]: # specific artifact to be used as the abi for the JavaClasspathEntry. 
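# For example (illustrative), a caller that produces no separate ABI jar can pass
# just the jars, and the full library then also serves as the classpath abi:
#
#   make_compile_outputs(full_library = jar, preprocessed_library = jar)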
def make_compile_outputs( full_library: Artifact, - class_abi: [Artifact, None] = None, - source_abi: [Artifact, None] = None, - source_only_abi: [Artifact, None] = None, - classpath_abi: [Artifact, None] = None, - classpath_abi_dir: [Artifact, None] = None, + preprocessed_library: Artifact, + class_abi: Artifact | None = None, + source_abi: Artifact | None = None, + source_only_abi: Artifact | None = None, + classpath_abi: Artifact | None = None, + classpath_abi_dir: Artifact | None = None, required_for_source_only_abi: bool = False, - annotation_processor_output: [Artifact, None] = None) -> JavaCompileOutputs: + annotation_processor_output: Artifact | None = None) -> JavaCompileOutputs: expect(classpath_abi != None or classpath_abi_dir == None, "A classpath_abi_dir should only be provided if a classpath_abi is provided!") return JavaCompileOutputs( full_library = full_library, @@ -262,6 +271,7 @@ def make_compile_outputs( required_for_source_only_abi = required_for_source_only_abi, ), annotation_processor_output = annotation_processor_output, + preprocessed_library = preprocessed_library, ) def create_abi(actions: AnalysisActions, class_abi_generator: Dependency, library: Artifact) -> Artifact: @@ -303,15 +313,16 @@ def derive_compiling_deps( def create_java_packaging_dep( ctx: AnalysisContext, - library_jar: [Artifact, None] = None, - output_for_classpath_macro: [Artifact, None] = None, + library_jar: Artifact | None = None, + output_for_classpath_macro: Artifact | None = None, needs_desugar: bool = False, desugar_deps: list[Artifact] = [], is_prebuilt_jar: bool = False, has_srcs: bool = True, + sources_jar: Artifact | None = None, dex_weight_factor: int = 1, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> JavaPackagingDep: + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None) -> JavaPackagingDep: dex_toolchain = getattr(ctx.attrs, "_dex_toolchain", None) if library_jar != None and has_srcs and dex_toolchain != None and ctx.attrs._dex_toolchain[DexToolchainInfo].d8_command != None: dex = get_dex_produced_from_java_library( @@ -335,6 +346,7 @@ def create_java_packaging_dep( is_prebuilt_jar = is_prebuilt_jar, proguard_config = proguard_config or getattr(ctx.attrs, "proguard_config", None), output_for_classpath_macro = output_for_classpath_macro or library_jar, + sources_jar = sources_jar, ) def get_all_java_packaging_deps(ctx: AnalysisContext, deps: list[Dependency]) -> list[JavaPackagingDep]: @@ -396,8 +408,9 @@ def _create_non_template_providers( desugar_classpath: list[Artifact] = [], is_prebuilt_jar: bool = False, has_srcs: bool = True, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph): + sources_jar: Artifact | None = None, + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph): """Creates java library providers of type `JavaLibraryInfo` and `JavaPackagingInfo`. 
Args: @@ -419,6 +432,7 @@ def _create_non_template_providers( desugar_classpath, is_prebuilt_jar, has_srcs, + sources_jar, proguard_config = proguard_config, gwt_module = gwt_module, ) @@ -434,6 +448,7 @@ def _create_non_template_providers( compiling_deps = derive_compiling_deps(ctx.actions, library_output, exported_deps + exported_provided_deps), library_output = library_output, output_for_classpath_macro = output_for_classpath_macro, + may_not_be_exported = "may_not_be_exported" in (ctx.attrs.labels or []), ), java_packaging_info, shared_library_info, @@ -459,10 +474,13 @@ def create_java_library_providers( needs_desugar: bool = False, is_prebuilt_jar: bool = False, has_srcs: bool = True, + sources_jar: Artifact | None = None, generated_sources: list[Artifact] = [], - annotation_jars_dir: [Artifact, None] = None, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph, TemplatePlaceholderInfo, JavaLibraryIntellijInfo): + annotation_jars_dir: Artifact | None = None, + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None, + lint_jar: Artifact | None = None, + preprocessed_library: Artifact | None = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph, TemplatePlaceholderInfo, JavaLibraryIntellijInfo): first_order_classpath_deps = filter(None, [x.get(JavaLibraryInfo) for x in declared_deps + exported_deps + runtime_deps]) first_order_classpath_libs = [dep.output_for_classpath_macro for dep in first_order_classpath_deps] @@ -481,6 +499,7 @@ def create_java_library_providers( desugar_classpath = desugar_classpath, is_prebuilt_jar = is_prebuilt_jar, has_srcs = has_srcs, + sources_jar = sources_jar, proguard_config = proguard_config, gwt_module = gwt_module, ) @@ -492,6 +511,8 @@ def create_java_library_providers( compiling_classpath = compiling_classpath, generated_sources = generated_sources, annotation_jars_dir = annotation_jars_dir, + lint_jar = lint_jar, + preprocessed_library = preprocessed_library, ) return (library_info, packaging_info, shared_library_info, cxx_resource_info, linkable_graph, template_info, intellij_info) diff --git a/prelude/java/java_resources.bzl b/prelude/java/java_resources.bzl index 4916d58918..fbab61e4cb 100644 --- a/prelude/java/java_resources.bzl +++ b/prelude/java/java_resources.bzl @@ -57,3 +57,18 @@ def get_resources_map( resource_name = get_src_package(java_toolchain.src_root_prefixes, java_toolchain.src_root_elements, full_resource) resources_to_copy[resource_name] = resource return resources_to_copy + +def parse_src_roots(src_roots: list[str]) -> (list[str], list[str]): + prefixes = [] + elements = [] + for src_root in src_roots: + if src_root.startswith("/"): + if not src_root.endswith("/"): + fail("Elements in java.src_roots config that begin with a / must end in one too, but {} does not".format(src_root)) + prefixes.append(src_root[1:]) + elif "/" in src_root: + fail("No / is permitted in java.src_roots config elements, but {} has one".format(src_root)) + else: + elements.append(src_root) + + return elements, prefixes diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 0680023618..66e01fbb4f 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -14,11 +14,18 @@ load("@prelude//java:java_library.bzl", "build_java_library") load("@prelude//java:java_providers.bzl", "JavaLibraryInfo", "JavaPackagingInfo", 
"get_all_java_packaging_deps_tset") load("@prelude//java:java_toolchain.bzl", "JavaTestToolchainInfo", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") -load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) load( "@prelude//tests:re_utils.bzl", - "get_re_executor_from_props", + "get_re_executors_from_props", ) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:expect.bzl", "expect") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") @@ -46,10 +53,14 @@ def build_junit_test( extra_cmds: list = [], extra_classpath_entries: list[Artifact] = []) -> ExternalRunnerTestInfo: java_test_toolchain = ctx.attrs._java_test_toolchain[JavaTestToolchainInfo] + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] - java = ctx.attrs.java[RunInfo] if ctx.attrs.java else ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests + java = ctx.attrs.java[RunInfo] if ctx.attrs.java else java_toolchain.java_for_tests cmd = [java] + extra_cmds + ctx.attrs.vm_args + ["-XX:-MaxFDLimit"] + if java_test_toolchain.jvm_args: + cmd.extend(java_test_toolchain.jvm_args) + classpath = [] if java_test_toolchain.use_java_custom_class_loader: @@ -57,6 +68,8 @@ def build_junit_test( cmd.extend(java_test_toolchain.java_custom_class_loader_vm_args) classpath.append(java_test_toolchain.java_custom_class_loader_library_jar) + cmd.append(cmd_args(ctx.attrs.java_agents, format = "-javaagent:{}")) + classpath.extend( [java_test_toolchain.test_runner_library_jar] + [ @@ -71,11 +84,11 @@ def build_junit_test( labels = ctx.attrs.labels or [] - # Setup a RE executor based on the `remote_execution` param. - re_executor = get_re_executor_from_props(ctx) + # Setup RE executors based on the `remote_execution` param. + re_executor, executor_overrides = get_re_executors_from_props(ctx) # We implicitly make the target run from the project root if remote - # excution options were specified. + # execution options were specified. run_from_cell_root = "buck2_run_from_cell_root" in labels uses_java8 = "run_with_java8" in labels @@ -93,14 +106,17 @@ def build_junit_test( cmd.extend(["-classpath", cmd_args(java_test_toolchain.test_runner_library_jar)]) classpath_args.add(cmd_args(classpath)) classpath_args_file = ctx.actions.write("classpath_args_file", classpath_args) - cmd.append(cmd_args(classpath_args_file, format = "-Dbuck.classpath_file={}").hidden(classpath_args)) + cmd.append(cmd_args( + classpath_args_file, + format = "-Dbuck.classpath_file={}", + hidden = classpath_args, + )) else: # Java 9+ supports argfiles, so just write the classpath to an argsfile. "FileClassPathRunner" will delegate # immediately to the junit test runner. 
classpath_args.add("-classpath") classpath_args.add(cmd_args(classpath, delimiter = get_path_separator_for_exec_os(ctx))) - classpath_args_file = ctx.actions.write("classpath_args_file", classpath_args) - cmd.append(cmd_args(classpath_args_file, format = "@{}").hidden(classpath_args)) + cmd.append(at_argfile(actions = ctx.actions, name = "classpath_args_file", args = classpath_args)) if (ctx.attrs.test_type == "junit5"): cmd.extend(java_test_toolchain.junit5_test_runner_main_class_args) @@ -110,21 +126,23 @@ def build_junit_test( cmd.extend(java_test_toolchain.junit_test_runner_main_class_args) if ctx.attrs.test_case_timeout_ms: - cmd.extend(["--default_test_timeout", str(ctx.attrs.test_case_timeout_ms)]) - - expect(tests_java_library_info.library_output != None, "Built test library has no output, likely due to missing srcs") - - class_names = ctx.actions.declare_output("class_names") - list_class_names_cmd = cmd_args([ - java_test_toolchain.list_class_names[RunInfo], - "--jar", - tests_java_library_info.library_output.full_library, - "--sources", - ctx.actions.write("sources.txt", ctx.attrs.srcs), - "--output", - class_names.as_output(), - ]).hidden(ctx.attrs.srcs) - ctx.actions.run(list_class_names_cmd, category = "list_class_names") + cmd.extend(["--default-test-timeout", str(ctx.attrs.test_case_timeout_ms)]) + + if ctx.attrs.test_class_names_file: + class_names = ctx.attrs.test_class_names_file + else: + expect(tests_java_library_info.library_output != None, "Built test library has no output, likely due to missing srcs") + class_names = ctx.actions.declare_output("class_names") + list_class_names_cmd = cmd_args([ + java_test_toolchain.list_class_names[RunInfo], + "--jar", + tests_java_library_info.library_output.full_library, + "--sources", + ctx.actions.write("sources.txt", ctx.attrs.srcs), + "--output", + class_names.as_output(), + ], hidden = ctx.attrs.srcs) + ctx.actions.run(list_class_names_cmd, category = "list_class_names") cmd.extend(["--test-class-names-file", class_names]) @@ -139,8 +157,8 @@ def build_junit_test( if tests_class_to_source_info != None: transitive_class_to_src_map = merge_class_to_source_map_from_jar( actions = ctx.actions, - name = ctx.attrs.name + ".transitive_class_to_src.json", - java_test_toolchain = java_test_toolchain, + name = ctx.label.name + ".transitive_class_to_src.json", + java_toolchain = java_toolchain, relative_to = ctx.label.cell_root if run_from_cell_root else None, deps = [tests_class_to_source_info], ) @@ -157,6 +175,7 @@ def build_junit_test( run_from_project_root = not run_from_cell_root, use_project_relative_paths = not run_from_cell_root, default_executor = re_executor, + executor_overrides = executor_overrides, ) return test_info @@ -174,8 +193,10 @@ def _get_native_libs_env(ctx: AnalysisContext) -> dict: deps = shared_library_infos, ) - native_linkables = traverse_shared_library_info(shared_library_info) - cxx_library_symlink_tree_dict = {so_name: shared_lib.lib.output for so_name, shared_lib in native_linkables.items()} - cxx_library_symlink_tree = ctx.actions.symlinked_dir("cxx_library_symlink_tree", cxx_library_symlink_tree_dict) + cxx_library_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "cxx_library_symlink_tree", + shared_libs = traverse_shared_library_info(shared_library_info), + ) return {"BUCK_LD_SYMLINK_TREE": cxx_library_symlink_tree} diff --git a/prelude/java/java_toolchain.bzl b/prelude/java/java_toolchain.bzl index dbfd1ccc14..b27227ac60 100644 --- a/prelude/java/java_toolchain.bzl +++ 
b/prelude/java/java_toolchain.bzl @@ -33,10 +33,13 @@ JavaToolchainInfo = provider( "fat_jar_main_class_lib": provider_field(typing.Any, default = None), "gen_class_to_source_map": provider_field(typing.Any, default = None), "gen_class_to_source_map_debuginfo": provider_field(typing.Any, default = None), # optional + "gen_class_to_source_map_include_sourceless_compiled_packages": provider_field(typing.Any, default = None), + "graalvm_java": provider_field(typing.Any, default = None), "is_bootstrap_toolchain": provider_field(typing.Any, default = None), "jar": provider_field(typing.Any, default = None), "jar_builder": provider_field(typing.Any, default = None), "java": provider_field(typing.Any, default = None), + "java_error_handler": provider_field(typing.Any, default = None), "java_for_tests": provider_field(typing.Any, default = None), "javac": provider_field(typing.Any, default = None), "javac_protocol": provider_field(typing.Any, default = None), @@ -46,6 +49,7 @@ JavaToolchainInfo = provider( "javacd_jvm_args_target": provider_field(typing.Any, default = None), "javacd_main_class": provider_field(typing.Any, default = None), "javacd_worker": provider_field(typing.Any, default = None), + "merge_class_to_source_maps": provider_field(typing.Any, default = None), "nullsafe": provider_field(typing.Any, default = None), "nullsafe_extra_args": provider_field(typing.Any, default = None), "nullsafe_signatures": provider_field(typing.Any, default = None), @@ -53,6 +57,7 @@ JavaToolchainInfo = provider( "src_root_elements": provider_field(typing.Any, default = None), "src_root_prefixes": provider_field(typing.Any, default = None), "target_level": provider_field(typing.Any, default = None), + "use_graalvm_java_for_javacd": provider_field(typing.Any, default = None), "zip_scrubber": provider_field(typing.Any, default = None), }, ) @@ -66,8 +71,8 @@ JavaTestToolchainInfo = provider( "java_custom_class_loader_vm_args": provider_field(typing.Any, default = None), "junit5_test_runner_main_class_args": provider_field(typing.Any, default = None), "junit_test_runner_main_class_args": provider_field(typing.Any, default = None), + "jvm_args": provider_field(typing.Any, default = None), "list_class_names": provider_field(typing.Any, default = None), - "merge_class_to_source_maps": provider_field(typing.Any, default = None), "test_runner_library_jar": provider_field(typing.Any, default = None), "testng_test_runner_main_class_args": provider_field(typing.Any, default = None), "use_java_custom_class_loader": provider_field(typing.Any, default = None), diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index dd5efb7c1e..f14ed66052 100644 --- a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -31,7 +31,6 @@ load( "OutputPaths", "TargetType", "add_java_7_8_bootclasspath", - "add_output_paths_to_cmd_args", "base_qualified_name", "declare_prefixed_output", "define_output_paths", @@ -40,13 +39,14 @@ load( "generate_abi_jars", "get_abi_generation_mode", "get_compiling_deps_tset", + "output_paths_to_hidden_cmd_args", "prepare_cd_exe", "prepare_final_jar", "setup_dep_files", ) load("@prelude//utils:expect.bzl", "expect") -base_command_params = struct( +_base_command_params = struct( withDownwardApi = True, spoolMode = "DIRECT_TO_JAR", ) @@ -57,13 +57,13 @@ def create_jar_artifact_javacd( abi_generation_mode: [AbiGenerationMode, None], java_toolchain: JavaToolchainInfo, label, - output: [Artifact, None], + output: Artifact | None, javac_tool: 
[typing.Any, None], srcs: list[Artifact], remove_classes: list[str], resources: list[Artifact], resources_root: [str, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], source_level: int, @@ -73,10 +73,11 @@ def create_jar_artifact_javacd( source_only_abi_deps: list[Dependency], extra_arguments: cmd_args, additional_classpath_entries: list[Artifact], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], is_building_android_binary: bool, - is_creating_subtarget: bool = False) -> JavaCompileOutputs: + is_creating_subtarget: bool = False, + debug_port: [int, None] = None) -> JavaCompileOutputs: if javac_tool != None: # TODO(cjhopman): We can probably handle this better. I think we should be able to just use the non-javacd path. fail("cannot set explicit javac on library when using javacd") @@ -84,9 +85,6 @@ def create_jar_artifact_javacd( actions = ctx.actions resources_map = get_resources_map(java_toolchain, label.package, resources, resources_root) - # TODO(cjhopman): Handle manifest file. - _ = manifest_file # buildifier: disable=unused-variable - bootclasspath_entries = add_java_7_8_bootclasspath(target_level, bootclasspath_entries, java_toolchain) abi_generation_mode = get_abi_generation_mode(abi_generation_mode, java_toolchain, srcs, annotation_processor_properties) @@ -132,13 +130,14 @@ def create_jar_artifact_javacd( resources_map, annotation_processor_properties, plugin_params, + manifest_file, extra_arguments, source_only_abi_compiling_deps = [], track_class_usage = track_class_usage, ) return struct( - baseCommandParams = base_command_params, + _baseCommandParams = _base_command_params, libraryJarCommand = struct( baseJarCommand = base_jar_command, libraryJarBaseCommand = struct( @@ -171,11 +170,12 @@ def create_jar_artifact_javacd( resources_map, annotation_processor_properties, plugin_params, + manifest_file, extra_arguments, source_only_abi_compiling_deps = source_only_abi_compiling_deps, track_class_usage = track_class_usage, ) - abi_params = encode_jar_params(remove_classes, output_paths) + abi_params = encode_jar_params(remove_classes, output_paths, manifest_file) abi_command = struct( baseJarCommand = base_jar_command, @@ -183,7 +183,7 @@ def create_jar_artifact_javacd( ) return struct( - baseCommandParams = base_command_params, + _baseCommandParams = _base_command_params, abiJarCommand = abi_command, ) @@ -195,9 +195,9 @@ def create_jar_artifact_javacd( qualified_name: str, output_paths: OutputPaths, classpath_jars_tag: ArtifactTag, - abi_dir: [Artifact, None], + abi_dir: Artifact | None, target_type: TargetType, - path_to_class_hashes: [Artifact, None], + path_to_class_hashes: Artifact | None, is_creating_subtarget: bool = False, source_only_abi_compiling_deps: list[JavaClasspathEntry] = []): proto = declare_prefixed_output(actions, actions_identifier, "jar_command.proto.json") @@ -209,13 +209,14 @@ def create_jar_artifact_javacd( compiler = java_toolchain.javac[DefaultInfo].default_outputs[0] exe, local_only = prepare_cd_exe( qualified_name, - java = java_toolchain.java[RunInfo], + java = java_toolchain.graalvm_java[RunInfo] if java_toolchain.use_graalvm_java_for_javacd else java_toolchain.java[RunInfo], class_loader_bootstrapper = java_toolchain.class_loader_bootstrapper, compiler = compiler, main_class = java_toolchain.javacd_main_class, worker = 
java_toolchain.javacd_worker[WorkerInfo], - debug_port = java_toolchain.javacd_debug_port, - debug_target = java_toolchain.javacd_debug_target, + target_specified_debug_port = debug_port, + toolchain_specified_debug_port = java_toolchain.javacd_debug_port, + toolchain_specified_debug_target = java_toolchain.javacd_debug_target, extra_jvm_args = java_toolchain.javacd_jvm_args, extra_jvm_args_target = java_toolchain.javacd_jvm_args_target, ) @@ -245,7 +246,7 @@ def create_jar_artifact_javacd( abi_dir.as_output(), ) - args = add_output_paths_to_cmd_args(args, output_paths, path_to_class_hashes) + args.add(output_paths_to_hidden_cmd_args(output_paths, path_to_class_hashes)) # TODO(cjhopman): make sure this works both locally and remote. event_pipe_out = declare_prefixed_output(actions, actions_identifier, "events.data") @@ -284,10 +285,12 @@ def create_jar_artifact_javacd( category = "{}javacd_jar".format(category_prefix), identifier = actions_identifier or "", dep_files = dep_files, + allow_dep_file_cache_upload = False, exe = exe, local_only = local_only, low_pass_filter = False, weight = 2, + error_handler = java_toolchain.java_error_handler, ) library_classpath_jars_tag = actions.artifact_tag() @@ -304,7 +307,17 @@ def create_jar_artifact_javacd( path_to_class_hashes_out, is_creating_subtarget, ) - final_jar = prepare_final_jar(actions, actions_identifier, output, output_paths, additional_compiled_srcs, java_toolchain.jar_builder) + jar_postprocessor = ctx.attrs.jar_postprocessor[RunInfo] if hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor else None + final_jar_output = prepare_final_jar( + actions = actions, + actions_identifier = actions_identifier, + output = output, + output_paths = output_paths, + additional_compiled_srcs = additional_compiled_srcs, + jar_builder = java_toolchain.jar_builder, + jar_postprocessor = jar_postprocessor, + ) + if not is_creating_subtarget: class_abi, source_abi, source_only_abi, classpath_abi, classpath_abi_dir = generate_abi_jars( actions, @@ -314,7 +327,7 @@ def create_jar_artifact_javacd( additional_compiled_srcs, is_building_android_binary, java_toolchain.class_abi_generator, - final_jar, + final_jar_output.final_jar, compiling_deps_tset, source_only_abi_deps, class_abi_jar = class_abi_jar, @@ -324,7 +337,8 @@ def create_jar_artifact_javacd( ) result = make_compile_outputs( - full_library = final_jar, + full_library = final_jar_output.final_jar, + preprocessed_library = final_jar_output.preprocessed_jar, class_abi = class_abi, source_abi = source_abi, source_only_abi = source_only_abi, @@ -335,7 +349,8 @@ def create_jar_artifact_javacd( ) else: result = make_compile_outputs( - full_library = final_jar, + full_library = final_jar_output.final_jar, + preprocessed_library = final_jar_output.preprocessed_jar, required_for_source_only_abi = required_for_source_only_abi, annotation_processor_output = output_paths.annotations, ) diff --git a/prelude/java/plugins/java_annotation_processor.bzl b/prelude/java/plugins/java_annotation_processor.bzl index a674922b47..343283cbd3 100644 --- a/prelude/java/plugins/java_annotation_processor.bzl +++ b/prelude/java/plugins/java_annotation_processor.bzl @@ -97,30 +97,24 @@ def create_annotation_processor_properties( annotation_processor_params = annotation_processor_params, ) -def create_ksp_annotation_processor_properties(ctx: AnalysisContext, plugins: list[Dependency]) -> AnnotationProcessorProperties: - ap_processors = [] - ap_processor_deps = [] +def 
create_ksp_annotation_processor_properties(plugins: list[Dependency]) -> AnnotationProcessorProperties: + annotation_processors = [] # APs derived from `plugins` attribute for ap_plugin in filter(None, [x.get(JavaProcessorsInfo) for x in plugins]): if not ap_plugin: fail("Plugin must have a type of `java_annotation_processor` or `java_plugin`. Plugins: {}".format(plugins)) if ap_plugin.type == JavaProcessorsType("ksp_annotation_processor"): - ap_processors += ap_plugin.processors - if ap_plugin.deps: - ap_processor_deps.append(ap_plugin.deps) - - if not ap_processors: - return AnnotationProcessorProperties(annotation_processors = [], annotation_processor_params = []) + annotation_processors.append(AnnotationProcessor( + affects_abi = ap_plugin.affects_abi, + supports_source_only_abi = ap_plugin.supports_source_only_abi, + processors = ap_plugin.processors, + deps = ap_plugin.deps, + isolate_class_loader = ap_plugin.isolate_class_loader, + )) return AnnotationProcessorProperties( - annotation_processors = [AnnotationProcessor( - processors = dedupe(ap_processors), - deps = ctx.actions.tset(JavaPackagingDepTSet, children = ap_processor_deps) if ap_processor_deps else None, - affects_abi = True, - supports_source_only_abi = False, - isolate_class_loader = False, - )], + annotation_processors = annotation_processors, annotation_processor_params = [], ) diff --git a/prelude/java/plugins/java_plugin.bzl b/prelude/java/plugins/java_plugin.bzl index ac41299032..2636e137a2 100644 --- a/prelude/java/plugins/java_plugin.bzl +++ b/prelude/java/plugins/java_plugin.bzl @@ -14,8 +14,7 @@ load( ) PluginParams = record( - processors = field(list[str]), - args = field(dict[str, cmd_args]), + processors = field(list[(str, cmd_args)]), deps = field([JavaPackagingDepTSet, None]), ) @@ -23,22 +22,32 @@ def create_plugin_params(ctx: AnalysisContext, plugins: list[Dependency]) -> [Pl processors = [] plugin_deps = [] + # _wip_java_plugin_arguments keys are providers_label, map to + # target_label to allow lookup with plugin.label.raw_target() + plugin_arguments = { + label.raw_target(): arguments + for label, arguments in ctx.attrs._wip_java_plugin_arguments.items() + } + # Compiler plugin derived from `plugins` attribute - for plugin in filter(None, [x.get(JavaProcessorsInfo) for x in plugins]): - if plugin.type == JavaProcessorsType("plugin"): - if len(plugin.processors) > 1: - fail("Only 1 java compiler plugin is expected. But received: {}".format(plugin.processors)) - processors.append(plugin.processors[0]) - if plugin.deps: - plugin_deps.append(plugin.deps) + for plugin in plugins: + processors_info = plugin.get(JavaProcessorsInfo) + if processors_info != None and processors_info.type == JavaProcessorsType("plugin"): + if len(processors_info.processors) > 1: + fail("Only 1 java compiler plugin is expected. 
But received: {}".format(processors_info.processors)) + processor = processors_info.processors[0] + if processors_info.deps: + plugin_deps.append(processors_info.deps) + + arguments = plugin_arguments.get(plugin.label.raw_target()) + processors.append((processor, cmd_args(arguments) if arguments != None else cmd_args())) if not processors: return None return PluginParams( - processors = dedupe(processors), + processors = processors, deps = ctx.actions.tset(JavaPackagingDepTSet, children = plugin_deps) if plugin_deps else None, - args = {}, ) def java_plugin_impl(ctx: AnalysisContext) -> list[Provider]: diff --git a/prelude/java/prebuilt_jar.bzl b/prelude/java/prebuilt_jar.bzl index 2e3c0bbced..4bd10169b5 100644 --- a/prelude/java/prebuilt_jar.bzl +++ b/prelude/java/prebuilt_jar.bzl @@ -59,9 +59,11 @@ def prebuilt_jar_impl(ctx: AnalysisContext) -> list[Provider]: library_output = library_output_classpath_entry, declared_deps = ctx.attrs.deps, exported_deps = ctx.attrs.deps, + provided_deps = ctx.attrs.desugar_deps, needs_desugar = True, is_prebuilt_jar = True, gwt_module = gwt_output, + sources_jar = ctx.attrs.source_jar, ) # TODO(T107163344) this shouldn't be in prebuilt_jar itself, use overlays to remove it. diff --git a/prelude/java/tools/BUCK.v2 b/prelude/java/tools/BUCK.v2 index ac3c111fc8..f47f01a54a 100644 --- a/prelude/java/tools/BUCK.v2 +++ b/prelude/java/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( @@ -77,7 +83,7 @@ prelude.python_bootstrap_library( ], visibility = [ "prelude//android/tools/...", - "prelude//kotlin/tools/...", "prelude//java/tools/...", + "prelude//kotlin/tools/...", ], ) diff --git a/prelude/java/tools/gen_class_to_source_map.py b/prelude/java/tools/gen_class_to_source_map.py index eed5c84731..f86d83cd1e 100644 --- a/prelude/java/tools/gen_class_to_source_map.py +++ b/prelude/java/tools/gen_class_to_source_map.py @@ -12,11 +12,27 @@ import zipfile +def _base_class_name_matches_base_source_path( + base_class_name: str, base_source_path: str +): + return base_class_name == base_source_path or base_source_path.endswith( + "/" + base_class_name + ) + + def main(argv): - parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument( + "--include_classes_prefixes", + "-i", + default=[], + nargs="*", + help="Prefixes of classes to include in the output, even if their source isn't present", + ) parser.add_argument( "--output", "-o", type=argparse.FileType("w"), default=sys.stdin ) + parser.add_argument("--sources_jar", required=False) parser.add_argument("jar") parser.add_argument("sources", nargs="*") args = parser.parse_args(argv[1:]) @@ -44,16 +60,55 @@ def main(argv): if "$" in base: continue + found = False for src_base, src_path in sources.items(): - if base == src_base or src_base.endswith("/" + base): + if _base_class_name_matches_base_source_path(base, src_base): classes.append( { "className": classname, "srcPath": src_path, } ) + found = True + break + # Kotlin creates .class files with a "Kt" suffix when code is written outside of a class, + # so strip that suffix and redo the comparison. 
+ elif base.endswith("Kt") and _base_class_name_matches_base_source_path( + base[:-2], src_base + ): + classes.append( + { + "className": classname[:-2], + "srcPath": src_path, + } + ) + found = True break + if not found: + # If the class is not present in the sources, we still want to + # include it if it has a prefix that we are interested in. + # certain classes in "androidx.databinding.*" are generated and it's useful to know their presence in jars + for prefix in args.include_classes_prefixes: + if classname.startswith(prefix): + classes.append( + { + "className": classname, + } + ) + break + + if args.sources_jar: + with zipfile.ZipFile(args.sources_jar, "w") as sources_jar: + for d in classes: + if "srcPath" in d: + src_path = d["srcPath"] + class_name = d["className"] + _, src_path_ext = os.path.splitext(src_path) + sources_jar.write( + src_path, class_name.replace(".", "/") + src_path_ext + ) + json.dump( { "jarPath": args.jar, diff --git a/prelude/java/utils/java_utils.bzl b/prelude/java/utils/java_utils.bzl index 6d3ebf9990..476b685b3a 100644 --- a/prelude/java/utils/java_utils.bzl +++ b/prelude/java/utils/java_utils.bzl @@ -117,26 +117,31 @@ def declare_prefixed_name(name: str, prefix: [str, None]) -> str: def get_class_to_source_map_info( ctx: AnalysisContext, outputs: [JavaCompileOutputs, None], - deps: list[Dependency]) -> (JavaClassToSourceMapInfo, dict): + deps: list[Dependency], + generate_sources_jar: bool = False) -> (JavaClassToSourceMapInfo, Artifact | None, dict): sub_targets = {} class_to_srcs = None class_to_srcs_debuginfo = None + sources_jar = None if outputs != None: - if not ctx.attrs._is_building_android_binary: - class_to_srcs = create_class_to_source_map_from_jar( - actions = ctx.actions, - java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], - name = ctx.attrs.name + ".class_to_srcs.json", - jar = outputs.classpath_entry.full_library, - srcs = ctx.attrs.srcs, - ) + name = ctx.label.name + class_to_srcs, sources_jar = create_class_to_source_map_from_jar( + actions = ctx.actions, + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], + name = name + ".class_to_srcs.json", + jar = outputs.classpath_entry.full_library, + srcs = ctx.attrs.srcs, + sources_jar_name = "{}-sources.jar".format(name) if generate_sources_jar else None, + ) class_to_srcs_debuginfo = maybe_create_class_to_source_map_debuginfo( actions = ctx.actions, java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], - name = ctx.attrs.name + ".debuginfo.json", + name = name + ".debuginfo.json", srcs = ctx.attrs.srcs, ) sub_targets["class-to-srcs"] = [DefaultInfo(default_output = class_to_srcs)] + if sources_jar: + sub_targets["sources.jar"] = [DefaultInfo(default_output = sources_jar)] class_to_src_map_info = create_class_to_source_map_info( ctx = ctx, @@ -146,7 +151,7 @@ def get_class_to_source_map_info( ) if outputs != None: sub_targets["debuginfo"] = [DefaultInfo(default_output = class_to_src_map_info.debuginfo)] - return (class_to_src_map_info, sub_targets) + return (class_to_src_map_info, sources_jar, sub_targets) def get_classpath_subtarget(actions: AnalysisActions, packaging_info: JavaPackagingInfo) -> dict[str, list[Provider]]: proj = packaging_info.packaging_deps.project_as_args("full_jar_args") diff --git a/prelude/js/js_bundle.bzl b/prelude/js/js_bundle.bzl index 2b25374ee0..e906217314 100644 --- a/prelude/js/js_bundle.bzl +++ b/prelude/js/js_bundle.bzl @@ -49,10 +49,11 @@ def _build_dependencies_file( command_args_files = [command_args_file], identifier = 
transform_profile, category = "dependencies", - hidden_artifacts = [cmd_args([ + hidden_artifacts = [cmd_args( dependencies_file.as_output(), extra_data_args, - ]).add(transitive_js_library_outputs)], + transitive_js_library_outputs, + )], ) return dependencies_file @@ -110,13 +111,14 @@ def _build_js_bundle( command_args_files = [command_args_file], identifier = base_dir, category = job_args["command"], - hidden_artifacts = [cmd_args([ + hidden_artifacts = [cmd_args( bundle_dir_output.as_output(), assets_dir.as_output(), misc_dir_path.as_output(), source_map.as_output(), extra_data_args, - ]).add(transitive_js_library_outputs)], + transitive_js_library_outputs, + )], ) return JsBundleInfo( diff --git a/prelude/js/js_library.bzl b/prelude/js/js_library.bzl index c44740f70e..9490bb4cb7 100644 --- a/prelude/js/js_library.bzl +++ b/prelude/js/js_library.bzl @@ -60,7 +60,7 @@ def _build_js_files( for grouped_src in grouped_srcs: identifier = "{}/{}".format(transform_profile, grouped_src.canonical_name) - output_path = ctx.actions.declare_output(identifier) + output_path = ctx.actions.declare_output("transform-out/{}.jsfile".format(identifier)) job_args = { "additionalSources": [{ "sourcePath": additional_source, @@ -106,7 +106,7 @@ def _build_library_files( transform_profile: str, flavors: list[str], js_files: list[Artifact]) -> Artifact: - output_path = ctx.actions.declare_output("{}/library_files".format(transform_profile)) + output_path = ctx.actions.declare_output("library-files-out/{}/library_files".format(transform_profile)) command_args_file = ctx.actions.write_json( "library_files_{}_command_args".format(transform_profile), { @@ -135,7 +135,7 @@ def _build_js_library( library_files: Artifact, flavors: list[str], js_library_deps: list[Artifact]) -> Artifact: - output_path = ctx.actions.declare_output("{}.jslib".format(transform_profile)) + output_path = ctx.actions.declare_output("library-dependencies-out/{}.jslib".format(transform_profile)) job_args = { "aggregatedSourceFilesFilePath": library_files, "command": "library-dependencies", diff --git a/prelude/js/js_providers.bzl b/prelude/js/js_providers.bzl index bc24622a4b..c5027eaf57 100644 --- a/prelude/js/js_providers.bzl +++ b/prelude/js/js_providers.bzl @@ -37,7 +37,7 @@ JsBundleInfo = provider( def get_transitive_outputs( actions: AnalysisActions, - value: [Artifact, None] = None, + value: Artifact | None = None, deps: list[JsLibraryInfo] = []) -> TransitiveOutputsTSet: kwargs = {} if value: diff --git a/prelude/js/js_utils.bzl b/prelude/js/js_utils.bzl index b52f229321..664e05e934 100644 --- a/prelude/js/js_utils.bzl +++ b/prelude/js/js_utils.bzl @@ -10,6 +10,7 @@ load("@prelude//:worker_tool.bzl", "WorkerToolInfo") load("@prelude//apple:apple_resource_types.bzl", "AppleResourceDestination", "AppleResourceSpec") load("@prelude//apple:resource_groups.bzl", "ResourceGraphInfo", "create_resource_graph") # @unused `ResourceGraphInfo` used as a type load("@prelude//js:js_providers.bzl", "JsBundleInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:expect.bzl", "expect") RAM_BUNDLE_TYPES = { @@ -142,17 +143,25 @@ def run_worker_commands( identifier: str, category: str, hidden_artifacts = [cmd_args]): - worker_args = cmd_args("--command-args-file", command_args_files) - worker_args.add("--command-args-file-extra-data-fixup-hack=true") - - worker_argsfile = ctx.actions.declare_output(paths.join(identifier, "worker_{}.argsfile".format(category))) - ctx.actions.write(worker_argsfile.as_output(), worker_args) + 
worker_args = cmd_args( + "--command-args-file", + command_args_files, + "--command-args-file-extra-data-fixup-hack=true", + ) worker_tool_info = worker_tool[WorkerToolInfo] - worker_command = worker_tool_info.command.copy() - worker_command.hidden(hidden_artifacts) - worker_command.hidden(command_args_files) - worker_command.add(cmd_args(worker_argsfile, format = "@{}")) + worker_command = cmd_args( + worker_tool_info.command.copy(), + at_argfile( + actions = ctx.actions, + name = paths.join(identifier, "worker_{}.argsfile".format(category)), + args = worker_args, + ), + hidden = [ + hidden_artifacts, + command_args_files, + ], + ) ctx.actions.run( worker_command, diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl index c1cedffee2..53288cfb41 100644 --- a/prelude/julia/julia_binary.bzl +++ b/prelude/julia/julia_binary.bzl @@ -5,7 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) load("@prelude//utils:utils.bzl", "flatten") load(":julia_info.bzl", "JuliaLibraryInfo", "JuliaLibraryTSet", "JuliaToolchainInfo") @@ -47,12 +52,13 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact): filter(None, [d.shared_library_info for d in deps]), )) - shared_libs_symlink_tree = ctx.actions.symlinked_dir( - "__shared_libs_symlink_tree__", - {name: shlib.lib.output for name, shlib in shlibs.items()}, + shared_libs_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "__shared_libs_symlink_tree__", + shared_libs = shlibs, ) - shlib_label_to_soname = {shlib.label: name for name, shlib in shlibs.items()} + shlib_label_to_soname = {shlib.label: shlib.soname.ensure_str() for shlib in shlibs} # iterate through all the jll libraries json_info = [] @@ -100,19 +106,20 @@ def build_julia_command(ctx): """ julia_toolchain = ctx.attrs._julia_toolchain[JuliaToolchainInfo] - # python processor - cmd = cmd_args([julia_toolchain.cmd_processor]) - # build out the symlink tree for libs symlink_dir = build_load_path_symtree(ctx) - cmd.hidden(symlink_dir) # build symdir for sources srcs_by_path = {f.short_path: f for f in ctx.attrs.srcs} srcs = ctx.actions.symlinked_dir("srcs_tree", srcs_by_path) if ctx.attrs.main not in srcs_by_path: fail("main should be in srcs!") - cmd.hidden(srcs) + + # python processor + cmd = cmd_args( + [julia_toolchain.cmd_processor], + hidden = [symlink_dir] + [srcs], + ) # prepare a json file to hold all the data the python preprocessor needs to # execute the julia interpreter. 
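The julia hunk above swaps the manual soname-to-output dict for the shared create_shlib_symlink_tree helper; sonames now live on the SharedLibrary records themselves, hence soname.ensure_str() for the label map. A condensed sketch, assuming a merged SharedLibraryInfo is already in scope:

load(
    "@prelude//linking:shared_libraries.bzl",
    "create_shlib_symlink_tree",
    "traverse_shared_library_info",
)

def _jll_symlink_tree(ctx: AnalysisContext, shared_library_info) -> Artifact:
    # Replaces the manual {name: shlib.lib.output} dict + actions.symlinked_dir.
    return create_shlib_symlink_tree(
        actions = ctx.actions,
        out = "__shared_libs_symlink_tree__",
        shared_libs = traverse_shared_library_info(shared_library_info),
    )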
diff --git a/prelude/julia/tools/BUCK.v2 b/prelude/julia/tools/BUCK.v2 index 2f205a4bb0..3867e73961 100644 --- a/prelude/julia/tools/BUCK.v2 +++ b/prelude/julia/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index 18966fb32a..847a84af5c 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -131,15 +131,16 @@ def define_output_paths(actions: AnalysisActions, prefix: [str, None], label: La ) # buildifier: disable=uninitialized -def add_output_paths_to_cmd_args(cmd: cmd_args, output_paths: OutputPaths, path_to_class_hashes: [Artifact, None]) -> cmd_args: +def output_paths_to_hidden_cmd_args(output_paths: OutputPaths, path_to_class_hashes: Artifact | None) -> cmd_args: + hidden = [] if path_to_class_hashes != None: - cmd.hidden(path_to_class_hashes.as_output()) - cmd.hidden(output_paths.jar_parent.as_output()) - cmd.hidden(output_paths.jar.as_output()) - cmd.hidden(output_paths.classes.as_output()) - cmd.hidden(output_paths.annotations.as_output()) - cmd.hidden(output_paths.scratch.as_output()) - return cmd + hidden.append(path_to_class_hashes.as_output()) + hidden.append(output_paths.jar_parent.as_output()) + hidden.append(output_paths.jar.as_output()) + hidden.append(output_paths.classes.as_output()) + hidden.append(output_paths.annotations.as_output()) + hidden.append(output_paths.scratch.as_output()) + return cmd_args(hidden = hidden) def encode_output_paths(label: Label, paths: OutputPaths, target_type: TargetType) -> struct: paths = struct( @@ -158,13 +159,14 @@ def encode_output_paths(label: Label, paths: OutputPaths, target_type: TargetTyp libraryTargetFullyQualifiedName = base_qualified_name(label), ) -def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths) -> struct: +def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths, manifest_file: Artifact | None) -> struct: return struct( jarPath = output_paths.jar.as_output(), removeEntryPredicate = struct( patterns = remove_classes, ), entriesToJar = [output_paths.classes.as_output()], + manifestFile = manifest_file, duplicatesLogLevel = "FINE", ) @@ -211,7 +213,7 @@ def _get_source_only_abi_compiling_deps(compiling_deps_tset: [JavaCompilingDepsT for d in source_only_abi_deps: info = d.get(JavaLibraryInfo) if not info: - fail("source_only_abi_deps must produce a JavaLibraryInfo but {} does not, please remove it".format(d)) + fail("source_only_abi_deps must produce a JavaLibraryInfo but '{}' does not, please remove it".format(d.label)) if info.library_output: source_only_abi_deps_filter[info.library_output.abi] = True @@ -250,22 +252,28 @@ def encode_ap_params(annotation_processor_properties: AnnotationProcessorPropert return encoded_ap_params def encode_plugin_params(plugin_params: [PluginParams, None]) -> [struct, None]: - # TODO(cjhopman): We should change plugins to not be merged together just like APs. 
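With PluginParams.processors now a list of (processor, arguments) pairs, the rewritten encode_plugin_params below emits one pluginProperties entry per plugin instead of a single merged entry, resolving the old TODO. A hedged sketch of the resulting shape (plugin names and arguments are invented):

# Illustrative only.
plugin_params = PluginParams(
    processors = [
        ("com.example.LintPlugin", cmd_args("--strict")),
        ("com.example.MetricsPlugin", cmd_args()),
    ],
    deps = None,
)
# encode_plugin_params(plugin_params) now yields pluginProperties with one
# struct per plugin, each carrying processorNames = [name] and its own
# `arguments`, rather than one entry with all names and an empty args dict.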
encoded_plugin_params = None if plugin_params: encoded_plugin_params = struct( parameters = [], - pluginProperties = [struct( - canReuseClassLoader = False, - doesNotAffectAbi = False, - supportsAbiGenerationFromSource = False, - processorNames = plugin_params.processors, - classpath = plugin_params.deps.project_as_json("javacd_json") if plugin_params.deps else [], - pathParams = {}, - )], + pluginProperties = [ + encode_plugin_properties(processor, arguments, plugin_params) + for processor, arguments in plugin_params.processors + ], ) return encoded_plugin_params +def encode_plugin_properties(processor: str, arguments: cmd_args, plugin_params: PluginParams) -> struct: + return struct( + canReuseClassLoader = False, + doesNotAffectAbi = False, + supportsAbiGenerationFromSource = False, + processorNames = [processor], + classpath = plugin_params.deps.project_as_json("javacd_json") if plugin_params.deps else [], + pathParams = {}, + arguments = arguments, + ) + def encode_base_jar_command( javac_tool: [str, RunInfo, Artifact, None], target_type: TargetType, @@ -282,10 +290,11 @@ def encode_base_jar_command( resources_map: dict[str, Artifact], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], + manifest_file: Artifact | None, extra_arguments: cmd_args, source_only_abi_compiling_deps: list[JavaClasspathEntry], track_class_usage: bool) -> struct: - library_jar_params = encode_jar_params(remove_classes, output_paths) + library_jar_params = encode_jar_params(remove_classes, output_paths, manifest_file) qualified_name = get_qualified_name(label, target_type) if target_type == TargetType("source_only_abi"): compiling_classpath = classpath_jars_tag.tag_artifacts([dep.abi for dep in source_only_abi_compiling_deps]) @@ -358,9 +367,10 @@ def setup_dep_files( hidden = ["artifact"]) -> cmd_args: dep_file = declare_prefixed_output(actions, actions_identifier, "dep_file.txt") - new_cmd = cmd_args() - new_cmd.add(cmd) - new_cmd.add([ + new_cmd_args = [] + new_cmd_hidden = [] + new_cmd_args.append(cmd) + new_cmd_args.append([ "--used-classes", ] + [ used_classes_json.as_output() @@ -373,16 +383,16 @@ def setup_dep_files( if abi_to_abi_dir_map: abi_to_abi_dir_map_file = declare_prefixed_output(actions, actions_identifier, "abi_to_abi_dir_map") actions.write(abi_to_abi_dir_map_file, abi_to_abi_dir_map) - new_cmd.add([ + new_cmd_args.extend([ "--jar-to-jar-dir-map", abi_to_abi_dir_map_file, ]) - if type(abi_to_abi_dir_map) == "transitive_set_args_projection": - new_cmd.hidden(classpath_jars_tag.tag_artifacts(abi_to_abi_dir_map)) + if isinstance(abi_to_abi_dir_map, TransitiveSetArgsProjection): + new_cmd_hidden.append(classpath_jars_tag.tag_artifacts(abi_to_abi_dir_map)) for hidden_artifact in hidden: - new_cmd.hidden(classpath_jars_tag.tag_artifacts(hidden_artifact)) + new_cmd_hidden.append(classpath_jars_tag.tag_artifacts(hidden_artifact)) - return new_cmd + return cmd_args(new_cmd_args, hidden = new_cmd_hidden) FORCE_PERSISTENT_WORKERS = read_root_config("build", "require_persistent_workers", "false").lower() == "true" @@ -393,21 +403,63 @@ def prepare_cd_exe( compiler: Artifact, main_class: str, worker: WorkerInfo, - debug_port: [int, None], - debug_target: [Label, None], + target_specified_debug_port: [int, None], + toolchain_specified_debug_port: [int, None], + toolchain_specified_debug_target: [Label, None], extra_jvm_args: list[str], - extra_jvm_args_target: [Label, None]) -> tuple: + extra_jvm_args_target: list[Label]) -> tuple: local_only = False 
jvm_args = ["-XX:-MaxFDLimit"] + # The variables 'extra_jvm_args' and 'extra_jvm_args_target' are generally used, but they are primarily designed for profiling use-cases. + # The following section is configured with the profiling use-case in mind. if extra_jvm_args_target: - if qualified_name == qualified_name_with_subtarget(extra_jvm_args_target): - jvm_args = jvm_args + extra_jvm_args - local_only = True + if len(extra_jvm_args_target) == 1: + # If there's only one target to profile, we want to isolate its compilation. + # This target should be built in its own action, allowing the worker (if available) to handle the remaining targets. + if qualified_name == qualified_name_with_subtarget(extra_jvm_args_target[0]): + jvm_args = jvm_args + extra_jvm_args + local_only = True # This flag ensures the target is not run on the worker. + else: + # If there are multiple targets to profile, they should be built on the worker to generate a single profiling data set. + # The remaining targets should be built individually, either locally or on the Remote Execution (RE). + local_only = True # By default, targets are not run on the worker. + for target in extra_jvm_args_target: + # If the current target matches the qualified name with subtarget, it is selected for profiling. + if qualified_name == qualified_name_with_subtarget(target): + jvm_args = jvm_args + extra_jvm_args + local_only = False # This flag allows the target to run on the worker. + break else: + # If no specific target is provided, the extra JVM arguments are added to all targets that run on worker, local machine or RE. jvm_args = jvm_args + extra_jvm_args - if debug_port and qualified_name == qualified_name_with_subtarget(debug_target): + # Allow JVM compiler daemon to access internal jdk.compiler APIs + jvm_args += [ + "--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.jvm=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.jvm=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED", + ] + + if target_specified_debug_port: + debug_port = target_specified_debug_port + elif toolchain_specified_debug_port and qualified_name == qualified_name_with_subtarget(toolchain_specified_debug_target): + debug_port = toolchain_specified_debug_port + else: + debug_port = None + + if debug_port: # Do not use a worker when debugging is enabled local_only = True jvm_args.extend(["-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address={}".format(debug_port)]) @@ -426,21 +478,37 @@ def prepare_cd_exe( ) return worker_run_info, FORCE_PERSISTENT_WORKERS +FinalJarOutput = record( + final_jar = Artifact, + # The same as final_jar unless there is a jar_postprocessor. 
+ preprocessed_jar = Artifact, +) + # If there's additional compiled srcs, we need to merge them in and if the # caller specified an output artifact we need to make sure the jar is in that # location. def prepare_final_jar( actions: AnalysisActions, actions_identifier: [str, None], - output: [Artifact, None], + output: Artifact | None, output_paths: OutputPaths, - additional_compiled_srcs: [Artifact, None], - jar_builder: RunInfo) -> Artifact: + additional_compiled_srcs: Artifact | None, + jar_builder: RunInfo, + jar_postprocessor: [RunInfo, None]) -> FinalJarOutput: + def make_output(jar: Artifact) -> FinalJarOutput: + if jar_postprocessor: + postprocessed_jar = postprocess_jar(actions, jar_postprocessor, jar, actions_identifier) + return FinalJarOutput(final_jar = postprocessed_jar, preprocessed_jar = jar) + else: + return FinalJarOutput(final_jar = jar, preprocessed_jar = jar) + if not additional_compiled_srcs: + output_jar = output_paths.jar if output: actions.copy_file(output.as_output(), output_paths.jar) - return output - return output_paths.jar + output_jar = output + + return make_output(output_jar) merged_jar = output if not merged_jar: @@ -454,25 +522,26 @@ def prepare_final_jar( merged_jar.as_output(), "--entries-to-jar", files_to_merge_file, - ]).hidden(files_to_merge), + ], hidden = files_to_merge), category = "merge_additional_srcs", identifier = actions_identifier, ) - return merged_jar + + return make_output(merged_jar) def generate_abi_jars( actions: AnalysisActions, actions_identifier: [str, None], label: Label, abi_generation_mode: [AbiGenerationMode, None], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, is_building_android_binary: bool, class_abi_generator: Dependency, final_jar: Artifact, compiling_deps_tset: [JavaCompilingDepsTSet, None], source_only_abi_deps: list[Dependency], - class_abi_jar: [Artifact, None], - class_abi_output_dir: [Artifact, None], + class_abi_jar: Artifact | None, + class_abi_output_dir: Artifact | None, encode_abi_command: typing.Callable, define_action: typing.Callable) -> tuple: class_abi = None @@ -548,3 +617,20 @@ def generate_abi_jars( classpath_abi_dir = class_abi_output_dir return class_abi, source_abi, source_only_abi, classpath_abi, classpath_abi_dir + +def postprocess_jar( + actions: AnalysisActions, + jar_postprocessor: RunInfo, + original_jar: Artifact, + actions_identifier: [str, None]) -> Artifact: + postprocessed_output = actions.declare_output("postprocessed_{}".format(original_jar.short_path)) + processor_cmd_args = cmd_args( + jar_postprocessor, + original_jar, + postprocessed_output.as_output(), + ) + + identifier = actions_identifier if actions_identifier else "" + actions.run(processor_cmd_args, category = "postprocessed{}".format(identifier)) + + return postprocessed_output diff --git a/prelude/kotlin/kotlin.bzl b/prelude/kotlin/kotlin.bzl index 4c9dd097e2..ce9032c67b 100644 --- a/prelude/kotlin/kotlin.bzl +++ b/prelude/kotlin/kotlin.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
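The kotlin rules below start accepting validation_deps; their outputs are threaded into the compile action as hidden inputs so that building the library forces the validation targets to build as well. A minimal sketch of that wiring, assuming get_validation_deps_outputs from @prelude//:validation_deps.bzl (the wrapper function name is illustrative):

load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs")

def _extra_args_with_validation(ctx: AnalysisContext) -> cmd_args:
    # Hidden inputs do not alter the command line, but they must still be
    # built, so attaching them forces the validation actions to run.
    return cmd_args(
        ctx.attrs.extra_arguments,
        hidden = get_validation_deps_outputs(ctx) or [],
    )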
+load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//android:configuration.bzl", "is_building_android_binary_attr") load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version") @@ -23,6 +24,7 @@ extra_attributes = { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.option(attrs.int(), default = dex_min_sdk_version()), "_dex_toolchain": toolchains_common.dex(), @@ -33,8 +35,10 @@ extra_attributes = { }, "kotlin_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "java_agents": attrs.list(attrs.source(), default = []), "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_exec_os_type": buck.exec_os_type_arg(), diff --git a/prelude/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl index cf3dbb3b3c..3c4b49685b 100644 --- a/prelude/kotlin/kotlin_library.bzl +++ b/prelude/kotlin/kotlin_library.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load( "@prelude//java:java_library.bzl", @@ -38,6 +39,8 @@ load( ) load("@prelude//kotlin:kotlin_utils.bzl", "get_kotlinc_compatible_target") load("@prelude//kotlin:kotlincd_jar_creator.bzl", "create_jar_artifact_kotlincd") +load("@prelude//utils:argfile.bzl", "at_argfile") +load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") load("@prelude//utils:utils.bzl", "map_idx") @@ -49,7 +52,7 @@ def _create_kotlin_sources( deps: list[Dependency], annotation_processor_properties: AnnotationProcessorProperties, ksp_annotation_processor_properties: AnnotationProcessorProperties, - additional_classpath_entries: list[Artifact]) -> (Artifact, [Artifact, None], [Artifact, None]): + additional_classpath_entries: list[Artifact]) -> (Artifact, Artifact | None, Artifact | None): """ Runs kotlinc on the provided kotlin sources. 
""" @@ -59,19 +62,21 @@ def _create_kotlin_sources( kotlinc = kotlin_toolchain.kotlinc[RunInfo] kotlinc_output = ctx.actions.declare_output("kotlinc_classes_output", dir = True) - compile_kotlin_cmd = cmd_args([ + compile_kotlin_cmd_args = [ compile_kotlin_tool, "--kotlinc_output", kotlinc_output.as_output(), - ]) + ] + compile_kotlin_cmd_hidden = [] + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] zip_scrubber_args = ["--zip_scrubber", cmd_args(java_toolchain.zip_scrubber, delimiter = " ")] - compile_kotlin_cmd.add(zip_scrubber_args) + compile_kotlin_cmd_args.append(zip_scrubber_args) kotlinc_cmd_args = cmd_args([kotlinc]) compiling_classpath = [] + additional_classpath_entries - compiling_deps_tset = derive_compiling_deps(ctx.actions, None, deps + kotlin_toolchain.kotlinc_classpath) + compiling_deps_tset = derive_compiling_deps(ctx.actions, None, deps + [kotlin_toolchain.kotlin_stdlib]) if compiling_deps_tset: compiling_classpath.extend( [compiling_dep.abi for compiling_dep in list(compiling_deps_tset.traverse())], @@ -82,17 +87,15 @@ def _create_kotlin_sources( delimiter = get_path_separator_for_exec_os(ctx), ) - # write joined classpath string into args file - classpath_args_file, _ = ctx.actions.write( - "kotlinc_classpath", - classpath_args, - allow_args = True, - ) - - compile_kotlin_cmd.hidden([compiling_classpath]) + compile_kotlin_cmd_hidden.append([compiling_classpath]) kotlinc_cmd_args.add(["-classpath"]) - kotlinc_cmd_args.add(cmd_args(classpath_args_file, format = "@{}")) + kotlinc_cmd_args.add(at_argfile( + actions = ctx.actions, + name = "kotlinc_classpath", + args = classpath_args, + allow_args = True, + )) module_name = ctx.label.package.replace("/", ".") + "." + ctx.label.name kotlinc_cmd_args.add( @@ -113,9 +116,9 @@ def _create_kotlin_sources( kapt_generated_sources_output = None if annotation_processor_properties.annotation_processors: - compile_kotlin_cmd.add(["--kapt_annotation_processing_jar", kotlin_toolchain.annotation_processing_jar[JavaLibraryInfo].library_output.full_library]) - compile_kotlin_cmd.add(["--kapt_annotation_processors", ",".join([p for ap in annotation_processor_properties.annotation_processors for p in ap.processors])]) - compile_kotlin_cmd.add(["--kapt_annotation_processor_params", ";".join(annotation_processor_properties.annotation_processor_params)]) + compile_kotlin_cmd_args.extend(["--kapt_annotation_processing_jar", kotlin_toolchain.annotation_processing_jar[JavaLibraryInfo].library_output.full_library]) + compile_kotlin_cmd_args.extend(["--kapt_annotation_processors", ",".join([p for ap in annotation_processor_properties.annotation_processors for p in ap.processors])]) + compile_kotlin_cmd_args.extend(["--kapt_annotation_processor_params", ";".join(annotation_processor_properties.annotation_processor_params)]) annotation_processor_classpath_tsets = ( filter(None, ([ap.deps for ap in annotation_processor_properties.annotation_processors])) + @@ -126,23 +129,23 @@ def _create_kotlin_sources( children = annotation_processor_classpath_tsets, ).project_as_args("full_jar_args") kapt_classpath_file = ctx.actions.write("kapt_classpath_file", annotation_processor_classpath) - compile_kotlin_cmd.add(["--kapt_classpath_file", kapt_classpath_file]) - compile_kotlin_cmd.hidden(annotation_processor_classpath) + compile_kotlin_cmd_args.extend(["--kapt_classpath_file", kapt_classpath_file]) + compile_kotlin_cmd_hidden.append(annotation_processor_classpath) sources_output = ctx.actions.declare_output("kapt_sources_output") - 
compile_kotlin_cmd.add(["--kapt_sources_output", sources_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_sources_output", sources_output.as_output()]) classes_output = ctx.actions.declare_output("kapt_classes_output") - compile_kotlin_cmd.add(["--kapt_classes_output", classes_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_classes_output", classes_output.as_output()]) stubs = ctx.actions.declare_output("kapt_stubs") - compile_kotlin_cmd.add(["--kapt_stubs", stubs.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_stubs", stubs.as_output()]) kapt_generated_sources_output = ctx.actions.declare_output("kapt_generated_sources_output.src.zip") - compile_kotlin_cmd.add(["--kapt_generated_sources_output", kapt_generated_sources_output.as_output()]) - compile_kotlin_cmd.add(["--kapt_base64_encoder", cmd_args(kotlin_toolchain.kapt_base64_encoder[RunInfo], delimiter = " ")]) + compile_kotlin_cmd_args.append(["--kapt_generated_sources_output", kapt_generated_sources_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_base64_encoder", cmd_args(kotlin_toolchain.kapt_base64_encoder[RunInfo], delimiter = " ")]) generated_kotlin_output = ctx.actions.declare_output("kapt_generated_kotlin_output") - compile_kotlin_cmd.add(["--kapt_generated_kotlin_output", generated_kotlin_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_generated_kotlin_output", generated_kotlin_output.as_output()]) if jvm_target: - compile_kotlin_cmd.add(["--kapt_jvm_target", jvm_target]) + compile_kotlin_cmd_args.append(["--kapt_jvm_target", jvm_target]) friend_paths = ctx.attrs.friend_paths if friend_paths: @@ -155,8 +158,8 @@ def _create_kotlin_sources( ksp_zipped_sources_output = None if ksp_annotation_processor_properties.annotation_processors: - ksp_cmd = cmd_args(compile_kotlin_tool) - ksp_cmd.add(zip_scrubber_args) + ksp_cmd = [compile_kotlin_tool] + ksp_cmd.append(zip_scrubber_args) ksp_annotation_processor_classpath_tsets = filter(None, ([ap.deps for ap in ksp_annotation_processor_properties.annotation_processors])) if ksp_annotation_processor_classpath_tsets: @@ -164,22 +167,24 @@ def _create_kotlin_sources( JavaPackagingDepTSet, children = ksp_annotation_processor_classpath_tsets, ).project_as_args("full_jar_args") - ksp_cmd.add(["--ksp_processor_jars"]) - ksp_cmd.add(cmd_args(ksp_annotation_processor_classpath, delimiter = ",")) + ksp_cmd.append("--ksp_processor_jars") + ksp_cmd.append(cmd_args(ksp_annotation_processor_classpath, delimiter = ",")) - ksp_cmd.add(["--ksp_classpath", classpath_args]) + ksp_cmd.extend(["--ksp_classpath", classpath_args]) ksp_classes_and_resources_output = ctx.actions.declare_output("ksp_output_dir/ksp_classes_and_resources_output") - ksp_cmd.add(["--ksp_classes_and_resources_output", ksp_classes_and_resources_output.as_output()]) - ksp_output = cmd_args(ksp_classes_and_resources_output.as_output()).parent() - ksp_cmd.add(["--ksp_output", ksp_output]) + ksp_cmd.extend(["--ksp_classes_and_resources_output", ksp_classes_and_resources_output.as_output()]) + ksp_output = cmd_args(ksp_classes_and_resources_output.as_output(), parent = 1) + ksp_cmd.extend(["--ksp_output", ksp_output]) ksp_sources_output = ctx.actions.declare_output("ksp_output_dir/ksp_sources_output") - ksp_cmd.add(["--ksp_sources_output", ksp_sources_output.as_output()]) + ksp_cmd.extend(["--ksp_sources_output", ksp_sources_output.as_output()]) ksp_zipped_sources_output = ctx.actions.declare_output("ksp_output_dir/ksp_zipped_sources_output.src.zip") - 
ksp_cmd.add(["--ksp_zipped_sources_output", ksp_zipped_sources_output.as_output()]) - ksp_cmd.add(["--ksp_project_base_dir", ctx.label.path]) + ksp_cmd.extend(["--ksp_zipped_sources_output", ksp_zipped_sources_output.as_output()]) + ksp_cmd.extend(["--ksp_project_base_dir", ctx.label.path]) ksp_kotlinc_cmd_args = cmd_args(kotlinc_cmd_args) - _add_plugins(ctx, ksp_kotlinc_cmd_args, ksp_cmd, is_ksp = True) + plugins_cmd_args = _add_plugins(ctx, is_ksp = True) + ksp_kotlinc_cmd_args.add(plugins_cmd_args.kotlinc_cmd_args) + ksp_cmd.append(plugins_cmd_args.compile_kotlin_cmd) ksp_cmd_args_file, _ = ctx.actions.write( "ksp_kotlinc_cmd", @@ -187,21 +192,24 @@ def _create_kotlin_sources( allow_args = True, ) - ksp_cmd.add("--kotlinc_cmd_file") - ksp_cmd.add(ksp_cmd_args_file) - ksp_cmd.hidden(ksp_kotlinc_cmd_args) + ksp_cmd.extend(["--kotlinc_cmd_file", ksp_cmd_args_file]) - ctx.actions.run(ksp_cmd, category = "ksp_kotlinc") + ctx.actions.run( + cmd_args(ksp_cmd, hidden = ksp_kotlinc_cmd_args), + category = "ksp_kotlinc", + ) zipped_sources = (zipped_sources or []) + [ksp_zipped_sources_output] - compile_kotlin_cmd.add(["--ksp_generated_classes_and_resources", ksp_classes_and_resources_output]) + compile_kotlin_cmd_args.extend(["--ksp_generated_classes_and_resources", ksp_classes_and_resources_output]) - _add_plugins(ctx, kotlinc_cmd_args, compile_kotlin_cmd, is_ksp = False) + plugin_cmd_args = _add_plugins(ctx, is_ksp = False) + kotlinc_cmd_args.add(plugin_cmd_args.kotlinc_cmd_args) + compile_kotlin_cmd_args.append(plugin_cmd_args.compile_kotlin_cmd) if zipped_sources: zipped_sources_file = ctx.actions.write("kotlinc_zipped_source_args", zipped_sources) - compile_kotlin_cmd.add(["--zipped_sources_file", zipped_sources_file]) - compile_kotlin_cmd.hidden(zipped_sources) + compile_kotlin_cmd_args.append(["--zipped_sources_file", zipped_sources_file]) + compile_kotlin_cmd_hidden.append(zipped_sources) args_file, _ = ctx.actions.write( "kotlinc_cmd", @@ -209,24 +217,32 @@ def _create_kotlin_sources( allow_args = True, ) - compile_kotlin_cmd.hidden([plain_sources]) + compile_kotlin_cmd_hidden.append(plain_sources) - compile_kotlin_cmd.add("--kotlinc_cmd_file") - compile_kotlin_cmd.add(args_file) - compile_kotlin_cmd.hidden(kotlinc_cmd_args) + compile_kotlin_cmd_args.append("--kotlinc_cmd_file") + compile_kotlin_cmd_args.append(args_file) + compile_kotlin_cmd_hidden.append(kotlinc_cmd_args) - ctx.actions.run(compile_kotlin_cmd, category = "kotlinc") + ctx.actions.run( + cmd_args(compile_kotlin_cmd_args, hidden = compile_kotlin_cmd_hidden), + category = "kotlinc", + ) return kotlinc_output, kapt_generated_sources_output, ksp_zipped_sources_output def _is_ksp_plugin(plugin: str) -> bool: return "symbol-processing" in plugin +_PluginCmdArgs = record( + kotlinc_cmd_args = cmd_args, + compile_kotlin_cmd = cmd_args, +) + def _add_plugins( ctx: AnalysisContext, - kotlinc_cmd_args: cmd_args, - compile_kotlin_cmd: cmd_args, - is_ksp: bool): + is_ksp: bool) -> _PluginCmdArgs: + kotlinc_cmd_args = cmd_args() + compile_kotlin_cmd = cmd_args() for plugin, plugin_options in ctx.attrs.kotlin_compiler_plugins.items(): if _is_ksp_plugin(str(plugin)) != is_ksp: continue @@ -245,6 +261,8 @@ def _add_plugins( if options: kotlinc_cmd_args.add(["-P", cmd_args(options, delimiter = ",")]) + return _PluginCmdArgs(kotlinc_cmd_args = kotlinc_cmd_args, compile_kotlin_cmd = compile_kotlin_cmd) + def kotlin_library_impl(ctx: AnalysisContext) -> list[Provider]: packaging_deps = ctx.attrs.deps + ctx.attrs.exported_deps + 
ctx.attrs.runtime_deps @@ -264,14 +282,27 @@ def kotlin_library_impl(ctx: AnalysisContext) -> list[Provider]: android_packageable_info, ] - java_providers = build_kotlin_library(ctx) + java_providers = build_kotlin_library( + ctx = ctx, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) return to_list(java_providers) + [android_packageable_info] +def _check_exported_deps(exported_deps: list[Dependency], attr_name: str): + for exported_dep in exported_deps: + # TODO(navidq) add a check that the exported dep always have a JavaLibraryInfo provider + if JavaLibraryInfo in exported_dep: + expect( + not exported_dep[JavaLibraryInfo].may_not_be_exported, + "{} has 'may_not_be_exported' label and should not be present in {}.".format(exported_dep.label.raw_target(), attr_name), + ) + def build_kotlin_library( ctx: AnalysisContext, additional_classpath_entries: list[Artifact] = [], bootclasspath_entries: list[Artifact] = [], - extra_sub_targets: dict = {}) -> JavaProviders: + extra_sub_targets: dict = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> JavaProviders: srcs = ctx.attrs.srcs has_kotlin_srcs = lazy.is_any(lambda src: src.extension == ".kt" or src.basename.endswith(".src.zip") or src.basename.endswith("-sources.jar"), srcs) @@ -284,11 +315,14 @@ def build_kotlin_library( # Match buck1, which always does class ABI generation for Kotlin targets unless explicitly specified. override_abi_generation_mode = get_abi_generation_mode(ctx.attrs.abi_generation_mode) or AbiGenerationMode("class"), extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ) else: deps_query = getattr(ctx.attrs, "deps_query", []) or [] provided_deps_query = getattr(ctx.attrs, "provided_deps_query", []) or [] + _check_exported_deps(ctx.attrs.exported_deps, "exported_deps") + _check_exported_deps(ctx.attrs.exported_provided_deps, "exported_provided_deps") deps = ( ctx.attrs.deps + deps_query + @@ -304,7 +338,7 @@ def build_kotlin_library( ctx.attrs.annotation_processor_params, ctx.attrs.annotation_processor_deps, ) - ksp_annotation_processor_properties = create_ksp_annotation_processor_properties(ctx, ctx.attrs.plugins) + ksp_annotation_processor_properties = create_ksp_annotation_processor_properties(ctx.attrs.plugins) kotlin_toolchain = ctx.attrs._kotlin_toolchain[KotlinToolchainInfo] if kotlin_toolchain.kotlinc_protocol == "classic": @@ -331,11 +365,18 @@ def build_kotlin_library( additional_compiled_srcs = kotlinc_classes, generated_sources = filter(None, [kapt_generated_sources, ksp_generated_sources]), extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ) return java_lib elif kotlin_toolchain.kotlinc_protocol == "kotlincd": source_level, target_level = get_java_version_attributes(ctx) - extra_arguments = cmd_args(ctx.attrs.extra_arguments) + extra_arguments = cmd_args( + ctx.attrs.extra_arguments, + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. 
+ hidden = validation_deps_outputs or [], + ) + common_kotlincd_kwargs = { "abi_generation_mode": get_abi_generation_mode(ctx.attrs.abi_generation_mode), "actions": ctx.actions, @@ -345,16 +386,19 @@ def build_kotlin_library( annotation_processor_params = annotation_processor_properties.annotation_processor_params + ksp_annotation_processor_properties.annotation_processor_params, ), "bootclasspath_entries": bootclasspath_entries, + "debug_port": getattr(ctx.attrs, "debug_port", None), "deps": deps, "extra_kotlinc_arguments": ctx.attrs.extra_kotlinc_arguments, "friend_paths": ctx.attrs.friend_paths, "is_building_android_binary": ctx.attrs._is_building_android_binary, + "jar_postprocessor": ctx.attrs.jar_postprocessor[RunInfo] if hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor else None, "java_toolchain": ctx.attrs._java_toolchain[JavaToolchainInfo], "javac_tool": derive_javac(ctx.attrs.javac) if ctx.attrs.javac else None, "k2": ctx.attrs.k2, "kotlin_compiler_plugins": ctx.attrs.kotlin_compiler_plugins, "kotlin_toolchain": kotlin_toolchain, "label": ctx.label, + "manifest_file": ctx.attrs.manifest_file, "remove_classes": ctx.attrs.remove_classes, "required_for_source_only_abi": ctx.attrs.required_for_source_only_abi, "resources": ctx.attrs.resources, @@ -400,6 +444,14 @@ def build_kotlin_library( DefaultInfo(default_output = nullsafe_info.output), ]} + class_to_src_map, sources_jar, class_to_src_map_sub_targets = get_class_to_source_map_info( + ctx, + outputs = outputs, + deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps, + generate_sources_jar = True, + ) + extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets + java_library_info, java_packaging_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, intellij_info = create_java_library_providers( ctx, library_output = outputs.classpath_entry if outputs else None, @@ -411,15 +463,10 @@ def build_kotlin_library( needs_desugar = source_level > 7 or target_level > 7, generated_sources = generated_sources, has_srcs = bool(srcs), + sources_jar = sources_jar, + preprocessed_library = outputs.preprocessed_library if outputs else None, ) - class_to_src_map, class_to_src_map_sub_targets = get_class_to_source_map_info( - ctx, - outputs = outputs, - deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps, - ) - extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets - default_info = get_default_info( ctx.actions, ctx.attrs._java_toolchain[JavaToolchainInfo], diff --git a/prelude/kotlin/kotlin_toolchain.bzl b/prelude/kotlin/kotlin_toolchain.bzl index 248ea5a197..8fb39a20d3 100644 --- a/prelude/kotlin/kotlin_toolchain.bzl +++ b/prelude/kotlin/kotlin_toolchain.bzl @@ -11,9 +11,9 @@ KotlinToolchainInfo = provider( # @unsorted-dict-items doc = "Kotlin toolchain info", fields = { + "allow_k2_usage": provider_field(typing.Any, default = None), "annotation_processing_jar": provider_field(typing.Any, default = None), "class_loader_bootstrapper": provider_field(typing.Any, default = None), - "compilation_tracer_plugin": provider_field(typing.Any, default = None), "compile_kotlin": provider_field(typing.Any, default = None), "dep_files": provider_field(typing.Any, default = None), "jvm_abi_gen_plugin": provider_field(typing.Any, default = None), @@ -22,10 +22,10 @@ KotlinToolchainInfo = provider( "kosabi_jvm_abi_gen_plugin": provider_field(typing.Any, default = None), "kosabi_stubs_gen_plugin": provider_field(typing.Any, default = None), 
"kosabi_supported_ksp_providers": provider_field(typing.Any, default = None), + "kotlin_error_handler": provider_field(typing.Any, default = None), "kotlin_home_libraries": provider_field(typing.Any, default = None), "kotlin_stdlib": provider_field(typing.Any, default = None), "kotlinc": provider_field(typing.Any, default = None), - "kotlinc_classpath": provider_field(typing.Any, default = None), "kotlinc_protocol": provider_field(typing.Any, default = None), "kotlincd_debug_port": provider_field(typing.Any, default = None), "kotlincd_debug_target": provider_field(typing.Any, default = None), @@ -33,8 +33,6 @@ KotlinToolchainInfo = provider( "kotlincd_jvm_args_target": provider_field(typing.Any, default = None), "kotlincd_main_class": provider_field(typing.Any, default = None), "kotlincd_worker": provider_field(typing.Any, default = None), - "qpld_dotslash": provider_field(typing.Any, default = None), - "should_use_compilation_tracer": provider_field(typing.Any, default = None), "track_class_usage_plugin": provider_field(typing.Any, default = None), }, ) diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl index 14558b04f6..85ec1624e5 100644 --- a/prelude/kotlin/kotlincd_jar_creator.bzl +++ b/prelude/kotlin/kotlincd_jar_creator.bzl @@ -26,7 +26,6 @@ load( "@prelude//jvm:cd_jar_creator_util.bzl", "OutputPaths", "TargetType", - "add_output_paths_to_cmd_args", "base_qualified_name", "declare_prefixed_output", "define_output_paths", @@ -34,6 +33,7 @@ load( "encode_jar_params", "generate_abi_jars", "get_compiling_deps_tset", + "output_paths_to_hidden_cmd_args", "prepare_cd_exe", "prepare_final_jar", "setup_dep_files", @@ -43,11 +43,6 @@ load("@prelude//kotlin:kotlin_utils.bzl", "get_kotlinc_compatible_target") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "map_idx") -buckPaths = struct( - configuredBuckOut = "buck-out/v2", - includeTargetConfigHash = True, -) - def create_jar_artifact_kotlincd( actions: AnalysisActions, actions_identifier: [str, None], @@ -62,6 +57,7 @@ def create_jar_artifact_kotlincd( resources_root: [str, None], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], + manifest_file: Artifact | None, source_level: int, target_level: int, deps: list[Dependency], @@ -76,7 +72,9 @@ def create_jar_artifact_kotlincd( extra_kotlinc_arguments: list[str], k2: bool, is_creating_subtarget: bool = False, - optional_dirs: list[OutputArtifact] = []) -> JavaCompileOutputs: + optional_dirs: list[OutputArtifact] = [], + jar_postprocessor: [RunInfo, None] = None, + debug_port: [int, None] = None) -> JavaCompileOutputs: resources_map = get_resources_map( java_toolchain = java_toolchain, package = label.package, @@ -116,12 +114,23 @@ def create_jar_artifact_kotlincd( if kotlin_toolchain.kosabi_jvm_abi_gen_plugin != None: kosabiPluginOptionsMap["kosabi_jvm_abi_gen_plugin"] = kotlin_toolchain.kosabi_jvm_abi_gen_plugin + current_language_version = None + for arg in extra_kotlinc_arguments: + # If `-language-version` is defined multiple times, we use the last one, just like the compiler does + if "-language-version" in arg: + current_language_version = arg.split("=")[1].strip() + + if k2 == True and kotlin_toolchain.allow_k2_usage: + if not current_language_version or current_language_version < "2.0": + extra_kotlinc_arguments.append("-language-version=2.0") + else: # use K1 + if not current_language_version or current_language_version >= "2.0": + 
extra_kotlinc_arguments.append("-language-version=1.9") + return struct( extraClassPaths = bootclasspath_entries, standardLibraryClassPath = kotlin_toolchain.kotlin_stdlib[JavaLibraryInfo].library_output.full_library, annotationProcessingClassPath = kotlin_toolchain.annotation_processing_jar[JavaLibraryInfo].library_output.full_library, - compilationTracerPlugin = kotlin_toolchain.compilation_tracer_plugin, - qpldDotslash = kotlin_toolchain.qpld_dotslash, jvmAbiGenPlugin = kotlin_toolchain.jvm_abi_gen_plugin, kotlinCompilerPlugins = {plugin: {"params": plugin_options} if plugin_options else {} for plugin, plugin_options in kotlin_compiler_plugins.items()}, kosabiPluginOptions = struct(**kosabiPluginOptionsMap), @@ -130,12 +139,10 @@ def create_jar_artifact_kotlincd( jvmTarget = get_kotlinc_compatible_target(str(target_level)), kosabiJvmAbiGenEarlyTerminationMessagePrefix = "exception: java.lang.RuntimeException: Terminating compilation. We're done with ABI.", kosabiSupportedKspProviders = kotlin_toolchain.kosabi_supported_ksp_providers, - shouldUseCompilationTracer = kotlin_toolchain.should_use_compilation_tracer, shouldUseJvmAbiGen = should_use_jvm_abi_gen, shouldVerifySourceOnlyAbiConstraints = actual_abi_generation_mode == AbiGenerationMode("source_only"), shouldGenerateAnnotationProcessingStats = True, extraKotlincArguments = extra_kotlinc_arguments, - extraNonSourceOnlyAbiKotlincArguments = ["-language-version=2.0"] if k2 else [], shouldRemoveKotlinCompilerFromClassPath = True, depTrackerPlugin = kotlin_toolchain.track_class_usage_plugin, ) @@ -145,7 +152,7 @@ def create_jar_artifact_kotlincd( compiling_deps_tset = get_compiling_deps_tset(actions, deps, additional_classpath_entries) # external javac does not support used classes - track_class_usage = javac_tool == None + track_class_usage = javac_tool == None and kotlin_toolchain.track_class_usage_plugin != None def encode_library_command( output_paths: OutputPaths, @@ -168,6 +175,7 @@ def create_jar_artifact_kotlincd( resources_map, annotation_processor_properties = annotation_processor_properties, plugin_params = plugin_params, + manifest_file = manifest_file, extra_arguments = cmd_args(extra_arguments), source_only_abi_compiling_deps = [], track_class_usage = track_class_usage, @@ -211,11 +219,12 @@ def create_jar_artifact_kotlincd( resources_map, annotation_processor_properties, plugin_params, + manifest_file, cmd_args(extra_arguments), source_only_abi_compiling_deps = source_only_abi_compiling_deps, track_class_usage = True, ) - abi_params = encode_jar_params(remove_classes, output_paths) + abi_params = encode_jar_params(remove_classes, output_paths, manifest_file) abi_command = struct( kotlinExtraParams = kotlin_extra_params, baseJarCommand = base_jar_command, @@ -238,9 +247,9 @@ def create_jar_artifact_kotlincd( qualified_name: str, output_paths: OutputPaths, classpath_jars_tag: ArtifactTag, - abi_dir: [Artifact, None], + abi_dir: Artifact | None, target_type: TargetType, - path_to_class_hashes: [Artifact, None], + path_to_class_hashes: Artifact | None, source_only_abi_compiling_deps: list[JavaClasspathEntry] = [], is_creating_subtarget: bool = False): _unused = source_only_abi_compiling_deps @@ -251,13 +260,14 @@ def create_jar_artifact_kotlincd( compiler = kotlin_toolchain.kotlinc[DefaultInfo].default_outputs[0] exe, local_only = prepare_cd_exe( qualified_name, - java = java_toolchain.java[RunInfo], + java = java_toolchain.graalvm_java[RunInfo] if java_toolchain.use_graalvm_java_for_javacd else java_toolchain.java[RunInfo], 
class_loader_bootstrapper = kotlin_toolchain.class_loader_bootstrapper, compiler = compiler, main_class = kotlin_toolchain.kotlincd_main_class, worker = kotlin_toolchain.kotlincd_worker[WorkerInfo], - debug_port = kotlin_toolchain.kotlincd_debug_port, - debug_target = kotlin_toolchain.kotlincd_debug_target, + target_specified_debug_port = debug_port, + toolchain_specified_debug_port = kotlin_toolchain.kotlincd_debug_port, + toolchain_specified_debug_target = kotlin_toolchain.kotlincd_debug_target, extra_jvm_args = kotlin_toolchain.kotlincd_jvm_args, extra_jvm_args_target = kotlin_toolchain.kotlincd_jvm_args_target, ) @@ -296,7 +306,7 @@ def create_jar_artifact_kotlincd( optional_dirs, ) - args = add_output_paths_to_cmd_args(args, output_paths, path_to_class_hashes) + args.add(output_paths_to_hidden_cmd_args(output_paths, path_to_class_hashes)) event_pipe_out = declare_prefixed_output(actions, actions_identifier, "events.data") @@ -327,10 +337,12 @@ def create_jar_artifact_kotlincd( category = "{}kotlincd_jar".format(category_prefix), identifier = actions_identifier, dep_files = dep_files, + allow_dep_file_cache_upload = False, exe = exe, local_only = local_only, low_pass_filter = False, weight = 2, + error_handler = kotlin_toolchain.kotlin_error_handler, ) library_classpath_jars_tag = actions.artifact_tag() @@ -348,13 +360,14 @@ def create_jar_artifact_kotlincd( is_creating_subtarget = is_creating_subtarget, ) - final_jar = prepare_final_jar( + final_jar_output = prepare_final_jar( actions = actions, actions_identifier = actions_identifier, output = None, output_paths = output_paths, additional_compiled_srcs = None, jar_builder = java_toolchain.jar_builder, + jar_postprocessor = jar_postprocessor, ) if not is_creating_subtarget: @@ -367,7 +380,7 @@ def create_jar_artifact_kotlincd( additional_compiled_srcs = None, is_building_android_binary = is_building_android_binary, class_abi_generator = java_toolchain.class_abi_generator, - final_jar = final_jar, + final_jar = final_jar_output.final_jar, compiling_deps_tset = compiling_deps_tset, source_only_abi_deps = source_only_abi_deps, class_abi_jar = class_abi_jar, @@ -376,7 +389,8 @@ def create_jar_artifact_kotlincd( define_action = define_kotlincd_action, ) return make_compile_outputs( - full_library = final_jar, + full_library = final_jar_output.final_jar, + preprocessed_library = final_jar_output.preprocessed_jar, class_abi = class_abi, source_only_abi = source_only_abi, classpath_abi = classpath_abi, @@ -386,7 +400,8 @@ def create_jar_artifact_kotlincd( ) else: return make_compile_outputs( - full_library = final_jar, + full_library = final_jar_output.final_jar, + preprocessed_library = final_jar_output.preprocessed_jar, required_for_source_only_abi = required_for_source_only_abi, annotation_processor_output = output_paths.annotations, ) diff --git a/prelude/kotlin/tools/compile_kotlin/BUCK.v2 b/prelude/kotlin/tools/compile_kotlin/BUCK.v2 index 0033a1bd05..5ae64be14e 100644 --- a/prelude/kotlin/tools/compile_kotlin/BUCK.v2 +++ b/prelude/kotlin/tools/compile_kotlin/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( diff --git a/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 b/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 index bf49427e7f..e44d3e6263 100644 --- a/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 +++ b/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 @@ -1,4 +1,9 @@ 
load("@prelude//kotlin/tools:defs.bzl", "java_bootstrap_binary", "java_bootstrap_library") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() java_bootstrap_library( name = "kapt_base64_encoder_lib", diff --git a/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java b/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java index 5da2b5fac5..d6840bdeb2 100644 --- a/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java +++ b/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java @@ -1,4 +1,11 @@ -// (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary. +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under both the MIT license found in the + * LICENSE-MIT file in the root directory of this source tree and the Apache + * License, Version 2.0 found in the LICENSE-APACHE file in the root directory + * of this source tree. + */ package com.facebook.kapt; diff --git a/prelude/linking/execution_preference.bzl b/prelude/linking/execution_preference.bzl index 92d45adee6..041ceb7dc0 100644 --- a/prelude/linking/execution_preference.bzl +++ b/prelude/linking/execution_preference.bzl @@ -36,13 +36,14 @@ _ActionExecutionAttributes = record( def link_execution_preference_attr(): # The attribute is optional, allowing for None to represent that no preference has been set and we should fallback on the toolchain. return attrs.option(attrs.one_of(attrs.enum(LinkExecutionPreferenceTypes), attrs.dep(providers = [LinkExecutionPreferenceDeterminatorInfo])), default = None, doc = """ - The execution preference for linking. Options are:\n - - any : No preference is set, and the link action will be performed based on buck2's executor configuration.\n - - full_hybrid : The link action will execute both locally and remotely, regardless of buck2's executor configuration (if\n - the executor is capable of hybrid execution). The use_limited_hybrid setting of the hybrid executor is ignored.\n - - local : The link action will execute locally if compatible on current host platform.\n - - local_only : The link action will execute locally, and error if the current platform is not compatible.\n - - remote : The link action will execute remotely if a compatible remote platform exists, otherwise locally.\n + The execution preference for linking. Options are: + + - any : No preference is set, and the link action will be performed based on buck2's executor configuration. + - full_hybrid : The link action will execute both locally and remotely, regardless of buck2's executor configuration (if + the executor is capable of hybrid execution). The use_limited_hybrid setting of the hybrid executor is ignored. + - local : The link action will execute locally if compatible on current host platform. + - local_only : The link action will execute locally, and error if the current platform is not compatible. + - remote : The link action will execute remotely if a compatible remote platform exists, otherwise locally. The default is None, expressing that no preference has been set on the target itself. 
""") diff --git a/prelude/linking/link_groups.bzl b/prelude/linking/link_groups.bzl index 19582078e0..a3fb2e1c59 100644 --- a/prelude/linking/link_groups.bzl +++ b/prelude/linking/link_groups.bzl @@ -12,7 +12,10 @@ load( load( ":link_info.bzl", "LinkInfos", - "LinkedObject", +) +load( + ":shared_libraries.bzl", + "SharedLibraries", ) # Information about a linkable node which explicitly sets `link_group`. @@ -20,7 +23,7 @@ LinkGroupLib = record( # The label of the owning target (if any). label = field([Label, None], None), # The shared libs to package for this link group. - shared_libs = field(dict[str, LinkedObject]), + shared_libs = field(SharedLibraries), # The link info to link against this link group. shared_link_infos = field(LinkInfos), ) @@ -48,9 +51,10 @@ def gather_link_group_libs( def merge_link_group_lib_info( label: [Label, None] = None, name: [str, None] = None, - shared_libs: [dict[str, LinkedObject], None] = None, + shared_libs: [SharedLibraries, None] = None, shared_link_infos: [LinkInfos, None] = None, - deps: list[Dependency] = []) -> LinkGroupLibInfo: + deps: list[Dependency] = [], + children: list[LinkGroupLibInfo] = []) -> LinkGroupLibInfo: """ Merge and return link group info libs from deps and the current rule wrapped in a provider. @@ -66,5 +70,6 @@ def merge_link_group_lib_info( libs = gather_link_group_libs( libs = [libs], deps = deps, + children = children, ), ) diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index 2bd0b63d18..f908efe3bc 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -17,11 +17,8 @@ load( "get_no_as_needed_shared_libs_flags", "get_objects_as_library_args", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") -load( - "@prelude//utils:utils.bzl", - "flatten", -) # Represents an archive (.a file) Archive = record( @@ -74,22 +71,12 @@ def default_output_style_for_link_strategy(link_strategy: LinkStrategy) -> LibOu return LibOutputStyle("pic_archive") return LibOutputStyle("shared_lib") -# Ways a library can request to be linked (e.g. usually specific via a rule -# param like `preferred_linkage`. The actual link style used for a library is -# usually determined by a combination of this and the link style being exported -# via a provider. -Linkage = enum( - "static", - "shared", - "any", -) - # An archive. ArchiveLinkable = record( # Artifact in the .a format from ar archive = field(Archive), # If a bitcode bundle was created for this artifact it will be present here - bitcode_bundle = field([Artifact, None], None), + bitcode_bundle = field(Artifact | None, None), linker_type = field(str), link_whole = field(bool, False), # Indicates if this archive may contain LTO bit code. 
Can be set to `False` @@ -108,7 +95,7 @@ SharedLibLinkable = record( ObjectsLinkable = record( objects = field([list[Artifact], None], None), # Any of the objects that are in bitcode format - bitcode_bundle = field([Artifact, None], None), + bitcode_bundle = field(Artifact | None, None), linker_type = field(str), link_whole = field(bool, False), ) @@ -143,7 +130,14 @@ SwiftRuntimeLinkable = record( runtime_required = field(bool, False), ) -LinkableTypes = [ArchiveLinkable, SharedLibLinkable, ObjectsLinkable, FrameworksLinkable, SwiftRuntimeLinkable, SwiftmoduleLinkable] +LinkableTypes = [ + ArchiveLinkable, + SharedLibLinkable, + ObjectsLinkable, + FrameworksLinkable, + SwiftRuntimeLinkable, + SwiftmoduleLinkable, +] LinkerFlags = record( flags = field(list[typing.Any], []), @@ -177,6 +171,10 @@ LinkOrdering = enum( "topological", ) +CxxSanitizerRuntimeInfo = provider(fields = { + "runtime_files": provider_field(list[Artifact]), +}) + def set_link_info_link_whole(info: LinkInfo) -> LinkInfo: linkables = [set_linkable_link_whole(linkable) for linkable in info.linkables] return LinkInfo( @@ -219,38 +217,55 @@ def wrap_link_info( external_debug_info = inner.external_debug_info, ) +# Returns true if the command line argument representation of this linkable +# could be passed within a filelist. +def _is_linkable_included_in_filelist(linkable: LinkableTypes) -> bool: + if isinstance(linkable, ArchiveLinkable): + # Link whole archives don't appear in the filelist, but are passed directly to the linker + # with a -force-load (MachO) or -whole-archive (ELF) flag. Regular archives do appear in the filelist. + return not linkable.link_whole + elif isinstance(linkable, SharedLibLinkable) or \ isinstance(linkable, FrameworksLinkable) or \ isinstance(linkable, SwiftRuntimeLinkable) or \ isinstance(linkable, SwiftmoduleLinkable): + # These are all passed directly via various command line flags, not via a filelist. + return False + elif isinstance(linkable, ObjectsLinkable): + # Object files always appear in the filelist. + return True + else: + fail("Encountered unhandled filelist-like linkable {}".format(str(linkable))) + # Adds appropriate args representing `linkable` to `args` def append_linkable_args(args: cmd_args, linkable: LinkableTypes): if isinstance(linkable, ArchiveLinkable): if linkable.link_whole: args.add(get_link_whole_args(linkable.linker_type, [linkable.archive.artifact])) - elif linkable.linker_type == "darwin": - pass else: args.add(linkable.archive.artifact) # When using thin archives, object files are implicitly used as inputs # to the link, so make sure track them as inputs so that they're # materialized/tracked properly. - args.add(cmd_args().hidden(linkable.archive.external_objects)) + args.add(cmd_args(hidden = linkable.archive.external_objects)) elif isinstance(linkable, SharedLibLinkable): if linkable.link_without_soname: - args.add(cmd_args(linkable.lib, format = "-L{}").parent()) + args.add(cmd_args(linkable.lib, format = "-L{}", parent = 1)) args.add("-l" + linkable.lib.basename.removeprefix("lib").removesuffix(linkable.lib.extension)) else: args.add(linkable.lib) elif isinstance(linkable, ObjectsLinkable): - # We depend on just the filelist for darwin linker and don't add the normal args - if linkable.linker_type != "darwin": - # We need to export every symbol when link groups are used, but enabling - # --whole-archive with --start-lib is undefined behavior in gnu linkers: - # https://reviews.llvm.org/D120443.
We need to export symbols from every - # linkable in the link_info - if not linkable.link_whole: - args.add(get_objects_as_library_args(linkable.linker_type, linkable.objects)) - else: - args.add(linkable.objects) - elif isinstance(linkable, FrameworksLinkable) or isinstance(linkable, SwiftRuntimeLinkable) or isinstance(linkable, SwiftmoduleLinkable): + # We need to export every symbol when link groups are used, but enabling + # --whole-archive with --start-lib is undefined behavior in gnu linkers: + # https://reviews.llvm.org/D120443. We need to export symbols from every + # linkable in the link_info + if not linkable.link_whole: + args.add(get_objects_as_library_args(linkable.linker_type, linkable.objects)) + else: + args.add(linkable.objects) + elif isinstance(linkable, FrameworksLinkable) or \ + isinstance(linkable, SwiftRuntimeLinkable) or \ + isinstance(linkable, SwiftmoduleLinkable): # These flags are handled separately so they can be deduped. # # We've seen in apps with larger dependency graphs that failing @@ -259,42 +274,33 @@ def append_linkable_args(args: cmd_args, linkable: LinkableTypes): else: fail("Encountered unhandled linkable {}".format(str(linkable))) -def link_info_to_args(value: LinkInfo) -> cmd_args: - args = cmd_args(value.pre_flags) - for linkable in value.linkables: - append_linkable_args(args, linkable) - if False: - # TODO(nga): `post_flags` is never `None`. - def unknown(): - pass - - value = unknown() - if value.post_flags != None: - args.add(value.post_flags) - return args - -# List of inputs to pass to the darwin linker via the `-filelist` param. -# TODO(agallagher): It might be nicer to leave these inlined in the args -# above and extract them at link time via reflection. This way we'd hide -# platform-specific details from this level. -# NOTE(agallagher): Using filelist out-of-band means objects/archives get -# linked out of order of their corresponding flags. 
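(Editorial sketch: the `filelist_only` projection introduced below replaces the removed `link_info_filelist`. Assuming a `LinkInfosTSet` named `infos`, a Darwin consumer might use it roughly like this; the helper name and the `-filelist` plumbing are illustrative, not part of this diff.)

    def _darwin_filelist_args(ctx: AnalysisContext, infos) -> cmd_args:
        # Render only the linkables that belong in a filelist (objects and
        # non-link-whole archives), one path per line.
        content = infos.project_as_args("default_filelist")
        filelist = ctx.actions.write("link.filelist", content)
        # The listed artifacts must stay inputs of the link action even though
        # only the filelist's own path is rendered on the command line.
        return cmd_args("-filelist", filelist, hidden = [content])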
-def link_info_filelist(value: LinkInfo) -> list[Artifact]: - filelists = [] +LinkInfoArgumentFilter = enum( + "all", + "filelist_only", + "excluding_filelist", +) + +def link_info_to_args(value: LinkInfo, argument_type_filter: LinkInfoArgumentFilter = LinkInfoArgumentFilter("all")) -> cmd_args: + pre_flags = cmd_args() + post_flags = cmd_args() + if argument_type_filter == LinkInfoArgumentFilter("all") or argument_type_filter == LinkInfoArgumentFilter("excluding_filelist"): + pre_flags.add(value.pre_flags) + post_flags.add(value.post_flags) + + flags = cmd_args() for linkable in value.linkables: - if isinstance(linkable, ArchiveLinkable): - if linkable.linker_type == "darwin" and not linkable.link_whole: - filelists.append(linkable.archive.artifact) - elif isinstance(linkable, SharedLibLinkable): - pass - elif isinstance(linkable, ObjectsLinkable): - if linkable.linker_type == "darwin": - filelists += linkable.objects - elif isinstance(linkable, FrameworksLinkable) or isinstance(linkable, SwiftRuntimeLinkable) or isinstance(linkable, SwiftmoduleLinkable): - pass - else: - fail("Encountered unhandled linkable {}".format(str(linkable))) - return filelists + if argument_type_filter == LinkInfoArgumentFilter("all"): + append_linkable_args(flags, linkable) + elif argument_type_filter == LinkInfoArgumentFilter("filelist_only") and _is_linkable_included_in_filelist(linkable): + append_linkable_args(flags, linkable) + elif argument_type_filter == LinkInfoArgumentFilter("excluding_filelist") and not _is_linkable_included_in_filelist(linkable): + append_linkable_args(flags, linkable) + + result = cmd_args() + result.add(pre_flags) + result.add(flags) + result.add(post_flags) + return result # Encapsulate all `LinkInfo`s provided by a given rule's link style. 
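(Editorial sketch: for a single `LinkInfo` value `info`, the three `LinkInfoArgumentFilter` projections defined above partition the same linkables rather than rendering anything twice.)

    # Everything, as before:
    all_args = link_info_to_args(info)

    # Only what may live in a -filelist: objects and non-link-whole archives.
    filelist_args = link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only"))

    # The complement: pre/post flags, link-whole archives, shared libraries,
    # frameworks and Swift runtime flags.
    cmdline_args = link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("excluding_filelist"))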
# @@ -310,39 +316,39 @@ LinkInfos = record( def _link_info_default_args(infos: LinkInfos): info = infos.default - return link_info_to_args(info) - -def _link_info_default_shared_link_args(infos: LinkInfos): - info = infos.default - return link_info_to_args(info) - -def _link_info_stripped_args(infos: LinkInfos): - info = infos.stripped or infos.default - return link_info_to_args(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("all")) -def _link_info_stripped_shared_link_args(infos: LinkInfos): +def _link_info_stripped_link_args(infos: LinkInfos): info = infos.stripped or infos.default - return link_info_to_args(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("all")) def _link_info_default_filelist(infos: LinkInfos): info = infos.default - return link_info_filelist(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")) def _link_info_stripped_filelist(infos: LinkInfos): info = infos.stripped or infos.default - return link_info_filelist(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")) + +def _link_info_default_excluding_filelist_args(infos: LinkInfos): + info = infos.default + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("excluding_filelist")) + +def _link_info_stripped_excluding_filelist_args(infos: LinkInfos): + info = infos.stripped or infos.default + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("excluding_filelist")) def _link_info_has_default_filelist(children: list[bool], infos: [LinkInfos, None]) -> bool: if infos: info = infos.default - if link_info_filelist(info): + if len(link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")).inputs): return True return any(children) def _link_info_has_stripped_filelist(children: list[bool], infos: [LinkInfos, None]) -> bool: if infos: info = infos.stripped or infos.default - if link_info_filelist(info): + if len(link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")).inputs): return True return any(children) @@ -350,11 +356,11 @@ def _link_info_has_stripped_filelist(children: list[bool], infos: [LinkInfos, No LinkInfosTSet = transitive_set( args_projections = { "default": _link_info_default_args, + "default_excluding_filelist": _link_info_default_excluding_filelist_args, "default_filelist": _link_info_default_filelist, - "default_shared": _link_info_default_shared_link_args, - "stripped": _link_info_stripped_args, + "stripped": _link_info_stripped_link_args, + "stripped_excluding_filelist": _link_info_stripped_excluding_filelist_args, "stripped_filelist": _link_info_stripped_filelist, - "stripped_shared": _link_info_stripped_shared_link_args, }, reductions = { "has_default_filelist": _link_info_has_default_filelist, @@ -386,44 +392,44 @@ LinkArgs = record( LinkedObject = record( output = field([Artifact, Promise]), # The combined bitcode from this linked object and any static libraries - bitcode_bundle = field([Artifact, None], None), + bitcode_bundle = field(Artifact | None, None), # the generated linked output before running stripping(and bolt). unstripped_output = field(Artifact), # the generated linked output before running bolt, may be None if bolt is not used. - prebolt_output = field([Artifact, None], None), + prebolt_output = field(Artifact | None, None), # The LinkArgs used to produce this LinkedObject. 
This can be useful for debugging or # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). - link_args = field([LinkArgs, None], None), + link_args = field(list[LinkArgs] | None, None), # A linked object (binary/shared library) may have an associated dwp file with # its corresponding DWARF debug info. # May be None when Split DWARF is disabled or for some types of synthetic link objects. - dwp = field([Artifact, None], None), + dwp = field(Artifact | None, None), # Additional dirs or paths that contain debug info referenced by the linked # object (e.g. split dwarf files or PDB file). external_debug_info = field(ArtifactTSet, ArtifactTSet()), # This argsfile is generated in the `cxx_link` step and contains a list of arguments # passed to the linker. It is being exposed as a sub-target for debugging purposes. - linker_argsfile = field([Artifact, None], None), + linker_argsfile = field(Artifact | None, None), # The filelist is generated in the `cxx_link` step and contains a list of # object files (static libs or plain object files) passed to the linker. # It is being exposed for debugging purposes. Only present when a Darwin # linker is used. - linker_filelist = field([Artifact, None], None), + linker_filelist = field(Artifact | None, None), # The linker command as generated by `cxx_link`. Exposed for debugging purposes only. # Not present for DistLTO scenarios. linker_command = field([cmd_args, None], None), # This sub-target is only available for distributed thinLTO builds. - index_argsfile = field([Artifact, None], None), + index_argsfile = field(Artifact | None, None), # Import library for linking with DLL on Windows. # If not on Windows it's always None. - import_library = field([Artifact, None], None), + import_library = field(Artifact | None, None), # A linked object (binary/shared library) may have an associated PDB file with # its corresponding Windows debug info. # If not on Windows it's always None. - pdb = field([Artifact, None], None), + pdb = field(Artifact | None, None), # Split-debug info generated by the link. - split_debug_output = field([Artifact, None], None), + split_debug_output = field(Artifact | None, None), ) # A map of native linkable infos from transitive dependencies for each LinkStrategy. @@ -524,16 +530,27 @@ def create_merged_link_info( swift_runtime_linkables += [dep_info.swift_runtime[link_strategy] for dep_info in exported_deps] for dep_info in deps: - children.append(dep_info._infos[link_strategy]) - external_debug_info_children.append(dep_info._external_debug_info[link_strategy]) + # The inherited link infos no longer guarantee that a tset will be available for + # all link strategies. Protect against missing infos. + value = dep_info._infos.get(link_strategy) + if value: + children.append(value) + value = dep_info._external_debug_info.get(link_strategy) + if value: + external_debug_info_children.append(value) + framework_linkables.append(dep_info.frameworks[link_strategy]) swiftmodule_linkables.append(dep_info.swiftmodules[link_strategy]) swift_runtime_linkables.append(dep_info.swift_runtime[link_strategy]) # We always export link info for exported deps.
for dep_info in exported_deps: - children.append(dep_info._infos[link_strategy]) - external_debug_info_children.append(dep_info._external_debug_info[link_strategy]) + value = dep_info._infos.get(link_strategy) + if value: + children.append(value) + value = dep_info._external_debug_info.get(link_strategy) + if value: + external_debug_info_children.append(value) frameworks[link_strategy] = merge_framework_linkables(framework_linkables) swift_runtime[link_strategy] = merge_swift_runtime_linkables(swift_runtime_linkables) @@ -613,19 +630,14 @@ def get_link_info( return infos.default -def unpack_link_args(args: LinkArgs, is_shared: [bool, None] = None, link_ordering: [LinkOrdering, None] = None) -> ArgLike: +def unpack_link_args(args: LinkArgs, link_ordering: [LinkOrdering, None] = None) -> ArgLike: if args.tset != None: ordering = link_ordering.value if link_ordering else "preorder" tset = args.tset.infos - if is_shared: - if args.tset.prefer_stripped: - return tset.project_as_args("stripped_shared", ordering = ordering) - return tset.project_as_args("default_shared", ordering = ordering) - else: - if args.tset.prefer_stripped: - return tset.project_as_args("stripped", ordering = ordering) - return tset.project_as_args("default", ordering = ordering) + if args.tset.prefer_stripped: + return tset.project_as_args("stripped", ordering = ordering) + return tset.project_as_args("default", ordering = ordering) if args.infos != None: return cmd_args([link_info_to_args(info) for info in args.infos]) @@ -644,20 +656,37 @@ def unpack_link_args_filelist(args: LinkArgs) -> [ArgLike, None]: return tset.project_as_args("stripped_filelist" if stripped else "default_filelist") if args.infos != None: - filelist = flatten([link_info_filelist(info) for info in args.infos]) - if not filelist: + result_args = cmd_args() + for info in args.infos: + result_args.add(link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only"))) + + if not len(result_args.inputs): return None - # Actually create cmd_args so the API is consistent between the 2 branches. 
- args = cmd_args() - args.add(filelist) - return args + return result_args if args.flags != None: return None fail("Unpacked invalid empty link args") +def unpack_link_args_excluding_filelist(args: LinkArgs, link_ordering: [LinkOrdering, None] = None) -> ArgLike: + if args.tset != None: + ordering = link_ordering.value if link_ordering else "preorder" + + tset = args.tset.infos + if args.tset.prefer_stripped: + return tset.project_as_args("stripped_excluding_filelist", ordering = ordering) + return tset.project_as_args("default_excluding_filelist", ordering = ordering) + + if args.infos != None: + return cmd_args([link_info_to_args(info, LinkInfoArgumentFilter("excluding_filelist")) for info in args.infos]) + + if args.flags != None: + return args.flags + + fail("Unpacked invalid empty link args") + def unpack_external_debug_info(actions: AnalysisActions, args: LinkArgs) -> ArtifactTSet: if args.tset != None: if args.tset.prefer_stripped: @@ -695,7 +724,7 @@ def map_to_link_infos(links: list[LinkArgs]) -> list[LinkInfo]: append(link) continue if link.flags != None: - append(LinkInfo(pre_flags = link.flags)) + append(LinkInfo(pre_flags = [link.flags])) continue fail("Unpacked invalid empty link args") return res @@ -893,7 +922,7 @@ LinkCommandDebugOutput = record( filename = str, command = ArgLike, argsfile = Artifact, - filelist = [Artifact, None], + filelist = Artifact | None, ) # NB: Debug output is _not_ transitive over deps, so tsets are not used here. diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index ac1920f7c3..57d9eb825e 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -7,11 +7,18 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") +load("@prelude//cxx:platform.bzl", "cxx_by_platform") +load("@prelude//cxx:shared_library_interface.bzl", "SharedInterfaceInfo") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", +) +load( + "@prelude//utils:utils.bzl", + "flatten", ) load( ":link_info.bzl", @@ -19,14 +26,16 @@ load( "LinkInfo", # @unused Used as a type "LinkInfos", "LinkStrategy", - "Linkage", - "LinkedObject", "LinkerFlags", "MergedLinkInfo", "get_lib_output_style", "get_output_styles_for_linkage", _get_link_info = "get_link_info", ) +load( + ":shared_libraries.bzl", + "SharedLibraries", +) # A provider with information used to link a rule into a shared library. # Potential omnibus roots must provide this so that omnibus can link them @@ -34,6 +43,7 @@ load( LinkableRootInfo = provider( # @unsorted-dict-items fields = { + "label": provider_field(Label), "link_infos": provider_field(typing.Any, default = None), # LinkInfos "name": provider_field(typing.Any, default = None), # [str, None] "deps": provider_field(typing.Any, default = None), # ["label"] @@ -47,12 +57,14 @@ LinkableRootInfo = provider( ############################################################################### _DisallowConstruction = record() +_TargetSourceType = Artifact | str | tuple LinkableNode = record( # Attribute labels on the target. labels = field(list[str], []), # Preferred linkage for this target. preferred_linkage = field(Linkage, Linkage("any")), + default_link_strategy = field(LinkStrategy), # Linkable deps of this target. 
deps = field(list[Label], []), # Exported linkable deps of this target. @@ -62,6 +74,10 @@ # deps and their (transitive) exported deps. This helps keep link lines smaller # and produces more efficient libs (for example, DT_NEEDED stays a manageable size). exported_deps = field(list[Label], []), + + # List of both deps and exported deps. We traverse the linkable graph many times + # and preallocating this list saves RAM during analysis. + all_deps = field(list[Label], []), # Link infos for all supported lib output styles supported by this node. This should have a value # for every output_style supported by the preferred linkage. link_infos = field(dict[LibOutputStyle, LinkInfos], {}), @@ -73,7 +89,7 @@ # Shared libraries provided by this target. Used if this target is # excluded. - shared_libs = field(dict[str, LinkedObject], {}), + shared_libs = field(SharedLibraries, SharedLibraries(libraries = [])), # The soname this node would use in default link strategies. May be used by non-default # link strategies as a lib's soname. @@ -83,9 +99,17 @@ # as an asset in android apks. can_be_asset = field(bool), + # Collected target sources from the target. + srcs = field(list[_TargetSourceType]), + # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), + # Don't follow dependents on this node even if it has preferred linkage static + ignore_force_static_follows_dependents = field(bool), + + # Shared interface provider for this node. + shared_interface_info = field(SharedInterfaceInfo | None), # Only allow constructing within this file. _private = _DisallowConstruction, @@ -132,17 +156,28 @@ def _get_required_outputs_for_linkage(linkage: Linkage) -> list[LibOutputStyle]: return get_output_styles_for_linkage(linkage) +def _get_target_sources(ctx: AnalysisContext) -> list[_TargetSourceType]: + srcs = [] + if hasattr(ctx.attrs, "srcs"): + srcs.extend(ctx.attrs.srcs) + if hasattr(ctx.attrs, "platform_srcs"): + srcs.extend(flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs))) + return srcs + def create_linkable_node( ctx: AnalysisContext, default_soname: str | None, preferred_linkage: Linkage = Linkage("any"), - deps: list[Dependency] = [], - exported_deps: list[Dependency] = [], + default_link_strategy: LinkStrategy = LinkStrategy("shared"), + deps: list[Dependency | LinkableGraph] = [], + exported_deps: list[Dependency | LinkableGraph] = [], link_infos: dict[LibOutputStyle, LinkInfos] = {}, - shared_libs: dict[str, LinkedObject] = {}, + shared_libs: SharedLibraries = SharedLibraries(libraries = []), can_be_asset: bool = True, include_in_android_mergemap: bool = True, - linker_flags: [LinkerFlags, None] = None) -> LinkableNode: + linker_flags: [LinkerFlags, None] = None, + ignore_force_static_follows_dependents: bool = False, + shared_interface_info: SharedInterfaceInfo | None = None) -> LinkableNode: for output_style in _get_required_outputs_for_linkage(preferred_linkage): expect( output_style in link_infos, @@ -150,17 +185,24 @@ ) if not linker_flags: linker_flags = LinkerFlags() + deps = linkable_deps(deps) + exported_deps = linkable_deps(exported_deps) return LinkableNode( labels = ctx.attrs.labels, preferred_linkage = preferred_linkage, - deps = linkable_deps(deps), - exported_deps = linkable_deps(exported_deps), + default_link_strategy = default_link_strategy, + deps = deps, +
exported_deps = exported_deps, + all_deps = deps + exported_deps, link_infos = link_infos, shared_libs = shared_libs, can_be_asset = can_be_asset, + srcs = _get_target_sources(ctx), include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, + ignore_force_static_follows_dependents = ignore_force_static_follows_dependents, + shared_interface_info = shared_interface_info, _private = _DisallowConstruction(), ) @@ -168,9 +210,12 @@ def create_linkable_graph_node( ctx: AnalysisContext, linkable_node: [LinkableNode, None] = None, roots: dict[Label, LinkableRootInfo] = {}, - excluded: dict[Label, None] = {}) -> LinkableGraphNode: + excluded: dict[Label, None] = {}, + label: Label | None = None) -> LinkableGraphNode: + if not label: + label = ctx.label return LinkableGraphNode( - label = ctx.label, + label = label, linkable = linkable_node, roots = roots, excluded = excluded, @@ -184,7 +229,7 @@ def create_linkable_graph( deps: list[[LinkableGraph, Dependency]] = []) -> LinkableGraph: graph_deps = [] for d in deps: - if eval_type(LinkableGraph.type).matches(d): + if isinstance(d, LinkableGraph): graph_deps.append(d) else: graph = d.get(LinkableGraph) @@ -193,7 +238,7 @@ deps_labels = {x.label: True for x in graph_deps} if node and node.linkable: - for l in [node.linkable.deps, node.linkable.exported_deps]: + for l in [node.linkable.deps, node.linkable.exported_deps]: # buildifier: disable=confusing-name for d in l: if not d in deps_labels: fail("LinkableNode had {} in its deps, but that label is missing from the node's linkable graph children (`{}`)".format(d, ", ".join(deps_labels))) @@ -205,8 +250,11 @@ } if node: kwargs["value"] = node + label = node.label + else: + label = ctx.label return LinkableGraph( - label = ctx.label, + label = label, nodes = ctx.actions.tset(LinkableGraphTSet, **kwargs), ) @@ -221,13 +269,16 @@ def get_linkable_graph_node_map_func(graph: LinkableGraph): return get_linkable_graph_node_map -def linkable_deps(deps: list[Dependency]) -> list[Label]: +def linkable_deps(deps: list[Dependency | LinkableGraph]) -> list[Label]: labels = [] for dep in deps: - dep_info = linkable_graph(dep) - if dep_info != None: - labels.append(dep_info.label) + if isinstance(dep, LinkableGraph): + labels.append(dep.label) + else: + dep_info = linkable_graph(dep) + if dep_info != None: + labels.append(dep_info.label) return labels @@ -291,8 +342,8 @@ def get_transitive_deps( """ def find_transitive_deps(node: Label): - return link_infos[node].deps + link_infos[node].exported_deps + return link_infos[node].all_deps - all_deps = breadth_first_traversal_by(link_infos, roots, find_transitive_deps) + all_deps = depth_first_traversal_by(link_infos, roots, find_transitive_deps) return all_deps diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index 7d95e53452..f7e8132843 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -12,6 +12,20 @@ load( "LinkedObject", # @unused Used as a type ) load("@prelude//linking:strip.bzl", "strip_object") +load("@prelude//utils:expect.bzl", "expect") + +Soname = record( + # Return the SONAME if it's a string, otherwise None. + as_str = field(typing.Callable), + # Return the SONAME as a string, throwing an error if it is actually an + # artifact. + ensure_str = field(typing.Callable), + # Return `True` if the SONAME is represented as a string.
+ is_str = field(typing.Callable), + # The actual SONAME can be represented by a static string, or the + # contents of a file generated at build time. + _soname = field(str | Artifact), +) SharedLibrary = record( lib = field(LinkedObject), @@ -19,23 +33,47 @@ # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). # TODO(cjhopman): This is currently always available. - link_args = field(list[LinkArgs] | None), + link_args = field(list[LinkArgs] | None, None), # The sonames of the shared libraries that this links against. # TODO(cjhopman): This is currently always available. - shlib_deps = field(list[str] | None), - stripped_lib = field([Artifact, None]), - can_be_asset = field(bool), - for_primary_apk = field(bool), - soname = field(str), + shlib_deps = field(list[str] | None, None), + stripped_lib = field(Artifact | None, None), + can_be_asset = field(bool, False), + for_primary_apk = field(bool, False), + soname = field(Soname), label = field(Label), ) +def _ensure_str(soname: str | Artifact) -> str: + expect(type(soname) == type(""), "SONAME is not a `str`: {}", soname) + return soname + +def to_soname(soname: str | Artifact | Soname) -> Soname: + if isinstance(soname, Soname): + return soname + return Soname( + as_str = lambda: soname if type(soname) == type("") else None, + ensure_str = lambda: _ensure_str(soname), + is_str = lambda: type(soname) == type(""), + _soname = soname, + ) + +def create_shlib( + # The soname can either be a string or an artifact with the soname in + # text form. + soname: str | Artifact | Soname, + **kwargs): + return SharedLibrary( + soname = to_soname(soname), + **kwargs + ) + SharedLibraries = record( # A mapping of shared library SONAME (e.g. `libfoo.so.2`) to the artifact. # Since the SONAME is what the dynamic loader uses to uniquely identify # libraries, using this as the key allows easily detecting conflicts from # dependencies. - libraries = field(dict[str, SharedLibrary]), + libraries = field(list[SharedLibrary]), ) # T-set of SharedLibraries @@ -53,6 +91,27 @@ def get_strip_non_global_flags(cxx_toolchain: CxxToolchainInfo) -> list: return ["--strip-unneeded"] +def create_shlib_from_ctx( + ctx: AnalysisContext, + soname: str | Artifact | Soname, + lib: LinkedObject): + cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) + return create_shlib( + lib = lib, + stripped_lib = strip_object( + ctx, + cxx_toolchain[CxxToolchainInfo], + lib.output, + cmd_args(get_strip_non_global_flags(cxx_toolchain[CxxToolchainInfo])), + ) if cxx_toolchain != None else None, + link_args = lib.link_args, + shlib_deps = None, # TODO(cjhopman): we need this figured out. + can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, + for_primary_apk = getattr(ctx.attrs, "used_by_wrap_script", False), + label = ctx.label, + soname = soname, + ) + def create_shared_libraries( ctx: AnalysisContext, libraries: dict[str, LinkedObject]) -> SharedLibraries: """ Take a mapping of dest -> src and turn it into a mapping that will be passed around in providers. Used for both srcs, and resources.
""" - cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) return SharedLibraries( - libraries = {name: SharedLibrary( - lib = shlib, - stripped_lib = strip_object( - ctx, - cxx_toolchain[CxxToolchainInfo], - shlib.output, - cmd_args(get_strip_non_global_flags(cxx_toolchain[CxxToolchainInfo])), - ) if cxx_toolchain != None else None, - link_args = shlib.link_args, - shlib_deps = None, # TODO(cjhopman): we need this figured out. - can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, - for_primary_apk = getattr(ctx.attrs, "used_by_wrap_script", False), - label = ctx.label, - soname = name, - ) for (name, shlib) in libraries.items()}, + libraries = [ + create_shlib_from_ctx(ctx = ctx, soname = name, lib = shlib) + for (name, shlib) in libraries.items() + ], ) -# We do a lot of merging library maps, so don't use O(n) type annotations -def _merge_lib_map( - # dict[str, SharedLibrary] - dest_mapping, - # [dict[str, SharedLibrary] - mapping_to_merge, - filter_func) -> None: - """ - Merges a mapping_to_merge into `dest_mapping`. Fails if different libraries - map to the same name. - """ - for (name, src) in mapping_to_merge.items(): - if filter_func != None and not filter_func(name, src): - continue - existing = dest_mapping.get(name) - if existing != None and existing.lib != src.lib: - error = ( - "Duplicate library {}! Conflicting mappings:\n" + - "{} from {}\n" + - "{} from {}" - ) - fail( - error.format( - name, - existing.lib, - existing.label, - src.lib, - src.label, - ), - ) - dest_mapping[name] = src - # Merge multiple SharedLibraryInfo. The value in `node` represents a set of # SharedLibraries that is provided by the target being analyzed. It's optional # because that might not always exist, e.g. a Python library can pass through @@ -131,11 +146,156 @@ def merge_shared_libraries( set = actions.tset(SharedLibrariesTSet, **kwargs) if kwargs else None return SharedLibraryInfo(set = set) -def traverse_shared_library_info( - info: SharedLibraryInfo, - filter_func = None): # -> dict[str, SharedLibrary]: - libraries = {} +def traverse_shared_library_info(info: SharedLibraryInfo): # -> list[SharedLibrary]: + libraries = [] if info.set: for libs in info.set.traverse(): - _merge_lib_map(libraries, libs.libraries, filter_func) + libraries.extend(libs.libraries) return libraries + +# Helper to merge shlibs, throwing an error if more than one have the same SONAME. +def _merge_shlibs( + shared_libs: list[SharedLibrary], + resolve_soname: typing.Callable) -> dict[str, SharedLibrary]: + merged = {} + for shlib in shared_libs: + soname = resolve_soname(shlib.soname) + existing = merged.get(soname) + if existing != None and existing.lib != shlib.lib: + error = ( + "Duplicate library {}! Conflicting mappings:\n" + + "{} from {}\n" + + "{} from {}" + ) + fail( + error.format( + shlib.soname, + existing.lib, + existing.label, + shlib.lib, + shlib.label, + ), + ) + merged[soname] = shlib + return merged + +def with_unique_str_sonames( + shared_libs: list[SharedLibrary], + skip_dynamic: bool = False) -> dict[str, SharedLibrary]: + """ + Convert a list of `SharedLibrary`s to a map of unique SONAMEs to the + corresponding `SharedLibrary`. + + Will fail if the same SONAME maps to multiple `SharedLibrary`s. 
+ """ + return _merge_shlibs( + shared_libs = [ + shlib + for shlib in shared_libs + if shlib.soname.is_str() or not skip_dynamic + ], + resolve_soname = lambda s: s.ensure_str(), + ) + +def gen_shared_libs_action( + actions: AnalysisActions, + out: str, + shared_libs: list[SharedLibrary], + gen_action: typing.Callable, + dir = False): + """ + Produce an action by first resolving all SONAME of the given shlibs and + enforcing that each SONAME is unique. + + The provided `gen_action` callable is called with a map of unique SONAMEs + to the corresponding shlibs. + """ + + output = actions.declare_output(out, dir = dir) + + def func(actions, artifacts, output): + def resolve_soname(soname): + if soname.is_str(): + return soname._soname + else: + return artifacts[soname._soname].read_string().strip() + + gen_action( + actions, + output, + _merge_shlibs( + shared_libs = shared_libs, + resolve_soname = resolve_soname, + ), + ) + + dynamic_sonames = [shlib.soname._soname for shlib in shared_libs if not shlib.soname.is_str()] + if dynamic_sonames: + actions.dynamic_output( + dynamic = [shlib.soname._soname for shlib in shared_libs if not shlib.soname.is_str()], + inputs = [], + outputs = [output.as_output()], + f = lambda ctx, artifacts, outputs: func(ctx.actions, artifacts, outputs[output]), + ) + else: + func(actions, {}, output) + + return output + +def zip_shlibs( + merged: dict[str, SharedLibrary], + vals: list[(SharedLibrary, typing.Any)]) -> list[(str, SharedLibrary, typing.Any)]: + """ + Helper to "zip" together the soname->shlib map to a list with associated + shared lib values. + + This is useful for callers of `gen_shared_libs_action` to combine the merged + shared libs, in dedup'd dict form, with some additional data. + """ + + zipped = [] + + # Walk through the shlib and val tuples + idx = 0 + for soname, shlib in merged.items(): + for idx in range(idx, len(vals)): + if vals[idx][0] == shlib: + break + zipped.append((soname, shlib, vals[idx][1])) + + return zipped + +def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): + """ + Merged shared libs into a symlink tree mapping the library's SONAME to + it's artifact. + """ + return gen_shared_libs_action( + actions = actions, + out = out, + shared_libs = shared_libs, + gen_action = lambda actions, output, shared_libs: actions.symlinked_dir( + output, + {name: shlib.lib.output for name, shlib in shared_libs.items()}, + ), + dir = True, + ) + +def extract_soname_from_shlib( + actions: AnalysisActions, + name: str, + shared_lib: Artifact) -> Artifact: + """ + Extract the SONAME from a shared library into a file. 
+ """ + soname = actions.declare_output(name) + cmd = cmd_args( + "sh", + "-c", + '''set -euo pipefail; objdump -p "$1" | grep SONAME | awk '{print $2}' > "$2"''', + "", + shared_lib, + soname.as_output(), + ) + actions.run(cmd, category = "extract_soname", identifier = shared_lib.short_path) + return soname diff --git a/prelude/linking/strip.bzl b/prelude/linking/strip.bzl index 6db2524134..baf413680f 100644 --- a/prelude/linking/strip.bzl +++ b/prelude/linking/strip.bzl @@ -80,10 +80,13 @@ def strip_object(ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, unstripp stripped_lib = ctx.actions.declare_output("stripped/{}".format(output_path)) # TODO(T109996375) support configuring the flags used for stripping - cmd = cmd_args() - cmd.add(strip) - cmd.add(strip_flags) - cmd.add([unstripped, "-o", stripped_lib.as_output()]) + cmd = cmd_args( + strip, + strip_flags, + unstripped, + "-o", + stripped_lib.as_output(), + ) effective_category_suffix = category_suffix if category_suffix else "shared_lib" category = "strip_{}".format(effective_category_suffix) @@ -106,7 +109,7 @@ def strip_debug_with_gnu_debuglink(ctx: AnalysisContext, name: str, obj: Artifac ctx.actions.run(cmd, category = "extract_debuginfo", identifier = name) binary_output = ctx.actions.declare_output("__stripped_objects__", name) - cmd = cmd_args([objcopy, "--strip-debug", "--add-gnu-debuglink", debuginfo_output, obj, binary_output.as_output()]) + cmd = cmd_args([objcopy, "--strip-debug", "--keep-file-symbols", "--add-gnu-debuglink", debuginfo_output, obj, binary_output.as_output()]) ctx.actions.run(cmd, category = "strip_debug", identifier = name) return binary_output, debuginfo_output diff --git a/prelude/linking/types.bzl b/prelude/linking/types.bzl new file mode 100644 index 0000000000..486318bedb --- /dev/null +++ b/prelude/linking/types.bzl @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Ways a library can request to be linked (e.g. usually specific via a rule +# param like `preferred_linkage`). The actual link style used for a library is +# usually determined by a combination of this and the link style being exported +# via a provider. +Linkage = enum( + "any", + "static", + "shared", +) diff --git a/prelude/matlab/matlab.bzl b/prelude/matlab/matlab.bzl new file mode 100644 index 0000000000..f1fedd9d0e --- /dev/null +++ b/prelude/matlab/matlab.bzl @@ -0,0 +1,20 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load(":matlab_program.bzl", "matlab_program_impl") +load(":matlab_toolchain.bzl", "matlab_toolchain") + +implemented_rules = { + "matlab_program": matlab_program_impl, +} + +extra_attributes = { + "matlab_program": { + "main": attrs.source(), + "_matlab_toolchain": matlab_toolchain(), + }, +} diff --git a/prelude/builtin.bzl b/prelude/matlab/matlab_info.bzl similarity index 71% rename from prelude/builtin.bzl rename to prelude/matlab/matlab_info.bzl index ef1dd61fc7..3e15fc1621 100644 --- a/prelude/builtin.bzl +++ b/prelude/matlab/matlab_info.bzl @@ -5,8 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# Definitions we have builtin to Buck. -# Useful for running the Starlark checker on the files. - -def DefaultInfo(): - pass +MatlabToolchainInfo = provider(fields = { + "matlab_exe": RunInfo, +}) diff --git a/prelude/matlab/matlab_program.bzl b/prelude/matlab/matlab_program.bzl new file mode 100644 index 0000000000..c78cb1be37 --- /dev/null +++ b/prelude/matlab/matlab_program.bzl @@ -0,0 +1,23 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":matlab_info.bzl", "MatlabToolchainInfo") + +def matlab_program_impl(ctx: AnalysisContext) -> list[Provider]: + toolchain = ctx.attrs._matlab_toolchain[MatlabToolchainInfo] + + cmd = cmd_args(toolchain.matlab_exe) + cmd.add( + "-batch", + cmd_args( + ctx.attrs.main.basename.removesuffix(".m"), + quote = "shell", + ), + ) + cmd.add("-sd", cmd_args(ctx.attrs.main, parent = 1)) + + return [DefaultInfo(default_output = None, other_outputs = [cmd]), RunInfo(cmd)] diff --git a/prelude/matlab/matlab_toolchain.bzl b/prelude/matlab/matlab_toolchain.bzl new file mode 100644 index 0000000000..23456cf56d --- /dev/null +++ b/prelude/matlab/matlab_toolchain.bzl @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":matlab_info.bzl", "MatlabToolchainInfo") + +def matlab_toolchain(): + return attrs.default_only( + attrs.toolchain_dep( + default = "toolchains//:matlab", + providers = [MatlabToolchainInfo], + ), + ) diff --git a/prelude/native.bzl b/prelude/native.bzl index 42e730ee16..ffc0167532 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -12,7 +12,7 @@ # **all** interpreted files. 
load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS", "CPU_FILTER_FOR_DEFAULT_PLATFORM") -load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_bundle_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl", "apple_test_macro_impl") +load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_bundle_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl", "apple_test_macro_impl", "apple_universal_executable_macro_impl", "apple_xcuitest_macro_impl") load("@prelude//apple/swift:swift_toolchain_macro_layer.bzl", "swift_toolchain_macro_impl") load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_inheriting_target_platform") load("@prelude//cxx:cxx_toolchain_macro_layer.bzl", "cxx_toolchain_macro_impl") @@ -182,6 +182,14 @@ def _get_valid_cpu_filters(cpu_filters: [list[str], None]) -> list[str]: return [cpu_filter for cpu_filter in cpu_filters if cpu_filter in cpu_abis] +def _android_aar_macro_stub( + cpu_filters = None, + **kwargs): + __rules__["android_aar"]( + cpu_filters = _get_valid_cpu_filters(cpu_filters), + **kwargs + ) + def _android_binary_macro_stub( allow_r_dot_java_in_secondary_dex = False, cpu_filters = None, @@ -203,9 +211,17 @@ def _android_binary_macro_stub( def _android_instrumentation_apk_macro_stub( cpu_filters = None, + primary_dex_patterns = [], **kwargs): + primary_dex_patterns = primary_dex_patterns + [ + "/R^", + "/R$", + # Pin this to the primary for apps with no primary dex classes. + "^com/facebook/buck_generated/AppWithoutResourcesStub^", + ] __rules__["android_instrumentation_apk"]( cpu_filters = _get_valid_cpu_filters(cpu_filters), + primary_dex_patterns = primary_dex_patterns, **kwargs ) @@ -326,6 +342,12 @@ def _apple_test_macro_stub(**kwargs): **kwargs ) +def _apple_xcuitest_macro_stub(**kwargs): + apple_xcuitest_macro_impl( + apple_xcuitest_rule = __rules__["apple_xcuitest"], + **kwargs + ) + def _apple_binary_macro_stub(**kwargs): apple_binary_macro_impl( apple_binary_rule = __rules__["apple_binary"], @@ -345,6 +367,12 @@ def _apple_package_macro_stub(**kwargs): **kwargs ) +def _apple_universal_executable_macro_stub(**kwargs): + apple_universal_executable_macro_impl( + apple_universal_executable_rule = __rules__["apple_universal_executable"], + **kwargs + ) + def _swift_toolchain_macro_stub(**kwargs): rule = __rules__["swift_toolchain"] @@ -353,31 +381,23 @@ def _swift_toolchain_macro_stub(**kwargs): **kwargs ) -def _cxx_toolchain_macro_stub(inherit_target_platform = False, **kwargs): - if inherit_target_platform: - rule = cxx_toolchain_inheriting_target_platform - if is_full_meta_repo(): - cache_links = kwargs.get("cache_links") - kwargs["cache_links"] = select({ - "DEFAULT": cache_links, - "ovr_config//build_mode:fbcode-build-info-mode-disable": True, - "ovr_config//build_mode:fbcode-build-info-mode-full": False, - "ovr_config//build_mode:fbcode-build-info-mode-stable": True, - }) - else: - rule = __rules__["cxx_toolchain"] +def _cxx_toolchain_macro_stub(**kwargs): + if is_full_meta_repo(): + cache_links = kwargs.get("cache_links") + kwargs["cache_links"] = select({ + "DEFAULT": cache_links, + "ovr_config//build_mode:fbcode-build-info-mode-disable": True, + "ovr_config//build_mode:fbcode-build-info-mode-full": False, + "ovr_config//build_mode:fbcode-build-info-mode-stable": True, + }) cxx_toolchain_macro_impl( - cxx_toolchain_rule = rule, + cxx_toolchain_rule = cxx_toolchain_inheriting_target_platform, **kwargs ) -def _cxx_toolchain_override_macro_stub(inherit_target_platform = 
False, **kwargs): - if inherit_target_platform: - rule = _user_rules["cxx_toolchain_override_inheriting_target_platform"] - else: - rule = _user_rules["cxx_toolchain_override"] +def _cxx_toolchain_override_macro_stub(**kwargs): cxx_toolchain_macro_impl( - cxx_toolchain_rule = rule, + cxx_toolchain_rule = _user_rules["cxx_toolchain_override"], **kwargs ) @@ -412,6 +432,7 @@ def _rust_test_macro_stub(**kwargs): # Probably good if they were defined to take in the base rule that # they are wrapping and return the wrapped one. __extra_rules__ = { + "android_aar": _android_aar_macro_stub, "android_binary": _android_binary_macro_stub, "android_instrumentation_apk": _android_instrumentation_apk_macro_stub, "apple_binary": _apple_binary_macro_stub, @@ -419,7 +440,9 @@ __extra_rules__ = { "apple_library": _apple_library_macro_stub, "apple_package": _apple_package_macro_stub, "apple_test": _apple_test_macro_stub, + "apple_universal_executable": _apple_universal_executable_macro_stub, "apple_watchos_bundle": _apple_watchos_bundle_macro_stub, + "apple_xcuitest": _apple_xcuitest_macro_stub, "configured_alias": _configured_alias_macro_stub, "cxx_toolchain": _cxx_toolchain_macro_stub, "cxx_toolchain_override": _cxx_toolchain_override_macro_stub, @@ -436,7 +459,7 @@ __extra_rules__ = { "versioned_alias": _versioned_alias_macro_stub, } -__shimmed_native__ = __struct_to_dict(__internal__) +__shimmed_native__ = __struct_to_dict(__buck2_builtins__) __shimmed_native__.update(__rules__) __shimmed_native__.update(_user_rules) diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index abd652b83b..f138cc8888 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -92,7 +92,7 @@ load( "@prelude//python:python.bzl", "PythonLibraryInfo", ) -load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal", "post_order_traversal") +load("@prelude//utils:graph_utils.bzl", "depth_first_traversal", "post_order_traversal") load("@prelude//utils:platform_flavors_util.bzl", "by_platform") load("@prelude//utils:utils.bzl", "filter_and_map_idx", "flatten") load(":makefile.bzl", "parse_makefile") @@ -167,7 +167,7 @@ def _mk_script(ctx: AnalysisContext, file: str, args: list[typing.Any], env: dic is_executable = True, allow_args = True, ) - return cmd_args(script).hidden(args, env.values()) + return cmd_args(script, hidden = args + env.values()) # An environment in which a custom `bin` is at the head of `$PATH`. def _mk_env(ctx: AnalysisContext) -> dict[str, cmd_args]: @@ -321,8 +321,10 @@ def _preprocess(ctx: AnalysisContext, srcs: list[Artifact], build_mode: BuildMod parser_sig = ctx.actions.declare_output(name + ".mli") result.extend((parser_sig, parser)) - cmd = cmd_args([menhir, "--fixed-exception", "-b", cmd_args(prefix).ignore_artifacts(), src]) - cmd.hidden(parser.as_output(), parser_sig.as_output()) + cmd = cmd_args( + [menhir, "--fixed-exception", "-b", cmd_args(prefix, ignore_artifacts = True), src], + hidden = [parser.as_output(), parser_sig.as_output()], + ) ctx.actions.run(cmd, category = "ocaml_yacc_" + build_mode.value, identifier = src.short_path) elif ext == ".mll": @@ -353,7 +355,7 @@ def _depends(ctx: AnalysisContext, srcs: list[Artifact], build_mode: BuildMode) dep_cmdline.add([cmd_args(f, format = "\"{}\"") for f in ctx.attrs.ocamldep_flags]) # These -I's are for ocamldep. 
- dep_cmdline.add(cmd_args([cmd_args(src).parent() for src in srcs], format = "-I {}")) + dep_cmdline.add(cmd_args([cmd_args(src, parent = 1) for src in srcs], format = "-I {}")) dep_cmdline.add(srcs) dep_script_name = "ocamldep_" + build_mode.value + ".sh" dep_sh, _ = ctx.actions.write( @@ -362,7 +364,7 @@ def _depends(ctx: AnalysisContext, srcs: list[Artifact], build_mode: BuildMode) is_executable = True, allow_args = True, ) - ctx.actions.run(cmd_args(dep_sh).hidden(dep_output.as_output(), dep_cmdline), category = "ocamldep_" + build_mode.value) + ctx.actions.run(cmd_args(dep_sh, hidden = [dep_output.as_output(), dep_cmdline]), category = "ocamldep_" + build_mode.value) return dep_output # Compile all the context's sources. If bytecode compiling, 'cmxs' & 'objs' will @@ -500,7 +502,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> # the dependency of 'src' on other files in 'srcs'. depends_include_paths = [] seen_dirs = {} - for d in breadth_first_traversal(makefile2, makefile2.get(src, [])): + for d in depth_first_traversal(makefile2, makefile2.get(src, [])): # 'src' depends on 'd' (e.g. src='quux.ml' depends on # d='quux.mli'). # @@ -516,7 +518,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> if i != None: p = paths.dirname(i.short_path) if not p in seen_dirs: - depends_include_paths.append(cmd_args(i).parent()) + depends_include_paths.append(cmd_args(i, parent = 1)) seen_dirs[p] = None # *All* the include paths needed to compile 'src'. @@ -528,7 +530,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> cmd.add(src, "-c", "-o", mk_out(cmi)) if build_mode.value == "expand": cmd.add("-dsource") - cmd.hidden(mk_out(cmti), depends_produce) + cmd.add(cmd_args(hidden = [mk_out(cmti), depends_produce])) if build_mode.value == "expand": sh = cmd_args(["/bin/sh", "-c", '"$@" 2> "$preprocessed_source_file"', "--", cmd]) @@ -540,22 +542,22 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> elif ext == ".ml": (obj, cmo, cmx, cmt, cmi, ppml) = produces[src] cmd = _compile_cmd(ctx, compiler, build_mode, cc, all_include_paths) - cmd.hidden(depends_produce) + cmd.add(cmd_args(hidden = depends_produce)) if cmo != None: cmd.add(src, "-c", "-o", mk_out(cmo)) if cmx != None: cmd.add(src, "-c", "-o", mk_out(cmx)) - cmd.hidden(mk_out(cmt)) + cmd.add(cmd_args(hidden = mk_out(cmt))) if build_mode.value == "expand": cmd.add("-dsource") if obj != None: - cmd.hidden(mk_out(obj)) + cmd.add(cmd_args(hidden = mk_out(obj))) if cmi != None: cmd.add("-intf-suffix", ",nomli,") # ignore any .mlis that aren't explicit dependencies - cmd.hidden(mk_out(cmi)) + cmd.add(cmd_args(hidden = mk_out(cmi))) else: # An explicit '.mli' for this '.ml' is a dependency. - cmd.hidden(mlis[paths.replace_extension(src.short_path, ".mli")]) + cmd.add(cmd_args(hidden = mlis[paths.replace_extension(src.short_path, ".mli")])) if build_mode.value == "expand": sh = cmd_args(["/bin/sh", "-c", '"$@" 2> "$preprocessed_source_file"', "--", cmd]) @@ -570,7 +572,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> # `ocaml_object` breaks for `-flto=...` so ensure `-fno-lto` prevails here. cmd.add(src, "-c", "-ccopt", "-fno-lto", "-ccopt", cmd_args(mk_out(stb), format = "-o \"{}\"")) - cmd.hidden(headers) # Any .h files given are dependencies. + cmd.add(cmd_args(hidden = headers)) # Any .h files given are dependencies. 
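
The recurring `cmd_args` change in this file (and across the prelude) replaces post-construction mutators with constructor keywords; a minimal before/after sketch, using `_mk_script` above as the example:

    # Before: mutate after construction
    cmd_args(script).hidden(args, env.values())

    # After: declare hidden inputs up front
    cmd_args(script, hidden = args + env.values())

The same pattern covers `.parent()` becoming `parent = 1` and `.ignore_artifacts()` becoming `ignore_artifacts = True` throughout this diff.
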
ctx.actions.run(cmd, category = "ocaml_compile_c", identifier = src.short_path) elif ext == ".h": @@ -582,7 +584,12 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> if outputs == []: ctx.actions.write(cmxs_order, "") else: - ctx.actions.dynamic_output(dynamic = [depends_output], inputs = todo_inputs, outputs = outputs + [cmxs_order], f = f) + ctx.actions.dynamic_output( + dynamic = [depends_output], + inputs = todo_inputs, + outputs = [o.as_output() for o in outputs + [cmxs_order]], + f = f, + ) return CompileResultInfo(cmxs_order = cmxs_order, stbs = stbs, objs = objs, cmis = cmis, cmos = cmos, cmxs = cmxs, cmts = cmts, cmtis = cmtis, ppmlis = ppmlis, ppmls = ppmls) @@ -594,15 +601,17 @@ def _include_paths(cmis: list[Artifact], cmos: list[Artifact]) -> cmd_args: for f in cmis: p = paths.dirname(f.short_path) if not p in seen_dirs: - include_paths.append(cmd_args(f).parent()) + include_paths.append(cmd_args(f, parent = 1)) seen_dirs[p] = None for f in cmos: p = paths.dirname(f.short_path) if not p in seen_dirs: - include_paths.append(cmd_args(f).parent()) + include_paths.append(cmd_args(f, parent = 1)) seen_dirs[p] = None - include_paths = cmd_args(include_paths) - include_paths.hidden(cmis + cmos) + include_paths = cmd_args( + include_paths, + hidden = cmis + cmos, + ) return include_paths def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: @@ -626,7 +635,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: cmd_nat.add("-o", cmxa.as_output()) if len([s for s in ctx.attrs.srcs if s.extension == ".ml"]) != 0: native_c_lib = ctx.actions.declare_output("lib" + ctx.attrs.name + ".a") - cmd_nat.hidden(native_c_lib.as_output()) + cmd_nat.add(cmd_args(hidden = native_c_lib.as_output())) native_c_libs = [native_c_lib] else: native_c_libs = [] @@ -637,7 +646,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and so are hidden dependencies of # the archive step. - cmd_nat.hidden(cmxs, cmis_nat, objs, cmts_nat, cmtis_nat) + cmd_nat.add(cmd_args(hidden = [cmxs, cmis_nat, objs, cmts_nat, cmtis_nat])) ctx.actions.run(cmd_nat, category = "ocaml_archive_native") cmxs_order, stbs_byt, _objs, cmis_byt, cmos, _cmxs, cmts_byt, cmtis_byt, _ppmlis, _ppmls = _compile_result_to_tuple(_compile(ctx, ocamlc, BuildMode("bytecode"))) @@ -652,7 +661,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and so are hidden dependencies of # the archive step. 
- cmd_byt.hidden(cmos, cmis_byt, cmts_byt, cmtis_byt) + cmd_byt.add(cmd_args(hidden = [cmos, cmis_byt, cmts_byt, cmtis_byt])) ctx.actions.run(cmd_byt, category = "ocaml_archive_bytecode") infos = _attr_deps_ocaml_link_infos(ctx) @@ -689,7 +698,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( - default_output = cmxa, + default_output = cmts_nat[0] if cmts_nat else None, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] @@ -722,12 +731,13 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: ocaml_toolchain = ctx.attrs._ocaml_toolchain[OCamlToolchainInfo] + ocaml_toolchain_runtime_deps = ocaml_toolchain.runtime_dep_link_extras env = _mk_env(ctx) ocamlopt = _mk_ocaml_compiler(ctx, env, BuildMode("native")) ocamlc = _mk_ocaml_compiler(ctx, env, BuildMode("bytecode")) - dep_link_infos = _attr_deps_merged_link_infos(ctx) + filter(None, [ocaml_toolchain.libc]) + dep_link_infos = _attr_deps_merged_link_infos(ctx) + filter(None, [ocaml_toolchain.libc]) + [d.get(MergedLinkInfo) for d in ocaml_toolchain_runtime_deps] cxx_toolchain = get_cxx_toolchain_info(ctx) link_args_output = make_link_args( ctx.actions, @@ -755,8 +765,10 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and are therefore hidden # dependencies of the link step. - cmd_nat.hidden(cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden) + cmd_nat.add(cmd_args(hidden = [cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden])) binary_nat = ctx.actions.declare_output(ctx.attrs.name + ".opt") + + cmd_nat.add([cmd_args(["-cclib", f]) for f in ocaml_toolchain.runtime_dep_link_flags]) cmd_nat.add("-cclib", "-lpthread") cmd_nat.add("-o", binary_nat.as_output()) local_only = link_cxx_binary_locally(ctx) @@ -767,9 +779,10 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and are therefore hidden # dependencies of the link step. 
- cmd_byt.hidden(cmos, cmis_byt, cmts_byt, cmtis_byt, link_args_output.hidden) + cmd_byt.add(cmd_args(hidden = [cmos, cmis_byt, cmts_byt, cmtis_byt, link_args_output.hidden])) binary_byt = ctx.actions.declare_output(ctx.attrs.name) cmd_byt.add("-custom") + cmd_byt.add([cmd_args(["-cclib", f]) for f in ocaml_toolchain.runtime_dep_link_flags]) cmd_byt.add("-cclib", "-lpthread") cmd_byt.add("-o", binary_byt.as_output()) local_only = link_cxx_binary_locally(ctx) @@ -784,7 +797,7 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( - default_output = binary_nat, + default_output = cmts_nat[0] if cmts_nat else None, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] @@ -836,10 +849,10 @@ def ocaml_object_impl(ctx: AnalysisContext) -> list[Provider]: for lib in merge_ocaml_link_infos(_attr_deps_ocaml_link_infos(ctx)).info: cmd.add(lib.cmxas, lib.c_libs, lib.native_c_libs, lib.stbs_nat) - cmd.hidden(lib.cmxs, lib.cmis_nat, lib.cmts_nat) + cmd.add(cmd_args(hidden = [lib.cmxs, lib.cmis_nat, lib.cmts_nat])) cmd.add(stbs, "-args", cmxs_order) - cmd.hidden(cmxs, cmis, cmts, objs, cmtis, link_args_output.hidden) + cmd.add(cmd_args(hidden = [cmxs, cmis, cmts, objs, cmtis, link_args_output.hidden])) obj = ctx.actions.declare_output(ctx.attrs.name + ".o") cmd.add("-output-complete-obj") @@ -876,7 +889,7 @@ def ocaml_object_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( - default_output = obj, + default_output = cmts[0] if cmts else None, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] @@ -942,7 +955,7 @@ def ocaml_shared_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and are therefore hidden # dependencies of the link step. - cmd_nat.hidden(cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden) + cmd_nat.add(cmd_args(hidden = [cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden])) binary_nat = ctx.actions.declare_output(ctx.attrs.name + ".cmxs") cmd_nat.add("-shared") cmd_nat.add("-o", binary_nat.as_output()) @@ -958,7 +971,7 @@ def ocaml_shared_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( - default_output = binary_nat, + default_output = cmts_nat[0] if cmts_nat else None, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] diff --git a/prelude/os/BUCK.v2 b/prelude/os/BUCK.v2 index 816fd17647..9272cfddf9 100644 --- a/prelude/os/BUCK.v2 +++ b/prelude/os/BUCK.v2 @@ -1,3 +1,11 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + # The short list of ubiquitous, mainstream operating systems: config_setting( @@ -34,12 +42,12 @@ config_setting( visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "maybe_building_android_binary", visibility = ["prelude//..."], ) -native.constraint_value( +prelude.constraint_value( name = "building_android_binary", constraint_setting = ":maybe_building_android_binary", visibility = ["prelude//..."], @@ -114,6 +122,14 @@ config_setting( visibility = ["PUBLIC"], ) +config_setting( + name = "linux-sgx", + constraint_values = [ + "//os/constraints:linux", + ], + visibility = ["PUBLIC"], +) + # For platforms with no OS, like microcontrollers. 
config_setting( name = "none", diff --git a/prelude/os/constraints/BUCK.v2 b/prelude/os/constraints/BUCK.v2 index cdb63a7a8b..0426226eb0 100644 --- a/prelude/os/constraints/BUCK.v2 +++ b/prelude/os/constraints/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + # Used by open source projects to support `prelude//` constraint_setting( diff --git a/prelude/os_lookup/targets/BUCK.v2 b/prelude/os_lookup/targets/BUCK.v2 index 9919027f6a..668ce76c19 100644 --- a/prelude/os_lookup/targets/BUCK.v2 +++ b/prelude/os_lookup/targets/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load("//os_lookup:defs.bzl", "os_lookup") +oncall("build_infra") + +source_listing() + os_lookup( name = "os_lookup", cpu = select({ diff --git a/prelude/platforms/BUCK b/prelude/platforms/BUCK index 0e2de31a23..532a9d144d 100644 --- a/prelude/platforms/BUCK +++ b/prelude/platforms/BUCK @@ -2,6 +2,12 @@ # This file exports a sub-set of the definitions from TARGETS.v2 for backwards-compatibility with buck1. # NOTE: These have no effect in BUCK1 and are only provided so imports can resolve. +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + config_setting( name = "runs_remote", visibility = ["PUBLIC"], diff --git a/prelude/platforms/BUCK.v2 b/prelude/platforms/BUCK.v2 index 6e45ed1d2d..30bb6560d5 100644 --- a/prelude/platforms/BUCK.v2 +++ b/prelude/platforms/BUCK.v2 @@ -1,7 +1,12 @@ # Used by open source projects to provide a simple platform setting +load("@prelude//utils:source_listing.bzl", "source_listing") load(":defs.bzl", "execution_platform", "host_configuration") +oncall("build_infra") + +source_listing() + prelude = native execution_platform( @@ -9,6 +14,7 @@ execution_platform( cpu_configuration = host_configuration.cpu, os_configuration = host_configuration.os, use_windows_path_separators = host_info().os.is_windows, + visibility = ["PUBLIC"], ) prelude.constraint_setting( @@ -67,8 +73,8 @@ prelude.constraint_value( # execution configuration, but that's not implemented yet. export_file( name = "fat_platform_incompatible", - # @oss-disable: src = "TARGETS.v2", src = "BUCK", # @oss-enable + # @oss-disable: src = "TARGETS.v2", target_compatible_with = select({ ":fat_platform_enabled": ["config//:none"], "DEFAULT": [], diff --git a/prelude/platforms/apple/arch.bzl b/prelude/platforms/apple/arch.bzl index f6258b04b6..72163c98b8 100644 --- a/prelude/platforms/apple/arch.bzl +++ b/prelude/platforms/apple/arch.bzl @@ -5,6 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -AppleArches = ["i386", "x86_64", "arm64", "arm64_32", "armv7k"] +_APPLE_ARCHES = [ + "arm64", + "arm64_32", + "x86_64", +] -AppleArch = enum(*AppleArches) +AppleArch = enum(*_APPLE_ARCHES) diff --git a/prelude/platforms/apple/base.bzl b/prelude/platforms/apple/base.bzl new file mode 100644 index 0000000000..1c76185f2f --- /dev/null +++ b/prelude/platforms/apple/base.bzl @@ -0,0 +1,99 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
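
The `config_setting` targets added to `prelude//os` above are consumed via `select()`; a hypothetical consumer (target and flag names illustrative):

    cxx_library(
        name = "net",
        compiler_flags = select({
            "DEFAULT": [],
            "prelude//os:linux-sgx": ["-DSGX"],
        }),
    )
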
+ +# @oss-disable: load("@fbsource//tools/build_defs/buck2:is_buck2.bzl", "is_buck2") + +load("@prelude//:is_buck2.bzl", "is_buck2") # @oss-enable +load( + "@prelude//platforms/apple:build_mode.bzl", + "APPLE_BUILD_MODES", + "CONSTRAINT_PACKAGE", + "get_build_mode", + "get_build_mode_debug", +) +load( + "@prelude//platforms/apple:constants.bzl", + "ios_platforms", + "mac_catalyst_platforms", + "mac_platforms", + "watch_platforms", +) + +# Debug constraints to add for build modes used by other rule platforms (ex: rust). +_DEBUG_CONSTRAINTS = [ + # @oss-disable: "ovr_config//build_mode/constraints:debug", +] + +# Release constraints to add for build modes used by other rule platforms (ex: rust). +_RELEASE_CONSTRAINTS = [ + # @oss-disable: "ovr_config//build_mode/constraints:release", +] + +BUILD_MODE_TO_CONSTRAINTS_MAP = { + build_mode: ["{}:{}".format(CONSTRAINT_PACKAGE, build_mode)] + (_DEBUG_CONSTRAINTS if build_mode == get_build_mode_debug() else _RELEASE_CONSTRAINTS) + for build_mode in APPLE_BUILD_MODES +} + +_MOBILE_PLATFORMS = [ + ios_platforms.IPHONEOS_ARM64, + ios_platforms.IPHONESIMULATOR_ARM64, + ios_platforms.IPHONESIMULATOR_X86_64, + watch_platforms.WATCHOS_ARM64, + watch_platforms.WATCHOS_ARM64_32, + watch_platforms.WATCHSIMULATOR_ARM64, + watch_platforms.WATCHSIMULATOR_X86_64, +] + +_MAC_PLATFORMS = [ + mac_platforms.MACOS_ARM64, + mac_platforms.MACOS_X86_64, + mac_platforms.MACOS_UNIVERSAL, + mac_catalyst_platforms.MACCATALYST_ARM64, + mac_catalyst_platforms.MACCATALYST_X86_64, +] + +# TODO: Drop the platform_rule when we're not longer attempting to support buck1. +def apple_generated_platforms(name, constraint_values, deps, platform_rule, platform = None): + # By convention, the cxx.default_platform is typically the same as the platform being defined. + # This is not the case for all watch platforms, so provide an override. + platform = platform if platform else name + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + for build_mode in APPLE_BUILD_MODES: + platform_rule( + name = _get_generated_name(name, platform, build_mode), + constraint_values = constraint_values + BUILD_MODE_TO_CONSTRAINTS_MAP.get(build_mode), + visibility = ["PUBLIC"], + deps = deps, + ) + + # Create a platform without the build mode to support backwards compatibility of hardcoded platforms + # and with buck1 cxx platform setup. + # TODO(chatatap): Look to remove all hardcoded references and get rid of these + platform_rule( + name = name, + constraint_values = constraint_values, + visibility = ["PUBLIC"], + deps = deps, + ) + +def apple_build_mode_backed_platform(name, platform, build_mode = None): + build_mode = get_build_mode() if build_mode == None else build_mode + return _get_generated_name(name, platform, build_mode) + +def is_mobile_platform(platform): + # These builds modes are primarily used in mobile code. MacOS builds in fbcode/arvr use different + # modes to represent dev/opt variants. + return platform in _MOBILE_PLATFORMS + +def is_buck2_mac_platform(platform): + return is_buck2() and platform in _MAC_PLATFORMS + +def _get_generated_name(name, platform, build_mode): + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + return "{}-{}".format(name, build_mode) + else: + return name diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl new file mode 100644 index 0000000000..7200555022 --- /dev/null +++ b/prelude/platforms/apple/build_mode.bzl @@ -0,0 +1,37 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# @oss-disable: load("@prelude//platforms/apple/meta_only:build_mode.bzl", _APPLE_BUILD_MODES = "APPLE_BUILD_MODES", _BUILD_MODE = "BUILD_MODE", _get_build_mode = "get_build_mode", _get_build_mode_debug = "get_build_mode_debug", _get_build_mode_release = "get_build_mode_release") + +BUILD_MODE_DEBUG = "debug" # @oss-enable +BUILD_MODE_PROFILE = "profile" # @oss-enable +BUILD_MODE_RELEASE = "release" # @oss-enable + +APPLE_BUILD_MODES = [BUILD_MODE_DEBUG, BUILD_MODE_PROFILE, BUILD_MODE_RELEASE] # @oss-enable +# @oss-disable: APPLE_BUILD_MODES = _APPLE_BUILD_MODES + +BUILD_MODE = struct( # @oss-enable + DEBUG = BUILD_MODE_DEBUG, # @oss-enable + PROFILE = BUILD_MODE_PROFILE, # @oss-enable + RELEASE = BUILD_MODE_RELEASE, # @oss-enable +) # @oss-enable +# @oss-disable: BUILD_MODE = _BUILD_MODE + +CONSTRAINT_PACKAGE = "prelude//platforms/apple/constraints" # @oss-enable +# @oss-disable: CONSTRAINT_PACKAGE = "ovr_config//build_mode/apple/constraints" + +def get_build_mode(): + return read_root_config("apple", "build_mode", BUILD_MODE_DEBUG) # @oss-enable + # @oss-disable: return _get_build_mode() + +def get_build_mode_debug(): + return BUILD_MODE.DEBUG # @oss-enable + # @oss-disable: return _get_build_mode_debug() + +def get_build_mode_release(): + return BUILD_MODE.RELEASE # @oss-enable + # @oss-disable: return _get_build_mode_release() diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl new file mode 100644 index 0000000000..a5dcbe3fc9 --- /dev/null +++ b/prelude/platforms/apple/constants.bzl @@ -0,0 +1,109 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# These are identifiers used in defining Apple platforms for configuring apple_* rules. 
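
In the OSS branch, `get_build_mode()` reads the root buckconfig and falls back to debug, so a repo can opt into release-mode platforms with (illustrative):

    [apple]
    build_mode = release
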
+ +APPLE = "Apple" + +# Apple SDK Definitions +APPLETVOS = "appletvos" + +IOS = "ios" + +MACOSX = "macosx" + +WATCHOS = "watchos" + +VISIONOS = "visionos" + +# Apple TV Platforms/Flavors + +APPLETVOS_ARM64 = "appletvos-arm64" + +APPLETVSIMULATOR_ARM64 = "appletvsimulator-arm64" + +APPLETVSIMULATOR_X86_64 = "appletvsimulator-x86_64" + +# iOS Platforms/Flavors + +IPHONEOS_ARM64 = "iphoneos-arm64" + +IPHONESIMULATOR_ARM64 = "iphonesimulator-arm64" + +IPHONESIMULATOR_X86_64 = "iphonesimulator-x86_64" + +# Mac Catalyst Platforms/Flavors + +MACCATALYST_ARM64 = "maccatalyst-arm64" + +MACCATALYST_X86_64 = "maccatalyst-x86_64" + +# Mac OS X Platforms/Flavors + +MACOS_ARM64 = "macosx-arm64" + +MACOS_X86_64 = "macosx-x86_64" + +MACOS_UNIVERSAL = "macosx-universal" + +# Watch OS Platforms/Flavors + +WATCHOS_ARM64 = "watchos-arm64" + +WATCHOS_ARM64_32 = "watchos-arm64_32" + +WATCHSIMULATOR_ARM64 = "watchsimulator-arm64" + +WATCHSIMULATOR_X86_64 = "watchsimulator-x86_64" + +# Vision OS Platforms/Flavors +VISIONOS_ARM64 = "visionos-arm64" + +VISIONSIMULATOR_ARM64 = "visionsimulator-arm64" + +apple_sdks = struct( + IOS = IOS, + WATCHOS = WATCHOS, + MACOSX = MACOSX, + APPLETVOS = APPLETVOS, + VISIONOS = VISIONOS, +) + +appletv_platforms = struct( + APPLETVOS_ARM64 = APPLETVOS_ARM64, + APPLETVSIMULATOR_ARM64 = APPLETVSIMULATOR_ARM64, + APPLETVSIMULATOR_X86_64 = APPLETVSIMULATOR_X86_64, +) + +ios_platforms = struct( + IPHONEOS_ARM64 = IPHONEOS_ARM64, + IPHONESIMULATOR_ARM64 = IPHONESIMULATOR_ARM64, + IPHONESIMULATOR_X86_64 = IPHONESIMULATOR_X86_64, +) + +mac_catalyst_platforms = struct( + MACCATALYST_ARM64 = MACCATALYST_ARM64, + MACCATALYST_X86_64 = MACCATALYST_X86_64, +) + +mac_platforms = struct( + MACOS_ARM64 = MACOS_ARM64, + MACOS_X86_64 = MACOS_X86_64, + MACOS_UNIVERSAL = MACOS_UNIVERSAL, +) + +watch_platforms = struct( + WATCHOS_ARM64 = WATCHOS_ARM64, + WATCHOS_ARM64_32 = WATCHOS_ARM64_32, + WATCHSIMULATOR_ARM64 = WATCHSIMULATOR_ARM64, + WATCHSIMULATOR_X86_64 = WATCHSIMULATOR_X86_64, +) + +vision_platforms = struct( + VISIONOS_ARM64 = VISIONOS_ARM64, + VISIONSIMULATOR_ARM64 = VISIONSIMULATOR_ARM64, +) diff --git a/prelude/platforms/apple/platforms.bzl b/prelude/platforms/apple/platforms.bzl new file mode 100644 index 0000000000..374dc273b3 --- /dev/null +++ b/prelude/platforms/apple/platforms.bzl @@ -0,0 +1,244 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@fbsource//tools/build_defs/buck2:is_buck2.bzl", "is_buck2") +load("@prelude//apple:apple_platforms.bzl", "APPLE_PLATFORMS_KEY") +load("@prelude//platforms/apple:base.bzl", "BUILD_MODE_TO_CONSTRAINTS_MAP", "apple_build_mode_backed_platform", "is_buck2_mac_platform", "is_mobile_platform") +load( + "@prelude//platforms/apple:build_mode.bzl", + "APPLE_BUILD_MODES", + "get_build_mode", + "get_build_mode_debug", +) +load( + "@prelude//platforms/apple:constants.bzl", + "ios_platforms", + "mac_catalyst_platforms", + "mac_platforms", +) +load("@prelude//platforms/apple:platforms_map.bzl", "APPLE_PLATFORMS_MAP") +load("@prelude//utils:buckconfig.bzl", "read") + +_SUPPORTED_IOS_PLATFORMS = [ + ios_platforms.IPHONEOS_ARM64, + ios_platforms.IPHONESIMULATOR_ARM64, + ios_platforms.IPHONESIMULATOR_X86_64, +] + +_SUPPORTED_MACOS_PLATFORMS = [ + mac_platforms.MACOS_ARM64, + mac_platforms.MACOS_X86_64, +] + +_SUPPORTED_MAC_CATALYST_PLATFORMS = [ + mac_catalyst_platforms.MACCATALYST_ARM64, + mac_catalyst_platforms.MACCATALYST_X86_64, +] + +_ANALYSIS_CONSTRAINTS = ["ovr_config//bitcode/constraints:bitcode"] +_DEFAULT_ANALYSIS_IOS_PLATFORM = ios_platforms.IPHONEOS_ARM64 +_DEFAULT_ANALYSIS_MACOS_PLATFORM = mac_platforms.MACOS_X86_64 + +DEFAULT_SUPPORTED_CXX_PLATFORMS = _SUPPORTED_IOS_PLATFORMS + +def apple_target_platforms( + base_name, + platform_rule, + constraint_values = None, # Constraint values added to all generated platforms + visibility = None, + deps = None, + cxx_platforms_constraint_values = None, # Must be a map of a supported cxx platform to a list of constraint values + build_mode_constraint_values = None, # Must be a map of a supported build mode to a list of constraint values + supported_cxx_platforms = DEFAULT_SUPPORTED_CXX_PLATFORMS, # Cxx platforms to generate platforms for + supported_build_modes = APPLE_BUILD_MODES): # Build modes to generate platforms for + """ Define architecture and sdk specific platforms alongside the base platform. """ + + # HACK: Apps shouldn't be generating platforms for cxx_platforms they don't support. However, to support cases where other apps + # depend on shared libraries that don't generate particular platforms, and set a cxx.default_platform on the command line, we need + # to make the graph parseable and generate the missing target platforms. They will never be used, but need to exist in the config + # backed world. 
+ config_based_platform = read("cxx", "default_platform") + if config_based_platform != None and config_based_platform not in supported_cxx_platforms: + supported_cxx_platforms = list(supported_cxx_platforms) + if config_based_platform in _SUPPORTED_MACOS_PLATFORMS: + for p in _SUPPORTED_MACOS_PLATFORMS: + if p not in supported_cxx_platforms: + supported_cxx_platforms.append(p) + + if config_based_platform in _SUPPORTED_MAC_CATALYST_PLATFORMS: + for p in _SUPPORTED_MAC_CATALYST_PLATFORMS: + if p not in supported_cxx_platforms: + supported_cxx_platforms.append(p) + + # Form defaults + constraint_values = constraint_values or [] + cxx_platforms_constraint_values = cxx_platforms_constraint_values or {} + build_mode_constraint_values = build_mode_constraint_values or {} + visibility = visibility or ["PUBLIC"] + deps = deps or [] + + _validate_cxx_platforms_constraint_values(base_name, cxx_platforms_constraint_values, supported_cxx_platforms) + _validate_build_mode_constraint_values(base_name, build_mode_constraint_values, supported_build_modes) + + # Define the generated platforms + for platform in supported_cxx_platforms: + platform_dep = get_default_target_platform_for_platform(platform) + cxx_platform_constraints = cxx_platforms_constraint_values.get(platform, []) + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + for build_mode in supported_build_modes: + build_mode_constraints = build_mode_constraint_values.get(build_mode, []) + BUILD_MODE_TO_CONSTRAINTS_MAP.get(build_mode) + _define_platform( + base_name, + platform, + build_mode, + constraint_values + cxx_platform_constraints + build_mode_constraints, + visibility, + deps + [platform_dep], + platform_rule, + ) + else: + _define_platform( + base_name, + platform, + None, + constraint_values + cxx_platform_constraints, + visibility, + deps + [platform_dep], + platform_rule, + ) + + # Define the base platform in case it is needed (example: to be a dep of another platform) + platform_rule( + name = base_name, + constraint_values = constraint_values, + visibility = visibility, + deps = deps, + ) + + analysis_platform = _get_analysis_platform_for_supported_platforms(supported_cxx_platforms) + analysis_platform_dep = get_default_target_platform_for_platform(analysis_platform) + analysis_platform_build_mode_constraints = build_mode_constraint_values.get(get_build_mode_debug(), []) + + platform_rule( + name = base_name + "-analysis", + constraint_values = constraint_values + analysis_platform_build_mode_constraints + _ANALYSIS_CONSTRAINTS, + visibility = ["PUBLIC"], + deps = deps + [analysis_platform_dep], + ) + +def config_backed_apple_target_platform(target_platform = None, platform = None, build_mode = None): + platform = _get_default_platform() if platform == None else platform + build_mode = get_build_mode() if build_mode == None else build_mode + if target_platform == None: + return get_default_target_platform_for_platform(platform) + + return _get_generated_name(target_platform, platform, build_mode) + +def get_default_target_platform_for_platform(sdk_arch) -> [str, None]: + data = APPLE_PLATFORMS_MAP.get(sdk_arch) + if data != None: + return data.target_platform + + return None + +def set_apple_platforms(platform, base_config_backed_target_platform, kwargs): + def get_supported_platforms(): + if not is_buck2(): + return None + if platform in _SUPPORTED_IOS_PLATFORMS: + return _SUPPORTED_IOS_PLATFORMS + elif platform in _SUPPORTED_MACOS_PLATFORMS: + return _SUPPORTED_MACOS_PLATFORMS + elif platform in 
_SUPPORTED_MAC_CATALYST_PLATFORMS: + return _SUPPORTED_MAC_CATALYST_PLATFORMS + else: + return None + + supported_platforms = get_supported_platforms() + if not supported_platforms: + return kwargs + + # If we've already defined the apple platforms, we can avoid having to process them again. + if APPLE_PLATFORMS_KEY in kwargs: + return kwargs + + apple_platforms = {} + for platform in supported_platforms: + for build_mode in APPLE_BUILD_MODES: + identifier = "{}-{}".format(platform, build_mode) + if base_config_backed_target_platform: + apple_platforms[identifier] = config_backed_apple_target_platform(base_config_backed_target_platform, platform, build_mode) + else: + base_target_platform = _get_base_target_platform_for_platform(platform) + if not base_target_platform: + fail("A valid base target platform is required!") + apple_platforms[identifier] = apple_build_mode_backed_platform(base_target_platform, platform, build_mode) + + kwargs[APPLE_PLATFORMS_KEY] = apple_platforms + + return kwargs + +def _get_generated_name(base_name, platform, build_mode): + platform_and_build_mode_name = apple_build_mode_backed_platform(platform, platform, build_mode) + return "{}-{}".format(base_name, platform_and_build_mode_name) + +def _get_default_platform(): + platform = read("cxx", "default_platform") + return platform if platform != None else ios_platforms.IPHONESIMULATOR_X86_64 + +def _define_platform(base_name, platform, build_mode, constraint_values, visibility, deps, platform_rule): + # @lint-ignore BUCKLINT - We set the visibility to PUBLIC directly and can bypass fb_native + platform_rule( + name = _get_generated_name(base_name, platform, build_mode), + constraint_values = constraint_values, + visibility = visibility, + deps = deps, + ) + +def _get_base_target_platform_for_platform(sdk_arch) -> [str, None]: + data = APPLE_PLATFORMS_MAP.get(sdk_arch) + if data != None: + return data.base_target_platform + + return None + +def _get_analysis_platform_for_supported_platforms(supported_cxx_platforms): + # For determining the platform deps to use for the base platform, we inspect the supported + # cxx platforms, giving precedence to iOS platforms. 
+ for platform in _SUPPORTED_IOS_PLATFORMS: + if platform in supported_cxx_platforms: + return _DEFAULT_ANALYSIS_IOS_PLATFORM + + for platform in _SUPPORTED_MACOS_PLATFORMS: + if platform in supported_cxx_platforms: + return _DEFAULT_ANALYSIS_MACOS_PLATFORM + + return _DEFAULT_ANALYSIS_IOS_PLATFORM + +def _validate_cxx_platforms_constraint_values(base_name, cxx_platforms_constraint_values, supported_cxx_platforms): + if type(cxx_platforms_constraint_values) != type({}): + fail("cxx_platforms_constraint_values must be a map of platform to constraint values!") + for platform, platform_values in cxx_platforms_constraint_values.items(): + if platform not in supported_cxx_platforms: + fail("\n\nProviding platform constraints for an unsupported platform!\nBase platform: {}\nCXX Platform: {} with values {}\nSupported platforms: {}\n".format( + base_name, + platform, + platform_values, + ", ".join(supported_cxx_platforms), + )) + +def _validate_build_mode_constraint_values(base_name, build_mode_constraint_values, supported_build_modes): + if type(build_mode_constraint_values) != type({}): + fail("build_mode_constraint_values must be a map of build mode to constraint values!") + for build_mode, build_mode_values in build_mode_constraint_values.items(): + if build_mode not in supported_build_modes: + fail("\n\nProviding build mode constraints for an unsupported build mode!\nBase platform: {}\nBuild mode: {} with values {}\nSupported build modes: {}\n".format( + base_name, + build_mode, + build_mode_values, + ", ".join(supported_build_modes), + )) diff --git a/prelude/platforms/apple/platforms_map.bzl b/prelude/platforms/apple/platforms_map.bzl new file mode 100644 index 0000000000..9e15a662cd --- /dev/null +++ b/prelude/platforms/apple/platforms_map.bzl @@ -0,0 +1,14 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# @oss-disable: load("@prelude//platforms/apple/meta_only:platforms_map.bzl", _APPLE_PLATFORMS_MAP = "APPLE_PLATFORMS_MAP", _APPLE_SDK_DEFAULT_PLATFORM_MAP = "APPLE_SDK_DEFAULT_PLATFORM_MAP") + +APPLE_PLATFORMS_MAP = {} # TODO: Define OSS platforms map # @oss-enable +# @oss-disable: APPLE_PLATFORMS_MAP = _APPLE_PLATFORMS_MAP + +APPLE_SDK_DEFAULT_PLATFORM_MAP = {} # @oss-enable +# @oss-disable: APPLE_SDK_DEFAULT_PLATFORM_MAP = _APPLE_SDK_DEFAULT_PLATFORM_MAP diff --git a/prelude/platforms/apple/sdk.bzl b/prelude/platforms/apple/sdk.bzl index 857896304b..e03573ee09 100644 --- a/prelude/platforms/apple/sdk.bzl +++ b/prelude/platforms/apple/sdk.bzl @@ -5,6 +5,15 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -AppleSdks = ["iphoneos", "iphonesimulator", "maccatalyst", "macosx", "visionos", "visionsimulator", "watchos", "watchsimulator"] +_APPLE_SDKS = [ + "iphoneos", + "iphonesimulator", + "maccatalyst", + "macosx", + "visionos", + "visionsimulator", + "watchos", + "watchsimulator", +] -AppleSdk = enum(*AppleSdks) +AppleSdk = enum(*_APPLE_SDKS) diff --git a/prelude/prelude.bzl b/prelude/prelude.bzl index 6ef06c1ea3..ac15950e85 100644 --- a/prelude/prelude.bzl +++ b/prelude/prelude.bzl @@ -10,5 +10,3 @@ load("@prelude//:native.bzl", _native = "native") # Public symbols in this file become globals everywhere except `bzl` files in prelude. 
 # Additionally, members of `native` struct also become globals in `BUCK` files.
 native = _native
-
-# This is a test to get CI to notice me
diff --git a/prelude/pull_request_template.md b/prelude/pull_request_template.md
index 1554e0ee17..ab8b597978 100644
--- a/prelude/pull_request_template.md
+++ b/prelude/pull_request_template.md
@@ -1,3 +1,7 @@
-IMPORTANT: Please don't raise pull requests here, but at [facebook/buck2](https://github.com/facebook/buck2/pulls).
+IMPORTANT: Please don't raise pull requests here, but at
+[facebook/buck2](https://github.com/facebook/buck2/pulls).

-The [`prelude`](https://github.com/facebook/buck2/tree/main/prelude) directory is a mirror of this repo, but that repo also features CI tests and is more actively monitored. Any PR's landing there will automatically show up here at the same time.
+The [`prelude`](https://github.com/facebook/buck2/tree/main/prelude) directory
+is a mirror of this repo, but that repo also features CI tests and is more
+actively monitored. Any PRs landing there will automatically show up here at
+the same time.
diff --git a/prelude/python/compile.bzl b/prelude/python/compile.bzl
index 4d6175ac2b..ba29eb85f0 100644
--- a/prelude/python/compile.bzl
+++ b/prelude/python/compile.bzl
@@ -29,21 +29,46 @@ def compile_manifests_for_mode(
         invalidation_mode: PycInvalidationMode = PycInvalidationMode("UNCHECKED_HASH")) -> ManifestInfo:
     output = ctx.actions.declare_output("bytecode_{}".format(invalidation_mode.value), dir = True)
     bytecode_manifest = ctx.actions.declare_output("bytecode_{}.manifest".format(invalidation_mode.value))
-    cmd = cmd_args(ctx.attrs._python_toolchain[PythonToolchainInfo].host_interpreter)
-    cmd.add(ctx.attrs._python_toolchain[PythonToolchainInfo].compile)
-    cmd.add(cmd_args(output.as_output(), format = "--output={}"))
-    cmd.add(cmd_args(bytecode_manifest.as_output(), format = "--bytecode-manifest={}"))
-    cmd.add("--invalidation-mode={}".format(invalidation_mode.value))
+    cmd = [
+        ctx.attrs._python_toolchain[PythonToolchainInfo].host_interpreter,
+        ctx.attrs._python_toolchain[PythonToolchainInfo].compile,
+        cmd_args(output.as_output(), format = "--output={}"),
+        cmd_args(bytecode_manifest.as_output(), format = "--bytecode-manifest={}"),
+        "--invalidation-mode={}".format(invalidation_mode.value),
+    ]

-    for manifest in manifests:
-        cmd.add(manifest.manifest)
-        cmd.hidden([a for a, _ in manifest.artifacts])
-    ctx.actions.run(
-        cmd,
+    env = {
         # On some platforms (e.g. linux), python hash code randomness can cause
         # the bytecode to be non-deterministic, so pin via the `PYTHONHASHSEED`
         # env var.
-        env = {"PYTHONHASHSEED": "7"},
+        "PYTHONHASHSEED": "7",
+    }
+
+    # Support invalidating cached pyc compile actions by bumping this env var.
+    # The value itself is meaningless; just the fact that it changes is
+    # meaningful. We use the PYC magic number for *convenience* only.
+    version = ctx.attrs._python_toolchain[PythonToolchainInfo].version
+    if version and "cinder" in version:
+        # For Cinder, this is a workaround...
+        # This action *should* use the bundled (in-repo) runtime for compilation
+        # (and then the change in the Cinder codebase would be sufficient to invalidate caches).
+        # Currently though, the action uses the platform Cinder for PYC compilation,
+        # and these are deployed in-place (no change to toolchain paths),
+        # so we need to force cache invalidation when needed (e.g. for S411091).
+        env["CINDER_DUMMY_PYC_CACHE_BUSTER"] = "3451"
+    elif version and "3.12" in version:
+        # For CPython, the magic number *shouldn't* change during the lifetime of a feature release,
+        # but internally we do make more significant changes (rarely),
+        # so for those cases we support forced invalidation using this env var.
+        env["PYTHON312_DUMMY_PYC_CACHE_BUSTER"] = "3532"
+
+    hidden = []
+    for manifest in manifests:
+        cmd.append(manifest.manifest)
+        hidden.extend([a for a, _ in manifest.artifacts])
+    ctx.actions.run(
+        cmd_args(cmd, hidden = hidden),
+        env = env,
         category = "py_compile",
         identifier = invalidation_mode.value,
     )
diff --git a/prelude/python/cxx_python_extension.bzl b/prelude/python/cxx_python_extension.bzl
index 618d043740..a756ebef1b 100644
--- a/prelude/python/cxx_python_extension.bzl
+++ b/prelude/python/cxx_python_extension.bzl
@@ -41,7 +41,6 @@ load(
     "LibOutputStyle",
     "LinkInfo",
     "LinkInfos",
-    "Linkage",
     "create_merged_link_info",
     "wrap_link_infos",
 )
@@ -60,6 +59,7 @@ load(
     "@prelude//linking:shared_libraries.bzl",
     "merge_shared_libraries",
 )
+load("@prelude//linking:types.bzl", "Linkage")
 load("@prelude//os_lookup:defs.bzl", "OsLookup")
 load("@prelude//python:toolchain.bzl", "PythonPlatformInfo", "get_platform_attr")
 load("@prelude//utils:expect.bzl", "expect")
@@ -119,6 +119,14 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]:
         use_soname = False,
         generate_providers = cxx_providers,
         generate_sub_targets = sub_targets,
+        compiler_flags = ctx.attrs.compiler_flags,
+        lang_compiler_flags = ctx.attrs.lang_compiler_flags,
+        platform_compiler_flags = ctx.attrs.platform_compiler_flags,
+        lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags,
+        preprocessor_flags = ctx.attrs.preprocessor_flags,
+        lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags,
+        platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags,
+        lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags,
     )

     cxx_library_info = cxx_library_parameterized(ctx, impl_params)
@@ -214,6 +222,7 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]:
             deps = [d.shared_library_info for d in link_deps],
         ),
         linkable_root_info = create_linkable_root(
+            label = ctx.label,
             link_infos = wrap_link_infos(
                 link_infos[LibOutputStyle("pic_archive")],
                 pre_flags = ctx.attrs.linker_flags,
diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl
index 96faa653d8..6b936f8afe 100644
--- a/prelude/python/make_py_package.bzl
+++ b/prelude/python/make_py_package.bzl
@@ -12,13 +12,16 @@ execution
 load("@prelude//:artifact_tset.bzl", "project_artifacts")
 load("@prelude//:local_only.bzl", "package_python_locally")
+load("@prelude//:paths.bzl", "paths")
 load(
     "@prelude//cxx:cxx_library_utility.bzl",
     "cxx_is_gnu",
 )
 load(
-    "@prelude//linking:link_info.bzl",
-    "LinkedObject", # @unused Used as a type
+    "@prelude//linking:shared_libraries.bzl",
+    "SharedLibrary", # @unused Used as a type
+    "gen_shared_libs_action",
+    "zip_shlibs",
 )
 load("@prelude//os_lookup:defs.bzl", "OsLookup")
 load("@prelude//utils:arglike.bzl", "ArgLike")
@@ -34,7 +37,6 @@ PexModules = record(
     manifests = field(PythonLibraryManifestsInterface),
     extensions = field(ManifestInfo | None, None),
     extra_manifests = field(ManifestInfo | None, None),
-    debuginfo_manifest = field(ManifestInfo | None, None),
     compile = field(bool, False),
 )

@@ -42,7 +44,7 @@
 # providers.
PexProviders = record( default_output = field(Artifact), - other_outputs = list[(ArgLike, str)], + other_outputs = list[ArgLike], other_outputs_prefix = str | None, hidden_resources = list[ArgLike], sub_targets = dict[str, list[Provider]], @@ -60,11 +62,15 @@ def make_py_package_providers( def make_default_info(pex: PexProviders) -> Provider: return DefaultInfo( default_output = pex.default_output, - other_outputs = [a for a, _ in pex.other_outputs] + pex.hidden_resources, + other_outputs = pex.other_outputs + pex.hidden_resources, sub_targets = pex.sub_targets, ) -def make_run_info(pex: PexProviders) -> Provider: +def make_run_info(pex: PexProviders, run_with_inplace: bool = False) -> Provider: + if run_with_inplace and "inplace" in pex.sub_targets: + # If running with inplace, we want to use the RunInfo of inplace subtarget. + return pex.sub_targets["inplace"][1] + return RunInfo(pex.run_cmd) def _srcs(srcs: list[typing.Any], format = "{}") -> cmd_args: @@ -118,10 +124,11 @@ def make_py_package( package_style: PackageStyle, build_args: list[ArgLike], pex_modules: PexModules, - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: list[(str, SharedLibrary, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, - allow_cache_upload: bool) -> PexProviders: + allow_cache_upload: bool, + debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)] = []) -> PexProviders: """ Passes a standardized set of flags to a `make_py_package` binary to create a python "executable". @@ -144,12 +151,22 @@ def make_py_package( if pex_modules.extensions: srcs.append(pex_modules.extensions.manifest) - preload_libraries = _preload_libraries_args(ctx, shared_libraries) + preload_libraries = _preload_libraries_args( + ctx = ctx, + shared_libraries = [ + (shlib, libdir) + for libdir, shlib, preload in shared_libraries + if preload + ], + ) + startup_function = generate_startup_function_loader(ctx) manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts, debug_artifacts = _pex_modules_common_args( ctx, pex_modules, - {name: lib for name, (lib, _) in shared_libraries.items()}, + [startup_function] if startup_function else [], + [(shlib, libdir) for libdir, shlib, _ in shared_libraries], + debuginfo_files = debuginfo_files, ) default = _make_py_package_impl( @@ -158,7 +175,7 @@ def make_py_package( make_py_package_cmd, package_style, build_args, - shared_libraries, + len(shared_libraries) > 0, preload_libraries, common_modules_args, dep_artifacts, @@ -177,7 +194,7 @@ def make_py_package( make_py_package_cmd, PackageStyle(style), build_args, - shared_libraries, + len(shared_libraries) > 0, preload_libraries, common_modules_args, dep_artifacts, @@ -190,6 +207,10 @@ def make_py_package( allow_cache_upload = allow_cache_upload, ) default.sub_targets[style] = make_py_package_providers(pex_providers) + + # cpp binaries already emit a `debuginfo` subtarget with a different format, + # so we opt to use a more specific subtarget + default.sub_targets["par-debuginfo"] = _debuginfo_subtarget(ctx, debug_artifacts) return default def _make_py_package_impl( @@ -198,11 +219,11 @@ def _make_py_package_impl( make_py_package_cmd: RunInfo | None, package_style: PackageStyle, build_args: list[ArgLike], - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: bool, preload_libraries: cmd_args, common_modules_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], - debug_artifacts: list[(ArgLike, str)], + dep_artifacts: list[ArgLike], + 
debug_artifacts: list[(str | (str, SharedLibrary, str), ArgLike)], main: EntryPoint, hidden_resources: list[ArgLike] | None, manifest_module: ArgLike | None, @@ -213,6 +234,7 @@ def _make_py_package_impl( standalone = package_style == PackageStyle("standalone") runtime_files = [] + sub_targets = {} if standalone and hidden_resources != None: # constructing this error message is expensive, only do it when we abort analysis error_msg = "standalone builds don't support hidden resources" if output_suffix else _hidden_resources_error_message(ctx.label, hidden_resources) @@ -258,6 +280,7 @@ def _make_py_package_impl( preload_libraries, symlink_tree_path, package_style, + True if ctx.attrs.zip_safe == None else ctx.attrs.zip_safe, ) bootstrap_args.add(build_args) if standalone: @@ -267,7 +290,7 @@ def _make_py_package_impl( # For inplace builds add local artifacts to outputs so they get properly materialized runtime_files.extend(dep_artifacts) - runtime_files.append((symlink_tree_path, symlink_tree_path.short_path)) + runtime_files.append(symlink_tree_path) # For standalone builds, or builds setting make_py_package we generate args for calling make_par.py if standalone or make_py_package_cmd != None: @@ -316,23 +339,61 @@ def _make_py_package_impl( if hidden_resources == None: hidden_resources = [] + if symlink_tree_path != None: + sub_targets["link-tree"] = [DefaultInfo( + default_output = symlink_tree_path, + other_outputs = runtime_files, + sub_targets = {}, + )] + return PexProviders( default_output = output, other_outputs = runtime_files, other_outputs_prefix = symlink_tree_path.short_path if symlink_tree_path != None else None, hidden_resources = hidden_resources, - sub_targets = {}, - run_cmd = cmd_args(run_args).hidden([a for a, _ in runtime_files] + hidden_resources), + sub_targets = sub_targets, + run_cmd = cmd_args( + run_args, + hidden = runtime_files + hidden_resources + [python_toolchain.interpreter], + ), ) -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: - preload_libraries_path = ctx.actions.write( - "__preload_libraries.txt", - cmd_args([ - "--preload={}".format(name) - for name, (_, preload) in shared_libraries.items() - if preload - ]), +def _debuginfo_subtarget( + ctx: AnalysisContext, + debug_artifacts: list[(str | (str, SharedLibrary, str), ArgLike)]) -> list[Provider]: + for_shared_libs = [] + other = [] + for name, artifact in debug_artifacts: + if type(name) == type(()): + for_shared_libs.append((name[1], (artifact, name[0], name[2]))) + else: + other.append((artifact, name)) + out = gen_shared_libs_action( + actions = ctx.actions, + out = "debuginfo.manifest.json", + shared_libs = [shlib for shlib, _ in for_shared_libs], + gen_action = lambda actions, output, shared_libs: actions.write_json( + output, + [ + (debug, paths.join(libdir, soname + ext)) + for soname, _, (debug, libdir, ext) in zip_shlibs(shared_libs, for_shared_libs) + ] + other, + ), + ) + return [DefaultInfo(default_output = out, other_outputs = [d for _, d in debug_artifacts])] + +def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(SharedLibrary, str)]) -> cmd_args: + preload_libraries_path = gen_shared_libs_action( + actions = ctx.actions, + out = "__preload_libraries.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + [ + "--preload={}".format(paths.join(libdir, soname)) + for soname, _, libdir in zip_shlibs(shared_libs, 
shared_libraries) + ], + ), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -340,10 +401,11 @@ def _pex_bootstrap_args( toolchain: PythonToolchainInfo, main: EntryPoint, output: Artifact, - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: bool, preload_libraries: cmd_args, symlink_tree_path: Artifact | None, - package_style: PackageStyle) -> cmd_args: + package_style: PackageStyle, + zip_safe: bool) -> cmd_args: cmd = cmd_args() cmd.add(preload_libraries) cmd.add([ @@ -357,7 +419,7 @@ def _pex_bootstrap_args( else: cmd.add(["--main-function", main[1]]) if symlink_tree_path != None: - cmd.add(cmd_args(["--modules-dir", symlink_tree_path]).ignore_artifacts()) + cmd.add(cmd_args(["--modules-dir", symlink_tree_path], ignore_artifacts = True)) if toolchain.main_runner: cmd.add(["--main-runner", toolchain.main_runner]) @@ -367,12 +429,20 @@ def _pex_bootstrap_args( cmd.add("--use-lite") cmd.add(output.as_output()) + if package_style == PackageStyle("standalone") and not zip_safe: + cmd.add("--no-zip-safe") + + for lib_path in toolchain.native_library_runtime_paths: + cmd.add("--native-library-runtime-path={}".format(lib_path)) + return cmd def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, - shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): + extra_manifests: list[ArgLike], + shared_libraries: list[(SharedLibrary, str)], + debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)]) -> (cmd_args, list[ArgLike], list[(str | (str, SharedLibrary, str), ArgLike)]): srcs = [] src_artifacts = [] deps = [] @@ -389,9 +459,12 @@ def _pex_modules_common_args( srcs.append(pex_modules.extra_manifests.manifest) src_artifacts.extend(pex_modules.extra_manifests.artifacts) - deps.extend(src_artifacts) + if extra_manifests: + srcs.extend(extra_manifests) + + deps.extend([a[0] for a in src_artifacts]) resources = pex_modules.manifests.resource_manifests() - deps.extend(pex_modules.manifests.resource_artifacts_with_paths()) + deps.extend([a[0] for a in pex_modules.manifests.resource_artifacts_with_paths()]) src_manifests_path = ctx.actions.write( "__src_manifests.txt", @@ -402,73 +475,104 @@ def _pex_modules_common_args( _srcs(resources, format = "--resource-manifest={}"), ) - native_libraries = [s.output for s in shared_libraries.values()] - native_library_srcs_path = ctx.actions.write( - "__native_libraries___srcs.txt", - _srcs(native_libraries, format = "--native-library-src={}"), - ) - native_library_dests_path = ctx.actions.write( - "__native_libraries___dests.txt", - ["--native-library-dest={}".format(lib) for lib in shared_libraries], + native_libraries = gen_shared_libs_action( + actions = ctx.actions, + out = "__native_libraries__.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + cmd_args( + _srcs( + [shlib.lib.output for shlib in shared_libs.values()], + format = "--native-library-src={}", + ), + [ + "--native-library-dest={}".format(paths.join(libdir, soname)) + for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) + ], + ), + ), ) - src_manifest_args = cmd_args(src_manifests_path).hidden(srcs) - resource_manifest_args = cmd_args(resource_manifests_path).hidden(resources) - native_library_srcs_args = cmd_args(native_library_srcs_path) + src_manifest_args = cmd_args(src_manifests_path, hidden = srcs) + resource_manifest_args = cmd_args(resource_manifests_path, hidden = resources) 
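
Two helpers from `shared_libraries.bzl` carry this file's migration: `gen_shared_libs_action` defers writing until each library's final soname is resolved, then invokes the supplied `gen_action` with a soname-keyed map, and `zip_shlibs` pairs that map back up with caller-side extras such as the destination libdir. A minimal sketch of the callback shape, matching the call sites above (output name illustrative):

    gen_shared_libs_action(
        actions = ctx.actions,
        out = "sonames.txt",
        shared_libs = [shlib for shlib, _ in shared_libraries],
        # gen_action receives {soname: SharedLibrary} once sonames are known;
        # zip_shlibs() re-pairs each entry with our (shlib, libdir) tuples.
        gen_action = lambda actions, output, shared_libs: actions.write(
            output,
            [soname for soname, _, _libdir in zip_shlibs(shared_libs, shared_libraries)],
        ),
    )
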
cmd = cmd_args() cmd.add(cmd_args(src_manifest_args, format = "@{}")) cmd.add(cmd_args(resource_manifest_args, format = "@{}")) - cmd.add(cmd_args(native_library_srcs_args, format = "@{}")) - cmd.add(cmd_args(native_library_dests_path, format = "@{}")) + cmd.add(cmd_args(native_libraries, format = "@{}")) - if pex_modules.debuginfo_manifest: - debuginfo_files = pex_modules.debuginfo_manifest.artifacts + if debuginfo_files: debuginfo_srcs_path = ctx.actions.write( "__debuginfo___srcs.txt", - _srcs([src for src, _ in debuginfo_files], format = "--debuginfo-src={}"), + _srcs([src for _, src in debuginfo_files], format = "--debuginfo-src={}"), ) debuginfo_srcs_args = cmd_args(debuginfo_srcs_path) cmd.add(cmd_args(debuginfo_srcs_args, format = "@{}")) - debug_artifacts.extend(debuginfo_files) + for name, artifact in debuginfo_files: + if type(name) != type(""): + libdir, shlib, ext = name + name = paths.join(libdir, shlib.soname.ensure_str() + ext) + debug_artifacts.append((name, artifact)) if ctx.attrs.package_split_dwarf_dwp: if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): - # rename to match extracted debuginfo package - dwp = [(s.dwp, "{}.debuginfo.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] + dwp_ext = ".debuginfo.dwp" else: - dwp = [(s.dwp, "{}.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] - dwp_srcs_path = ctx.actions.write( - "__dwp___srcs.txt", - _srcs([src for src, _ in dwp], format = "--dwp-src={}"), - ) - dwp_dests_path = ctx.actions.write( - "__dwp___dests.txt", - _srcs([dest for _, dest in dwp], format = "--dwp-dest={}"), + dwp_ext = ".dwp" + dwp_args = gen_shared_libs_action( + actions = ctx.actions, + out = "__dwp__.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + cmd_args( + _srcs( + [ + shlib.lib.dwp + for shlib in shared_libs.values() + if shlib.lib.dwp != None + ], + format = "--dwp-src={}", + ), + _srcs( + [ + paths.join(libdir, soname + dwp_ext) + for soname, shlib, libdir in zip_shlibs(shared_libs, shared_libraries) + if shlib.lib.dwp != None + ], + format = "--dwp-dest={}", + ), + ), + ), ) - dwp_srcs_args = cmd_args(dwp_srcs_path) - cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) - cmd.add(cmd_args(dwp_dests_path, format = "@{}")) + cmd.add(cmd_args(dwp_args, format = "@{}")) - debug_artifacts.extend(dwp) + for shlib, libdir in shared_libraries: + if shlib.lib.dwp != None: + debug_artifacts.append(((libdir, shlib, dwp_ext), shlib.lib.dwp)) - deps.extend([(lib.output, name) for name, lib in shared_libraries.items()]) + for shlib, _ in shared_libraries: + deps.append(shlib.lib.output) external_debug_info = project_artifacts( ctx.actions, - [lib.external_debug_info for lib in shared_libraries.values()], + [ + shlib.lib.external_debug_info + for shlib, _ in shared_libraries + ], ) # HACK: external_debug_info has an empty path - debug_artifacts.extend([(d, "") for d in external_debug_info]) + debug_artifacts.extend([("", d) for d in external_debug_info]) return (cmd, deps, debug_artifacts) def _pex_modules_args( ctx: AnalysisContext, common_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], - debug_artifacts: list[(ArgLike, str)], + dep_artifacts: list[ArgLike], + debug_artifacts: list[(str | (str, SharedLibrary, str), ArgLike)], symlink_tree_path: Artifact | None, manifest_module: ArgLike | None, pex_modules: PexModules, @@ -479,16 +583,18 @@ def 
_pex_modules_args( runtime (this might be empty for e.g. a standalone pex). """ - cmd = cmd_args() - cmd.add(common_args) + cmd = [] + hidden = [] + + cmd.append(common_args) if manifest_module != None: - cmd.add(cmd_args(manifest_module, format = "--module-manifest={}")) + cmd.append(cmd_args(manifest_module, format = "--module-manifest={}")) if pex_modules.compile: pyc_mode = PycInvalidationMode("UNCHECKED_HASH") if symlink_tree_path == None else PycInvalidationMode("CHECKED_HASH") bytecode_manifests = pex_modules.manifests.bytecode_manifests(pyc_mode) - dep_artifacts.extend(pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)) + dep_artifacts.extend([a[0] for a in pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)]) bytecode_manifests_path = ctx.actions.write( "__bytecode_manifests{}.txt".format(output_suffix), @@ -497,19 +603,19 @@ def _pex_modules_args( format = "--module-manifest={}", ), ) - cmd.add(cmd_args(bytecode_manifests_path, format = "@{}")) - cmd.hidden(bytecode_manifests) + cmd.append(cmd_args(bytecode_manifests_path, format = "@{}")) + hidden.append(bytecode_manifests) if symlink_tree_path != None: - cmd.add(["--modules-dir", symlink_tree_path.as_output()]) + cmd.extend(["--modules-dir", symlink_tree_path.as_output()]) else: # Accumulate all the artifacts we depend on. Only add them to the command # if we are not going to create symlinks. - cmd.hidden([a for a, _ in dep_artifacts]) + hidden.append(dep_artifacts) - cmd.hidden([a for a, _ in debug_artifacts]) + hidden.extend([s for _, s in debug_artifacts]) - return cmd + return cmd_args(cmd, hidden = hidden) def _hidden_resources_error_message(current_target: Label, hidden_resources: list[ArgLike] | None) -> str: """ @@ -544,6 +650,59 @@ def _hidden_resources_error_message(current_target: Label, hidden_resources: lis msg += " {}\n".format(resource) return msg +def generate_startup_function_loader(ctx: AnalysisContext) -> ArgLike: + """ + Generate `__startup_function_loader__.py` used for early bootstrap of a par. + Things that go here are also enumerated in `__manifest__['startup_functions']` + Some examples include: + * static extension finder init + * eager import loader init + * cinderx init + """ + + if ctx.attrs.manifest_module_entries == None: + startup_functions_list = "" + else: + startup_functions_list = "\n".join( + [ + '"' + startup_function + '",' + for _, startup_function in sorted(ctx.attrs.manifest_module_entries.get("startup_functions", {}).items()) + ], + ) + + src_startup_functions_path = ctx.actions.write( + "manifest/__startup_function_loader__.py", + """ +import importlib +import warnings + +STARTUP_FUNCTIONS=[{startup_functions_list}] + +def load_startup_functions(): + for func in STARTUP_FUNCTIONS: + mod, sep, func = func.partition(":") + if sep: + try: + module = importlib.import_module(mod) + getattr(module, func)() + except Exception as e: + # TODO: Ignoring errors for now. 
+ warnings.warn( + "Startup function %s (%s:%s) not executed: %s" + % (func, mod, func, e), + stacklevel=1, + ) + """.format(startup_functions_list = startup_functions_list), + ) + return ctx.actions.write_json( + "manifest/startup_function_loader.manifest", + [ + ["__par__/__startup_function_loader__.py", src_startup_functions_path, "prelude//python:make_py_package.bzl"], + ], + with_inputs = True, + ) + def generate_manifest_module( ctx: AnalysisContext, python_toolchain: PythonToolchainInfo, @@ -563,11 +722,13 @@ def generate_manifest_module( "__module_manifests.txt", _srcs(src_manifests, format = "--module-manifest={}"), ) - cmd = cmd_args(python_toolchain.make_py_package_manifest_module) - cmd.add(["--manifest-entries", entries_json]) - cmd.add(cmd_args(src_manifests_path, format = "@{}")) - cmd.hidden(src_manifests) - cmd.add(["--output", module.as_output()]) + cmd = cmd_args( + python_toolchain.make_py_package_manifest_module, + ["--manifest-entries", entries_json], + cmd_args(src_manifests_path, format = "@{}"), + ["--output", module.as_output()], + hidden = src_manifests, + ) ctx.actions.run(cmd, category = "par", identifier = "manifest-module") json_entries_output = ctx.actions.declare_output("manifest/__manifest__.json") diff --git a/prelude/python/manifest.bzl b/prelude/python/manifest.bzl index 7a832ac38b..b60a07b366 100644 --- a/prelude/python/manifest.bzl +++ b/prelude/python/manifest.bzl @@ -6,6 +6,11 @@ # of this source tree. load("@prelude//:artifact_tset.bzl", "project_artifacts") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", + "gen_shared_libs_action", +) load("@prelude//utils:arglike.bzl", "ArgLike") load(":toolchain.bzl", "PythonToolchainInfo") @@ -82,6 +87,29 @@ def create_manifest_for_source_map( [(dest, artifact, origin) for dest, artifact in srcs.items()], ) +def create_manifest_for_shared_libs( + actions: AnalysisActions, + name: str, + shared_libs: list[SharedLibrary]) -> ManifestInfo: + """ + Generate a manifest for the given list of shared libraries. + """ + return ManifestInfo( + manifest = gen_shared_libs_action( + actions = actions, + out = name + ".manifest", + shared_libs = shared_libs, + gen_action = lambda actions, output, shared_libs: actions.write_json( + output, + [ + (soname, shlib.lib.output, name) + for soname, shlib in shared_libs.items() + ], + ), + ), + artifacts = [(shlib.lib.output, "") for shlib in shared_libs], + ) + def create_manifest_for_source_dir( ctx: AnalysisContext, param: str, diff --git a/prelude/python/native_python_util.bzl b/prelude/python/native_python_util.bzl index e049465d4e..8d0c146521 100644 --- a/prelude/python/native_python_util.bzl +++ b/prelude/python/native_python_util.bzl @@ -179,7 +179,7 @@ def _write_syms_file( symbols_file = ctx.actions.declare_output(name) objects_argsfile = ctx.actions.write(name + ".objects.argsfile", objects) - objects_args = cmd_args(objects_argsfile).hidden(objects) + objects_args = cmd_args(objects_argsfile, hidden = objects) script_env = { "NM": nm, diff --git a/prelude/python/python.bzl b/prelude/python/python.bzl index 1864bdc111..000cb6d6d2 100644 --- a/prelude/python/python.bzl +++ b/prelude/python/python.bzl @@ -5,14 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
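NOTE: for the `generate_startup_function_loader` hunk in make_py_package.bzl above: entries are read from the target's `manifest_module_entries` attribute, sorted by key, and each value is a `module:function` string that the generated loader partitions on `:`, imports, and calls. A hypothetical target showing the expected shape (all names here are illustrative, not from this diff):

    python_binary(
        name = "svc",
        main_module = "svc.main",
        manifest_module_entries = {
            "startup_functions": {
                # keys only control ordering; values are imported and invoked at startup
                "000-tracing": "svc.obs.tracing:install",
            },
        },
    )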
-load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo") load("@prelude//linking:shared_libraries.bzl", "traverse_shared_library_info") load("@prelude//utils:arglike.bzl", "ArgLike") -load("@prelude//utils:utils.bzl", "flatten") load(":compile.bzl", "PycInvalidationMode") load(":interface.bzl", "PythonLibraryInterface", "PythonLibraryManifestsInterface") load(":manifest.bzl", "ManifestInfo") -load(":toolchain.bzl", "PythonPlatformInfo", "get_platform_attr") PythonLibraryManifests = record( label = field(Label), @@ -95,24 +92,28 @@ _BYTECODE_PROJ_PREFIX = { PycInvalidationMode("UNCHECKED_HASH"): "bytecode", } +args_projections = { + "dep_artifacts": _dep_artifacts, + "dep_manifests": _dep_manifests, + "hidden_resources": _hidden_resources, + "resource_artifacts": _resource_artifacts, + "resource_manifests": _resource_manifests, + "source_artifacts": _source_artifacts, + "source_manifests": _source_manifests, + "source_type_artifacts": _source_type_artifacts, + "source_type_manifests": _source_type_manifests, +} +args_projections.update({ + "{}_artifacts".format(prefix): _bytecode_artifacts(mode) + for mode, prefix in _BYTECODE_PROJ_PREFIX.items() +}) +args_projections.update({ + "{}_manifests".format(prefix): _bytecode_manifests(mode) + for mode, prefix in _BYTECODE_PROJ_PREFIX.items() +}) + PythonLibraryManifestsTSet = transitive_set( - args_projections = dict({ - "dep_artifacts": _dep_artifacts, - "dep_manifests": _dep_manifests, - "hidden_resources": _hidden_resources, - "resource_artifacts": _resource_artifacts, - "resource_manifests": _resource_manifests, - "source_artifacts": _source_artifacts, - "source_manifests": _source_manifests, - "source_type_artifacts": _source_type_artifacts, - "source_type_manifests": _source_type_manifests, - }.items() + { - "{}_artifacts".format(prefix): _bytecode_artifacts(mode) - for mode, prefix in _BYTECODE_PROJ_PREFIX.items() - }.items() + { - "{}_manifests".format(prefix): _bytecode_manifests(mode) - for mode, prefix in _BYTECODE_PROJ_PREFIX.items() - }.items()), + args_projections = args_projections, json_projections = { "source_type_manifests_json": _source_type_manifest_jsons, }, @@ -152,11 +153,3 @@ def manifests_to_interface(manifests: PythonLibraryManifestsTSet) -> PythonLibra resource_artifacts = lambda: [manifests.project_as_args("resource_artifacts")], resource_artifacts_with_paths = lambda: [(a, p) for m in manifests.traverse() if m != None and m.resources != None for a, p in m.resources[0].artifacts], ) - -def get_python_deps(ctx: AnalysisContext): - python_platform = ctx.attrs._python_toolchain[PythonPlatformInfo] - cxx_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo] - return flatten( - [ctx.attrs.deps] + - get_platform_attr(python_platform, cxx_platform, ctx.attrs.platform_deps), - ) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 29f2b5bff3..a2a9f4a2c0 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -10,7 +10,6 @@ load( "ArtifactGroupInfo", "ArtifactOutputs", # @unused Used as a type ) -load("@prelude//cxx:compile.bzl", "CxxSrcWithFlags") load("@prelude//cxx:cxx.bzl", "create_shared_lib_link_group_specs") load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load("@prelude//cxx:cxx_executable.bzl", "cxx_executable") @@ -18,13 +17,15 @@ load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", ) +load("@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo") load( 
"@prelude//cxx:cxx_types.bzl", "CxxRuleConstructorParams", ) +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( - "@prelude//cxx:groups.bzl", + "@prelude//cxx:groups_types.bzl", "Group", "GroupAttrs", "GroupMapping", @@ -33,11 +34,14 @@ load( load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout") load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", "build_link_group_info", "get_link_group_info", ) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load("@prelude//cxx:linker.bzl", "get_rpath_origin") load( "@prelude//cxx:omnibus.bzl", @@ -54,8 +58,7 @@ load( ) load( "@prelude//linking:link_info.bzl", - "Linkage", - "LinkedObject", # @unused Used as a type + "LinkedObject", ) load( "@prelude//linking:linkable_graph.bzl", @@ -68,8 +71,15 @@ load( "LinkableProviders", # @unused Used as a type "linkables", ) -load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", + "create_shlib", + "merge_shared_libraries", + "traverse_shared_library_info", +) load("@prelude//linking:strip.bzl", "strip_debug_with_gnu_debuglink") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "flatten", "value_or") load("@prelude//paths.bzl", "paths") load("@prelude//resources.bzl", "gather_resources") @@ -80,7 +90,7 @@ load( "EntryPointKind", "PythonLibraryInterface", ) -load(":make_py_package.bzl", "PexModules", "PexProviders", "make_default_info", "make_py_package") +load(":make_py_package.bzl", "PexModules", "PexProviders", "make_default_info", "make_py_package", "make_run_info") load( ":manifest.bzl", "create_dep_manifest_for_source_map", @@ -98,6 +108,7 @@ load( ) load(":source_db.bzl", "create_dbg_source_db", "create_python_source_db_info", "create_source_db", "create_source_db_no_deps") load(":toolchain.bzl", "NativeLinkStrategy", "PackageStyle", "PythonPlatformInfo", "PythonToolchainInfo", "get_package_style", "get_platform_attr") +load(":typing.bzl", "create_per_target_type_check") OmnibusMetadataInfo = provider( # @unsorted-dict-items @@ -159,11 +170,12 @@ def _get_root_link_group_specs( name = dep.linkable_root_info.name, is_shared_lib = True, root = dep.linkable_root_info, + label = dep.linkable_graph.nodes.value.label, group = Group( name = dep.linkable_root_info.name, mappings = [ GroupMapping( - root = dep.linkable_graph.nodes.value.label, + roots = [dep.linkable_graph.nodes.value.label], traversal = Traversal("node"), ), ], @@ -186,7 +198,7 @@ def _get_root_link_group_specs( name = name, mappings = [ GroupMapping( - root = extension.linkable_graph.nodes.value.label, + roots = [extension.linkable_graph.nodes.value.label], traversal = Traversal("node"), ), ], @@ -200,15 +212,6 @@ def _get_root_link_group_specs( return specs -def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], dict[str, Artifact]): - debuginfo_artifacts = {} - transformed = {} - for name, (artifact, extra) in data.items(): - stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) - transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra - debuginfo_artifacts[name + ".debuginfo"] = debuginfo - return transformed, debuginfo_artifacts - def 
_get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: """ Create link group mappings for shared-only libs that'll force the link to @@ -226,7 +229,7 @@ def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[G name = str(dep.linkable_graph.nodes.value.label.raw_target()), mappings = [ GroupMapping( - root = dep.linkable_graph.nodes.value.label, + roots = [dep.linkable_graph.nodes.value.label], traversal = Traversal("node"), preferred_linkage = Linkage("shared"), ), @@ -395,10 +398,28 @@ def python_executable( exe.sub_targets.update({ "dbg-source-db": [dbg_source_db], "library-info": [library_info], + "main": [DefaultInfo(default_output = ctx.actions.write_json("main.json", main))], "source-db": [source_db], "source-db-no-deps": [source_db_no_deps, create_python_source_db_info(library_info.manifests)], }) + # Type check + type_checker = python_toolchain.type_checker + if type_checker != None: + exe.sub_targets.update({ + "typecheck": [ + create_per_target_type_check( + ctx, + type_checker, + src_manifest, + python_deps, + typeshed = python_toolchain.typeshed_stubs, + py_version = ctx.attrs.py_version_for_type_checking, + typing_enabled = ctx.attrs.typing, + ), + ], + }) + return exe def create_dep_report( @@ -407,12 +428,13 @@ def create_dep_report( main: str, library_info: PythonLibraryInfo) -> DefaultInfo: out = ctx.actions.declare_output("dep-report.json") - cmd = cmd_args() - cmd.add(python_toolchain.traverse_dep_manifest) - cmd.add(cmd_args(main, format = "--main={}")) - cmd.add(cmd_args(out.as_output(), format = "--outfile={}")) - cmd.add(cmd_args(library_info.manifests.project_as_args("dep_manifests"))) - cmd.hidden(library_info.manifests.project_as_args("dep_artifacts")) + cmd = cmd_args( + python_toolchain.traverse_dep_manifest, + cmd_args(main, format = "--main={}"), + cmd_args(out.as_output(), format = "--outfile={}"), + cmd_args(library_info.manifests.project_as_args("dep_manifests")), + hidden = library_info.manifests.project_as_args("dep_artifacts"), + ) ctx.actions.run(cmd, category = "write_dep_report") return DefaultInfo(default_output = out) @@ -431,11 +453,6 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. preload_labels = {d.label: None for d in ctx.attrs.preload_deps} - preload_names = { - name: None - for name, shared_lib in library.shared_libraries().items() - if shared_lib.label in preload_labels - } extensions = {} extra_artifacts = {} @@ -474,7 +491,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - native_libs = omnibus_libs.libraries + shared_libs = [("", shlib) for shlib in omnibus_libs.libraries] omnibus_providers = [] @@ -544,7 +561,7 @@ def _convert_python_library_to_executable( ] extra_preprocessors = [] if ctx.attrs.par_style == "native": - extra_preprocessors.append(CPreprocessor(relative_args = CPreprocessorArgs(args = ["-DNATIVE_PAR_STYLE=1"]))) + extra_preprocessors.append(CPreprocessor(args = CPreprocessorArgs(args = ["-DNATIVE_PAR_STYLE=1"]))) # All deps involved in the link.
link_deps = ( @@ -603,14 +620,26 @@ def _convert_python_library_to_executable( linkables(ctx.attrs.link_group_deps) ), exe_allow_cache_upload = allow_cache_upload, + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) executable_info = cxx_executable(ctx, impl_params) extra["native-executable"] = [DefaultInfo(default_output = executable_info.binary, sub_targets = executable_info.sub_targets)] # Add sub-targets for libs. - for name, lib in executable_info.shared_libs.items(): - extra[name] = [DefaultInfo(default_output = lib.output)] + for shlib in executable_info.shared_libs: + # TODO(agallagher) There appear to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. + if shlib.soname.is_str(): + extra[shlib.soname.ensure_str()] = [DefaultInfo(default_output = shlib.lib.output)] for name, group in executable_info.auto_link_groups.items(): extra[name] = [DefaultInfo(default_output = group.output)] @@ -626,23 +655,29 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. - native_libs = { - paths.join("runtime", "lib", name): lib - for name, lib in executable_info.shared_libs.items() - } - preload_names = [paths.join("runtime", "lib", n) for n in preload_names] + shared_libs = [("runtime/lib", s) for s in executable_info.shared_libs] # TODO expect(len(executable_info.runtime_files) == 0, "OH NO THERE ARE RUNTIME FILES") extra_artifacts.update(dict(extension_info.artifacts)) - native_libs["runtime/bin/{}".format(ctx.attrs.executable_name)] = LinkedObject( - output = executable_info.binary, - unstripped_output = executable_info.binary, - dwp = executable_info.dwp, - ) + shared_libs.append(( + "runtime/bin", + create_shlib( + soname = ctx.attrs.executable_name, + label = ctx.label, + lib = LinkedObject( + output = executable_info.binary, + unstripped_output = executable_info.binary, + dwp = executable_info.dwp, + ), + ), + )) extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - native_libs = {name: shared_lib.lib for name, shared_lib in library.shared_libraries().items()} + shared_libs = [ + ("", shared_lib) + for shared_lib in library.shared_libraries() + ] if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] @@ -652,28 +687,71 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) - shared_libraries = {} - debuginfo_artifacts = {} - # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps # above, before gathering up all native libraries, so we're guaranteed to # have all preload libraries (and their transitive deps) here.
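NOTE: in the strip hunk that follows, each shared library is rebuilt as a stripped `SharedLibrary` while its split debug info is tracked under a `(libdir, shlib, ".debuginfo")` key, which `_pex_modules_common_args` later renders as `libdir/soname.debuginfo`. A sketch of the resulting layout for a hypothetical `libfoo.so` packaged under `runtime/lib`:

    # runtime/lib/libfoo.so            <- stripped object; keeps a .gnu_debuglink reference
    # runtime/lib/libfoo.so.debuginfo  <- extracted debug info, accumulated into
    #                                     debug_artifacts and surfaced via the
    #                                     debuginfo.manifest.json written above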
- for name, lib in native_libs.items(): - shared_libraries[name] = lib, name in preload_names + shared_libs = [ + (libdir, shlib, shlib.label in preload_labels) + for libdir, shlib in shared_libs + ] # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par + debuginfo_files = [] + debuginfos = {} if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): - shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) - extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) - debuginfo_artifacts = library_debuginfo | extension_debuginfo + stripped_shlibs = [] + for libdir, shlib, preload in shared_libs: + name = paths.join( + libdir, + value_or( + shlib.soname.as_str(), + shlib.lib.unstripped_output.short_path, + ), + ) + existing = debuginfos.get(name) + if existing == None: + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = name, + obj = shlib.lib.unstripped_output, + ) + debuginfos[name] = (stripped, debuginfo) + else: + stripped, debuginfo = existing + shlib = SharedLibrary( + soname = shlib.soname, + label = shlib.label, + lib = LinkedObject( + output = stripped, + unstripped_output = shlib.lib.unstripped_output, + dwp = shlib.lib.dwp, + ), + ) + stripped_shlibs.append((libdir, shlib, preload)) + debuginfo_files.append(((libdir, shlib, ".debuginfo"), debuginfo)) + shared_libs = stripped_shlibs + for name, (extension, label) in extensions.items(): + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = name, + obj = extension.unstripped_output, + ) + extensions[name] = ( + LinkedObject( + output = stripped, + unstripped_output = extension.unstripped_output, + dwp = extension.dwp, + ), + label, + ) + debuginfo_files.append((name + ".debuginfo", debuginfo)) # Combine sources and extensions into a map of all modules. pex_modules = PexModules( manifests = library.manifests(), extra_manifests = extra_manifests, - debuginfo_manifest = create_manifest_for_source_map(ctx, "debuginfo", debuginfo_artifacts) if debuginfo_artifacts else None, compile = compile, extensions = create_manifest_for_extensions( ctx, @@ -686,16 +764,17 @@ def _convert_python_library_to_executable( # Build the PEX. pex = make_py_package( - ctx, - python_toolchain, - ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, - package_style, - ctx.attrs.build_args, - pex_modules, - shared_libraries, - main, - hidden_resources, - allow_cache_upload, + ctx = ctx, + python_toolchain = python_toolchain, + make_py_package_cmd = ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, + package_style = package_style, + build_args = ctx.attrs.build_args, + pex_modules = pex_modules, + shared_libraries = shared_libs, + main = main, + hidden_resources = hidden_resources, + allow_cache_upload = allow_cache_upload, + debuginfo_files = debuginfo_files, ) pex.sub_targets.update(extra) @@ -714,6 +793,20 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: if main_module.endswith(".py"): main_module = main_module[:-3] + # if "python-version=3.8" in ctx.attrs.labels: + # # buildifier: disable=print + # print(( + # "\033[1;33m \u26A0 [Warning] " + + # "{0} 3.8 is EOL, and is going away by the end of H1 2024. " + + # "This build triggered //{1}:{2} which still uses {0} 3.8. 
" + + # "Make sure someone (you or the appropriate maintainers) upgrades it to {0} 3.10 soon to avoid breakages. " + + # "https://fburl.com/python-eol \033[0m" + # ).format( + # "Cinder" if "python-flavor=cinder" in ctx.attrs.labels else "Python", + # ctx.label.package, + # ctx.attrs.name, + # )) + if main_module != None: main = (EntryPointKind("module"), main_module) else: @@ -730,9 +823,9 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: srcs, {}, compile = value_or(ctx.attrs.compile, False), - allow_cache_upload = ctx.attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), ) return [ make_default_info(pex), - RunInfo(pex.run_cmd), + make_run_info(pex, ctx.attrs.run_with_inplace), ] diff --git a/prelude/python/python_library.bzl b/prelude/python/python_library.bzl index e4238f8ae4..16ff1ff412 100644 --- a/prelude/python/python_library.bzl +++ b/prelude/python/python_library.bzl @@ -53,6 +53,7 @@ load(":needed_coverage.bzl", "PythonNeededCoverageInfo") load(":python.bzl", "PythonLibraryInfo", "PythonLibraryManifests", "PythonLibraryManifestsTSet") load(":source_db.bzl", "create_python_source_db_info", "create_source_db", "create_source_db_no_deps") load(":toolchain.bzl", "PythonToolchainInfo") +load(":typing.bzl", "create_per_target_type_check") def dest_prefix(label: Label, base_module: [None, str]) -> str: """ @@ -310,6 +311,22 @@ def python_library_impl(ctx: AnalysisContext) -> list[Provider]: # Source DBs. sub_targets["source-db"] = [create_source_db(ctx, src_type_manifest, deps)] sub_targets["source-db-no-deps"] = [create_source_db_no_deps(ctx, src_types), create_python_source_db_info(library_info.manifests)] + + # Type check + type_checker = python_toolchain.type_checker + if type_checker != None: + sub_targets["typecheck"] = [ + create_per_target_type_check( + ctx, + type_checker, + src_type_manifest, + deps, + typeshed = python_toolchain.typeshed_stubs, + py_version = ctx.attrs.py_version_for_type_checking, + typing_enabled = ctx.attrs.typing, + ), + ] + providers.append(DefaultInfo(sub_targets = sub_targets)) # Create, augment and provide the linkable graph. diff --git a/prelude/python/python_needed_coverage_test.bzl b/prelude/python/python_needed_coverage_test.bzl index 972282a3f6..8da3630232 100644 --- a/prelude/python/python_needed_coverage_test.bzl +++ b/prelude/python/python_needed_coverage_test.bzl @@ -7,7 +7,7 @@ load( "@prelude//tests:re_utils.bzl", - "get_re_executor_from_props", + "get_re_executors_from_props", ) load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") load( @@ -42,8 +42,8 @@ def python_needed_coverage_test_impl(ctx: AnalysisContext) -> list[Provider]: test_type = "simple" test_env["TEST_PILOT"] = "1" - # Setup a RE executor based on the `remote_execution` param. - re_executor = get_re_executor_from_props(ctx) + # Setup RE executors based on the `remote_execution` param. + re_executor, executor_overrides = get_re_executors_from_props(ctx) return inject_test_run_info( ctx, @@ -54,6 +54,7 @@ def python_needed_coverage_test_impl(ctx: AnalysisContext) -> list[Provider]: labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, default_executor = re_executor, + executor_overrides = executor_overrides, # We implicitly make this test via the project root, instead of # the cell root (e.g. fbcode root). 
run_from_project_root = re_executor != None, diff --git a/prelude/python/python_test.bzl b/prelude/python/python_test.bzl index c9f4b695b8..bd55642c62 100644 --- a/prelude/python/python_test.bzl +++ b/prelude/python/python_test.bzl @@ -8,7 +8,7 @@ load("@prelude//:paths.bzl", "paths") load( "@prelude//tests:re_utils.bzl", - "get_re_executor_from_props", + "get_re_executors_from_props", ) load("@prelude//utils:utils.bzl", "from_named_set", "value_or") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") @@ -61,8 +61,8 @@ def python_test_impl(ctx: AnalysisContext) -> list[Provider]: pex = python_test_executable(ctx) test_cmd = pex.run_cmd - # Setup a RE executor based on the `remote_execution` param. - re_executor = get_re_executor_from_props(ctx) + # Setup RE executors based on the `remote_execution` param. + re_executor, executor_overrides = get_re_executors_from_props(ctx) return inject_test_run_info( ctx, @@ -73,6 +73,7 @@ def python_test_impl(ctx: AnalysisContext) -> list[Provider]: labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, default_executor = re_executor, + executor_overrides = executor_overrides, # We implicitly make this test via the project root, instead of # the cell root (e.g. fbcode root). run_from_project_root = re_executor != None, diff --git a/prelude/python/python_wheel.bzl b/prelude/python/python_wheel.bzl new file mode 100644 index 0000000000..ffab5511bc --- /dev/null +++ b/prelude/python/python_wheel.bzl @@ -0,0 +1,226 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:link.bzl", + "cxx_link_shared_library", +) +load( + "@prelude//cxx:link_types.bzl", + "link_options", +) +load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") +load( + "@prelude//linking:link_info.bzl", + "LinkArgs", + "LinkStrategy", + "get_lib_output_style", + "get_link_info", +) +load( + "@prelude//linking:linkable_graph.bzl", + "LinkableGraph", + "LinkableNode", # @unused Used as a type + "LinkableRootInfo", + "get_deps_for_link", + "get_linkable_graph_node_map_func", + get_link_info_for_node = "get_link_info", +) +load("@prelude//python:manifest.bzl", "create_manifest_for_entries") +load("@prelude//python:python.bzl", "PythonLibraryInfo") +load("@prelude//utils:expect.bzl", "expect") +load( + "@prelude//utils:graph_utils.bzl", + "depth_first_traversal_by", +) +load("@prelude//decls/toolchains_common.bzl", "toolchains_common") +load("@prelude//transitions/constraint_overrides.bzl", "constraint_overrides_transition") +load("@prelude//transitions/constraint_overrides.bzl", "constraint_overrides_transition") + +def _link_deps( + link_infos: dict[Label, LinkableNode], + deps: list[Label], + link_strategy: LinkStrategy, + pic_behavior: PicBehavior) -> list[Label]: + """ + Return transitive deps required to link dynamically against the given deps. + This will follow through deps of statically linked inputs and exported + deps of everything else (see https://fburl.com/diffusion/rartsbkw from v1).
+ """ + + def find_deps(node: Label): + return get_deps_for_link(link_infos[node], link_strategy, pic_behavior) + + return depth_first_traversal_by(link_infos, deps, find_deps) + +def _impl(ctx: AnalysisContext) -> list[Provider]: + providers = [] + + cmd = [] + hidden = [] + + cmd.append(ctx.attrs._wheel[RunInfo]) + + name_parts = [ + ctx.attrs.dist or ctx.attrs.name, + ctx.attrs.version, + ctx.attrs.python, + ctx.attrs.abi, + ctx.attrs.platform, + ] + wheel = ctx.actions.declare_output("{}.whl".format("-".join(name_parts))) + cmd.append(cmd_args(wheel.as_output(), format = "--output={}")) + + cmd.append("--name={}".format(ctx.attrs.dist or ctx.attrs.name)) + cmd.append("--version={}".format(ctx.attrs.version)) + + if ctx.attrs.entry_points: + cmd.append("--entry-points={}".format(json.encode(ctx.attrs.entry_points))) + + for key, val in ctx.attrs.extra_metadata.items(): + cmd.extend(["--metadata", key, val]) + + cmd.extend(["--metadata", "Requires-Python", "=={}.*".format(ctx.attrs.python[2:])]) + + for requires in ctx.attrs.requires: + cmd.extend(["--metadata", "Requires-Dist", requires]) + + for name, script in ctx.attrs.scripts.items(): + cmd.extend(["--data", paths.join("scripts", name), script]) + + libraries = {} + for lib in ctx.attrs.libraries: + libraries[lib.label] = lib + if ctx.attrs.libraries_query != None: + for lib in ctx.attrs.libraries_query: + if PythonLibraryInfo in lib: + libraries[lib.label] = lib + + srcs = [] + extensions = {} + for dep in libraries.values(): + manifests = dep[PythonLibraryInfo].manifests.value + if manifests.srcs != None: + srcs.append(manifests.srcs) + if manifests.resources != None: + expect(not manifests.resources[1]) + srcs.append(manifests.resources[0]) + if manifests.extensions != None: + toolchain_info = get_cxx_toolchain_info(ctx) + items = manifests.extensions.items() + expect(len(items) == 1) + extension = items[0][0] + root = dep[LinkableRootInfo] + + # Add link inputs for the linkable root and any deps. 
+ inputs = [] + inputs.append(get_link_info( + infos = root.link_infos, + prefer_stripped = ctx.attrs.prefer_stripped_objects, + )) + link_infos = get_linkable_graph_node_map_func(dep[LinkableGraph])() + for ext_dep in _link_deps( + link_infos, + root.deps, + LinkStrategy("static_pic"), + toolchain_info.pic_behavior, + ): + node = link_infos[ext_dep] + output_style = get_lib_output_style( + LinkStrategy("static_pic"), + node.preferred_linkage, + toolchain_info.pic_behavior, + ) + inputs.append(get_link_info_for_node( + node, + output_style, + prefer_stripped = ctx.attrs.prefer_stripped_objects, + )) + + # link the rule + link_result = cxx_link_shared_library( + ctx = ctx, + output = extension, + opts = link_options( + links = [LinkArgs(infos = inputs)], + category_suffix = "native_extension", + identifier = extension, + link_execution_preference = LinkExecutionPreference("any"), + ), + ) + extensions[extension] = link_result.linked_object + + if extensions: + srcs.append( + create_manifest_for_entries( + ctx, + name = "extensions.txt", + entries = [ + (name, extension.output, "") + for name, extension in extensions.items() + ], + ), + ) + + for manifest in srcs: + cmd.append(cmd_args(manifest.manifest, format = "--srcs={}")) + for a, _ in manifest.artifacts: + hidden.append(a) + + ctx.actions.run(cmd_args(cmd, hidden = hidden), category = "wheel") + providers.append(DefaultInfo(default_output = wheel)) + + return providers + +python_wheel = rule( + impl = _impl, + cfg = constraint_overrides_transition, + attrs = dict( + dist = attrs.option(attrs.string(), default = None), + version = attrs.string(default = "1.0.0"), + python = attrs.string( + default = select({ + "ovr_config//third-party/python/constraints:3.10": "py3.10", + "ovr_config//third-party/python/constraints:3.11": "py3.11", + "ovr_config//third-party/python/constraints:3.12": "py3.12", + "ovr_config//third-party/python/constraints:3.8": "py3.8", + "ovr_config//third-party/python/constraints:3.9": "py3.9", + }), + ), + entry_points = attrs.dict( + key = attrs.string(), + value = attrs.dict( + key = attrs.string(), + value = attrs.string(), + ), + default = {}, + ), + requires = attrs.list(attrs.string(), default = []), + extra_metadata = attrs.dict( + key = attrs.string(), + value = attrs.string(), + default = {}, + ), + abi = attrs.string(default = "none"), + platform = attrs.string( + default = select({ + "DEFAULT": "any", + "ovr_config//os:linux-arm64": "linux_aarch64", + "ovr_config//os:linux-x86_64": "linux_x86_64", + }), + ), + constraint_overrides = attrs.list(attrs.string(), default = []), + libraries = attrs.list(attrs.dep(providers = [PythonLibraryInfo]), default = []), + scripts = attrs.dict(key = attrs.string(), value = attrs.source(), default = {}), + libraries_query = attrs.option(attrs.query(), default = None), + prefer_stripped_objects = attrs.default_only(attrs.bool(default = False)), + _wheel = attrs.default_only(attrs.exec_dep(default = "prelude//python/tools:wheel")), + _cxx_toolchain = toolchains_common.cxx(), + ), +) diff --git a/prelude/python/runtime/BUCK.v2 b/prelude/python/runtime/BUCK.v2 index 1cac267a37..dbf6fa6b73 100644 --- a/prelude/python/runtime/BUCK.v2 +++ b/prelude/python/runtime/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + filegroup( name = "bootstrap_files", srcs = glob(["__par__/**/*.py"]), diff --git a/prelude/python/runtime/__par__/bootstrap.py b/prelude/python/runtime/__par__/bootstrap.py index 
0ee24a9c8e..1cf7a424a4 100644 --- a/prelude/python/runtime/__par__/bootstrap.py +++ b/prelude/python/runtime/__par__/bootstrap.py @@ -5,12 +5,23 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations import os +import types from typing import Callable, Sequence +def iscoroutinefunction(func: Callable[[], None]) -> bool: + # This is the guts of inspect.iscoroutinefunction without the cost of inspect import + CO_COROUTINE = 128 # This hasn't changed in 8 years most likely never will + return isinstance(func, types.FunctionType) and bool( + func.__code__.co_flags & CO_COROUTINE + ) + + def run_as_main( main_module: str, main_function: str | None, @@ -74,4 +85,10 @@ def run_as_main( main.__globals__["__name__"] = "__main__" for hook in main_function_hooks: hook() - main() + + if iscoroutinefunction(main): + import asyncio + + asyncio.run(main()) + else: + main() diff --git a/prelude/python/source_db.bzl b/prelude/python/source_db.bzl index c799e576de..4d299a03d0 100644 --- a/prelude/python/source_db.bzl +++ b/prelude/python/source_db.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//python:python.bzl", "PythonLibraryInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load( ":manifest.bzl", "ManifestInfo", # @unused Used as a type @@ -41,10 +42,12 @@ def create_source_db( dep_manifests = ctx.actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in python_deps]) dependencies = cmd_args(dep_manifests.project_as_args("source_type_manifests"), format = "--dependency={}") - dependencies_file = ctx.actions.write("source_db_dependencies", dependencies) - dependencies_file = cmd_args(dependencies_file, format = "@{}").hidden(dependencies) + cmd.add(at_argfile( + actions = ctx.actions, + name = "source_db_dependencies", + args = dependencies, + )) - cmd.add(dependencies_file) artifacts.append(dep_manifests.project_as_args("source_type_artifacts")) ctx.actions.run(cmd, category = "py_source_db") @@ -71,9 +74,12 @@ def create_dbg_source_db( dep_manifests = ctx.actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in python_deps]) dependencies = cmd_args(dep_manifests.project_as_args("source_manifests"), format = "--dependency={}") - dependencies_file = ctx.actions.write("dbg_source_db_dependencies", dependencies) - dependencies_file = cmd_args(dependencies_file, format = "@{}").hidden(dependencies) - cmd.add(dependencies_file) + cmd.add(at_argfile( + actions = ctx.actions, + name = "dbg_source_db_dependencies", + args = dependencies, + )) + artifacts.append(dep_manifests.project_as_args("source_artifacts")) ctx.actions.run(cmd, category = "py_dbg_source_db") diff --git a/prelude/python/sourcedb/build.bxl b/prelude/python/sourcedb/build.bxl index 13cbf9c855..81ded5ae7f 100644 --- a/prelude/python/sourcedb/build.bxl +++ b/prelude/python/sourcedb/build.bxl @@ -12,22 +12,34 @@ def _get_artifact(result: bxl.BuildResult) -> Artifact: return artifact fail("Sourcedb rule must have at least one artifact") +def _get_sourcedb(result: list[bxl.EnsuredArtifact]) -> bxl.EnsuredArtifact: + # NOTE: the first artifact is always the source db json + # T124989384 will make this nicer + for artifact in result: + return artifact + fail("Sourcedb rule must have at least one artifact") + def _abort_on_build_failure(target_label: TargetLabel, result: bxl.BuildResult) -> None: for failure in result.failures(): error_message = "Target `{}` cannot be built by Buck\nreason: 
{}".format(target_label, failure) fail(error_message) -# Build sourcedb for the given targets, and return a mapping from target names -# to the corresponding sourcedb JSON file location. -def do_build( +def _build( ctx: bxl.Context, - targets: list[ConfiguredTargetLabel]) -> dict[TargetLabel, Artifact]: + targets: list[ConfiguredTargetLabel]) -> dict[Label, bxl.BuildResult]: # Build sourcedbs of all targets configured_sub_targets = [ target.with_sub_target(["source-db-no-deps"]) for target in targets ] - build_results = ctx.build(configured_sub_targets) + return ctx.build(configured_sub_targets) + +# Build sourcedb for the given targets, and return a mapping from target names +# to the corresponding sourcedb JSON file location. +def do_build( + ctx: bxl.Context, + targets: list[ConfiguredTargetLabel]) -> dict[TargetLabel, Artifact]: + build_results = _build(ctx, targets) # Compute result dict output = {} @@ -37,3 +49,14 @@ def do_build( path = _get_artifact(result) output[raw_target] = path return output + +# Same as do_build, except calls ensure to ensure artifacts are materialized. +def do_build_ensured( + ctx: bxl.Context, + targets: list[ConfiguredTargetLabel]) -> dict[TargetLabel, bxl.EnsuredArtifact]: + build_results = _build(ctx, targets) + for key, value in build_results.items(): + _abort_on_build_failure(key.raw_target(), value) + + ensured_artifacts = ctx.output.ensure_multiple(build_results) + return {label.raw_target(): _get_sourcedb(artifact) for label, artifact in ensured_artifacts.items()} diff --git a/prelude/python/sourcedb/classic.bxl b/prelude/python/sourcedb/classic.bxl index 7504ad8129..1dec7df197 100644 --- a/prelude/python/sourcedb/classic.bxl +++ b/prelude/python/sourcedb/classic.bxl @@ -16,7 +16,7 @@ def _build_entry_point(ctx: bxl.Context) -> None: ) actions = bxl_actions.actions - query = ctx.cquery() + query = ctx.uquery() targets = do_query(ctx, query, actions, [query.eval(target) for target in ctx.cli_args.target]) built_sourcedbs = do_build(ctx, targets) diff --git a/prelude/python/sourcedb/code_navigation.bxl b/prelude/python/sourcedb/code_navigation.bxl index fd127ec888..9272ced6a7 100644 --- a/prelude/python/sourcedb/code_navigation.bxl +++ b/prelude/python/sourcedb/code_navigation.bxl @@ -16,12 +16,11 @@ def _build_entry_point(ctx: bxl.Context) -> None: ) actions = bxl_actions.actions - query = ctx.cquery() + query = ctx.uquery() root = ctx.root() sources = ["{}/{}".format(root, source) for source in ctx.cli_args.source] - target_universe = ctx.uquery().owner(sources) - targets = do_query(ctx, query, actions, query.owner(sources, target_universe)) + targets = do_query(ctx, query, actions, query.owner(sources)) built_sourcedbs = do_build(ctx, targets) # Ensure all source files are materialized diff --git a/prelude/python/sourcedb/filter.bxl b/prelude/python/sourcedb/filter.bxl new file mode 100644 index 0000000000..849d8db228 --- /dev/null +++ b/prelude/python/sourcedb/filter.bxl @@ -0,0 +1,64 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +BUCK_PYTHON_RULE_KINDS = [ + "python_binary", + "python_library", + "python_test", +] +BUCK_PYTHON_RULE_KIND_QUERY = "|".join(BUCK_PYTHON_RULE_KINDS) + +def filter_root_targets( + query: bxl.UqueryContext, + target_patterns: typing.Any) -> bxl.TargetSet: + # Find all Pure-Python targets + candidate_targets = utarget_set() + for pattern in target_patterns: + candidate_targets += query.kind( + BUCK_PYTHON_RULE_KIND_QUERY, + pattern, + ) + + # Don't check generated rules + filtered_targets = candidate_targets - query.attrfilter( + "labels", + "generated", + candidate_targets, + ) + + # Provide an opt-out label + filtered_targets = filtered_targets - query.attrfilter( + "labels", + "no_pyre", + candidate_targets, + ) + return filtered_targets + +def do_filter( + query: bxl.UqueryContext, + target_patterns: typing.Any) -> list[TargetLabel]: + root_targets = filter_root_targets(query, target_patterns) + return [root_target.label for root_target in root_targets] + +def _do_filter_entry_point(ctx: bxl.Context) -> None: + query = ctx.uquery() + targets = do_filter( + query, + [query.eval(target) for target in ctx.cli_args.target], + ) + ctx.output.print_json(targets) + +filter = bxl_main( + doc = ( + "Expand target patterns and look for all targets in immediate sources " + + "that will be built by Pyre." + ), + impl = _do_filter_entry_point, + cli_args = { + "target": cli_args.list(cli_args.string()), + }, +) diff --git a/prelude/python/sourcedb/ide.bxl b/prelude/python/sourcedb/ide.bxl new file mode 100644 index 0000000000..2431278959 --- /dev/null +++ b/prelude/python/sourcedb/ide.bxl @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":build.bxl", "do_build_ensured") +load(":query.bxl", "do_query") + +def _build_entry_point(ctx: bxl.Context) -> None: + bxl_actions = ctx.bxl_actions( + target_platform = "prelude//platforms:default", + ) + actions = bxl_actions.actions + + query = ctx.uquery() + + output = {} + for source in ctx.cli_args.source: + owning_targets = query.owner(source) + targets_configured = do_query(ctx, query, actions, owning_targets) + sourcedbs = do_build_ensured(ctx, targets_configured) + + output[source] = {"db": sourcedbs, "owning_targets": [target.label for target in owning_targets]} + + ctx.output.print_json(output) + +build = bxl_main( + doc = """Build Python sourcedb for Python IDE support. + + It takes a list of file paths, and will find the owner targets for all + those files and build source-db for those owning targets, returning them all. 
+ """, + impl = _build_entry_point, + cli_args = { + "source": cli_args.list( + cli_args.string( + doc = "File to build a source db for (relative to source root)", + ), + ), + }, +) diff --git a/prelude/python/sourcedb/merge.bxl b/prelude/python/sourcedb/merge.bxl index af6219f45c..525bdf8928 100644 --- a/prelude/python/sourcedb/merge.bxl +++ b/prelude/python/sourcedb/merge.bxl @@ -17,13 +17,14 @@ def do_merge( dependency_key = bxl_actions.exec_deps.keys()[0] - command = cmd_args(bxl_actions.exec_deps[dependency_key][RunInfo]) - command.add(merger_input) - command.add("--output") - command.add(merger_output.as_output()) - - # Declare that the merger result depends on all sourcedbs - command.hidden(built_sourcedbs.values()) + command = cmd_args( + bxl_actions.exec_deps[dependency_key][RunInfo], + merger_input, + "--output", + merger_output.as_output(), + # Declare that the merger result depends on all sourcedbs + hidden = built_sourcedbs.values(), + ) actions.run(command, category = command_category) return ctx.output.ensure(merger_output) diff --git a/prelude/python/sourcedb/owners.bxl b/prelude/python/sourcedb/owners.bxl new file mode 100644 index 0000000000..deeefbf7e0 --- /dev/null +++ b/prelude/python/sourcedb/owners.bxl @@ -0,0 +1,41 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":filter.bxl", "filter_root_targets") + +def _owners_entry_point(ctx: bxl.Context) -> None: + query = ctx.uquery() + root = ctx.root() + + owning_targets = filter_root_targets(query, query.owner(ctx.cli_args.source)) + + files = [] + if len(owning_targets) > 0: + target = owning_targets[0] + files = query.inputs(target) + cell_root = ctx.audit().cell([target.label.cell])[target.label.cell] + files = ["{}/{}".format(cell_root, file.path) for file in files] + + ctx.output.print_json({"files": files, "owning_targets": [target.label for target in owning_targets], "root": root}) + +build = bxl_main( + doc = """Determines owning python targets and root, providing files within the first owning target. + Note: must be run from within fbsource. + + It takes a file path, returning an object of format + `{'owning_targets': List, 'root': string, 'files': List}` + - Owning targets is the list of python target labels that own the file. + - Root is the buck project root. + - Files is the list of files (absolute paths) within the first owning target, if any. 
+ """, + impl = _owners_entry_point, + cli_args = { + "source": cli_args.string( + doc = "Source file (absolute path)", + ), + }, +) diff --git a/prelude/python/sourcedb/query.bxl b/prelude/python/sourcedb/query.bxl index 78fee7d167..26d4b51ac6 100644 --- a/prelude/python/sourcedb/query.bxl +++ b/prelude/python/sourcedb/query.bxl @@ -7,46 +7,7 @@ load("@prelude//python:python.bzl", "PythonLibraryManifestsTSet") load("@prelude//python:source_db.bzl", "PythonSourceDBInfo") - -BUCK_PYTHON_RULE_KINDS = [ - "python_binary", - "python_library", - "python_test", -] -BUCK_PYTHON_RULE_KIND_QUERY = "|".join(BUCK_PYTHON_RULE_KINDS) - -def _filter_root_targets( - query: bxl.CqueryContext, - target_patterns: typing.Any) -> bxl.ConfiguredTargetSet: - # Find all Pure-Python targets - candidate_targets = ctarget_set() - for pattern in target_patterns: - candidate_targets += query.kind( - BUCK_PYTHON_RULE_KIND_QUERY, - pattern, - ) - - # Don't check generated rules - filtered_targets = candidate_targets - query.attrfilter( - "labels", - "generated", - candidate_targets, - ) - - # Do include unittest sources, which are marked as generated - filtered_targets = filtered_targets + query.attrfilter( - "labels", - "unittest-library", - candidate_targets, - ) - - # Provide an opt-out label - filtered_targets = filtered_targets - query.attrfilter( - "labels", - "no_pyre", - candidate_targets, - ) - return filtered_targets +load("@prelude//python/sourcedb/filter.bxl", "filter_root_targets") def _get_python_library_manifests_from_analysis_result( analysis_result: bxl.AnalysisResult) -> [PythonLibraryManifestsTSet, None]: @@ -60,7 +21,7 @@ def _get_python_library_manifests_from_analysis_result( def _get_python_library_manifests_from_targets( ctx: bxl.Context, - targets: bxl.ConfiguredTargetSet) -> list[PythonLibraryManifestsTSet]: + targets: bxl.TargetSet) -> list[PythonLibraryManifestsTSet]: return filter(None, [ _get_python_library_manifests_from_analysis_result(analysis_result) for analysis_result in ctx.analysis(targets).values() @@ -69,7 +30,7 @@ def _get_python_library_manifests_from_targets( def get_python_library_manifests_tset_from_targets( ctx: bxl.Context, actions: AnalysisActions, - root_targets: bxl.ConfiguredTargetSet) -> PythonLibraryManifestsTSet: + root_targets: bxl.TargetSet) -> PythonLibraryManifestsTSet: return actions.tset( PythonLibraryManifestsTSet, children = _get_python_library_manifests_from_targets(ctx, root_targets), @@ -77,22 +38,24 @@ def get_python_library_manifests_tset_from_targets( def get_python_library_manifests_tset_from_target_patterns( ctx: bxl.Context, - query: bxl.CqueryContext, + query: bxl.UqueryContext, actions: AnalysisActions, target_patterns: typing.Any) -> PythonLibraryManifestsTSet: - root_targets = _filter_root_targets(query, target_patterns) + root_targets = filter_root_targets(query, target_patterns) return get_python_library_manifests_tset_from_targets(ctx, actions, root_targets) def do_query( ctx: bxl.Context, - query: bxl.CqueryContext, + query: bxl.UqueryContext, actions: AnalysisActions, target_patterns: typing.Any) -> list[ConfiguredTargetLabel]: - manifests_of_transitive_dependencies = get_python_library_manifests_tset_from_target_patterns( - ctx, - query, - actions, - target_patterns, + manifests_of_transitive_dependencies = ( + get_python_library_manifests_tset_from_target_patterns( + ctx, + query, + actions, + target_patterns, + ) ) return [ manifest.label.configured_target() @@ -101,9 +64,14 @@ def do_query( ] def _do_query_entry_point(ctx: bxl.Context) 
-> None: - query = ctx.cquery() + query = ctx.uquery() actions = ctx.bxl_actions().actions - targets = do_query(ctx, query, actions, [query.eval(target) for target in ctx.cli_args.target]) + targets = do_query( + ctx, + query, + actions, + [query.eval(target) for target in ctx.cli_args.target], + ) ctx.output.print_json([target.raw_target() for target in targets]) query = bxl_main( diff --git a/prelude/python/sourcedb/typing_query.bxl b/prelude/python/sourcedb/typing_query.bxl new file mode 100644 index 0000000000..da3d52510d --- /dev/null +++ b/prelude/python/sourcedb/typing_query.bxl @@ -0,0 +1,55 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//python/sourcedb/filter.bxl", "BUCK_PYTHON_RULE_KIND_QUERY") + +def get_owners_for_files( + query: bxl.UqueryContext, + sources: list[str]) -> dict[str, bxl.TargetSet]: + return {source: query.owner(source) for source in sources} + +def has_any_python_targets_with_typing( + query: bxl.UqueryContext, + owners: bxl.TargetSet) -> bool: + targets_with_typing = query.attrfilter("typing", "True", owners) + + python_targets_with_typing = query.kind( + BUCK_PYTHON_RULE_KIND_QUERY, + targets_with_typing, + ) + + return len(python_targets_with_typing) != 0 + +def get_files_per_target_typed( + query: bxl.UqueryContext, + sources: list[str]) -> dict[str, bool]: + files_to_owners = get_owners_for_files(query, sources) + + return { + file: has_any_python_targets_with_typing(query, owners) + for file, owners in files_to_owners.items() + } + +def _do_typing_query_entry_point(ctx: bxl.Context) -> None: + query = ctx.uquery() + files_per_target_typed = get_files_per_target_typed(query, ctx.cli_args.source) + ctx.output.print_json(files_per_target_typed) + +typing_query = bxl_main( + doc = ( + "Queries Buck about a given file to determine if any owning targets have typing " + + "in their attributes." + ), + impl = _do_typing_query_entry_point, + cli_args = { + "source": cli_args.list( + cli_args.string( + doc = "The absolute path to a file you are trying to get typing attributes of", + ), + ), + }, +) diff --git a/prelude/python/toolchain.bzl b/prelude/python/toolchain.bzl index 075c8d835a..f7436ec580 100644 --- a/prelude/python/toolchain.bzl +++ b/prelude/python/toolchain.bzl @@ -50,6 +50,7 @@ PythonToolchainInfo = provider( "default_sitecustomize": provider_field(typing.Any, default = None), # The interpreter to use to compile bytecode. 
"host_interpreter": provider_field(typing.Any, default = None), + "bundled_interpreter": provider_field(typing.Any, default = None), "interpreter": provider_field(typing.Any, default = None), "version": provider_field(typing.Any, default = None), "native_link_strategy": provider_field(typing.Any, default = None), @@ -61,6 +62,7 @@ PythonToolchainInfo = provider( "package_style": provider_field(typing.Any, default = None), "strip_libpar": provider_field(typing.Any, default = None), "make_source_db": provider_field(typing.Any, default = None), + "native_library_runtime_paths": provider_field(list[str], default = []), "make_source_db_no_deps": provider_field(typing.Any, default = None), "make_py_package_inplace": provider_field(typing.Any, default = None), "make_py_package_standalone": provider_field(typing.Any, default = None), @@ -68,6 +70,8 @@ PythonToolchainInfo = provider( "make_py_package_modules": provider_field(typing.Any, default = None), "pex_executor": provider_field(typing.Any, default = None), "pex_extension": provider_field(typing.Any, default = None), + "type_checker": provider_field(typing.Any, default = None), + "typeshed_stubs": provider_field(typing.Any, default = []), "emit_omnibus_metadata": provider_field(typing.Any, default = None), "fail_with_message": provider_field(typing.Any, default = None), "emit_dependency_metadata": provider_field(typing.Any, default = None), diff --git a/prelude/python/tools/BUCK.v2 b/prelude/python/tools/BUCK.v2 index 1e325372f5..6463b5c018 100644 --- a/prelude/python/tools/BUCK.v2 +++ b/prelude/python/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( @@ -126,3 +132,9 @@ prelude.python_bootstrap_binary( main = "fail_with_message.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "wheel", + main = "wheel.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/python/tools/__test_main__.py b/prelude/python/tools/__test_main__.py index 1ce6a946f8..ff4f2c1d84 100644 --- a/prelude/python/tools/__test_main__.py +++ b/prelude/python/tools/__test_main__.py @@ -32,13 +32,9 @@ import time import traceback import unittest -import warnings +from importlib.machinery import PathFinder -with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=DeprecationWarning) - import imp - try: from StringIO import StringIO # type: ignore except ImportError: @@ -88,7 +84,7 @@ def include(self, path): return not self.omit(path) -class DebugWipeFinder: +class DebugWipeFinder(PathFinder): """ PEP 302 finder that uses a DebugWipeLoader for all files which do not need coverage @@ -97,28 +93,15 @@ class DebugWipeFinder: def __init__(self, matcher): self.matcher = matcher - def find_module(self, fullname, path=None): - _, _, basename = fullname.rpartition(".") - try: - fd, pypath, (_, _, kind) = imp.find_module(basename, path) - except Exception: - # Finding without hooks using the imp module failed. One reason - # could be that there is a zip file on sys.path. The imp module - # does not support loading from there. Leave finding this module to - # the others finders in sys.meta_path. 
+ def find_spec(self, fullname, path=None, target=None): + spec = super().find_spec(fullname, path=path, target=target) + if spec is None or spec.origin is None: return None - - if hasattr(fd, "close"): - fd.close() - if kind != imp.PY_SOURCE: + if not spec.origin.endswith(".py"): return None - if self.matcher.include(pypath): + if self.matcher.include(spec.origin): return None - """ - This is defined to match CPython's PyVarObject struct - """ - class PyVarObject(ctypes.Structure): _fields_ = [ ("ob_refcnt", ctypes.c_long), @@ -132,8 +115,9 @@ class DebugWipeLoader(SourceFileLoader): """ def get_code(self, fullname): - code = super(DebugWipeLoader, self).get_code(fullname) - if code: + code = super().get_code(fullname) + # This can segfault in 3.12 + if code and sys.version_info < (3, 12): # Ideally we'd do # code.co_lnotab = b'' # But code objects are READONLY. Not to worry though; we'll @@ -142,7 +126,9 @@ def get_code(self, fullname): code_impl.ob_size = 0 return code - return DebugWipeLoader(fullname, pypath) + if isinstance(spec.loader, SourceFileLoader): + spec.loader = DebugWipeLoader(fullname, spec.origin) + return spec def optimize_for_coverage(cov, include_patterns, omit_patterns): @@ -200,8 +186,7 @@ def fileno(self): return self._fileno -# pyre-fixme[11]: Annotation `unittest._TextTestResult` is not defined as a type. -class BuckTestResult(unittest._TextTestResult): +class BuckTestResult(unittest.TextTestResult): """ Our own TestResult class that outputs data in a format that can be easily parsed by buck's test runner. @@ -273,7 +258,14 @@ def stopTest(self, test): # test cases, and fall back to looking the test up from the suite # otherwise. if not hasattr(test, "_testMethodName"): - test = self._find_next_test(self._suite) + potential_test = self._find_next_test(self._suite) + + if potential_test is not None: + test = potential_test + elif hasattr(test, "id"): + # If the next test can't be found, this could be a failure in class teardown. Fall back + # to using the id, which will likely be the method name of the test method. + test._testMethodName = test.id() self._results.append( { @@ -672,11 +664,20 @@ def run(self): if self.options.list: for test in self.get_tests(test_suite): + # Python 3.12 changed the implementation of `TestCase.__str__`. + # We construct the name manually here to ensure consistency between + # Python versions. + # Example: "test_basic (tests.test_object.TestAbsent)".
+ method_name = getattr(test, "_testMethodName", "") + cls = test.__class__ if self.options.list_format == "python": - name = str(test) + if method_name: + name = f"{method_name} ({cls.__module__}.{cls.__qualname__})" + else: + name = str(test) + elif self.options.list_format == "buck": - method_name = getattr(test, "_testMethodName", "") - name = _format_test_name(test.__class__, method_name) + name = _format_test_name(cls, method_name) else: raise Exception( "Bad test list format: %s" % (self.options.list_format,) @@ -772,12 +773,12 @@ def convert_to_diff_cov_str(self, analysis): analysis[3][-1] if len(analysis[3]) else 0, ) lines = ["N"] * numLines - for l in analysis[1]: - lines[l - 1] = "C" - for l in analysis[2]: - lines[l - 1] = "X" - for l in analysis[3]: - lines[l - 1] = "U" + for line in analysis[1]: + lines[line - 1] = "C" + for line in analysis[2]: + lines[line - 1] = "X" + for line in analysis[3]: + lines[line - 1] = "U" return "".join(lines) diff --git a/prelude/python/tools/compile.py b/prelude/python/tools/compile.py index 2201614184..a9deee2a10 100644 --- a/prelude/python/tools/compile.py +++ b/prelude/python/tools/compile.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Example usage: $ cat inputs.manifest @@ -14,30 +16,29 @@ out-dir/foo.pyc """ -# pyre-unsafe - import argparse import errno import json import os import sys from py_compile import compile, PycInvalidationMode +from typing import List if sys.version_info[0] == 3: import importlib import importlib.util - DEFAULT_FORMAT = importlib.util.cache_from_source("{pkg}/{name}.py") + DEFAULT_FORMAT: str = importlib.util.cache_from_source("{pkg}/{name}.py") else: - DEFAULT_FORMAT = "{pkg}/{name}.pyc" + DEFAULT_FORMAT: str = "{pkg}/{name}.pyc" -def get_py_path(module): +def get_py_path(module: str) -> str: return module.replace(".", os.sep) + ".py" -def get_pyc_path(module, fmt): +def get_pyc_path(module: str, fmt: str) -> str: try: package, name = module.rsplit(".", 1) except ValueError: @@ -51,7 +52,7 @@ def get_pyc_path(module, fmt): return os.path.join(*parts) -def _mkdirs(dirpath): +def _mkdirs(dirpath: str) -> None: try: os.makedirs(dirpath) except OSError as e: @@ -59,7 +60,7 @@ def _mkdirs(dirpath): raise -def main(argv): +def main(argv: List[str]) -> None: parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("-o", "--output", required=True) parser.add_argument( diff --git a/prelude/python/tools/create_manifest_for_source_dir.py b/prelude/python/tools/create_manifest_for_source_dir.py index 96b011f3e8..4f086154c9 100755 --- a/prelude/python/tools/create_manifest_for_source_dir.py +++ b/prelude/python/tools/create_manifest_for_source_dir.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import json import os diff --git a/prelude/python/tools/extract.py b/prelude/python/tools/extract.py index 843241d8a9..6b3eef77c6 100755 --- a/prelude/python/tools/extract.py +++ b/prelude/python/tools/extract.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
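For reference, the name normalization performed by the `__test_main__.py` hunk above can be reproduced standalone. A minimal sketch, with a made-up `TestAbsent` case; the assertion assumes the class is defined in `__main__` (i.e. the sketch is run as a script):

import unittest


class TestAbsent(unittest.TestCase):
    def test_basic(self):
        pass


test = TestAbsent("test_basic")
cls = test.__class__
method_name = test._testMethodName

# Python <= 3.11 renders str(test) as "test_basic (__main__.TestAbsent)";
# 3.12 renders "test_basic (__main__.TestAbsent.test_basic)". Building the
# name manually keeps the pre-3.12 shape on every version.
name = f"{method_name} ({cls.__module__}.{cls.__qualname__})"
assert name == "test_basic (__main__.TestAbsent)"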
+# pyre-strict + """ Quick and dirty wrapper to extract zip files; python 3.6.2+ diff --git a/prelude/python/tools/fail_with_message.py b/prelude/python/tools/fail_with_message.py index dd06ee5a9d..bf4dba1343 100644 --- a/prelude/python/tools/fail_with_message.py +++ b/prelude/python/tools/fail_with_message.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import sys from pathlib import Path diff --git a/prelude/python/tools/generate_static_extension_info.py b/prelude/python/tools/generate_static_extension_info.py index d0679a6089..da569f56d5 100644 --- a/prelude/python/tools/generate_static_extension_info.py +++ b/prelude/python/tools/generate_static_extension_info.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import sys from typing import List diff --git a/prelude/python/tools/make_par/BUCK b/prelude/python/tools/make_par/BUCK index da10d3ec0c..62d07e0989 100644 --- a/prelude/python/tools/make_par/BUCK +++ b/prelude/python/tools/make_par/BUCK @@ -3,8 +3,12 @@ # as it is the only `TARGETS` (not `TARGETS.v2`) in the prelude. # Configuring the tools to do it right seemed more dangerous than just having a caveat on this one file. +load("@prelude//utils:source_listing.bzl", "source_listing") + oncall("build_infra") +source_listing() + export_file( name = "__run_lpar_main__.py", src = "__run_lpar_main__.py", diff --git a/prelude/python/tools/make_par/__run_lpar_main__.py b/prelude/python/tools/make_par/__run_lpar_main__.py index 13de11aaa5..8605c5ef3e 100644 --- a/prelude/python/tools/make_par/__run_lpar_main__.py +++ b/prelude/python/tools/make_par/__run_lpar_main__.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + # # Put everything inside an __invoke_main() function. # This way anything we define won't pollute globals(), since runpy diff --git a/prelude/python/tools/make_par/_lpar_bootstrap.sh.template b/prelude/python/tools/make_par/_lpar_bootstrap.sh.template index 0a87fdc9e6..2e01a66bde 100644 --- a/prelude/python/tools/make_par/_lpar_bootstrap.sh.template +++ b/prelude/python/tools/make_par/_lpar_bootstrap.sh.template @@ -19,7 +19,10 @@ export {lib_path_env}={ld_library_path} if [ -n "${{PYTHONPATH+SET}}" ]; then export FB_SAVED_PYTHONPATH=$PYTHONPATH fi -export PYTHONPATH=$BASE_DIR + +# The following expands to ":$PAR_APPEND_PYTHONPATH" when $PAR_APPEND_PYTHONPATH is set. +# This is important: we don't want a trailing colon in $PYTHONPATH. +export PYTHONPATH=$BASE_DIR${{PAR_APPEND_PYTHONPATH:+:$PAR_APPEND_PYTHONPATH}} if [ -n "${{PYTHONHOME+SET}}" ]; then export FB_SAVED_PYTHONHOME=$PYTHONHOME fi @@ -34,6 +37,10 @@ export FB_PAR_MAIN_RUNNER_FUNCTION="{main_runner_function}" export FB_PAR_RUNTIME_FILES=$BASE_DIR : ${{FB_LPAR_INVOKED_NAME:="$0"}} export FB_LPAR_INVOKED_NAME +# This environment variable is immediately unset on startup but will also appear +# in e.g. `multiprocessing` workers, and so serves as an audit trail back to +# the originating PAR (and can be read via e.g. `/proc//environ`).
+export PAR_INVOKED_NAME_TAG="$FB_LPAR_INVOKED_NAME" {ld_preload} {env} exec {cmd} "$@" diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index 5b29b8225e..152a34e177 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -6,28 +6,29 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -import importlib +# pyre-strict + +from __future__ import annotations + import multiprocessing.util as mp_util import os import sys import threading +import warnings from importlib.machinery import PathFinder from importlib.util import module_from_spec lock = threading.Lock() -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def __patch_spawn(var_names, saved_env): +def __patch_spawn(var_names: list[str], saved_env: dict[str, str]) -> None: std_spawn = mp_util.spawnv_passfds # pyre-fixme[53]: Captured variable `std_spawn` is not annotated. # pyre-fixme[53]: Captured variable `saved_env` is not annotated. # pyre-fixme[53]: Captured variable `var_names` is not annotated. - # pyre-fixme[3]: Return type must be annotated. # pyre-fixme[2]: Parameter must be annotated. - def spawnv_passfds(path, args, passfds): + def spawnv_passfds(path, args, passfds) -> None | int: with lock: try: for var in var_names: @@ -44,18 +45,32 @@ def spawnv_passfds(path, args, passfds): mp_util.spawnv_passfds = spawnv_passfds -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def __clear_env(patch_spawn=True): +def __clear_env(patch_spawn: bool = True) -> None: saved_env = {} - darwin_vars = ("DYLD_LIBRARY_PATH", "DYLD_INSERT_LIBRARIES") - linux_vars = ("LD_LIBRARY_PATH", "LD_PRELOAD") - python_vars = ("PYTHONPATH",) + + var_names = [ + "PYTHONPATH", + # We use this env var to tag the process and its `multiprocessing` + # workers. It's important that we clear it out (so that unrelated sub- + # processes don't inherit it), but it can be read via + # `/proc//environ`. + "PAR_INVOKED_NAME_TAG", + ] if sys.platform == "darwin": - var_names = darwin_vars + python_vars + var_names.extend( + [ + "DYLD_LIBRARY_PATH", + "DYLD_INSERT_LIBRARIES", + ] + ) else: - var_names = linux_vars + python_vars + var_names.extend( + [ + "LD_LIBRARY_PATH", + "LD_PRELOAD", + ] + ) # Restore the original value of environment variables that we altered # as part of the startup process. @@ -72,26 +87,17 @@ def __clear_env(patch_spawn=True): __patch_spawn(var_names, saved_env) -# pyre-fixme[3]: Return type must be annotated. -def __startup__(): - for name, var in os.environ.items(): - if name.startswith("STARTUP_"): - name, sep, func = var.partition(":") - if sep: - try: - module = importlib.import_module(name) - getattr(module, func)() - except Exception as e: - # TODO: Ignoring errors for now. The way to properly fix this should be to make - # sure we are still at the same binary that configured `STARTUP_` before importing. - print( - "Error running startup function %s:%s: %s" % (name, func, e), - file=sys.stderr, - ) - - -# pyre-fixme[3]: Return type must be annotated. -def __passthrough_exec_module(): +def __startup__() -> None: + try: + # pyre-fixme[21]: Could not find module `__par__.__startup_function_loader__`.
+ from __par__.__startup_function_loader__ import load_startup_functions + + load_startup_functions() + except Exception: + warnings.warn("could not load startup functions", stacklevel=1) + + +def __passthrough_exec_module() -> None: # Delegate this module execution to the next module in the path, if any, # effectively making this sitecustomize.py a passthrough module. spec = PathFinder.find_spec( diff --git a/prelude/python/tools/make_py_package_inplace.py b/prelude/python/tools/make_py_package_inplace.py index eb993a1552..9abccde90d 100755 --- a/prelude/python/tools/make_py_package_inplace.py +++ b/prelude/python/tools/make_py_package_inplace.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Create a bootstrapper pex for inplace python binaries @@ -128,6 +130,13 @@ def parse_args() -> argparse.Namespace: ), help="The dynamic loader env used to find native library deps", ) + parser.add_argument( + "--native-library-runtime-path", + dest="native_library_runtime_paths", + default=[], + action="append", + help="Additional runtime paths the dynamic loader uses to find native library deps", + ) parser.add_argument( "-e", "--runtime_env", @@ -137,6 +146,10 @@ def parse_args() -> argparse.Namespace: ) # Compatibility with existing make_par scripts parser.add_argument("--passthrough", action="append", default=[]) + # No-op, added for compatibility with existing make_par scripts + parser.add_argument( + "--omnibus-debug-info", choices=["separate", "strip", "extract"] + ) return parser.parse_args() @@ -155,6 +168,7 @@ def write_bootstrapper(args: argparse.Namespace) -> None: # Because this can be invoked from other directories, find the relative path # from this .par to the modules dir, and use that. relative_modules_dir = os.path.relpath(args.modules_dir, args.output.parent) + native_lib_dirs = [relative_modules_dir] + args.native_library_runtime_paths # TODO(nmj): Remove this hack. So, if arg0 in your shebang is a bash script # (like /usr/local/fbcode/platform007/bin/python3.7 on macs is) @@ -166,9 +180,9 @@ def write_bootstrapper(args: argparse.Namespace) -> None: # exclude it for now, because linux doesn't like multiple args # after /usr/bin/env - ld_preload = "None" + ld_preload = None if args.preload_libraries: - ld_preload = repr(":".join(p.name for p in args.preload_libraries)) + ld_preload = [p.name for p in args.preload_libraries] new_data = data.replace("", "/usr/bin/env " + str(args.python)) new_data = new_data.replace("", "") @@ -187,9 +201,9 @@ def write_bootstrapper(args: argparse.Namespace) -> None: # Things that are only required for the full template new_data = new_data.replace("", args.native_libs_env_var) - new_data = new_data.replace("", repr(relative_modules_dir)) + new_data = new_data.replace("", repr(native_lib_dirs)) new_data = new_data.replace("", "LD_PRELOAD") - new_data = new_data.replace("", ld_preload) + new_data = new_data.replace("", repr(ld_preload)) if args.runtime_env: runtime_env = dict(e.split("=", maxsplit=1) for e in args.runtime_env) diff --git a/prelude/python/tools/make_py_package_manifest_module.py b/prelude/python/tools/make_py_package_manifest_module.py index 1d8506864c..89427237b6 100755 --- a/prelude/python/tools/make_py_package_manifest_module.py +++ b/prelude/python/tools/make_py_package_manifest_module.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
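A minimal sketch of the audit-trail behavior described by the `PAR_INVOKED_NAME_TAG` comments in the bootstrap template and `sitecustomize.py` hunks above. The tag value here is hypothetical; workers inherit the parent's environment, which is how the tag reaches `multiprocessing` workers:

import multiprocessing
import os


def worker_tag():
    # Read back the tag from inside a worker process.
    return os.environ.get("PAR_INVOKED_NAME_TAG")


if __name__ == "__main__":
    # Stand-in for the bootstrap's `export PAR_INVOKED_NAME_TAG=...`;
    # the value is made up for illustration.
    os.environ["PAR_INVOKED_NAME_TAG"] = "/usr/local/bin/tool.par"
    with multiprocessing.Pool(1) as pool:
        # The worker inherits the environment, so the tag survives into it
        # (and remains readable via the process's environ on Linux).
        print(pool.apply(worker_tag))  # -> /usr/local/bin/tool.par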
+# pyre-strict + """ Generate a __manifest__.py module containing build metadata for a Python package. """ diff --git a/prelude/python/tools/make_py_package_modules.py b/prelude/python/tools/make_py_package_modules.py index 07e247df10..db591ec715 100755 --- a/prelude/python/tools/make_py_package_modules.py +++ b/prelude/python/tools/make_py_package_modules.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Create the link tree for inplace Python binaries. diff --git a/prelude/python/tools/make_source_db.py b/prelude/python/tools/make_source_db.py index ef7f638328..a89d50efb5 100755 --- a/prelude/python/tools/make_source_db.py +++ b/prelude/python/tools/make_source_db.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Creates a Python Source DB JSON file containing both a rule's immediate sources and the sources of all transitive dependencies (e.g. for use with Pyre). @@ -34,19 +36,18 @@ } """ -# pyre-unsafe - import argparse import json import sys +from typing import List, Tuple -def _load(path): +def _load(path: str) -> List[Tuple[str, str, str]]: with open(path) as f: return json.load(f) -def main(argv): +def main(argv: List[str]) -> None: parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) parser.add_argument("--sources") diff --git a/prelude/python/tools/make_source_db_no_deps.py b/prelude/python/tools/make_source_db_no_deps.py index d764f8fe57..4493274eb1 100644 --- a/prelude/python/tools/make_source_db_no_deps.py +++ b/prelude/python/tools/make_source_db_no_deps.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Creates a Python Source DB JSON file from Python manifest JSON file (e.g. for use with Pyre). @@ -28,18 +30,15 @@ import argparse import json import sys +from typing import List, Tuple -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def _load(path): +def _load(path: str) -> List[Tuple[str, str, str]]: with open(path) as f: return json.load(f) -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def main(argv): +def main(argv: List[str]) -> None: parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) parser.add_argument("sources") diff --git a/prelude/python/tools/parse_imports.py b/prelude/python/tools/parse_imports.py index a0c2bf9ac4..6b97b5de67 100644 --- a/prelude/python/tools/parse_imports.py +++ b/prelude/python/tools/parse_imports.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import ast import json diff --git a/prelude/python/tools/py38stdlib.py b/prelude/python/tools/py38stdlib.py index 30052528fd..f5d33a6b48 100644 --- a/prelude/python/tools/py38stdlib.py +++ b/prelude/python/tools/py38stdlib.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + # This list is "borrowed" from https://github.com/amyreese/stdlibs from typing import FrozenSet diff --git a/prelude/python/tools/run_inplace.py.in b/prelude/python/tools/run_inplace.py.in index 08fdd77b41..8ea96bae17 100644 --- a/prelude/python/tools/run_inplace.py.in +++ b/prelude/python/tools/run_inplace.py.in @@ -12,7 +12,7 @@ main_module = "" main_function = "" modules_dir = "" native_libs_env_var = "" -native_libs_dir = +native_libs_dirs = native_libs_preload_env_var = "" native_libs_preload = interpreter_flags = "" @@ -36,10 +36,19 @@ if platform.system() == "Windows" and not dirpath.startswith(unc_prefix): env_vals_to_restore = {} # Update the environment variable for the dynamic loader to the native # libraries location. -if native_libs_dir is not None: - old_native_libs_dir = os.environ.get(native_libs_env_var) - os.environ[native_libs_env_var] = os.path.join(dirpath, native_libs_dir) - env_vals_to_restore[native_libs_env_var] = old_native_libs_dir +if native_libs_dirs is not None: + old_native_libs_dirs = os.environ.get(native_libs_env_var) + os.environ[native_libs_env_var] = os.pathsep.join([ + os.path.join(dirpath, native_libs_dir) + for native_libs_dir in native_libs_dirs + ]) + env_vals_to_restore[native_libs_env_var] = old_native_libs_dirs +if os.environ.get("PAR_APPEND_LD_LIBRARY_PATH") is not None: + os.environ[native_libs_env_var] = ( + (os.environ[native_libs_env_var] + ":" + os.environ["PAR_APPEND_LD_LIBRARY_PATH"]) + if os.environ.get(native_libs_env_var) is not None + else os.environ["PAR_APPEND_LD_LIBRARY_PATH"] + ) # Update the environment variable for the dynamic loader to find libraries # to preload. @@ -48,10 +57,24 @@ if native_libs_preload is not None: env_vals_to_restore[native_libs_preload_env_var] = old_native_libs_preload # On macos, preloaded libs are found via paths. - os.environ[native_libs_preload_env_var] = ":".join( - os.path.join(dirpath, native_libs_dir, l) - for l in native_libs_preload.split(":") - ) + if platform.system() == "Darwin": + full_path_preloads = [] + for lib in native_libs_preload: + for native_libs_dir in native_libs_dirs: + fpath = os.path.join(dirpath, native_libs_dir, lib) + if os.path.exists(fpath): + full_path_preloads.append(fpath) + break + else: + raise Exception( + "cannot find preload lib {!r} in paths {!r}".format( + lib, + native_libs_dirs, + ), + ) + native_libs_preload = full_path_preloads + + os.environ[native_libs_preload_env_var] = os.pathsep.join(native_libs_preload) @@ -98,6 +121,18 @@ def __run(): restoreenv({env_vals_to_restore!r}) + # On windows, adjust os.add_dll_directory and PATH (for `ctypes.util.find_library`) + # so that native libraries can be found by the dynamic linker or ctypes + if sys.platform.startswith("win"): + path = os.environ.get("PATH", "") + for native_libs_dir in {native_libs_dirs!r}: + d = os.path.join({dirpath!r}, native_libs_dir) + os.add_dll_directory(d) + if path and not path.endswith(os.pathsep): + path += os.pathsep + path += d + setenv("PATH", path) + from import as run_as_main run_as_main({main_module!r}, {main_function!r}) @@ -142,6 +177,13 @@ for env in ("PYTHONPATH", "LD_LIBRARY_PATH", "LD_PRELOAD", path = os.path.join(dirpath, modules_dir) os.environ["PYTHONPATH"] = path +if "PAR_APPEND_PYTHONPATH" in os.environ: + os.environ["PYTHONPATH"] += ":" + os.environ["PAR_APPEND_PYTHONPATH"] + +# This environment variable is immediately unset on startup but will also appear +# in e.g.
`multiprocessing` workers, and so serves as an audit trail back to +# the originating PAR (and can be read via e.g. `/proc//environ`). +os.environ["PAR_INVOKED_NAME_TAG"] = sys.argv[0] if platform.system() == "Windows": # exec on Windows is not true exec - there is only 'spawn' ('CreateProcess'). diff --git a/prelude/python/tools/sourcedb_merger/BUCK.v2 b/prelude/python/tools/sourcedb_merger/BUCK.v2 index 7f090f90ea..2521ab5a74 100644 --- a/prelude/python/tools/sourcedb_merger/BUCK.v2 +++ b/prelude/python/tools/sourcedb_merger/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_library( @@ -13,19 +19,19 @@ prelude.python_bootstrap_library( prelude.python_bootstrap_binary( name = "merge", main = "merge.py", + visibility = ["PUBLIC"], deps = [ ":library", ], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( name = "legacy_merge", main = "legacy_merge.py", + visibility = ["PUBLIC"], deps = [ ":library", ], - visibility = ["PUBLIC"], ) # Run the test suite with this command: diff --git a/prelude/python/tools/sourcedb_merger/inputs.py b/prelude/python/tools/sourcedb_merger/inputs.py index 8ccc78304c..2f298a2588 100644 --- a/prelude/python/tools/sourcedb_merger/inputs.py +++ b/prelude/python/tools/sourcedb_merger/inputs.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + import dataclasses import json import pathlib diff --git a/prelude/python/tools/sourcedb_merger/legacy_merge.py b/prelude/python/tools/sourcedb_merger/legacy_merge.py index ecd6c61e1f..0eb16221f9 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_merge.py +++ b/prelude/python/tools/sourcedb_merger/legacy_merge.py @@ -6,6 +6,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + import argparse import pathlib import sys diff --git a/prelude/python/tools/sourcedb_merger/legacy_outputs.py b/prelude/python/tools/sourcedb_merger/legacy_outputs.py index aefebb22ba..ab225f9b05 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_outputs.py +++ b/prelude/python/tools/sourcedb_merger/legacy_outputs.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + import dataclasses import json import pathlib diff --git a/prelude/python/tools/sourcedb_merger/merge.py b/prelude/python/tools/sourcedb_merger/merge.py index 6f641c040f..f4dd4845b7 100644 --- a/prelude/python/tools/sourcedb_merger/merge.py +++ b/prelude/python/tools/sourcedb_merger/merge.py @@ -6,6 +6,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + import argparse import pathlib import sys diff --git a/prelude/python/tools/sourcedb_merger/outputs.py b/prelude/python/tools/sourcedb_merger/outputs.py index 7a7e4f88e2..2565f75c5d 100644 --- a/prelude/python/tools/sourcedb_merger/outputs.py +++ b/prelude/python/tools/sourcedb_merger/outputs.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
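The append-only path composition used by the `PAR_APPEND_PYTHONPATH` and `PAR_APPEND_LD_LIBRARY_PATH` hunks above boils down to the following; `append_env_path` is an illustrative helper written for this note, not prelude code:

def append_env_path(current, extra, sep=":"):
    # Append `extra` only when it is set, and avoid a leading separator when
    # `current` is empty -- the same "no stray colon" concern the
    # bootstrap-template comment calls out.
    if not extra:
        return current
    return current + sep + extra if current else extra


assert append_env_path("/pkg/modules", "/site/extra") == "/pkg/modules:/site/extra"
assert append_env_path("", "/site/extra") == "/site/extra"
assert append_env_path("/pkg/modules", None) == "/pkg/modules"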
+# pyre-strict + + import dataclasses import json import pathlib diff --git a/prelude/python/tools/sourcedb_merger/tests/__init__.py b/prelude/python/tools/sourcedb_merger/tests/__init__.py index b404f6ed69..2444a8d0f4 100644 --- a/prelude/python/tools/sourcedb_merger/tests/__init__.py +++ b/prelude/python/tools/sourcedb_merger/tests/__init__.py @@ -5,6 +5,32 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + +# pyre-fixme[21]: Could not find name `BuildMapLoadError` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `load_targets_and_build_maps_from_json` in +# `tests.inputs_test`. from .inputs_test import * # noqa + +# pyre-fixme[21]: Could not find name `ConflictInfo` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `ConflictMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `FullBuildMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `MergeResult` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `SourceInfo` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `merge_partial_build_maps` in +# `tests.legacy_output_test`. from .legacy_output_test import * # noqa + +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `merge_partial_build_maps` in +# `tests.outputs_test`. from .outputs_test import * # noqa diff --git a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py index c671e1b618..75a8ce7e3e 100644 --- a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + import contextlib import json import os diff --git a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py index 267a135f21..3d587cbb87 100644 --- a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + + import unittest from typing import Mapping diff --git a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py index 91affd8053..cb147a3dba 100644 --- a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + + import unittest from typing import Mapping diff --git a/prelude/python/tools/static_extension_finder.py b/prelude/python/tools/static_extension_finder.py index 9b278d3b7d..c4c1171f7c 100644 --- a/prelude/python/tools/static_extension_finder.py +++ b/prelude/python/tools/static_extension_finder.py @@ -5,8 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -import sys -from importlib.machinery import ModuleSpec +# pyre-strict + # Add a try except to force eager importing try: @@ -17,6 +17,9 @@ class StaticExtensionFinder: + # pyre-fixme + ModuleSpec = None + @classmethod # pyre-fixme[3]: Return type must be annotated. # pyre-fixme[2]: Parameter must be annotated. @@ -25,16 +28,22 @@ def find_spec(cls, fullname, path, target=None): Use fullname to look up the PyInit function in the main binary. Returns None if not present. This allows importing CExtensions that have been statically linked in. """ + if not fullname: return None if not _check_module(fullname): return None - spec = ModuleSpec( + spec = cls.ModuleSpec( fullname, StaticExtensionLoader, origin="static-extension", is_package=False ) return spec -# pyre-fixme[3]: Return type must be annotated. -def _initialize(): +def _initialize() -> None: + # These imports are here to avoid triggering circular dependencies. See S389486 + import sys + from importlib.machinery import ModuleSpec + + StaticExtensionFinder.ModuleSpec = ModuleSpec + sys.meta_path.insert(0, StaticExtensionFinder) diff --git a/prelude/python/tools/static_extension_utils.cpp b/prelude/python/tools/static_extension_utils.cpp index f35e2a6820..1470561cbd 100644 --- a/prelude/python/tools/static_extension_utils.cpp +++ b/prelude/python/tools/static_extension_utils.cpp @@ -24,15 +24,13 @@ namespace { static PyObject* _create_module(PyObject* self, PyObject* spec) { PyObject* name; PyObject* mod; - const char* oldcontext; name = PyObject_GetAttrString(spec, "name"); if (name == nullptr) { return nullptr; } - // TODO private api usage - mod = _PyImport_FindExtensionObject(name, name); + mod = PyImport_GetModule(name); if (mod || PyErr_Occurred()) { Py_DECREF(name); Py_XINCREF(mod); @@ -58,7 +56,15 @@ static PyObject* _create_module(PyObject* self, PyObject* spec) { PyObject* modules = nullptr; PyModuleDef* def; - oldcontext = _Py_PackageContext; + +#if PY_VERSION_HEX >= 0x030C0000 + // Use our custom Python 3.12 C-API to call the statically linked module init + // function + mod = _Ci_PyImport_CallInitFuncWithContext(namestr.c_str(), initfunc); +#else + // In Python 3.10 (and earlier) we need to handle package context swapping + // ourselves + const char* oldcontext = _Py_PackageContext; _Py_PackageContext = namestr.c_str(); if (_Py_PackageContext == nullptr) { _Py_PackageContext = oldcontext; @@ -67,6 +73,7 @@ static PyObject* _create_module(PyObject* self, PyObject* spec) { mod = initfunc(); _Py_PackageContext = oldcontext; +#endif if (mod == nullptr) { Py_DECREF(name); return nullptr; } diff --git a/prelude/python/tools/traverse_dep_manifest.py b/prelude/python/tools/traverse_dep_manifest.py index cc7c5e45bb..6e73c94141 100644 --- a/prelude/python/tools/traverse_dep_manifest.py +++ b/prelude/python/tools/traverse_dep_manifest.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
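The `_initialize()` change in `static_extension_finder.py` above defers its imports so the module body itself pulls in nothing at import time, sidestepping a circular-import failure at startup. A reduced sketch of that pattern, with illustrative names rather than the prelude's:

import sys


class LazyFinder:
    # Populated by initialize(); staying None until then keeps the module
    # body free of top-level importlib imports.
    ModuleSpec = None

    @classmethod
    def find_spec(cls, fullname, path, target=None):
        # A real finder would construct and return cls.ModuleSpec(...) here;
        # returning None defers to the remaining finders on sys.meta_path.
        return None


def initialize():
    # Deferred imports: resolved when the finder is installed, not when the
    # module is first imported, so no import cycle can form at startup.
    from importlib.machinery import ModuleSpec

    LazyFinder.ModuleSpec = ModuleSpec
    sys.meta_path.insert(0, LazyFinder)


initialize()
assert LazyFinder.ModuleSpec is not None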
+# pyre-strict + import argparse import json diff --git a/prelude/python/tools/wheel.py b/prelude/python/tools/wheel.py new file mode 100644 index 0000000000..ffea52cea8 --- /dev/null +++ b/prelude/python/tools/wheel.py @@ -0,0 +1,149 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +import argparse +import configparser +import contextlib +import io +import json +import os +import sys +import zipfile +from types import TracebackType +from typing import cast, Dict, List, Optional, Set, Tuple, Type + + +# pyre-fixme[24]: Generic type `AbstractContextManager` expects 1 type parameter. +class WheelBuilder(contextlib.AbstractContextManager): + + def __init__( + self, + *, + name: str, + version: str, + output: str, + entry_points: Optional[Dict[str, str]] = None, + metadata: Optional[List[Tuple[str, str]]] = None, + ) -> None: + self._name = name + self._version = version + self._record: list[str] = [] + self._outf = zipfile.ZipFile(output, mode="w") + self._entry_points: Optional[Dict[str, str]] = entry_points + self._metadata: List[Tuple[str, str]] = [] + self._metadata.append(("Name", name)) + self._metadata.append(("Version", version)) + if metadata is not None: + self._metadata.extend(metadata) + + def _dist_info(self, *path: str) -> str: + return os.path.join(f"{self._name}-{self._version}.dist-info", *path) + + def _data(self, *path: str) -> str: + return os.path.join(f"{self._name}-{self._version}.data", *path) + + def write(self, dst: str, src: str) -> None: + self._record.append(dst) + self._outf.write(filename=src, arcname=dst) + + def write_data(self, dst: str, src: str) -> None: + self.write(self._data(dst), src) + + def writestr(self, dst: str, contents: str) -> None: + self._record.append(dst) + self._outf.writestr(zinfo_or_arcname=dst, data=contents) + + def _write_record(self) -> None: + record = self._dist_info("RECORD") + self._outf.writestr( + record, "".join(["{},,\n".format(f) for f in (self._record + [record])]) + ) + + def close(self) -> None: + self.writestr( + self._dist_info("METADATA"), + "".join(["{}: {}\n".format(key, val) for key, val in self._metadata]), + ) + self.writestr( + self._dist_info("WHEEL"), + """\ +Wheel-Version: 1.0 +""", + ) + + # Write entry points. 
+ if self._entry_points is not None: + config = configparser.ConfigParser() + config.read_dict(cast(Dict[str, Dict[str, str]], self._entry_points)) + with io.TextIOWrapper( + self._outf.open(self._dist_info("entry_points.txt"), mode="w"), + encoding="utf-8", + ) as f: + config.write(f) + + self._write_record() + self._outf.close() + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + +def main(argv: List[str]) -> None: + parser = argparse.ArgumentParser() + parser.add_argument("--output", required=True) + parser.add_argument("--name", required=True) + parser.add_argument("--version", required=True) + parser.add_argument("--entry-points", default=None) + parser.add_argument("--srcs", action="append", default=[]) + parser.add_argument("--metadata", nargs=2, action="append", default=[]) + parser.add_argument("--data", nargs=2, action="append", default=[]) + args = parser.parse_args(argv[1:]) + + pkgs: Set[str] = set() + pkgs_with_init = set() + + def _add_pkg(pkg: str) -> None: + pkgs.add(pkg) + parent = os.path.dirname(pkg) + if parent: + _add_pkg(parent) + + with WheelBuilder( + name=args.name, + version=args.version, + output=args.output, + entry_points=( + json.loads(args.entry_points) if args.entry_points is not None else None + ), + metadata=args.metadata, + ) as whl: + for src in args.srcs: + with open(src) as f: + manifest = json.load(f) + for dst, src, *_ in manifest: + if dst.endswith((".py", ".so")): + pkg = os.path.dirname(dst) + _add_pkg(pkg) + if os.path.basename(dst) == "__init__.py": + pkgs_with_init.add(pkg) + whl.write(dst, src) + + for dst, src in args.data: + whl.write_data(dst, src) + + for pkg in pkgs - pkgs_with_init: + whl.writestr(os.path.join(pkg, "__init__.py"), "") + + +sys.exit(main(sys.argv)) diff --git a/prelude/python/typecheck/batch.bxl b/prelude/python/typecheck/batch.bxl new file mode 100644 index 0000000000..9d3e8aed84 --- /dev/null +++ b/prelude/python/typecheck/batch.bxl @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
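Before moving on: a usage sketch for the `WheelBuilder` added in `wheel.py` above, under the assumption that the module were importable as `wheel` (in practice it runs as a script via the new `:wheel` bootstrap binary):

from wheel import WheelBuilder  # assumed import path for illustration only

with WheelBuilder(
    name="demo_pkg",
    version="1.0",
    output="demo_pkg-1.0-py3-none-any.whl",
    metadata=[("Summary", "An example wheel")],
) as whl:
    # Callers only write payload files...
    whl.writestr("demo_pkg/__init__.py", "VALUE = 1\n")
# ...because close() (invoked via __exit__) emits the METADATA, WHEEL and
# RECORD entries automatically.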
+ +load("@prelude//utils:utils.bzl", "flatten") +load("@prelude//python/sourcedb/filter.bxl", "do_filter") + +def check_targets(ctx: bxl.Context, targets: typing.Any) -> None: + checked_targets = ctx.configured_targets(do_filter(ctx.uquery(), targets)) + + build_result = ctx.build([ + target.label.with_sub_target("typecheck") + for target in checked_targets + ]) + output = ctx.output.ensure_multiple(build_result) + + ctx.output.print_json({ + "artifacts": { + label.raw_target(): [artifact.rel_path() for artifact in artifacts] + for label, artifacts in output.items() + }, + "root": ctx.root(), + }) + +def _run_entry_point(ctx: bxl.Context) -> None: + targets = flatten(ctx.cli_args.target) + check_targets(ctx, targets) + +run = bxl_main( + doc = "Run [typecheck] on a set of targets or target patterns.", + impl = _run_entry_point, + cli_args = { + "target": cli_args.list( + cli_args.target_expr( + doc = "Target pattern to run type checking on", + ), + ), + }, +) diff --git a/prelude/python/typecheck/batch_files.bxl b/prelude/python/typecheck/batch_files.bxl new file mode 100644 index 0000000000..c18b556f19 --- /dev/null +++ b/prelude/python/typecheck/batch_files.bxl @@ -0,0 +1,24 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":batch.bxl", "check_targets") + +def _run_entry_point(ctx: bxl.Context) -> None: + targets = ctx.uquery().owner(ctx.cli_args.source) + check_targets(ctx, targets) + +run = bxl_main( + doc = "Run [typecheck] on the owning targets of given files.", + impl = _run_entry_point, + cli_args = { + "source": cli_args.list( + cli_args.string( + doc = "Files whose owning targets need to be checked", + ), + ), + }, +) diff --git a/prelude/python/typing.bzl b/prelude/python/typing.bzl new file mode 100644 index 0000000000..e9b88b906d --- /dev/null +++ b/prelude/python/typing.bzl @@ -0,0 +1,68 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//python:python.bzl", "PythonLibraryInfo") +load( + ":manifest.bzl", + "ManifestInfo", # @unused Used as a type +) +load(":python.bzl", "PythonLibraryManifestsTSet") + +DEFAULT_PY_VERSION = "3.10" + +def create_per_target_type_check( + ctx: AnalysisContext, + executable: RunInfo, + srcs: ManifestInfo | None, + deps: list[PythonLibraryInfo], + typeshed: ManifestInfo | None, + py_version: str | None, + typing_enabled: bool) -> DefaultInfo: + output_file_name = "type_check_result.json" + if not typing_enabled: + # Use empty dict to signal that no type checking was performed. 
+ output_file = ctx.actions.write_json(output_file_name, {}) + else: + cmd = [executable] + hidden = [] + + cmd.append(cmd_args("check")) + + # Source artifacts + source_manifests = [] + if srcs != None: + source_manifests = [srcs.manifest] + hidden.extend([a for a, _ in srcs.artifacts]) + + # Dep artifacts + dep_manifest_tset = ctx.actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in deps]) + dep_manifests = dep_manifest_tset.project_as_args("source_type_manifests") + hidden.append(dep_manifest_tset.project_as_args("source_type_artifacts")) + + # Typeshed artifacts + if typeshed != None: + hidden.extend([a for a, _ in typeshed.artifacts]) + typeshed_manifest = typeshed.manifest + else: + typeshed_manifest = None + + # Create input configs + input_config = { + "dependencies": dep_manifests, + "py_version": py_version or DEFAULT_PY_VERSION, + "sources": source_manifests, + "typeshed": typeshed_manifest, + } + + input_file = ctx.actions.write_json("type_check_config.json", input_config, with_inputs = True) + output_file = ctx.actions.declare_output(output_file_name) + cmd.append(cmd_args(input_file)) + cmd.append(cmd_args(output_file.as_output(), format = "--output={}")) + + ctx.actions.run(cmd_args(cmd, hidden = hidden), category = "type_check") + + return DefaultInfo(default_output = output_file) diff --git a/prelude/python_bootstrap/python_bootstrap.bzl b/prelude/python_bootstrap/python_bootstrap.bzl index b4c0d021bd..ef628945dc 100644 --- a/prelude/python_bootstrap/python_bootstrap.bzl +++ b/prelude/python_bootstrap/python_bootstrap.bzl @@ -43,15 +43,18 @@ def python_bootstrap_binary_impl(ctx: AnalysisContext) -> list[Provider]: interpreter = ctx.attrs._python_bootstrap_toolchain[PythonBootstrapToolchainInfo].interpreter - run_args = cmd_args() if ctx.attrs._win_python_wrapper != None: - run_args.add(ctx.attrs._win_python_wrapper[RunInfo]) - run_args.add(run_tree) - run_args.add(interpreter) - run_args.add(output) + run_args = cmd_args( + ctx.attrs._win_python_wrapper[RunInfo], + run_tree, + interpreter, + output, + ) else: - run_args.add("/usr/bin/env") - run_args.add(cmd_args(run_tree, format = "PYTHONPATH={}")) - run_args.add(interpreter) - run_args.add(output) + run_args = cmd_args( + "/usr/bin/env", + cmd_args(run_tree, format = "PYTHONPATH={}"), + interpreter, + output, + ) return [DefaultInfo(default_output = output), RunInfo(args = run_args)] diff --git a/prelude/python_bootstrap/tools/BUCK.v2 b/prelude/python_bootstrap/tools/BUCK.v2 index b895ef1a52..e3fb697482 100644 --- a/prelude/python_bootstrap/tools/BUCK.v2 +++ b/prelude/python_bootstrap/tools/BUCK.v2 @@ -1,8 +1,14 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.sh_binary( name = "win_python_wrapper", main = "win_python_wrapper.bat", - visibility = ["PUBLIC"], target_compatible_with = ["config//os:windows"], + visibility = ["PUBLIC"], ) diff --git a/prelude/python_bootstrap/tools/win_python_wrapper.bat b/prelude/python_bootstrap/tools/win_python_wrapper.bat index 8aa8c4a25a..f2c8b0cfb7 100644 --- a/prelude/python_bootstrap/tools/win_python_wrapper.bat +++ b/prelude/python_bootstrap/tools/win_python_wrapper.bat @@ -17,5 +17,5 @@ setlocal enabledelayedexpansion set args=;;;;;;%* set args=!args:;;;;;;%1 =! 
-set PYTHONPATH=%1 +set PYTHONPATH=%~1 %args% diff --git a/prelude/rules.bzl b/prelude/rules.bzl index 6cf198a222..d132966efa 100644 --- a/prelude/rules.bzl +++ b/prelude/rules.bzl @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE", "check_buck2_compatibility") +load("@prelude//apple:apple_platforms.bzl", "APPLE_PLATFORMS_KEY") load("@prelude//configurations:rules.bzl", _config_implemented_rules = "implemented_rules") load("@prelude//decls/common.bzl", "prelude_rule") load("@prelude//is_full_meta_repo.bzl", "is_full_meta_repo") @@ -21,7 +23,7 @@ def _unimplemented_impl(name): # some features disabled. return partial(_unimplemented, name) -def _mk_rule(rule_spec: typing.Any): +def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), impl_override: [typing.Callable, None] = None, **kwargs): name = rule_spec.name attributes = rule_spec.attrs @@ -36,22 +38,26 @@ def _mk_rule(rule_spec: typing.Any): if toolchain_attr in attributes: fat_platform_compatible = False + #Add buck2_compatibility attribute to all rules + extra_attrs[BUCK2_COMPATIBILITY_ATTRIB_NAME] = BUCK2_COMPATIBILITY_ATTRIB_TYPE + # Fat platforms is an idea specific to our toolchains, so doesn't apply to # open source. Ideally this restriction would be done at the toolchain level. if not is_full_meta_repo(): fat_platform_compatible = True attributes = dict(attributes) + attributes.update(extra_attrs) if not fat_platform_compatible: # copy so we don't try change the passed in object attributes["_cxx_toolchain_target_configuration"] = attrs.dep(default = "prelude//platforms:fat_platform_incompatible") # Add _apple_platforms to all rules so that we may query the target platform to use until we support configuration # modifiers and can use them to set the configuration to use for operations. - # Map of string identifer to platform. - attributes["_apple_platforms"] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) + # Map of string identifier to platform. + attributes[APPLE_PLATFORMS_KEY] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) - extra_args = {} + extra_args = dict(kwargs) cfg = transitions.get(name) if cfg != None: extra_args["cfg"] = cfg @@ -79,17 +85,26 @@ def _mk_rule(rule_spec: typing.Any): impl = extra_impl if not impl: impl = _unimplemented_impl(name) + if impl_override != None: + impl = impl_override if rule_spec.uses_plugins != None: extra_args["uses_plugins"] = rule_spec.uses_plugins + extra_args.setdefault("is_configuration_rule", name in _config_implemented_rules) + extra_args.setdefault("is_toolchain_rule", name in toolchain_rule_names) return rule( - impl = impl, + impl = buck2_compatibility_check_wrapper(impl), attrs = attributes, - is_configuration_rule = name in _config_implemented_rules, - is_toolchain_rule = name in toolchain_rule_names, **extra_args ) +def buck2_compatibility_check_wrapper(impl) -> typing.Callable: + def buck2_compatibility_shim(ctx: AnalysisContext) -> [list[Provider], Promise]: + check_buck2_compatibility(ctx) + return impl(ctx) + + return buck2_compatibility_shim + def _flatten_decls(): decls = {} for decl_set in rule_decl_records: @@ -130,3 +145,9 @@ rules = {rule.name: _mk_rule(rule) for rule in _declared_rules.values()} # The rules are accessed by doing module.name, so we have to put them on the correct module. 
load_symbols(rules) + +# TODO(akrieger): Remove this and instead refactor to allow impl bzl files to export attrs. +def clone_rule(rule: str, extra_attrs: dict[str, typing.Any] = dict(), impl_override = None, **kwargs): + if not rule in _declared_rules: + fail("Tried clone rule {} which does not exist".format(rule)) + return _mk_rule(_declared_rules[rule], extra_attrs, impl_override, **kwargs) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index dc207cf0b8..d4bc100245 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -15,8 +15,9 @@ load("@prelude//cxx:cxx.bzl", "cxx_binary_impl", "cxx_library_impl", "cxx_precom load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_extra_attributes", "cxx_toolchain_impl") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//cxx:prebuilt_cxx_library_group.bzl", "prebuilt_cxx_library_group_impl") -load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx:windows_resource.bzl", "windows_resource_impl") load("@prelude//erlang:erlang.bzl", _erlang_implemented_rules = "implemented_rules") load("@prelude//git:git_fetch.bzl", "git_fetch_impl") load("@prelude//go:cgo_library.bzl", "cgo_library_impl") @@ -24,11 +25,14 @@ load("@prelude//go:coverage.bzl", "GoCoverageMode") load("@prelude//go:go_binary.bzl", "go_binary_impl") load("@prelude//go:go_exported_library.bzl", "go_exported_library_impl") load("@prelude//go:go_library.bzl", "go_library_impl") +load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") -load("@prelude//haskell:haskell.bzl", "HaskellLibraryProvider", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl") +load("@prelude//go/transitions:defs.bzl", "asan_attr", "cgo_enabled_attr", "compile_shared_attr", "coverage_mode_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr", "tags_attr") +load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") +load("@prelude//haskell:library_info.bzl", "HaskellLibraryProvider") load("@prelude//http_archive:http_archive.bzl", "http_archive_impl") load("@prelude//java:java.bzl", _java_extra_attributes = "extra_attributes", _java_implemented_rules = "implemented_rules") load("@prelude//js:js.bzl", _js_extra_attributes = "extra_attributes", _js_implemented_rules = "implemented_rules") @@ -36,9 +40,11 @@ load("@prelude//julia:julia.bzl", _julia_extra_attributes = "extra_attributes", load("@prelude//kotlin:kotlin.bzl", _kotlin_extra_attributes = "extra_attributes", _kotlin_implemented_rules = "implemented_rules") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//lua:cxx_lua_extension.bzl", "cxx_lua_extension_impl") load("@prelude//lua:lua_binary.bzl", "lua_binary_impl") load("@prelude//lua:lua_library.bzl", "lua_library_impl") +load("@prelude//matlab:matlab.bzl", _matlab_extra_attributes = "extra_attributes", _matlab_implemented_rules = 
"implemented_rules") load("@prelude//ocaml:attrs.bzl", _ocaml_extra_attributes = "ocaml_extra_attributes") load("@prelude//ocaml:ocaml.bzl", "ocaml_binary_impl", "ocaml_library_impl", "ocaml_object_impl", "ocaml_shared_impl", "prebuilt_ocaml_library_impl") load("@prelude//python:cxx_python_extension.bzl", "cxx_python_extension_impl") @@ -51,7 +57,7 @@ load("@prelude//python_bootstrap:python_bootstrap.bzl", "PythonBootstrapSources" load("@prelude//zip_file:zip_file.bzl", _zip_file_extra_attributes = "extra_attributes", _zip_file_implemented_rules = "implemented_rules") load("@prelude//apple/user/apple_resource_transition.bzl", "apple_resource_transition") load("@prelude//decls/android_rules.bzl", "android_rules") -load("@prelude//decls/common.bzl", "IncludeType", "LinkableDepType", "Linkage", "buck") +load("@prelude//decls/common.bzl", "IncludeType", "LinkableDepType", "buck") load("@prelude//decls/core_rules.bzl", "core_rules") load("@prelude//decls/cxx_rules.bzl", "cxx_rules") load("@prelude//decls/d_rules.bzl", "d_rules") @@ -76,7 +82,7 @@ load("@prelude//decls/shell_rules.bzl", "shell_rules") load("@prelude//decls/toolchains_common.bzl", "toolchains_common") load("@prelude//decls/uncategorized_rules.bzl", "uncategorized_rules") load("@prelude//transitions/constraint_overrides.bzl", "constraint_overrides_transition") -load(":alias.bzl", "alias_impl", "configured_alias_impl", "versioned_alias_impl") +load(":alias.bzl", "alias_impl", "configured_alias_impl", "toolchain_alias_impl", "versioned_alias_impl") load(":command_alias.bzl", "command_alias_impl") load(":export_file.bzl", "export_file_impl") load(":filegroup.bzl", "filegroup_impl") @@ -137,6 +143,7 @@ extra_implemented_rules = struct( sh_binary = sh_binary_impl, sh_test = sh_test_impl, test_suite = test_suite_impl, + toolchain_alias = toolchain_alias_impl, versioned_alias = versioned_alias_impl, worker_tool = worker_tool, @@ -154,6 +161,7 @@ extra_implemented_rules = struct( cxx_python_extension = cxx_python_extension_impl, prebuilt_cxx_library = prebuilt_cxx_library_impl, prebuilt_cxx_library_group = prebuilt_cxx_library_group_impl, + windows_resource = windows_resource_impl, # C++ / LLVM llvm_link_bitcode = llvm_link_bitcode_impl, @@ -167,6 +175,7 @@ extra_implemented_rules = struct( go_exported_library = go_exported_library_impl, go_library = go_library_impl, go_test = go_test_impl, + go_stdlib = go_stdlib_impl, #haskell haskell_library = haskell_library_impl, @@ -209,6 +218,7 @@ extra_implemented_rules = struct( _js_implemented_rules, _julia_implemented_rules, _kotlin_implemented_rules, + _matlab_implemented_rules, _zip_file_implemented_rules, ]) ) @@ -268,7 +278,7 @@ def _python_executable_attrs(): "executable_name": attrs.option(attrs.string(), default = None), "inplace_build_args": attrs.list(attrs.arg(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "link_style": attrs.enum(LinkableDepType, default = "static"), "main_function": attrs.option( @@ -285,23 +295,32 @@ def _python_executable_attrs(): """, ), "make_py_package": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - # entries for the generated __manifest__ python module - "manifest_module_entries": attrs.option(attrs.dict( - key = attrs.string(), - value = attrs.one_of( - attrs.dict(key = attrs.string(), value = attrs.option(attrs.any())), - 
attrs.list(attrs.string()), + "manifest_module_entries": attrs.option( + attrs.dict( + key = attrs.string(), + value = attrs.one_of( + attrs.dict(key = attrs.string(), value = attrs.option(attrs.any())), + attrs.list(attrs.string()), + ), ), - ), default = None), + default = None, + doc = """If present, it should be a `string` -> `entry` mapping that + gets generated into a `__manifest__` module in the executable. Top + level string keys will be the names of variables in this module (so + they must be valid Python identifiers). An `entry` can be a list of + `string`s, or a further `string`-keyed dictionary.""", + ), "native_link_strategy": attrs.option(attrs.enum(NativeLinkStrategy), default = None), "package_split_dwarf_dwp": attrs.bool(default = False), "par_style": attrs.option(attrs.string(), default = None), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), + "run_with_inplace": attrs.bool(default = False), "runtime_env": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None), "standalone_build_args": attrs.list(attrs.arg(), default = []), "static_extension_finder": attrs.source(default = "prelude//python/tools:static_extension_finder.py"), "static_extension_utils": attrs.source(default = "prelude//python/tools:static_extension_utils.cpp"), "strip_libpar": attrs.enum(StripLibparStrategy, default = "none"), + "strip_stapsdt": attrs.bool(default = False), "_create_manifest_for_source_dir": _create_manifest_for_source_dir(), "_cxx_hacks": attrs.default_only(attrs.dep(default = "prelude//cxx/tools:cxx_hacks")), "_cxx_toolchain": toolchains_common.cxx(), @@ -328,10 +347,11 @@ def _cxx_binary_and_test_attrs(): "binary_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "bolt_flags": attrs.list(attrs.arg(), default = []), "bolt_profile": attrs.option(attrs.source(), default = None), + "constraint_overrides": attrs.list(attrs.string(), default = []), "distributed_thinlto_partial_split_dwarf": attrs.bool(default = False), "enable_distributed_thinlto": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "link_whole": attrs.default_only(attrs.bool(default = False)), @@ -367,9 +387,15 @@ inlined_extra_attributes = { # go "cgo_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), + "_asan": asan_attr, + "_compile_shared": compile_shared_attr, + "_coverage_mode": coverage_mode_attr, "_cxx_toolchain": toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, + "_tags": tags_attr, }, # csharp "csharp_library": { @@ -389,11 +415,11 @@ inlined_extra_attributes = { "header_mode": attrs.option(attrs.enum(HeaderMode.values()), default = None), "link_deps_query_whole": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = 
[CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), "supports_header_symlink_subtarget": attrs.bool(default = False), "supports_python_dlopen": attrs.option(attrs.bool(), default = None), @@ -413,24 +439,53 @@ inlined_extra_attributes = { "go_binary": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "resources": attrs.list(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), default = []), + "_asan": asan_attr, "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, + "_tags": tags_attr, }, "go_exported_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), + "_asan": asan_attr, "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, + "_tags": tags_attr, }, "go_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), + "_asan": asan_attr, + "_cgo_enabled": cgo_enabled_attr, + "_compile_shared": compile_shared_attr, + "_coverage_mode": coverage_mode_attr, + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), + "_go_toolchain": toolchains_common.go(), + "_race": race_attr, + "_tags": tags_attr, + }, + "go_stdlib": { + "_asan": asan_attr, + "_cgo_enabled": cgo_enabled_attr, + "_compile_shared": compile_shared_attr, + "_exec_os_type": buck.exec_os_type_arg(), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, + "_tags": tags_attr, }, "go_test": { "coverage_mode": attrs.option(attrs.enum(GoCoverageMode.values()), default = None), "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "resources": attrs.list(attrs.source(allow_directory = True), default = []), + "_asan": asan_attr, + "_coverage_mode": coverage_mode_attr, "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, + "_tags": tags_attr, "_testmaingen": attrs.default_only(attrs.exec_dep(default = "prelude//go/tools:testmaingen")), }, @@ -443,7 +498,7 @@ inlined_extra_attributes = { }, "haskell_binary": { "auto_link_groups": attrs.bool(default = False), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "template_deps": attrs.list(attrs.exec_dep(providers = [HaskellLibraryProvider]), default = []), "_cxx_toolchain": toolchains_common.cxx(), "_haskell_toolchain": toolchains_common.haskell(), @@ -453,12 +508,16 @@ inlined_extra_attributes = { "_cxx_toolchain": toolchains_common.cxx(), "_haskell_toolchain": toolchains_common.haskell(), }, + "haskell_haddock": { + "_cxx_toolchain": toolchains_common.cxx(), + "_haskell_toolchain": toolchains_common.haskell(), + }, "haskell_ide": { "include_projects": attrs.list(attrs.dep(), default = []), "_haskell_toolchain": toolchains_common.haskell(), }, "haskell_library": { - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default 
= "any"), "template_deps": attrs.list(attrs.exec_dep(providers = [HaskellLibraryProvider]), default = []), "_cxx_toolchain": toolchains_common.cxx(), "_haskell_toolchain": toolchains_common.haskell(), @@ -474,7 +533,8 @@ inlined_extra_attributes = { "header_dirs": attrs.option(attrs.list(attrs.source(allow_directory = True)), default = None), "linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "platform_header_dirs": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.source(allow_directory = True)))), default = None), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "raw_headers": attrs.set(attrs.source(), sorted = True, default = []), @@ -536,6 +596,9 @@ inlined_extra_attributes = { }, "rust_test": {}, "sh_test": {}, + "windows_resource": { + "_cxx_toolchain": toolchains_common.cxx(), + }, } all_extra_attributes = _merge_dictionaries([ @@ -547,6 +610,7 @@ all_extra_attributes = _merge_dictionaries([ _js_extra_attributes, _julia_extra_attributes, _kotlin_extra_attributes, + _matlab_extra_attributes, _ocaml_extra_attributes, _zip_file_extra_attributes, ]) @@ -580,7 +644,13 @@ extra_attributes = struct(**all_extra_attributes) # Configuration transitions to pass `cfg` for builtin rules. transitions = { "android_binary": constraint_overrides_transition, + "apple_asset_catalog": apple_resource_transition, "apple_resource": apple_resource_transition, + "cxx_binary": constraint_overrides_transition, + "cxx_test": constraint_overrides_transition, + "go_binary": go_binary_transition, + "go_exported_library": go_exported_library_transition, + "go_test": go_test_transition, "python_binary": constraint_overrides_transition, "python_test": constraint_overrides_transition, } @@ -588,4 +658,5 @@ transitions = { toolchain_rule_names = [ "apple_toolchain", "swift_toolchain", + "toolchain_alias", ] diff --git a/prelude/runtime/BUCK.v2 b/prelude/runtime/BUCK.v2 new file mode 100644 index 0000000000..a4b3167a3e --- /dev/null +++ b/prelude/runtime/BUCK.v2 @@ -0,0 +1,12 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +# Used by open source projects to support `prelude//` + +config_setting( + name = "fbcode", + visibility = ["PUBLIC"], +) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index e7acc19e41..8e7c602faf 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -5,7 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//:artifact_tset.bzl", "project_artifacts") +load( + "@prelude//:artifact_tset.bzl", + "project_artifacts", +) load("@prelude//:local_only.bzl", "link_cxx_binary_locally") load("@prelude//:paths.bzl", "paths") load("@prelude//:resources.bzl", "create_resource_db", "gather_resources") @@ -21,22 +24,22 @@ load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:dwp.bzl", "dwp", "dwp_available") load( "@prelude//cxx:linker.bzl", - "get_default_shared_library_name", "get_shared_library_name_linker_flags", ) load( "@prelude//linking:link_info.bzl", "LinkArgs", - "LinkStyle", + "LinkStrategy", # @unused Used as a type "get_link_args_for_strategy", - "to_link_strategy", ) load( "@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//linking:strip.bzl", "strip_debug_info") load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") load("@prelude//utils:set.bzl", "set") load("@prelude//utils:utils.bzl", "flatten_dict") @@ -45,11 +48,13 @@ load( "BuildParams", # @unused Used as a type "CrateType", "Emit", + "MetadataKind", "crate_type_codegen", "crate_type_linked", - "emit_needs_codegen", + "dep_metadata_of_emit", "output_filename", ) +load(":clippy_configuration.bzl", "ClippyConfiguration") load( ":context.bzl", "CommonArgsInfo", @@ -60,7 +65,6 @@ load( load(":extern.bzl", "crate_map_arg", "extern_arg") load( ":failure_filter.bzl", - "RustFailureFilter", "failure_filter", ) load( @@ -68,29 +72,21 @@ load( "RustCxxLinkGroupInfo", #@unused Used as a type "RustDependency", "RustLinkInfo", - "RustLinkStyleInfo", "attr_crate", "attr_simple_crate_for_filenames", + "attr_soname", "get_available_proc_macros", "inherited_external_debug_info", "inherited_merged_link_infos", + "inherited_rust_external_debug_info", "inherited_shared_libs", "normalize_crate", "resolve_rust_deps", - "style_info", + "strategy_info", ) +load(":outputs.bzl", "RustcOutput") load(":resources.bzl", "rust_attr_resources") -load(":rust_toolchain.bzl", "RustToolchainInfo") - -RustcOutput = record( - output = field(Artifact), - diag = field(dict[str, Artifact]), - pdb = field([Artifact, None]), - dwp_output = field([Artifact, None]), - # Zero or more Split DWARF debug info files are emitted into this directory - # with unpredictable filenames. - dwo_output_directory = field([Artifact, None]), -) +load(":rust_toolchain.bzl", "PanicRuntime", "RustToolchainInfo") def compile_context(ctx: AnalysisContext) -> CompileContext: toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] @@ -122,12 +118,26 @@ def compile_context(ctx: AnalysisContext) -> CompileContext: clippy_wrapper = _clippy_wrapper(ctx, toolchain_info) dep_ctx = DepCollectionContext( - native_unbundle_deps = toolchain_info.native_unbundle_deps, + advanced_unstable_linking = toolchain_info.advanced_unstable_linking, include_doc_deps = False, is_proc_macro = getattr(ctx.attrs, "proc_macro", False), explicit_sysroot_deps = toolchain_info.explicit_sysroot_deps, + panic_runtime = toolchain_info.panic_runtime, ) + # When we pass explicit sysroot deps, we need to override the default sysroot to avoid accidentally + # linking against the prebuilt sysroot libs provided by the toolchain. Rustc requires a specific layout + # for these libs, so we need to carefully recreate the directory structure below. 
+ if toolchain_info.explicit_sysroot_deps: + empty_dir = ctx.actions.copied_dir("empty_dir", {}) + empty_sysroot = ctx.actions.copied_dir("empty_sysroot", {"lib/rustlib/" + toolchain_info.rustc_target_triple + "/lib": empty_dir}) + + sysroot_args = cmd_args("--sysroot=", empty_sysroot, delimiter = "") + elif toolchain_info.sysroot_path: + sysroot_args = cmd_args("--sysroot=", toolchain_info.sysroot_path, delimiter = "") + else: + sysroot_args = cmd_args() + return CompileContext( toolchain_info = toolchain_info, cxx_toolchain_info = cxx_toolchain_info, @@ -136,9 +146,8 @@ def compile_context(ctx: AnalysisContext) -> CompileContext: linker_args = linker, clippy_wrapper = clippy_wrapper, common_args = {}, - flagfiles_for_extern = {}, - flagfiles_for_crate_map = {}, transitive_dependency_dirs = {}, + sysroot_args = sysroot_args, ) def generate_rustdoc( @@ -159,9 +168,8 @@ def generate_rustdoc( dep_ctx = compile_ctx.dep_ctx, # to make sure we get the rmeta's generated for the crate dependencies, # rather than full .rlibs - emit = Emit("metadata"), + emit = Emit("metadata-full"), params = params, - dep_link_style = params.dep_link_style, default_roots = default_roots, is_rustdoc_test = False, ) @@ -170,16 +178,15 @@ def generate_rustdoc( output = ctx.actions.declare_output(subdir) plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + plain_env["RUSTDOC_BUCK_TARGET"] = cmd_args(str(ctx.label.raw_target())) rustdoc_cmd = cmd_args( - [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], - [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], - cmd_args(str(ctx.label.raw_target()), format = "--env=RUSTDOC_BUCK_TARGET={}"), toolchain_info.rustdoc, toolchain_info.rustdoc_flags, ctx.attrs.rustdoc_flags, common_args.args, cmd_args(output.as_output(), format = "--out-dir={}"), + hidden = [toolchain_info.rustdoc, compile_ctx.symlinked_srcs], ) if document_private_items: @@ -188,6 +195,7 @@ def generate_rustdoc( url_prefix = toolchain_info.extern_html_root_url_prefix if url_prefix != None: # Flag --extern-html-root-url used below is only supported on nightly. 
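The next hunk uses the standard workaround for driving a nightly-only flag from a stable toolchain: set RUSTC_BOOTSTRAP=1, then pass -Zunstable-options. The pattern in isolation, as a sketch (the helper name is made up):

    def _enable_unstable_rustdoc_flags(rustdoc_cmd: cmd_args, plain_env: dict[str, cmd_args]):
        # RUSTC_BOOTSTRAP=1 makes a stable rustc/rustdoc accept -Z flags.
        plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1")
        rustdoc_cmd.add("-Zunstable-options")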
+ plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1") rustdoc_cmd.add("-Zunstable-options") for dep in resolve_rust_deps(ctx, compile_ctx.dep_ctx): @@ -198,19 +206,24 @@ def generate_rustdoc( if dep.name: name = normalize_crate(dep.name) else: - name = dep.info.crate + # TODO: resolve this using dynamic (if set), see comment on D52476603 + name = dep.info.crate.simple rustdoc_cmd.add( "--extern-html-root-url={}={}/{}:{}" .format(name, url_prefix, dep.label.package, dep.label.name), ) - rustdoc_cmd.hidden(toolchain_info.rustdoc, compile_ctx.symlinked_srcs) + rustdoc_cmd_action = cmd_args( + [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], + [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], + rustdoc_cmd, + ) rustdoc_cmd = _long_command( ctx = ctx, exe = toolchain_info.rustc_action, - args = rustdoc_cmd, + args = rustdoc_cmd_action, argfile_name = "{}.args".format(subdir), ) @@ -218,27 +231,84 @@ def generate_rustdoc( return output +def generate_rustdoc_coverage( + ctx: AnalysisContext, + compile_ctx: CompileContext, + # link strategy doesn't matter, but caller should pass in build params + # with static-pic (to get best cache hits for deps) + params: BuildParams, + default_roots: list[str]) -> Artifact: + toolchain_info = compile_ctx.toolchain_info + + common_args = _compute_common_args( + ctx = ctx, + compile_ctx = compile_ctx, + dep_ctx = compile_ctx.dep_ctx, + # to make sure we get the rmeta's generated for the crate dependencies, + # rather than full .rlibs + emit = Emit("metadata-full"), + params = params, + default_roots = default_roots, + is_rustdoc_test = False, + ) + + file = common_args.subdir + "-rustdoc-coverage" + output = ctx.actions.declare_output(file) + + rustdoc_cmd = cmd_args( + toolchain_info.rustdoc, + toolchain_info.rustdoc_flags, + ctx.attrs.rustdoc_flags, + common_args.args, + "-Zunstable-options", + "--show-coverage", + ) + + exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" + plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + plain_env["RUSTDOC_BUCK_TARGET"] = cmd_args(str(ctx.label.raw_target())) + + rustdoc_cmd_action = cmd_args( + [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], + [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], + rustdoc_cmd, + ) + + rustdoc_cmd = _long_command( + ctx = ctx, + exe = toolchain_info.rustc_action, + args = rustdoc_cmd_action, + argfile_name = "{}.args".format(file), + ) + + cmd = cmd_args([toolchain_info.rustdoc_coverage, output.as_output(), rustdoc_cmd]) + + ctx.actions.run(cmd, category = "rustdoc_coverage") + + return output + def generate_rustdoc_test( ctx: AnalysisContext, compile_ctx: CompileContext, - link_style: LinkStyle, - library: RustLinkStyleInfo, + link_strategy: LinkStrategy, + rlib: Artifact, params: BuildParams, - default_roots: list[str]) -> (cmd_args, dict[str, cmd_args]): + default_roots: list[str]) -> cmd_args: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info doc_dep_ctx = DepCollectionContext( - native_unbundle_deps = compile_ctx.dep_ctx.native_unbundle_deps, + advanced_unstable_linking = compile_ctx.dep_ctx.advanced_unstable_linking, include_doc_deps = True, is_proc_macro = False, explicit_sysroot_deps = compile_ctx.dep_ctx.explicit_sysroot_deps, + panic_runtime = compile_ctx.dep_ctx.panic_runtime, ) resources = create_resource_db( ctx = ctx, name = 
"doctest/resources.json", - binary = library.rlib, + binary = rlib, resources = flatten_dict(gather_resources( label = ctx.label, resources = rust_attr_resources(ctx), @@ -247,16 +317,15 @@ def generate_rustdoc_test( ) # Gather and setup symlink tree of transitive shared library deps. - shared_libs = {} - if link_style == LinkStyle("shared"): + shared_libs = [] + if link_strategy == LinkStrategy("shared"): shlib_info = merge_shared_libraries( ctx.actions, deps = inherited_shared_libs(ctx, doc_dep_ctx), ) - for soname, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[soname] = shared_lib.lib + shared_libs.extend(traverse_shared_library_info(shlib_info)) executable_args = executable_shared_lib_arguments( - ctx.actions, + ctx, compile_ctx.cxx_toolchain_info, resources, shared_libs, @@ -268,7 +337,6 @@ def generate_rustdoc_test( dep_ctx = doc_dep_ctx, emit = Emit("link"), params = params, - dep_link_style = params.dep_link_style, default_roots = default_roots, is_rustdoc_test = True, ) @@ -281,8 +349,7 @@ def generate_rustdoc_test( get_link_args_for_strategy( ctx, inherited_merged_link_infos(ctx, doc_dep_ctx), - # TODO(cjhopman): It's unclear how rust is using link_style. I'm not sure if it's intended to be a LibOutputStyle or a LinkStrategy. - to_link_strategy(link_style), + link_strategy, ), ], "{}-{}".format(common_args.subdir, common_args.tempfile), @@ -301,14 +368,30 @@ def generate_rustdoc_test( else: runtool = ["--runtool=/usr/bin/env"] + plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + doc_plain_env, doc_path_env = _process_env(compile_ctx, ctx.attrs.doc_env, exec_is_windows) + for k, v in doc_plain_env.items(): + path_env.pop(k, None) + plain_env[k] = v + for k, v in doc_path_env.items(): + plain_env.pop(k, None) + path_env[k] = v + + # `--runtool` is unstable. + plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1") + unstable_options = ["-Zunstable-options"] + rustdoc_cmd = cmd_args( + [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], + [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], + toolchain_info.rustdoc, "--test", - "-Zunstable-options", + unstable_options, cmd_args("--test-builder=", toolchain_info.compiler, delimiter = ""), toolchain_info.rustdoc_flags, ctx.attrs.rustdoc_flags, common_args.args, - extern_arg(ctx, compile_ctx, [], attr_crate(ctx), library.rlib), + extern_arg([], attr_crate(ctx), rlib), "--extern=proc_macro" if ctx.attrs.proc_macro else [], compile_ctx.linker_args, cmd_args(linker_argsfile, format = "-Clink-arg=@{}"), @@ -317,70 +400,20 @@ def generate_rustdoc_test( cmd_args("--runtool-arg=--resources=", resources, delimiter = ""), "--color=always", "--test-args=--color=always", + hidden = [ + compile_ctx.symlinked_srcs, + link_args_output.hidden, + executable_args.runtime_files, + ], ) - rustdoc_cmd.hidden( - compile_ctx.symlinked_srcs, - link_args_output.hidden, - executable_args.runtime_files, - ) - - rustdoc_cmd = _long_command( + return _long_command( ctx = ctx, - exe = toolchain_info.rustdoc, + exe = toolchain_info.rustc_action, args = rustdoc_cmd, argfile_name = "{}.args".format(common_args.subdir), ) - plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) - rustdoc_env = plain_env | path_env - - # Pass everything in env + doc_env, except ones with value None in doc_env. 
- for k, v in ctx.attrs.doc_env.items(): - if v == None: - rustdoc_env.pop(k, None) - else: - rustdoc_env[k] = cmd_args(v) - rustdoc_env["RUSTC_BOOTSTRAP"] = cmd_args("1") # for `-Zunstable-options` - - return (rustdoc_cmd, rustdoc_env) - -# Generate multiple compile artifacts so that distinct sets of artifacts can be -# generated concurrently. -def rust_compile_multi( - ctx: AnalysisContext, - compile_ctx: CompileContext, - emits: list[Emit], - params: BuildParams, - dep_link_style: LinkStyle, - default_roots: list[str], - extra_link_args: list[typing.Any] = [], - predeclared_outputs: dict[Emit, Artifact] = {}, - extra_flags: list[[str, ResolvedStringWithMacros]] = [], - is_binary: bool = False, - allow_cache_upload: bool = False, - rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> list[RustcOutput]: - outputs = [] - - for emit in emits: - outs = rust_compile( - ctx = ctx, - compile_ctx = compile_ctx, - emit = emit, - params = params, - dep_link_style = dep_link_style, - default_roots = default_roots, - extra_link_args = extra_link_args, - predeclared_outputs = predeclared_outputs, - extra_flags = extra_flags, - is_binary = is_binary, - allow_cache_upload = allow_cache_upload, - rust_cxx_link_group_info = rust_cxx_link_group_info, - ) - outputs.append(outs) - - return outputs - # Generate a compilation action. A single instance of rustc can emit # numerous output artifacts, so return an artifact object for each of # them. @@ -389,12 +422,11 @@ def rust_compile( compile_ctx: CompileContext, emit: Emit, params: BuildParams, - dep_link_style: LinkStyle, default_roots: list[str], extra_link_args: list[typing.Any] = [], - predeclared_outputs: dict[Emit, Artifact] = {}, + predeclared_output: Artifact | None = None, extra_flags: list[[str, ResolvedStringWithMacros]] = [], - is_binary: bool = False, + designated_clippy: bool = False, allow_cache_upload: bool = False, rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> RustcOutput: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" @@ -403,13 +435,22 @@ def rust_compile( lints, clippy_lints = _lint_flags(compile_ctx) + # If we are building metadata-full for a dylib target, we want the hollow-rlib version of rmeta, not the shared lib version. + if compile_ctx.dep_ctx.advanced_unstable_linking and emit == Emit("metadata-full") and params.crate_type == CrateType("dylib"): + params = BuildParams( + crate_type = CrateType("rlib"), + reloc_model = params.reloc_model, + dep_link_strategy = params.dep_link_strategy, + prefix = "lib", + suffix = ".rlib", + ) + common_args = _compute_common_args( ctx = ctx, compile_ctx = compile_ctx, dep_ctx = compile_ctx.dep_ctx, emit = emit, params = params, - dep_link_style = dep_link_style, default_roots = default_roots, is_rustdoc_test = False, ) @@ -420,7 +461,6 @@ def rust_compile( lints, # Report unused --extern crates in the notification stream. ["--json=unused-externs-silent", "-Wunused-crate-dependencies"] if toolchain_info.report_unused_deps else [], - "--json=artifacts", # only needed for pipeline but no harm in always leaving it enabled common_args.args, cmd_args("--remap-path-prefix=", compile_ctx.symlinked_srcs, path_sep, "=", ctx.label.path, path_sep, delimiter = ""), compile_ctx.linker_args, @@ -434,22 +474,19 @@ def rust_compile( # use the predeclared one as the output after the failure filter action # below. Otherwise we'll use the predeclared outputs directly. 
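The branch that follows is equivalent to binding the predeclared artifact only on the no-filter path. Condensed as a sketch (not the literal code):

    emit_op = _rustc_emit(
        ctx = ctx,
        emit = emit,
        subdir = common_args.subdir,
        params = params,
        # With a failure filter, rustc writes to a scratch output and the
        # filter action materializes the predeclared artifact only on
        # success; without one, rustc writes to it directly.
        predeclared_output = None if toolchain_info.failure_filter else predeclared_output,
    )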
if toolchain_info.failure_filter: - emit_output, emit_args, extra_out = _rustc_emit( + emit_op = _rustc_emit( ctx = ctx, - compile_ctx = compile_ctx, emit = emit, - predeclared_outputs = {}, subdir = common_args.subdir, params = params, ) else: - emit_output, emit_args, extra_out = _rustc_emit( + emit_op = _rustc_emit( ctx = ctx, - compile_ctx = compile_ctx, emit = emit, - predeclared_outputs = predeclared_outputs, subdir = common_args.subdir, params = params, + predeclared_output = predeclared_output, ) pdb_artifact = None @@ -473,8 +510,7 @@ def rust_compile( ctx, compile_ctx.dep_ctx, ), - # TODO(cjhopman): It's unclear how rust is using link_style. I'm not sure if it's intended to be a LibOutputStyle or a LinkStrategy. - to_link_strategy(dep_link_style), + params.dep_link_strategy, ) link_args_output = make_link_args( @@ -485,7 +521,7 @@ def rust_compile( inherited_link_args, ], "{}-{}".format(subdir, tempfile), - output_short_path = emit_output.short_path, + output_short_path = emit_op.output.short_path, ) linker_argsfile, _ = ctx.actions.write( "{}/__{}_linker_args.txt".format(subdir, tempfile), @@ -496,35 +532,39 @@ def rust_compile( pdb_artifact = link_args_output.pdb_artifact dwp_inputs = [link_args_output.link_args] rustc_cmd.add(cmd_args(linker_argsfile, format = "-Clink-arg=@{}")) - rustc_cmd.hidden(link_args_output.hidden) + rustc_cmd.add(cmd_args(hidden = link_args_output.hidden)) - (diag, build_status) = _rustc_invoke( + invoke = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, + common_args = common_args, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), - rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_args), - diag = "diag", - required_outputs = [emit_output], - short_cmd = common_args.short_cmd, - is_binary = is_binary, + rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_op.args), + required_outputs = [emit_op.output], + is_clippy = False, allow_cache_upload = allow_cache_upload, crate_map = common_args.crate_map, - only_artifact = "metadata" if toolchain_info.pipelined and emit == Emit("metadata") else None, + env = emit_op.env, ) - # Add clippy diagnostic targets for check builds - if common_args.is_check: + # Add clippy diagnostic targets next to the designated check build + if designated_clippy: # We don't really need the outputs from this build, just to keep the artifact accounting straight - clippy_out, clippy_emit_args, _extra_out = _rustc_emit( + clippy_emit_op = _rustc_emit( ctx = ctx, - compile_ctx = compile_ctx, emit = emit, - predeclared_outputs = {}, subdir = common_args.subdir + "-clippy", params = params, ) - clippy_env = dict() - if toolchain_info.clippy_toml: + clippy_env = clippy_emit_op.env + + clippy_toml = None + if ctx.attrs.clippy_configuration: + clippy_toml = ctx.attrs.clippy_configuration[ClippyConfiguration].clippy_toml + elif toolchain_info.clippy_toml: + clippy_toml = toolchain_info.clippy_toml + + if clippy_toml: # Clippy wants to be given a path to a directory containing a # clippy.toml (or .clippy.toml). Our buckconfig accepts an arbitrary # label like //path/to:my-clippy.toml which may not have the @@ -532,65 +572,77 @@ def rust_compile( # symlinks the requested configuration file under the required name. 
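As context for the CLIPPY_CONF_DIR handling here: with the ClippyConfiguration provider wired in above, a target can opt into a per-target lint config instead of the toolchain-wide clippy.toml. A hypothetical usage sketch; the attribute name follows the ctx.attrs.clippy_configuration access above, but the target and label are invented:

    rust_library(
        name = "my_crate",
        srcs = ["src/lib.rs"],
        # must resolve to a target providing ClippyConfiguration; its
        # clippy_toml artifact is what gets symlinked in as clippy.toml below
        clippy_configuration = "//tools/lint:clippy",
    )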
clippy_conf_dir = ctx.actions.symlinked_dir( common_args.subdir + "-clippy-configuration", - {"clippy.toml": toolchain_info.clippy_toml}, + {"clippy.toml": clippy_toml}, ) clippy_env["CLIPPY_CONF_DIR"] = clippy_conf_dir - (clippy_diag, _) = _rustc_invoke( + clippy_invoke = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, + common_args = common_args, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), # Lints go first to allow other args to override them. - rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_args), + rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_op.args), env = clippy_env, - diag = "clippy", - required_outputs = [clippy_out], - short_cmd = common_args.short_cmd, - is_binary = False, + required_outputs = [clippy_emit_op.output], + is_clippy = True, allow_cache_upload = False, crate_map = common_args.crate_map, ) - diag.update(clippy_diag) + else: + clippy_invoke = None if toolchain_info.failure_filter: # This is only needed when this action's output is being used as an # input, so we only need standard diagnostics (clippy is always # asked for explicitly). - stderr = diag["diag.txt"] - filter_prov = RustFailureFilter( - buildstatus = build_status, - required = emit_output, - stderr = stderr, - ) - filtered_output = failure_filter( ctx = ctx, compile_ctx = compile_ctx, - prefix = "{}/{}".format(common_args.subdir, emit.value), - predecl_out = predeclared_outputs.get(emit), - failprov = filter_prov, - short_cmd = common_args.short_cmd, + predeclared_output = predeclared_output, + build_status = invoke.build_status, + required = emit_op.output, + stderr = invoke.diag_txt, + identifier = invoke.identifier, ) else: - filtered_output = emit_output + filtered_output = emit_op.output split_debug_mode = compile_ctx.cxx_toolchain_info.split_debug_mode or SplitDebugMode("none") if emit == Emit("link") and split_debug_mode != SplitDebugMode("none"): - dwo_output_directory = extra_out - external_debug_info = inherited_external_debug_info( + dwo_output_directory = emit_op.extra_out + + # staticlibs and cdylibs are "bundled" in the sense that they are used + # without their dependencies by the rest of the rules. This is normally + # correct, except that the split debuginfo rustc emits for these crate + # types is not bundled. 
This is arguably inconsistent behavior from + # rustc, but in any case, it means we need to do this bundling manually + # by collecting all the external debuginfo from dependencies + if params.crate_type == CrateType("cdylib") or params.crate_type == CrateType("staticlib"): + extra_external_debug_info = inherited_rust_external_debug_info( + ctx = ctx, + dep_ctx = compile_ctx.dep_ctx, + link_strategy = params.dep_link_strategy, + ) + else: + extra_external_debug_info = [] + all_external_debug_info = inherited_external_debug_info( ctx = ctx, dep_ctx = compile_ctx.dep_ctx, dwo_output_directory = dwo_output_directory, - dep_link_style = params.dep_link_style, + dep_link_strategy = params.dep_link_strategy, ) - dwp_inputs.extend(project_artifacts(ctx.actions, [external_debug_info])) + dwp_inputs.extend(project_artifacts(ctx.actions, [all_external_debug_info])) else: dwo_output_directory = None + extra_external_debug_info = [] - if is_binary and dwp_available(compile_ctx.cxx_toolchain_info): + if params.crate_type == CrateType("bin") and \ + emit == Emit("link") and \ + dwp_available(compile_ctx.cxx_toolchain_info): dwp_output = dwp( ctx, compile_ctx.cxx_toolchain_info, - emit_output, + emit_op.output, identifier = "{}/__{}_{}_dwp".format(common_args.subdir, common_args.tempfile, str(emit)), category_suffix = "rust", # TODO(T110378142): Ideally, referenced objects are a list of @@ -602,29 +654,49 @@ def rust_compile( else: dwp_output = None + stripped_output = strip_debug_info( + ctx, + paths.join(common_args.subdir, "stripped", output_filename( + attr_simple_crate_for_filenames(ctx), + Emit("link"), + params, + )), + filtered_output, + ) + return RustcOutput( output = filtered_output, - diag = diag, + stripped_output = stripped_output, + diag_txt = invoke.diag_txt, + diag_json = invoke.diag_json, + # Only available on metadata-like emits + clippy_txt = clippy_invoke.diag_txt if clippy_invoke else None, + clippy_json = clippy_invoke.diag_json if clippy_invoke else None, pdb = pdb_artifact, dwp_output = dwp_output, dwo_output_directory = dwo_output_directory, + extra_external_debug_info = extra_external_debug_info, ) # --extern = for direct dependencies # -Ldependency= for transitive dependencies # For native dependencies, we use -Clink-arg=@argsfile -# Second element of result tuple is a list of files/directories that should be present for executable to be run successfully -# Third return is the mapping from crate names back to targets (needed so that a deps linter knows what deps need fixing) # -# The `compile_ctx` may be omitted if `is_check` is `True` and there are no dependencies with dynamic crate names +# Second element of returned tuple is a mapping from crate names back to target +# label, needed for applying autofixes for rustc's unused_crate_dependencies +# lint by tracing Rust crate names in the compiler diagnostic back to which +# dependency entry in the BUCK file needs to be removed. +# +# The `compile_ctx` may be omitted if there are no dependencies with dynamic +# crate names. 
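To make the doc comment above concrete, the flags this function assembles land on the rustc command line in roughly this shape (all paths are illustrative; real ones live under buck-out):

    # --extern for direct dependencies, each with an explicit artifact path:
    #     --extern=serde=buck-out/.../libserde-1a2b3c.rlib
    # -Ldependency for transitive dependencies, via symlinked directories:
    #     -Ldependency=buck-out/.../transitive-deps-dir
    # native dependencies through a linker argsfile:
    #     -Clink-arg=@buck-out/.../__main_linker_args.txt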
def dependency_args( ctx: AnalysisContext, compile_ctx: CompileContext | None, + toolchain_info: RustToolchainInfo, deps: list[RustDependency], subdir: str, - crate_type: CrateType, - dep_link_style: LinkStyle, - is_check: bool, + dep_link_strategy: LinkStrategy, + dep_metadata_kind: MetadataKind, is_rustdoc_test: bool) -> (cmd_args, list[(CrateName, Label)]): args = cmd_args() transitive_deps = {} @@ -639,31 +711,24 @@ def dependency_args( else: crate = dep.info.crate - style = style_info(dep.info, dep_link_style) - - use_rmeta = is_check or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type) and not is_rustdoc_test) + strategy = strategy_info(toolchain_info, dep.info, dep_link_strategy) - # Use rmeta dependencies whenever possible because they - # should be cheaper to produce. - if use_rmeta: - artifact = style.rmeta - transitive_artifacts = style.transitive_rmeta_deps - else: - artifact = style.rlib - transitive_artifacts = style.transitive_deps + artifact = strategy.outputs[dep_metadata_kind] + transitive_artifacts = strategy.transitive_deps[dep_metadata_kind] - for marker in style.transitive_proc_macro_deps.keys(): + for marker in strategy.transitive_proc_macro_deps.keys(): info = available_proc_macros[marker.label][RustLinkInfo] - style = style_info(info, dep_link_style) - transitive_deps[style.rmeta if use_rmeta else style.rlib] = info.crate + strategy = strategy_info(toolchain_info, info, dep_link_strategy) + transitive_deps[strategy.outputs[MetadataKind("link")]] = info.crate - args.add(extern_arg(ctx, compile_ctx, dep.flags, crate, artifact)) + args.add(extern_arg(dep.flags, crate, artifact)) crate_targets.append((crate, dep.label)) - # Because deps of this *target* can also be transitive deps of this compiler - # invocation, pass the artifact through `-L` unconditionally for doc tests. + # Because deps of this *target* can also be transitive deps of this + # compiler invocation, pass the artifact (under its original crate name) + # through `-L` unconditionally for doc tests. if is_rustdoc_test: - transitive_deps[artifact] = crate + transitive_deps[artifact] = dep.info.crate # Unwanted transitive_deps have already been excluded transitive_deps.update(transitive_artifacts) @@ -676,7 +741,7 @@ def dependency_args( else: simple_artifacts[artifact] = None - prefix = "{}-deps{}".format(subdir, "-check" if is_check else "") + prefix = "{}-deps{}".format(subdir, dep_metadata_kind.value) if simple_artifacts: args.add(simple_symlinked_dirs(ctx, prefix, simple_artifacts)) if dynamic_artifacts: @@ -710,20 +775,34 @@ def dynamic_symlinked_dirs( artifacts: dict[Artifact, CrateName]) -> cmd_args: name = "{}-dyn".format(prefix) transitive_dependency_dir = ctx.actions.declare_output(name, dir = True) - do_symlinks = cmd_args( - compile_ctx.toolchain_info.transitive_dependency_symlinks_tool, - cmd_args(transitive_dependency_dir.as_output(), format = "--out-dir={}"), + + # Pass the list of rlibs to transitive_dependency_symlinks.py through a file + # because there can be a lot of them. This avoids running out of command + # line length, particularly on Windows. 
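The same command-line-length concern is handled generically elsewhere in this file by _long_command via at_argfile; for reference, the pattern is (condensed from the helper that appears further below, with a made-up wrapper name):

    load("@prelude//utils:argfile.bzl", "at_argfile")

    def _spill_to_argfile(ctx: AnalysisContext, exe: RunInfo, args: cmd_args, name: str) -> cmd_args:
        # Writes `args` to a file and passes it as @file, so arbitrarily
        # many --extern/-L flags cannot overflow the platform argv limit.
        return cmd_args(exe, at_argfile(
            actions = ctx.actions,
            name = name,
            args = args,
            allow_args = True,
        ))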
+ relative_path = lambda artifact: (cmd_args(artifact, delimiter = "", ignore_artifacts = True) + .relative_to(transitive_dependency_dir.project("i"))) + artifacts_json = ctx.actions.write_json( + ctx.actions.declare_output("{}-dyn.json".format(prefix)), + [ + (relative_path(artifact), crate.dynamic) + for artifact, crate in artifacts.items() + ], + with_inputs = True, + pretty = True, ) - for artifact, crate in artifacts.items(): - relative_path = cmd_args(artifact).relative_to(transitive_dependency_dir.project("i")) - do_symlinks.add("--artifact", crate.dynamic, relative_path.ignore_artifacts()) + ctx.actions.run( - do_symlinks, - category = "tdep_symlinks", + [ + compile_ctx.toolchain_info.transitive_dependency_symlinks_tool, + cmd_args(transitive_dependency_dir.as_output(), format = "--out-dir={}"), + cmd_args(artifacts_json, format = "--artifacts={}"), + ], + category = "deps", identifier = str(len(compile_ctx.transitive_dependency_dirs)), ) + compile_ctx.transitive_dependency_dirs[transitive_dependency_dir] = None - return cmd_args(transitive_dependency_dir, format = "@{}/dirs").hidden(artifacts.keys()) + return cmd_args(transitive_dependency_dir, format = "@{}/dirs", hidden = artifacts.keys()) def _lintify(flag: str, clippy: bool, lints: list[ResolvedStringWithMacros]) -> cmd_args: return cmd_args( @@ -765,7 +844,6 @@ def _compute_common_args( dep_ctx: DepCollectionContext, emit: Emit, params: BuildParams, - dep_link_style: LinkStyle, default_roots: list[str], is_rustdoc_test: bool) -> CommonArgsInfo: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" @@ -773,18 +851,12 @@ def _compute_common_args( crate_type = params.crate_type - args_key = (crate_type, emit, dep_link_style, is_rustdoc_test) - if False: - # TODO(nga): following `if args_key in ...` is no-op, and typechecker does not like it. - def unknown(): - pass - - args_key = unknown() + args_key = (crate_type, emit, params.dep_link_strategy, is_rustdoc_test) if args_key in compile_ctx.common_args: return compile_ctx.common_args[args_key] # Keep filenames distinct in per-flavour subdirs - subdir = "{}-{}-{}-{}".format(crate_type.value, params.reloc_model.value, dep_link_style.value, emit.value) + subdir = "{}-{}-{}-{}".format(crate_type.value, params.reloc_model.value, params.dep_link_strategy.value, emit.value) if is_rustdoc_test: subdir = "{}-rustdoc-test".format(subdir) @@ -798,25 +870,42 @@ def _compute_common_args( if exec_is_windows: crate_root = crate_root.replace("/", "\\") - is_check = not emit_needs_codegen(emit) + # With `advanced_unstable_linking`, we unconditionally pass the metadata + # artifacts. There are two things that work together to make this possible + # in the case of binaries: + # + # 1. The actual rlibs appear in the link providers, so they'll still be + # available for the linker to link in + # 2. The metadata artifacts aren't rmetas, but rather rlibs that just + # don't contain any generated code. Rustc can't distinguish these + # from real rlibs, and so doesn't throw an error + # + # The benefit of doing this is that there's no requirement that the + # dependency's generated code be provided to the linker via an rlib. 
It + # could be provided by other means, say, a link group + dep_metadata_kind = dep_metadata_of_emit(emit) + is_check = dep_metadata_kind != MetadataKind("link") + if compile_ctx.dep_ctx.advanced_unstable_linking or not crate_type_codegen(crate_type): + if dep_metadata_kind == MetadataKind("link"): + dep_metadata_kind = MetadataKind("full") dep_args, crate_map = dependency_args( ctx = ctx, compile_ctx = compile_ctx, + toolchain_info = compile_ctx.toolchain_info, deps = resolve_rust_deps(ctx, dep_ctx), subdir = subdir, - crate_type = crate_type, - dep_link_style = dep_link_style, - is_check = is_check, + dep_link_strategy = params.dep_link_strategy, + dep_metadata_kind = dep_metadata_kind, is_rustdoc_test = is_rustdoc_test, ) if crate_type == CrateType("proc-macro"): dep_args.add("--extern=proc_macro") - if crate_type == CrateType("cdylib") or crate_type == CrateType("dylib") and not is_check: + if crate_type in [CrateType("cdylib"), CrateType("dylib")] and not is_check: linker_info = compile_ctx.cxx_toolchain_info.linker_info - shlib_name = get_default_shared_library_name(linker_info, ctx.label) + shlib_name = attr_soname(ctx) dep_args.add(cmd_args( get_shared_library_name_linker_flags(linker_info.type, shlib_name), format = "-Clink-arg={}", @@ -832,6 +921,46 @@ def _compute_common_args( else: crate_name_arg = cmd_args("--crate-name=", crate.simple, delimiter = "") + # The `-Cprefer-dynamic` flag controls rustc's choice of artifacts for + # transitive dependencies, both for loading metadata and linking them. + # Direct dependencies are given to rustc one-by-one using `--extern` with a + # path to a specific artifact, so there is never ambiguity what artifact to + # use for a direct dependency. But transitive dependencies are passed in + # bulk via zero or more `-Ldependency` flags, which are directories + # containing artifacts. Within those directories, information about a + # specific crate might be available from more than one artifact, such as a + # dylib and rlib for the same crate. + # + # With `-Cprefer-dynamic=no` (the default), when a transitive dependency + # exists as both rlib and dylib, metadata is loaded from the rlib. If some + # dependencies are available in dylib but not rlib, the dylib is used for + # those. With `-Cprefer-dynamic=yes`, when a transitive dependency exists as + # both rlib and dylib, instead the dylib is used. + # + # The ambiguity over whether to use rlib or dylib for a particular + # transitive dependency only occurs if the rlib and dylib both describe the + # same crate i.e. contain the same crate hash. + # + # Buck-built libraries never produce an rlib and dylib containing the same + # crate hash, since that only occurs when outputting multiple crate types + # through a single rustc invocation: `--crate-type=rlib --crate-type=dylib`. + # In Buck, different crate types are built by different rustc invocations. + # But Cargo does invoke rustc with multiple crate types when you write + # `[lib] crate-type = ["rlib", "dylib"]` in Cargo.toml, and in fact the + # standard libraries built by x.py and distributed by Rustup are built this + # way. + if toolchain_info.explicit_sysroot_deps: + # Standard libraries are being passed explicitly, and Buck-built + # dependencies never collide on crate hash, so `-Cprefer-dynamic` cannot + # make a difference. + prefer_dynamic_flags = [] + elif crate_type == CrateType("dylib") and toolchain_info.advanced_unstable_linking: + # Use standard library dylibs from the implicit sysroot. 
+ prefer_dynamic_flags = ["-Cprefer-dynamic=yes"] + else: + # Use standard library rlibs from the implicit sysroot. + prefer_dynamic_flags = ["-Cprefer-dynamic=no"] # (the default) + split_debuginfo_flags = { # Rustc's default behavior: debug info is put into every rlib and # staticlib, then copied into the executables and shared libraries by @@ -874,7 +1003,6 @@ def _compute_common_args( # TODO: SplitDebugMode("split"): ["-Csplit-debuginfo=unpacked"], }[compile_ctx.cxx_toolchain_info.split_debug_mode or SplitDebugMode("none")] - null_path = "nul" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "/dev/null" args = cmd_args( cmd_args(compile_ctx.symlinked_srcs, path_sep, crate_root, delimiter = ""), crate_name_arg, @@ -884,14 +1012,15 @@ def _compute_common_args( "-Cmetadata={}".format(_metadata(ctx.label, is_rustdoc_test)[0]), # Make diagnostics json with the option to extract rendered text ["--error-format=json", "--json=diagnostic-rendered-ansi"] if not is_rustdoc_test else [], - ["-Cprefer-dynamic=yes"] if crate_type == CrateType("dylib") else [], + prefer_dynamic_flags, ["--target={}".format(toolchain_info.rustc_target_triple)] if toolchain_info.rustc_target_triple else [], split_debuginfo_flags, - ["--sysroot=" + null_path] if toolchain_info.explicit_sysroot_deps != None else [], + compile_ctx.sysroot_args, + ["-Cpanic=abort", "-Zpanic-abort-tests=yes"] if toolchain_info.panic_runtime == PanicRuntime("abort") else [], _rustc_flags(toolchain_info.rustc_flags), - _rustc_flags(toolchain_info.rustc_check_flags) if is_check else [], _rustc_flags(toolchain_info.rustc_coverage_flags) if ctx.attrs.coverage else [], _rustc_flags(ctx.attrs.rustc_flags), + _rustc_flags(toolchain_info.extra_rustc_flags), cmd_args(ctx.attrs.features, format = '--cfg=feature="{}"'), dep_args, ) @@ -900,7 +1029,9 @@ def _compute_common_args( args = args, subdir = subdir, tempfile = tempfile, - short_cmd = "{},{},{}".format(crate_type.value, params.reloc_model.value, emit.value), + crate_type = crate_type, + params = params, + emit = emit, is_check = is_check, crate_map = crate_map, ) @@ -921,7 +1052,7 @@ def _clippy_wrapper( if toolchain_info.rustc_target_triple: rustc_print_sysroot.add("--target={}".format(toolchain_info.rustc_target_triple)) - skip_setting_sysroot = toolchain_info.explicit_sysroot_deps != None + skip_setting_sysroot = toolchain_info.explicit_sysroot_deps != None or toolchain_info.sysroot_path != None if ctx.attrs._exec_os_type[OsLookup].platform == "windows": wrapper_file, _ = ctx.actions.write( @@ -929,11 +1060,8 @@ def _clippy_wrapper( [ "@echo off", "set __CLIPPY_INTERNAL_TESTS=true", - cmd_args(rustc_print_sysroot, format = 'FOR /F "tokens=* USEBACKQ" %%F IN (`{}`) DO ('), - ] + ( - [] if skip_setting_sysroot else ["set SYSROOT=%%F"] - ) + [ - ")", + ] + [ + cmd_args(rustc_print_sysroot, format = 'FOR /F "tokens=* USEBACKQ" %%F IN (`{}`) DO (set SYSROOT=%%F)') if not skip_setting_sysroot else "", cmd_args(clippy_driver, format = "{} %*"), ], allow_args = True, @@ -954,7 +1082,7 @@ def _clippy_wrapper( allow_args = True, ) - return cmd_args(wrapper_file).hidden(clippy_driver, rustc_print_sysroot) + return cmd_args(wrapper_file, hidden = [clippy_driver, rustc_print_sysroot]) # This is a hack because we need to pass the linker to rustc # using -Clinker=path and there is currently no way of doing this @@ -1009,34 +1137,69 @@ def _crate_root( if candidates.size() == 1: return candidates.list()[0] - fail("Could not infer crate_root. 
candidates=%s\nAdd 'crate_root = \"src/example.rs\"' to your attributes to disambiguate." % candidates.list()) + fail("Could not infer crate_root." + + "\nMake sure you have one of {} in your `srcs` attribute.".format(default_roots) + + "\nOr add 'crate_root = \"src/example.rs\"' to your attributes to disambiguate. candidates={}".format(candidates.list())) + +def _explain(crate_type: CrateType, link_strategy: LinkStrategy, emit: Emit) -> str: + if emit == Emit("metadata-full"): + link_strategy_suffix = { + LinkStrategy("static"): " [static]", + LinkStrategy("static_pic"): " [pic]", + LinkStrategy("shared"): " [shared]", + }[link_strategy] + return "metadata" + link_strategy_suffix + + if emit == Emit("metadata-fast"): + return "check" + + if emit == Emit("link"): + link_strategy_suffix = { + LinkStrategy("static"): "", + LinkStrategy("static_pic"): " [pic]", + LinkStrategy("shared"): " [shared]", + }[link_strategy] + if crate_type == CrateType("bin"): + return "link" + link_strategy_suffix + if crate_type == CrateType("rlib"): + return "rlib" + link_strategy_suffix + if crate_type == CrateType("dylib"): + return "dylib" + link_strategy_suffix + if crate_type == CrateType("proc-macro"): + return "proc-macro" # always static_pic + if crate_type == CrateType("cdylib"): + return "cdylib" + link_strategy_suffix + if crate_type == CrateType("staticlib"): + return "staticlib" + link_strategy_suffix + + if emit == Emit("expand"): + return "expand" + + fail("unrecognized rustc action:", crate_type, link_strategy, emit) + +EmitOperation = record( + output = field(Artifact), + args = field(cmd_args), + env = field(dict[str, str]), + extra_out = field(Artifact | None), +) # Take a desired output and work out how to convince rustc to generate it def _rustc_emit( ctx: AnalysisContext, - compile_ctx: CompileContext, emit: Emit, - predeclared_outputs: dict[Emit, Artifact], subdir: str, - params: BuildParams) -> (Artifact, cmd_args, [Artifact, None]): - toolchain_info = compile_ctx.toolchain_info + params: BuildParams, + predeclared_output: Artifact | None = None) -> EmitOperation: simple_crate = attr_simple_crate_for_filenames(ctx) crate_type = params.crate_type - # Metadata for pipelining needs has enough info to be used as an input for - # dependents. To do this reliably, follow Cargo's pattern of always doing - # --emit metadata,link, but only using the output we actually need. - # - # We don't bother to do this with "codegen" crates - ie, ones which are - # linked into an artifact like binaries and dylib, since they're not used as - # a pipelined dependency input. - pipeline_artifact = toolchain_info.pipelined and \ - emit in (Emit("metadata"), Emit("link")) and \ - not crate_type_codegen(crate_type) - emit_args = cmd_args() - if emit in predeclared_outputs: - emit_output = predeclared_outputs[emit] + emit_env = {} + extra_out = None + + if predeclared_output: + emit_output = predeclared_output else: extra_hash = "-" + _metadata(ctx.label, False)[1] emit_args.add("-Cextra-filename={}".format(extra_hash)) @@ -1044,28 +1207,39 @@ def _rustc_emit( emit_output = ctx.actions.declare_output(filename) - # For pipelined builds if we're emitting either metadata or link then make - # sure we generate both and take the one we want. 
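The emit-both-and-discard scheme removed below is replaced, later in this hunk, by hollow rlibs: metadata-full builds keep --emit link but add -Zno-codegen, producing an rlib with full metadata and no object code. In terms of the resulting rustc invocations, as a sketch:

    # metadata-full (pipelined): a "hollow" rlib, same crate hash as the
    # real link step because flags and env otherwise match:
    #     rustc -Zno-codegen --emit=link=libfoo.rlib ...
    # link: the real artifact:
    #     rustc --emit=link=libfoo.rlib ...
    # metadata-fast (check-style): a plain rmeta, fastest to produce:
    #     rustc --emit=metadata=libfoo.rmeta ...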
- if pipeline_artifact: - metaext = "" if emit == Emit("metadata") else "_unwanted" - linkext = "" if emit == Emit("link") else "_unwanted" - - emit_args.add( - cmd_args("--emit=metadata=", emit_output.as_output(), metaext, delimiter = ""), - cmd_args("--emit=link=", emit_output.as_output(), linkext, delimiter = ""), - ) - elif emit == Emit("expand"): + if emit == Emit("expand"): + emit_env["RUSTC_BOOTSTRAP"] = "1" emit_args.add( "-Zunpretty=expanded", cmd_args(emit_output.as_output(), format = "-o{}"), ) else: - # Assume https://github.com/rust-lang/rust/issues/85356 is fixed (ie - # https://github.com/rust-lang/rust/pull/85362 is applied) - emit_args.add(cmd_args("--emit=", emit.value, "=", emit_output.as_output(), delimiter = "")) + # Even though the unstable flag only appears on one of the branches, we need + # an identical environment between the `-Zno-codegen` and non-`-Zno-codegen` + # command or else there are "found possibly newer version of crate" errors. + emit_env["RUSTC_BOOTSTRAP"] = "1" + + # We don't ever have metadata-only deps on codegen crates, so no need to do + # the slower thing + if emit == Emit("metadata-full") and \ + not crate_type_codegen(crate_type): + # As we're doing a pipelined build, instead of emitting an actual rmeta + # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object + # code. It should contain full information needed by any dependent + # crate which is generating code (MIR, etc). + # + # IMPORTANT: this flag is the only way that the Emit("metadata") and + # Emit("link") operations are allowed to diverge without causing them to + # get different crate hashes. + emit_args.add("-Zno-codegen") + effective_emit = "link" + elif emit == Emit("metadata-full") or emit == Emit("metadata-fast"): + effective_emit = "metadata" + else: + effective_emit = emit.value + + emit_args.add(cmd_args("--emit=", effective_emit, "=", emit_output.as_output(), delimiter = "")) - extra_out = None - if emit != Emit("expand"): # Strip file extension from directory name. 
base, _ext = paths.split_extension(output_filename(simple_crate, emit, params)) extra_dir = subdir + "/extras/" + base @@ -1078,22 +1252,32 @@ def _rustc_emit( incremental_cmd = cmd_args(incremental_out.as_output(), format = "-Cincremental={}") emit_args.add(incremental_cmd) - return (emit_output, emit_args, extra_out) + return EmitOperation( + output = emit_output, + args = emit_args, + env = emit_env, + extra_out = extra_out, + ) + +Invoke = record( + diag_txt = field(Artifact), + diag_json = field(Artifact), + build_status = field(Artifact | None), + identifier = field([str, None]), +) # Invoke rustc and capture outputs def _rustc_invoke( ctx: AnalysisContext, compile_ctx: CompileContext, + common_args: CommonArgsInfo, prefix: str, rustc_cmd: cmd_args, - diag: str, required_outputs: list[Artifact], - short_cmd: str, - is_binary: bool, + is_clippy: bool, allow_cache_upload: bool, crate_map: list[(CrateName, Label)], - env: dict[str, [ResolvedStringWithMacros, Artifact]] = {}, - only_artifact: [None, str] = None) -> (dict[str, Artifact], [Artifact, None]): + env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> Invoke: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info @@ -1105,21 +1289,20 @@ def _rustc_invoke( path_env.update(more_path_env) # Save diagnostic outputs - json_diag = ctx.actions.declare_output("{}-{}.json".format(prefix, diag)) - txt_diag = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag)) + diag = "clippy" if is_clippy else "diag" + diag_json = ctx.actions.declare_output("{}-{}.json".format(prefix, diag)) + diag_txt = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag)) compile_cmd = cmd_args( - cmd_args(json_diag.as_output(), format = "--diag-json={}"), - cmd_args(txt_diag.as_output(), format = "--diag-txt={}"), + cmd_args(diag_json.as_output(), format = "--diag-json={}"), + cmd_args(diag_txt.as_output(), format = "--diag-txt={}"), "--remap-cwd-prefix=.", "--buck-target={}".format(ctx.label.raw_target()), + hidden = [toolchain_info.compiler, compile_ctx.symlinked_srcs], ) - if only_artifact: - compile_cmd.add("--only-artifact=" + only_artifact) - for k, v in crate_map: - compile_cmd.add(crate_map_arg(ctx, compile_ctx, k, v)) + compile_cmd.add(crate_map_arg(k, v)) for k, v in plain_env.items(): compile_cmd.add(cmd_args("--env=", k, "=", v, delimiter = "")) for k, v in path_env.items(): @@ -1134,7 +1317,6 @@ def _rustc_invoke( compile_cmd.add("--required-output", out.short_path, out.as_output()) compile_cmd.add(rustc_cmd) - compile_cmd.hidden(toolchain_info.compiler, compile_ctx.symlinked_srcs) compile_cmd = _long_command( ctx = ctx, @@ -1148,21 +1330,38 @@ def _rustc_invoke( prefer_local = False if incremental_enabled: local_only = True - elif is_binary and link_cxx_binary_locally(ctx): + elif common_args.crate_type == CrateType("bin") and \ + common_args.emit == Emit("link") and \ + link_cxx_binary_locally(ctx): prefer_local = True - identifier = "{} {} [{}]".format(prefix, short_cmd, diag) + if is_clippy: + category = "clippy" + identifier = None + else: + category = "rustc" + identifier = _explain( + crate_type = common_args.crate_type, + link_strategy = common_args.params.dep_link_strategy, + emit = common_args.emit, + ) + ctx.actions.run( compile_cmd, local_only = local_only, prefer_local = prefer_local, - category = "rustc", + category = category, identifier = identifier, no_outputs_cleanup = incremental_enabled, allow_cache_upload = allow_cache_upload, ) - return ({diag + 
".json": json_diag, diag + ".txt": txt_diag}, build_status) + return Invoke( + diag_txt = diag_txt, + diag_json = diag_json, + build_status = build_status, + identifier = identifier, + ) # Our rustc and rustdoc commands can have arbitrarily large number of `--extern` # flags, so write to file to avoid hitting the platform's limit on command line @@ -1172,8 +1371,15 @@ def _long_command( exe: RunInfo, args: cmd_args, argfile_name: str) -> cmd_args: - argfile, hidden = ctx.actions.write(argfile_name, args, allow_args = True) - return cmd_args(exe, cmd_args(argfile, format = "@{}")).hidden(args, hidden) + return cmd_args( + exe, + at_argfile( + actions = ctx.actions, + name = argfile_name, + args = args, + allow_args = True, + ), + ) _DOUBLE_ESCAPED_NEWLINE_RE = regex("\\\\n") _ESCAPED_NEWLINE_RE = regex("\\n") @@ -1188,7 +1394,7 @@ _ESCAPED_NEWLINE_RE = regex("\\n") # path and non-path content, but we'll burn that bridge when we get to it.) def _process_env( compile_ctx: CompileContext, - env: dict[str, [ResolvedStringWithMacros, Artifact]], + env: dict[str, str | ResolvedStringWithMacros | Artifact], exec_is_windows: bool) -> (dict[str, cmd_args], dict[str, cmd_args]): # Values with inputs (ie artifact references). path_env = {} @@ -1205,7 +1411,13 @@ def _process_env( # Will be unescaped in rustc_action. # Variable may have "\\n" as well. # Example: \\n\n -> \\\n\n -> \\\\n\\n - plain_env[k] = v.replace_regex(_DOUBLE_ESCAPED_NEWLINE_RE, "\\\n").replace_regex(_ESCAPED_NEWLINE_RE, "\\n") + plain_env[k] = cmd_args( + v, + replace_regex = [ + (_DOUBLE_ESCAPED_NEWLINE_RE, "\\\n"), + (_ESCAPED_NEWLINE_RE, "\\n"), + ], + ) # If CARGO_MANIFEST_DIR is not already expressed in terms of $(location ...) # of some target, then interpret it as a relative path inside of the crate's diff --git a/prelude/rust/build_params.bzl b/prelude/rust/build_params.bzl index 0f6b09ebfb..f5c14909ba 100644 --- a/prelude/rust/build_params.bzl +++ b/prelude/rust/build_params.bzl @@ -9,8 +9,8 @@ load( "@prelude//linking:link_info.bzl", - "LinkStyle", - "Linkage", # @unused Used as a type + "LibOutputStyle", + "LinkStrategy", ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:expect.bzl", "expect") @@ -29,10 +29,6 @@ CrateType = enum( "staticlib", ) -# Crate type is intended for consumption by Rust code -def crate_type_rust_linkage(crate_type: CrateType) -> bool: - return crate_type.value in ("rlib", "dylib", "proc-macro") - # Crate type is intended for native linkage (eg C++) def crate_type_native_linkage(crate_type: CrateType) -> bool: return crate_type.value in ("cdylib", "staticlib") @@ -41,10 +37,6 @@ def crate_type_native_linkage(crate_type: CrateType) -> bool: def crate_type_linked(crate_type: CrateType) -> bool: return crate_type.value in ("bin", "dylib", "proc-macro", "cdylib") -# Crate type which should include transitive deps -def crate_type_transitive_deps(crate_type: CrateType) -> bool: - return crate_type.value in ("rlib", "dylib", "staticlib") # not sure about staticlib - # Crate type which should always need codegen def crate_type_codegen(crate_type: CrateType) -> bool: return crate_type_linked(crate_type) or crate_type_native_linkage(crate_type) @@ -68,24 +60,43 @@ Emit = enum( "llvm-bc", "llvm-ir", "obj", - "metadata", "link", "dep-info", "mir", "expand", # pseudo emit alias for -Zunpretty=expanded + # Rustc actually has two different forms of metadata: + # - The full flavor, which is what's outputted when passing + # `--emit link,metadata` and can be used as a part of pipelined 
builds + # - The fast flavor, which is emitted from `--emit metadata`, is faster to + # build, but cannot be used in pipelined builds. + "metadata-full", + "metadata-fast", +) + +# The different quantities of Rust metadata that can be requested from +# dependencies. Each one corresponds to an `Emit` variant, but not all `Emit` +# variants output metadata +MetadataKind = enum( + "fast", + "full", + "link", ) # Emitting this artifact generates code -def emit_needs_codegen(emit: Emit) -> bool: - return emit.value in ("asm", "llvm-bc", "llvm-ir", "obj", "link", "mir") +def dep_metadata_of_emit(emit: Emit) -> MetadataKind: + if emit.value in ("asm", "llvm-bc", "llvm-ir", "obj", "link", "mir"): + return MetadataKind("link") + elif emit.value == "metadata-fast": + return MetadataKind("fast") + else: + return MetadataKind("full") # Represents a way of invoking rustc to produce an artifact. These values are computed from # information such as the rule type, linkstyle, crate type, etc. BuildParams = record( crate_type = field(CrateType), reloc_model = field(RelocModel), - # TODO(cjhopman): Is this a LibOutputStyle or a LinkStrategy? - dep_link_style = field(LinkStyle), # what link_style to use for dependencies + dep_link_strategy = field(LinkStrategy), # A prefix and suffix to use for the name of the produced artifact. Note that although we store # these in this type, they are in principle computable from the remaining fields and the OS. # Keeping them here just turns out to be a little more convenient. @@ -95,9 +106,8 @@ BuildParams = record( RustcFlags = record( crate_type = field(CrateType), - reloc_model = field(RelocModel), - dep_link_style = field(LinkStyle), platform_to_affix = field(typing.Callable), + link_strategy = field(LinkStrategy | None), ) # Filenames used for various emitted forms @@ -107,7 +117,8 @@ _EMIT_PREFIX_SUFFIX = { Emit("llvm-bc"): ("", ".bc"), Emit("llvm-ir"): ("", ".ll"), Emit("obj"): ("", ".o"), - Emit("metadata"): ("lib", ".rmeta"), # even binaries get called 'libfoo.rmeta' + Emit("metadata-fast"): ("lib", ".rmeta"), # even binaries get called 'libfoo.rmeta' + Emit("metadata-full"): (None, None), # Hollow rlibs, so they get the same name Emit("link"): (None, None), # crate type and reloc model dependent Emit("dep-info"): ("", ".d"), Emit("mir"): (None, ".mir"), @@ -156,9 +167,8 @@ LinkageLang = enum( "native-unbundled", ) -_BINARY_SHARED = 0 -_BINARY_PIE = 1 -_BINARY_NON_PIE = 2 +_BINARY = 0 +_RUST_PROC_MACRO_RUSTDOC_TEST = 1 _NATIVE_LINKABLE_SHARED_OBJECT = 3 _RUST_DYLIB_SHARED = 4 _RUST_PROC_MACRO = 5 @@ -184,182 +194,176 @@ def _library_prefix_suffix(linker_type: str, target_os_type: OsLookup) -> (str, }[linker_type] _BUILD_PARAMS = { - _BINARY_SHARED: RustcFlags( - crate_type = CrateType("bin"), - reloc_model = RelocModel("pic"), - dep_link_style = LinkStyle("shared"), - platform_to_affix = _executable_prefix_suffix, - ), - _BINARY_PIE: RustcFlags( + _BINARY: RustcFlags( crate_type = CrateType("bin"), - reloc_model = RelocModel("pic"), - dep_link_style = LinkStyle("static_pic"), platform_to_affix = _executable_prefix_suffix, + # link_strategy is provided by the rust_binary attribute + link_strategy = None, ), - _BINARY_NON_PIE: RustcFlags( - crate_type = CrateType("bin"), - reloc_model = RelocModel("static"), - dep_link_style = LinkStyle("static"), + # It's complicated: this is a rustdoc test for a procedural macro crate. + # We need deps built as if this were a binary, while passing crate-type + # proc_macro to the rustdoc invocation. 
+ _RUST_PROC_MACRO_RUSTDOC_TEST: RustcFlags( + crate_type = CrateType("proc-macro"), platform_to_affix = _executable_prefix_suffix, + link_strategy = LinkStrategy("static_pic"), ), _NATIVE_LINKABLE_SHARED_OBJECT: RustcFlags( crate_type = CrateType("cdylib"), - reloc_model = RelocModel("pic"), - dep_link_style = LinkStyle("shared"), platform_to_affix = _library_prefix_suffix, + # cdylibs statically link all rust code and export a single C-style dylib + # for consumption by other languages + link_strategy = LinkStrategy("shared"), ), _RUST_DYLIB_SHARED: RustcFlags( crate_type = CrateType("dylib"), - reloc_model = RelocModel("pic"), - dep_link_style = LinkStyle("shared"), platform_to_affix = _library_prefix_suffix, + link_strategy = LinkStrategy("shared"), ), _RUST_PROC_MACRO: RustcFlags( crate_type = CrateType("proc-macro"), - reloc_model = RelocModel("pic"), - dep_link_style = LinkStyle("static_pic"), platform_to_affix = _library_prefix_suffix, + # FIXME(JakobDegen): It's not really clear what we should do about + # proc macros. The principled thing is probably to treat them sort + # of like a normal library, except that they always have preferred + # linkage shared? Preserve existing behavior for now + link_strategy = LinkStrategy("static_pic"), ), + # FIXME(JakobDegen): Add a comment explaining why `.a`s need reloc-strategy + # dependent names while `.rlib`s don't. _RUST_STATIC_PIC_LIBRARY: RustcFlags( crate_type = CrateType("rlib"), - reloc_model = RelocModel("pic"), - dep_link_style = LinkStyle("static_pic"), platform_to_affix = lambda _l, _t: ("lib", ".rlib"), + link_strategy = LinkStrategy("static_pic"), ), _RUST_STATIC_NON_PIC_LIBRARY: RustcFlags( crate_type = CrateType("rlib"), - reloc_model = RelocModel("static"), - dep_link_style = LinkStyle("static"), platform_to_affix = lambda _l, _t: ("lib", ".rlib"), + link_strategy = LinkStrategy("static"), ), _NATIVE_LINKABLE_STATIC_PIC: RustcFlags( crate_type = CrateType("staticlib"), - reloc_model = RelocModel("pic"), - dep_link_style = LinkStyle("static_pic"), platform_to_affix = lambda _l, _t: ("lib", "_pic.a"), + link_strategy = LinkStrategy("static_pic"), ), _NATIVE_LINKABLE_STATIC_NON_PIC: RustcFlags( crate_type = CrateType("staticlib"), - reloc_model = RelocModel("static"), - dep_link_style = LinkStyle("static"), platform_to_affix = lambda _l, _t: ("lib", ".a"), + link_strategy = LinkStrategy("static"), ), } _INPUTS = { - # Binary, shared - ("binary", False, "shared", "any", "rust"): _BINARY_SHARED, - ("binary", False, "shared", "shared", "rust"): _BINARY_SHARED, - ("binary", False, "shared", "static", "rust"): _BINARY_SHARED, - # Binary, PIE - ("binary", False, "static_pic", "any", "rust"): _BINARY_PIE, - ("binary", False, "static_pic", "shared", "rust"): _BINARY_PIE, - ("binary", False, "static_pic", "static", "rust"): _BINARY_PIE, - # Binary, non-PIE - ("binary", False, "static", "any", "rust"): _BINARY_NON_PIE, - ("binary", False, "static", "shared", "rust"): _BINARY_NON_PIE, - ("binary", False, "static", "static", "rust"): _BINARY_NON_PIE, + # Binary + ("binary", False, None, "rust"): _BINARY, + ("binary", True, None, "rust"): _RUST_PROC_MACRO_RUSTDOC_TEST, # Native linkable shared object - ("library", False, "shared", "any", "native"): _NATIVE_LINKABLE_SHARED_OBJECT, - ("library", False, "shared", "shared", "native"): _NATIVE_LINKABLE_SHARED_OBJECT, - ("library", False, "static", "shared", "native"): _NATIVE_LINKABLE_SHARED_OBJECT, - ("library", False, "static_pic", "shared", "native"): _NATIVE_LINKABLE_SHARED_OBJECT, + 
("library", False, "shared_lib", "native"): _NATIVE_LINKABLE_SHARED_OBJECT, # Native unbundled linkable shared object - ("library", False, "shared", "any", "native-unbundled"): _RUST_DYLIB_SHARED, - ("library", False, "shared", "shared", "native-unbundled"): _RUST_DYLIB_SHARED, - ("library", False, "static", "shared", "native-unbundled"): _RUST_DYLIB_SHARED, - ("library", False, "static_pic", "shared", "native-unbundled"): _RUST_DYLIB_SHARED, + ("library", False, "shared_lib", "native-unbundled"): _RUST_DYLIB_SHARED, # Rust dylib shared object - ("library", False, "shared", "any", "rust"): _RUST_DYLIB_SHARED, - ("library", False, "shared", "shared", "rust"): _RUST_DYLIB_SHARED, - ("library", False, "static", "shared", "rust"): _RUST_DYLIB_SHARED, - ("library", False, "static_pic", "shared", "rust"): _RUST_DYLIB_SHARED, + ("library", False, "shared_lib", "rust"): _RUST_DYLIB_SHARED, # Rust proc-macro - ("library", True, "shared", "any", "rust"): _RUST_PROC_MACRO, - ("library", True, "shared", "shared", "rust"): _RUST_PROC_MACRO, - ("library", True, "shared", "static", "rust"): _RUST_PROC_MACRO, - ("library", True, "static", "any", "rust"): _RUST_PROC_MACRO, - ("library", True, "static", "shared", "rust"): _RUST_PROC_MACRO, - ("library", True, "static", "static", "rust"): _RUST_PROC_MACRO, - ("library", True, "static_pic", "any", "rust"): _RUST_PROC_MACRO, - ("library", True, "static_pic", "shared", "rust"): _RUST_PROC_MACRO, - ("library", True, "static_pic", "static", "rust"): _RUST_PROC_MACRO, + ("library", True, "archive", "rust"): _RUST_PROC_MACRO, + ("library", True, "pic_archive", "rust"): _RUST_PROC_MACRO, + ("library", True, "shared_lib", "rust"): _RUST_PROC_MACRO, # Rust static_pic library - ("library", False, "shared", "static", "rust"): _RUST_STATIC_PIC_LIBRARY, - ("library", False, "static_pic", "any", "rust"): _RUST_STATIC_PIC_LIBRARY, - ("library", False, "static_pic", "static", "rust"): _RUST_STATIC_PIC_LIBRARY, + ("library", False, "pic_archive", "rust"): _RUST_STATIC_PIC_LIBRARY, # Rust static (non-pic) library - ("library", False, "static", "any", "rust"): _RUST_STATIC_NON_PIC_LIBRARY, - ("library", False, "static", "static", "rust"): _RUST_STATIC_NON_PIC_LIBRARY, + ("library", False, "archive", "rust"): _RUST_STATIC_NON_PIC_LIBRARY, # Native linkable static_pic - ("library", False, "shared", "static", "native"): _NATIVE_LINKABLE_STATIC_PIC, - ("library", False, "static_pic", "any", "native"): _NATIVE_LINKABLE_STATIC_PIC, - ("library", False, "static_pic", "static", "native"): _NATIVE_LINKABLE_STATIC_PIC, + ("library", False, "pic_archive", "native"): _NATIVE_LINKABLE_STATIC_PIC, # Native linkable static non-pic - ("library", False, "static", "any", "native"): _NATIVE_LINKABLE_STATIC_NON_PIC, - ("library", False, "static", "static", "native"): _NATIVE_LINKABLE_STATIC_NON_PIC, + ("library", False, "archive", "native"): _NATIVE_LINKABLE_STATIC_NON_PIC, # Native Unbundled static_pic library - ("library", False, "shared", "static", "native-unbundled"): _RUST_STATIC_PIC_LIBRARY, - ("library", False, "static_pic", "any", "native-unbundled"): _RUST_STATIC_PIC_LIBRARY, - ("library", False, "static_pic", "static", "native-unbundled"): _RUST_STATIC_PIC_LIBRARY, + ("library", False, "pic_archive", "native-unbundled"): _RUST_STATIC_PIC_LIBRARY, # Native Unbundled static (non-pic) library - ("library", False, "static", "any", "native-unbundled"): _RUST_STATIC_NON_PIC_LIBRARY, - ("library", False, "static", "static", "native-unbundled"): _RUST_STATIC_NON_PIC_LIBRARY, + ("library", False, 
"archive", "native-unbundled"): _RUST_STATIC_NON_PIC_LIBRARY, } # Check types of _INPUTS, writing these out as types is too verbose, but let's make sure we don't have any typos. [ - (RuleType(rule_type), LinkStyle(link_style), Linkage(preferred_linkage), LinkageLang(linkage_lang)) - for (rule_type, _, link_style, preferred_linkage, linkage_lang), _ in _INPUTS.items() + (RuleType(rule_type), LibOutputStyle(lib_output_style) if lib_output_style else None, LinkageLang(linkage_lang)) + for (rule_type, _, lib_output_style, linkage_lang), _ in _INPUTS.items() ] -def _get_flags(build_kind_key: int, target_os_type: OsLookup) -> (RustcFlags, RelocModel): - flags = _BUILD_PARAMS[build_kind_key] - - # On Windows we should always use pic reloc model. +def _get_reloc_model(link_strategy: LinkStrategy, target_os_type: OsLookup) -> RelocModel: if target_os_type.platform == "windows": - return flags, RelocModel("pic") - return flags, flags.reloc_model + return RelocModel("pic") + if link_strategy == LinkStrategy("static"): + return RelocModel("static") + return RelocModel("pic") -# Compute crate type, relocation model and name mapping given what rule we're building, -# whether its a proc-macro, linkage information and language. +# Compute crate type, relocation model and name mapping given what rule we're building, whether its +# a proc-macro, linkage information and language. +# +# Binaries should pass the link strategy and not the lib output style, while libraries should do the +# opposite. +# +# The linking information that's passed here is different from what one might expect in the C++ +# rules. There's a good reason for that, so let's go over it. First, let's recap how C++ handles +# this, as of December 2023 (I say "recap" but I don't think this is actually documented anywhere): +# +# 1. C++ libraries can be built in three different ways: Archives, pic archives, and shared +# libraries. Which one of these is used for a given link strategy is determined by the preferred +# linkage using `linking/link_info.bzl:get_lib_output_style`. +# 2. When a C++ library is built as a shared library, the link strategy used for its dependencies +# is determined by the link style attribute on the C++ library. +# 3. When a C++ library is built as an archive (either kind), there's no need to know a link +# strategy for the dependencies. None of the per-link-strategy providers of the dependencies +# need to be accessed. +# +# There are two relevant ways in which Rust differs: +# +# 1. There are more ways of building Rust libraries than are represented by `LibOutputStyle`. The +# Rust analogue is the `BuildParams` type, which implicitly holds a `LibOutputStyle` as well as +# a bunch of additional information - this is why `LibOutputStyle` is relatively rarely used +# directly in the Rust rules. +# 2. Rust does not have the property in point three above, ie building a Rust library into an +# archive does require knowing per-link-strategy properties of the dependencies. This is +# fundamental in cases without native unbundled deps - with native unbundled deps it may be +# fixable, but that's not super clear. def build_params( rule: RuleType, proc_macro: bool, - link_style: LinkStyle, - preferred_linkage: Linkage, + link_strategy: LinkStrategy | None, + lib_output_style: LibOutputStyle | None, lang: LinkageLang, linker_type: str, target_os_type: OsLookup) -> BuildParams: - if rule == RuleType("binary") and proc_macro: - # It's complicated: this is a rustdoc test for a procedural macro crate. 
- # We need deps built as if this were a binary, while passing crate-type - # proc_macro to the rustdoc invocation. - crate_type = CrateType("proc-macro") - proc_macro = False + if rule == RuleType("binary"): + expect(link_strategy != None) + expect(lib_output_style == None) else: - crate_type = None + expect(lib_output_style != None) - input = (rule.value, proc_macro, link_style.value, preferred_linkage.value, lang.value) + input = (rule.value, proc_macro, lib_output_style.value if lib_output_style else None, lang.value) expect( input in _INPUTS, - "missing case for rule_type={} proc_macro={} link_style={} preferred_linkage={} lang={}", + "missing case for rule_type={} proc_macro={} lib_output_style={} lang={}", rule, proc_macro, - link_style, - preferred_linkage, + lib_output_style, lang, ) - build_kind_key = _INPUTS[input] - flags, reloc_model = _get_flags(build_kind_key, target_os_type) + flags = _BUILD_PARAMS[_INPUTS[input]] + + # FIXME(JakobDegen): We deal with Rust needing to know the link strategy + # even for building archives by using a default link strategy specifically + # for those cases. I've gone through the code and checked all the places + # where the link strategy is used to determine that this won't do anything + # too bad, but it would be nice to enforce that more strictly or not have + # this at all. + link_strategy = link_strategy or flags.link_strategy + reloc_model = _get_reloc_model(link_strategy, target_os_type) prefix, suffix = flags.platform_to_affix(linker_type, target_os_type) return BuildParams( - crate_type = crate_type or flags.crate_type, + crate_type = flags.crate_type, reloc_model = reloc_model, - dep_link_style = flags.dep_link_style, + dep_link_strategy = link_strategy, prefix = prefix, suffix = suffix, ) diff --git a/prelude/rust/cargo_buildscript.bzl b/prelude/rust/cargo_buildscript.bzl index 08c9c8afe7..9ed4bc03d8 100644 --- a/prelude/rust/cargo_buildscript.bzl +++ b/prelude/rust/cargo_buildscript.bzl @@ -20,15 +20,21 @@ load("@prelude//:prelude.bzl", "native") load("@prelude//decls:common.bzl", "buck") -load("@prelude//linking:link_info.bzl", "LinkStyle") load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") load("@prelude//rust:targets.bzl", "targets") load("@prelude//decls/toolchains_common.bzl", "toolchains_common") load(":build.bzl", "dependency_args") -load(":build_params.bzl", "CrateType") +load(":build_params.bzl", "MetadataKind") load(":context.bzl", "DepCollectionContext") -load(":link_info.bzl", "RustProcMacroPlugin", "gather_explicit_sysroot_deps", "resolve_rust_deps_inner") +load( + ":link_info.bzl", + "DEFAULT_STATIC_LINK_STRATEGY", + "RustProcMacroPlugin", + "gather_explicit_sysroot_deps", + "resolve_rust_deps_inner", +) +load(":rust_toolchain.bzl", "PanicRuntime") def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: # Build scripts expect to receive a `rustc` which "just works." 
However, @@ -38,22 +44,23 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: explicit_sysroot_deps = toolchain_info.explicit_sysroot_deps if explicit_sysroot_deps: dep_ctx = DepCollectionContext( - native_unbundle_deps = False, + advanced_unstable_linking = False, include_doc_deps = False, is_proc_macro = False, explicit_sysroot_deps = explicit_sysroot_deps, + panic_runtime = PanicRuntime("unwind"), # not actually used ) deps = gather_explicit_sysroot_deps(dep_ctx) deps = resolve_rust_deps_inner(ctx, deps) dep_args, _ = dependency_args( - ctx, - None, # compile_ctx - deps, - "any", # subdir - CrateType("rlib"), - LinkStyle("static_pic"), - True, # is_check - False, # is_rustdoc_test + ctx = ctx, + compile_ctx = None, + toolchain_info = toolchain_info, + deps = deps, + subdir = "any", + dep_link_strategy = DEFAULT_STATIC_LINK_STRATEGY, + dep_metadata_kind = MetadataKind("full"), + is_rustdoc_test = False, ) null_path = "nul" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "/dev/null" @@ -61,7 +68,7 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: dep_args = cmd_args("-Zunstable-options", dep_args) dep_args = dep_args.relative_to(cwd) dep_file, _ = ctx.actions.write("rustc_dep_file", dep_args, allow_args = True) - sysroot_args = cmd_args("@", dep_file, delimiter = "").hidden(dep_args) + sysroot_args = cmd_args("@", dep_file, delimiter = "", hidden = dep_args) else: sysroot_args = cmd_args() @@ -84,7 +91,7 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: is_executable = True, allow_args = True, ) - return cmd_args(shim).relative_to(cwd).hidden(toolchain_info.compiler).hidden(sysroot_args) + return cmd_args(shim, relative_to = cwd, hidden = [toolchain_info.compiler, sysroot_args]) def _cargo_buildscript_impl(ctx: AnalysisContext) -> list[Provider]: toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] @@ -114,6 +121,10 @@ def _cargo_buildscript_impl(ctx: AnalysisContext) -> list[Provider]: env["RUST_BACKTRACE"] = "1" env["TARGET"] = toolchain_info.rustc_target_triple + # \037 == \x1f == the magic delimiter specified in the environment variable + # reference above. + env["CARGO_ENCODED_RUSTFLAGS"] = cmd_args(toolchain_info.rustc_flags, delimiter = "\037") + host_triple = targets.exec_triple(ctx) if host_triple: env["HOST"] = host_triple @@ -152,7 +163,7 @@ _cargo_buildscript_rule = rule( "runner": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//rust/tools:buildscript_run")), # *IMPORTANT* rustc_cfg must be a `dep` and not an `exec_dep` because # we want the `rustc --cfg` for the target platform, not the exec platform. - "rustc_cfg": attrs.default_only(attrs.dep(default = "prelude//rust/tools:rustc_cfg")), + "rustc_cfg": attrs.dep(default = "prelude//rust/tools:rustc_cfg"), "version": attrs.string(), "_exec_os_type": buck.exec_os_type_arg(), "_rust_toolchain": toolchains_common.rust(), diff --git a/prelude/rust/cargo_package.bzl b/prelude/rust/cargo_package.bzl index d9b5051071..b80b51f90c 100644 --- a/prelude/rust/cargo_package.bzl +++ b/prelude/rust/cargo_package.bzl @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# This file exports utilities for use with reindeer. +# These are not used anywhere else in prelude and are not exported as prelude globals. 
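Note on the CARGO_ENCODED_RUSTFLAGS hunk above: Cargo's documented convention for this variable is a list of flags joined by the ASCII unit separator 0x1f (octal \037), which, unlike space-separated RUSTFLAGS, lets an individual flag contain spaces. A minimal Python sketch of that contract (illustrative only, not prelude code):

def encode_rustflags(flags):
    # Join with 0x1f; a flag itself may contain spaces but not 0x1f.
    return "\x1f".join(flags)

def decode_rustflags(encoded):
    # An unset/empty value means "no flags", not a single empty flag.
    return encoded.split("\x1f") if encoded else []

flags = ["-C", "link-arg=-Wl,-rpath,/opt/my libs"]
assert decode_rustflags(encode_rustflags(flags)) == flags
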
+
 load("@prelude//:prelude.bzl", "native") load("@prelude//utils:selects.bzl", "selects") diff --git a/prelude/rust/clippy_configuration.bzl b/prelude/rust/clippy_configuration.bzl new file mode 100644 index 0000000000..74f0c3f6d7 --- /dev/null +++ b/prelude/rust/clippy_configuration.bzl @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") +load("@prelude//decls/toolchains_common.bzl", "toolchains_common") + +# Configurations for Clippy runs. +ClippyConfiguration = provider( + fields = { + "clippy_toml": provider_field(Artifact), + }, +) + +def _clippy_configuration_impl(ctx: AnalysisContext) -> list[Provider]: + toolchain_ctx = ctx.attrs._rust_toolchain[RustToolchainInfo] + toolchain_clippy_toml = toolchain_ctx.clippy_toml + + if not toolchain_clippy_toml: + clippy_toml = ctx.attrs.clippy_toml_src + else: + toml_merge_tool = ctx.attrs.toml_merge_tool + + clippy_toml = ctx.actions.declare_output("clippy.toml") + ctx.actions.run([ + toml_merge_tool[RunInfo], + cmd_args(clippy_toml.as_output(), format = "--output={}"), + cmd_args(toolchain_clippy_toml, format = "--file={}"), + cmd_args(ctx.attrs.clippy_toml_src, format = "--file={}"), + ], category = "clippy_toml_merge") + + return [ + DefaultInfo( + default_output = clippy_toml, + ), + ClippyConfiguration( + clippy_toml = clippy_toml, + ), + ] + +# Generate a Clippy configuration that is merged with the toolchain-specified +# Clippy configuration (if defined). +clippy_configuration = rule(impl = _clippy_configuration_impl, attrs = { + "clippy_toml_src": attrs.source(), + # TODO(emersonford): figure out how to store this in `_rust_toolchain` + # without causing a circular dependency on the toolchain target when + # `toml_merge_tool` is a `rust_binary`. + # + # Tool used to recursively merge multiple TOML files, e.g. for merging + # clippy.toml files. Must support taking multiple `--file <file>` flags + # as source files to merge and `--output <file>` flag to write the + # merged TOML table to. + "toml_merge_tool": attrs.exec_dep(providers = [RunInfo]), + "_rust_toolchain": toolchains_common.rust(), +}) diff --git a/prelude/rust/context.bzl b/prelude/rust/context.bzl index 84302c5be4..8c3df11d70 100644 --- a/prelude/rust/context.bzl +++ b/prelude/rust/context.bzl @@ -6,13 +6,13 @@ # of this source tree. 
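The clippy_configuration rule above delegates the actual merge to an external toml_merge_tool, whose semantics it only documents. A plausible recursive table merge, sketched in Python with plain dicts standing in for parsed TOML tables (that the later --file input wins on conflicting keys is an assumption, not something the rule guarantees):

def merge_tables(base, overlay):
    merged = dict(base)
    for key, value in overlay.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = merge_tables(merged[key], value)  # recurse into sub-tables
        else:
            merged[key] = value  # overlay wins on scalars and arrays
    return merged

toolchain_toml = {"msrv": "1.70.0", "disallowed-names": ["foo"]}
target_toml = {"msrv": "1.75.0"}
assert merge_tables(toolchain_toml, target_toml) == {
    "msrv": "1.75.0",
    "disallowed-names": ["foo"],
}
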
load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") -load("@prelude//linking:link_info.bzl", "LinkStyle") -load(":build_params.bzl", "CrateType", "Emit") -load(":rust_toolchain.bzl", "RustExplicitSysrootDeps", "RustToolchainInfo") +load("@prelude//linking:link_info.bzl", "LinkStrategy") +load(":build_params.bzl", "BuildParams", "CrateType", "Emit") +load(":rust_toolchain.bzl", "PanicRuntime", "RustExplicitSysrootDeps", "RustToolchainInfo") CrateName = record( simple = field(str), - dynamic = field([Artifact, None]), + dynamic = field(Artifact | None), ) # Struct for sharing common args between rustc and rustdoc @@ -21,23 +21,16 @@ CommonArgsInfo = record( args = field(cmd_args), subdir = field(str), tempfile = field(str), - short_cmd = field(str), + crate_type = field(CrateType), + params = field(BuildParams), + emit = field(Emit), is_check = field(bool), crate_map = field(list[(CrateName, Label)]), ) -ExternArg = record( - flags = str, - lib = field(Artifact), -) - -CrateMapArg = record( - label = field(Label), -) - # Information that determines how dependencies should be collected DepCollectionContext = record( - native_unbundle_deps = field(bool), + advanced_unstable_linking = field(bool), include_doc_deps = field(bool), # Is the target a proc-macro target? This is ignored if `include_doc_deps` # is set, since doc tests in proc macro crates are not built with @@ -45,6 +38,8 @@ DepCollectionContext = record( is_proc_macro = field(bool), # From the toolchain, if available explicit_sysroot_deps = field(RustExplicitSysrootDeps | None), + # Only needed if `advanced_unstable_linking` is set + panic_runtime = field(PanicRuntime), ) # Compile info which is reusable between multiple compilation command performed @@ -60,8 +55,7 @@ CompileContext = record( # Clippy wrapper (wrapping clippy-driver so it has the same CLI as rustc). clippy_wrapper = field(cmd_args), # Memoized common args for reuse. - common_args = field(dict[(CrateType, Emit, LinkStyle), CommonArgsInfo]), - flagfiles_for_extern = field(dict[ExternArg, Artifact]), - flagfiles_for_crate_map = field(dict[CrateMapArg, Artifact]), + common_args = field(dict[(CrateType, Emit, LinkStrategy, bool), CommonArgsInfo]), transitive_dependency_dirs = field(dict[Artifact, None]), + sysroot_args = field(cmd_args), ) diff --git a/prelude/rust/extern.bzl b/prelude/rust/extern.bzl index 443e4db852..d2702ded18 100644 --- a/prelude/rust/extern.bzl +++ b/prelude/rust/extern.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load(":context.bzl", "CompileContext", "CrateMapArg", "CrateName", "ExternArg") +load(":context.bzl", "CrateName") # Create `--extern` flag. 
For crates with a name computed during analysis: # @@ -13,48 +13,22 @@ load(":context.bzl", "CompileContext", "CrateMapArg", "CrateName", "ExternArg") # # For crates with a name computed during build: # -# --extern @extern/libPROVISIONAL +# --extern=$(cat path/to/REALNAME)=path/to/libPROVISIONAL.rlib # -# where extern/libPROVISIONAL holds a flag containing the real crate name: -# -# REALNAME=path/to/libPROVISIONAL.rlib -# -# The `compile_ctx` may be omitted for non-dynamic crate names -def extern_arg( - ctx: AnalysisContext, - compile_ctx: CompileContext | None, - flags: list[str], - crate: CrateName, - lib: Artifact) -> cmd_args: +def extern_arg(flags: list[str], crate: CrateName, lib: Artifact) -> cmd_args: if flags == []: flags = "" else: flags = ",".join(flags) + ":" if crate.dynamic: - args = ExternArg(flags = flags, lib = lib) - flagfile = compile_ctx.flagfiles_for_extern.get(args, None) - if not flagfile: - flagfile = ctx.actions.declare_output("extern/{}".format(lib.short_path)) - concat_cmd = [ - compile_ctx.toolchain_info.concat_tool, - "--output", - flagfile.as_output(), - "--", - flags, - cmd_args("@", crate.dynamic, delimiter = ""), - "=", - cmd_args(lib).ignore_artifacts(), - ] - ctx.actions.run( - concat_cmd, - category = "concat", - identifier = str(len(compile_ctx.flagfiles_for_extern)), - ) - compile_ctx.flagfiles_for_extern[args] = flagfile - return cmd_args("--extern", cmd_args("@", flagfile, delimiter = "")).hidden(lib) + # TODO: consider using `cmd_args(crate.dynamic, quote = "json")` so it + # doesn't fall apart on paths containing ')' + crate_name = cmd_args(crate.dynamic, format = "$(cat {})") else: - return cmd_args("--extern=", flags, crate.simple, "=", lib, delimiter = "") + crate_name = crate.simple + + return cmd_args("--extern=", flags, crate_name, "=", lib, delimiter = "") # Create `--crate-map` flag. 
For crates with a name computed during analysis: # @@ -62,37 +36,12 @@ def extern_arg( # # For crates with a name computed during build: # -# --crate-map @cratemap/path/to/target +# --crate-map=$(cat path/to/REALNAME)=//path/to:target # -# where cratemap/path/to/target holds a flag containing the real crate name: -# -# REALNAME=//path/to:target -# -def crate_map_arg( - ctx: AnalysisContext, - compile_ctx: CompileContext, - crate: CrateName, - label: Label) -> cmd_args: +def crate_map_arg(crate: CrateName, label: Label) -> cmd_args: if crate.dynamic: - args = CrateMapArg(label = label) - flagfile = compile_ctx.flagfiles_for_crate_map.get(args, None) - if not flagfile: - flagfile = ctx.actions.declare_output("cratemap/{}/{}/{}".format(label.cell, label.package, label.name)) - concat_cmd = [ - compile_ctx.toolchain_info.concat_tool, - "--output", - flagfile.as_output(), - "--", - cmd_args("@", crate.dynamic, delimiter = ""), - "=", - str(label.raw_target()), - ] - ctx.actions.run( - concat_cmd, - category = "cratemap", - identifier = str(len(compile_ctx.flagfiles_for_crate_map)), - ) - compile_ctx.flagfiles_for_crate_map[args] = flagfile - return cmd_args("--crate-map", cmd_args("@", flagfile, delimiter = "")) + crate_name = cmd_args(crate.dynamic, format = "$(cat {})") else: - return cmd_args("--crate-map=", crate.simple, "=", str(label.raw_target()), delimiter = "") + crate_name = crate.simple + + return cmd_args("--crate-map=", crate_name, "=", str(label.raw_target()), delimiter = "") diff --git a/prelude/rust/failure_filter.bzl b/prelude/rust/failure_filter.bzl index 7a8fa9ff31..67533c1da7 100644 --- a/prelude/rust/failure_filter.bzl +++ b/prelude/rust/failure_filter.bzl @@ -7,16 +7,6 @@ load(":context.bzl", "CompileContext") -# Inputs to the fail filter -RustFailureFilter = provider(fields = { - # Build status json - "buildstatus": typing.Any, - # Required files - "required": typing.Any, - # stderr - "stderr": typing.Any, -}) - # This creates an action which takes a buildstatus json artifact as an input, and a list of other # artifacts. If all those artifacts are present in the buildstatus as successfully generated, then # the action will succeed with those artifacts as outputs. Otherwise it fails. 
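To make the failure_filter contract above concrete: the action forwards the required artifact only when the build-status JSON marks it as successfully generated, and fails otherwise. A Python model of that gating step (the real logic lives in the toolchain's failure_filter_action; the "files" key in the JSON schema is an assumption for illustration):

import json
import shutil
import sys

def failure_filter(build_status_path, required, output):
    with open(build_status_path) as f:
        status = json.load(f)
    # Pass the required artifact through only if rustc actually produced it;
    # otherwise fail this action.
    if required not in status.get("files", []):
        sys.exit(1)
    shutil.copyfile(required, output)
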
@@ -24,19 +14,16 @@ RustFailureFilter = provider(fields = { def failure_filter( ctx: AnalysisContext, compile_ctx: CompileContext, - prefix: str, - predecl_out: [Artifact, None], - failprov: RustFailureFilter, - short_cmd: str) -> Artifact: + predeclared_output: Artifact | None, + build_status: Artifact, + required: Artifact, + stderr: Artifact, + identifier: str) -> Artifact: toolchain_info = compile_ctx.toolchain_info failure_filter_action = toolchain_info.failure_filter_action - buildstatus = failprov.buildstatus - required = failprov.required - stderr = failprov.stderr - - if predecl_out: - output = predecl_out + if predeclared_output: + output = predeclared_output else: output = ctx.actions.declare_output("out/" + required.short_path) @@ -49,9 +36,9 @@ def failure_filter( required, output.as_output(), "--build-status", - buildstatus, + build_status, ) - ctx.actions.run(cmd, category = "failure_filter", identifier = "{} {}".format(prefix, short_cmd)) + ctx.actions.run(cmd, category = "failure_filter", identifier = identifier) return output diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index f4bd612500..346590a43d 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -16,6 +16,7 @@ load( "@prelude//cxx:cxx.bzl", "get_auto_link_group_specs", ) +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", @@ -23,7 +24,6 @@ load( load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLinkInfo", # @unused Used as a type "create_link_groups", "get_filtered_labels_to_links_map", @@ -32,19 +32,29 @@ load( "get_link_group", "get_link_group_info", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", +) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) +load( + "@prelude//cxx:linker.bzl", + "get_default_shared_library_name", + "get_shared_library_name_for_param", ) load( "@prelude//linking:link_groups.bzl", "LinkGroupLib", # @unused Used as a type + "LinkGroupLibInfo", # @unused Used as a type ) load( "@prelude//linking:link_info.bzl", + "LibOutputStyle", "LinkInfo", - "LinkStyle", - "Linkage", # @unused Used as a type + "LinkStrategy", "MergedLinkInfo", "get_link_args_for_strategy", - "to_link_strategy", "unpack_external_debug_info", ) load( @@ -53,33 +63,35 @@ load( "create_linkable_graph", "get_linkable_graph_node_map_func", ) -load( - "@prelude//linking:linkables.bzl", - "linkables", -) load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", ) load( - "@prelude//utils:utils.bzl", - "filter_and_map_idx", + "@prelude//linking:types.bzl", + "Linkage", # @unused Used as a type +) +load( + ":build_params.bzl", + "MetadataKind", # @unused Used as a type ) load( ":context.bzl", "CrateName", # @unused Used as a type "DepCollectionContext", # @unused Used as a type ) +load(":rust_toolchain.bzl", "PanicRuntime", "RustToolchainInfo") -# Link style for targets which do not set an explicit `link_style` attribute. -DEFAULT_STATIC_LINK_STYLE = LinkStyle("static_pic") - -# Override dylib crates to static_pic, so that Rust code is always -# statically linked. -# In v1 we always linked Rust deps statically, even for "shared" link style -# That shouldn't be necessary, but fully shared needs some more debugging, -# so default to v1 behaviour. 
(Should be controlled with the `rust.force_rlib` option) -FORCE_RLIB = True +# Link strategy for targets which do not set an explicit `link_style` attribute. +# +# These values are also used as the defaults for check/clippy subtargets on +# libraries, and are the only way in which metadata-fast output can be built. +# +# Internally at Meta, these are a good choice for a default because they allow +# sharing work between check builds and dev mode builds, which have shared link +# strategy, and so consume their dependencies as `static_pic`. +DEFAULT_STATIC_LINK_STRATEGY = LinkStrategy("static_pic") +DEFAULT_STATIC_LIB_OUTPUT_STYLE = LibOutputStyle("pic_archive") RustProcMacroPlugin = plugins.kind() @@ -92,23 +104,17 @@ RustProcMacroMarker = provider(fields = { }) # Information which is keyed on link_style -RustLinkStyleInfo = record( - # Path to library or binary - rlib = field(Artifact), +RustLinkStrategyInfo = record( + # Path to the rlib, rmeta, dylib, etc. + outputs = field(dict[MetadataKind, Artifact]), # Transitive dependencies which are relevant to the consumer. For crate types which do not # propagate their deps (specifically proc macros), this set is empty # This does not include the proc macros, which are passed separately in `RustLinkInfo` - transitive_deps = field(dict[Artifact, CrateName]), - - # Path for library metadata (used for check or pipelining) - rmeta = field(Artifact), - # Transitive rmeta deps. This is the same dict as `transitive_deps`, except that it has the - # rmeta and not the rlib artifact - transitive_rmeta_deps = field(dict[Artifact, CrateName]), + transitive_deps = field(dict[MetadataKind, dict[Artifact, CrateName]]), transitive_proc_macro_deps = field(dict[RustProcMacroMarker, ()]), # Path to PDB file with Windows debug data. - pdb = field([Artifact, None]), + pdb = field(Artifact | None), # Debug info which is referenced -- but not included -- by the linkable rlib. external_debug_info = field(ArtifactTSet), ) @@ -119,26 +125,68 @@ RustLinkInfo = provider( fields = { # crate - crate name "crate": CrateName, - # styles - information about each LinkStyle as RustLinkStyleInfo - "styles": dict[LinkStyle, RustLinkStyleInfo], - # Propagate native linkable dependencies through rust libraries. - "exported_link_deps": typing.Any, - # Propagate native linkable info through rust libraries. - "merged_link_info": typing.Any, - # Propagate shared libraries through rust libraries. - "shared_libs": typing.Any, + # strategies - information about each LinkStrategy as RustLinkStrategyInfo + "strategies": dict[LinkStrategy, RustLinkStrategyInfo], + # Rust interacts with the native link graph in a non-standard way. Specifically, imagine we + # have a Rust library `:B` with its only one dependency `:A`, another Rust library. The Rust + # rules give Rust -> Rust dependencies special treatment, and as a result, the + # `MergedLinkInfo` provided from `:B` is not a "superset" of the `MergedLinkInfo` provided + # from `:A` (concrete differences discussed below). + # + # This distinction is implemented by effectively having each Rust library provide two sets + # of link providers. The first is the link providers used across Rust -> Rust dependency + # edges - this is what the fields below are. The second set is the one that is used by C++ + # and other non-Rust dependents, and is returned from the rule like normal. The second set + # is a superset of the first, that is it includes anything that the first link providers + # added. 
+ # + # The way in which the native link providers and Rust link providers differ depends on + # whether `advanced_unstable_linking` is set on the toolchain. + # + # * Without `advanced_unstable_linking`, the Rust `MergedLinkInfo` provided by `:A` is only + # the result of merging the `MergedLinkInfo`s from `:A`'s deps, and does not contain + # anything about `:A`. Instead, when `:B` produces the native `MergedLinkInfo`, it will + # add a single static library that bundles all transitive Rust deps, including `:A` (and + # similarly for the DSO case). + # * With `advanced_unstable_linking`, the Rust `MergedLinkInfo` provided by a `:A` does + # include a linkable from `:A`, however that linkable is always the rlib (a static + # library), regardless of `:A`'s preferred linkage or the link strategy. This matches the + # `force_rlib` behavior, in which Rust -> Rust dependency edges are always statically + # linked. The native link provider then depends on that, and only adds a linkable for the + # `shared_lib` case. TODO(pickett): Update this once force_rlib is disabled in advanced + # unstable linking + "merged_link_info": MergedLinkInfo, + "shared_libs": SharedLibraryInfo, + # Because of the weird representation of `LinkableGraph`, there is no + # correct way to merge multiple linkable graphs without adding a new + # node at the same time. So we store a list to be able to depend on more + # than one + "linkable_graphs": list[LinkableGraph], + # LinkGroupLibInfo intentionally omitted because the Rust -> Rust version + # never needs to be different from the Rust -> native version + # + # Rust currently treats all native dependencies as being exported, in + # the sense of C++ `exported_deps`. However, they are not only exported + # from the Rust library that directly depends on them, they are also + # exported through any further chains of Rust libraries. This list + # tracks those dependencies + # + # FIXME(JakobDegen): We should not default to treating all native deps + # as exported. 
+ "exported_link_deps": list[Dependency], }, ) -def _adjust_link_style_for_rust_dependencies(dep_link_style: LinkStyle) -> LinkStyle: - if FORCE_RLIB and dep_link_style == LinkStyle("shared"): - return DEFAULT_STATIC_LINK_STYLE +def _adjust_link_strategy_for_rust_dependencies(toolchain_info: RustToolchainInfo, dep_link_strategy: LinkStrategy) -> LinkStrategy: + if dep_link_strategy == LinkStrategy("shared") and not toolchain_info.advanced_unstable_linking: + return DEFAULT_STATIC_LINK_STRATEGY else: - return dep_link_style + return dep_link_strategy + +def strategy_info(toolchain_info: RustToolchainInfo, info: RustLinkInfo, dep_link_strategy: LinkStrategy) -> RustLinkStrategyInfo: + rust_dep_link_strategy = _adjust_link_strategy_for_rust_dependencies(toolchain_info, dep_link_strategy) -def style_info(info: RustLinkInfo, dep_link_style: LinkStyle) -> RustLinkStyleInfo: - rust_dep_link_style = _adjust_link_style_for_rust_dependencies(dep_link_style) - return info.styles[rust_dep_link_style] + return info.strategies[rust_dep_link_strategy] # Any dependency of a Rust crate RustOrNativeDependency = record( @@ -153,6 +201,7 @@ RustOrNativeDependency = record( RustDependency = record( info = field(RustLinkInfo), label = field(ConfiguredProvidersLabel), + dep = field(Dependency), name = field([None, str]), flags = field(list[str]), proc_macro_marker = field([None, RustProcMacroMarker]), @@ -178,14 +227,14 @@ RustCxxLinkGroupInfo = record( def enable_link_groups( ctx: AnalysisContext, - link_style: [LinkStyle, None], - specified_link_style: LinkStyle, + link_strategy: [LinkStrategy, None], + specified_link_strategy: LinkStrategy, is_binary: bool): if not (cxx_is_gnu(ctx) and is_binary): - # check minium requirements + # check minimum requirements return False - if link_style == LinkStyle("shared") or link_style != specified_link_style: - # check whether we should run link groups analysis for the given link style + if link_strategy == LinkStrategy("shared") or link_strategy != specified_link_strategy: + # check whether we should run link groups analysis for the given link strategy return False # check whether link groups is enabled @@ -228,6 +277,24 @@ def gather_explicit_sysroot_deps(dep_ctx: DepCollectionContext) -> list[RustOrNa name = None, flags = ["nounused"] + flags, )) + + # When advanced_unstable_linking is on, we only add the dep that matches the + # panic runtime. Without advanced_unstable_linking, we just let rustc deal + # with it + if explicit_sysroot_deps.panic_unwind: + if not dep_ctx.advanced_unstable_linking or dep_ctx.panic_runtime == PanicRuntime("unwind"): + out.append(RustOrNativeDependency( + dep = explicit_sysroot_deps.panic_unwind, + name = None, + flags = ["nounused"], + )) + if explicit_sysroot_deps.panic_abort: + if not dep_ctx.advanced_unstable_linking or dep_ctx.panic_runtime == PanicRuntime("abort"): + out.append(RustOrNativeDependency( + dep = explicit_sysroot_deps.panic_abort, + name = None, + flags = ["nounused"], + )) for d in explicit_sysroot_deps.others: # FIXME(JakobDegen): Ideally we would not be using `noprelude` here but # instead report these as regular transitive dependencies. However, @@ -242,12 +309,10 @@ def gather_explicit_sysroot_deps(dep_ctx: DepCollectionContext) -> list[RustOrNa def resolve_deps( ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[RustOrNativeDependency]: - # The `getattr`s are needed for when we're operating on - # `prebuilt_rust_library` rules, which don't have those attrs. 
dependencies = _do_resolve_deps( deps = ctx.attrs.deps, - named_deps = getattr(ctx.attrs, "named_deps", {}), - flagged_deps = getattr(ctx.attrs, "flagged_deps", []), + named_deps = ctx.attrs.named_deps, + flagged_deps = ctx.attrs.flagged_deps, ) if dep_ctx.include_doc_deps: @@ -279,6 +344,7 @@ def resolve_rust_deps_inner( rust_deps.append(RustDependency( info = info, label = label, + dep = dep.dep, name = dep.name, flags = dep.flags, proc_macro_marker = proc_macro_marker, @@ -294,17 +360,6 @@ def resolve_rust_deps( def get_available_proc_macros(ctx: AnalysisContext) -> dict[TargetLabel, Dependency]: return {x.label.raw_target(): x for x in ctx.plugins[RustProcMacroPlugin]} -def _create_linkable_graph( - ctx: AnalysisContext, - deps: list[Dependency]) -> LinkableGraph: - linkable_graph = create_linkable_graph( - ctx, - deps = filter(None, ( - [d.linkable_graph for d in linkables(deps)] - )), - ) - return linkable_graph - # Returns native link dependencies. def _native_link_dependencies( ctx: AnalysisContext, @@ -318,93 +373,70 @@ def _native_link_dependencies( """ first_order_deps = [dep.dep for dep in resolve_deps(ctx, dep_ctx)] - if dep_ctx.native_unbundle_deps: - return [d for d in first_order_deps if MergedLinkInfo in d] - else: - return [ - d - for d in first_order_deps - if RustLinkInfo not in d and MergedLinkInfo in d - ] - -# Returns native link dependencies. -def _native_link_infos( - ctx: AnalysisContext, - dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: - """ - Return all first-order native link infos of all transitive Rust libraries. - """ - link_deps = _native_link_dependencies(ctx, dep_ctx) - return [d[MergedLinkInfo] for d in link_deps] - -# Returns native link dependencies. -def _native_shared_lib_infos( - ctx: AnalysisContext, - dep_ctx: DepCollectionContext) -> list[SharedLibraryInfo]: - """ - Return all transitive shared libraries for non-Rust native linkabes. - - This emulates v1's graph walk, where it traverses through -- and ignores -- - Rust libraries to collect all transitive shared libraries. - """ - first_order_deps = [dep.dep for dep in resolve_deps(ctx, dep_ctx)] - - if dep_ctx.native_unbundle_deps: - return [d[SharedLibraryInfo] for d in first_order_deps if SharedLibraryInfo in d] - else: - return [ - d[SharedLibraryInfo] - for d in first_order_deps - if RustLinkInfo not in d and SharedLibraryInfo in d - ] + return [ + d + for d in first_order_deps + if RustLinkInfo not in d and MergedLinkInfo in d + ] -# Returns native link dependencies. -def _rust_link_infos( +# Returns the rust link infos for non-proc macro deps. 
+# +# This is intended to be used to access the Rust -> Rust link providers +def _rust_non_proc_macro_link_infos( ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[RustLinkInfo]: - return [d.info for d in resolve_rust_deps(ctx, dep_ctx)] - -def normalize_crate(label: str) -> str: - return label.replace("-", "_") + return [d.info for d in resolve_rust_deps(ctx, dep_ctx) if d.proc_macro_marker == None] def inherited_exported_link_deps(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[Dependency]: deps = {} for dep in _native_link_dependencies(ctx, dep_ctx): deps[dep.label] = dep - if not dep_ctx.native_unbundle_deps: - for info in _rust_link_infos(ctx, dep_ctx): - for dep in info.exported_link_deps: - deps[dep.label] = dep + for dep in resolve_rust_deps(ctx, dep_ctx): + if dep.proc_macro_marker != None: + continue + + if dep_ctx.advanced_unstable_linking: + deps[dep.label] = dep.dep + for dep in dep.info.exported_link_deps: + deps[dep.label] = dep + return deps.values() def inherited_rust_cxx_link_group_info( ctx: AnalysisContext, dep_ctx: DepCollectionContext, - link_style: [LinkStyle, None] = None) -> RustCxxLinkGroupInfo: - link_deps = inherited_exported_link_deps(ctx, dep_ctx) + link_strategy: [LinkStrategy, None] = None) -> RustCxxLinkGroupInfo: + link_graphs = inherited_linkable_graphs(ctx, dep_ctx) # Assume a rust executable wants to use link groups if a link group map # is present link_group = get_link_group(ctx) - link_group_info = get_link_group_info(ctx, filter_and_map_idx(LinkableGraph, link_deps)) + link_group_info = get_link_group_info(ctx, link_graphs) link_groups = link_group_info.groups link_group_mappings = link_group_info.mappings link_group_preferred_linkage = get_link_group_preferred_linkage(link_groups.values()) auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info) - linkable_graph = _create_linkable_graph( + linkable_graph = create_linkable_graph( ctx, - link_deps, + deps = link_graphs, ) linkable_graph_node_map = get_linkable_graph_node_map_func(linkable_graph)() executable_deps = [] - for d in link_deps: - if d.label in linkable_graph_node_map: - executable_deps.append(d.label) + for g in link_graphs: + if g.label in linkable_graph_node_map: + executable_deps.append(g.label) else: # handle labels that are mutated by version alias - executable_deps.append(d.get(LinkableGraph).nodes.value.label) + executable_deps.append(g.nodes.value.label) + + public_link_group_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + executable_deps, + link_group, + ) linked_link_groups = create_link_groups( ctx = ctx, @@ -414,11 +446,11 @@ def inherited_rust_cxx_link_group_info( executable_deps = executable_deps, linker_flags = [], link_group_specs = auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = [], prefer_stripped_objects = False, # Does Rust ever use stripped objects? 
anonymous = ctx.attrs.anonymous_link_groups, + public_nodes = public_link_group_nodes, ) auto_link_groups = {} @@ -430,17 +462,18 @@ def inherited_rust_cxx_link_group_info( link_group_libs[name] = linked_link_group.library labels_to_links_map = get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, link_group_mappings, link_group_preferred_linkage, - pic_behavior = PicBehavior("always_enabled") if link_style == LinkStyle("static_pic") else PicBehavior("supported"), + pic_behavior = PicBehavior("always_enabled") if link_strategy == LinkStrategy("static_pic") else PicBehavior("supported"), link_group_libs = { name: (lib.label, lib.shared_link_infos) for name, lib in link_group_libs.items() }, - link_strategy = to_link_strategy(link_style), + link_strategy = link_strategy, roots = executable_deps, is_executable_link = True, prefer_stripped = False, @@ -463,39 +496,62 @@ def inherited_merged_link_infos( ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: infos = [] - infos.extend(_native_link_infos(ctx, dep_ctx)) - if not dep_ctx.native_unbundle_deps: - infos.extend([d.merged_link_info for d in _rust_link_infos(ctx, dep_ctx) if d.merged_link_info]) + infos.extend([d[MergedLinkInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) + infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) return infos def inherited_shared_libs( ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[SharedLibraryInfo]: infos = [] - infos.extend(_native_shared_lib_infos(ctx, dep_ctx)) - if not dep_ctx.native_unbundle_deps: - infos.extend([d.shared_libs for d in _rust_link_infos(ctx, dep_ctx)]) + infos.extend([d[SharedLibraryInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) + infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) return infos -def inherited_external_debug_info( +def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkableGraph]: + deps = {} + for d in _native_link_dependencies(ctx, dep_ctx): + g = d.get(LinkableGraph) + if g: + deps[g.label] = g + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for g in info.linkable_graphs: + deps[g.label] = g + return deps.values() + +def inherited_link_group_lib_infos(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkGroupLibInfo]: + # There are no special Rust -> Rust versions of this provider + deps = {} + for d in resolve_deps(ctx, dep_ctx): + i = d.dep.get(LinkGroupLibInfo) + if i: + deps[d.dep.label] = i + return deps.values() + +def inherited_rust_external_debug_info( ctx: AnalysisContext, dep_ctx: DepCollectionContext, - dwo_output_directory: [Artifact, None], - dep_link_style: LinkStyle) -> ArtifactTSet: - rust_dep_link_style = _adjust_link_style_for_rust_dependencies(dep_link_style) - non_rust_dep_link_style = dep_link_style + link_strategy: LinkStrategy) -> list[ArtifactTSet]: + toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] + return [strategy_info(toolchain_info, d.info, link_strategy).external_debug_info for d in resolve_rust_deps(ctx, dep_ctx)] +def inherited_external_debug_info( + ctx: AnalysisContext, + dep_ctx: DepCollectionContext, + dwo_output_directory: Artifact | None, + dep_link_strategy: LinkStrategy) -> ArtifactTSet: inherited_debug_infos = [] inherited_link_infos = [] + toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] for d in resolve_deps(ctx, dep_ctx): if 
RustLinkInfo in d.dep: - inherited_debug_infos.append(d.dep[RustLinkInfo].styles[rust_dep_link_style].external_debug_info) + inherited_debug_infos.append(strategy_info(toolchain_info, d.dep[RustLinkInfo], dep_link_strategy).external_debug_info) inherited_link_infos.append(d.dep[RustLinkInfo].merged_link_info) elif MergedLinkInfo in d.dep: inherited_link_infos.append(d.dep[MergedLinkInfo]) - link_args = get_link_args_for_strategy(ctx, inherited_link_infos, to_link_strategy(non_rust_dep_link_style)) + link_args = get_link_args_for_strategy(ctx, inherited_link_infos, dep_link_strategy) inherited_debug_infos.append(unpack_external_debug_info(ctx.actions, link_args)) return make_artifact_tset( @@ -505,6 +561,9 @@ def inherited_external_debug_info( children = inherited_debug_infos, ) +def normalize_crate(label: str) -> str: + return label.replace("-", "_") + def attr_simple_crate_for_filenames(ctx: AnalysisContext) -> str: """ A "good enough" identifier to use in filenames. Buck wants to have filenames @@ -536,6 +595,15 @@ def attr_crate(ctx: AnalysisContext) -> CrateName: if dynamic: dynamic = dynamic.get(DefaultInfo).default_outputs[0] return CrateName( - simple = ctx.attrs.crate or normalize_crate(ctx.label.name), + simple = normalize_crate(ctx.attrs.crate or ctx.label.name), dynamic = dynamic, ) + +def attr_soname(ctx: AnalysisContext) -> str: + """ + Get the shared library name to set for the given rust library. + """ + linker_info = get_cxx_toolchain_info(ctx).linker_info + if ctx.attrs.soname != None: + return get_shared_library_name_for_param(linker_info, ctx.attrs.soname) + return get_default_shared_library_name(linker_info, ctx.label) diff --git a/prelude/rust/outputs.bzl b/prelude/rust/outputs.bzl new file mode 100644 index 0000000000..2607558ab1 --- /dev/null +++ b/prelude/rust/outputs.bzl @@ -0,0 +1,48 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load( + "@prelude//:artifact_tset.bzl", + "ArtifactTSet", # @unused Used as a type +) + +RustcOutput = record( + output = field(Artifact), + stripped_output = field(Artifact), + diag_txt = field(Artifact), + diag_json = field(Artifact), + # Only available on metadata-like emits + clippy_txt = field(Artifact | None), + clippy_json = field(Artifact | None), + pdb = field(Artifact | None), + dwp_output = field(Artifact | None), + # Zero or more Split DWARF debug info files are emitted into this directory + # with unpredictable filenames. + dwo_output_directory = field(Artifact | None), + extra_external_debug_info = field(list[ArtifactTSet]), +) + +def output_as_diag_subtargets(o: RustcOutput) -> dict[str, Artifact]: + return { + "check": o.output, + "clippy.json": o.clippy_json, + "clippy.txt": o.clippy_txt, + "diag.json": o.diag_json, + "diag.txt": o.diag_txt, + } + +# Access to additional outputs from Rust compilation. +# +# This provider is intended to be available from all rules that compile Rust +# code. As a result, it must be different from `RustLinkInfo`, since it should +# not exist on a prebuilt Rust library, but should exist on a binary. 
+RustcExtraOutputsInfo = provider( + fields = { + "metadata_fast": RustcOutput, + "metadata_full": RustcOutput, + }, +) diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index 8ef831ad1d..622d912939 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//linking:link_info.bzl", "LinkStyle") +load("@prelude//linking:link_info.bzl", "LinkStrategy") +load("@prelude//rust:build_params.bzl", "MetadataKind") load("@prelude//rust:link_info.bzl", "RustLinkInfo") def materialize(ctx, target): @@ -15,7 +16,7 @@ def materialize(ctx, target): # Ensures the srcs folder will be present return ctx.output.ensure(sources).abs_path() -def _process_target_config(ctx, target, in_workspace): +def _process_target_config(ctx, target, in_workspace, out_dir = None): # convert all source paths to absolute paths resolved_attrs = target.resolved_attrs_eager(ctx) @@ -47,6 +48,12 @@ def _process_target_config(ctx, target, in_workspace): for test in resolved_attrs.tests: tests.append(test.raw_target()) + # materialize a file containing the dynamic crate name + crate_dynamic = getattr(resolved_attrs, "crate_dynamic", None) + if crate_dynamic: + cratename_artifact = crate_dynamic.get(DefaultInfo).default_outputs[0] + crate_dynamic = ctx.output.ensure(cratename_artifact).abs_path() + # copy over the absolute paths and raw targets into the output copy = {} attrs = target.attrs_eager() @@ -61,6 +68,8 @@ def _process_target_config(ctx, target, in_workspace): copy["named_deps"] = named_deps elif k == "tests": copy["tests"] = tests + elif k == "crate_dynamic": + copy["crate_dynamic"] = crate_dynamic else: copy[k] = getattr(attrs, k) @@ -70,6 +79,9 @@ def _process_target_config(ctx, target, in_workspace): copy["project_relative_buildfile"] = ctx.fs.project_rel_path(target.buildfile_path) copy["kind"] = target.rule_type copy["in_workspace"] = in_workspace + if out_dir: + copy["out_dir"] = out_dir + return copy def cquery_deps(ctx, top_targets, workspaces, actions): @@ -96,10 +108,30 @@ def cquery_deps(ctx, top_targets, workspaces, actions): if thrift["mapped_src"] == "lib.rs": cfg["crate_root"] = thrift["artifact"] out[target.label.raw_target()] = cfg + elif "generated_protobuf_library_rust" in labels.value(): + protobuf_out_dir = materialize_generated_protobufs(ctx, target, actions, seen) + out[target.label.raw_target()] = _process_target_config(ctx, target, in_workspace, protobuf_out_dir) else: out[target.label.raw_target()] = _process_target_config(ctx, target, in_workspace) return out +def materialize_generated_protobufs(ctx, target, actions, seen): + """If `target` has a dependency that generates code from protobufs, + materialize the generated code and return the path to the output directory. 
+ """ + prost_target = target.attrs_lazy().get("named_deps").value().get("generated_prost_target") + t = prost_target.raw_target() + analysis = ctx.analysis(t) + output = analysis.providers()[DefaultInfo].default_outputs[0] + outfile = "{}/{}/{}".format(t.cell, t.package, t.name) + + if outfile in seen: + return None + seen[outfile] = () + + copied = ctx.output.ensure(actions.copy_file(outfile, output)) + return copied.abs_path() + def materialize_generated_thrift(ctx, target, actions, seen): mapped_srcs = target.attrs_lazy().get("mapped_srcs").value() built = ctx.build(mapped_srcs.keys()) @@ -116,14 +148,16 @@ def materialize_generated_thrift(ctx, target, actions, seen): else: label = label.raw_target() - copied = actions.copy_file(outfile, artifacts.artifacts()[0]) - copied = ctx.output.ensure(copied) - artifact = { - "artifact": copied.abs_path(), - "label": label, - "mapped_src": mapped_src, - } - out.append(artifact) + if len(artifacts.artifacts()) > 0: + copied = actions.copy_file(outfile, artifacts.artifacts()[0]) + copied = ctx.output.ensure(copied) + artifact = { + "artifact": copied.abs_path(), + "label": label, + "mapped_src": mapped_src, + } + out.append(artifact) + seen[outfile] = () return out @@ -138,7 +172,7 @@ def expand_proc_macros(ctx, targets): proc_macro = getattr(attrs, "proc_macro", False) if proc_macro: analysis = ctx.analysis(target) - rlib = analysis.providers()[RustLinkInfo].styles[LinkStyle("shared")].rlib + rlib = analysis.providers()[RustLinkInfo].strategies[LinkStrategy("shared")].outputs[MetadataKind("link")] label = target.label.raw_target() out[label] = {"actual": label, "dylib": ctx.output.ensure(rlib).abs_path()} return out @@ -159,6 +193,9 @@ def expand_targets(ctx, targets): workspaces = t.attrs_lazy().get("_workspaces") if workspaces: for workspace in workspaces.value(): + if not ctx.target_exists(str(workspace.raw_target())): + continue + possible_workspaces.setdefault(workspace.raw_target(), []).append(label) active_workspaces = {} @@ -197,9 +234,30 @@ def expand_and_resolve_impl(ctx): "resolved_deps": resolved_deps, }) +def resolve_owning_buildfile_impl(ctx): + owners = ctx.uquery().owner(ctx.cli_args.files) + out = {} + for owner in owners: + buildfile = "{}".format(owner.buildfile_path) + targets_in_buildfile = ctx.uquery().targets_in_buildfile(buildfile) + rust_targets = ctx.uquery().kind("^(rust_binary|rust_library|rust_test)$", targets_in_buildfile) + targets = [] + for target in rust_targets: + targets.append(target.label) + out[buildfile] = targets + + ctx.output.print_json(out) + expand_and_resolve = bxl_main( impl = expand_and_resolve_impl, cli_args = { "targets": cli_args.list(cli_args.target_expr()), }, ) + +resolve_owning_buildfile = bxl_main( + impl = resolve_owning_buildfile_impl, + cli_args = { + "files": cli_args.list(cli_args.string()), + }, +) diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index fa8fb58775..119af415e1 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -5,6 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load( + "@prelude//:artifact_tset.bzl", + "project_artifacts", +) load( "@prelude//:resources.bzl", "create_resource_db", @@ -15,6 +19,7 @@ load( "cxx_attr_deps", ) load("@prelude//cxx:cxx_link_utility.bzl", "executable_shared_lib_arguments") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( "@prelude//cxx:link_groups.bzl", "LINK_GROUP_MAPPINGS_FILENAME_SUFFIX", @@ -31,8 +36,7 @@ load( ) load( "@prelude//linking:link_info.bzl", - "LinkStyle", - "Linkage", + "LinkStrategy", ) load( "@prelude//linking:shared_libraries.bzl", @@ -42,7 +46,7 @@ load( load("@prelude//os_lookup:defs.bzl", "OsLookup") load( "@prelude//tests:re_utils.bzl", - "get_re_executor_from_props", + "get_re_executors_from_props", ) load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type load("@prelude//utils:utils.bzl", "flatten_dict") @@ -52,7 +56,6 @@ load( "compile_context", "generate_rustdoc", "rust_compile", - "rust_compile_multi", ) load( ":build_params.bzl", @@ -65,18 +68,19 @@ load( load(":context.bzl", "CompileContext") load( ":link_info.bzl", - "DEFAULT_STATIC_LINK_STYLE", + "DEFAULT_STATIC_LINK_STRATEGY", "attr_simple_crate_for_filenames", "enable_link_groups", + "inherited_external_debug_info", "inherited_rust_cxx_link_group_info", "inherited_shared_libs", ) +load(":outputs.bzl", "RustcExtraOutputsInfo", "output_as_diag_subtargets") load(":resources.bzl", "rust_attr_resources") _CompileOutputs = record( link = field(Artifact), args = field(ArgLike), - extra_targets = field(list[(str, Artifact)]), runtime_files = field(list[ArgLike]), external_debug_info = field(list[TransitiveSetArgsProjection]), sub_targets = field(dict[str, list[DefaultInfo]]), @@ -96,10 +100,10 @@ def _rust_binary_common( styles = {} dwp_target = None pdb = None - style_param = {} # style -> param + strategy_param = {} # strategy -> param sub_targets = {} - specified_link_style = LinkStyle(ctx.attrs.link_style) if ctx.attrs.link_style else DEFAULT_STATIC_LINK_STYLE + specified_link_strategy = LinkStrategy(ctx.attrs.link_style) if ctx.attrs.link_style else DEFAULT_STATIC_LINK_STRATEGY target_os_type = ctx.attrs._target_os_type[OsLookup] linker_type = compile_ctx.cxx_toolchain_info.linker_info.type @@ -110,24 +114,26 @@ def _rust_binary_common( deps = cxx_attr_deps(ctx), ).values()) - for link_style in LinkStyle: + extra_flags = toolchain_info.rustc_binary_flags + (extra_flags or []) + + for link_strategy in LinkStrategy: # Unlike for libraries, there's no possibility of different link styles # resulting in the same build params, so no need to deduplicate. params = build_params( rule = RuleType("binary"), proc_macro = False, - link_style = link_style, - preferred_linkage = Linkage("any"), + link_strategy = link_strategy, + lib_output_style = None, lang = LinkageLang("rust"), linker_type = linker_type, target_os_type = target_os_type, ) - style_param[link_style] = params - name = link_style.value + "/" + output_filename(simple_crate, Emit("link"), params) + strategy_param[link_strategy] = params + name = link_strategy.value + "/" + output_filename(simple_crate, Emit("link"), params) output = ctx.actions.declare_output(name) # Gather and setup symlink tree of transitive shared library deps. 
- shared_libs = {} + shared_libs = [] rust_cxx_link_group_info = None link_group_mappings = {} @@ -136,11 +142,11 @@ def _rust_binary_common( labels_to_links_map = {} filtered_targets = [] - if enable_link_groups(ctx, link_style, specified_link_style, is_binary = True): + if enable_link_groups(ctx, link_strategy, specified_link_strategy, is_binary = True): rust_cxx_link_group_info = inherited_rust_cxx_link_group_info( ctx, compile_ctx.dep_ctx, - link_style = link_style, + link_strategy = link_strategy, ) link_group_mappings = rust_cxx_link_group_info.link_group_info.mappings link_group_libs = rust_cxx_link_group_info.link_group_libs @@ -152,7 +158,7 @@ def _rust_binary_common( # link style. # XXX need link tree for dylib crates shlib_deps = [] - if link_style == LinkStyle("shared") or rust_cxx_link_group_info != None: + if link_strategy == LinkStrategy("shared") or rust_cxx_link_group_info != None: shlib_deps = inherited_shared_libs(ctx, compile_ctx.dep_ctx) shlib_info = merge_shared_libraries(ctx.actions, deps = shlib_deps) @@ -164,47 +170,49 @@ def _rust_binary_common( labels_to_links_map = labels_to_links_map, ) - def shlib_filter(_name, shared_lib): - return not rust_cxx_link_group_info or is_link_group_shlib(shared_lib.label, link_group_ctx) - - for soname, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[soname] = shared_lib.lib + for shlib in traverse_shared_library_info(shlib_info): + if not rust_cxx_link_group_info or is_link_group_shlib(shlib.label, link_group_ctx): + shared_libs.append(shlib) if rust_cxx_link_group_info: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + shared_libs.extend(link_group_lib.shared_libs.libraries) # link groups shared libraries link args are directly added to the link command, # we don't have to add them here executable_args = executable_shared_lib_arguments( - ctx.actions, + ctx, compile_ctx.cxx_toolchain_info, output, shared_libs, ) - extra_flags = toolchain_info.rustc_binary_flags + (extra_flags or []) - # Compile rust binary. 
- link, meta = rust_compile_multi( + link = rust_compile( ctx = ctx, compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata")], + emit = Emit("link"), params = params, - dep_link_style = link_style, default_roots = default_roots, extra_link_args = executable_args.extra_link_args, - predeclared_outputs = {Emit("link"): output}, + predeclared_output = output, extra_flags = extra_flags, - is_binary = True, allow_cache_upload = allow_cache_upload, rust_cxx_link_group_info = rust_cxx_link_group_info, ) - args = cmd_args(link.output).hidden(executable_args.runtime_files) - extra_targets = [("check", meta.output)] + meta.diag.items() + args = cmd_args(link.output, hidden = executable_args.runtime_files) + external_debug_info = project_artifacts( + actions = ctx.actions, + tsets = [inherited_external_debug_info( + ctx, + compile_ctx.dep_ctx, + link.dwo_output_directory, + link_strategy, + )], + ) # If we have some resources, write it to the resources JSON file and add # it and all resources to "runtime_files" so that we make to materialize @@ -220,88 +228,127 @@ def _rust_binary_common( for resource in resources.values(): resources_hidden.append(resource.default_output) resources_hidden.extend(resource.other_outputs) - args.hidden(resources_hidden) + args.add(cmd_args(hidden = resources_hidden)) runtime_files.extend(resources_hidden) - sub_targets_for_link_style = {} - - sub_targets_for_link_style["shared-libraries"] = [DefaultInfo( + sub_targets_for_link_strategy = {} + + # TODO(agallagher) There appears to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. + str_soname_shlibs = { + shlib.soname.ensure_str(): shlib + for shlib in shared_libs + if shlib.soname.is_str() + } + sub_targets_for_link_strategy["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( name + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], + "libraries": [ + "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) + for soname in str_soname_shlibs + ], + "librariesdwp": [ + "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) + for soname, shlib in str_soname_shlibs.items() + if shlib.lib.dwp + ], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [], }, ), sub_targets = { - name: [DefaultInfo( - default_output = lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, + soname: [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for name, lib in shared_libs.items() + for soname, shlib in str_soname_shlibs.items() }, )] if isinstance(executable_args.shared_libs_symlink_tree, Artifact): - sub_targets_for_link_style["rpath-tree"] = [DefaultInfo( + sub_targets_for_link_strategy["rpath-tree"] = [DefaultInfo( default_output = executable_args.shared_libs_symlink_tree, other_outputs = [ - lib.output - for lib in shared_libs.values() + shlib.lib.output + for shlib in shared_libs ] + [ - lib.dwp - for lib in shared_libs.values() - if lib.dwp + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp ], )] if 
rust_cxx_link_group_info: - sub_targets_for_link_style[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] + sub_targets_for_link_strategy[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] readable_mappings = {} for node, group in link_group_mappings.items(): readable_mappings[group] = readable_mappings.get(group, []) + ["{}//{}:{}".format(node.cell, node.package, node.name)] - sub_targets_for_link_style[LINK_GROUP_MAPPINGS_SUB_TARGET] = [DefaultInfo( + sub_targets_for_link_strategy[LINK_GROUP_MAPPINGS_SUB_TARGET] = [DefaultInfo( default_output = ctx.actions.write_json( name + LINK_GROUP_MAPPINGS_FILENAME_SUFFIX, readable_mappings, ), )] - styles[link_style] = _CompileOutputs( + styles[link_strategy] = _CompileOutputs( link = link.output, args = args, - extra_targets = extra_targets, runtime_files = runtime_files, - external_debug_info = executable_args.external_debug_info, - sub_targets = sub_targets_for_link_style, + external_debug_info = executable_args.external_debug_info + external_debug_info, + sub_targets = sub_targets_for_link_strategy, dist_info = DistInfo( shared_libs = shlib_info.set, nondebug_runtime_files = runtime_files, ), ) - if link_style == specified_link_style and link.dwp_output: + if link_strategy == specified_link_strategy and link.dwp_output: dwp_target = link.dwp_output - if link_style == specified_link_style and link.pdb: + if link_strategy == specified_link_strategy and link.pdb: pdb = link.pdb + meta_full = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-full"), + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], + default_roots = default_roots, + extra_flags = extra_flags, + ) + + meta_fast = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-fast"), + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], + default_roots = default_roots, + extra_flags = extra_flags, + designated_clippy = True, + ) + + providers = [RustcExtraOutputsInfo( + metadata_full = meta_full, + metadata_fast = meta_fast, + )] + + extra_meta_targets = output_as_diag_subtargets(meta_fast).items() + expand = rust_compile( ctx = ctx, compile_ctx = compile_ctx, emit = Emit("expand"), - params = style_param[DEFAULT_STATIC_LINK_STYLE], - dep_link_style = DEFAULT_STATIC_LINK_STYLE, + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], default_roots = default_roots, extra_flags = extra_flags, ) - compiled_outputs = styles[specified_link_style] - extra_compiled_targets = (compiled_outputs.extra_targets + [ + compiled_outputs = styles[specified_link_strategy] + extra_compiled_targets = (extra_meta_targets + [ ("doc", generate_rustdoc( ctx = ctx, compile_ctx = compile_ctx, - params = style_param[DEFAULT_STATIC_LINK_STYLE], + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], default_roots = default_roots, document_private_items = True, )), @@ -336,7 +383,7 @@ def _rust_binary_common( if dupmbin_toolchain: sub_targets[DUMPBIN_SUB_TARGET] = get_dumpbin_providers(ctx, compiled_outputs.link, dupmbin_toolchain) - providers = [ + providers += [ DefaultInfo( default_output = compiled_outputs.link, other_outputs = compiled_outputs.runtime_files + compiled_outputs.external_debug_info, @@ -354,7 +401,7 @@ def rust_binary_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, default_roots = ["main.rs"], extra_flags = [], - allow_cache_upload = ctx.attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), ) return 
providers + [RunInfo(args = args)] @@ -375,8 +422,8 @@ def rust_test_impl(ctx: AnalysisContext) -> list[Provider]: allow_cache_upload = False, ) - # Setup a RE executor based on the `remote_execution` param. - re_executor = get_re_executor_from_props(ctx) + # Setup RE executors based on the `remote_execution` param. + re_executor, executor_overrides = get_re_executors_from_props(ctx) return inject_test_run_info( ctx, @@ -387,6 +434,7 @@ def rust_test_impl(ctx: AnalysisContext) -> list[Provider]: labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, default_executor = re_executor, + executor_overrides = executor_overrides, run_from_project_root = True, use_project_relative_paths = True, ), diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index f05786c1f6..9d0bd88b38 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -10,21 +10,14 @@ load( "ArtifactTSet", "make_artifact_tset", ) -load("@prelude//:paths.bzl", "paths") load("@prelude//:resources.bzl", "ResourceInfo", "gather_resources") load( "@prelude//android:android_providers.bzl", "merge_android_packageable_info", ) -load( - "@prelude//cxx:cxx_context.bzl", - "get_cxx_toolchain_info", -) -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", - "get_default_shared_library_name", "get_pdb_providers", ) load( @@ -43,10 +36,8 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "LinkStyle", - "Linkage", "LinkedObject", - "MergedLinkInfo", + "MergedLinkInfo", # @unused Used as a type "SharedLibLinkable", "create_merged_link_info", "create_merged_link_info_for_propagation", @@ -56,35 +47,35 @@ load( load( "@prelude//linking:linkable_graph.bzl", "DlopenableLibraryInfo", + "LinkableGraph", # @unused Used as a type "create_linkable_graph", "create_linkable_graph_node", "create_linkable_node", ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibraryInfo", + "SharedLibraryInfo", # @unused Used as a type "create_shared_libraries", "merge_shared_libraries", ) -load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") load( ":build.bzl", - "RustcOutput", # @unused Used as a type "compile_context", "generate_rustdoc", + "generate_rustdoc_coverage", "generate_rustdoc_test", "rust_compile", - "rust_compile_multi", ) load( ":build_params.bzl", "BuildParams", # @unused Used as a type "Emit", "LinkageLang", + "MetadataKind", "RuleType", "build_params", - "crate_type_transitive_deps", ) load( ":context.bzl", @@ -94,149 +85,33 @@ load( ) load( ":link_info.bzl", - "DEFAULT_STATIC_LINK_STYLE", + "DEFAULT_STATIC_LIB_OUTPUT_STYLE", + "DEFAULT_STATIC_LINK_STRATEGY", "RustLinkInfo", - "RustLinkStyleInfo", + "RustLinkStrategyInfo", "RustProcMacroMarker", # @unused Used as a type "attr_crate", + "attr_soname", "inherited_exported_link_deps", - "inherited_external_debug_info", + "inherited_link_group_lib_infos", + "inherited_linkable_graphs", "inherited_merged_link_infos", "inherited_shared_libs", "resolve_deps", "resolve_rust_deps", - "style_info", + "strategy_info", +) +load( + ":outputs.bzl", + "RustcExtraOutputsInfo", + "RustcOutput", # @unused Used as a type + "output_as_diag_subtargets", ) load(":proc_macro_alias.bzl", "rust_proc_macro_alias") load(":resources.bzl", "rust_attr_resources") load(":rust_toolchain.bzl", "RustToolchainInfo") load(":targets.bzl", "targets") -def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: - 
providers = [] - - # Default output. - providers.append( - DefaultInfo( - default_output = ctx.attrs.rlib, - ), - ) - - rust_toolchain = ctx.attrs._rust_toolchain[RustToolchainInfo] - dep_ctx = DepCollectionContext( - native_unbundle_deps = rust_toolchain.native_unbundle_deps, - include_doc_deps = False, - is_proc_macro = False, - explicit_sysroot_deps = rust_toolchain.explicit_sysroot_deps, - ) - - # Rust link provider. - crate = attr_crate(ctx) - styles = {} - for style in LinkStyle: - dep_link_style = style - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, dep_link_style) - external_debug_info = make_artifact_tset( - actions = ctx.actions, - children = external_debug_info, - ) - styles[style] = RustLinkStyleInfo( - rlib = ctx.attrs.rlib, - transitive_deps = tdeps, - rmeta = ctx.attrs.rlib, - transitive_rmeta_deps = tmetadeps, - transitive_proc_macro_deps = tprocmacrodeps, - pdb = None, - external_debug_info = external_debug_info, - ) - - providers.append( - RustLinkInfo( - crate = crate, - styles = styles, - exported_link_deps = inherited_exported_link_deps(ctx, dep_ctx), - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_merged_link_infos(ctx, dep_ctx)), - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shared_libs(ctx, dep_ctx), - ), - ), - ) - - linker_info = get_cxx_toolchain_info(ctx).linker_info - - # Native link provier. - link = LinkInfos( - default = LinkInfo( - linkables = [ - ArchiveLinkable( - archive = Archive(artifact = ctx.attrs.rlib), - linker_type = linker_info.type, - ), - ], - ), - stripped = LinkInfo( - linkables = [ - ArchiveLinkable( - archive = Archive( - artifact = strip_debug_info( - ctx = ctx, - out = ctx.attrs.rlib.short_path, - obj = ctx.attrs.rlib, - ), - ), - linker_type = linker_info.type, - ), - ], - ), - ) - providers.append( - create_merged_link_info( - ctx, - PicBehavior("supported"), - {output_style: link for output_style in LibOutputStyle}, - exported_deps = [d[MergedLinkInfo] for d in ctx.attrs.deps], - # TODO(agallagher): This matches v1 behavior, but some of these libs - # have prebuilt DSOs which might be usable. - preferred_linkage = Linkage("static"), - ), - ) - - # Native link graph setup. - linkable_graph = create_linkable_graph( - ctx, - node = create_linkable_graph_node( - ctx, - linkable_node = create_linkable_node( - ctx = ctx, - preferred_linkage = Linkage("static"), - exported_deps = ctx.attrs.deps, - link_infos = {output_style: link for output_style in LibOutputStyle}, - default_soname = get_default_shared_library_name(linker_info, ctx.label), - ), - ), - deps = ctx.attrs.deps, - ) - providers.append(linkable_graph) - - providers.append(merge_link_group_lib_info(deps = ctx.attrs.deps)) - - # FIXME(JakobDegen): I am about 85% confident that this matches what C++ - # does for prebuilt libraries if they don't have a shared variant and have - # preferred linkage static. C++ doesn't require static preferred linkage on - # their prebuilt libraries, and so they incur extra complexity here that we - # don't have to deal with. - # - # However, Rust linking is not the same as C++ linking. If Rust were - # disciplined about its use of `LibOutputStyle`, `Linkage` and - # `LinkStrategy`, then this would at least be no more wrong than what C++ - # does. In the meantime however... 
- providers.append(SharedLibraryInfo(set = None)) - - providers.append(merge_android_packageable_info(ctx.label, ctx.actions, ctx.attrs.deps)) - - return providers - def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_context(ctx) toolchain_info = compile_ctx.toolchain_info @@ -246,39 +121,90 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # distinct kinds of build we actually need to deal with. param_lang, lang_style_param = _build_params_for_styles(ctx, compile_ctx) - artifacts = _build_library_artifacts(ctx, compile_ctx, param_lang.keys()) + # Grab the artifacts to use for the check subtargets. Picking a good + # `LibOutputStyle` ensures that the subtarget shares work with the main + # build if possible + check_params = lang_style_param[(LinkageLang("rust"), DEFAULT_STATIC_LIB_OUTPUT_STYLE)] + + meta_fast = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-fast"), + params = check_params, + default_roots = ["lib.rs"], + designated_clippy = True, + ) + # Generate the actions to build various output artifacts. Given the set of + # parameters we need, populate maps to the linkable and metadata + # artifacts by linkage lang. rust_param_artifact = {} native_param_artifact = {} - check_artifacts = None - - for params, (link, meta) in artifacts.items(): - if LinkageLang("rust") in param_lang[params]: - # Grab the check output for all kinds of builds to use - # in the check subtarget. The link style doesn't matter - # so pick the first. - if check_artifacts == None: - check_artifacts = {"check": meta.output} - check_artifacts.update(meta.diag) - - rust_param_artifact[params] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params, link, meta) - if LinkageLang("native") in param_lang[params] or LinkageLang("native-unbundled") in param_lang[params]: + for params, langs in param_lang.items(): + link = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("link"), + params = params, + default_roots = ["lib.rs"], + ) + + if LinkageLang("rust") in langs: + rust_param_artifact[params] = { + MetadataKind("link"): link, + MetadataKind("full"): rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-full"), + params = params, + default_roots = ["lib.rs"], + ), + MetadataKind("fast"): meta_fast, + } + + if LinkageLang("native") in langs or LinkageLang("native-unbundled") in langs: native_param_artifact[params] = link - # Among {rustdoc, doctests, macro expand}, doctests are the only one which - # cares about linkage. So if there is a required link style set for the - # doctests, reuse those same dependency artifacts for the other build - # outputs where static vs static_pic does not make a difference. + rust_artifacts = _rust_artifacts( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + rust_param_artifact = rust_param_artifact, + ) + + # For doctests, we need to know two things to know how to link them. The + # first is that we need a link strategy, which affects how deps of this + # target are handled + if ctx.attrs.doc_link_style: + doc_link_strategy = LinkStrategy(ctx.attrs.doc_link_style) + else: + # FIXME(JakobDegen): In this position, a binary would just fall back to + # the default link style. However, we have a little bit of additional + # information in the form of the preferred linkage that we can use to + # make a different decision. 
There's nothing technically wrong with + # that, but a comment explaining why we want to do it would be nice + doc_link_strategy = { + "any": LinkStrategy("shared"), + "shared": LinkStrategy("shared"), + "static": DEFAULT_STATIC_LINK_STRATEGY, + }[ctx.attrs.preferred_linkage] + + # The second thing we need is a lib output style of the regular, non-doctest + # version of this target that we want. Rustdoc does not handle this library + # being built in a "shared" way well, so this must be a static output style. if ctx.attrs.doc_link_style: - static_link_style = { - "shared": DEFAULT_STATIC_LINK_STYLE, - "static": LinkStyle("static"), - "static_pic": LinkStyle("static_pic"), + doc_output_style = { + "shared": DEFAULT_STATIC_LIB_OUTPUT_STYLE, + "static": LibOutputStyle("archive"), + "static_pic": LibOutputStyle("pic_archive"), }[ctx.attrs.doc_link_style] else: - static_link_style = DEFAULT_STATIC_LINK_STYLE + doc_output_style = DEFAULT_STATIC_LIB_OUTPUT_STYLE + static_library_params = lang_style_param[(LinkageLang("rust"), doc_output_style)] - static_library_params = lang_style_param[(LinkageLang("rust"), static_link_style)] + # Among {rustdoc, doctests, macro expand}, doctests are the only one which + # cares about linkage. So whatever build params we picked for the doctests, + # reuse them for the other two as well default_roots = ["lib.rs"] rustdoc = generate_rustdoc( ctx = ctx, @@ -288,25 +214,32 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: document_private_items = False, ) + rustdoc_coverage = generate_rustdoc_coverage( + ctx = ctx, + compile_ctx = compile_ctx, + params = static_library_params, + default_roots = default_roots, + ) + + expand = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("expand"), + params = static_library_params, + default_roots = default_roots, + ) + # If doctests=True or False is set on the individual target, respect that. # Otherwise look at the global setting on the toolchain. 
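+    # In either case, doctests additionally require the target triple to
+    # match the execution triple, since a cross-compiled doctest binary could
+    # not be run locally.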
doctests_enabled = \ (ctx.attrs.doctests if ctx.attrs.doctests != None else toolchain_info.doctests) and \ toolchain_info.rustc_target_triple == targets.exec_triple(ctx) - if ctx.attrs.doc_link_style: - doc_link_style = LinkStyle(ctx.attrs.doc_link_style) - else: - doc_link_style = { - "any": LinkStyle("shared"), - "shared": LinkStyle("shared"), - "static": DEFAULT_STATIC_LINK_STYLE, - }[ctx.attrs.preferred_linkage] rustdoc_test_params = build_params( rule = RuleType("binary"), proc_macro = ctx.attrs.proc_macro, - link_style = doc_link_style, - preferred_linkage = Linkage(ctx.attrs.preferred_linkage), + link_strategy = doc_link_strategy, + lib_output_style = None, lang = LinkageLang("rust"), linker_type = compile_ctx.cxx_toolchain_info.linker_info.type, target_os_type = ctx.attrs._target_os_type[OsLookup], @@ -314,46 +247,63 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: rustdoc_test = generate_rustdoc_test( ctx = ctx, compile_ctx = compile_ctx, - link_style = rustdoc_test_params.dep_link_style, - library = rust_param_artifact[static_library_params], + link_strategy = rustdoc_test_params.dep_link_strategy, + rlib = rust_param_artifact[static_library_params][MetadataKind("link")].output, params = rustdoc_test_params, default_roots = default_roots, ) - expand = rust_compile( - ctx = ctx, - compile_ctx = compile_ctx, - emit = Emit("expand"), - params = static_library_params, - dep_link_style = DEFAULT_STATIC_LINK_STYLE, - default_roots = default_roots, - ) + check_artifacts = rust_param_artifact[check_params] providers = [] - providers += _default_providers( lang_style_param = lang_style_param, - param_artifact = rust_param_artifact, + rust_param_artifact = rust_param_artifact, + native_param_artifact = native_param_artifact, rustdoc = rustdoc, rustdoc_test = rustdoc_test, doctests_enabled = doctests_enabled, - check_artifacts = check_artifacts, + check_artifacts = output_as_diag_subtargets(check_artifacts[MetadataKind("fast")]), expand = expand.output, sources = compile_ctx.symlinked_srcs, + rustdoc_coverage = rustdoc_coverage, ) - providers += _rust_providers( - ctx = ctx, - compile_ctx = compile_ctx, - lang_style_param = lang_style_param, - param_artifact = rust_param_artifact, - ) - providers += _native_providers( - ctx = ctx, - compile_ctx = compile_ctx, - lang_style_param = lang_style_param, - param_artifact = native_param_artifact, + providers += _rust_metadata_providers( + check_artifacts = check_artifacts, ) + if ctx.attrs.proc_macro: + providers += _proc_macro_link_providers( + ctx = ctx, + rust_artifacts = rust_artifacts, + ) + else: + link_infos = _link_infos( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + param_artifact = native_param_artifact, + ) + + if toolchain_info.advanced_unstable_linking: + providers += _advanced_unstable_link_providers( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + rust_artifacts = rust_artifacts, + native_param_artifact = native_param_artifact, + link_infos = link_infos, + ) + else: + providers += _stable_link_providers( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + rust_artifacts = rust_artifacts, + native_param_artifact = native_param_artifact, + link_infos = link_infos, + ) + deps = [dep.dep for dep in resolve_deps(ctx, compile_ctx.dep_ctx)] providers.append(ResourceInfo(resources = gather_resources( label = ctx.label, @@ -369,7 +319,7 @@ def _build_params_for_styles( ctx: AnalysisContext, compile_ctx: CompileContext) -> ( 
dict[BuildParams, list[LinkageLang]], - dict[(LinkageLang, LinkStyle), BuildParams], + dict[(LinkageLang, LibOutputStyle), BuildParams], ): """ For a given rule, return two things: @@ -395,12 +345,12 @@ def _build_params_for_styles( if ctx.attrs.proc_macro and linkage_lang != LinkageLang("rust"): continue - for link_style in LinkStyle: + for lib_output_style in LibOutputStyle: params = build_params( rule = RuleType("library"), proc_macro = ctx.attrs.proc_macro, - link_style = link_style, - preferred_linkage = Linkage(ctx.attrs.preferred_linkage), + link_strategy = None, + lib_output_style = lib_output_style, lang = linkage_lang, linker_type = linker_type, target_os_type = target_os_type, @@ -408,125 +358,174 @@ def _build_params_for_styles( if params not in param_lang: param_lang[params] = [] param_lang[params] = param_lang[params] + [linkage_lang] - style_param[(linkage_lang, link_style)] = params + style_param[(linkage_lang, lib_output_style)] = params return (param_lang, style_param) -def _build_library_artifacts( +def _link_infos( ctx: AnalysisContext, compile_ctx: CompileContext, - params: list[BuildParams]) -> dict[BuildParams, (RustcOutput, RustcOutput)]: - """ - Generate the actual actions to build various output artifacts. Given the set - parameters we need, return a mapping to the linkable and metadata artifacts. - """ - param_artifact = {} + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + param_artifact: dict[BuildParams, RustcOutput]) -> dict[LibOutputStyle, LinkInfos]: + if ctx.attrs.proc_macro: + # Don't need any of this for proc macros + return {} - for params in params: - dep_link_style = params.dep_link_style + advanced_unstable_linking = compile_ctx.toolchain_info.advanced_unstable_linking + lang = LinkageLang("native-unbundled") if advanced_unstable_linking else LinkageLang("native") + linker_type = compile_ctx.cxx_toolchain_info.linker_info.type - # Separate actions for each emit type - # - # In principle we don't really need metadata for C++-only artifacts, but I don't think it hurts - link, meta = rust_compile_multi( - ctx = ctx, - compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata")], - params = params, - dep_link_style = dep_link_style, - default_roots = ["lib.rs"], + link_infos = {} + for output_style in LibOutputStyle: + lib = param_artifact[lang_style_param[(lang, output_style)]] + external_debug_info = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = filter(None, [lib.dwo_output_directory]), + children = lib.extra_external_debug_info, ) + if output_style == LibOutputStyle("shared_lib"): + link_infos[output_style] = LinkInfos( + default = LinkInfo( + linkables = [SharedLibLinkable(lib = lib.output)], + external_debug_info = external_debug_info, + ), + stripped = LinkInfo( + linkables = [SharedLibLinkable(lib = lib.stripped_output)], + external_debug_info = external_debug_info, + ), + ) + else: + link_infos[output_style] = LinkInfos( + default = LinkInfo( + linkables = [ArchiveLinkable( + archive = Archive(artifact = lib.output), + linker_type = linker_type, + )], + external_debug_info = external_debug_info, + ), + stripped = LinkInfo( + linkables = [ArchiveLinkable( + archive = Archive(artifact = lib.stripped_output), + linker_type = linker_type, + )], + ), + ) + return link_infos - param_artifact[params] = (link, meta) +def _rust_artifacts( + ctx: AnalysisContext, + compile_ctx: CompileContext, + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + rust_param_artifact: 
dict[BuildParams, dict[MetadataKind, RustcOutput]]) -> dict[LinkStrategy, RustLinkStrategyInfo]: + pic_behavior = compile_ctx.cxx_toolchain_info.pic_behavior + preferred_linkage = Linkage(ctx.attrs.preferred_linkage) - return param_artifact + rust_artifacts = {} + for link_strategy in LinkStrategy: + params = lang_style_param[(LinkageLang("rust"), get_lib_output_style(link_strategy, preferred_linkage, pic_behavior))] + rust_artifacts[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, link_strategy, rust_param_artifact[params]) + return rust_artifacts def _handle_rust_artifact( ctx: AnalysisContext, dep_ctx: DepCollectionContext, - params: BuildParams, - link: RustcOutput, - meta: RustcOutput) -> RustLinkStyleInfo: + link_strategy: LinkStrategy, + outputs: dict[MetadataKind, RustcOutput]) -> RustLinkStrategyInfo: """ - Return the RustLinkInfo for a given set of artifacts. The main consideration + Return the RustLinkStrategyInfo for a given set of artifacts. The main consideration is computing the right set of dependencies. """ - dep_link_style = params.dep_link_style - # If we're a crate where our consumers should care about transitive deps, # then compute them (specifically, not proc-macro). - if crate_type_transitive_deps(params.crate_type): - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, dep_link_style) - else: - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = {}, {}, [], {} - + link_output = outputs[MetadataKind("link")] if not ctx.attrs.proc_macro: + tdeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) external_debug_info = make_artifact_tset( actions = ctx.actions, label = ctx.label, - artifacts = filter(None, [link.dwo_output_directory]), + artifacts = filter(None, [link_output.dwo_output_directory]), children = external_debug_info, ) - return RustLinkStyleInfo( - rlib = link.output, + return RustLinkStrategyInfo( + outputs = {m: x.output for m, x in outputs.items()}, transitive_deps = tdeps, - rmeta = meta.output, - transitive_rmeta_deps = tmetadeps, transitive_proc_macro_deps = tprocmacrodeps, - pdb = link.pdb, + pdb = link_output.pdb, external_debug_info = external_debug_info, ) else: # Proc macro deps are always the real thing - return RustLinkStyleInfo( - rlib = link.output, - transitive_deps = tdeps, - rmeta = link.output, - transitive_rmeta_deps = tdeps, - transitive_proc_macro_deps = tprocmacrodeps, - pdb = link.pdb, + return RustLinkStrategyInfo( + outputs = {m: link_output.output for m in MetadataKind}, + transitive_deps = {m: {} for m in MetadataKind}, + transitive_proc_macro_deps = {}, + pdb = link_output.pdb, external_debug_info = ArtifactTSet(), ) def _default_providers( - lang_style_param: dict[(LinkageLang, LinkStyle), BuildParams], - param_artifact: dict[BuildParams, RustLinkStyleInfo], + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + rust_param_artifact: dict[BuildParams, dict[MetadataKind, RustcOutput]], + native_param_artifact: dict[BuildParams, RustcOutput], rustdoc: Artifact, - rustdoc_test: (cmd_args, dict[str, cmd_args]), + rustdoc_test: cmd_args, doctests_enabled: bool, - check_artifacts: dict[str, Artifact], + check_artifacts: dict[str, Artifact | None], expand: Artifact, - sources: Artifact) -> list[Provider]: + sources: Artifact, + rustdoc_coverage: Artifact) -> list[Provider]: targets = {} targets.update(check_artifacts) targets["sources"] = sources targets["expand"] = expand targets["doc"] = rustdoc + 
targets["doc-coverage"] = rustdoc_coverage sub_targets = { k: [DefaultInfo(default_output = v)] for (k, v) in targets.items() } - # Add provider for default output, and for each link-style... - for link_style in LinkStyle: - link_style_info = param_artifact[lang_style_param[(LinkageLang("rust"), link_style)]] + # Add provider for default output, and for each lib output style... + # FIXME(JakobDegen): C++ rules only provide some of the output styles, + # determined by `get_output_styles_for_linkage` in `linking/link_info.bzl`. + # Do we want to do the same? + for output_style in LibOutputStyle: + link = rust_param_artifact[lang_style_param[(LinkageLang("rust"), output_style)]][MetadataKind("link")] nested_sub_targets = {} - if link_style_info.pdb: - nested_sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = link_style_info.pdb, binary = link_style_info.rlib) - sub_targets[link_style.value] = [DefaultInfo( - default_output = link_style_info.rlib, + if link.pdb: + nested_sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = link.pdb, binary = link.output) + + # FIXME(JakobDegen): Ideally we'd use the same + # `subtarget_for_output_style` as C++, but that uses `static-pic` + # instead of `static_pic`. Would be nice if that were consistent + name = legacy_output_style_to_link_style(output_style).value + sub_targets[name] = [DefaultInfo( + default_output = link.output, sub_targets = nested_sub_targets, )] + lang_style_for_staticlib = (LinkageLang("native"), LibOutputStyle("archive")) + if lang_style_for_staticlib in lang_style_param: + artifact = native_param_artifact[lang_style_param[lang_style_for_staticlib]] + sub_targets["staticlib"] = [DefaultInfo( + default_output = artifact.output, + )] + + lang_style_for_cdylib = (LinkageLang("native"), LibOutputStyle("shared_lib")) + if lang_style_for_cdylib in lang_style_param: + artifact = native_param_artifact[lang_style_param[lang_style_for_cdylib]] + sub_targets["cdylib"] = [DefaultInfo( + default_output = artifact.output, + )] + providers = [] - (rustdoc_cmd, rustdoc_env) = rustdoc_test rustdoc_test_info = ExternalRunnerTestInfo( type = "rustdoc", - command = [rustdoc_cmd], + command = [rustdoc_test], run_from_project_root = True, - env = rustdoc_env, ) # Always let the user run doctests via `buck2 test :crate[doc]` @@ -543,151 +542,251 @@ def _default_providers( return providers -def _rust_providers( +def _rust_metadata_providers(check_artifacts: dict[MetadataKind, RustcOutput]) -> list[Provider]: + return [ + RustcExtraOutputsInfo( + metadata_full = check_artifacts[MetadataKind("full")], + metadata_fast = check_artifacts[MetadataKind("fast")], + ), + ] + +def _proc_macro_link_providers( + ctx: AnalysisContext, + rust_artifacts: dict[LinkStrategy, RustLinkStrategyInfo]) -> list[Provider]: + # These are never accessed in the case of proc macros, so just return some dummy + # values + return [RustLinkInfo( + crate = attr_crate(ctx), + strategies = rust_artifacts, + merged_link_info = create_merged_link_info_for_propagation(ctx, []), + exported_link_deps = [], + shared_libs = merge_shared_libraries(ctx.actions), + linkable_graphs = [], + )] + +def _advanced_unstable_link_providers( ctx: AnalysisContext, compile_ctx: CompileContext, - lang_style_param: dict[(LinkageLang, LinkStyle), BuildParams], - param_artifact: dict[BuildParams, RustLinkStyleInfo]) -> list[Provider]: - """ - Return the set of providers for Rust linkage. 
- """ + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + rust_artifacts: dict[LinkStrategy, RustLinkStrategyInfo], + native_param_artifact: dict[BuildParams, RustcOutput], + link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: crate = attr_crate(ctx) + pic_behavior = compile_ctx.cxx_toolchain_info.pic_behavior + preferred_linkage = Linkage(ctx.attrs.preferred_linkage) - style_info = { - link_style: param_artifact[lang_style_param[(LinkageLang("rust"), link_style)]] - for link_style in LinkStyle - } + providers = [] - # Inherited link input and shared libraries. As in v1, this only includes - # non-Rust rules, found by walking through -- and ignoring -- Rust libraries - # to find non-Rust native linkables and libraries. - if not ctx.attrs.proc_macro: - inherited_link_deps = inherited_exported_link_deps(ctx, compile_ctx.dep_ctx) - inherited_link_infos = inherited_merged_link_infos(ctx, compile_ctx.dep_ctx) - inherited_shlibs = inherited_shared_libs(ctx, compile_ctx.dep_ctx) - else: - # proc-macros are just used by the compiler and shouldn't propagate - # their native deps to the link line of the target. - inherited_link_infos = [] - inherited_shlibs = [] - inherited_link_deps = [] + dep_ctx = compile_ctx.dep_ctx + linker_info = compile_ctx.cxx_toolchain_info.linker_info - providers = [] + inherited_link_infos = inherited_merged_link_infos(ctx, dep_ctx) + inherited_shlibs = inherited_shared_libs(ctx, dep_ctx) + inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) + inherited_exported_deps = inherited_exported_link_deps(ctx, dep_ctx) + + # Native link provider. + merged_link_info = create_merged_link_info( + ctx, + pic_behavior, + link_infos, + deps = inherited_link_infos, + exported_deps = filter(None, [d.get(MergedLinkInfo) for d in inherited_exported_deps]), + preferred_linkage = preferred_linkage, + ) + providers.append(merged_link_info) + + solibs = {} + + # Add the shared library to the list of shared libs. + shlib_name = attr_soname(ctx) + + shared_lib_params = lang_style_param[(LinkageLang("native-unbundled"), LibOutputStyle("shared_lib"))] + shared_lib_output = native_param_artifact[shared_lib_params].output + + # Only add a shared library if we generated one. + # TODO(cjhopman): This is strange. Normally (like in c++) the link_infos passed to create_merged_link_info above would only have + # a value for LibOutputStyle("shared_lib") if that were created and we could just check for that key. Given that I intend + # to remove the SharedLibraries provider, maybe just wait for that to resolve this. + if get_lib_output_style(LinkStrategy("shared"), preferred_linkage, compile_ctx.cxx_toolchain_info.pic_behavior) == LibOutputStyle("shared_lib"): + solibs[shlib_name] = LinkedObject( + output = shared_lib_output, + unstripped_output = shared_lib_output, + external_debug_info = link_infos[LibOutputStyle("shared_lib")].default.external_debug_info, + ) + + # Native shared library provider. 
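+    # (For illustration: with `preferred_linkage = "static"`, the check above
+    # resolves LinkStrategy("shared") to an archive output style, so `solibs`
+    # stays empty and only inherited shared libraries are merged below.)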
+ shared_libs = create_shared_libraries(ctx, solibs) + shared_library_info = merge_shared_libraries( + ctx.actions, + shared_libs, + inherited_shlibs, + ) + providers.append(shared_library_info) + + linkable_graph = create_linkable_graph( + ctx, + node = create_linkable_graph_node( + ctx, + linkable_node = create_linkable_node( + ctx = ctx, + preferred_linkage = preferred_linkage, + deps = inherited_graphs, + exported_deps = inherited_exported_deps, + link_infos = link_infos, + shared_libs = shared_libs, + default_soname = shlib_name, + # Link groups have a heuristic in which they assume that a + # preferred_linkage = "static" library needs to be linked + # into every single link group, instead of just one. + # Applying that same heuristic to Rust seems right, but only + # if this target actually requested that. Opt ourselves out + # if it didn't. + ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), + include_in_android_mergemap = False, # TODO(pickett): Plumb D54748362 to the macro layer + ), + ), + deps = inherited_graphs + inherited_exported_deps, + ) + + providers.append(linkable_graph) + + # Omnibus root provider. + linkable_root = create_linkable_root( + label = ctx.label, + name = shlib_name, + link_infos = LinkInfos( + default = LinkInfo( + linkables = [ArchiveLinkable( + archive = Archive( + artifact = shared_lib_output, + ), + linker_type = linker_info.type, + link_whole = True, + )], + external_debug_info = link_infos[LibOutputStyle("pic_archive")].default.external_debug_info, + ), + ), + deps = inherited_graphs, + ) + providers.append(linkable_root) + + # Mark libraries that support `dlopen`. + if getattr(ctx.attrs, "supports_python_dlopen", False): + providers.append(DlopenableLibraryInfo()) + + # We never need to add anything to this provider because Rust libraries + # cannot act as link group libs, especially given that they only support + # auto link groups anyway + providers.append(merge_link_group_lib_info(children = inherited_link_group_lib_infos(ctx, compile_ctx.dep_ctx))) # Create rust library provider. providers.append(RustLinkInfo( crate = crate, - styles = style_info, - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos), - exported_link_deps = inherited_link_deps, - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shlibs, - ), + strategies = rust_artifacts, + merged_link_info = merged_link_info, + exported_link_deps = inherited_exported_deps, + shared_libs = shared_library_info, + # We've already reported transitive deps on the inherited graphs, so for + # most purposes it would be fine to just have `linkable_graph` here. 
+ # However, link groups do an analysis that relies on each symbol + # reference having a matching edge in the link graph, and so reexports + # and generics mean that we have to report a dependency on all + # transitive Rust deps and their immediate non-Rust deps + linkable_graphs = inherited_graphs + [linkable_graph], )) return providers -def _native_providers( +def _stable_link_providers( + ctx: AnalysisContext, + compile_ctx: CompileContext, + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + native_param_artifact: dict[BuildParams, RustcOutput], + rust_artifacts: dict[LinkStrategy, RustLinkStrategyInfo], + link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: + providers = [] + + crate = attr_crate(ctx) + + merged_link_info, shared_libs, linkable_graphs, exported_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx) + + # Create rust library provider. + rust_link_info = RustLinkInfo( + crate = crate, + strategies = rust_artifacts, + merged_link_info = merged_link_info, + exported_link_deps = exported_link_deps, + shared_libs = shared_libs, + linkable_graphs = linkable_graphs, + ) + + providers.append(rust_link_info) + providers += _native_link_providers(ctx, compile_ctx, lang_style_param, native_param_artifact, link_infos, rust_link_info) + return providers + +def _rust_link_providers( + ctx: AnalysisContext, + dep_ctx: DepCollectionContext) -> ( + MergedLinkInfo, + SharedLibraryInfo, + list[LinkableGraph], + list[Dependency], +): + inherited_link_infos = inherited_merged_link_infos(ctx, dep_ctx) + inherited_shlibs = inherited_shared_libs(ctx, dep_ctx) + inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) + inherited_exported_deps = inherited_exported_link_deps(ctx, dep_ctx) + + merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) + shared_libs = merge_shared_libraries( + ctx.actions, + deps = inherited_shlibs, + ) + return (merged_link_info, shared_libs, inherited_graphs, inherited_exported_deps) + +def _native_link_providers( ctx: AnalysisContext, compile_ctx: CompileContext, - lang_style_param: dict[(LinkageLang, LinkStyle), BuildParams], - param_artifact: dict[BuildParams, RustcOutput]) -> list[Provider]: + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + param_artifact: dict[BuildParams, RustcOutput], + link_infos: dict[LibOutputStyle, LinkInfos], + rust_link_info: RustLinkInfo) -> list[Provider]: """ Return the set of providers needed to link Rust as a dependency for native (ie C/C++) code, along with relevant dependencies. """ - # If native_unbundle_deps is set on the the rust toolchain, then build this artifact - # using the "native-unbundled" linkage language. 
See LinkageLang docs for more details
-    native_unbundle_deps = compile_ctx.toolchain_info.native_unbundle_deps
-    lang = LinkageLang("native-unbundled") if native_unbundle_deps else LinkageLang("native")
+    # We collected transitive deps in the Rust link providers
+    inherited_link_infos = [rust_link_info.merged_link_info]
+    inherited_shlibs = [rust_link_info.shared_libs]
+    inherited_link_graphs = rust_link_info.linkable_graphs
+    inherited_exported_deps = rust_link_info.exported_link_deps
 
-    inherited_link_deps = inherited_exported_link_deps(ctx, compile_ctx.dep_ctx)
-    inherited_link_infos = inherited_merged_link_infos(ctx, compile_ctx.dep_ctx)
-    inherited_shlibs = inherited_shared_libs(ctx, compile_ctx.dep_ctx)
     linker_info = compile_ctx.cxx_toolchain_info.linker_info
     linker_type = linker_info.type
 
     providers = []
 
-    if ctx.attrs.proc_macro:
-        # Proc-macros never have a native form
-        return providers
-
-    # TODO(cjhopman): This seems to be conflating the link strategy with the lib output style. I tried going through
-    # lang_style_param/BuildParams and make it actually be based on LibOutputStyle, but it goes on to use that for defining
-    # how to consume dependencies and it's used for rust_binary like its own link strategy and it's unclear what's the
-    # correct behavior. For now, this preserves existing behavior without clarifying what concepts its actually
-    # operating on.
-    libraries = {}
-    link_infos = {}
-    external_debug_infos = {}
-    for output_style in LibOutputStyle:
-        legacy_link_style = legacy_output_style_to_link_style(output_style)
-        params = lang_style_param[(lang, legacy_link_style)]
-        lib = param_artifact[params]
-        libraries[output_style] = lib
-
-        external_debug_info = inherited_external_debug_info(
-            ctx = ctx,
-            dep_ctx = compile_ctx.dep_ctx,
-            dwo_output_directory = lib.dwo_output_directory,
-            dep_link_style = params.dep_link_style,
-        )
-        external_debug_infos[output_style] = external_debug_info
-
-        if output_style == LibOutputStyle("shared_lib"):
-            link_infos[output_style] = LinkInfos(
-                default = LinkInfo(
-                    linkables = [SharedLibLinkable(lib = lib.output)],
-                    external_debug_info = external_debug_info,
-                ),
-                stripped = LinkInfo(
-                    linkables = [ArchiveLinkable(
-                        archive = Archive(
-                            artifact = strip_debug_info(
-                                ctx,
-                                paths.join(output_style.value, lib.output.short_path),
-                                lib.output,
-                            ),
-                        ),
-                        linker_type = linker_type,
-                    )],
-                ),
-            )
-        else:
-            link_infos[output_style] = LinkInfos(
-                default = LinkInfo(
-                    linkables = [ArchiveLinkable(
-                        archive = Archive(artifact = lib.output),
-                        linker_type = linker_type,
-                    )],
-                    external_debug_info = external_debug_info,
-                ),
-            )
+    shared_lib_params = lang_style_param[(LinkageLang("native"), LibOutputStyle("shared_lib"))]
+    shared_lib_output = param_artifact[shared_lib_params].output
 
     preferred_linkage = Linkage(ctx.attrs.preferred_linkage)
 
-    # TODO(cjhopman): This is preserving existing behavior, but it doesn't make sense. These lists can be passed
-    # unmerged to create_merged_link_info below. Potentially that could change link order, so needs to be done more carefully.
-    merged_inherited_link = create_merged_link_info_for_propagation(ctx, inherited_link_infos)
-
     # Native link provider.
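+    # (For example, a cxx_binary that depends on this rust_library reads the
+    # MergedLinkInfo created below and selects the archive or shared-library
+    # LinkInfos that match its own link strategy and PIC behavior.)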
providers.append(create_merged_link_info( ctx, compile_ctx.cxx_toolchain_info.pic_behavior, link_infos, - exported_deps = [merged_inherited_link], + deps = inherited_link_infos, + exported_deps = filter(None, [d.get(MergedLinkInfo) for d in inherited_exported_deps]), preferred_linkage = preferred_linkage, )) solibs = {} # Add the shared library to the list of shared libs. - linker_info = compile_ctx.cxx_toolchain_info.linker_info - shlib_name = get_default_shared_library_name(linker_info, ctx.label) + shlib_name = attr_soname(ctx) # Only add a shared library if we generated one. # TODO(cjhopman): This is strange. Normally (like in c++) the link_infos passed to create_merged_link_info above would only have @@ -695,34 +794,36 @@ def _native_providers( # to remove the SharedLibraries provider, maybe just wait for that to resolve this. if get_lib_output_style(LinkStrategy("shared"), preferred_linkage, compile_ctx.cxx_toolchain_info.pic_behavior) == LibOutputStyle("shared_lib"): solibs[shlib_name] = LinkedObject( - output = libraries[LibOutputStyle("shared_lib")].output, - unstripped_output = libraries[LibOutputStyle("shared_lib")].output, - external_debug_info = external_debug_infos[LibOutputStyle("shared_lib")], + output = shared_lib_output, + unstripped_output = shared_lib_output, + external_debug_info = link_infos[LibOutputStyle("shared_lib")].default.external_debug_info, ) # Native shared library provider. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, inherited_shlibs, )) # Omnibus root provider. linkable_root = create_linkable_root( + label = ctx.label, name = shlib_name, link_infos = LinkInfos( default = LinkInfo( linkables = [ArchiveLinkable( archive = Archive( - artifact = libraries[LibOutputStyle("shared_lib")].output, + artifact = shared_lib_output, ), linker_type = linker_type, link_whole = True, )], - external_debug_info = external_debug_infos[LibOutputStyle("pic_archive")], + external_debug_info = link_infos[LibOutputStyle("pic_archive")].default.external_debug_info, ), ), - deps = inherited_link_deps, + deps = inherited_link_graphs, ) providers.append(linkable_root) @@ -737,18 +838,23 @@ def _native_providers( linkable_node = create_linkable_node( ctx = ctx, preferred_linkage = preferred_linkage, - exported_deps = inherited_link_deps, + deps = inherited_link_graphs, + exported_deps = inherited_exported_deps, link_infos = link_infos, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = shlib_name, + include_in_android_mergemap = False, ), ), - deps = inherited_link_deps, + deps = inherited_link_graphs + inherited_exported_deps, ) providers.append(linkable_graph) - providers.append(merge_link_group_lib_info(deps = inherited_link_deps)) + # We never need to add anything to this provider because Rust libraries + # cannot act as link group libs, especially given that they only support + # auto link groups anyway + providers.append(merge_link_group_lib_info(children = inherited_link_group_lib_infos(ctx, compile_ctx.dep_ctx))) return providers @@ -756,14 +862,13 @@ def _native_providers( def _compute_transitive_deps( ctx: AnalysisContext, dep_ctx: DepCollectionContext, - dep_link_style: LinkStyle) -> ( - dict[Artifact, CrateName], - dict[Artifact, CrateName], + dep_link_strategy: LinkStrategy) -> ( + dict[MetadataKind, dict[Artifact, CrateName]], list[ArtifactTSet], dict[RustProcMacroMarker, ()], ): - transitive_deps = {} - transitive_rmeta_deps = {} + 
toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] + transitive_deps = {m: {} for m in MetadataKind} external_debug_info = [] transitive_proc_macro_deps = {} @@ -773,18 +878,16 @@ def _compute_transitive_deps( # We don't want to propagate proc macros directly, and they have no transitive deps continue - style = style_info(dep.info, dep_link_style) - transitive_deps[style.rlib] = dep.info.crate - transitive_deps.update(style.transitive_deps) - - transitive_rmeta_deps[style.rmeta] = dep.info.crate - transitive_rmeta_deps.update(style.transitive_rmeta_deps) + strategy = strategy_info(toolchain_info, dep.info, dep_link_strategy) + for m in MetadataKind: + transitive_deps[m][strategy.outputs[m]] = dep.info.crate + transitive_deps[m].update(strategy.transitive_deps[m]) - external_debug_info.append(style.external_debug_info) + external_debug_info.append(strategy.external_debug_info) - transitive_proc_macro_deps.update(style.transitive_proc_macro_deps) + transitive_proc_macro_deps.update(strategy.transitive_proc_macro_deps) - return transitive_deps, transitive_rmeta_deps, external_debug_info, transitive_proc_macro_deps + return transitive_deps, external_debug_info, transitive_proc_macro_deps def rust_library_macro_wrapper(rust_library: typing.Callable) -> typing.Callable: def wrapper(**kwargs): diff --git a/prelude/rust/rust_toolchain.bzl b/prelude/rust/rust_toolchain.bzl index 1156e57a03..0e4544b8fa 100644 --- a/prelude/rust/rust_toolchain.bzl +++ b/prelude/rust/rust_toolchain.bzl @@ -22,9 +22,13 @@ RustExplicitSysrootDeps = record( core = Dependency | None, proc_macro = Dependency | None, std = Dependency | None, + panic_unwind = Dependency | None, + panic_abort = Dependency | None, others = list[Dependency], ) +PanicRuntime = enum("unwind", "abort", "none") + # FIXME(JakobDegen): These all have default values for historical reasons. Some of them certainly # should, but some of them probably shouldn't? # @unsorted-dict-items @@ -36,10 +40,11 @@ rust_toolchain_attrs = { "rustc_target_triple": provider_field(str | None, default = None), # Baseline compiler config "rustc_flags": provider_field(list[typing.Any], default = []), + # Rustc flags, except that they are applied on the command line after the + # target's rustc flags + "extra_rustc_flags": provider_field(list[typing.Any], default = []), # Extra flags when building binaries "rustc_binary_flags": provider_field(list[typing.Any], default = []), - # Extra flags for doing check builds - "rustc_check_flags": provider_field(list[typing.Any], default = []), # Extra flags for building tests "rustc_test_flags": provider_field(list[typing.Any], default = []), # Extra flags when coverage is enabled for a target @@ -48,10 +53,6 @@ rust_toolchain_attrs = { "rustc_coverage_flags": provider_field(typing.Any, default = ("-Cinstrument-coverage",)), # Extra flags for rustdoc invocations "rustdoc_flags": provider_field(list[typing.Any], default = []), - # Use rmeta for lib->lib dependencies, and only block - # linking on rlib crates. The hope is that rmeta builds - # are quick and this increases effective parallelism. - "pipelined": provider_field(bool, default = False), # When you `buck test` a library, also compile and run example code in its # documentation comments.
"doctests": provider_field(bool, default = False), @@ -73,6 +74,8 @@ rust_toolchain_attrs = { "rustc_action": provider_field(RunInfo | None, default = None), # Wrapper for rustdoc-generated test executables "rustdoc_test_with_resources": provider_field(RunInfo | None, default = None), + # Wrapper for rustdoc coverage + "rustdoc_coverage": provider_field(RunInfo | None, default = None), # Failure filter action "failure_filter_action": provider_field(RunInfo | None, default = None), # The default edition to use, if not specified. @@ -87,14 +90,39 @@ rust_toolchain_attrs = { # linking types in signatures to their definition in another crate. "extern_html_root_url_prefix": provider_field(str | None, default = None), # Utilities used for building flagfiles containing dynamic crate names - "concat_tool": provider_field(RunInfo | None, default = None), "transitive_dependency_symlinks_tool": provider_field(RunInfo | None, default = None), - # Passing true here enables the unstable feature using `rlib` format - # instead of `staticlib` when linking rust targets into native (e.g. - # C/C++) targets. - "native_unbundle_deps": provider_field(bool, default = False), + # Setting this enables additional behaviors that improves linking at the + # cost of using unstable implementation details of rustc. At the moment, + # this is only used for linking rlibs into C++/C builds, instead of using + # staticlibs, but that's expected to change. + # + # FIXME(JakobDegen): This should require `explicit_sysroot_deps` in the + # future. + "advanced_unstable_linking": provider_field(bool, default = False), + # Override the implicit sysroot with the provided Artifact containing a directory to + # a prebuilt sysroot. Will be forwarded to rustc as `--sysroot=`. Only + # one of this and `explicit_sysroot_deps` may be set. + "sysroot_path": provider_field(Artifact | None, default = None), # See the documentation on the type for details "explicit_sysroot_deps": provider_field(RustExplicitSysrootDeps | None, default = None), + # The panic runtime to use. This is a part of the target definition and is + # normally inferred by rustc. This field: + # + # - Should be set to `"none"` on nostd targets + # - Must be set correctly if `explicit_sysroot_deps` and + # `advanced_unstable_linking` are used. You can find the correct value + # for a given target triple via `rustc --print target-spec-json` + # - Otherwise can typically be safely defaulted to `"unwind"`. It is, + # however, still the preferred way of configuring `-Cpanic=abort`, since + # it makes sure that the flag is consistent across the crate graph. + # + # It's worth pointing out that the way that rustc handles this is a bit + # weird. It requires the panic runtime to be a nostd crate, despite the fact + # that it is only ever useful in combination with std. We don't impose such + # a requirement. 
+ # + # FIXME(JakobDegen): Fix `enum` so that we can set `unwind` as the default + "panic_runtime": provider_field(PanicRuntime), } RustToolchainInfo = provider(fields = rust_toolchain_attrs) diff --git a/prelude/rust/tools/BUCK.v2 b/prelude/rust/tools/BUCK.v2 index d28c450927..6a5e93f894 100644 --- a/prelude/rust/tools/BUCK.v2 +++ b/prelude/rust/tools/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load(":tool_rules.bzl", "get_rustc_cfg") +oncall("build_infra") + +source_listing() + prelude = native get_rustc_cfg( @@ -25,12 +30,6 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) -prelude.python_bootstrap_binary( - name = "concat", - main = "concat.py", - visibility = ["PUBLIC"], -) - prelude.python_bootstrap_binary( name = "transitive_dependency_symlinks", main = "transitive_dependency_symlinks.py", @@ -38,13 +37,13 @@ prelude.python_bootstrap_binary( ) prelude.python_bootstrap_binary( - name = "get_rustc_cfg", - main = "get_rustc_cfg.py", - visibility = [], + name = "buildscript_run", + main = "buildscript_run.py", + visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( - name = "buildscript_run", - main = "buildscript_run.py", + name = "rustdoc_coverage", + main = "rustdoc_coverage.py", visibility = ["PUBLIC"], ) diff --git a/prelude/rust/tools/attrs.bzl b/prelude/rust/tools/attrs.bzl index 7d4231e8f1..5e181941a1 100644 --- a/prelude/rust/tools/attrs.bzl +++ b/prelude/rust/tools/attrs.bzl @@ -12,9 +12,9 @@ def _internal_tool(default: str) -> Attr: # configurable attributes there. This list of internal tools is distracting and # expected to grow. internal_tool_attrs = { - "concat_tool": _internal_tool("prelude//rust/tools:concat"), "failure_filter_action": _internal_tool("prelude//rust/tools:failure_filter_action"), "rustc_action": _internal_tool("prelude//rust/tools:rustc_action"), + "rustdoc_coverage": _internal_tool("prelude//rust/tools:rustdoc_coverage"), "rustdoc_test_with_resources": _internal_tool("prelude//rust/tools:rustdoc_test_with_resources"), "transitive_dependency_symlinks_tool": _internal_tool("prelude//rust/tools:transitive_dependency_symlinks"), } diff --git a/prelude/rust/tools/concat.py b/prelude/rust/tools/concat.py deleted file mode 100755 index 6dfb8723fd..0000000000 --- a/prelude/rust/tools/concat.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -# A tool to concatenate strings, some of which may be from @files. ¯\_(ツ)_/¯ -# -# Rustc's command line requires dependencies to be provided as: -# -# --extern cratename=path/to/libcratename.rlib -# -# In Buck, sometimes the cratename is computed at build time, for example -# extracted from a Thrift file. Rustc's "@" support isn't sufficient for this -# because the following doesn't make sense: -# -# --extern @filecontainingcrate=path/to/libcratename.rlib -# -# and the cratename isn't able to be its own argument: -# -# --extern @filecontainingcrate =path/to/libcratename.rlib -# -# Instead we use Python to make a single file containing the dynamic cratename -# and the rlib filepath concatenated together. 
-# -# concat.py --output $TMP -- @filecontainingcrate = path/to/libcratename.rlib -# -# then: -# -# --extern @$TMP -# - -import argparse -from typing import IO, List, NamedTuple - - -class Args(NamedTuple): - output: IO[str] - strings: List[str] - - -def main(): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--output", type=argparse.FileType("w")) - parser.add_argument("strings", nargs="*", type=str) - args = Args(**vars(parser.parse_args())) - - args.output.write("".join(args.strings)) - - -if __name__ == "__main__": - main() diff --git a/prelude/rust/tools/get_rustc_cfg.py b/prelude/rust/tools/get_rustc_cfg.py deleted file mode 100755 index b2dfa85017..0000000000 --- a/prelude/rust/tools/get_rustc_cfg.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -# If https://github.com/rust-lang/rust/pull/113780 is accepted, this wrapper can -# go away. The `rule` in the bzl code should directly run rustc. -# -# cmd_args( -# toolchain_info.compiler, -# cmd_args("--print=cfg=", out.as_output(), delimiter = ""), -# cmd_args("--target=", toolchain_info.rustc_target_triple, delimiter = ""), -# ) -# -# Alternatively if `ctx.actions.run` learns to redirect stdout. Something like: -# -# ctx.actions.run( -# cmd_args(toolchain_info.compiler, ...), -# stdout = out.as_output(), -# ) -# -# or: -# -# subprocess = ctx.actions.run( -# cmd_args(toolchain_info.compiler, ...), -# ) -# return [DefaultInfo(default_output = subprocess.stdout)] - - -import argparse -import subprocess -import sys -from typing import IO, NamedTuple - - -class Args(NamedTuple): - rustc: str - target: str - out: IO[str] - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("--rustc", type=str, required=True) - parser.add_argument("--target", type=str, required=True) - parser.add_argument("--out", type=argparse.FileType("w"), required=True) - args = Args(**vars(parser.parse_args())) - - subprocess.run( - [args.rustc, "--print=cfg", f"--target={args.target}"], - stdout=args.out, - stderr=sys.stderr, - encoding="utf-8", - check=True, - ) - - -if __name__ == "__main__": - main() diff --git a/prelude/rust/tools/rustc_action.py b/prelude/rust/tools/rustc_action.py index 731f306287..bc59d76fba 100755 --- a/prelude/rust/tools/rustc_action.py +++ b/prelude/rust/tools/rustc_action.py @@ -58,7 +58,6 @@ class Args(NamedTuple): buck_target: Optional[str] failure_filter: Optional[IO[bytes]] required_output: Optional[List[Tuple[str, str]]] - only_artifact: Optional[str] rustc: List[str] @@ -119,12 +118,6 @@ def arg_parse() -> Args: help="Required output path we expect rustc to generate " "(and filled with a placeholder on a filtered failure)", ) - parser.add_argument( - "--only-artifact", - metavar="TYPE", - help="Terminate rustc after requested artifact type (metadata, link, etc) has been emitted. 
" - "(Assumes compiler is invoked with --error-format=json --json=artifacts)", - ) parser.add_argument( "rustc", nargs=argparse.REMAINDER, @@ -135,18 +128,35 @@ def arg_parse() -> Args: return Args(**vars(parser.parse_args())) +def arg_eval(arg: str) -> str: + """ + Expand an argument such as --extern=$(cat buck-out/v2/gen/foo.txt)=buck-out/dev/gen/libfoo.rlib + """ + expanded = "" + + while True: + begin = arg.find("$(cat ") + if begin == -1: + return expanded + arg + expanded += arg[:begin] + begin += len("$(cat ") + path, rest = arg[begin:].split(")", maxsplit=1) + with open(path, encoding="utf-8") as f: + expanded += f.read().strip() + arg = rest + + async def handle_output( # noqa: C901 proc: asyncio.subprocess.Process, args: Args, crate_map: Dict[str, str], -) -> Tuple[bool, bool]: +) -> bool: got_error_diag = False - shutdown = False proc_stderr = proc.stderr assert proc_stderr is not None - while not shutdown: + while True: line = await proc_stderr.readline() if line is None or line == b"": @@ -161,12 +171,7 @@ async def handle_output( # noqa: C901 if DEBUG: print(f"diag={repr(diag)}", end="\n") - # We have to sniff the shape of diag record based on what fields it has set. - if "artifact" in diag and "emit" in diag: - if diag["emit"] == args.only_artifact: - shutdown = True - continue - elif "unused_extern_names" in diag: + if "unused_extern_names" in diag: unused_names = diag["unused_extern_names"] # Empty unused_extern_names is just noise. @@ -219,7 +224,7 @@ async def handle_output( # noqa: C901 if args.diag_txt: args.diag_txt.close() - return (got_error_diag, shutdown) + return got_error_diag async def main() -> int: @@ -275,7 +280,7 @@ async def main() -> int: print(f"args {repr(args)} env {env} crate_map {crate_map}", end="\n") rustc_cmd = args.rustc[:1] - rustc_args = args.rustc[1:] + rustc_args = [arg_eval(arg) for arg in args.rustc[1:]] if args.remap_cwd_prefix is not None: rustc_args.append( @@ -305,24 +310,12 @@ async def main() -> int: stderr=subprocess.PIPE, limit=1_000_000, ) - (got_error_diag, shutdown) = await handle_output(proc, args, crate_map) - - if shutdown: - # We got what we want so shut down early - try: - proc.terminate() - except ProcessLookupError: - # The process already terminated on its own. 
- pass - await proc.wait() - res = 0 - else: - res = await proc.wait() + got_error_diag = await handle_output(proc, args, crate_map) + res = await proc.wait() if DEBUG: print( f"res={repr(res)} " - f"shutdown={shutdown} " f"got_error_diag={got_error_diag} " f"args.failure_filter {args.failure_filter}", end="\n", @@ -334,7 +327,7 @@ async def main() -> int: # Check for death by signal - this is always considered a failure if res < 0: - cmdline = " ".join(shlex.quote(arg) for arg in args.rustc) + cmdline = " ".join(shlex.quote(arg) for arg in rustc_cmd + rustc_args) eprint(f"Command exited with signal {-res}: command line: {cmdline}") elif args.failure_filter: # If failure filtering is enabled, then getting an error diagnostic is also @@ -406,4 +399,4 @@ def nix_env(env: Dict[str, str]): # There is a bug with asyncio.run() on Windows: # https://bugs.python.org/issue39232 -sys.exit(asyncio.get_event_loop().run_until_complete(main())) +sys.exit(asyncio.new_event_loop().run_until_complete(main())) diff --git a/prelude/rust/tools/rustdoc_coverage.py b/prelude/rust/tools/rustdoc_coverage.py new file mode 100755 index 0000000000..adaec15640 --- /dev/null +++ b/prelude/rust/tools/rustdoc_coverage.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import json +import re +import subprocess +from pathlib import Path +from typing import List, NamedTuple + + +class Args(NamedTuple): + out: Path + cmd: List[str] + + +def arg_parse() -> Args: + parser = argparse.ArgumentParser() + parser.add_argument( + "out", + type=Path, + help="path to output", + ) + parser.add_argument("cmd", nargs=argparse.REMAINDER, help="command to run") + return Args(**vars(parser.parse_args())) + + +_REGEX = re.compile(r"(\d+(?:\.\d+)?)") + + +def main(): + args = arg_parse() + stdout = subprocess.run(args.cmd, capture_output=True, text=True).stdout + + with open(args.out, "w") as f: + # not using json output until https://github.com/rust-lang/rust/issues/117291 is fixed + # stdout looks like... 
+ # +--------+------------+------------+------------+------------+ + # | File | Documented | Percentage | Examples | Percentage | + # +--------+------------+------------+------------+------------+ + # | foo.rs | 1 | 1.0% | 0 | 0.0% | + # | bar.rs | 2 | 2.1% | 0 | 0.0% | + # +--------+------------+------------+------------+------------+ + # | Total | 3 | 3.1% | 0 | 0.0% | + # +--------+------------+------------+------------+------------+ + total_line = stdout.splitlines()[-2] + nums = _REGEX.findall(total_line) + if len(nums) != 4: + raise Exception( + f"using regex `{_REGEX.pattern}`, expected to find 4 numbers, got {len(nums)} " + f"for line: '{total_line}'" + ) + json.dump( + { + "documented": nums[0], + "documented_percentage": nums[1], + "examples": nums[2], + "examples_percentage": nums[3], + }, + f, + ) + + +if __name__ == "__main__": + main() diff --git a/prelude/rust/tools/tool_rules.bzl b/prelude/rust/tools/tool_rules.bzl index 26cd9b391a..48c507e697 100644 --- a/prelude/rust/tools/tool_rules.bzl +++ b/prelude/rust/tools/tool_rules.bzl @@ -14,10 +14,10 @@ def _get_rustc_cfg_impl(ctx: AnalysisContext) -> list[Provider]: out = ctx.actions.declare_output("rustc.cfg") cmd = [ - ctx.attrs.get_rustc_cfg[RunInfo], - cmd_args("--rustc=", toolchain_info.compiler, delimiter = ""), + toolchain_info.compiler, + cmd_args("--print=cfg=", out.as_output(), delimiter = ""), cmd_args("--target=", toolchain_info.rustc_target_triple, delimiter = ""), - cmd_args("--out=", out.as_output(), delimiter = ""), + cmd_args("--sysroot="), # We do not need a sysroot here, and not all platforms we support have one available (e.g. mips64-unknown-linux-gnuabi64) ] ctx.actions.run(cmd, category = "rustc_cfg") @@ -27,7 +27,6 @@ def _get_rustc_cfg_impl(ctx: AnalysisContext) -> list[Provider]: get_rustc_cfg = rule( impl = _get_rustc_cfg_impl, attrs = { - "get_rustc_cfg": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//rust/tools:get_rustc_cfg")), "_rust_toolchain": toolchains_common.rust(), }, ) diff --git a/prelude/rust/tools/transitive_dependency_symlinks.py b/prelude/rust/tools/transitive_dependency_symlinks.py index 247d683fc3..77959079b6 100755 --- a/prelude/rust/tools/transitive_dependency_symlinks.py +++ b/prelude/rust/tools/transitive_dependency_symlinks.py @@ -29,22 +29,31 @@ # # transitive_dependency_symlinks.py \ # --out-dir path/to/out \ -# --artifact path/to/cratename ../../libprovisional.rlib \ -# --artifact ... +# --artifacts path/to/artifacts.json # -# The tool reads the crate name from the file at "path/to/out". Suppose it's +# The input file artifact.json is an array of pairs, each an rlib and a file +# containing a crate name for it. +# +# [ +# ["../../libprovisional.rlib", "path/to/cratename"], +# ... +# ] +# +# The tool reads the crate name from the file at "path/to/cratename". Suppose it's # "thriftgenerated". It symlinks the given artifact as "0/libthriftgenerated.rlib" # within the specified output directory. In the event of collisions, there might # be multiple dirs created, just as we do for analysis-time named crates. 
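A rough in-memory model of the collision handling described in the comment above; plain dicts stand in for the numbered `-Ldependency` directories, and the paths and crate names are hypothetical.

```python
#!/usr/bin/env python3
# Model of the collision-avoidance scheme: each rlib goes into the first
# numbered directory that does not already hold a file with its new name.
artifacts = [
    ("deps/a/libprovisional-00aa.rlib", "thriftgenerated"),
    ("deps/b/libprovisional-00aa.rlib", "thriftgenerated"),  # same crate name
]

deps_dirs = [{}]
for artifact, crate_name in artifacts:
    new_filename = "lib{}-{}".format(crate_name, artifact.rsplit("-", 1)[1])
    if new_filename in deps_dirs[-1]:
        deps_dirs.append({})  # collision: open a new directory
    deps_dirs[-1][new_filename] = artifact

for i, dir_contents in enumerate(deps_dirs):
    for name, target in dir_contents.items():
        print("{}/{} -> {}".format(i, name, target))
```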
import argparse +import json +import os from pathlib import Path -from typing import List, NamedTuple, Tuple +from typing import IO, NamedTuple class Args(NamedTuple): out_dir: Path - artifact: List[Tuple[Path, Path]] + artifacts: IO[str] def main(): @@ -55,11 +64,8 @@ def main(): required=True, ) parser.add_argument( - "--artifact", - action="append", - nargs=2, - type=Path, - metavar=("CRATENAME", "ARTIFACT"), + "--artifacts", + type=argparse.FileType(), required=True, ) args = Args(**vars(parser.parse_args())) @@ -69,9 +75,9 @@ def main(): # Add as many -Ldependency dirs as we need to avoid name conflicts deps_dirs = [{}] - for crate_name, artifact in args.artifact: - crate_name = crate_name.read_text().strip() - original_filename = artifact.name + for artifact, crate_name in json.load(args.artifacts): + crate_name = Path(crate_name).read_text().strip() + original_filename = os.path.basename(artifact) new_filename = "lib{}-{}".format( crate_name, original_filename.rsplit("-", 1)[1], diff --git a/prelude/sh_binary.bzl b/prelude/sh_binary.bzl index 679c813a45..f5d3428658 100644 --- a/prelude/sh_binary.bzl +++ b/prelude/sh_binary.bzl @@ -55,10 +55,7 @@ def _generate_script( "set -e", # This is awkward for two reasons: args doesn't support format strings # and will insert a newline between items and so __RESOURCES_ROOT - # is put in a bash array, and we want it to be relative to script's - # dir, not the script itself, but there's no way to do that in - # starlark. To deal with this, we strip the first 3 characters - # (`../`). + # is put in a bash array. "__RESOURCES_ROOT=(", resources_dir, ")", @@ -72,7 +69,7 @@ def _generate_script( # identify what the right format is. For now, this variable lets # callees disambiguate (see D28960177 for more context). "export BUCK_SH_BINARY_VERSION_UNSTABLE=2", - "export BUCK_PROJECT_ROOT=$__SCRIPT_DIR/\"${__RESOURCES_ROOT:3}\"", + "export BUCK_PROJECT_ROOT=$__SCRIPT_DIR/\"${__RESOURCES_ROOT}\"", # In buck1, the paths for resources that are outputs of rules have # different paths in BUCK_PROJECT_ROOT and # BUCK_DEFAULT_RUNTIME_RESOURCES, but we use the same paths. buck1's @@ -82,7 +79,7 @@ def _generate_script( # sources, the paths are the same for both. "export BUCK_DEFAULT_RUNTIME_RESOURCES=\"$BUCK_PROJECT_ROOT\"", "exec \"$BUCK_PROJECT_ROOT/{}\" \"$@\"".format(main_link), - ]).relative_to(script) + ]).relative_to(script, parent = 1) else: script_content = cmd_args([ "@echo off", @@ -96,11 +93,10 @@ def _generate_script( # Get parent folder. 'for %%a in ("%__SRC%") do set "__SCRIPT_DIR=%%~dpa"', "set BUCK_SH_BINARY_VERSION_UNSTABLE=2", - # ':~3' strips the first 3 chars of __RESOURCES_ROOT. - "set BUCK_PROJECT_ROOT=%__SCRIPT_DIR%\\!__RESOURCES_ROOT:~3!", + "set BUCK_PROJECT_ROOT=%__SCRIPT_DIR%\\%__RESOURCES_ROOT%", "set BUCK_DEFAULT_RUNTIME_RESOURCES=%BUCK_PROJECT_ROOT%", "%BUCK_PROJECT_ROOT%\\{} %*".format(main_link), - ]).relative_to(script) + ]).relative_to(script, parent = 1) actions.write( script, script_content, @@ -128,11 +124,13 @@ def sh_binary_impl(ctx): is_windows, ) + script = script.with_associated_artifacts([resources_dir]) + return [ DefaultInfo(default_output = script, other_outputs = [resources_dir]), RunInfo( # TODO(cjhopman): Figure out if we need to specify the link targets # as inputs. We shouldn't need to, but need to verify it. 
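The switch to `relative_to(script, parent = 1)` above is what retires the old strip-three-characters hack: resolving the resources dir against the script's directory rather than the script file itself drops the leading `../`. A small sketch with hypothetical paths shows the difference.

```python
#!/usr/bin/env python3
# Why `relative_to(script, parent = 1)` removes the need to strip "../".
import os

script = "out/app/script.sh"
resources_dir = "out/app/script.sh__resources"

# Relative to the script file: a leading "../" that used to be stripped.
print(os.path.relpath(resources_dir, script))
# Relative to the script's parent directory: no stripping needed.
print(os.path.relpath(resources_dir, os.path.dirname(script)))
```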
- args = cmd_args(script).hidden(resources_dir), + args = cmd_args(script, hidden = resources_dir), ), ] diff --git a/prelude/sh_test.bzl b/prelude/sh_test.bzl index d5b4c5b4f8..9bb08fabdd 100644 --- a/prelude/sh_test.bzl +++ b/prelude/sh_test.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//tests:re_utils.bzl", "get_re_executor_from_props") +load("@prelude//tests:re_utils.bzl", "get_re_executors_from_props") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: @@ -14,22 +14,24 @@ def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs.list_args or ctx.attrs.list_env or ctx.attrs.run_args or ctx.attrs.run_env: fail("An unsupported attribute was passed") - args = cmd_args() + args_args = [] + args_hidden = [] if ctx.attrs.test != None: if type(ctx.attrs.test) == "artifact": - args.add(ctx.attrs.test) + args_args.append(ctx.attrs.test) elif isinstance(ctx.attrs.test, Dependency): run_info = ctx.attrs.test.get(RunInfo) if run_info != None: - args.add(run_info.args) + args_args.append(run_info.args) else: info = ctx.attrs.test[DefaultInfo] - args.add(info.default_outputs).hidden(info.other_outputs) + args_args.append(info.default_outputs) + args_hidden.append(info.other_outputs) else: fail("Unexpected type for test attribute") - args.hidden(ctx.attrs.resources) + args_hidden.append(ctx.attrs.resources) deps = [] for dep in ctx.attrs.deps: @@ -37,15 +39,17 @@ def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: deps.extend(info.default_outputs) deps.extend(info.other_outputs) - args.hidden(deps) + args_hidden.append(deps) + + args = cmd_args(args_args, hidden = args_hidden) command = [args] + ctx.attrs.args # Setup a RE executor based on the `remote_execution` param. 
- re_executor = get_re_executor_from_props(ctx) + re_executor, executor_overrides = get_re_executors_from_props(ctx) # We implicitly make the target run from the project root if remote - # excution options were specified + # execution options were specified run_from_project_root = "buck2_run_from_project_root" in (ctx.attrs.labels or []) or re_executor != None # TODO support default info and runinfo properly by writing a sh script that invokes the command properly @@ -59,6 +63,7 @@ def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, default_executor = re_executor, + executor_overrides = executor_overrides, run_from_project_root = run_from_project_root, use_project_relative_paths = run_from_project_root, ), diff --git a/prelude/test/inject_test_run_info.bzl b/prelude/test/inject_test_run_info.bzl index 811d13a1cc..932667a06f 100644 --- a/prelude/test/inject_test_run_info.bzl +++ b/prelude/test/inject_test_run_info.bzl @@ -22,6 +22,7 @@ def inject_test_run_info(ctx: AnalysisContext, test_info: ExternalRunnerTestInfo for (k, v) in test_info.env.items() }, with_inputs = True, + absolute = True, ) return [test_info, RunInfo(args = [inject_test_env, env_file, "--", test_info.command])] diff --git a/prelude/test/tools/BUCK.v2 b/prelude/test/tools/BUCK.v2 index 89ed907421..1c3928706f 100644 --- a/prelude/test/tools/BUCK.v2 +++ b/prelude/test/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( diff --git a/prelude/tests/re_utils.bzl b/prelude/tests/re_utils.bzl index 0517719cd4..eb3ff3abcb 100644 --- a/prelude/tests/re_utils.bzl +++ b/prelude/tests/re_utils.bzl @@ -7,7 +7,7 @@ load("@prelude//:build_mode.bzl", "BuildModeInfo") load("@prelude//tests:remote_test_execution_toolchain.bzl", "RemoteTestExecutionToolchainInfo") -load("@prelude//utils:utils.bzl", "expect_non_none") +load("@prelude//utils:expect.bzl", "expect_non_none") def _get_re_arg(ctx: AnalysisContext): if not hasattr(ctx.attrs, "remote_execution"): @@ -28,20 +28,25 @@ def _get_re_arg(ctx: AnalysisContext): return None -def get_re_executor_from_props(ctx: AnalysisContext) -> [CommandExecutorConfig, None]: +def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig, None], dict[str, CommandExecutorConfig]): """ - Convert the `remote_execution` properties param into a `CommandExecutorConfig` - to use with test providers. + Convert the `remote_execution` properties param into `CommandExecutorConfig` objects to use with test providers. + + Returns (default_executor, executor_overrides). 
""" re_props = _get_re_arg(ctx) if re_props == None: - return None + return None, {} re_props_copy = dict(re_props) capabilities = re_props_copy.pop("capabilities") use_case = re_props_copy.pop("use_case") + listing_capabilities = re_props_copy.pop("listing_capabilities", None) remote_cache_enabled = re_props_copy.pop("remote_cache_enabled", None) + re_dependencies = re_props_copy.pop("dependencies", []) + local_enabled = re_props_copy.pop("local_enabled", False) + re_resource_units = re_props_copy.pop("resource_units", None) if re_props_copy: unexpected_props = ", ".join(re_props_copy.keys()) fail("found unexpected re props: " + unexpected_props) @@ -51,11 +56,25 @@ def get_re_executor_from_props(ctx: AnalysisContext) -> [CommandExecutorConfig, if build_mode_info != None: remote_execution_action_key = "{}={}".format(build_mode_info.cell, build_mode_info.mode) - return CommandExecutorConfig( - local_enabled = False, + default_executor = CommandExecutorConfig( + local_enabled = local_enabled, remote_enabled = True, remote_execution_properties = capabilities, remote_execution_use_case = use_case or "tpx-default", remote_cache_enabled = remote_cache_enabled, remote_execution_action_key = remote_execution_action_key, + remote_execution_dependencies = re_dependencies, + remote_execution_resource_units = re_resource_units, ) + listing_executor = default_executor + if listing_capabilities: + listing_executor = CommandExecutorConfig( + local_enabled = local_enabled, + remote_enabled = True, + remote_execution_properties = listing_capabilities, + remote_execution_use_case = use_case or "tpx-default", + remote_cache_enabled = remote_cache_enabled, + remote_execution_action_key = remote_execution_action_key, + remote_execution_resource_units = re_resource_units, + ) + return default_executor, {"listing": listing_executor} diff --git a/prelude/tests/tpx_re_legacy.bzl b/prelude/tests/tpx_re_legacy.bzl deleted file mode 100644 index a47c911ebd..0000000000 --- a/prelude/tests/tpx_re_legacy.bzl +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -load("@prelude//utils:expect.bzl", "expect") - -_RE_ENABLED = "supports_remote_execution" -_RE_OPTS_LABEL_PREFIX = "re_opts_capabilities=" -_RE_OPTS_KEYS = ["platform", "subplatform", "gpu_name"] - -def _parse_re_opts(labels: list[str]) -> [dict[str, str], None]: - """ - Parse out JSON-embedded RE options like: - 're_opts_capabilities={"platform": gpu-remote-execution, "gpu_name": "A100"}' - """ - - for label in labels: - if label.startswith(_RE_OPTS_LABEL_PREFIX): - result = json.decode(label[len(_RE_OPTS_LABEL_PREFIX):]) - for key in result.keys(): - expect(key in _RE_OPTS_KEYS, "unexpected key in RE options label: {}", key) - return result - - return None - -# TODO(agallagher): Parsing RE options via JSON embedded in labels isn't a great -# UI, and we just do it here to support existing use cases. Ideally, though, we'd -# present a better UI (e.g. an `re_opts` param for tests) and use that instead. -def get_re_executor_from_labels(labels: list[str]) -> [CommandExecutorConfig, None]: - """ - Parse legacy RE-enablement test labels and use them to configure a test RE - executor to run the test with. 
- - The UI is best documented at: - https://www.internalfb.com/intern/wiki/Remote_Execution/Users/GPU_RE_Contbuild_Migration/ - """ - - # If the special "RE enabled" label isn't present, abort. - if _RE_ENABLED not in labels: - return None - - # If there's no options found in labels, don't use RE. This diverges from - # v1 behavior, but v2+tpx needs some platform to be set and so we probably - # want the toolchain to provide some exec-platform compatible platform. - re_opts = _parse_re_opts(labels) - if re_opts == None: - return None - - return CommandExecutorConfig( - local_enabled = False, - remote_enabled = True, - remote_execution_properties = re_opts, - remote_execution_use_case = "tpx-default", - ) diff --git a/prelude/third-party/hmaptool/BUCK.v2 b/prelude/third-party/hmaptool/BUCK.v2 index cced36f1dc..4a96edf2e1 100644 --- a/prelude/third-party/hmaptool/BUCK.v2 +++ b/prelude/third-party/hmaptool/BUCK.v2 @@ -1,10 +1,18 @@ -native.export_file( +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + +prelude.export_file( name = "_hmaptool", src = "hmaptool", mode = "reference", ) -native.command_alias( +prelude.command_alias( name = "hmaptool", exe = ":_hmaptool", visibility = ["PUBLIC"], diff --git a/prelude/third-party/hmaptool/hmaptool b/prelude/third-party/hmaptool/hmaptool index 581e77d40c..a85bc51772 100755 --- a/prelude/third-party/hmaptool/hmaptool +++ b/prelude/third-party/hmaptool/hmaptool @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + # ===----------------------------------------------------------------------=== # # # Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. @@ -6,7 +8,6 @@ # # ===----------------------------------------------------------------------=== # -#!/usr/bin/env python3 from __future__ import absolute_import, division, print_function import json diff --git a/prelude/third-party/pkgconfig.bzl b/prelude/third-party/pkgconfig.bzl new file mode 100644 index 0000000000..1f69f08ade --- /dev/null +++ b/prelude/third-party/pkgconfig.bzl @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:prelude.bzl", "native") + +# NB: Meta engineers should not use this! Please use tp2 instead: +# https://fburl.com/wiki/oyy0fi5j +# +# If a system has a package installed and that package provides a `.pc` file +# this rule can be used to make that library visible to other rules. The `name` +# of this rule should be the pkg-config name. For example, if +# `pkg-config --libs gtest` prints out the flags to link against gtest, then +# `external_pkgconfig_library(name = "gtest")` would allow other rules to +# depend on gtest. +# +# WARNING: dependencies are not resolved by pkg-config, so these must be specified +# manually with `deps`. Additionally, ABI/platform differences are not handled +# by this rule so be careful not to cache it in Remote Execution etc to prevent +# different machines from reusing the outputs of these rules.
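A rough out-of-Buck model of what the two genrules below capture follows; it assumes `pkg-config` and the queried package (zlib, purely as an example) are installed on the host.

```python
#!/usr/bin/env python3
# Model of the genrules' behavior: record the cflags and libs that
# pkg-config reports for a package.
import subprocess


def pkg_config(flag: str, name: str) -> str:
    return subprocess.run(
        ["pkg-config", flag, name],
        check=True,
        capture_output=True,
        text=True,
    ).stdout.strip()


print(pkg_config("--cflags", "zlib"))  # e.g. "" or "-I/usr/include"
print(pkg_config("--libs", "zlib"))  # e.g. "-lz"
```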
+def external_pkgconfig_library( + name, + visibility = ["PUBLIC"], + labels = [], + default_target_platform = "prelude//platforms:default", + deps = []): + pkg_config_cflags = name + "__pkg_config_cflags" + native.genrule( + name = pkg_config_cflags, + default_target_platform = default_target_platform, + out = "out", + cmd = "pkg-config --cflags {} > $OUT".format(name), + remote = False, + ) + pkg_config_libs = name + "__pkg_config_libs" + native.genrule( + name = pkg_config_libs, + default_target_platform = default_target_platform, + out = "out", + cmd = "pkg-config --libs {} > $OUT".format(name), + remote = False, + ) + + labels = list(labels) + labels.append("third-party:pkg-config:{}".format(name)) + + native.prebuilt_cxx_library( + name = name, + default_target_platform = default_target_platform, + visibility = visibility, + exported_preprocessor_flags = ["@$(location :{})".format(pkg_config_cflags)], + exported_linker_flags = ["@$(location :{})".format(pkg_config_libs)], + exported_deps = deps, + labels = labels, + ) diff --git a/prelude/third-party/providers.bzl b/prelude/third-party/providers.bzl new file mode 100644 index 0000000000..9b8a1fded0 --- /dev/null +++ b/prelude/third-party/providers.bzl @@ -0,0 +1,14 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Work-around for buck2 bug causing "transitive values must be of the same +# transitive set type" errors: +# https://fb.prod.workplace.com/groups/buck2users/posts/3637287806527574/ +ThirdPartyBuildTSet = transitive_set() +ThirdPartyBuildInfo = provider(fields = { + "_tset": provider_field(ThirdPartyBuildTSet), +}) diff --git a/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 b/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 index a4ba601afe..383dc2dfa0 100644 --- a/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 +++ b/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 @@ -1,8 +1,12 @@ -# @oss-disable: load("@fbsource//tools/build_defs:fb_native_wrapper.bzl", "fb_native") +load("@prelude//utils:source_listing.bzl", "source_listing") -# @oss-disable: fb_native.export_file( -# @oss-disable: name = "xcode_version_checker", -# @oss-disable: labels = ["buck2-only"], -# @oss-disable: mode = "reference", -# @oss-disable: visibility = ["PUBLIC"], -# @oss-disable: ) +oncall("build_infra") + +source_listing() + +export_file( + name = "xcode_version_checker", + labels = ["buck2-only"], + mode = "reference", + visibility = ["PUBLIC"], +) diff --git a/prelude/toolchains/conan/BUCK.v2 b/prelude/toolchains/conan/BUCK similarity index 74% rename from prelude/toolchains/conan/BUCK.v2 rename to prelude/toolchains/conan/BUCK index a7d9546def..0edb38d5db 100644 --- a/prelude/toolchains/conan/BUCK.v2 +++ b/prelude/toolchains/conan/BUCK @@ -1,57 +1,63 @@ -native.export_file( +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +export_file( name = "buckler", src = "buckler/conanfile.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_library( +python_bootstrap_library( name = "conan_common", srcs = ["conan_common.py"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_generate", main = "conan_generate.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = 
[":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_init", main = "conan_init.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_lock", main = "conan_lock.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_package", main = "conan_package.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_package_extract", main = "conan_package_extract.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_update", main = "conan_update.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "lock_generate", main = "lock_generate.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) diff --git a/prelude/toolchains/conan/conan_common.py b/prelude/toolchains/conan/conan_common.py index 7abe794c77..7f324df0cd 100644 --- a/prelude/toolchains/conan/conan_common.py +++ b/prelude/toolchains/conan/conan_common.py @@ -161,7 +161,7 @@ def conan_env(user_home=None, trace_log=None): # env["CONAN_REVISIONS_ENABLED"] = "1" # Prevent over-allocation. - # TODO[AH] Support parallized package builds and set an appropriate action + # TODO[AH] Support parallelized package builds and set an appropriate action # weight using the `weight` parameter to `ctx.actions.run`. # Note that not all Conan packages respect the `CONAN_CPU_COUNT` setting. env["CONAN_CPU_COUNT"] = "1" diff --git a/prelude/toolchains/conan/conan_update.py b/prelude/toolchains/conan/conan_update.py index 30986dfed6..f52f75bede 100644 --- a/prelude/toolchains/conan/conan_update.py +++ b/prelude/toolchains/conan/conan_update.py @@ -1,4 +1,11 @@ #!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ import argparse import os import shutil @@ -19,7 +26,7 @@ def write_lockfile(lockfile, lockfile_out): def write_targets(update_label, lock_generate, conan_generate, targets_out): header = """\ -# @generated +# {at}generated # Update using `buck2 run {update_label}` load( @@ -28,64 +35,74 @@ def write_targets(update_label, lock_generate, conan_generate, targets_out): "conan_dep", "conan_package", ) -""".format(update_label = update_label) +""".format( + at="@", update_label=update_label + ) os.makedirs(os.path.dirname(targets_out), exist_ok=True) with open(targets_out, "w") as outf: outf.write(header) with open(lock_generate, "r") as inf: - for l in inf: - outf.write(l) + for x in inf: + outf.write(x) with open(conan_generate, "r") as inf: - for l in inf: - outf.write(l) + for x in inf: + outf.write(x) def main(): parser = argparse.ArgumentParser( - prog = "conan_update", - description = "Update the Conan lock-file and the Buck2 package imports.") + prog="conan_update", + description="Update the Conan lock-file and the Buck2 package imports.", + ) parser.add_argument( - "--update-label", - metavar="LABEL", - type=str, - required=True, - help="The label to the target to run this program.") + "--update-label", + metavar="LABEL", + type=str, + required=True, + help="The label to the target to run this program.", + ) parser.add_argument( - "--lockfile", - metavar="FILE", - type=str, - required=True, - help="Path to the lockfile to copy to the repository.") + "--lockfile", + metavar="FILE", + type=str, + required=True, + help="Path to the lockfile to copy to the repository.", + ) parser.add_argument( - "--lock-targets", - metavar="FILE", - type=str, - required=True, - help="Path to the targets file generated from the lock file.") + "--lock-targets", + metavar="FILE", + type=str, + required=True, + help="Path to the targets file generated from the lock file.", + ) parser.add_argument( - "--conan-targets", - metavar="FILE", - type=str, - required=True, - help="Path to the targets file generated by Buckler.") + "--conan-targets", + metavar="FILE", + type=str, + required=True, + help="Path to the targets file generated by Buckler.", + ) parser.add_argument( - "--conanfile", - metavar="FILE", - type=str, - required=True, - help="Path to the Conanfile.") + "--conanfile", + metavar="FILE", + type=str, + required=True, + help="Path to the Conanfile.", + ) parser.add_argument( - "--lockfile-out", - metavar="FILE", - type=str, - required=True, - help="Name of the Conan lock-file to generate, relative to the Conanfile.") + "--lockfile-out", + metavar="FILE", + type=str, + required=True, + help="Name of the Conan lock-file to generate, relative to the Conanfile.", + ) parser.add_argument( - "--targets-out", - metavar="FILE", - type=str, - required=True, - help="Name of the Starlark file to generate, relative to the Conanfile.") + "--targets-out", + metavar="FILE", + type=str, + required=True, + help="Name of the Starlark file to generate, relative to the Conanfile.", + ) args = parser.parse_args() root = find_root() diff --git a/prelude/toolchains/conan/defs.bzl b/prelude/toolchains/conan/defs.bzl index 029ca6530c..8f65db3dfa 100644 --- a/prelude/toolchains/conan/defs.bzl +++ b/prelude/toolchains/conan/defs.bzl @@ -431,21 +431,25 @@ def _conan_generate_impl(ctx: AnalysisContext) -> list[Provider]: trace_log = ctx.actions.declare_output("trace.log") targets_out = ctx.actions.declare_output(ctx.label.name + ".bzl") - cmd = cmd_args([conan_generate]) - cmd.add(["--conan", conan_toolchain.conan]) - 
cmd.add(["--conan-init", conan_init.user_home]) - cmd.hidden(conan_init.profile.config) # The profile is inlined in the lockfile. - cmd.hidden(conan_init.profile.inputs) - cmd.add(["--buckler", ctx.attrs._buckler]) - cmd.add(["--install-folder", install_folder.as_output()]) - cmd.add(["--output-folder", output_folder.as_output()]) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--manifests", manifests.as_output()]) - cmd.add(["--install-info", install_info.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) - cmd.add(["--conanfile", ctx.attrs.conanfile]) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--targets-out", targets_out.as_output()]) + cmd = cmd_args( + [conan_generate] + + ["--conan", conan_toolchain.conan] + + ["--conan-init", conan_init.user_home] + + ["--buckler", ctx.attrs._buckler] + + ["--install-folder", install_folder.as_output()] + + ["--output-folder", output_folder.as_output()] + + ["--user-home", user_home.as_output()] + + ["--manifests", manifests.as_output()] + + ["--install-info", install_info.as_output()] + + ["--trace-file", trace_log.as_output()] + + ["--conanfile", ctx.attrs.conanfile] + + ["--lockfile", ctx.attrs.lockfile] + + ["--targets-out", targets_out.as_output()], + hidden = [ + conan_init.profile.config, # The profile is inlined in the lockfile. + conan_init.profile.inputs, + ], + ) ctx.actions.run(cmd, category = "conan_build") return [ @@ -482,10 +486,12 @@ def _conan_init_impl(ctx: AnalysisContext) -> list[Provider]: user_home = ctx.actions.declare_output("user-home") trace_log = ctx.actions.declare_output("trace.log") - cmd = cmd_args([conan_init]) - cmd.add(["--conan", conan_toolchain.conan]) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) + cmd = cmd_args( + [conan_init] + + ["--conan", conan_toolchain.conan] + + ["--user-home", user_home.as_output()] + + ["--trace-file", trace_log.as_output()], + ) ctx.actions.run(cmd, category = "conan_init") return [ @@ -522,17 +528,18 @@ def _conan_lock_impl(ctx: AnalysisContext) -> list[Provider]: user_home = ctx.actions.declare_output("user-home") trace_log = ctx.actions.declare_output("trace.log") - cmd = cmd_args([conan_lock]) - cmd.add(["--conan", conan_toolchain.conan]) - cmd.add(["--conan-init", conan_init.user_home]) - cmd.add(["--profile", conan_init.profile.config]) - cmd.hidden(conan_init.profile.inputs) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) - cmd.add(["--conanfile", ctx.attrs.conanfile]) - cmd.add(["--lockfile-out", lockfile_out.as_output()]) - if ctx.attrs.lockfile: - cmd.add(["--lockfile", ctx.attrs.lockfile]) + cmd = cmd_args( + [conan_lock] + + ["--conan", conan_toolchain.conan] + + ["--conan-init", conan_init.user_home] + + ["--profile", conan_init.profile.config] + + ["--user-home", user_home.as_output()] + + ["--trace-file", trace_log.as_output()] + + ["--conanfile", ctx.attrs.conanfile] + + ["--lockfile-out", lockfile_out.as_output()] + + (["--lockfile", ctx.attrs.lockfile] if ctx.attrs.lockfile else []), + hidden = conan_init.profile.inputs, + ) ctx.actions.run(cmd, category = "conan_lock") return [ @@ -571,22 +578,26 @@ def _conan_package_impl(ctx: AnalysisContext) -> list[Provider]: cache_out = ctx.actions.declare_output("cache-out") package_out = ctx.actions.declare_output("package") - cmd = cmd_args([conan_package]) - cmd.add(["--conan", conan_toolchain.conan]) - cmd.add(["--conan-init", conan_init.user_home]) - 
cmd.hidden(conan_init.profile.config) # The profile is inlined in the lockfile. - cmd.hidden(conan_init.profile.inputs) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--reference", ctx.attrs.reference]) - cmd.add(["--package-id", ctx.attrs.package_id]) - cmd.add(["--install-folder", install_folder.as_output()]) - cmd.add(["--output-folder", output_folder.as_output()]) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--manifests", manifests.as_output()]) - cmd.add(["--install-info", install_info.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) - cmd.add(["--cache-out", cache_out.as_output()]) - cmd.add(["--package-out", package_out.as_output()]) + cmd = cmd_args( + [conan_package] + + ["--conan", conan_toolchain.conan] + + ["--conan-init", conan_init.user_home] + + ["--lockfile", ctx.attrs.lockfile] + + ["--reference", ctx.attrs.reference] + + ["--package-id", ctx.attrs.package_id] + + ["--install-folder", install_folder.as_output()] + + ["--output-folder", output_folder.as_output()] + + ["--user-home", user_home.as_output()] + + ["--manifests", manifests.as_output()] + + ["--install-info", install_info.as_output()] + + ["--trace-file", trace_log.as_output()] + + ["--cache-out", cache_out.as_output()] + + ["--package-out", package_out.as_output()], + hidden = [ + conan_init.profile.config, # The profile is inlined in the lockfile. + conan_init.profile.inputs, + ], + ) # TODO[AH] Do we need to separate deps and build_deps? # This may become necessary for cross-compilation support. @@ -638,7 +649,7 @@ conan_package = rule( doc = "Build a single Conan package.", ) -def _profile_env_var(name, value): +def _profile_env_var(name, value) -> cmd_args: # TODO[AH] Do we need `quote = "shell"` here? # Setting it causes Buck2 to escape the `$PROFILE_DIR` prefix set in the # very end which causes failures in Conan package builds. @@ -651,9 +662,14 @@ def _make_wrapper_script(ctx, name, tool): cmd_args([ "#!/bin/sh", '_SCRIPTDIR=`dirname "$0"`', - cmd_args("exec", tool, '"$@"', delimiter = " ") - .relative_to(wrapper, parent = 1) - .absolute_prefix('"$_SCRIPTDIR"/'), + cmd_args( + "exec", + tool, + '"$@"', + delimiter = " ", + relative_to = (wrapper, 1), + absolute_prefix = '"$_SCRIPTDIR"/', + ), ]), allow_args = True, is_executable = True, @@ -670,57 +686,60 @@ def _profile_env_tool(ctx, name, tool): that configured as full command lines. """ wrapper, inputs = _make_wrapper_script(ctx, name, tool) - return _profile_env_var(name, wrapper).hidden(tool).hidden(inputs) + return cmd_args(_profile_env_var(name, wrapper), hidden = [tool, inputs]) def _conan_profile_impl(ctx: AnalysisContext) -> list[Provider]: cxx = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - content = cmd_args() + content = [] - content.add("[settings]") - content.add(cmd_args(ctx.attrs.arch, format = "arch={}")) - content.add(cmd_args(ctx.attrs.os, format = "os={}")) - content.add(cmd_args(ctx.attrs.build_type, format = "build_type={}")) + content.append("[settings]") + content.append(cmd_args(ctx.attrs.arch, format = "arch={}")) + content.append(cmd_args(ctx.attrs.os, format = "os={}")) + content.append(cmd_args(ctx.attrs.build_type, format = "build_type={}")) # TODO[AH] Auto-generate the compiler setting based on the toolchain. # Needs a translation of CxxToolProviderType to compiler setting. 
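For reference, a sketch of the profile text this rule assembles: a `[settings]` section followed by `[env]` tool overrides. All values are hypothetical stand-ins for what the cxx toolchain providers would supply.

```python
#!/usr/bin/env python3
# Sketch of the Conan profile layout produced by `conan_profile`.
settings = {
    "arch": "x86_64",
    "os": "Linux",
    "build_type": "Release",
    "compiler": "clang",
    "compiler.version": "15",
    "compiler.libcxx": "libc++",
}
env = {
    "CMAKE_FIND_ROOT_PATH": "",
    "CC": "$PROFILE_DIR/wrappers/cc",
    "CXX": "$PROFILE_DIR/wrappers/cxx",
}

lines = ["[settings]"]
lines += ["{}={}".format(k, v) for k, v in settings.items()]
lines += ["", "[env]"]
lines += ["{}={}".format(k, v) for k, v in env.items()]
print("\n".join(lines))
```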
- content.add(cmd_args(ctx.attrs.compiler, format = "compiler={}")) - content.add(cmd_args(ctx.attrs.compiler_version, format = "compiler.version={}")) - content.add(cmd_args(ctx.attrs.compiler_libcxx, format = "compiler.libcxx={}")) + content.append(cmd_args(ctx.attrs.compiler, format = "compiler={}")) + content.append(cmd_args(ctx.attrs.compiler_version, format = "compiler.version={}")) + content.append(cmd_args(ctx.attrs.compiler_libcxx, format = "compiler.libcxx={}")) - content.add("") - content.add("[env]") - content.add(_profile_env_var("CMAKE_FIND_ROOT_PATH", "")) + content.append("") + content.append("[env]") + content.append(_profile_env_var("CMAKE_FIND_ROOT_PATH", "")) # TODO[AH] Define CMAKE_SYSROOT if needed. # TODO[AH] Define target CHOST for cross-compilation - content.add(_profile_env_tool(ctx, "AR", cxx.linker_info.archiver)) + content.append(_profile_env_tool(ctx, "AR", cxx.linker_info.archiver)) if cxx.as_compiler_info: - content.add(_profile_env_tool(ctx, "AS", cxx.as_compiler_info.compiler)) + content.append(_profile_env_tool(ctx, "AS", cxx.as_compiler_info.compiler)) # TODO[AH] Use asm_compiler_info for Windows if cxx.binary_utilities_info: if cxx.binary_utilities_info.nm: - content.add(_profile_env_tool(ctx, "NM", cxx.binary_utilities_info.nm)) + content.append(_profile_env_tool(ctx, "NM", cxx.binary_utilities_info.nm)) if cxx.binary_utilities_info.ranlib: - content.add(_profile_env_tool(ctx, "RANLIB", cxx.binary_utilities_info.ranlib)) + content.append(_profile_env_tool(ctx, "RANLIB", cxx.binary_utilities_info.ranlib)) if cxx.binary_utilities_info.strip: - content.add(_profile_env_tool(ctx, "STRIP", cxx.binary_utilities_info.strip)) + content.append(_profile_env_tool(ctx, "STRIP", cxx.binary_utilities_info.strip)) if cxx.c_compiler_info: - content.add(_profile_env_tool(ctx, "CC", cxx.c_compiler_info.compiler)) - content.add(_profile_env_var("CFLAGS", cxx.c_compiler_info.compiler_flags)) + content.append(_profile_env_tool(ctx, "CC", cxx.c_compiler_info.compiler)) + content.append(_profile_env_var("CFLAGS", cxx.c_compiler_info.compiler_flags)) if cxx.cxx_compiler_info: - content.add(_profile_env_tool(ctx, "CXX", cxx.cxx_compiler_info.compiler)) - content.add(_profile_env_var("CXXFLAGS", cxx.cxx_compiler_info.compiler_flags)) + content.append(_profile_env_tool(ctx, "CXX", cxx.cxx_compiler_info.compiler)) + content.append(_profile_env_var("CXXFLAGS", cxx.cxx_compiler_info.compiler_flags)) output = ctx.actions.declare_output(ctx.label.name) - content.relative_to(output, parent = 1) - content.absolute_prefix("$PROFILE_DIR/") + content = cmd_args( + content, + relative_to = (output, 1), + absolute_prefix = "$PROFILE_DIR/", + ) _, args_inputs = ctx.actions.write(output, content, allow_args = True) return [ DefaultInfo(default_outputs = [output]), - ConanProfileInfo(config = output, inputs = content.hidden(args_inputs)), + ConanProfileInfo(config = output, inputs = cmd_args(content, hidden = args_inputs)), ] conan_profile = rule( @@ -740,14 +759,16 @@ conan_profile = rule( def _conan_update_impl(ctx: AnalysisContext) -> list[Provider]: conan_update = ctx.attrs._conan_update[RunInfo] - cmd = cmd_args([conan_update]) - cmd.add(["--update-label", str(ctx.label.raw_target())]) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--lock-targets", ctx.attrs.lock_generate]) - cmd.add(["--conan-targets", ctx.attrs.conan_generate]) - cmd.add(["--conanfile", ctx.attrs.conanfile]) - cmd.add(["--lockfile-out", ctx.attrs.lockfile_name]) - cmd.add(["--targets-out", 
ctx.attrs.targets_name]) + cmd = cmd_args( + [conan_update] + + ["--update-label", str(ctx.label.raw_target())] + + ["--lockfile", ctx.attrs.lockfile] + + ["--lock-targets", ctx.attrs.lock_generate] + + ["--conan-targets", ctx.attrs.conan_generate] + + ["--conanfile", ctx.attrs.conanfile] + + ["--lockfile-out", ctx.attrs.lockfile_name] + + ["--targets-out", ctx.attrs.targets_name], + ) return [ DefaultInfo(default_outputs = []), @@ -773,10 +794,12 @@ def _lock_generate_impl(ctx: AnalysisContext) -> list[Provider]: targets_out = ctx.actions.declare_output(ctx.label.name + ".bzl") - cmd = cmd_args([lock_generate]) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--lockfile-label", str(ctx.attrs.lockfile.owner.raw_target())]) - cmd.add(["--targets-out", targets_out.as_output()]) + cmd = cmd_args( + [lock_generate] + + ["--lockfile", ctx.attrs.lockfile] + + ["--lockfile-label", str(ctx.attrs.lockfile.owner.raw_target())] + + ["--targets-out", targets_out.as_output()], + ) ctx.actions.run(cmd, category = "conan_generate") return [ diff --git a/prelude/toolchains/cxx.bzl b/prelude/toolchains/cxx.bzl index 8863bce95b..437547d012 100644 --- a/prelude/toolchains/cxx.bzl +++ b/prelude/toolchains/cxx.bzl @@ -9,11 +9,13 @@ load( "@prelude//cxx:cxx_toolchain_types.bzl", "BinaryUtilitiesInfo", "CCompilerInfo", + "CvtresCompilerInfo", "CxxCompilerInfo", "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", "PicBehavior", + "RcCompilerInfo", "ShlibInterfacesMode", ) load("@prelude//cxx:headers.bzl", "HeaderMode") @@ -34,6 +36,8 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): asm_compiler_type = ctx.attrs.compiler_type compiler = ctx.attrs.compiler cxx_compiler = ctx.attrs.cxx_compiler + cvtres_compiler = ctx.attrs.cvtres_compiler + rc_compiler = ctx.attrs.rc_compiler linker = ctx.attrs.linker linker_type = "gnu" pic_behavior = PicBehavior("supported") @@ -57,6 +61,10 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): if compiler == "cl.exe": compiler = msvc_tools.cl_exe cxx_compiler = compiler + if cvtres_compiler == "cvtres.exe": + cvtres_compiler = msvc_tools.cvtres_exe + if rc_compiler == "rc.exe": + rc_compiler = msvc_tools.rc_exe if linker == "link.exe": linker = msvc_tools.link_exe linker = _windows_linker_wrapper(ctx, linker) @@ -67,7 +75,6 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): shared_library_name_default_prefix = "" shared_library_name_format = "{}.dll" shared_library_versioned_name_format = "{}.dll" - additional_linker_flags = ["msvcrt.lib"] pic_behavior = PicBehavior("not_supported") elif ctx.attrs.linker == "g++" or ctx.attrs.cxx_compiler == "g++": pass @@ -86,6 +93,7 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): linker_info = LinkerInfo( linker = RunInfo(args = linker), linker_flags = additional_linker_flags + ctx.attrs.link_flags, + post_linker_flags = ctx.attrs.post_link_flags, archiver = RunInfo(args = archiver_args), archiver_type = archiver_type, archiver_supports_argfiles = archiver_supports_argfiles, @@ -110,13 +118,13 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): static_library_extension = static_library_extension, force_full_hybrid_if_capable = False, is_pdb_generated = is_pdb_generated(linker_type, ctx.attrs.link_flags), - produce_interface_from_stub_shared_library = True, link_ordering = ctx.attrs.link_ordering, ), bolt_enabled = False, binary_utilities_info = BinaryUtilitiesInfo( nm = RunInfo(args = ["nm"]), objcopy = RunInfo(args = ["objcopy"]), + objdump = RunInfo(args = ["objdump"]), ranlib = RunInfo(args = 
["ranlib"]), strip = RunInfo(args = ["strip"]), dwp = None, @@ -142,6 +150,18 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): compiler = RunInfo(args = [asm_compiler]), compiler_type = asm_compiler_type, ), + cvtres_compiler_info = CvtresCompilerInfo( + compiler = RunInfo(args = [cvtres_compiler]), + preprocessor_flags = [], + compiler_flags = ctx.attrs.cvtres_flags, + compiler_type = ctx.attrs.compiler_type, + ), + rc_compiler_info = RcCompilerInfo( + compiler = RunInfo(args = [rc_compiler]), + preprocessor_flags = [], + compiler_flags = ctx.attrs.rc_flags, + compiler_type = ctx.attrs.compiler_type, + ), header_mode = HeaderMode("symlink_tree_only"), cpp_dep_tracking_mode = ctx.attrs.cpp_dep_tracking_mode, pic_behavior = pic_behavior, @@ -180,6 +200,8 @@ system_cxx_toolchain = rule( "compiler": attrs.string(default = "cl.exe" if host_info().os.is_windows else "clang"), "compiler_type": attrs.string(default = "windows" if host_info().os.is_windows else "clang"), # one of CxxToolProviderType "cpp_dep_tracking_mode": attrs.string(default = "makefile"), + "cvtres_compiler": attrs.string(default = "cvtres.exe"), + "cvtres_flags": attrs.list(attrs.string(), default = []), "cxx_compiler": attrs.string(default = "cl.exe" if host_info().os.is_windows else "clang++"), "cxx_flags": attrs.list(attrs.string(), default = []), "link_flags": attrs.list(attrs.string(), default = []), @@ -189,6 +211,9 @@ system_cxx_toolchain = rule( "linker_wrapper": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:linker_wrapper")), "make_comp_db": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:make_comp_db")), "msvc_tools": attrs.default_only(attrs.exec_dep(providers = [VisualStudio], default = "prelude//toolchains/msvc:msvc_tools")), + "post_link_flags": attrs.list(attrs.string(), default = []), + "rc_compiler": attrs.string(default = "rc.exe"), + "rc_flags": attrs.list(attrs.string(), default = []), }, is_toolchain_rule = True, ) diff --git a/prelude/toolchains/cxx/zig/BUCK.v2 b/prelude/toolchains/cxx/zig/BUCK.v2 deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/prelude/toolchains/cxx/zig/defs.bzl b/prelude/toolchains/cxx/zig/defs.bzl index 1e58412e6f..0a63b1e3aa 100644 --- a/prelude/toolchains/cxx/zig/defs.bzl +++ b/prelude/toolchains/cxx/zig/defs.bzl @@ -21,7 +21,7 @@ the time of writing this is still experimental. If this is a problem for your use-case then you may wish to rely on a system toolchain or define your own. The toolchain is not fully hermetic as it still relies on system tools like nm. -Only works on Linux. +It only works on Linux, and to a limited extent on MacOS. 
[zig-cc-announcement]: https://andrewkelley.me/post/zig-cc-powerful-drop-in-replacement-gcc-clang.html @@ -32,7 +32,7 @@ the toolchain like so: `toolchains//BUILD` ```bzl -load("@prelude//toolchains/cxx:zig.bzl", "download_zig_distribution", "cxx_zig_toolchain") +load("@prelude//toolchains/cxx/zig:defs.bzl", "download_zig_distribution", "cxx_zig_toolchain") download_zig_distribution( name = "zig", @@ -50,7 +50,7 @@ To define toolchains for multiple platforms and configure cross-compilation you can configure the toolchain like so: ```bzl -load("@prelude//toolchains/cxx:zig.bzl", "download_zig_distribution", "cxx_zig_toolchain") +load("@prelude//toolchains/cxx/zig:defs.bzl", "download_zig_distribution", "cxx_zig_toolchain") download_zig_distribution( name = "zig-x86_64-linux", @@ -174,9 +174,13 @@ def _zig_distribution_impl(ctx: AnalysisContext) -> list[Provider]: src = cmd_args(ctx.attrs.dist[DefaultInfo].default_outputs[0], format = path_tpl) ctx.actions.run(["ln", "-sf", cmd_args(src).relative_to(dst, parent = 1), dst.as_output()], category = "cp_compiler") - compiler = cmd_args([dst]) - compiler.hidden(ctx.attrs.dist[DefaultInfo].default_outputs) - compiler.hidden(ctx.attrs.dist[DefaultInfo].other_outputs) + compiler = cmd_args( + [dst], + hidden = [ + ctx.attrs.dist[DefaultInfo].default_outputs, + ctx.attrs.dist[DefaultInfo].other_outputs, + ], + ) return [ ctx.attrs.dist[DefaultInfo], @@ -227,8 +231,10 @@ def _http_archive_impl(ctx: AnalysisContext) -> list[Provider]: is_executable = True, allow_args = True, ) - ctx.actions.run(cmd_args(["/bin/sh", script]) - .hidden([archive, output.as_output()]), category = "http_archive") + ctx.actions.run( + cmd_args(["/bin/sh", script], hidden = [archive, output.as_output()]), + category = "http_archive", + ) return [DefaultInfo(default_output = output)] @@ -300,7 +306,11 @@ def _get_linker_type(os: str) -> str: if os == "linux": return "gnu" elif os == "macos" or os == "freebsd": - return "darwin" + # TODO[AH] return "darwin". + # The cc rules emit linker flags on MacOS that are not supported by Zig's linker. + # Declaring the linker as GNU style is not entirely correct, however it works better than + # declaring Darwin style at this point. 
See https://github.com/facebook/buck2/issues/470 + return "gnu" elif os == "windows": return "windows" else: @@ -372,7 +382,6 @@ def _cxx_zig_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: #lto_mode = None, # TODO support LTO object_file_extension = "o", #mk_shlib_intf = None, # not needed if shlib_interfaces = "disabled" - produce_interface_from_stub_shared_library = True, shlib_interfaces = ShlibInterfacesMode("disabled"), shared_dep_runtime_ld_flags = ctx.attrs.shared_dep_runtime_ld_flags, shared_library_name_default_prefix = "lib", diff --git a/prelude/toolchains/demo.bzl b/prelude/toolchains/demo.bzl index 51bec438aa..17ee3f5e7a 100644 --- a/prelude/toolchains/demo.bzl +++ b/prelude/toolchains/demo.bzl @@ -7,6 +7,7 @@ load("@prelude//toolchains:cxx.bzl", "system_cxx_toolchain") load("@prelude//toolchains:genrule.bzl", "system_genrule_toolchain") +load("@prelude//toolchains:go.bzl", "system_go_toolchain") load("@prelude//toolchains:haskell.bzl", "system_haskell_toolchain") load("@prelude//toolchains:ocaml.bzl", "system_ocaml_toolchain") load("@prelude//toolchains:python.bzl", "system_python_bootstrap_toolchain", "system_python_toolchain") @@ -28,6 +29,11 @@ def system_demo_toolchains(): visibility = ["PUBLIC"], ) + system_go_toolchain( + name = "go", + visibility = ["PUBLIC"], + ) + system_haskell_toolchain( name = "haskell", visibility = ["PUBLIC"], diff --git a/prelude/toolchains/go.bzl b/prelude/toolchains/go.bzl index ad9f97127d..d3fb37f12a 100644 --- a/prelude/toolchains/go.bzl +++ b/prelude/toolchains/go.bzl @@ -6,11 +6,9 @@ # of this source tree. load("@prelude//go:toolchain.bzl", "GoToolchainInfo") +load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") def _system_go_toolchain_impl(ctx): - go_root = ctx.attrs.go_root - go_binary = go_root + "/bin/go" - arch = host_info().arch if arch.is_aarch64: go_arch = "arm64" @@ -18,35 +16,42 @@ def _system_go_toolchain_impl(ctx): go_arch = "amd64" else: fail("Unsupported go arch: {}".format(arch)) + os = host_info().os if os.is_macos: go_os = "darwin" elif os.is_linux: go_os = "linux" + elif os.is_windows: + go_os = "windows" else: fail("Unsupported go os: {}".format(os)) - get_go_tool = lambda go_tool: "{}/pkg/tool/{}_{}/{}".format(go_root, go_os, go_arch, go_tool) + script_os = ScriptOs("windows" if os.is_windows else "unix") + go = "go.exe" if os.is_windows else "go" + return [ DefaultInfo(), GoToolchainInfo( - assembler = get_go_tool("asm"), - cgo = get_go_tool("cgo"), - cgo_wrapper = ctx.attrs.cgo_wrapper, - compile_wrapper = ctx.attrs.compile_wrapper, - compiler = get_go_tool("compile"), - cover = get_go_tool("cover"), - cover_srcs = ctx.attrs.cover_srcs, + assembler = RunInfo(cmd_script(ctx, "asm", cmd_args(go, "tool", "asm"), script_os)), + cgo = RunInfo(cmd_script(ctx, "cgo", cmd_args(go, "tool", "cgo"), script_os)), + cgo_wrapper = ctx.attrs.cgo_wrapper[RunInfo], + concat_files = ctx.attrs.concat_files[RunInfo], + compiler = RunInfo(cmd_script(ctx, "compile", cmd_args(go, "tool", "compile"), script_os)), + cover = RunInfo(cmd_script(ctx, "cover", cmd_args(go, "tool", "cover"), script_os)), cxx_toolchain_for_linking = None, env_go_arch = go_arch, env_go_os = go_os, - env_go_root = go_root, - external_linker_flags = None, - filter_srcs = ctx.attrs.filter_srcs, - go = go_binary, - linker = get_go_tool("link"), - packer = get_go_tool("pack"), + external_linker_flags = [], + gen_stdlib_importcfg = ctx.attrs.gen_stdlib_importcfg[RunInfo], + go = RunInfo(cmd_script(ctx, "go", cmd_args(go), script_os)), + go_wrapper = 
ctx.attrs.go_wrapper[RunInfo], + linker = RunInfo(cmd_script(ctx, "link", cmd_args(go, "tool", "link"), script_os)), + packer = RunInfo(cmd_script(ctx, "pack", cmd_args(go, "tool", "pack"), script_os)), tags = [], + linker_flags = [], + assembler_flags = [], + compiler_flags = [], ), ] @@ -55,15 +60,13 @@ system_go_toolchain = rule( doc = """Example system go toolchain rules (WIP). Usage: system_go_toolchain( name = "go", - go_root = "/opt/homebrew/Cellar/go/1.20.4/libexec", visibility = ["PUBLIC"], )""", attrs = { "cgo_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cgo_wrapper")), - "compile_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:compile_wrapper")), - "cover_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cover_srcs")), - "filter_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:filter_srcs")), - "go_root": attrs.string(), + "concat_files": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:concat_files")), + "gen_stdlib_importcfg": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:gen_stdlib_importcfg")), + "go_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:go_wrapper")), }, is_toolchain_rule = True, ) diff --git a/prelude/toolchains/haskell.bzl b/prelude/toolchains/haskell.bzl index 9d5b02f16d..c3e99c382a 100644 --- a/prelude/toolchains/haskell.bzl +++ b/prelude/toolchains/haskell.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//haskell:haskell.bzl", "HaskellPlatformInfo", "HaskellToolchainInfo") +load("@prelude//haskell:toolchain.bzl", "HaskellPlatformInfo", "HaskellToolchainInfo") def _system_haskell_toolchain(_ctx: AnalysisContext) -> list[Provider]: return [ @@ -14,11 +14,12 @@ def _system_haskell_toolchain(_ctx: AnalysisContext) -> list[Provider]: compiler = "ghc", packager = "ghc-pkg", linker = "ghc", + haddock = "haddock", compiler_flags = [], linker_flags = [], ), HaskellPlatformInfo( - name = "x86_64", + name = host_info().arch, ), ] diff --git a/prelude/toolchains/msvc/BUCK.v2 b/prelude/toolchains/msvc/BUCK.v2 index ed74363b49..beea9a306b 100644 --- a/prelude/toolchains/msvc/BUCK.v2 +++ b/prelude/toolchains/msvc/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load(":tools.bzl", "find_msvc_tools") +oncall("build_infra") + +source_listing() + python_bootstrap_binary( name = "vswhere", main = "vswhere.py", diff --git a/prelude/toolchains/msvc/run_msvc_tool.py b/prelude/toolchains/msvc/run_msvc_tool.py index 6d2f8fc736..2fa9a60193 100644 --- a/prelude/toolchains/msvc/run_msvc_tool.py +++ b/prelude/toolchains/msvc/run_msvc_tool.py @@ -42,8 +42,12 @@ def main(): prepend_env(env, "PATH", tool.PATH) prepend_env(env, "INCLUDE", tool.INCLUDE) - completed_process = subprocess.run([tool.exe, *arguments], env=env) - sys.exit(completed_process.returncode) + if tool.exe is None: + print("Tool not found", file=sys.stderr) + sys.exit(1) + else: + completed_process = subprocess.run([tool.exe, *arguments], env=env) + sys.exit(completed_process.returncode) if __name__ == "__main__": diff --git a/prelude/toolchains/msvc/tools.bzl b/prelude/toolchains/msvc/tools.bzl index 72d294eba9..199065fb94 100644 --- a/prelude/toolchains/msvc/tools.bzl +++ b/prelude/toolchains/msvc/tools.bzl @@ -12,27 +12,35 @@ 
VisualStudio = provider( fields = { # cl.exe "cl_exe": provider_field(typing.Any, default = None), + # cvtres.exe + "cvtres_exe": provider_field(typing.Any, default = None), # lib.exe "lib_exe": provider_field(typing.Any, default = None), # ml64.exe "ml64_exe": provider_field(typing.Any, default = None), # link.exe "link_exe": provider_field(typing.Any, default = None), + # rc.exe + "rc_exe": provider_field(typing.Any, default = None), }, ) def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: cl_exe_json = ctx.actions.declare_output("cl.exe.json") + cvtres_exe_json = ctx.actions.declare_output("cvtres.exe.json") lib_exe_json = ctx.actions.declare_output("lib.exe.json") ml64_exe_json = ctx.actions.declare_output("ml64.exe.json") link_exe_json = ctx.actions.declare_output("link.exe.json") + rc_exe_json = ctx.actions.declare_output("rc.exe.json") cmd = [ ctx.attrs.vswhere[RunInfo], cmd_args("--cl=", cl_exe_json.as_output(), delimiter = ""), + cmd_args("--cvtres=", cvtres_exe_json.as_output(), delimiter = ""), cmd_args("--lib=", lib_exe_json.as_output(), delimiter = ""), cmd_args("--ml64=", ml64_exe_json.as_output(), delimiter = ""), cmd_args("--link=", link_exe_json.as_output(), delimiter = ""), + cmd_args("--rc=", rc_exe_json.as_output(), delimiter = ""), ] ctx.actions.run( @@ -48,6 +56,12 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: cmd = cmd_args(run_msvc_tool, cl_exe_json), os = ScriptOs("windows"), ) + cvtres_exe_script = cmd_script( + ctx = ctx, + name = "cvtres", + cmd = cmd_args(run_msvc_tool, cvtres_exe_json), + os = ScriptOs("windows"), + ) lib_exe_script = cmd_script( ctx = ctx, name = "lib", @@ -66,6 +80,12 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: cmd = cmd_args(run_msvc_tool, link_exe_json), os = ScriptOs("windows"), ) + rc_exe_script = cmd_script( + ctx = ctx, + name = "rc", + cmd = cmd_args(run_msvc_tool, rc_exe_json), + os = ScriptOs("windows"), + ) return [ # Supports `buck2 run prelude//toolchains/msvc:msvc_tools[cl.exe]` @@ -77,6 +97,12 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: "json": [DefaultInfo(default_output = cl_exe_json)], }), ], + "cvtres.exe": [ + RunInfo(args = [cvtres_exe_script]), + DefaultInfo(sub_targets = { + "json": [DefaultInfo(default_output = cvtres_exe_json)], + }), + ], "lib.exe": [ RunInfo(args = [lib_exe_script]), DefaultInfo(sub_targets = { @@ -95,12 +121,20 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: "json": [DefaultInfo(default_output = ml64_exe_json)], }), ], + "rc.exe": [ + RunInfo(args = [rc_exe_script]), + DefaultInfo(sub_targets = { + "json": [DefaultInfo(default_output = rc_exe_json)], + }), + ], }), VisualStudio( cl_exe = cl_exe_script, + cvtres_exe = cvtres_exe_script, lib_exe = lib_exe_script, ml64_exe = ml64_exe_script, link_exe = link_exe_script, + rc_exe = rc_exe_script, ), ] diff --git a/prelude/toolchains/msvc/vswhere.py b/prelude/toolchains/msvc/vswhere.py index 0d98a6165e..29f5e0fe6a 100644 --- a/prelude/toolchains/msvc/vswhere.py +++ b/prelude/toolchains/msvc/vswhere.py @@ -15,17 +15,23 @@ import shutil import subprocess import sys +import tempfile import winreg from pathlib import Path from typing import IO, List, NamedTuple +VC_EXE_NAMES = ["cl.exe", "cvtres.exe", "lib.exe", "ml64.exe", "link.exe"] +UCRT_EXE_NAMES = ["rc.exe"] + class OutputJsonFiles(NamedTuple): # We write a Tool instance as JSON into each of these files. 
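# A tool that could not be located may be recorded with exe=None; run_msvc_tool.py
# then reports it as missing only if that tool is actually invoked.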
cl: IO[str] + cvtres: IO[str] lib: IO[str] ml64: IO[str] link: IO[str] + rc: IO[str] class Tool(NamedTuple): @@ -35,11 +41,14 @@ class Tool(NamedTuple): INCLUDE: List[Path] = [] -def find_in_path(executable): +def find_in_path(executable, is_optional=False): which = shutil.which(executable) if which is None: - print(f"{executable} not found in $PATH", file=sys.stderr) - sys.exit(1) + if is_optional: + return None + else: + print(f"{executable} not found in $PATH", file=sys.stderr) + sys.exit(1) return Tool(which) @@ -99,8 +108,9 @@ def find_with_vswhere_exe(): lib_path = tools_path / "lib" / "x64" include_path = tools_path / "include" - exe_names = "cl.exe", "lib.exe", "ml64.exe", "link.exe" - if not all(bin_path.joinpath(exe).exists() for exe in exe_names): + vc_exe_paths = [bin_path / exe for exe in VC_EXE_NAMES] + + if not all(exe.exists() for exe in vc_exe_paths): continue PATH = [bin_path] @@ -109,10 +119,16 @@ def find_with_vswhere_exe(): ucrt, ucrt_version = get_ucrt_dir() if ucrt and ucrt_version: - PATH.append(ucrt / "bin" / ucrt_version / "x64") + ucrt_bin_path = ucrt / "bin" / ucrt_version / "x64" + PATH.append(ucrt_bin_path) LIB.append(ucrt / "lib" / ucrt_version / "ucrt" / "x64") INCLUDE.append(ucrt / "include" / ucrt_version / "ucrt") + ucrt_exe_paths = [ucrt_bin_path / exe for exe in UCRT_EXE_NAMES] + ucrt_exe_paths = [exe if exe.exists() else None for exe in ucrt_exe_paths] + else: + ucrt_exe_paths = [None for exe in UCRT_EXE_NAMES] + sdk, sdk_version = get_sdk10_dir() if sdk and sdk_version: PATH.append(sdk / "bin" / "x64") @@ -123,12 +139,13 @@ def find_with_vswhere_exe(): INCLUDE.append(sdk / "include" / sdk_version / "shared") return [ - Tool(exe=bin_path / exe, LIB=LIB, PATH=PATH, INCLUDE=INCLUDE) - for exe in exe_names + Tool(exe=exe, LIB=LIB, PATH=PATH, INCLUDE=INCLUDE) + for exe in vc_exe_paths + ucrt_exe_paths ] print( - "vswhere.exe did not find a suitable MSVC toolchain containing cl.exe, lib.exe, ml64.exe", + "vswhere.exe did not find a suitable MSVC toolchain containing " + + ", ".join(VC_EXE_NAMES), file=sys.stderr, ) sys.exit(1) @@ -172,7 +189,7 @@ def get_sdk10_dir(): windows_sdk_dir = os.environ.get("WindowsSdkDir") windows_sdk_version = os.environ.get("WindowsSDKVersion") if windows_sdk_dir is not None and windows_sdk_version is not None: - return windows_sdk_dir, windows_sdk_version.removesuffix("\\") + return Path(windows_sdk_dir), windows_sdk_version.removesuffix("\\") registry = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) key_name = "SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows\\v10.0" @@ -202,27 +219,112 @@ def write_tool_json(out, tool): out.write(j) +# for use with the ewdk to grab the environment strings +def get_ewdk_env(ewdkdir: Path): + """ + Inspiration taken from the following: + http://pythonwise.blogspot.fr/2010/04/sourcing-shell-script.html (Miki Tebeka) + http://stackoverflow.com/questions/3503719/#comment28061110_3505826 (ahal) + """ + + # We need to write the script that will make the important variables available + with tempfile.NamedTemporaryFile( + prefix="VcVarsExtract", suffix=".bat", mode="w", delete=False + ) as tmp: + print("@echo off", file=tmp) + print("call %* > NUL", file=tmp) + print("set", file=tmp) + + env_script = ewdkdir / "BuildEnv" / "SetupBuildEnv.cmd" + cmd = [tmp.name, env_script, "amd64"] + output = subprocess.check_output(cmd).decode("utf-8") + + env = {} + for line in output.split("\r\n"): + if line and "=" in line: + first, second = line.split("=", 1) + env[first] = second + + return env + + +def 
find_with_ewdk(ewdkdir: Path): + env = get_ewdk_env(ewdkdir) + + installation_path = Path(env["VSINSTALLDIR"]) + vc_tools_version = env["VCToolsVersion"] + tools_path = installation_path / "VC" / "Tools" / "MSVC" / vc_tools_version + bin_path = tools_path / "bin" / "HostX64" / "x64" + lib_path = tools_path / "lib" / "x64" + include_path = tools_path / "include" + + PATH = [bin_path] + LIB = [lib_path] + INCLUDE = [include_path] + + ucrt = Path(env["UCRTContentRoot"]) + ucrt_version = env.get("Version_Number") + + vc_exe_paths = [bin_path / exe for exe in VC_EXE_NAMES] + + if ucrt_version: + ucrt_bin_path = ucrt / "bin" / ucrt_version / "x64" + PATH.append(ucrt_bin_path) + LIB.append(ucrt / "lib" / ucrt_version / "ucrt" / "x64") + INCLUDE.append(ucrt / "include" / ucrt_version / "ucrt") + + ucrt_exe_paths = [ucrt_bin_path / exe for exe in UCRT_EXE_NAMES] + ucrt_exe_paths = [exe if exe.exists() else None for exe in ucrt_exe_paths] + else: + ucrt_exe_paths = [None for exe in UCRT_EXE_NAMES] + + sdk = Path(env["WindowsSdkDir"]) + sdk_version = ucrt_version + if sdk_version: + PATH.append(sdk / "bin" / "x64") + LIB.append(sdk / "lib" / sdk_version / "um" / "x64") + INCLUDE.append(sdk / "include" / sdk_version / "um") + INCLUDE.append(sdk / "include" / sdk_version / "cppwinrt") + INCLUDE.append(sdk / "include" / sdk_version / "winrt") + INCLUDE.append(sdk / "include" / sdk_version / "shared") + + return [ + Tool(exe=exe, LIB=LIB, PATH=PATH, INCLUDE=INCLUDE) + for exe in vc_exe_paths + ucrt_exe_paths + ] + + def main(): parser = argparse.ArgumentParser() parser.add_argument("--cl", type=argparse.FileType("w"), required=True) + parser.add_argument("--cvtres", type=argparse.FileType("w"), required=True) parser.add_argument("--lib", type=argparse.FileType("w"), required=True) parser.add_argument("--ml64", type=argparse.FileType("w"), required=True) parser.add_argument("--link", type=argparse.FileType("w"), required=True) + parser.add_argument("--rc", type=argparse.FileType("w"), required=True) output = OutputJsonFiles(**vars(parser.parse_args())) # If vcvars has been run, it puts these tools onto $PATH.
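# Discovery precedence below: an already-activated vcvars environment wins,
# then an Enterprise WDK pointed to by EWDKDIR, and finally a vswhere.exe
# lookup of installed Visual Studio instances.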
if "VCINSTALLDIR" in os.environ: - cl_exe = find_in_path("cl.exe") - lib_exe = find_in_path("lib.exe") - ml64_exe = find_in_path("ml64.exe") - link_exe = find_in_path("link.exe") + cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe = ( + find_in_path(exe) for exe in VC_EXE_NAMES + ) + rc_exe = find_in_path("rc.exe", is_optional=True) + elif "EWDKDIR" in os.environ: + cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe, rc_exe = find_with_ewdk( + Path(os.environ["EWDKDIR"]) + ) else: - cl_exe, lib_exe, ml64_exe, link_exe = find_with_vswhere_exe() + cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe, rc_exe = ( + find_with_vswhere_exe() + ) write_tool_json(output.cl, cl_exe) + write_tool_json(output.cvtres, cvtres_exe) write_tool_json(output.lib, lib_exe) write_tool_json(output.ml64, ml64_exe) write_tool_json(output.link, link_exe) + write_tool_json(output.rc, rc_exe) if __name__ == "__main__": diff --git a/prelude/toolchains/python.bzl b/prelude/toolchains/python.bzl index b4ac5611dd..16400385b0 100644 --- a/prelude/toolchains/python.bzl +++ b/prelude/toolchains/python.bzl @@ -57,7 +57,10 @@ def _system_python_toolchain_impl(ctx): return [ DefaultInfo(), PythonToolchainInfo( + binary_linker_flags = ctx.attrs.binary_linker_flags, + linker_flags = ctx.attrs.linker_flags, fail_with_message = ctx.attrs.fail_with_message[RunInfo], + generate_static_extension_info = ctx.attrs.generate_static_extension_info, make_source_db = ctx.attrs.make_source_db[RunInfo], make_source_db_no_deps = ctx.attrs.make_source_db_no_deps[RunInfo], host_interpreter = RunInfo(args = [ctx.attrs.interpreter]), @@ -66,6 +69,7 @@ def _system_python_toolchain_impl(ctx): make_py_package_inplace = ctx.attrs.make_py_package_inplace[RunInfo], compile = RunInfo(args = ["echo", "COMPILEINFO"]), package_style = "inplace", + pex_extension = ctx.attrs.pex_extension, native_link_strategy = "separate", runtime_library = ctx.attrs.runtime_library, ), @@ -75,12 +79,16 @@ def _system_python_toolchain_impl(ctx): system_python_toolchain = rule( impl = _system_python_toolchain_impl, attrs = { + "binary_linker_flags": attrs.default_only(attrs.list(attrs.arg(), default = [])), "fail_with_message": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:fail_with_message")), + "generate_static_extension_info": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:generate_static_extension_info")), "interpreter": attrs.string(default = _INTERPRETER), + "linker_flags": attrs.default_only(attrs.list(attrs.arg(), default = [])), "make_py_package_inplace": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_py_package_inplace")), "make_py_package_modules": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_py_package_modules")), "make_source_db": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_source_db")), "make_source_db_no_deps": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_source_db_no_deps")), + "pex_extension": attrs.string(default = ".pex"), "runtime_library": attrs.default_only(attrs.dep(providers = [ArtifactGroupInfo], default = "prelude//python/runtime:bootstrap_files")), }, is_toolchain_rule = True, diff --git a/prelude/toolchains/rust.bzl b/prelude/toolchains/rust.bzl index 85e08b1c85..6d09d608df 100644 --- a/prelude/toolchains/rust.bzl +++ b/prelude/toolchains/rust.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the 
LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") +load("@prelude//rust:rust_toolchain.bzl", "PanicRuntime", "RustToolchainInfo") load("@prelude//rust/tools:attrs.bzl", "internal_tool_attrs") _DEFAULT_TRIPLE = select({ @@ -42,23 +42,22 @@ def _system_rust_toolchain_impl(ctx): clippy_driver = RunInfo(args = ["clippy-driver"]), clippy_toml = ctx.attrs.clippy_toml[DefaultInfo].default_outputs[0] if ctx.attrs.clippy_toml else None, compiler = RunInfo(args = ["rustc"]), - concat_tool = ctx.attrs.concat_tool[RunInfo], default_edition = ctx.attrs.default_edition, + panic_runtime = PanicRuntime("unwind"), deny_lints = ctx.attrs.deny_lints, doctests = ctx.attrs.doctests, extern_html_root_url_prefix = ctx.attrs.extern_html_root_url_prefix, failure_filter_action = ctx.attrs.failure_filter_action[RunInfo], - pipelined = ctx.attrs.pipelined, report_unused_deps = ctx.attrs.report_unused_deps, rustc_action = ctx.attrs.rustc_action[RunInfo], rustc_binary_flags = ctx.attrs.rustc_binary_flags, - rustc_check_flags = ctx.attrs.rustc_check_flags, rustc_flags = ctx.attrs.rustc_flags, rustc_target_triple = ctx.attrs.rustc_target_triple, rustc_test_flags = ctx.attrs.rustc_test_flags, rustdoc = RunInfo(args = ["rustdoc"]), rustdoc_flags = ctx.attrs.rustdoc_flags, rustdoc_test_with_resources = ctx.attrs.rustdoc_test_with_resources[RunInfo], + rustdoc_coverage = ctx.attrs.rustdoc_coverage[RunInfo], transitive_dependency_symlinks_tool = ctx.attrs.transitive_dependency_symlinks_tool[RunInfo], warn_lints = ctx.attrs.warn_lints, ), @@ -73,10 +72,8 @@ system_rust_toolchain = rule( "deny_lints": attrs.list(attrs.string(), default = []), "doctests": attrs.bool(default = False), "extern_html_root_url_prefix": attrs.option(attrs.string(), default = None), - "pipelined": attrs.bool(default = False), "report_unused_deps": attrs.bool(default = False), "rustc_binary_flags": attrs.list(attrs.string(), default = []), - "rustc_check_flags": attrs.list(attrs.string(), default = []), "rustc_flags": attrs.list(attrs.string(), default = []), "rustc_target_triple": attrs.string(default = _DEFAULT_TRIPLE), "rustc_test_flags": attrs.list(attrs.string(), default = []), diff --git a/prelude/tools/audit_providers_universe.bxl b/prelude/tools/audit_providers_universe.bxl new file mode 100644 index 0000000000..6916ed7a32 --- /dev/null +++ b/prelude/tools/audit_providers_universe.bxl @@ -0,0 +1,18 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def _impl(ctx): + ts = ctx.target_universe(ctx.cli_args.universe).lookup(ctx.cli_args.target) + ctx.output.print(pstr({t: a.providers() for t, a in ctx.analysis(ts).items()})) + +run = bxl_main( + impl = _impl, + cli_args = { + "target": cli_args.target_label(), + "universe": cli_args.target_label(), + }, +) diff --git a/prelude/user/all.bzl b/prelude/user/all.bzl index 2fd2dbb095..1997bab09a 100644 --- a/prelude/user/all.bzl +++ b/prelude/user/all.bzl @@ -5,14 +5,17 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
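The new `audit_providers_universe.bxl` above can be invoked along these lines (target labels hypothetical):

```
buck2 bxl prelude//tools/audit_providers_universe.bxl:run -- \
    --target root//app:main --universe root//app:bin
```

This prints the providers of `--target` as configured inside the target universe formed from `--universe`.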
+load("@prelude//apple:apple_resource_dedupe_alias.bzl", _apple_resource_dedupe_alias_spec = "registration_spec") +load("@prelude//apple/mockingbird:mockingbird_mock.bzl", _mockingbird_mock_spec = "registration_spec") load("@prelude//apple/user:apple_resource_bundle.bzl", _apple_resource_bundle_spec = "registration_spec") load("@prelude//apple/user:apple_selective_debugging.bzl", _apple_selective_debugging_spec = "registration_spec") load("@prelude//apple/user:apple_simulators.bzl", _apple_simulators_spec = "registration_spec") load("@prelude//apple/user:apple_toolchain_override.bzl", _apple_toolchain_override_spec = "registration_spec") load("@prelude//apple/user:apple_tools.bzl", _apple_tools_spec = "registration_spec") load("@prelude//apple/user:apple_watchos_bundle.bzl", _apple_watchos_bundle_spec = "registration_spec") +load("@prelude//apple/user:apple_xcframework.bzl", _apple_xcframework_spec = "registration_spec") load("@prelude//apple/user:resource_group_map.bzl", _resource_group_map_spec = "registration_spec") -load("@prelude//cxx/user:cxx_toolchain_override.bzl", _cxx_toolchain_override_inheriting_target_platform_spec = "cxx_toolchain_override_inheriting_target_platform_registration_spec", _cxx_toolchain_override_spec = "cxx_toolchain_override_registration_spec") +load("@prelude//cxx/user:cxx_toolchain_override.bzl", _cxx_toolchain_override_spec = "cxx_toolchain_override_registration_spec") load("@prelude//cxx/user:link_group_map.bzl", _link_group_map_spec = "registration_spec") load(":cxx_headers_bundle.bzl", _cxx_headers_bundle_spec = "registration_spec") load(":extract_archive.bzl", _extract_archive_spec = "registration_spec") @@ -23,15 +26,17 @@ _all_specs = [ _apple_tools_spec, _apple_selective_debugging_spec, _apple_resource_bundle_spec, + _apple_resource_dedupe_alias_spec, + _apple_xcframework_spec, _link_group_map_spec, _resource_group_map_spec, _apple_watchos_bundle_spec, _apple_toolchain_override_spec, _cxx_headers_bundle_spec, _cxx_toolchain_override_spec, - _cxx_toolchain_override_inheriting_target_platform_spec, _apple_simulators_spec, _write_file_spec, + _mockingbird_mock_spec, ] rules = { diff --git a/prelude/user/extract_archive.bzl b/prelude/user/extract_archive.bzl index a7c76731e6..80630cc99a 100644 --- a/prelude/user/extract_archive.bzl +++ b/prelude/user/extract_archive.bzl @@ -25,8 +25,10 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: is_executable = True, allow_args = True, ) - ctx.actions.run(cmd_args(["/bin/sh", script]) - .hidden([archive, output.as_output()]), category = "extract_archive") + ctx.actions.run( + cmd_args(["/bin/sh", script], hidden = [archive, output.as_output()]), + category = "extract_archive", + ) return [DefaultInfo(default_output = output)] diff --git a/prelude/user/rule_spec.bzl b/prelude/user/rule_spec.bzl index 426b57d3f7..ad8b45113d 100644 --- a/prelude/user/rule_spec.bzl +++ b/prelude/user/rule_spec.bzl @@ -9,7 +9,8 @@ RuleRegistrationSpec = record( name = field(str), impl = field(typing.Callable), attrs = field(dict[str, Attr]), - cfg = field([None, "transition"], None), + # TODO(nga): should be `transition | None`, but `transition` does not work as type. + cfg = field(typing.Any | None, None), is_toolchain_rule = field(bool, False), doc = field(str, ""), ) diff --git a/prelude/utils/argfile.bzl b/prelude/utils/argfile.bzl new file mode 100644 index 0000000000..6512a8ed95 --- /dev/null +++ b/prelude/utils/argfile.bzl @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Create an argument file. +# Return `cmd_args` which is a single string containing `@path/to/argfile`. +# The returned `cmd_args` contains the given files as hidden artifacts. +def at_argfile( + *, + # ctx.actions + actions, + # name of the argument file + name: str | Artifact, + # the arguments to write to the argument file + args, + # pass to `ctx.actions.write` + allow_args: bool = False) -> cmd_args: + if allow_args: + args_file, _ = actions.write(name, args, allow_args = True, with_inputs = True) + else: + args_file = actions.write(name, args, with_inputs = True) + return cmd_args(args_file, format = "@{}", hidden = args) + +# Write arguments to a file, and return the file path as `cmd_args` +# with args attached as hidden artifacts. +def argfile( + *, + # ctx.actions + actions, + # name of the argument file + name: str | Artifact, + # the arguments to write to the argument file + args, + # pass to `ctx.actions.write` + allow_args: bool = False) -> cmd_args: + if allow_args: + args_file, _ = actions.write(name, args, allow_args = True, with_inputs = True) + else: + args_file = actions.write(name, args, with_inputs = True) + return cmd_args(args_file, hidden = args) diff --git a/prelude/utils/buckconfig.bzl b/prelude/utils/buckconfig.bzl index 65ffc7be7a..8d747bd09d 100644 --- a/prelude/utils/buckconfig.bzl +++ b/prelude/utils/buckconfig.bzl @@ -7,8 +7,8 @@ """Provides macros for working with .buckconfig.""" -load("@fbsource//tools/build_defs:expect.bzl", "expect") -load("@fbsource//tools/build_defs:lazy.bzl", "lazy") +load(":expect.bzl", "expect") +load(":lazy.bzl", "lazy") def _decode_raw_word(val, start, delimiter = None): """ @@ -188,4 +188,4 @@ def resolve_alias(alias): return target else: alias = target - fail("This should never happen - either the alias exists or it doesnt") + fail("This should never happen - either the alias exists or it doesn't") diff --git a/prelude/utils/build_target_pattern.bzl b/prelude/utils/build_target_pattern.bzl index 3d97932c5b..5d9dd96297 100644 --- a/prelude/utils/build_target_pattern.bzl +++ b/prelude/utils/build_target_pattern.bzl @@ -5,9 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//utils:expect.bzl", "expect") - -_ROOT_SYMBOL = "//" +ROOT_SYMBOL = "//" _TARGET_SYMBOL = ":" _RECURSIVE_SYMBOL = "..." _PATH_SYMBOL = "/" @@ -25,13 +23,30 @@ BuildTargetPattern = record( name = field([str, None], None), matches = field(typing.Callable), as_string = field(typing.Callable), + + # Exists purely for optimisation purposes. 
+ # Matching a pattern inside a loop over many targets creates a huge amount of + # unnecessary string allocations that we can avoid + _path_with_path_symbol = field(str), ) -def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: - expect(len(pattern) >= len(_ROOT_SYMBOL) + 1, "Invalid build target pattern, pattern too short: {}".format(pattern)) +BuildTargetPatternParseResult = record( + build_target_pattern = field([BuildTargetPattern, None], None), + error = field([str, None], default = None), +) - root_position = pattern.find(_ROOT_SYMBOL) - expect(root_position >= 0, "Invalid build target pattern, pattern should started with `{}` or a cell name followed by `{}`: ".format(_ROOT_SYMBOL, _ROOT_SYMBOL, pattern)) +def try_parse_build_target_pattern(pattern: str) -> BuildTargetPatternParseResult: + """ + This function tries to parse a build target pattern. If parsing fails, it returns the error message. + """ + if not (len(pattern) >= len(ROOT_SYMBOL) + 1): + err_msg = "Invalid build target pattern, pattern too short: {}".format(pattern) + return BuildTargetPatternParseResult(error = err_msg) + + root_position = pattern.find(ROOT_SYMBOL) + if not (root_position >= 0): + err_msg = "Invalid build target pattern, pattern should start with `{}` or a cell name followed by `{}`: ".format(ROOT_SYMBOL, ROOT_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) cell = None if root_position > 0: @@ -44,7 +59,9 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: elif pattern.endswith(_RECURSIVE_SYMBOL): kind = _BuildTargetPatternKind("recursive") end_of_path_position = len(pattern) - len(_RECURSIVE_SYMBOL) - 1 - expect(pattern[end_of_path_position] == _PATH_SYMBOL, "Invalid build target pattern, `{}` should be preceded by a `{}`: {}".format(_RECURSIVE_SYMBOL, _PATH_SYMBOL, pattern)) + if not (pattern[end_of_path_position] == _PATH_SYMBOL): + err_msg = "Invalid build target pattern, `{}` should be preceded by a `{}`: {}".format(_RECURSIVE_SYMBOL, _PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) else: kind = _BuildTargetPatternKind("single") end_of_path_position = pattern.rfind(_TARGET_SYMBOL) @@ -55,19 +72,30 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: start_of_package = pattern.rfind(_PATH_SYMBOL) name = pattern[start_of_package + len(_PATH_SYMBOL):] elif end_of_path_position < root_position: - fail("Invalid build target pattern, cell name should not contain `{}`: {}".format(_PATH_SYMBOL, pattern)) + err_msg = "Invalid build target pattern, cell name should not contain `{}`: {}".format(_PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) else: name = pattern[end_of_path_position + len(_TARGET_SYMBOL):] - start_of_path_position = root_position + len(_ROOT_SYMBOL) + start_of_path_position = root_position + len(ROOT_SYMBOL) - expect(pattern[start_of_path_position] != _PATH_SYMBOL, "Invalid build target pattern, path cannot start with `{}`: {}".format(_PATH_SYMBOL, pattern)) + if not (pattern[start_of_path_position] != _PATH_SYMBOL): + err_msg = "Invalid build target pattern, path cannot start with `{}`: {}".format(_PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) path = pattern[start_of_path_position:end_of_path_position] - expect(path.find(_ROOT_SYMBOL) < 0, "Invalid build target pattern, `{}` can only appear once: {}".format(_ROOT_SYMBOL, pattern)) - expect(path.find(_RECURSIVE_SYMBOL) < 0, "Invalid build target pattern, `{}` can only appear once: 
{}".format(_RECURSIVE_SYMBOL, pattern)) - expect(path.find(_TARGET_SYMBOL) < 0, "Invalid build target pattern, `{}` can only appear once: {}".format(_TARGET_SYMBOL, pattern)) - expect(len(path) == 0 or path[-1:] != _PATH_SYMBOL, "Invalid build target pattern, path cannot end with `{}`: {}".format(_PATH_SYMBOL, pattern)) + if not (path.find(ROOT_SYMBOL) < 0): + err_msg = "Invalid build target pattern, `{}` can only appear once: {}".format(ROOT_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) + if not (path.find(_RECURSIVE_SYMBOL) < 0): + err_msg = "Invalid build target pattern, `{}` can only appear once: {}".format(_RECURSIVE_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) + if not (path.find(_TARGET_SYMBOL) < 0): + err_msg = "Invalid build target pattern, `{}` can only appear once: {}".format(_TARGET_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) + if not (len(path) == 0 or path[-1:] != _PATH_SYMBOL): + err_msg = "Invalid build target pattern, path cannot end with `{}`: {}".format(_PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) # buildifier: disable=uninitialized - self is initialized def matches(label: [Label, TargetLabel]) -> bool: @@ -85,7 +113,7 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: return True elif len(label.package) > path_pattern_length: # pattern cell//package/... matches label cell//package/subpackage:target - return label.package.startswith(self.path + _PATH_SYMBOL) + return label.package.startswith(self._path_with_path_symbol) else: return self.path == label.package else: @@ -99,10 +127,16 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: elif self.kind == _BuildTargetPatternKind("package"): return "{}//{}:".format(normalized_cell, self.path) elif self.kind == _BuildTargetPatternKind("recursive"): - return "{}//{}...".format(normalized_cell, self.path + _PATH_SYMBOL if self.path else "") + return "{}//{}...".format(normalized_cell, self._path_with_path_symbol) else: fail("Unknown build target pattern kind.") - self = BuildTargetPattern(kind = kind, cell = cell, path = path, name = name, matches = matches, as_string = as_string) + self = BuildTargetPattern(kind = kind, cell = cell, path = path, name = name, matches = matches, as_string = as_string, _path_with_path_symbol = path + _PATH_SYMBOL if path else "") + + return BuildTargetPatternParseResult(build_target_pattern = self) - return self +def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: + parse_res = try_parse_build_target_pattern(pattern) + if parse_res.error != None: + fail(parse_res.error) + return parse_res.build_target_pattern diff --git a/prelude/utils/cmd_script.bzl b/prelude/utils/cmd_script.bzl index 3d8cd25a4b..7a6c23ea89 100644 --- a/prelude/utils/cmd_script.bzl +++ b/prelude/utils/cmd_script.bzl @@ -52,4 +52,4 @@ def cmd_script( else: fail(os) - return cmd_args(wrapper).hidden(cmd) + return cmd_args(wrapper, hidden = cmd) diff --git a/prelude/utils/directory_fold.bzl b/prelude/utils/directory_fold.bzl new file mode 100644 index 0000000000..fbf2b31e48 --- /dev/null +++ b/prelude/utils/directory_fold.bzl @@ -0,0 +1,63 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +# Given a list of files, return a tree structure with the shape: +# +# type Tree = dict[component, None | Tree] +# +# Where None indicates a file, and a Tree indicates a directory. +def _build_tree(files): + tree = {} + for file in files: + map = tree + + # For every file, walk its path components and add each directory level to the map if it's not there already + components = file.split("/") + for directory_chunk in components[:-1]: + map = map.setdefault(directory_chunk, {}) + map[components[-1]] = None + + return tree + +def _reduce_tree(path, tree, directory): + files = [] + dirs = [] + for k, v in tree.items(): + path2 = path + ("/" if path else "") + k + if v == None: + files.append(path2) + else: + dirs.append(_reduce_tree(path2, v, directory)) + return directory(path, dirs, files) + +# Given a list of files, perform a reduction on the tree structure. +# The `directory` argument is a function that takes a path, a list of subdirectory results, and a list of files. +# For example, given the paths `foo/bar.txt` and `foo/baz.txt` it would be called thusly: +# +# directory("", [directory("foo", [], ["foo/bar.txt", "foo/baz.txt"])], []) +def directory_fold(files, directory): + return _reduce_tree("", _build_tree(files), directory) + +def _test_tree_functions(): + input = ["foo/bar/baz.txt", "foo/bar.txt", "foo.txt", "foo/bar/quux.txt", "foo/baz/quux.txt"] + output = { + "foo": { + "bar": {"baz.txt": None, "quux.txt": None}, + "bar.txt": None, + "baz": {"quux.txt": None}, + }, + "foo.txt": None, + } + result = _build_tree(input) + if result != output: + fail("_build_tree(), unexpected output. Wanted `{output}`, got `{result}`".format(output = output, result = result)) + + original = directory_fold(input, lambda _name, dirs, files: files + [x for xs in dirs for x in xs]) + if sorted(original) != sorted(input): + fail("directory_fold(), unexpected output. Wanted `{input}`, got `{original}`".format(input = input, original = original)) + +_test_tree_functions() diff --git a/prelude/utils/expect.bzl b/prelude/utils/expect.bzl index ed41d76636..889742519e 100644 --- a/prelude/utils/expect.bzl +++ b/prelude/utils/expect.bzl @@ -20,6 +20,7 @@ load( "is_list", "is_number", "is_string", + "is_struct", ) def expect(condition: typing.Any, message: str = "condition not expected", *format_args): @@ -32,10 +33,27 @@ def expect(condition: typing.Any, message: str = "condition not expected", *form format_args: optional arguments to format the error message with """ if not condition: - formatted_message = message.format(format_args) + formatted_message = message.format(*format_args) fail(formatted_message) -def expect_type(name: str, check: typing.Callable, desc: str, val: typing.Any): +def expect_equal(left: typing.Any, right: typing.Any, message: str | None = None, *format_args): + if left != right: + if message == None: + msg = "Expected values to be equal, but got '{}' and '{}' instead.".format(left, right) + fail(msg) + else: + formatted_message = message.format(*format_args) + fail(formatted_message) + +def expect_non_none(val, msg: str = "unexpected none", *fmt_args, **fmt_kwargs): + """ + Require the given value not be `None`. + """ + if val == None: + fail(msg.format(*fmt_args, **fmt_kwargs)) + return val + +def expect_type(name: str, check: typing.Callable[[typing.Any], bool], desc: str, val: typing.Any): """Fails if check(val) is not truthy. name, desc are used for the error message. 
Usually you shouldn't need to directly use this, and prefer the expect_* family of functions @@ -103,6 +121,9 @@ def expect_contains_all(name, val, options): for index, val in enumerate(val): expect_contains("{name}[{index}]".format(name = name, index = index), val, options) +def expect_struct(name: str, val: struct): + expect_type(name, is_struct, "struct", val) + # You'll likely want to import this struct for convenience, instead of each method separately expects = struct( type = expect_type, @@ -118,4 +139,6 @@ expects = struct( collection = expect_collection, contains = expect_contains, contains_all = expect_contains_all, + equal = expect_equal, + struct = expect_struct, ) diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index aa8c44fef7..393605d1f6 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -9,7 +9,7 @@ load("@prelude//utils:expect.bzl", "expect") def pre_order_traversal( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable = str) -> list[typing.Any]: + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ Perform a pre-order (topologically sorted) traversal of `graph` and return the ordered nodes """ @@ -46,23 +46,20 @@ def pre_order_traversal( def post_order_traversal( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable = str) -> list[typing.Any]: + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ Performs a post-order traversal of `graph`. """ - out_degrees = {node: 0 for node in graph} + out_degrees = {} rdeps = {node: [] for node in graph} for node, deps in graph.items(): - for dep in dedupe(deps): - out_degrees[node] += 1 + deps = dedupe(deps) + out_degrees[node] = len(deps) + for dep in deps: rdeps[dep].append(node) - queue = [] - - for node, out_degree in out_degrees.items(): - if out_degree == 0: - queue.append(node) + queue = [node for node, out_degree in out_degrees.items() if out_degree == 0] ordered = [] @@ -85,7 +82,7 @@ def post_order_traversal( def fail_cycle( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable) -> typing.Never: + node_formatter: typing.Callable[[typing.Any], str]) -> typing.Never: cycle = find_cycle(graph) if cycle: fail( @@ -169,52 +166,75 @@ def pre_order_traversal_by( ordered = post_order_traversal_by(roots, get_nodes_to_traverse_func) return ordered[::-1] -def breadth_first_traversal( +def depth_first_traversal( graph_nodes: dict[typing.Any, list[typing.Any]], roots: list[typing.Any]) -> list[typing.Any]: """ - Like `breadth_first_traversal_by` but the nodes are stored in the graph. + Like `depth_first_traversal_by` but the nodes are stored in the graph. 
""" def lookup(x): return graph_nodes[x] - return breadth_first_traversal_by(graph_nodes, roots, lookup) + return depth_first_traversal_by(graph_nodes, roots, lookup) + +# With following graph +# +# A +# / \ +# B C +# / \ / \ +# D E F G +# +# preorder-left-to-right starting from A will go to left leg first +# A-B-D-E-C-F-G +# +# preorder-right-to-left starting from A will go to right leg first +# A-C-G-F-B-E-D +# +GraphTraversal = enum( + "preorder-right-to-left", + "preorder-left-to-right", +) -def breadth_first_traversal_by( +def depth_first_traversal_by( graph_nodes: [dict[typing.Any, typing.Any], None], roots: list[typing.Any], get_nodes_to_traverse_func: typing.Callable, - node_formatter: typing.Callable = str) -> list[typing.Any]: + traversal: GraphTraversal = GraphTraversal("preorder-right-to-left"), + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ - Performs a breadth first traversal of `graph_nodes`, beginning + Performs a depth first traversal of `graph_nodes`, beginning with the `roots` and queuing the nodes returned by`get_nodes_to_traverse_func`. Returns a list of all visisted nodes. get_nodes_to_traverse_func(node: '_a') -> ['_a']: Starlark does not offer while loops, so this implementation - must make use of a for loop. We pop from the end of the queue - as a matter of performance. + must make use of a for loop. """ # Dictify for O(1) lookup visited = {k: None for k in roots} + stride = -1 if traversal == GraphTraversal("preorder-left-to-right") else 1 - queue = visited.keys() + stack = [] + for node in visited.keys()[::stride]: + stack.append(node) for _ in range(len(graph_nodes) if graph_nodes else 2000000000): - if not queue: + if not stack: break - node = queue.pop() - if graph_nodes: - expect(node in graph_nodes, "Expected node {} in graph nodes", node_formatter(node)) + node = stack.pop() + if graph_nodes and node not in graph_nodes: + fail("Expected node {} in graph nodes".format(node_formatter(node))) nodes_to_visit = get_nodes_to_traverse_func(node) - for node in nodes_to_visit: - if node not in visited: - visited[node] = None - queue.append(node) + if nodes_to_visit: + for node in nodes_to_visit[::stride]: + if node not in visited: + visited[node] = None + stack.append(node) - expect(not queue, "Expected to be done with graph traversal queue.") + expect(not stack, "Expected to be done with graph traversal stack.") return visited.keys() diff --git a/prelude/utils/pick.bzl b/prelude/utils/pick.bzl index ad1310eb4f..d6de059ae4 100644 --- a/prelude/utils/pick.bzl +++ b/prelude/utils/pick.bzl @@ -12,10 +12,13 @@ def pick_bin(override, underlying): return override[RunInfo] if override != None else underlying def pick_dep(override, underlying): + return pick_raw(override, underlying) + +def pick_raw(override, underlying): return override if override != None else underlying def pick_and_add(override, additional, underlying): - flags = cmd_args(pick(override, underlying)) + flags = [pick(override, underlying)] if additional: - flags.add(additional) - return flags + flags.append(additional) + return cmd_args(flags) diff --git a/prelude/utils/set.bzl b/prelude/utils/set.bzl index 14fb5e2c8b..d242f45bb5 100644 --- a/prelude/utils/set.bzl +++ b/prelude/utils/set.bzl @@ -52,9 +52,9 @@ def set(initial_entries: list[typing.Any] = []) -> set_type: def set_add(v: typing.Any) -> bool: if self.contains(v): - return True + return False self._entries[v] = None - return False + return True def set_contains(v: typing.Any) -> bool: return v in 
self._entries @@ -66,7 +66,7 @@ def set(initial_entries: list[typing.Any] = []) -> set_type: return False def set_update(values: list[typing.Any]) -> list[typing.Any]: - return filter(None, [v for v in values if not self.add(v)]) + return filter(None, [v for v in values if self.add(v)]) def set_size() -> int: return len(self._entries) diff --git a/prelude/utils/source_listing.bzl b/prelude/utils/source_listing.bzl new file mode 100644 index 0000000000..18c9dc2b15 --- /dev/null +++ b/prelude/utils/source_listing.bzl @@ -0,0 +1,15 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:is_buck2.bzl", "is_buck2") +load(":source_listing_impl.bzl?v2_only", "SourceListingInfoAlias", "source_listing_impl") + +SourceListingInfo = SourceListingInfoAlias + +def source_listing(): + if is_buck2(): + source_listing_impl() diff --git a/prelude/utils/source_listing_impl.bzl b/prelude/utils/source_listing_impl.bzl new file mode 100644 index 0000000000..bde180bf54 --- /dev/null +++ b/prelude/utils/source_listing_impl.bzl @@ -0,0 +1,48 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +SourceListingInfo = provider(fields = { + "sources": dict[str, Artifact], +}) + +SourceListingInfoAlias = SourceListingInfo + +def _impl(ctx): + sources = {} + for d in ctx.attrs.deps: + package = ctx.label.package + if package != "": + package += "/" + rel_loc = d.label.package.removeprefix(package) + sources.update({rel_loc + "/" + p: art for p, art in d[SourceListingInfo].sources.items()}) + + for s in ctx.attrs.srcs: + sources[s.short_path] = s + return [DefaultInfo(), SourceListingInfo(sources = sources)] + +# This rule acts sort of like a `filegroup`, except that 1) it returns all the +# source artifacts unchanged, and 2) it reports the location of all artifacts +# relative to the current package. We use this for gathering listings of the +# source files for bundled cells. +_source_listing = rule( + impl = _impl, + attrs = { + "deps": attrs.list(attrs.dep()), + "srcs": attrs.list(attrs.source()), + }, +) + +def source_listing_impl(): + package = package_name() + if package != "": + package += "/" + _source_listing( + name = "source_listing", + srcs = glob(["**/*", "**/.*"]), + deps = ["//" + package + s + ":source_listing" for s in __internal__.sub_packages()], + visibility = ["PUBLIC"], + ) diff --git a/prelude/utils/type_defs.bzl b/prelude/utils/type_defs.bzl index 79ee96ba98..3ab5d21b5f 100644 --- a/prelude/utils/type_defs.bzl +++ b/prelude/utils/type_defs.bzl @@ -88,7 +88,7 @@ def is_bool(arg): """Checks if provided instance is a boolean value. Args: - arg: An instance ot check. type: Any + arg: An instance to check. type: Any Returns: True for boolean values, False otherwise. rtype: bool @@ -101,7 +101,7 @@ def is_number(arg): """Checks if provided instance is a number value. Args: - arg: An instance ot check. type: Any + arg: An instance to check. type: Any Returns: True for number values, False otherwise. 
rtype: bool diff --git a/prelude/utils/utils.bzl b/prelude/utils/utils.bzl index 1cf1ea7701..658629fcae 100644 --- a/prelude/utils/utils.bzl +++ b/prelude/utils/utils.bzl @@ -7,9 +7,17 @@ # General utilities shared between multiple rules. +load("@prelude//utils:expect.bzl", "expect") + def value_or(x: [None, typing.Any], default: typing.Any) -> typing.Any: return default if x == None else x +def values_or(*xs: typing.Any | None) -> typing.Any | None: + for x in xs: + if x != None: + return x + return None + # Flatten a list of lists into a list def flatten(xss: list[list[typing.Any]]) -> list[typing.Any]: return [x for xs in xss for x in xs] @@ -18,20 +26,6 @@ def flatten(xss: list[list[typing.Any]]) -> list[typing.Any]: def flatten_dict(xss: list[dict[typing.Any, typing.Any]]) -> dict[typing.Any, typing.Any]: return {k: v for xs in xss for k, v in xs.items()} -# Fail if given condition is not met. -def expect(x: bool, msg: str = "condition not expected", *fmt): - if not x: - fmt_msg = msg.format(*fmt) - fail(fmt_msg) - -def expect_non_none(val, msg: str = "unexpected none", *fmt_args, **fmt_kwargs): - """ - Require the given value not be `None`. - """ - if val == None: - fail(msg.format(*fmt_args, **fmt_kwargs)) - return val - def from_named_set(srcs: [dict[str, Artifact | Dependency], list[Artifact | Dependency]]) -> dict[str, Artifact | Dependency]: """ Normalize parameters of optionally named sources to a dictionary mapping @@ -62,9 +56,6 @@ def from_named_set(srcs: [dict[str, Artifact | Dependency], list[Artifact | Depe def map_idx(key: typing.Any, vals: list[typing.Any]) -> list[typing.Any]: return [x[key] for x in vals] -def filter_idx(key: typing.Any, vals: list[typing.Any]) -> list[typing.Any]: - return [x for x in vals if key in x] - def filter_and_map_idx(key: typing.Any, vals: list[typing.Any]) -> list[typing.Any]: return [x[key] for x in vals if key in x] @@ -75,7 +66,7 @@ def idx(x: [typing.Any, None], key: typing.Any) -> [typing.Any, None]: def dedupe_by_value(vals: list[typing.Any]) -> list[typing.Any]: return {val: None for val in vals}.keys() -def map_val(func: typing.Callable, val: [typing.Any, None]) -> [typing.Any, None]: +def map_val(func: typing.Callable[[typing.Any], typing.Any], val: [typing.Any, None]) -> [typing.Any, None]: """ If `val` if `None`, return `None`, else apply `func` to `val` and return the result. diff --git a/prelude/validation_deps.bzl b/prelude/validation_deps.bzl new file mode 100644 index 0000000000..a057e353d6 --- /dev/null +++ b/prelude/validation_deps.bzl @@ -0,0 +1,18 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
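A sketch of how the helper defined just below is typically wired into a rule; the rule and tool here are hypothetical. Attaching the collected outputs as hidden inputs means building the target also builds its `validation_deps`.

```bzl
load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_TYPE", "get_validation_deps_outputs")

def _my_rule_impl(ctx: AnalysisContext) -> list[Provider]:
    out = ctx.actions.declare_output(ctx.label.name)
    ctx.actions.run(
        cmd_args(
            ctx.attrs._tool[RunInfo],
            out.as_output(),
            # The validation outputs are not read by the tool; they are attached
            # so that their producing actions must succeed before this one runs.
            hidden = get_validation_deps_outputs(ctx),
        ),
        category = "my_rule",
    )
    return [DefaultInfo(default_output = out)]

my_rule = rule(impl = _my_rule_impl, attrs = {
    "validation_deps": VALIDATION_DEPS_ATTR_TYPE,
    # Hypothetical checker tool.
    "_tool": attrs.exec_dep(providers = [RunInfo], default = "root//tools:checker"),
})
```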
diff --git a/prelude/validation_deps.bzl b/prelude/validation_deps.bzl
new file mode 100644
index 0000000000..a057e353d6
--- /dev/null
+++ b/prelude/validation_deps.bzl
@@ -0,0 +1,18 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+VALIDATION_DEPS_ATTR_NAME = "validation_deps"
+VALIDATION_DEPS_ATTR_TYPE = attrs.set(attrs.dep(), sorted = True, default = [])
+
+def get_validation_deps_outputs(ctx: AnalysisContext) -> list[Artifact]:
+    artifacts = []
+    if hasattr(ctx.attrs, VALIDATION_DEPS_ATTR_NAME):
+        validation_deps = getattr(ctx.attrs, VALIDATION_DEPS_ATTR_NAME)
+        for dep in validation_deps:
+            default_info = dep[DefaultInfo]
+            artifacts += default_info.default_outputs
+    return artifacts
diff --git a/prelude/windows/tools/BUCK.v2 b/prelude/windows/tools/BUCK.v2
index bda1136989..a8fc018764 100644
--- a/prelude/windows/tools/BUCK.v2
+++ b/prelude/windows/tools/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
 prelude = native
 
 prelude.export_file(
diff --git a/prelude/worker_tool.bzl b/prelude/worker_tool.bzl
index 7b1bad6cee..1cac6e3253 100644
--- a/prelude/worker_tool.bzl
+++ b/prelude/worker_tool.bzl
@@ -28,9 +28,9 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]:
     expect(worker_tool_run_info != None, "Worker tool executable must have a RunInfo!")
 
     worker_tool_runner = ctx.attrs._worker_tool_runner[RunInfo]
-    worker_tool_cmd = cmd_args(worker_tool_runner)
-    worker_tool_cmd.add("--worker-tool")
-    worker_tool_cmd.add(worker_tool_run_info)
+    worker_tool_cmd = [worker_tool_runner]
+    worker_tool_cmd.append("--worker-tool")
+    worker_tool_cmd.append(worker_tool_run_info)
 
     worker_args = ctx.attrs.args
     if worker_args:
@@ -40,8 +40,8 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]:
         allow_args = True,
     )
 
-    worker_tool_cmd.add("--worker-args-file")
-    worker_tool_cmd.add(worker_args_file)
+    worker_tool_cmd.append("--worker-args-file")
+    worker_tool_cmd.append(worker_args_file)
 
     worker_env = ctx.attrs.env
     if worker_env:
@@ -56,9 +56,10 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]:
         allow_args = True,
     )
 
-    worker_tool_cmd.add("--worker-env-file")
-    worker_tool_cmd.add(env_args_file)
+    worker_tool_cmd.append("--worker-env-file")
+    worker_tool_cmd.append(env_args_file)
 
+    worker_tool_cmd = cmd_args(worker_tool_cmd)
     return [
         DefaultInfo(),
         RunInfo(
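The `get_validation_deps_outputs` helper above only collects default outputs; a rule opts in by declaring the attribute and threading those outputs into its actions as hidden inputs, so every validation dep must build successfully before the rule's own action runs. A minimal sketch under those assumptions (`my_rule` and its `touch` action are invented for illustration; `cmd_args(..., hidden = ...)` is the same pattern the `zip_file` change below uses):

    load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME", "VALIDATION_DEPS_ATTR_TYPE", "get_validation_deps_outputs")

    def _my_rule_impl(ctx: AnalysisContext) -> list[Provider]:
        # Hidden inputs force the validation deps to build (and so to pass)
        # before this action runs, without changing the command line itself.
        validation_outputs = get_validation_deps_outputs(ctx)
        out = ctx.actions.declare_output("out.txt")
        ctx.actions.run(
            cmd_args(["touch", out.as_output()], hidden = validation_outputs),
            category = "example",
        )
        return [DefaultInfo(default_output = out)]

    my_rule = rule(
        impl = _my_rule_impl,
        attrs = {
            VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE,
        },
    )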
diff --git a/prelude/zip_file/tools/BUCK.v2 b/prelude/zip_file/tools/BUCK.v2
index 36cc2b8b9a..560920dfcd 100644
--- a/prelude/zip_file/tools/BUCK.v2
+++ b/prelude/zip_file/tools/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
 python_bootstrap_binary(
     name = "unzip",
     main = "unzip.py",
diff --git a/prelude/zip_file/tools/unzip.py b/prelude/zip_file/tools/unzip.py
index e571c3987f..3ec289156e 100644
--- a/prelude/zip_file/tools/unzip.py
+++ b/prelude/zip_file/tools/unzip.py
@@ -28,6 +28,11 @@ def do_unzip(archive, output_dir):
         # That way we don't need to pass `target_is_directory` argument to `os.symlink` function.
         for info in (i for i in z.infolist() if not _is_symlink(i)):
             z.extract(info, path=output_dir)
+            if _is_executable(info):
+                os.chmod(
+                    os.path.join(output_dir, info.filename),
+                    _file_attributes(info) | stat.S_IXUSR,
+                )
         for info in (i for i in z.infolist() if _is_symlink(i)):
             symlink_path = os.path.join(output_dir, info.filename)
             symlink_dst = z.read(info).decode("utf-8")
@@ -54,6 +59,10 @@ def _is_symlink(zip_info):
     return stat.S_ISLNK(_file_attributes(zip_info))
 
 
+def _is_executable(zip_info):
+    return stat.S_IMODE(_file_attributes(zip_info)) & stat.S_IXUSR
+
+
 def main():
     args = _parse_args()
     print("Source zip is: {}".format(args.src), file=sys.stderr)
diff --git a/prelude/zip_file/zip_file.bzl b/prelude/zip_file/zip_file.bzl
index 79b8902158..3cf3c8017c 100644
--- a/prelude/zip_file/zip_file.bzl
+++ b/prelude/zip_file/zip_file.bzl
@@ -8,7 +8,7 @@
 load("@prelude//decls/toolchains_common.bzl", "toolchains_common")
 load(":zip_file_toolchain.bzl", "ZipFileToolchainInfo")
 
-def zip_file_impl(ctx: AnalysisContext) -> list[Provider]:
+def _zip_file_impl(ctx: AnalysisContext) -> list[Provider]:
     """
     zip_file() rule implementation
 
@@ -29,42 +29,42 @@ def zip_file_impl(ctx: AnalysisContext) -> list[Provider]:
     zip_srcs = ctx.attrs.zip_srcs
     srcs = ctx.attrs.srcs
 
-    create_zip_cmd = cmd_args([
+    create_zip_cmd = [
         create_zip_tool,
         "--output_path",
         output.as_output(),
         "--on_duplicate_entry",
         on_duplicate_entry if on_duplicate_entry else "overwrite",
-    ])
+    ]
 
     if srcs:
-        srcs_file_cmd = cmd_args()
-
-        # add artifact and is_source flag pair
-        for src in srcs:
-            srcs_file_cmd.add(src)
-            srcs_file_cmd.add(src.short_path)
-            srcs_file_cmd.add(str(src.is_source))
+        srcs_file_cmd = cmd_args(
+            [
+                [src, src.short_path, str(src.is_source)]
+                for src in srcs
+            ],
+        )
 
         entries_file = ctx.actions.write("entries", srcs_file_cmd)
-        create_zip_cmd.add("--entries_file")
-        create_zip_cmd.add(entries_file)
-        create_zip_cmd.hidden(srcs)
+        create_zip_cmd.append("--entries_file")
+        create_zip_cmd.append(entries_file)
+        create_zip_cmd.append(cmd_args(hidden = srcs))
 
     if zip_srcs:
-        create_zip_cmd.add("--zip_sources")
-        create_zip_cmd.add(zip_srcs)
+        create_zip_cmd.append("--zip_sources")
+        create_zip_cmd.append(zip_srcs)
 
     if entries_to_exclude:
-        create_zip_cmd.add("--entries_to_exclude")
-        create_zip_cmd.add(entries_to_exclude)
+        create_zip_cmd.append("--entries_to_exclude")
+        create_zip_cmd.append(entries_to_exclude)
 
-    ctx.actions.run(create_zip_cmd, category = "zip")
+    ctx.actions.run(cmd_args(create_zip_cmd), category = "zip")
 
     return [DefaultInfo(default_output = output)]
 
 implemented_rules = {
-    "zip_file": zip_file_impl,
+    "zip_file": _zip_file_impl,
 }
 
 extra_attributes = {