From c7b2268340a0601ac359023ca10bc747255a88be Mon Sep 17 00:00:00 2001
From: "Worker Pants (Pantsbuild GitHub Automation Bot)" <133242086+WorkerPants@users.noreply.github.com>
Date: Tue, 12 Nov 2024 17:07:34 -0500
Subject: [PATCH] Update docs site for version 2.24.0a0 (#294)

Docs from https://github.com/pantsbuild/pants/releases/tag/release_2.24.0a0

---
 .../docs/ad-hoc-tools/_category_.json | 4 + .../integrating-new-tools-without-plugins.mdx | 98 + .../docs/contributions/_category_.json | 4 + .../contributions/development/_category_.json | 4 + .../debugging-and-benchmarking.mdx | 126 + .../development/developing-rust.mdx | 124 + .../docs/contributions/development/index.mdx | 15 + .../development/internal-architecture.mdx | 89 + .../maintenance-tasks-and-scripts.mdx | 122 + .../running-pants-from-sources.mdx | 31 + .../development/setting-up-pants.mdx | 118 + .../contributions/development/style-guide.mdx | 424 + .../version-2.24/docs/contributions/index.mdx | 165 + .../contributions/releases/_category_.json | 4 + .../github-actions-linux-aarch64-runners.mdx | 98 + .../github-actions-macos-arm64-runners.mdx | 211 + .../docs/contributions/releases/index.mdx | 11 + .../releases/release-process.mdx | 190 + .../releases/release-strategy.mdx | 100 + .../version-2.24/docs/docker/_category_.json | 4 + .../version-2.24/docs/docker/index.mdx | 412 + .../docs/docker/tagging-docker-images.mdx | 339 + .../docs/getting-started/_category_.json | 4 + .../example-projects-and-repositories.mdx | 33 + .../getting-started/incremental-adoption.mdx | 109 + .../docs/getting-started/index.mdx | 18 + .../getting-started/initial-configuration.mdx | 111 + .../docs/getting-started/installing-pants.mdx | 71 + .../getting-started/manual-installation.mdx | 50 + .../docs/getting-started/prerequisites.mdx | 63 + .../version-2.24/docs/go/_category_.json | 4 + versioned_docs/version-2.24/docs/go/index.mdx | 321 + .../docs/go/integrations/_category_.json | 4 + .../docs/go/integrations/index.mdx | 10 + .../docs/go/integrations/protobuf.mdx | 172 + .../docs/go/private-modules/_category_.json | 4 + .../docs/go/private-modules/index.mdx | 46 + .../version-2.24/docs/helm/_category_.json | 4 + .../version-2.24/docs/helm/deployments.mdx | 363 + .../version-2.24/docs/helm/index.mdx | 547 + .../version-2.24/docs/helm/kubeconform.mdx | 46 + .../docs/introduction/_category_.json | 4 + .../docs/introduction/how-does-pants-work.mdx | 96 + .../docs/introduction/welcome-to-pants.mdx | 72 + .../docs/javascript/_category_.json | 4 + .../docs/javascript/overview/_category_.json | 4 + .../overview/enabling-javascript-support.mdx | 76 + .../docs/javascript/overview/index.mdx | 29 + .../docs/javascript/overview/lockfiles.mdx | 65 + .../docs/javascript/overview/package.mdx | 67 + .../docs/javascript/overview/workspaces.mdx | 22 + .../version-2.24/docs/jvm/_category_.json | 4 + .../version-2.24/docs/jvm/java-and-scala.mdx | 527 + .../version-2.24/docs/jvm/kotlin.mdx | 251 + .../version-2.24/docs/python/_category_.json | 4 + .../docs/python/goals/_category_.json | 4 + .../version-2.24/docs/python/goals/check.mdx | 341 + .../version-2.24/docs/python/goals/fmt.mdx | 19 + .../version-2.24/docs/python/goals/index.mdx | 16 + .../version-2.24/docs/python/goals/lint.mdx | 33 + .../docs/python/goals/package.mdx | 232 + .../docs/python/goals/publish.mdx | 79 + .../version-2.24/docs/python/goals/repl.mdx | 86 + .../version-2.24/docs/python/goals/run.mdx | 96 + .../version-2.24/docs/python/goals/test.mdx | 615 + .../docs/python/integrations/_category_.json | 4 
+ .../docs/python/integrations/aws-lambda.mdx | 283 + .../integrations/google-cloud-functions.mdx | 214 + .../docs/python/integrations/index.mdx | 15 + .../docs/python/integrations/jupyter.mdx | 31 + .../python/integrations/protobuf-and-grpc.mdx | 246 + .../docs/python/integrations/pyoxidizer.mdx | 238 + .../docs/python/integrations/thrift.mdx | 146 + .../docs/python/overview/_category_.json | 4 + .../overview/building-distributions.mdx | 212 + .../overview/enabling-python-support.mdx | 71 + .../docs/python/overview/index.mdx | 29 + .../overview/interpreter-compatibility.mdx | 163 + .../overview/linters-and-formatters.mdx | 312 + .../docs/python/overview/lockfiles.mdx | 285 + .../version-2.24/docs/python/overview/pex.mdx | 128 + .../overview/third-party-dependencies.mdx | 488 + .../docs/releases/_category_.json | 4 + .../docs/releases/deprecation-policy.mdx | 108 + .../docs/releases/upgrade-tips.mdx | 77 + .../version-2.24/docs/shell/_category_.json | 4 + .../version-2.24/docs/shell/index.mdx | 393 + .../docs/shell/run-shell-commands.mdx | 56 + .../docs/shell/self-extractable-archives.mdx | 265 + .../version-2.24/docs/sql/_category_.json | 4 + .../version-2.24/docs/sql/index.mdx | 54 + .../docs/terraform/_category_.json | 4 + .../version-2.24/docs/terraform/index.mdx | 194 + .../docs/tutorials/_category_.json | 4 + .../tutorials/advanced-plugin-concepts.mdx | 678 + .../docs/tutorials/create-a-new-goal.mdx | 409 + .../docs/tutorials/testing-plugins.mdx | 293 + .../docs/using-pants/_category_.json | 4 + .../using-pants/advanced-target-selection.mdx | 227 + .../docs/using-pants/anonymous-telemetry.mdx | 88 + .../docs/using-pants/assets-and-archives.mdx | 159 + .../docs/using-pants/command-line-help.mdx | 109 + .../docs/using-pants/environments.mdx | 269 + .../generating-version-tags-from-git.mdx | 77 + .../using-pants/key-concepts/_category_.json | 4 + .../using-pants/key-concepts/backends.mdx | 122 + .../docs/using-pants/key-concepts/goals.mdx | 121 + .../docs/using-pants/key-concepts/index.mdx | 12 + .../docs/using-pants/key-concepts/options.mdx | 400 + .../using-pants/key-concepts/source-roots.mdx | 364 + .../key-concepts/targets-and-build-files.mdx | 473 + .../using-pants/project-introspection.mdx | 386 + .../_category_.json | 4 + .../remote-caching-and-execution/index.mdx | 58 + .../remote-caching.mdx | 99 + .../remote-execution.mdx | 100 + .../restricted-internet-access.mdx | 110 + .../docs/using-pants/setting-up-an-ide.mdx | 80 + .../troubleshooting-common-issues.mdx | 257 + .../docs/using-pants/using-pants-in-ci.mdx | 249 + .../using-pants/validating-dependencies.mdx | 423 + .../docs/writing-plugins/_category_.json | 4 + .../common-plugin-tasks/_category_.json | 4 + .../common-plugin-tasks/add-a-formatter.mdx | 185 + .../common-plugin-tasks/add-a-linter.mdx | 236 + .../common-plugin-tasks/add-a-repl.mdx | 114 + .../common-plugin-tasks/add-a-typechecker.mdx | 53 + .../common-plugin-tasks/add-codegen.mdx | 295 + .../allowing-tool-export.mdx | 61 + .../custom-python-artifact-kwargs.mdx | 223 + .../common-plugin-tasks/index.mdx | 16 + .../common-plugin-tasks/plugin-lockfiles.mdx | 149 + .../plugin-upgrade-guide.mdx | 837 + .../common-plugin-tasks/run-tests.mdx | 270 + .../common-subsystem-tasks.mdx | 174 + .../docs/writing-plugins/macros.mdx | 147 + .../docs/writing-plugins/overview.mdx | 210 + .../the-rules-api/_category_.json | 4 + .../the-rules-api/concepts.mdx | 336 + .../the-rules-api/file-system.mdx | 311 + .../the-rules-api/goal-rules.mdx | 208 + 
.../writing-plugins/the-rules-api/index.mdx | 20 + .../the-rules-api/installing-tools.mdx | 227 + .../logging-and-dynamic-output.mdx | 35 + .../the-rules-api/options-and-subsystems.mdx | 100 + .../the-rules-api/processes.mdx | 139 + .../rules-and-the-target-api.mdx | 417 + .../the-rules-api/testing-plugins.mdx | 600 + .../the-rules-api/tips-and-debugging.mdx | 143 + .../the-rules-api/union-rules-advanced.mdx | 84 + .../the-target-api/_category_.json | 4 + .../the-target-api/concepts.mdx | 236 + .../the-target-api/creating-new-fields.mdx | 278 + .../the-target-api/creating-new-targets.mdx | 103 + .../extending-existing-targets.mdx | 39 + .../writing-plugins/the-target-api/index.mdx | 13 + .../build-file-symbols/PANTS_VERSION.mdx | 10 + .../build-file-symbols/__defaults__.mdx | 15 + .../__dependencies_rules__.mdx | 13 + .../__dependents_rules__.mdx | 13 + .../build-file-symbols/_category_.json | 9 + .../build-file-symbols/build_file_dir.mdx | 17 + .../build-file-symbols/duplicate_rule.mdx | 13 + .../reference/build-file-symbols/env.mdx | 13 + .../build-file-symbols/http_source.mdx | 13 + .../build-file-symbols/jvm_exclude.mdx | 13 + .../build-file-symbols/node_build_script.mdx | 15 + .../build-file-symbols/node_test_script.mdx | 15 + .../build-file-symbols/parametrize.mdx | 15 + .../build-file-symbols/per_platform.mdx | 45 + .../build-file-symbols/python_artifact.mdx | 13 + .../build-file-symbols/scala_exclude.mdx | 13 + .../reference/build-file-symbols/setup_py.mdx | 13 + .../build-file-symbols/shading_keep.mdx | 13 + .../build-file-symbols/shading_relocate.mdx | 13 + .../build-file-symbols/shading_rename.mdx | 13 + .../build-file-symbols/shading_zap.mdx | 13 + .../stevedore_namespace.mdx | 24 + .../version-2.24/reference/global-options.mdx | 1704 + .../reference/goals/_category_.json | 9 + .../version-2.24/reference/goals/check.mdx | 61 + .../version-2.24/reference/goals/complete.mdx | 59 + .../reference/goals/count-loc.mdx | 44 + .../reference/goals/dependencies.mdx | 110 + .../reference/goals/dependents.mdx | 110 + .../reference/goals/experimental-bsp.mdx | 112 + .../reference/goals/experimental-deploy.mdx | 67 + .../reference/goals/experimental-explorer.mdx | 66 + .../reference/goals/export-codegen.mdx | 41 + .../version-2.24/reference/goals/export.mdx | 187 + .../version-2.24/reference/goals/filedeps.mdx | 109 + .../version-2.24/reference/goals/fix.mdx | 97 + .../version-2.24/reference/goals/fmt.mdx | 81 + .../reference/goals/generate-lockfiles.mdx | 104 + .../reference/goals/generate-snapshots.mdx | 41 + .../goals/go-dump-import-path-mapping.mdx | 41 + .../reference/goals/go-export-cgo-codegen.mdx | 44 + .../reference/goals/go-generate.mdx | 65 + .../goals/go-show-package-analysis.mdx | 44 + .../reference/goals/help-advanced.mdx | 40 + .../version-2.24/reference/goals/help-all.mdx | 40 + .../version-2.24/reference/goals/help.mdx | 40 + .../goals/java-dump-source-analysis.mdx | 46 + .../reference/goals/jvm-symbol-map.mdx | 43 + .../goals/kotlin-dump-source-analysis.mdx | 46 + .../version-2.24/reference/goals/lint.mdx | 113 + .../version-2.24/reference/goals/list.mdx | 81 + .../reference/goals/migrate-call-by-name.mdx | 69 + .../version-2.24/reference/goals/package.mdx | 41 + .../version-2.24/reference/goals/paths.mdx | 81 + .../version-2.24/reference/goals/peek.mdx | 95 + .../version-2.24/reference/goals/publish.mdx | 53 + .../reference/goals/py-constraints.mdx | 72 + .../goals/python-dump-source-analysis.mdx | 62 + .../version-2.24/reference/goals/repl.mdx | 87 + 
.../version-2.24/reference/goals/roots.mdx | 66 + .../version-2.24/reference/goals/run.mdx | 78 + .../goals/scala-dump-source-analysis.mdx | 48 + .../version-2.24/reference/goals/tailor.mdx | 157 + .../version-2.24/reference/goals/test.mdx | 309 + .../reference/goals/update-build-files.mdx | 100 + .../version-2.24/reference/goals/version.mdx | 40 + .../version-2.24/reference/help-all.json | 160645 +++++++++++++++ .../reference/subsystems/_category_.json | 9 + .../subsystems/add-trailing-comma.mdx | 158 + .../subsystems/anonymous-telemetry.mdx | 72 + .../reference/subsystems/apache-thrift.mdx | 75 + .../reference/subsystems/autoflake.mdx | 158 + .../reference/subsystems/avro.mdx | 48 + .../reference/subsystems/bandit.mdx | 154 + .../reference/subsystems/black.mdx | 190 + .../version-2.24/reference/subsystems/buf.mdx | 230 + .../subsystems/build-deprecations-fixer.mdx | 53 + .../reference/subsystems/buildifier.mdx | 173 + .../reference/subsystems/cc-external.mdx | 252 + .../reference/subsystems/cc-infer.mdx | 66 + .../version-2.24/reference/subsystems/cc.mdx | 184 + .../reference/subsystems/changed.mdx | 79 + .../reference/subsystems/clang-format.mdx | 158 + .../version-2.24/reference/subsystems/cli.mdx | 62 + .../reference/subsystems/coursier.mdx | 174 + .../reference/subsystems/coverage-py.mdx | 251 + .../version-2.24/reference/subsystems/cue.mdx | 168 + .../reference/subsystems/debug-adapter.mdx | 64 + .../reference/subsystems/debugpy.mdx | 144 + .../reference/subsystems/docformatter.mdx | 158 + .../reference/subsystems/docker.mdx | 379 + .../subsystems/dockerfile-parser.mdx | 116 + .../subsystems/download-terraform.mdx | 204 + .../subsystems/environments-preview.mdx | 63 + .../reference/subsystems/filter.mdx | 135 + .../reference/subsystems/flake8.mdx | 225 + .../reference/subsystems/go-test.mdx | 284 + .../reference/subsystems/go-vet.mdx | 48 + .../reference/subsystems/gofmt.mdx | 68 + .../reference/subsystems/golang.mdx | 364 + .../reference/subsystems/golangci-lint.mdx | 198 + .../subsystems/google-java-format.mdx | 130 + .../subsystems/grpc-python-plugin.mdx | 136 + .../reference/subsystems/hadolint.mdx | 198 + .../reference/subsystems/helm-infer.mdx | 91 + .../reference/subsystems/helm-k8s-parser.mdx | 100 + .../subsystems/helm-post-renderer.mdx | 100 + .../reference/subsystems/helm-unittest.mdx | 177 + .../reference/subsystems/helm.mdx | 302 + .../reference/subsystems/ipython.mdx | 128 + .../reference/subsystems/isort.mdx | 196 + .../reference/subsystems/jar_tool.mdx | 104 + .../reference/subsystems/jarjar.mdx | 131 + .../reference/subsystems/java-avro.mdx | 104 + .../reference/subsystems/java-infer.mdx | 80 + .../reference/subsystems/java-parser.mdx | 104 + .../reference/subsystems/java-thrift.mdx | 52 + .../reference/subsystems/javac.mdx | 64 + .../reference/subsystems/junit.mdx | 134 + .../version-2.24/reference/subsystems/jvm.mdx | 193 + .../reference/subsystems/kotlin-infer.mdx | 62 + .../reference/subsystems/kotlin-parser.mdx | 104 + .../reference/subsystems/kotlin.mdx | 62 + .../reference/subsystems/kotlinc.mdx | 70 + .../reference/subsystems/ktlint.mdx | 116 + .../reference/subsystems/kubeconform.mdx | 223 + .../reference/subsystems/makeself.mdx | 136 + .../reference/subsystems/mypy-protobuf.mdx | 100 + .../reference/subsystems/mypy.mdx | 212 + .../reference/subsystems/nfpm.mdx | 159 + .../reference/subsystems/nodejs-infer.mdx | 88 + .../reference/subsystems/nodejs-test.mdx | 101 + .../reference/subsystems/nodejs.mdx | 256 + .../reference/subsystems/openapi-format.mdx | 94 
+ .../subsystems/openapi-generator.mdx | 104 + .../reference/subsystems/openapi.mdx | 48 + .../subsystems/pex-binary-defaults.mdx | 50 + .../reference/subsystems/pex-cli.mdx | 156 + .../version-2.24/reference/subsystems/pex.mdx | 99 + .../reference/subsystems/preamble.mdx | 79 + .../reference/subsystems/prettier.mdx | 94 + .../subsystems/protobuf-java-grpc.mdx | 104 + .../reference/subsystems/protoc.mdx | 162 + .../reference/subsystems/pydocstyle.mdx | 190 + .../subsystems/pyenv-python-provider.mdx | 169 + .../reference/subsystems/pylint.mdx | 198 + .../reference/subsystems/pyoxidizer.mdx | 146 + .../reference/subsystems/pyright.mdx | 112 + .../reference/subsystems/pytest.mdx | 214 + .../reference/subsystems/python-bootstrap.mdx | 116 + ...ython-build-standalone-python-provider.mdx | 79 + .../subsystems/python-grpclib-protobuf.mdx | 100 + .../reference/subsystems/python-infer.mdx | 252 + .../subsystems/python-native-code.mdx | 72 + .../reference/subsystems/python-protobuf.mdx | 94 + .../reference/subsystems/python-repos.mdx | 120 + .../reference/subsystems/python-thrift.mdx | 68 + .../reference/subsystems/python.mdx | 539 + .../reference/subsystems/pytype.mdx | 172 + .../reference/subsystems/pyupgrade.mdx | 158 + .../reference/subsystems/redocly.mdx | 64 + .../reference/subsystems/regex-lint.mdx | 114 + .../reference/subsystems/ruff.mdx | 286 + .../reference/subsystems/rust.mdx | 68 + .../reference/subsystems/rustfmt.mdx | 48 + .../reference/subsystems/scala-infer.mdx | 92 + .../reference/subsystems/scala-parser.mdx | 104 + .../reference/subsystems/scala.mdx | 64 + .../reference/subsystems/scalac.mdx | 68 + .../reference/subsystems/scalafix.mdx | 179 + .../reference/subsystems/scalafmt.mdx | 145 + .../reference/subsystems/scalapb.mdx | 122 + .../reference/subsystems/scalatest.mdx | 136 + .../version-2.24/reference/subsystems/scc.mdx | 154 + .../reference/subsystems/scrooge.mdx | 104 + .../reference/subsystems/semgrep.mdx | 192 + .../subsystems/setup-py-generation.mdx | 63 + .../reference/subsystems/setuptools-scm.mdx | 128 + .../reference/subsystems/setuptools.mdx | 82 + .../reference/subsystems/shell-setup.mdx | 109 + .../reference/subsystems/shell-test.mdx | 48 + .../reference/subsystems/shellcheck.mdx | 180 + .../reference/subsystems/shfmt.mdx | 180 + .../reference/subsystems/shunit2.mdx | 148 + .../reference/subsystems/source.mdx | 84 + .../reference/subsystems/spectral.mdx | 94 + .../reference/subsystems/sqlfluff.mdx | 208 + .../reference/subsystems/stats.mdx | 93 + .../reference/subsystems/strip-jar.mdx | 104 + .../subsystems/subprocess-environment.mdx | 57 + .../reference/subsystems/system-binaries.mdx | 55 + .../reference/subsystems/taplo.mdx | 206 + .../reference/subsystems/terraform-fmt.mdx | 48 + .../subsystems/terraform-hcl2-parser.mdx | 100 + .../reference/subsystems/terraform-tfsec.mdx | 192 + .../subsystems/terraform-validate.mdx | 48 + .../reference/subsystems/thrift.mdx | 60 + .../reference/subsystems/trufflehog.mdx | 164 + .../reference/subsystems/twine.mdx | 210 + .../reference/subsystems/visibility.mdx | 62 + .../reference/subsystems/workunit-logger.mdx | 62 + .../reference/subsystems/yamllint.mdx | 223 + .../reference/subsystems/yapf.mdx | 192 + .../reference/targets/_category_.json | 9 + .../reference/targets/adhoc_tool.mdx | 337 + .../reference/targets/archive.mdx | 114 + .../reference/targets/avro_source.mdx | 105 + .../reference/targets/avro_sources.mdx | 134 + .../reference/targets/cc_source.mdx | 89 + .../reference/targets/cc_sources.mdx | 92 + 
.../reference/targets/code_quality_tool.mdx | 126 + .../reference/targets/cue_package.mdx | 63 + .../reference/targets/debian_package.mdx | 126 + .../reference/targets/deploy_jar.mdx | 199 + .../reference/targets/docker_environment.mdx | 468 + .../reference/targets/docker_image.mdx | 510 + .../experimental_test_shell_command.mdx | 195 + .../experimental_workspace_environment.mdx | 447 + ...xperimental_wrap_as_go_package_sources.mdx | 73 + .../experimental_wrap_as_java_sources.mdx | 73 + .../experimental_wrap_as_kotlin_sources.mdx | 73 + .../experimental_wrap_as_python_sources.mdx | 73 + .../experimental_wrap_as_resources.mdx | 73 + .../experimental_wrap_as_scala_sources.mdx | 73 + .../version-2.24/reference/targets/file.mdx | 97 + .../version-2.24/reference/targets/files.mdx | 108 + .../reference/targets/go_binary.mdx | 228 + .../version-2.24/reference/targets/go_mod.mdx | 167 + .../reference/targets/go_package.mdx | 240 + .../targets/go_third_party_package.mdx | 81 + .../reference/targets/helm_artifact.mdx | 88 + .../reference/targets/helm_chart.mdx | 298 + .../reference/targets/helm_deployment.mdx | 285 + .../reference/targets/helm_unittest_test.mdx | 101 + .../reference/targets/helm_unittest_tests.mdx | 132 + .../reference/targets/java_source.mdx | 139 + .../reference/targets/java_sources.mdx | 142 + .../reference/targets/javascript_source.mdx | 89 + .../reference/targets/javascript_sources.mdx | 121 + .../reference/targets/javascript_test.mdx | 129 + .../reference/targets/javascript_tests.mdx | 161 + .../reference/targets/jsx_source.mdx | 77 + .../reference/targets/jsx_sources.mdx | 109 + .../reference/targets/jsx_test.mdx | 129 + .../reference/targets/jsx_tests.mdx | 161 + .../reference/targets/junit_test.mdx | 144 + .../reference/targets/junit_tests.mdx | 147 + .../reference/targets/jvm_artifact.mdx | 200 + .../reference/targets/jvm_artifacts.mdx | 89 + .../reference/targets/jvm_war.mdx | 145 + .../reference/targets/kotlin_junit_test.mdx | 159 + .../reference/targets/kotlin_junit_tests.mdx | 162 + .../reference/targets/kotlin_source.mdx | 154 + .../reference/targets/kotlin_sources.mdx | 157 + .../reference/targets/kotlinc_plugin.mdx | 90 + .../reference/targets/local_environment.mdx | 452 + .../reference/targets/makeself_archive.mdx | 152 + .../reference/targets/nfpm_apk_package.mdx | 367 + .../targets/nfpm_archlinux_package.mdx | 413 + .../reference/targets/nfpm_content_dir.mdx | 136 + .../reference/targets/nfpm_content_dirs.mdx | 149 + .../reference/targets/nfpm_content_file.mdx | 226 + .../reference/targets/nfpm_content_files.mdx | 201 + .../targets/nfpm_content_symlink.mdx | 150 + .../targets/nfpm_content_symlinks.mdx | 151 + .../reference/targets/nfpm_deb_package.mdx | 573 + .../reference/targets/nfpm_rpm_package.mdx | 556 + .../reference/targets/node_package.mdx | 103 + .../targets/node_third_party_package.mdx | 89 + .../reference/targets/openapi_bundle.mdx | 78 + .../reference/targets/openapi_document.mdx | 151 + .../reference/targets/openapi_documents.mdx | 154 + .../reference/targets/openapi_source.mdx | 89 + .../reference/targets/openapi_sources.mdx | 92 + .../reference/targets/package_json.mdx | 95 + .../reference/targets/pants_requirements.mdx | 89 + .../reference/targets/pex_binaries.mdx | 387 + .../reference/targets/pex_binary.mdx | 435 + .../reference/targets/pipenv_requirements.mdx | 131 + .../reference/targets/poetry_requirements.mdx | 131 + .../reference/targets/protobuf_source.mdx | 208 + .../reference/targets/protobuf_sources.mdx | 233 + 
.../reference/targets/pyoxidizer_binary.mdx | 159 + .../targets/python_aws_lambda_function.mdx | 232 + .../targets/python_aws_lambda_layer.mdx | 228 + .../reference/targets/python_distribution.mdx | 274 + .../targets/python_google_cloud_function.mdx | 220 + .../reference/targets/python_requirement.mdx | 150 + .../reference/targets/python_requirements.mdx | 137 + .../reference/targets/python_source.mdx | 347 + .../reference/targets/python_sources.mdx | 381 + .../reference/targets/python_test.mdx | 505 + .../reference/targets/python_test_utils.mdx | 370 + .../reference/targets/python_tests.mdx | 532 + .../reference/targets/relocated_files.mdx | 119 + .../reference/targets/remote_environment.mdx | 482 + .../reference/targets/resource.mdx | 97 + .../reference/targets/resources.mdx | 108 + .../reference/targets/run_shell_command.mdx | 119 + .../reference/targets/rust_package.mdx | 75 + .../reference/targets/scala_artifact.mdx | 203 + .../reference/targets/scala_junit_test.mdx | 183 + .../reference/targets/scala_junit_tests.mdx | 228 + .../reference/targets/scala_source.mdx | 166 + .../reference/targets/scala_sources.mdx | 198 + .../reference/targets/scalac_plugin.mdx | 73 + .../reference/targets/scalatest_test.mdx | 183 + .../reference/targets/scalatest_tests.mdx | 215 + .../reference/targets/shell_command.mdx | 338 + .../reference/targets/shell_source.mdx | 101 + .../reference/targets/shell_sources.mdx | 133 + .../reference/targets/shunit2_test.mdx | 155 + .../reference/targets/shunit2_tests.mdx | 183 + .../reference/targets/sql_source.mdx | 109 + .../reference/targets/sql_sources.mdx | 122 + .../reference/targets/swift_source.mdx | 77 + .../reference/targets/swift_sources.mdx | 80 + .../reference/targets/system_binary.mdx | 115 + .../version-2.24/reference/targets/target.mdx | 67 + .../reference/targets/terraform_backend.mdx | 56 + .../targets/terraform_deployment.mdx | 89 + .../reference/targets/terraform_module.mdx | 108 + .../reference/targets/terraform_var_files.mdx | 61 + .../reference/targets/thrift_source.mdx | 134 + .../reference/targets/thrift_sources.mdx | 162 + .../reference/targets/tsx_source.mdx | 77 + .../reference/targets/tsx_sources.mdx | 109 + .../reference/targets/tsx_test.mdx | 129 + .../reference/targets/tsx_tests.mdx | 161 + .../reference/targets/typescript_source.mdx | 77 + .../reference/targets/typescript_sources.mdx | 109 + .../reference/targets/typescript_test.mdx | 129 + .../reference/targets/typescript_tests.mdx | 161 + .../reference/targets/uv_requirements.mdx | 131 + .../reference/targets/vcs_version.mdx | 131 + versioned_sidebars/version-2.24-sidebars.json | 34 + versions.json | 1 + 493 files changed, 230888 insertions(+) create mode 100644 versioned_docs/version-2.24/docs/ad-hoc-tools/_category_.json create mode 100644 versioned_docs/version-2.24/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/_category_.json create mode 100644 versioned_docs/version-2.24/docs/contributions/development/_category_.json create mode 100644 versioned_docs/version-2.24/docs/contributions/development/debugging-and-benchmarking.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/development/developing-rust.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/development/index.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/development/internal-architecture.mdx create mode 100644 
versioned_docs/version-2.24/docs/contributions/development/maintenance-tasks-and-scripts.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/development/running-pants-from-sources.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/development/setting-up-pants.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/development/style-guide.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/index.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/releases/_category_.json create mode 100644 versioned_docs/version-2.24/docs/contributions/releases/github-actions-linux-aarch64-runners.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/releases/github-actions-macos-arm64-runners.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/releases/index.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/releases/release-process.mdx create mode 100644 versioned_docs/version-2.24/docs/contributions/releases/release-strategy.mdx create mode 100644 versioned_docs/version-2.24/docs/docker/_category_.json create mode 100644 versioned_docs/version-2.24/docs/docker/index.mdx create mode 100644 versioned_docs/version-2.24/docs/docker/tagging-docker-images.mdx create mode 100644 versioned_docs/version-2.24/docs/getting-started/_category_.json create mode 100644 versioned_docs/version-2.24/docs/getting-started/example-projects-and-repositories.mdx create mode 100644 versioned_docs/version-2.24/docs/getting-started/incremental-adoption.mdx create mode 100644 versioned_docs/version-2.24/docs/getting-started/index.mdx create mode 100644 versioned_docs/version-2.24/docs/getting-started/initial-configuration.mdx create mode 100644 versioned_docs/version-2.24/docs/getting-started/installing-pants.mdx create mode 100644 versioned_docs/version-2.24/docs/getting-started/manual-installation.mdx create mode 100644 versioned_docs/version-2.24/docs/getting-started/prerequisites.mdx create mode 100644 versioned_docs/version-2.24/docs/go/_category_.json create mode 100644 versioned_docs/version-2.24/docs/go/index.mdx create mode 100644 versioned_docs/version-2.24/docs/go/integrations/_category_.json create mode 100644 versioned_docs/version-2.24/docs/go/integrations/index.mdx create mode 100644 versioned_docs/version-2.24/docs/go/integrations/protobuf.mdx create mode 100644 versioned_docs/version-2.24/docs/go/private-modules/_category_.json create mode 100644 versioned_docs/version-2.24/docs/go/private-modules/index.mdx create mode 100644 versioned_docs/version-2.24/docs/helm/_category_.json create mode 100644 versioned_docs/version-2.24/docs/helm/deployments.mdx create mode 100644 versioned_docs/version-2.24/docs/helm/index.mdx create mode 100644 versioned_docs/version-2.24/docs/helm/kubeconform.mdx create mode 100644 versioned_docs/version-2.24/docs/introduction/_category_.json create mode 100644 versioned_docs/version-2.24/docs/introduction/how-does-pants-work.mdx create mode 100644 versioned_docs/version-2.24/docs/introduction/welcome-to-pants.mdx create mode 100644 versioned_docs/version-2.24/docs/javascript/_category_.json create mode 100644 versioned_docs/version-2.24/docs/javascript/overview/_category_.json create mode 100644 versioned_docs/version-2.24/docs/javascript/overview/enabling-javascript-support.mdx create mode 100644 versioned_docs/version-2.24/docs/javascript/overview/index.mdx create mode 100644 versioned_docs/version-2.24/docs/javascript/overview/lockfiles.mdx create 
mode 100644 versioned_docs/version-2.24/docs/javascript/overview/package.mdx create mode 100644 versioned_docs/version-2.24/docs/javascript/overview/workspaces.mdx create mode 100644 versioned_docs/version-2.24/docs/jvm/_category_.json create mode 100644 versioned_docs/version-2.24/docs/jvm/java-and-scala.mdx create mode 100644 versioned_docs/version-2.24/docs/jvm/kotlin.mdx create mode 100644 versioned_docs/version-2.24/docs/python/_category_.json create mode 100644 versioned_docs/version-2.24/docs/python/goals/_category_.json create mode 100644 versioned_docs/version-2.24/docs/python/goals/check.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/fmt.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/index.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/lint.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/package.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/publish.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/repl.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/run.mdx create mode 100644 versioned_docs/version-2.24/docs/python/goals/test.mdx create mode 100644 versioned_docs/version-2.24/docs/python/integrations/_category_.json create mode 100644 versioned_docs/version-2.24/docs/python/integrations/aws-lambda.mdx create mode 100644 versioned_docs/version-2.24/docs/python/integrations/google-cloud-functions.mdx create mode 100644 versioned_docs/version-2.24/docs/python/integrations/index.mdx create mode 100644 versioned_docs/version-2.24/docs/python/integrations/jupyter.mdx create mode 100644 versioned_docs/version-2.24/docs/python/integrations/protobuf-and-grpc.mdx create mode 100644 versioned_docs/version-2.24/docs/python/integrations/pyoxidizer.mdx create mode 100644 versioned_docs/version-2.24/docs/python/integrations/thrift.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/_category_.json create mode 100644 versioned_docs/version-2.24/docs/python/overview/building-distributions.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/enabling-python-support.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/index.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/interpreter-compatibility.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/linters-and-formatters.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/lockfiles.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/pex.mdx create mode 100644 versioned_docs/version-2.24/docs/python/overview/third-party-dependencies.mdx create mode 100644 versioned_docs/version-2.24/docs/releases/_category_.json create mode 100644 versioned_docs/version-2.24/docs/releases/deprecation-policy.mdx create mode 100644 versioned_docs/version-2.24/docs/releases/upgrade-tips.mdx create mode 100644 versioned_docs/version-2.24/docs/shell/_category_.json create mode 100644 versioned_docs/version-2.24/docs/shell/index.mdx create mode 100644 versioned_docs/version-2.24/docs/shell/run-shell-commands.mdx create mode 100644 versioned_docs/version-2.24/docs/shell/self-extractable-archives.mdx create mode 100644 versioned_docs/version-2.24/docs/sql/_category_.json create mode 100644 versioned_docs/version-2.24/docs/sql/index.mdx create mode 100644 versioned_docs/version-2.24/docs/terraform/_category_.json create mode 100644 versioned_docs/version-2.24/docs/terraform/index.mdx 
create mode 100644 versioned_docs/version-2.24/docs/tutorials/_category_.json create mode 100644 versioned_docs/version-2.24/docs/tutorials/advanced-plugin-concepts.mdx create mode 100644 versioned_docs/version-2.24/docs/tutorials/create-a-new-goal.mdx create mode 100644 versioned_docs/version-2.24/docs/tutorials/testing-plugins.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/_category_.json create mode 100644 versioned_docs/version-2.24/docs/using-pants/advanced-target-selection.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/anonymous-telemetry.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/assets-and-archives.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/command-line-help.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/environments.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/generating-version-tags-from-git.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/key-concepts/_category_.json create mode 100644 versioned_docs/version-2.24/docs/using-pants/key-concepts/backends.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/key-concepts/goals.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/key-concepts/index.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/key-concepts/options.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/key-concepts/source-roots.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/key-concepts/targets-and-build-files.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/project-introspection.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/_category_.json create mode 100644 versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/index.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-caching.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-execution.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/restricted-internet-access.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/setting-up-an-ide.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/troubleshooting-common-issues.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/using-pants-in-ci.mdx create mode 100644 versioned_docs/version-2.24/docs/using-pants/validating-dependencies.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/_category_.json create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/_category_.json create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-formatter.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-linter.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-repl.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-typechecker.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-codegen.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/allowing-tool-export.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/custom-python-artifact-kwargs.mdx create mode 100644 
versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/index.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-lockfiles.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/run-tests.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/common-subsystem-tasks.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/macros.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/overview.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/_category_.json create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/concepts.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/file-system.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/goal-rules.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/index.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/installing-tools.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/logging-and-dynamic-output.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/options-and-subsystems.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/processes.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/rules-and-the-target-api.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/testing-plugins.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/tips-and-debugging.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/union-rules-advanced.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-target-api/_category_.json create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-target-api/concepts.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-fields.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-targets.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-target-api/extending-existing-targets.mdx create mode 100644 versioned_docs/version-2.24/docs/writing-plugins/the-target-api/index.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/PANTS_VERSION.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/__defaults__.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/__dependencies_rules__.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/__dependents_rules__.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/_category_.json create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/build_file_dir.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/duplicate_rule.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/env.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/http_source.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/jvm_exclude.mdx create mode 
100644 versioned_docs/version-2.24/reference/build-file-symbols/node_build_script.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/node_test_script.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/parametrize.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/per_platform.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/python_artifact.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/scala_exclude.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/setup_py.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/shading_keep.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/shading_relocate.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/shading_rename.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/shading_zap.mdx create mode 100644 versioned_docs/version-2.24/reference/build-file-symbols/stevedore_namespace.mdx create mode 100644 versioned_docs/version-2.24/reference/global-options.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/_category_.json create mode 100644 versioned_docs/version-2.24/reference/goals/check.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/complete.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/count-loc.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/dependencies.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/dependents.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/experimental-bsp.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/experimental-deploy.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/experimental-explorer.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/export-codegen.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/export.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/filedeps.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/fix.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/fmt.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/generate-lockfiles.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/generate-snapshots.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/go-dump-import-path-mapping.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/go-export-cgo-codegen.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/go-generate.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/go-show-package-analysis.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/help-advanced.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/help-all.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/help.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/java-dump-source-analysis.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/jvm-symbol-map.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/kotlin-dump-source-analysis.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/lint.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/list.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/migrate-call-by-name.mdx 
create mode 100644 versioned_docs/version-2.24/reference/goals/package.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/paths.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/peek.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/publish.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/py-constraints.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/python-dump-source-analysis.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/repl.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/roots.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/run.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/scala-dump-source-analysis.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/tailor.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/test.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/update-build-files.mdx create mode 100644 versioned_docs/version-2.24/reference/goals/version.mdx create mode 100644 versioned_docs/version-2.24/reference/help-all.json create mode 100644 versioned_docs/version-2.24/reference/subsystems/_category_.json create mode 100644 versioned_docs/version-2.24/reference/subsystems/add-trailing-comma.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/anonymous-telemetry.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/apache-thrift.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/autoflake.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/avro.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/bandit.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/black.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/buf.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/build-deprecations-fixer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/buildifier.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/cc-external.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/cc-infer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/cc.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/changed.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/clang-format.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/cli.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/coursier.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/coverage-py.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/cue.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/debug-adapter.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/debugpy.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/docformatter.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/docker.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/dockerfile-parser.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/download-terraform.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/environments-preview.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/filter.mdx create mode 100644 
versioned_docs/version-2.24/reference/subsystems/flake8.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/go-test.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/go-vet.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/gofmt.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/golang.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/golangci-lint.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/google-java-format.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/grpc-python-plugin.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/hadolint.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/helm-infer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/helm-k8s-parser.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/helm-post-renderer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/helm-unittest.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/helm.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/ipython.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/isort.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/jar_tool.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/jarjar.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/java-avro.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/java-infer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/java-parser.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/java-thrift.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/javac.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/junit.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/jvm.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/kotlin-infer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/kotlin-parser.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/kotlin.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/kotlinc.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/ktlint.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/kubeconform.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/makeself.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/mypy-protobuf.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/mypy.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/nfpm.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/nodejs-infer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/nodejs-test.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/nodejs.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/openapi-format.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/openapi-generator.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/openapi.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pex-binary-defaults.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pex-cli.mdx create mode 100644 
versioned_docs/version-2.24/reference/subsystems/pex.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/preamble.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/prettier.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/protobuf-java-grpc.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/protoc.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pydocstyle.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pyenv-python-provider.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pylint.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pyoxidizer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pyright.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pytest.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-bootstrap.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-build-standalone-python-provider.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-grpclib-protobuf.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-infer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-native-code.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-protobuf.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-repos.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python-thrift.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/python.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pytype.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/pyupgrade.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/redocly.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/regex-lint.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/ruff.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/rust.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/rustfmt.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scala-infer.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scala-parser.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scala.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scalac.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scalafix.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scalafmt.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scalapb.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scalatest.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scc.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/scrooge.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/semgrep.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/setup-py-generation.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/setuptools-scm.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/setuptools.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/shell-setup.mdx create mode 100644 
versioned_docs/version-2.24/reference/subsystems/shell-test.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/shellcheck.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/shfmt.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/shunit2.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/source.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/spectral.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/sqlfluff.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/stats.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/strip-jar.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/subprocess-environment.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/system-binaries.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/taplo.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/terraform-fmt.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/terraform-hcl2-parser.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/terraform-tfsec.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/terraform-validate.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/thrift.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/trufflehog.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/twine.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/visibility.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/workunit-logger.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/yamllint.mdx create mode 100644 versioned_docs/version-2.24/reference/subsystems/yapf.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/_category_.json create mode 100644 versioned_docs/version-2.24/reference/targets/adhoc_tool.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/archive.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/avro_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/avro_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/cc_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/cc_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/code_quality_tool.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/cue_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/debian_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/deploy_jar.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/docker_environment.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/docker_image.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/experimental_test_shell_command.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/experimental_workspace_environment.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/experimental_wrap_as_go_package_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/experimental_wrap_as_java_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/experimental_wrap_as_kotlin_sources.mdx create mode 100644 
versioned_docs/version-2.24/reference/targets/experimental_wrap_as_python_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/experimental_wrap_as_resources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/experimental_wrap_as_scala_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/file.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/files.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/go_binary.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/go_mod.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/go_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/go_third_party_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/helm_artifact.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/helm_chart.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/helm_deployment.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/helm_unittest_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/helm_unittest_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/java_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/java_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/javascript_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/javascript_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/javascript_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/javascript_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/jsx_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/jsx_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/jsx_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/jsx_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/junit_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/junit_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/jvm_artifact.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/jvm_artifacts.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/jvm_war.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/kotlin_junit_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/kotlin_junit_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/kotlin_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/kotlin_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/kotlinc_plugin.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/local_environment.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/makeself_archive.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_apk_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_archlinux_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_content_dir.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_content_dirs.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_content_file.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_content_files.mdx create 
mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_content_symlink.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_content_symlinks.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_deb_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/nfpm_rpm_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/node_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/node_third_party_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/openapi_bundle.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/openapi_document.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/openapi_documents.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/openapi_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/openapi_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/package_json.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/pants_requirements.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/pex_binaries.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/pex_binary.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/pipenv_requirements.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/poetry_requirements.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/protobuf_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/protobuf_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/pyoxidizer_binary.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_aws_lambda_function.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_aws_lambda_layer.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_distribution.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_google_cloud_function.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_requirement.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_requirements.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_test_utils.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/python_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/relocated_files.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/remote_environment.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/resource.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/resources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/run_shell_command.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/rust_package.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/scala_artifact.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/scala_junit_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/scala_junit_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/scala_source.mdx create mode 100644 
versioned_docs/version-2.24/reference/targets/scala_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/scalac_plugin.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/scalatest_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/scalatest_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/shell_command.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/shell_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/shell_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/shunit2_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/shunit2_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/sql_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/sql_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/swift_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/swift_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/system_binary.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/target.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/terraform_backend.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/terraform_deployment.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/terraform_module.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/terraform_var_files.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/thrift_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/thrift_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/tsx_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/tsx_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/tsx_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/tsx_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/typescript_source.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/typescript_sources.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/typescript_test.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/typescript_tests.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/uv_requirements.mdx create mode 100644 versioned_docs/version-2.24/reference/targets/vcs_version.mdx create mode 100644 versioned_sidebars/version-2.24-sidebars.json diff --git a/versioned_docs/version-2.24/docs/ad-hoc-tools/_category_.json b/versioned_docs/version-2.24/docs/ad-hoc-tools/_category_.json new file mode 100644 index 000000000..1d819289f --- /dev/null +++ b/versioned_docs/version-2.24/docs/ad-hoc-tools/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Ad-Hoc Tools", + "position": 13 +} diff --git a/versioned_docs/version-2.24/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx b/versioned_docs/version-2.24/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx new file mode 100644 index 000000000..66abfd902 --- /dev/null +++ b/versioned_docs/version-2.24/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx @@ -0,0 +1,98 @@ +--- + title: Integrating new tools without plugins + sidebar_position: 999 +--- + +--- + +## Integrating new tools without plugins + +The `adhoc_tool` target allows you to execute "runnable" targets inside the Pants sandbox. 
Runnable targets include first-party sources that can be run with `pants run`, 3rd-party dependencies like `python_requirement` or `jvm_artifact`, or even executables that exist on your system and are managed externally to Pants. + +`adhoc_tool` provides you with the building blocks needed to put together a custom build process without needing to develop and maintain a plugin. The level of initial effort involved in using `adhoc_tool` is significantly lower than that of [writing a plugin](../writing-plugins/overview.mdx), so it's well-suited to consuming one-off scripts, or for rapidly prototyping a process before actually writing a plugin. The tradeoff is that there is more manual work involved in defining build processes that reflect your codebase's structure, and that the targets that define the tools you consume are harder to reuse. + +The `antlr` demo in the [`example-adhoc` repository](https://github.com/pantsbuild/example-adhoc) shows an example of running a JVM-based tool to transparently generate Python code, i.e. sources in a different language from the tool itself: + +``` +adhoc_tool( + name="run_antlr", + runnable=":antlr4", + args=["Expr.g4", "-Dlanguage=Python3", "-o", "expr_parser", "-package", "expr_parser",], + output_directories=["expr_parser",], + # These are consumed by `antlr`, but are not relevant to this target's dependents. + execution_dependencies=[":grammars"], + # These are needed by the code that is output by this target + output_dependencies=[":antlr4-python3-runtime",], + root_output_directory=".", + log_output=True, +) +``` + +### `runnable` targets + +"Runnable" targets are targets that Pants knows how to execute within its sandbox. Generally, these correspond to targets that can be executed with the `pants run` goal, and include first-party source files, as well as third-party dependencies. + +The tool will be run with the values from `args` as its arguments. By default, the process' working directory will be the directory where the `BUILD` file is defined. This can be adjusted using the `workdir` field. + +:::caution `runnable` targets must be pure functions +When run by `adhoc_tool`, Pants assumes that the inputs provided to the process -- that is, the values of the `adhoc_tool`'s fields, and the contents of the runnable and execution dependencies -- fully describe the output. Output values will be [cached](../introduction/how-does-pants-work.mdx#caching) by Pants, and future invocations with identical inputs will be retrieved from the cache instead of being re-executed. If your process has behavior that is not fully defined by its inputs, Pants' behavior may be unexpected or inconsistent. +::: + +:::caution `runnable` targets must be idempotent +`adhoc_tool` processes may be cancelled or retried any number of times, so it is important that any side effects are idempotent. That is, it should not matter if the process is run several times, or only partially. +::: + +### Specifying dependencies + +`adhoc_tool` has more complexity surrounding dependencies compared with Pants' first-class targets. This is because you need to do manual work to set up the execution environment, which is usually taken care of by plugin code. + +`adhoc_tool` has three dependency fields: + +- `output_dependencies`, which defines dependencies that are required to effectively consume the output of the tool, _e.g._ runtime libraries for generated code bindings. Any targets that (transitively) depend on the `adhoc_tool` target will also transitively depend on these dependencies.
+- `execution_dependencies`, which defines data dependencies required for the tool to produce its output. These are not considered when resolving transitive dependencies that include this `adhoc_tool` target. +- `runnable_dependencies`, which defines runnables that the `adhoc_tool` needs on its `PATH` to execute as a subprocess. These are also not considered when resolving transitive dependencies. The discussion of `system_binary` later in this page shows one key use of `runnable_dependencies`. + +In the `antlr` example, `output_dependencies` is used because the tool produces Python-based bindings that depend on a runtime library. `execution_dependencies` specifies the sources that are consumed by the tool, but that do not need to be consumed by subsequent build steps. + +### Specifying outputs + +Generally, `adhoc_tool` targets are run to produce outputs that can be supplied to other targets. These can be in the form of files or directories that are output directly by the tools: use the `output_files` field to capture individual files, or `output_directories` to capture entire directories as output. + +Files are captured relative to the build root by default: this is useful when passing results to further `adhoc_tool` targets defined in the same `BUILD` file. If this behavior is not right for you, for example, if you are producing an artifact for packaging, you can change the root of the outputs using the `root_output_directory` field. + +Finally, if you want to capture `stdout` or `stderr` from your tool, you can use the `stdout` or `stderr` fields. These specify filenames where those streams will be dumped once the process completes. Note that these files are captured in addition to those from the `output_files` field, and an error will occur if a filename appears both there and in the outputs arising from `output_files` or `output_directories`, with differing contents. + +### Chaining processes together + +_Our [JavaScript demo](https://github.com/pantsbuild/example-adhoc/tree/main/javascript) demonstrates a string of `adhoc_tool` targets that's used to produce a resource file._ + +To get the best cache efficiency, it can make sense to break your `adhoc_tool` into smaller incremental steps. For example, if your process needs to fetch dependencies and then build a library based on those dependencies and some first-party source files, having one `adhoc_tool` for each of those steps means that the dependency-fetching stage will only be re-run when your requirements change, and not when the first-party source files change. + +Generally, if you are chaining `adhoc_tool` targets together, it will be easier to use the default `workdir` and `root_output_directory` fields for each step that will be consumed by an `adhoc_tool` in the same `BUILD` file. Change the `root_output_directory` only for targets that are intended to be used in other places or ways. + +### Wrapping generated sources for use by other targets + +_Our [Antlr demo](https://github.com/pantsbuild/example-adhoc/tree/main/antlr) demonstrates wrapping the outputs of `adhoc_tool` targets for use as Python sources._ + +`adhoc_tool` generates `file` sources by default. This can be acceptable when generating assets that do not need to be consumed as source files by another Pants backend. Other Pants backends need generated sources to be marked as actual source files. + +There are several targets included in Pants with the prefix `experimental_wrap_as_`. These act as source targets that can be used as dependencies in a given language backend, with the caveat that dependency inference is not available.
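+For example, building on the Antlr demo above, a sketch like the following exposes the generated parser to the Python backend (the target and its `inputs` field follow the demo's pattern; the names here are illustrative):
+
+```
+experimental_wrap_as_python_sources(
+    name="expr_parser_sources",
+    # Wrap the `file` outputs of the `adhoc_tool` target as Python sources.
+    inputs=[":run_antlr"],
+)
+```
+
+Because dependency inference is not available, downstream Python targets should list `:expr_parser_sources` in their explicit `dependencies`.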
### Using externally-managed tools + +_Our [JavaScript demo](https://github.com/pantsbuild/example-adhoc/tree/main/javascript) demonstrates the use of externally-managed binaries._ + +Some build processes need to make use of tools that can't be modeled within a Pants codebase. The `system_binary` target lets you make use of a binary that is installed on the system. `system_binary` targets may be specified as `runnable` or `runnable_dependency` values for `adhoc_tool`. + +`system_binary` will search for a binary with a given `binary_name` in pre-defined or user-supplied search paths. To improve reproducibility, it's possible to test matching binaries with sample arguments, to see if their output matches a given regular expression. This can be used to match against version strings. If such a binary does not exist on the system where Pants is being run, any build involving this `system_binary` target will fail. + +When specified as a `runnable_dependency`, the binary will be available on the `PATH` under the target name of the dependency. This can be important if the `runnable` field invokes a subprocess (for example, `yarn` tries to invoke a binary called `node` as its interpreter). + +### Running shell scripts + +Currently, `shell_source` targets are not runnable. In the meantime, it is possible to run a shell script as an `adhoc_tool` through the following approach (see the sketch after this list): + +- Declare a `system_binary` target referring to your preferred shell (e.g. `bash`, `zsh`, `fish`) +- Declare an `adhoc_tool` with the `runnable` field pointing at your `system_binary` target, add your `shell_source` as an `execution_dependency`, and provide your script's path relative to the buildroot as the first value in `args` + +This is clearly not as convenient as directly running a `shell_source` target, and we anticipate adding support in a future version of Pants.
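+A minimal sketch of that recipe (the script path, fingerprint regex, and output file are illustrative):
+
+```
+system_binary(
+    name="bash",
+    binary_name="bash",
+    # Optional, but improves reproducibility: candidate binaries are invoked
+    # with these arguments, and their output must match the regex.
+    fingerprint_args=["--version"],
+    fingerprint=r".*version 5\..*",
+)
+
+shell_source(name="greet", source="greet.sh")
+
+adhoc_tool(
+    name="run_greet",
+    runnable=":bash",
+    # The script's path relative to the buildroot comes first in `args`.
+    args=["src/scripts/greet.sh"],
+    execution_dependencies=[":greet"],
+    output_files=["greeting.txt"],
+)
+```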
diff --git a/versioned_docs/version-2.24/docs/contributions/_category_.json b/versioned_docs/version-2.24/docs/contributions/_category_.json new file mode 100644 index 000000000..6499d2111 --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Contributions", + "position": 16 +} diff --git a/versioned_docs/version-2.24/docs/contributions/development/_category_.json b/versioned_docs/version-2.24/docs/contributions/development/_category_.json new file mode 100644 index 000000000..d07b6dbf2 --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Development", + "position": 2 +} diff --git a/versioned_docs/version-2.24/docs/contributions/development/debugging-and-benchmarking.mdx b/versioned_docs/version-2.24/docs/contributions/development/debugging-and-benchmarking.mdx new file mode 100644 index 000000000..989fe503c --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/debugging-and-benchmarking.mdx @@ -0,0 +1,126 @@ +--- + title: Debugging and benchmarking + sidebar_position: 4 +--- + +Some techniques to figure out why Pants is behaving the way it is. + +--- + +## Benchmarking with `hyperfine` + +We use `hyperfine` to benchmark, especially comparing before and after to see the impact of a change: [https://github.com/sharkdp/hyperfine](https://github.com/sharkdp/hyperfine). + +When benchmarking, you must decide if you care about cold cache performance vs. warm cache performance (or both). If cold, use `--no-pantsd --no-local-cache`. If warm, use hyperfine's option `--warmup=1`. + +For example: + +``` +❯ hyperfine --warmup=1 --runs=5 'pants list ::' +❯ hyperfine --runs=5 'pants --no-pantsd --no-local-cache lint ::' +``` + +## CPU profiling with py-spy + +`py-spy` is a sampling profiler which can also be used to compare the impact of a change before and after: [https://github.com/benfred/py-spy](https://github.com/benfred/py-spy). + +To profile with `py-spy`: + +1. Activate Pants' development venv + - `source ~/.cache/pants/pants_dev_deps//bin/activate` +2. Install `py-spy` into it + - `pip install py-spy` +3. Run Pants with `py-spy` (be sure to disable `pantsd`) + - `PYTHONPATH=src/python NO_SCIE_WARNING=1 py-spy record --subprocesses -- python -m pants.bin.pants_loader --no-pantsd ` + - If you're running Pants from sources on code in another repo, set `PYTHONPATH` to the `src/python` dir in the pants repo, and set `PANTS_VERSION` to the current dev version in that repo. + - On macOS you may have to run this as root, under `sudo`. + +The default output is a flamegraph. `py-spy` can also output speedscope ([https://github.com/jlfwong/speedscope](https://github.com/jlfwong/speedscope)) JSON with the `--format speedscope` flag. The resulting file can be uploaded to [https://www.speedscope.app/](https://www.speedscope.app/) which provides a per-process, interactive, detailed UI. + +Additionally, to profile the Rust code the `--native` flag can be passed to `py-spy` as well. The resulting output will contain frames from Pants Rust code. + +## Memory profiling with memray + +`memray` is a Python memory profiler that can also track allocation in native extension modules: [https://bloomberg.github.io/memray/](https://bloomberg.github.io/memray/). + +To profile with `memray`: + +1. Activate Pants' development venv + - `source ~/.cache/pants/pants_dev_deps//bin/activate` +2. Install `memray` into it + - `pip install memray` +3. Run Pants with `memray` + - `PYTHONPATH=src/python NO_SCIE_WARNING=1 memray run --native -o output.bin -m pants.bin.pants_loader --no-pantsd ` + - If you're running Pants from sources on code in another repo, set `PYTHONPATH` to the `src/python` dir in the pants repo, and set `PANTS_VERSION` to the current dev version in that repo. + +Note that in many cases it will be easier and more useful to run Pants with the `--stats-memory-summary` flag. + +## Debugging `rule` code with a debugger in VSCode + +Running pants with the `PANTS_DEBUG` environment variable set will use `debugpy` ([https://github.com/microsoft/debugpy](https://github.com/microsoft/debugpy)) +to start a Debug-Adapter server ([https://microsoft.github.io/debug-adapter-protocol/](https://microsoft.github.io/debug-adapter-protocol/)) which will +wait for a client connection before running Pants. + +You can connect any Debug-Adapter-compliant editor (such as VSCode) as a client, and use breakpoints, +inspect variables, run code in a REPL, and break-on-exceptions in your `rule` code. + +NOTE: `PANTS_DEBUG` doesn't work with the pants daemon, so `--no-pantsd` must be specified.
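+For example, to debug a test run (the goal and arguments here are illustrative):
+
+```
+PANTS_DEBUG=1 pants --no-pantsd test path/to/tests::
+```
+
+Pants will then pause until a Debug-Adapter client attaches.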
## Debugging `rule` code with a debugger in PyCharm + +You'll have to follow a different procedure until PyCharm adds Debug-Adapter support: + +1. Add a requirement on `pydevd-pycharm` in your local clone of the pants source in [3rdparty/python/requirements.txt](https://github.com/pantsbuild/pants/blob/main/3rdparty/python/requirements.txt) +2. Add this snippet where you want to break: + +```python +import pydevd_pycharm +pydevd_pycharm.settrace('localhost', port=5000, stdoutToServer=True, stderrToServer=True) +``` + +3. Start a remote debugging session. +4. Run pants from your clone. The build will automatically install the new requirement. For example: + +``` +example-python$ PANTS_SOURCE= pants --no-pantsd test :: +``` + +## Identifying the impact of Python's GIL (on macOS) + + + +## Obtaining Full Thread Backtraces + +Pants runs as a Python program that calls into a native Rust library. In debugging locking and deadlock issues, it is useful to capture dumps of the thread stacks in order to figure out where a deadlock may be occurring. + +One-time setup: + +1. Ensure that gdb is installed. + - Ubuntu: `sudo apt install gdb` +2. Ensure that the kernel is configured to allow debuggers to attach to processes that are not in the same parent/child process hierarchy. + - `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope` + - To make the change permanent, add a file to /etc/sysctl.d named `99-ptrace.conf` with contents `kernel.yama.ptrace_scope = 0`. **Note: This is a security exposure if you are not normally debugging processes across the process hierarchy.** +3. Ensure that the debug info for your system Python binary is installed. + - Ubuntu: `sudo apt install python3-dbg` + +Dumping thread stacks: + +1. Find the pants binary (which may include pantsd if pantsd is enabled). + - Run: `ps -ef | grep pants` +2. Invoke gdb with the python binary and the process ID: + - Run: `gdb /path/to/python/binary PROCESS_ID` +3. Enable logging to write the thread dump to `gdb.txt`: `set logging on` +4. Dump all thread backtraces: `thread apply all bt` +5. If you use pyenv to manage your Python install, a gdb script will exist in the same directory as the Python binary. Source it into gdb: + - `source ~/.pyenv/versions/3.8.5/bin/python3.8-gdb.py` (if using version 3.8.5) +6. Dump all Python stacks: `thread apply all py-bt` diff --git a/versioned_docs/version-2.24/docs/contributions/development/developing-rust.mdx b/versioned_docs/version-2.24/docs/contributions/development/developing-rust.mdx new file mode 100644 index 000000000..21d64a462 --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/developing-rust.mdx @@ -0,0 +1,124 @@ +--- + title: Developing Rust + sidebar_position: 2 +--- + +Hacking on the Pants engine in Rust. + +--- + +We welcome contributions to Rust! We use Rust to implement the Pants engine in a performant, safe, and ergonomic way. + +:::note Still learning Rust? Ask to get added to reviews +We'd be happy to ping you on Rust changes we make for you to see how Rust is used in the wild. Please message us on the #development channel in [Slack](/community/members) to let us know your interest. +::: + +:::caution Recommendation: share your plan first +Because changes to Rust deeply impact how Pants runs, it is especially helpful to share any plans to work on Rust before making changes. Please message us on [Slack](/community/members) in the #development channel or open a [GitHub issue](https://github.com/pantsbuild/pants/issues). +::: + +## Code organization + +The code for the top-level Pants Rust crate lives in `src/rust/engine`. The top-level `Cargo.toml` file at `src/rust/engine/Cargo.toml` defines a cargo workspace containing a number of other subcrates, which live in subdirectories of `src/rust/engine`.
Defining multiple subcrates in this way keeps changes to one subcrate from affecting the others and triggering more recompilation than is necessary. + +Several of the particularly important subcrates are: + +- `graph`: the core of Pants's rule graph implementation. +- `ui`: the dynamic UI. +- `sharded_lmdb`: custom wrappers around the `crates.io` `lmdb` crate, which provides bindings to [lmdb](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database). +- `fs`: manipulating the filesystem. +- `process_execution`: running local and remote processes. + +## Rust \<-> Python interaction + +Pants is best conceptualized as a Python program that makes frequent foreign function interface (FFI) calls into Rust code. + +The top-level `engine` Rust crate gets compiled into a library named `native_engine.so`, which Python code knows how to interact with. We use the Rust [PyO3](https://pyo3.rs/) crate to manage foreign function interaction. + +The C FFI functions that Rust code exposes as a public interface live in `src/rust/engine/src/externs/interface.rs`. On the Python side, `src/python/pants/engine/internals/native_engine.pyi` provides type hints for the functions and classes provided by Rust. + +Rust can also invoke Python functions and object constructors thanks to the [PyO3](https://pyo3.rs) crate. + +We are planning to port additional functionality from Python to Rust, generally for performance reasons. + +## Common commands + +Rather than using a global installation of Cargo, use the `./cargo` script. + +### Compile + +To check that the Rust code is valid, use `./cargo check`. To check that it integrates correctly with Pants' Python code, use `MODE=debug pants ...` as usual (which will `compile` first, and is slower than `check`). + +:::caution Set `MODE=debug` when iterating on Rust +As described in [Setting up Pants](./setting-up-pants.mdx), we default to compiling Rust in release mode, rather than debug mode. + +When working on Rust, you typically should set the environment variable `MODE=debug` for substantially faster compiles. +::: + +### Run tests + +To run tests for all crates, run: + +```bash +./cargo test +``` + +To run tests for a specific crate, such as the `fs` crate, run: + +```bash +./cargo test -p fs +``` + +To run a specific test, use Cargo's filtering mechanism, e.g.: + +```bash +./cargo test -p fs read_file_missing +``` + +:::note Tip: enabling logging in tests +When debugging, it can be helpful to capture logs with [`env_logger`](https://docs.rs/env_logger/0.6.1/env_logger/). + +To enable logging: + +1. Add `env_logger = "..."` to `dev-dependencies` in the crate's `Cargo.toml`, replacing the `...` with the relevant version. Search for the version used in other crates. +2. At the start of your test, add `let _logger = env_logger::try_init();`. +3. Add log statements wherever you'd like using `log::info!()` et al. +4. Run your test with `RUST_LOG=$level ./cargo test -p $crate test_name -- --nocapture`, where `$level` is one of `error`, `warn`, `info`, `debug`, or `trace`. + +::: + +### Autoformat + +```bash +./cargo fmt +``` + +To run in lint mode, add `--check`. + +### Run Clippy + +```bash +./cargo clippy +``` + +## The `fs_util` tool + +`fs_util` is a utility that enables you to interact with `Snapshot`s from the command line. You can use it to help debug issues with snapshotted files. + +To build it, run this from the root of the repository: + +```bash +$ ./cargo build -p fs_util +``` + +That will produce `src/rust/engine/target/debug/fs_util`.
+ +To inspect a particular snapshot, you'll need to tell `fs_util` where the storage is and the digest and length of the snapshot to inspect. You can use the `--local-store-path` flag for that. + +For example, this command pretty-prints the recursive file list of a directory via the `directory` subcommand: + +```bash +$ src/rust/engine/target/debug/fs_util --local-store-path=${HOME}/.cache/pants/lmdb_store directory cat-proto --output-format=recursive-file-list +``` + +Pass the `--help` flag to see other ways of using `fs_util`, along with its subcommands. Each subcommand can be passed the `--help` flag. diff --git a/versioned_docs/version-2.24/docs/contributions/development/index.mdx b/versioned_docs/version-2.24/docs/contributions/development/index.mdx new file mode 100644 index 000000000..081ae52d7 --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/index.mdx @@ -0,0 +1,15 @@ +--- + title: Development + sidebar_position: 1 +--- + +How to make code changes to Pants. + +--- + +- [Setting up Pants](./setting-up-pants.mdx) +- [Style guide](./style-guide.mdx) +- [Developing Rust](./developing-rust.mdx) +- [Internal Architecture](./internal-architecture.mdx) +- [Debugging and benchmarking](./debugging-and-benchmarking.mdx) +- [Running Pants from sources](./running-pants-from-sources.mdx) diff --git a/versioned_docs/version-2.24/docs/contributions/development/internal-architecture.mdx b/versioned_docs/version-2.24/docs/contributions/development/internal-architecture.mdx new file mode 100644 index 000000000..c5012fe5d --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/internal-architecture.mdx @@ -0,0 +1,89 @@ +--- + title: Internal architecture + sidebar_position: 3 +--- + +--- + +## Rule Graph Construction + +### Overview + +Build logic in [Pants](https://www.pantsbuild.org/) is declared using collections of `@rules` with recursively memoized and invalidated results. This framework (known as Pants' "Engine") has similar goals to Bazel's [Skyframe](https://bazel.build/designs/skyframe.html) and the [Salsa](https://github.com/salsa-rs/salsa) framework: users define logic using a particular API, and the framework manages tracking the dependencies between nodes in a runtime graph. + +In order to maximize the amount of work that can be reused at runtime, Pants statically computes the memoization keys for the nodes of the runtime graph from the user-specified `@rules` during startup: this process is known as "rule graph construction". See the `Goals` section for more information on the strategy and reasoning for this. + +Concepts used in compilers, including live variable analysis and monomorphization, can also be useful in rule graph construction to minimize rule identities and pre-decide which versions of their dependencies they will use. + +### Concepts + +A successfully constructed `RuleGraph` contains a graph where nodes have one of three types, `Rule`s, `Query`s, and `Param`s, which map fairly closely to what a Pants `@rule` author consumes. The edges between nodes represent dependencies: `Query`s are always roots of the graph, `Param`s are always leaves, and `Rule`s represent the end user logic making up all of the internal nodes of the graph. + +#### Rules + +A `Rule` is a function or coroutine with all of its inputs declared as part of its type signature. The end user type signature is made up of: + +1. the return type of the `Rule` +2. the positional arguments to the `Rule` +3.
a set of `Get`s which declare the runtime requirements of a coroutine, of the form `Get(output_type, input_type)` + +In the `RuleGraph`, these are encoded in a [Rule](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/rules.rs#L76-L95) trait, with a [DependencyKey](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/rules.rs#L21-L41) trait representing both the positional arguments (which have no provided `Param`) and the `Get`s (which provide their input type as a `Param`). + +`Rule`s never refer to one another by name (i.e., they do not call one another by name): instead, their signature declares their requirements in terms of input/output types, and rule graph construction decides which potential dependencies will provide those requirements. + +#### Queries + +The roots/entrypoints of a `RuleGraph` are `Query`s, which should correspond one-to-one to external callsites that use the engine to request that values are computed. A `Query` has an output type, and a series of input types: `Query(output_type, (*input_types))`. + +If a user makes a request to the engine that does not have a corresponding `Query` declared, the engine fails rather than attempting to dynamically determine which `Rules` to use to answer the `Query`: how a `RuleGraph` is constructed should show why that is the case. + +#### Params + +`Params` are typed, comparable (`eq`/`hash`) values that represent both the inputs to `Rules`, and the building block of the runtime memoization key for a `Rule`. The set of `Params` (unique by type) that are consumed to create a `Rule`'s inputs (plus the `Rule`'s own identity) make up the memoization key for a runtime instance of the `Rule`. + +`Param`s are eventually used as positional args to `Rule`s, but it's important to note that the `Param`s in a `Rule` instance's identity/memoization-key will not always become the positional arguments to _that_ `Rule`: in many cases, a `Param` will be used by a `Rule`'s transitive dependencies in order to produce an output value that becomes either a positional argument to the `Rule` as it starts, or the result of a `Get` while a coroutine `Rule` runs. + +The `Param`s that are available to a `Rule` are made available by the `Rule`'s dependents (its "callers"), but similar to how `Rule`s are not called by name, neither are all of their `Param`s passed explicitly at each use site. A `Rule` will be used to compute the output value for a `DependencyKey`: i.e., a positional argument, `Get` result, or `Query` result. Of these usage sites, only `Query` specifies the complete set of `Params` that will be available: the other two usages (positional arguments and `Get`s) are able to use any Param that will be "in scope" at the use site. + +`Params` flow down the graph from `Query`s and the provided `Param`s of `Get`s: their presence does not need to be re-declared at each intermediate callsite. When a `Rule` consumes a `Param` as a positional argument, that `Param` will no longer be available to that `Rule`'s dependencies (but it might still be present in other subgraphs adjacent to that `Rule`). + +### Goals + +The goals of `RuleGraph` construction are: + +1. decide which `Rule`s to use to answer `Query`s (transitively, since `Rule`s do not call one another by name); and +2. 
determine the minimum set of `Param` inputs needed to satisfy the `Rule`s below those `Query`s + +If either of the goals were removed, `RuleGraph` construction might be more straightforward: + +1. If rather than being type-driven, `Rule`s called one another by name, you could statically determine their input `Params` by walking the call graph of `Rule`s by name, and collecting their transitive input `Params`. +2. If rather than needing to compute a minimum set of `Param` inputs for the memoization key, we instead required that all usage sites explicitly declared all `Param`s that their dependencies might need, we could relatively easily eliminate candidates based on the combination of `Param` types at a use site. And if we were willing to have very large memoization keys, we could continue to have simple callsites, but skip pruning the `Params` that pass from a dependent to a dependency at runtime, and include any `Params` declared in any of a `Rule`'s transitive dependents as part of its identity. + +But both of the goals are important because together they allow for an API that is easy to write `Rule`s for, with minimal boilerplate required to get the inputs needed for a `Rule` to compute a value, and minimal invalidation. Because the identity of a `Rule` is computed from its transitive input `Param`s rather than from its positional arguments, `Rule`s can accept arbitrarily many large input values (which don't need to implement hash) with no impact on their memoization hit rate.
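+To make the type-driven declaration concrete, here is a sketch of a coroutine `Rule` (the types are hypothetical, not real Pants rules):
+
+```python
+# Illustrative only: hypothetical types, showing how a rule's requirements
+# are declared via its signature and its `Get`s rather than by rule name.
+from dataclasses import dataclass
+
+from pants.engine.rules import Get, rule
+
+
+@dataclass(frozen=True)
+class SourcePath:
+    path: str
+
+
+@dataclass(frozen=True)
+class SourceText:
+    text: str
+
+
+@dataclass(frozen=True)
+class LineCount:
+    count: int
+
+
+@rule
+async def count_lines(path: SourcePath) -> LineCount:
+    # `path` is a positional argument. The `Get` declares a runtime
+    # requirement of the form `Get(output_type, input_type, input)`:
+    # which rule computes the `SourceText` is decided during rule graph
+    # construction, based on these types alone.
+    text = await Get(SourceText, SourcePath, path)
+    return LineCount(count=len(text.text.splitlines()))
+```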
### Constraints + +There are a few constraints that decide which `Rule`s are able to provide dependencies for one another: + +- `param_consumption` - When a `Rule` directly uses a `Param` as a positional argument, that `Param` is removed from scope for any of that `Rule`'s dependencies. + - For example, for a `Rule` `y` with a positional argument `A` and a `Get(B, C)`: if there is a `Param` `A` in scope at `y` and it is used to satisfy the positional argument, it cannot also be used (transitively) to satisfy the `Get(B, C)` (i.e., a hypothetical rule that consumes both `A` and `C` would not be eligible in that position). + - On the other hand, for a `Rule` `w` with `Get(B, C)` and `Get(D, E)`, if there is a `Param` `A` in scope at `w`, two dependency `Rule`s that consume `A` (transitively) _can_ be used to satisfy those `Get`s. Only consuming a `Param` as a positional argument removes it from scope. +- `provided_params` - When deciding whether one `Rule` can use another `Rule` to provide the output type of a `Get`, a constraint is applied that the candidate dependency must (transitively) consume the `Param` that is provided by the `Get`. + - For example: if a `Rule` `z` has a `Get(A, B)`, only `Rule`s that compute an `A` and (transitively) consume a `B` are eligible to be used. This also means that a `Param` `A` which is already in scope for `Rule` `z` is not eligible to be used, because it would trivially not consume `B`. + +### Implementation + +As of [3a188a1e06](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L202-L219), we construct a `RuleGraph` using a combination of data flow analysis and some homegrown (and likely problematic: see the "Issue Overview") node splitting on the call graph of `Rule`s. + +The construction algorithm is broken up into phases: + +1. [initial_polymorphic](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L221) - Builds a polymorphic graph while computing an "out-set" for each node in the graph by accounting for which `Param`s are available at each use site. During this phase, nodes may have multiple dependency edges per `DependencyKey`, which is what makes them "polymorphic". Each of the possible ways to compute a dependency will likely have different input `Param` requirements, and each node in this phase represents all of those possibilities. +2. [live_param_labeled](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L749-L754) - Run [live variable analysis](https://en.wikipedia.org/wiki/Live_variable_analysis) on the polymorphic graph to compute the initial "in-set" of `Params` used by each node in the graph. Because nodes in the polymorphic graph have references to all possible sources of a particular dependency type, the computed set is conservative (i.e., overly large). + - For example: if a `Rule` `x` has exactly one `DependencyKey`, but there are two potential dependencies to provide that `DependencyKey` with input `Param`s `{A,B}` and `{B,C}` (respectively), then at this phase the input `Param`s for `x` must be the union of all possibilities: `{A,B,C}`. + - If we were to stop `RuleGraph` construction at this phase, it would be necessary to do a form of [dynamic dispatch](https://en.wikipedia.org/wiki/Dynamic_dispatch) at runtime to decide which source of a dependency to use based on the `Param`s that were currently in scope. And the sets of `Param`s used in the memoization key for each `Rule` would still be overly large, causing excess invalidation. +3. [monomorphize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L325-L353) - "Monomorphize" the polymorphic graph by using the out-set of available `Param`s (initialized during `initial_polymorphic`) and the in-set of consumed `Param`s (computed during `live_param_labeled`) to partition nodes (and their dependents) for each valid combination of their dependencies. Combinations of dependencies that would be invalid (see the Constraints section) are not generated, which causes some pruning of the graph to happen during this phase. + - Continuing the example from above: the goal of monomorphize is to create one copy of `Rule` `x` per legal combination of its `DependencyKey`s. Assuming that both of `x`'s dependencies remain legal (i.e. that all of `{A,B,C}` are still in scope in the dependents of `x`, etc), then two copies of `x` will be created: one that uses the first dependency and has an in-set of `{A,B}`, and another that uses the second dependency and has an in-set of `{B,C}`. +4. [prune_edges](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L836-L845) - Once the monomorphic graph has [converged](https://en.wikipedia.org/wiki/Data-flow_analysis#Convergence), each node in the graph will ideally have exactly one source of each `DependencyKey` (except for `Query`s, which are not monomorphized). This phase validates that, and chooses the smallest input `Param` set to use for each `Query`. In cases where a node has more than one dependency per `DependencyKey`, it is because given a particular set of input `Params` there was more than one valid way to compute a dependency.
This can happen either because there were too many `Param`s in scope, or because there were multiple `Rule`s with the same `Param` requirements. + - This phase is the only phase that renders errors: all of the other phases mark nodes and edges "deleted" for particular reasons, and this phase consumes that record. A node that has been deleted indicates that that node is unsatisfiable for some reason, while an edge that has been deleted indicates that the source node was not able to consume the target node for some reason. + - If a node has too many sources of a `DependencyKey`, this phase will recurse to attempt to locate the node in the `Rule` graph where the ambiguity was introduced. Likewise, if a node has no source of a `DependencyKey`, this phase will recurse on deleted nodes (which are preserved by the other phases) to attempt to locate the bottom-most `Rule` that was missing a `DependencyKey`. +5. [finalize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L1064-L1068) - After `prune_edges` the graph is known to be valid, and this phase generates the final static `RuleGraph` for all `Rule`s reachable from `Query`s. diff --git a/versioned_docs/version-2.24/docs/contributions/development/maintenance-tasks-and-scripts.mdx b/versioned_docs/version-2.24/docs/contributions/development/maintenance-tasks-and-scripts.mdx new file mode 100644 index 000000000..8e7472999 --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/maintenance-tasks-and-scripts.mdx @@ -0,0 +1,122 @@ +--- + title: Maintenance tasks and scripts + sidebar_position: 6 +--- + +There are various maintenance tasks that happen at different frequencies. + +## Update the default/known versions of a built-in tool/subsystem + +### External tools (downloaded executables) + +Some tools use [the `ExternalTool` class](../../writing-plugins/the-rules-api/installing-tools#externaltool-install-pre-compiled-binaries) to download a binary from the internet, verify its size and hash, and then execute it. + +To update these: + +1. For each platform and version to add: + 1. Download the archive/binary. + 2. Verify it: check signatures and/or hashes if available. + 3. Compute the sha256 hash and byte length. For example, if it's called `archive.zip`: `tee >(shasum -a 256) >(wc -c) > /dev/null < archive.zip`. +2. Apply the new values: + 1. Adjust `default_version` to the new version. + 2. Add to, or replace entries in, `default_known_versions` using the hashes and lengths above, as in the sketch at the end of this section (for some tools we don't preserve older versions, especially if they have strong backwards compatibility guarantees, while for others we do retain older versions). + +Example: [#20469](https://github.com/pantsbuild/pants/pull/20469). + +#### PEX + +The PEX external tool is a bit special, because it also appears as a requirement of the Pants project itself in `3rdparty/python/requirements.txt`. To update PEX, do both: + +1. Update the `pex-cli` subsystem, as above (in `src/python/pants/backend/python/util_rules/pex_cli.py`). +2. Update the requirements file and run `pants generate-lockfiles --resolve=python-default` to update Pants' own lockfile. + +Example: [#20782](https://github.com/pantsbuild/pants/pull/20782). + +#### Terraform + +The `build-support/bin/terraform_tool_versions.py` script can help update Terraform versions.
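+Concretely, the two fields being edited in step 2 above look something like this sketch (a hypothetical tool, with placeholder hashes and byte lengths):
+
+```python
+class FooTool(ExternalTool):
+    options_scope = "foo-tool"
+    default_version = "1.2.3"
+    default_known_versions = [
+        # Each entry is "<version>|<platform>|<sha256>|<byte length>".
+        "1.2.3|linux_x86_64|0123abcd<rest of sha256>|1234567",
+        "1.2.3|macos_arm64|4567cdef<rest of sha256>|1234321",
+    ]
+```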
### Python tools + +Some tools use `PythonToolBase` to install executable PyPI packages, using a lockfile. Pants packages a default lockfile with a specific version of the package. To update these: + +1. Adjust `default_requirements` and/or `default_interpreter_constraints` as required. +2. Run `build-support/bin/generate_builtin_lockfiles.py $scope`, where `$scope` is the `options_scope` of the subsystem class. + +Example: [#20924](https://github.com/pantsbuild/pants/pull/20924). + +### JVM tools + +Some tools use `JVMToolBase` to install executable JVM packages, using a lockfile. Pants packages a default lockfile with a specific version of the package. To update these: + +1. Adjust `default_version` and/or `default_artifacts` as required. +2. Run `build-support/bin/generate_builtin_lockfiles.py $scope`, where `$scope` is the `options_scope` of the subsystem class. + +Example: none yet. + +### JS tools + +Some tools use `NodeJSToolBase` to install executable npm packages. To update these: + +1. Update `default_version`. That's all. + +Example: [#21007](https://github.com/pantsbuild/pants/pull/21007). + +### Python Build Standalone known versions + +The Python Build Standalone provider needs to be updated with new upstream releases. There are *many* artifacts here, so the hashes are stored in a JSON file that is updated by running: + +``` +pants run src/python/pants/backend/python/providers/python_build_standalone/scripts/generate_urls.py +``` + +## Update or create FaaS complete platforms files + +The function-as-a-service (FaaS) subsystems provide some built-in PEX complete platforms JSON files, for specific runtimes. To update or create these: + +### AWS Lambda + +1. Adjust `PythonAwsLambdaRuntime.known_runtimes` as required +2. Run `build-support/bin/generate_faas_complete_platforms.py` to create any new files and update the existing ones, using AWS's published docker images + +Example: [#21004](https://github.com/pantsbuild/pants/pull/21004). + +### Google Cloud Functions + +1. Adjust `PythonGoogleCloudFunctionRuntime.known_runtimes` as required +2. Run `build-support/bin/generate_faas_complete_platforms.py` to create any new files and update the existing ones, using GCF's published docker images + +Example: [#21248](https://github.com/pantsbuild/pants/pull/21248). + +## Cherry-pick a pull request to an older version + +We maintain multiple versions, with `main` being our development branch, and various `2.*.x` branches for the stable versions (see [Release strategy](../releases/release-strategy.mdx) for more details). + +### Cherry-picking a new pull request + +When a change needs to land in `main` but also one or more older versions, the usual process is: + +1. Create or review the pull request against `main` as usual +2. Label it as `needs-cherrypick` and set milestone to the oldest release to which it should be cherry-picked +3. Merge the pull request as normal +4. At this point, automation kicks in and attempts to cherry-pick the merged commit to the release in the milestone and any newer ones. +5. The automation opens pull requests targeting each of the relevant `2.*.x` branches for which cherry-picking succeeds. +6. If the automation fails to do a cherry-pick, it will mark the PR as `auto-cherry-picking-failed`. +7. In either case, the automation will add a comment to the original pull request describing what happened. + +For example, suppose `main` is for `2.23.x` and we're still maintaining `2.20.x`, `2.21.x` and `2.22.x`. If a pull request is labelled `needs-cherrypick` and has milestone `2.21.x`, then merging it will attempt to cherry-pick to `2.21.x` and `2.22.x`.
+ +The process may fail in one of two ways: + +- The cherry-picking process failed, and tagged the PR with `auto-cherry-picking-failed`: follow the instructions in the comment on the pull request. (This likely means there are merge conflicts that require manual resolution.) +- The cherry-pick hasn't (yet) run: trigger the automation manually by going to [the GitHub Action](https://github.com/pantsbuild/pants/actions/workflows/auto-cherry-picker.yaml), clicking on the "Run workflow" button, and providing the PR number. + +### Cherry-picking a merged pull request + +A pull request might be merged without being configured for cherry-picking, and we may decide later that it should be. To cherry-pick in this case: + +1. Label the pull request as `needs-cherrypick` and set milestone to the oldest release to which it should be cherry-picked +2. Trigger the automation manually by going to [the GitHub Action](https://github.com/pantsbuild/pants/actions/workflows/auto-cherry-picker.yaml), clicking on the "Run workflow" button, and providing the PR number. +3. As above, the automation may (partially) succeed or fail, and will leave a comment describing what happened. diff --git a/versioned_docs/version-2.24/docs/contributions/development/running-pants-from-sources.mdx b/versioned_docs/version-2.24/docs/contributions/development/running-pants-from-sources.mdx new file mode 100644 index 000000000..0d7d8333f --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/running-pants-from-sources.mdx @@ -0,0 +1,31 @@ +--- + title: Running Pants from sources + sidebar_position: 5 +--- + +--- + +## Running Pants from sources in its own repo + +In most repos, you invoke a pre-built release of Pants, either via the `pants` launcher binary, or a `./pants` runner script in the repo root. However, in the Pants repo itself, the [`./pants`](https://github.com/pantsbuild/pants/blob/main/pants) runner script is different - it invokes Pants directly from the sources in that repo. + +This allows you to iterate rapidly when working in the Pants repo: You can edit Rust and Python source files, and immediately run `./pants` to try out your changes. The script will ensure that any Rust changes are compiled and linked, and then run Pants using your modified sources. + +## Running Pants from sources in other repos + +Sometimes you may want to try out your Pants changes on code in some other repo. If you're using the `pants` launcher binary, you can set the env var `PANTS_SOURCE` to point to your local clone of the pantsbuild/pants repo: + +``` +PANTS_SOURCE=../pants pants test :: +``` + +Or, if you're using a `./pants` runner script, you can instead use a special `./pants_from_sources` script that you copy into the repo. + +This script expects to find a clone of the Pants repo, named `pants`, as a sibling directory of the one you're running in, and it will use the sources in that sibling to run Pants in the other repo, using that repo's config file and so on.
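+For example, with a layout like the following (directory names are illustrative), running `./pants_from_sources` in `myrepo` uses the sources in the sibling `pants` clone:
+
+```
+~/code/
+├── pants/    # clone of pantsbuild/pants
+└── myrepo/   # the repo you are running in, containing pants_from_sources
+```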
To copy it into your repo, use + +``` +curl -L -O https://raw.githubusercontent.com/pantsbuild/example-python/2.14/pants_from_sources && \ + chmod +x pants_from_sources +``` diff --git a/versioned_docs/version-2.24/docs/contributions/development/setting-up-pants.mdx b/versioned_docs/version-2.24/docs/contributions/development/setting-up-pants.mdx new file mode 100644 index 000000000..ce7507ab0 --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/setting-up-pants.mdx @@ -0,0 +1,118 @@ +--- + title: Setting up Pants + sidebar_position: 0 +--- + +How to set up Pants for local development. + +--- + +## Step 1: Fork and clone `pantsbuild/pants` + +We use the popular forking workflow typically used by open source projects. See [https://guides.github.com/activities/forking/](https://guides.github.com/activities/forking/) for a guide on how to fork [pantsbuild/pants](https://github.com/pantsbuild/pants), then clone it to your local machine. + +:::caution macOS users: install a newer `openssl` +Pants requires a more modern OpenSSL version than the one that comes with macOS. To get all dependencies to resolve correctly, run the below commands. If you are using Zsh, use `.zshrc` rather than `.bashrc`. + +```bash +$ brew install openssl +$ echo 'export PATH="$(brew --prefix)/opt/openssl/bin:$PATH"' >> ~/.bashrc +$ echo 'export LDFLAGS="-L$(brew --prefix)/opt/openssl/lib"' >> ~/.bashrc +$ echo 'export CPPFLAGS="-I$(brew --prefix)/opt/openssl/include"' >> ~/.bashrc +``` + +(If you don't have `brew` installed, see [https://brew.sh](https://brew.sh)) +::: + +## Step 2: Bootstrap the Rust engine + +Pants requires several dependencies to be installed: a Python 3.9 interpreter, Rust, the protobuf compiler, clang and others. There is experimental support for the Nix package manager that makes it easy to set up a dev environment. Follow the instructions on the [Nix website](https://nixos.org/download.html) to install Nix. Then `cd` into the directory where you cloned the Pants repo and type `nix-shell`. This will download all the necessary dependencies and start a shell with a suitably configured PATH variable to make them available for use. + +Alternatively, you can install the dependencies manually as follows: + +Pants uses Rustup to install Rust. Run the command from [https://rustup.rs](https://rustup.rs) to install Rustup; ensure that `rustup` is on your `$PATH`. + +If your system Python is not the version Pants expects (currently Python 3.9), you'll need to provide one. Python interpreters from Linux or Mac distributions sometimes have quirks that can cause headaches with bootstrapping the dev venv. Some examples of Pythons that work well with Pants are those provided by: + +- [Fedora](https://packages.fedoraproject.org/pkgs/python3.9/python3.9/) +- [ASDF](https://github.com/asdf-community/asdf-python) +- [PyEnv](https://github.com/pyenv/pyenv) + Providers that sometimes cause issues include: +- Ubuntu Deadsnakes + You also need to have the protobuf compiler and LLVM clang installed. On Debian derivatives, these can be installed using `apt install clang protobuf-compiler`. + +Then, run `pants` to set up the Python virtual environment and compile the engine. + +:::caution This will take several minutes +Rust compilation is really slow. Fortunately, this step gets cached, so you will only need to wait the first time. +::: + +:::note Want a faster compile? +We default to compiling with Rust's `release` mode, instead of its `debug` mode, because this makes Pants substantially faster. 
However, this results in the compile taking 5-10x longer. + +If you are okay with Pants running much slower when iterating, set the environment variable `MODE=debug` and rerun `pants` to compile in debug mode. +::: + +:::caution Rust compilation can use lots of storage +Compiling the engine typically results in several gigabytes of storage over time. We have not yet implemented automated garbage collection for building the engine because contributors are the only ones who need to compile Rust, not everyday users. + +To free up space, run `rm -rf src/rust/engine/target`. + +Warning: this will cause Rust to recompile everything. +::: + +## Step 3: Set up a pre-push Git Hook + +We have a [Git Hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) that runs some useful checks and lints when you `git push`. Running this locally can prevent easily avoidable CI failures such as whitespace or linting issues. + +To install this, run: + +```bash +$ build-support/bin/setup.sh +``` + +You can manually run the pre-push check with: + +```bash +$ build-support/githooks/pre-push +``` + +The `MODE` flag, which affects [Rust compilation](./developing-rust.mdx), is passed through to the hooks, so to run the githooks in "debug" mode, you can do something like: + +```bash +$ MODE=debug git push ... +``` + +:::note How to temporarily skip the pre-push checks +Use `git push --no-verify` or `git push -n` to skip the checks. +::: + +## Configure your IDE (optional) + +### Hooking up the Python virtual environment + +Most IDEs allow you to configure a Python [virtual environment](https://docs.python.org/3/tutorial/venv.html) so that the editor understands your Python import statements. + +Pants sets up its development virtualenv at `~/.cache/pants/pants_dev_deps/..venv/`. Point your editor to the `bin/python` file in this folder, e.g. `~/.cache/pants/pants_dev_deps/Darwin.py37.venv/bin/python`. + +### PyCharm guide + +1. Use "New project" and click the option "Existing interpreter". Point the interpreter to the virtual environment location described above. +2. In your project tree (the list of folders and files), secondary-click the folder `src/python`. Click "Mark directory as" and choose "Sources". + +### VSCode guide + +Add this to your `settings.json` file inside the build root's `.vscode` folder: + +```json title="settings.json" +{ + "python.analysis.extraPaths": ["src/python"], + "python.formatting.provider": "black", + "python.linting.enabled": true, + "python.linting.flake8Enabled": true, + "python.linting.flake8Args": ["--config=build-support/flake8/.flake8"], + "rust-analyzer.linkedProjects": ["src/rust/engine/Cargo.toml"] +} +``` + +`python.analysis.extraPaths` lets VSCode know where to find Pants's source root. The other config enables `black` and `flake8`. diff --git a/versioned_docs/version-2.24/docs/contributions/development/style-guide.mdx b/versioned_docs/version-2.24/docs/contributions/development/style-guide.mdx new file mode 100644 index 000000000..88980e51e --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/development/style-guide.mdx @@ -0,0 +1,424 @@ +--- + title: Style guide + sidebar_position: 1 +--- + +Some conventions we encourage. + +--- + +## Reminder: running the autoformatters and linters + +Most of Pants' style is enforced via Black, isort, Docformatter, Flake8, and MyPy.
+You may find it helpful to run these commands before pushing a PR:
+
+```bash
+$ pants --changed-since=HEAD fmt
+$ build-support/githooks/pre-push
+```
+
+:::note Tip: improving Black's formatting by wrapping in `()`
+Sometimes, Black will split code over multiple lines awkwardly. For example:
+
+```python
+StrOption(
+    default="pants",
+    help="The name of the script or binary used to invoke pants. "
+    "Useful when printing help messages.",
+)
+```
+
+Often, you can improve Black's formatting by wrapping the expression in parentheses, then rerunning `fmt`:
+
+```python
+StrOption(
+    default="pants",
+    help=(
+        "The name of the script or binary used to invoke pants. "
+        "Useful when printing help messages."
+    ),
+)
+```
+
+This is not mandatory, only encouraged.
+:::
+
+## Comments
+
+### Style
+
+Comments must have a space after the starting `#`. All comments should be complete sentences and should end with a period.
+
+Good:
+
+```python
+# This is a good comment.
+```
+
+Bad:
+
+```python
+#Not This
+```
+
+Comment lines should not exceed 100 characters. Black will not auto-format this for you; you must manually format comments.
+
+### When to comment
+
+We strive for self-documenting code. Often, a comment can be better expressed by giving a variable a more descriptive name, adding type information, or writing a helper function.
+
+Further, there is no need to document how typical Python constructs behave, including how type hints work.
+
+Bad:
+
+```python
+# Loop 10 times.
+for _ in range(10):
+    pass
+
+# This stores the user's age in days.
+age_in_days = user.age * 365
+```
+
+Instead, comments are helpful to give context that cannot be inferred from reading the code. For example, comments may discuss performance, refer to external documentation / bug links, explain how to use the library, or explain why something was done a particular way.
+
+Good:
+
+```python
+def __hash__(self):
+    # By overriding __hash__ here, rather than using the default implementation,
+    # we get a 10% speedup to `pants list ::` (1000 targets) thanks to more
+    # cache hits. This is safe to do because ...
+    ...
+
+# See https://github.com/LuminosoInsight/ordered-set for the original implementation.
+class OrderedSet:
+    ...
+```
+
+### TODOs
+
+When creating a TODO, first [create an issue](https://github.com/pantsbuild/pants/issues/new) in GitHub. Then, link to the issue # in parentheses and add a brief description.
+
+For example:
+
+```python
+# TODO(#5427): Remove this block once we can get rid of the `globs` feature.
+```
+
+## Strings
+
+### Use `f-strings`
+
+Use f-strings instead of `.format()` and `%`.
+
+```python
+# Good
+f"Hello {name}!"
+
+# Bad
+"Hello {}".format(name)
+"Hello %s" % name
+```
+
+## Conditionals
+
+### Prefer conditional expressions (ternary expressions)
+
+Similar to most languages' ternary expressions using `?`, Python has [conditional expressions](https://stackoverflow.com/a/394814). Prefer these to explicit `if else` statements because we generally prefer expressions to statements, and they often better express the intent of assigning one of two values based on some predicate.
+
+```python
+# Good
+x = "hola" if lang == "spanish" else "hello"
+
+# Discouraged, but sometimes appropriate
+if lang == "spanish":
+    x = "hola"
+else:
+    x = "hello"
+```
+
+Conditional expressions do not work in more complex situations, such as assigning multiple variables based on the same predicate or wanting to store intermediate values in the branch. In these cases, you can use `if else` statements.
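+
+For instance, here is a minimal sketch (with hypothetical variable names) of a case where an `if else` statement reads better than repeating the predicate in two conditional expressions:
+
+```python
+# Both variables depend on the same predicate, so one `if else` statement is
+# clearer than two parallel conditional expressions.
+if lang == "spanish":
+    greeting = "hola"
+    farewell = "adios"
+else:
+    greeting = "hello"
+    farewell = "goodbye"
+```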
+
+### Prefer early returns in functions
+
+Often, functions will have branching based on a condition. When you `return` from a branch, you will exit the function, so you no longer need `elif` or `else` in the subsequent branches.
+
+```python
+# Good
+def safe_divide(dividend: int, divisor: int) -> Optional[float]:
+    if divisor == 0:
+        return None
+    return dividend / divisor
+
+# Discouraged
+def safe_divide(dividend: int, divisor: int) -> Optional[float]:
+    if divisor == 0:
+        return None
+    else:
+        return dividend / divisor
+```
+
+Why prefer this? It reduces nesting and reduces the cognitive load on readers. See [here](https://medium.com/@scadge/if-statements-design-guard-clauses-might-be-all-you-need-67219a1a981a) for more explanation.
+
+## Collections
+
+### Use collection literals
+
+Collection literals are easier to read and have better performance.
+
+We allow the `dict` constructor because it enforces that all the keys are `str`s. However, a literal is usually still preferred.
+
+```python
+# Good
+a_set = {a}
+a_tuple = (a, b)
+another_tuple = (a,)
+a_dict = {"k": v}
+
+# Bad
+a_set = set([a])
+a_tuple = tuple([a, b])
+another_tuple = tuple([a])
+
+# Acceptable
+a_dict = dict(k=v)
+```
+
+### Prefer merging collections through unpacking
+
+Python has several ways to merge iterables (e.g. sets, tuples, and lists): using `+` or `|`, using mutation like `extend()`, and using unpacking with the `*` character. Prefer unpacking: it makes it easier to merge collections with individual elements, it is formatted better by Black, and it allows merging different iterable types together, such as a list and a tuple.
+
+For dictionaries, the only two ways to merge are using mutation like `.update()` or using `**` unpacking (we cannot use PEP 584's `|` operator yet because we need to support \< Python 3.9). Prefer merging with `**` for the same reasons as iterables, and because we prefer expressions to mutation.
+
+```python
+# Preferred
+new_list = [*l1, *l2, "element"]
+new_tuple = (*t1, *t2, "element")
+new_set = {*s1, *s2, "element"}
+new_dict = {**d1, "key": "value"}
+
+# Discouraged
+new_list = l1 + l2 + ["element"]
+new_tuple = t1 + t2 + ("element",)
+new_set = s1 | s2 | {"element"}
+new_dict = dict(d1)
+new_dict["key"] = "value"
+```
+
+### Prefer comprehensions
+
+[Comprehensions](https://python-3-patterns-idioms-test.readthedocs.io/en/latest/Comprehensions.html) should generally be preferred to explicit loops and `map`/`filter` when creating a new collection. (See [https://www.youtube.com/watch?v=ei71YpmfRX4](https://www.youtube.com/watch?v=ei71YpmfRX4) for a deep dive on comprehensions.)
+
+Why avoid `map`/`filter`? Normally, these are fantastic constructs and you'll find them abundantly in the [Rust codebase](./developing-rust.mdx). They are awkward in Python, however, due to poor support for lambdas and because you would typically need to wrap the expression in a call to `list()` or `tuple()` to convert it from a generator expression to a concrete collection.
+
+```python
+# Good
+new_list = [x * 2 for x in xs]
+new_dict = {k: v.capitalize() for k, v in d.items()}
+
+# Bad
+new_list = []
+for x in xs:
+    new_list.append(x * 2)
+
+# Discouraged
+new_list = list(map(lambda x: x * 2, xs))
+```
+
+There are some exceptions, including, but not limited to:
+
+- If mutations are involved, use a `for` loop.
+- If constructing multiple collections by iterating over the same original collection, use a `for` loop for performance.
+- If the comprehension gets too complex, a `for` loop may be appropriate. Although, first consider refactoring with a helper function.
+
+## Classes
+
+### Prefer dataclasses
+
+We prefer [dataclasses](https://realpython.com/python-data-classes/) because they are declarative, integrate nicely with MyPy, and generate useful defaults, such as a sensible `repr` method.
+
+```python
+from dataclasses import dataclass
+
+# Good
+@dataclass(frozen=True)
+class Example:
+    name: str
+    age: int = 33
+
+# Bad
+class Example:
+    def __init__(self, name: str, age: int = 33) -> None:
+        self.name = name
+        self.age = age
+```
+
+Dataclasses should be marked with `frozen=True`.
+
+If you want to validate the input, use `__post_init__`:
+
+```python
+@dataclass(frozen=True)
+class Example:
+    name: str
+    age: int = 33
+
+    def __post_init__(self) -> None:
+        if self.age < 0:
+            raise ValueError(
+                f"Invalid age: {self.age}. Must be a non-negative number."
+            )
+```
+
+If you need a custom constructor, such as to transform the parameters, the Python docs say to use `object.__setattr__` to set the attributes.
+
+```python
+from dataclasses import dataclass
+from typing import Iterable, Tuple
+
+@dataclass(frozen=True)
+class Example:
+    values: Tuple[str, ...]
+
+    def __init__(self, values: Iterable[str]) -> None:
+        object.__setattr__(self, "values", tuple(values))
+```
+
+## Type hints
+
+Refer to the [MyPy documentation](https://mypy.readthedocs.io/en/stable/introduction.html) for an explanation of type hints, including some advanced features you may encounter in our codebase like `Protocol` and `@overload`.
+
+### Annotate all new code
+
+All new code should have type hints. Even simple functions like unit tests should have annotations. Why? MyPy will only check the body of functions if they have annotations.
+
+```python
+# Good
+def test_demo() -> None:
+    assert 1 in "abc"  # MyPy will catch this bug.
+
+# Bad
+def test_demo():
+    assert 1 in "abc"  # MyPy will ignore this.
+```
+
+More precisely, all function definitions should have annotations for their parameters and their return type. MyPy will then tell you which other lines need annotations.
+
+:::note Interacting with legacy code? Consider adding type hints.
+Pants did not widely use type hints until the end of 2019. So, a substantial portion of the codebase is still untyped.
+
+If you are working with legacy code, it is often valuable to start by adding type hints. This will both help you to understand that code and improve the quality of the codebase. Land those type hints as a precursor to your main PR.
+:::
+
+### Prefer `cast()` to override annotations
+
+MyPy will complain when it cannot infer the types of certain lines. You must then either fix the underlying API that MyPy does not understand or explicitly provide an annotation at the call site.
+
+Prefer fixing the underlying API if easy to do, but otherwise, prefer using `cast()` instead of a variable annotation.
+
+```python
+from typing import cast
+
+# Good
+x = cast(str, untyped_method())
+
+# Discouraged
+x: str = untyped_method()
+```
+
+Why? MyPy will warn if the `cast` ever becomes redundant, either because MyPy became more powerful or because the untyped code became typed.
+
+### Use error codes in `# type: ignore` comments
+
+```python
+# Good
+x = "hello"
+x = 0  # type: ignore[assignment]
+
+# Bad
+y = "hello"
+y = 0  # type: ignore
+```
+
+MyPy will output the code at the end of the error message in square brackets.
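+
+For example, for the reassignment above, MyPy reports an error along these lines (the file name and line number here are hypothetical):
+
+```
+demo.py:2: error: Incompatible types in assignment (expression has type "int", variable has type "str")  [assignment]
+```
+
+The `[assignment]` suffix is the code to put in the `# type: ignore[...]` comment.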
+
+### Prefer Protocols ("duck types") for parameters
+
+Python type hints use [Protocols](https://mypy.readthedocs.io/en/stable/protocols.html#predefined-protocols) as a way to express ["duck typing"](https://realpython.com/lessons/duck-typing/). Rather than saying you need a particular class, like a list, you describe which functionality you need and don't care what class is used.
+
+For example, all of these annotations are correct:
+
+```python
+from typing import Iterable, List, MutableSequence, Sequence
+
+x: List = []
+x: MutableSequence = []
+x: Sequence = []
+x: Iterable = []
+```
+
+Generally, prefer using a protocol like `Iterable`, `Sequence`, or `Mapping` when annotating function parameters, rather than using concrete types like `List` and `Dict`. Why? This often makes call sites much more ergonomic.
+
+```python
+# Preferred
+def merge_constraints(constraints: Iterable[str]) -> str:
+    ...
+
+# Now in call sites, these all work.
+merge_constraints([">=3.7", "==3.8"])
+merge_constraints({">=3.7", "==3.8"})
+merge_constraints((">=3.7", "==3.8"))
+merge_constraints(constraint for constraint in all_constraints if constraint.startswith("=="))
+```
+
+```python
+# Discouraged, but sometimes appropriate
+def merge_constraints(constraints: List[str]) -> str:
+    ...
+
+# Now in call sites, we would need to wrap in `list()`.
+constraints = {">=3.7", "==3.8"}
+merge_constraints(list(constraints))
+merge_constraints([constraint for constraint in all_constraints if constraint.startswith("==")])
+```
+
+The return type, however, should usually be as precise as possible so that call sites have better type inference.
+
+## Tests
+
+### Use Pytest-style instead of `unittest`
+
+```python
+# Good
+def test_demo() -> None:
+    assert x is True
+    assert y == 2
+    assert "hello" in z
+
+# Bad
+class TestDemo(unittest.TestCase):
+    def test_demo(self) -> None:
+        self.assertEqual(y, 2)
+```
+
+## Documentation
+
+User documentation uploaded to the [Pantsbuild web docs site](https://www.pantsbuild.org/docs) consists of two sections:
+
+- the reference docs that are generated from help strings in the source code
+- the guides that are generated from the `docs/` directory's Markdown files.
+
+### Reference docs
+
+Not every help string will make it to the website: currently only help strings for global options, goals, subsystems, and targets are published. Please be extra vigilant when writing these and remember that they are going to be rendered as Markdown.
+
+It may be helpful to consider the following:
+
+- use the `softwrap` helper function to turn a multiline string into a softwrapped string (see the sketch at the end of this section)
+- if you experience `mypy` typing issues using `softwrap` to document subclasses of `Field` and `Target` classes, consider using the `help_text` convenience function
+- text inside angle brackets will be ignored when rendered if it is not wrapped in backticks
+- to create a numbered or bullet list, use 2-space indentation (or use the `bullet_list` convenience function)
+- to create a code block, never use indentation; only ever use triple-backtick blocks
+- make sure to use backticks to highlight config sections, command-line arguments, target names, and inline code examples.
+
+It may be difficult to confirm the accuracy of text formatting in plain Python, so you may want to run `pants help` on the relevant target/subsystem to see the resulting string.
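+
+For illustration, here is a minimal sketch of a help string written with `softwrap` (the option being documented is hypothetical, and this assumes the `pants.util.strutil` import path):
+
+```python
+from pants.util.strutil import softwrap
+
+example_help = softwrap(
+    """
+    Whether to frobnicate the `widgets` before packaging.
+
+    If unset, Pants uses the default behavior described in `[example].defaults`.
+    """
+)
+```
+
+`softwrap` joins the hard-wrapped lines of each paragraph into single softwrapped lines, so the source stays within line-length limits while the rendered Markdown flows naturally.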
diff --git a/versioned_docs/version-2.24/docs/contributions/index.mdx b/versioned_docs/version-2.24/docs/contributions/index.mdx
new file mode 100644
index 000000000..d30d96298
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/contributions/index.mdx
@@ -0,0 +1,165 @@
+---
+    title: Contribution overview
+    sidebar_position: 0
+---
+
+The flow for making changes to Pants.
+
+---
+
+We welcome contributions of all types, from fixing typos to bug fixes to new features. For further questions about any of the below, please refer to the [community overview](/community/members).
+
+:::tip Help wanted: identifying bad error messages
+We strive—but sometimes fail—to make every error message easy to understand and to give insight into what went wrong and how to fix it.
+
+If you ever encounter a confusing or mediocre error message, we would love your help identifying it. Please open a [GitHub issue](https://github.com/pantsbuild/pants/issues) with the original Pants command, the error message, and what you found confusing or think could be improved.
+
+(If you'd be interested in then changing the code, we'd be happy to point you in the right direction!)
+:::
+
+## Documentation Fixes
+
+To suggest edits to Pants documentation, fork the [Pants repository](https://github.com/pantsbuild/pants), make changes to files in the `docs/` directory, and submit a PR against the `main` branch. Address feedback from maintainers and, once approved, your changes will be incorporated into the official documentation.
+
+## Pants's tech stack
+
+Most of Pants is written in Python 3. The majority of contributions touch this Python codebase.
+
+We rely on several Python features that you will want to acquaint yourself with:
+
+- [Type hints and MyPy](https://mypy.readthedocs.io/en/stable/)
+- [Dataclasses](https://realpython.com/python-data-classes/)
+- [`async`/`await` coroutines](https://www.python.org/dev/peps/pep-0492)
+  - We do _not_ use `asyncio`. The scheduler is implemented in Rust. We only use `async` coroutines.
+- [Decorators](https://realpython.com/primer-on-python-decorators/)
+- [Comprehensions](https://www.geeksforgeeks.org/comprehensions-in-python/)
+
+Pants's engine is written in Rust. See [Developing Rust](./development/developing-rust.mdx) for a guide on making changes to the internals of Pants's engine.
+
+## First, share your plan
+
+Before investing your time into a code change, it helps to share your interest. This will allow us to give you initial feedback that will save you time, such as pointing you to related code.
+
+To share your plan, please either open a [GitHub issue](https://github.com/pantsbuild/pants/issues) or message us on [Slack](/community/getting-help#slack) (you can start with the #general channel). Briefly describe the change you'd like to make, including a motivation for the change.
+
+If we do not respond within 24 business hours, please gently ping us by commenting "ping" on your GitHub issue or messaging on Slack asking if someone could please take a look.
+
+:::note Tip: Can you split out any "prework"?
+If your change is big, such as adding a new feature, it can help to split it up into multiple pull requests. This makes it easier for us to review and to get passing CI.
+
+This is one reason we encourage you to share your plan with us: we can help you scope out whether it would make sense to split into multiple PRs.
+::: + +## Design docs + +Changes that substantially impact the user experience, APIs, design or implementation, may benefit from a design doc that serves as a basis for discussion. + +We store our design docs in [this Google Drive folder](https://drive.google.com/drive/folders/1LtA1EVPvalmfQ5AIDOqGRR3LV86_qCRZ). If you want to write a design doc, [let us know](/community/getting-help) and if necessary we can give you write access to that folder. + +We don't currently have any guidelines on the structure or format of design docs, so write those as you see fit. + +## Developing your change + +To begin, [set up Pants on your local machine](./development/setting-up-pants.mdx). + +To run a test, run: + +```bash +$ pants test src/python/pants/util/frozendict_test.py +``` + +Periodically, you will want to run MyPy and the autoformatters and linters: + +```bash +# Format un-committed changes +$ pants --changed-since=HEAD fmt + +# Run the pre-push checks, including `check` and `lint` +$ build-support/githooks/pre-push +``` + +See our [Style guide](./development/style-guide.mdx) for some Python conventions we follow. + +:::note You can share works in progress! +You do not need to fully finish your change before asking for feedback. We'd be eager to help you while iterating. + +If doing this, please open your pull request as a "Draft" and prefix your PR title with "WIP". Then, comment on the PR asking for feedback and/or post a link to the PR in [Slack](/community/members). +::: + +## Opening a pull request + +When opening a pull request, start by providing a concise and descriptive title. It's okay if you aren't sure what to put - we can help you to reword it. + +Good titles: + +- Fix typo in `strutil.py` +- Add Thrift code generator for Python +- Fix crash when running `test` with Python 3.9 + +Bad titles: + +- Fix bug +- Fix #8313 +- Add support for Thrift code generation by first adding the file `codegen.py`, then hooking it up, and finally adding tests + +Then, include a description. You can use the default template if you'd like, or use a normal description instead. Link to any corresponding GitHub issues. + +:::note Tip: Review your own PR +It is often helpful to other reviewers if you proactively review your own code. Specifically, add comments to parts where you want extra attention. + +For example: + +- "Do you know of a better way to do this? This felt clunky to write." +- "This was really tricky to figure out because there are so many edge cases. I'd appreciate extra attention here, please." +- "Note that I did not use a dataclass here because I do not want any of the methods like `__eq__` to be generated." + +::: + +:::note FYI: we squash merge +This means that the final commit message will come from your PR description, rather than your commit messages. + +Good commit messages are still very helpful for people reviewing your code; but, your PR description is what will show up in the changelog. +::: + +### CI + +We use GitHub Actions for CI. Look at the "Checks" tab of your PR. + +:::note Flaky tests? +We unfortunately have some flaky tests. If CI fails and you believe it is not related to your change, please comment about the failure so that a maintainer may investigate and restart CI for you. + +Alternatively, you can push an empty commit with `git commit --allow-empty` to force CI to restart. Although we encourage you to still point out the flake to us. +::: + +### Review feedback + +One or more reviewers will leave feedback. 
If you are confused by any of the feedback, please do not be afraid to ask for clarification!
+
+If we do not respond within 24 business hours, please gently ping us by commenting "ping" on your pull request or messaging on Slack asking if someone could please take a look.
+
+Once one or more reviewers have approved—and CI goes green—a reviewer will merge your change.
+
+:::note When will your change be released?
+Your change will be included in the next weekly dev release, which usually happens every Friday or Monday. If you fixed a bug, your change may also be cherry-picked into a release candidate from the prior release series.
+
+See [Release strategy](./releases/release-strategy.mdx).
+:::
+
+## Release notes
+
+We maintain release notes as we go: every pull request should add or adjust the release notes if required. These release notes are files in `docs/notes/`, grouped by release series; for example, `docs/notes/2.22.x.md` includes the release notes for 2.22 releases.
+
+The release notes file is generally grouped by "backend". If you're not sure whether to add release notes, or where to put them, or how to phrase them, feel free to:
+
+- look in other release notes files in `docs/notes` for inspiration
+- ask in `#development` on Slack
+- open a pull request and ask the reviewers
+
+New features and major bug fixes should definitely have release notes, but other changes can opt out: for example, fixes to features that haven't been released, or minor documentation fixes.
+
+There is CI automation to walk us through this, so it's not a problem to forget. Pull request CI enforces that either:
+
+- the PR includes release notes, by having changes in `docs/notes/`
+- someone has opted out, by labelling the PR with `release-notes:not-required` or `category:internal` (the latter means that release notes are optional for all `category:internal` PRs).
+
+For minor releases, the GitHub release description will simply list all the commits except those labelled `release-notes:not-required` or `category:internal`.
diff --git a/versioned_docs/version-2.24/docs/contributions/releases/_category_.json b/versioned_docs/version-2.24/docs/contributions/releases/_category_.json
new file mode 100644
index 000000000..d2674b769
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/contributions/releases/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Releases",
+  "position": 3
+}
diff --git a/versioned_docs/version-2.24/docs/contributions/releases/github-actions-linux-aarch64-runners.mdx b/versioned_docs/version-2.24/docs/contributions/releases/github-actions-linux-aarch64-runners.mdx
new file mode 100644
index 000000000..424368fc9
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/contributions/releases/github-actions-linux-aarch64-runners.mdx
@@ -0,0 +1,98 @@
+---
+    title: GitHub Actions aarch64 runners
+    sidebar_position: 3
+---
+
+---
+
+GitHub Actions does not have affordable hosted aarch64 runners.
+
+For a while we enjoyed hardware donated by the Works on ARM program, but that has now been terminated,
+so we instead rely on self-hosted EC2 instances.
+
+We use [Runs On](https://runs-on.com/) to simplify the management of these instances. Runs On monitors
+requests for runners, launches EC2 instances on the fly to satisfy those requests, and terminates them
+when the workflows complete.
+
+## The Custom AMI
+
+We configure Runs On to launch these instances with a custom AMI that pre-installs the Python interpreters
+needed to bootstrap and test Pants on aarch64.
This custom AMI is built on top of the standard Runs On
+Ubuntu 22 ARM64 (`runs-on-v2.2-ubuntu22-full-arm64-*`) AMI.
+
+It may occasionally be necessary to update this AMI, for example to pick up updates to the underlying standard
+AMI. To do so manually:
+
+### Create a temporary EC2 instance on the standard AMI
+
+In the AWS web console, initiate creation of a temporary instance that we use just to build the custom AMI.
+
+- Any instance type will do, t4g.nano for example.
+- To select the base AMI for the instance, click on "Browse more AMIs", then the "Community AMIs" tab,
+  then search for `runs-on-v2.2-ubuntu22-full-arm64` and pick the latest standard AMI, as described in
+  the Runs On [images repo](https://github.com/runs-on/runner-images-for-aws).
+- Select `pantsbuild.org.bastion` as the instance's SSH keypair.
+- Allow the wizard to create a security group.
+- The default storage settings are fine.
+- Open the Advanced section, scroll all the way down, and set the instance's User Data to the following, to ensure that its SSH daemon runs on startup:
+
+```bash
+#!/bin/bash
+systemctl start ssh
+```
+
+- Click "Launch instance".
+
+### SSH into the temporary EC2 instance
+
+Once the instance is running, navigate to its details page and click on "Connect" to get SSH instructions.
+You will need the `pantsbuild.org.bastion` private key, which is in our 1Password account.
+
+### Install Pythons on the instance
+
+Once you are in a bash prompt on the instance, run the following to install the necessary Pythons:
+
+```bash
+sudo apt install -y software-properties-common
+sudo add-apt-repository -y ppa:deadsnakes/ppa
+sudo apt update
+sudo apt install -y \
+  python3.7 python3.7-dev python3.7-venv \
+  python3.8 python3.8-dev python3.8-venv \
+  python3.9 python3.9-dev python3.9-venv \
+  python3.10 python3.10-dev python3.10-venv \
+  python3.11 python3.11-dev python3.11-venv \
+  python3.12 python3.12-dev python3.12-venv \
+  python3.13 python3.13-dev python3.13-venv
+```
+
+### Create an AMI from the instance
+
+From the instance's actions menu in the web console, select "Images" and then "Create image".
+The image name doesn't strictly matter, but `ubuntu22-full-arm64-python3.7-3.13`
+is a good name for consistency (multiple AMIs may have the same name).
+
+### Terminate the temporary instance
+
+Once the AMI status shows as "Available", terminate the temporary instance and delete its security group
+(whose name will match `launch-wizard-*`).
+
+We hope to [automate the above via Packer](https://runs-on.com/guides/building-custom-ami-with-packer/) at some point.
+
+### Update the Runs On config
+
+Edit `.github/runs-on.yml` and update the `ami` field (a sketch of this file's shape appears at the end of this page). Note that the logical image name in this file
+happens to be the same as the AMI name, but doesn't strictly have to be. This is the name that we
+target in the `runs-on` stanza of a CI job in `generate_github_workflows.py`.
+
+Note that the new AMI will only be used in PR CI jobs after the config change is merged into the
+target branch, so the CI job for the update itself will still use the old AMI.
+
+### Deregister the old AMI
+
+After the config update has been merged into `main` and any release branches it needs to be in,
+we can deregister the old AMI in the AWS web console, to avoid confusion.
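+
+For reference, here is a hypothetical sketch of the `.github/runs-on.yml` entry discussed above. The exact schema is defined by Runs On; everything here other than the `ami` field and the logical image name is an assumption for illustration:
+
+```yaml
+images:
+  ubuntu22-full-arm64-python3.7-3.13:   # logical image name targeted by CI jobs
+    platform: "linux"                   # assumed field
+    arch: "arm64"                       # assumed field
+    ami: "ami-0123456789abcdef0"        # hypothetical AMI ID; replace with the new AMI
+```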
diff --git a/versioned_docs/version-2.24/docs/contributions/releases/github-actions-macos-arm64-runners.mdx b/versioned_docs/version-2.24/docs/contributions/releases/github-actions-macos-arm64-runners.mdx
new file mode 100644
index 000000000..1ffea70c9
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/contributions/releases/github-actions-macos-arm64-runners.mdx
@@ -0,0 +1,211 @@
+---
+    title: GitHub Actions macOS runners
+    sidebar_position: 2
+---
+
+---
+
+Apple is phasing out their X86_64 hardware, and all new macOS systems are based on the M1 ARM64 processor. Pants must run on these systems, which means we need an M1 CI machine on which to test and package Pants.
+
+Unfortunately, GitHub Actions does not yet have hosted runners for macOS ARM64, so we must run our own self-hosted runner. This document describes how to set one up. It is intended primarily for Pants maintainers who have to maintain our CI infrastructure, but since there is not much information online about how to set up self-hosted runners on M1, it may be useful as a reference for other projects as well. One useful resource we did find is [this blog post](https://betterprogramming.pub/run-github-actions-self-hosted-macos-runners-on-apple-m1-mac-b559acd6d783) by Soumya Mahunt, so our thanks to them.
+
+If you find any errors or omissions in this page, please let us know on [Slack](/community/getting-help#slack) or provide corrections via the "Suggest Edits" link above.
+
+## The machine
+
+As yet there aren't many options for a hosted M1 system:
+
+- AWS has a [preview program](https://aws.amazon.com/about-aws/whats-new/2021/12/amazon-ec2-m1-mac-instances-macos/), which you can sign up for and hope to get into. Once these instances are generally available, we can evaluate them as a solution.
+- You can buy an M1 machine and stick it in a closet. You take on the risk of compromising your
+  network if the machine is compromised by a rogue CI job.
+- You can rent a cloud-hosted M1 machine by the month from [MacStadium](https://www.macstadium.com/).
+
+We've gone with the MacStadium approach for now.
+
+## Connecting to the machine
+
+Since this machine is [a pet, not cattle](https://iamondemand.com/blog/devops-concepts-pets-vs-cattle/), we allow ourselves a somewhat manual, bespoke setup process (we can script this up if it becomes necessary). There are two ways to connect to the machine:
+
+- Via VNC remote desktop from another macOS machine (not necessarily an M1)
+- Via SSH
+
+In both cases, the first few setup steps will be done as the user `administrator`, and the initial password for that user is provided by MacStadium. Once we create a role user, the subsequent steps will be run as that user.
+
+### SSH
+
+```shell title="Shell"
+$ ssh administrator@XXX.XXX.XXX.XXX
+(administrator@XXX.XXX.XXX.XXX) Password:
+%
+```
+
+### VNC
+
+Enter `vnc://XXX.XXX.XXX.XXX` in the local machine's Safari address bar, substituting the machine's IP address, as given to you by MacStadium. Safari will prompt you to allow it to open the Screen Sharing app.
+
+Screen Sharing will give you a login prompt. Once logged in, you can control the remote machine's desktop in the Screen Sharing window, and even share the clipboard across the two machines.
+
+In this mode you can use the remote machine's desktop UI to make changes, or you can open a terminal and issue the same commands you would via SSH.
+
+A few of the steps below will have both SSH and VNC options; others only SSH (or a terminal window in a remote desktop), or only VNC.
+
+## Setting up the machine
+
+### Change the initial password
+
+The first step is to change the initial `administrator` password to something secure, since the initial password appears as cleartext in the MacStadium ticket.
+
+#### SSH
+
+```shell
+# Will prompt for both the new and old passwords
+% dscl . -passwd /Users/administrator
+```
+
+#### VNC
+
+Go to  > System Preferences > Users & Groups, select the administrator user, click "Change Password...", and select a strong password.
+
+### Ensure smooth restarts
+
+#### SSH
+
+```shell title="Shell"
+# Ensure that this shows a value of 1
+% pmset -g | grep autorestart
+# If it does not, run this
+% sudo pmset -a autorestart 1
+```
+
+#### VNC
+
+Go to  > System Preferences > Energy Saver and ensure that Restart After Power Failure is checked.
+
+### Install software
+
+Perform the following setup steps as `administrator`; some steps may request your password:
+
+```shell title="Shell"
+# Install Rosetta 2, will prompt to accept a license agreement
+% softwareupdate --install-rosetta
+
+# Install Xcode command-line tools
+# IMPORTANT: This pops up a license agreement window on the desktop,
+# so you must use VNC to accept the license and complete the installation.
+% xcode-select --install
+
+# Install Homebrew
+% /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
+% echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> /Users/administrator/.zshenv
+% eval "$(/opt/homebrew/bin/brew shellenv)"
+
+# Install pyenv
+% brew install pyenv
+
+# Set up pyenv
+% echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.zshenv
+% echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.zshenv
+% echo 'eval "$(pyenv init -)"' >> ~/.zshenv
+% source ~/.zshenv
+
+# Install the AWS CLI
+% brew install awscli
+```
+
+### Create a role user
+
+We don't want to run actions as the administrator user, so we create a role account.
+
+#### SSH
+
+```shell
+# Will prompt for password
+% sudo sysadminctl -addUser gha -fullName "GitHub Actions Runner" -password -
+
+# Allow ssh'ing as gha
+% sudo dseditgroup -o edit -a gha -t user com.apple.access_ssh
+```
+
+#### VNC
+
+Go to  > System Preferences > Users & Groups and create a Standard account with the full name `GitHub Actions Runner`, the account name `gha`, and a strong password.
+
+### Set up auto-login
+
+This must be done from the remote desktop, via VNC, as `administrator`.
+
+Go to  > System Preferences > Users & Groups, and click the lock to make changes.
+
+Click on Login Options and for Automatic login choose GitHub Actions Runner. Enter the `gha` user's password when prompted.
+
+### Set up the role user
+
+Perform the following setup steps after SSHing in as the `gha` role user:
+
+```
+# Set up Homebrew
+% echo 'export PATH=$PATH:/opt/homebrew/bin/' >> ~/.zshenv
+...
+# Set up pyenv
+% echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.zshenv
+% echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.zshenv
+% echo 'eval "$(pyenv init -)"' >> ~/.zshenv
+% source ~/.zshenv
+...
+# Install Python 3.9
+% pyenv install 3.9.13
+% pyenv global 3.9.13
+...
+# Install rustup
+% curl https://sh.rustup.rs -sSf | sh -s -- -y
+```
+
+Note that we use `.zshenv` because the runner will not execute in an interactive shell.
+
+## Setting up the self-hosted runner
+
+### Installing the runner
+
+On the GitHub repo's page, go to [Settings > Actions > Runners](https://github.com/pantsbuild/pants/settings/actions/runners).
+ +Click "New self-hosted runner", select macOS and run all the Download and Configure commands it displays, as `gha`, on the remote machine. Set the labels to [`self-hosted`, `macOS`, `ARM64`, `macOS11`]. + +Accept the default values for other settings. + +**Note:** The ARM64 GitHub Actions runner binary is still in pre-release status. If you don't want to rely on it, you can use the stable X86_64 binary under Rosetta. However in this case its subprocesses will run in X86_64 mode by default as well. So CI processes that care about platform (such as those that build and package native code) must be invoked with the `arch -arm64` prefix. Note that in this case GHA will always set the `X64` label on the runner, so be careful not to use that label for runner selection in your workflows if you also have X86_64 self-hosted runners. + +### Runner setup + +As `gha`, run: + +``` +% cd actions-runner + +# Ensure that the runner starts when the machine starts. +% ./svc.sh install + +# Set up some env vars the runner requires. +% echo 'ImageOS=macos11' >> .env +% echo "XCODE_11_DEVELOPER_DIR=$(xcode-select -p)" >> .env +``` + +## Testing it all out + +Now use the MacStadium web UI to restart the machine. Once it comes back up it +should be able to pick up any job with this setting: + +``` + runs-on: + - self-hosted + - macOS11 + - ARM64 +``` + +## Self-hosted macOS X86_64 Runners + +As of August 2024 we are also running a self-hosted macOS 10.15 x86_64 runner on MacStadium, +as GitHub Actions does not offer hosted runners for this old OS version. It is set up similarly +to the instructions above, with appropriate changes to the `runs-on` labels and env vars. diff --git a/versioned_docs/version-2.24/docs/contributions/releases/index.mdx b/versioned_docs/version-2.24/docs/contributions/releases/index.mdx new file mode 100644 index 000000000..d171217bd --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/releases/index.mdx @@ -0,0 +1,11 @@ +--- + title: Releases + sidebar_position: 2 +--- + +--- + +- [Release strategy](./release-strategy.mdx) +- [Release process](./release-process.mdx) +- [GitHub Actions macOS runners](./github-actions-macos-arm64-runners.mdx) +- [GitHub Actions aarch64 runners](./github-actions-macos-aarch64-runners.mdx) diff --git a/versioned_docs/version-2.24/docs/contributions/releases/release-process.mdx b/versioned_docs/version-2.24/docs/contributions/releases/release-process.mdx new file mode 100644 index 000000000..6ba6f61b7 --- /dev/null +++ b/versioned_docs/version-2.24/docs/contributions/releases/release-process.mdx @@ -0,0 +1,190 @@ +--- + title: Release process + sidebar_position: 1 +--- + +How to release a new version of Pants and its plugins. + +--- + +This page covers the nitty-gritty of executing a release, and is probably only interesting for maintainers. If you're interested in when and why Pants is released, please see the [Release strategy](./release-strategy.mdx) page. + +## Prerequisites + +You only need to set these up once. + +### Create a PGP signing key + +If you already have one, you can reuse it. + +You likely want to use the gpg implementation of pgp. On macOS, you can `brew install gpg`. Once gpg is installed, generate a new key: [https://docs.github.com/en/github/authenticating-to-github/generating-a-new-gpg-key](https://docs.github.com/en/github/authenticating-to-github/generating-a-new-gpg-key). + +Please use a password for your key! + +### Add your PGP key to GitHub. 
+
+See [https://docs.github.com/en/github/authenticating-to-github/adding-a-new-gpg-key-to-your-github-account](https://docs.github.com/en/github/authenticating-to-github/adding-a-new-gpg-key-to-your-github-account).
+
+### Configure Git to use your PGP key
+
+See [https://docs.github.com/en/github/authenticating-to-github/telling-git-about-your-signing-key](https://docs.github.com/en/github/authenticating-to-github/telling-git-about-your-signing-key).
+
+Note: the last step is required on macOS.
+
+### Authenticate with the GitHub API
+
+Ensure that you have a [personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) for your GitHub account in your `.netrc` file:
+
+```
+machine api.github.com
+  login <username>
+  password <token>
+```
+
+## Step 0: Preliminaries
+
+### `dev` - Check for any deprecations
+
+If this is a dev release, ensure that deprecations set to expire in the released version have been removed. To check for this, search the code for the version you're releasing. For example, `git grep 2.9.0.dev0`.
+
+If there is deprecated code that must be removed, you can either:
+
+1. Ping the person who made the deprecation to ask them to remove it.
+2. Remove it yourself, in a precursor PR.
+3. Bump the deprecation removal target back by one dev release.
+
+### `a0` - Release notes for next version
+
+When releasing an `a0` version, we need to prepare the release notes for the _next_ version:
+
+1. Create a pull request adding the release notes file for the next version in `docs/notes/`, e.g. if you're releasing 2.8.0a0, create `docs/notes/2.9.x.md`.
+   1. Copy the title and template over from the prior release, e.g. `2.8.x.md`.
+   2. Delete the content, leaving just the headings, so the file is mostly empty.
+2. NB: this can merge after the release pull request, but should be available soon, so that pull requests that land in `main` can update it.
+
+### `rc` - Check for cherry-picks
+
+If this is a release candidate, ensure that pending cherry-picks have been applied in the release branch. Cherry-picks are usually applied automatically, but this may not always succeed, so [check for any pending cherry-picks](https://github.com/pantsbuild/pants/pulls?q=is%3Apr+label%3Aneeds-cherrypick+is%3Aclosed), and find the relevant ones by looking at the milestone: for instance, if doing a release for 2.16, the relevant cherry-picks are those for milestone `2.16.x` or earlier.
+
+The process may fail in one of two ways:
+
+- The cherry-picking process failed, and tagged the PR with `auto-cherry-picking-failed`: follow the instructions in the comment on the pull request. (This likely means there are merge conflicts that require manual resolution.)
+- The cherry-pick hasn't (yet) run: trigger the automation manually by going to [the GitHub Action](https://github.com/pantsbuild/pants/actions/workflows/auto-cherry-picker.yaml), clicking on the "Run workflow" button, and providing the PR number.
+
+## Step 1: Create the release commit
+
+The release commit is the commit that bumps the VERSION string. For `dev`/`a0` releases this happens in the `main` branch, in the same commit that updates the release notes and the `CONTRIBUTORS.md` file. For `rc` and stable releases, this happens in the relevant stable branch.
+
+### Bump the VERSION
+
+From the `main` branch, run `pants run src/python/pants_release/start_release.py -- --new 2.9.0.dev1 --release-manager your_github_username --publish` with the relevant version and your own GitHub username.
This will create a pull request that:
+
+1. updates `CONTRIBUTORS.md`
+2. bumps the `VERSION` on the branch
+
+### Merge the pull request
+
+Post the PR to the `#development` channel in Slack. Merge once approved and green.
+
+### `a0` - new release cycle
+
+If you're releasing an `a0` release, you must:
+
+1. create the stable branch for that version,
+2. identify pull requests that need changes to their release notes.
+
+#### Create new branch
+
+For example, if you're releasing `2.9.0a0`, create the branch `2.9.x` by running the command below. Make sure you are on your release commit before doing this.
+
+```bash
+$ git checkout -b 2.9.x
+$ git push upstream 2.9.x
+```
+
+#### Identify pull requests needing changes
+
+Find unmerged pull requests with release notes changes that will now be targeting the new release, and so should be updated:
+
+1. Use this `gh` CLI command to find pull requests targeting `main` that touch any release notes file:
+
+   ```shell
+   gh pr list --limit 1000 --json url,files --base main | jq 'map(select(.files | map(.path | startswith("docs/notes/")) | any) | .url)'
+   ```
+
+2. For each of them, add a "Request changes" review that asks for an update. Suggested review text (replace `$OLD_VERSION` and `$NEW_VERSION` as appropriate, e.g. if you're releasing 2.8.0a0, `OLD_VERSION = 2.8`, `NEW_VERSION = 2.9`):
+
+   > Thanks for the contribution. We've just branched for $OLD_VERSION, so merging this pull request now will come out in $NEW_VERSION. Please move the release notes updates to `docs/notes/$NEW_VERSION.x.md` if that's appropriate.
+
+3. In some cases, the release note changes can remain in the old version (for example, if the pull request is a bug fix that needs to be cherry-picked).
+
+## Step 2: Tag the release to trigger publishing
+
+Once you have merged the `VERSION` bump — which will be on `main` for `dev` and `a0` releases, and on the release branch for release candidates — tag the release commit to trigger wheel building and publishing.
+
+First, ensure that you are on your release branch at your version bump commit.
+
+:::note Tip: if new commits have landed after your release commit
+You can reset to your release commit by running `git reset --hard <sha-of-release-commit>`.
+:::
+
+Then, run:
+
+```bash
+./pants run src/python/pants_release/release.py -- tag-release
+```
+
+This will tag the release with your PGP key, and push the tag to origin, which will kick off a [`Release` job](https://github.com/pantsbuild/pants/actions/workflows/release.yaml) to build the wheels and publish them to PyPI.
+
+## Step 3: Test the release
+
+Run this script as a basic smoke test:
+
+```bash
+./pants run src/python/pants_release/release.py -- test-release
+```
+
+You should also check [GitHub Releases](https://github.com/pantsbuild/pants/releases) to ensure everything looks good. Find the version you released, then click it and confirm that the "Assets" list includes PEXes for macOS and Linux.
+
+## Step 4: Run release testing on public repositories
+
+Manually trigger a run of the [public repositories testing workflow](https://github.com/pantsbuild/pants/actions/workflows/public_repos.yaml), specifying the version just published as the "Pants version".
+
+This workflow checks out various open-source repositories that use Pants and runs the given version of Pants against them, to try to validate whether they can upgrade smoothly or whether there are any (obvious) bugs.
The workflow runs the repositories in two configurations: first with the repo's default configuration as a baseline, and then with the specified Pants version (and any additional options).
+
+Once the workflow finishes, look through any failures and determine if there are any interesting/unknown problems, ensuring there are issues filed (and tagged with the appropriate milestone) for them. For instance, a custom plugin that is broken by a plugin API change is okay, but other sorts of breakage might not be. If there's a failure during the baseline, a similar failure during the real (non-baseline) test can be ignored, as it likely means the repository in question is broken.
+
+Alternatively, after starting the workflow, post the link to the in-progress run in `#development` in Slack, so that someone can come back to it when it does finish.
+
+## Step 5: Publish a schema in JSON Schema Store
+
+Some editors can use JSON Schema for better completions when editing TOML files like `pants.toml`.
+The Pants configuration file schema is published at https://www.schemastore.org/. For every stable `2.x.0` release,
+a new schema needs to be generated and uploaded by submitting a PR against https://github.com/SchemaStore/schemastore.
+This is an example pull request for reference: https://github.com/SchemaStore/schemastore/pull/3880.
+
+To produce the `pantsbuild-<version>.json` schema file, run:
+
+```bash
+pants help-all > all-help.json
+pants run build-support/bin/generate_json_schema.py -- --all-help-file=all-help.json
+```
+
+It may be helpful to compare the last schema file with the newly produced one to make sure there are no discrepancies
+(e.g. that the config values have a sensible type and the help strings are rendered adequately). You can download the
+schemas of previous releases from the store website; the JSON files are available at
+`https://json.schemastore.org/pantsbuild-<version>.json`.
+
+Watch out for any configuration parameters that may rely on your local environment, as certain default config values
+will be expanded using the local runtime environment, which is undesirable. The script handles those known config values
+by keeping a list of them; however, it may need to be extended as more options with environment-specific default
+values are added.
+
+## When Things Go Wrong
+
+From time to time, a release will fail. It's a complex process. The first thing to do after you've
+exhausted your knowledge and debugging skills or patience is to contact others. You might reach out
+to the development or maintainers channels on Pantsbuild Slack in the absence of other ideas about
+whom to ask for help.
diff --git a/versioned_docs/version-2.24/docs/contributions/releases/release-strategy.mdx b/versioned_docs/version-2.24/docs/contributions/releases/release-strategy.mdx
new file mode 100644
index 000000000..c3a1afe6d
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/contributions/releases/release-strategy.mdx
@@ -0,0 +1,100 @@
+---
+    title: Release strategy
+    sidebar_position: 0
+---
+
+Our approach to semantic versioning + time-based releases.
+
+---
+
+Pants release cycles flow through:
+
+1. `dev` releases from the `main` branch,
+2. an `a` (alpha) release, which is the first on a stable branch,
+3. `rc` releases, which have begun to stabilize on a stable branch, and will become a stable release,
+4. stable releases, which are our most trusted.
+
+Pants follows semantic versioning, along with using regular time-based dev releases. We follow a strict [Deprecation policy](../../releases/deprecation-policy.mdx).
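+
+Concretely, a single release series progresses through version strings like the following (the exact number of `dev` and `rc` releases varies per series):
+
+```
+2.1.0.dev0 → 2.1.0.dev1 → 2.1.0.dev2    # weekly dev releases from main
+2.1.0a0                                 # first release on the 2.1.x stable branch
+2.1.0rc0 → 2.1.0rc1                     # release candidates on the stable branch
+2.1.0                                   # stable release
+```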
:::note Tip: join the mailing group for release announcements
See [Community](/community/members).
+
+Also see [Upgrade tips](../../releases/upgrade-tips.mdx) for suggestions on how to effectively upgrade Pants versions.
+:::
+
+## Stable releases
+
+Stable releases occur roughly every six weeks. They have been vetted through at least one alpha and one release candidate.
+
+Stable releases are named with the major, minor, and patch version (with no suffix). For example, `2.1.0` or `2.2.1`.
+
+Any new patch versions will only include:
+
+- Backward-compatible bug fixes
+- Backward-compatible feature backports, as long as they:
+  1. Are requested by users
+  2. Are deemed low-risk and are easy to backport
+  3. Do not introduce new deprecations
+
+Patch versions after `*.0` (e.g. `2.2.1`) must also have had at least one release candidate, but no alpha releases are required.
+
+:::caution Stable releases may still have bugs
+We try our best to write bug-free code, but, like everyone, we sometimes make mistakes.
+
+If you encounter a bug, please gently let us know by opening a GitHub issue or messaging us on Slack. See [Community](/community/members).
+:::
+
+### Stable release managers
+
+Our weekly [release process](./release-process.mdx) is executed by a rotating "maintainer of the week" (MOTW). But because the entire stable release process (through `dev`s, to an `a`, past `rc`s, etc.) can take 8 to 12 weeks, we additionally assign a release manager per stable release to have an overarching view and drive the manual steps of a release that are not covered by the weekly process.
+
+The release manager for a stable release is a maintainer chosen informally in Slack to run the upcoming release. They create or are assigned a stable release ticket ([example](https://github.com/pantsbuild/pants/issues/20578)). A stable release has some additional requirements, most of which are listed in the weekly release process steps, but which can require time outside of the weekly process:
+
+1. Writing (or [automating!](https://github.com/pantsbuild/pants/discussions/19247)) the creation of a release blog ([example](https://www.pantsbuild.org/blog/2024/03/27/pants-2-20)).
+2. Tracking/triaging issues which block finalizing the release, in [its milestone](https://github.com/pantsbuild/pants/milestones). This should include rejecting or postponing non-blocking bug fixes and cherry-picks, if they might delay finalizing a release inappropriately.
+3. Occasionally running `rc` releases outside of the weekly release process in order to get feedback more quickly.
+   * For example: if a release-blocking issue is fixed shortly after the weekly release (meaning that it might be e.g. 6 days until the next `rc` is cut by the weekly process), the release manager might decide to cut another `rc` immediately.
+4. Deciding when to cut the stable release for a release branch, once all blockers are fixed and there's been sufficient testing. The release can be cut either by letting the MOTW know, or by executing the release themselves.
+
+## Release candidates
+
+`rc` releases are on track to being stable, but may still have some issues.
+
+Release candidates are named with the major, minor, and patch version, and end in `rc` and a number. For example, `2.1.0rc0` or `2.1.0rc1`.
+
+Release candidates are subject to the constraints on cherry-picks mentioned in the Stable releases section.
+
+:::note When is a release "stable" enough?
+A stable release should not be created until at least five business days have passed since the first `rc0` release. Typically, during this time, there will be multiple release candidates to fix any issues discovered.
+
+A stable release can be created two business days after the most recent release candidate if there are no more blockers.
+:::
+
+:::tip Help wanted: testing out release candidates
+We greatly appreciate when users test out release candidates. While we do our best to have comprehensive CI—and we "dogfood" release candidates—we are not able to test all the ways Pants is used in the wild.
+
+If you encounter a bug, please gently let us know by opening a GitHub issue or messaging us on Slack. See [Community](/community/members).
+:::
+
+## Alpha releases
+
+Alpha (`a`) releases are the first releases on a stable branch (after `dev` releases, and before `rc`s). Although they have not received any testing beyond what a `dev` release may have received, they are a particular focus for testing, because they represent code which will eventually become an `rc`.
+
+Alpha releases are named with the major, minor, and patch version, and end in `a` and a number. For example, `2.1.0a0`.
+
+Except in extenuating circumstances, there will usually only be a single alpha release per series.
+
+## Dev releases
+
+`dev` releases are weekly releases that occur directly from the `main` branch, without the additional vetting that is applied to stable releases, alpha releases, or release candidates. Usually, these are released on Friday or Monday.
+
+Dev releases help to ensure a steady release cadence from `main` by filling in the gaps between the more time-consuming stable releases.
+
+Dev releases are named with the major, minor, and patch version, and end in `.dev` and a number. For example, `2.1.0.dev0` or `2.1.0.dev1`.
+
+Dev releases can include any changes, so long as they comply with the [Deprecation policy](../../releases/deprecation-policy.mdx).
+
+:::note How many dev releases until starting a release candidate?
+Usually, we release 3-4 dev releases before switching to the alpha release `a0`. This means we usually release `dev0`, `dev1`, `dev2`, sometimes `dev3`, and then `a0`.
+
+We try to limit the number of changes in each stable release to make it easier for users to upgrade. If the dev releases have been particularly disruptive, such as making major deprecations, we may start a release candidate sooner, such as after `dev1`.
+:::
diff --git a/versioned_docs/version-2.24/docs/docker/_category_.json b/versioned_docs/version-2.24/docs/docker/_category_.json
new file mode 100644
index 000000000..c586575d7
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/docker/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Docker",
+  "position": 9
+}
diff --git a/versioned_docs/version-2.24/docs/docker/index.mdx b/versioned_docs/version-2.24/docs/docker/index.mdx
new file mode 100644
index 000000000..5b9cc0cac
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/docker/index.mdx
@@ -0,0 +1,412 @@
+---
+    title: Docker overview
+    sidebar_position: 0
+---
+
+How to build Docker images containing artifacts built by Pants.
+
+---
+
+Docker images typically bundle build artifacts, such as PEX files, wheels, and loose files, with other runtime requirements, such as a Python interpreter.
+
+Pants [makes it easy to embed the artifacts Pants builds into your Docker images](https://blog.pantsbuild.org/pants-pex-and-docker/) for easy deployment.
+## Enabling the Docker backend
+
+To use Pants's Docker support, you must enable the appropriate backend:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  ...
+  "pants.backend.docker",
+  ...
+]
+```
+
+## Adding `docker_image` targets
+
+A Docker image is built from a recipe specified by a [Dockerfile](https://docs.docker.com/engine/reference/builder/). When you build Docker images with Pants, instead of running `docker build` on the Dockerfile directly, you let Pants do that for you.
+
+Pants uses [`docker_image`](../../reference/targets/docker_image.mdx) [targets](../using-pants/key-concepts/targets-and-build-files.mdx) to indicate which Dockerfiles you want Pants to know about, and to add any necessary metadata.
+
+You can generate initial BUILD files for your Docker images, using [tailor](../getting-started/initial-configuration.mdx#5-generate-build-files):
+
+```
+❯ pants tailor ::
+Created src/docker/app1/BUILD:
+  - Add docker_image target docker
+Created src/docker/app2/BUILD:
+  - Add docker_image target docker
+```
+
+Or you can add them manually, such as:
+
+```python title="src/docker/app1/BUILD"
+docker_image(name="docker")
+```
+
+Alternatively, you may provide the Docker build instructions inline in your BUILD file as [`instructions`](../../reference/targets/docker_image.mdx#instructions) on your `docker_image` if you don't want to create a `Dockerfile`.
+
+```python title="src/docker/app1/BUILD"
+docker_image(
+    name="docker",
+    instructions=[
+        "FROM python:3.8",
+        "RUN ...",
+    ]
+)
+```
+
+:::caution The `docker_image` `instructions` field
+Each `docker_image` uses the `Dockerfile` referred to by its `source` field, unless you have provided a value to the `instructions` field.
+:::
+
+## Adding dependencies to your `docker_image` targets
+
+A Dockerfile is built in a _context_: a set of files that the commands in the Dockerfile can reference, e.g., by copying them into the image.
+
+When you run `docker build` directly, the context is usually a directory within your repo containing the Dockerfile (typically at the root of the context) and any files that the build requires. If those files were themselves the product of a build step, or if they were sources from elsewhere in the repo, then you would have to copy them into the context.
+
+Pants, however, takes care of assembling the context for you. It does so using the dependencies of the [`docker_image`](../../reference/targets/docker_image.mdx) target, which can include:
+
+- Loose files specified using [`file` / `files` targets](../using-pants/assets-and-archives.mdx#files).
+- Artifacts packaged from a variety of targets, such as [`pex_binary`](../../reference/targets/pex_binary.mdx), [`python_distribution`](../../reference/targets/python_distribution.mdx), [`archive`](../../reference/targets/archive.mdx), and any other target that can be built via the [package](../../reference/goals/package.mdx) goal, including other Docker images.
+
+The context is assembled as follows:
+
+- The sources of `file` / `files` targets are assembled at their relative path from the repo root.
+- The artifacts of any packaged targets are built, as if by running `pants package`, and placed in the context using the artifact's `output_path` field.
+  - The `output_path` defaults to the scheme `path.to.directory/tgt_name.ext`, e.g. `src.python.helloworld/bin.pex`.
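+
+For illustration, here is a minimal sketch (with hypothetical paths and target names) of how `output_path` determines where a packaged artifact lands in the Docker build context:
+
+```python
+# src/python/helloworld/BUILD
+pex_binary(
+    name="bin",
+    entry_point="main.py",
+    # Without this field, the artifact would land in the context at the
+    # default path `src.python.helloworld/bin.pex`.
+    output_path="helloworld/bin.pex",
+)
+```
+
+A `Dockerfile` in the repo could then reference the artifact at its `output_path`, e.g. `COPY helloworld/bin.pex /bin/helloworld`.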
+
+### Dependency inference support
+
+When you `COPY` PEX binaries into your image, the dependency on the `pex_binary` target will be inferred, so you don't have to add that explicitly to the list of `dependencies` on your `docker_image` target. For example, the `pex_binary` target `src/python/helloworld:bin` has the default `output_path` of `src.python.helloworld/bin.pex`. So, Pants can infer a dependency based on the line `COPY src.python.helloworld/bin.pex /bin/helloworld`. This inference is also done for targets referenced by their target address in build arguments, for example:
+
+```dockerfile
+FROM python:3.9
+ARG PEX_BIN=src:my_target
+COPY $PEX_BIN /app/my_app
+```
+
+Inference for Go binaries and artifacts of other packaged targets is similar.
+
+Dependencies on `file` / `files` targets are likewise inferred from copied files, for example:
+
+```dockerfile
+FROM python:3.9
+COPY src/file.txt /app/
+```
+
+Inference is also supported for `docker_image` targets specified in build arguments, for example:
+
+```dockerfile
+ARG BASE_IMAGE=:base
+FROM $BASE_IMAGE
+```
+
+In the example, `:base` is the base image target address specified using a relative path. Pants will provide the built Docker image name for that target as the `BASE_IMAGE` build arg to the Docker build command.
+
+## Building a Docker image
+
+You build Docker images using the `package` goal:
+
+```
+❯ pants package path/to/Dockerfile
+```
+
+### Build arguments
+
+To provide values to any [build `ARG`s](https://docs.docker.com/engine/reference/builder/#arg) in the Dockerfile, you can list them in the `[docker].build_args` option, which will apply to all images. You can also list any image-specific build args in the field `extra_build_args` for the `docker_image` target.
+
+The build args use the same syntax as the [docker build --build-arg](https://docs.docker.com/engine/reference/commandline/build/#set-build-time-variables---build-arg) command line option: `VARNAME=VALUE`, where the value is optional, and if left out, the value is taken from the environment instead.
+
+```toml tab={"label":"pants.toml"}
+[docker]
+build_args = [
+  "VAR1=value1",
+  "VAR2"
+]
+```
+
+```python tab={"label":"example/BUILD"}
+docker_image(
+    name="docker",
+    extra_build_args=["VAR1=my_value", "VAR3"]
+)
+```
+
+```dockerfile tab={"label":"example/Dockerfile"}
+FROM python:3.8
+ARG VAR1
+ARG VAR2
+ARG VAR3=default
+...
+```
+
+### Target build stage
+
+When your `Dockerfile` is a multi-stage build file, you may specify which stage to build with the [`--docker-build-target-stage`](../../reference/subsystems/docker.mdx#build_target_stage) option for all images, or provide a per-image setting with the `docker_image` field [`target_stage`](../../reference/targets/docker_image.mdx#target_stage).
+
+```dockerfile
+FROM python:3.8 AS base
+RUN ..
+
+FROM base AS img
+COPY files /
+```
+
+```
+❯ pants package --docker-build-target-stage=base Dockerfile
+```
+
+See this [blog post](https://blog.pantsbuild.org/optimizing-python-docker-deploys-using-pants/) for more examples using multi-stage builds.
+
+### Build time secrets
+
+Secrets are supported for `docker_image` targets with the [`secrets`](../../reference/targets/docker_image.mdx#secrets) field. The defined secrets may then be mounted in the `Dockerfile` as [usual](https://docs.docker.com/develop/develop-images/build_enhancements/#new-docker-build-secret-information).
+
+```python tab={"label":"BUILD"}
+docker_image(
+    secrets={
+        "mysecret": "mysecret.txt",
+    }
+)
+```
+
+```dockerfile
+FROM python:3.8
+
+# shows secret from default secret location:
+RUN --mount=type=secret,id=mysecret cat /run/secrets/mysecret
+
+# shows secret from custom secret location:
+RUN --mount=type=secret,id=mysecret,dst=/foobar cat /foobar
+```
+
+```text tab={"label":"mysecret.txt"}
+very-secret-value
+```
+
+:::note Secret file path
+Secrets should not be checked into version control. Use absolute paths to reference a file that is not in the project source tree. However, to keep the BUILD file as hermetic as possible, the files may, for instance, be placed within the project source tree at build time, and referenced with a path relative to the project root (the default), or relative to the directory of the BUILD file when prefixed with `./`.
+
+See the example for the [`secrets`](../../reference/targets/docker_image.mdx#secrets) field.
+:::
+
+### Buildx support
+
+Buildx (using BuildKit) supports exporting build cache to an external location, making it possible to import it in future builds. Cache backends can be configured using the [`cache_to`](../../reference/targets/docker_image.mdx#cache_to) and [`cache_from`](../../reference/targets/docker_image.mdx#cache_from) fields.
+
+To use BuildKit with Pants, enable the [Containerd Image Store](https://docs.docker.com/desktop/containerd/), either via [Docker Desktop settings](https://docs.docker.com/storage/containerd/) or by [setting daemon config](https://docs.docker.com/storage/containerd/#enable-containerd-image-store-on-docker-engine):
+```json
+{
+  "features": {
+    "containerd-snapshotter": true
+  }
+}
+```
+
+Optionally, run a build with the Docker CLI directly to validate buildx support on your system:
+
+```
+❯ docker buildx build -t pants-cache-test:latest \
+  --cache-to type=local,dest=/tmp/docker/pants-test-cache \
+  --cache-from type=local,src=/tmp/docker/pants-test-cache .
+```
+
+Configure Pants to use buildx:
+
+```toml tab={"label":"pants.toml"}
+[docker]
+use_buildx = true
+```
+
+```python tab={"label":"example/BUILD"}
+docker_image(
+    name="with-local-cache-backend",
+    cache_to={
+        "type": "local",
+        "dest": "/tmp/docker-cache/pants-example"
+    },
+    cache_from=[{
+        "type": "local",
+        "src": "/tmp/docker-cache/pants-example"
+    }]
+)
+```
+
+For working examples, including multi-platform builds with GitHub Actions, refer to the [example-docker](https://github.com/pantsbuild/example-docker) repository.
+
+### Build Docker image example
+
+This example copies both a `file` and a `pex_binary`. The file is specified as an explicit dependency in the `BUILD` file, whereas the `pex_binary` dependency is inferred from the path in the `Dockerfile`.
+
+```python tab={"label":"src/docker/hw/BUILD"}
+file(name="msg", source="msg.txt")
+
+docker_image(
+    name="helloworld",
+    dependencies=[":msg"],
+)
+```
+
+```dockerfile tab={"label":"src/docker/hw/Dockerfile"}
+FROM python:3.8
+ENTRYPOINT ["/bin/helloworld"]
+COPY src/docker/hw/msg.txt /var/msg
+COPY src.python.hw/bin.pex /bin/helloworld
+```
+
+```text tab={"label":"src/docker/hw/msg.txt"}
+Hello, Docker!
+```
+
+```python tab={"label":"src/python/hw/BUILD"}
+python_sources(name="lib")
+
+pex_binary(name="bin", entry_point="main.py")
+```
+
+```python tab={"label":"src/python/hw/main.py"}
+import os
+
+msg = "Hello"
+if os.path.exists("/var/msg"):
+    with open("/var/msg") as fp:
+        msg = fp.read().strip()
+
+print(msg)
+```
+
+```
+❯ pants package src/docker/hw/Dockerfile
+08:09:22.86 [INFO] Completed: Building local_dists.pex
+08:09:23.80 [INFO] Completed: Building src.python.hw/bin.pex
+08:10:42.51 [INFO] Completed: Building docker image helloworld:latest
+08:10:42.51 [INFO] Built docker image: helloworld:latest
+Docker image ID: 1fe744d52222
+```
+
+## Running a Docker image
+
+You can ask Pants to run a Docker image on your local system with the `run` goal:
+
+```
+❯ pants run src/docker/hw/Dockerfile
+Hello, Docker!
+```
+
+Any arguments for the Docker container may be provided as pass-through args to the `run` goal, as usual. That is, use either the `--args` option, or place them after all other arguments following a separating double-dash:
+
+```
+❯ pants run src/docker/hw/Dockerfile -- arguments for the container
+Hello, Docker!
+```
+
+To provide any command line arguments to the `docker run` command, you may use the `--docker-run-args` option:
+
+```
+❯ pants run --docker-run-args="-p 8080 --name demo" src/docker/hw/Dockerfile
+```
+
+As with all configuration options, this is not limited to the command line; it may also be configured in a Pants rc file (such as `pants.toml`) via the `[docker].run_args` option, or as the environment variable `PANTS_DOCKER_RUN_ARGS`.
+
+## Publishing images
+
+Pants can push your images to registries using `pants publish`:
+
+```shell
+❯ pants publish src/docker/hw:helloworld
+# Will build the image and push it to all registries, with all tags.
+```
+
+Publishing may be skipped per registry or entirely per `docker_image` using `skip_push`.
+
+See [here](./tagging-docker-images.mdx) for how to set up registries.
+
+## Docker configuration
+
+To configure the Docker binary, set `[docker].env_vars` in your `pants.toml` configuration file. You use that key to list environment variables, such as `DOCKER_CONTEXT` or `DOCKER_HOST`, that will be set in the environment of the `docker` binary when Pants runs it. Each listed value can be of the form `NAME=value`, or just `NAME`, in which case the value will be inherited from the Pants process's own environment.
+
+```toml title="pants.toml"
+[docker]
+env_vars = [
+  "DOCKER_CONTEXT=pants_context",
+  "DOCKER_HOST"
+]
+```
+
+:::note Docker environment variables
+See [Docker documentation](https://docs.docker.com/engine/reference/commandline/cli/#environment-variables) for the authoritative table of environment variables for the Docker CLI.
+:::
+
+## Docker authentication
+
+To authenticate, you usually will need to:
+
+1. Set up a Docker config file, e.g. `~/.docker/config.json`.
+2. Tell Pants about the config file by setting `[docker].env_vars`.
+3. Tell Pants about any tools needed for authentication to work by setting `[docker].tools`.
+
+For example, a config file using the [GCloud helper](https://cloud.google.com/container-registry/docs/advanced-authentication#gcloud-helper) might look like this:
+
+```json
+{
+  "credHelpers": {
+    "europe-north1-docker.pkg.dev": "gcloud"
+  }
+}
+```
+
+Then, tell Pants to use this config by setting `[docker].env_vars = ["DOCKER_CONFIG=%(homedir)s/.docker"]` in `pants.toml`, for example.
+
+Most authentication mechanisms will also require tools exposed on the `$PATH` to work.
 Teach Pants about those by setting the names of the tools in `[docker].tools`, and ensuring that they show up on your `$PATH`. For example, GCloud authentication requires `dirname`, `readlink` and `python3`.
+
+```toml title="pants.toml"
+# Example GCloud authentication.
+
+[docker]
+env_vars = ["DOCKER_CONFIG=%(homedir)s/.docker"]
+tools = [
+  "docker-credential-gcr", # or docker-credential-gcloud when using artifact registry
+  "dirname",
+  "readlink",
+  "python3",
+  # These may be necessary if using Pyenv-installed Python.
+  "cut",
+  "sed",
+  "bash",
+]
+```
+
+You may need to set additional environment variables with `[docker].env_vars`.
+
+:::note How to troubleshoot authentication
+It can be tricky to figure out what environment variables and tools are missing, as the output often has indirection.
+
+It can help to simulate a hermetic environment by using `env -i`. With credential helpers, it also helps to directly invoke the helper without Docker and Pants. For example, you can symlink the tools you think you need into a directory like `/some/isolated/directory`, then run the below:
+
+```
+❯ echo europe-north1-docker.pkg.dev | env -i PATH=/some/isolated/directory docker-credential-gcr get
+{
+  "Secret": "ya29.A0ARrdaM-...-ZhScVscwTVtQ",
+  "Username": "_dcgcloud_token"
+}
+```
+
+:::
+
+## Linting Dockerfiles with Hadolint
+
+Pants can run [Hadolint](https://github.com/hadolint/hadolint) on your Dockerfiles to check for errors and mistakes:
+
+```
+❯ pants lint src/docker/hw/Dockerfile
+```
+
+This must first be enabled by activating the Hadolint backend:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = ["pants.backend.docker.lint.hadolint"]
+```
diff --git a/versioned_docs/version-2.24/docs/docker/tagging-docker-images.mdx b/versioned_docs/version-2.24/docs/docker/tagging-docker-images.mdx
new file mode 100644
index 000000000..ffc0cde23
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/docker/tagging-docker-images.mdx
@@ -0,0 +1,339 @@
+---
+    title: Tagging Docker images
+    sidebar_position: 1
+---
+
+How to set registry, repository and tag names on your images.
+
+---
+
+## Configuring registries
+
+A `docker_image` target takes an optional `registries` field, whose value is a list of registry endpoints and aliases:
+
+```python title="src/example/BUILD"
+docker_image(
+    name="demo",
+    registries=[
+        "reg1.company.internal",
+        "@company-registry2",
+    ]
+)
+```
+
+When publishing this image, it will be pushed to these registries by default.
+
+In order to provide registry-specific configuration, add them to the Pants configuration under
+`[docker.registries.<alias>]` and refer to them by their alias from the `docker_image` targets,
+using a `@` prefix.
+
+Options for `registries` in `pants.toml`:
+
+- `address` - The registry endpoint.
+
+- `default` - Use this registry for all `docker_image` targets that do not provide a value for
+  the `registries` field. Multiple registries may be used as default at the same time.
+
+- `extra_image_tags` - Registry-specific version tags to apply to the image when using this
+  registry.
+
+- `repository` - Format the repository part of the image name for this registry. See [Setting a
+  repository name](#setting-a-repository-name) for details of this option.
+
+- `skip_push` - Do not push images to this registry during `pants publish`.
+
+- `use_local_alias` - Use the registry alias as a shorter name to use locally, such as when running
+  an image, useful if the address is long and unwieldy.
 When building images using `pants package`,
+  the image will be tagged with all image names for the target, whereas when simply running an
+  image with `pants run`, only the shorter image name will be tagged, to avoid cluttering the Docker
+  images repository. The shorter image names are automatically skipped for any push operations.
+
+Example:
+
+```toml tab={"label":"pants.toml"}
+[docker.registries.company-registry1]
+address = "reg1.company.internal"
+default = true
+extra_image_tags = ["dev"]
+
+[docker.registries.company-registry2]
+address = "reg2.company.internal"
+skip_push = true
+
+[docker.registries.company-registry3]
+address = "reg3.company.internal"
+repository = "{parent_directory}/{name}"
+use_local_alias = true
+```
+
+```python tab={"label":"src/example/BUILD"}
+docker_image(name="demo")
+
+# This is equivalent to the previous target,
+# since company-registry1 is the default registry:
+docker_image(
+    name="demo",
+    registries=["@company-registry1"],
+)
+
+# You can mix named and direct registry references.
+docker_image(
+    name="demo2",
+    registries=[
+        "@company-registry2",
+        "ext-registry.company-b.net:8443",
+    ]
+)
+```
+
+## Setting a repository name
+
+In Docker parlance, an image is identified by a _repository_ and one or more _tags_ within that repository.
+
+You set a repository name using the `repository` field on `docker_image`:
+
+```python tab={"label":"src/example/BUILD"}
+docker_image(
+    name="demo",
+    repository="example/demo",
+)
+```
+
+```shell
+$ pants package src/example:demo
+# Will build the image: example/demo:latest
+```
+
+To use a repository only for a specific registry, provide a `repository` value in the registry
+configuration, and this can contain placeholders in curly braces that will be interpolated for each
+image name.
+
+```toml title="pants.toml"
+[docker.registries.demo]
+address = "reg.company.internal"
+repository = "example/{name}"
+```
+
+You can also specify a default repository name in config, and this name can contain placeholders in
+curly braces that will be interpolated for each `docker_image`:
+
+```toml tab={"label":"pants.toml"}
+[docker]
+default_repository = "{directory}/{name}"
+```
+
+```python tab={"label":"src/example/BUILD"}
+docker_image(
+    name="demo",
+)
+```
+
+The default placeholders are:
+
+- `{name}`: The name of the `docker_image` target.
+- `{directory}`: The folder name of the docker_image's BUILD file.
+- `{parent_directory}`: The parent folder name of `{directory}`.
+- `{full_directory}`: The full path to the BUILD file.
+- `{build_args.ARG_NAME}`: Each defined Docker build arg is available for interpolation under the `build_args.` prefix.
+- `{default_repository}`: The default repository from configuration.
+- `{target_repository}`: The repository on the `docker_image` if provided, otherwise the default repository.
+
+Since repository names often conform to patterns like these, this can save you some boilerplate
+by allowing you to omit the `repository` field on each `docker_image`. But you can always override
+this field on specific `docker_image` targets, of course. In fact, you can use these placeholders in
+the `repository` field as well, if you find that helpful.
+
+See [String interpolation using placeholder values](#string-interpolation-using-placeholder-values) for more information.
+
+## Tagging images
+
+When Docker builds images, it can tag them with a set of tags.
 Pants will apply the tags listed in
+the `image_tags` field of `docker_image`, and any additional tags if defined from the registry
+configuration (see [Configuring registries](#configuring-registries)).
+
+(Note that the field is named `image_tags` and not just `tags`, because Pants has [its own tags
+concept](../../reference/targets/target#tags), which is unrelated.)
+
+```python title="src/example/BUILD"
+docker_image(
+    name="demo",
+    repository="example/demo",
+    image_tags=["1.2", "example"]
+)
+```
+
+When Pants builds the `src/example:demo` target, a single image will be built, with two tags applied:
+
+- `example/demo:1.2`
+- `example/demo:example`
+
+It's often useful to keep versions of derived images and their base images in sync. Pants helps you
+out with this by interpolating tags referenced in `FROM` commands in your Dockerfile into the
+`image_tags` in the corresponding `docker_image`:
+
+```python tab={"label":"src/example/BUILD"}
+# These three are equivalent
+docker_image(name="demo1", image_tags=["{tags.upstream}"])
+docker_image(name="demo2", image_tags=["{tags.stage0}"])
+# The first FROM may also be referred to as "baseimage"
+docker_image(name="demo3", image_tags=["{tags.baseimage}"])
+
+# Any stage may be used, and being a format string, you may add extra text as well.
+docker_image(name="demo4", image_tags=["{tags.stage1}-custom-suffix"])
+```
+
+```dockerfile tab={"label":"src/example/Dockerfile"}
+FROM upstream:1.2 as upstream
+# ...
+FROM scratch
+# ...
+```
+
+This way you can specify a version just once, on the base image, and the derived images will
+automatically acquire the same version.
+
+You may also use any Docker build arguments (when configured as described in [Docker build
+arguments](../docker#build-arguments)) for interpolation into the `image_tags` in the corresponding
+`docker_image`:
+
+```python title="src/example/BUILD"
+docker_image(image_tags=["{build_args.ARG_NAME}"])
+```
+
+## Using env vars to include dynamic data in tags
+
+You can interpolate dynamic data, such as the current Git commit sha, in an image tag, using environment variables and Docker build args.
+
+For example, you can declare a custom build arg, either in `extra_build_args` for a specific `docker_image` target, or for all `docker_image` targets in `pants.toml`:
+
+```toml
+# pants.toml
+[docker]
+build_args = ["GIT_COMMIT"]
+```
+
+and use this build arg in the image tag:
+
+```python
+# src/example/BUILD
+docker_image(name="demo", image_tags=["1.2-{build_args.GIT_COMMIT}"])
+```
+
+Then, if you run Pants with the data set in an environment variable of the same name:
+
+```
+$ GIT_COMMIT=$(git rev-parse HEAD) pants package src/example:demo
+```
+
+the value from the environment will be used.
+
+:::note Generating dynamic tags in a plugin
+If you don't want to use the environment variable method described above, you'll need to write some custom plugin code. Don't hesitate to [reach out](/community/getting-help) for help with this.
+
+We are looking into making some common dynamic data, such as the git sha, automatically available in the core Docker plugin in the future.
+:::
+
+## Providing additional image tags with a plugin
+
+For cases where more customization is required than environment variables and interpolation can provide, the next option is to write a plugin to provide additional tags when building images.
+
+Demonstrated with an example:
+
+```python title="example/plugin.py"
+from pants.backend.docker.target_types import DockerImageTagsRequest, DockerImageTags
+from pants.engine.unions import UnionRule
+from pants.engine.rules import rule, collect_rules
+from pants.engine.target import Target
+
+
+class CustomDockerImageTagsRequest(DockerImageTagsRequest):
+    @classmethod
+    def is_applicable(cls, target: Target) -> bool:
+        # Optional. Opt out on a per-target basis. `some_condition` is a
+        # placeholder for your own predicate.
+        if some_condition(target):
+            return False
+        else:
+            return True
+
+
+@rule
+async def custom_image_tags(request: CustomDockerImageTagsRequest) -> DockerImageTags:
+    custom_tags = ["some", "tags"]
+    return DockerImageTags(custom_tags)
+
+
+def rules():
+    return (
+        *collect_rules(),
+        UnionRule(DockerImageTagsRequest, CustomDockerImageTagsRequest),
+    )
+```
+
+## All together: Registries, Repositories and Tags
+
+To illustrate how all the above work together, this target:
+
+```python title="src/example/BUILD"
+docker_image(
+    name="demo",
+    repository="example/demo",
+    registries=["reg1", "reg2"],
+    image_tags=["1.0", "latest"]
+)
+```
+
+Will create a single image with these full names:
+
+```
+reg1/example/demo:1.0
+reg1/example/demo:latest
+reg2/example/demo:1.0
+reg2/example/demo:latest
+```
+
+## String interpolation using placeholder values
+
+As we've seen above, some fields of the `docker_image` target support replacing placeholder values in curly braces with variable text, such as a build arg or base image tag for instance.
+
+The interpolation context (the available placeholder values) depends on which field it is used in. These are the common values available for all fields:
+
+- `{tags.<stage>}`: The tag of a base image (the `FROM` instruction) for a particular stage in the `Dockerfile`. The `<stage>` is `stageN`, where `N` is the numeric index of the stage, starting at `0`. The first stage, `stage0`, is also available under the pseudonym `baseimage`. If the stage is named (`FROM image AS my_stage`), then the tag value is also available under that name: `{tags.my_stage}`.
+- `{build_args.ARG_NAME}`: Each defined Docker build arg is available for interpolation under the `build_args.` prefix.
+- `{pants.hash}`: This is a unique hash value calculated from all input sources and the `Dockerfile`. It is effectively a hash of the Docker build context. See note below regarding its stability guarantee.
+
+See [Setting a repository name](#setting-a-repository-name) for placeholders specific to the `repository` field.
+
+:::note The `{pants.hash}` stability guarantee
+The calculated hash value _may_ change between stable versions of Pants for the otherwise same input sources.
+:::
+
+## Retrieving the tags of a packaged image
+
+When a docker image is packaged, metadata about the resulting image is output to a JSON file artifact. This includes the image ID, as well as the full names that the image was tagged with. This file is written in the same manner as outputs of other packageable targets and available for later steps (for example, a test with `runtime_package_dependencies` including the docker image target) or in `dist/` after `pants package`. By default, this is available at `path.to.target/target_name.docker-info.json`.
+
+The structure of this JSON file is:
+
+```javascript
+{
+  "version": 1, // always 1, until a breaking change is made to this schema
+  "image_id": "sha256:...",
 // the local Image ID of the computed image
+  "registries": [ // info about each registry used for this image
+    {
+      "alias": "name", // set if the registry is configured in pants.toml, or null if not
+      "address": "reg.invalid", // the address of the registry itself
+      "repository": "the/repo", // the repository used for the image within the registry
+      "tags": [
+        {
+          "template": "tag-{...}", // the tag before substituting any placeholders
+          "tag": "tag-some-value", // the fully-substituted tag, actually used to tag the image
+          "uses_local_alias": false, // if this tag used the local alias for the registry or not
+          "name": "reg.invalid/the/repo:tag-some-value", // the full name that the image was tagged with
+        }
+      ]
+    }
+  ]
+}
+```
+
+This JSON file can be used to retrieve the exact name to place into cloud deploy templates or to use for running locally, especially when using tags with placeholders.
diff --git a/versioned_docs/version-2.24/docs/getting-started/_category_.json b/versioned_docs/version-2.24/docs/getting-started/_category_.json
new file mode 100644
index 000000000..877a378f7
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Getting Started",
+  "position": 2
+}
diff --git a/versioned_docs/version-2.24/docs/getting-started/example-projects-and-repositories.mdx b/versioned_docs/version-2.24/docs/getting-started/example-projects-and-repositories.mdx
new file mode 100644
index 000000000..45802d080
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/example-projects-and-repositories.mdx
@@ -0,0 +1,33 @@
+---
+    title: Example projects and repositories
+    sidebar_position: 3
+---
+
+Example projects to help set up your own repository.
+
+---
+
+A [Python repository](https://github.com/pantsbuild/example-python), demonstrating features such as:
+
+- running tests
+- using linters and formatters
+- using MyPy
+- running a REPL
+- building and running PEX files
+- generating `setup.py` and building `.whl` files
+
+A [codegen repository](https://github.com/pantsbuild/example-codegen), demonstrating:
+
+- Apache Thrift & Python
+- Protobuf/gRPC & Python
+
+A [Docker repository](https://github.com/pantsbuild/example-docker).
+
+A [Django repository](https://github.com/pantsbuild/example-django), demonstrating how to use Pants effectively on your Django code, including how to:
+
+- work with multiple Django services in a single repo
+- work with multiple databases
+- use pytest-django and conftest.py when running Django tests
+- use manage.py
+
+A [Golang repository](https://github.com/pantsbuild/example-golang).
diff --git a/versioned_docs/version-2.24/docs/getting-started/incremental-adoption.mdx b/versioned_docs/version-2.24/docs/getting-started/incremental-adoption.mdx
new file mode 100644
index 000000000..d4500f28b
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/incremental-adoption.mdx
@@ -0,0 +1,109 @@
+---
+    title: Incremental adoption
+    sidebar_position: 4
+---
+
+How to incrementally add Pants to an existing repository.
+
+---
+
+## Recommended steps
+
+If you have an existing repository, we recommend adopting Pants incrementally to reduce the surface area of change, which reduces risk.
+
+Incremental adoption also allows you to immediately start benefiting from Pants, then deepen adoption at your own pace, instead of postponing benefit until you are ready to make dramatic changes all at once.
+
+:::note Joining Slack
+We would love to help you with adopting Pants.
 Please reach out through [Slack](/community/getting-help).
+:::
+
+### 1. A basic `pants.toml`
+
+Follow the [Getting Started](./index.mdx) guide to install Pants and [set up an initial `pants.toml`](./initial-configuration.mdx). Validate that running `pants count-loc ::` works properly. If you want to exclude a specific folder at first, you can use the [`pants_ignore`](../../reference/global-options.mdx#pants_ignore) option.
+
+Add the [relevant backends](../using-pants/key-concepts/backends.mdx) to `[GLOBAL].backend_packages`.
+
+### 2. Set up formatters/linters with basic BUILD files
+
+Formatters and linters are often the simplest to get working because—for all tools other than Pylint—you do not need to worry about things like dependencies and third-party requirements.
+
+First, run [`pants tailor ::`](./initial-configuration.mdx#5-generate-build-files) to generate BUILD files. This tells Pants which files to operate on, and will allow you to set additional metadata over time, such as test timeouts and dependencies on resources.
+
+Then, activate the [Linters and formatters](../python/overview/linters-and-formatters.mdx) you'd like to use. Hook up the `fmt` and `lint` goals to your [CI](../using-pants/using-pants-in-ci.mdx).
+
+### 3. Set up tests
+
+To get [tests](../python/goals/test.mdx) working, you will first need to set up [source roots](../using-pants/key-concepts/source-roots.mdx) and [third-party dependencies](../python/overview/third-party-dependencies.mdx).
+
+Pants's [dependency inference](../using-pants/key-concepts/targets-and-build-files.mdx) will infer most dependencies for you by looking at your import statements. However, some dependencies cannot be inferred, such as [resources](../using-pants/assets-and-archives.mdx).
+
+Try running `pants test ::` to see if any tests fail. Sometimes, your tests will fail with Pants even if they pass with your normal setup because tests are more isolated than when running Pytest/unittest directly:
+
+- Tests run in a sandbox, meaning they can only access dependencies that Pants knows about. If you have a missing file or missing import, run `pants dependencies path/to/my_test.py` and `pants dependencies --transitive path/to/my_test.py` to confirm that Pants knows about the dependencies you are expecting. If not, see [Troubleshooting / common issues](../using-pants/troubleshooting-common-issues.mdx) for reasons dependency inference can fail.
+- Test files are isolated from each other. If your tests depended on running in a certain order, they may now fail; you will need to rewrite them to remove the shared global state.
+
+You can port your tests incrementally with the `skip_tests` field:
+
+```python title="project/BUILD"
+python_tests(
+    name="tests",
+    # Skip all tests in this folder.
+    skip_tests=True,
+    # Or, use `overrides` to only skip some test files.
+    overrides={
+        "dirutil_test.py": {"skip_tests": True},
+        ("osutil_test.py", "strutil.py"): {"skip_tests": True},
+    },
+)
+```
+
+`pants test ::` will only run the relevant tests. You can combine this with [`pants peek`](../using-pants/project-introspection.mdx) to get a list of test files that should be run with your original test runner:
+
+```
+pants --filter-target-type=python_test peek :: | \
+  jq -r '.[] | select(.skip_tests == true) | .["sources"][]'
+```
+
+You may want to [speed up your CI](../using-pants/using-pants-in-ci.mdx) by having Pants only run tests for changed files.
+
+### 4.
 Set up `pants package`
+
+You can use `pants package` to package your code into various formats, such as a [PEX binary](../python/goals/package.mdx), a [wheel](../python/goals/package.mdx#create-a-setuptools-distribution), an [AWS Lambda](../python/integrations/aws-lambda.mdx), or a [zip/tar archive](../using-pants/assets-and-archives.mdx).
+
+We recommend manually verifying that this step is working as you'd like by inspecting the built packages. Alternatively, you can [write automated tests](../python/goals/test.mdx) that will call the equivalent of `pants package` for you, and insert the built package into your test environment.
+
+### 5. Check out writing a plugin
+
+Pants is highly extensible. In fact, all of Pants's core functionality is implemented using the exact same API used by plugins.
+
+Check out [Plugins Overview](../writing-plugins/overview.mdx). We'd also love to help in the #plugins channel on [Slack](/community/members).
+
+Some example plugins that users have written:
+
+- Cython support
+- Building a Docker image with packages built via `pants package`
+- Custom `setup.py` logic to compute the `version` dynamically
+- Jupyter support
+
+## Migrating from other BUILD tools? Set custom BUILD file names
+
+If you're migrating from another system that already uses the name `BUILD`, such as Bazel or Please, you have a few ways to avoid conflicts:
+
+First, by default Pants recognizes `BUILD.extension` for any `extension` as a valid BUILD file. So you can use a name like `BUILD.pants` without changing configuration.
+
+Second, you can [configure](../../reference/global-options.mdx#build_patterns) Pants to use a different set of file names entirely:
+
+```toml title="pants.toml"
+[GLOBAL]
+build_patterns = ["PANTSBUILD", "PANTSBUILD.*"]
+
+[tailor]
+build_file_name = "PANTSBUILD"
+```
+
+And finally, you can configure Pants not to look for BUILD files in certain locations. This can be helpful, for example, if you use Pants for some languages and another tool for other languages:
+
+```toml title="pants.toml"
+[GLOBAL]
+build_ignore = ["src/cpp"]
+```
diff --git a/versioned_docs/version-2.24/docs/getting-started/index.mdx b/versioned_docs/version-2.24/docs/getting-started/index.mdx
new file mode 100644
index 000000000..d6386f817
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/index.mdx
@@ -0,0 +1,18 @@
+---
+    title: Getting started
+    sidebar_position: 0
+---
+
+---
+
+Thanks for your interest in trying out Pants!
+
+We recommend joining our [Slack workspace](/community/members), in case you have any questions along the way.
+
+And if you want to show support for the project, [GitHub stars](https://github.com/pantsbuild/pants) are always appreciated!
+
+- [Prerequisites](./prerequisites.mdx)
+- [Installing Pants](./installing-pants.mdx)
+- [Initial configuration](./initial-configuration.mdx)
+- [Example repositories](./example-projects-and-repositories.mdx)
+- [Incremental adoption](./incremental-adoption.mdx)
diff --git a/versioned_docs/version-2.24/docs/getting-started/initial-configuration.mdx b/versioned_docs/version-2.24/docs/getting-started/initial-configuration.mdx
new file mode 100644
index 000000000..54b27cc84
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/initial-configuration.mdx
@@ -0,0 +1,111 @@
+---
+    title: Initial configuration
+    sidebar_position: 2
+---
+
+Creating the configuration necessary to run Pants.
+
+---
+
+To get started in a new repository, follow these steps, and then visit one of the language-specific overview pages.
+
+## 1. Create `pants.toml`
+
+Pants configuration lives in a file called `pants.toml` in the root of the repo. This file uses the [TOML](https://github.com/toml-lang/toml) format.
+
+If you haven't yet, create a `pants.toml` file:
+
+```toml title="pants.toml"
+[GLOBAL]
+pants_version = "$PANTS_VERSION"
+```
+
+where `$PANTS_VERSION` is the version of Pants that you want to pin your repo to. When you'd like to upgrade Pants, edit `pants_version` and the `pants` script will self-update on the next run.
+
+## 2. Configure source roots
+
+Many languages organize code in a package hierarchy, so that the relative location of a source file on the filesystem corresponds to a logical package name. The directories that correspond to the roots of the language's package hierarchy are referred to as [source roots](../using-pants/key-concepts/source-roots.mdx). These are the filesystem locations from which import paths are computed.
+
+For example, if your Python code lives under `src/python`, then `import myorg.myproject.app` will import the code in `src/python/myorg/myproject/app.py`.
+
+In simple cases, the root of the repository itself might be your only source root. But in many other cases the code is organized so that the source root is nested under some directory such as `src/` or `src/<language>`.
+
+To work correctly, Pants needs to know about the source roots in your repo. By default, given a source file path, Pants will treat the longest path prefix that ends in `src`, `src/python`, or `src/py` as its source root, falling back to the repo root itself if no such prefix is found.
+
+If your project has a different structure, see [Source roots](../using-pants/key-concepts/source-roots.mdx) for how to configure them, and for examples of different project structures you can use Pants with.
+
+:::note Golang projects can skip this step
+Golang projects already use `go.mod` to indicate source roots.
+:::
+
+## 3. Enable backends
+
+Most Pants functionality is provided via pluggable [_backends_](../using-pants/key-concepts/backends.mdx), which are activated by adding to the `[GLOBAL].backend_packages` option like this:
+
+```toml title="pants.toml"
+[GLOBAL]
+...
+backend_packages = [
+  "pants.backend.experimental.go",
+  "pants.backend.python",
+  "pants.backend.python.lint.black",
+]
+```
+
+See [here](../using-pants/key-concepts/backends.mdx) for a list of available backends.
+
+## 4. Update `.gitignore`
+
+If you use Git, we recommend adding these lines to your top-level `.gitignore` file:
+
+```text title=".gitignore"
+# Pants workspace files
+/.pants.d
+/dist/
+```
+
+:::note FYI: Pants will ignore all files in your `.gitignore` by default
+The `pants_ignore` option tells Pants which files to avoid looking at, but it additionally ignores all `.gitignore`d files by default. Occasionally, you will want to ignore something with Git, but still want Pants to work on the file. See [Troubleshooting / common issues](../using-pants/troubleshooting-common-issues.mdx) for how to do this.
+:::
+
+## 5. Generate BUILD files
+
+Once you have enabled the backends for the language(s) you'd like to use, run [`pants tailor ::`](./initial-configuration.mdx#5-generate-build-files) to generate an initial set of [BUILD](../using-pants/key-concepts/targets-and-build-files.mdx) files.
+
+[BUILD](../using-pants/key-concepts/targets-and-build-files.mdx) files provide metadata about your code (the timeout of a test, any dependencies which cannot be inferred, etc.). BUILD files are typically located in the same directory as the code they describe. Unlike many other systems, Pants BUILD files are usually very succinct, as most metadata is either inferred from static analysis, assumed from sensible defaults, or generated for you.
+
+In general, you should create (and update) BUILD files by running `pants tailor ::`:
+
+```
+❯ pants tailor ::
+Created scripts/BUILD:
+  - Add shell_sources target scripts
+Created src/py/project/BUILD:
+  - Add python_sources target project
+  - Add python_tests target tests
+Created src/go/BUILD:
+  - Add go_mod target mod
+```
+
+Often, this will be all you need for Pants to work, thanks to sensible defaults and inference, like [inferring your dependencies](../using-pants/key-concepts/targets-and-build-files.mdx). Sometimes, though, you may need or want to change certain fields, like setting a longer timeout on a test.
+
+You may also need to add some targets that Pants cannot generate, like [`resources` and `files`](../using-pants/assets-and-archives.mdx) targets.
+
+To ignore false positives, set `[tailor].ignore_paths` and `[tailor].ignore_adding_targets`. See [tailor](../../reference/goals/tailor.mdx) for more detail.
+
+:::note Run `pants tailor --check ::` in CI
+We recommend running `pants tailor --check ::` in your [continuous integration](../using-pants/using-pants-in-ci.mdx) so that you don't forget to add any targets and BUILD files (which might mean that tests aren't run or code isn't validated).
+
+```
+❯ pants tailor --check ::
+Would create scripts/BUILD:
+  - Add shell_sources target scripts
+
+To fix `tailor` failures, run `pants tailor`.
+```
+
+:::
+
+## 6. Visit a language-specific overview
+
+You're almost ready to go! Next up is visiting one of the language-specific overviews listed below.
diff --git a/versioned_docs/version-2.24/docs/getting-started/installing-pants.mdx b/versioned_docs/version-2.24/docs/getting-started/installing-pants.mdx
new file mode 100644
index 000000000..cc96b2334
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/installing-pants.mdx
@@ -0,0 +1,71 @@
+---
+    title: Installing Pants
+    sidebar_position: 1
+---
+
+---
+
+You can download and run an installer script that will install the Pants binary with this command:
+
+```
+curl --proto '=https' --tlsv1.2 -fsSL https://static.pantsbuild.org/setup/get-pants.sh | bash
+```
+
+This script will install `pants` into `~/.local/bin`, which must be on your PATH. The installer script will warn you if it is not.
+
+For security reasons, we don't recommend frequently curling this script directly to `bash`, e.g., on every CI run. If the script were compromised during some time window, you'd be more likely to download it during that window and be impacted. Instead, for regular use, we recommend checking this script into the root of your repo and pointing users and CI machines to that checked-in version. The script is very simple and need not be updated very often.
+
+Alternatively, on macOS you can also use Homebrew to install `pants`:
+
+```
+brew install pantsbuild/tap/pants
+```
+
+You can also use the [`bin`](https://github.com/marcosnils/bin) tool to install `pants`:
+
+```
+bin i github.com/pantsbuild/scie-pants ~/.local/bin/pants
+```
+
+`pants` is a launcher binary that delegates to the underlying version of Pants in each repo.
 This allows you to have multiple repos, each using an independent version of Pants.
+
+- If you run `pants` in a repo that is already configured to use Pants, it will read the repo's Pants version from the `pants.toml` config file, install that version if necessary, and then run it.
+
+- If you run `pants` in a repo that is not yet configured to use Pants, it will prompt you to set up a skeleton `pants.toml` that uses the latest stable version of Pants. You'll then need to edit that config file to add [initial configuration](./initial-configuration.mdx).
+
+If you have difficulty installing Pants, see our [getting help](/community/getting-help) page for community resources to help you resolve your issue.
+
+:::tip Upgrading Pants
+The `pants` launcher binary will automatically install and use the Pants version specified in `pants.toml`, so upgrading Pants in a repo is as simple as editing `pants_version` in that file.
+
+To upgrade the `pants` launcher binary itself, run
+
+```
+SCIE_BOOT=update pants
+```
+
+:::
+
+## Running Pants from sources
+
+See [here](../contributions/development/running-pants-from-sources.mdx) for instructions on how to run Pants directly from its [sources](https://github.com/pantsbuild/pants).
+
+This is useful when making changes directly to Pants, to see how those changes impact your repo.
+
+:::caution The old `./pants` script
+Before the creation of the `pants` launcher binary, the recommended way of installing Pants was to check a `./pants` launcher script into each repo. This script required an external Python interpreter, and was prone to errors and issues related to discovery and use of this interpreter.
+
+The `pants` launcher binary uses an embedded interpreter and does not rely on one being present on the system (although if your repo contains Python code then it naturally requires a Python interpreter).
+
+We strongly recommend removing the `./pants` script from your repo and using the `pants` binary instead. You can keep a simple `./pants` script that delegates to `pants` to ease the transition. However, if you do need to continue to use the old installation method for some reason, it is described [here](./manual-installation.mdx). But please [let us know](/community/getting-help) so we can accommodate your use case in the launcher binary.
+:::
+
+## The `pants` binary's implementation
+
+You don't need to know this to use `pants`, but it may be of interest:
+
+The `pants` launcher binary is also known as [scie-pants](https://github.com/pantsbuild/scie-pants) (pronounced "ski pants"). It's implemented using [scie](https://github.com/a-scie/jump), a Self Contained Interpreted Executable launcher. scie is what allows `pants` to embed its own Python interpreter, instead of relying on a specific interpreter being available on your PATH.
+
+In fact, instead of literally embedding an interpreter in the `pants` binary, which would inflate its size, the binary is "hollowed out": instead of the interpreter itself, it contains metadata on how to download a platform-specific [standalone interpreter executable](https://gregoryszorc.com/docs/python-build-standalone/main/). The scie mechanism then downloads that interpreter file on first use, and caches it for future use. So if you update the `pants` launcher binary, you don't have to re-download the interpreter.
+
+See the links above for more details. We hope to soon add support in Pants for building scies out of your code, which will allow you to package and ship fully standalone Python binaries!
diff --git a/versioned_docs/version-2.24/docs/getting-started/manual-installation.mdx b/versioned_docs/version-2.24/docs/getting-started/manual-installation.mdx
new file mode 100644
index 000000000..9ebde3d98
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/manual-installation.mdx
@@ -0,0 +1,50 @@
+---
+    title: Manual installation
+    sidebar_position: 5
+---
+
+---
+
+:::caution This page describes an old, deprecated method for installing Pants.
+We highly recommend using the `pants` [launcher binary](./installing-pants.mdx) instead.
+:::
+
+## Manual installation
+
+This installation method requires Python 3.7, 3.8, or 3.9 discoverable on your `PATH`. On macOS on Apple Silicon (M1/M2), it must be Python 3.9.
+
+Pants is invoked via a launch script named `./pants`, saved at the root of the repository. This script will install Pants and handle upgrades.
+
+First, pick a release version. You can see the available releases [on PyPI](https://pypi.org/project/pantsbuild.pants/). We recommend picking the current stable release, unless you have reason to need a more recent one, such as a release candidate or a development release.
+
+Then, set up a minimal `pants.toml` config file, filling in the version you selected:
+
+```bash
+printf '[GLOBAL]\npants_version = "X.Y.Z"\n' > pants.toml
+```
+
+Then, download the script:
+
+```bash
+curl -L -O https://static.pantsbuild.org/setup/pants && chmod +x ./pants
+```
+
+Now, run this to bootstrap Pants and to verify the version it installs:
+
+```bash
+./pants --version
+```
+
+:::note Add `./pants` to version control
+You should check the `./pants` script into your repo so that all users can easily run Pants.
+:::
+
+:::tip Upgrading Pants
+The `./pants` script will automatically install and use the Pants version specified in `pants.toml`, so upgrading Pants is as simple as editing `pants_version` in that file.
+:::
+
+## Building Pants from sources
+
+We currently distribute Pants for Linux (x86_64 and ARM64) and macOS (x86_64 and ARM64).
+
+If you need to run Pants on some other platform, such as Alpine Linux, you can try building it yourself by checking out the [Pants repo](https://github.com/pantsbuild/pants), and running `./pants package src/python/pants:pants-packaged` to build a wheel.
diff --git a/versioned_docs/version-2.24/docs/getting-started/prerequisites.mdx b/versioned_docs/version-2.24/docs/getting-started/prerequisites.mdx
new file mode 100644
index 000000000..895813c27
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/getting-started/prerequisites.mdx
@@ -0,0 +1,63 @@
+---
+    title: Prerequisites
+    sidebar_position: 0
+---
+
+---
+
+To run Pants, you need:
+
+- One of:
+  - Linux (x86_64 or ARM64)
+  - macOS (Intel or Apple Silicon, 10.15 Catalina or newer)
+  - Microsoft Windows 10 with WSL 2
+- Internet access (so that Pants can fully bootstrap itself)
+
+:::note Restricted Internet access?
+See [Restricted Internet access](../using-pants/restricted-internet-access.mdx) for instructions.
+:::
+
+## System-specific notes
+
+### Linux
+
+:::caution Some Linux distributions may need additional packages
+On Ubuntu you may need to run:
+`apt install -y python3-dev python3-distutils`.
+:::
+
+:::caution Alpine Linux is not yet supported
+Pants for Linux is currently distributed as a manylinux wheel. Alpine Linux is not covered by manylinux (it uses musl libc while manylinux requires glibc), so at present Pants will not run on Alpine Linux.
+
+If you need to run Pants on Alpine, [let us know](/community/members), so we can prioritize this work. Meanwhile, you can try [building Pants yourself](./manual-installation.mdx#building-pants-from-sources) on Alpine.
+:::
+
+:::caution Linux on ARM will be supported from Pants 2.16
+Pants 2.16 will be distributed for Linux x86_64 and ARM64. Earlier versions are only distributed for Linux x86_64.
+
+If you need to run an earlier version of Pants on ARM, you can try [building Pants yourself](./manual-installation.mdx#building-pants-from-sources) on that platform.
+:::
+
+### macOS
+
+:::note Apple Silicon (M1/M2) support
+If running Pants inside a Docker container on an Apple Silicon machine, you may need to set `--no-watch-filesystem --no-pantsd`. This is because filesystem notifications for native macOS files aren't mirrored over to the virtualized Linux system.
+
+We don't recommend setting this permanently, as these options are crucial for performance when iterating. Instead, you may want to look at the new [Environments](../using-pants/environments.mdx) feature, which lets Pants run natively on macOS but selectively invoke processes in a Docker container.
+:::
+
+### Microsoft Windows
+
+:::note Windows 10 support
+Pants runs on Windows 10 under the Windows Subsystem for Linux (WSL):
+
+- Follow [these instructions](https://docs.microsoft.com/en-us/windows/wsl/install-win10) to install WSL 2.
+- Install a recent Linux distribution under WSL 2 (we have tested with Ubuntu 20.04 LTS).
+- Run `sudo apt install unzip python3-dev python3-distutils python3-venv gcc` in the distribution.
+- You can then run Pants commands in a Linux shell, or in a Windows shell by prefixing with `wsl `.
+
+Projects using Pants must be contained within the Linux virtual machine:
+
+- Executing Pants from a Windows directory (by navigating a Linux shell to it via the `/mnt` directory, or by using the `wsl` prefix from a Windows shell) may result in unexpected behavior.
+
+:::
diff --git a/versioned_docs/version-2.24/docs/go/_category_.json b/versioned_docs/version-2.24/docs/go/_category_.json
new file mode 100644
index 000000000..f38a2baa9
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/go/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Go",
+  "position": 6
+}
diff --git a/versioned_docs/version-2.24/docs/go/index.mdx b/versioned_docs/version-2.24/docs/go/index.mdx
new file mode 100644
index 000000000..0b53cd157
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/go/index.mdx
@@ -0,0 +1,321 @@
+---
+    title: Go overview
+    sidebar_position: 0
+---
+
+Pants's support for Golang.
+
+---
+
+:::caution Go support is beta stage
+We have finished implementing the core functionality of Pants's initial Go support ([tracked here](https://github.com/pantsbuild/pants/issues/17447)). However, there may be some edge cases we aren't yet handling. Some features, such as vendoring, are not yet supported; we'd love your input on how to prioritize them!
+
+Please share feedback for what you need to use Pants with your Go project by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](/community/members)!
+:::
+
+:::tip Why use Pants with Go?
+Go's built-in tooling is already excellent! Many projects may be fine only using Go's tooling, although Pants offers some unique benefits:
+
+- A consistent interface for all languages/tools in your repository, such as being able to run `pants fmt lint check test package`.
+- Integration with Git, such as running `pants --changed-since=HEAD test`.
+- Caching, such as caching test results on a per-package basis.
+- [Remote execution and remote caching](../using-pants/remote-caching-and-execution/index.mdx).
+- [Advanced project introspection](../using-pants/project-introspection.mdx), such as finding all code that transitively depends on a certain package.
+
+:::
+
+:::note Example Go repository
+Check out [github.com/pantsbuild/example-golang](https://github.com/pantsbuild/example-golang) to try out Pants's Go support.
+:::
+
+## Initial setup
+
+First, activate the Go backend in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = ["pants.backend.experimental.go"]
+```
+
+You may want to set the option `[golang].minimum_expected_version` to a value like `"1.17"`. Pants will use this to find a Go distribution that is the same version or newer. You still set your projects' Go version in `go.mod` with the `go` directive; this option is only used for Pants to discover a compatible Go distribution.
+
+You can also set `[golang].go_search_paths` to influence where Pants looks for Go, e.g. `["/usr/bin"]`. It defaults to your `PATH`.
+
+Then run [`pants tailor ::`](../getting-started/initial-configuration.mdx#5-generate-build-files) to generate BUILD files. This will add a `go_mod` target where you have your `go.mod` file, a `go_package` target for every directory with a `.go` file, and a `go_binary` target in every directory where you have `package main`.
+
+```
+❯ pants tailor ::
+Created BUILD:
+  - Add go_mod target root
+Created cmd/deploy/BUILD:
+  - Add go_binary target bin
+  - Add go_package target deploy
+Created cmd/runner/BUILD:
+  - Add go_binary target bin
+  - Add go_package target runner
+Created pkg/deploy/BUILD:
+  - Add go_package target deploy
+Created pkg/runner/BUILD:
+  - Add go_package target runner
+```
+
+Each `go_package` target allows you to set metadata for that directory, such as the `test_timeout` field. However, Pants uses sensible defaults so, usually, you can simply use what was generated by `tailor`.
+
+The `go_mod` target generates a `go_third_party_package` target for each package belonging to the modules declared in your `go.mod`. You will rarely need to interact with these directly, thanks to dependency inference.
+
+You can run `pants list ::` to see all targets in your project, including generated `go_third_party_package` targets:
+
+```
+❯ pants list ::
+...
+//:root#golang.org/x/net/ipv4
+//:root#golang.org/x/net/ipv6
+...
+cmd/deploy:bin
+cmd/deploy:deploy
+cmd/runner:bin
+cmd/runner:runner
+pkg/deploy:deploy
+pkg/runner:runner
+```
+
+:::caution `go.mod` and `go.sum` need to be up-to-date
+Pants does not yet update your `go.mod` and `go.sum` for you; it only reads these files when downloading modules. Run `go mod download all` to make sure these files are correct.
+:::
+
+### The `embed` directive and `resource` targets
+
+To use the [`embed` directive](https://pkg.go.dev/embed), you must first teach Pants about the [files](../using-pants/assets-and-archives.mdx) with the `resource` / `resources` targets:
+
+1. Add a `resource` or `resources` target with the embedded files in the `source` / `sources` field, respectively.
+2. Add that target to the `dependencies` field of the relevant `go_package` target.
+
+For example:
+
+```python title="pkg/runner/BUILD"
+go_package(dependencies=[":embeds"])
+
+resources(name="embeds", sources=["hello.txt"])
+```
+
+```go title="pkg/runner/lib.go"
+package runner
+
+import _ "embed"
+
+//go:embed hello.txt
+var s string
+
+func printHello() {
+	print(s)
+}
+```
+
+```text title="pkg/runner/hello.txt"
+Hello world!
+```
+
+## Package and run binaries
+
+To run a binary, use `pants run path/to/main_pkg:` (note the colon). You can pass through arguments with `--`, like this:
+
+```
+❯ pants run cmd/deploy: -- --help
+Usage of /Users/pantsbuild/example/.pants.d/workdir/tmpzfh33ggu/cmd.deploy/bin:
+      --allow-insecure-auth        allow credentials to be passed unencrypted (i.e., no TLS)
+  -A, --auth-token-env string      name of environment variable with auth bearer token
+...
+pflag: help requested
+```
+
+You can also package your binaries (aka `go build`) by using `pants package`. `package ::` will build all your project's binaries, whereas `package path/to/main_pkg:` will build only the binary in that directory.
+
+```
+❯ pants package ::
+[INFO] Wrote dist/cmd.deploy/bin
+[INFO] Wrote dist/cmd.runner/bin
+```
+
+By default, Pants names the binary with the scheme `path.to.directory/target_name`, e.g. `cmd.deploy/bin`. You can set the field `output_path` to use a different name:
+
+```python title="cmd/deploy/BUILD"
+go_binary(name="bin", output_path="deploy")
+```
+
+## Compile code
+
+To manually check that a package compiles, use `pants check`:
+
+```
+# Check this package
+❯ pants check pkg/deploy:
+
+# Check this directory and all subdirectories
+❯ pants check pkg::
+
+# Check the whole project
+❯ pants check ::
+```
+
+(Instead, you can simply run `package`, `run`, and `test`. Pants will compile all the relevant packages.)
+
+## Run tests
+
+To run tests, use `pants test`:
+
+```
+# Test this package
+❯ pants test pkg/deploy:
+
+# Test this directory and all subdirectories
+❯ pants test pkg::
+
+# Test the whole project
+❯ pants test ::
+```
+
+You can pass through arguments with `--`, e.g. `pants test pkg/deploy: -- -v -run TestFoo`.
+
+### Loose files in tests (`testdata`)
+
+To open files in your tests, use [`file` / `files`](../using-pants/assets-and-archives.mdx) targets and add them as `dependencies` to your `go_package`.
+
+```python title="pkg/runner/BUILD"
+go_package(dependencies=[":testdata"])
+
+files(name="testdata", sources=["testdata/*"])
+```
+
+```go title="pkg/runner/foo_test.go"
+package foo
+
+import (
+	"os"
+	"testing"
+)
+
+func TestFilesAvailable(t *testing.T) {
+	_, err := os.Stat("testdata/f.txt")
+	if err != nil {
+		t.Fatalf("Could not stat pkg/runner/testdata/f.txt: %v", err)
+	}
+}
+```
+
+```text title="pkg/runner/testdata/f.txt"
+"Hello world!"
+```
+
+Traditionally in Go, these files are located in the `testdata` directory. However, with Pants, you can place the files wherever you'd like. Pants sets the working directory to the path of the `go_package`, which allows you to open files regardless of where they are in your repository, such as with `os.Stat("../f.txt")`.
+
+### Timeouts
+
+Pants can cancel tests that take too long, which is useful to prevent tests from hanging indefinitely.
+
+To add a timeout, set the `test_timeout` field to an integer value of seconds, like this:
+
+```python title="BUILD"
+go_package(test_timeout=120)
+```
+
+You can also set a default value and a maximum value in `pants.toml`:
+
+```toml title="pants.toml"
+[test]
+timeout_default = 60
+timeout_maximum = 600
+```
+
+If a target sets its `timeout` higher than `[test].timeout_maximum`, Pants will use the value in `[test].timeout_maximum`.
+
+Use the option `pants test --no-timeouts` to temporarily disable timeouts, e.g. when debugging.
+
+### Retries
+
+Pants can automatically retry failed tests. This can help keep your builds passing even with flaky tests, like integration tests.
+
+```toml tab={"label":"pants.toml"}
+[test]
+attempts_default = 3
+```
+
+## Gofmt
+
+Gofmt is activated by default when you activate the Go backend. Simply run `pants fmt` and `pants lint`:
+
+```
+# Format a single directory
+❯ pants fmt cmd/deploy:
+
+# Format this directory and all subdirectories
+❯ pants fmt cmd::
+
+# Check that the whole project is formatted
+❯ pants lint ::
+
+# Format all changed files
+❯ pants --changed-since=HEAD fmt
+```
+
+If you'd like to disable Gofmt, set this:
+
+```toml title="pants.toml"
+[gofmt]
+skip = true
+```
+
+To only run Gofmt, use `--fmt-only` and `--lint-only`:
+
+```bash
+❯ pants fmt --only=gofmt ::
+```
+
+## golangci-lint
+
+Pants can run [golangci-lint](https://golangci-lint.run/) on your Go source
+code. To activate, add this to your `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  "pants.backend.experimental.go",
+  "pants.backend.experimental.go.lint.golangci_lint",
+]
+```
+
+Now you can run `pants lint`:
+
+```
+$ pants lint main.go
+20:39:43.10 [ERROR] Completed: Lint with golangci-lint - golangci-lint failed (exit code 1).
+main.go:5:6: func `bad` is unused (unused)
+func bad() {
+     ^
+
+
+
+✕ golangci-lint failed.
+```
+
+Pants will automatically include any relevant `.golangci.yml`, `.golangci.yaml`,
+`.golangci.json`, or `.golangci.toml` files in the run. You can also pass
+command line arguments with `--golangci-lint-args='--tests --fast'` or
+permanently set them in `pants.toml`:
+
+```toml
+[golangci-lint]
+args = ["--fast", "--tests"]
+```
+
+Temporarily disable golangci-lint with `--golangci-lint-skip`:
+
+```bash
+pants --golangci-lint-skip lint ::
+```
+
+Only run golangci-lint with `--lint-only`:
+
+```bash
+pants lint --only=golangci-lint ::
+```
+
+:::tip Benefit of Pants: golangci-lint runs in parallel with other linters
+Pants will attempt to run all activated linters and formatters at the same time for improved performance, including [Python](../python/overview/linters-and-formatters.mdx), Shell, Java, and Scala linters. You can see this through Pants's dynamic UI.
+:::
diff --git a/versioned_docs/version-2.24/docs/go/integrations/_category_.json b/versioned_docs/version-2.24/docs/go/integrations/_category_.json
new file mode 100644
index 000000000..f8777284b
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/go/integrations/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Integrations",
+  "position": 2
+}
diff --git a/versioned_docs/version-2.24/docs/go/integrations/index.mdx b/versioned_docs/version-2.24/docs/go/integrations/index.mdx
new file mode 100644
index 000000000..9ffd9a7ed
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/go/integrations/index.mdx
@@ -0,0 +1,10 @@
+---
+  title: Integrations
+  sidebar_position: 1
+---
+
+Useful integrations for Golang.
+
+---
+
+- [Protobuf](./protobuf.mdx)
diff --git a/versioned_docs/version-2.24/docs/go/integrations/protobuf.mdx b/versioned_docs/version-2.24/docs/go/integrations/protobuf.mdx
new file mode 100644
index 000000000..66d7a9d2f
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/go/integrations/protobuf.mdx
@@ -0,0 +1,172 @@
+---
+  title: Protobuf
+  sidebar_position: 0
+---
+
+How to generate Go from Protocol Buffers.
+
+---
+
+When your Go code imports Protobuf generated files, Pants will detect the imports and run the Protoc compiler to generate and then compile those files.
+
+:::note Example repository
+See [the codegen example repository](https://github.com/pantsbuild/example-codegen) for an example of using Protobuf to generate Go.
+:::
+
+:::tip Benefit of Pants: generated files are always up-to-date
+With Pants, there's no need to manually regenerate your code or check it into version control. Pants will ensure you are always using up-to-date files in your builds.
+
+Thanks to fine-grained caching, Pants will regenerate the minimum amount of code required when you do make changes.
+:::
+
+:::caution `go mod tidy` will complain about missing modules
+Because Pants does not save generated code to disk, `go mod tidy` will error that it cannot find the generated packages.
+
+One workaround is to run `pants export-codegen ::` to save the generated files.
+:::
+
+## Step 1: Activate the Protobuf Go backend
+
+Add this to your `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  "pants.backend.experimental.codegen.protobuf.go",
+  "pants.backend.experimental.go",
+]
+```
+
+This adds the new [`protobuf_source`](../../../reference/targets/protobuf_source.mdx) target, which you can confirm by running `pants help protobuf_source`.
+
+To reduce boilerplate, you can also use the [`protobuf_sources`](../../../reference/targets/protobuf_sources.mdx) target, which generates one `protobuf_source` target per file in the `sources` field.
+
+```python title="BUILD"
+protobuf_sources(name="protos", sources=["user.proto", "admin.proto"])
+
+# Spiritually equivalent to:
+protobuf_source(name="user", source="user.proto")
+protobuf_source(name="admin", source="admin.proto")
+
+# Thanks to the default `sources` value of '*.proto', spiritually equivalent to:
+protobuf_sources(name="protos")
+```
+
+## Step 2: Set up your `go.mod` and `go.sum`
+
+The generated Go code requires `google.golang.org/protobuf` to compile. Add it to your `go.mod` with the version you'd like. Then run `go mod download all` to update your `go.sum`.
+
+```text title="go.mod"
+require google.golang.org/protobuf v1.27.1
+```
+
+## Step 3: Add `option go_package` to `.proto` files
+
+Every Protobuf file that should work with Go must set `option go_package` with the name of its Go package. For example:
+
+```text title="src/protos/example/v1/person.proto"
+syntax = "proto3";
+
+package simple_example.v1;
+
+option go_package = "github.com/pantsbuild/example-codegen/gen";
+```
+
+Multiple Protobuf files can set the same `go_package` if their code should show up in the same package.
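+
+For example, a hypothetical second file (invented for this illustration) could declare the same `go_package`, so that its generated code lands in the same Go package as `person.proto`:
+
+```text title="src/protos/example/v1/address.proto"
+syntax = "proto3";
+
+package simple_example.v1;
+
+// Same `go_package` as person.proto, so the generated code for
+// both files shows up in the same Go package.
+option go_package = "github.com/pantsbuild/example-codegen/gen";
+```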
+ +## Step 4: Generate `protobuf_sources` targets + +Run [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) for Pants to create a `protobuf_sources` target wherever you have `.proto` files: + +``` +❯ pants tailor :: +Created src/protos/BUILD: + - Add protobuf_sources target protos +``` + +Pants will use [dependency inference](../../using-pants/key-concepts/targets-and-build-files.mdx) for any `import` statements in your `.proto` files, which you can confirm by running `pants dependencies path/to/file.proto`. + +If you want gRPC code generated for all files in the folder, set `grpc=True`. + +```python title="src/proto/example/BUILD" +protobuf_sources( + name="protos", + grpc=True, +) +``` + +If you only want gRPC generated for some files in the folder, you can use the `overrides` field: + +```python title="src/proto/example/BUILD" +protobuf_sources( + name="protos", + overrides={ + "admin.proto": {"grpc": True}, + # You can also use a tuple for multiple files. + ("user.proto", "org.proto"): {"grpc": True}, + }, +) +``` + +## Step 5: Confirm Go imports are working + +Now, you can import the generated Go package in your Go code like normal, using whatever you set with `option go_package` from Step 3. + +```go title="src/go/examples/proto_test.go" +package examples + +import "testing" +import "github.com/pantsbuild/example-codegen/gen" + +func TestGenerateUuid(t *testing.T) { + person := gen.Person{ + Name: "Thomas the Train", + Id: 1, + Email: "allaboard@trains.build", + } + if person.Name != "Thomas the Train" { + t.Fail() + } +} +``` + +Pants's dependency inference will detect Go imports of Protobuf packages, which you can confirm by running `pants dependencies path/to/file.go`. You can also run `pants check path/to/file.go` to confirm that everything compiles. + +:::note Run `pants export-codegen ::` to inspect the files +`pants export-codegen ::` will run all relevant code generators and write the files to `dist/codegen` using the same paths used normally by Pants. + +You do not need to run this goal for codegen to work when using Pants; `export-codegen` is only for external consumption outside of Pants, e.g. to get `go mod tidy` working. +::: + +## Buf: format and lint Protobuf + +Pants integrates with the [`Buf`](https://buf.build/blog/introducing-buf-format) formatter and linter for Protobuf files. + +To activate, add this to `pants.toml`: + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.codegen.protobuf.lint.buf", +] +``` + +Now you can run `pants fmt` and `pants lint`: + +``` +❯ pants lint src/protos/user.proto +``` + +Use `pants fmt lint dir:` to run on all files in the directory, and `pants fmt lint dir::` to run on all files in the directory and subdirectories. 
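+
+For example, using the hypothetical `src/protos` directory from above:
+
+```
+# Run Buf only on the files directly in src/protos
+❯ pants fmt lint src/protos:
+
+# Run Buf on src/protos and all its subdirectories
+❯ pants fmt lint src/protos::
+```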
+
+Temporarily disable Buf with `--buf-fmt-skip` and `--buf-lint-skip`:
+
+```bash
+❯ pants --buf-fmt-skip fmt ::
+```
+
+Only run Buf with `--lint-only=buf-fmt` or `--lint-only=buf-lint`, and `--fmt-only=buf-fmt`:
+
+```bash
+❯ pants fmt --only=buf-fmt ::
+```
diff --git a/versioned_docs/version-2.24/docs/go/private-modules/_category_.json b/versioned_docs/version-2.24/docs/go/private-modules/_category_.json
new file mode 100644
index 000000000..ee519a0ce
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/go/private-modules/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Private Modules",
+  "position": 2
+}
diff --git a/versioned_docs/version-2.24/docs/go/private-modules/index.mdx b/versioned_docs/version-2.24/docs/go/private-modules/index.mdx
new file mode 100644
index 000000000..4d3cc5acc
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/go/private-modules/index.mdx
@@ -0,0 +1,46 @@
+---
+  title: Private Modules
+  sidebar_position: 1
+---
+
+Use Golang modules from private repositories.
+
+---
+# Using Private Modules in Golang
+
+Pants can build Go binaries that use modules from private Git repositories.
+To do this, specify the private repo(s) in `GOPRIVATE` and provide credentials for the Git repo in your `$HOME/.netrc`.
+
+Define the `GOPRIVATE` variable in the `subprocess_env_vars` section of your `pants.toml`. The example below shows the `.netrc` file so that Git can authenticate. A simple `go.mod` shows the inclusion of the private module; nothing special is needed there.
+
+
+```toml tab={"label":"pants.toml"}
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.experimental.go",
+]
+
+
+[golang]
+subprocess_env_vars = [
+  'GOPRIVATE=github.com/your-user/your-module/*',
+  'HOME',
+]
+extra_tools = [
+  'git',
+]
+```
+
+```go tab={"label":"go.mod"}
+module consumer
+
+go 1.22
+
+require github.com/your-user/your-repo/your-module v0.0.1
+```
+
+``` tab={"label":".netrc"}
+machine github.com
+login your-user
+password your-token
+```
diff --git a/versioned_docs/version-2.24/docs/helm/_category_.json b/versioned_docs/version-2.24/docs/helm/_category_.json
new file mode 100644
index 000000000..34bf47e92
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/helm/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Helm",
+  "position": 10
+}
diff --git a/versioned_docs/version-2.24/docs/helm/deployments.mdx b/versioned_docs/version-2.24/docs/helm/deployments.mdx
new file mode 100644
index 000000000..9d6f11cdb
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/helm/deployments.mdx
@@ -0,0 +1,363 @@
+---
+  title: Deployments
+  sidebar_position: 1
+---
+
+---
+
+:::caution Helm deployment support is in alpha stage
+Pants has experimental support for managing deployments via the `experimental-deploy` goal. Helm deployments provide a basic implementation of this goal.
+
+Please share feedback for what you need to use Pants with your Helm deployments by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](/community/getting-help)!
+:::
+
+## Motivation
+
+Helm's ultimate purpose is to simplify the deployment of Kubernetes resources and help make those deployments reproducible. However, it is quite common to deploy the same software application into different kinds of environments using slightly different configuration overrides.
+
+This hinders reproducibility, since operators end up with a set of configuration files and additional shell scripts to ensure that the Helm command line used to deploy a piece of software into a given environment is always the same.
+
+Pants solves this problem by providing the ability to manage the configuration files and the different parameters of a deployment as a single unit, such that a simple command line like `pants experimental-deploy ::` will always have the same effect on each of the deployments previously defined.
+
+## Defining Helm deployments
+
+Helm deployments are defined using the `helm_deployment` target, which has a series of fields that can be used to guarantee the reproducibility of the given deployment. `helm_deployment` targets need to be added by hand, as there is no deterministic way of introspecting your repository to find sources that are specific to Helm:
+
+```python tab={"label":"src/chart/BUILD"}
+helm_chart()
+```
+
+```yaml tab={"label":"src/chart/Chart.yaml"}
+apiVersion: v2
+description: Example Helm chart
+name: example
+version: 0.1.0
+```
+
+```python tab={"label":"src/deployment/BUILD"}
+helm_deployment(
+    name="dev",
+    chart="//src/chart",
+    sources=["common-values.yaml", "dev-override.yaml"]
+)
+
+helm_deployment(
+    name="stage",
+    chart="//src/chart",
+    sources=["common-values.yaml", "stage-override.yaml"]
+)
+
+helm_deployment(
+    name="prod",
+    chart="//src/chart",
+    sources=["common-values.yaml", "prod-override.yaml"]
+)
+```
+
+```yaml tab={"label":"src/deployment/common-values.yaml"}
+# Default values common to all deployments
+env:
+  SERVICE_NAME: my-service
+```
+
+```yaml tab={"label":"src/deployment/dev-override.yaml"}
+# Specific values to the DEV environment
+env:
+  ENV_ID: dev
+```
+
+```yaml tab={"label":"src/deployment/stage-override.yaml"}
+# Specific values to the STAGE environment
+env:
+  ENV_ID: stage
+```
+
+```yaml tab={"label":"src/deployment/prod-override.yaml"}
+# Specific values to the PRODUCTION environment
+env:
+  ENV_ID: prod
+```
+
+There are quite a few things to notice in the previous example:
+
+- The `helm_deployment` target requires you to explicitly set the `chart` field to specify which chart to use.
+- We have three different deployments using different sets of configuration files with the same chart.
+- One of those value files (`common-values.yaml`) provides default values that are common to all deployments.
+- Each deployment uses an additional `xxx-override.yaml` file with values that are specific to the given deployment.
+
+The `helm_deployment` target has many additional fields, including the target Kubernetes namespace, inline override values (similar to using Helm's `--set` arg), and many others. Please run `pants help helm_deployment` to see all the possibilities.
+
+## Dependencies with `docker_image` targets
+
+A Helm deployment will in most cases deploy one or more Docker images into Kubernetes. Furthermore, it's quite likely that at least a few of those will be first-party Docker images. Pants can analyse the Helm chart used in a deployment to detect required first-party Docker images that are referenced by their Pants target addresses.
+
+To illustrate this, let's say we have a first-party Docker image that we want to deploy into Kubernetes as a `Pod` resource.
To achieve this, we define the following workspace:
+
+```python tab={"label":"src/docker/BUILD"}
+docker_image()
+```
+
+```text tab={"label":"src/docker/Dockerfile"}
+FROM busybox:1.28
+```
+
+```python tab={"label":"src/chart/BUILD"}
+helm_chart()
+```
+
+```yaml tab={"label":"src/chart/Chart.yaml"}
+apiVersion: v2
+description: Example Helm chart
+name: example
+version: 0.1.0
+```
+
+```yaml tab={"label":"src/chart/values.yaml"}
+# Default image in case this chart is used by other tools after being published
+image: example.com/registry/my-app:latest
+```
+
+```yaml tab={"label":"src/chart/templates/pod.yaml"}
+---
+apiVersion: v1
+kind: Pod
+metadata:
+  name: my-pod
+  labels:
+    chart: "{{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }}"
+spec:
+  containers:
+    - name: my-app
+      # Uses the `image` value entry from the deployment inputs
+      image: {{ .Values.image }}
+```
+
+```python tab={"label":"src/deployment/BUILD"}
+# Overrides the `image` value for the chart using the target address for the first-party docker image.
+helm_deployment(chart="src/chart", values={"image": "src/docker:docker"})
+```
+
+:::note Docker image references vs. Pants' target addresses
+You should use typical Docker registry addresses in your Helm charts. Because Helm charts are distributable artifacts and may be used with tools other than Pants, you should create your charts such that when a chart is used, all Docker image addresses are valid references to images in accessible Docker registries. As shown in the example, we recommend that you make the image address value configurable, especially for charts that deploy first-party Docker images.
+Your chart resources can still use off-the-shelf images published by other means, and in those cases you will also be referencing the Docker image address. Usage of Pants' target addresses is intended for your own first-party images, because the image reference of those is not known at the time we create the sources (they are computed later).
+:::
+
+With this setup we should be able to run `pants dependencies src/deployment` and Pants should give the following output:
+
+```text
+src/chart
+src/docker
+```
+
+This should work with any kind of Kubernetes resource that leads to a Docker image being deployed into Kubernetes, such as `Deployment`, `StatefulSet`, `ReplicaSet`, `CronJob`, etc. Please get in touch with us in case you find Pants is not able to infer dependencies for any of your `helm_deployment` targets, by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](/community/getting-help).
+
+:::note How is the Docker image reference calculated during deployment?
+Pants relies on the behaviour of the `docker_image` target when generating the final image reference. Since a given image may have more than one valid image reference, **Pants will try to use the first one that is not tagged as `latest`**, falling back to `latest` if none could be found.
+It's good practice to publish your Docker images using tags other than `latest`, and Pants' preference for those tags guarantees that the _version_ of the Docker image being deployed is the expected one.
+:::
+
+## Value files
+
+It's very common for Helm deployments to use a series of files providing values that customise the given chart.
When using deployments that may have more than one YAML file as the source of configuration values, the Helm backend needs to sort the file names in a way that is consistent across different machines, as the order in which those files are passed to the Helm command is relevant. The final order matches the order in which those files are specified in the `sources` field of the `helm_deployment` target. For example, given the following `BUILD` file:
+
+```python title="src/deployment/BUILD"
+helm_deployment(name="dev", chart="//src/chart", sources=["first.yaml", "second.yaml", "last.yaml"])
+```
+
+This will result in the Helm command receiving the value files in exactly that order.
+
+If using any glob pattern in the `sources` field, the plugin will first group the files according to the order in which those glob patterns are listed. In this grouping, files that are resolved by more than one pattern will be part of the most specific group. Alphanumeric ordering is then used for the files within each of those groups. To illustrate this scenario, consider the following list of files:
+
+```
+src/deployment/002-config_maps.yaml
+src/deployment/001-services.yaml
+src/deployment/first.yaml
+src/deployment/dev/daemon_sets.yaml
+src/deployment/dev/services-override.yaml
+src/deployment/last.yaml
+```
+
+And also the following `helm_deployment` target definition:
+
+```python title="src/deployment/BUILD"
+helm_deployment(
+    name="dev",
+    chart="//src/chart",
+    sources=["first.yaml", "*.yaml", "dev/*-override.yaml", "dev/*.yaml", "last.yaml"]
+)
+```
+
+In this case, the final ordering of the files would be as follows:
+
+```
+src/deployment/first.yaml
+src/deployment/001-services.yaml
+src/deployment/002-config_maps.yaml
+src/deployment/dev/services-override.yaml
+src/deployment/dev/daemon_sets.yaml
+src/deployment/last.yaml
+```
+
+We believe that this approach gives a very consistent and predictable ordering, while at the same time giving end users total flexibility to organise their files as best fits each particular deployment.
+
+## Inline values
+
+In addition to value files, you can also use inline values in your `helm_deployment` targets by means of the `values` field. All inline values set this way will override any entry that may come from value files.
+
+Inline values are defined as a key-value dictionary, like in the following example:
+
+```python title="src/deployment/BUILD"
+helm_deployment(
+    name="dev",
+    chart="//src/chart",
+    values={
+        "nameOverride": "my_custom_name",
+        "image.pullPolicy": "Always",
+    },
+)
+```
+
+## Using dynamic values
+
+Pants has support for value interpolation in your BUILD files, and you can make use of it when defining some of the values of your `helm_deployment`. This is not exclusive to the Helm backend, but it's illustrated here to showcase how it could be leveraged to inject environment variables into your charts.
+
+Consider the following example:
+
+```python title="src/deployment/BUILD"
+helm_deployment(
+    name="dev",
+    chart="//src/chart",
+    values={
+        "configmap.deployedAt": f"{env('DEPLOY_TIME')}",
+    },
+)
+```
+
+In the previous example, Pants will use the value of the `DEPLOY_TIME` environment variable in your inline values, which will then be forwarded to your chart.
Now you can launch a deployment using the following command:
+
+```
+DEPLOY_TIME=$(date) pants experimental-deploy src/deployment:dev
+```
+
+This isn't restricted to just the `values` field; it can be used in other fields too, as shown in the following example:
+
+```python title="src/deployment/BUILD"
+helm_deployment(
+    name="dev",
+    chart="//src/chart",
+    release=f"{env('ORGANIZATION_ID')}-dev",
+    namespace=f"product-{env('NAMESPACE_SUFFIX')}"
+)
+```
+
+As shown above, the `release` and `namespace` fields are now calculated at deploy-time by Pants and, as in the previous example, they will be forwarded to the Helm chart accordingly.
+
+:::caution Ensuring repeatable deployments
+You should always favor static values (or value files) over dynamic values in your deployments. Using interpolated environment variables can render your deployments non-repeatable if those values affect the behaviour of the deployed system, or what gets deployed (i.e. Docker image addresses).
+Be careful when choosing the values that are going to be calculated dynamically.
+:::
+
+## Third party chart artifacts
+
+The previous examples of the `helm_deployment` target are all based on the deployment declaring a dependency on a Helm chart that is also part of the same repository. Since charts support having dependencies on other charts in the same repository, or on external 3rd party Helm artifacts (declared as `helm_artifact`), all that dependency resolution is handled for us.
+
+However, `helm_deployment` targets are not limited to first-party charts, as it is also possible to declare a deployment having a dependency on a 3rd party Helm artifact instead. As an example, consider the following workspace layout:
+
+```python tab={"label":"3rdparty/helm/jetstack/BUILD"}
+helm_artifact(
+    name="cert-manager",
+    artifact="cert-manager",
+    version="v0.7.0",
+    repository="https://charts.jetstack.io",
+)
+```
+
+```python tab={"label":"src/deploy/BUILD"}
+helm_deployment(
+    name="main",
+    chart="//3rdparty/helm/jetstack:cert-manager",
+    values={
+        "installCRDs": "true"
+    },
+)
+```
+
+In this example, the deployment at `src/deploy:main` declares a dependency on a 3rd party Helm artifact instead of a chart in the same repository. The only difference in this case when compared to first-party charts is that Pants will resolve and fetch the third party artifact automatically. Once the artifact has been resolved, there is no difference to Pants.
+
+## Post-renderers
+
+User-defined [Helm post-renderers](https://helm.sh/docs/topics/advanced/#post-rendering) are supported by the Helm backend by means of the `post_renderers` field in the `helm_deployment` target. This field takes addresses to other runnable targets (any target that can be run using `pants run [address]`) and will build and run those targets as part of the `experimental-deploy` goal. The referenced targets can be either shell commands or custom-made in any of the other languages supported by Pants.
+
+As an example, let's show how we can use the tool [`vals`](https://github.com/variantdev/vals) as a post-renderer and replace all references to secret values stored in HashiCorp Vault with their actual values.
The following example is composed of a Helm chart that creates a secret resource in Kubernetes and a Helm deployment that is configured to use `vals` as a post-renderer:
+
+```python tab={"label":"src/chart/BUILD"}
+helm_chart()
+```
+
+```yaml tab={"label":"src/chart/Chart.yaml"}
+apiVersion: v2
+description: Example Helm chart with vals
+name: example
+version: 0.1.0
+```
+
+```yaml tab={"label":"src/chart/templates/secret.yaml"}
+apiVersion: v1
+kind: Secret
+metadata:
+  name: mysecret
+  namespace: default
+data:
+  username: admin
+  # This should be replaced by `vals` during the post-rendering
+  password: ref+vault://path/to/admin#/password
+type: Opaque
+```
+
+```python tab={"label":"src/deploy/BUILD"}
+run_shell_command(
+    name="vals",
+    command="vals eval -f -",
+)
+
+helm_deployment(
+    chart="//src/chart",
+    post_renderers=[":vals"],
+)
+```
+
+In the previous example we define a `run_shell_command` target that will invoke the `vals eval` command (`vals` needs to be installed on the local machine) as part of the Helm post-rendering machinery, which will result in the `ref+vault` reference being replaced by the actual value stored in Vault at the given path.
+
+:::note Using multiple post-renderers
+If more than one target address is given in the `post_renderers` field, they will be invoked in the given order, piping the output of each one into the input of the next.
+:::
+
+## Deploying
+
+Continuing with the example in the previous section, we can deploy it into Kubernetes using the command `pants experimental-deploy src/deployment`. This will trigger the following steps:
+
+1. Analyse the dependencies of the given deployment.
+2. Build and publish any first-party Docker images and Helm charts that are part of those dependencies.
+3. Post-process the Kubernetes manifests generated by Helm by replacing all references to first-party Docker images with their final registry destination.
+4. Initiate the deployment of the final Kubernetes resources resulting from the post-processing.
+
+The `experimental-deploy` goal also supports default Helm pass-through arguments, which allow changing the deployment behaviour (e.g. making it atomic) or specifying which Kubernetes config file (the `kubeconfig` file) and target context to use in the deployment.
+
+Please note that the list of valid pass-through arguments has been limited to those that do not alter the reproducibility of the deployment (i.e. `--create-namespace` is not a valid pass-through argument). Such arguments have equivalent fields in the `helm_deployment` target.
+
+For example, to make an atomic deployment into a non-default Kubernetes context you can use a command like the following:
+
+```
+pants experimental-deploy src/deployments:prod -- --kube-context my-custom-kube-context --atomic
+```
+
+To perform a dry run, use the `--dry-run` flag of the `experimental-deploy` goal.
+
+```
+pants experimental-deploy --dry-run src/deployments:prod
+```
+
+:::note How does Pants authenticate with the Kubernetes cluster?
+The short answer is: it doesn't.
+Pants will invoke Helm under the hood with the appropriate arguments to only perform the deployment. Any authentication steps that may be needed to perform the given deployment have to be done before invoking the `experimental-deploy` goal. If you are planning to run the deployment procedure from your CI/CD pipelines, ensure that all necessary preliminary steps (including authentication with the cluster) are done before the one that triggers the deployment.
+:::
diff --git a/versioned_docs/version-2.24/docs/helm/index.mdx b/versioned_docs/version-2.24/docs/helm/index.mdx
new file mode 100644
index 000000000..e08078a2d
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/helm/index.mdx
@@ -0,0 +1,547 @@
+---
+  title: Helm Overview
+  sidebar_position: 0
+---
+
+---
+
+:::caution Helm support is in alpha stage
+Pants has good support for the most common operations for managing Helm chart sources. However, there may be use cases not covered yet.
+
+Please share feedback for what you need to use Pants with your Helm charts by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](/community/getting-help)!
+:::
+
+## Initial setup
+
+First, activate the relevant backend in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  ...
+  "pants.backend.experimental.helm",
+  ...
+]
+```
+
+If you have more than one Helm chart in the same repository, organise them such that each chart lives in a separate folder with the chart definition file (`Chart.yaml`) at its root. The Helm backend auto-detects the root folder of each Helm chart, taking the chart definition file `Chart.yaml` as the reference for that root.
+
+```yaml tab={"label":"src/helm/foo/Chart.yaml"}
+apiVersion: v2
+description: Foo Helm chart
+name: foo
+version: 0.1.0
+```
+
+```yaml tab={"label":"src/helm/bar/Chart.yaml"}
+apiVersion: v2
+description: Bar Helm chart
+name: bar
+version: 0.1.0
+```
+
+### Adding `helm_chart` targets
+
+Helm charts are identified by the presence of a `Chart.yaml` or `Chart.yml` file, which contains relevant metadata about the chart like its name, version, dependencies, etc. To get started quickly you can create a simple `Chart.yaml` file in your sources folder:
+
+```yaml title="Chart.yaml"
+apiVersion: v2
+description: Example Helm chart
+name: example
+version: 0.1.0
+```
+
+:::note Using `helm create`
+You can use the `helm create` command to create an initial skeleton for your chart, but be sure you have properly configured your source root patterns (as shown in the previous section), since the `helm create` command will create a folder with the name of your chart and place the sources inside it.
+:::
+
+Then run [`pants tailor ::`](../getting-started/initial-configuration.mdx#5-generate-build-files) to generate `BUILD` files. This will scan your source repository in search of `Chart.yaml` or `Chart.yml` files and create a `helm_chart` target for each of them.
+
+```
+❯ pants tailor ::
+Created src/helm/example/BUILD:
+  - Add helm_chart target example
+```
+
+If your workspace contains any Helm unit tests (under a `tests` folder), Pants will also identify them and create `helm_unittest_tests` targets for them. Additionally, if your unit tests also have snapshots (under a `tests/__snapshot__` folder), `tailor` will identify those files as test snapshots and will create `resources` targets for them. See "Snapshot testing" below for more info.
+
+### Basic operations
+
+This setup is enough to perform some common operations on our Helm chart source code.
+
+#### Linting
+
+The Helm backend implements Pants' `lint` goal by hooking into the `helm lint` command:
+
+```
+pants lint ::
+==> Linting example
+[INFO] Chart.yaml: icon is recommended
+
+1 chart(s) linted, 0 chart(s) failed
+
+
+✓ helm succeeded.
+```
+
+The linting command is non-strict by default.
If you want to enforce strict linting, it can be done either globally in the `pants.toml` file or on a per-chart basis, using one of the two following ways:
+
+```toml tab={"label":"pants.toml"}
+[helm]
+# Enables strict linting globally
+lint_strict = true
+```
+
+```python tab={"label":"BUILD"}
+helm_chart(lint_strict=True)
+```
+
+Likewise, you could enable strict linting globally and then choose to disable it on a per-target basis. Run `pants help helm` or `pants help helm_chart` for more information.
+
+You can set the field `skip_lint=True` on each `helm_chart` target to avoid linting it.
+
+#### Package
+
+Packaging Helm charts is supported out of the box via Pants' `package` goal. The final package will be saved as a `.tgz` file under the `dist` folder at your source root.
+
+```
+pants package ::
+10:23:15.24 [INFO] Completed: Packaging Helm chart: testprojects/src/helm/example
+10:23:15.24 [INFO] Wrote dist/testprojects.src.helm.example/example/example-0.2.0.tgz
+Built Helm chart artifact: testprojects.src.helm.example/example/example-0.2.0.tgz
+```
+
+The final output folder can be customised using the `output_path` field in the `helm_chart` target. Run `pants help helm_chart` for more information.
+
+#### Helm chart version
+
+Helm charts are versioned artifacts, with the value of the `version` field in `Chart.yaml` determining the actual version of the chart. Pants needs to know the version of a first-party chart to be able to build packages and correctly establish the dependencies among them. By default, Pants will use the value in `Chart.yaml` as the given version of a chart, but it also supports overriding that value via the `version` field in the `helm_chart` target.
+
+For example, a chart defined as such:
+
+```python tab={"label":"src/helm/example/BUILD"}
+helm_chart()
+```
+
+```yaml tab={"label":"src/helm/example/Chart.yaml"}
+apiVersion: v2
+description: Example Helm chart
+name: example
+version: 0.1.0
+```
+
+Will be understood to have version `0.1.0` (as read from the `Chart.yaml` file). However, if we specify a version in `helm_chart` as follows:
+
+```python tab={"label":"src/helm/example/BUILD"}
+helm_chart(version="2.0.0")
+```
+
+```yaml tab={"label":"src/helm/example/Chart.yaml"}
+apiVersion: v2
+description: Example Helm chart
+name: example
+version: 0.1.0
+```
+
+Now the value in `Chart.yaml` will be ignored and the chart will be understood to have version `2.0.0`.
+
+Because Pants has support for interpolating values in the target fields, we can also make this version value more dynamic, as follows:
+
+```python title="src/helm/example/BUILD"
+helm_chart(version=env('HELM_CHART_VERSION'))
+```
+
+Now the version value for this chart will be whatever has been set as the value of the environment variable `HELM_CHART_VERSION`.
+
+## Helm Unit tests
+
+The Helm backend supports running Helm unit tests via the [Helm `unittest` plugin](https://github.com/quintush/helm-unittest).
To run unit tests, follow the instructions on how to use that plugin and then create a `BUILD` file in the same folder where your tests live, with the following target:
+
+```python tab={"label":"src/helm/example/tests/BUILD"}
+helm_unittest_tests()
+```
+
+```yaml tab={"label":"src/helm/example/templates/env-configmap.yaml"}
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: example-configmap
+data:
+{{- range $envKey, $envVal := .Values.env }}
+  {{ $envKey | upper }}: {{ $envVal | quote }}
+{{- end }}
+```
+
+```yaml tab={"label":"src/helm/example/tests/env-configmap_test.yaml"}
+suite: test env-configmap
+templates:
+  - env-configmap.yaml
+tests:
+  - it: should contain the env map variables
+    set:
+      env:
+        VAR1_NAME: var1Value
+        var2_name: var2Value
+    asserts:
+      - equal:
+          path: data.VAR1_NAME
+          value: "var1Value"
+      - equal:
+          path: data.VAR2_NAME
+          value: "var2Value"
+```
+
+With the test files in place, you can now run `pants test ::` and Pants will execute each of your tests individually:
+
+```
+pants test ::
+10:50:12.45 [INFO] Completed: Running Helm unittest on: testprojects/src/helm/example/tests/env-configmap_test.yaml
+10:50:12.46 [INFO] Completed: Run Helm Unittest - testprojects/src/helm/example/tests/env-configmap_test.yaml succeeded.
+
+✓ testprojects/src/helm/example/tests/env-configmap_test.yaml succeeded in 0.75s.
+```
+
+#### Feeding additional files to unit tests
+
+In some cases we may want our tests to have access to additional files which are not part of the chart. This can be achieved by setting a dependency between our unit test targets and a `resources` target as follows:
+
+```python tab={"label":"src/helm/example/tests/BUILD"}
+helm_unittest_tests(dependencies=[":extra-values"])
+
+resources(name="extra-values", sources=["extra-values.yml"])
+```
+
+```yaml tab={"label":"src/helm/example/templates/env-configmap.yaml"}
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: example-configmap
+data:
+{{- range $key, $val := .Values.data }}
+  {{ $key | upper }}: {{ $val | quote }}
+{{- end }}
+```
+
+```yaml tab={"label":"src/helm/example/tests/extra-values.yml"}
+data:
+  VAR1_NAME: var1Value
+  var2_name: var2Value
+```
+
+```yaml tab={"label":"src/helm/example/tests/env-configmap_test.yaml"}
+suite: test env-configmap
+templates:
+  - env-configmap.yaml
+values:
+  - extra-values.yml
+tests:
+  - it: should contain the env map variables
+    asserts:
+      - equal:
+          path: data.VAR1_NAME
+          value: "var1Value"
+      - equal:
+          path: data.VAR2_NAME
+          value: "var2Value"
+```
+
+Additional files can be referenced from any location inside your workspace. Note that the actual path to the additional files will be relative to the source roots configured in Pants.
+
+In this example, since Helm charts define their source root at the location of the `Chart.yaml` file and the `extra-values.yml` file is inside the `tests` folder relative to the chart, the test suite can access the file as if it were local to it.
+
+However, in the following case, we need to reference the extra file relative to the chart root. Note the `../data/extra-values.yml` path in the test suite.
+
+```toml tab={"label":"pants.toml"}
+[source]
+root_patterns=["src/extra"]
+```
+
+```python tab={"label":"src/extra/data/BUILD"}
+resources(name="extra-values", sources=["extra-values.yml"])
+```
+
+```yaml tab={"label":"src/extra/data/extra-values.yml"}
+data:
+  VAR1_NAME: var1Value
+  var2_name: var2Value
+```
+
+```python tab={"label":"src/helm/example/tests/BUILD"}
+helm_unittest_tests(dependencies=["src/extra/data:extra-values"])
+```
+
+```yaml tab={"label":"src/helm/example/templates/env-configmap.yaml"}
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: example-configmap
+data:
+{{- range $key, $val := .Values.data }}
+  {{ $key | upper }}: {{ $val | quote }}
+{{- end }}
+```
+
+```yaml tab={"label":"src/helm/example/tests/env-configmap_test.yaml"}
+suite: test env-configmap
+templates:
+  - env-configmap.yaml
+values:
+  - ../data/extra-values.yml
+tests:
+  - it: should contain the env map variables
+    asserts:
+      - equal:
+          path: data.VAR1_NAME
+          value: "var1Value"
+      - equal:
+          path: data.VAR2_NAME
+          value: "var2Value"
+```
+
+:::caution Using `file`, `files` and `relocated_files` targets
+Other file-centric targets are also supported, just be aware that `file` and `files` targets are
+not affected by the source roots setting. When using `relocated_files`, the files will be relative
+to the value set in the `dest` field.
+:::
+
+#### Snapshot testing
+
+Unit test snapshots are supported by Pants by wrapping the snapshots in resources targets, as shown in the previous section. Snapshot resources will be automatically inferred as dependencies of the tests where they reside, so there is no need to add an explicit `dependencies` relationship in your `helm_unittest_tests` targets.
+
+Since managing snapshots by hand is quite tedious, Pants provides some utilities to manage them in a simpler way. To generate or update the snapshots, use Pants's `generate-snapshots` goal:
+
+```
+pants generate-snapshots ::
+```
+
+This will generate test snapshots for tests that require them, with out-of-date snapshots being overwritten by newer ones.
+
+If new `__snapshot__` folders are created after running the `generate-snapshots` goal, we recommend running the `tailor` goal again so that Pants can detect these new folders and create `resources` targets as appropriate.
+
+#### Timeouts
+
+Pants can cancel tests that take too long, which is useful to prevent tests from hanging indefinitely.
+
+To add a timeout, set the `timeout` field to an integer value of seconds, like this:
+
+```python title="BUILD"
+helm_unittest_test(name="tests", source="env-configmap_test.yaml", timeout=120)
+```
+
+When you set `timeout` on the `helm_unittest_tests` target generator, the same timeout will apply to every generated `helm_unittest_test` target. Instead, you can use the `overrides` field:
+
+```python title="BUILD"
+helm_unittest_tests(
+    name="tests",
+    overrides={
+        "env-configmap_test.yaml": {"timeout": 20},
+        ("deployment_test.yaml", "pod_test.yaml"): {"timeout": 35},
+    },
+)
+```
+
+You can also set a default value and a maximum value in `pants.toml`:
+
+```toml title="pants.toml"
+[test]
+timeout_default = 60
+timeout_maximum = 600
+```
+
+If a target sets its `timeout` higher than `[test].timeout_maximum`, Pants will use the value in `[test].timeout_maximum`.
+
+Use the option `pants test --no-timeouts` to temporarily disable timeouts, e.g. when debugging.
+
+#### Retries
+
+Pants can automatically retry failed tests. This can help keep your builds passing even with flaky tests, like integration tests.
+
+```toml tab={"label":"pants.toml"}
+[test]
+attempts_default = 3
+```
+
+## Publishing Helm charts
+
+Pants only supports publishing Helm charts to OCI registries, a feature that was made generally available in Helm 3.8.
+
+Publishing is done with Pants' `publish` goal, but first you will need to tell Pants which destination registries it can upload your charts to.
+
+### Configuring OCI registries
+
+Similar to the `docker_image` target, a `helm_chart` target takes an optional `registries` field whose value is a list of registry endpoints (prefixed by the `oci://` protocol):
+
+```python title="src/helm/example/BUILD"
+helm_chart(
+    name="example",
+    registries=[
+        "oci://reg.company.internal"
+    ]
+)
+```
+
+The chart published from that given target will be uploaded to the OCI registry specified.
+
+If you have several charts that have to be published into the same registries, you can define those registries in your `pants.toml` file and then reference them by their alias, prefixed by a `@` symbol.
+
+You can also designate one or more registries as default; charts that have no explicit `registries` field will then use those default registries.
+
+```toml tab={"label":"pants.toml"}
+[helm.registries.company-registry1]
+address = "oci://reg1.company.internal"
+default = true
+
+[helm.registries.company-registry2]
+address = "oci://reg2.company.internal"
+```
+
+```python tab={"label":"src/example/BUILD"}
+helm_chart(name="demo")
+
+# This is equivalent to the previous target,
+# since company-registry1 is the default registry:
+helm_chart(
+    name="demo",
+    registries=["@company-registry1"],
+)
+
+# You can mix named and direct registry references.
+helm_chart(
+    name="demo2",
+    registries=[
+        "@company-registry2",
+        "oci://ext-registry.company-b.net:8443",
+    ]
+)
+```
+
+### Setting a repository name
+
+When publishing charts into an OCI registry, you will most likely be interested in separating them from other kinds of OCI assets (i.e. container images). To do so, you can set a `repository` field in the `helm_chart` target so the chart artifact will be uploaded to the given path:
+
+```python title="src/helm/example/BUILD"
+helm_chart(
+    name="example",
+    repository="charts"
+)
+```
+
+With the previous setting, your chart would be published to your default registry under the `charts` folder, as in `oci://myregistry.internal/charts/example-0.1.0.tgz`.
+
+You can also set a default global repository in `pants.toml`, as in the following example:
+
+```toml title="pants.toml"
+[helm]
+default_registry_repository = "charts"
+```
+
+## Managing Chart Dependencies
+
+Helm charts can depend on other charts, whether first-party charts defined in the same repo, or third-party charts published in a registry. Pants uses this dependency information to know when work needs to be re-run.
+
+:::note Chart.yaml API version
+To benefit from Pants dependency management and inference in your Helm charts, you will need to use `apiVersion: v2` in your `Chart.yaml` file.
+:::
+
+### `Chart.yaml` dependencies
+
+Pants will automatically infer dependencies from the `Chart.yaml` file.
+
+For example, given two charts `foo` and `bar` and a dependency between them:
+
+```yaml tab={"label":"src/helm/foo/Chart.yaml"}
+apiVersion: v2
+description: Foo Helm chart
+name: foo
+version: 0.1.0
+```
+
+```python tab={"label":"src/helm/foo/BUILD"}
+helm_chart()
+```
+
+```yaml tab={"label":"src/helm/bar/Chart.yaml"}
+apiVersion: v2
+description: Bar Helm chart
+name: bar
+version: 0.1.0
+dependencies:
+  - name: foo
+```
+
+```python tab={"label":"src/helm/bar/BUILD"}
+helm_chart()
+```
+
+Then, running `pants dependencies` on `bar` will list `foo` as a dependency:
+
+```
+pants dependencies src/helm/bar
+src/helm/foo
+```
+
+### Explicitly provided dependencies in `BUILD` files
+
+If you prefer, you can let your BUILD files be the "source of truth" for dependencies, instead of specifying them in `Chart.yaml`:
+
+```yaml tab={"label":"src/helm/foo/Chart.yaml"}
+apiVersion: v2
+description: Foo Helm chart
+name: foo
+version: 0.1.0
+```
+
+```python tab={"label":"src/helm/foo/BUILD"}
+helm_chart()
+```
+
+```yaml tab={"label":"src/helm/bar/Chart.yaml"}
+apiVersion: v2
+description: Bar Helm chart
+name: bar
+version: 0.1.0
+```
+
+```python tab={"label":"src/helm/bar/BUILD"}
+helm_chart(dependencies=["//src/helm/foo"])
+```
+
+In this case, the `pants dependencies` command will show the same result and, in addition, Pants will modify its copy of `bar`'s `Chart.yaml` before using it, so that it includes `foo` in its dependency list. Note that Pants will not modify the original copy in your source tree, only the copy it uses in the sandboxed execution environment.
+
+### Third party chart artifacts
+
+Third party charts are provided to Pants using the `helm_artifact` target:
+
+```python title="3rdparty/helm/BUILD"
+helm_artifact(
+    artifact="chart_name",
+    version="0.0.1",
+    registry="...",  # Optional
+    repository="...",  # Optional for OCI registries
+)
+```
+
+Third party artifacts are resolved using `helm pull`. Other charts can reference them in the same way as first-party charts (either in the `Chart.yaml` or in the `BUILD` file).
+
+When adding third party artifacts, the `artifact` and `version` fields are mandatory, in addition to one _origin_ from which to download the actual archive. There are two different origins supported: _classic Helm repositories_ and _OCI registries_.
+
+For **classic repositories**, provide the full URL to the location of the chart archive, excluding the archive file itself:
+
+```python title="3rdparty/helm/jetstack/BUILD"
+helm_artifact(
+    artifact="cert-manager",
+    version="v0.7.0",
+    repository="https://charts.jetstack.io",
+)
+```
+
+For **OCI registries**, you must provide the URL to the registry in the `registry` field, and an optional `repository` field with the path inside that registry.
+
+```python title="3rdparty/helm/example/BUILD"
+helm_artifact(
+    artifact="foo",
+    version="1.0.0",
+    registry="oci://registry.example.com",
+    repository="charts",
+)
+```
diff --git a/versioned_docs/version-2.24/docs/helm/kubeconform.mdx b/versioned_docs/version-2.24/docs/helm/kubeconform.mdx
new file mode 100644
index 000000000..89e929375
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/helm/kubeconform.mdx
@@ -0,0 +1,46 @@
+---
+  title: Kubeconform
+  sidebar_position: 999
+---
+
+---
+
+## Overview
+
+The Helm backend has opt-in support for using [Kubeconform](https://github.com/yannh/kubeconform) as a validation tool for both Helm charts and deployments.
This gives extra confidence that the templates defined in the different charts conform to a Kubernetes version or specification, plus the additional benefit of ensuring that their final rendered version (when used in deployments) also meets those criteria.
+
+To enable the usage of Kubeconform, first we need to activate the relevant backend in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  ...
+  "pants.backend.experimental.helm.check.kubeconform",
+  ...
+]
+```
+
+This will enable the `kubeconform` subsystem in our workspace and enrich our `helm_chart` and `helm_deployment` targets with common fields that can be used to fine-tune Kubeconform's behaviour for each of our targets.
+
+:::note Extra documentation on `kubeconform`
+Please run `pants help kubeconform`, as well as `pants help helm_chart`/`pants help helm_deployment`, after enabling the backend to consult the different configuration settings.
+:::
+
+## Validating charts and deployments
+
+After enabling the backend, we can run the `check` goal on both `helm_chart` and `helm_deployment` targets as we please:
+
+```
+❯ pants check ::
+```
+
+The check always happens on the rendered form of the Helm chart in question. In the case of `helm_chart` targets, the chart will be rendered as defined, using the default companion `values.yaml` (or `values.yml`) file defined for the chart.
+
+For `helm_deployment` targets, the referenced chart will be rendered using the deployment sources and other settings like inline values, etc. This will also include all post-renderers defined for that given deployment (in case any have been defined). The effect this has is that running `pants check src/helm/mydeployment` requires a bit more work under the hood than checking a standalone chart.
+
+You can use the `skip_kubeconform` field in both `helm_chart` and `helm_deployment` to prevent running it against a given target in case you consider it to be a redundant check.
+
+:::caution Skipping check on publishable charts
+In a workspace that contains both `helm_deployment` and `helm_chart` targets, it is easy to consider checking the charts a redundant task, as doing so on the deployment is effectively the same. This is a safe assumption as long as you don't publish your charts to be consumed elsewhere. For that specific case we recommend also checking the standalone chart, as that gives assurance that the chart package itself is sound.
+Regardless of that, running Kubeconform on a standalone chart is a pretty lightweight operation, so unless you are facing extremely long build times it's better not to skip it.
+:::
diff --git a/versioned_docs/version-2.24/docs/introduction/_category_.json b/versioned_docs/version-2.24/docs/introduction/_category_.json
new file mode 100644
index 000000000..fa1c06ac8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/introduction/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Introduction",
+  "position": 1
+}
diff --git a/versioned_docs/version-2.24/docs/introduction/how-does-pants-work.mdx b/versioned_docs/version-2.24/docs/introduction/how-does-pants-work.mdx
new file mode 100644
index 000000000..ed73c64f3
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/introduction/how-does-pants-work.mdx
@@ -0,0 +1,96 @@
+---
+  title: How does Pants work?
+  sidebar_position: 1
+---
+
+---
+
+## The Pants Engine
+
+Pants is built around the "v2" engine, which is completely new technology, built from the ground up, based on lessons learned from working on the previous, "v1", technology.
+
+The Pants engine is written in [Rust](https://www.rust-lang.org/), for performance. The build rules that it uses are written in typed Python 3, for familiarity and simplicity.
+
+The engine is designed so that fine-grained invalidation, concurrency, hermeticity, caching, and remote execution happen naturally, without rule authors needing to think about it.
+
+## What are the benefits?
+
+### Concurrency
+
+The engine can take full advantage of all the cores on your machine because relevant portions are implemented in Rust atop the [Tokio](https://tokio.rs/) framework.
+
+
+  Pants running multiple linters in parallel.
+
+
+This means, for example, that you can run all of your linters at the same time, and fully utilize your cores to run tests in parallel.
+
+### Caching
+
+The engine caches processes precisely based on their inputs, and sandboxes execution to minimize side effects and to make builds consistent and repeatable.
+
+
+  We run both tests, then add a syntax error to one test and rerun; the
+  unmodified test uses the cache and is isolated from the syntax error.
+
+
+### Remote Execution
+
+The engine can delegate work to a remote build cluster so that you are no longer limited by the number of cores on your machine. If you have enough remote workers, you can run your entire test suite in total parallelism.
+
+Remote caching means that your coworkers and your CI can reuse the results of commands you already ran.
+
+### Fine-grained invalidation
+
+Work is broken down into many small units and kept warm in a daemon so that as little work as possible needs to be re-done when files change.
+
+### Hermetic execution
+
+Pants sandboxes all processes that it executes, ensuring that cache keys are always accurate, and builds are always correct.
+
+### Dependency inference
+
+Pants analyzes your code's import statements to determine files' dependencies automatically. Dependency information is required for precise change detection and cache invalidation, but inference means that you don't need to declare dependencies manually (and hermetic execution guarantees that they are always accurate)!
+
+Older build tools like Bazel:
+
+```python title="BUILD"
+python_library(
+    name="lib",
+    deps=[
+        "//src/python/project/core",
+        "//src/python/project/models:customer",
+        "//src/python/project/models:organization",
+        "//src/python/project/models:policy",
+        "//src/python/project/models:user",
+        "//src/python/project/views:dashboard",
+        "//src/python/project/util:csrf_util",
+        "//src/python/project/util:strutil",
+    ],
+)
+
+python_tests(
+    name="tests",
+    deps=[
+        ...
+    ],
+)
+```
+
+Pants 2:
+
+```python title="BUILD"
+python_sources(name="lib")
+python_tests(name="tests")
+```
+
+### A powerful plugin system
+
+With the [Pants plugin API](../writing-plugins/overview.mdx), your custom rules will run with the same concurrency, caching, and remoting semantics as the core rules.
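+
+As a rough sketch of what a rule looks like (the `VersionFile` and `ProjectVersion` types below are hypothetical, invented purely for this illustration), a rule is a typed, async Python function registered with the engine:
+
+```python
+# A minimal, illustrative rule sketch using the plugin API's `@rule` decorator.
+from dataclasses import dataclass
+
+from pants.engine.rules import collect_rules, rule
+
+
+@dataclass(frozen=True)
+class VersionFile:
+    content: str
+
+
+@dataclass(frozen=True)
+class ProjectVersion:
+    version: str
+
+
+@rule
+async def compute_version(file: VersionFile) -> ProjectVersion:
+    # The engine memoizes this computation and re-runs it only
+    # when its inputs change.
+    return ProjectVersion(file.content.strip())
+
+
+def rules():
+    return collect_rules()
+```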
+ +Some example plugins that users have written: + +- Cython support +- Building a Docker image, including packages built via `pants package` +- Custom `setup.py` logic to compute the `version` dynamically +- Jupyter support diff --git a/versioned_docs/version-2.24/docs/introduction/welcome-to-pants.mdx b/versioned_docs/version-2.24/docs/introduction/welcome-to-pants.mdx new file mode 100644 index 000000000..12f0dbaf3 --- /dev/null +++ b/versioned_docs/version-2.24/docs/introduction/welcome-to-pants.mdx @@ -0,0 +1,72 @@ +--- + title: Welcome to Pants! + sidebar_position: 0 +--- + +--- + +## What is Pants? + +Pants is a fast, scalable, user-friendly build and developer workflow system for codebases of all sizes, including yours! + +## What does Pants do? + +Pants installs, orchestrates and runs dozens of standard underlying tools - compilers, code generators, dependency resolvers, test runners, linters, formatters, packagers, REPLs and more - composing them into a single stable, hermetic toolchain, and speeding up your workflows via caching and concurrency. + +Pants is designed to be easy to adopt, use, and extend. It doesn't require you to refactor your codebase or to create and maintain massive amounts of build metadata. You invoke it directly on source files and directories, so it doesn't require users to adopt a new conceptual model. + +Pants is currently focused on Python, Go, Java, Scala, Shell, and Docker, with more languages and frameworks coming soon. [The Pants community](/community/members) is friendly and helpful and welcomes involvement of anyone who is interested in creating modern software development tooling. + +## Who is Pants for? + +Pants is useful for repos of all sizes, but is particularly valuable for those containing multiple distinct but interdependent pieces. + +Pants works well with (but does not require) a [_monorepo_ architecture](https://blog.pantsbuild.org/the-monorepo-approach-to-code-management/): a codebase containing multiple projects—often using multiple programming languages and frameworks—in a single unified repository. If you want to scale your codebase without breaking it up into multiple disconnected repos, with all the versioning and maintenance headaches that causes, Pants provides the tooling for you to do so effectively. + +## What are the main features of Pants? + +Pants is designed for fast, consistent, ergonomic builds. Some noteworthy features include: + +- Dependency modeling using static analysis instead of handwritten metadata +- Fine-grained invalidation +- Shared result caching +- Concurrent and remote execution +- Support for dependency lockfiles to prevent supply chain attacks +- A unified interface across all tools and languages +- Extensibility and customizability via a plugin API +- Code introspection features + +## Which languages and frameworks does Pants support? + +- Pants [ships](page:language-support) with support for [Python](../python/overview/index.mdx), [Go](../go/index.mdx), [Java](../jvm/java-and-scala.mdx), [Scala](../jvm/java-and-scala.mdx), [Kotlin](../jvm/kotlin.mdx), and [Shell](../shell/index.mdx). 
+- Pants supports a wide range of code generators (such as Thrift, Protobuf, Scrooge and Avro), linters and formatters, and it is easy to add support for new or custom ones
+- Pants can create standalone binaries, [Docker images](../docker/index.mdx), AWS Lambdas and GCP Cloud Functions
+
+We're listening to the community about which languages, frameworks and tools we should support next, so let us know about your needs by [opening an issue](https://github.com/pantsbuild/pants/issues/new/choose) on GitHub or [chatting with us](/community/members) about it on the community Slack!
+Pants was designed for extensibility, and we welcome [contributions](../contributions/index.mdx)!
+
+## How does Pants work?
+
+The core of Pants is its execution engine, which sequences and coordinates all the underlying work. The engine is written in Rust, for performance. The underlying work is performed by executing _rules_, which are typed Python 3 async coroutines, for familiarity and simplicity.
+
+The engine is designed so that fine-grained invalidation, concurrency, hermeticity, caching, and remote execution happen naturally, without rule authors needing to think about it.
+
+See [here](./how-does-pants-work.mdx) for more details about the Pants engine.
+
+## Is Pants similar to X?
+
+Pants (v2) is a leap forward in the evolution of build systems, a category that runs from the venerable Make through Ant, Maven, Gradle and SBT, to Bazel, Please, Buck, Pants v1 and others.
+
+Its design leans on ideas and inspiration from these earlier tools, while optimizing not just for speed and correctness, but also for ease of adoption, ease of use and ease of extension, all for real-world use cases across a variety of teams.
+
+## Who uses Pants?
+
+Pants is making engineering teams productive and happy at a range of companies and organizations. See a sample of them [here](/spotlight/users)!
+
+## Who develops Pants?
+
+Pants is an open-source software project, developed at [github.com/pantsbuild/pants](https://github.com/pantsbuild/pants). Pants is released under the [Apache License 2.0](https://github.com/pantsbuild/pants/blob/master/LICENSE).
+
+:::note Pants v2 vs. v1
+This documentation is for Pants v2, which is a new system built from the ground up, based on lessons from past work on Pants v1, as well as valued feedback from the user community. See [https://v1.pantsbuild.org](https://v1.pantsbuild.org/) for Pants v1 documentation.
+:::
diff --git a/versioned_docs/version-2.24/docs/javascript/_category_.json b/versioned_docs/version-2.24/docs/javascript/_category_.json
new file mode 100644
index 000000000..2c995785a
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/javascript/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Javascript",
+  "position": 13
+}
diff --git a/versioned_docs/version-2.24/docs/javascript/overview/_category_.json b/versioned_docs/version-2.24/docs/javascript/overview/_category_.json
new file mode 100644
index 000000000..83066cfa7
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/javascript/overview/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Javascript overview",
+  "position": 1
+}
diff --git a/versioned_docs/version-2.24/docs/javascript/overview/enabling-javascript-support.mdx b/versioned_docs/version-2.24/docs/javascript/overview/enabling-javascript-support.mdx
new file mode 100644
index 000000000..d4a1518ae
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/javascript/overview/enabling-javascript-support.mdx
@@ -0,0 +1,76 @@
+---
+ title: Enabling Javascript support
+ sidebar_position: 0
+---
+
+How to enable Pants's bundled Javascript backend package.
+
+---
+
+:::note Example Javascript repository
+See [here](https://github.com/pantsbuild/example-javascript) for examples of Pants's Javascript functionality.
+
+:::
+
+### Configuring the repository
+
+Enable the experimental Javascript [backend](../../using-pants/key-concepts/backends.mdx) like this:
+
+```toml title="pants.toml"
+[GLOBAL]
+...
+backend_packages = [
+  "pants.backend.experimental.javascript"
+]
+```
+
+Pants uses [`package_json`](../../../reference/targets/package_json.mdx) targets to model a NodeJS package.
+Further, [`javascript_source`](../../../reference/targets/javascript_source.mdx) and
+[`javascript_tests`](../../../reference/targets/javascript_test.mdx) targets tell Pants which Javascript files to
+operate on and let you set any metadata.
+
+You can generate these targets by running [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files).
+
+```
+❯ pants tailor ::
+Created project/BUILD:
+  - Add javascript_sources target project
+  - Add javascript_tests target tests
+```
+
+:::tip Improved inference and introspection for bundled projects
+For [dependency inference](../../introduction/how-does-pants-work.mdx#dependency-inference), Pants reads both your
+projects' `package.json` sections and additionally
+supports [`jsconfig.json`](https://code.visualstudio.com/docs/languages/jsconfig), if one is present.
+:::
+
+### Setting up node
+
+Pants will by default download a distribution of `node` according to the
+[`nodejs` subsystem](../../../reference/subsystems/nodejs) configuration. If you instead wish to use a locally installed
+version (for example, 18.0.0 managed by `nvm` and its `.nvmrc` file), the following will get you there:
+
+```toml tab={"label": "pants.toml"}
+[nodejs]
+known_versions = [] # Assign this to the empty list to ensure Pants never downloads.
+version = "v18.0.0"
+search_path = ["<NVM_LOCAL>"]
+```
+
+```txt tab={"label": ".nvmrc"}
+v18.0.0
+```
+
+### Setting up a package manager
+
+To set a package manager project-wide, do the following:
+
+```toml title="pants.toml"
+[nodejs]
+package_manager = "pnpm" # or yarn, or npm.
+```
+
+You can instead opt to use the [`package.json#packageManager`](./package.mdx#package-manager) field for this setting.
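+
+For example, a minimal sketch (the package name and the pinned version are illustrative):
+
+```json title="package.json"
+{
+  "name": "@my-company/pkg",
+  "packageManager": "pnpm@9.1.0"
+}
+```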
+
+Regardless of the setting, Pants uses the [`corepack`](https://github.com/nodejs/corepack) version distributed with the Node
+version you have chosen in order to install and manage package managers.
\ No newline at end of file
diff --git a/versioned_docs/version-2.24/docs/javascript/overview/index.mdx b/versioned_docs/version-2.24/docs/javascript/overview/index.mdx
new file mode 100644
index 000000000..5e5c9d078
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/javascript/overview/index.mdx
@@ -0,0 +1,29 @@
+---
+ title: Javascript overview
+ sidebar_position: 0
+---
+
+Pants's support for Javascript.
+
+:::caution Javascript support is beta stage
+We are done implementing most functionality for Pants's Javascript support
+([tracked here](https://github.com/pantsbuild/pants/labels/backend%3A%20javascript)).
+However, there may be use cases that we aren't yet handling.
+:::
+
+The Javascript and NodeJS ecosystem has a seemingly endless number of frameworks and tools,
+all orchestrated via package managers.
+
+Pants employs a wrapping approach, with a thin caching layer applied on top of the currently supported package managers:
+`npm`, `pnpm` and `yarn`.
+
+Features for Javascript:
+
+- Caching the results of your test scripts and build scripts,
+  making the latter available in your Pants workflows as [`resources`](../../reference/targets/resource) and
+  package artifacts.
+- A consistent interface for all languages/tools in your repository,
+  such as being able to run `pants fmt lint check test package`.
+- [Remote execution and remote caching](../../using-pants/remote-caching-and-execution/index.mdx).
+- [Advanced project introspection](../../using-pants/project-introspection.mdx),
+  such as finding all code that transitively depends on a certain package.
\ No newline at end of file
diff --git a/versioned_docs/version-2.24/docs/javascript/overview/lockfiles.mdx b/versioned_docs/version-2.24/docs/javascript/overview/lockfiles.mdx
new file mode 100644
index 000000000..c4e9453d8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/javascript/overview/lockfiles.mdx
@@ -0,0 +1,65 @@
+---
+ title: Lockfiles
+ sidebar_position: 2
+---
+
+Package manager lockfile integration
+
+---
+
+Third-party dependencies are specified in the `package.json` fields.
+Each package manager vendors a lockfile format specific to that package manager. Pants knows of this
+lockfile and models it as a "resolve".
+
+Resolves are the only way to deal with dependencies within Pants, and no extra configuration is required.
+
+You can, however, name your resolves/lockfiles. The resolve name is otherwise auto-generated.
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.experimental.javascript"
+]
+
+[nodejs.resolves]
+"package-lock.json" = "my-lock"
+```
+
+You generate the lockfile as follows:
+
+```shell title="Bash"
+$ pants generate-lockfiles
+19:00:39.26 [INFO] Completed: Generate lockfile for my-lock
+19:00:39.29 [INFO] Wrote lockfile for the resolve `my-lock` to package-lock.json
+```
+
+## Using lockfiles for tools
+
+To ensure that the same version of tooling you have specified in `package.json` is used with a NodeJS-powered tool,
+specify the resolve name for the tool.
+E.g., for the Prettier linter:
+
+```toml tab={"label": "pants.toml"}
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.experimental.javascript",
+  "pants.backend.experimental.javascript.lint.prettier",
+]
+
+[prettier]
+install_from_resolve = "nodejs-default"
+```
+
+```json tab={"label": "package.json"}
+{
+  "name": "@my-company/pkg",
+  "devDependencies": {
+    "prettier": "^2.6.2"
+  }
+}
+```
+
+```python tab={"label": "BUILD"}
+package_json(name="pkg")
+```
diff --git a/versioned_docs/version-2.24/docs/javascript/overview/package.mdx b/versioned_docs/version-2.24/docs/javascript/overview/package.mdx
new file mode 100644
index 000000000..781f6c5f8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/javascript/overview/package.mdx
@@ -0,0 +1,67 @@
+---
+ title: package.json
+ sidebar_position: 3
+---
+
+package.json parsing and scripts integration
+
+---
+
+As mentioned in the [overview introduction](./index.mdx), Pants's approach to enabling support for Javascript is to
+be a thin caching layer on top of your current tooling.
+
+Refer to the [example repository](https://github.com/pantsbuild/example-javascript) for example usage.
+
+### Package manager
+
+Pants uses `corepack` to manage package manager versions and installation. Like `corepack`, Pants respects
+the experimental "packageManager" feature in `package.json` files.
+
+```json title="package.json"
+{
+  "name": "@my-company/pkg",
+  "packageManager": "yarn@1.22.22"
+}
+```
+
+This setting will ensure that all scripts invoked for this `package.json`, and any
+[workspaces managed by this `package.json`](./workspaces.mdx), will use this particular version of `yarn`.
+It can be more convenient to define a project-level
+[package manager](./enabling-javascript-support.mdx#setting-up-a-package-manager).
+
+:::tip Choosing between `pants.toml` or `package.json` for package manager version configuration
+In general, if your team runs all tooling via Pants, using `pants.toml` reduces boilerplate in cases where you maintain
+multiple packages. If your team mixes usage of Pants and "bare" package manager invocations, `package.json#packageManager`
+is the safer option.
+:::
+
+### Testing
+
+By default, Pants assumes that the `package.json` mapped by a `package_json` target includes a test script, e.g.
+
+```json title="package.json"
+{
+  "name": "@my-company/pkg",
+  "scripts": {
+    "test": "jest"
+  },
+  "devDependencies": {
+    "jest": "^29.5.0"
+  }
+}
+```
+
+and will use this script to execute your tests when running `pants test ::`.
+See [Goal arguments](../../using-pants/key-concepts/goals.mdx#goal-arguments) for the normal techniques for telling Pants what to
+run on.
+
+To enable configurability, the build symbol [`node_test_script`](../../../reference/build-file-symbols/node_test_script)
+contains options for changing the entry point from "test" and for enabling coverage reporting.
+
+### Packaging
+
+Similarly, build scripts can be introduced to Pants via the
+[`node_build_script`](../../../reference/build-file-symbols/node_build_script) build symbol. This is intended to be used
+as a way to introduce artifacts generated via bundlers and/or compilers installed and run via your package manager.
+The result can then be consumed by other targets, either as `resource` targets that can be depended on,
+or as a package for the [docker backend](../../docker/index.mdx).
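+
+As a sketch of what this can look like in a `BUILD` file (the `build` script name and the `dist` output directory are illustrative; see the `node_build_script` reference for the authoritative fields):
+
+```python title="BUILD"
+package_json(
+    name="pkg",
+    scripts=[
+        # Runs the package.json "build" script in a sandbox and captures
+        # everything written to `dist/` as this target's output.
+        node_build_script(entry_point="build", output_directories=["dist"]),
+    ],
+)
+```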
diff --git a/versioned_docs/version-2.24/docs/javascript/overview/workspaces.mdx b/versioned_docs/version-2.24/docs/javascript/overview/workspaces.mdx
new file mode 100644
index 000000000..b6769c3e2
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/javascript/overview/workspaces.mdx
@@ -0,0 +1,22 @@
+---
+ title: Monorepo workspaces
+ sidebar_position: 1
+---
+Package manager workspace management
+
+---
+
+Modern versions of package managers introduce similar
+concepts of "workspaces": a list of NodeJS packages all contained within the same code repository.
+
+Pants supports all three flavors of package managers and understands the configuration
+settings specific to each tool:
+
+- [pnpm](https://pnpm.io/workspaces): `pnpm-workspace.yaml`
+- [yarn](https://yarnpkg.com/features/workspaces) and [npm](https://docs.npmjs.com/cli/v10/using-npm/workspaces): the
+  "workspaces" setting in `package.json`
+
+:::tip Use workspaces!
+The Pants team encourages utilizing this project setup, so that you only have to deal with and maintain
+one [resolve](./lockfiles.mdx). Pants aims to provide full integration with these monorepo settings.
+:::
\ No newline at end of file
diff --git a/versioned_docs/version-2.24/docs/jvm/_category_.json b/versioned_docs/version-2.24/docs/jvm/_category_.json
new file mode 100644
index 000000000..25b92a6af
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/jvm/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "JVM",
+  "position": 7
+}
diff --git a/versioned_docs/version-2.24/docs/jvm/java-and-scala.mdx b/versioned_docs/version-2.24/docs/jvm/java-and-scala.mdx
new file mode 100644
index 000000000..613e3911f
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/jvm/java-and-scala.mdx
@@ -0,0 +1,527 @@
+---
+ title: Java and Scala
+ sidebar_position: 0
+---
+
+Pants's support for Java and Scala.
+
+---
+
+:::caution Java and Scala support is beta stage
+We are done implementing most functionality for Pants's Java and Scala support ([tracked here](https://github.com/pantsbuild/pants/labels/lang-jvm)). However, there may be use cases that we aren't yet handling.
+
+Please share feedback for what you need to use Pants with your JVM project by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](/community/members)!
+:::
+
+:::note Example Java and Scala repository
+Check out [github.com/pantsbuild/example-jvm](https://github.com/pantsbuild/example-jvm) to try out Pants's Java and Scala support.
+:::
+
+## Initial setup
+
+First, activate the relevant backends in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  # Each backend can be used independently, so there is no need to enable Scala if you
+  # have a pure-Java repository (or vice versa).
+  "pants.backend.experimental.java",
+  "pants.backend.experimental.scala",
+]
+```
+
+Then run [`pants tailor ::`](../getting-started/initial-configuration.mdx#5-generate-build-files) to generate BUILD files. This will create `java_sources` and `scala_sources` targets in every directory containing library code, as well as test targets like `scalatest_tests` and `junit_tests` for filenames that look like tests.
+
+```
+❯ pants tailor ::
+Created src/jvm/org/pantsbuild/example/app/BUILD:
+  - Add scala_sources target app
+Created src/jvm/org/pantsbuild/example/lib/BUILD:
+  - Add java_sources target lib
+Created tests/jvm/org/pantsbuild/example/lib/BUILD:
+  - Add scalatest_tests target lib
+```
+
+You can run `pants list ::` to see all targets in your project:
+
+```
+❯ pants list ::
+...
+src/jvm/org/pantsbuild/example/app:app
+src/jvm/org/pantsbuild/example/app/ExampleApp.scala
+src/jvm/org/pantsbuild/example/lib:lib
+src/jvm/org/pantsbuild/example/lib/ExampleLib.java
+tests/jvm/org/pantsbuild/example/lib:lib
+tests/jvm/org/pantsbuild/example/lib/ExampleLibSpec.scala
+```
+
+### Choosing JDK and Scala versions
+
+Pants `2.11.x` adds support for choosing JDK and Scala versions per target in your repository, but to reduce the amount of boilerplate required, most users set repository-wide defaults in `pants.toml`, and then only override them when necessary for particular targets.
+
+#### JDK
+
+JDKs used by Pants are automatically fetched using [Coursier](https://get-coursier.io/), and are chosen using the [`[jvm].jdk` setting](../../reference/subsystems/jvm.mdx#jdk) to set a repository-wide default.
+
+To override the default on a particular target, you can use the [`jdk=` field](../../reference/targets/java_source.mdx#jdk). It can be useful to use the [`parametrize` builtin](../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) with the `jdk=` field, particularly to run test targets under multiple JDKs.
+
+#### Scala version
+
+The Scala version to use is configured on a resolve-by-resolve basis (see the "Third-party dependencies" section below) using the [`[scala].version_for_resolve` option](../../reference/subsystems/scala.mdx#version_for_resolve). The default Scala version for your repository will thus be whichever Scala version is configured for the "default" resolve, which is configured by the [`[jvm].default_resolve` option](../../reference/subsystems/jvm#default_resolve).
+
+To use multiple Scala versions in a repository, you would define multiple resolves, and then adjust the [`resolve` field](../../reference/targets/scalatest_test.mdx#resolve) of any targets which should be used with the non-`default_resolve` resolve.
+
+To cross-build a set of Scala targets for multiple Scala versions, you can use the [`parametrize` builtin](../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) with the `resolve=` field of the target and its dependencies.
+
+:::caution A jvm_artifact for the scala-library artifact is explicitly required
+The Scala backend currently requires that a `jvm_artifact` target for the `org.scala-lang:scala-library` Scala runtime be present in any resolve used for Scala. If such a `jvm_artifact` is missing, Pants will error. Pants will automatically inject a dependency on the runtime. (This target may be automatically supplied by Pants in a future version, but that is not currently implemented.)
+:::
+
+### First-party dependencies
+
+In many cases, the dependencies of your first-party code are automatically inferred via [dependency inference](https://blog.pantsbuild.org/automatically-unlocking-concurrent-builds-and-fine-grained-caching-on-the-jvm-with-dependency-inference/) based on your `import` statements. If you do need to declare additional dependencies for any reason, you can do so using Pants' [syntax for declaring dependencies for targets](../using-pants/key-concepts/targets-and-build-files.mdx).
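+
+For example, a sketch of adding an explicit dependency that inference cannot see (the addresses here are illustrative):
+
+```python title="src/jvm/org/pantsbuild/example/app/BUILD"
+scala_sources(
+    name="app",
+    dependencies=[
+        # E.g. a library that is only loaded reflectively at runtime,
+        # so there is no `import` for inference to find.
+        "src/jvm/org/pantsbuild/example/lib:lib",
+    ],
+)
+```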
+
+### Third-party dependencies and lockfiles
+
+Third-party dependencies (i.e. those from repositories like [Maven central](https://search.maven.org/)) are also automatically inferred via dependency inference, but must first be declared once per repository as [`jvm_artifact` targets](../../reference/targets/jvm_artifact.mdx):
+
+```python title="BUILD"
+jvm_artifact(
+    group="com.google.guava",
+    artifact="guava",
+    version="31.0.1-jre",
+    # See the callout below for more information on the `packages` argument.
+    packages=["com.google.common.**"],
+)
+```
+
+If your third-party dependency is a Scala library, you should use the `scala_artifact` target instead, as follows:
+
+```python title="BUILD"
+scala_artifact(
+    group="org.typelevel",
+    artifact="cats-core",
+    version="2.9.0",
+    packages=["cats.**"],
+)
+```
+
+Pants will use the right artifact for the Scala version corresponding to the specified resolve (or the default one).
+
+Pants requires use of a lockfile for third-party dependencies. After adding or editing `jvm_artifact` targets, you will need to update affected lockfiles by running `pants generate-lockfiles`. The default lockfile is located at `3rdparty/jvm/default.lock`, but it can be relocated (as well as additional resolves declared) via the [`[jvm].resolves` option](../../reference/subsystems/jvm.mdx#resolves).
+
+:::note Third-party symbols and the `packages` argument
+To efficiently determine which symbols are provided by third-party code (i.e., without hitting the network in order to compute dependencies in the common case), Pants relies on a static mapping of which artifacts provide which symbols, and defaults to treating each `jvm_artifact` as providing symbols within its `group`.
+
+The `packages` argument allows you to override which symbols a `jvm_artifact` provides. See the [`jvm_artifact` docs](../../reference/targets/jvm_artifact.mdx#packages) for more information.
+:::
+
+To enable better IDE integration, Pants has a `jvm_artifacts` target generator that
+generates `jvm_artifact` targets for you.
+
+### `pom.xml`
+
+The `jvm_artifacts()` target generator parses a
+[`pom.xml`](https://maven.apache.org/guides/introduction/introduction-to-the-pom.html)
+to produce a `jvm_artifact` target for each `dependency` in
+`project.dependencies`.
+
+For example:
+
+```xml tab={"label":"pom.xml"}
+<project>
+  <dependencies>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>33.2.0-jre</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.14.0</version>
+    </dependency>
+  </dependencies>
+</project>
+```
+
+```python tab={"label":"BUILD"}
+# This will generate two targets:
+#
+# - //:reqs#guava
+# - //:reqs#commons-lang3
+jvm_artifacts(name="reqs")
+```
+
+The above target generator is spiritually equivalent to this:
+
+```python title="BUILD"
+jvm_artifact(
+    group="com.google.guava",
+    artifact="guava",
+    version="33.2.0-jre",
+)
+jvm_artifact(
+    group="org.apache.commons",
+    artifact="commons-lang3",
+    version="3.14.0",
+)
+```
+
+To define `jvm_artifact` packages, use the `package_mapping` field:
+
+```python tab={"label":"BUILD"}
+jvm_artifacts(
+    name="reqs",
+    package_mapping={
+        "com.google.guava:guava": [
+            "com.google.common.**",
+        ],
+        "org.apache.commons:commons-lang3": [
+            "org.apache.commons.lang3.**",
+        ],
+    },
+)
+```
+
+```xml tab={"label":"pom.xml"}
+<project>
+  <dependencies>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>33.2.0-jre</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.14.0</version>
+    </dependency>
+  </dependencies>
+</project>
+```
+
+### `resource` targets
+
+To have your code [load files as "resources"](https://docs.oracle.com/javase/8/docs/technotes/guides/lang/resources.html):
+
+1. Add a `resource` or `resources` target with the relevant files in the `source` / `sources` field, respectively.
+2. Ensure that [an appropriate `source_root`](../using-pants/key-concepts/source-roots.mdx) is detected for the `resources` target, in order to trim the relevant prefix from the filename to align with the layout of your JVM packages.
+3. Add that target to the `dependencies` field of the relevant JVM target (usually the one that uses the JVM APIs to load the resource).
+
+For example:
+
+```toml tab={"label":"pants.toml"}
+[source]
+# In order for the resource to be loadable as `org/pantsbuild/example/lib/hello.txt`,
+# the `/src/jvm/` prefix needs to be stripped.
+root_patterns = ["/src/*"]
+```
+
+```python tab={"label":"src/jvm/org/pantsbuild/example/lib/BUILD"}
+java_sources(dependencies=[":hello"])
+
+resources(name="hello", sources=["hello.txt"])
+```
+
+```java tab={"label":"src/jvm/org/pantsbuild/example/lib/Loader.java"}
+package org.pantsbuild.example.lib;
+
+import com.google.common.io.Resources;
+
+public class Loader {
+  public static String load() {
+    ... = Resources.getResource(Loader.class, "hello.txt");
+  }
+}
+```
+
+```text tab={"label":"src/jvm/org/pantsbuild/example/lib/hello.txt"}
+Hello world!
+```
+
+## Compile code
+
+To manually check that sources compile, use `pants check`:
+
+```
+# Check a single file
+❯ pants check src/jvm/org/pantsbuild/example/lib/ExampleLib.java
+
+# Check files located recursively under a directory
+❯ pants check src/jvm::
+
+# Check the whole repository
+❯ pants check ::
+```
+
+## Run tests
+
+To run tests, use `pants test`:
+
+```
+# Run a single test file
+❯ pants test tests/jvm/org/pantsbuild/example/lib/ExampleLibSpec.scala
+
+# Test all files in a directory
+❯ pants test tests/jvm::
+
+# Test the whole repository
+❯ pants test ::
+```
+
+You can also pass through arguments to the test runner with `--`, e.g.:
+
+```
+# Pass `-z hello` to scalatest in order to test a single method
+❯ pants test tests/jvm/org/pantsbuild/example/lib/ExampleLibSpec.scala -- -z hello
+```
+
+### Timeouts
+
+Pants can cancel tests which take too long. This is useful to prevent tests from hanging indefinitely.
+
+To add a timeout, set the `timeout` field to an integer value of seconds in any of the supported targets, like this:
+
+```python title="BUILD"
+java_junit_test(name="java_test", source="Test.java", timeout=120)
+scala_junit_test(name="scala_junit_test", source="Test.scala", timeout=100)
+scalatest_test(name="scalatest_test", source="Spec.scala", timeout=80)
+```
+
+When you set timeout on any of the target generators (e.g. `java_junit_tests`, `scalatest_tests`), the same timeout will apply to every generated target. To set different timeouts per file, use the `overrides` field:
+
+```python title="BUILD"
+java_junit_tests(
+    name="tests",
+    overrides={
+        "MyClass1Test.java": {"timeout": 20},
+        ("MyClass2Test.java", "MyClass3Test.java"): {"timeout": 35},
+    },
+)
+```
+
+You can also set a default value and a maximum value in `pants.toml`:
+
+```toml title="pants.toml"
+[test]
+timeout_default = 60
+timeout_maximum = 600
+```
+
+If a target sets its `timeout` higher than `[test].timeout_maximum`, Pants will use the value in `[test].timeout_maximum`.
+
+Use the option `pants test --no-timeouts` to temporarily disable timeouts, e.g. when debugging.
+
+### Retries
+
+Pants can automatically retry failed tests. This can help keep your builds passing even with flaky tests, like integration tests.
+
+```toml tab={"label":"pants.toml"}
+[test]
+attempts_default = 3
+```
+
+### Setting environment variables
+
+Test runs are _hermetic_, meaning that they are stripped of the parent `pants` process's environment variables. This is important for reproducibility, and it also increases cache hits.
+
+To add any arbitrary environment variable back to the process, you can either add the environment variable to the specific tests with the `extra_env_vars` field on `junit_test` / `junit_tests` / `scala_junit_test` / `scala_junit_tests` / `scalatest_test` / `scalatest_tests` targets or to all your tests with the `[test].extra_env_vars` option. Generally, prefer the `extra_env_vars` field so that more of your tests are hermetic.
+
+With both `[test].extra_env_vars` and the `extra_env_vars` field, you can either hardcode a value or leave off a value to "allowlist" it and read from the parent `pants` process's environment.
+
+```toml tab={"label":"pants.toml"}
+[test]
+extra_env_vars = ["VAR1", "VAR2=hardcoded_value"]
+```
+
+```python tab={"label":"project/BUILD"}
+junit_tests(
+    name="tests",
+    # Adds to all generated `junit_test` targets,
+    # i.e. each file in the `sources` field.
+    extra_env_vars=["VAR3", "VAR4=hardcoded"],
+    # Even better, use `overrides` to be more granular.
+    overrides={
+        "StrUtilTest.java": {"extra_env_vars": ["VAR"]},
+        ("DirUtilTest.java", "OSUtilTest.java"): {"extra_env_vars": ["VAR5"]},
+    },
+)
+```
+
+## Repl
+
+Pants supports the [Scala repl](https://docs.scala-lang.org/overviews/scala-book/scala-repl.html), but
+doesn't yet autodetect it based on the active backend: it always defaults to using "python"
+(see [#14133](https://github.com/pantsbuild/pants/issues/14133)), so for now you'll have to
+explicitly add `--repl-shell=scala` to the command line:
+
+```
+❯ pants repl --repl-shell=scala src/jvm/org/pantsbuild/example/app/ExampleApp.scala
+Welcome to Scala 2.13.8 (OpenJDK 64-Bit Server VM, Java 11.0.21).
+Type in expressions for evaluation. Or try :help.
+
+scala> import org.pantsbuild.example.app.ExampleApp
+scala> ExampleApp.main(Array())
+Hello World!
+scala>
+```
+
+Alternatively, you can set "scala" to be the default repl in `pants.toml`:
+
+```toml tab={"label":"pants.toml"}
+[repl]
+shell = "scala"
+```
+
+## Protobuf
+
+There's support for [ScalaPB](https://scalapb.github.io/) and [protoc Java generated code](https://developers.google.com/protocol-buffers/docs/reference/java-generated), currently in beta stage. To enable them, activate the relevant backends in `pants.toml`:
+
+```toml
+[GLOBAL]
+backend_packages = [
+  "pants.backend.experimental.codegen.protobuf.scala",
+  "pants.backend.experimental.codegen.protobuf.java",
+]
+```
+
+This adds the new `protobuf_source` target, which you can confirm by running `pants help protobuf_source`.
+
+## Lint and Format
+
+`scalafmt` and `Google Java Format` can be enabled by adding the `pants.backend.experimental.scala.lint.scalafmt` and `pants.backend.experimental.java.lint.google_java_format` backends (respectively) to `backend_packages` in the `[GLOBAL]` section of `pants.toml`.
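+For example, alongside the base backends from the initial setup:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  "pants.backend.experimental.java",
+  "pants.backend.experimental.scala",
+  "pants.backend.experimental.scala.lint.scalafmt",
+  "pants.backend.experimental.java.lint.google_java_format",
+]
+```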
+
+Once enabled, `lint` and `fmt` will check and automatically reformat your code:
+
+```
+# Format this directory and all subdirectories
+❯ pants fmt src/jvm::
+
+# Check that the whole project is formatted
+❯ pants lint ::
+
+# Format all changed files
+❯ pants --changed-since=HEAD fmt
+```
+
+### Fix Scala code
+
+In addition to the previously mentioned tools, `scalafix` can also be enabled by adding the `pants.backend.experimental.scala.lint.scalafix` backend to `backend_packages` in the `[GLOBAL]` section of `pants.toml`. However, to take full advantage of it, additional settings are required.
+
+If we want to use Scalafix's semantic rules, Scalafix needs to be able to find `.semanticdb` compiled files in our classpath. In versions prior to Scala 3, this is achieved by adding the `semanticdb` scalac plugin to our build. Find the right version of the plugin for the Scala version you are using and add the following targets:
+
+```python
+scala_artifact(
+    name="semanticdb-jar",
+    group="org.scalameta",
+    artifact="semanticdb-scalac",
+    version="",  # Use the semanticdb-scalac version matching your Scala version.
+    crossversion="full",
+)
+
+scalac_plugin(name="semanticdb", artifact=":semanticdb-jar")
+```
+
+Now you will need to add the `scalac_plugins` field to your Scala targets, like in the following:
+
+```python
+scala_sources(scalac_plugins=["semanticdb"])
+```
+
+Alternatively, you could add `semanticdb` to the `[scalac].plugins_for_resolve` setting:
+
+```toml title="pants.toml"
+[scalac.plugins_for_resolve]
+jvm-default = "semanticdb"
+```
+
+:::note Scalafix and Scala 3
+At the moment the support for Scala 3 in Scalafix is limited: most of the syntactic rules work, but not as many on the semantic front.
+
+Despite those rough edges, Scalafix is a great linting tool for Scala 3. Just note that the setup is different from prior versions: instead of adding a scalac plugin to the build, we only need to add the `-Xsemanticdb` flag to our `[scalac].args` settings to enable the generation of `.semanticdb` compiled files.
+:::
+
+## Working in an IDE
+
+Pants supports loading Java and Scala projects in IntelliJ via the [BSP protocol](https://build-server-protocol.github.io/) (which should ease VSCode support [via Metals](https://scalameta.org/metals/docs/editors/vscode), although it is not yet supported).
+
+### Usage
+
+After Setup (see below), and after IntelliJ has finished indexing your code, you should be able to:
+
+- Use goto definition and other symbol-index-using operations.
+- Run test classes, which will first compile them with Pants (and render compile failures if not), and then run them in the foreground with IntelliJ's test runner.
+
+### Setup
+
+#### First time setup (per-repository)
+
+1. Use a version of Pants containing BSP support:
+   1. Versions after `2.12.0a0` support code indexing.
+   2. Versions after `2.13.0.dev2` support test running.
+2. Add a `.gitignore` entry for the `.bsp` directory:
+
+```text tab={"label":".gitignore"}
+# This directory is not committed: each BSP user will create it independently.
+/.bsp/
+```
+
+```text tab={"label":"..."}
+
+```
+
+3. Add a "group" config file like the one below, adjusting the address specs and resolve name as appropriate.
+
+```toml tab={"label":"bsp-groups.toml"}
+# A "group" named `default`.
+# Multiple groups are supported: consider creating a group per project or team.
+[groups.default]
+addresses = [
+  "src/jvm::",
+  "tests/jvm::",
+]
+
+resolve = "jvm:jvm-default"
+```
+
+```text tab={"label":"..."}
+
+```
+
+4. Add to `pants.toml` an option to point at the BSP configuration file:
+
+```toml tab={"label":"pants.toml"}
+[experimental-bsp]
+groups_config_files = ["bsp-groups.toml"]
+```
+
+```text tab={"label":"..."}
+
+```
+
+#### Per-user setup
+
+1. Run `pants experimental-bsp` to write the BSP connection file and script.
+2. Ensure that you have the IntelliJ Scala plugin installed (it provides BSP support).
+3. In IntelliJ, choose `File > New > Project from Existing Sources…`
+4. Choose the root of the repository for the project from the file dialog.
+5. In the "Import Project" dialog, choose "Import project from external model" and select "BSP."
+
+![](https://files.readme.io/47ad6e7-Screen_Shot_2022-05-13_at_09.40.33.png)
+
+6. Click "Create".
+7. IntelliJ will invoke Pants to run the BSP server and synchronize state to produce IntelliJ modules.
+
+### Troubleshooting
+
+- If you see errors related to missing tools, you can set additional environment variables for BSP invocations in `pants.toml` under the `[experimental-bsp].runner_env_vars` option, and then re-run `pants experimental-bsp`.
+  - This is necessary because IntelliJ is invoked on macOS generally by launchd and not from the shell. Any `PATH` set in the shell will not be passed to the Pants BSP server in that case.
+  - If this is developer-specific, consider setting `--experimental-bsp-runner-env-vars` as a command-line option, or using a `.pantsrc` file.
+- After configuration changes, or after adding new third-party dependencies, you will generally need to reload the BSP configuration ([for now](https://github.com/pantsbuild/pants/issues/15054)), which you can do with this button in the side panel:
+
+![](https://files.readme.io/b6db23d-Screen_Shot_2022-05-13_at_09.50.28.png)
+
+- When filing bug reports, include the log output of the Pants instance hosting the BSP server, which goes to `.pants.d/bsp/logs/stderr.log`.
diff --git a/versioned_docs/version-2.24/docs/jvm/kotlin.mdx b/versioned_docs/version-2.24/docs/jvm/kotlin.mdx
new file mode 100644
index 000000000..8363b1fd4
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/jvm/kotlin.mdx
@@ -0,0 +1,251 @@
+---
+ title: Kotlin
+ sidebar_position: 1
+---
+
+Kotlin support for Pants.
+
+---
+
+:::caution Kotlin support is alpha stage
+Kotlin support in Pants is still under active development, but currently supports compilation and testing. It has been tested with Kotlin v1.6.20.
+
+Please share feedback for what you need to use Pants with your Kotlin project by either [opening a GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](/community/members)!
+:::
+
+:::note Example Kotlin repository
+Check out [github.com/pantsbuild/example-kotlin](https://github.com/pantsbuild/example-kotlin) to try a sample Pants project with Kotlin support.
+:::
+
+## Overview
+
+[Kotlin](https://kotlinlang.org/) is a programming language from JetBrains that runs on the JVM and certain other platforms. The Kotlin backend in Pants supports compilation, testing, and linting of [Kotlin code for the JVM](https://kotlinlang.org/docs/server-overview.html). (The other Kotlin platforms, including [Kotlin Multiplatform Mobile](https://kotlinlang.org/docs/multiplatform.html) and [Kotlin/JS](https://kotlinlang.org/docs/js-overview.html), are not currently supported, nor are there currently any plans to support them.)
+ +## Initial Setup + +First, activate the Kotlin backend in `pants.toml` plus the `ktlint` backend if you would like to use [`ktlint`](https://ktlint.github.io/) for code formatting and linting: + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.experimental.kotlin", + + # Activate the following backend if you want to use `ktlint` for code formatting and linting. + "pants.backend.experimental.kotlin.lint.ktlint", +] +``` + +### Setting up targets + +Run [`pants tailor ::`](../getting-started/initial-configuration.mdx#5-generate-build-files) to generate BUILD files. This will create `kotlin_sources` targets in every directory containing library code, as well as `kotlin_junit_tests` targets for filenames that look like tests. + +``` +❯ pants tailor :: +Created src/jvm/org/pantsbuild/example/app/BUILD: + - Add kotlin_sources target app +Created src/jvm/org/pantsbuild/example/json/BUILD: + - Add kotlin_sources target json +Created src/jvm/org/pantsbuild/example/lib/BUILD: + - Add java_sources target lib +``` + +You can run `pants list ::` to see all targets in your project: + +``` +❯ pants list :: +... +src/jvm/org/pantsbuild/example/app:app +src/jvm/org/pantsbuild/example/app/ExampleApp.kt +src/jvm/org/pantsbuild/example/json:json +src/jvm/org/pantsbuild/example/json/JsonExample.kt +src/jvm/org/pantsbuild/example/lib:lib +src/jvm/org/pantsbuild/example/lib/ExampleLib.java +``` + +### Choosing JDK and Kotlin versions + +Pants supports choosing the JDK and Kotlin versions per target in your repository. To reduce the amount of boilerplate required, however, most users set repository-wide defaults in `pants.toml`, and then only override them when necessary for particular targets. + +#### JDK + +JDKs used by Pants are automatically fetched using [Coursier](https://get-coursier.io/), and are chosen using the [`[jvm].jdk` option](../../reference/subsystems/jvm.mdx#jdk) to set a repository-wide default. + +To override the default on a particular target, you can use the [`jdk=` field](../../reference/targets/kotlin_source.mdx#jdk). It can be useful to use the [`parametrize` builtin](../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) with the `jdk=` field, particularly to run test targets under multiple JDKs. + +#### Kotlin version + +The Kotlin version to use is configured on a resolve-by-resolve basis (see the "Third-party dependencies" section below) using the [`[kotlin].version_for_resolve` option](../../reference/subsystems/kotlin.mdx#version_for_resolve). The default Kotlin version for your repository will thus be whichever Kotlin version is configured for the "default" resolve, which is configured by the [`[jvm].default_resolve` option](../../reference/subsystems/jvm#default_resolve). + +Each resolve must contain the following jars for the Kotlin runtime with the version matching the version specified for the resolve in the `[kotlin].version_for_resolve` option: + +- `org.jetbrains.kotlin:kotlin-stdlib` +- `org.jetbrains.kotlin:kotlin-reflect` +- `org.jetbrains.kotlin:kotlin-script-runtime` + +To use multiple Kotlin versions in a repository, you would define multiple resolves, and then adjust the [`resolve` field](../../reference/targets/kotlin_junit_test.mdx#resolve) of any targets which should be used with the non-`default_resolve` resolve. 
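+
+As a sketch, a repository pinning two resolves to different Kotlin versions might look like the following (the lockfile paths, the second resolve's name, and the `1.7.21` version are illustrative):
+
+```toml title="pants.toml"
+[jvm]
+default_resolve = "jvm-default"
+
+[jvm.resolves]
+jvm-default = "3rdparty/jvm/default.lock"
+kotlin17 = "3rdparty/jvm/kotlin17.lock"
+
+[kotlin.version_for_resolve]
+jvm-default = "1.6.20"
+kotlin17 = "1.7.21"
+```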
+
+To cross-build a set of Kotlin targets for multiple Kotlin versions, you can use the [`parametrize` builtin](../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) with the `resolve=` field of the target and its dependencies.
+
+:::caution `jvm_artifact` targets for the Kotlin runtime must be explicitly defined.
+The Kotlin backend currently requires that a `jvm_artifact` target for each of the Kotlin runtime jars be present in any resolve used for Kotlin. If any of the required `jvm_artifact` targets are missing, Pants will error. Pants will automatically inject a dependency on the runtime into Kotlin targets. (These targets may be automatically supplied by Pants in a future version, but that is not currently implemented.)
+:::
+
+### Dependencies
+
+#### First-party dependencies
+
+In many cases, the dependencies of your first-party code are automatically inferred via [dependency inference](https://blog.pantsbuild.org/automatically-unlocking-concurrent-builds-and-fine-grained-caching-on-the-jvm-with-dependency-inference/) based on `import` statements in the code. If you do need to declare additional dependencies for any reason, you can do so using Pants' [syntax for declaring dependencies for targets](../using-pants/key-concepts/targets-and-build-files.mdx).
+
+#### Third-party dependencies and lockfiles
+
+Third-party dependencies (i.e. those from repositories like [Maven central](https://search.maven.org/)) are also automatically inferred via dependency inference, but must first be declared once per repository as [`jvm_artifact` targets](../../reference/targets/jvm_artifact.mdx):
+
+```python title="BUILD"
+jvm_artifact(
+    group="com.google.guava",
+    artifact="guava",
+    version="31.0.1-jre",
+    # See the callout below for more information on the `packages` argument.
+    packages=["com.google.common.**"],
+)
+```
+
+Pants requires use of a lockfile for third-party dependencies. After adding or editing `jvm_artifact` targets, you will need to update affected lockfiles by running `pants generate-lockfiles`. The default lockfile is located at `3rdparty/jvm/default.lock`, but it can be relocated (as well as additional resolves declared) via the [`[jvm].resolves` option](../../reference/subsystems/jvm.mdx#resolves).
+
+:::note Third-party symbols and the `packages` argument
+To efficiently determine which symbols are provided by third-party code (i.e., without hitting the network in order to compute dependencies in the common case), Pants relies on a static mapping of which artifacts provide which symbols, and defaults to treating each `jvm_artifact` as providing symbols within its `group`.
+
+The `packages` argument allows you to override which symbols a `jvm_artifact` provides. See the [`jvm_artifact` docs](../../reference/targets/jvm_artifact.mdx#packages) for more information.
+:::
+
+#### `resource` targets
+
+To have your code [load files as "resources"](https://docs.oracle.com/javase/8/docs/technotes/guides/lang/resources.html):
+
+1. Add a `resource` or `resources` target with the relevant files in the `source` / `sources` field, respectively.
+2. Ensure that [an appropriate `source_root`](../using-pants/key-concepts/source-roots.mdx) is detected for the `resources` target, in order to trim the relevant prefix from the filename to align with the layout of your JVM packages.
+3. Add that target to the `dependencies` field of the relevant JVM target (usually the one that uses the JVM APIs to load the resource).
+
+For example:
+
+```toml tab={"label":"pants.toml"}
+[source]
+# In order for the resource to be loadable as `org/pantsbuild/example/lib/hello.txt`,
+# the `/src/jvm/` prefix needs to be stripped.
+root_patterns = ["/src/*"]
+```
+
+```python tab={"label":"src/jvm/org/pantsbuild/example/lib/BUILD"}
+kotlin_sources(dependencies=[":hello"])
+
+resources(name="hello", sources=["hello.txt"])
+```
+
+```kotlin tab={"label":"src/jvm/org/pantsbuild/example/lib/Loader.kt"}
+package org.pantsbuild.example.lib
+
+import com.google.common.io.Resources
+
+object Loader {
+    fun load() {
+        ... = Resources.getResource(Loader::class.java, "hello.txt")
+    }
+}
+```
+
+```text tab={"label":"src/jvm/org/pantsbuild/example/lib/hello.txt"}
+Hello world!
+```
+
+## Tasks
+
+### Compile code
+
+To manually check that sources compile, use `pants check`:
+
+```
+# Check a single file
+❯ pants check src/jvm/org/pantsbuild/example/lib/ExampleLib.kt
+
+# Check files located recursively under a directory
+❯ pants check src/jvm::
+
+# Check the whole repository
+❯ pants check ::
+```
+
+### Run tests
+
+To run tests, use `pants test`:
+
+```
+# Run a single test file
+❯ pants test tests/jvm/org/pantsbuild/example/lib/ExampleLibTest.kt
+
+# Test all files in and under a directory
+❯ pants test tests/jvm::
+
+# Test the whole repository
+❯ pants test ::
+```
+
+The Kotlin backend currently supports JUnit tests specified using the `kotlin_junit_tests` target type.
+
+#### Retries
+
+Pants can automatically retry failed tests. This can help keep your builds passing even with flaky tests, like integration tests.
+
+```toml tab={"label":"pants.toml"}
+[test]
+attempts_default = 3
+```
+
+#### Setting environment variables
+
+Test runs are _hermetic_, meaning that they are stripped of the parent `pants` process's environment variables. This is important for reproducibility, and it also increases cache hits.
+
+To add any arbitrary environment variable back to the process, you can either add the environment variable to the specific tests with the `extra_env_vars` field on `kotlin_junit_test` / `kotlin_junit_tests` targets or to all your tests with the `[test].extra_env_vars` option. Generally, prefer the `extra_env_vars` field so that more of your tests are hermetic.
+
+With both `[test].extra_env_vars` and the `extra_env_vars` field, you can either hardcode a value or leave off a value to "allowlist" it and read from the parent `pants` process's environment.
+
+```toml tab={"label":"pants.toml"}
+[test]
+extra_env_vars = ["VAR1", "VAR2=hardcoded_value"]
+```
+
+```python tab={"label":"project/BUILD"}
+kotlin_junit_tests(
+    name="tests",
+    # Adds to all generated `kotlin_junit_test` targets,
+    # i.e. each file in the `sources` field.
+    extra_env_vars=["VAR3", "VAR4=hardcoded"],
+    # Even better, use `overrides` to be more granular.
+    overrides={
+        "StrUtilTest.kt": {"extra_env_vars": ["VAR"]},
+        ("DirUtilTest.kt", "OSUtilTest.kt"): {"extra_env_vars": ["VAR5"]},
+    },
+)
+```
+
+### Lint and Format
+
+[`ktlint`](https://ktlint.github.io/) can be enabled by adding the `pants.backend.experimental.kotlin.lint.ktlint` backend to `backend_packages` in the `[GLOBAL]` section of `pants.toml`.
+
+Once enabled, `lint` and `fmt` will check and automatically reformat your code:
+
+```
+# Format this directory and all subdirectories
+❯ pants fmt src/jvm::
+
+# Check that the whole project is formatted
+❯ pants lint ::
+
+# Format all changed files
+❯ pants --changed-since=HEAD fmt
+```
+
+## Caveats
+
+The Kotlin backend is currently experimental, since many features are not implemented, including:
+
+- Kotlin modules. We would love to hear from Kotlin developers with advice on how modules are used and could potentially be supported by Pants.
+- Non-JVM backends, including [Kotlin Multiplatform Mobile](https://kotlinlang.org/docs/multiplatform.html) and [Kotlin/JS](https://kotlinlang.org/docs/js-overview.html)
diff --git a/versioned_docs/version-2.24/docs/python/_category_.json b/versioned_docs/version-2.24/docs/python/_category_.json
new file mode 100644
index 000000000..135b4c603
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Python",
+  "position": 5
+}
diff --git a/versioned_docs/version-2.24/docs/python/goals/_category_.json b/versioned_docs/version-2.24/docs/python/goals/_category_.json
new file mode 100644
index 000000000..b680714e8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Goals",
+  "position": 2
+}
diff --git a/versioned_docs/version-2.24/docs/python/goals/check.mdx b/versioned_docs/version-2.24/docs/python/goals/check.mdx
new file mode 100644
index 000000000..13f55299a
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/check.mdx
@@ -0,0 +1,341 @@
+---
+ title: check
+ sidebar_position: 0
+---
+
+How to use MyPy.
+
+---
+
+## Activating MyPy
+
+To opt-in, add `pants.backend.python.typecheck.mypy` to `backend_packages` in your config file.
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.python",
+  "pants.backend.python.typecheck.mypy",
+]
+```
+
+This will register a new `check` goal:
+
+```bash
+$ pants check helloworld/util/lang.py
+$ pants check ::
+```
+
+:::tip Benefit of Pants: typecheck Python 2-only and Python 3-only code at the same time
+MyPy determines which Python version to use based on its `python_version` option. If that's undefined, MyPy uses the interpreter the tool is run with. Because you can only use one config file at a time with MyPy, you cannot normally use `2.7` for part of your codebase and `3.6` for the rest; you must choose a single version.
+
+Instead, Pants will group your targets based on their [interpreter constraints](../overview/interpreter-compatibility.mdx), and run all the Python 2 targets together and all the Python 3 targets together. It will automatically set `python_version` to the minimum compatible interpreter; for example, a group with the constraint `["==2.7.*", ">3.6"]` will use `2.7`.
+
+To turn this off, you can still set `python_version` in `mypy.ini` or `--python-version`/`--py2` in `--mypy-args`; Pants will respect the value you set.
+:::
+
+### Hook up a MyPy config file
+
+Pants will automatically include your config file if it's located at `mypy.ini`, `.mypy.ini`, `setup.cfg`, or `pyproject.toml`.
+
+Otherwise, you must set the option `[mypy].config` for Pants to include the config file in the process's sandbox and to instruct MyPy to load it.
+ +```toml title="pants.toml" +[mypy] +config = "build-support/mypy.ini" +``` + +### Change the MyPy version + +Use the `install_from_resolve` option in the `[mypy]` scope: + +```toml title="pants.toml" +[python.resolves] +mypy = "3rdparty/python/mypy.lock" + +[mypy] +install_from_resolve = "mypy" +``` + +See [Lockfiles for tools](../overview/lockfiles.mdx#lockfiles-for-tools). + +### Incrementally adopt MyPy with `skip_mypy=True` + +You can tell Pants to skip running MyPy on certain files by adding `skip_mypy=True` to the relevant targets. + +```python title="project/BUILD" +# Skip MyPy for all the Python files in this directory +# (both test and non-test files). +python_sources(name="lib", skip_mypy=True) +python_tests(name="tests", skip_mypy=True) + +# To only skip certain files, use the `overrides` field. +python_sources( + name="lib", + overrides={ + "util.py": {"skip_mypy": True}, + # Use a tuple to specify multiple files. + ("user.py", "admin.py"): {"skip_mypy": True}, + }, +) +``` + +When you run `pants check ::`, Pants will skip any files belonging to skipped targets. + +:::caution MyPy may still try to check the skipped files! +The `skip_mypy` field only tells Pants not to provide the skipped files as direct input to MyPy. But MyPy, by default, will still try to check files that are [dependencies of the direct inputs](https://mypy.readthedocs.io/en/stable/running_mypy.html#following-imports). So if your skipped files are dependencies of unskipped files, they may still be checked. + +To change this behavior, use MyPy's [`--follow-imports` option](https://mypy.readthedocs.io/en/stable/command_line.html#cmdoption-mypy-follow-imports), typically by setting it to `silent`. You can do so either by adding it to the [`args` option](../../../reference/subsystems/mypy.mdx#args) in the `[mypy]` section of your Pants config file, or by setting it in [`mypy.ini`](https://mypy.readthedocs.io/en/stable/config_file.html). +::: + +### First-party type stubs (`.pyi` files) + +You can use [`.pyi` files](https://mypy.readthedocs.io/en/stable/stubs.html) for both first-party and third-party code. Include the `.pyi` files in the `sources` field for `python_source` / `python_sources` and `python_test` / `python_tests` targets. MyPy will use these stubs rather than looking at the implementation. + +Pants's dependency inference knows to infer a dependency both on the implementation and the type stub. You can verify this by running `pants dependencies path/to/file.py`. + +When writing stubs for third-party libraries, you may need to set up the `[source].root_patterns` option so that [source roots](../../using-pants/key-concepts/source-roots.mdx) are properly stripped. For example: + +```toml tab={"label":"pants.toml"} +[source] +root_patterns = ["mypy-stubs", "src/python"] +``` + +```python tab={"label":"mypy-stubs/colors.pyi"} +# Because we set `mypy-stubs` as a source root, this file will be +# stripped to be simply `colors.pyi`. MyPy will look at this file for +# imports of the `colors` module. + +def red(s: str) -> str: ... +``` + +```python tab={"label":"mypy-stubs/BUILD"} +python_sources(name="lib") +``` + +```python tab={"label":"src/python/project/app.py"} +from colors import red + +if __name__ == "__main__": + print(red("I'm red!")) +``` + +```python tab={"label":"src/python/project/BUILD"} +# Pants will infer a dependency both on the `ansicolors` requirement +# and our type stub. 
+python_sources(name="lib") +``` + +### Third-party type stubs + +You can install third-party type stubs (for example, `types-requests`) like [normal Python requirements](../overview/third-party-dependencies.mdx). Pants will infer a dependency on both the type stub and the actual dependency, for example, both `types-requests` and `requests`, which you can confirm by running `pants dependencies path/to/f.py`. + +Third-party type stubs must be installed in the same resolve as the third-party code they provide types for. + +### Add a third-party plugin + +Add any third-party MyPy plugins to a [custom lockfile](../overview/lockfiles.mdx#lockfiles-for-tools): + +```toml tab={"label":"pants.toml"} +[python.resolves] +mypy = "3rdparty/python/mypy-lock.txt" + +[mypy] +install_from_resolve = "mypy" +``` + +```text tab={"label":"3rdparty/python/mypy-requirements.txt"} +mypy==1.3.0 +pydantic==1.6.1 +``` + +```python tab={"label":"3rdparty/python/BUILD"} +python_requirements( + name="mypy", + source="mypy-requirements.txt", + resolve="mypy", +) +``` + +Then update your `mypy.ini` to load the plugin: + +```text title="mypy.ini" +[mypy] +plugins = + pydantic.mypy +``` + +For some plugins, such as `django-stubs`, you may need to always load certain source files, such as a `settings.py` file. You can make sure that this source file is always used by hijacking the `source_plugins` option, which allows you to specify targets whose `sources` should always be used when running MyPy. See the section below for more information about source plugins. + +For example, to fully use the `django-stubs` plugin, your setup might look like this: + +```toml tab={"label":"pants.toml"} +[source] +root_patterns = ["src/python"] + +[mypy] +install_from_resolve = "mypy" +source_plugins = ["src/python/project:django_settings"] +``` + +```text tab={"label":"3rdparty/python/mypy-requirements.txt"} +mypy==1.3.0 +django-stubs==1.5.0 +``` + +```python tab={"label":"3rdparty/python/BUILD"} +python_requirements( + name="mypy", + source="mypy-requirements.txt", + resolve="mypy", +) +``` + +```text tab={"label":"mypy.ini"} +[mypy] +plugins = + mypy_django_plugin.main + +[mypy.plugins.django-stubs] +django_settings_module = project.django_settings +``` + +```python tab={"label":"src/python/project/django_settings.py"} +from django.urls import URLPattern + +DEBUG = True +DEFAULT_FROM_EMAIL = "webmaster@example.com" +SECRET_KEY = "not so secret" +MY_SETTING = URLPattern(pattern="foo", callback=lambda: None) +``` + +```python tab={"label":"src/python/project/BUILD"} +python_source(name="django_settings", source="django_settings.py") +``` + +:::caution Importing type stubs +Type stubs specified in the MyPy custom lockfile are not visible to the `python-infer` subsystem, and cannot be referenced as explicit `dependencies`. If you `import` from a stubs module in your code, and it does not have a corresponding implementation `python_requirement` target that provides the imported module, you may see a warning/error depending on the value you've configured for `[python-infer].unowned_dependency_behavior`. 
Goals other than `check` will also raise `ImportError`s if the `import` isn't conditional on the value of `typing.TYPE_CHECKING`:
+
+```toml tab={"label":"pants.toml"}
+[python-infer]
+unowned_dependency_behavior = "warning"
+[mypy]
+install_from_resolve = "mypy"
+```
+
+```text tab={"label":"3rdparty/python/mypy-requirements.txt"}
+mypy==1.3.0
+mypy_boto3_ec2==1.26.136
+```
+
+```python tab={"label":"3rdparty/python/BUILD"}
+python_requirements(
+    name="mypy",
+    source="mypy-requirements.txt",
+    resolve="mypy",
+)
+```
+
+```python tab={"label":"src/example.py"}
+from typing import TYPE_CHECKING
+
+# Unsafe! Will fail outside of `check`
+from mypy_boto3_ec2 import EC2Client
+
+if TYPE_CHECKING:
+    # Safe, but will be flagged as a warning
+    from mypy_boto3_ec2 import EC2ServiceResource
+```
+
+For these reasons, it's recommended to load any type-stub libraries that require explicit imports as part of your normal [third-party dependencies](../overview/third-party-dependencies.mdx). Alternatively, you can set `# pants: no-infer-dep` on the lines of type-stub imports "guarded" by a check of `if TYPE_CHECKING`.
+:::
+
+:::note MyPy Protobuf support
+Add `mypy_plugin = true` to the `[python-protobuf]` scope. See [Protobuf](../integrations/protobuf-and-grpc.mdx) for more information.
+:::
+
+### Add a first-party plugin
+
+To add a [MyPy plugin](https://mypy.readthedocs.io/en/stable/extending_mypy.html) you wrote, add a `python_source` or `python_sources` target with the plugin's Python file(s) included in the `sources` field.
+
+Then, add `plugins = path.to.module` to your MyPy config file, using the name of the module without source roots. For example, if your Python file is called `pants-plugins/mypy_plugins/custom_plugin.py`, and you set `pants-plugins` as a source root, then set `plugins = mypy_plugins.custom_plugin`. Set the `config` option in the `[mypy]` scope in your `pants.toml` to point to your MyPy config file.
+
+Finally, set the option `source_plugins` in the `[mypy]` scope to include this target's address, e.g. `source_plugins = ["pants-plugins/mypy_plugins:plugin"]`. This will ensure that your plugin's sources are always included in the subprocess.
+
+For example:
+
+```toml tab={"label":"pants.toml"}
+[mypy]
+source_plugins = ["pants-plugins/mypy_plugins:plugin"]
+```
+
+```text tab={"label":"mypy.ini"}
+plugins =
+    mypy_plugins.change_return_type
+```
+
+```python tab={"label":"pants-plugins/mypy_plugins/BUILD"}
+python_source(name="plugin", source="change_return_type.py")
+```
+
+```python tab={"label":"pants-plugins/mypy_plugins/change_return_type.py"}
+"""A contrived plugin that changes the return type of any
+function ending in `__overridden_by_plugin` to return None."""
+
+from typing import Callable, Optional, Type
+
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import NoneType, Type as MyPyType
+
+from plugins.subdir.dep import is_overridable_function
+
+class ChangeReturnTypePlugin(Plugin):
+    def get_function_hook(
+        self, fullname: str
+    ) -> Optional[Callable[[FunctionContext], MyPyType]]:
+        return hook if fullname.endswith("__overridden_by_plugin") else None
+
+
+def hook(ctx: FunctionContext) -> MyPyType:
+    return NoneType()
+
+
+def plugin(_version: str) -> Type[Plugin]:
+    return ChangeReturnTypePlugin
+```
+
+Because this is a `python_source` or `python_sources` target, Pants will treat this code like your other Python files, such as running linters on it or allowing you to write a `python_distribution` target to distribute the plugin externally.
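+
+For example, a minimal sketch of distributing the plugin (the project name and version here are illustrative):
+
+```python title="pants-plugins/mypy_plugins/BUILD"
+python_source(name="plugin", source="change_return_type.py")
+
+python_distribution(
+    name="plugin-dist",
+    dependencies=[":plugin"],
+    # `python_artifact` supplies the distribution's metadata.
+    provides=python_artifact(
+        name="change-return-type-plugin",
+        version="0.0.1",
+    ),
+)
+```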
+
+### Reports
+
+MyPy can generate [various report files](https://mypy.readthedocs.io/en/stable/command_line.html#report-generation).
+
+For Pants to properly preserve the reports, instruct MyPy to write to the `reports/` folder by updating its config file or `--mypy-args`. For example, in your `pants.toml`:
+
+```toml title="pants.toml"
+[mypy]
+args = ["--linecount-report=reports"]
+```
+
+Pants will copy all reports into the folder `dist/check/mypy`.
+
+## Known limitations
+
+### Performance is sometimes slower than normal
+
+Pants 2.14 added support for leveraging MyPy's cache, making subsequent runs of MyPy extremely performant.
+This support, however, requires features that were added to MyPy in version `0.700`, and requires that
+`python_version` isn't set in MyPy's config or in `[mypy].args`.
+
+If you're using a version of MyPy older than `0.700`, consider upgrading to unlock super-speedy subsequent runs of MyPy.
+Additionally, consider not providing `python_version` in your config or args.
+
+## Tip: only run over changed files and their dependents
+
+When changing type hints in your code, you not only need to run MyPy over the changed files, but also over any code that depends on them:
+
+```bash
+$ pants --changed-since=HEAD --changed-dependents=transitive check
+```
+
+See [Advanced target selection](../../using-pants/advanced-target-selection.mdx) for more information.
diff --git a/versioned_docs/version-2.24/docs/python/goals/fmt.mdx b/versioned_docs/version-2.24/docs/python/goals/fmt.mdx
new file mode 100644
index 000000000..ca79c3f63
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/fmt.mdx
@@ -0,0 +1,19 @@
+---
+ title: fmt
+ sidebar_position: 1
+---
+
+Autoformat source code.
+
+---
+
+See [here](../overview/linters-and-formatters.mdx) for how to opt in to specific formatters, along with how to configure them:
+
+- Autoflake
+- Black
+- Docformatter
+- isort
+- Pyupgrade
+- yapf
+
+If you activate multiple formatters, Pants will run them sequentially so that they do not overwrite each other. You may need to update each formatter's config file to ensure that it is compatible with the other activated formatters.
diff --git a/versioned_docs/version-2.24/docs/python/goals/index.mdx b/versioned_docs/version-2.24/docs/python/goals/index.mdx
new file mode 100644
index 000000000..376096c5c
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/index.mdx
@@ -0,0 +1,16 @@
+---
+ title: Goals
+ sidebar_position: 1
+---
+
+Details on the Python-related goals implemented in the Python backend.
+
+---
+
+- [fmt](./fmt.mdx): autoformat source code.
+- [lint](./lint.mdx): lint source code in check-only mode.
+- [package](./package.mdx): package your code into an asset, e.g. a wheel or a PEX file.
+- [repl](./repl.mdx): open a REPL (standard shell or IPython).
+- [run](./run.mdx): run an executable or script.
+- [test](./test.mdx): run tests with Pytest.
+- [check](./check.mdx): run MyPy.
diff --git a/versioned_docs/version-2.24/docs/python/goals/lint.mdx b/versioned_docs/version-2.24/docs/python/goals/lint.mdx
new file mode 100644
index 000000000..c6cce5a74
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/lint.mdx
@@ -0,0 +1,33 @@
+---
+ title: lint
+ sidebar_position: 2
+---
+
+Lint source code. 
+ +--- + +The `lint` goal runs both dedicated linters and any formatters in check-only mode: + +- Autoflake +- Bandit +- Black +- Docformatter +- Flake8 +- isort +- Pydocstyle +- Pylint +- Pyupgrade +- yapf + +See [here](../overview/linters-and-formatters.mdx) for how to opt in to specific formatters and linters, along with how to configure them. + +:::tip Benefit of Pants: runs linters in parallel +Pants will run all activated linters at the same time for improved performance. As explained at [Python linters and formatters](../overview/linters-and-formatters.mdx), Pants also uses some other techniques to improve concurrency, such as dynamically setting the `--jobs` option for linters that have it. +::: + +:::tip Benefit of Pants: lint Python 2-only and Python 3-only code at the same time +Bandit, Flake8, and Pylint depend on which Python interpreter the tool is run with. Normally, if your project has some Python 2-only files and some Python 3-only files, you would not be able to run the linter in a single command because it would fail to parse your code. + +Instead, Pants will do the right thing when you run `pants lint ::`. Pants will group your targets based on their [interpreter constraints](../overview/interpreter-compatibility.mdx), and run all the Python 2 targets together and all the Python 3 targets together. +::: diff --git a/versioned_docs/version-2.24/docs/python/goals/package.mdx b/versioned_docs/version-2.24/docs/python/goals/package.mdx new file mode 100644 index 000000000..7b8373416 --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/goals/package.mdx @@ -0,0 +1,232 @@ +--- + title: package + sidebar_position: 3 +--- + +Create a deployable artifact. + +--- + +The `package` goal creates an artifact that can be deployed or distributed. + +The exact type of artifact depends on the type of target the goal is invoked on. + +You can run `pants package ::` to build all artifacts in your project. Pants will filter to only the relevant targets. + +:::tip Benefit of Pants: artifacts only include your true dependencies +Because Pants understands the dependencies of your code, and the dependencies of those dependencies, the generated artifact will only include the exact code needed for your package to work. This results in smaller, more focused packages. +::: + +:::tip Benefit of Pants: easily write automated tests of your packaging pipeline +You can depend on a package target in a `python_test` / `python_tests` target through the `runtime_package_dependencies` field. Pants will run the equivalent of `pants package` beforehand and copy the built artifact into the test's chroot, allowing you to test things like that the artifact has the correct files present and that it's executable. + +This allows you to test your packaging pipeline by simply running `pants test ::`, without needing custom integration test scripts. + +See [test](./test.mdx) for more information. +::: + +:::tip Streamline Docker builds +Check out our blog [Streamline Docker Builds](https://blog.pantsbuild.org/pants-pex-and-docker/) to read about how you can combine these `package` formats with Pants's Docker support. Also see our [Docker docs](../../docker/index.mdx) +::: + +## Creating a PEX file from a `pex_binary` target + +Running `package` on a `pex_binary` target will create an executable [PEX file](../overview/pex.mdx). + +The PEX file will contain all the code needed to run the binary, namely: + +- All Python code and resources the binary transitively depends on. 
+- The resolved 3rd-party Python dependencies (sdists and wheels) of all targets the binary transitively depends on.
+
+The PEX metadata will include:
+
+- The entry point or console script specified by the `pex_binary` target, if any.
+- The intersection of all interpreter constraints applicable to the code in the Pex. See [Interpreter compatibility](../overview/interpreter-compatibility.mdx).
+
+You can also tweak many options, such as the `execution_mode` option to optimize for faster initial runs vs. subsequent runs. Run `pants help pex_binary`.
+
+### The `entry_point` and `script` fields
+
+The `entry_point` and `script` fields set the behavior for what happens when you run `./dist/my_app.pex`, such as if it runs a particular script or launches an app.
+
+Usually, you'll want to use `entry_point`, which lets you specify a module and optionally a function to execute, such as `project.my_app:main`. This is especially useful when you want to run first-party code.
+
+`script` is useful when you want to directly run a third-party dependency that sets `console_scripts` in its distribution. This allows you to, for example, set `script="black"` to create a `black.pex` that behaves as if you had `pip install`ed `black` and then ran `black` in your shell:
+
+```
+❯ ./dist/black.pex --version
+python -m black, 23.1.0 (compiled: yes)
+```
+
+You can also leave off both fields, which will cause `./dist/my_app.pex` to launch a Python interpreter with all the relevant code and dependencies loaded.
+
+```
+❯ ./dist/my_app.pex
+Python 3.9.6 (default, Jun 28 2021, 19:24:41)
+[Clang 12.0.5 (clang-1205.0.22.9)] on darwin
+Type "help", "copyright", "credits" or "license" for more information.
+(InteractiveConsole)
+```
+
+If you use the `entry_point` field, Pants will use dependency inference, which you can confirm by running `pants dependencies path/to:app`. Otherwise, you must manually add to the `dependencies` field.
+
+#### `entry_point` with a file name
+
+You can specify a file name, which Pants will convert into a well-formed entry point. As with the `source` / `sources` field, file paths are relative to the BUILD file, rather than the build root.
+
+```python title="helloworld/BUILD"
+# The default `sources` field will include `main.py`.
+python_sources(name="lib")
+
+# Pants will convert the entry point to `helloworld.main`.
+pex_binary(
+    name="app",
+    entry_point="main.py",
+)
+
+# You can also specify the function to run.
+pex_binary(
+    name="app_with_func",
+    entry_point="main.py:my_func",
+)
+```
+
+This approach has the added benefit that you can use file arguments, e.g. `pants package helloworld/main.py`, rather than needing to use target addresses like `pants package helloworld:app`.
+
+#### Explicit `entry_point`
+
+You can directly specify the entry point in the format `path.to.module` or `path.to.module:my_func`. This allows you to use an entry point for a third-party requirement or the Python standard library.
+
+```python title="helloworld/BUILD"
+# The default `sources` field will include `main.py`.
+python_sources(name="lib")
+
+pex_binary(
+    name="app",
+    entry_point="helloworld.main",
+)
+
+# You can also specify the function to run.
+pex_binary(
+    name="app_with_func",
+    entry_point="helloworld.main:my_func",
+)
+
+# You can specify third-party requirements and the std lib. 
+pex_binary(
+    name="3rdparty_app",
+    entry_point="bandit:main",
+)
+```
+
+Unlike using `entry_point` with a file name, this does not work with file arguments; you must use the target address, like `pants package helloworld:app`.
+
+#### `script`
+
+You can set the `script` to any `console_script` or script exposed by your third-party requirements.
+
+```python title="helloworld/BUILD"
+python_requirement(name="black_req", requirements=["black==23.1.0"])
+
+pex_binary(
+    name="black_bin",
+    script="black",
+    dependencies=[":black_req"],
+)
+```
+
+You must explicitly add the dependencies you'd like to the `dependencies` field.
+
+This does not work with file arguments; you must use the target address, like `pants package helloworld:black_bin`.
+
+### Injecting command-line arguments and environment variables
+
+You can use the `inject_args` and `inject_env` fields to "freeze" command-line arguments and environment variables into the PEX file. This can save you from having to create shim files around generic binaries. For example:
+
+```python title="myproduct/myservice/BUILD"
+python_requirement(name="gunicorn", requirements=["gunicorn==20.1.0"])
+
+pex_binary(
+    name="myservice_bin",
+    script="gunicorn",
+    inject_args=["myproduct.myservice.wsgi:app", "--name=myservice"],
+    inject_env={"MY_ENV_VAR": "1"},
+    dependencies=[":gunicorn"],
+)
+```
+
+:::caution PEX files may be platform-specific
+If your code's requirements include distributions that include native code, then the resulting PEX file will only run on the platform it was built on.
+
+However, if all native code requirements are available as [wheels](https://packaging.python.org/glossary/#term-wheel) for the target platform, then you can cross-build a PEX file on a different source platform by specifying the `platforms` field on the `pex_binary`, e.g. `platforms=["linux-x86_64-cp-37-cp37m", "macosx_10_15_x86_64-cp-38-cp38"]`.
+:::
+
+:::note Tip: inspect the `.pex` file with `unzip`
+Because a `.pex` file is simply a ZIP file, you can use the Unix tool `unzip` to inspect the contents. For example, run `unzip -l dist/app.pex` to see all file members.
+:::
+
+:::caution Use `resource` instead of `file`
+`file` and `files` targets will not be included in the built PEX because filesystem APIs like `open()` would not load them as expected. Instead, use the `resource` and `resources` targets or wrap your `pex_binary` in an `archive` target. See [Assets and archives](../../using-pants/assets-and-archives.mdx) for further explanation.
+:::
+
+### Examples
+
+```
+❯ pants package helloworld/main.py
+
+17:36:42 [INFO] Wrote dist/helloworld/helloworld.pex
+```
+
+We can also build the same Pex by using the address of the `pex_binary` target, as described [here](../../using-pants/key-concepts/targets-and-build-files.mdx).
+
+```
+❯ pants package helloworld:app
+
+17:36:42 [INFO] Wrote dist/helloworld/helloworld.pex
+```
+
+### `pex_binaries` target generator
+
+If you have several scripts in the same directory, it can be convenient to use the `pex_binaries` [target generator](../../using-pants/key-concepts/targets-and-build-files.mdx), which will generate one `pex_binary` target per entry in the `entry_points` field:
+
+```python title="scripts/BUILD"
+# The default `sources` will include all our source files. 
+python_sources(name="lib")
+
+pex_binaries(
+    name="binaries",
+    entry_points=[
+        "app1.py",
+        "app2.py",
+        "app3.py:my_func",
+    ],
+    overrides={
+        "app3.py:my_func": {"execution_mode": "venv"},
+    },
+)
+```
+
+Use `pants peek path/to/dir:` to inspect the generated `pex_binary` targets.
+
+## Create a setuptools distribution
+
+Running `package` on a `python_distribution` target will create a standard setuptools-style Python distribution, such as an sdist or a wheel. See [Building Distributions](../overview/building-distributions.mdx) for details.
+
+## Create a `zip` or `tar` file
+
+See [Resources and archives](../../using-pants/assets-and-archives.mdx) for how to create a zip or tar file with built binaries and/or loose files in it by using the `archive` target.
+
+This is often useful when you want to create a PEX binary using the `pex_binary` target, and bundle it with some loose config files.
+
+## Create an AWS Lambda
+
+See [AWS Lambda](../integrations/aws-lambda.mdx) for how to build a zip file that works with AWS Lambda.
+
+## Create a Google Cloud Function
+
+See [Google Cloud Functions](../integrations/google-cloud-functions.mdx) for how to build a zip file that works with Google Cloud Functions.
+
+## Create a PyOxidizer binary
+
+See [PyOxidizer](../integrations/pyoxidizer.mdx) for how to distribute your code as a binary, like PEX, but with
+the Python interpreter included.
diff --git a/versioned_docs/version-2.24/docs/python/goals/publish.mdx b/versioned_docs/version-2.24/docs/python/goals/publish.mdx
new file mode 100644
index 000000000..c40ad7611
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/publish.mdx
@@ -0,0 +1,79 @@
+---
+ title: publish
+ sidebar_position: 4
+---
+
+How to distribute packages to a PyPI repository.
+
+---
+
+The `publish` goal is currently in the experimental Python backend. Activate it with this config:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.experimental.python",
+]
+```
+
+This will register a new `repositories` field on the `python_distribution` target type, so that when you run `pants publish` on such targets they will be packaged into wheels and/or sdists, and then published to the repositories specified in your BUILD files.
+
+## Python Repositories
+
+When publishing a `python_distribution`, you need to tell Pants which repositories to publish to. That is done with a new `repositories` field on `python_distribution` targets.
+
+```python tab={"label":"src/python/BUILD"}
+python_distribution(
+    name="demo",
+    # ...
+    repositories=[
+        "@pypi",
+        "@private-repo",
+        "https://pypi.private2.example.com",
+    ]
+)
+```
+
+```text tab={"label":".pypirc"}
+[distutils]
+index-servers =
+    pypi
+    private-repo
+
+[pypi]
+username: publisher-example
+
+[private-repo]
+repository: https://pypi.private.example.com
+```
+
+Each repository is either a repository URL or, when prefixed with `@`, a reference to a repository configured in the `.pypirc` file.
+
+:::danger Keep Secrets Secret
+We strongly discourage the use of secrets verbatim in your configuration files.
+
+It is better to provide the required secrets using environment variables when running `pants publish`. Or, better yet, to use `keyring` as described in the [Twine documentation](https://twine.readthedocs.io/en/latest/#keyring-support).
+:::
+
+## Environment variables
+
+Pants will pass certain configuration [environment variables](https://twine.readthedocs.io/en/latest/#environment-variables) through to Twine. 
If multiple repositories are involved in a single `publish` goal, you can distinguish them by adding an underscore and the repository name (upper-cased, and with hyphens replaced with underscores) as a suffix on the environment variable names:
+
+- `TWINE_USERNAME`
+- `TWINE_USERNAME_<REPOSITORY_NAME>`
+- `TWINE_PASSWORD`
+- `TWINE_PASSWORD_<REPOSITORY_NAME>`
+- `TWINE_REPOSITORY_URL`
+- `TWINE_REPOSITORY_URL_<REPOSITORY_NAME>`
+
+```shell title="secrets"
+# Ephemeral file
+export TWINE_USERNAME_PRIVATE_REPO="accountname"
+export TWINE_PASSWORD_PRIVATE_REPO="secretvalue"
+```
+
+Given the example `BUILD` and `.pypirc` files from the previous section, `demo` could be published with the following command:
+
+```shell
+$ { source ./secrets && pants publish src/python:demo; }
+```
diff --git a/versioned_docs/version-2.24/docs/python/goals/repl.mdx b/versioned_docs/version-2.24/docs/python/goals/repl.mdx
new file mode 100644
index 000000000..4bbfec45f
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/repl.mdx
@@ -0,0 +1,86 @@
+---
+ title: repl
+ sidebar_position: 5
+---
+
+Open a REPL for interactive development.
+
+---
+
+Pants will load a [REPL](https://en.wikipedia.org/wiki/REPL) with all of your specified source code and any of its third-party dependencies, which allows you to import those values.
+
+## IPython
+
+In addition to the default Python shell, Pants supports the improved [IPython shell](https://ipython.org).
+
+To use IPython, run `pants repl --shell=ipython`. To permanently use IPython, add this to your `pants.toml`:
+
+```toml title="pants.toml"
+[repl]
+shell = "ipython"
+```
+
+You can change IPython's version [like any other tool, using `install_from_resolve`](../overview/lockfiles.mdx#lockfiles-for-tools).
+
+If you use a version lower than IPython 7, then you must set `[ipython].ignore_cwd = false` to avoid Pants setting an option that did not exist in earlier IPython releases.
+
+:::note Python 2 support
+Pants uses IPython 7 by default, which does not work with Python 2. You can use `install_from_resolve` to install IPython 5:
+
+```toml tab={"label":"pants.toml"}
+[python.resolves]
+...
+ipython = "3rdparty/python/ipython.lock"
+
+[ipython]
+install_from_resolve = "ipython"
+ignore_cwd = false
+```
+```python tab={"label": "BUILD"}
+python_requirement(name="ipython", requirements=["ipython<6"], resolve="ipython")
+```
+
+:::
+
+## Examples
+
+```text title="Shell"
+$ pants repl helloworld/greet/greeting.py
+
+Python 3.7.6 (default, Feb 26 2020, 08:28:08)
+[Clang 11.0.0 (clang-1100.0.33.8)] on darwin
+Type "help", "copyright", "credits" or "license" for more information.
+(InteractiveConsole)
+>>> from helloworld.greet.greeting import Greeter
+>>> Greeter().greet("Pants")
+'buenas tardes, Pants!'
+>>> from translate import Translator
+>>> Translator(to_lang="fr").translate("Good morning.")
+'Salut.'
+```
+
+This will not load any of your code:
+
+```text title="Shell"
+❯ pants repl --shell=ipython
+Python 3.9.12 (main, Mar 26 2022, 15:45:34)
+Type 'copyright', 'credits' or 'license' for more information
+IPython 7.34.0 -- An enhanced Interactive Python. Type '?' for help.
+
+In [1]: 21 * 4
+Out[1]: 84
+```
+
+`pants repl ::` will load all your code.
+
+To pass arguments to the repl program, use `--` at the end of the command, like this:
+
+```text title="Shell"
+$ pants repl --shell=ipython -- -i helloworld/main.py
+```
+
+Check the documentation for the `--repl-args` option in `pants help repl` to see which shells support passing arguments. 
+
+:::note Tip: how to exit the REPL
+Either type `exit()` and hit enter, or press `ctrl+d`.
+:::
diff --git a/versioned_docs/version-2.24/docs/python/goals/run.mdx b/versioned_docs/version-2.24/docs/python/goals/run.mdx
new file mode 100644
index 000000000..f1467545b
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/goals/run.mdx
@@ -0,0 +1,96 @@
+---
+ title: run
+ sidebar_position: 6
+---
+
+Run a `pex_binary` target.
+
+---
+
+To run an executable/script, use `pants run` on one of the following target types:
+
+- [`pex_binary`](../../../reference/targets/pex_binary.mdx)
+- [`python_source`](../../../reference/targets/python_source.mdx)
+
+(See [package](./package.mdx) for more on the `pex_binary` target.)
+
+```bash
+# A python_source target (usually referred to by the filename)
+$ pants run project/app.py
+```
+
+or
+
+```bash
+# A pex_binary target (must be referred to by target name)
+$ pants run project:app
+```
+
+To pass arguments to the script/executable, use `--` at the end of the command, like this:
+
+```bash
+$ pants run project/app.py -- --arg1 arg2
+```
+
+You may only run one target at a time.
+
+The program will have access to the same environment used by the parent `pants` process, so you can set environment variables in the external environment, e.g. `FOO=bar pants run project/app.py`. (Pants will auto-set some values like `$PATH`).
+
+:::note Tip: check the return code
+Pants will propagate the return code from the underlying executable. Run `echo $?` after the Pants run to see the return code.
+:::
+
+:::caution Issues finding files?
+Run `pants dependencies --transitive path/to/binary.py` to ensure that all the files you need are showing up, including for any [assets](../../using-pants/assets-and-archives.mdx) you intend to use.
+:::
+
+## Execution Semantics
+
+Running a `pex_binary` is equivalent to `package`-ing the target followed by executing the built PEX from the repo root.
+
+Running a `python_source` with the `run_goal_use_sandbox` field set to `True` (the default) runs your code in an ephemeral sandbox (temporary directory) with your first-party code and Pants-generated files (such as a `relocated_files` or `archive`) copied inside. If you are using generated files like this, you may need to set `run_goal_use_sandbox` to `True` for file loading to work properly.
+
+Running a `python_source` with the `run_goal_use_sandbox` field set to `False` is equivalent to running the source directly (a la `python ...`) with the set of third-party dependencies exposed to the interpreter. This is comparable to using a virtual environment or Poetry to run your script (e.g. `venv/bin/python ...` or `poetry run python ...`). When scripts write in-repo files, such as Django's `manage.py makemigrations`, it is often necessary to set `run_goal_use_sandbox` to `False` so that the file is written into the expected location.
+
+## Watching the filesystem
+
+If the app that you are running is long-lived and safe to restart (including web apps like Django and Flask or other types of servers/services), you can set `restartable=True` on your `pex_binary` target to indicate this to Pants. The `run` goal will then automatically restart the app when its input files change (see the sketch below)!
+
+On the other hand, if your app is short-lived (like a script) and you'd like to re-run it when files change but never interrupt an ongoing run, consider using `pants --loop run` instead. See [Goals](../../using-pants/key-concepts/goals.mdx#running-goals) for more information on `--loop`.
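+
+As an illustration of the `restartable` field described above, a minimal sketch might look like this (the target and file names here are hypothetical):
+
+```python title="project/BUILD"
+# Sketch: a long-lived web server that is safe to restart.
+python_sources(name="lib")
+
+pex_binary(
+    name="server",
+    entry_point="app.py",
+    # Tell Pants the process may be restarted, so `pants run project:server`
+    # reloads the app whenever its input files change.
+    restartable=True,
+)
+```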
+ +## Debugging + +:::note Tip: using the VS Code (or any [DAP](https://microsoft.github.io/debug-adapter-protocol/)-compliant editor) remote debugger + +1. In your editor, set your breakpoints and any other debug settings (like break-on-exception). +2. Run your code with `pants run --debug-adapter`. +3. Connect your editor to the server. The server host and port are logged by Pants when executing `run --debug-adapter`. (They can also be configured using the `[debug-adapter]` subsystem). + +::: + +:::note Tip: Using the IntelliJ/PyCharm remote debugger +First, add the following target in some BUILD file (e.g., the one containing your other 3rd-party dependencies): + +``` +python_requirement( + name = "pydevd-pycharm", + requirements=["pydevd-pycharm==203.5419.8"], # Or whatever version you choose. +) +``` + +You can check this into your repo, for convenience. + +Now, use the remote debugger as usual: + +1. Start a Python remote debugging session in PyCharm, say on port 5000. +2. Add the following code at the point where you want execution to pause and connect to the debugger: + +``` +import pydevd_pycharm +pydevd_pycharm.settrace('localhost', port=5000, stdoutToServer=True, stderrToServer=True) +``` + +Run your executable with `pants run` as usual. + +Note: The first time you do so you may see some extra dependency resolution work, as `pydevd-pycharm` has now been added to the binary's dependencies, via inference. If you have dependency inference turned off in your repo, you will have to manually add a temporary explicit dependency in your binary target on the `pydevd-pycharm` target. +::: diff --git a/versioned_docs/version-2.24/docs/python/goals/test.mdx b/versioned_docs/version-2.24/docs/python/goals/test.mdx new file mode 100644 index 000000000..0a15dd65b --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/goals/test.mdx @@ -0,0 +1,615 @@ +--- + title: test + sidebar_position: 7 +--- + +Run tests with Pytest. + +--- + +Pants uses the [Pytest](https://docs.pytest.org/en/latest/) test runner to run Python tests. You may write your tests in Pytest-style, unittest-style, or mix and match both. + +:::tip Benefit of Pants: runs each file in parallel +Each file gets run as a separate process, which gives you fine-grained caching and better parallelism. Given enough cores, Pants will be able to run all your tests at the same time. + +This also gives you fine-grained invalidation. If you run `pants test ::`, and then you only change one file, then only tests that depended on that changed file will need to rerun. +::: + +## Examples + +```bash + # Run all tests in the repository. +❯ pants test :: + +# Run all the tests in this directory. +❯ pants test helloworld/util: + +# Run just the tests in this file. +❯ pants test helloworld/util/lang_test.py + + # Run just one test. +❯ pants test helloworld/util/lang_test.py -- -k test_language_translator +``` + +## Pytest version and plugins + +To change the Pytest version, set the `install_from_resolve` option in the `[pytest]` scope. 
You may also add [plugins](https://docs.pytest.org/en/latest/plugins.html) by including the plugins in the resolve:
+
+```toml title="pants.toml"
+[python.resolves]
+pytest = "3rdparty/python/pytest-lock.txt"
+
+[pytest]
+install_from_resolve = "pytest"
+```
+
+Then, add a requirements file (here, `pytest-requirements.txt`) specifying the version of `pytest` and any plugins:
+
+```text title="pytest-requirements.txt"
+pytest>=5.4
+pytest-django>=3.9.0,<4
+pytest-rerunfailures==9.0
+```
+
+Finally, generate the relevant lockfile with `pants generate-lockfiles --resolve=pytest`. For more information, see [Lockfiles for tools](../overview/lockfiles.mdx#lockfiles-for-tools).
+
+Alternatively, if you only want to install the plugin for certain tests, you can add the plugin to the `dependencies` field of your `python_test` / `python_tests` target. See [Third-party dependencies](../overview/third-party-dependencies.mdx) for how to install Python dependencies. For example:
+
+```text tab={"label":"requirements.txt"}
+pytest-django==3.10.0
+```
+
+```python tab={"label":"BUILD"}
+python_requirements(name="reqs")
+```
+
+```python tab={"label":"helloworld/util/BUILD"}
+python_tests(
+    name="tests",
+    # Normally, Pants infers dependencies based on imports.
+    # Here, we don't actually import our plugin, though, so
+    # we need to explicitly list it.
+    dependencies=["//:reqs#pytest-django"],
+)
+```
+
+## Controlling output
+
+By default, Pants only shows output for failed tests. You can change this by setting `--test-output` to one of `all`, `failed`, or `never`, e.g. `pants test --output=all ::`.
+
+You can permanently set the output format in your `pants.toml` like this:
+
+```toml title="pants.toml"
+[test]
+output = "all"
+```
+
+:::note Tip: Use Pytest options to make output more or less verbose
+See ["Passing arguments to Pytest"](./test.mdx#passing-arguments-to-pytest).
+
+For example:
+
+```bash
+❯ pants test project/app_test.py -- -q
+```
+
+You may want to permanently set the Pytest option `--no-header` to avoid printing the Pytest version for each test run:
+
+```toml
+[pytest]
+args = ["--no-header"]
+```
+
+:::
+
+## Passing arguments to Pytest
+
+To pass arguments to Pytest, put them at the end after `--`, like this:
+
+```bash
+❯ pants test project/app_test.py -- -k test_function1 -vv -s
+```
+
+You can also use the `args` option in the `[pytest]` scope, like this:
+
+```toml title="pants.toml"
+[pytest]
+args = ["-vv"]
+```
+
+:::note Tip: some useful Pytest arguments
+See [https://docs.pytest.org/en/latest/usage.html](https://docs.pytest.org/en/latest/usage.html) for more information.
+
+- `-k expression`: only run tests matching the expression.
+- `-v`: verbose mode.
+- `-s`: always print the stdout and stderr of your code, even if a test passes.
+
+:::
+
+:::caution How to use Pytest's `--pdb` option
+You must run `pants test --debug` for this to work properly. See the section "Debugging Tests" for more information.
+:::
+
+## Config files
+
+Pants will automatically include any relevant config files in the process's sandbox: `pytest.ini`, `pyproject.toml`, `tox.ini`, and `setup.cfg`.
+
+## `conftest.py`
+
+Pytest uses [`conftest.py` files](https://docs.pytest.org/en/stable/fixture.html#conftest-py-sharing-fixture-functions) to share fixtures and config across multiple distinct test files.
+
+The default `sources` value for the `python_test_utils` target includes `conftest.py`. 
You can run [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) to automatically add this target:
+
+```
+pants tailor ::
+Created project/BUILD:
+  - Add python_sources target project
+  - Add python_tests target tests
+  - Add python_test_utils target test_utils
+```
+
+Pants will also infer dependencies on any `conftest.py` files in the current directory _and_ any ancestor directories, which mirrors how Pytest behaves. This requires that each `conftest.py` has a target referring to it. You can verify this is working correctly by running `pants dependencies path/to/my_test.py` and confirming that each `conftest.py` file shows up. (You can turn off this feature by setting `conftests = false` in the `[python-infer]` scope.)
+
+## Setting environment variables
+
+Test runs are _hermetic_, meaning that they are stripped of the parent `pants` process's environment variables. This is important for reproducibility, and it also increases cache hits.
+
+To add any arbitrary environment variable back to the process, you can either add the environment variable to the specific tests with the `extra_env_vars` field on `python_test` / `python_tests` targets or to all your tests with the `[test].extra_env_vars` option. Generally, prefer the `extra_env_vars` field so that more of your tests are hermetic.
+
+With both `[test].extra_env_vars` and the `extra_env_vars` field, you can either hardcode a value or leave off a value to "allowlist" it and read from the parent `pants` process's environment.
+
+```toml tab={"label":"pants.toml"}
+[test]
+extra_env_vars = ["VAR1", "VAR2=hardcoded_value"]
+```
+
+```python tab={"label":"project/BUILD"}
+python_tests(
+    name="tests",
+    # Adds to all generated `python_test` targets,
+    # i.e. each file in the `sources` field.
+    extra_env_vars=["VAR3", "VAR4=hardcoded"],
+    # Even better, use `overrides` to be more granular.
+    overrides={
+        "strutil_test.py": {"extra_env_vars": ["VAR"]},
+        ("dirutil_test.py", "osutil_test.py"): {"extra_env_vars": ["VAR5"]},
+    },
+)
+```
+
+:::note Tip: avoiding collisions between concurrent `pytest` runs using env vars
+Sometimes your tests/code will need to reach outside of the sandbox, for example to initialize a test DB schema. In these cases you may see conflicts between concurrent `pytest` processes scheduled by Pants, when two or more tests try to set up / tear down the same resource concurrently. To avoid this issue, you can set `[pytest].execution_slot_var` to be a valid environment variable name. Pants will then inject a variable with that name into each `pytest` run, using the process execution slot ID (an integer) as the variable's value. You can then update your test code to check for the presence of the variable and incorporate its value into generated DB names / file paths. For example, in a project using `pytest-django` you could do:
+
+```toml tab={"label":"pants.toml"}
+[pytest]
+execution_slot_var = "PANTS_EXECUTION_SLOT"
+```
+
+```python tab={"label":"src/conftest.py"}
+import os
+
+import pytest
+from pytest_django.fixtures import _set_suffix_to_test_databases
+from pytest_django.lazy_django import skip_if_no_django
+
+@pytest.fixture(scope="session")
+def django_db_modify_db_settings():
+    skip_if_no_django()
+    if "PANTS_EXECUTION_SLOT" in os.environ:
+        _set_suffix_to_test_databases(os.environ["PANTS_EXECUTION_SLOT"])
+```
+
+:::
+
+## Batching and parallelism
+
+By default, Pants will schedule concurrent `pytest` runs for each Python test file passed to the `test` goal. 
This approach provides parallelism with fine-grained caching, but can have drawbacks in some situations:
+
+- `package`- and `session`-scoped `pytest` fixtures will execute once per `python_test` target, instead of once per directory / once overall. This can cause significant overhead if you have many tests scoped under a time-intensive fixture (e.g. a fixture that sets up a large DB schema).
+- Tests _within_ a `python_test` file will execute sequentially. This can be slow if you have large files containing many tests.
+
+### Batching tests
+
+Running multiple test files within a single `pytest` process can sometimes improve performance by allowing reuse of expensive high-level `pytest` fixtures. Pants allows users to opt into this behavior via the `batch_compatibility_tag` field on `python_test`, with the following rules:
+
+- If the field is not set, the `python_test` is assumed to be incompatible with all others and will run in a dedicated `pytest` process.
+- If the field is set and is different from the value on some other `python_test`, the tests are explicitly incompatible and are guaranteed to not run in the same `pytest` process.
+- If the field is set and is equal to the value on some other `python_test`, the tests are explicitly compatible and _may_ run in the same `pytest` process.
+
+Compatible tests _may not_ end up in the same `pytest` batch if:
+
+- There are "too many" tests with the same `batch_compatibility_tag`, as determined by the `[test].batch_size` setting.
+- Compatible tests have some incompatibility in Pants metadata (e.g. different `resolve` or `extra_env_vars`).
+
+Compatible tests that _do_ end up in the same batch will run in a single `pytest` invocation. By default the tests will run sequentially, but they can be parallelized by enabling `pytest-xdist` (see below). A single success/failure result will be reported for the entire batch, and additional output files (e.g. XML results and coverage) will encapsulate all of the included Python test files.
+
+:::note Tip: finding failed tests in large batches
+It can sometimes be difficult to locate test failures in the logging output of a large `pytest` batch. You can pass the `-r` flag to `pytest` to make this investigation easier:
+
+```bash
+❯ pants test :: -- -r
+```
+
+This will cause `pytest` to print a "summary report" at the end of its output, including the names of all failed tests. See the `pytest` docs [here](https://docs.pytest.org/en/6.2.x/usage.html#detailed-summary-report) for more information.
+:::
+
+The high-level `pytest` fixtures that motivate batched testing are often defined in a `conftest.py` near the root of your repository, applying to every test in a directory tree. In these cases, you can mark all the tests in the directory tree as compatible using the [`__defaults__` builtin](../../using-pants/key-concepts/targets-and-build-files.mdx#field-default-values):
+
+```python title="BUILD"
+python_test_utils()
+
+__defaults__(
+    {(python_test, python_tests): dict(batch_compatibility_tag="your-tag-here")}
+)
+```
+
+:::caution Caching batched tests
+Batched test results are cached together by Pants, meaning that if any file in the batch changes (or if a file is added to / removed from the batch) then the entire batch will be invalidated and need to re-run. 
Depending on the time it takes to execute your fixtures and the number of tests sharing those fixtures, you may see better performance overall by setting a lower value for `[test].batch_size`, improving your cache-hit rate to skip running tests more often.
+:::
+
+### Parallelism via `pytest-xdist`
+
+Pants includes built-in support for `pytest-xdist`, which can be enabled by setting:
+
+```toml title="pants.toml"
+[pytest]
+xdist_enabled = true
+```
+
+This will cause Pants to pass `-n <concurrency>` when running `pytest`. When this is set, `pytest` will parallelize the tests _within_ your `python_test` file, instead of running them sequentially. If multiple `python_test`s are batched into the same process, `pytest-xdist` will parallelize the tests within _all_ of the files; this can help you regain the benefits of Pants's native concurrency when running batched tests.
+
+By default, Pants will automatically compute the value of `<concurrency>` for each target based on the number of tests defined in the file and the number of available worker threads. You can instead set a hard-coded upper limit on the concurrency per target:
+
+```python title="BUILD"
+python_test(name="tests", source="tests.py", xdist_concurrency=4)
+```
+
+To explicitly disable the use of `pytest-xdist` for a target, set `xdist_concurrency=0`. This can be necessary for tests that are not safe to run in parallel.
+
+:::caution Parallelism in multiple concurrent processes
+Pants will limit the total number of parallel tests running across _all_ scheduled processes so that it does not exceed the configured value of `[GLOBAL].process_execution_local_parallelism` (by default, the number of CPUs available on the machine running Pants). For example, if your machine has 8 CPUs and Pants schedules 8 concurrent `pytest` processes with `pytest-xdist` enabled, it will pass `-n 1` to each process so that the total concurrency is 8.
+
+It is possible to work around this behavior by marking all of your `python_test` targets as batch-compatible and setting a very large value for `[test].batch_size`. This will cause Pants to schedule fewer processes (containing more `python_test`s each) overall, allowing for larger values of `-n <concurrency>`. Note however that this approach will limit the cacheability of your tests.
+:::
+
+When `pytest-xdist` is in use, the `PYTEST_XDIST_WORKER` and `PYTEST_XDIST_WORKER_COUNT` environment variables will be automatically set. You can use those values (in addition to `[pytest].execution_slot_var`) to avoid collisions between parallel tests (e.g. by using the combination of `[pytest].execution_slot_var` and `PYTEST_XDIST_WORKER` as a suffix for generated database names / file paths).
+
+:::caution `pytest-xdist` and high-level fixtures
+Use of `pytest-xdist` may cause high-level `pytest` fixtures to execute more often than expected. See the `pytest-xdist` docs [here](https://pypi.org/project/pytest-xdist/#making-session-scoped-fixtures-execute-only-once) for more details, and tips on how to mitigate this.
+:::
+
+## Force reruns with `--force`
+
+To force your tests to run again, rather than reading from the cache, run `pants test --force path/to/test.py`.
+
+## Debugging Tests
+
+Because Pants runs multiple test targets in parallel, you will not see your test results appear on the screen until the test has completely finished. This means that you cannot use debuggers normally; the breakpoint will never show up on your screen and the test will hang indefinitely (or timeout, if timeouts are enabled). 
+
+Instead, if you want to run a test interactively—such as to use a debugger like `pdb`—run your tests with `pants test --debug`. For example:
+
+```python tab={"label":"test_debug_example.py"}
+def test_debug():
+    import pdb; pdb.set_trace()
+    assert 1 + 1 == 2
+```
+
+```text tab={"label":"Shell"}
+❯ pants test --debug test_debug_example.py
+
+===================================================== test session starts =====================================================
+platform darwin -- Python 3.6.10, pytest-5.3.5, py-1.8.1, pluggy-0.13.1
+rootdir: /private/var/folders/sx/pdpbqz4x5cscn9hhfpbsbqvm0000gn/T/.tmpn2li0z
+plugins: cov-2.8.1, timeout-1.3.4
+collected 6 items
+
+test_debug_example.py
+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace (IO-capturing turned off) >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+> /private/var/folders/sx/pdpbqz4x5cscn9hhfpbsbqvm0000gn/T/.tmpn2li0z/test_debug_example.py(11)test_debug()
+-> assert 1 + 1 == 2
+(Pdb) 1 + 1
+2
+```
+
+If you use multiple files with `test --debug`, they will run sequentially rather than in parallel.
+
+:::note Tip: using `ipdb` in tests
+[`ipdb`](https://github.com/gotcha/ipdb) integrates IPython with the normal `pdb` debugger for enhanced features like autocomplete and improved syntax highlighting. `ipdb` is very helpful when debugging tests.
+
+To be able to access `ipdb` when running tests, add this to your `pants.toml`:
+
+```toml
+[pytest]
+extra_requirements.add = ["ipdb"]
+```
+
+Then, you can use `import ipdb; ipdb.set_trace()` in your tests.
+
+To run the tests you will need to add `-- -s` to the test call, since `ipdb` needs stdin and Pytest will capture it otherwise.
+
+```bash
+❯ pants test --debug -- -s
+```
+
+:::
+
+:::note Tip: using the VS Code (or any [DAP](https://microsoft.github.io/debug-adapter-protocol/)-compliant editor) remote debugger in tests
+
+1. In your editor, set your breakpoints and any other debug settings (like break-on-exception).
+2. Run your test with `pants test --debug-adapter`.
+3. Connect your editor to the server. The server host and port are logged by Pants when executing `test --debug-adapter`. (They can also be configured using the `[debug-adapter]` subsystem).
+
+:::
+
+:::note Tip: using the IntelliJ/PyCharm remote debugger in tests
+First, add this to your `pants.toml`:
+
+```toml
+[pytest]
+extra_requirements.add = ["pydevd-pycharm==203.5419.8"] # Or whatever version you choose.
+```
+
+Now, use the remote debugger as usual:
+
+1. Start a Python remote debugging session in PyCharm, say on port 5000.
+2. Add the following code at the point where you want execution to pause and connect to the debugger:
+
+```python
+import pydevd_pycharm
+pydevd_pycharm.settrace('localhost', port=5000, stdoutToServer=True, stderrToServer=True)
+```
+
+Run your test with `pants test --debug` as usual.
+:::
+
+## Timeouts
+
+Pants can cancel tests which take too long. This is useful to prevent tests from hanging indefinitely.
+
+To add a timeout, set the `timeout` field to an integer value of seconds, like this:
+
+```python title="BUILD"
+python_test(name="tests", source="tests.py", timeout=120)
+```
+
+When you set `timeout` on the `python_tests` target generator, the same timeout will apply to every generated `python_test` target. To set different timeouts within the same target generator, use the `overrides` field: 
+
+```python title="BUILD"
+python_tests(
+    name="tests",
+    overrides={
+        "test_f1.py": {"timeout": 20},
+        ("test_f2.py", "test_f3.py"): {"timeout": 35},
+    },
+)
+```
+
+You can also set a default value and a maximum value in `pants.toml`:
+
+```toml title="pants.toml"
+[test]
+timeout_default = 60
+timeout_maximum = 600
+```
+
+If a target sets its `timeout` higher than `[test].timeout_maximum`, Pants will use the value in `[test].timeout_maximum`.
+
+:::note Tip: temporarily ignoring timeouts
+When debugging locally, such as with `pdb`, you might want to temporarily disable timeouts. To do this, set `--no-test-timeouts`:
+
+```bash
+$ pants test project/app_test.py --no-test-timeouts
+```
+
+:::
+
+## Retries
+
+Pants can automatically retry failed tests. This can help keep your builds passing even with flaky tests, like integration tests.
+
+```toml tab={"label":"pants.toml"}
+[test]
+attempts_default = 3
+```
+
+## Test utilities and resources
+
+### Test utilities
+
+Use the target type `python_source` for test utilities, rather than `python_test`.
+
+To reduce boilerplate, you can use either the [`python_sources`](../../../reference/targets/python_sources.mdx) or [`python_test_utils`](../../../reference/targets/python_test_utils.mdx) targets to generate `python_source` targets. These behave the same, except that `python_test_utils` has a different default `sources` to include `conftest.py` and type stubs for tests (like `test_foo.pyi`). Use [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) to generate both these targets automatically.
+
+For example:
+
+```python tab={"label":"helloworld/BUILD"}
+# The default `sources` includes all files other than
+# `*_test.py`, `test_*.py`, `tests.py`, and `conftest.py`.
+python_sources(name="lib")
+
+# We leave off the `dependencies` field because Pants will infer
+# it based on import statements.
+python_tests(name="tests")
+```
+
+```python tab={"label":"helloworld/testutils.py"}
+...
+
+@contextmanager
+def setup_tmpdir(files: Mapping[str, str]) -> Iterator[str]:
+    with temporary_dir() as tmpdir:
+        ...
+        yield rel_tmpdir
+```
+
+```python tab={"label":"helloworld/app_test.py"}
+from helloworld.testutils import setup_tmpdir
+
+def test_app() -> None:
+    with setup_tmpdir({"f.py": "print('hello')"}):
+        assert ...
+```
+
+### Assets
+
+Refer to [Assets](../../using-pants/assets-and-archives.mdx) for how to include asset files in your tests by adding to the `dependencies` field.
+
+It's often most convenient to use `file` / `files` and `relocated_files` targets in your test code, although you can also use `resource` / `resources` targets.
+
+## Testing your packaging pipeline
+
+You can include the result of `pants package` in your test through the `runtime_package_dependencies` field. Pants will run the equivalent of `pants package` beforehand and copy the built artifact into the test's chroot, allowing you to test things like that the artifact has the correct files present and that it's executable.
+
+This allows you to test your packaging pipeline by simply running `pants test ::`, without needing custom integration test scripts.
+
+To depend on a built package, use the `runtime_package_dependencies` field on the `python_test` / `python_tests` target, which is a list of addresses to targets that can be built with `pants package`, such as `pex_binary`, `python_aws_lambda_function`, and `archive` targets. 
Pants will build the package before running your test, and insert the file into the test's chroot. It will use the same name it would normally use with `pants package`, except without the `dist/` prefix (set by the `output_path` field). + +For example: + +```python tab={"label":"helloworld/BUILD"} +# This target teaches Pants about our non-test Python files. +python_sources(name="lib") + +pex_binary( + name="bin", + entry_point="say_hello.py", +) + +python_tests( + name="tests", + runtime_package_dependencies=[":bin"], +) +``` + +```python tab={"label":"helloworld/say_hello.py"} +print("Hello, test!") +``` + +```python tab={"label":"helloworld/test_binary.py"} +import subprocess + +def test_say_hello(): + assert b"Hello, test!" in subprocess.check_output(['helloworld/bin.pex']) +``` + +## Coverage + +To report coverage using [`Coverage.py`](https://coverage.readthedocs.io/en/coverage-5.1/), set the option `--test-use-coverage`: + +```bash +❯ pants test --use-coverage helloworld/util/lang_test.py +``` + +Or to permanently use coverage, set in your config file: + +```toml title="pants.ci.toml" +[test] +use_coverage = true +``` + +:::caution Failure to parse files? +Coverage defaults to running with Python 3.6+ when generating a report, which means it may fail to parse Python 2 syntax and Python 3.8+ syntax. You can fix this by changing the interpreter constraints for running Coverage: + +```toml +# pants.toml +[coverage-py] +interpreter_constraints = [">=3.8"] +``` + +However, if your repository has some Python 2-only code and some Python 3-only code, you will not be able to choose an interpreter that works with both versions. So, you will need to set up a `.coveragerc` config file and set `ignore_errors = true` under `[report]`, like this: + +``` +# .coveragerc +[report] +ignore_errors = true +``` + +`ignore_errors = true` means that those files will simply be left off of the final coverage report. + +(Pants should autodiscover the config file `.coveragerc`. See [coverage-py](../../../reference/subsystems/coverage-py.mdx#config-discovery).) + +There's a proposal for Pants to fix this by generating multiple reports when necessary: [https://github.com/pantsbuild/pants/issues/11137](https://github.com/pantsbuild/pants/issues/11137). We'd appreciate your feedback. +::: + +Coverage will report data on any files encountered during the tests. You can filter down the results by using the option `--coverage-py-filter` and passing the name(s) of modules you want coverage data for. Each module name is recursive, meaning submodules will be included. For example: + +```bash +❯ pants test --use-coverage helloworld/util/lang_test.py --coverage-py-filter=helloworld.util +❯ pants test --use-coverage helloworld/util/lang_test.py --coverage-py-filter='["helloworld.util.lang", "helloworld.util.lang_test"]' +``` + +:::note Set `global_report` to include un-encountered files +By default, coverage.py will only report on files encountered during the tests' run. This means +that your coverage score may be misleading; even with a score of 100%, you may have files +without any tests. + +Instead, you can set `global_report = true`: + +```toml title="pants.toml" +[coverage-py] +global_report = true +``` + +Coverage.py will report on [all files it considers importable](https://coverage.readthedocs.io/en/6.3.2/source.html), +i.e. files at the root of the tree, or in directories with a `__init__.py` file. 
It may still omit
+files in [implicit namespace packages](https://peps.python.org/pep-0420/) that lack `__init__.py` files.
+This is a shortcoming of Coverage.py itself.
+:::
+
+Pants will default to writing the results to the console, but you can also output in HTML, XML, JSON, or the raw SQLite file:
+
+```toml title="pants.toml"
+[coverage-py]
+report = ["raw", "xml", "html", "json", "console"]
+```
+
+You can change the output dir with the `output_dir` option in the `[coverage-py]` scope.
+
+You may want to set `[coverage-py].fail_under` to cause Pants to gracefully fail if coverage is too low, e.g. `fail_under = 70`.
+
+You may use a Coverage config file, e.g. `.coveragerc` or `pyproject.toml`. Pants will autodiscover the config file for you, and you can also set `[coverage-py].config` in your `pants.toml` to point to a non-standard location.
+
+:::note You must include `relative_files = true` in the `[run]` section for Pants to work:
+
+```toml title=".coveragerc"
+[run]
+relative_files = true
+branch = true
+```
+
+:::
+
+When generating HTML, XML, and JSON reports, you can automatically open the reports through the option `--test-open-coverage`.
+
+## JUnit XML results
+
+Pytest can generate [JUnit XML result files](https://docs.pytest.org/en/6.2.x/usage.html#creating-junitxml-format-files). This allows you to hook up your results, for example, to dashboards.
+
+To save JUnit XML result files, set the option `[test].report`, like this:
+
+```toml title="pants.toml"
+[test]
+report = true
+```
+
+This will default to writing test reports to `dist/test/reports`. You may also want to set the option `[pytest].junit_family` to change the format. Run `pants help-advanced pytest` for more information.
+
+## Customizing Pytest command line options per target
+
+You can set the `PYTEST_ADDOPTS` environment variable to add your own command line options, like this:
+
+```python title="BUILD"
+python_tests(
+    name="tests",
+    ...
+    extra_env_vars=[
+        "PYTEST_ADDOPTS=-p myplugin --reuse-db",
+    ],
+    ...
+)
+```
+
+Take note that Pants uses some CLI args for its internal mechanism of controlling Pytest (`--color`, `--junit-xml`, `junit_family`, `--cov`, `--cov-report` and `--cov-config`). If these options are overridden, Pants Pytest handling may not work correctly. Set these at your own peril!
+
+## Failures to collect tests
+
+`pytest` follows [certain conventions for test discovery](https://docs.pytest.org/en/7.1.x/explanation/goodpractices.html#conventions-for-python-test-discovery), so if no (or only some) tests are run, it may be worth reviewing the documentation. Pants can help you find test modules that would not be collected by `pytest`. For instance, the `pants tailor --check ::` command would suggest creating targets for files that are not covered by glob expressions in your `BUILD` files (e.g. if a test module has a typo and is named `tes_connection.py`). You can also run the `pants --filter-target-type=python_test filedeps ::` command to list all test files known to Pants and compare the output with the list of files that exist on disk.
+
+If your tests fail to import the source modules, it may be due to the import mode used by `pytest`, especially if you are using [namespace packages](https://packaging.python.org/en/latest/guides/packaging-namespace-packages/). 
Please review [Choosing an import mode](https://docs.pytest.org/en/7.1.x/explanation/goodpractices.html#choosing-an-import-mode) and [pytest import mechanisms and sys.path/PYTHONPATH](https://docs.pytest.org/en/7.1.x/explanation/pythonpath.html#import-modes) to learn more.
diff --git a/versioned_docs/version-2.24/docs/python/integrations/_category_.json b/versioned_docs/version-2.24/docs/python/integrations/_category_.json
new file mode 100644
index 000000000..cefe73a6c
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/integrations/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Integrations",
+  "position": 3
+}
diff --git a/versioned_docs/version-2.24/docs/python/integrations/aws-lambda.mdx b/versioned_docs/version-2.24/docs/python/integrations/aws-lambda.mdx
new file mode 100644
index 000000000..0c0f7e7a8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/integrations/aws-lambda.mdx
@@ -0,0 +1,283 @@
+---
+ title: AWS Lambda
+ sidebar_position: 2
+---
+
+Create a Lambda with Python code.
+
+---
+
+Pants can create a Lambda-compatible zip file or directory from your Python code, allowing you to develop your Lambda functions and layers in your repository instead of using the online Cloud9 editor.
+
+:::note FYI: how Pants does this
+Under the hood, Pants uses the [PEX](https://github.com/pex-tool/pex) project to select the appropriate third-party requirements and first-party sources and lay them out in a zip file or directory, in the format recommended by AWS.
+:::
+
+## Step 1: Activate the Python AWS Lambda backend
+
+Add this to your `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.awslambda.python",
+  "pants.backend.python",
+]
+```
+
+This adds the new `python_aws_lambda_function` target, which you can confirm by running `pants help python_aws_lambda_function`.
+
+## Step 2: Define a `python_aws_lambda_function` target
+
+First, add your lambda function in a Python file like you would [normally do with AWS Lambda](https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html). Specifically, create a function `def my_handler_name(event, context)` with the name you want.
+
+Then, in your BUILD file, make sure that you have a `python_source` or `python_sources` target with the handler file included in the `sources` field. You can use [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) to automate this.
+
+Add a `python_aws_lambda_function` target and define the `handler` field. The `handler` has the form `handler_file.py:handler_func`, which Pants will convert into a well-formed entry point. Alternatively, you can set `handler` to the format `path.to.module:handler_func`.
+
+For example:
+
+```python tab={"label":"project/BUILD"}
+# The default `sources` field will include our handler file.
+python_sources(name="lib")
+
+python_aws_lambda_function(
+    name="lambda",
+    # Pants will convert this to `project.lambda_example:example_handler`.
+    handler="lambda_example.py:example_handler",
+)
+```
+
+```python tab={"label":"project/lambda_example.py"}
+def example_handler(event, context):
+    print("Hello AWS!")
+```
+
+Pants will use [dependency inference](../../using-pants/key-concepts/targets-and-build-files.mdx) based on the `handler` field, which you can confirm by running `pants dependencies path/to:lambda`. You can also manually add to the `dependencies` field.
+
+You can optionally set the `output_path` field to change the generated zip file's path.
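+
+For example, a minimal sketch (the `output_path` value here is arbitrary, and is interpreted relative to the `dist/` directory):
+
+```python title="project/BUILD"
+python_aws_lambda_function(
+    name="lambda",
+    handler="lambda_example.py:example_handler",
+    # Write the artifact to dist/deploy/lambda.zip instead of the
+    # default dist/project/lambda.zip.
+    output_path="deploy/lambda.zip",
+)
+```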
+
+:::tip Using layout
+Use [layout](../../../reference/targets/python_aws_lambda_function.mdx#layout) to determine whether to build a `.zip` file or a directory.
+:::
+
+:::caution Use `resource` instead of `file`
+`file` / `files` targets will not be included in the built AWS Lambda artifacts because filesystem APIs like `open()` would not load them as expected. Instead, use the `resource` and `resources` targets. See [Assets and archives](../../using-pants/assets-and-archives.mdx) for further explanation.
+:::
+
+### Specifying a runtime explicitly
+
+When building a Lambda artifact, Pants and the underlying Pex tool need to know details about the target runtime to be able to choose appropriate artifacts for third-party dependencies that have native code. These details can be inferred or provided in three ways, from highest precedence to lowest precedence:
+
+1. An explicit value for [the `complete_platforms` field](../../../reference/targets/python_aws_lambda_function.mdx#complete_platforms). The "complete platforms" are the underlying source of truth.
+   ```python title="BUILD"
+   file(name="lambda-platform", source="lambda-platform.json")
+
+   python_aws_lambda_function(
+       name="lambda",
+       handler="lambda_example.py:example_handler",
+       # Explicit complete platforms:
+       complete_platforms=[":lambda-platform"],
+   )
+   ```
+
+   You can generate the `complete_platforms` file for a specific Lambda runtime by deploying and invoking a function in that runtime and capturing its output. For Lambda, a handler to generate the file might look like this:
+
+   ```python
+   import subprocess
+
+   def lambda_handler(event, context):
+       subprocess.run("pip install --target=/tmp/pex pex", shell=True)
+       result = subprocess.run(
+           "PYTHONPATH=/tmp/pex /tmp/pex/bin/pex3 interpreter inspect --markers --tags --indent=2",
+           shell=True, capture_output=True, text=True
+       )
+       return {
+           "statusCode": 200,
+           "body": result.stdout,
+       }
+   ```
+
+   Deploy and invoke the function, then retrieve the output to use as your `complete_platforms` file.
+2. An explicit value for [the `runtime` field](../../../reference/targets/python_aws_lambda_function.mdx#runtime) and, optionally, [the `architecture` field](../../../reference/targets/python_aws_lambda_function.mdx#architecture): Pants uses these to pick an appropriate "complete platforms" value, from options that Pants has pre-packaged. These are static exports from Docker images provided by AWS, relying on the environment being relatively stable. (If Pants doesn't have an appropriate "complete platforms" default built-in, you will be prompted to use option 1 above.)
+   ```python title="BUILD"
+   python_aws_lambda_function(
+       name="lambda",
+       handler="lambda_example.py:example_handler",
+       # Explicit runtime, `complete_platforms` taken from Pants' built-in defaults:
+       runtime="python3.12",
+       # Override the default x86_64 architecture:
+       architecture="arm64",
+   )
+   ```
3. Inferred from [the relevant interpreter constraints](../overview/interpreter-compatibility.mdx): the interpreter constraints may unambiguously imply a value for the `runtime` and thus `complete_platforms` fields. For example, `interpreter_constraints = ["==3.12.*"]` implies `runtime="python3.12"`. This only works with interpreter constraints that cover all patch versions of a given minor release series: `>=3.11,<3.13` is too wide (it covers both 3.11 and 3.12), while `==3.12.0` is too specific (AWS's `python3.12` runtime may not use that exact patch version).
As with option 2, the architecture is `x86_64` by default, but can be changed using the `architecture` field.
+
+   ```toml tab={"label":"pants.toml"}
+   [python]
+   interpreter_constraints = ["==3.12.*"]
+   ```
+   ```python tab={"label":"project/BUILD"}
+   python_aws_lambda_function(
+       name="lambda",
+       handler="lambda_example.py:example_handler",
+       # `runtime` inferred and `complete_platforms` from built-in defaults,
+       # `architecture` defaults to x86_64, but can be overridden.
+   )
+   ```
+
+This guide is written using the last option, with the default `x86_64` architecture, but you can add `runtime`, `complete_platforms`, and/or `architecture` to any examples using the `python_aws_lambda_function` or `python_aws_lambda_layer` targets.
+
+## Step 3: Run `package`
+
+Now run `pants package` on your `python_aws_lambda_function` target to create a zip file.
+
+For example:
+
+```bash
+$ pants package project/:lambda
+Wrote dist/project/lambda.zip
+  Handler: lambda_function.handler
+```
+
+:::caution Running from macOS and failing to build?
+AWS Lambda functions must run on Linux, so Pants tells PEX and Pip to build for Linux when resolving your third-party dependencies. This means that you can only use pre-built [wheels](https://packaging.python.org/glossary/#term-wheel) (bdists). If your project requires any source distributions ([sdists](https://packaging.python.org/glossary/#term-source-distribution-or-sdist)) that must be built locally, PEX and pip will fail to run.
+
+If this happens, you must either change your dependencies to only use dependencies with pre-built [wheels](https://pythonwheels.com) or find a Linux environment to run `pants package`.
+:::
+
+:::caution "Encountering collisions" errors and failing to build?
+If a build fails with an error like `Encountered collisions populating ... from PEX at faas_repository.pex:`, listing one or more files with different `sha1` hashes, this likely means your dependencies package files in unexpected locations, outside their "scoped" directory (for instance, a package `example-pkg` typically only includes files within `example_pkg/` and `example_pkg-*.dist-info/` directories). When multiple dependencies do this, those files can have exactly matching file paths but different contents, and so it is impossible to create a Lambda artifact: which of the files should be installed and which should be ignored? Resolving this requires human intervention to understand whether any of those files are important, and hence PEX emits an error rather than making an (arbitrary) choice that may result in confusing and/or broken behaviour at runtime.
+
+Most commonly this seems to happen with metadata like a README or LICENSE file, or test files (in a `tests/` subdirectory), which are likely not important at runtime. In these cases, the collision can be worked around by adding [a `pex3_venv_create_extra_args=["--collisions-ok"]` field](../../../reference/targets/python_aws_lambda_function.mdx#pex3_venv_create_extra_args) to the `python_aws_lambda_...` targets.
+
+A better solution is to work with the dependencies to stop them from packaging files outside their scoped directories.
+:::
+
+## Step 4: Upload to AWS
+
+You can use any of the various AWS methods to upload your zip file, such as the AWS console or the AWS CLI via `aws lambda create-function` and `aws lambda update-function-code`.
+
+You can specify the AWS Lambda handler as `lambda_function.handler`. This is a re-export of the function referred to by the `handler` field of the target.
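+
+For example, an initial deploy via the AWS CLI might look like this sketch (the function name and role ARN are placeholders to replace with your own):
+
+```bash
+aws lambda create-function \
+  --function-name my-function \
+  --runtime python3.12 \
+  --handler lambda_function.handler \
+  --role arn:aws:iam::123456789012:role/my-lambda-role \
+  --zip-file fileb://dist/project/lambda.zip
+
+# For subsequent deploys of the same function:
+aws lambda update-function-code \
+  --function-name my-function \
+  --zip-file fileb://dist/project/lambda.zip
+```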
+
+## Docker Integration
+
+To [deploy a Python lambda function with container images](https://docs.aws.amazon.com/lambda/latest/dg/python-image.html), you can use Pants's [Docker](../../docker/index.mdx) support.
+
+For example:
+
+```dockerfile tab={"label":"project/Dockerfile"}
+FROM public.ecr.aws/lambda/python:3.8
+
+RUN yum install unzip -y
+COPY project/lambda.zip .
+RUN unzip lambda.zip -d "${LAMBDA_TASK_ROOT}"
+CMD ["lambda_function.handler"]
+```
+
+```python tab={"label":"project/BUILD"}
+python_sources()
+
+python_aws_lambda_function(
+    name="lambda",
+    handler="main.py:lambda_handler",
+)
+
+docker_image(
+    name="my_image",
+    dependencies=[":lambda"],
+)
+```
+
+Then, use `pants package project:my_image`, for example. Pants will first build your AWS Lambda function, and then build the Docker image, copying the function's zip file into it.
+
+## Building a Lambda Layer
+
+[AWS Lambda layers](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-concepts.html#gettingstarted-concepts-layer) allow including additional code in the execution environment of a Lambda function, without having to include that code in the function package. Using a layer can allow for including more code in a single function, sharing common dependencies across several functions, and may even give faster builds and deploys.
+
+Pants uses the `python_aws_lambda_layer` target to build AWS Lambda layers. The contents of the layer must be specified in the `dependencies` field, and Pants will pull in all of the code those dependencies imply (transitively) as usual, including any exclusions via `!` and `!!`. The `include_sources` and `include_requirements` fields provide additional control over the contents of the layer.
+
+For example, one use of layers is splitting the deployment package for a Lambda function into:
+
+1. a function artifact with only the code in your repository (first-party sources)
+2. a layer artifact with the third-party requirements that the function imports
+
+This split means making a change to first-party sources only requires rebuilding and re-deploying the function artifact. Since this artifact doesn't need to include all of the third-party requirements, rebuilding is likely to be much faster and the resulting package will be smaller. The layer will only need to be rebuilt and redeployed if the third-party dependencies change, like a version upgrade or an additional `import`.
+
+```python tab={"label":"project/BUILD"}
+python_sources(name="lib")
+
+python_aws_lambda_function(
+    name="function",
+    handler="lambda_example.py:example_handler",
+    # only include the sources, the boto3 requirement is packaged in `:layer`
+    include_requirements=False,
+)
+
+python_aws_lambda_layer(
+    name="layer",
+    # specify the handler file, and Pants will automatically find its transitive dependencies
+    dependencies=["./lambda_example.py"],
+    # only include the boto3 requirement, any sources are packaged in `:function`
+    include_sources=False,
+)
+```
+
+```python tab={"label":"project/lambda_example.py"}
+from . import library_code
+
+def example_handler(event, context):
+    library_code.say_hi()
+```
+
+```python tab={"label":"project/library_code.py"}
+# an example dependency
+import boto3
+
+def say_hi():
+    print("Hello AWS!")
+```
+
+Run `pants package project:layer project:function` to produce two zip files:
+
+- `dist/project/layer.zip`: this must be published as a layer in AWS, such as through the console or using the CLI (`aws lambda publish-layer-version`).
+- `dist/project/function.zip`: as [above](#step-4-upload-to-aws), this can be uploaded to AWS in various ways and the handler can be set to `lambda_function.handler`. The function will need to specify that it uses the layer created above.
+
+## Advanced: Using PEX directly
+
+In the rare case where you need access to PEX features, such as dynamic selection of dependencies, a PEX file created by `pex_binary` can be used as a Lambda function package directly. A PEX file is a carefully constructed zip file, and can be understood natively by AWS. Note: using `pex_binary` results in larger packages and slower cold starts, and is likely to be less convenient than using `python_aws_lambda_function`.
+
+The handler of a `pex_binary` is not re-exported at the fixed `lambda_function.handler` path, and the Lambda function handler must be configured as the `__pex__` pseudo-package followed by the handler's normal module path (for instance, if the handler is called `func` in `some/module/path.py` within [a source root](../../using-pants/key-concepts/source-roots.mdx), then use `__pex__.some.module.path.func`). The `__pex__` pseudo-package ensures dependencies are initialized before running any of your code.
+
+For example:
+
+```python tab={"label":"project/BUILD"}
+python_sources()
+
+pex_binary(
+    name="lambda",
+    entry_point="lambda_example.py",
+    # specify an appropriate platform for the targeted Lambda runtime:
+    complete_platforms=["path/to:platform-json-target"],
+)
+```
+
+```python tab={"label":"project/lambda_example.py"}
+def example_handler(event, context):
+    print("Hello AWS!")
+```
+
+Then, use `pants package project:lambda`, and upload the resulting `project/lambda.pex` to AWS. The handler will need to be configured in AWS as `__pex__.lambda_example.example_handler` (assuming `project` is a [source root](../../using-pants/key-concepts/source-roots.mdx)).
+
+## Migrating from Pants 2.16 and earlier
+
+Pants implemented a new way to package Lambda functions in 2.17, which became the only option in 2.19, resulting in smaller packages and faster cold starts. This involved some changes:
+
+- In Pants 2.16 and earlier, Pants used the [Lambdex](https://github.com/pantsbuild/lambdex) project. First, Pants would convert your code into a [Pex file](../overview/pex.mdx) and then use Lambdex to adapt this to be better understood by AWS by adding a shim handler at the path `lambdex_handler.handler`. This shim handler first triggers the Pex initialization to choose and unzip dependencies, during the "INIT" phase.
+- In Pants 2.17, the use of Lambdex was deprecated, in favour of choosing the appropriate dependencies ahead of time, as described above, without needing to do this on each cold start. This results in a zip file laid out in the format recommended by AWS, and includes a re-export of the handler at the path `lambda_function.handler`.
+- In Pants 2.18, the new behaviour became the default behaviour. Layers can now be built using Pants, and this addition includes renaming the `python_awslambda` target to `python_aws_lambda_function`.
+- In Pants 2.19 and later, the old Lambdex behaviour has been entirely removed.
+
+If your code can be packaged without warnings using Pants 2.18, no change is required when upgrading to Pants 2.19 (except removing the `[lambdex]` section in `pants.toml` if that still remains). If not, [follow its instructions](/2.18/docs/python/integrations/aws-lambda#migrating-from-pants-216-and-earlier) to upgrade to Pants 2.18 fully first, and upgrade to Pants 2.19 after that.
+
+If you encounter a bug with the new behaviour, [please let us know](https://github.com/pantsbuild/pants/issues/new/choose). If you require advanced PEX features, [switch to using `pex_binary` directly](#advanced-using-pex-directly).
diff --git a/versioned_docs/version-2.24/docs/python/integrations/google-cloud-functions.mdx b/versioned_docs/version-2.24/docs/python/integrations/google-cloud-functions.mdx
new file mode 100644
index 000000000..be7c314c8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/integrations/google-cloud-functions.mdx
@@ -0,0 +1,214 @@
+---
+  title: Google Cloud Functions
+  sidebar_position: 3
+---
+
+Create a Cloud Function with Python.
+
+---
+
+Pants can create a Google Cloud Function-compatible zip file or directory from your Python code, allowing you to develop your functions in your repository.
+
+:::note FYI: how Pants does this
+Under the hood, Pants uses the [PEX](https://github.com/pex-tool/pex) project to select the appropriate third-party requirements and first-party sources and lay them out in a zip file or directory, in the format recommended by Google Cloud Functions.
+:::
+
+## Step 1: Activate the Python Google Cloud Function backend
+
+Add this to your `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.google_cloud_function.python",
+  "pants.backend.python",
+]
+```
+
+This adds the new `python_google_cloud_function` target, which you can confirm by running `pants help python_google_cloud_function`.
+
+## Step 2: Define a `python_google_cloud_function` target
+
+First, add your Cloud Function in a Python file like you would [normally do with Google Cloud Functions](https://cloud.google.com/functions/docs/first-python), such as creating a function `def my_handler_name(event, context)` for event-based functions.
+
+Then, in your BUILD file, make sure that you have a `python_source` or `python_sources` target with the handler file included in the `sources` field. You can use [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) to automate this.
+
+Add a `python_google_cloud_function` target and define the `handler` and `type` fields. The `type` should be either `"event"` or `"http"`. The `handler` has the form `handler_file.py:handler_func`, which Pants will convert into a well-formed entry point. Alternatively, you can set `handler` to the format `path.to.module:handler_func`.
+
+For example:
+
+```python tab={"label":"project/BUILD"}
+# The default `sources` field will include our handler file.
+python_sources(name="lib")
+
+python_google_cloud_function(
+    name="cloud_function",
+    # Pants will convert this to `project.google_cloud_function_example:example_handler`.
+    handler="google_cloud_function_example.py:example_handler",
+    type="event",
+)
+```
+
+```python tab={"label":"project/google_cloud_function_example.py"}
+def example_handler(event, context):
+    print("Hello Google Cloud Function!")
+```
+
+Pants will use [dependency inference](../../using-pants/key-concepts/targets-and-build-files.mdx) based on the `handler` field, which you can confirm by running `pants dependencies path/to:cloud_function`. You can also manually add to the `dependencies` field.
+
+You can optionally set the `output_path` field to change the generated zip file's path.
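+
+As a variation on the example above, an HTTP-triggered function only changes the `type` field; a sketch:
+
+```python title="project/BUILD"
+python_google_cloud_function(
+    name="http_function",
+    handler="google_cloud_function_example.py:example_handler",
+    # HTTP functions receive a single `request` argument rather than (event, context).
+    type="http",
+)
+```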
+
+:::tip Using layout
+Use [layout](../../../reference/targets/python_google_cloud_function.mdx#layout) to determine whether to build a `.zip` file or a directory.
+:::
+
+:::caution Use `resource` instead of `file`
+`file` / `files` targets will not be included in the built Cloud Function because filesystem APIs like `open()` would not load them as expected. Instead, use the `resource` / `resources` targets. See [Assets and archives](../../using-pants/assets-and-archives.mdx) for further explanation.
+:::
+
+### Specifying a runtime explicitly
+
+When building a Cloud Function artifact, Pants and the underlying Pex tool need to know details about the target runtime to be able to choose appropriate artifacts for third-party dependencies that have native code. These details can be inferred or provided in three ways, from highest precedence to lowest precedence:
+
+1. An explicit value for [the `complete_platforms` field](../../../reference/targets/python_google_cloud_function.mdx#complete_platforms). The "complete platforms" are the underlying source of truth.
+   ```python title="BUILD"
+   file(name="gcf-platform", source="gcf-platform.json")
+
+   python_google_cloud_function(
+       name="cloud_function",
+       handler="google_cloud_function_example.py:example_handler",
+       type="event",
+       # Explicit complete platforms:
+       complete_platforms=[":gcf-platform"],
+   )
+   ```
+
+   If needed, this file can be generated for a specific Cloud Function runtime using the function below:
+
+   ```python title="gcf-complete-platform-generator.py"
+   import subprocess
+
+   import functions_framework
+
+   @functions_framework.http
+   def generate_pex_complete_platforms(request):
+       subprocess.run(
+           "python -m pip install --target=/tmp/pex pex",
+           shell=True,
+           check=True,
+       )
+       result = subprocess.run(
+           "PYTHONPATH=/tmp/pex /tmp/pex/bin/pex3 interpreter inspect --markers --tags",
+           shell=True,
+           capture_output=True,
+           text=True,
+       )
+       return result.stdout
+   ```
+
+   If you run this function in the Cloud Function testing environment, it will print out a formatted JSON object to the console. You can then copy this JSON object and add it to a file named `gcf-platform.json`.
+
+2. An explicit value for [the `runtime` field](../../../reference/targets/python_google_cloud_function.mdx#runtime): Pants uses this to pick an appropriate "complete platforms" value, from options that Pants has pre-packaged. These are static exports from Docker images provided by GCP, relying on the environment being relatively stable. (If Pants doesn't have an appropriate "complete platforms" default built-in, you will be prompted to use option 1 above.)
+   ```python title="BUILD"
+   python_google_cloud_function(
+       name="cloud_function",
+       handler="google_cloud_function_example.py:example_handler",
+       type="event",
+       # Explicit runtime, `complete_platforms` taken from Pants' built-in defaults:
+       runtime="python312",
+   )
+   ```
3. Inferred from [the relevant interpreter constraints](../overview/interpreter-compatibility.mdx): the interpreter constraints may unambiguously imply a value for the `runtime` and thus `complete_platforms` fields. For example, `interpreter_constraints = ["==3.12.*"]` implies `runtime="python312"`. This only works with interpreter constraints that cover all patch versions of a given minor release series: `>=3.11,<3.13` is too wide (it covers both 3.11 and 3.12), while `==3.12.0` is too specific (GCF's `python312` runtime may not use that exact patch version).
+
+   ```toml tab={"label":"pants.toml"}
+   [python]
+   interpreter_constraints = ["==3.12.*"]
+   ```
+   ```python tab={"label":"project/BUILD"}
+   python_google_cloud_function(
+       name="cloud_function",
+       handler="google_cloud_function_example.py:example_handler",
+       type="event",
+       # `runtime` inferred and `complete_platforms` from built-in defaults
+   )
+   ```
+
+This guide is written using the last option, but you can add `runtime` or `complete_platforms` to any examples using the `python_google_cloud_function` target.
+
+## Step 3: Run `package`
+
+Now run `pants package` on your `python_google_cloud_function` target to create a zip file.
+
+For example:
+
+```bash
+$ pants package project/:cloud_function
+Wrote dist/project/cloud_function.zip
+  Handler: handler
+```
+
+:::caution Running from macOS and failing to build?
+Cloud Functions must run on Linux, so Pants tells PEX and Pip to build for Linux when resolving your third-party dependencies. This means that you can only use pre-built [wheels](https://packaging.python.org/glossary/#term-wheel) (bdists). If your project requires any source distributions ([sdists](https://packaging.python.org/glossary/#term-source-distribution-or-sdist)) that must be built locally, PEX and pip will fail to run.
+
+If this happens, you must either change your dependencies to only use dependencies with pre-built [wheels](https://pythonwheels.com) or find a Linux environment to run `pants package`.
+:::
+
+:::caution "Encountering collisions" errors and failing to build?
+If a build fails with an error like `Encountered collisions populating ... from PEX at faas_repository.pex:`, listing one or more files with different `sha1` hashes, this likely means your dependencies package files in unexpected locations, outside their "scoped" directory (for instance, a package `example-pkg` typically only includes files within `example_pkg/` and `example_pkg-*.dist-info/` directories). When multiple dependencies do this, those files can have exactly matching file paths but different contents, and so it is impossible to create a GCF artifact: which of the files should be installed and which should be ignored? Resolving this requires human intervention to understand whether any of those files are important, and hence PEX emits an error rather than making an (arbitrary) choice that may result in confusing and/or broken behaviour at runtime.
+
+Most commonly this seems to happen with metadata like a README or LICENSE file, or test files (in a `tests/` subdirectory), which are likely not important at runtime. In these cases, the collision can be worked around by adding [a `pex3_venv_create_extra_args=["--collisions-ok"]` field](../../../reference/targets/python_google_cloud_function.mdx#pex3_venv_create_extra_args) to the `python_google_cloud_function` target.
+
+A better solution is to work with the dependencies to stop them from packaging files outside their scoped directories.
+:::
+
+## Step 4: Upload to Google Cloud
+
+You can use any of the various Google Cloud methods to upload your zip file or directory, such as the Google Cloud console or the [Google Cloud CLI](https://cloud.google.com/functions/docs/deploying/filesystem#deploy_using_the_gcloud_tool).
+
+You must specify the `--entry-point` as `handler`. This is a re-export of the function referred to by the `handler` field of the target.
+ +For example, if using `layout="flat"`: + +``` +gcloud functions deploy --source=dist/project/cloud_function --entry-point=handler --trigger-topic= --runtime=python38 +``` + +## Advanced: Using PEX directly + +In the rare case where you need access to PEX features, such as dynamic selection of dependencies, a PEX file created by `pex_binary` can be used as a Google Cloud Function package directly. A PEX file is a carefully constructed zip file, and can be understood natively by Google Cloud Functions. Note: using `pex_binary` results in larger packages and slower cold starts and is likely to be less convenient than using `python_google_cloud_function`. + +The handler of a `pex_binary` is not re-exported at the fixed `main.handler` path, and the Google Cloud Function handler must be configured as the `__pex__` pseudo-package followed by the handler's normal module path (for instance, if the handler is in `some/module/path.py` within [a source root](../../using-pants/key-concepts/source-roots.mdx), then use `__pex__.some.module.path`). This may require being configured via [`GOOGLE_FUNCTION_SOURCE`](https://cloud.google.com/docs/buildpacks/service-specific-configs#google_function_source). The `__pex__` pseudo-package ensures dependencies are initialized before running any of your code. + +For example: + +```python tab={"label":"project/BUILD"} +python_sources() + +pex_binary( + name="gcf", + entry_point="gcf_example.py", + # specify an appropriate platform for the targeted GCF runtime: + complete_platforms=["path/to:platform-json-target"], +) +``` + +```python tab={"label":"project/gcf_example.py"} +def example_handler(event, context): + print("Hello GCF!") +``` + +Then, use `pants package project:gcf`, and upload the resulting `project/gcf.pex` to Google Cloud Functions. You will need to specify the handler as `example_handler` and set `GOOGLE_FUNCTION_SOURCE=__pex__.gcf_example` (assuming `project` is a [source root](../../using-pants/key-concepts/source-roots.mdx)). + +## Migrating from Pants 2.16 and earlier + +Pants implemented a new way to package Google Cloud Functions in 2.17, which became the only option in 2.19, resulting in smaller packages and faster cold starts. This involves some changes: + +- In Pants 2.16 and earlier, Pants used the [Lambdex](https://github.com/pantsbuild/lambdex) project. First, Pants would convert your code into a [Pex file](../overview/pex.mdx) and then use Lambdex to adapt this to be better understood by GCF by adding a shim handler. This shim handler first triggers the Pex initialization to choose and unzip dependencies, during initialization. +- In Pants 2.17, the use of Lambdex was deprecated, in favour of choosing the appropriate dependencies ahead of time, as described above, without needing to do this on each cold start. This results in a zip file laid out in the format recommended by GCF, and includes a re-export of the handler. +- In Pants 2.18, the new behaviour is now the default behaviour. +- In Pants 2.19 and later, the old Lambdex behaviour has been entirely removed. + +If your code can be packaged without warnings using Pants 2.18, no change is required when upgrading to Pants 2.19 (except removing the `[lambdex]` section in `pants.toml` if that still remains). If not, [follow its instructions](/v2.18/docs/google-cloud-function-python#migrating-from-pants-216-and-earlier) to upgrade to Pants 2.18 fully first, and upgrade to Pants 2.19 after that. 
+ +If you encounter a bug with the new behaviour, [please let us know](https://github.com/pantsbuild/pants/issues/new/choose). If you require advanced PEX features, [switch to using `pex_binary` directly](#advanced-using-pex-directly). diff --git a/versioned_docs/version-2.24/docs/python/integrations/index.mdx b/versioned_docs/version-2.24/docs/python/integrations/index.mdx new file mode 100644 index 000000000..dd58eefd3 --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/integrations/index.mdx @@ -0,0 +1,15 @@ +--- + title: Integrations + sidebar_position: 2 +--- + +Useful integrations for Python. + +--- + +- [Protobuf and gRPC](./protobuf-and-grpc.mdx) +- [Thrift](./thrift.mdx) +- [AWS Lambda](./aws-lambda.mdx) +- [Google Cloud Functions](./google-cloud-functions.mdx) +- [PyOxidizer](./pyoxidizer.mdx) +- [Jupyter](./jupyter.mdx) diff --git a/versioned_docs/version-2.24/docs/python/integrations/jupyter.mdx b/versioned_docs/version-2.24/docs/python/integrations/jupyter.mdx new file mode 100644 index 000000000..ea173686a --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/integrations/jupyter.mdx @@ -0,0 +1,31 @@ +--- + title: Jupyter + sidebar_position: 5 +--- + +A Jupyter plugin to load Pants targets into Jupyter Notebooks. + +--- + +The [pants-jupyter-plugin](https://github.com/pantsbuild/pants-jupyter-plugin/) project provides a Jupyter plugin that can be used to load Pants targets directly into a notebook. + +## Installation + +Jupyter plugins are typically installed using `pip` directly alongside Jupyter (Lab) itself. + +If you don't already have Jupyter set up somewhere, create a virtualenv for it, and then install and start it by running: + +```shell +# Install jupyter and the plugin (NB: please use a virtualenv!) +pip install jupyterlab pants-jupyter-plugin +# Launch JupyterLab, which will open a browser window for notebook editing. +jupyter lab +``` + +## Usage + +For instructions on using the plugin, see its [README](https://github.com/pantsbuild/pants-jupyter-plugin/blob/main/README.md). + +An example session that loads a target from the example-python repository might look like: + +![](https://files.readme.io/9f7ca19-jupyter-session.png "jupyter-session.png") diff --git a/versioned_docs/version-2.24/docs/python/integrations/protobuf-and-grpc.mdx b/versioned_docs/version-2.24/docs/python/integrations/protobuf-and-grpc.mdx new file mode 100644 index 000000000..b97a934da --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/integrations/protobuf-and-grpc.mdx @@ -0,0 +1,246 @@ +--- + title: Protobuf and gRPC + sidebar_position: 0 +--- + +How to generate Python from Protocol Buffers. + +--- + +When your Python code imports Protobuf generated files, Pants will detect the imports and run the Protoc compiler to generate those files. + +:::note Example repository +See [the codegen example repository](https://github.com/pantsbuild/example-codegen) for an example of using Protobuf to generate Python. +::: + +:::tip Benefit of Pants: generated files are always up-to-date +With Pants, there's no need to manually regenerate your code or check it into version control. Pants will ensure you are always using up-to-date files in your builds. + +Thanks to fine-grained caching, Pants will regenerate the minimum amount of code required when you do make changes. 
+:::
+
+## Step 1: Activate the Protobuf Python backend
+
+Add this to your `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.codegen.protobuf.python",
+  "pants.backend.python",
+]
+```
+
+This adds the new [`protobuf_source`](../../../reference/targets/protobuf_source.mdx) target, which you can confirm by running `pants help protobuf_source`.
+
+To reduce boilerplate, you can also use the [`protobuf_sources`](../../../reference/targets/protobuf_sources.mdx) target, which generates one `protobuf_source` target per file in the `sources` field.
+
+```python title="BUILD"
+protobuf_sources(name="protos", sources=["user.proto", "admin.proto"])
+
+# Spiritually equivalent to:
+protobuf_source(name="user", source="user.proto")
+protobuf_source(name="admin", source="admin.proto")
+
+# Thanks to the default `sources` value of '*.proto', spiritually equivalent to:
+protobuf_sources(name="protos")
+```
+
+:::note Enable the MyPy Protobuf plugin
+The [MyPy Protobuf plugin](https://github.com/dropbox/mypy-protobuf) generates [`.pyi` type stubs](https://mypy.readthedocs.io/en/stable/stubs.html). If you use MyPy through Pants's [check goal](../goals/check.mdx), this will ensure MyPy understands your generated code.
+
+To activate, set `mypy_plugin = true` in the `[python-protobuf]` scope:
+
+```toml
+[python-protobuf]
+mypy_plugin = true
+```
+
+MyPy will use the generated `.pyi` type stub file, rather than looking at the `.py` implementation file.
+:::
+
+## Step 2: Set up the `protobuf` and `grpcio` runtime libraries
+
+Generated Python files require the [`protobuf` dependency](https://pypi.org/project/protobuf/) for their imports to work properly. If you're using gRPC, you also need the [`grpcio` dependency](https://pypi.org/project/grpcio/).
+
+Add `protobuf`—and `grpcio`, if relevant—to your project, e.g. your `requirements.txt` (see [Third-party dependencies](../overview/third-party-dependencies.mdx)).
+
+```text title="requirements.txt"
+grpcio==1.32.0
+protobuf>=3.12.1
+```
+
+Pants will then automatically add these dependencies to your `protobuf_source` targets created in the next step.
+
+### Use alternative gRPC plugins
+
+By default, the `grpcio` plugin is used to generate the service stubs. Pants can also generate service stubs using [`grpclib`](https://github.com/vmagamedov/grpclib).
+
+To use `grpclib`, set `grpclib_plugin = true` in the `[python-protobuf]` scope. You probably also want to disable the `grpcio` plugin if you don't use it.
+
+```toml title="pants.toml"
+[python-protobuf]
+grpclib_plugin = true
+grpcio_plugin = false
+```
+
+Don't forget to add the `grpclib` dependency to your requirements.
+
+It is recommended to share the same dependency versions between the Protobuf plugin and your code's runtime dependencies (see [sharing lockfiles between tools and code](../overview/lockfiles.mdx#sharing-lockfiles-between-tools-and-code)). The requirements for the `grpclib` plugin can be defined this way:
+
+```toml title="pants.toml"
+[python-protobuf-grpclib]
+install_from_resolve = "python-default"
+# Change the path to your requirements.
+requirements = [
+  "//3rdparty/python#grpclib",
+  "//3rdparty/python#protobuf",
+]
+```
+
+## Step 3: Generate `protobuf_sources` target
+
+Run [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) for Pants to create a `protobuf_sources` target wherever you have `.proto` files:
+
+```
+$ pants tailor ::
+Created src/protos/BUILD:
+  - Add protobuf_sources target protos
+```
+
+Pants will use [dependency inference](../../using-pants/key-concepts/targets-and-build-files.mdx) for any `import` statements in your `.proto` files, which you can confirm by running `pants dependencies path/to/file.proto`. You should also see the `python_requirement` target for the `protobuf` library from the previous step.
+
+If you want gRPC code generated for all files in the folder, set `grpc=True`.
+
+```python title="src/proto/example/BUILD"
+protobuf_sources(
+    name="protos",
+    grpc=True,
+)
+```
+
+If you only want gRPC generated for some files in the folder, you can use the `overrides` field:
+
+```python title="src/proto/example/BUILD"
+protobuf_sources(
+    name="protos",
+    overrides={
+        "admin.proto": {"grpc": True},
+        # You can also use a tuple for multiple files.
+        ("user.proto", "org.proto"): {"grpc": True},
+    },
+)
+```
+
+## Step 4: Confirm Python imports are working
+
+Now, you can import the generated Python module in your Python code. For example, to import `project/example/f.proto`, add `import project.example.f_pb2` to your code.
+
+If you have [source roots](../../using-pants/key-concepts/source-roots.mdx) other than the repository root, remove the source root from the import. For example, `src/protos/example/f.proto` gets stripped to `import example.f_pb2`. See the below section on source roots for more info.
+
+Pants's dependency inference will detect Python imports of Protobuf modules, which you can confirm by running `pants dependencies path/to/file.py`.
+
+If gRPC is activated, you can also import the module with `_pb2_grpc` at the end, e.g. `project.example.f_pb2_grpc`.
+
+```python
+from project.example.f_pb2 import HelloReply
+from project.example.f_pb2_grpc import GreeterServicer
+```
+
+:::note Run `pants export-codegen ::` to inspect the files
+`pants export-codegen ::` will run all relevant code generators and write the files to `dist/codegen` using the same paths used normally by Pants.
+
+You do not need to run this goal for codegen to work when using Pants; `export-codegen` is only for external consumption outside of Pants.
+
+Note: You can also export the generated sources using the [`--export-py-generated-sources` option](../../../reference/goals/export#py_generated_sources) to the [`pants export` goal](../../../reference/goals/export). This is useful when you want to provide an IDE with third-party dependencies and generated sources in a single place.
+:::
+
+:::caution You likely need to add empty `__init__.py` files
+By default, Pants will generate the Python files in the same directory as the `.proto` file. To get Python imports working properly, you will likely need to add an empty `__init__.py` in the same location, and possibly in ancestor directories.
+
+See the below section "Protobuf and source roots" for how to generate into a different directory. If you use this option, you will still likely need an empty `__init__.py` file in the destination directory.
+:::
+
+## Protobuf and source roots
+
+By default, generated code goes into the same [source root](../../using-pants/key-concepts/source-roots.mdx) as the `.proto` file from which it was generated. For example, a file `src/proto/example/f.proto` will generate `src/proto/example/f_pb2.py`.
+
+However, this may not always be what you want. In particular, you may not want to have to add `__init__.py` files under `src/proto` just so you can import Python code generated to that source root.
+
+You can configure a different source root for generated code by setting the `python_source_root` field:
+
+```python title="src/proto/example/BUILD"
+protobuf_sources(
+    name="protos",
+    python_source_root='src/python'
+)
+```
+
+Now `src/proto/example/f.proto` will generate `src/python/example/f_pb2.py`, i.e., the generated files will share a source root with your other Python code.
+
+:::note Set the `.proto` file's `package` relative to the source root
+Remember that the `package` directive in your `.proto` file should be relative to the source root.
+
+For example, if you have a file at `src/proto/example/subdir/f.proto`, you'd set its `package` to `example.subdir`, and in your Python code, write `from example.subdir import f_pb2`.
+:::
+
+## Multiple resolves
+
+If you're using [multiple resolves](../overview/third-party-dependencies.mdx) (i.e. multiple lockfiles), then you may need to set the `python_resolve` field. `protobuf_source` targets only work with a single resolve, meaning, for example, that a `python_source` target that uses the resolve 'a' can only depend on Protobuf targets that also use this same resolve.
+
+By default, `protobuf_source` / `protobuf_sources` targets use the resolve set by the option `[python].default_resolve`. To use a different resolve, set the field `python_resolve: str` to one of the values from the option `[python].resolves`.
+
+You must also make sure that any resolves that use codegen include `python_requirement` targets for the `protobuf` and `grpcio` runtime libraries from Step 2. Pants will eagerly validate this for you.
+
+If the same Protobuf files should work with multiple resolves, you can use the
+[`parametrize`](../../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) mechanism.
+
+For example:
+
+```python title="BUILD"
+python_requirement(
+    name="protobuf",
+    # Here, we use the same version of Protobuf in both resolves. You could instead create
+    # a distinct target per resolve so that they have different versions.
+    requirements=["protobuf==3.19.4"],
+    resolve=parametrize("resolve-a", "resolve-b"),
+)
+
+protobuf_sources(
+    name="protos",
+    python_resolve=parametrize("resolve-a", "resolve-b")
+)
+```
+
+## Buf: format and lint Protobuf
+
+Pants integrates with the [`Buf`](https://buf.build/blog/introducing-buf-format) formatter and linter for Protobuf files.
+
+To activate, add this to `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+  "pants.backend.codegen.protobuf.lint.buf",
+]
+```
+
+Now you can run `pants fmt` and `pants lint`:
+
+```
+❯ pants lint src/protos/user.proto
+```
+
+Use `pants fmt lint dir:` to run on all files in the directory, and `pants fmt lint dir::` to run on all files in the directory and subdirectories.
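+
+For example, to format and lint everything under `src/protos` recursively:
+
+```
+❯ pants fmt lint src/protos::
+```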
+ +Temporarily disable Buf with `--buf-fmt-skip` and `--buf-lint-skip`: + +```bash +❯ pants --buf-fmt-skip fmt :: +``` + +Only run Buf with `--lint-only=buf-fmt` or `--lint-only=buf-lint`, and `--fmt-only=buf-fmt`: + +```bash +❯ pants fmt --only=buf-fmt :: +``` diff --git a/versioned_docs/version-2.24/docs/python/integrations/pyoxidizer.mdx b/versioned_docs/version-2.24/docs/python/integrations/pyoxidizer.mdx new file mode 100644 index 000000000..85a4965fa --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/integrations/pyoxidizer.mdx @@ -0,0 +1,238 @@ +--- + title: PyOxidizer + sidebar_position: 4 +--- + +Creating Python binaries through PyOxidizer. + +--- + +:::caution (March 17, 2024) PyOxidizer is not actively maintained +As mentioned in [PyOxidizer issue #741](https://github.com/indygreg/PyOxidizer/issues/741) by indygreg, development on PyOxidizer is effectively stagnant. Please take this into consideration before using PyOxidizer in a new project, as this may affect PyOxidizer support within Pants as well. + +> tl;dr shifting personal priorities have resulted in me de-prioritizing PyOxidizer (and other open source projects). At this point in time, the future of PyOxidizer is uncertain, possibly dead. +::: + +PyOxidizer allows you to distribute your code as a single binary file, similar to [Pex files](../overview/pex.mdx). Unlike Pex, these binaries include a Python interpreter, often greatly simplifying distribution. + +See our blog post on [Packaging Python with the Pants PyOxidizer Plugin](https://blog.pantsbuild.org/packaging-python-with-the-pyoxidizer-pants-plugin/) for more discussion of the benefits of PyOxidizer. + +## Step 1: Activate the backend + +Add this to your `pants.toml`: + +```toml title="pants.toml" +[GLOBAL] +backend_packages.add = [ + "pants.backend.experimental.python.packaging.pyoxidizer", + "pants.backend.python", +] +``` + +This adds the new `pyoxidizer_binary` target, which you can confirm by running `pants help pyoxidizer_binary`. + +:::caution This backend is experimental +We are still discovering the best ways to provide PyOxidizer support, such as how to make our [default template more useful](https://github.com/pantsbuild/pants/pull/14183/files#r788253973). This backend does not follow the normal [deprecation policy](../../releases/deprecation-policy.mdx), although we will do our best to minimize breaking changes. + +We would [love your feedback](/community/getting-help) on this backend! +::: + +## Step 2: Define a `python_distribution` target + +The `pyoxidizer_binary` target works by pointing to a `python_distribution` target with the code you want included. Pants then passes the distribution to PyOxidizer to install it as a binary. + +So, to get started, create a `python_distribution` target per [Building distributions](../overview/building-distributions.mdx). + +```python title="project/BUILD" +python_sources(name="lib") + +python_distribution( + name="dist", + dependencies=[":lib"], + provides=python_artifact(name="my-dist", version="0.0.1"), +) +``` + +The `python_distribution` must produce at least one wheel (`.whl`) file. If you are using Pants's default of `generate_setup=True`, make sure you also use Pants's default of `wheel=True`. Pants will eagerly error when building your `pyoxidizer_binary` if you use a `python_distribution` that does not produce wheels. 
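+
+If you want to make the wheel requirement explicit on the target itself, a minimal sketch (`wheel=True` is already the default, and PyOxidizer does not need the sdist):
+
+```python title="project/BUILD"
+python_distribution(
+    name="dist",
+    dependencies=[":lib"],
+    provides=python_artifact(name="my-dist", version="0.0.1"),
+    # Ensure a .whl is produced; PyOxidizer consumes wheels.
+    wheel=True,
+    # Skip the sdist, which PyOxidizer does not use.
+    sdist=False,
+)
+```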
+
+## Step 3: Define a `pyoxidizer_binary` target
+
+Now, create a `pyoxidizer_binary` target and set the `dependencies` field to the [address](../../using-pants/key-concepts/targets-and-build-files.mdx) of the `python_distribution` you created previously.
+
+```python title="project/BUILD"
+pyoxidizer_binary(
+    name="bin",
+    dependencies=[":dist"],
+)
+```
+
+Usually, you will want to set the `entry_point` field, which sets the behavior for what happens when you run the binary.
+
+If the `entry_point` field is not specified, running the binary will launch a Python interpreter with all the relevant code and dependencies loaded.
+
+```bash
+❯ ./dist/bin/x86_64-apple-darwin/release/install/bin
+Python 3.9.7 (default, Oct 18 2021, 00:59:13)
+[Clang 13.0.0 ] on darwin
+Type "help", "copyright", "credits" or "license" for more information.
+>>> from myproject import myapp
+>>> myapp.main()
+Hello, world!
+>>>
+```
+
+You can instead set `entry_point` to the Python module to execute (e.g. `myproject.myapp`). If specified, running the binary will launch the application as if it had been run with `python -m myproject.myapp`, for example.
+
+```python
+pyoxidizer_binary(
+    name="bin",
+    dependencies=[":dist"],
+    entry_point="myproject.myapp",
+)
+```
+
+```bash
+❯ ./dist/bin/x86_64-apple-darwin/release/install/bin
+Launching myproject.myapp from __main__
+Hello, world!
+```
+
+## Step 4: Run `package` or `run` goals
+
+Finally, run `pants package $address` on your `pyoxidizer_binary` target to create a directory containing your binary, or `pants run $address` to launch the binary.
+
+For example:
+
+```
+❯ pants package src/py/project:bin
+14:15:31.18 [INFO] Completed: Building src.py.project:bin with PyOxidizer
+14:15:31.23 [INFO] Wrote dist/src.py.project/bin/aarch64-apple-darwin/debug/install/bin
+```
+
+```
+❯ pants run src/py/project:bin
+14:15:31.18 [INFO] Completed: Building src.py.project:bin with PyOxidizer
+Hello, world!
+```
+
+By default, with the `package` goal, Pants will write the package using this scheme: `dist/{path.to.tgt_dir}/{tgt_name}/{platform}/{debug,release}/install/{tgt_name}`. You can change the first part of this path by setting the `output_path` field, although you risk name collisions with other `pyoxidizer_binary` targets in your project. See [pyoxidizer_binary](../../../reference/targets/pyoxidizer_binary.mdx) for more info.
+
+:::caution `debug` vs `release` builds
+By default, PyOxidizer will build with Rust's "debug" mode, which results in much faster compile times but means that your binary will be slower to run. Instead, you can instruct PyOxidizer to build in [release mode](https://nnethercote.github.io/perf-book/build-configuration.html#release-builds) by adding this to `pants.toml`:
+
+```toml
+[pyoxidizer]
+args = ["--release"]
+```
+
+Or by using the command line flag `pants --pyoxidizer-args='--release' package path/to:tgt`.
+:::
+
+## Advanced use cases
+
+:::tip Missing functionality? Let us know!
+We would like to keep improving Pants's PyOxidizer support. We encourage you to let us know what features are missing through [Slack or GitHub](/community/getting-help)!
+:::
+
+:::caution `[python-repos]` not yet supported for custom indexes
+Currently, PyOxidizer can only resolve dependencies from PyPI and your first-party code. If you need support for custom indexes, please let us know by commenting on [https://github.com/pantsbuild/pants/issues/14619](https://github.com/pantsbuild/pants/issues/14619).
+
+(We'd be happy to help mentor someone through this change, although please still comment either way!)
+:::
+
+### `python_distribution`s that implicitly depend on each other
+
+As explained at [Building distributions](../overview/building-distributions.mdx#mapping-source-files-to-distributions), Pants automatically detects when one `python_distribution` depends on another, and it will add that dependency to the `install_requires` for the distribution.
+
+When this happens, PyOxidizer will naively try to install that first-party dependency from PyPI, which will likely fail. Instead, include all relevant `python_distribution` targets in the `dependencies` field of the `pyoxidizer_binary` target.
+
+```python tab={"label":"project/BUILD"}
+python_sources(name="lib")
+
+python_distribution(
+    name="dist",
+    # Note that this python_distribution does not
+    # explicitly include project/utils:dist in its
+    # `dependencies` field, but Pants still
+    # detects an implicit dependency and will add
+    # it to this dist's `install_requires`.
+    dependencies=[":lib"],
+    provides=setup_py(name="main-dist", version="0.0.1"),
+)
+
+pyoxidizer_binary(
+    name="bin",
+    entry_point="hellotest.main",
+    dependencies=[":dist", "project/utils:dist"],
+)
+```
+
+```python tab={"label":"project/main.py"}
+from hellotest.utils.greeter import GREET
+
+print(GREET)
+```
+
+```python tab={"label":"project/utils/greeter.py"}
+GREET = 'Hello world!'
+```
+
+```python tab={"label":"project/utils/BUILD"}
+python_sources(name="lib")
+
+python_distribution(
+    name="dist",
+    dependencies=[":lib"],
+    provides=setup_py(name="utils-dist", version="0.0.1"),
+)
+```
+
+### `template` field
+
+If the default PyOxidizer configuration that Pants generates is too limiting, a custom template can be used instead. Pants will expect a file with the extension `.bzlt` in a path relative to the `BUILD` file.
+
+```python
+pyoxidizer_binary(
+    name="bin",
+    dependencies=[":dist"],
+    entry_point="myproject.myapp",
+    template="pyoxidizer.bzlt",
+)
+```
+
+The custom `.bzlt` template may use four parameters from the Pants build process (these parameters must be prefixed by `$` or surrounded with `${ }` in the template).
+
+- `RUN_MODULE` - The re-formatted `entry_point` passed to this target (or None).
+- `NAME` - This target's name.
+- `WHEELS` - All python distributions passed to this target (or `[]`).
+- `UNCLASSIFIED_RESOURCE_INSTALLATION` - This will populate a snippet of code to correctly inject the target's `filesystem_resources`.
+
+For example, in a custom PyOxidizer configuration template, to use the `pyoxidizer_binary` target's `name` field:
+
+```python
+exe = dist.to_python_executable(
+    name="$NAME",
+    packaging_policy=policy,
+    config=python_config,
+)
+```
+
+You almost certainly will want to include this line, which is how the `dependencies` field gets consumed:
+
+```python
+exe.add_python_resources(exe.pip_install($WHEELS))
+```
+
+### `filesystem_resources` field
+
+As explained in [PyOxidizer's documentation](https://pyoxidizer.readthedocs.io/en/stable/pyoxidizer_packaging_additional_files.html#installing-unclassified-files-on-the-filesystem), you may sometimes need to force certain dependencies to be installed to the filesystem.
You can do that with the `filesystem_resources` field:
+
+```python
+pyoxidizer_binary(
+    name="bin",
+    dependencies=[":dist"],
+    entry_point="myproject.myapp",
+    filesystem_resources=["numpy==1.17"],
+)
+```
diff --git a/versioned_docs/version-2.24/docs/python/integrations/thrift.mdx b/versioned_docs/version-2.24/docs/python/integrations/thrift.mdx
new file mode 100644
index 000000000..0c0aeec88
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/python/integrations/thrift.mdx
@@ -0,0 +1,146 @@
+---
+  title: Thrift
+  sidebar_position: 1
+---
+
+How to generate Python from Thrift.
+
+---
+
+When your Python code imports Thrift generated files, Pants will detect the imports and run the Apache Thrift compiler to generate those files.
+
+:::note Example repository
+See [the codegen example repository](https://github.com/pantsbuild/example-codegen) for an example of using Thrift to generate Python.
+:::
+
+:::tip Benefit of Pants: generated files are always up-to-date
+With Pants, there's no need to manually regenerate your code or check it into version control. Pants will ensure you are always using up-to-date files in your builds.
+
+Thanks to fine-grained caching, Pants will regenerate the minimum amount of code required when you do make changes.
+:::
+
+## Step 1: Activate the Thrift Python backend
+
+Add this to your `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages.add = [
+  "pants.backend.codegen.thrift.apache.python",
+  "pants.backend.python",
+]
+```
+
+You will also need to make sure that `thrift` is discoverable on your PATH, as Pants does not [install Thrift](https://thrift.apache.org/docs/install/) for you. Alternatively, you can tell Pants where to discover Thrift:
+
+```toml title="pants.toml"
+[apache-thrift]
+# Defaults to the special string "<PATH>", which expands to your $PATH.
+thrift_search_paths = ["/usr/bin"]
+```
+
+This backend adds the new [`thrift_source`](../../../reference/targets/thrift_source.mdx) target, which you can confirm by running `pants help thrift_source`.
+
+To reduce boilerplate, you can also use the [`thrift_sources`](../../../reference/targets/thrift_sources.mdx) target, which generates one `thrift_source` target per file in the `sources` field.
+
+```python title="BUILD"
+thrift_sources(name="thrift", sources=["user.thrift", "admin.thrift"])
+
+# Spiritually equivalent to:
+thrift_source(name="user", source="user.thrift")
+thrift_source(name="admin", source="admin.thrift")
+
+# Thanks to the default `sources` value of '*.thrift', spiritually equivalent to:
+thrift_sources(name="thrift")
+```
+
+## Step 2: Set up the `thrift` runtime library
+
+Generated Python files require the [`thrift` dependency](https://pypi.org/project/thrift/) for their imports to work properly.
+
+Add `thrift` to your project, e.g. your `requirements.txt` (see [Third-party dependencies](../overview/third-party-dependencies.mdx)).
+
+```text title="requirements.txt"
+thrift==0.15.0
+```
+
+Pants will then automatically add this dependency to your `thrift_sources` targets created in the next step.
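+
+If you declare third-party dependencies in BUILD files rather than a `requirements.txt`, an equivalent sketch might be (the `3rdparty/python` location is just a common convention, not a requirement):
+
+```python title="3rdparty/python/BUILD"
+python_requirement(
+    name="thrift",
+    requirements=["thrift==0.15.0"],
+)
+```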
+
+## Step 3: Generate `thrift_sources` target
+
+Run [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) for Pants to create a `thrift_sources` target wherever you have `.thrift` files:
+
+```
+$ pants tailor ::
+Created src/thrift/BUILD:
+  - Add thrift_sources target thrift
+```
+
+Pants will use [dependency inference](../../using-pants/key-concepts/targets-and-build-files.mdx) for any `import` statements in your `.thrift` files, which you can confirm by running `pants dependencies path/to/file.thrift`. You should also see the `python_requirement` target for the `thrift` library from the previous step.
+
+## Step 4: Confirm Python imports are working
+
+Now, you can import the generated Python modules in your Python code.
+
+For each Thrift file, the compiler will generate at least three files: `__init__.py`, `ttypes.py`, and `constants.py`. The location of those files—and corresponding imports—depends on whether you set `namespace py` in your `.thrift` file:
+
+| `namespace py` | Behavior | Example |
+| :------------- | :-------------------------------------------------------------------- | :------ |
+| unset | Files generated as top-level modules, without any prefix directories. | `models/user.thrift`<br/><br/>Generated:<br/><br/>- `__init__.py`<br/>- `user/__init__.py`<br/>- `user/constants.py`<br/>- `user/ttypes.py`<br/><br/>Python import:<br/>`import user.ttypes` |
+| set | Files generated into the namespace. | `models/user.thrift`, with `namespace py custom_namespace.user`<br/><br/>Generated:<br/><br/>- `__init__.py`<br/>- `custom_namespace/__init__.py`<br/>- `custom_namespace/user/__init__.py`<br/>- `custom_namespace/user/constants.py`<br/>- `custom_namespace/user/ttypes.py`<br/><br/>Python import:<br/>`import custom_namespace.user.ttypes` |
+
+As shown in the table, your Python imports depend on whether the Thrift file uses `namespace py`.
+
+Imports behave the same regardless of whether you have [source roots](../../using-pants/key-concepts/source-roots.mdx), such as `src/thrift`. The import will still either be the top-level file like `user.ttypes` or the custom namespace.
+
+Pants's dependency inference will detect Python imports of Thrift modules, which you can confirm by running `pants dependencies path/to/file.py`.
+
+You can also [manually add](../../using-pants/key-concepts/targets-and-build-files.mdx) the dependency:
+
+```python title="src/py/BUILD"
+python_sources(dependencies=["models:models"])
+```
+
+:::note Tip: set `namespace py`
+Pants can handle Thrift regardless of whether you set `namespace py`.
+
+However, it's often a good idea to set the namespace because it can make your imports more predictable and declarative. It also reduces the risk of your Thrift file names conflicting with other Python modules used, such as those from third-party requirements.
+
+For example, compare `import user.ttypes` to `import codegen.models.user.ttypes`.
+:::
+
+:::note Run `pants export-codegen ::` to inspect the files
+`pants export-codegen ::` will run all relevant code generators and write the files to `dist/codegen` using the same paths used normally by Pants.
+
+You do not need to run this goal for codegen to work when using Pants; `export-codegen` is only for external consumption outside of Pants.
+
+Note: You can also export the generated sources using the [`--export-py-generated-sources` option](../../../reference/goals/export#py_generated_sources) to the [`pants export` goal](../../../reference/goals/export). This is useful when you want to provide an IDE with third-party dependencies and generated sources in a single place.
+:::
+
+## Multiple resolves
+
+If you're using [multiple resolves](../overview/third-party-dependencies.mdx) (i.e. multiple lockfiles), then you may need to set the `python_resolve` field. `thrift_source` targets only work with a single resolve, meaning, for example, that a `python_source` target that uses the resolve 'a' can only depend on Thrift targets that also use this same resolve.
+
+By default, `thrift_source` / `thrift_sources` targets use the resolve set by the option `[python].default_resolve`. To use a different resolve, set the field `python_resolve: str` to one of the values from the option `[python].resolves`.
+
+You must also make sure that any resolves that use codegen include the `python_requirement` target for the `thrift` runtime library from Step 2. Pants will eagerly validate this for you.
+
+If the same Thrift files should work with multiple resolves, you can use the
+[`parametrize`](../../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) mechanism.
+
+For example:
+
+```python title="BUILD"
+python_requirement(
+    name="thrift-requirement",
+    # Here, we use the same version of Thrift in both resolves. You could instead create
+    # a distinct target per resolve so that they have different versions.
+ requirements=["thrift==0.15.0"], + resolve=parametrize("resolve-a", "resolve-b"), +) + +thrift_sources( + name="thrift", + python_resolve=parametrize("resolve-a", "resolve-b") +) +``` diff --git a/versioned_docs/version-2.24/docs/python/overview/_category_.json b/versioned_docs/version-2.24/docs/python/overview/_category_.json new file mode 100644 index 000000000..53cf59a20 --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Python overview", + "position": 1 +} diff --git a/versioned_docs/version-2.24/docs/python/overview/building-distributions.mdx b/versioned_docs/version-2.24/docs/python/overview/building-distributions.mdx new file mode 100644 index 000000000..01f38515a --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/building-distributions.mdx @@ -0,0 +1,212 @@ +--- + title: Building distributions + sidebar_position: 6 +--- + +Packaging your code into an sdist or a wheel. + +--- + +A standard packaging format for Python code is the _distribution_: an archive that is published to a package index such as [PyPI](https://pypi.org/), and can be installed by [pip](https://packaging.python.org/key_projects/#pip). The two standard distribution archive types are [sdists](https://packaging.python.org/overview/#python-source-distributions) and [wheels](https://packaging.python.org/overview/#python-binary-distributions). + +This page explains how to use Pants to build distributions from your code. + +:::tip Benefit of Pants: multiple distributions from a single repository +Typically, repositories without sophisticated tooling end up building a single distribution which includes the entire repo. But Pants makes it easy to create multiple distributions from the same repository. +::: + +## Background: setuptools and PEP 517 + +For a long time, [Setuptools](https://setuptools.pypa.io/) was the de-facto standard mechanism for building Python distributions. Setuptools relies on a `setup.py` script that you provide in your code. This script contains the instructions on what code to package into the distribution and what the requirements and other metadata of the distribution should be. + +In the past few years, however, a new standard for specifying distribution builds has emerged: [PEP 517](https://www.python.org/dev/peps/pep-0517/). Under this standard (and its companion standard, [PEP 518](https://www.python.org/dev/peps/pep-0518/)) you use `pyproject.toml` to specify the Python requirements and entry point for the builder code. This information is referred to as a _build backend_. + +Examples of build backends include Setuptools, but also other systems with package-building capabilities, such as [Flit](https://flit.readthedocs.io/en/latest/) or [Poetry](https://github.com/python-poetry/poetry-core). + +Pants reads a PEP 517 `[build-system]` specification from `pyproject.toml` and applies it to build your distributions. That is, Pants acts as a _build frontend_ in PEP 517 parlance. It is common to continue to use Setuptools as the build backend, but doing so via PEP 517 lets you control the exact version of Setuptools that gets used, as well as any other requirements that must be present at build time. + +If there is no `pyproject.toml` with a `[build-system]` table available, Pants falls back to using Setuptools directly. + +## The `python_distribution` target + +You configure a distribution using a [`python_distribution`](../../../reference/targets/python_distribution.mdx) target.
This target provides Pants with the information needed to build the distribution. + +### PEP 517 + +If using a PEP 517 `pyproject.toml` file, you might have a target layout similar to this: + +```python title="example/dists/BUILD" +resource(name="pyproject", source="pyproject.toml") + +python_distribution( + name="mydist", + dependencies=[ + ":pyproject", + # Dependencies on code to be packaged into the distribution. + ], + provides=python_artifact( + name="mydist", + version="2.21.0", + ), + # Example of setuptools config, other build backends may have other config. + wheel_config_settings={"--global-option": ["--python-tag", "py37.py38.py39"]}, + # Don't use setuptools with a generated setup.py. + # You can also turn this off globally in pants.toml: + # + # [setup-py-generation] + # generate_setup_default = false + generate_setup = False, +) +``` + +Running `pants package example/dists:mydist` will cause Pants to inspect the `[build-system]` table in `pyproject.toml`, install the requirements specified in that table's `requires` key, and then execute the entry point specified in the `build-backend` key to build an sdist and a wheel, just as PEP 517 requires. + +If you want to build just a wheel or just an sdist, you can set `sdist=False` or `wheel=False` on the `python_distribution` target. + +### Setuptools + +If relying on legacy Setuptools behavior, you don't have a `pyproject.toml` resource, so your target is simply: + +```python title="example/dists/BUILD" +python_distribution( + name="mydist", + dependencies=[ + # Dependencies on code to be packaged into the distribution. + ], + provides=python_artifact( + name="mydist", + version="2.21.0", + ), + wheel_config_settings={"--global-option": ["--python-tag", "py37.py38.py39"]}, +) +``` + +Running `pants package example/dists:mydist` will cause Pants to run Setuptools, which will in turn run the `setup.py` script in the `python_distribution` target's directory. If no such script exists, Pants can generate one for you (see below). + +:::note See `package` for other package formats +This page focuses on building sdists and wheels with the `pants package` goal. See [package](../goals/package.mdx) for information on other formats that can be built with `pants package`, such as PEX binaries and zip/tar archives. +::: + +## setup.py + +Although alternatives exist, and PEP 517 enables them, Setuptools is still by far the most common choice for building distributions, whether via PEP 517 config, or directly via legacy support. If using Setuptools in either fashion, you need a `setup.py` script alongside your `python_distribution` target (and the target needs to depend on that script, typically via an explicit dependency on a `python_sources` target that owns it). + +You can either author `setup.py` yourself (which is necessary if building native extensions), or have Pants generate one for you (see below). + +By default, Pants will generate a `setup.py` for every `python_distribution` target, unless you set `generate_setup = False` on the target. But you can flip this behavior by setting `generate_setup_default = false` in the `[setup-py-generation]` section of your `pants.toml` config file. In that case Pants will only generate a `setup.py` for `python_distribution` targets that have `generate_setup = True` set on them. + +So if you expect to use handwritten `setup.py` scripts for most distributions in your repo, you probably want to set `generate_setup_default = false` and override it as needed.
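+For example, a minimal sketch of that configuration (option and field names are the ones documented above; the target itself is hypothetical):
+
+```toml title="pants.toml"
+# Default to handwritten setup.py scripts repo-wide.
+[setup-py-generation]
+generate_setup_default = false
+```
+
+```python title="example/generated/BUILD"
+# A hypothetical distribution that opts back in to generation.
+python_distribution(
+    name="mydist",
+    provides=python_artifact(name="mydist", version="2.21.0"),
+    generate_setup = True,
+)
+```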
If you expect to mostly use generated `setup.py` scripts, you can set `generate_setup_default = true` (or just not set it, since that is the default). + +:::note 3rdparty requirements in `setup.py` +If you use a handwritten `setup.py`, the generated distribution will have requirements on the packages you list in the `install_requires` key, as expected. But Pants will not automatically use those as dependencies of the underlying sources, e.g., when running tests. They are strictly used when generating a distribution. + +Instead, the "universe" of possible requirements of your source files must be specified as described [here](./third-party-dependencies.mdx), and Pants will use dependency inference to select an appropriate subset as needed. + +If Pants generates a `setup.py` for you then the `install_requires` value will be generated from the actual requirements of your source files. +::: + +## Using a generated `setup.py` + +Much of the data you would normally put in a `setup.py` file is already known to Pants, so it can be convenient to let Pants generate `setup.py` files for you, instead of maintaining them manually for each distributable project. + +In this case, you may want to add some information to the `provides=` field in the `python_distribution` target, for Pants to place in the generated `setup.py`: + +```python title="example/dists/BUILD" +python_distribution( + name="mydist", + dependencies=[ + # Dependencies on code to be packaged into the distribution. + ], + provides=python_artifact( + name="mydist", + version="2.21.0", + description="An example distribution built with Pants.", + author="Pantsbuild", + classifiers=[ + "Programming Language :: Python :: 3.7", + ], + ), + wheel_config_settings={"--global-option": ["--python-tag", "py37.py38.py39"]}, +) +``` + +Some important `setup.py` metadata is inferred by Pants from your code and its dependencies. Other metadata needs to be provided explicitly. In Pants, as shown above, you do so through the `provides` field. + +You can use almost any [keyword argument](https://packaging.python.org/guides/distributing-packages-using-setuptools/#setup-args) accepted by `setup.py` in the `setup()` function. + +However, you cannot use `data_files`, `install_requires`, `namespace_packages`, `package_dir`, `package_data`, or `packages` because Pants will generate these for you, based on the data derived from your code and dependencies. + +:::note Use the `entry_points` field to register entry points like `console_scripts` +The [`entry_points` field](../../../reference/targets/python_distribution.mdx#entry_points) allows you to configure [setuptools-style entry points](https://packaging.python.org/specifications/entry-points/#entry-points-specification): + +```python +python_distribution( + name="my-dist", + entry_points={ + "console_scripts": {"some-command": "project.app:main"}, + "flake8_entry_point": { + "PB1": "my_flake8_plugin:Plugin", + "PB2": "my_flake8_plugin:AnotherPlugin", + }, + }, + provides=python_artifact(...), +) +``` + +Pants will infer dependencies on each entry point, which you can confirm by running `pants dependencies path/to:python_dist`. + +In addition to using the format `path.to.module:func`, you can use an [address](../../using-pants/key-concepts/targets-and-build-files.mdx) to a `pex_binary` target, like `src/py/project:pex_binary` or `:sibling_pex_binary`. Pants will use the `entry_point` already specified by the `pex_binary`, and it will infer a dependency on the `pex_binary` target.
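+For example, a sketch (with hypothetical target names) that points a console script at a sibling `pex_binary`:
+
+```python
+pex_binary(name="app", entry_point="project.app:main")
+
+python_distribution(
+    name="my-dist",
+    entry_points={
+        # The address of the pex_binary above; Pants reuses its entry_point.
+        "console_scripts": {"my-app": ":app"},
+    },
+    provides=python_artifact(name="my-dist", version="1.0.0"),
+)
+```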
This allows you to better DRY your project's entry points. +::: + +:::note Consider writing a plugin to dynamically generate the `setup()` keyword arguments +You may want to write a plugin to do any of these things: + +- Reduce boilerplate by hardcoding common arguments and commands. +- Read from the file system to dynamically determine kwargs, such as the `long_description` or `version`. +- Run processes like Git to dynamically determine the `version` kwarg. + +Start by reading about the [Plugin API](../../writing-plugins/overview.mdx), then refer to the [Custom `python_artifact()` kwargs](../../writing-plugins/common-plugin-tasks/custom-python-artifact-kwargs.mdx) instructions. +::: + +## Mapping source files to distributions + +A Pants repo typically consists of one `python_source` target per file (usually generated by several `python_sources` targets). To build multiple distributions from the same repo, Pants must determine which libraries are bundled into each distribution. + +In the extreme case, you could have one distribution per `python_source` target, but publishing and consuming a distribution per file would of course not be practical. So in practice, multiple source files are bundled into a single distribution. + +Naively, you might think that a `python_distribution` publishes all the code of all the `python_source` targets it transitively depends on. But that could easily lead to trouble if you have multiple distributions that share common dependencies. You typically don't want the same code published in multiple distributions, as this can lead to all sorts of runtime import issues. + +If you use a handwritten `setup.py`, you have to figure this out for yourself: Pants will bundle whatever the script tells it to. But if you let Pants generate `setup.py` then it will apply the following algorithm: + +Given a `python_distribution` target D, take all the source files in the transitive dependency closure of D. Some of those source files may be published in D itself, but others may be published in some other `python_distribution` target, D', in which case Pants will correctly add a requirement on D' in the metadata for D. + +For each `python_source` target S, the distribution in which S's code is published is chosen to be the `python_distribution` that: + +1. Depends, directly or indirectly, on S. +2. Is S's closest filesystem ancestor among those satisfying 1. + +If there are multiple such distributions at the same degree of ancestry, the ownership +is ambiguous and an error is raised. If there is no `python_distribution` that depends on S +and is its ancestor, then there is no owner and an error is raised. + +This algorithm implies that all source files published by a distribution must be below it in the filesystem. It also guarantees that a source file is only published by a single distribution. + +The generated `setup.py` will have its `install_requires` set to include the 3rdparty dependencies of the code bundled in the distribution, plus any other distributions from your own repo. For example, if distribution D1 contains code that has a dependency on some source file S, and that source file is published in distribution D2, then D1's requirements will include a dependency on D2. In other words, Pants does the right thing. + +:::note Changing the versioning scheme for first-party dependencies +When a `python_distribution` depends on another `python_distribution`, Pants will add it to the `install_requires` value in the generated `setup.py`.
+ +By default, Pants will use exact requirements for first-party dependencies, like `other_dist==1.0.1`. You can set `first_party_dependency_version_scheme` in the `[setup-py-generation]` scope to `'compatible'` to use `~=` instead of `==`, or to `'any'` to leave off the version entirely. + +For example: + +```toml +[setup-py-generation] +first_party_dependency_version_scheme = "compatible" +``` + +See [https://www.python.org/dev/peps/pep-0440/#version-specifiers](https://www.python.org/dev/peps/pep-0440/#version-specifiers) for more information on the `~=` specifier. +::: + +:::note How to publish your distributions to a package index +See [publish](../goals/publish.mdx) for how to use Pants to publish distributions using Twine. +::: diff --git a/versioned_docs/version-2.24/docs/python/overview/enabling-python-support.mdx b/versioned_docs/version-2.24/docs/python/overview/enabling-python-support.mdx new file mode 100644 index 000000000..4a4fb2c6a --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/enabling-python-support.mdx @@ -0,0 +1,71 @@ +--- + title: Enabling Python support + sidebar_position: 0 +--- + +How to enable Pants's bundled Python backend package. + +--- + +:::note Example Python repository +See [here](https://github.com/pantsbuild/example-python) for examples of Pants's Python functionality. + +See [here](https://github.com/pantsbuild/example-django) for Django-specific examples. +::: + +Enable the Python [backend](../../using-pants/key-concepts/backends.mdx) like this: + +```toml title="pants.toml" +[GLOBAL] +... +backend_packages = [ + "pants.backend.python" +] +``` + +Pants uses [`python_source`](../../../reference/targets/python_source.mdx) and [`python_test`](../../../reference/targets/python_test.mdx) targets to know which Python files to run on and to set any metadata. + +To reduce boilerplate, the [`python_sources`](../../../reference/targets/python_sources.mdx) target generates a `python_source` target for each file in its `sources` field, and [`python_tests`](../../../reference/targets/python_tests.mdx) generates a `python_test` target for each file in its `sources` field. + +```python title="BUILD" +python_sources(name="lib", sources=["dirutil.py", "strutil.py"]) +python_tests(name="tests", sources=["strutil_test.py"]) + +# Spiritually equivalent to: +python_source(name="dirutil", source="dirutil.py") +python_source(name="strutil", source="strutil.py") +python_test(name="strutil_test.py", source="strutil_test.py") + +# Thanks to the default `sources` values, spiritually equivalent to: +python_sources(name="lib") +python_tests(name="tests") +``` + +You can generate these targets by running [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files). + +``` +❯ pants tailor :: +Created project/BUILD: + - Add python_sources target project + - Add python_tests target tests +``` + +:::caution macOS users: you may need to change interpreter search paths +By default, Pants will look at both your `$PATH` and—if you use Pyenv—your `$(pyenv root)/versions` folder when discovering Python interpreters. Your `$PATH` likely includes the system Pythons at `/usr/bin/python` and `/usr/bin/python3`, which are known to have many issues like failing to install some dependencies. + +Pants will prefer new Python versions, like 3.6.10 over 3.6.3. Because macOS system Pythons are usually very old, they will usually be ignored.
+ +However, if you run into issues, you can set the `search_path` option in the `[python-bootstrap]` scope: + +```toml +[python-bootstrap] +search_path = [ + # This will use all interpreters in `$(pyenv root)/versions`. + "<PYENV>", + # Brew usually installs Python here. + "/usr/local/bin", +] +``` + +See [here](./interpreter-compatibility.mdx#changing-the-interpreter-search-path) for more information. +::: diff --git a/versioned_docs/version-2.24/docs/python/overview/index.mdx b/versioned_docs/version-2.24/docs/python/overview/index.mdx new file mode 100644 index 000000000..8bd455ef3 --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/index.mdx @@ -0,0 +1,29 @@ +--- + title: Python overview + sidebar_position: 0 +--- + +--- + +The Python ecosystem has a great many tools for various features. Pants installs, configures, and invokes those tools for you, while taking care of orchestrating the workflow, caching results, and running concurrently. + +Pants currently supports the following goals and features for Python: + +| goal | underlying tools | +| :-------------------- | :------------------------------------------------------------------------------------------------ | +| dependency resolution | [`pip`](./third-party-dependencies.mdx) | +| test running | [`pytest`](../goals/test.mdx) | +| linting/formatting | [`black`](../../../reference/subsystems/black.mdx), [`yapf`](../../../reference/subsystems/yapf.mdx), [`flake8`](../../../reference/subsystems/flake8.mdx), [`docformatter`](../../../reference/subsystems/docformatter.mdx), [`pydocstyle`](../../../reference/subsystems/pydocstyle.mdx), [`isort`](../../../reference/subsystems/isort.mdx), [`pylint`](../../../reference/subsystems/pylint.mdx), [`bandit`](../../../reference/subsystems/bandit.mdx), [`autoflake`](../../../reference/subsystems/autoflake.mdx), [`pyupgrade`](../../../reference/subsystems/pyupgrade.mdx), [`ruff`](../../../reference/subsystems/ruff.mdx) | +| typechecking | [MyPy](../goals/check.mdx), [`pyright`](../../../reference/subsystems/pyright.mdx), [`pytype`](../../../reference/subsystems/pytype.mdx) | +| code generation | [Protobuf](../integrations/protobuf-and-grpc.mdx) (including the `gRPC` and `MyPy` plugins), [Thrift](../integrations/thrift.mdx) | +| packaging | [`setuptools`](./building-distributions.mdx), [`pex`](../goals/package.mdx), [PyOxidizer](../integrations/pyoxidizer.mdx), [AWS Lambda](../integrations/aws-lambda.mdx), [Google Cloud Function](../integrations/google-cloud-functions.mdx) | +| running a REPL | `python`, [`iPython`](../goals/repl.mdx) | + +There are also [goals](../../using-pants/project-introspection.mdx) for querying and understanding your dependency graph, and a robust [help system](../../using-pants/command-line-help.mdx). We're adding support for additional tools and features all the time, and it's straightforward to [implement your own](../../writing-plugins/overview.mdx).
+ +- [Enabling Python support](./enabling-python-support.mdx) +- [Third-party dependencies](./third-party-dependencies.mdx) +- [Interpreter compatibility](./interpreter-compatibility.mdx) +- [Linters and formatters](./linters-and-formatters.mdx) +- [Pex files](./pex.mdx) +- [Building distributions](./building-distributions.mdx) diff --git a/versioned_docs/version-2.24/docs/python/overview/interpreter-compatibility.mdx b/versioned_docs/version-2.24/docs/python/overview/interpreter-compatibility.mdx new file mode 100644 index 000000000..9684d1934 --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/interpreter-compatibility.mdx @@ -0,0 +1,163 @@ +--- + title: Interpreter compatibility + sidebar_position: 3 +--- + +How to configure which Python version(s) your project should use. + +--- + +## Setting the default Python version + +:::note Proactive Python 2.7 support ends in Pants v2.24.x and later versions +Pants v2.23.x is the last Pants release series which will be regularly tested with Python 2.7. Python 2.7 is [no longer supported by its maintainers as of 1 January 2020](https://www.python.org/doc/sunset-python-2/). While Pants may continue to work with Python 2.7 in the near term, as of Pants v2.24.x, Pants will no longer officially support use of Python 2.7, and, consequently, will no longer be proactively tested with Python 2.7 in CI. Contributions to fix issues with Python 2.7 support will continue to be accepted, but will depend on any community contributions and will not constitute continued official support for Python 2.7. +::: + +Configure your default Python interpreter compatibility constraints in `pants.toml` like this: + +```toml title="pants.toml" +[python] +interpreter_constraints = ["CPython==3.11.*"] +``` + +The value can be one or more valid Requirement-style strings. You can use multiple strings to OR constraints, and use commas within each string to AND constraints. For example: + +| Constraint | What it means | +| :----------------------------------- | :--------------------------------------- | +| `['CPython>=3.6,<4']` | CPython 3.6+, but not CPython 4 or later | +| `['CPython==3.7.3']` | CPython 3.7.3 | +| `['PyPy']` | any version of PyPy | +| `['CPython==2.7.*', 'CPython>=3.5']` | CPython 2.7 or 3.5+ | + +As a shortcut, you can leave off `CPython` and just put the version specifier. For example, `==3.8` will be expanded automatically to `CPython==3.8`. + +:::note Using Apple Silicon (M1/M2)? +If you use Python code on Apple's M1/M2 hardware you may need to set your interpreter constraints to Python 3.9+, as many tools, such as Black, will not install correctly on earlier Python versions on this platform. +::: + +## Using multiple Python versions in the same project + +Pants also allows you to specify the interpreter compatibility for particular targets. This allows you to use multiple Python versions in the same repository, such as incrementally migrating from Python 2 to Python 3. + +Use the `interpreter_constraints` field on a Python target, like this: + +```python title="BUILD" +python_sources( + name="python2_target", + interpreter_constraints=["==2.7.*"], +) +``` + +If `interpreter_constraints` is left off, the target will default to the value from the option `interpreter_constraints` in `[python]`.
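+For example, a minimal sketch (hypothetical target and file names): with `[python].interpreter_constraints = ["CPython==3.11.*"]` set globally, the first target below runs on Python 3.11, while the second pins Python 2.7:
+
+```python title="BUILD"
+# Inherits the global [python].interpreter_constraints.
+python_sources(name="modern", sources=["app.py"])
+
+# Explicitly overrides the global default.
+python_sources(
+    name="legacy",
+    sources=["legacy_app.py"],
+    interpreter_constraints=["==2.7.*"],
+)
+```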
+ +To only change the interpreter constraints for a few files, you can use the `overrides` field: + +```python title="BUILD" +python_sources( + name="lib", + overrides={ + "py2.py": {"interpreter_constraints": ["==2.7.*"]}, + # You can use a tuple for multiple files: + ("common.py", "f.py"): {"interpreter_constraints": ["==2.7.*"]}, + } +) +``` + +A particular target's interpreter constraints must be the same as, or a subset of, those of all of its +transitive dependencies. For example, a target marked Python 3-only cannot depend on a +Python 2-only target, which Pants will validate. + +It is possible for a target to have more precise interpreter constraints than its transitive +dependencies. For example, if you have a common helper file `utils.py` that works with both +Python 2.7 and 3.5 (`['==2.7.*', '>=3.5']`), it is legal for a dependent like `app.py` to use more +precise constraints like `['==3.7.*']`. It is often useful for helper code to work with multiple +Python versions, while specific "root" targets (tests, apps, and binaries) have more specific +constraints. + +:::caution Pants cannot validate that your interpreter constraints are accurate +Pants accepts your interpreter constraints at face value. If you use a constraint like `'>=3.6'`, Pants will trust you that your code indeed works with any interpreter >= 3.6, as Pants has no way to audit if your code is actually compatible. + +Instead, consider running your unit tests with every Python version you claim to support to ensure that your code really is compatible: + +```python +python_test( + source="util_test.py", + interpreter_constraints=parametrize(py2=["==2.7.*"], py3=["==3.6.*"]), +) +``` + +::: + +When you `parametrize` constraints, Pants will generate multiple targets, one for each constraint, allowing you to test multiple Python versions with a single command. This is somewhat similar to "matrix testing". See [Target Generation](../../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) for more details. + +### Tip: activate `pants.backend.python.mixed_interpreter_constraints` + +We recommend adding `pants.backend.python.mixed_interpreter_constraints` to `backend_packages` in the `[GLOBAL]` scope, which will add the new goal `py-constraints`. + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.python", + "pants.backend.python.mixed_interpreter_constraints", +] +``` + +You can run `pants py-constraints $file/$target` to see what final interpreter constraints will be used, and why. For example: + +``` +$ pants py-constraints helloworld/main.py +Final merged constraints: CPython==2.7.*,>=3.5 OR CPython>=3.5 + +CPython>=3.5 + helloworld/main.py + +CPython==2.7.* OR CPython>=3.5 + helloworld/util/__init__.py + helloworld/util/config_loader.py + helloworld/util/lang.py + helloworld/util/proto/__init__.py:init + helloworld/util/proto/config.proto +``` + +#### `py-constraints --summary` + +You can run `pants py-constraints --summary` for Pants to generate a CSV giving an overview of your project's interpreter constraints: + +_Image: result of `pants py-constraints --summary`, with the CSV imported into Google Sheets._ + +We recommend importing this CSV into a tool like Pandas or Excel to filter and sort the data. + +The `# Dependents` column is useful to see how impactful it is to port a file, and the `# Dependencies` can be useful to see how easy it would be to port.
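+A hypothetical excerpt of such a CSV (the `Transitive Constraints`, `# Dependents`, and `# Dependencies` columns are named in this section; the remaining column names, file names, and counts are invented for illustration):
+
+```text
+File,Constraints,Transitive Constraints,# Dependents,# Dependencies
+helloworld/util/lang.py,CPython==2.7.* OR CPython>=3.5,CPython==2.7.* OR CPython>=3.5,42,0
+helloworld/main.py,CPython>=3.5,CPython>=3.5,0,7
+```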
+ +:::note Tips for Python 2 -> Python 3 migrations +While every project will have different needs and scope, there are a few best practices with Pants that will allow for a more successful migration. + +- Start by setting the `interpreter_constraints` option in `[python]` to describe the status of the majority of your targets. If most are only compatible with Python 2, set it to `['==2.7.*']`. If most are compatible with Python 2 _and_ Python 3, set to `['==2.7.*', '>=3.5']`. If most are only compatible with Python 3, set to `['>=3.5']`. For any targets that don't match these global constraints, override with the `interpreter_constraints` field. +- Run `pants py-constraints --summary` and sort by `# Dependents` from Z to A to find your most-used files. Focus on getting these targets to be compatible with Python 2 and 3. You may want to also sub-sort the CSV by `# Dependencies` to find what is easiest to port. +- Once >40% of your targets work with both Python 2 and Python 3, change the `interpreter_constraints` option in `[python]` to specify compatibility with both Python 2.7 and Python 3 so that all new code uses this by default. +- For files with no or few dependencies, change them to Python 3-only when possible so that you can start using all the neat new Python 3 features like f-strings! Use the CSV from `pants py-constraints --summary` to find these. You can also do this if every dependent target works exclusively with Python 3, which you can find by the `Transitive Constraints` column and by running `pants py-constraints path/to/file.py`. + +Check out [this blog post](https://enterprise.foursquare.com/intersections/article/how-our-intern-led-pants-migration-to-python-3/) on Pants's own migration to Python 3 in 2019 for more general tips on Python 3 migrations. +::: + +## Changing the interpreter search path + +Pants will default to looking at your `$PATH` to discover Python interpreters. You can change this by setting the option `search_path` in the `[python-bootstrap]` scope. + +You can specify absolute paths to interpreter binaries and/or to directories containing interpreter binaries. In addition, Pants understands some special symbols: + +- `<PATH>`: read the `$PATH` env var +- `<PYENV>`: use all directories in `$(pyenv root)/versions` +- `<PYENV_LOCAL>`: the interpreter specified in the local file `.python-version` +- `<ASDF>`: all Python versions currently configured by ASDF, with a fallback to all installed versions +- `<ASDF_LOCAL>`: the ASDF interpreter with the version in `<build root>/.tool-versions` + +For example: + +```toml title="pants.toml" +[python-bootstrap] +search_path = ["<PYENV>", "/opt/python3"] +``` diff --git a/versioned_docs/version-2.24/docs/python/overview/linters-and-formatters.mdx b/versioned_docs/version-2.24/docs/python/overview/linters-and-formatters.mdx new file mode 100644 index 000000000..7cf9572a5 --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/linters-and-formatters.mdx @@ -0,0 +1,312 @@ +--- + title: Linters and formatters + sidebar_position: 4 +--- + +How to activate and use the Python linters and formatters bundled with Pants. + +--- + +:::tip Benefit of Pants: consistent interface +`pants lint` and `pants fmt` will consistently and correctly run all your linters and formatters. No need to remember how to invoke each tool, and no need to write custom scripts. + +This consistent interface even works with multiple languages, like running Python linters at the same time as Go, Shell, Java, and Scala.
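+For example, the same two commands cover every enabled formatter and linter, whatever the language:
+
+```bash
+❯ pants lint ::
+❯ pants fmt ::
+```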
+::: + +:::tip Benefit of Pants: concurrency +Pants does several things to speed up running formatters and linters: + +- Automatically configures tools that support concurrency (e.g. a `--jobs` option) based on your number of cores and what else is already running. +- Runs everything in parallel with the `lint` goal (although not the `fmt` goal, which pipes the results of one formatter to the next for correctness). +- Runs in batches of 256 files by default, which gives parallelism even for tools that don't have a `--jobs` option. This also increases cache reuse. + +::: + +## Activating linters and formatters + +Linter/formatter support is implemented in separate [backends](../../using-pants/key-concepts/backends.mdx) so that they are easy to opt in to individually: + +| Backend | Tool | +| :--------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------- | +| `pants.backend.python.lint.bandit` | [Bandit](https://bandit.readthedocs.io/en/latest/): security linter | +| `pants.backend.python.lint.black` | [Black](https://black.readthedocs.io/en/stable/): code formatter | +| `pants.backend.python.lint.docformatter` | [Docformatter](https://pypi.org/project/docformatter/): docstring formatter | +| `pants.backend.python.lint.flake8` | [Flake8](https://flake8.pycqa.org/en/latest/): style and bug linter | +| `pants.backend.python.lint.isort` | [isort](https://readthedocs.org/projects/isort/): import statement formatter | +| `pants.backend.python.lint.pydocstyle` | [Pydocstyle](https://pypi.org/project/pydocstyle/): docstring linter | +| `pants.backend.python.lint.pylint` | [Pylint](https://pylint.pycqa.org/): style and bug linter | +| `pants.backend.python.lint.yapf` | [Yapf](https://github.com/google/yapf): code formatter | +| `pants.backend.python.lint.autoflake` | [Autoflake](https://github.com/myint/autoflake): remove unused imports | +| `pants.backend.python.lint.pyupgrade` | [Pyupgrade](https://github.com/asottile/pyupgrade): automatically update code to use modern Python idioms like `f-strings` | +| `pants.backend.experimental.python.lint.ruff.check` | [Ruff (for linting)](https://docs.astral.sh/ruff/linter/): an extremely fast Python linter, written in Rust. | +| `pants.backend.experimental.python.lint.ruff.format` | [Ruff (for formatting)](https://docs.astral.sh/ruff/formatter/): an extremely fast Python code formatter, written in Rust. | + +To enable, add the appropriate backends in `pants.toml`: + +```toml title="pants.toml" +[GLOBAL] +... +backend_packages = [ + 'pants.backend.python', + 'pants.backend.python.lint.black', + 'pants.backend.python.lint.isort', +] +``` + +You should now be able to run `pants lint`, and possibly `pants fmt`: + +``` +$ pants lint src/py/project.py +17:54:32.51 [INFO] Completed: lint - Flake8 succeeded. +17:54:32.70 [INFO] Completed: lint - Black succeeded. +All done! ✨ 🍰 ✨ +1 file would be left unchanged. + +17:54:33.91 [INFO] Completed: lint - isort succeeded. + +✓ Black succeeded. +✓ Flake8 succeeded. +✓ isort succeeded. +``` + +:::note How to activate MyPy +MyPy is run with the [check goal](../goals/check.mdx), rather than `lint`. 
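+For example, assuming the `pants.backend.python.typecheck.mypy` backend is enabled:
+
+```bash
+❯ pants check ::
+```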
+::: + +## Configuring the tools, for example, adding plugins + +You can configure each formatter and linter using these options: + +| Option | What it does | +| :------------------------ | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `interpreter_constraints` | What interpreter to run the tool with. (`bandit`, `flake8`, and `pylint` determine this based on your [code's interpreter constraints](./interpreter-compatibility.mdx).) | +| `args` | Any command-line arguments you want to pass to the tool. | +| `config` | Path to a config file. Useful if the file is in a non-standard location such that it cannot be auto-discovered. | +| `install_from_resolve` | Name of a custom resolve to use for tool versions and plugins. See [Lockfiles for tools](./lockfiles.mdx#lockfiles-for-tools). | +| `requirements` | Optional subset of requirements to install from the custom resolve for this formatter or linter. See [Lockfiles for tools](./lockfiles.mdx#lockfiles-for-tools). | + +For example: + +```toml title="pants.toml" +[docformatter] +args = ["--wrap-summaries=100", "--wrap-descriptions=100"] + +[python.resolves] +# A custom resolve that updates the version and adds a custom plugin. +flake8 = "3rdparty/python/flake8.lock" + +[flake8] +# Load a config file in a non-standard location. +config = "build-support/flake8" +install_from_resolve = "flake8" +``` + +Then set up the resolve's inputs: + +```python tab={"label":"3rdparty/python/BUILD"} +python_requirements( + source="flake8-requirements.txt", + resolve="flake8", +) +``` + +```text tab={"label":"3rdparty/python/flake8-requirements.txt"} +flake8==6.0.0 +flake8-bugbear>=23.3.23 +``` + +And generate its custom lockfile: + +```shell title="Bash" +$ pants generate-lockfiles --resolve=flake8 +16:00:39.26 [INFO] Completed: Generate lockfile for flake8 +16:00:39.29 [INFO] Wrote lockfile for the resolve `flake8` to 3rdparty/python/flake8.lock +``` + +Run `pants help-advanced black`, `pants help-advanced flake8`, and so on for more information. + +:::note Config files are normally auto-discovered +For tools that autodiscover config files—such as Black, isort, Flake8, and Pylint—Pants will include any relevant config files in the process's sandbox when running the tool. + +If your config file is in a non-standard location, you must instead set the `--config` option, e.g. `[isort].config`. This will ensure that the config file is included in the process's sandbox and Pants will instruct the tool to load the config. +::: + +## Running only certain formatters or linters + +To temporarily skip a tool, use the `--skip` option for that tool. For example, run: + +```bash +❯ pants --black-skip --flake8-skip lint :: +``` + +You can also use the `--lint-only` and `--fmt-only` options with the names of the tools: + +```bash +❯ pants lint --only=black :: + +# To run several, you can use either approach: +❯ pants fmt --only=black --only=isort :: +❯ pants fmt --only='["black", "isort"]' :: +``` + +You can also skip for certain targets with the `skip_` fields, which can be useful for [incrementally adopting new tools](https://www.youtube.com/watch?v=BOhcdRsmv0s). For example: + +```python title="project/BUILD" +python_sources( + name="lib", + # Skip Black for all non-test files in this folder. 
+ skip_black=True, + overrides={ + "strutil.py": {"skip_flake8": True}, + ("docutil.py", "dirutil.py"): {"skip_isort": True}, + }, +) + +python_tests( + name="tests", + # Skip isort for all the test files in this folder. + skip_isort=True, +) +``` + +When you run `pants fmt` and `pants lint`, Pants will ignore any files belonging to skipped targets. + +## Tip: only run over changed files + +With formatters and linters, there is usually no need to rerun on files that have not changed. + +Use the option `--changed-since` to get much better performance, like this: + +```bash +❯ pants --changed-since=HEAD fmt +``` + +or + +```bash +❯ pants --changed-since=main lint +``` + +Pants will find which files have changed and only run over those files. See [Advanced target selection](../../using-pants/advanced-target-selection.mdx) for more information. + +## Tips for specific tools + +### Order of `backend_packages` matters for `fmt` + +Pants will run formatters in the order in which they appear in the `backend_packages` option. + +For example, you likely want to put Autoflake (which removes unused imports) before Black and isort, which will format your import statements. + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + # Note that we want Autoflake to run before Black and isort, + # so it must appear first. + "pants.backend.python.lint.autoflake", + "pants.backend.python.lint.black", + "pants.backend.python.lint.isort", +] +``` + +### Bandit, Flake8, and Pylint: report files + +Flake8, Bandit, and Pylint can generate report files saved to disk. + +For Pants to properly preserve the reports, instruct the tools to write to the `reports/` folder +by updating their config files, or `--flake8-args`, `--bandit-args`, and `--pylint-args`. For +example, in your `pants.toml`: + +```toml +[bandit] +args = ["--output=reports/report.txt"] + +[flake8] +args = ["--output-file=reports/report.txt"] + +[pylint] +args = ["--output-format=text:reports/report.txt"] +``` + +Pants will copy all reports into the folder `dist/lint/`. + +### Pylint and Flake8: how to add first-party plugins + +See [`[pylint].source_plugins`](../../../reference/subsystems/pylint.mdx#source_plugins) and [`[flake8].source_plugins`](../../../reference/subsystems/flake8.mdx#source_plugins) for instructions to add plugins written by you. + +### Bandit: less verbose logging + +Bandit output can be extremely verbose, including on successful runs. You may want to use its `--quiet` option, which will turn off output for successful runs but keep it for failures. + +For example, you can set this in your `pants.toml`: + +```toml +[bandit] +args = ["--quiet"] +``` + +### Black and isort can work together + +If you use both `black` and `isort`, you most likely will need to tell `isort` to work in a mode compatible with `black`. It is also a good idea to ensure they use the same line length. This requires tool-specific configuration, which could go into `pyproject.toml` for example: + +```toml +# pyproject.toml +[tool.isort] +profile = "black" +line_length = 100 + +[tool.black] +line-length = 100 +``` + +### Pyupgrade: specify which Python version to target + +You must tell Pyupgrade which version of Python to target, like this: + +```toml +# pants.toml +[pyupgrade] +args = ["--py36-plus"] +``` + +### isort: possible issues with its import classifier algorithm + +Some Pants users had to explicitly set `default_section = "THIRDPARTY"` to get isort 5 to correctly classify their first-party imports, even though this is the default value.
+ +They report that this config works for them: + +```toml +# pyproject.toml +[tool.isort] +known_first_party = ["my_org"] +default_section = "THIRDPARTY" +``` + +You may also want to try downgrading to isort 4.x by setting `version = "isort>=4.6,<5"` in the `[isort]` options scope. + +### Black and isort: excluding files + +Although it is possible to skip linters for individual targets using `skip_` parameters, sometimes an entire category of files needs to be excluded, e.g. generated files with a prefix or suffix. This can be done by configuring those tools directly within `pyproject.toml`. + +In order to exclude files from being formatted by Black and isort, it may be necessary to tell those tools to respect skip configuration options. Since Pants runs these tools on individual files rather than directories, they often run on every Python file in spite of the intuitive skip arguments. See e.g. [this post](https://stackoverflow.com/a/73296261) from the Black maintainers. + +Pants users report that this config works for them: + +```toml +# pyproject.toml +[tool.isort] +# tell isort to respect skip_glob +filter_files = true +# in particular, extend_skip_glob doesn't seem to work under Pants isort +skip_glob = [ + "**/*_skip_me.py", + "**/*_skip_me.pyi", +] + +[tool.black] +# in particular, extend-exclude and exclude will not work +force-exclude=''' +^( + .*_skip_me\.py(i)? +)$ +''' +``` diff --git a/versioned_docs/version-2.24/docs/python/overview/lockfiles.mdx b/versioned_docs/version-2.24/docs/python/overview/lockfiles.mdx new file mode 100644 index 000000000..3fb76d623 --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/lockfiles.mdx @@ -0,0 +1,285 @@ +--- + title: Lockfiles + sidebar_position: 2 +--- + +Securely locking down your third-party dependencies. + +--- + +Third-party dependencies are typically specified via ranges of allowed versions, known as "requirements", one for each dependency, in a file such as `requirements.txt` or `pyproject.toml`. Examples of requirement strings include `mypy>1.0.0`, `Django>=3.1.0,<4`, and `pytest==7.1.1`. + +A dependency resolution tool like [Pip](https://pip.pypa.io/en/stable/) then takes these initial requirements and attempts to find and download a consistent set of transitive dependencies that are mutually compatible with each other and with the target Python interpreter version. + +When used naively, this dependency resolution process is unstable: if you run a resolve, and then some time later run another resolve on the same inputs, you may end up with a different resulting set of dependencies. This is because new versions of direct or transitive dependencies may have been published (or, in rare cases, yanked) between the two runs. + +This is an issue both for correctness (your code may not be compatible with the new versions) and security (a new version may contain a vulnerability). A further security concern is that repeatedly downloading even the _same_ versions exposes you to greater risk if one of those versions is later compromised. + +Dependency resolution can also be a performance bottleneck, with the same complex version compatibility logic running repeatedly, and unnecessarily. + +Pants offers a solution to these issues that ensures stable, hermetic, secure builds over time, in the form of _lockfiles_. + +### What are lockfiles? + +A lockfile is a metadata file that enumerates specific pinned versions of every transitive third-party dependency.
It also provides the expected SHA256 hashes of the downloadable artifacts (sdists and wheels) for each dependency. A lockfile can contain dependency version information that is valid across multiple platforms and Python interpreter versions. Lockfiles can be large and complex, but fortunately Pants will generate them for you! + +If you use lockfiles, and we highly recommend that you do, then Pants will use the locked transitive dependency versions in every build, and only change them when you deliberately update your lockfiles. Pants will also verify the downloaded artifacts against their expected hashes, to ensure that they haven't been compromised after the lockfile was generated. + +Pants supports multiple lockfiles for different parts of your repo, via the mechanism of "resolves": logical names given to lockfiles so that they are easy to reference. + +:::note Lockfiles are generated by Pex +Pants delegates lockfile creation and consumption to the [Pex](https://github.com/pex-tool/pex) tool. So you may see standard lockfiles referred to as "Pex-style" lockfiles. +::: + +### Getting started with resolves + +First, you'll need to turn on the resolves functionality for the repo: + +```toml title="pants.toml" +[python] +enable_resolves = true +``` + +Initially, Pants will assume a single resolve named `python-default` which uses the repo's [default interpreter constraints](../../../reference/subsystems/python.mdx#interpreter_constraints) and references a lockfile at `3rdparty/python/default.lock`. You can change the name of the default resolve, and/or the location of its lockfile, via: + +```toml title="pants.toml" +[python] +enable_resolves = true +default_resolve = "myresolve" + +[python.resolves] +myresolve = "path/to/mylockfile" +``` + +You generate the lockfile as follows: + +```shell title="Bash" +$ pants generate-lockfiles +19:00:39.26 [INFO] Completed: Generate lockfile for python-default +19:00:39.29 [INFO] Wrote lockfile for the resolve `python-default` to 3rdparty/python/default.lock +``` + +The inputs used to generate a lockfile are third-party dependencies in your repo, expressed via [`python_requirement` targets](./third-party-dependencies.mdx), or the `python_requirements` / `poetry_requirements` generator targets. In this case, since you haven't yet explicitly mapped your requirement targets to a resolve, they will all map to `python-default`, and so all serve as inputs to the default lockfile. + +### Multiple lockfiles + +It's generally simpler to have a single resolve for the whole repository, if you can get away with it. But sometimes you may need more than one resolve, if you genuinely have conflicting requirements in different parts of your repo. For example, you may have both Django 3 and Django 4 projects in your repo.
+ +If you need multiple resolves, you declare them in your config file: + +```toml title="pants.toml" +[python] +enable_resolves = true +default_resolve = "data_science" + +[python.resolves] +data_science = "3rdparty/python/data_science.lock" +webapps_django3 = "3rdparty/python/webapps_django3.lock" +webapps_django4 = "3rdparty/python/webapps_django4.lock" +``` + +Then, you partition your requirements targets across these resolves using the `resolve` field, and possibly the [parametrize](../../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) mechanism: + +```python title="3rdparty/python/BUILD" +python_requirement( + name="django3", + requirements=["django>=3.1.0,<4"], + resolve="webapps_django3", +) + +python_requirement( + name="django4", + requirements=["django>=4.0.0,<5"], + resolve="webapps_django4", +) + +python_requirements( + name="webapps_shared", + source="webapps-shared-requirements.txt", + resolve=parametrize("webapps_django3", "webapps_django4") +) + +poetry_requirements( + name="data_science_requirements", +) +``` + +Any requirements targets that don't specify an explicit `resolve=` will be associated with the default resolve. + +As before, you run `pants generate-lockfiles` to generate the lockfiles. You can use the `--resolve` flag to generate just a subset of lockfiles. E.g., + +```shell title="Bash" +$ pants generate-lockfiles --resolve=webapps_django3 --resolve=webapps_django4 +19:00:39.26 [INFO] Completed: Generate lockfile for webapps_django3 +19:00:39.29 [INFO] Completed: Generate lockfile for webapps_django4 +19:00:40.02 [INFO] Wrote lockfile for the resolve `webapps_django3` to 3rdparty/python/webapps_django3.lock +19:00:40.17 [INFO] Wrote lockfile for the resolve `webapps_django4` to 3rdparty/python/webapps_django4.lock +``` + +Finally, you update your first-party code targets, such as `python_sources`, `python_tests`, and `pex_binary` to set their `resolve=` field (which, as before, defaults to the default resolve). + +```python title="my/project/BUILD" +python_sources( + resolve="webapps_django3", +) + +python_tests( + name="tests", + resolve="webapps_django3", + # You can use `overrides` to change certain generated targets + overrides={"test_django4.py": {"resolve": "webapps_django4"}}, +) +``` + +If a first-party target is compatible with multiple resolves, e.g., shared utility code, you can use the [parametrize](../../using-pants/key-concepts/targets-and-build-files.mdx#parametrizing-targets) mechanism with the `resolve=` field. + +:::note Transitive dependencies must use the same resolve +All transitive dependencies of a source target must use the same resolve. Pants's dependency inference already handles this for you by only inferring dependencies between targets that share the same resolve. + +If you manually add a dependency across different resolves, Pants will error with a helpful message when you try to use that dependency. +::: + +To reiterate an important distinction: The `resolve=` field on a third-party requirements target specifies that these requirements are _inputs_ to the lockfile generator for that resolve. The `resolve=` field on a first-party source target specifies that this target will _consume_ the generated lockfile for that resolve. + +### Interpreter constraints + +A lockfile will contain dependencies for all requested Python versions. By default, these are the global constraints specified by the [\[python\].interpreter_constraints](../../../reference/subsystems/python.mdx#interpreter_constraints) option.
You can override this per-lockfile using the [\[python\].resolves_to_interpreter_constraints](../../../reference/subsystems/python.mdx#resolves_to_interpreter_constraints) option. + +### Modifying lockfile generation behavior + +You can use the following options to affect how the lockfile generator resolves dependencies for each resolve: + +- [\[python\].resolves_to_constraints_file](../../../reference/subsystems/python.mdx#resolves_to_constraints_file): For each resolve, a path to a [Pip constraints file](https://pip.pypa.io/en/stable/user_guide/#constraints-files) to use when resolving that lockfile. +- [\[python\].resolves_to_no_binary](../../../reference/subsystems/python.mdx#resolves_to_no_binary): For each resolve, a list of projects that must only resolve to sdists and not wheels. Use the value `[":all:"]` to disable wheels for all packages. +- [\[python\].resolves_to_only_binary](../../../reference/subsystems/python.mdx#resolves_to_only_binary): For each resolve, a list of projects that must only resolve to wheels and not sdists. Use the value `[":all:"]` to disable sdists for all packages. + +You can use the key `__default__` to set the value for all resolves at once. + +### Updating lockfiles + +If you modify the third-party requirements of a resolve then you must regenerate its lockfile by running the `generate-lockfiles` goal. Pants will display an error if a lockfile is no longer compatible with its updated requirements. + +You can have Pants display a useful summary of what changed between the old and new versions of the generated lockfile, by setting: + +```toml title="pants.toml" +[generate-lockfiles] +diff = true +``` + +Ideally, whenever you generate a lockfile, you would audit it for bugs, compliance, and security concerns. In practice this is intractable to do manually. We would like to integrate with automated auditing tools and services in the future, so watch this space for updates, or feel free to [reach out on Slack](/community/members) if this is important to you and you'd like to work on it. + +### Lockfile subsetting + +When consuming a lockfile, Pants uses only the necessary subset of its transitive dependencies in each situation. + +For example, when running a test, only the requirements actually used (transitively) by that test will be present on the `sys.path`. This means that a test run won't be invalidated if unrelated requirements have changed, which improves cache hit rates. The same holds true when running and packaging code. + +You can override this subsetting behavior by setting the [\[python\].run_against_entire_lockfile](../../../reference/subsystems/python.mdx#run_against_entire_lockfile) option. + +### Lockfiles for tools + +Pants's Python support typically involves invoking underlying tools, such as `pytest`, `mypy`, and `black`, in subprocesses. Almost all these tools are themselves written in Python and thus depended on via requirement strings, just like your third-party import dependencies. + +It is strongly recommended that these tools be installed from a hermetic lockfile, for the same security and stability reasons stated above. In fact, Pants ships with built-in lockfiles for every Python tool it uses, and uses them automatically. + +The only time you need to think about this is if you want to customize the tool requirements that Pants uses. This might be the case if you want to modify the version of a tool or add extra requirements (for example, tool plugins).
+ +Tools can also be installed from a specific resolve instead of from the built-in lockfile. This is useful for specifying a version of the tool and including extra packages. To do this, set `install_from_resolve` and `requirements` on the tool's config section: + +```toml title="pants.toml" +[python.resolves] +pytest = "3rdparty/python/pytest.lock" + +[pytest] +install_from_resolve = "pytest" # Use this resolve's lockfiles. +requirements = ["//3rdparty/python:pytest"] # Use these requirements from the lockfile. +``` + +Then set up the resolve's inputs: + +```python tab={"label":"3rdparty/python/BUILD"} +python_requirements( + name="pytest", + source="pytest-requirements.txt", + resolve="pytest", +) +``` + +```text tab={"label":"3rdparty/python/pytest-requirements.txt"} +# The default requirements (possibly with custom versions). +pytest==7.1.1 +pytest-cov>=2.12,!=2.12.1,<3.1 +pytest-xdist>=2.5,<3 +ipdb +# Our extra requirement. +pytest-myplugin>=1.2.0,<2 +``` + +And generate its custom lockfile: + +```shell title="Bash" +$ pants generate-lockfiles --resolve=pytest +19:00:39.26 [INFO] Completed: Generate lockfile for pytest +19:00:39.29 [INFO] Wrote lockfile for the resolve `pytest` to 3rdparty/python/pytest.lock +``` + +Note that some tools, such as Flake8 and Bandit, must run on a Python interpreter that is compatible with the code they operate on. In this case you must ensure that the [interpreter constraints](#interpreter-constraints) for the tool's resolve are the same as those for the code in question. + +### Invalidating tool lockfiles + +Pants will verify that any requirements set in the `requirements` option are provided by the lockfile specified by `install_from_resolve`, and will error if not. This lets you ensure that you don't inadvertently use an older version of a tool if you update its requirements but forget to regenerate the lockfile. + +The `requirements` option can either list requirement strings, such as `pytest==7.3.1`, or target addresses, such as `//3rdparty/python:pytest` (the `//` prefix tells Pants that these are target addresses). The latter is particularly useful as it allows you to avoid specifying the requirements redundantly in two places. Instead, the target can serve as both an input to the lockfile generator and as the requirements to verify. + +Pants will only use the given `requirements` from the lockfile. If you don't set `requirements`, Pants will use the entire lockfile, and won't validate that it provides the desired tool at the desired version. + +### Sharing lockfiles between tools and code + +In some cases a tool also provides a runtime library. For example, `pytest` is run as a tool in a subprocess, but your tests can also `import pytest` to access testing functionality. + +Rather than repeat the same requirement in two different resolves, you can point the tool at an existing resolve that you also use for your code: + +```toml title="pants.toml" +[pytest] +install_from_resolve = "python-default" +``` + +Of course, you have to ensure that this resolve does in fact provide appropriate versions of the tool.
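+For example, a sketch of the shared resolve's inputs (assuming the hypothetical path `3rdparty/python/requirements.txt` feeds the `python-default` resolve):
+
+```text
+# The shared lockfile must itself pin a usable pytest version.
+pytest==7.1.1
+pytest-cov>=2.12,!=2.12.1,<3.1
+```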
+ +As above, you will want to point `requirements` to the subset of targets representing the tool's requirements, so that Pants can verify that the resolve provides them, and can use just the needed subset without unnecessary invalidation: + +```toml title="pants.toml" +[pytest] +install_from_resolve = "python-default" + +requirements = [ + "//3rdparty/python#pytest", + "//3rdparty/python#pytest-cov", + "//3rdparty/python#pytest-xdist", + "//3rdparty/python#pytest-myplugin", + "//3rdparty/python#ipdb", +] +``` + +You can have a single resolve for all your tools, or even a single resolve for all your tools and code! This may be useful if you want to [export](../../../reference/goals/export.mdx) a virtualenv that includes all your dependencies and all the tools you use. + +But note that the more frequently you update a lockfile, the more likely it is that unrelated updates will come along for the ride, since Pants does not yet support an "only-if-needed" upgrade strategy. + +:::caution The previous way of generating tool lockfiles is deprecated! +There is an older way of generating tool lockfiles, by setting the `version` and `extra_requirements` options in a tool's config section. This method is deprecated in favor of the standard one described above. + +If you're using this deprecated tool lockfile generation mechanism, please switch to the one described here as soon as possible! +::: + +### Manually generating lockfiles + +Rather than using `generate-lockfiles` to generate Pex-style lockfiles, you can generate them manually. For example, when adopting Pants in a repository that already uses Poetry, you can generate a compatible lockfile by running `poetry export --dev`. + +Manually generated lockfiles must either use Pex's JSON format or pip's `requirements.txt`-style format (ideally with `--hash` entries for better supply chain security). + +For example: + +```text title="3rdparty/user_lock.txt" +freezegun==1.2.0 \ + --hash=sha256:93e90676da3... \ + --hash=sha256:e19563d0b05... +``` + +To use a manually generated lockfile for a resolve, point the resolve to that lockfile's path in [`[python].resolves`](../../../reference/subsystems/python.mdx#resolves). Then set [`[python].resolves_generate_lockfiles`](../../../reference/subsystems/python.mdx#resolves_generate_lockfiles) to `False`. Warning: it will likely be slower to install manually-generated user lockfiles than Pex ones, because Pants cannot as efficiently extract the subset of requirements used for a particular task; see the option [`[python].run_against_entire_lockfile`](../../../reference/subsystems/python.mdx#run_against_entire_lockfile). diff --git a/versioned_docs/version-2.24/docs/python/overview/pex.mdx b/versioned_docs/version-2.24/docs/python/overview/pex.mdx new file mode 100644 index 000000000..4a682a00e --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/pex.mdx @@ -0,0 +1,128 @@ +--- + title: PEX + sidebar_position: 5 +--- + +--- + +## PEX files + +When working with Python code, Pants makes frequent use of the [PEX](https://github.com/pex-tool/pex) (Python EXecutable) format. So, you'll see PEX referenced frequently in this documentation. + +A PEX is a self-contained Python environment, similar in spirit to a virtualenv. A PEX can contain any combination of Python source files, third-party requirements (sdists or wheels), resource files, and metadata describing the contents. + +Importantly, this metadata can include: + +- Python interpreter constraints. +- Python platforms, like `macosx_11_0_arm64-cp-39-cp39`. +- An entry point or console script.
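+ +This metadata is stored as JSON in a `PEX-INFO` file inside the archive. Since a built `.pex` file (described next) is a zip archive with an executable header, you can peek at the metadata with standard tools; for example (the path here is just illustrative): + +```shell +unzip -p dist/mybinary.pex PEX-INFO | python -m json.tool +```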
+ +A PEX can be bundled into a single `.pex` file. This file, when executed, knows how to unpack itself, find an interpreter that matches its constraints, and run itself on that interpreter. Therefore, deploying code packaged in a PEX file is as simple as copying the file to an environment that has a suitable Python interpreter. + +Check out [blog.pantsbuild.org/pants-pex-and-docker](https://blog.pantsbuild.org/pants-pex-and-docker/) for how this workflow gets even better when combined with Pants's Docker support! + +## Building a PEX + +You define a PEX using the [`pex_binary`](../../../reference/targets/pex_binary.mdx) target type: + +```python title="path/to/mybinary/BUILD" +python_sources(name="lib") + +pex_binary( + name="bin", + dependencies=[":lib"], + execution_mode="venv", +) +``` + +You then use the `package` goal to build the PEX, which will be output under the `dist/` directory. + +```shell +$ pants package path/to/mybinary:bin +``` + +There are several fields you can set on a `pex_binary` to configure the layout, entry point, and behavior of the resulting PEX. See the [documentation](../../../reference/targets/pex_binary.mdx) for details. + +## Setting the target platforms for a PEX + +By default, the `package` goal builds a PEX that runs on the architecture and OS of the local machine (or local [environment](../../using-pants/environments.mdx)), and on a locally-found interpreter compatible with your code's [interpreter constraints](./interpreter-compatibility.mdx). However, you can also build a multiplatform PEX - one that will run on multiple architecture+OS+interpreter combinations. + +To do so, you must configure the [`complete_platforms`](../../../reference/targets/pex_binary.mdx#complete_platforms) field on your `pex_binary` to point to `file` targets that provide detailed information about your target platforms. This is information that Pants can't determine itself because it's not running on those platforms: + +```python title="BUILD" +file( + name="linux_x86_py39", + source="linux_x86_py39.json", +) + +file( + name="linux_aarch64_py310", + source="linux_aarch64_py310.json", +) + +pex_binary( + ... + complete_platforms=[":linux_x86_py39", ":linux_aarch64_py310"] + ... +) +``` + +### Generating the `complete_platforms` file + +The core command for generating a `complete_platforms` file looks like this (see [pex docs](https://docs.pex-tool.org/buildingpex.html#complete-platform)): + +```sh +pip install pex && pex3 interpreter inspect --markers --tags --indent=2 +``` + +You can run `pex3 interpreter inspect --help` for more options, and in particular for how to select the desired target interpreter. + +You can run this command in a few different ways: + +1. Manually/interactively in your target environment +2. In a Docker container + + If you have a Docker image corresponding to your target environment, you can start a container with that image and run the PEX tool inside it to generate the file (substitute your image for `<image>:<tag>`): + + ```sh + docker run --entrypoint='/bin/sh' -it --rm <image>:<tag> -c "python -m pip install --target=/tmp/pex pex >/dev/null 2>&1 && PYTHONPATH=/tmp/pex /tmp/pex/bin/pex3 interpreter inspect --markers --tags --indent=2" > complete_platforms.json + ``` +3. Inside a Python process (e.g. [AWS Lambda](../integrations/aws-lambda.mdx#specifying-a-runtime-explicitly) or [Google Cloud Functions](../integrations/google-cloud-functions.mdx#specifying-a-runtime-explicitly)); see the sketch below.
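+ +For the third option, here is a minimal sketch (it assumes you have installed the `pex` distribution into the running interpreter's environment so that the `pex3` console script is on the `PATH`; the function name is just illustrative): + +```python +import subprocess + +def capture_complete_platform() -> str: + # `pex3 interpreter inspect` describes the interpreter this process runs on. + return subprocess.run( + ["pex3", "interpreter", "inspect", "--markers", "--tags", "--indent=2"], + check=True, + capture_output=True, + text=True, + ).stdout +```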
+ +:::caution Platform-specific dependencies must be available as wheels +Some Python distributions include native code, and therefore require platform-specific artifacts. Often, such artifacts are pre-built and available on PyPI as platform-specific wheels. But in some cases they are only available as source distributions (sdists) and must be compiled into platform-specific wheels at build time. Pants can only build such sdists into wheels for the local machine or [environment](../../using-pants/environments.mdx), and cannot cross-compile for other target platforms. Therefore, to build for platforms other than the local one, all the platform-specific third-party packages that your PEX transitively depends on must be available as prebuilt wheels for each platform you care about. If those wheels aren't available on PyPI, you can always build them manually once and host them on a private package repository. +::: + +## Setting the .pex file's shebang + +You can invoke a `.pex` file either directly, as in `path/to/mybinary.pex`, or via an explicitly specified interpreter, as in `path/to/python3.11 path/to/mybinary.pex`. In the former case, PEX's bootstrapping logic will find an interpreter based on the `.pex` file's shebang. By default, this will be `#!/usr/bin/env pythonX.Y`, where X.Y is some version compatible with your code's [interpreter constraints](./interpreter-compatibility.mdx). This default may not be appropriate in a few cases: + +- If your PEX is compatible with multiple Python versions and the chosen one (X.Y) is not present on one of your target systems. +- If the interpreter on the target system is not present on the `PATH`. +- If `/usr/bin/env` is not available on the target system. + +In these cases, you can override the default shebang using the [`shebang`](../../../reference/targets/pex_binary.mdx#shebang) field on the `pex_binary` target, or invoke the `.pex` via an explicit interpreter. + +## Setting Pex and Pip versions + +Pants makes use of the [Pex](https://github.com/pex-tool/pex) command-line tool internally for building PEXes. The Pex version that Pants uses is specified by the `version` option under the `pex-cli` subsystem. The known Pex versions are specified by the `known_versions` option under the `pex-cli` subsystem. You can see all Pex tool options and their current values by running `pants help-advanced pex-cli`. To upgrade the Pex version, update these option values accordingly. For instance, in `pants.toml`, to upgrade to Pex 2.1.143: + +``` +[pex-cli] +version = "v2.1.143" +known_versions = [ + "v2.1.143|macos_arm64|7dba8776000b4f75bc9af850cb65b2dc7720ea211733e8cb5243c0b210ef3c19|4194291", + "v2.1.143|macos_x86_64|7dba8776000b4f75bc9af850cb65b2dc7720ea211733e8cb5243c0b210ef3c19|4194291", + "v2.1.143|linux_x86_64|7dba8776000b4f75bc9af850cb65b2dc7720ea211733e8cb5243c0b210ef3c19|4194291", + "v2.1.143|linux_arm64|7dba8776000b4f75bc9af850cb65b2dc7720ea211733e8cb5243c0b210ef3c19|4194291" +] +``` + +The Pex version determines which Pip versions are supported. To see the list of Pip versions that a given Pex version supports, you can either install that version of Pex as a standalone CLI and run `pex --help`, or examine [pex/pip/version.py](https://github.com/pex-tool/pex/blob/main/pex/pip/version.py) in the sources of the relevant Pex version. + +The Pip version that Pex uses is determined by the `pip_version` option in Pants. To upgrade the Pip version, update this option value accordingly.
For instance, in `pants.toml`, to set the Pip version to be the latest supported by Pex: + +``` +[python] +pip_version = "latest" +``` diff --git a/versioned_docs/version-2.24/docs/python/overview/third-party-dependencies.mdx b/versioned_docs/version-2.24/docs/python/overview/third-party-dependencies.mdx new file mode 100644 index 000000000..e4cc13f9a --- /dev/null +++ b/versioned_docs/version-2.24/docs/python/overview/third-party-dependencies.mdx @@ -0,0 +1,488 @@ +--- + title: Third-party dependencies + sidebar_position: 1 +--- + +How to use third-party Python libraries in your project. + +--- + +Pants handles dependencies with more precision than traditional Python workflows. Traditionally, you have a single heavyweight [virtual environment](https://docs.python.org/3/tutorial/venv.html) that includes a large set of dependencies, whether or not you actually need them for your current task. + +Instead, Pants understands exactly which dependencies every file in your project needs, and efficiently uses just that subset of dependencies needed for the task. + +``` +❯ pants dependencies src/py/util.py +3rdparty/py#requests + +❯ pants dependencies --transitive src/py/app.py +3rdparty/py#flask +3rdparty/py#requests +``` + +Among other benefits, this precise and automatic understanding of your dependencies gives you fine-grained caching. This means, for example, that if none of the dependencies for a particular test file have changed, the cached result can be safely used. + +## Teaching Pants your "universe"(s) of dependencies + +For Pants to know which dependencies each file uses, it must first know which specific dependencies are in your "universe", that is, all the third-party dependencies your project directly uses. + +By default, Pants uses a single universe for your whole project, but it's possible to set up multiple. See the header "Multiple resolves" in the "Lockfiles" section. + +Each third-party dependency you directly use is modeled by a `python_requirement` target: + +```python title="BUILD" +python_requirement( + name="django", + requirements=["Django==3.2.1"], +) +``` + +You do not need a `python_requirement` target for transitive dependencies, that is, requirements that you do not directly import. + +To minimize boilerplate, Pants has target generators to generate `python_requirement` targets for you: + +- `python_requirements` for `requirements.txt` or [PEP 621](https://peps.python.org/pep-0621/)-compliant `pyproject.toml`. +- `poetry_requirements` for Poetry projects. + +### `requirements.txt` + +The `python_requirements()` target generator parses a [`requirements.txt`-style file](https://pip.pypa.io/en/stable/user_guide/#requirements-files) to produce a `python_requirement` target for each entry. 
+ +For example: + +```text tab={"label":"requirements.txt"} +flask>=1.1.2,<1.3 +requests[security]==2.23.0 +dataclasses ; python_version<'3.7' +``` + +```python tab={"label":"BUILD"} +# This will generate three targets: +# +# - //:reqs#flask +# - //:reqs#requests +# - //:reqs#dataclasses +python_requirements(name="reqs") + +# The above target generator is spiritually equivalent to this: +python_requirement( + name="flask", + requirements=["flask>=1.1.2,<1.3"], +) +python_requirement( + name="requests", + requirements=["requests[security]==2.23.0"], +) +python_requirement( + name="dataclasses", + requirements=["dataclasses ; python_version<'3.7'"], +) +``` + +If the file uses a name other than `requirements.txt`, set `source` like this: + +```python +python_requirements(source="reqs.txt") +``` + +:::note Where should I put the `requirements.txt`? +You can name the file whatever you want, and put it wherever makes the most sense for your project. + +In smaller repositories that only use Python, it's often convenient to put the file at the "build root" (top-level), as used on this page. + +For larger repositories or multilingual repositories, it's often useful to have a `3rdparty` or `3rdparty/python` directory. Rather than the target's address being `//:reqs#my_requirement`, its address would be `3rdparty/python:reqs#my_requirement`, for example; or `3rdparty/python#my_requirement` if you leave off the `name` field for `python_requirements`. See [Target Generation](../../using-pants/key-concepts/targets-and-build-files.mdx#target-generation). +::: + +### PEP 621-compliant `pyproject.toml` + +The `python_requirements()` target generator also supports parsing dependencies from a [PEP 621](https://peps.python.org/pep-0621/)-compliant `pyproject.toml`. You must manually specify the source file if you want to use a `pyproject.toml` file to generate `python_requirement` targets. For example: + +```python +python_requirements(source="pyproject.toml") +``` + +Further information about PEP 621 fields can be found in the PEP documentation. Pants will read dependencies from the `project.dependencies` list, as well as the `project.optional-dependencies` mappings. Pants makes no distinction between `dependencies` and `optional-dependencies`: all dependencies are treated in the same manner, as though they were listed in the `dependencies` list. For example: + +```toml tab={"label":"pyproject.toml"} +[project] +dependencies = [ + "flask>=1.1.2,<1.3", + "requests[security]==2.23.0", +] + +[project.optional-dependencies] +dataclass = ["dataclasses ; python_version<'3.7'"] +``` + +```python tab={"label":"BUILD"} +# This will generate three targets: +# +# - //:reqs#flask +# - //:reqs#requests +# - //:reqs#dataclasses +python_requirements(source="pyproject.toml") + +# The above target generator is spiritually equivalent to this: +python_requirement( + name="flask", + requirements=["flask>=1.1.2,<1.3"], +) +python_requirement( + name="requests", + requirements=["requests[security]==2.23.0"], +) +python_requirement( + name="dataclasses", + requirements=["dataclasses ; python_version<'3.7'"], +) +``` + +### Poetry + +The `poetry_requirements()` target generator parses the [Poetry](https://python-poetry.org/docs/) section in `pyproject.toml` to produce a `python_requirement` target for each entry.
+ +```toml tab={"label":"pyproject.toml"} +[tool.poetry.dependencies] +python = "^3.8" +requests = {extras = ["security"], version = "~1"} +flask = "~1.12" + +[tool.poetry.dev-dependencies] +isort = "~5.5" +``` + +```python tab={"label":"BUILD"} +# This will generate three targets: +# +# - //:poetry#flask +# - //:poetry#requests +# - //:poetry#isort +poetry_requirements(name="poetry") + +# The above target generator is spiritually equivalent to this: +python_requirement( + name="requests", + requirements=["requests[security]>=1,<2.0"], +) +python_requirement( + name="flask", + requirements=["flask>=1.12,<1.13"], +) +python_requirement( + name="isort", + requirements=["isort>=5.5,<5.6"], +) +``` + +Note that Pants does not consume your `poetry.lock` file. Instead, see the [page on lockfiles](./lockfiles.mdx). + +## How dependencies are chosen + +Once Pants knows about your "universe"(s) of dependencies, it determines which subset should be used through [dependency inference](https://blog.pantsbuild.org/dependency-inference/). Pants will read your import statements, like `import django`, and map them back to the relevant `python_requirement` targets. Run [`pants dependencies path/to/file.py`](../../using-pants/project-introspection.mdx) or `pants dependencies path/to:target` to confirm this works. + +If dependency inference does not work—such as because it's a runtime dependency you do not import—you can explicitly add the `python_requirement` target to the `dependencies` field, like this: + +```python title="project/BUILD" +python_sources( + name="lib", + dependencies=[ + # We don't have an import statement for this dep, so inference + # won't add it automatically. We add it explicitly instead. + "3rdparty/python#psycopg2-binary", + ], +) +``` + +### Use `modules` and `module_mapping` when the module name is not standard + +Some dependencies expose a module different from their project name, such as `beautifulsoup4` exposing `bs4`. Pants assumes that a dependency's module is its normalized name—i.e. `My-distribution` exposes the module `my_distribution`. If that default does not apply to a dependency, it will not be inferred. + +Pants already defines a [default module mapping](https://github.com/pantsbuild/pants/blob/main/src/python/pants/backend/python/dependency_inference/default_module_mapping.py) for some common Python requirements, but you may need to augment this by teaching Pants additional mappings: + +```python title="3rdparty/python/BUILD" +# `modules` and `module_mapping` is only needed for requirements where +# the defaults do not work. + +python_requirement( + name="my_distribution", + requirements=["my_distribution==4.1"], + modules=["custom_module"], +) + +python_requirements( + name="reqs", + module_mapping={"my_distribution": ["custom_module"]}, +) + +poetry_requirements( + name="poetry", + module_mapping={"my_distribution": ["custom_module"]}, +) +``` + +If the dependency is a type stub, and the default does not work, set `type_stub_modules` on the `python_requirement` target, and `type_stubs_module_mapping` on the `python_requirements` and `poetry_requirements` target generators. (The default for type stubs is to strip off `types-`, `-types`, `-stubs`, and `stubs-`. So, `types-requests` gives type stubs for the module `requests`.) + +### Warning: multiple versions of the same dependency + +It's invalid in Python to have conflicting versions of the same requirement, e.g. `Django==2` and `Django==3`. Instead, Pants supports "multiple resolves" (i.e.
multiple lockfiles), as explained in the section on lockfiles below. + +When you have multiple targets for the same dependency and they belong to the same [resolve](./lockfiles.mdx), dependency inference will not work due to ambiguity. If you're using lockfiles—which we strongly recommend—the solution is to set the `resolve` field for problematic `python_requirement` targets so that each resolve has only one requirement and there is no ambiguity. + +This ambiguity is often a problem when you have 2+ `requirements.txt` or `pyproject.toml` files in your project, such as `project1/requirements.txt` and `project2/requirements.txt` both specifying `django`. You may want to set up each `poetry_requirements`/`python_requirements` target generator to use a distinct resolve so that there is no overlap. Alternatively, if the versions are the same, you may want to consolidate the requirements into a common file. + +## Lockfiles + +We strongly recommend using [lockfiles](./lockfiles.mdx) to ensure secure, repeatable dependency resolution. See [here](./lockfiles.mdx) for details on how to do so. + +## Advanced usage + +### Requirements with undeclared dependencies + +Sometimes a requirement does not properly declare the other dependencies it needs in its packaging metadata, so those will not be installed. It's especially common to leave off dependencies on `setuptools`, which results in import errors like this: + +``` +import pkg_resources +ModuleNotFoundError: No module named 'pkg_resources' +``` + +To work around this, you can use the `dependencies` field of `python_requirement`, so that anytime you depend on your requirement, you also bring in the undeclared dependency. + +```python title="BUILD" +# First, make sure you have a `python_requirement` target for +# the undeclared dependency. +python_requirement( + name="setuptools", + requirements=["setuptools"], +) + +python_requirement( + name="mongomock", + requirements=["mongomock"], + dependencies=[":setuptools"], +) +``` + +If you are using the `python_requirements` and `poetry_requirements` target generators, you can use the `overrides` field to do the same thing: + +```python tab={"label":"BUILD"} +python_requirements( + name="reqs", + overrides={ + "mongomock": {"dependencies": [":reqs#setuptools"]}, + }, +) +``` + +```text tab={"label":"requirements.txt"} +setuptools +mongomock +``` + +### Version control requirements + +You can install requirements from version control using two styles: + +- pip's proprietary VCS-style requirements, e.g. + - `git+https://github.com/django/django.git#egg=Django` + - `git+https://github.com/django/django.git@stable/2.1.x#egg=Django` + - `git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8#egg=Django` +- direct references from [PEP 440](https://www.python.org/dev/peps/pep-0440/#direct-references), e.g. + - `Django@ git+https://github.com/django/django.git` + - `Django@ git+https://github.com/django/django.git@stable/2.1.x` + - `Django@ git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8` + +:::note Version control via SSH +When using version-controlled direct references hosted on private repositories with SSH access: + +``` +target@ git+ssh://git@github.com:/myorg/myrepo.git@myhash +``` + +...you may see errors like: + +``` + Complete output (5 lines): + git@github.com: Permission denied (publickey). + fatal: Could not read from remote repository. + Please make sure you have the correct access rights + and the repository exists.
+ ---------------------------------------- +``` + +To fix this, Pants needs to be configured to pass relevant SSH-specific environment variables to processes by adding the following to `pants.toml`: + +``` +[subprocess-environment] +env_vars.add = [ + "SSH_AUTH_SOCK", +] +``` + +::: + +### Custom repositories + +There are two mechanisms for setting up custom Python distribution repositories: + +#### PEP-503 compatible indexes + +Use `[python-repos].indexes` to add [PEP 503-compatible](https://peps.python.org/pep-0503/) indexes, like PyPI. + +```toml title="pants.toml" +[python-repos] +indexes.add = ["https://custom-cheeseshop.net/simple"] +``` + +To exclusively use your custom index, i.e. to not use the default of PyPI, use `indexes = [..]` instead of `indexes.add = [..]`. + +#### pip `--find-links` + +Use the option `[python-repos].find_links` for flat lists of packages. As with pip's [`--find-links`](https://pip.pypa.io/en/stable/cli/pip_wheel/?highlight=find%20links#cmdoption-f) option, you can use either: + +- a URL to an HTML file with links to wheel and/or sdist files, or +- a `file://` absolute path to an HTML file with links, or to a local directory with wheel and/or + sdist files. See the section on local requirements below. + +```toml +[python-repos] +find_links = [ + "https://your/repo/here", + "file:///Users/pantsbuild/prebuilt_wheels", +] +``` + +#### Authenticating to custom repos + +To authenticate to custom repos, you may need to provide credentials (such as a username and password) in the URL. + +You can use [config file `%(env.ENV_VAR)s` interpolation](../../using-pants/key-concepts/options.mdx#config-file-interpolation) to load the values via environment variables. This avoids checking in sensitive information to version control. + +```toml title="pants.toml" +[python-repos] +indexes.add = ["http://%(env.INDEX_USERNAME)s:%(env.INDEX_PASSWORD)s@my.custom.repo/index"] +``` + +Alternatively, you can hardcode the value in a private (not checked-in) [.pants.rc file](../../using-pants/key-concepts/options.mdx#pantsrc-file) in each user's Pants repo that sets this config for the user: + +```toml title=".pants.rc" +[python-repos] +indexes.add = ["http://$USERNAME:$PASSWORD@my.custom.repo/index"] +``` + +### Local requirements + +There are two ways to specify local requirements from the filesystem: + +- [PEP 440 direct references](https://www.python.org/dev/peps/pep-0440/#direct-references) + +```python title="3rdparty/python/BUILD" +python_requirement( + name="django", + # Use an absolute path to a .whl or sdist file. + requirements=["Django @ file:///Users/pantsbuild/prebuilt_wheels/django-3.1.1-py3-none-any.whl"], +) + +# Reminder: we could also put this requirement string in requirements.txt and use the +# `python_requirements` target generator. +``` + +- The option `[python-repos].find_links` + +```toml tab={"label":"pants.toml"} +[python-repos] +# Use an absolute path to a directory containing `.whl` and/or sdist files. +find_links = ["file:///Users/pantsbuild/prebuilt_wheels"] +``` + +```shell +❯ ls /Users/pantsbuild/prebuilt_wheels +ansicolors-1.1.8-py2.py3-none-any.whl +django-3.1.1-py3-none-any.whl +``` + +```python tab={"label":"3rdparty/BUILD"} +# Use normal requirement strings, i.e. without file paths.
+python_requirement(name="ansicolors", requirements=["ansicolors==1.1.8"]) +python_requirement(name="django", requirements=["django>=3.1,<3.2"]) + +# Reminder: we could also put these requirement strings in requirements.txt and use the +# `python_requirements` target generator +``` + +Unlike PEP 440 direct references, `[python-repos].find_links` allows you to use multiple artifacts for the same project name. For example, you can include multiple `.whl` and sdist files for the same project in the directory; if `[python-repos].indexes` is still set, then Pex/pip may use artifacts both from indexes like PyPI and from your local `--find-links`. + +Both approaches require using absolute paths, and the files must exist on your machine. This is usually fine when locally iterating and debugging. This approach also works well if your entire team can use the same fixed location. Otherwise, see the below section. + +#### Working around absolute paths + +If you need to share a lockfile on different machines, and you cannot use the same absolute path, then you can use the option `[python-repos].path_mappings` along with `[python-repos].find_links`. (`path_mappings` is not intended for PEP 440 direct requirements.) + +The `path_mappings` option allows you to substitute a portion of the absolute path with a logical name, which can be set to a different value than your teammates. For example, the path +`file:///Users/pantsbuild/prebuilt_wheels/django-3.1.1-py3-none-any.whl` could become `file://${WHEELS_DIR}/django-3.1.1-py3-none-any.whl`, where each Pants user defines what `WHEELS_DIR` should be on their machine. + +This feature only works when using Pex lockfiles via `[python].resolves`. + +`[python-repos].path_mappings` expects values in the form `NAME|PATH`, e.g. `WHEELS_DIR|/Users/pantsbuild/prebuilt_wheels`. Also, still use an absolute path for `[python-repos].find_links`. + +If possible, we recommend using a common file location for your whole team, and leveraging [Pants's interpolation](../../using-pants/key-concepts/options.mdx#config-file-interpolation), so that you avoid each user needing to manually configure `[python-repos].path_mappings` and `[python-repos].find_links`. For example, in `pants.toml`, you could set `[python-repos].path_mappings` to `WHEELS_DIR|%(buildroot)s/python_wheels` and `[python-repos].find_links` to `%(buildroot)s/python_wheels`. Then, as long as every user has the folder `python_wheels` in the root of the repository, things will work without additional configuration. Or, you could use a value like `%(env.HOME)s/pants_wheels` for the path `~/pants_wheels`. + +```toml title="pants.toml" +[python-repos] +# No one needs to change these values, as long as they can use the same shared location. +find_links = ["file://%(buildroot)s/prebuilt_wheels"] +path_mappings = ["WHEELS_DIR|%(buildroot)s/prebuilt_wheels"] +``` + +If you cannot use a common file location via interpolation, then we recommend setting these options in a [`.pants.rc` file](../../using-pants/key-concepts/options.mdx#pantsrc-file). Every teammate will need to set this up for their machine. + +```toml title=".pants.rc" +[python-repos] +# Each user must set both of these to the absolute paths on their machines. +find_links = ["file:///Users/pantsbuild/prebuilt_wheels"] +path_mappings = ["WHEELS_DIR|/Users/pantsbuild/prebuilt_wheels"] +``` + +After initially setting up `[python-repos].path_mappings` and `[python-repos].find_links`, run `pants generate-lockfiles` or `pants generate-lockfiles --resolve=`. 
You should see the `path_mappings` key set in the lockfile's JSON. + +### Constraints files + +Sometimes, transitive dependencies of one of your third-party requirements can cause trouble. For example, sometimes a requirement does not pin its dependencies well enough, and a newer version of one of its transitive dependencies is released that breaks the requirement. +[Constraints files](https://pip.pypa.io/en/stable/user_guide/?highlight=constraints#constraints-files) allow you to pin transitive dependencies to certain versions, overriding the version that pip/Pex would normally choose. + +Constraints files are configured per-resolve, meaning that the resolves for your user code from `[python].resolves` and each Python tool, such as Black and Pytest, can have different configuration. Use the option `[python].resolves_to_constraints_file` to map resolve names to paths to pip-compatible constraints files. For example: + +```toml tab={"label":"pants.toml"} +[python.resolves_to_constraints_file] +data-science = "3rdparty/python/data_science_constraints.txt" +pytest = "3rdparty/python/pytest_constraints.txt" +``` + +```text tab={"label":"3rdparty/python/data_science_constraints.txt"} +requests==22.1.0 +urllib3==4.2 +``` + +You can also set the key `__default__` to apply the same constraints file to every resolve by default, although this is not always useful because resolves often need different constraints. + +### `only_binary` and `no_binary` + +You can use `[python].resolves_to_only_binary` to avoid using sdists (source distributions) for certain requirements, and `[python].resolves_to_no_binary` to avoid using bdists (wheel files) for certain requirements. + +`only_binary` and `no_binary` are configured per-resolve, meaning that the resolves for your user code from `[python].resolves` and each Python tool, such as Black and Pytest, can have different configuration. Use the options `[python].resolves_to_only_binary` and `[python].resolves_to_no_binary` to map resolve names to lists of Python requirement names. + +For example: + +```toml title="pants.toml" +[python.resolves_to_only_binary] +data-science = ["numpy"] + +[python.resolves_to_no_binary] +pytest = ["pytest-xdist"] +mypy = ["django-stubs"] +``` + +You can also set the key `__default__` to apply the same value to every resolve by default. + +## Tip: use `pants export` to create a virtual environment for IDEs + +See [Setting up an IDE](../../using-pants/setting-up-an-ide.mdx) for more information on `pants export`. This will create a virtual environment for your user code for compatibility with the rest of the Python ecosystem, e.g. IDEs like PyCharm. diff --git a/versioned_docs/version-2.24/docs/releases/_category_.json b/versioned_docs/version-2.24/docs/releases/_category_.json new file mode 100644 index 000000000..71ee09205 --- /dev/null +++ b/versioned_docs/version-2.24/docs/releases/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Releases", + "position": 15 +} diff --git a/versioned_docs/version-2.24/docs/releases/deprecation-policy.mdx b/versioned_docs/version-2.24/docs/releases/deprecation-policy.mdx new file mode 100644 index 000000000..8ac710dfc --- /dev/null +++ b/versioned_docs/version-2.24/docs/releases/deprecation-policy.mdx @@ -0,0 +1,108 @@ +--- + title: Deprecation policy + sidebar_position: 1 +--- + +How we try to provide you with a stable experience. + +--- + +Deprecations must live at least one minor release, meaning that if something is deprecated in 2.1.x, for example, it cannot be removed until 2.2.x.
+ +Every deprecation message will include a description of how to respond to the change, such as renaming a value in your `pants.toml` config file. When possible, we will automate the deprecation via the `update-build-files` goal. + +## Prohibited changes + +- Removing options without a deprecation. + - Deprecated options must behave the same as before. +- Changing default option values without a deprecation. +- Removing features without a deprecation. +- Substantial performance regressions (slowdown of >= 10%). + - If a new feature results in this slowdown, it should be put behind a flag that is disabled by default. + +## Allowed changes + +- Adding new options. +- Adding new functionality. +- Fixing bugs. +- Changes that are required by law. + +## Plugin API deprecation policy + +When [writing plugins](../writing-plugins/overview.mdx), you use Pants as a _library_, rather than a _binary_. That is, you import Pants code to write plugins. We try to make this API stable for your plugins. + +### What is public? + +A module, variable, method, function, or class is part of the public API if at least one of the following is true: + +- Its definition's docstring is marked with `:API: public`. +- Its enclosing scope is marked with `:API: public` and the name does not start with an underscore. +- It is abstract and any inheriting class published by Pants is marked `:API: public`. + +All other code defaults to being a private API and does not need to follow this deprecation policy. + +Examples: + +```python title="deprecation_example.py" +"""An example public module. + +This means that everything in this module is public, except for +values prefixed with `_`. + +:API: public +""" + +def demo_function(x: int) -> None: + """An example public top-level function. + + :API: public + """ + print(x) + + +class Demo: + """An example public class. + + All methods and class properties are public, except for values + prefixed with `_`. + + :API: public + """ + + def demo_method(self, x: int) -> None: + """An example public method. + + :API: public + """ + print(x) +``` + +### Prohibited API changes + +These changes all require a deprecation. + +- Removing a public API. +- Moving a public API to a new module. +- Removing parameters from a public function. +- Changing the default values of a public function's parameters. +- Changing a public function to require keyword arguments through the `*` operator. +- Changing the order of the parameters of a public function. + - This is only allowed if we are already enforcing keyword arguments with the `*` operator. +- Changing the behavior of a public API. + - Instead, the API would need a new parameter that toggles the change in behavior. + +### Allowed API changes + +- Adding a new module. +- Adding new functionality to a module, e.g. new classes or functions. +- Adding new parameters to a function _if and only if_ they have a default value. +- Adding type hints. +- Fixing bugs. +- Upgrading Pants to use new versions of third-party libraries. +- Changes that are required by law. + +:::caution The Rules and Target APIs are still experimental +These two APIs do not yet follow this deprecation policy because we are actively shaping the API. + +We do try, however, to limit changes and may choose to respect the deprecation policy on a case-by-case basis.
+::: diff --git a/versioned_docs/version-2.24/docs/releases/upgrade-tips.mdx b/versioned_docs/version-2.24/docs/releases/upgrade-tips.mdx new file mode 100644 index 000000000..44ade285a --- /dev/null +++ b/versioned_docs/version-2.24/docs/releases/upgrade-tips.mdx @@ -0,0 +1,77 @@ +--- + title: Upgrade tips + sidebar_position: 2 +--- + +How we recommend staying up-to-date with Pants. + +--- + +:::note Reminder: change the `pants_version` to upgrade +Change the `pants_version` option in the `[GLOBAL]` scope in your `pants.toml` to upgrade. + +You can see all releases at [https://github.com/pantsbuild/pants/releases](https://github.com/pantsbuild/pants/releases). +::: + +## Upgrade one minor release at a time + +Per our [Deprecation policy](./deprecation-policy.mdx), deprecations must last a minimum of one minor release. For example, something may be deprecated in 2.1.0 and then removed in 2.2.0. + +This means that it is helpful to upgrade one minor release at a time so that you can see all deprecation warnings. + +You do not need to land every upgrade in your organization's repository—often, you will want to upgrade your organization multiple versions at a time, e.g. 2.1.0 to 2.4.0. But when you are working on the upgrade locally, it is helpful to iterate one version at a time. + +First, see if Pants can automatically fix any safe deprecations for you: + +```bash +# To avoid unrelated formatting changes, you may want to use `--no-fmt`. +❯ pants update-build-files --no-fmt :: +``` + +You can add `update-build-files` to your [continuous integration](../using-pants/using-pants-in-ci.mdx) so that developers don't accidentally use removed features: + +```bash +❯ pants update-build-files --check :: +``` + +Then, see if there are any remaining deprecation warnings: + +```bash +❯ pants +❯ pants list :: > /dev/null +❯ pants filedeps :: > /dev/null +``` + +It is also helpful to spot-check that your main commands like `lint`, `package`, and `test` still work by running them on a single target. + +:::note Use dev releases for the newest improvements +As described in our [Release strategy](../contributions/releases/release-strategy.mdx), we make weekly dev releases with all the latest features and bug fixes we've been working on. While dev releases are less stable, they mean you get access to improvements sooner. + +If you encounter any blocking issues, you can easily roll back to a prior version by changing the `pants_version` option. (Please let us know about the issue by opening a [GitHub issue](https://github.com/pantsbuild/pants/issues) or messaging us on [Slack](/community/members).) +::: + +## Ignore deprecation messages with `ignore_warnings` + +Sometimes when upgrading, you will not have time to fully fix the deprecation. The `ignore_warnings` option allows you to silence those deprecations. + +The `ignore_warnings` option expects a string with the start of the deprecation warning. You can also prefix the string with `$regex$` to use a regex pattern instead of literal string matching. + +```toml title="pants.toml" +[GLOBAL] +ignore_warnings = [ + "DEPRECATED: option 'config' in scope 'flake8' will be removed", + "$regex$DEPRECATED:\\s*", +] +``` + +## Check for updates to the `pants` script + +Run `curl -L -o pants https://pantsbuild.github.io/setup/pants` to check if there have been any changes, e.g. adding support for running Pants with new Python interpreters. + +:::caution Consider migrating to the new `pants` binary +The `pants` script is deprecated.
The new `pants` binary has many improvements over the script, including an embedded Python runtime. The [installation page](../getting-started/installing-pants.mdx) has more details on how to migrate. +::: + +## Find any bugs or issues? + +Please either open a [GitHub issue](https://github.com/pantsbuild/pants/issues) or head over to [Slack](/community/members). We'd be happy to help and would appreciate knowing about the issue! diff --git a/versioned_docs/version-2.24/docs/shell/_category_.json b/versioned_docs/version-2.24/docs/shell/_category_.json new file mode 100644 index 000000000..e40d8ae62 --- /dev/null +++ b/versioned_docs/version-2.24/docs/shell/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Shell", + "position": 8 +} diff --git a/versioned_docs/version-2.24/docs/shell/index.mdx b/versioned_docs/version-2.24/docs/shell/index.mdx new file mode 100644 index 000000000..c059e7bdb --- /dev/null +++ b/versioned_docs/version-2.24/docs/shell/index.mdx @@ -0,0 +1,393 @@ +--- + title: Shell overview + sidebar_position: 0 +--- + +Pants's support for Shellcheck, shfmt, and shUnit2. + +--- + +Pants integrates with these tools to empower you to follow best practices with your Shell scripts: + +- [Shellcheck](https://www.shellcheck.net): lint for common Shell mistakes. +- [shfmt](https://github.com/mvdan/sh): autoformat Shell code so that you can instead focus on the logic. +- [shUnit2](https://github.com/kward/shunit2/): write lightweight unit tests for your Shell code. + +Pants installs these tools deterministically and integrates them into the workflows you already use: `pants fmt`, `pants lint`, and `pants test`. + +## Initial setup: add `shell_sources` targets + +Pants uses [`shell_source`](../../reference/targets/shell_source.mdx) and [`shunit2_test`](../../reference/targets/shunit2_test.mdx) [targets](../using-pants/key-concepts/targets-and-build-files.mdx) to know which Shell files you want to operate on and to set any metadata. + +To reduce boilerplate, the [`shell_sources`](../../reference/targets/shell_sources.mdx) target generates a `shell_source` target for each file in its `sources` field, and [`shunit2_tests`](../../reference/targets/shunit2_tests.mdx) generates a `shunit2_test` target for each file in its `sources` field. + +```python title="BUILD" +shell_sources(name="lib", sources=["deploy.sh", "changelog.sh"]) +shunit2_tests(name="tests", sources=["changelog_test.sh"]) + +# Spiritually equivalent to: +shell_source(name="deploy", source="deploy.sh") +shell_source(name="changelog", source="changelog.sh") +shunit2_test(name="changelog_test", source="changelog_test.sh") + +# Thanks to the default `sources` values, spiritually equivalent to: +shell_sources(name="lib") +shunit2_tests(name="tests") +``` + +First, activate the Shell backend in your `pants.toml`: + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.shell", +] +``` + +Then, run [`pants tailor ::`](../getting-started/initial-configuration.mdx#5-generate-build-files) to generate BUILD files: + +``` +$ pants tailor :: +Created scripts/BUILD: + - Add shell_sources target scripts +Created scripts/subdir/BUILD: + - Add shell_sources target subdir +``` + +You can also manually add targets, which is necessary if you have any scripts that don't end in `.sh`: + +```python +shell_source(name="script_without_an_extension", source="script_without_an_extension") +``` + +:::note Shell dependency inference +Pants will infer dependencies by looking for imports like `source script.sh` and `. script.sh`.
You can check that the correct dependencies are inferred by running `pants dependencies path/to/script.sh` and `pants dependencies --transitive path/to/script.sh`. + +Normally, Pants will not understand dynamic sources, e.g. those using variable expansion. However, Pants uses Shellcheck for parsing, so you can use Shellcheck's syntax to give a hint to Pants: + +```shell +another_script="dir/some_script.sh" + +# Normally Pants couldn't infer this, but we can give a hint like this: +# shellcheck source=dir/some_script.sh +source "${another_script}" +``` + +Alternatively, you can explicitly add `dependencies` in the relevant BUILD file. + +```python +shell_sources(dependencies=["path/to:shell_source_tgt"]) +``` + +::: + +## shfmt autoformatter + +To activate, add this to your `pants.toml`: + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.shell", + "pants.backend.shell.lint.shfmt", +] +``` + +Make sure that you also have set up `shell_source`/`shell_sources` or `shunit2_test`/`shunit2_tests` targets so that Pants knows to operate on the relevant files. + +Now you can run `pants fmt` and `pants lint`: + +``` +$ pants lint scripts/my_script.sh +13:05:56.34 [WARN] Completed: lint - shfmt failed (exit code 1). +--- scripts/my_script.sh.orig ++++ scripts/my_script.sh +@@ -9,7 +9,7 @@ + + set -eo pipefail + +-HERE=$(cd "$(dirname "${BASH_SOURCE[0]}")" && \ ++HERE=$(cd "$(dirname "${BASH_SOURCE[0]}")" && + pwd) + +𐄂 shfmt failed. +``` + +Use `pants fmt lint dir:` to run on all files in the directory, and `pants fmt lint dir::` to run on all files in the directory and subdirectories. + +Pants will automatically include any relevant `.editorconfig` files in the run. You can also pass command line arguments with `--shfmt-args='-ci -sr'` or permanently set them in `pants.toml`: + +```toml +[shfmt] +args = ["-i 2", "-ci", "-sr"] +``` + +Temporarily disable shfmt with `--shfmt-skip`: + +```bash +pants --shfmt-skip fmt :: +``` + +Only run shfmt with `--lint-only` and `--fmt-only`: + +```bash +pants fmt --only=shfmt :: +``` + +:::tip Benefit of Pants: shfmt runs in parallel with Python, Java, Scala, and Go formatters +Normally, Pants runs formatters sequentially so that it can pipe the results of one formatter into the next. However, Pants will run shfmt in parallel to formatters for other languages, [like Python](../python/overview/linters-and-formatters.mdx), because shfmt does not operate on those languages. + +You can see this concurrency through Pants's dynamic UI. +::: + +## Shellcheck linter + +To activate, add this to your `pants.toml`: + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.shell", + "pants.backend.shell.lint.shellcheck", +] +``` + +Make sure that you also have set up `shell_source` / `shell_sources` or `shunit2_test` / `shunit2_tests` targets so that Pants knows to operate on the relevant files. + +Now you can run `pants lint`: + +``` +$ pants lint scripts/my_script.sh +13:09:10.49 [WARN] Completed: lint - Shellcheck failed (exit code 1). + +In scripts/my_script.sh line 12: +HERE=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd) + ^--------------------------^ SC2046: Quote this to prevent word splitting. + ^---------------^ SC2086: Double quote to prevent globbing and word splitting. + +Did you mean: +... + +𐄂 Shellcheck failed. +``` + +Use `pants fmt lint dir:` to run on all files in the directory, and `pants fmt lint dir::` to run on all files in the directory and subdirectories.
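+ +If Shellcheck flags something you're doing intentionally, you can suppress that specific warning with a standard inline Shellcheck directive (this is plain Shellcheck syntax, not a Pants feature; `$EXTRA_FLAGS` is just an illustrative variable): + +```shell +# shellcheck disable=SC2086 +echo $EXTRA_FLAGS +```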
+ +Pants will automatically include any relevant `.shellcheckrc` and `shellcheckrc` files in the run. You can also pass command line arguments with `--shellcheck-args='-x -W 3'` or permanently set them in `pants.toml`: + +```toml +[shellcheck] +args = ["--external-sources", "--wiki-link-count=3"] +``` + +Temporarily disable Shellcheck with `--shellcheck-skip`: + +```bash +pants --shellcheck-skip lint :: +``` + +Only run Shellcheck with `--lint-only`: + +```bash +pants lint --only=shellcheck :: +``` + +:::tip Benefit of Pants: Shellcheck runs in parallel with other linters +Pants will attempt to run all activated linters and formatters at the same time for improved performance, including [Python](../python/overview/linters-and-formatters.mdx), Go, Java, and Scala linters. You can see this through Pants's dynamic UI. +::: + +## shUnit2 test runner + +[shUnit2](https://github.com/kward/shunit2/) allows you to write lightweight unit tests for your Shell code. + +To use shunit2 with Pants: + +1. Create a test file like `tests.sh`, `test_foo.sh`, or `foo_test.sh`. + - Refer to [https://github.com/kward/shunit2/](https://github.com/kward/shunit2/) for how to write shUnit2 tests. +2. Create a `shunit2_test` or `shunit2_tests` target in the directory's BUILD file. + - You can run [`pants tailor`](../getting-started/initial-configuration.mdx#5-generate-build-files) to automate this step. +3. Specify which shell to run your tests with, either by setting a shebang directly in the test file or by setting the field `shell` on the `shunit2_test` / `shunit2_tests` target. + - See [here](../../reference/targets/shunit2_tests.mdx#shell) for all supported shells. + +```shell tab={"label":"scripts/tests.sh"} +#!/usr/bin/env bash + +testEquality() { + assertEquals 1 1 +} +``` + +```python tab={"label":"scripts/BUILD"} +shunit2_tests(name="tests") +``` + +You can then run your tests like this: + +```bash +# Run all tests in the repository. +pants test :: + +# Run all the tests in the folder. +pants test scripts: + +# Run just the tests in this file. +pants test scripts/tests.sh +``` + +Pants will download the `./shunit2` script and will add `source ./shunit2` with the correct relpath for you. + +You can import your production code by using `source`. Make sure the code belongs to a `shell_source` or `shell_sources` target. Pants's [dependency inference](../using-pants/key-concepts/targets-and-build-files.mdx) will add the relevant dependencies, which you can confirm by running `pants dependencies scripts/tests.sh`. You can also manually add to the `dependencies` field of your `shunit2_tests` target. + +```shell tab={"label":"scripts/tests.sh"} +#!/usr/bin/bash + +source scripts/lib.sh + +testAdd() { + assertEquals $(add_one 4) 5 +} +``` + +```shell tab={"label":"scripts/lib.sh"} +add_one() { + echo $(($1 + 1)) +} +``` + +```python tab={"label":"scripts/BUILD"} +shell_sources(name="lib") +shunit2_tests(name="tests") +``` + +:::tip Running your tests with multiple shells +Pants allows you to run the same tests against multiple shells, e.g. Bash and Zsh, to ensure your code works with each shell. + +To test multiple shells, use the `parametrize` mechanism, like this: + +```python +shunit2_tests( + name="tests", + shell=parametrize("bash", "zsh"), +) +``` + +Then, use `pants test`: + +```bash +# Run tests with both shells. +pants test scripts/tests.sh + +# Run tests with only Zsh. +pants test scripts/tests.sh:tests@shell=zsh +``` + +::: + +### Controlling output + +By default, Pants only shows output for failed tests.
You can change this by setting `--test-output` to one of `all`, `failed`, or `never`, e.g. `pants test --output=all ::`. + +You can permanently set the output format in your `pants.toml` like this: + +```toml title="pants.toml" +[test] +output = "all" +``` + +### Force reruns with `--force` + +To force your tests to run again, rather than reading from the cache, run `pants test --force path/to/test.sh`. + +### Setting environment variables + +Test runs are _hermetic_, meaning that they are stripped of the parent `pants` process's environment variables. This is important for reproducibility, and it also increases cache hits. + +To add any arbitrary environment variable back to the process, use the option `extra_env_vars` in the `[test]` options scope. You can hardcode a value for the option, or leave off a value to "allowlist" it and read from the parent `pants` process's environment. + +```toml title="pants.toml" +[test] +extra_env_vars = ["VAR1", "VAR2=hardcoded_value"] +``` + +Use `[bash-setup].executable_search_paths` to change the `$PATH` env var used during test runs. You can use the special string `"<PATH>"` to read the value from the parent `pants` process's environment. + +```toml title="pants.toml" +[bash-setup] +executable_search_paths = ["/usr/bin", "<PATH>"] +``` + +### Timeouts + +Pants can cancel tests that take too long, which is useful to prevent tests from hanging indefinitely. + +To add a timeout, set the `timeout` field to an integer value of seconds, like this: + +```python title="BUILD" +shunit2_test(name="tests", source="tests.sh", timeout=120) +``` + +When you set `timeout` on the `shunit2_tests` target generator, the same timeout will apply to every generated `shunit2_test` target. Instead, you can use the `overrides` field: + +```python title="BUILD" +shunit2_tests( + name="tests", + overrides={ + "test_f1.sh": {"timeout": 20}, + ("test_f2.sh", "test_f3.sh"): {"timeout": 35}, + }, +) +``` + +You can also set a default value and a maximum value in `pants.toml`: + +```toml title="pants.toml" +[test] +timeout_default = 60 +timeout_maximum = 600 +``` + +If a target sets its `timeout` higher than `[test].timeout_maximum`, Pants will use the value in `[test].timeout_maximum`. + +Use the option `pants test --no-timeouts` to temporarily disable timeouts, e.g. when debugging. + +### Retries + +Pants can automatically retry failed tests. This can help keep your builds passing even with flaky tests, like integration tests. + +```toml tab={"label":"pants.toml"} +[test] +attempts_default = 3 +``` + +### Testing your packaging pipeline + +You can include the result of `pants package` in your test through the `runtime_package_dependencies` field. Pants will run the equivalent of `pants package` beforehand and copy the built artifact into the test's chroot, allowing you to test things like whether the artifact contains the correct files and whether it's executable. + +This allows you to test your packaging pipeline by simply running `pants test ::`, without needing custom integration test scripts. + +To depend on a built package, use the `runtime_package_dependencies` field on the `shunit2_test` / `shunit2_tests` targets, which is a list of addresses to targets that can be built with `pants package`, such as [`pex_binary`](../python/goals/package.mdx), [`python_aws_lambda_function`](../python/integrations/aws-lambda.mdx), and [`archive`](../using-pants/assets-and-archives.mdx) targets. Pants will build the package before running your test, and insert the file into the test's chroot.
It will use the same name it would normally use with `pants package`, except without the `dist/` prefix. + +For example: + +```python tab={"label":"helloworld/BUILD"} +python_source(name="py_src", source="say_hello.py") +pex_binary(name="pex", entry_point="say_hello.py") + +shunit2_test( + name="tests", + source="tests.sh", + runtime_package_dependencies=[":pex"], +) +``` + +```python tab={"label":"helloworld/say_hello.py"} +print("Hello, test!") +``` + +```shell tab={"label":"helloworld/tests.sh"} +#!/usr/bin/bash + +testArchiveCreated() { + assertTrue "[[ -f helloworld/pex.pex ]]" +} +``` diff --git a/versioned_docs/version-2.24/docs/shell/run-shell-commands.mdx b/versioned_docs/version-2.24/docs/shell/run-shell-commands.mdx new file mode 100644 index 000000000..442d9b00d --- /dev/null +++ b/versioned_docs/version-2.24/docs/shell/run-shell-commands.mdx @@ -0,0 +1,56 @@ +--- + title: Run shell commands + sidebar_position: 1 +--- + +How to execute arbitrary scripts and programs + +--- + +The [`shell_command`](../../reference/targets/shell_command.mdx) target allows you to run any command during a Pants execution, for the purpose of modifying or creating files to be used by other targets, or for its side effects when accessing services over the network (which must be idempotent; see below). + +```python tab={"label":"BUILD"} +shell_command( + command="./my-script.sh download some-archive.tar.gz", + tools=["curl", "env", "bash", "mkdir", "tar"], + output_directories=["files"], + dependencies=[":shell-scripts", ":images"] +) + +shell_sources(name="shell-scripts") +files(name="images", sources=["*.png"]) +``` + +```shell tab={"label":"my-script.sh"} +#!/usr/bin/env bash +case "$1" in + download) + echo "Downloading $2..." + curl https://my-storage.example.net/blob/$2 -O + mkdir files && tar xzf $2 -C files ;; + *) + echo "Usage: $0 [download|...]" ;; +esac +``` + +## The `shell_command` target + +The `command` field is passed to `bash -c <command>`. The execution sandbox will include any files from the `dependencies` field. Any executable tools that might be used must be specified in the `tools` field, in order to be available on the `PATH` while executing the command. + +The command is limited to operating on the specific set of input files provided as dependencies, and only produces output files for other targets to consume. It is not possible to mutate any file in the workspace. + +In case there are resulting files that should be captured and passed to any consuming targets, list them in the `output_files` and `output_directories` fields (as with `output_directories=["files"]` in the example above). + +:::note Idempotency requirement +The shell command may be cancelled or retried any number of times, so it is important that any side effects are idempotent. That is, it should not matter if it is run several times, or only partially. +::: + +:::note Running other Pants targets as commands +See the [`adhoc_tool`](../ad-hoc-tools/integrating-new-tools-without-plugins.mdx) documentation for discussion of how to run source files, third-party tools, and version-matched system binaries from within the Pants sandbox. +::: + +## The `run_shell_command` target + +Unlike `shell_command`, the [`run_shell_command` target](../../reference/targets/run_shell_command.mdx) runs directly in your workspace, without sandboxing. + +This target type allows you to formalize the Pants dependencies of shell scripts, and track when their impact on your workspace might have changed.
+Since its outputs cannot be captured, a `run_shell_command` target must be a root target in your build graph (i.e., it may not be consumed by other targets).
diff --git a/versioned_docs/version-2.24/docs/shell/self-extractable-archives.mdx b/versioned_docs/version-2.24/docs/shell/self-extractable-archives.mdx
new file mode 100644
index 000000000..3bd592a8c
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/shell/self-extractable-archives.mdx
@@ -0,0 +1,265 @@
+---
+    title: Self-extractable archives
+    sidebar_position: 2
+---
+
+Self-extractable archives with [`makeself`](https://github.com/megastep/makeself)
+
+---
+
+Pants integrates with the [`makeself`](https://github.com/megastep/makeself) tool
+to let you easily build self-extractable archives. To enable the
+integration, activate the `makeself` backend in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+    ...
+    "pants.backend.experimental.makeself",
+]
+```
+
+## Minimal example
+
+The [`makeself_archive`](../../reference/targets/makeself_archive.mdx) target
+allows you to bundle files and packages into a single executable archive.
+
+Here is a minimal example:
+
+```python title="BUILD"
+makeself_archive(
+    name="arc",
+    startup_script=["echo", "hello pants"],
+)
+```
+
+To build the archive, use the `package` goal:
+
+```bash
+pants package :arc
+```
+```
+[INFO] Wrote dist/arc.run
+Built Makeself binary: arc.run
+```
+
+Now run the archive just like a regular executable:
+
+```bash
+dist/arc.run
+```
+```
+Verifying archive integrity... 100% MD5 checksums are OK. All good.
+Uncompressing arc.run 100%
+hello pants
+```
+
+The built archive supports a number of parameters; you can inspect them manually:
+
+```bash
+dist/arc.run --help
+```
+
+Or refer to the [`makeself`](https://github.com/megastep/makeself) documentation.
+
+## Bundling multiple files
+
+You can bundle multiple shell scripts using the
+[`files`](../../reference/targets/makeself_archive.mdx#files) field:
+
+```python tab={"label":"BUILD"}
+shell_sources(name="src")
+
+makeself_archive(
+    name="arc",
+    files=["lib.sh:src", "entrypoint.sh:src"],
+    startup_script=["./entrypoint.sh"],
+)
+```
+
+```bash tab={"label":"entrypoint.sh"}
+#!/bin/bash
+
+. lib.sh
+echo $@ "one two three" | first_column
+```
+
+```bash tab={"label":"lib.sh"}
+#!/bin/bash
+
+function first_column {
+    awk '{print $1}'
+}
+```
+
+Notice that we need to use a relative path, `./entrypoint.sh`.
+
+```bash
+pants package :arc && dist/arc.run
+```
+```
+[INFO] Wrote dist/arc.run
+Built Makeself binary: arc.run
+Verifying archive integrity... 100% MD5 checksums are OK. All good.
+Uncompressing arc.run 100%
+one
+```
+
+To pass arguments to the `startup_script`, use `--`:
+
+```bash
+pants package :arc && dist/arc.run -- zero
+```
+```
+[INFO] Wrote dist/arc.run
+Built Makeself binary: arc.run
+Verifying archive integrity... 100% MD5 checksums are OK. All good.
+Uncompressing arc.run 100%
+zero
+```
+
+## `pants run`
+
+Instead of packaging and running the `makeself_archive` manually, you can use the `run` goal:
+
+```bash
+pants run :arc
+```
+```
+Verifying archive integrity... 100% MD5 checksums are OK. All good.
+Uncompressing arc.run 100%
+one
+```
+
+To pass arguments through the `pants run` goal you need `--`, and then you need
+another `--` to pass arguments to the archive's `startup_script`, so you end up with
+two `--`:
+
+```bash
+pants run :arc -- -- zero
+```
+```
+Verifying archive integrity... 100% MD5 checksums are OK. All good.
+Uncompressing arc.run 100%
+zero
+```
+
+Similarly, you can pass flags to the archive itself, for example the `-q` (quiet) flag to suppress progress messages:
+```bash
+pants run :arc -- -q -- zero
+```
+```
+zero
+```
+
+## Bundling packages like `pex_binary`
+
+You can put other packages like `pex_binary` into a makeself archive.
+
+To configure `pex_binary`, first update your `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+    ...
+    "pants.backend.shell",
+    "pants.backend.experimental.makeself",
+    "pants.backend.python",
+]
+
+[python]
+interpreter_constraints = ["CPython==3.12.*"]
+```
+
+Now define the `pex_binary` and add it to the makeself archive via the
+`packages` field:
+
+```python tab={"label":"BUILD"}
+python_sources(name="py")
+pex_binary(
+    name="upper",
+    entry_point="upper.py",
+)
+shell_sources(name="sh")
+makeself_archive(
+    name="arc",
+    files=["lib.sh:sh", "entrypoint.sh:sh"],
+    packages=[":upper"],
+    startup_script=["./entrypoint.sh"],
+)
+```
+
+```python tab={"label":"upper.py"}
+print(input().upper())
+```
+
+```bash tab={"label":"entrypoint.sh"}
+#!/bin/bash
+
+. lib.sh
+echo $@ "one two three" | first_column | ./upper.pex
+```
+
+```bash tab={"label":"lib.sh"}
+#!/bin/bash
+
+function first_column {
+    awk '{print $1}'
+}
+```
+
+```bash
+pants run :arc -- -q -- zero
+```
+```
+/usr/bin/env: ‘python3.12’: No such file or directory
+```
+
+Oops! This happened because `pants run` runs in an isolated environment, so
+we have to explicitly tell Pants that we want access to the Python interpreter
+and a couple of binaries used by Pex:
+
+```python title="BUILD"
+...
+makeself_archive(
+    name="arc",
+    files=["lib.sh:sh", "entrypoint.sh:sh"],
+    packages=[":upper"],
+    startup_script=["./entrypoint.sh"],
+    tools=["python3.12", "grep", "sort"],
+)
+```
+
+Now it should work:
+
+```bash
+pants run :arc -- -q -- zero
+```
+```
+ZERO
+```
+
+Yay!
+
+## Using `makeself` build args
+
+To control the `makeself` archive creation, you can provide the `args` field:
+
+```python title="BUILD"
+makeself_archive(
+    name="arc",
+    startup_script=["echo", "Done"],
+    args=["--xz", "--nomd5"],
+    tools=["xz"],
+)
+```
+```bash
+pants run :arc
+```
+```
+Verifying archive integrity... 100% CRC checksums are OK. All good.
+Uncompressing arc.run 100%
+Done
+```
+
+See the full list of available options in the
+[`makeself` docs](https://github.com/megastep/makeself#usage).
diff --git a/versioned_docs/version-2.24/docs/sql/_category_.json b/versioned_docs/version-2.24/docs/sql/_category_.json
new file mode 100644
index 000000000..779726b6b
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/sql/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "SQL",
+  "position": 12
+}
diff --git a/versioned_docs/version-2.24/docs/sql/index.mdx b/versioned_docs/version-2.24/docs/sql/index.mdx
new file mode 100644
index 000000000..510504fb3
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/sql/index.mdx
@@ -0,0 +1,54 @@
+---
+    title: SQL Overview
+    sidebar_position: 999
+---
+
+---
+
+:::caution SQL support is in alpha stage
+Pants is currently building support for SQL. Simple use cases might be
+supported, but many options are missing.
+
+Please share feedback for what you need to use Pants with your SQL queries by
+either [opening a GitHub
+issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our
+Slack](/community/getting-help)!
+:::
+
+## Initial setup
+
+First, activate the relevant backend in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+    ...
+    "pants.backend.experimental.sql",
+    ...
+]
+```
+
+The SQL backend adds [`sql_source`](../../reference/targets/sql_source.mdx) and
+[`sql_sources`](../../reference/targets/sql_sources.mdx) target types for SQL
+files. The [`sql_source`](../../reference/targets/sql_source.mdx) target behaves like
+[`resource`](../../reference/targets/resource.mdx), so you can use it directly
+without wrappers. The `tailor` goal will automatically generate the targets for
+your `.sql` files.
+
+## Enable sqlfluff linter
+
+To enable the linter, activate the relevant backend in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+    ...
+    "pants.backend.experimental.sql.lint.sqlfluff",
+    ...
+]
+```
+
+You can run the linter via the `lint` goal:
+```
+pants lint --only=sqlfluff ::
+```
diff --git a/versioned_docs/version-2.24/docs/terraform/_category_.json b/versioned_docs/version-2.24/docs/terraform/_category_.json
new file mode 100644
index 000000000..bd8fe8b26
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/terraform/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Terraform",
+  "position": 11
+}
diff --git a/versioned_docs/version-2.24/docs/terraform/index.mdx b/versioned_docs/version-2.24/docs/terraform/index.mdx
new file mode 100644
index 000000000..6f69c2fff
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/terraform/index.mdx
@@ -0,0 +1,194 @@
+---
+    title: Terraform Overview
+    sidebar_position: 999
+---
+
+---
+
+:::caution Terraform support is in alpha stage
+Pants is currently building support for developing and deploying Terraform. Simple use cases might be supported, but many options are missing.
+
+Terraform release progress is tracked in the [stability for release issue](https://github.com/pantsbuild/pants/issues/21119).
+Please share feedback for what you need to use Pants with your Terraform modules and deployments by commenting on that issue, [opening a new GitHub issue](https://github.com/pantsbuild/pants/issues/new/choose) or [joining our Slack](/community/getting-help)!
+:::
+
+## Initial setup
+
+First, activate the relevant backend in `pants.toml`:
+
+```toml title="pants.toml"
+[GLOBAL]
+backend_packages = [
+    ...
+    "pants.backend.experimental.terraform",
+    ...
+]
+```
+
+The Terraform backend also needs Python to run Pants's analysers, so you will need to set `[python].interpreter_constraints`.
+
+Terraform needs Git to download modules. Many providers and provisioners need additional binaries to be available on the PATH. Currently, you can forward your PATH by adding `"PATH"` to `[download-terraform].extra_env_vars` in `pants.toml`, like so:
+```toml title="pants.toml"
+[download-terraform]
+extra_env_vars = [
+    "PATH"
+]
+```
+
+### Adding Terraform targets
+
+The Terraform backend has 5 target types:
+
+- `terraform_module` for Terraform source code
+- `terraform_deployment` for deployments that can be deployed with the `experimental-deploy` goal
+- `terraform_backend` for backend configuration files (typically ending in `.tfbackend`)
+- `terraform_var_files` for files to pass with `-var-file`
+- `_terraform_lockfile`, an autogenerated target for lockfiles
+
+#### Modules
+
+The `tailor` goal will automatically generate `terraform_module`, `terraform_backend`, and `terraform_var_files` targets. Run [`pants tailor ::`](../getting-started/initial-configuration.mdx#5-generate-build-files).
+For example:
+
+```
+❯ pants tailor ::
+Created src/terraform/root/BUILD:
+  - Add terraform_module target root
+```
+
+> 🚧 `terraform_module`s must be defined in a BUILD file in the same directory as the module sources
+>
+> Terraform only uses files in a single directory. The Pants Terraform plugin uses the directory of the BUILD file for this.
+
+#### Deployments
+
+`terraform_deployment`s must be manually created. The deployment points to a `terraform_module` target as its `root_module` field. This module will be the "root" module that Terraform operations will be run on. Identify backend configs with a `terraform_backend` target, and var files with a `terraform_var_files` target. Pants will automatically use `terraform_backend`s and `terraform_var_files` in the same directory as a `terraform_deployment`.
+
+```
+terraform_module(name="root")
+terraform_deployment(name="prod", root_module=":root")
+terraform_backend(name="tfbackend", source="main.tfbackend")
+terraform_var_files(name="tfvars")
+```
+
+You can override this behaviour by explicitly specifying these in the `dependencies` field.
+
+```
+terraform_module(name="root")
+terraform_backend(name="prod_backend", source="prod.tfbackend")
+terraform_deployment(name="prod", root_module=":root", dependencies=["prod_backend"])
+terraform_backend(name="test_backend", source="test.tfbackend")
+terraform_deployment(name="test", root_module=":root", dependencies=["test_backend"])
+```
+
+#### Lockfiles
+
+Lockfiles will be loaded from the directory of the root module of a deployment, just like with the `terraform` command. Pants will automatically generate targets for them if they exist.
+
+Pants can generate and update lockfiles with the `generate-lockfiles` goal. Use the address of the `terraform_module` target as the resolve name. For example, `pants generate-lockfiles --resolve=tf:infrastructure`.
+
+```python tab={"label":"prod/BUILD"}
+terraform_deployment(name="prod", root_module="//tf:infrastructure")
+```
+
+```python tab={"label":"tf/BUILD"}
+terraform_module(name="infrastructure")
+```
+
+```hcl tab={"label":"tf/main.tf"}
+resource "null_resource" "dep" {}
+```
+
+```hcl tab={"label":"tf/.terraform.lock.hcl"}
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
+
+provider "registry.terraform.io/hashicorp/null" {
+  version = "3.2.2"
+  hashes = [
+    "h1:zT1ZbegaAYHwQa+QwIFugArWikRJI9dqohj8xb0GY88=",
+    "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7",
+    "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a",
+    "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3",
+    "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606",
+    "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546",
+    "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539",
+    "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452",
+    "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
+    "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422",
+    "zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae",
+    "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1",
+    "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e",
+  ]
+}
+```
+
+Pants can generate multi-platform lockfiles for Terraform.
+The setting `[download-terraform].platforms` uses the same values as [Terraform's multi-platform implementation](https://developer.hashicorp.com/terraform/cli/commands/providers/lock#specifying-target-platforms).
+
+```toml title="pants.toml"
+[download-terraform]
+platforms = ["windows_amd64", "darwin_amd64", "linux_amd64"]
+```
+
+### Basic Operations
+
+#### Formatting
+
+Run `terraform fmt` as part of the `fix`, `fmt`, or `lint` goals.
+
+```
+pants fix ::
+[INFO] Completed: pants.backend.terraform.lint.tffmt.tffmt.tffmt_fmt - terraform-fmt made no changes.
+
+✓ terraform-fmt made no changes.
+```
+
+#### Validate
+
+Run `terraform validate` as part of the `check` goal.
+
+```
+pants check ::
+[INFO] Completed: pants.backend.terraform.goals.check.terraform_check - terraform-validate succeeded.
+Success! The configuration is valid.
+
+✓ terraform-validate succeeded.
+
+```
+
+`terraform validate` isn't applicable to all Terraform modules. Some child modules, in particular those using aliased providers, need to have their providers provided by a "root" module. You can opt these modules out of `validate` by setting `skip_terraform_validate=True`. For example:
+
+```
+terraform_module(skip_terraform_validate=True)
+```
+
+#### Deploying
+
+:::caution Terraform deployment support is in alpha stage
+Many options and features aren't supported yet.
+For the local state backend, use an absolute path.
+:::
+
+Run `terraform apply` as part of the `experimental-deploy` goal. The process is run interactively, so you will be prompted for variables and confirmation as usual.
+
+```
+pants experimental-deploy ::
+[INFO] Deploying targets...
+--- 8< ---
+Do you want to perform these actions?
+  Terraform will perform the actions described above.
+  Only 'yes' will be accepted to approve.
+
+  Enter a value: yes
+--- 8< ---
+Apply complete! Resources: 4 added, 0 changed, 0 destroyed.
+
+✓ testprojects/src/terraform/root:root deployed
+```
+
+You can enable auto-approval by adding `-auto-approve` to the `[download-terraform].args` setting in `pants.toml`. You can also set it for a single Pants invocation with `--download-terraform-args='-auto-approve'`, for example `pants experimental-deploy "--download-terraform-args='-auto-approve'"`.
+
+To run `terraform plan`, use the `--dry-run` flag of the `experimental-deploy` goal.
+
+```
+pants experimental-deploy --dry-run ::
+```
diff --git a/versioned_docs/version-2.24/docs/tutorials/_category_.json b/versioned_docs/version-2.24/docs/tutorials/_category_.json
new file mode 100644
index 000000000..69483bea3
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/tutorials/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Tutorials",
+  "position": 17
+}
diff --git a/versioned_docs/version-2.24/docs/tutorials/advanced-plugin-concepts.mdx b/versioned_docs/version-2.24/docs/tutorials/advanced-plugin-concepts.mdx
new file mode 100644
index 000000000..1d83ce3fe
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/tutorials/advanced-plugin-concepts.mdx
@@ -0,0 +1,678 @@
+---
+    title: Advanced plugin concepts
+    sidebar_position: 2
+---
+
+Learning advanced concepts for writing plugins.
+
+---
+
+## Introduction
+
+In this tutorial, we continue from where we left off in the [previous tutorial](./create-a-new-goal.mdx). Now that we have a complete goal with a custom target, we are ready to make certain improvements and learn more advanced concepts that you will likely find useful when working on your own plugins.
+
+### Adding a custom `source` field
+
+In the first tutorial, to keep things simple, we used the default `SingleSourceField` class for our `source` field where we provided the path to the `VERSION` file. We could have added [a custom field](../writing-plugins/the-target-api/creating-new-fields.mdx) to provide a file path; however, when using the `source` field, you get a few features for free, such as setting the `default` value and `expected_file_extensions`. Furthermore, with the `source` field, thanks to the [`unmatched_build_file_globs`](../../reference/global-options.mdx#unmatched_build_file_globs) option, you won't need to provide custom logic to handle errors when path globs do not expand to any files in your repository.
+
+Let's modify our `myapp/BUILD` file:
+
+```
+version_file(
+    name="main-project-version",
+    source="non-existing-file",
+)
+```
+
+and run the `project-version` goal:
+
+```
+$ pants project-version myapp:
+...
+[WARN] Unmatched glob from myapp:main-project-version's `source` field: "myapp/non-existing-file"
+[ERROR] 1 Exception encountered:
+
+  InvalidFieldException: The 'source' field in target myapp:main-project-version must have 1 file, but it had 0 files.
+...
+```
+
+It is possible to adjust how Pants handles unmatched globs to prevent this type of issue:
+
+```
+$ PANTS_UNMATCHED_BUILD_FILE_GLOBS=error pants project-version myapp:
+[ERROR] 1 Exception encountered:
+  Exception: Unmatched glob from myapp:main-project-version's `source` field: "myapp/non-existing-file"
+```
+
+We would likely want to use the same name for the version file (`VERSION`) throughout the repo for consistency, so we should probably set a default value for the target to reduce the amount of boilerplate in the `BUILD` files. To change a default value, we have to subclass the original field. Visit [customizing fields through subclassing](../writing-plugins/the-target-api/concepts.mdx#customizing-fields-through-subclassing) to learn more.
+
+```python title="pants-plugins/project_version/target_types.py"
+from pants.engine.target import COMMON_TARGET_FIELDS, SingleSourceField, Target
+
+class ProjectVersionSourceField(SingleSourceField):
+    help = "Path to the file with the project version."
+    default = "VERSION"
+    required = False
+
+class ProjectVersionTarget(Target):
+    alias = "version_file"
+    core_fields = (*COMMON_TARGET_FIELDS, ProjectVersionSourceField)
+    help = "A project version target representing the VERSION file."
+```
+
+You may have noticed that we have decided to override the `help` property to show more relevant information than the default help message:
+
+```
+$ pants help version_file
+`version_file` target
+---------------------
+
+A project version target representing the VERSION file.
+
+Activated by project_version
+Valid fields:
+
+...
+source
+    type: str | None
+    default: 'VERSION'
+
+    Path to the file with the project version.
+...
+```
+
+Having a dedicated source field will let us filter targets by checking whether they have a `ProjectVersionSourceField`, instead of checking what their alias is. This means we can refactor how we collect the relevant targets from:
+
+```python
+targets = [tgt for tgt in targets if tgt.alias == ProjectVersionTarget.alias]
+```
+
+to
+
+```python
+targets = [tgt for tgt in targets if tgt.has_field(ProjectVersionSourceField)]
+```
+
+Using your own classes via subclassing will also help with refactoring if you decide to deprecate the target alias in order to rename it.
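+For illustration, a sketch of what such a rename might look like, assuming Pants's `deprecated_alias` mechanism; the new alias and the removal version below are hypothetical:
+
+```python title="pants-plugins/project_version/target_types.py"
+class ProjectVersionTarget(Target):
+    # Hypothetical new alias; BUILD files using the old alias keep working,
+    # but Pants emits a deprecation warning until the removal version.
+    alias = "project_version_file"
+    deprecated_alias = "version_file"
+    deprecated_alias_removal_version = "2.30.0.dev0"
+    core_fields = (*COMMON_TARGET_FIELDS, ProjectVersionSourceField)
+    help = "A project version target representing the VERSION file."
+```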
+In a more advanced scenario, other plugins may import the `ProjectVersionSourceField` field and use it in their own custom targets, so that `project-version`-specific behavior would still apply to those targets as well.
+
+### Ensuring a version follows a semver convention
+
+With the current implementation, we have simply returned the contents of the file as is. We may want to add some validation, for instance, to check that a version string follows a semver convention. Let's learn how to [bring a 3rd party Python package](../writing-plugins/overview.mdx#thirdparty-dependencies), namely, [packaging](https://pypi.org/project/packaging/), into our plugin to do that!
+
+To start depending on the `packaging` package in our in-repo plugin, we must extend the `pants.toml` file:
+
+```toml
+[GLOBAL]
+plugins = ["packaging==22.0"]
+```
+
+Now, let's raise an exception if it isn't possible to construct an instance of the `Version` class:
+
+```python
+from packaging.version import Version, InvalidVersion
+from project_version.target_types import ProjectVersionTarget, ProjectVersionSourceField
+
+class InvalidProjectVersionString(ValueError):
+    pass
+
+@goal_rule
+async def goal_show_project_version(targets: Targets) -> ProjectVersionGoal:
+    targets = [tgt for tgt in targets if tgt.has_field(ProjectVersionSourceField)]
+    results = await MultiGet(
+        Get(ProjectVersionFileView, ProjectVersionTarget, target) for target in targets
+    )
+    for result in results:
+        try:
+            _ = Version(result.version)
+        except InvalidVersion:
+            raise InvalidProjectVersionString(f"Invalid version string '{result.version}' from '{result.path}'")
+    ...
+```
+
+To test this behavior, let's set a bogus version and see our goal in action!
+
+```
+$ cat myapp/VERSION
+x.y.z
+
+$ pants project-version myapp:
+[ERROR] 1 Exception encountered:
+
+  InvalidProjectVersionString: Invalid version string 'x.y.z' from 'myapp/VERSION'
+```
+
+### Exploring caching
+
+When you have run the goal a few times, you may have noticed that sometimes the command takes a few seconds to complete, and sometimes it completes immediately. If that's the case, then you have just seen [Pants caching](../writing-plugins/the-rules-api/tips-and-debugging.mdx#fyi-caching-semantics) at work! Because we use the Pants engine to read the `VERSION` file, it copies the file into the cache. Pants knows that when the command is re-run, if there are no changes to the Python source code or the `VERSION` file, there's no need to re-run the code because the result is guaranteed to stay the same.
+
+If your plugin depends on 3rd-party Python packages, it can be worth checking whether a package has any side effects, such as reading from the filesystem, since these won't let you take full advantage of the Pants engine's caching mechanism. Keep in mind that the commands you run via Pants may be cancelled or retried any number of times, so ideally any side effects should be [idempotent](https://en.wikipedia.org/wiki/Idempotence). That is, it should not matter if it is run once or several times.
+
+You can confirm that the cache is being used by adding [log statements](../writing-plugins/the-rules-api/logging-and-dynamic-output.mdx). On the first run, the logging messages will show up; on subsequent runs, they won't, because the rule code won't be executed.
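+
+For example, a minimal sketch using the standard `logging` module (the message text is arbitrary):
+
+```python title="pants-plugins/project_version/rules.py"
+import logging
+
+logger = logging.getLogger(__name__)
+
+@rule
+async def get_project_version_file_view(
+    target: ProjectVersionTarget,
+) -> ProjectVersionFileView:
+    # On a cold cache this message shows up in the console; on a warm cache
+    # the rule is not re-executed, so the message is never logged.
+    logger.info("Reading the VERSION file...")
+    ...
+```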
+
+### Showing output as JSON
+
+We have so far shown the version string as part of the `ProjectVersionFileView` class:
+
+```
+$ pants project-version myapp:
+ProjectVersionFileView(path='myapp/VERSION', version='0.0.1')
+```
+
+To be able to pipe the output of our command, it may make sense to emit it in a parseable structure instead of plain text. Pants goals come with lots of options that can adjust their behavior, and this is true for custom goals as well. Let's [add a new option](../writing-plugins/the-rules-api/options-and-subsystems.mdx) for our goal, so that the version information can be shown as a JSON object.
+
+Adding a new option is trivial and is done in the subsystem:
+
+```python
+class ProjectVersionSubsystem(GoalSubsystem):
+    name = "project-version"
+    help = "Show representation of the project version from the `VERSION` file."
+
+    as_json = BoolOption(
+        default=False,
+        help="Show project version information as JSON.",
+    )
+```
+
+To use a subsystem in the goal rule (where we show the version in the console), we need to request it as a parameter:
+
+```python
+import json
+
+@goal_rule
+async def goal_show_project_version(
+    console: Console, project_version_subsystem: ProjectVersionSubsystem
+) -> ProjectVersionGoal:
+    ...
+    if project_version_subsystem.as_json:
+        console.print_stdout(json.dumps(dataclasses.asdict(result)))
+    else:
+        console.print_stdout(str(result))
+```
+
+Let's run our goal with the new `--as-json` flag:
+
+```
+$ pants project-version --as-json myapp: | jq
+{
+  "path": "myapp/VERSION",
+  "version": "0.0.1"
+}
+```
+
+### Automating generation of `project_version` targets
+
+Pants provides a way to automate generation of standard targets using the [`tailor`](../../reference/goals/tailor.mdx) goal. If a monorepository has many projects, each containing a `VERSION` file, it might be useful to generate `version_file` targets in every directory where the relevant files are found. This is what Pants does, for instance, when the Docker backend is enabled and you have `Dockerfile` files in the codebase. To make this work for our use case, however, we need to teach the `tailor` goal about the `VERSION` files.
+
+We've reached the moment when the documentation won't be of help: there are no instructions on how to extend the `tailor` goal. In a situation like this, it may be worth exploring the Pants codebase to see how this was done in other plugins that are part of Pants. Once you find a piece of code that looks like it does what you want, you can copy it and tweak it to better suit your needs. For our use case, the code used in [generation of C++ source targets](https://github.com/pantsbuild/pants/blob/672ca1d662c76f2567e432347deee8949c14d35d/src/python/pants/backend/cc/goals/tailor.py) may come in handy.
+After making a few changes, we have a new rule we can place in a new file:
+
+```python title="pants-plugins/project_version/tailor.py"
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from pants.core.goals.tailor import (
+    AllOwnedSources,
+    PutativeTarget,
+    PutativeTargets,
+    PutativeTargetsRequest,
+)
+from pants.util.dirutil import group_by_dir
+from pants.engine.fs import PathGlobs, Paths
+from pants.engine.internals.selectors import Get
+from pants.engine.rules import collect_rules, rule
+from pants.engine.unions import UnionRule
+from project_version.target_types import ProjectVersionTarget
+
+
+@dataclass(frozen=True)
+class PutativeProjectVersionTargetsRequest(PutativeTargetsRequest):
+    pass
+
+
+@rule(desc="Determine candidate project_version targets to create")
+async def find_putative_targets(
+    req: PutativeProjectVersionTargetsRequest,
+    all_owned_sources: AllOwnedSources,
+) -> PutativeTargets:
+    all_project_version_files = await Get(Paths, PathGlobs, req.path_globs("VERSION"))
+    unowned_project_version_files = set(all_project_version_files.files) - set(
+        all_owned_sources
+    )
+    classified_unowned_project_version_files = {
+        ProjectVersionTarget: unowned_project_version_files
+    }
+
+    putative_targets = []
+    for tgt_type, paths in classified_unowned_project_version_files.items():
+        for dirname, filenames in group_by_dir(paths).items():
+            putative_targets.append(
+                PutativeTarget.for_target_type(
+                    ProjectVersionTarget,
+                    path=dirname,
+                    name="project-version-file",
+                    triggering_sources=sorted(filenames),
+                )
+            )
+
+    return PutativeTargets(putative_targets)
+
+
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(PutativeTargetsRequest, PutativeProjectVersionTargetsRequest),
+    ]
+```
+
+In this file, we use an advanced feature of Pants, [union rules](../writing-plugins/the-rules-api/union-rules-advanced.mdx):
+
+```python
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(PutativeTargetsRequest, PutativeProjectVersionTargetsRequest),
+    ]
+```
+
+When the `tailor` goal is run, the build graph is analyzed to see when `PutativeTargetsRequest` is needed, i.e. to find out if there are any files (yet unknown to Pants) that look like they could potentially be made targets. For instance, if there is a `requirements.txt` file, a `python_requirement` target is created, and when there is a Python `test_` module, a `python_test` target is created. To be able to customize the `tailor` goal (to allow generation of custom targets), we need to "extend" the build graph. That is, we ask Pants to also run our rule when searching for files that should perhaps have a target created.
+
+We also have to make sure that the new rule is collected:
+
+```python title="pants-plugins/project_version/register.py"
+...
+def rules():
+    return [*project_version_rules.rules(), *tailor_rules.rules()]
+```
+
+Let's remove the existing `version_file` target from the `myapp/BUILD` file and run the `tailor` goal:
+
+```
+$ pants tailor ::
+Created myapp/BUILD:
+  - Add version_file target project-version-file
+```
+
+If you have multiple projects, being able to generate the targets automatically may save time. You would also likely want to run the `tailor` goal in check mode to confirm that newly created projects have a `version_file` target.
+Remove the `version_file` target from the `myapp/BUILD` file and re-run the `tailor` goal:
+
+```
+$ pants tailor --check ::
+Would create myapp/BUILD:
+  - Add version_file target project-version-file
+
+To fix `tailor` failures, run `pants tailor`.
+```
+
+### Running system tools
+
+Pants lets you [run system applications](../writing-plugins/the-rules-api/installing-tools.mdx) your plugin may need. For our use case, we can assume that Git is installed and can be run from `/usr/bin/git`. If there's a `VERSION` file in the root of the repository representing the final artifact version (in case of a monolith), we could use Git to confirm that the version string matches the latest tag the repository was tagged with.
+
+We can create a new rule:
+
+```python
+class GitTagVersion(str):
+    pass
+
+@rule
+async def get_git_repo_version(buildroot: BuildRoot) -> GitTagVersion:
+    git_paths = await Get(
+        BinaryPaths,
+        BinaryPathRequest(
+            binary_name="git",
+            search_path=["/usr/bin", "/bin"],
+        ),
+    )
+    git_bin = git_paths.first_path
+    if git_bin is None:
+        raise OSError("Could not find 'git'.")
+    git_describe = await Get(
+        ProcessResult,
+        Process(
+            argv=[git_bin.path, "-C", buildroot.path, "describe", "--tags"],
+            description="git describe --tags",
+        ),
+    )
+    return GitTagVersion(git_describe.stdout.decode().strip())
+```
+
+and then use this rule in the main goal rule:
+
+```python
+class ProjectVersionGitTagMismatch(ValueError):
+    pass
+
+@goal_rule
+async def goal_show_project_version(...) -> ProjectVersionGoal:
+    ...
+    git_repo_version = await Get(GitTagVersion)
+    ...
+    if git_repo_version != result.version:
+        raise ProjectVersionGitTagMismatch(
+            f"Project version string '{result.version}' from '{result.path}' "
+            f"doesn't match latest Git tag '{git_repo_version}'"
+        )
+```
+
+Let's modify our `VERSION` file to have a version different from what we have tagged our repository with:
+
+```
+$ git tag 0.0.1
+$ git describe --tags
+0.0.1
+$ cat myapp/VERSION
+0.0.2
+
+$ pants project-version --as-json myapp:
+12:40:17.02 [INFO] Initializing scheduler...
+12:40:17.14 [INFO] Scheduler initialized.
+12:40:17.18 [ERROR] 1 Exception encountered:
+
+  ProjectVersionGitTagMismatch: Project version string '0.0.2' from 'myapp/VERSION' doesn't match latest Git tag '0.0.1'
+```
+
+Now, let's tag our repository with another tag and update our `VERSION` file:
+
+```
+$ git tag --delete 0.0.1
+Deleted tag '0.0.1' (was 006f320)
+$ git tag 0.0.2
+$ git describe --tags
+0.0.2
+$ cat myapp/VERSION
+0.0.1
+
+$ pants project-version --as-json myapp:
+{"path": "myapp/VERSION", "version": "0.0.1"}
+```
+
+Pants is happy, but clearly something is wrong as our Git tag version doesn't match the `myapp/VERSION` version! If you update your `myapp/VERSION` with another version, say, `0.0.3`, you get an error, but this time the Git tag shown is wrong:
+
+```
+$ cat myapp/VERSION
+0.0.3
+
+$ pants project-version --as-json myapp:
+[ERROR] 1 Exception encountered:
+
+  ProjectVersionGitTagMismatch: Project version string '0.0.3' from 'myapp/VERSION' doesn't match latest Git tag '0.0.1'
+```
+
+This happens because of how the Pants cache works: modifying the repository's tags is not the kind of change that invalidates the cache.
+It is therefore not safe to [cache this `Process` run](../writing-plugins/the-rules-api/processes.mdx): since we know that Git will access the repository (which is outside the sandbox), we should change the process's cacheability using the `ProcessCacheScope` parameter so that our Git call runs once per Pants session.
+
+```python
+git_describe = await Get(
+    ProcessResult,
+    Process(
+        argv=[git_bin.path, "-C", buildroot.path, "describe", "--tags"],
+        description="git describe --tags",
+        cache_scope=ProcessCacheScope.PER_SESSION,
+    ),
+)
+```
+
+Let's add another option so that we can control whether the Git tag should be retrieved:
+
+```python
+class ProjectVersionSubsystem(GoalSubsystem):
+    name = "project-version"
+    help = "Show representation of the project version from the `VERSION` file."
+
+    ...
+    match_git = BoolOption(
+        default=False,
+        help="Check Git tag of the repository matches the project version.",
+    )
+```
+
+Keep in mind that once you've declared [custom options in the plugin's subsystem](../using-pants/key-concepts/options.mdx#setting-options), they can be set in the `pants.toml` file just like any standard Pants options.
+
+If you know that your Git tag may be different from the project version stored in the `VERSION` file and that you would always want the output to be in the JSON format, you can set these options in the `pants.toml` file for visibility (and to avoid setting them via command-line flags):
+
+```toml
+[project-version]
+as_json = true
+match_git = false
+```
+
+### Putting it all together
+
+We have now extended the plugin with extra functionality:
+
+```
+$ pants project-version myapp:
+[INFO] Initializing scheduler...
+[INFO] Scheduler initialized.
+{"path": "myapp/VERSION", "version": "0.0.1"}
+```
+
+Let's get all of this code in one place:
+
+```python tab={"label":"pants-plugins/project_version/register.py"}
+from typing import Iterable
+
+import project_version.rules as project_version_rules
+import project_version.tailor as tailor_rules
+from pants.engine.target import Target
+from project_version.target_types import ProjectVersionTarget
+
+
+def target_types() -> Iterable[type[Target]]:
+    return [ProjectVersionTarget]
+
+
+def rules():
+    return [*project_version_rules.rules(), *tailor_rules.rules()]
+```
+
+```python tab={"label":"pants-plugins/project_version/rules.py"}
+import dataclasses
+import json
+from dataclasses import dataclass
+
+from packaging.version import InvalidVersion, Version
+from pants.base.build_root import BuildRoot
+from pants.core.util_rules.system_binaries import BinaryPathRequest, BinaryPaths
+from pants.engine.console import Console
+from pants.engine.fs import DigestContents
+from pants.engine.goal import Goal, GoalSubsystem
+from pants.engine.internals.native_engine import Digest
+from pants.engine.internals.selectors import Get, MultiGet
+from pants.engine.process import Process, ProcessCacheScope, ProcessResult
+from pants.engine.rules import collect_rules, goal_rule, rule
+from pants.engine.target import (
+    HydratedSources,
+    HydrateSourcesRequest,
+    SourcesField,
+    Targets,
+)
+from pants.option.option_types import BoolOption
+from project_version.target_types import ProjectVersionSourceField, ProjectVersionTarget
+
+
+@dataclass(frozen=True)
+class ProjectVersionFileView:
+    path: str
+    version: str
+
+
+@rule
+async def get_project_version_file_view(
+    target: ProjectVersionTarget,
+) -> ProjectVersionFileView:
+    sources = await Get(HydratedSources, HydrateSourcesRequest(target[SourcesField]))
+    digest_contents = await Get(DigestContents, Digest, sources.snapshot.digest)
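+    # The target's `source` field hydrates to exactly one file, so the
+    # digest contains a single entry.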
+    file_content = digest_contents[0]
+    return ProjectVersionFileView(
+        path=file_content.path, version=file_content.content.decode("utf-8").strip()
+    )
+
+
+class ProjectVersionSubsystem(GoalSubsystem):
+    name = "project-version"
+    help = "Show representation of the project version from the `VERSION` file."
+
+    as_json = BoolOption(
+        default=False,
+        help="Show project version information as JSON.",
+    )
+    match_git = BoolOption(
+        default=False,
+        help="Check Git tag of the repository matches the project version.",
+    )
+
+
+class ProjectVersionGoal(Goal):
+    subsystem_cls = ProjectVersionSubsystem
+    environment_behavior = Goal.EnvironmentBehavior.LOCAL_ONLY
+
+
+class InvalidProjectVersionString(ValueError):
+    pass
+
+
+class ProjectVersionGitTagMismatch(ValueError):
+    pass
+
+
+class GitTagVersion(str):
+    pass
+
+
+@goal_rule
+async def goal_show_project_version(
+    console: Console,
+    targets: Targets,
+    project_version_subsystem: ProjectVersionSubsystem,
+) -> ProjectVersionGoal:
+    targets = [tgt for tgt in targets if tgt.has_field(ProjectVersionSourceField)]
+    results = await MultiGet(
+        Get(ProjectVersionFileView, ProjectVersionTarget, target) for target in targets
+    )
+    if project_version_subsystem.match_git:
+        git_repo_version = await Get(GitTagVersion)
+
+    for result in results:
+        try:
+            _ = Version(result.version)
+        except InvalidVersion:
+            raise InvalidProjectVersionString(
+                f"Invalid version string '{result.version}' from '{result.path}'"
+            )
+        if project_version_subsystem.match_git:
+            if git_repo_version != result.version:
+                raise ProjectVersionGitTagMismatch(
+                    f"Project version string '{result.version}' from '{result.path}' "
+                    f"doesn't match latest Git tag '{git_repo_version}'"
+                )
+
+        if project_version_subsystem.as_json:
+            console.print_stdout(json.dumps(dataclasses.asdict(result)))
+        else:
+            console.print_stdout(str(result))
+
+    return ProjectVersionGoal(exit_code=0)
+
+
+@rule
+async def get_git_repo_version(buildroot: BuildRoot) -> GitTagVersion:
+    git_paths = await Get(
+        BinaryPaths,
+        BinaryPathRequest(
+            binary_name="git",
+            search_path=["/usr/bin", "/bin"],
+        ),
+    )
+    git_bin = git_paths.first_path
+    if git_bin is None:
+        raise OSError("Could not find 'git'.")
+    git_describe = await Get(
+        ProcessResult,
+        Process(
+            argv=[git_bin.path, "-C", buildroot.path, "describe", "--tags"],
+            description="git describe --tags",
+            cache_scope=ProcessCacheScope.PER_SESSION,
+        ),
+    )
+    return GitTagVersion(git_describe.stdout.decode().strip())
+
+
+def rules():
+    return collect_rules()
+```
+
+```python tab={"label":"pants-plugins/project_version/tailor.py"}
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from pants.core.goals.tailor import (
+    AllOwnedSources,
+    PutativeTarget,
+    PutativeTargets,
+    PutativeTargetsRequest,
+)
+from pants.engine.fs import PathGlobs, Paths
+from pants.engine.internals.selectors import Get
+from pants.engine.rules import collect_rules, rule
+from pants.engine.unions import UnionRule
+from pants.util.dirutil import group_by_dir
+from project_version.target_types import ProjectVersionTarget
+
+
+@dataclass(frozen=True)
+class PutativeProjectVersionTargetsRequest(PutativeTargetsRequest):
+    pass
+
+
+@rule(desc="Determine candidate version_file targets to create")
+async def find_putative_targets(
+    req: PutativeProjectVersionTargetsRequest,
+    all_owned_sources: AllOwnedSources,
+) -> PutativeTargets:
+    all_project_version_files = await Get(Paths, PathGlobs, req.path_globs("VERSION"))
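+    # Keep only the VERSION files that are not yet owned by any existing target.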
+    unowned_project_version_files = set(all_project_version_files.files) - set(
+        all_owned_sources
+    )
+    classified_unowned_project_version_files = {
+        ProjectVersionTarget: unowned_project_version_files
+    }
+
+    putative_targets = []
+    for tgt_type, paths in classified_unowned_project_version_files.items():
+        for dirname, filenames in group_by_dir(paths).items():
+            putative_targets.append(
+                PutativeTarget.for_target_type(
+                    ProjectVersionTarget,
+                    path=dirname,
+                    name="project-version-file",
+                    triggering_sources=sorted(filenames),
+                )
+            )
+
+    return PutativeTargets(putative_targets)
+
+
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(PutativeTargetsRequest, PutativeProjectVersionTargetsRequest),
+    ]
+```
+
+```python tab={"label":"pants-plugins/project_version/target_types.py"}
+from pants.engine.target import COMMON_TARGET_FIELDS, SingleSourceField, Target
+
+
+class ProjectVersionSourceField(SingleSourceField):
+    alias = "source"
+    help = "Path to the file with the project version."
+    default = "VERSION"
+    required = False
+
+
+class ProjectVersionTarget(Target):
+    alias = "version_file"
+    core_fields = (*COMMON_TARGET_FIELDS, ProjectVersionSourceField)
+    help = "A project version target representing the VERSION file."
+```
+
+There are a few more things left to do; for example, we haven't written any tests yet. This is what we'll do in the next tutorial!
diff --git a/versioned_docs/version-2.24/docs/tutorials/create-a-new-goal.mdx b/versioned_docs/version-2.24/docs/tutorials/create-a-new-goal.mdx
new file mode 100644
index 000000000..15efc42cc
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/tutorials/create-a-new-goal.mdx
@@ -0,0 +1,409 @@
+---
+    title: Create a new goal
+    sidebar_position: 1
+---
+
+Getting started writing plugins for Pants by creating a new goal.
+
+---
+
+In this tutorial, you'll learn the basics needed to get started writing a plugin. You will create a new goal, `project-version`, which will tell you the version (retrieved from the `VERSION` text file) of a particular project in your monorepository. You will learn how to create a new custom target to refer to the `VERSION` file, how to author a new goal, and, most importantly, how to connect rules and targets.
+
+You can follow along with this tutorial in your own repository; you only need to be on a recent version of Pants and have a `VERSION` file containing a version string, e.g. `1.2.3`. If you do not have a repository with Pants enabled yet, you can use [this example Python repository](https://github.com/pantsbuild/example-python/) to work on the plugin.
+
+### Registering a plugin
+
+We'll be writing an [in-repo plugin](../writing-plugins/overview.mdx#in-repo-plugins), and expect you to have the `pants-plugins/project_version` directory as well as the `pants.toml` file with this configuration:
+
+```toml title="pants.toml"
+[GLOBAL]
+# Specifying the path to our plugin's top-level folder using the `pythonpath` option:
+pythonpath = ["%(buildroot)s/pants-plugins"]
+
+backend_packages = [
+    "pants.backend.python",
+    ...
+    "project_version",
+]
+```
+
+### Creating a new target
+
+Once you have become familiar with the [core concepts of Targets and Fields](../writing-plugins/the-target-api/concepts.mdx), you are ready to [create your own custom target](../writing-plugins/the-target-api/creating-new-targets.mdx) that will represent the `VERSION` file:
+
+```python title="pants-plugins/project_version/target_types.py"
+from pants.engine.target import COMMON_TARGET_FIELDS, SingleSourceField, Target
+
+
+class ProjectVersionTarget(Target):
+    alias = "version_file"
+    core_fields = (*COMMON_TARGET_FIELDS, SingleSourceField)
+    help = "A project version target representing the VERSION file."
+```
+
+Our target has some common target fields such as `tags` and `description` available via the `COMMON_TARGET_FIELDS`; including those fields in your targets may be convenient if you decide to use tags and provide a description later. In addition, it also has the [`source` field](../writing-plugins/the-rules-api/rules-and-the-target-api.mdx#sourcesfield) which will be used to provide the path to the project's `VERSION` file.
+
+We could [add a custom field](../writing-plugins/the-target-api/creating-new-fields.mdx) to provide a file path; however, there are multiple advantages to using the `source` field. You will learn more about them in the following tutorials.
+
+In order to start using a target, you only need to register it:
+
+```python title="pants-plugins/project_version/register.py"
+from typing import Iterable
+
+from pants.engine.target import Target
+from project_version.target_types import ProjectVersionTarget
+
+
+def target_types() -> Iterable[type[Target]]:
+    return [ProjectVersionTarget]
+```
+
+You can now run `pants help version_file` to learn more about the target:
+
+```
+❯ pants help version_file
+
+`version_file` target
+---------------------
+
+A project version target representing the VERSION file.
+
+
+Activated by project_version
+Valid fields:
+
+source
+    type: str
+    required
+
+    A single file that belongs to this target.
+
+    Path is relative to the BUILD file's directory, e.g. `source='example.ext'`.
+
+...
+```
+
+You can now also add a target to the `myapp/BUILD` file:
+
+```python
+version_file(
+    name="main-project-version",
+    source="VERSION",
+)
+```
+
+Since you have registered the target, Pants will be able to "understand" it:
+
+```text
+$ pants peek myapp:main-project-version
+[
+  {
+    "address": "myapp:main-project-version",
+    "target_type": "version_file",
+    "dependencies": [],
+    "description": null,
+    "source_raw": "VERSION",
+    "sources": [
+      "myapp/VERSION"
+    ],
+    "tags": null
+  }
+]
+```
+
+### Creating a goal
+
+[Goals](../using-pants/key-concepts/goals.mdx) are the commands that Pants runs such as `fmt` or `lint`. Writing a plugin doesn't necessarily mean adding a new goal. Most users would likely only want to enrich their build metadata with new kinds of targets or extend the behavior of existing Pants goals. See [Common plugin tasks](../writing-plugins/common-plugin-tasks/index.mdx) to learn more.
+
+For the purposes of our tutorial, to be able to get a project version number (using the target we've just created), we need to [create a new goal](../writing-plugins/the-rules-api/goal-rules.mdx). The code below is the boilerplate necessary to create a goal, so it's not really necessary to understand how, for instance, subsystems work right now. The function decorated with `@goal_rule` can be named anything, but it's helpful for the name to represent the functionality your goal provides.
+To make your goal part of the plugin's interface, add it to the `rules` function in the `register.py` module.
+
+```python tab={"label":"pants-plugins/project_version/rules.py"}
+from pants.engine.goal import Goal, GoalSubsystem
+from pants.engine.rules import collect_rules, goal_rule
+
+
+class ProjectVersionSubsystem(GoalSubsystem):
+    name = "project-version"
+    help = "Show representation of the project version from the `VERSION` file."
+
+
+class ProjectVersionGoal(Goal):
+    subsystem_cls = ProjectVersionSubsystem
+    environment_behavior = Goal.EnvironmentBehavior.LOCAL_ONLY
+
+
+@goal_rule
+async def goal_show_project_version() -> ProjectVersionGoal:
+    return ProjectVersionGoal(exit_code=0)
+
+
+def rules():
+    return collect_rules()
+```
+
+```python tab={"label":"pants-plugins/project_version/register.py"}
+from typing import Iterable
+
+import project_version.rules as project_version_rules
+from pants.engine.target import Target
+from project_version.target_types import ProjectVersionTarget
+
+
+def target_types() -> Iterable[type[Target]]:
+    return [ProjectVersionTarget]
+
+
+def rules():
+    return [*project_version_rules.rules()]
+```
+
+You can now run `pants project-version` to confirm the command exits with exit code `0`.
+
+At this point, we are ready to do something useful with our new target. Goals generally run on targets, so they need to be passed as an argument on the command line. For instance, to format the `myproject` directory targets, you would run `pants fmt myproject`. To get the version of a project in your repository, it makes sense to pass the `project-version` goal a project directory containing the `version_file` definition.
+
+To make a target passed as an argument accessible in the goal rule, we pass the [`Targets`](../writing-plugins/the-rules-api/goal-rules.mdx#how-to-operate-on-targets) as input arguments of the function, along with the [`Console`](../writing-plugins/the-rules-api/goal-rules.mdx#console-output-to-stdoutstderr) object, so that we can print the details of our target in the user's terminal:
+
+```python
+@goal_rule
+async def goal_show_project_version(console: Console, targets: Targets) -> ProjectVersionGoal:
+    # since we don't know what targets will be passed (e.g. `::`),
+    # we want to keep only `version_file` targets
+    targets = [tgt for tgt in targets if tgt.alias == ProjectVersionTarget.alias]
+    for target in targets:
+        console.print_stdout(target.address.metadata())
+    return ProjectVersionGoal(exit_code=0)
+
+```
+
+Having the following directory structure:
+
+```text
+myapp
+├── BUILD
+└── VERSION
+```
+
+we are ready to inspect our new target:
+
+```text
+$ pants project-version myapp
+{'address': 'myapp:main-project-version'}
+```
+
+### Writing a rule
+
+You can think of the `@goal_rule` as the `main` function in your Python program where you would call various functions that your program needs to complete. For auxiliary code, it makes sense to place it into standalone functions, which is what `@rule`s are for.
+
+Let's create a rule that will return a data structure that we'll use to represent our project version.
+[Data classes](../writing-plugins/the-rules-api/concepts.mdx#dataclasses) work really well with the Pants engine, so let's create one:
+
+```python
+@dataclass(frozen=True)
+class ProjectVersionFileView:
+    path: str
+    version: str
+```
+
+This is what our `@rule` function would return (for now without actually reading the `VERSION` file):
+
+```python
+@rule
+async def get_project_version_file_view(
+    target: ProjectVersionTarget,
+) -> ProjectVersionFileView:
+    return ProjectVersionFileView(
+        path="path", version="1.2.3"
+    )
+```
+
+Now, we have our `@goal_rule`, but we cannot call the `get_project_version_file_view` function directly; it's Pants that will determine that a rule is needed and will make the function call. Well, what should you do to tell Pants you need that rule executed? You should make a function call that:
+
+- passes an object of the type that matches the type of the rule's input arguments
+- requests an object of the type that a rule returns (you can see that in a type hint)
+
+For this, you can use [`Get`](../writing-plugins/the-rules-api/concepts.mdx#await-get---awaiting-results-in-a-rule-body):
+
+```python
+@goal_rule
+async def goal_show_project_version(console: Console, targets: Targets) -> ProjectVersionGoal:
+    targets = [tgt for tgt in targets if tgt.alias == ProjectVersionTarget.alias]
+    for target in targets:
+        project_version = await Get(ProjectVersionFileView, ProjectVersionTarget, target)
+        console.print_stdout(project_version)
+    return ProjectVersionGoal(exit_code=0)
+```
+
+Understanding that calling `Get()` is what causes a particular `@rule` to be executed is essential! It may feel awkward that you cannot run your function directly. However, by using `Get()`, you are asking Pants to run your rule, and knowing just this will get you quite far!
+
+Compare this `Get()` call with the rule signature:
+
+```python
+# requesting an object of type "ProjectVersionFileView",
+# passing an object of type "ProjectVersionTarget" in the variable "target"
+Get(ProjectVersionFileView, ProjectVersionTarget, target)
+```
+
+```python
+# it requires an object of type "ProjectVersionTarget" and
+# will return an object of type "ProjectVersionFileView"
+@rule
+async def get_project_version_file_view(target: ProjectVersionTarget) -> ProjectVersionFileView: ...
+```
+
+:::note Understanding the requests and rules signatures
+In our basic usage, there's a 1:1 match between the `Get(output: B, input: A, obj)` request and the `@rule(input: A) -> B` function signature. This doesn't have to be the case! When you make a request (providing an input type and asking for an output type), Pants looks at all the [rules in the graph](../writing-plugins/the-rules-api/concepts.mdx#the-rule-graph) to find a way from the input to the output using all the available rules.
+:::
+
+Let's consider the following scenario where you have a few `@rule`s and a `Get()` request:
+
+```python
+@rule
+async def rule1(A) -> B: ...
+
+@rule
+async def rule2(B) -> C: ...
+
+@goal_rule
+async def main(...):
+    result = await Get(C, A, obj)
+```
+
+With the following suite of rules, Pants will "figure out" that in order to return `C`, it's necessary to call `rule1` first to get `B`, and then, once there's `B`, call `rule2` to get `C`. This means you can focus on writing individual rules and leave to Pants the hard work of figuring out the right order of the calls that will need to happen!
+
+The `project-version` Pants goal now shows some useful information: the target path along with a dummy version. This means our `@rule` was run!
+
+```
+$ pants project-version myapp
+ProjectVersionFileView(path='myapp:main-project-version', version='1.2.3')
+```
+
+You would normally expect a project to have only a single `version_file` target declared, so as an improvement, we could raise an exception if there are multiple targets of this type found within a single project. This is something we'll do in the following tutorials.
+
+### Reading the `VERSION` file
+
+Let's read the `VERSION` file and print the version number in the terminal. The `source` field of our target needs to be ["hydrated"](../writing-plugins/the-rules-api/rules-and-the-target-api.mdx#sourcesfield). [Reading a file](../writing-plugins/the-rules-api/file-system.mdx) is pretty straightforward as well. We use `Get()` to transform our inputs as needed. Knowing what class you need to request may be tricky, so make sure to review the documentation, and ask for help if you are stuck!
+
+```python
+@rule
+async def get_project_version_file_view(
+    target: ProjectVersionTarget,
+) -> ProjectVersionFileView:
+    sources = await Get(HydratedSources, HydrateSourcesRequest(target[SourcesField]))
+    digest_contents = await Get(DigestContents, Digest, sources.snapshot.digest)
+    file_content = digest_contents[0]
+    return ProjectVersionFileView(
+        path=file_content.path, version=file_content.content.decode("utf-8").strip()
+    )
+```
+
+If the `@goal_rule` receives multiple `version_file` targets (which may happen if the user runs the goal for multiple projects or provides a recursive glob pattern such as `::`), it needs to iterate over the list of targets. For efficiency, it is generally encouraged to replace the `Get()` calls in the `for` loop with a [`MultiGet()` call](../writing-plugins/the-rules-api/tips-and-debugging.mdx#tip-use-multiget-for-increased-concurrency):
+
+```python
+@goal_rule
+async def goal_show_project_version(console: Console, targets: Targets) -> ProjectVersionGoal:
+    targets = [tgt for tgt in targets if tgt.alias == ProjectVersionTarget.alias]
+    results = await MultiGet(
+        Get(ProjectVersionFileView, ProjectVersionTarget, target) for target in targets
+    )
+    for result in results:
+        console.print_stdout(str(result))
+    return ProjectVersionGoal(exit_code=0)
+```
+
+### Putting it all together
+
+Let's get all of this code in one place and see what happens!
+
+```python tab={"label":"pants-plugins/project_version/rules.py"}
+from dataclasses import dataclass
+
+from pants.engine.console import Console
+from pants.engine.fs import DigestContents
+from pants.engine.goal import Goal, GoalSubsystem
+from pants.engine.internals.native_engine import Digest
+from pants.engine.internals.selectors import Get, MultiGet
+from pants.engine.rules import collect_rules, goal_rule, rule
+from pants.engine.target import (HydratedSources, HydrateSourcesRequest,
+                                 SourcesField, Targets)
+from project_version.target_types import ProjectVersionTarget
+
+
+@dataclass(frozen=True)
+class ProjectVersionFileView:
+    path: str
+    version: str
+
+
+@rule
+async def get_project_version_file_view(
+    target: ProjectVersionTarget,
+) -> ProjectVersionFileView:
+    sources = await Get(HydratedSources, HydrateSourcesRequest(target[SourcesField]))
+    digest_contents = await Get(DigestContents, Digest, sources.snapshot.digest)
+    file_content = digest_contents[0]
+    return ProjectVersionFileView(
+        path=file_content.path, version=file_content.content.decode("utf-8").strip()
+    )
+
+
+class ProjectVersionSubsystem(GoalSubsystem):
+    name = "project-version"
+    help = "Show representation of the project version from the `VERSION` file."
+
+
+class ProjectVersionGoal(Goal):
+    subsystem_cls = ProjectVersionSubsystem
+    environment_behavior = Goal.EnvironmentBehavior.LOCAL_ONLY
+
+
+@goal_rule
+async def goal_show_project_version(
+    console: Console, targets: Targets
+) -> ProjectVersionGoal:
+    targets = [tgt for tgt in targets if tgt.alias == ProjectVersionTarget.alias]
+    results = await MultiGet(
+        Get(ProjectVersionFileView, ProjectVersionTarget, target) for target in targets
+    )
+    for result in results:
+        console.print_stdout(str(result))
+    return ProjectVersionGoal(exit_code=0)
+
+
+def rules():
+    return collect_rules()
+```
+
+```python tab={"label":"pants-plugins/project_version/target_types.py"}
+from pants.engine.target import COMMON_TARGET_FIELDS, SingleSourceField, Target
+
+
+class ProjectVersionTarget(Target):
+    alias = "version_file"
+    core_fields = (*COMMON_TARGET_FIELDS, SingleSourceField)
+    help = "A project version target representing the VERSION file."
+```
+
+```python tab={"label":"pants-plugins/project_version/register.py"}
+from typing import Iterable
+
+import project_version.rules as project_version_rules
+from pants.engine.target import Target
+from project_version.target_types import ProjectVersionTarget
+
+
+def target_types() -> Iterable[type[Target]]:
+    return [ProjectVersionTarget]
+
+
+def rules():
+    return [*project_version_rules.rules()]
+```
+
+Running our goal:
+
+```
+$ pants project-version myapp
+ProjectVersionFileView(path='myapp/VERSION', version='0.0.1')
+```
+
+The `VERSION` file was read, and its contents are shown in the console. Congratulations, you have now finished writing your first plugin!
+
+There are a few things that we could do to improve it, though. We may want to check that the version string follows a semver convention, let the user see the version in the console as a JSON object if desired, or show the version number string when exploring the `version_file` target via the `peek` Pants goal. This is something we'll do in the following tutorials!
diff --git a/versioned_docs/version-2.24/docs/tutorials/testing-plugins.mdx b/versioned_docs/version-2.24/docs/tutorials/testing-plugins.mdx
new file mode 100644
index 000000000..44cfdbd53
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/tutorials/testing-plugins.mdx
@@ -0,0 +1,293 @@
+---
+    title: Testing plugins
+    sidebar_position: 0
+---
+
+How to write tests for your custom plugin code.
+
+---
+
+## Introduction
+
+In this tutorial, we'll learn how to test the custom plugin we wrote earlier. The Pants documentation provides comprehensive coverage of [plugin testing](../writing-plugins/the-rules-api/testing-plugins.mdx), and this tutorial should help you get started writing your own tests.
+
+Most of the plugin code that needs to be tested is in the following files:
+
+- `rules.py`, where we implemented how a `VERSION` file needs to be read and how to use a `version_file` BUILD target
+- `tailor.py`, where we taught the `tailor` goal about `VERSION` files and the generation of `version_file` targets
+
+To author a test suite, it may make sense to write a very high-level test first to confirm our code does what we expect. Let's write some [integration tests for Pants](../writing-plugins/the-rules-api/testing-plugins.mdx#approach-4-run_pants-integration-tests-for-pants) so that we can run our goal from a test!
+
+### Testing with a complete Pants process
+
+Pants provides a convenient way to run a full Pants process as it would run on the command line. Writing such a test is equivalent to having, say, a shell script that confirms that the output of the `pants project-version myapp:` command is `{"path": "myapp/VERSION", "version": "0.0.1"}`. Keep in mind that running custom scripts with this type of test would require having a Pants repository set up (including the `pants.toml` configuration), creating `BUILD` metadata files, and so on. When writing custom acceptance tests using the `pants.testutil` package, in contrast, you don't have to worry about that and can focus on testing your plugin logic in a minimal environment containing only what's absolutely necessary to run your plugin code.
+
+In the following code snippet, we define a set of files to be created (in a temporary directory that Pants manages for us), the backends to be used (Python and our custom plugin), and a Pants command to be run. By reading the `stdout` of a process, we can confirm the plugin works as expected (conveniently ignoring any unrelated warnings that Pants may have produced).
+
+```python
+import json
+from pathlib import Path
+
+from pants.testutil.pants_integration_test import run_pants, setup_tmpdir
+
+build_root_marker = Path.cwd().joinpath("BUILDROOT")
+
+
+def test_reading_project_version_target() -> None:
+    """Run a full Pants process as it would run on the command line."""
+    project_files = {
+        "project/BUILD": "version_file(source='VERSION')",
+        "project/VERSION": "10.6.1",
+    }
+    # This is a limitation of the current implementation.
+    # See https://github.com/pantsbuild/pants/issues/12760.
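+    # Touching a `BUILDROOT` marker file lets Pants locate a build root in the
+    # test's working directory (an assumption based on the issue linked above).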
+    build_root_marker.touch()
+    with setup_tmpdir(project_files) as tmpdir:
+        result = run_pants(
+            [
+                (
+                    "--backend-packages="
+                    "['pants.backend.python', 'internal_plugins.project_version']"
+                ),
+                "project-version",
+                "--as-json",
+                f"{tmpdir}/project:",
+            ],
+        )
+        result.assert_success()
+        assert result.stdout.strip() == json.dumps(
+            {"path": f"{tmpdir}/project/VERSION", "version": "10.6.1"}
+        )
+    build_root_marker.unlink()
+```
+
+These tests do not need any special bootstrapping and can be run with the `test` goal just like any other tests you may have in the repository. However, they are slow, and if there are lots of test cases to check (e.g. you want to test usage of flags and targets with various fields set), it may soon become impractical to run them often enough. You would most likely want to test your plugin logic in a more isolated fashion.
+
+### Testing goal rules
+
+You can exercise the goal rule by using [`rule_runner.run_goal_rule()`](../writing-plugins/the-rules-api/testing-plugins.mdx#testing-goal_rules), which runs very fast and does not start a full Pants process. In the test below, we register all rules from the `project_version` plugin with the `RuleRunner` so that the engine can find them when a test is run. These tests scale nicely, and if your plugins are fairly simple, they may suffice.
+
+```python
+import pytest
+from pants.engine.internals.scheduler import ExecutionError
+from pants.testutil.rule_runner import RuleRunner
+
+from internal_plugins.project_version.rules import ProjectVersionGoal
+from internal_plugins.project_version.rules import rules as project_version_rules
+from internal_plugins.project_version.target_types import ProjectVersionTarget
+
+
+@pytest.fixture
+def rule_runner() -> RuleRunner:
+    return RuleRunner(
+        rules=project_version_rules(), target_types=[ProjectVersionTarget]
+    )
+
+
+def test_project_version_goal(rule_runner: RuleRunner) -> None:
+    """Test a `project-version` goal using VERSION files."""
+    rule_runner.write_files(
+        {
+            "project/VERSION": "10.6.1",
+            "project/BUILD": "version_file(source='VERSION')",
+        }
+    )
+    result = rule_runner.run_goal_rule(
+        ProjectVersionGoal, args=["--as-json", "project:"]
+    )
+    assert result.stdout.splitlines() == [
+        '{"path": "project/VERSION", "version": "10.6.1"}'
+    ]
+
+    # Invalid version string is provided.
+    rule_runner.write_files(
+        {
+            "project/VERSION": "foo.bar",
+            "project/BUILD": "version_file(source='VERSION')",
+        }
+    )
+    with pytest.raises(ExecutionError):
+        rule_runner.run_goal_rule(ProjectVersionGoal, args=["project:"])
+```
+
+### Testing individual rules
+
+If your plugin is more sophisticated and there are many rules, you may want to test them in isolation. In our plugin, there are a couple of rules we could write tests for. For example, the `get_project_version_file_view` rule reads a target and returns an instance of a dataclass, namely `ProjectVersionFileView`. This looks like a good candidate for a very isolated test.
+
+```python
+import pytest
+from pants.build_graph.address import Address
+from pants.testutil.rule_runner import QueryRule, RuleRunner
+
+from internal_plugins.project_version.rules import (
+    ProjectVersionFileView,
+    get_project_version_file_view,
+)
+from internal_plugins.project_version.target_types import ProjectVersionTarget
+
+
+@pytest.fixture
+def rule_runner() -> RuleRunner:
+    return RuleRunner(
+        rules=[
+            get_project_version_file_view,
+            QueryRule(ProjectVersionFileView, [ProjectVersionTarget]),
+        ],
+        target_types=[ProjectVersionTarget],
+    )
+
+
+def test_get_project_version_file_view(rule_runner: RuleRunner) -> None:
+    """Test plugin rules in isolation (not specifying what rules need to be run)."""
+    rule_runner.write_files(
+        {"project/VERSION": "10.6.1", "project/BUILD": "version_file(source='VERSION')"}
+    )
+    target = rule_runner.get_target(Address("project", target_name="project"))
+    result = rule_runner.request(ProjectVersionFileView, [target])
+    assert result == ProjectVersionFileView(path="project/VERSION", version="10.6.1")
+```
+
+Since we have extended the `tailor` goal to generate `version_file` targets in the directories containing `VERSION` files, let's write a test to confirm the goal does what we want. For this, we can continue using the [`RuleRunner`](../writing-plugins/the-rules-api/testing-plugins.mdx#running-your-rules). Let's create a temporary build root, write the necessary files, and then ask Pants to get a list of targets that it would have created for us.
+
+It's often difficult to know how to test a particular piece of functionality, so it's worth taking a look at the Pants codebase. For instance, this `tailor` test has been adapted from this [test suite](https://github.com/pantsbuild/pants/blob/8cb558592d00b228182e6bbcb667705dad73bb95/src/python/pants/backend/cc/goals/tailor_test.py#L1-L0).
+
+```python
+from pathlib import Path
+
+import pytest
+from pants.core.goals.tailor import AllOwnedSources, PutativeTarget, PutativeTargets
+from pants.engine.rules import QueryRule
+from pants.testutil.rule_runner import RuleRunner
+from pants.util.frozendict import FrozenDict
+
+from internal_plugins.project_version.tailor import (
+    PutativeProjectVersionTargetsRequest,
+    rules,
+)
+from internal_plugins.project_version.target_types import ProjectVersionTarget
+
+
+@pytest.fixture
+def rule_runner() -> RuleRunner:
+    return RuleRunner(
+        rules=[
+            *rules(),
+            QueryRule(
+                PutativeTargets, (PutativeProjectVersionTargetsRequest, AllOwnedSources)
+            ),
+        ],
+        target_types=[ProjectVersionTarget],
+    )
+
+
+def test_find_putative_version_file_targets(rule_runner: RuleRunner) -> None:
+    """Test generating `version_file` targets in a project directory."""
+    files = {
+        "project/dir1/VERSION": "10.6.1",
+        "project/dir2/file.txt": "",
+        "project/dir3/VERSION": "10.7.1",
+        # Note that dir3/VERSION already has the target and should be ignored.
+        "project/dir3/BUILD": "version_file(source='VERSION')",
+    }
+    rule_runner.write_files(files)
+    for filepath in files:
+        assert Path(rule_runner.build_root, filepath).exists()
+
+    putative_targets = rule_runner.request(
+        PutativeTargets,
+        [
+            PutativeProjectVersionTargetsRequest(
+                ("project/dir1", "project/dir2", "project/dir3"),
+            ),
+            # Declare that all these files in the project are already owned by targets.
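+            # `tailor` should therefore propose a new target only for project/dir1.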
+            AllOwnedSources(["project/dir2/file.txt", "project/dir3/VERSION"]),
+        ],
+    )
+
+    assert (
+        PutativeTargets(
+            [
+                PutativeTarget(
+                    path="project/dir1",
+                    name="project-version-file",
+                    type_alias="version_file",
+                    triggering_sources=("VERSION",),
+                    owned_sources=("VERSION",),
+                    kwargs=FrozenDict({}),
+                    comments=(),
+                )
+            ]
+        )
+        == putative_targets
+    )
+```
+
+### Unit testing for rules
+
+Finally, if your plugin is very complex and would benefit from more rigorous testing, you may consider writing [unit tests for the rules](../writing-plugins/the-rules-api/testing-plugins.mdx#approach-2-run_rule_with_mocks-unit-tests-for-rules) where some parts of the rules are patched with mocks. For instance, there's the `get_git_repo_version` rule, which calls Git (in a subprocess) to describe the repository status. Instead of running Git, we could mock the `Process` call to make sure the inline logic of the rule is correct.
+
+```python
+from unittest.mock import Mock
+
+from pants.base.build_root import BuildRoot
+from pants.core.util_rules.system_binaries import (
+    BinaryPath,
+    BinaryPathRequest,
+    BinaryPaths,
+)
+from pants.engine.process import Process, ProcessResult
+from pants.testutil.rule_runner import MockGet, run_rule_with_mocks
+
+from internal_plugins.project_version.rules import GitTagVersion, get_git_repo_version
+
+
+def test_get_git_version() -> None:
+    """Test running a specific rule returning a GitVersion."""
+
+    def mock_binary_paths(request: BinaryPathRequest) -> BinaryPaths:
+        return BinaryPaths(binary_name="git", paths=[BinaryPath("/usr/bin/git")])
+
+    def mock_process_git_describe(process: Process) -> ProcessResult:
+        return Mock(stdout=b"10.6.1\n")
+
+    result: GitTagVersion = run_rule_with_mocks(
+        get_git_repo_version,
+        rule_args=[BuildRoot, ""],
+        mock_gets=[
+            MockGet(
+                output_type=BinaryPaths,
+                input_types=(BinaryPathRequest,),
+                mock=mock_binary_paths,
+            ),
+            MockGet(
+                output_type=ProcessResult,
+                input_types=(Process,),
+                mock=mock_process_git_describe,
+            ),
+        ],
+    )
+    assert result == GitTagVersion("10.6.1")
+```
+
+If you like, you could write the helper functions that return the mock objects as lambdas, for instance:
+
+```python
+MockGet(
+    output_type=BinaryPaths,
+    input_types=(BinaryPathRequest,),
+    mock=lambda request: BinaryPaths(binary_name="git", paths=[BinaryPath("/usr/bin/git")]),
+),
+```
+
+However, because `lambda` syntax does not support type annotations, mocking many `Get` requests this way can make your tests slightly harder to read. For instance, in the example above, the type of the `request` argument is unknown.
+
+---
+
+This concludes the series of tutorials that should help you get started writing your own plugins with Pants. We have by now done quite a lot of work! You have learned:
+
+- how to create your own goal, custom options, and custom targets
+- how to extend existing Pants goals such as `tailor`
+- how to run system tools in your plugins and how Pants interacts with the file system
+- how to write unit and integration tests for your plugin code
+
+You are now ready to design and implement your next Pants plugin!
diff --git a/versioned_docs/version-2.24/docs/using-pants/_category_.json b/versioned_docs/version-2.24/docs/using-pants/_category_.json
new file mode 100644
index 000000000..098e12f2b
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Using Pants",
+  "position": 4
+}
diff --git a/versioned_docs/version-2.24/docs/using-pants/advanced-target-selection.mdx b/versioned_docs/version-2.24/docs/using-pants/advanced-target-selection.mdx
new file mode 100644
index 000000000..8fb1267bf
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/advanced-target-selection.mdx
@@ -0,0 +1,227 @@
+---
+    title: Advanced target selection
+    sidebar_position: 3
+---
+
+Alternative techniques to tell Pants which files/targets to run on.
+
+---
+
+See [Goal arguments](./key-concepts/goals.mdx#goal-arguments) for the normal techniques for telling Pants what to
+run on.
+
+See [Project introspection](./project-introspection.mdx) for queries that you can run and then pipe
+into another Pants run, such as finding the dependencies of a target or file.
+
+## Running over changed files with `--changed-since`
+
+Because Pants understands Git, it can find which files have changed since a certain commit through the `--changed-since` option.
+
+For example, to lint all uncommitted files, run:
+
+```bash
+pants --changed-since=HEAD lint
+```
+
+To run against another branch, run:
+
+```bash
+pants --changed-since=origin/main lint
+```
+
+By default, `--changed-since` will only run over files directly changed. Often, though, you will want to run over any [dependents](./project-introspection.mdx) of those changed files, meaning any targets that depend on the changed files. Use `--changed-dependents=direct` or `--changed-dependents=transitive` for this:
+
+```bash
+❯ pants \
+  --changed-since=origin/main \
+  --changed-dependents=transitive \
+  test
+```
+
+:::note Hint: Pants does not understand transitive third-party dependencies in this context.
+Changes to third-party dependencies (particularly, dependencies of dependencies) may not be
+surfaced as you expect via `--changed-*`. In particular, any change to a single dependency
+within a lockfile or a target generator (such as `python_requirements`) will cause all
+users of _any_ dependency to be considered changed, transitively.
+:::
+
+## `filter` options
+
+Use filters to operate on only targets that match the predicate, e.g. only running Python tests.
+
+Specify a predicate by using one of the below `filter` options, like `--filter-target-type`. You
+can use a comma to OR multiple values, meaning that at least one member must be matched. You
+can repeat the option multiple times to AND each filter. You can prefix the filter with
+`-` to negate it, meaning that the target must not match the filter.
+
+Some examples:
+
+```bash
+# Only `python_source` targets.
+pants --filter-target-type=python_source list ::
+
+# `python_source` or `python_test` targets.
+pants --filter-target-type='python_source,python_test' list ::
+
+# Any target except for `python_source` targets
+pants --filter-target-type='-python_source' list ::
+```
+
+You can combine multiple filter options in the same run, e.g.:
+
+```bash
+pants --filter-target-type='python_test' --filter-address-regex=^integration_tests test ::
+```
+
+### `--filter-target-type`
+
+Each value should be the name of a target type, e.g.
+`pants --filter-target-type=python_test test ::`.
+
+Run `pants help targets` to see what targets are registered.
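+
+Filters also compose with the change-detection options described above. For example, a sketch of a CI invocation that runs only the `python_test` targets affected by changes on your branch:
+
+```bash
+pants --changed-since=origin/main --changed-dependents=transitive \
+  --filter-target-type=python_test test
+```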
+
+### `--filter-address-regex`
+
+Regex strings for the address, such as
+`pants --filter-address-regex='^integration_tests$' test ::`.
+
+### `--filter-tag-regex`
+
+Regex strings to match against the `tags` field, such as
+`pants --filter-tag-regex='^skip_lint$' lint ::`.
+
+If you don't need the power of regex, use the simpler `--tag` global option explained below.
+
+## Tags: annotating targets
+
+Every target type has a field called `tags`, which allows you to add a sequence of strings. The
+strings can be whatever you'd like, such as `"integration_test"`.
+
+```python title="BUILD"
+python_tests(
+    name="integration",
+    sources=["*_integration_test.py"],
+    tags=["skip_lint", "integration_test"],
+)
+```
+
+You can then filter by tags with the global `--tag` [option](../../reference/global-options.mdx#tag), like this:
+
+```bash
+pants --tag=integration_test list ::
+```
+
+To exclude certain tags, prefix with a `-`:
+
+```bash
+pants --tag='-integration_test' list ::
+```
+
+You can even combine multiple includes and excludes:
+
+```bash
+pants --tag='+type_checked,skip_lint' --tag='-integration_test' list ::
+```
+
+Use `--filter-tag-regex` instead for more complex queries.
+
+## `--spec-files`
+
+The global option `--spec-files` allows you to pass a file containing target addresses and/or file names/globs to Pants.
+
+Each entry must be on its own line.
+
+For example:
+
+```text tab={"label":"Shell"}
+$ pants --spec-files=targets.txt list
+```
+
+```text tab={"label":"targets.txt"}
+helloworld/lang/*.py
+helloworld/util
+helloworld/util:tests
+```
+
+:::note Tip: centralized allow/block lists
+Whereas `tags` are useful for _decentralized_ allow/block lists, `--spec-files` is useful when you want to define one single list of targets or files.
+:::
+
+## Piping to other Pants runs
+
+To pipe a Pants run, use your shell's `|` pipe operator and `xargs`:
+
+```bash
+pants dependents helloworld/util | xargs pants list
+```
+
+You can, of course, pipe multiple times:
+
+```bash
+# Run over the second-degree dependents of `utils.py`.
+❯ pants dependents helloworld/utils.py | \
+   xargs pants dependents | \
+   xargs pants lint
+```
+
+:::note Alternative: use `--spec-files`
+Sometimes, you may want to reuse the output of a Pants run for multiple subsequent Pants runs. Rather than repeating `xargs` multiple times, you can generate a file through stdout redirection and `--spec-files`.
+
+For example:
+
+```bash
+$ pants dependencies helloworld/util > util_dependencies.txt
+$ pants --spec-files=util_dependencies.txt lint
+```
+
+Using spec files is also more robust, because when piping the output of a Pants goal to `xargs`, the specified command
+may be invoked by `xargs` as many times as necessary to use up the list of input items.
+This may break structured data output, for instance, when you want to `peek` the targets as JSON:
+
+```bash
+$ pants list --filter-target-type=resource :: | xargs pants peek
+```
+
+If you don't want to save the output to an actual file—such as to not pollute version control—you can use a variable and a named pipe:
+
+```bash
+$ TARGETS=$(pants dependencies helloworld/util)
+$ pants --spec-files=<(echo $TARGETS) lint
+```
+
+:::
+
+## Sharding the input targets
+
+The `test` goal natively supports sharding input targets into multiple shards. Use the option `--test-shard=k/N`, where `k` is a non-negative integer less than `N`. For example, you can split up your CI into three shards with `--test-shard=0/3`, `--test-shard=1/3`, and `--test-shard=2/3`.
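+
+For instance, a sketch of one of the three CI jobs:
+
+```bash
+pants --test-shard=0/3 test ::
+```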
+
+For other goals, you can leverage shell piping to partition the input targets into multiple shards. For example, to split your `package` run into 5 shards and select shard 0:
+
+```bash
+pants list :: | awk 'NR % 5 == 0' | xargs pants package
+```
+
+## Using CLI aliases
+
+If setting tags on individual targets is not feasible, there are a few other options available to refer to multiple targets.
+
+If you have an operation that you perform often on a certain group of targets, you can use the
+[cli](../../reference/subsystems/cli) subsystem options to create shortcuts. For instance, this alias
+would let you run `pants publish-libraries` to publish all Python distributions declared in the `src/libA` and `src/libB`
+directories.
+
+```toml title="pants.toml"
+[cli.alias]
+publish-libraries = "--filter-target-type=python_distribution --filter-address-regex=\"['^src/libA/,^src/libB/']\" publish src::"
+```
+
+You can use any argument or goal, and the alias doesn't need to be a "full" invocation of Pants.
+For instance, you could combine filtering arguments with the `--changed-since` flag and a tag to refer to long-running
+integration tests that have been recently modified:
+
+```toml title="pants.toml"
+[cli.alias]
+--integration-long = "--changed-since --filter-target-type=python_test --tag=long"
+```
+
+You can now invoke `pants --integration-long test tests::` to run the relevant tests.
diff --git a/versioned_docs/version-2.24/docs/using-pants/anonymous-telemetry.mdx b/versioned_docs/version-2.24/docs/using-pants/anonymous-telemetry.mdx
new file mode 100644
index 000000000..a8dbff66f
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/anonymous-telemetry.mdx
@@ -0,0 +1,88 @@
+---
+    title: Anonymous telemetry
+    sidebar_position: 11
+---
+
+---
+
+Pants can optionally send anonymized telemetry to the Pants project. This data helps us develop and improve Pants by detecting bugs, analyzing usage patterns, and so on.
+
+Telemetry is sent in the background, so it doesn't slow down your Pants runs.
+
+No telemetry is sent until you opt into this feature.
+
+## Opting in to telemetry
+
+To enable telemetry, you set options in the `[anonymous-telemetry]` section of your `pants.toml` config file:
+
+```toml title="pants.toml"
+[anonymous-telemetry]
+enabled = true
+repo_id = "<uuid>"
+```
+
+Where `<uuid>` is some random identifier unique to your repo, such as one generated by the `uuidgen` program.
+
+An easy way to add this to your `pants.toml` is:
+
+```
+printf "\n[anonymous-telemetry]\nenabled = true\nrepo_id = \"$(uuidgen)\"\n" >> pants.toml
+```
+
+The anonymous data we receive from telemetry is extremely valuable, and a great help to the project maintainers. We also plan to make your telemetry data available to you for your own analytics. So we hope you are able to opt in. However, we understand if you prefer not to.
+
+To explicitly opt out of telemetry and silence any logging about it, set `enabled = false` instead.
+
+## What data is sent
+
+Each Pants run will send the following data:
+
+- The unique id of the run, which is a random uuid prefixed by the timestamp of the run.
+- The timestamp of the run.
+- The duration of the run.
+- The outcome of the run (success or failure).
+- Platform information, as returned by [`platform.platform()`](https://docs.python.org/3/library/platform.html#platform.platform) (e.g., `'macOS-10.16-x86_64-i386-64bit'`).
+- The implementation of the Python interpreter that Pants ran on, as returned by [`platform.python_implementation()`](https://docs.python.org/3/library/platform.html#platform.python_implementation) (e.g., `'CPython'`).
+- The version of the Python interpreter that Pants ran on, as returned by [`platform.python_version()`](https://docs.python.org/3/library/platform.html#platform.python_version) (e.g., `'3.7.3'`).
+- The Pants version (e.g., `'2.3.0'`).
+- The sha256 hash of the repo id as set in `pants.toml`.
+- The sha256 hash of the concatenation of the repo id and the machine's MAC address, as returned by [`uuid.getnode()`](https://docs.python.org/3/library/uuid.html#uuid.getnode).
+- The sha256 hash of the concatenation of the repo id and the username, as returned by [`getpass.getuser()`](https://docs.python.org/3/library/getpass.html#getpass.getuser).
+- The goals of the run, with custom goals filtered out (e.g., `'test,fmt,lint'`).
+- The number of goals run (including custom goals).
+
+## How we ensure anonymity
+
+- We only send sha256 hashes of ids.
+- The repo id, even before hashing, is a uuid. So its hash should be robust against dictionary attacks, assuming your uuid generator is strong (e.g., you used `uuidgen` and your system has a strong random number generator).
+- The machine and user ids are prefixed by the repo id, so the resulting hashes are similarly robust against dictionary attacks.
+- We do not record the IP address or any other envelope information.
+
+:::caution In public repos the repo id may be public
+The anonymity properties above are ensured for private repos, where `pants.toml`, and therefore your `repo_id`, are private.
+
+For repos that are publicly visible, e.g., on GitHub, the `repo_id` will be visible in your `pants.toml`. So repo-level data is not anonymous. However, machine- and user-level data is still anonymous (although somewhat more susceptible to dictionary attacks).
+
+Developers in public repos are usually not concerned about this, since their entire development occurs in the open anyway, via publicly visible code, CI runs, pull request comments and so on. All the telemetry potentially exposes is various stats about Pants usage.
+
+If you still prefer not to expose these stats, you can set the `repo_id` to the empty string. This will remove repo, machine and user ids entirely from the telemetry.
+:::
+
+## How we avoid exposing proprietary information
+
+Seemingly innocuous data elements, such as filenames, custom option names, and custom goal names, may reference proprietary information, e.g., `path/to/my/secret/project/BUILD`. To avoid accidentally exposing even so much as a secret name:
+
+- We don't send the full command line, just the goals invoked.
+- Even then, we only send standard goal names, such as `test` or `lint`, and filter out custom goals.
+- We only send numerical error codes, not error messages or stack traces.
+- We don't send config or environment variable values.
+
+## Data policies
+
+Data is aggregated and processed on our behalf by [bugout.dev](https://bugout.dev/).
+
+Data can be accessed by selected maintainers of the Pants open source community (as GDPR controllers), by bugout.dev in their capacity as processors of the data, and by Pants users (as GDPR data subjects) when they exercise their Right of Access.
+
+The data retention period is 1 year.
+
+We will honor requests for access and requests for deletion within 14 days of request.
diff --git a/versioned_docs/version-2.24/docs/using-pants/assets-and-archives.mdx b/versioned_docs/version-2.24/docs/using-pants/assets-and-archives.mdx
new file mode 100644
index 000000000..8fa623ea8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/assets-and-archives.mdx
@@ -0,0 +1,159 @@
+---
+    title: Assets and archives
+    sidebar_position: 5
+---
+
+How to include assets such as images and config files in your project.
+
+---
+
+There are two ways to include asset files in your project: `resource` and `file` targets.
+
+## `resources`
+
+A [`resource`](../../reference/targets/resource.mdx) target is for files that are members of code packages, and are loaded via language-specific mechanisms, such as Python's `importlib.resources.read_text()` or Java's `getResource()`.
+
+Pants will make resources available on the appropriate runtime path, such as Python's `PYTHONPATH` or the JVM classpath. Resources can be loaded directly from a binary in which they are embedded, such as a Pex file, without first unpacking it.
+
+To reduce boilerplate, the [`resources`](../../reference/targets/resources.mdx) target generates a `resource` target per file in the `sources` field.
+
+For example, to load resources in Python:
+
+```python tab={"label":"src/python/project/app.py"}
+import importlib.resources
+
+if __name__ == "__main__":
+    config = importlib.resources.read_text("project", "config.json")
+    print(f"Config: {config}")
+```
+
+```python tab={"label":"src/python/project/BUILD"}
+python_source(
+    name="app",
+    source="app.py",
+    # Pants cannot infer this dependency, so we explicitly add it.
+    dependencies=[":config"],
+)
+
+resource(
+    name="config",
+    source="config.json",
+)
+```
+
+```json tab={"label":"src/python/project/config.json"}
+{ "k1": "v", "k2": "v" }
+```
+
+[Source root](./key-concepts/source-roots.mdx) stripping applies to resources, just as it does for code. In the example above, Python loads the resource named `project/config.json`, rather than `src/python/project/config.json`.
+
+## `files`
+
+A `file` target is for loose files that are copied into the chroot where Pants runs your code. You can then load these files through direct mechanisms like Python's `open()` or Java's `FileInputStream`. The files are not associated with a code package, and must be extracted out of a deployed archive file before they can be loaded.
+
+To reduce boilerplate, the [`files`](../../reference/targets/files.mdx) target generates a `file` target per file in the `sources` field.
+
+For example, to load loose files in Python:
+
+```python tab={"label":"src/python/project/app_test.py"}
+def test_open_file():
+    with open("src/python/project/config.json") as f:
+        content = f.read()
+        assert content == '{ "k1": "v", "k2": "v" }'
+```
+
+```python tab={"label":"src/python/project/BUILD"}
+python_test(
+    name="app_test",
+    source="app_test.py",
+    # Pants cannot infer this dependency, so we explicitly add it.
+    dependencies=[":config"],
+)
+
+file(
+    name="config",
+    source="config.json",
+)
+```
+
+```json tab={"label":"src/python/project/config.json"}
+{ "k1": "v", "k2": "v" }
+```
+
+Note that we open the file with its full path, including the `src/python` prefix.
+
+:::caution `file` targets are not included with binaries like `pex_binary`
+Pants will not include dependencies on `file` / `files` targets when creating binaries like `pex_binary` and `python_aws_lambda_function` via `pants package`.
Filesystem APIs like Python's `open()` are relative to the current working directory, and they would try to read the files from where the binary is executed, rather than reading from the binary itself. +Instead, use `resource` / `resources` targets or an `archive` target. +::: + +## When to use each asset target type + +### When to use `resource` + +Use `resource` / `resources` for files that are associated with (and typically live alongside) the code that loads them. That code's target (e.g. `python_source`) should depend on the `resource` target, ensuring that code and data together are embedded directly in a binary package, such as a wheel, Pex file or AWS Lambda. + +### When to use `file` + +Use `file` / `files` for files that aren't tightly coupled to any specific code, but need to be deployed alongside a binary, such as images served by a web server. + +When writing tests, it is also often more convenient to open a file than to load a resource. + +| | `resource` | `file` | +| :-------------------- | :-------------------------------------------------------------------------------------------------------- | :---------------------------------------------------- | +| **Runtime path** | Relative to source root | Relative to repo root | +| **Loading mechanism** | Language's package loader, relative to package | Language's file loading idioms, relative to repo root | +| **Use with** | Targets that produce binaries, such as `pex_binary`, `python_distribution`, `python_aws_lambda_function`. | `archive` targets, tests | + +## `relocated_files` + +When you use a `file` target, Pants will preserve the path to the files, relative to your build root. For example, the file `src/assets/logo.png` in your repo would be under this same path in the runtime chroot. + +However, you may want to change the path to something else. For example, when creating an `archive` target and setting the `files` field, you might want those files to be placed at a different path in the archive; rather than `src/assets/logo.png`, for example, you might want the file to be at `imgs/logo.png`. + +You can use the `relocated_files` target to change the path used at runtime for the files. Your other targets can then add this target to their `dependencies` field, rather than using the original `files` target: + +```python title="src/assets/BUILD" +# Original file target. +file( + name="logo", + source="logo.png", +) + +# At runtime, the file will be `imgs/logo.png`. +relocated_files( + name="relocated_logo", + files_targets=[":logo"], + src="src/assets", + dest="imgs", +) +``` + +You can use an empty string in the `src` to add to an existing prefix and an empty string in the `dest` to strip an existing prefix. + +If you want multiple different re-mappings for the same original files, you can define multiple `relocated_files` targets. + +The `relocated_files` target only accepts `file` and `files` targets in its `files_targets` field. To relocate where other targets like `resource` and `python_source` show up at runtime, you need to change where that code is located in your repository. + +## `archive`: create a `zip` or `tar` file + +Running `pants package` on an `archive` target will create a zip or tar file with built packages and/or loose files included. This is often useful when you want to create a binary and bundle it with some loose config files. 
+
+For example:
+
+```python title="project/BUILD"
+archive(
+    name="app_with_config",
+    packages=[":app"],
+    files=[":production_config"],
+    format="tar.xz",
+)
+```
+
+The format can be `zip`, `tar`, `tar.xz`, `tar.gz`, or `tar.bz2`.
+
+The `packages` field is a list of targets that can be built using `pants package`, such as `pex_binary`, `python_aws_lambda_function`, and even other `archive` targets. Pants will build the packages as if you had run `pants package`. It will include the results in your archive using the same name they would normally have, but without the `dist/` prefix.
+
+The `files` field is a list of `file`, `files`, and `relocated_files` targets.
+
+You can optionally set the field `output_path` to change the generated archive's name.
diff --git a/versioned_docs/version-2.24/docs/using-pants/command-line-help.mdx b/versioned_docs/version-2.24/docs/using-pants/command-line-help.mdx
new file mode 100644
index 000000000..3c5d15d1c
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/command-line-help.mdx
@@ -0,0 +1,109 @@
+---
+    title: Command line help
+    sidebar_position: 1
+---
+
+How to dynamically get more information on Pants's internals.
+
+---
+
+Run `pants help` to get basic help, including a list of commands you can run to get more specific help:
+
+```text title="Shell"
+❯ pants help
+Pants 2.14.0
+
+Usage:
+
+  pants [options] [goals] [inputs]        Attempt the specified goals on the specified inputs.
+  pants help                              Display this usage message.
+  pants help goals                        List all installed goals.
+  pants help targets                      List all installed target types.
+  pants help subsystems                   List all configurable subsystems.
+  pants help tools                        List all external tools.
+  pants help api-types                    List all plugin API types.
+  pants help global                       Help for global options.
+  pants help-advanced global              Help for global advanced options.
+  pants help [name]                       Help for a target type, goal, subsystem, plugin API type or rule.
+  pants help-advanced [goal/subsystem]    Help for a goal or subsystem's advanced options.
+  pants help-all                          Print a JSON object containing all help info.
+
+  [inputs] can be:
+     A file, e.g. path/to/file.ext
+     A path glob, e.g. '**/*.ext' (in quotes to prevent premature shell expansion)
+     A directory, e.g. path/to/dir
+     A directory ending in `::` to include all subdirectories, e.g. path/to/dir::
+     A target address, e.g. path/to/dir:target_name.
+     Any of the above with a `-` prefix to ignore the value, e.g. -path/to/ignore_me::
+
+Documentation at https://www.pantsbuild.org
+Download at https://pypi.org/pypi/pantsbuild.pants/2.14.0
+```
+
+For example, to get help on the `test` goal:
+
+```text title="Shell"
+$ pants help test
+
+`test` goal options
+-------------------
+
+Run tests.
+
+Config section: [test]
+
+  --[no-]test-debug
+  PANTS_TEST_DEBUG
+  debug
+      default: False
+      current value: False
+      Run tests sequentially in an interactive process. This is necessary, for example, when you add
+      breakpoints to your code.
+
+  --[no-]test-force
+  PANTS_TEST_FORCE
+  force
+      default: False
+      current value: False
+      Force the tests to run, even if they could be satisfied from cache.
+...
+
+Related subsystems: coverage-py, download-pex-bin, pants-releases, pex, pex-binary-defaults, pytest, python-infer, python-native-code, python-repos, python-setup, setup-py-generation, setuptools, source, subprocess-environment
+```
+
+Note that when you run `pants help <goal>`, it outputs all related subsystems, such as `pytest`. You can then run `pants help pytest` to get more information.
+
+You can also run `pants help goals` and `pants help subsystems` to get a list of all activated option scopes.
+
+To get help on the `python_test` target:
+
+```text title="Shell"
+❯ pants help python_test
+
+`python_test` target
+--------------------
+
+A single Python test file, written in either Pytest style or unittest style.
+
+All test util code, including `conftest.py`, should go into a dedicated `python_source` target and then be included in the
+`dependencies` field. (You can use the `python_test_utils` target to generate these `python_source` targets.)
+
+See https://www.pantsbuild.org/v2.8/docs/python-test-goal
+
+Valid fields:
+
+timeout
+    type: int | None
+    default: None
+    A timeout (in seconds) used by each test file belonging to this target.
+
+    This only applies if the option `--pytest-timeouts` is set to True.
+
+...
+```
+
+## Advanced Help
+
+Many options are classified as _advanced_, meaning they are primarily intended to be used by admins, not by regular users.
+
+Use `help-advanced`, e.g. `pants help-advanced global` or `pants help-advanced pytest`.
diff --git a/versioned_docs/version-2.24/docs/using-pants/environments.mdx b/versioned_docs/version-2.24/docs/using-pants/environments.mdx
new file mode 100644
index 000000000..67a64a305
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/environments.mdx
@@ -0,0 +1,269 @@
+---
+    title: "Environments: Cross-Platform or Remote Builds"
+    sidebar_position: 9
+---
+
+---
+
+## Environments
+
+:::caution Environments are currently in `preview`, and have not yet stabilized.
+We'd love your feedback on how Environments could be most useful to you! Please refer to [the tracking issue](https://github.com/pantsbuild/pants/issues/17355) for known stabilization blockers.
+:::
+
+By default, Pants will execute all sandboxed build work directly on localhost. But defining and using additional "environments" for particular targets allows Pants to transparently execute some or all of your build either:
+
+1. locally in Docker containers
+2. remotely via [remote execution](./remote-caching-and-execution/remote-execution.mdx)
+3. locally, but with a non-default set of environment variables and settings (such as when different platforms need different values, or when cross-building)
+4. locally, but with execution performed in the workspace / repository and not an execution sandbox (with the trade-off that you must be cognizant of build reproducibility)
+
+### Defining environments
+
+Environments are defined using environment targets:
+
+- [`local_environment`](../../reference/targets/local_environment.mdx) - Runs without containerization on localhost (which is also the default if no environment targets are defined).
+- [`docker_environment`](../../reference/targets/docker_environment.mdx) - Runs in a cached container, using the specified Docker image and a local installation of Docker. If the image does not already exist locally, it will be pulled.
+- [`remote_environment`](../../reference/targets/remote_environment.mdx) - Runs in a remote worker via [remote execution](./remote-caching-and-execution/remote-execution.mdx) (possibly with containerization, depending on the server implementation).
+
+Give your environment targets short, descriptive names using the [`[environments-preview.names]` option](../../reference/subsystems/environments-preview.mdx#names) (usually defined in `pants.toml`), which consuming targets use to refer to them in `BUILD` files.
That might look like a `pants.toml` section and `BUILD` file (at the root of the repository in this case) containing:
+
+```toml tab={"label":"pants.toml"}
+[environments-preview.names]
+linux = "//:local_linux"
+linux_docker = "//:local_busybox"
+```
+
+```python tab={"label":"BUILD"}
+local_environment(
+    name="local_linux",
+    compatible_platforms=["linux_x86_64"],
+    fallback_environment="local_busybox",
+    ..
+)
+
+docker_environment(
+    name="local_busybox",
+    platform="linux_x86_64",
+    image="busybox:latest@sha256:abcd123...",
+    ..
+)
+```
+
+:::caution Environment targets and macros
+Environment targets are loaded before regular targets in a bootstrap phase, during which macros are unavailable. As such, any required field values must be fully defined in the `BUILD` file without referencing any macros. For optional fields, the use of macros is also discouraged: it may or may not work, and even if it currently works, Pants makes no guarantee that it will not break in a future version.
+:::
+
+#### Environment-aware options
+
+Environment targets have fields ([target](./key-concepts/targets-and-build-files.mdx) arguments) that correspond to [options](./key-concepts/options.mdx) that are marked "environment-aware". When an option is environment-aware, the value used in a given environment can be overridden by setting the corresponding field value on the associated environment target. If an environment target does not set a value, the option defaults to the value set globally via options values.
+
+For example, the [`[python-bootstrap].search_path` option](../../reference/subsystems/python-bootstrap.mdx#search_path) is environment-aware, which is indicated in its help. It can be overridden for a particular environment by a corresponding environment target field, such as [the one on `local_environment`](../../reference/targets/local_environment.mdx#python_bootstrap_search_path).
+
+:::tip See an option which should be environment-aware, but isn't?
+Environments are a new concept: if you see an option value which should be marked environment-aware but isn't, please definitely [file an issue](https://github.com/pantsbuild/pants/issues/new/choose)!
+:::
+
+### Consuming environments
+
+To declare which environment they should build with, many target types (but particularly "root" targets like tests or binaries) have an `environment=` field: for example, [`python_tests(environment=..)`](../../reference/targets/python_tests.mdx#environment).
+
+The `environment=` field may either:
+
+1. refer to an environment by name
+2. use one of the following special environment names to select a matching environment (see "Environment matching" below):
+   1. `__local__` resolves to any matching `local_environment`
+   2. `__local_workspace__` resolves to any matching `experimental_workspace_environment`
+
+:::caution Environment compatibility
+Currently, there is no static validation that a target's environment is compatible with its dependencies' environments -- only the implicit validation of the goals that you run successfully against those targets (`check`, `lint`, `test`, `package`, etc).
+
+As we gain more experience with how environments are used in the wild, it's possible that more static validation can be added: your feedback would be very welcome!
+:::
+
+#### Setting the environment on many targets at once
+
+To use an environment everywhere in your repository (or only within a particular subdirectory, or with a particular target
+type), you can use the [`__defaults__` builtin](./key-concepts/targets-and-build-files.mdx#field-default-values). For example, to use an environment named `my_default_environment` globally by default, you would add the following to a `BUILD` file at the root of the repository:
+
+```python title="BUILD"
+__defaults__(all=dict(environment="my_default_environment"))
+```
+
+... and individual targets could override the default as needed.
+
+#### Building one target in multiple environments
+
+If a target will always need to be built in multiple environments (rather than conditionally based on which user is building it: see the "Toggle use of an environment for some consumers" section), then you can use the [`parametrize` builtin](./key-concepts/targets-and-build-files.mdx#parametrizing-targets) for the `environment=` field. If you had two environments named `linux` and `macos`, that would look like:
+
+```python title="BUILD"
+pex_binary(
+    name="bin",
+    environment=parametrize("linux", "macos"),
+)
+```
+
+#### Environment matching
+
+A single environment name may end up referring to different environment targets on different physical machines, or with different global settings applied: this is known as environment "matching".
+
+- `local_environment` targets will match if their `compatible_platforms=` field matches localhost's platform.
+- `docker_environment` targets will match [if Docker is enabled](../../reference/global-options.mdx#docker_execution), and if their `platform=` field is compatible with localhost's platform.
+- `remote_environment` targets will match [if Remote execution is enabled](../../reference/global-options.mdx#remote_execution).
+- `experimental_workspace_environment` targets will match if their `compatible_platforms=` field matches localhost's platform.
+
+If a particular environment target _doesn't_ match (other than `experimental_workspace_environment` targets), it can configure a `fallback_environment=` which will be attempted next. This allows for forming preference chains, which are referred to by whichever environment name is at the head of the chain. This does not apply to `experimental_workspace_environment` targets because in-workspace execution differs significantly from execution in the other environments due to the lack of an execution sandbox.
+
+For example: a chain like "prefer remote execution if enabled, but fall back to local execution if the platform matches, otherwise use docker" might be configured via the targets:
+
+```python title="BUILD"
+remote_environment(
+    name="remote",
+    fallback_environment="local",
+    ..
+)
+
+local_environment(
+    name="local",
+    compatible_platforms=["linux_x86_64"],
+    fallback_environment="docker",
+)
+
+docker_environment(
+    name="docker",
+    ..
+)
+```
+
+In future versions, environment targets will gain additional predicates to control whether they match (for example, `local_environment` will likely gain a predicate that looks for the [presence or value of an environment variable](https://github.com/pantsbuild/pants/issues/17107)). In the meantime, it's possible to override which environments are matched for particular use cases by overriding their configured names: see the "Toggle use of an environment" workflow below for an example.
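+
+As a small sketch tying names, matching, and environment-aware options together, the following `local_environment` overrides the `[python-bootstrap].search_path` option mentioned above for anything that runs in it (the target name and the search path value here are illustrative assumptions):
+
+```python title="BUILD"
+local_environment(
+    name="hermetic_local",
+    compatible_platforms=["linux_x86_64"],
+    # Environment-aware option override: applies only to processes
+    # that run in this environment.
+    python_bootstrap_search_path=["/opt/python/bin"],
+)
+```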
+
+### Example workflows
+
+#### Enabling remote execution globally
+
+`remote_environment` targets match unless the [`--remote-execution`](../../reference/global-options.mdx#remote_execution) option is disabled. So to cause a particular environment name to use remote execution whenever it is enabled, you could define environment targets which try remote execution first, and then fall back to local execution:
+
+```python title="BUILD"
+remote_environment(
+    name="remote_busybox",
+    platform="linux_x86_64",
+    extra_platform_properties=["container-image=busybox:latest"],
+    fallback_environment="local",
+)
+
+local_environment(
+    name="local",
+    compatible_platforms=[...],
+)
+```
+
+You'd then give your `remote_environment` target an unassuming name like "default":
+
+```toml title="pants.toml"
+[environments-preview.names]
+default = "//:remote_busybox"
+local = "//:local"
+```
+
+... and use that environment by default with all targets. Users or consumers like CI could then toggle whether remote execution is used by setting `--remote-execution`.
+
+:::caution Speculation of remote execution
+The `2.15.x` series of Pants does not yet support ["speculating" remote execution](https://github.com/pantsbuild/pants/issues/8353) by racing it against another environment (usually local or docker). While we expect that this will be necessary to make remote execution a viable option on users' laptops (where network connections are less reliable), it is less critical for CI use cases.
+:::
+
+#### Use a `docker_environment` to build the inputs to a `docker_image`
+
+To build a `docker_image` target containing a `pex_binary` which uses native (i.e. compiled) dependencies on a `macOS` machine, you can configure the `pex_binary` to be built in a `docker_environment`.
+
+You'll need a `docker_environment` which uses an image containing the relevant build-time requirements of your PEX. At a minimum, you'll need Python itself:
+
+```python title="BUILD"
+docker_environment(
+    name="python_bullseye",
+    platform="linux_x86_64",
+    image="python:3.9.14-slim-bullseye@sha256:abcd123...",
+    ..
+)
+```
+
+Next, mark your `pex_binary` target with this environment (with the name `python_bullseye`: see "Defining environments" above), and define a `docker_image` target depending on it.
+
+```python title="BUILD"
+pex_binary(
+    name="main",
+    environment="python_bullseye",
+)
+
+docker_image(
+    name="docker_image",
+    instructions=[
+        "FROM python:3.9.14-slim-bullseye@sha256:abcd123...",
+        'ENTRYPOINT ["/main"]',
+        "COPY examples/main.pex /main",
+    ],
+)
+```
+
+:::tip Compatibility of `docker_environment` and `docker_image`
+Note that the Docker image used in your `docker_environment` does not need to match the base image of the `docker_image` targets that consume them: they only need to be compatible. This is because execution of build steps in a `docker_environment` occurs in an anonymous container, and only the required inputs are provided to the `docker_image` build.
+
+This means that your `docker_environment` can include things like compilers or other tools relevant to your build, without needing to manually use multi-stage Docker builds.
+:::
+
+#### Toggle use of an environment for some consumers
+
+As mentioned above in "Environment matching", environment targets "match" based on their field values and global options.
But if two environment targets would be ambiguous in some cases, or if you'd otherwise like to control what a particular environment name means (in CI, for example), you can override an environment name via options.
+
+For example: if you'd like to use a particular `macOS` environment target locally, but override it for a particular use case in CI, you'd start by defining two `local_environment` targets which would usually match ambiguously:
+
+```python title="BUILD"
+local_environment(
+    name="macos_laptop",
+    compatible_platforms=["macos_x86_64"],
+)
+
+local_environment(
+    name="macos_ci",
+    compatible_platforms=["macos_x86_64"],
+)
+```
+
+... and then assign one of them a (generic) environment name in `pants.toml`:
+
+```toml title="pants.toml"
+[environments-preview.names]
+macos = "//:macos_laptop"
+...
+```
+
+You could then _override_ that name definition in `pants.ci.toml` (note the use of the `.add` suffix, in order to preserve any other named environments):
+
+```toml title="pants.ci.toml"
+[environments-preview.names.add]
+macos = "//:macos_ci"
+```
+
+### In-Workspace Execution (`experimental_workspace_environment`)
+
+The `experimental_workspace_environment` target type configures a special "workspace" environment in which build actions are invoked in the repository / workspace instead of an execution sandbox, as would be the case with `local_environment` executions.
+
+The primary motivation for this feature is to better support integration with third-party build orchestration tools (for example, Bazel) which may not operate properly when not invoked in the repository (including, in some cases, incurring significant performance penalties).
+
+:::caution Caching and reproducibility
+
+Pants' caching relies on all processes being reproducible based solely on inputs in the repository.
+Processes executed in a workspace environment can easily read unexpected files that aren't specified as a dependency.
+Thus, Pants puts the burden on you, the Pants user, to ensure that a process's output depends only on its specified input files and that the process doesn't read anything else.
+
+If a process isn't reproducible, re-running a build from the same source code could fail unexpectedly, or give different output from an earlier build.
+
+You should use the `workspace_invalidation_sources` field available on the `adhoc_tool` and `shell_command` target types to inform Pants of what files should cause re-execution of the target's process if they change.
+
+:::
+
+The special environment name `__local_workspace__` can be used to select a matching `experimental_workspace_environment` based on its `compatible_platforms` attribute.
+
+There is no `fallback_environment=` attribute on `experimental_workspace_environment` targets because in-workspace execution differs significantly from
+the other environments due to the lack of an execution sandbox.
+
+Also, workspace environments change how the `output_files` and `output_directories` fields are interpreted for the `adhoc_tool` and `shell_command` target types. For most invoked processes, Pants will interpret `output_files` and `output_directories` as paths relative to the configured working directory from the `workdir` field. This is fine for most executions because they run in a sandbox environment and the base for capturing outputs is exactly the same as the working directory for the invoked process.
For in-workspace executions, however, this interpretation is not correct, because the base for capturing outputs is _not_ the same as the working directory of the invoked process. Specifically, in-workspace executions capture outputs from the root of the temporary sandbox directory used during execution, and not from the working directory in the workspace.
diff --git a/versioned_docs/version-2.24/docs/using-pants/generating-version-tags-from-git.mdx b/versioned_docs/version-2.24/docs/using-pants/generating-version-tags-from-git.mdx
new file mode 100644
index 000000000..143f29ee8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/generating-version-tags-from-git.mdx
@@ -0,0 +1,77 @@
+---
+    title: Generating version tags from Git
+    sidebar_position: 10
+---
+
+---
+
+Pants can generate a version string based on Git state that you can use during builds.
+
+## Enabling the Python backend
+
+The implementation of this feature relies on a third-party [Python library](https://github.com/pypa/setuptools_scm),
+so to use it you will need to activate the Python backend and its "experimental" extension, even if
+you're not otherwise using Python in your repo:
+
+```toml title="pants.toml"
+backend_packages.add = [
+  ...
+  "pants.backend.python",
+  "pants.backend.experimental.python",
+  ...
+]
+```
+
+## The `vcs_version` target
+
+To utilize this functionality, you first create a `vcs_version` target in some BUILD file, e.g.:
+
+```python title="src/foo/BUILD"
+vcs_version(
+    name="version",
+    generate_to="src/foo/version.py",
+    template='version = "{version}"',
+)
+```
+
+When you test, run or package any code that depends (directly or indirectly) on this target,
+Pants will compute a version string based on the current Git state, and then generate content
+using the specified `template` (with `{version}` serving as a placeholder for the version string)
+at the path specified by `generate_to`.
+
+This content will be available at test and run time, and be packaged along with your
+code and its dependencies. Your code can use the generated content (e.g., by importing
+it or reading it) and thus have access to a dynamically-generated version.
+
+Note that, similarly to how Pants handles other code generation tools (such as [Protobuf](../python/integrations/protobuf-and-grpc.mdx)),
+this content is not written out as a file into your source tree. Instead, the content is materialized
+on the fly, as needed. If you want to inspect the generated contents manually, you can use the
+`export-codegen` goal:
+
+```shell
+pants export-codegen src/foo:version
+```
+
+## Using the generated version
+
+Code that depends on the `vcs_version` target can import and use the generated file:
+
+```python title="src/util.py"
+from version import version
+...
+```
+
+In this example we generated a Python source file, but you can generate code in any language,
+or even generate a text file for some code to load at runtime.
+
+In fact, in the Python case you don't even need an explicit dependency on the `vcs_version` target, as
+one will be inferred from the import. This does not work (yet) for other languages, so those will
+require an explicit dependency.
+
+## The generated version string
+
+Pants delegates the version string computation to [setuptools_scm](https://github.com/pypa/setuptools_scm).
+See [here](https://setuptools-scm.readthedocs.io/en/latest/usage/#default-versioning-scheme) for how it computes the version string from the Git state.
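+
+For example, with the default scheme, building at a commit tagged `1.2.3` yields the version `1.2.3`, while building four commits after that tag yields something like `1.2.4.dev4+g1a2b3c4` (the hash suffix here is illustrative; the exact local-version component depends on the Git state).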
+ +We don't yet support any of the configuration options that control how the string is computed. Please +[let us know](/community/getting-help) if you need such advanced functionality. diff --git a/versioned_docs/version-2.24/docs/using-pants/key-concepts/_category_.json b/versioned_docs/version-2.24/docs/using-pants/key-concepts/_category_.json new file mode 100644 index 000000000..c72aa4ca5 --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/key-concepts/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Key concepts", + "position": 1 +} diff --git a/versioned_docs/version-2.24/docs/using-pants/key-concepts/backends.mdx b/versioned_docs/version-2.24/docs/using-pants/key-concepts/backends.mdx new file mode 100644 index 000000000..66192148c --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/key-concepts/backends.mdx @@ -0,0 +1,122 @@ +--- + title: Backends + sidebar_position: 3 +--- + +How to enable specific functionality. + +--- + +Most Pants functionality is opt-in: you enable it by adding the relevant _backend_ to the `[GLOBAL].backend_packages` option in `pants.toml`. For example: + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.shell", + "pants.backend.python", + "pants.backend.python.lint.black", +] +``` + +## Available stable backends + +This list is also available via `pants help backends`, which includes any additional plugins in your repository that aren't built-in to Pants itself. + +| Backend | What it does | Docs | +| :--- | :--- | :--- | +| `pants.backend.build_files.fmt.black` | Enables autoformatting `BUILD` files using `black`. | | +| `pants.backend.build_files.fmt.buildifier` | Enables autoformatting `BUILD` files using `buildifier`. | | +| `pants.backend.build_files.fmt.ruff` | Enables autoformatting `BUILD` files using `ruff`. | | +| `pants.backend.build_files.fmt.yapf` | Enables autoformatting `BUILD` files using `yapf`. | | +| `pants.backend.awslambda.python` | Enables generating an AWS Lambda zip file from Python code. | [AWS Lambda](../../python/integrations/aws-lambda.mdx) | +| `pants.backend.codegen.protobuf.lint.buf` | Activates the Buf formatter and linter for Protocol Buffers. | [Protobuf](../../python/integrations/protobuf-and-grpc.mdx) | +| `pants.backend.codegen.protobuf.python` | Enables generating Python from Protocol Buffers. Includes gRPC support. | [Protobuf and gRPC](../../python/integrations/protobuf-and-grpc.mdx) | +| `pants.backend.codegen.thrift.apache.python` | Enables generating Python from Apache Thrift. | [Thrift](../../python/integrations/thrift.mdx) | +| `pants.backend.docker` | Enables building, running, and publishing Docker images. | [Docker overview](../../docker/index.mdx) | +| `pants.backend.docker.lint.hadolint` | Enables Hadolint, a Docker linter: [https://github.com/hadolint/hadolint](https://github.com/hadolint/hadolint) | [Docker overview](../../docker/index.mdx) | +| `pants.backend.google_cloud_function.python` | Enables generating a Google Cloud Function from Python code. | [Google Cloud Function](../../python/integrations/google-cloud-functions.mdx) | +| `pants.backend.plugin_development` | Enables `pants_requirements` target. | [Plugins overview](../../writing-plugins/overview.mdx) | +| `pants.backend.python` | Core Python support.
| [Enabling Python support](../../python/overview/enabling-python-support.mdx) | +| `pants.backend.python.mixed_interpreter_constraints` | Adds the `py-constraints` goal for insights on Python interpreter constraints. | [Interpreter compatibility](../../python/overview/interpreter-compatibility.mdx) | +| `pants.backend.python.lint.autoflake` | Enables Autoflake, which removes unused Python imports: [https://pypi.org/project/autoflake/](https://pypi.org/project/autoflake/) | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.bandit` | Enables Bandit, the Python security linter: [https://bandit.readthedocs.io/en/latest/](https://bandit.readthedocs.io/en/latest/). | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.black` | Enables Black, the Python autoformatter: [https://black.readthedocs.io/en/stable/](https://black.readthedocs.io/en/stable/). | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.docformatter` | Enables Docformatter, the Python docstring autoformatter: [https://github.com/myint/docformatter](https://github.com/myint/docformatter). | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.flake8` | Enables Flake8, the Python linter: [https://flake8.pycqa.org/en/latest/](https://flake8.pycqa.org/en/latest/). | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.isort` | Enables isort, the Python import autoformatter: [https://timothycrosley.github.io/isort/](https://timothycrosley.github.io/isort/). | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.pydocstyle` | Enables pydocstyle, a Python docstring convention checker: [https://www.pydocstyle.org/](https://www.pydocstyle.org/) | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.pylint` | Enables Pylint, the Python linter: [https://www.pylint.org](https://www.pylint.org) | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.pyupgrade` | Enables Pyupgrade, which upgrades to new Python syntax: [https://pypi.org/project/pyupgrade/](https://pypi.org/project/pyupgrade/) | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.lint.yapf` | Enables Yapf, the Python formatter: [https://pypi.org/project/yapf/](https://pypi.org/project/yapf/) | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.python.typecheck.mypy` | Enables MyPy, the Python type checker: [https://mypy.readthedocs.io/en/stable/](https://mypy.readthedocs.io/en/stable/). | [typecheck](../../python/goals/check.mdx) | +| `pants.backend.shell` | Core Shell support, including shUnit2 test runner. | [Shell overview](../../shell/index.mdx) | +| `pants.backend.shell.lint.shfmt` | Enables shfmt, a Shell autoformatter: [https://github.com/mvdan/sh](https://github.com/mvdan/sh). | [Shell overview](../../shell/index.mdx) | +| `pants.backend.shell.lint.shellcheck` | Enables Shellcheck, a Shell linter: [https://www.shellcheck.net/](https://www.shellcheck.net/). 
| [Shell overview](../../shell/index.mdx) | +| `pants.backend.tools.preamble` | Enables "preamble", a Pants fixer for copyright headers and shebang lines | [`preamble`](../../../reference/subsystems/preamble.mdx) | +| `pants.backend.tools.taplo` | Enables Taplo, a TOML autoformatter: [https://taplo.tamasfe.dev](https://taplo.tamasfe.dev) | | +| `pants.backend.url_handlers.s3` | Enables accessing S3 via credentials in `file(source=http_source(...))` | | + +## Available experimental backends + +Pants offers [additional backends as previews](https://blog.pantsbuild.org/quick-feedback-on-new-features-via-experimental-backends/) that are still in development. These backends may still undergo major changes to improve the interface or fix bugs, with fewer (or no) deprecation warnings. If any of these backends are relevant to you, please try them, [ask any questions](/community/getting-help) you have, and [contribute improvements](../../contributions/index.mdx)! Volunteers like you jumping in to help is how these backends are promoted from preview to fully stable. + +The list of all backends (both stable and experimental) is also available via `pants help-advanced backends`, which includes any additional plugins in your repository that aren't built-in to Pants itself. + +| Backend | What it does | Docs | +| :--- | :--- | :--- | +| `pants.backend.experimental.adhoc` | Enables support for executing arbitrary runnable targets. | [Integrating new tools without plugins](../../ad-hoc-tools/integrating-new-tools-without-plugins.mdx) | +| `pants.backend.experimental.bsp` | Enables core Build Server Protocol ("BSP") support. | | +| `pants.backend.experimental.cc` | Enables core C and C++ support. | | +| `pants.backend.experimental.cc.lint.clangformat` | Enables clang-format, a C and C++ autoformatter: [https://clang.llvm.org/docs/ClangFormat.html](https://clang.llvm.org/docs/ClangFormat.html) | | +| `pants.backend.experimental.codegen.avro.java` | Enables generating Java from Avro | | +| `pants.backend.experimental.codegen.protobuf.go` | Enables generating Go from Protocol Buffers. | | +| `pants.backend.experimental.codegen.protobuf.java` | Enables generating Java from Protocol Buffers. | | +| `pants.backend.experimental.codegen.protobuf.scala` | Enables generating Scala from Protocol Buffers. | | +| `pants.backend.experimental.codegen.thrift.apache.java` | Enables generating Java from Thrift using the Apache Thrift generator. | | +| `pants.backend.experimental.codegen.thrift.scrooge.java` | Enables generating Java from Thrift using the Scrooge Thrift IDL compiler. | | +| `pants.backend.experimental.codegen.thrift.scrooge.scala` | Enables generating Scala from Thrift using the Scrooge Thrift IDL compiler. | | +| `pants.backend.experimental.cue` | Enables core Cue support: [https://cuelang.org/](https://cuelang.org/) | | +| `pants.backend.experimental.debian` | Enables support for packaging `.deb` files via `dpkg-deb` | | +| `pants.backend.experimental.go` | Enables core Go support.
| [Go overview](../../go/index.mdx) | +| `pants.backend.experimental.go.debug_goals` | Enables additional goals for introspecting Go targets | [Go overview](../../go/index.mdx) | +| `pants.backend.experimental.go.lint.golangci_lint` | Enables golangci-lint, a Go linter: [https://golangci-lint.run](https://golangci-lint.run) | [Go overview](../../go/index.mdx) | +| `pants.backend.experimental.go.lint.vet` | Enables support for running `go vet` | [Go overview](../../go/index.mdx) | +| `pants.backend.experimental.helm` | Enables core Helm support: [https://helm.sh](https://helm.sh) | [Helm overview](../../helm/index.mdx) | +| `pants.backend.experimental.helm.check.kubeconform` | Enables Kubeconform, a fast Kubernetes manifest validator: [https://github.com/yannh/kubeconform](https://github.com/yannh/kubeconform) | [Helm overview](../../helm/index.mdx) | +| `pants.backend.experimental.java` | Enables core Java support. | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.java.bsp` | Enables Java-specific support for Build Server Protocol | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.java.debug_goals` | Enables additional goals for introspecting Java targets | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.java.lint.google_java_format` | Enables Google Java Format. | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.javascript` | Enables core JavaScript support. | | +| `pants.backend.experimental.javascript.lint.prettier` | Enables Prettier, a JavaScript (and more) autoformatter: [https://prettier.io](https://prettier.io) | | +| `pants.backend.experimental.kotlin` | Enables core Kotlin support | [Kotlin](../../jvm/kotlin.mdx) | +| `pants.backend.experimental.kotlin.debug_goals` | Enables additional goals for introspecting Kotlin targets | [Kotlin](../../jvm/kotlin.mdx) | +| `pants.backend.experimental.kotlin.lint.ktlint` | Enables ktlint, an anti-bikeshedding linter with built-in formatter: [https://pinterest.github.io/ktlint/](https://pinterest.github.io/ktlint/) | [Kotlin](../../jvm/kotlin.mdx) | +| `pants.backend.experimental.nfpm` | Enables support for building `apk`, `archlinux`, `deb`, and `rpm` packages via [`nFPM`](https://nfpm.goreleaser.com/) | [`nfpm`](../../../reference/subsystems/nfpm.mdx) | +| `pants.backend.experimental.openapi` | Enables core OpenAPI support: [https://swagger.io/specification/](https://swagger.io/specification/) | [`openapi`](../../../reference/subsystems/openapi.mdx) | +| `pants.backend.experimental.openapi.codegen.java` | Enables generating Java from OpenAPI | | +| `pants.backend.experimental.openapi.lint.openapi_format` | Enables openapi-format: [https://github.com/thim81/openapi-format](https://github.com/thim81/openapi-format) | | +| `pants.backend.experimental.openapi.lint.spectral` | Enables spectral: [https://github.com/stoplightio/spectral](https://github.com/stoplightio/spectral) | [`spectral`](../../../reference/subsystems/spectral.mdx) | +| `pants.backend.experimental.python` | Enables experimental rules for Python | | +| `pants.backend.experimental.python.framework.django` | Enables better support for projects using Django: [https://www.djangoproject.com](https://www.djangoproject.com) | | +| `pants.backend.experimental.python.framework.stevedore` | Enables better support for projects using stevedore: [https://docs.openstack.org/stevedore/](https://docs.openstack.org/stevedore/) | | +| 
`pants.backend.experimental.python.lint.add_trailing_comma` | Enables add-trailing-comma, a Python code formatter: [https://github.com/asottile/add-trailing-comma](https://github.com/asottile/add-trailing-comma) | [`add-trailing-comma`](../../../reference/subsystems/add-trailing-comma.mdx) | +| `pants.backend.experimental.python.lint.ruff.check` | Enables Ruff (for `lint`), an extremely fast Python linter: [https://docs.astral.sh/ruff/linter/](https://docs.astral.sh/ruff/linter/) | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.experimental.python.lint.ruff.format` | Enables Ruff (for `fmt`), an extremely fast Python code formatter: [https://docs.astral.sh/ruff/formatter/](https://docs.astral.sh/ruff/formatter/) | [Linters and formatters](../../python/overview/linters-and-formatters.mdx) | +| `pants.backend.experimental.python.packaging.pyoxidizer` | Enables `pyoxidizer_binary` target. | [PyOxidizer](../../python/integrations/pyoxidizer.mdx) | +| `pants.backend.experimental.python.typecheck.pyright` | Enables Pyright, a Python type checker: [https://github.com/microsoft/pyright](https://github.com/microsoft/pyright) | | +| `pants.backend.experimental.python.typecheck.pytype` | Enables Pytype, a Python type checker: [https://google.github.io/pytype/](https://google.github.io/pytype/) | | +| `pants.backend.experimental.rust` | Enables core Rust support. | | +| `pants.backend.experimental.scala` | Enables core Scala support. | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.scala.bsp` | Enables Scala-specific support for Build Server Protocol | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.scala.debug_goals` | Enables additional goals for introspecting Scala targets | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.scala.lint.scalafmt` | Enables the Scalafmt formatter. | [Java & Scala overview](../../jvm/java-and-scala.mdx) | +| `pants.backend.experimental.swift` | Enables core Swift support. | | +| `pants.backend.experimental.terraform` | Enables core Terraform support. 
| | +| `pants.backend.experimental.terraform.lint.tfsec` | Enables tfsec, for static analysis of Terraform: [https://aquasecurity.github.io/tfsec/](https://aquasecurity.github.io/tfsec/) | | +| `pants.backend.experimental.tools.semgrep` | Enables semgrep, a fast multi-language static analysis engine: [https://semgrep.dev](https://semgrep.dev) | [`semgrep`](../../../reference/subsystems/semgrep.mdx) | +| `pants.backend.experimental.tools.workunit_logger` | Enables the workunit logger for debugging Pants itself | [`workunit-logger`](../../../reference/subsystems/workunit-logger.mdx) | +| `pants.backend.experimental.tools.yamllint` | Enables yamllint, a linter for YAML files: [https://yamllint.readthedocs.io/](https://yamllint.readthedocs.io/) | [`yamllint`](../../../reference/subsystems/yamllint.mdx) | +| `pants.backend.experimental.visibility` | Enables `__dependencies_rules__` and `__dependents_rules__` | [Visibility](../validating-dependencies) | +| `pants.backend.python.providers.experimental.pyenv` | Enables Pants to manage appropriate Python interpreters via pyenv | | +| `pants.backend.python.providers.experimental.pyenv.custom_install` | Enables customizing how the pyenv provider builds a Python interpreter | | diff --git a/versioned_docs/version-2.24/docs/using-pants/key-concepts/goals.mdx b/versioned_docs/version-2.24/docs/using-pants/key-concepts/goals.mdx new file mode 100644 index 000000000..bcb81c90e --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/key-concepts/goals.mdx @@ -0,0 +1,121 @@ +--- + title: Goals + sidebar_position: 0 +--- + +The commands Pants runs. + +--- + +Pants commands are known as _goals_, such as `test` and `lint`. + +To see the current list of goals, run: + +```bash +❯ pants help goals +``` + +You'll see more goals activated as you activate more [backends](./backends.mdx). + +## Running goals + +For example: + +``` +❯ pants count-loc project/app_test.py +─────────────────────────────────────────────────────────────────────────────── +Language Files Lines Blanks Comments Code Complexity +─────────────────────────────────────────────────────────────────────────────── +Python 1 374 16 19 339 6 +─────────────────────────────────────────────────────────────────────────────── +Total 1 374 16 19 339 6 +─────────────────────────────────────────────────────────────────────────────── +``` + +You can also run multiple goals in a single run of Pants, in which case they will run sequentially: + +```bash +# Format all code, and then test it: +❯ pants fmt test :: +``` + +Finally, Pants supports running goals in a `--loop`. In this mode, all goals specified will run sequentially, and then Pants will wait until a relevant file has changed to try running them again. + +```bash +# Re-run linters and testing continuously as files or their dependencies change: +❯ pants --loop lint test project/app_test.py +``` + +Use `Ctrl+C` to exit the `--loop`. + +## Goal arguments + +Most goals require arguments to know what to work on.
+ +You can use several argument types: + +| Argument type | Semantics | Example | +| ------------------------------------------------- | ------------------------------------------- | ----------------------------- | +| File path | Match the file | `pants test project/tests.py` | +| Directory path | Match everything in the directory | `pants test project/utils` | +| `::` globs | Match everything in the directory and below | `pants test project::` | +| [Target addresses](./targets-and-build-files.mdx) | Match the target | `pants package project:tests` | + +You can combine argument types, e.g. `pants fmt src/go:: src/py/app.py`. + +You can address targets from the root of the repository by using plain `::` and `:`. For example, `pants package ::` would produce artifacts for all packages declared in the whole repository and `pants package :` would produce artifacts only for those packages that are declared in the root directory of the repository. + +To ignore something, prefix the argument with `-`. For example, `pants test :: -project/integration_tests` will run all your tests except for those in the directory `project/integration_tests` and `pants package project:: -project:` will package all targets in the subdirectories of the `project` directory, recursively, except for those declared directly under the `project` directory. + +:::note Tip: advanced target selection, such as running over changed files +See [Advanced target selection](../advanced-target-selection.mdx) for alternative techniques to specify which files/targets to run on. +::: + +### Goal options + +Many goals also have [options](./options.mdx) to change how they behave. Every option in Pants can be set via an environment variable, config file, and the command line. + +To see if a goal has any options, run `pants help $goal` or `pants help-advanced $goal`. See [Command Line Help](/community/getting-help) for more information. + +For example: + +``` +❯ pants help test +17:20:14.24 [INFO] Remote cache/execution options updated: reinitializing scheduler... +17:20:15.36 [INFO] Scheduler initialized. + +`test` goal options +------------------- + +Run tests. + +Config section: [test] + + --[no-]test-debug + PANTS_TEST_DEBUG + debug + default: False + current value: False + Run tests sequentially in an interactive process. This is necessary, for example, when you + add breakpoints to your code. + +... 
+``` + +You can then use the option by prefixing it with the goal name: + +```bash +pants --test-debug test project/app_test.py +``` + +You can also put the option after the file/target arguments: + +```bash +pants test project/app_test.py --test-debug +``` + +As a shorthand, if you put the option after the goal and before the file/target arguments, you can leave off the goal name in the flag: + +```bash +pants test --debug project/app_test.py +``` diff --git a/versioned_docs/version-2.24/docs/using-pants/key-concepts/index.mdx b/versioned_docs/version-2.24/docs/using-pants/key-concepts/index.mdx new file mode 100644 index 000000000..a5cecf353 --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/key-concepts/index.mdx @@ -0,0 +1,12 @@ +--- + title: Key concepts + sidebar_position: 0 +--- + +--- + +- [Goals](./goals.mdx) +- [Targets and BUILD files](./targets-and-build-files.mdx) +- [Options](./options.mdx) +- [Backends](./backends.mdx) +- [Source roots](./source-roots.mdx) diff --git a/versioned_docs/version-2.24/docs/using-pants/key-concepts/options.mdx b/versioned_docs/version-2.24/docs/using-pants/key-concepts/options.mdx new file mode 100644 index 000000000..7abc74ff8 --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/key-concepts/options.mdx @@ -0,0 +1,400 @@ +--- + title: Options + sidebar_position: 2 +--- + +A deep dive into how options may be configured. + +--- + +## Option scopes + +Options are partitioned into named _scopes_. + +Some system-wide options belong in the _global scope_. For example, the `--level` option, which controls the logging level, is in the global scope. + +Other options belong to a _subsystem scope_. A _subsystem_ is simply a collection of related options, in a scope. For example, the `pytest` subsystem contains options related to [Python's test framework pytest](../../../reference/subsystems/pytest.mdx). + +## Setting options + +Every option can be set in the following ways, in order of precedence: + +1. Via a command line flag. +2. In an environment variable. +3. In a config file (`pants.toml`). + +If an option isn't set in one of these ways, it will take on a default value. + +You can inspect both the current value and the default value by using `pants help $scope` or `pants help-advanced $scope`, e.g. `pants help global`. + +### Command-line flags + +Global options are set using an unqualified flag: + +```bash +pants --level=debug ... +``` + +Subsystem options are set by providing the flag, with the name prefixed with the lower-case scope name and a dash. So for the option `--root-patterns` in the scope `source`: + +```bash +pants --source-root-patterns="['^ext']" +``` + +### Environment variables + +Global options are set using the environment variable `PANTS_{OPTION_NAME}`: + +```bash +PANTS_LEVEL=debug pants ... +``` + +Subsystem options are set using the environment variable +`PANTS_{SCOPE}_{OPTION_NAME}`: + +```bash +PANTS_SOURCE_ROOT_PATTERNS="['^ext']" pants ... +``` + +Note that the scope and option name are upper-cased, and any dashes in the option flag name are converted to underscores: `--multiword-name` becomes `MULTIWORD_NAME`. 
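+ +For instance, to turn off the global `--dynamic-ui` option (note the dash becoming an underscore): + +```bash +PANTS_DYNAMIC_UI=false pants ... +```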
+ +### Config file entries + +Global options are set in the `GLOBAL` section of the config file: + +```toml title="pants.toml" +[GLOBAL] +level = "debug" +``` + +Subsystem options are set in the section named for their scope: + +```toml title="pants.toml" +[source] +root_patterns = ["/src/python"] +``` + +Note that any dashes in the option flag name are converted to underscores: `--multiword-name` becomes `multiword_name`. + +#### Config file interpolation + +A string value in a config file can contain placeholders of the form `%(key)s`, which will be replaced with a corresponding value. The `key` can be one of: + +- A string-valued option in the DEFAULT section of the same config file. +- A string-valued option in the same section of the config file as the value containing the placeholder. +- Any environment variable, prefixed with `env.`: `%(env.ENV_VAR)s`. +- The following special values: + - `%(buildroot)s`: absolute path to the root of your repository. + - `%(homedir)s`: equivalent to `$HOME` or `~`. + - `%(user)s`: the current user's username, obtained from the system password file. + - `%(pants_workdir)s`: the absolute path of the global option `--pants-workdir`, which defaults + to `{buildroot}/.pants.d/`. + - `%(pants_distdir)s`: the absolute path of the global option `--pants-distdir`, which defaults + to `{buildroot}/dist/`. + +An interpolated value may itself contain placeholders, which will be recursively interpolated. + +For example: + +```toml title="pants.toml" +[DEFAULT] +domain = "my.domain" + +[python-repos] +repo_host = "repo.%(domain)s" +indexes.add = ["https://%(env.PY_REPO)s@%(repo_host)s/index"] +``` + +To learn more about exporting environment variables, see the [`.pants.bootstrap`](#pantsbootstrap-file) Bash script, which is sourced before Pants runs. + +## Option types + +Every option has a type, and any values you set must be of that type. + +The option types are: + +- string +- integer +- bool +- list +- dict + +A list-valued option may also declare a specific type for its members (e.g., a list of strings, or a list of integers). + +### String and integer values + +Standalone string and integer values are written without quotes. Any quotes will be considered part of the value, after shell escaping. + +#### Command-line flags: + +```bash +pants --scope-intopt=42 +pants --scope-stropt=qux +``` + +#### Environment variables: + +```bash +PANTS_SCOPE_INTOPT=42 +PANTS_SCOPE_STROPT=qux +``` + +#### Config file entries: + +```toml title="pants.toml" +[scope] +intopt = 42 +stropt = "qux" +``` + +### Boolean values + +Boolean values can be specified using the special strings `true` and `false`. When specifying them via command-line flags you can also use the `--boolopt/--no-boolopt` syntax. + +#### Command-line flags: + +```bash +pants --scope-boolopt=true +pants --scope-boolopt +pants --no-scope-boolopt +``` + +#### Environment variables: + +```bash +PANTS_SCOPE_BOOLOPT=true +``` + +#### Config file entries: + +```toml title="pants.toml" +[scope] +boolopt = true +``` + +### List values + +List values are parsed as Python list literals, so you must quote string values, and you may need to apply shell-level quoting and/or escaping, as required. + +#### Command-line flags: + +```bash +pants --scope-listopt="['foo','bar']" +``` + +You can also leave off the `[]` to _append_ elements.
So we can rewrite the above to: + +```bash +pants --scope-listopt=foo --scope-listopt=bar +``` + +Appending will add to any values from lower-precedence sources, such as config files (`pants.toml`), and possibly to the option's default. Otherwise, using `[]` will override any lower-precedence sources. + +#### Environment variables: + +```bash +PANTS_SCOPE_LISTOPT="['foo','bar']" +``` + +Like with command-line flags, you can leave off the `[]` to _append_ elements: + +```bash +PANTS_SCOPE_LISTOPT=foo +``` + +#### Config file entries: + +```toml title="pants.toml" +[scope] +listopt = [ + 'foo', + 'bar' +] +``` + +#### Add/remove semantics + +List values have some extra semantics: + +- A value can be preceded by `+`, which will _append_ the elements to the value obtained from lower-precedence sources. +- A value can be preceded by `-`, which will _remove_ the elements from the value obtained from lower-precedence sources. +- Multiple `+` and `-` values can be provided, separated by commas. +- Otherwise, the value _replaces_ the one obtained from lower-precedence sources. + +For example, if the value of `--listopt` in `scope` is set to `[1, 2]` in a config file, then + +```bash +pants --scope-listopt="+[3,4]" +``` + +will set the value to `[1, 2, 3, 4]`. + +```bash +pants --scope-listopt="-[1],+[3,4]" +``` + +will set the value to `[2, 3, 4]`, and + +```bash +pants --scope-listopt="[3,4]" +``` + +will set the value to `[3, 4]`. + +:::note Add/remove syntax in .toml files +The +/- syntax works in .toml files, but the entire value must be quoted: + +```toml title="pants.toml" +[scope] +listopt = "+[1,2],-[3,4]" +``` + +This means that TOML treats the value as a string, instead of a TOML list. + +Alternatively, you can use this syntactic sugar, which allows the values to be regular TOML lists: + +```toml title="pants.toml" +[scope] +listopt.add = [1, 2] +listopt.remove = [3, 4] +``` + +But note that this only works in Pants's `.toml` config files, not in environment variables or command-line flags. +::: + +### Dict values + +Dict values are parsed as Python dict literals on the command line and in environment variables, so you must quote string keys and values, and you may need to apply shell-level quoting and/or escaping, as required. + +#### Command-line flags: + +```bash +pants --scope-dictopt="{'foo':1,'bar':2}" +``` + +#### Environment variables: + +```bash +PANTS_SCOPE_DICTOPT="{'foo':1,'bar':2}" +``` + +#### Config file entries: + +You can use TOML's [nested table features](https://toml.io/en/v1.0.0#inline-table). These are equivalent: + +```toml title="pants.toml" +[scope] +dictopt = { foo = 1, bar = 2} +``` + +```toml title="pants.toml" +[scope.dictopt] +foo = 1 +bar = 2 +``` + +You can also use a string literal. Note the quotes: + +```toml title="pants.toml" +[scope] +dictopt = """{ + 'foo': 1, + 'bar': 2, +}""" +``` + +#### Add/replace semantics + +- A value can be preceded by `+`, which will _update_ the value obtained from lower-precedence sources with the entries. +- Otherwise, the value _replaces_ the one obtained from lower-precedence sources. + +For example, if the value of `--dictopt` in `scope` is set to `{'foo': 1, 'bar': 2}` in a config file, then + +```bash +pants --scope-dictopt="+{'foo':42,'baz':3}" +``` + +will set the value to `{'foo': 42, 'bar': 2, 'baz': 3}`, and + +```bash +pants --scope-dictopt="{'foo':42,'baz':3}" +``` + +will set the value to `{'foo': 42, 'baz': 3}`.
+ +## Reading individual option values from files + +If an option value is too large or elaborate to use directly, or if you don't want to hard-code +values directly in `pants.toml`, you can set the value of any option to the string +`@relative/path/from/repo/root/to/file` (note the leading `@`), and the value will be read +from that file. + +If the file name ends with `.json` or `.yaml` then the file will be parsed as the relevant +format, which is useful for list- and dict-valued options. + +Otherwise, the file is parsed as a literal, as described above for each option type. + +Normally, the file must exist, and it is an error if it doesn't. +To avoid an error when the file doesn't exist, add a `?` after `@`, for example: +`@?path/that/may/not/exist` - this will treat the config value in question as not +being set when the file does not exist. + +Note that you can use this feature on the command line, in an env var, or in a config file: + +```toml title="pants.toml" +[scope] +opt = "@path/to/file.json" +``` + +```bash +PANTS_SCOPE_OPT=@path/to/file.json +``` + +```bash +pants --scope-opt="@path/to/file.json" +``` + +:::caution Gotcha: If you modify the value file, you must manually restart pantsd +Until we resolve [this issue](https://github.com/pantsbuild/pants/issues/10360), changing +the value in a file used with the `@` syntax as described above will not invalidate the build. +For now, if such a file changes you will have to stop pantsd so that it will be restarted on +the next invocation of Pants. To do so, run `rm -r .pants.d/pids/` in the build root. +::: + +## `.pants.rc` file + +You can set up personal Pants config files, using the same TOML syntax as `pants.toml`. By default, Pants looks for the paths `/etc/pantsrc`, `~/.pants.rc`, and `.pants.rc` in the repository root. + +For example: + +```toml title=".pants.rc" +[python] +# Even though our repository uses 3.8+, because I have an M1, +# I must use Python 3.9+. +interpreter_constraints = ["==3.9.*"] +``` + +If you want to ban this feature, set `[GLOBAL].pantsrc = false` in `pants.toml`. + +## `.pants.bootstrap` file + +If you need to set default values for environment variables without requiring all users to define them in the local +environment, you can export them in the `.pants.bootstrap` Bash script. This file needs to be placed in the root of your +workspace, and it will be sourced before any Pants goal is invoked. + +You can also add to this file any Bash code you want to execute before Pants runs, and any environment variables +declared in this file will be available to any process that the Pants launcher binary may start. + +For example: + +```bash title=".pants.bootstrap" +# These variables are defined in our CI agents, +# but this is set to support local development. +export DOCKER_DEFAULT_REPO="https://hub.docker.com/" +export GIT_COMMIT="$(git rev-parse HEAD)" +``` + +If you want to learn more about how the +[Pants launcher binary](../../getting-started/installing-pants#the-pants-binarys-implementation) works, see +the [scie-pants](https://github.com/pantsbuild/scie-pants) project. diff --git a/versioned_docs/version-2.24/docs/using-pants/key-concepts/source-roots.mdx b/versioned_docs/version-2.24/docs/using-pants/key-concepts/source-roots.mdx new file mode 100644 index 000000000..84f4a9e29 --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/key-concepts/source-roots.mdx @@ -0,0 +1,364 @@ +--- + title: Source roots + sidebar_position: 4 +--- + +Configuring Pants to understand your imports.
+ +--- + +:::note Go and Shell can skip this page +Go does have a notion of source roots: where your `go.mod` is located. However, that is handled automatically by Pants without you needing to follow this page. + +Shell does not have any notion of source roots. +::: + +## What are source roots? + +Some project layouts use top-level folders for namespace purposes, but have the code live underneath. However, the code's imports will ignore these top-level folders, thanks to mechanisms like the `$PYTHONPATH` and the JVM classpath. _Source roots_ are a generic equivalent of these concepts. + +For example, given this Python project: + +``` +src +└── python + └── project + ├── __init__.py + ├── app.py + ├── config + │ ├── __init__.py + │ └── prod.json + └── util + ├── __init__.py + └── math.py +``` + +You would likely set `PYTHONPATH=src/python` and use imports like this: + +```python +from project.app import App +from project.util.math import add_two + +pkgutil.get_data("project.config", "prod.json") +``` + +In the example above, `src/python` is a source root. So, when some code says `from project.app import App`, Pants can know that this corresponds to the code in `src/python/project/app.py`. + +## Configuring source roots + +There are two ways to configure source roots: + +- Using patterns +- Using marker files + +You can mix and match between both styles. Run `pants roots` to see what Pants is using: + +``` +pants roots +src/assets +src/python +src/rust +``` + +### Configuring source roots using patterns + +You can provide a set of patterns that match your source roots: + +```toml title="pants.toml" +[source] +root_patterns = [ + '/src/python', + '/test/python', +] +``` + +The `/` prefix means that the source root is located at the build root, so it will match `src/python`, but not `project1/src/python`. + +You can leave off the `/` prefix to match any directory whose suffix matches a pattern. For example, `root_patterns = ["src/python"]` would consider all of these to be source roots, if they exist: + +- `src/python` +- `project1/src/python` + +You can use `*` as a glob. For example, `root_patterns = ["/src/*"]` would consider all of these to be source roots: + +- `src/python` +- `src/java` +- `src/assets` + +#### Configuring no source roots + +Many projects do not have any top-level folders used for namespacing. + +For example, given this Python project: + +``` +project +├── __init__.py +├── app.py +├── config +│ ├── __init__.py +│ └── prod.json +└── util + ├── __init__.py + └── math.py +``` + +You would likely _not_ set `PYTHONPATH` and would still use imports like this: + +```python +from project.app import App +from project.util.math import add_two + +pkgutil.get_data("project.config", "prod.json") +``` + +If you have no source roots, use this config: + +```toml title="pants.toml" +[source] +root_patterns = ["/"] +``` + +:::note Default source roots +The default value of the `root_patterns` config key is `["/", "src", "src/python", "src/py", "src/java", "src/scala", "src/thrift", "src/protos", "src/protobuf"]`. + +These capture a range of common cases, including a source root at the root of the repository. If your source roots match these patterns, you don't need to explicitly configure them. +::: + +### Configuring source roots using marker files + +You can also denote your source roots using specially-named marker files. 
To do so, first pick a name (or multiple names) to use: + +```toml title="pants.toml" +[source] +marker_filenames = ["SOURCE_ROOT"] +``` + +Then, place a file of that name in each of your source roots. The contents of those files don't matter; they can be empty. + +For example, given this Python repo, where we have a `setup.py` for each distinct project: + +``` +. +├── server +│ ├── server +│ │ ├── __init__.py +│ │ └── app.py +│ └── setup.py +└── utils + ├── setup.py + └── utils + ├── __init__.py + ├── math.py + └── strutil.py +``` + +We could use this config: + +```toml title="pants.toml" +[source] +marker_filenames = ["setup.py"] +``` + +We can then run `pants roots` to confirm that these source roots are being used: + +``` +pants roots +server +utils +``` + +This means that Pants would work with these imports: + +```python +import server.app +from utils.strutil import capitalize +``` + +Whereas these imports are invalid: + +```python +import server.server.app +from utils.utils.strutil import capitalize +``` + +## Examples + +These project structures are all valid; Pants does not expect you to reorganize your codebase to use the tool. + +### `src/` setup + +This setup is common in "polyglot" repositories: i.e. repos with multiple languages. + +#### Project: + +``` +. +├── 3rdparty +│ ├── java +│ │ └── ivy.xml +│ └── python +│ └── requirements.txt +├── src +│ ├── java +│ │ └── org +│ │ └── pantsbuild +│ │ └── project +│ │ ├── App.java +│ │ └── util +│ │ └── Math.java +│ └── python +│ └── project +│ ├── __init__.py +│ ├── app.py +│ ├── config +│ │ ├── __init__.py +│ │ └── prod.json +│ └── util +│ ├── __init__.py +│ └── math.py +└── test + └── python + └── project + ├── __init__.py + └── util + ├── __init__.py + └── test_math.py +``` + +While we have tests in a separate source root here, it's also valid to have tests colocated with their source files. + +#### Example imports: + +```python +# Python +from project.app import App +from project.util.test_math import test_add_2 +``` + +```java +// Java +import org.pantsbuild.project.App; +import org.pantsbuild.project.util.Math; +``` + +#### Config: + +```toml title="pants.toml" +[source] +root_patterns = [ + "/src/java", + "/src/python", + "/test/python", +] +``` + +Note that we organized our 3rdparty requirements in the top-level folders `3rdparty/python` and `3rdparty/java`, but we do not need to include them as source roots because we do not have any first-party code there. + +### Multiple top-level projects + +#### Project: + +This layout has lots of nesting; this is only one possible way to organize the repository. + +``` +. +├── ads +│ └── py +│ └── ads +│ ├── __init__.py +│ ├── billing +│ │ ├── __init__.py +│ │ └── calculate_bill.py +│ └── targeting +│ ├── __init__.py +│ └── validation.py +├── base +│ └── py +│ └── base +│ ├── __init__.py +│ ├── models +│ │ ├── __init__.py +│ │ ├── org.py +│ │ └── user.py +│ └── util +│ ├── __init__.py +│ └── math.py +└── news + └── js + └── spa.js +``` + +#### Example imports: + +```python +import ads.billing.calculate_bill +from base.models.user import User +from base.util.math import add_two +``` + +Note that even though the projects live in different top-level folders, you are still able to import from other projects. If you would like to limit this, you can use `pants dependents` or `pants dependencies` in CI to track where imports are being used. See [Project introspection](../project-introspection.mdx).
+ +#### Config: + +Either of these is valid, and they have the same result: + +```toml title="pants.toml" +[source] +root_patterns = [ + "/ads/py", + "/base/py", + "/news/js", +] +``` + +```toml title="pants.toml" +[source] +root_patterns = [ + "py", + "js", +] +``` + +### No source root + +Warning: while this project structure is valid, it often does not scale well as your codebase grows, such as when adding new languages. + +#### Project: + +``` +. +├── project +│ ├── __init__.py +│ ├── app.py +│ ├── config +│ │ ├── __init__.py +│ │ └── prod.json +│ └── util +│ ├── __init__.py +│ └── math.py +└── pyproject.toml +``` + +#### Example imports: + +```python +from project.app import App +from project.util.math import add_two + +pkgutil.get_data("project.config", "prod.json") +``` + +#### Config: + +Either of these is valid, and they have the same result: + +```toml title="pants.toml" +[source] +root_patterns = ["/"] +``` + +```toml +[source] +marker_filenames = ["pyproject.toml"] +``` diff --git a/versioned_docs/version-2.24/docs/using-pants/key-concepts/targets-and-build-files.mdx b/versioned_docs/version-2.24/docs/using-pants/key-concepts/targets-and-build-files.mdx new file mode 100644 index 000000000..a4b4154fc --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/key-concepts/targets-and-build-files.mdx @@ -0,0 +1,473 @@ +--- + title: Targets and BUILD files + sidebar_position: 1 +--- + +Metadata for your code. + +--- + +Most goals require metadata about your code. For example, to run a test, you need to know about all the transitive dependencies of that test. You may also want to set a timeout on that test. + +_Targets_ are an _addressable_ set of metadata describing your code. + +For example: + +- `shell_source` and `python_test` describe first-party code +- `python_requirement` describes third-party requirements +- `pex_binary` and `archive` describe artifacts you'd like Pants to build + +To reduce boilerplate, some targets also generate other targets: + +- `python_tests` -> `python_test` +- `shell_sources` -> `shell_source` +- `go_mod` -> `go_third_party_package` + +## BUILD files + +Targets are defined in files with the name `BUILD`. For example: + +```python title="helloworld/greet/BUILD" +python_tests( + name="tests", + timeout=120, +) + +pex_binary( + name="bin", + entry_point="app.py:main", +) +``` + +Each target type has different _fields_, or individual metadata values. Run `pants help $target` to see which fields a particular target type has, e.g. `pants help file`. Most fields are optional and use sensible defaults. See [Field default values](./targets-and-build-files.mdx#field-default-values) for how you may override a field's default value. + +All target types have a `name` field, which is used to identify the target. Target names must be unique within a directory. + +You can autoformat `BUILD` files by enabling a `BUILD` file formatter, adding it to `[GLOBAL].backend_packages` in `pants.toml` (such as `pants.backend.build_files.fmt.black` [or others](./backends.mdx)). Then, to format, run `pants fmt '**/BUILD'` or `pants fmt ::` (which formats everything). + +### Environment variables + +BUILD files are hermetic by design, with no support for `import` statements or other I/O operations. In order to have dynamic data in BUILD files, you may inject values from the local environment using the `env()` function. It takes the variable name and an optional default value as arguments.
+ +```python title="helloworld/pkg/BUILD" +python_distribution( + name="helloworld-dist", + description=env("DIST_DESC", "Set the `DIST_DESC` env variable to override this value."), + provides=python_artifact( + name="helloworld", + version=env("HELLO_WORLD_VERSION"), + ), +) +``` + +### Multiple BUILD files in a directory + +Typically, there is one BUILD file in every directory that contains source code or any other resources you want to use in your builds, and one BUILD file per directory is most likely all you need. However, you can have multiple BUILD files in a single directory, if desired. When running a Pants goal, the contents of the BUILD files will be merged, making it possible to better group your targets. + +Storing targets in multiple BUILD files also makes it possible to dynamically include or exclude targets from your +builds. For example, you could include some experimental targets when running a Pants goal from the command line by +extending the list of recognized BUILD file patterns: + +```bash +$ pants --build-patterns="+['BUILD.experimental']" package project:app +``` + +## Target addresses + +A target is identified by its unique address, in the form `path/to/dir:name`. The above example has the addresses `helloworld/greet:tests` and `helloworld/greet:bin`. + +Addresses are used in the `dependencies` field to depend on other targets. Addresses can also be used as command-line arguments, such as `pants fmt path/to:tgt`. + +(Both "generated targets" and "parametrized targets" have a variant of this syntax; see the below sections.) + +:::note Default for the `name` field +The `name` field defaults to the directory name. So, this target has the address `helloworld/greet:greet`. + +```python +# helloworld/greet/BUILD +python_sources() +``` + +::: + +:::note Use `//:tgt` for the root of your repository +Addresses defined in the `BUILD` file at the root of your repository are prefixed with `//`, e.g. `//:my_tgt`. +::: + +## `source` and `sources` field + +Targets like `python_test` and `resource` have a `source: str` field, while target generators like `python_tests` and `resources` have a `sources: list[str]` field. This determines which source files belong to the target. + +Values are relative to the BUILD file's directory. Sources must be in or below this directory, i.e. `../` is not allowed. + +The `sources` field also supports `*` and `**` as globs. To exclude a file or glob, prefix with `!`. For example, `["*.py", "!exclude_*.py"]` will include `f.py` but not `exclude_me.py`. + +```python title="BUILD" +resource(name="logo", source="logo.png") + +python_tests( + name="tests", + sources=["*_test.py"], +) +``` + +:::caution Be careful with overlapping `source` fields +It's legal to include the same file in the `source` / `sources` field for multiple targets. + +When would you do this? Sometimes you may have conflicting metadata for the same source file, such as wanting to check that a Shell test works with multiple shells. Normally, you should prefer Pants's `parametrize` mechanism to do this. See the below section "Parametrizing Targets". + +Often, however, it is not intentional when multiple targets own the same file.
For example, this often happens when using `**` globs, like this: + +```python +# project/BUILD +python_sources(sources=["**/*.py"]) + +# project/subdir/BUILD +python_sources(sources=["**/*.py"]) +``` + +Including the same file in the `source` / `sources` field for multiple targets can result in two confusing behaviors: + +- File arguments will run over all owning targets, e.g. `pants test path/to/test.ext` would run both test targets as two separate subprocesses, even though you might only expect a single subprocess. +- Pants will sometimes no longer be able to infer dependencies on this file because it cannot disambiguate which of the targets you want to use. You must use explicit dependencies instead. (For some blessed fields, like the `resolve` field, if the targets have different values, then there will not be ambiguity.) + +You can run `pants list path/to/file.ext` to see all "owning" targets to check if >1 target has the file in its `source` field. +::: + +## `dependencies` field + +A target's dependencies determine which other first-party code and third-party requirements to include when building the target. + +Usually, you leave off the `dependencies` field thanks to _dependency inference_. Pants will read your import statements and map those imports back to your first-party code and your third-party requirements. You can run `pants dependencies path/to:target` to see what dependencies Pants infers. + +However, dependency inference cannot infer everything, such as dependencies on `resource` and `file` targets. + +To add an explicit dependency, add the target's address to the `dependencies` field. This augments any dependencies that were inferred. + +```python title="helloworld/greet/BUILD" +python_sources( + name="lib", + dependencies=[ + "3rdparty/python:ansicolors", + "assets:logo", + ], +) +``` + +You only need to declare direct dependencies. Pants will pull in _transitive dependencies_—i.e. the dependencies of your dependencies—for you. + +:::note Relative addresses, `:tgt` +When depending on a target defined in the same BUILD file, you can simply use `:tgt_name`, rather than `helloworld/greet:tgt_name`, for example. + +Addresses for generated targets also support relative addresses in the `dependencies` field, as explained in the "Target Generation" section below. +::: + +:::note Ignore dependencies with `!` and `!!` +If you don't like that Pants inferred a certain dependency—as reported by [`pants dependencies path/to:tgt`](../project-introspection.mdx)—tell Pants to ignore it with `!`: + +```python +python_sources( + name="lib", + dependencies=["!3rdparty/python:numpy"], +) +``` + +You can use the prefix `!!` to transitively exclude a dependency, meaning that even if a target's dependencies include the bad dependency, the final result will not include that dependency. + +Transitive excludes can only be used in target types that conventionally are not depended upon by other targets, such as `pex_binary`, `python_distribution`, and `python_test` / `python_tests`. This is meant to limit confusion, as using `!!` in something like a `python_source` / `python_sources` target could result in surprising behavior for everything that depends on it. (Pants will print a helpful error when using `!!` when it's not legal.) +::: + +## Using the generic `target` + +[`target`](../../../reference/targets/target.mdx) is a generic target with no specific type. +It can be used to group related, but distinct, targets into one single target.
+ +### Referring to a group of targets + +You could use the generic `target` when you need to group multiple targets, so you can refer to them as a unit (a single dependency) and reduce repetition: + +```python title="BUILD" +target( + name="python-libs", + dependencies=["src/python/libraries/libA", "src/python/libraries/libB"], +) +``` + +If this target is declared in the root of your workspace, you can now address the Python libraries via `//:python-libs`: + +```bash +$ pants dependencies //:python-libs +``` + +### Creating aliases for targets + +If you have some targets declared in BUILD files that are stored deep within the directory structure of your workspace, +you can make it easier to refer to them when listing them among the dependencies of other targets. + +For example, you can simplify access to a target by creating another target that serves as an alias, defined in +a BUILD file stored in a more convenient location in the workspace, for instance, the build root directory: + +```python title="BUILD" +target( + name="deployment-bins", + dependencies=["src/golang/production/cloud/deployment/binaries:tools"] +) +``` + +You can now refer to that target more concisely in BUILD files: + +```python title="BUILD" +python_sources(dependencies=["//:deployment-bins"]) +``` + +## Field default values + +As mentioned above in [BUILD files](./targets-and-build-files.mdx#build-files), most target fields have sensible defaults, and it's easy to override those values on a specific target. But applying the same non-default value on many targets can get unwieldy, error-prone, and hard to maintain. Enter `__defaults__`. + +Alternative default field values are set using the `__defaults__` BUILD file symbol, and apply to targets in the filesystem tree under that BUILD file's directory. + +The defaults are provided as a dictionary mapping target types to the default field values. Multiple target types may share the same set of default field values, when grouped together in parentheses (as a Python tuple). + +Use the `all` keyword argument to provide default field values that should apply to all targets. + +The `extend=True` keyword argument allows you to add to any existing default field values set by a previous `__defaults__` call rather than replacing them. + +Default fields and values are validated against their target types, except when provided using the `all` keyword, in which case only values for fields applicable to each target are validated. Use `ignore_unknown_fields=True` to ignore invalid fields. + +This means that it is legal to provide a default value for `all` targets, even if only a subset of targets actually supports that particular field. + +:::note `__defaults__` does not apply to environment targets. +The environment targets (such as `local_environment`, `docker_environment`, etc.) are special: they are used during a bootstrap phase before any targets are defined, and as such cannot be targeted by the `__defaults__` construct. +::: + +Examples: + +```python title="src/example/BUILD" + # Provide default `tags` to all targets in this subtree, and skip black, where applicable. + __defaults__(all=dict(tags=["example"], skip_black=True)) +``` + +Subdirectories may override defaults from a parent BUILD file: + +```python title="src/example/override/BUILD" + # For `files` and `resources` targets, we want to use some other defaults.
+ __defaults__({ + (files, resources): dict(tags=["example", "overridden"], description="Our assets") + }) +``` + +Use the `extend=True` keyword to update defaults rather than replace them, for any given target. + +```python title="src/example/extend/BUILD" + # Add a default description to all types, in addition to the inherited default tags. + __defaults__(extend=True, all=dict(description="Add default description to the defaults.")) +``` + +To reset any modified defaults, simply override with the empty dict: + +```python title="src/example/nodefaults/BUILD" + __defaults__(all={}) +``` + +### Supporting optional plugin fields + +Normally, Pants presents an error message when you attempt to provide a default value for a field that doesn't exist for the target. However, some fields come from plugins, and to support disabling a plugin without having to remove default values that reference the fields it provided, there is an `ignore_unknown_fields` option: + +```python title="example/BUILD" + __defaults__( + { + # Defaults... + }, + ignore_unknown_fields=True, + ) +``` + +### Extending field defaults + +To add to a default value rather than replace it, the current default value for a target field is available in the BUILD file as `<target type>.<field>.default` (for example, `python_sources.sources.default`). This allows you to augment a field's default value with much more precision. As an example, if you want the default sources for a `python_sources` target to work recursively, you may specify a target that augments the default sources field: + +```python title="BUILD" +python_sources( + name="my-one-top-level-target", + sources=[ + f"{pattern[0] if pattern.startswith('!') else ''}**/{pattern.lstrip('!')}" + for pattern in python_sources.sources.default + ] +) +``` + +## Target generation + +To reduce boilerplate, Pants provides target types that generate other targets. For example: + +- `files` -> `file` +- `python_tests` -> `python_test` +- `go_mod` -> `go_third_party_package` + +Usually, prefer these target generators. [`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) will automatically add them for you. + +Run `pants help targets` to see how the target determines what to generate. Targets for first-party code, like `resources` and `python_tests`, will generate one target for each file in their `sources` field. + +```python +python_sources( + name="lib", + # Will generate two `python_source` targets. + sources=["app.py", "util.py"], +) +``` + +(Usually, you can leave off the `sources` field. When possible, it defaults to all relevant files in the current directory.) + +Typically, fields declared in the target generator will be inherited by each generated target. For example, if you set `timeout=120` in a `python_tests` target, each generated `python_test` target will have `timeout=120`. You can instead use the `overrides` field for more granular metadata: + +```python title="helloworld/BUILD" +python_tests( + name="tests", + # This applies to every generated target. + extra_env_vars=["MY_ENV_VAR"], + # These only apply to the relevant generated targets.
+    overrides={
+        "dirutil_test.py": {"timeout": 30},
+        ("osutil_test.py", "strutil_test.py"): {"timeout": 15},
+    },
+)
+```
+
+The address for a generated target depends on whether it is for first-party code or not:
+
+| Generated target type | Generated address syntax |
+| :-------------------- | :----------------------- |
+| First-party, e.g. `python_source` and `file` | `path/to/file.ext:tgt_generator`<br/><br/>Example: `src/py/app.py:lib`<br/><br/>The address always starts with the path to the file.<br/><br/>If the file lives in the same directory as the target generator and the target generator left off the `name` field, you can use just the file path. For example, `src/py/app.py` (without the `:lib` suffix).<br/><br/>If the file lives in a subdirectory of the target generator, the suffix will look like `../tgt_generator`. For example, `src/py/subdir/f.py:../lib`, where the target generator is `src/py:lib`.<br/><br/>With the `dependencies` field, you can use relative addresses by prefixing the path with `./`, so long as the path is in the same directory or below the current BUILD file. For example, `./app.py:lib` rather than `src/py/app.py:lib`. |
+| All other targets, e.g. `go_third_party_package` | `path/to:tgt_generator#generated_name`<br/><br/>Example: `3rdparty/py:reqs#django`<br/><br/>Run `pants help $target_type` on the target generator to see how it sets the generated name. For example, `go_mod` uses the Go package's name.<br/><br/>If the target generator left off the `name` field, you can leave it off for the generated address too, e.g. `3rdparty/py#django` (without the `:reqs` portion).<br/><br/>With the `dependencies` field, you can use relative addresses to reference generated targets in the same BUILD file, e.g. `:generator#generated_name` instead of `src/py:generator#generated_name`. If the target generator uses the default `name`, you can simply use `#generated_name`. |
+
+Run [`pants list dir:`](../project-introspection.mdx) in the directory of the target generator to see all generated target addresses, and [`pants peek dir:`](../project-introspection.mdx) to see all their metadata.
+
+You can use the address for the target generator as an alias for all of its generated targets. For example, if you have the `files` target `assets:logos`, adding `dependencies=["assets:logos"]` to another target will add a dependency on each generated `file` target. Likewise, if you have a `python_tests` target `project:tests`, then `pants test project:tests` will run on each generated `python_test` target.
+
+:::note Tip: one BUILD file per directory
+Target generation means that it is technically possible to put everything in a single BUILD file.
+
+However, we've found that it usually scales much better to use a single BUILD file per directory. Even if you start with using the defaults for everything, projects usually need to change some metadata over time, like adding a `timeout` to a test file or adding `dependencies` on resources.
+
+It's useful for metadata to be as fine-grained as feasible, such as by using the `overrides` field to only change the files you need to. Fine-grained metadata is key to having smaller cache keys (resulting in more cache hits), and allows you to more accurately reflect the status of your project. We have found that using one BUILD file per directory encourages fine-grained metadata by defining the metadata adjacent to where the code lives.
+
+[`pants tailor ::`](../../getting-started/initial-configuration.mdx#5-generate-build-files) will automatically create targets that only apply metadata for the directory.
+:::
+
+## Parametrizing targets
+
+It can be useful to create multiple targets describing the same entity, each with different metadata. For example:
+
+- Run the same tests with different interpreter constraints, e.g. Python 2 vs Python 3.
+- Declare that a file should work with multiple "resolves" (lockfiles).
+
+The `parametrize` builtin creates a distinct target per parametrized field value. All values other than the parametrized field(s) are the same for each target. For example:
+
+```python title="example/BUILD"
+# Creates two targets:
+#
+# example:tests@shell=bash
+# example:tests@shell=zsh
+
+shunit2_test(
+    name="tests",
+    source="tests.sh",
+    shell=parametrize("bash", "zsh"),
+)
+```
+
+If multiple fields are parametrized, a target will be created for each value in the Cartesian product, with `,` as the delimiter in the address. See the next example.
+
+If the field value is not a string—or it is a string but includes spaces—you can give it an alias, like the `interpreter_constraints` field below:
+
+```python title="example/BUILD"
+# Creates four targets:
+#
+# example:tests@interpreter_constraints=py2,resolve=lock-a
+# example:tests@interpreter_constraints=py2,resolve=lock-b
+# example:tests@interpreter_constraints=py3,resolve=lock-a
+# example:tests@interpreter_constraints=py3,resolve=lock-b
+
+python_test(
+    name="tests",
+    source="tests.py",
+    interpreter_constraints=parametrize(py2=["==2.7.*"], py3=[">=3.6,<3.7"]),
+    resolve=parametrize("lock-a", "lock-b"),
+)
+```
+
+To parametrize multiple fields together as a single parametrization, unpack a `parametrize` object whose keyword arguments are the field values to use for that group. The parametrization must be named by providing one positional string argument. (See the example below.) This is useful to avoid a full Cartesian product when not every combination of field values makes sense. For example, the previous example uses the same resolve (lockfile) for both interpreter constraints; if you instead want a different resolve per interpreter, grouping the resolve value with the interpreter constraint may be the way to go.
+
+```python title="example/BUILD"
+# Creates two targets:
+#
+# example:tests@parametrize=py2
+# example:tests@parametrize=py3
+
+python_test(
+    name="tests",
+    source="tests.py",
+    **parametrize("py2", interpreter_constraints=["==2.7.*"], resolve="lock-a"),
+    **parametrize("py3", interpreter_constraints=[">=3.6,<3.7"], resolve="lock-b"),
+)
+```
+
+(Using `parametrize` on grouped fields is also supported. For instance, if there are two resolves to use for Python 3.10, these can be provided within the py310 group: `**parametrize("py310", interpreter_constraints=[">=3.10,<3.11"], resolve=parametrize("lock-b", "lock-c"))`.)
+
+The targets' addresses will have `@key=value` at the end, as shown above. Run [`pants list dir:`](../project-introspection.mdx) in the directory of the parametrized target to see all parametrized target addresses, and [`pants peek dir:`](../project-introspection.mdx) to see all their metadata.
+
+Generally, you can use the address without the `@` suffix as an alias to all the parametrized targets. For example, `pants test example:tests` will run all the targets in parallel. Use the more precise address if you only want to use one parameter value, e.g. `pants test example:tests@shell=bash`.
+
+Parametrization can be combined with target generation. The `@key=value` will be added to the end of the address for each generated target. For example:
+
+```python title="example/BUILD"
+# Generates four `shunit2_test` targets:
+#
+# example/test1.sh:tests@shell=bash
+# example/test1.sh:tests@shell=zsh
+# example/test2.sh:tests@shell=bash
+# example/test2.sh:tests@shell=zsh
+#
+# Also creates two `shunit2_tests` target
+# generators, which can be used as aliases
+# to their generated targets:
+#
+# example:tests@shell=bash
+# example:tests@shell=zsh
+#
+# Generally, you can still use `example:tests`
+# without the `@` suffix as an alias to all the
+# created targets.
+ +shunit2_tests( + name="tests", + sources=["test1.sh", "test2.sh"], + shell=parametrize("bash", "zsh"), +) +``` + +You can combine `parametrize` with the `overrides` field to set more granular metadata for generated targets: + +```python title="example/BUILD" +# Generates three `shunit2_test` targets: +# +# example/test1.sh:tests +# example/test2.sh:tests@shell=bash +# example/test2.sh:tests@shell=zsh +# +# The `shunit2_tests` target generator +# `example:tests` can be used as an alias +# to all 3 created targets. + +shunit2_tests( + name="tests", + sources=["test1.sh", "test2.sh"], + overrides={ + "test2.sh": {"shell": parametrize("bash", "zsh")}, + }, +) +``` diff --git a/versioned_docs/version-2.24/docs/using-pants/project-introspection.mdx b/versioned_docs/version-2.24/docs/using-pants/project-introspection.mdx new file mode 100644 index 000000000..b66323f66 --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/project-introspection.mdx @@ -0,0 +1,386 @@ +--- + title: Project introspection + sidebar_position: 4 +--- + +Finding insights in your project. + +--- + +Pants provides several goals to provide insights into your project's structure. + + + +:::note Tip: Use `xargs` to pipe these goals into other Pants commands +For example: + +```bash +$ pants dependents project/util.py | xargs pants test +``` + +See [Advanced target selection](./advanced-target-selection.mdx) for more info and other techniques to use the results. +::: + +## `list` - find your project's targets + +`list` will find all targets that match the arguments. + +For example, to show all targets in your project: + +```bash +❯ pants list :: +//:ansicolors +//:setuptools +helloworld:lib +helloworld:pex_binary +helloworld/__init__.py:lib +helloworld/main.py:lib +... +``` + +You can specify a file, which will find the target(s) owning that file: + +```bash +❯ pants list helloworld/greet/greeting_test.py +helloworld/greet/greeting_test.py:tests +``` + +`list` often works well when paired with the `--filter` options from +[Advanced Target Selection](./advanced-target-selection.mdx), e.g. +`pants --filter-target-type=python_test list ::` to find all your `python_test` targets. + +## `dependencies` - find a target's dependencies + +Use `dependencies` to list all targets used directly by a target. + +```bash +❯ pants dependencies helloworld:pex_binary +helloworld/main.py:lib +``` + +You can specify a file, which will run on the target(s) owning that file: + +```bash +❯ pants dependencies helloworld/main.py:lib +//:ansicolors +helloworld/greet/greeting.py:lib +helloworld/main.py:lib +``` + +To include transitive dependencies—meaning the dependencies of the direct dependencies—use `--transitive`: + +```bash +❯ pants dependencies --transitive helloworld/main.py:lib +//:ansicolors +//:setuptools +//:types-setuptools +helloworld/greet/greeting.py:lib +helloworld/greet:translations +helloworld/main.py:lib +helloworld/translator/translator.py:lib +``` + +## `dependents` - find which targets depend on a target + +The `dependents` goal finds all targets that directly depend on the target you specify. 
+
+```bash
+❯ pants dependents //:ansicolors
+helloworld/main.py:lib
+```
+
+You can specify a file, which will run on the target(s) owning that file:
+
+```bash
+❯ pants dependents helloworld/translator/translator.py
+helloworld/greet/greeting.py:lib
+helloworld/translator:lib
+helloworld/translator/translator_test.py:tests
+```
+
+To include transitive dependents—meaning targets that don't directly depend on your target, but which depend on a target that does directly use your target—use `--transitive`:
+
+```bash
+❯ pants dependents --transitive helloworld/translator/translator.py
+helloworld:lib
+helloworld:pex_binary
+helloworld/main.py:lib
+helloworld/greet:lib
+...
+```
+
+To include the original target itself, use `--closed`:
+
+```bash
+❯ pants dependents --closed //:ansicolors
+//:ansicolors
+helloworld/main.py:lib
+```
+
+## Export dependency graph
+
+Both the `dependencies` and `dependents` goals have a `--format` option allowing you to export data in multiple formats.
+Exporting information about the dependencies and dependents in JSON format will produce the
+[adjacency list](https://en.wikipedia.org/wiki/Adjacency_list) of your dependency graph:
+
+```bash
+$ pants dependencies --format=json \
+    helloworld/greet/greeting.py \
+    helloworld/translator/translator_test.py
+
+{
+  "helloworld/greet/greeting.py:lib": [
+    "//:reqs#setuptools",
+    "//:reqs#types-setuptools",
+    "helloworld/greet:translations",
+    "helloworld/translator/translator.py:lib"
+  ],
+  "helloworld/translator/translator_test.py:tests": [
+    "//:reqs#pytest",
+    "helloworld/translator/translator.py:lib"
+  ]
+}
+```
+
+This has various applications: you can analyze, visualize, and process the data further. Sometimes a fairly
+straightforward `jq` query will suffice, but for anything more complex, it may make sense to write a small program
+to process the exported graph. For instance, you could:
+
+* find tests with the most transitive dependencies
+
+```bash
+$ pants dependencies --transitive --filter-target-type=python_test --format=json :: \
+    | jq -r 'to_entries[] | "\(.key)\t\(.value | length)"' \
+    | sort -k2 -n
+```
+
+* find resources that only a few other targets depend on
+
+```bash
+$ pants dependents --filter-target-type=resource --format=json :: \
+    | jq -r 'to_entries[] | select(.value | length < 2)'
+```
+
+* find files within the `src/` directory that transitively lead to the most tests
+
+```python
+# depgraph.py
+import json
+
+with open("data.json") as fh:
+    data = json.load(fh)
+
+# For each source target, count how many of its transitive dependents are tests.
+for source, dependents in data.items():
+    print(source, len([d for d in dependents if d.startswith("tests/")]))
+```
+
+```bash
+$ pants dependents --transitive --format=json src:: > data.json
+$ python3 depgraph.py | sort -k2 -n
+```
+
+For more sophisticated graph querying, you may want to look into graph libraries such as [`networkx`](https://networkx.org/).
+In a larger repository, it may make sense to track the health of the dependency graph and use the output
+of the graph export to identify parts of your codebase that would benefit from refactoring.
+
+## `filedeps` - find which files a target owns
+
+`filedeps` outputs all of the files belonging to a target, based on its `sources` field.
+ +```bash +❯ pants filedeps helloworld/greet:lib +helloworld/greet/BUILD +helloworld/greet/__init__.py +helloworld/greet/greeting.py +``` + +To output absolute paths, use the option `--absolute`: + +```bash +$ pants filedeps --absolute helloworld/util:util +/Users/pantsbuild/example-python/helloworld/greet/BUILD +/Users/pantsbuild/example-python/helloworld/greet/__init__.py +/Users/pantsbuild/example-python/helloworld/greet/greeting.py +``` + +To include the files used by dependencies (including transitive dependencies), use `--transitive`: + +```bash +$ pants filedeps --transitive helloworld/util:util +BUILD +helloworld/greet/BUILD +helloworld/greet/__init__.py +helloworld/greet/greeting.py +helloworld/greet/translations.json +... +``` + +## `peek` - programmatically inspect a target + +`peek` outputs JSON for each target specified. + +```bash +$ pants peek helloworld/util:tests +[ + { + "address": "helloworld/util:tests", + "target_type": "python_tests", + "dependencies": null, + "description": null, + "interpreter_constraints": null, + "skip_black": false, + "skip_docformatter": false, + "skip_flake8": true, + "skip_isort": false, + "skip_mypy": false, + "sources": [ + "*.py", + "*.pyi", + "!test_*.py", + "!*_test.py", + "!tests.py", + "!conftest.py", + "!test_*.pyi", + "!*_test.pyi", + "!tests.pyi" + ], + "tags": null + } +] +``` + +You can use `--exclude-defaults` for less verbose output: + +```bash +$ pants peek --exclude-defaults helloworld/util:tests +[ + { + "address": "helloworld/util:tests", + "target_type": "python_tests", + "skip_flake8": true, + } +] +``` + +:::note Piping peek output into jq +`peek` can be particularly useful when paired with [JQ](https://stedolan.github.io/jq/) to query the JSON. For example, you can combine `pants peek` with JQ to find all targets where you set the field `skip_flake8=True`: + +```bash +$ pants peek :: | jq -r '.[] | select(.skip_flake8 == true) | .["address"]' +helloworld/greet:lib +helloworld/greet:tests +helloworld/util:lib +``` + +::: + +:::note Piping other introspection commands into `pants peek` +Some introspection goals, such as `filter`, `dependencies` and `dependents` emit a flat list of target addresses. It's often useful to expand each of those into a full JSON structure with detailed properties of each target, by piping to `pants peek`: + +```bash +pants dependents helloworld/main.py:lib | xargs pants peek --exclude-defaults +[ + { + "address": "helloworld:lib", + "target_type": "python_sources", + "dependencies": [ + "helloworld/__init__.py:lib", + "helloworld/main.py:lib" + ], + "sources": [ + "helloworld/__init__.py", + "helloworld/main.py" + ] + }, + { + "address": "helloworld:pex_binary", + "target_type": "pex_binary", + "dependencies": [ + "helloworld/main.py:lib" + ], + "entry_point": { + "module": "main.py", + "function": null + } + } +] +``` + +Keep in mind, however, that the `peek` goal may be invoked by `xargs` as many times as necessary to use up the list +of input items. This may break the structured data output, so it may be safer to use the +[`--spec-files`](../../reference/global-options#spec_files) option. 
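+
+For example, a sketch of the `--spec-files` approach (the file path here is arbitrary):
+
+```bash
+# Write the addresses to a file, then pass that file to a single Pants
+# invocation, so the output stays one well-formed JSON document.
+pants dependents helloworld/main.py:lib > /tmp/addresses.txt
+pants --spec-files=/tmp/addresses.txt peek --exclude-defaults
+```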
+ +::: + +## `paths` - find dependency paths + +`paths` emits a list of all dependency paths between two targets: + +```bash +$ pants paths --from=helloworld/main.py --to=helloworld/translator/translator.py +[ + [ + "helloworld/main.py:lib", + "helloworld/greet/greeting.py:lib", + "helloworld/translator/translator.py:lib" + ] +] +``` + +## `count-loc` - count lines of code + +`count-loc` counts the lines of code of the specified files by running the [Succinct Code Counter](https://github.com/boyter/scc) tool. + +```shell +❯ pants count-loc :: +─────────────────────────────────────────────────────────────────────────────── +Language Files Lines Blanks Comments Code Complexity +─────────────────────────────────────────────────────────────────────────────── +Python 1690 618679 23906 7270 587503 18700 +HTML 61 6522 694 67 5761 0 +JSON 36 18755 6 0 18749 0 +YAML 30 2451 4 19 2428 0 +JavaScript 6 671 89 8 574 32 +CSV 1 2 0 0 2 0 +JSONL 1 4 0 0 4 0 +Jinja 1 11 0 0 11 2 +Shell 1 13 2 2 9 4 +TOML 1 146 5 0 141 0 +─────────────────────────────────────────────────────────────────────────────── +Total 1828 647254 24706 7366 615182 18738 +─────────────────────────────────────────────────────────────────────────────── +Estimated Cost to Develop $22,911,268 +Estimated Schedule Effort 50.432378 months +Estimated People Required 53.813884 +─────────────────────────────────────────────────────────────────────────────── +``` + +SCC has [dozens of options](https://github.com/boyter/scc#usage). You can pass through options by either setting `--scc-args` or using `--` at the end of your command, like this: + +```bash +pants count-loc :: -- --no-cocomo +``` + +:::caution See unexpected results? Set `pants_ignore`. +By default, Pants will ignore all globs specified in your `.gitignore`, along with `dist/` and any hidden files. + +To ignore additional files, add to the global option `pants_ignore` in your `pants.toml`, using the same [syntax](https://git-scm.com/docs/gitignore) as `.gitignore` files. + +For example: + +```toml title="pants.toml" +[GLOBAL] +pants_ignore.add = ["/ignore_this_dir/"] +``` + +::: diff --git a/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/_category_.json b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/_category_.json new file mode 100644 index 000000000..1dd264360 --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "Remote caching & execution", + "position": 9 +} diff --git a/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/index.mdx b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/index.mdx new file mode 100644 index 000000000..36be19fde --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/index.mdx @@ -0,0 +1,58 @@ +--- + title: Remote caching & execution + sidebar_position: 8 +--- + +--- + +## Overview + +By default, Pants executes processes in a local [environment](../environments.mdx) on the system on which it is run, and caches the results of those processes locally as well. Besides this "local execution" mode of operation, Pants also supports two distributed modes of operation: + +1. "Remote caching" where Pants store results from local process execution in a remote cache and also consumes results from that remote cache; and + +2. 
"Remote execution" where Pants offloads execution of processes to a remote server (and consumes cached results from that remote server) + +Pants does this by using the "Remote Execution API" to converse with the remote cache or remote execution server. Pants also [supports some additional providers](./remote-caching.mdx) other than Remote Execution API that provide only remote caching, without execution. + +### What is Remote Execution API? + +Pants is compatible with remote caching and remote execution servers that comply with the [Remote Execution API](https://github.com/bazelbuild/remote-apis) standard ("REAPI"). The REAPI protocol is supported by several different server and client projects including Bazel and of course Pants. + +REAPI servers implement several related but distinct services: + +1. A "content-addressable storage" service that stores data keyed by the hash of that data (also known as a "CAS"). +2. An "action cache service" that maps process executions to their results. +3. An "execution service" that executes processes by using the content-addressable storage service to obtain the inputs and store the outputs from running those processes. + +Remote cache servers implement the CAS and action cache services. Remote execution servers implement all three services. + +Pants calls the CAS a "store server" and the execution service an "execution server." These are logically distinct in the REAPI, but in fact may be exposed to clients on the same network endpoint. + +The REAPI model contains the notion of an "instance." An "instance" is a distinct deployment of a CAS and/or execution service that is given a specific name. All REAPI operations send an instance name to the server, thus a single network endpoint can conceivably support multiple REAPI deployments. + +## Server compatibility + +In order to use remote caching or remote execution, Pants will need access to a server that complies with REAPI. Pants is known to work with: + +**Self-hosted**: + +- [BuildBarn](https://github.com/buildbarn/bb-remote-execution) +- [Buildfarm](https://github.com/bazelbuild/bazel-buildfarm/) +- [BuildGrid](https://buildgrid.build/) + +**Note**: Setup of a self-hosted REAPI server is beyond the scope of this documentation. All these server projects have support channels on the BuildTeamWorld Slack. [Go here to obtain an invite to that Slack.](https://bit.ly/2SG1amT) + +As a more lightweight solution - for caching only - Pants can use a server providing only the CAS (content-addressable storage) service. + +**Self-hosted**: + +- [bazel-remote-cache](https://github.com/buchgr/bazel-remote) + +Bazel Remote Cache supports local disk, S3, GCS, and Azure Blob storage options - needing only a single lightweight Docker container. + +There are a few [other](https://github.com/bazelbuild/remote-apis) systems and services in this space, but they have not, to our knowledge, been tested with Pants. Let us know if you have any experience with them! + +## Resources + +- The [remote-apis-testing project](https://gitlab.com/remote-apis-testing/remote-apis-testing) maintains a compatibility test suite of the various server and client implementations of REAPI. 
diff --git a/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-caching.mdx b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-caching.mdx
new file mode 100644
index 000000000..f22a6c470
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-caching.mdx
@@ -0,0 +1,99 @@
+---
+    title: Remote caching
+    sidebar_position: 0
+---
+
+---
+
+## What is remote caching?
+
+Remote caching allows Pants to store and retrieve the results of process execution to and from a remote server, rather than only using your machine's local Pants cache. This allows Pants to efficiently share a cache across different runs and different machines; for example, all of your CI workers can share the same fine-grained cache.
+
+Pants supports several remote caching providers:
+
+- [Remote Execution API](https://github.com/bazelbuild/remote-apis) ("REAPI"), which also supports [remote execution](./remote-execution.mdx)
+- GitHub Actions Cache
+- Local file system
+
+## Remote Execution API
+
+### Server
+
+See the [REAPI server compatibility guide](./index.mdx#server-compatibility) for more information about REAPI-compatible caches.
+
+### Pants Configuration
+
+After you have either set up a REAPI cache server or obtained access to one, the next step is to point Pants to it so that Pants will use it to read and write process results.
+
+For the following examples, assume that the REAPI server is running on `cache.corp.example.com` at port 8980 and that it is on an internal network. Also assume that the name of the REAPI instance is "main." At a minimum, you will need to configure `pants.toml` as follows:
+
+```toml
+[GLOBAL]
+remote_cache_read = true
+remote_cache_write = true
+remote_store_address = "grpc://cache.corp.example.com:8980"
+remote_instance_name = "main"
+```
+
+If the endpoint is using TLS, then the `remote_store_address` option would be specified with the `grpcs://` scheme, i.e. `"grpcs://cache.corp.example.com:8980"`.
+
+## GitHub Actions Cache
+
+GitHub Actions provides a built-in caching service, which Pants can use to share caches across GitHub Actions runs (not with machines outside of GitHub Actions). It is typically used via the `actions/cache` action to cache whole directories and files, but Pants can use the same functionality for fine-grained caching.
+
+:::caution GitHub Actions Cache support is still experimental
+Support for this cache provider is still under development, with more refinement required. Pants' fine-grained caching makes for many requests, and thus [often hits rate limit errors](https://github.com/pantsbuild/pants/issues/20133).
+
+Please [let us know](/community/getting-help) if you use it and encounter errors or warnings.
+:::
+
+### Workflow
+
+The values of the `ACTIONS_CACHE_URL` and `ACTIONS_RUNTIME_TOKEN` environment variables need to be provided to Pants via the `[GLOBAL].remote_store_address` and `[GLOBAL].remote_oauth_bearer_token` options respectively. They are only provided to action calls (not shell steps that use `run: ...`).
Include a step like the following in your jobs, which sets those options via environment variables, before executing any Pants commands:
+
+```yaml
+- name: Configure Pants caching to GitHub Actions Cache
+  uses: actions/github-script@v6
+  with:
+    script: |
+      core.exportVariable('PANTS_REMOTE_STORE_ADDRESS', process.env.ACTIONS_CACHE_URL);
+      core.exportVariable('PANTS_REMOTE_OAUTH_BEARER_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN);
+```
+
+### Pants Configuration
+
+Once the GitHub values are configured, Pants will read the environment variables. You will also need to configure Pants to read and write to the cache only while in CI, such as [via a `pants.ci.toml` configuration file](../using-pants-in-ci.mdx#configuring-pants-for-ci-pantscitoml-optional):
+
+```toml
+[GLOBAL]
+# GitHub Actions cache URL and token are set via environment variables
+remote_provider = "experimental-github-actions-cache"
+remote_cache_read = true
+remote_cache_write = true
+```
+
+If desired, you can also set `remote_instance_name` to a string that's included as a prefix on each cache key, which will then be displayed in the 'Actions' > 'Caches' UI.
+
+## Local file system
+
+Pants can cache "remotely" to a local file system path. This can be used for a network-mounted cache, without having to pay the cost of storing Pants' local cache on the network mount too, and it can also be used for testing/validation.
+
+:::caution Local file system caching support is still experimental
+Support for this cache provider is still under development, with more refinement required. Please [let us know](/community/getting-help) if you use it and encounter errors or warnings.
+:::
+
+### Pants Configuration
+
+To read and write the cache to `/path/to/cache`, you will need to configure `pants.toml` as follows:
+
+```toml
+[GLOBAL]
+remote_provider = "experimental-file"
+remote_store_address = "file:///path/to/cache"
+remote_cache_read = true
+remote_cache_write = true
+```
+
+## Reference
+
+Run `pants help-advanced global` or refer to [Global options](../../../reference/global-options.mdx). Most remote execution and caching options begin with the prefix `--remote`.
diff --git a/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-execution.mdx b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-execution.mdx
new file mode 100644
index 000000000..4a8af190c
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/remote-caching-and-execution/remote-execution.mdx
@@ -0,0 +1,100 @@
+---
+    title: Remote execution
+    sidebar_position: 1
+---
+
+---
+
+:::caution Remote execution support is still experimental
+Remote execution support in Pants comes with several limitations. For example, Pants requires that the server's operating system match the client's operating system. In practice, this means that Pants must be running on Linux, because all three major server projects generally only operate on Linux.
+:::
+
+## What is remote execution?
+
+"Remote execution" allows Pants to offload execution of processes to a remote server that complies with the [Remote Execution API](https://github.com/bazelbuild/remote-apis) standard ("REAPI"). The REAPI standard is supported by several different server and client projects, including Bazel and, of course, Pants.
+
+## Setup
+
+### Server
+
+Remote execution requires the availability of a REAPI-compatible execution server. See the [REAPI server compatibility guide](./index.mdx#server-compatibility) for more information.
+ +### Pants + +After you have either set up a REAPI server or obtained access to one, the next step is to point Pants to it so that Pants may submit REAPI execution requests. The server should be running a CAS and execution service. These may be the same network endpoint, but for Pants' purposes, they are configured by different configuration options. + +For the following examples, assume that the REAPI server is running on `build.corp.example.com` at port 8980 and that it is on an internal network (and for the sake of this example is not running TLS, which will be covered later). Also, assume that the name of the REAPI instance is "main." At a minimum, you will need to configure `pants.toml` as follows: + +```toml +[GLOBAL] +remote_execution = true +remote_store_address = "grpc://build.corp.example.com:8980" +remote_execution_address = "grpc://build.corp.example.com:8980" +remote_instance_name = "main" +``` + +#### Environment-specific settings + +The REAPI execution service selects a worker for a process by consulting the "platform properties" that are passed in a remote execution request. These platform properties are key/value pairs that are configured for particular workers in the server. Generally, you will configure these in the server (or be provided them by your server's administrator), and then configure Pants to match particular workers using their relevant platform properties. + +To define platform properties (as well as to configure any other settings which are specific to running on a remote worker), you should define a remote environment. Building on the first example earlier, you would add [`remote_environment` targets](../../../reference/targets/remote_environment.mdx) (see [environment](../environments.mdx) for more information) corresponding to each set of distinct workers you want to use in the server. Assuming that the REAPI server is configured with a particular worker type labeled `docker-container=busybox:latest`, that might look like a `BUILD` file containing: + +```python +remote_environment( + name="remote_busybox", + platform="linux_x86_64", + extra_platform_properties = [ + "docker-container=busybox:latest", + ], + .. +) +``` + +Your `remote_environment` will also need to override any [environment-aware options](../environments.mdx) which configure the relevant tools used in your repository. For example: if building Python code, a Python interpreter must be available and matched via the environment-aware options of `[python-bootstrap]`. If using protobuf support, then you may also need `unzip` available in the remote execution environment in order to unpack the protoc archive. Etc. + +#### Concurrency + +Finally, you should configure Pants to limit the number of concurrent execution requests that are sent to the REAPI server. The `process_execution_remote_parallelism` option controls this concurrency. For example, if `process_execution_remote_parallelism` is set to `20`, then Pants will only send a maximum of 20 execution requests at a single moment of time. + +Note: The specific value depends on the resources available to the REAPI server. If this value is configured to a high number, then Pants will happily send that many concurrent execution requests, which could potentially overwhelm the REAPI server. 
+ +Building on the previous example, `pants.toml` would contain: + +```toml +[GLOBAL] +remote_execution = true +remote_store_address = "grpc://build.corp.example.com:8980" +remote_execution_address = "grpc://build.corp.example.com:8980" +remote_instance_name = "main" +remote_execution_extra_platform_properties = [ + "OSFamily=linux", +] +process_execution_remote_parallelism = 20 +``` + +#### TLS + +You can enable TLS by prefixing the `remote_store_address` and `remote_execution_address` with `grpcs://` instead of `grpc://`. + +Pants will automatically discover root CA certificates on your machine, but you can also configure Pants to use your preferred certificates with the `--remote-ca-certs-path` option. + +Assume that the REAPI server is running on port 443 (https/TLS) at build.example.com. Then the relevant parts of `pants.toml` would contain: + +```toml +[GLOBAL] +remote_execution = true +remote_store_address = "grpcs://build.example.com:443" +remote_execution_address = "grpcs://build.example.com:443" +remote_instance_name = "main" +# This is optional, Pants will auto-discover certificates otherwise. +remote_ca_certs_path = "/etc/ssl/certs/ca-certificates.crt" +# this allows you to setup client authentication with a certificate and key (mTLS). +remote_client_certs_path = "/etc/ssl/certs/client-cert.pem" +remote_client_key_path = "/etc/ssl/certs/client-key.pem" +``` + +## Reference + +For global options, run `pants help-advanced global` or refer to [Global options](../../../reference/global-options.mdx). Most remote execution and caching options begin with the prefix `--remote`. + +For environment-specific options, see `pants help-advanced remote_environment` or the [`remote_environment` target](../../../reference/targets/remote_environment.mdx). diff --git a/versioned_docs/version-2.24/docs/using-pants/restricted-internet-access.mdx b/versioned_docs/version-2.24/docs/using-pants/restricted-internet-access.mdx new file mode 100644 index 000000000..27e08c250 --- /dev/null +++ b/versioned_docs/version-2.24/docs/using-pants/restricted-internet-access.mdx @@ -0,0 +1,110 @@ +--- + title: Restricted Internet access + sidebar_position: 12 +--- + +How to use Pants when you have restricted access to the Internet + +--- + +Some organizations place restrictions on their users' Internet access, for security or compliance reasons. Such restrictions may prevent Pants from downloading various underlying tools it uses, and it may interfere with bootstrapping Pants itself. + +In such cases, users are typically still able to access internal proxies and servers. This page shows how to configure Pants to work smoothly in these circumstances. + +## Installing Pants + +The `pants` launcher from [Installing Pants](../getting-started/installing-pants.mdx) uses GitHub Releases to download and install a PEX including Pants and all its dependencies. + +If you cannot access GitHub directly, you will need to follow the instructions for firewalls/restricted internet access for [the launcher itself](https://github.com/pantsbuild/scie-pants). + +## Setting up a Certificate Authority + +By default, Pants will respect and pass through the `SSL_CERT_DIR` and `SSL_CERT_FILE` environment variables. 
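+
+For example, to point Pants at an internal CA bundle via those standard variables (a sketch; the bundle path here is hypothetical):
+
+```bash
+# Export the standard variable before invoking Pants; Pants will pass it
+# through to the subprocesses it runs.
+export SSL_CERT_FILE=/etc/ssl/certs/internal-ca-bundle.pem
+pants test ::
+```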
+
+If you need to override those values, you can configure Pants to use a custom Certificate Authority (CA) bundle:
+
+```toml title="pants.toml"
+[GLOBAL]
+ca_certs_path = "/path/to/certs/file"
+```
+
+## Setting `HTTP_PROXY` and `HTTPS_PROXY`
+
+You may need to set standard proxy-related environment variables, such as `http_proxy`, `https_proxy` and `all_proxy`, in executed subprocesses:
+
+```toml title="pants.toml"
+[subprocess-environment]
+env_vars.add = ["http_proxy=http://myproxy", "https_proxy"]
+```
+
+You may need to use lowercase or uppercase env var names, or both.
+
+Note that if you leave off the env var's value, as for `https_proxy` above, Pants will use the value of the same variable in the environment in which it is invoked.
+
+## Customizing tool download locations
+
+There are three types of tools that Pants may need to download and invoke:
+
+- **Python tools**: these are resolved from a package repository (PyPI by default) via requirement strings such as `mypy==0.910`.
+- **JVM tools**: these are resolved from a package repository (Maven Central by default) via coordinates such as `org.scalatest:scalatest_2.13:3.2.10`.
+- **Standalone binaries**: these are downloaded from a configured URL and verified against a SHA256 hash.
+
+If you cannot access these resources from their default locations, you can customize those locations.
+
+You can get a list of the tools Pants uses, in all three categories, with `pants help tools`.
+
+### Python tools
+
+Pants downloads the various Python-related tools it uses from [PyPI](https://pypi.org/), just as it does for your Python code's dependencies.
+
+If you use Python but cannot access PyPI directly, then you probably have an internal mirror or a custom Python package repository. So all you have to do is configure Pants to access this custom repository, and ensure that the tools it needs are available there.
+
+See [Python third-party dependencies](../python/overview/third-party-dependencies.mdx#custom-repositories) for instructions on how to set up Pants to access a custom Python package repository.
+
+### JVM tools
+
+Pants downloads the various JVM-related tools it uses from [Maven Central](https://search.maven.org/), just as it does for your JVM code's dependencies.
+
+If you use JVM code but cannot access Maven Central directly, then you probably have an internal mirror or a custom JVM package repository. So all you have to do is configure Pants to access this custom repository, and ensure that the tools it needs are available there.
+
+To do so, set the [`repos`](../../reference/subsystems/coursier.mdx#repos) option on the `[coursier]` scope. E.g.,
+
+```toml title="pants.toml"
+[coursier]
+repos = ["https://my.custom.repo/maven2"]
+```
+
+### Binary tools
+
+Pants downloads various binary tools from preset locations, and verifies them against a SHA. If you are not able to allowlist these locations, you can host the binaries yourself and instruct Pants to use the custom locations.
+
+You set these custom locations by setting the `url_template` option for the tool. In this URL template, Pants will replace `{version}` with the requested version of the tool and `{platform}` with the platform name (e.g., `linux.x86_64`).
+
+The platform name used to replace the `{platform}` placeholder can be modified using the `url_platform_mapping` option for the tool. This option maps a canonical platform name (`linux_arm64`, `linux_x86_64`, `macos_arm64`, `macos_x86_64`) to the name that should be substituted into the template.
+
+This is best understood by looking at an example:
+
+`pants help-advanced protoc` (or its [online equivalent](../../reference/subsystems/protoc.mdx#advanced-options)) shows that the default URL template is `https://github.com/protocolbuffers/protobuf/releases/download/v{version}/protoc-{version}-{platform}.zip`.
+
+- We see the `version` option is set to `3.11.4`.
+- We are running on macOS ARM, so look up `macos_arm64` in the `url_platform_mapping` option and find the string `osx-x86_64`.
+
+Thus, the final URL is:
+`https://github.com/protocolbuffers/protobuf/releases/download/v3.11.4/protoc-3.11.4-osx-x86_64.zip`.
+
+It should be clear from this example how to modify the URL template to point to your own hosted binaries:
+
+```toml title="pants.toml"
+[protoc]
+url_template = "https://my.custom.host/bin/protoc/{version}/{platform}/protoc.zip"
+```
+
+For simplicity, we used the original value for `url_platform_mapping`, meaning that we set up our hosted URL to store the macOS x86 binary at `.../osx-x86_64/protoc.zip`, for example. You can override the option `url_platform_mapping` if you want to use different values.
+
+Occasionally, new Pants releases will upgrade to new versions of these binaries, which will be mentioned in the "User API Changes" part of the changelog: [https://github.com/pantsbuild/pants/tree/main/docs/notes](https://github.com/pantsbuild/pants/tree/main/docs/notes). When upgrading to these new Pants releases, you should download the new artifact and upload a copy to your host.
+
+:::note Asking for help
+It's possible that Pants does not yet have all the mechanisms it'll need to work with your organization's specific networking setup, which we'd love to fix.
+
+Please reach out on [Slack](/community/members) or open a [GitHub issue](https://github.com/pantsbuild/pants/issues) for any help.
+:::
diff --git a/versioned_docs/version-2.24/docs/using-pants/setting-up-an-ide.mdx b/versioned_docs/version-2.24/docs/using-pants/setting-up-an-ide.mdx
new file mode 100644
index 000000000..02cf82ea5
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/setting-up-an-ide.mdx
@@ -0,0 +1,80 @@
+---
+    title: Setting up an IDE
+    sidebar_position: 7
+---
+
+---
+
+If you use a code-aware editor or IDE, such as PyCharm or VSCode, you may want to set it up to understand your code layout and dependencies. This will allow it to perform code navigation, auto-completion and other features that rely on code comprehension.
+
+## First-party sources
+
+To get your editor to understand the repo's first-party sources, you will probably need to tell it about the repo's [source roots](./key-concepts/source-roots.mdx). You can list those with:
+
+```shell
+$ pants roots
+```
+
+and then apply the corresponding IDE concept.
+
+For example, in PyCharm you would mark each source root as a "Sources" folder. See [Configuring Project Structure](https://www.jetbrains.com/help/pycharm/configuring-project-structure.html) to learn more.
+
+In VSCode, the Python extension will look for a file named `.env` in the current workspace folder. If the file is found, then it will be loaded and evaluated. For Python, this file can be used to set the `PYTHONPATH` variable. Having this file makes it possible to jump to definitions in the source code across multiple projects. It also makes cross-project refactoring possible.
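+
+As a concrete illustration, if `pants roots` reports `src/python` and `tests/python` (hypothetical paths), the resulting `.env` would contain a single line like:
+
+```
+PYTHONPATH=./src/python:./tests/python:$PYTHONPATH
+```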
+ +For Python, to generate the `.env` file containing all the source roots, you can use something like this: + +```shell +$ ROOTS=$(pants roots) +$ python3 -c "print('PYTHONPATH=./' + ':./'.join('''${ROOTS}'''.replace(' ', '\\ ').split('\n')) + ':\$PYTHONPATH')" > .env +``` + +See [Use of the PYTHONPATH variable](https://code.visualstudio.com/docs/python/environments#_use-of-the-pythonpath-variable) to learn more about using the `PYTHONPATH` variable in VSCode. + +## Python third-party dependencies and tools + +To get your editor to understand the repo's third-party Python dependencies, you will probably want to point it at a virtualenv containing those dependencies. + +Assuming you are using the ["resolves" feature for Python lockfiles](../python/overview/third-party-dependencies.mdx)—which we strongly recommend—Pants can export a virtualenv for each of your resolves. You can then point your IDE to whichever resolve you want to load at the time. + +To use the `export` goal to create a virtualenv: + +``` +❯ pants export --py-resolve-format=symlinked_immutable_virtualenv --resolve=python-default +Wrote symlink to immutable virtualenv for python-default (using Python 3.9.13) to dist/export/python/virtualenvs/python-default +``` + +You can specify the `--resolve` flag [multiple times](./key-concepts/options.mdx#list-values) to export multiple virtualenvs at once. + +The `--py-resolve-format=symlinked_immutable_virtualenv` option symlinks to an immutable, internal virtualenv that does not have `pip` installed in it. This method is faster, but you must be careful not to attempt to modify the virtualenv. If you omit this flag, Pants will create a standalone, mutable virtualenv that includes `pip`, and that you can modify, but this method is slower. + +### Tool virtualenvs + +`pants export` can also create a virtualenv for each of the Python tools you use via Pants, such as `black`, `isort`, `pytest`, `mypy`, `flake8` and so on. This allows you to configure your editor to use the same version of the tool as Pants does for workflows like formatting on save. To use a custom version of these tools, follow [the instructions for creating a tool lockfile](../python/overview/lockfiles#lockfiles-for-tools). + +### Binary tools + +`pants export` can export many tools fetched by Pants. For example, `pants export --bin=taplo`. + +## Generated code + +If you're using [Protobuf and gRPC](../python/integrations/protobuf-and-grpc.mdx), you may want your editor to be able to index and navigate the generated source code. + +Normally Pants treats generated code as an internal byproduct, and doesn't expose it. But you can run the `export-codegen` goal to generate code to a well-known output location for consumption: + +```shell +$ pants export-codegen :: +``` + +The generated code will be written to `dist/codegen`, and you can now add them as sources in the IDE. For example, in PyCharm you would mark `dist/codegen` as a "Sources" folder. + +Warning: you will have to manually rerun this goal when changes are made. + +## Remote debugging + +You can use PyCharm to debug code running under Pants. + +See the following links for instructions on how to do so under the [test goal](../python/goals/test.mdx) and under the [run goal](../python/goals/run.mdx). + +## IDE integrations + +We have not yet developed tight IDE integrations, such as a PyCharm plugin or a VSCode extension, that would allow the IDE to run Pants on your behalf. 
If you're interested in developing this functionality for your favorite IDE, [let us know](/community/members)!
diff --git a/versioned_docs/version-2.24/docs/using-pants/troubleshooting-common-issues.mdx b/versioned_docs/version-2.24/docs/using-pants/troubleshooting-common-issues.mdx
new file mode 100644
index 000000000..6f9406a37
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/troubleshooting-common-issues.mdx
@@ -0,0 +1,257 @@
+---
+    title: Troubleshooting / common issues
+    sidebar_position: 2
+---
+
+Frequently asked questions (FAQs) and known issues you may encounter.
+
+---
+
+:::tip We love giving help!
+See [Getting Help](/community/getting-help). We would love to help!
+
+If you are confused by something, likely someone else will run into the same issue. It is helpful for us to know what is going wrong so that we can improve Pants and improve this documentation.
+:::
+
+## Debug tip: enable stack traces and increase logging
+
+Pants defaults to not displaying the full stack trace when it encounters an error. Pants also defaults to logging at the info level.
+
+When you encounter an exception, it can help to use the global options `--print-stacktrace` and `-ldebug`, like this:
+
+```bash
+pants --print-stacktrace -ldebug
+```
+
+Setting the option `--pex-verbosity=9` can help debug exceptions that occur when building .pex files.
+
+Once you have this stack trace, we recommend copying it into Pastebin or a GitHub Gist, then opening a GitHub issue or posting on Slack. Someone from the Pants team would be happy to help. See [Getting Help](/community/getting-help).
+
+## Debug tip: inspect the sandbox with `--keep-sandboxes`
+
+Pants runs most processes in a hermetic sandbox (temporary directory), which allows for safely caching and running multiple processes in parallel.
+
+Use the option `--keep-sandboxes=always` for Pants to log the paths to these sandboxes, and to keep them around after the run. You can then inspect them to check if the files you are expecting are present.
+
+```bash
+pants --keep-sandboxes=always lint src/project/app.py
+...
+21:26:13.55 [INFO] preserving local process execution dir `"/private/var/folders/hm/qjjq4w3n0fsb07kp5bxbn8rw0000gn/T/process-executionQgIOjb"` for "Run isort on 1 file."
+...
+```
+
+You can also pass `--keep-sandboxes=on_failure`, to preserve only the sandboxes of failing processes.
+
+There is even a `__run.sh` script in the directory that will run the process using the same argv and environment variables that Pants would use.
+
+## Cache or pantsd invalidation issues
+
+If you are using the latest stable version of Pants and still experience a cache invalidation issue: we are sorry for the trouble. We have not yet added a comprehensive goal to "clear all caches", because we are very interested in coming up with coherent solutions to potential issues. If you experience a cache issue, please absolutely [file a bug](https://github.com/pantsbuild/pants/issues/new) before proceeding to the following steps.
+
+To start with, try using `--no-pantsd`. If `--no-pantsd` worked, you can stop pantsd so that it will be restarted on the next invocation of Pants. To do so, run `rm -r .pants.d/pids/` in the build root.
+
+If this resolves the issue, please report that on the ticket and attach the recent content of the `.pants.d/workdir/pantsd/pantsd.log` file.
+
+If restarting pantsd is not sufficient, you can also use `--no-local-cache` to ignore the persistent caches.
If this resolves the issue, then it is possible that the contents of the cache (at `~/.cache/pants`) will be useful for debugging the ticket that you filed: please try to preserve the cache contents until it can be resolved.
+
+## Pants cannot find a file in your project
+
+Pants may complain that it cannot find a file or directory, even though the file does indeed exist.
+
+This error generally happens because of the option `pants_ignore` in the `[GLOBAL]` scope, but you should also check for case-mismatches in filenames ("3rdparty" vs "3rdParty"). By default, Pants will read your top-level `.gitignore` file to populate `pants_ignore`, along with ignoring `dist/` and any top-level files/directories starting with `.`.
+
+To override something included in your `.gitignore`, add a new value to `pants_ignore` and prefix it with `!`, like the below. `pants_ignore` uses the [same syntax as gitignore](https://git-scm.com/docs/gitignore).
+
+```toml title="pants.toml"
+[GLOBAL]
+pants_ignore.add = ["!folder/"]
+```
+
+Alternatively, you can stop populating `pants_ignore` from your `.gitignore` by setting `pants_ignore_use_gitignore = false` in the `[GLOBAL]` scope.
+
+## Pants cannot find a required binary
+
+Pants may be unable to locate a required binary to execute a specified goal. This can manifest in an error message such as:
+
+```
+BinaryNotFoundError: Cannot find `required_binary` on `['/usr/bin/', '/bin', '/usr/local/bin', '/opt/homebrew/bin']`. Please ensure that it is installed so that Pants can interact with the required_binary.
+```
+
+Helpfully, the error message already contains the list of directories that Pants searches for binaries. This list is controlled by the [`system_binary_paths`](../../reference/subsystems/system-binaries#system_binary_paths) setting in the system-binaries subsystem. Ensure that the directory that contains your binary is on the list.
+
+If `system_binary_paths` contains the special string `<PATH>`, note that the `PATH` variable can also be changed after the invocation of Pants in [`.pants.bootstrap`](../../docs/using-pants/key-concepts/options#pantsbootstrap-file).
+
+Pantsd caches the location of binaries and also negative hits. If you recently installed the binary or changed its location, make sure to stop pantsd so that it will be restarted on the next invocation of Pants. To do so, run `rm -r .pants.d/pids/` in the build root.
+
+## Import errors and missing dependencies
+
+Because Pants runs processes in hermetic sandboxes (temporary directories), Pants must properly know about your [dependencies](../introduction/how-does-pants-work#dependency-inference) to avoid import errors.
+
+Usually, you do not need to tell Pants about your dependencies thanks to dependency inference, but sometimes dependency inference is not set up properly or cannot work.
+
+To see what dependencies Pants knows about, run `pants dependencies path/to/file.ext` and `pants dependencies --transitive`.
+
+Is the missing import from a third-party dependency? Common issues:
+
+- Pants does not know about your third-party requirements, e.g. due to missing `python_requirements` and `go_mod` target generators.
+  - To see all third-party requirement targets Pants knows, run `pants --filter-target-type=$tgt list ::`, where Python: `python_requirement`, Go: `go_third_party_package`, and JVM: `jvm_artifact`.
+  - Run `pants tailor ::`, or manually add the relevant targets.
+- The dependency is missing from your third-party requirements list, e.g. `go.mod` or `requirements.txt`.
+- The dependency exposes a module different than the default Pants uses, e.g. Python's `ansicolors` exposing `colors`. + - [Python](../python/overview/third-party-dependencies.mdx): set the `modules` field and `module_mapping` fields. + - [JVM](../../reference/targets/jvm_artifact.mdx): set the `packages` field on `jvm_artifact` targets. +- Python: check for any [undeclared transitive dependencies](../python/overview/third-party-dependencies.mdx#advanced-usage). + +Is the missing import from first-party code? Common issues: + +- The file does not exist. + - Or, it's ignored by Pants. See the above guide "Pants cannot find a file in your project". +- The file is missing an owning target like `python_sources`, `go_package`, or `resources`. + - Run `pants list path/to/file.ext` to see all owning targets. + - Try running `pants tailor ::`. Warning: some target types like [`resources` and `files`](./assets-and-archives.mdx) must be manually added. +- [Source roots](./key-concepts/source-roots.mdx) are not set up properly (Python and JVM only). + - This allows converting file paths like `src/py/project/app.py` to the Python module `project.app`. +- Code generation such as [Protobuf](../python/integrations/protobuf-and-grpc.mdx) is not set up properly (Python and JVM only). + - Generate missing targets so that produced modules could be found. If there are any Python files that are known to be created ad hoc only at runtime, you might consider using `.pyi` stub files for the modules to be discovered during dependency inference. + +Common issues with both first and third-party imports: + +- Ambiguity. >1 target exposes the same module/package. + - If it's a third-party dependency, you should likely use multiple "resolves" (lockfiles). Each resolve should have no more than one of the same requirement. See [Python](../python/overview/lockfiles.mdx#multiple-lockfiles) and [JVM](../jvm/java-and-scala.mdx). + - If it's a first-party dependency, you may have unintentionally created multiple targets owning the same file. Run `pants list path/to/file.ext` to see all owners. This often happens from overlapping `sources` fields. If this was intentional, follow the instructions in the ambiguity warning to disambiguate via the `dependencies` field. +- Some target types like `resources` and `files` often need to be explicitly added to the `dependencies` field and cannot be inferred (yet). +- Multiple resolves (Python and JVM). + - A target can only depend on targets that share the same "resolve" (lockfile). + - Pants will warn when it detects that the import exists in another resolve. This usually implies you should either change the current target's `resolve` field, or use the `parametrize()` mechanism so that the code works with multiple resolves. + - See [Python](../python/overview/lockfiles.mdx#multiple-lockfiles) and [JVM](../jvm/java-and-scala.mdx). + +When debugging dependency inference, it can help to explicitly add the problematic dependency to the `dependencies` field to see if it gets the code running. If so, you can then try to figure out why dependency inference is not working. + +## "Out of space" error: set an alternative tmpdir + +It may be necessary to explicitly set the directory Pants uses as a temporary directory. For example, if the system default temporary directory is a small partition, you may exhaust that temp space. + +Use the global option `local_execution_root_dir` to change the tmpdir used by Pants. 
+
+```toml title="pants.toml"
+[GLOBAL]
+local_execution_root_dir = "/mnt/large-partition/tmpdir"
+```
+
+## "No space left on device" error while watching files
+
+On Linux, Pants uses `inotify` to watch all files and directories related to any particular build. Some systems have limits configured for the maximum number of files watched. To adjust the limit on file watches, you can run:
+
+```shell
+echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p
+```
+
+## How to change your cache directory
+
+You may change any of these options in the `[GLOBAL]` section of your `pants.toml`:
+
+| Option | What it does | Default |
+| :----- | :----------- | :------ |
+| `local_store_dir` | Stores the results of running subprocesses and of some file operations. | `~/.cache/pants/lmdb_store` |
+| `named_caches_dir` | Stores the caches for certain tools used by Pants, like PEX's cache for resolving Python requirements. | `~/.cache/pants/named_caches` |
+| `pants_workdir` | Stores some project-specific logs; used as a temporary directory when running `pants repl` and `pants run`. This is not used for caching. This must be relative to the build root. | `<buildroot>/.pants.d/workdir` |
+| `pants_distdir` | Where Pants writes artifacts to, such as the result of `pants package`. This is not used for caching; you can delete this folder and still leverage the cache from `local_store_dir`. This must be relative to the build root. | `<buildroot>/dist/` |
+
+For `local_store_dir` and `named_caches_dir`, you may specify either an absolute path or a relative path, which will be relative to the build root. You may use the special string `%(homedir)s` to get the value of `~`, e.g. `local_store_dir = "%(homedir)s/.custom_cache/pants/lmdb_store"`.
+
+It is safe to delete these folders to free up space.
+
+You can also change the cache used by the `pants` script described in [Installing Pants](../getting-started/installing-pants.mdx), which defaults to `~/.pants/cache/setup`. Either set the environment variable `PANTS_SETUP_CACHE` or change the Bash script directly where it defines `PANTS_SETUP_CACHE`. You may use an absolute path or a path relative to the build root.
+
+## BadZipFile error when processing Python wheels
+
+This can happen if your temporary directory (`/tmp/` by default) is not on the same filesystem as `~/.cache/pants/named_caches`, and is caused by the fact that `pip` is not concurrency-safe when moving files across filesystems.
+
+The solution is to move `~/.cache/pants`, or at least the `named_caches_dir` (see [above](#how-to-change-your-cache-directory)), to the same filesystem as the temporary directory, or vice versa.
+
+## Issues packaging AWS CDK into a PEX
+
+If you get errors like `ModuleNotFoundError: No module named 'aws_cdk.asset_awscli_v1'`, set `execution_mode="venv"` and `venv_site_packages_copies=True` on your `pex_binary` target.
+
+This ensures that the `aws_cdk` subpackages are properly nested under the parent package, despite those distributions not being configured as [namespace packages](https://packaging.python.org/en/latest/guides/packaging-namespace-packages/).
+
+## "Double requirement given" error when resolving Python requirements
+
+This is an error from `pip`, and it means that the same third-party Python requirement, with different version constraints, appears in your dependencies.
+
+You can use `pants peek` to help identify why the same requirement is being used more than once:
+
+```shell title="Shell"
+# Check the `requirements` key to see if it has the problematic requirement.
+pants --filter-target-type=python_requirement peek ::
+```
+
+## macOS users: issues with system Python interpreters
+
+The macOS system Python interpreters are broken in several ways, such as sometimes resulting in:
+
+```
+ERROR: Could not install packages due to an EnvironmentError: [Errno 13] Permission denied: '/Library/Python/3.7'
+```
+
+You can set the option `interpreter_search_paths` in the `[python]` scope to teach Pants to ignore the interpreters in `/usr/bin`. See [here](../python/overview/interpreter-compatibility.mdx#changing-the-interpreter-search-path) for more information.
+
+## "Too many open files" error
+
+You may encounter this error when running Pants:
+
+```
+pants count-loc helloworld/greet/f.py
+
+ERROR: Could not initialize store for process cache: "Error making env for store at \"/Users/pantsbuild/.cache/pants/lmdb_store/processes/2\": Too many open files"
+
+(Use --print-exception-stacktrace to see more error details.)
+```
+
+This sometimes happens because Pants uses lots of file handles to read and write to its cache at `~/.cache/pants/lmdb_store`; often, this is more than your system's default.
+
+This can be fixed by setting `ulimit -n 10000`. (10,000 should work in all cases, but feel free to lower or increase this number as desired.)
+
+:::note Tip: permanently configuring `ulimit -n`
+We recommend permanently setting this by either:
+
+1. Adding `ulimit -n 10000` to your [`.pants.bootstrap`](../using-pants/key-concepts/options.mdx#pantsbootstrap-file) script.
+2. Adding `ulimit -n 10000` to your global `.bashrc` or equivalent.
+
+The first approach has the benefit that it will be checked into version control, so every developer at your organization can use the same setting.
+:::
+
+:::caution macOS users: avoid `ulimit unlimited`
+Contrary to the name, this will not fix the issue. You must use `ulimit -n` instead.
+:::
+
+## Controlling (test) parallelism
+
+When adopting Pants for your tests, you may find that they have issues with being run in parallel, particularly if they are integration tests that use a shared resource such as a database.
+
+To temporarily run a single test at a time (albeit with reduced performance), you can reduce the parallelism globally:
+
+```
+pants --process-execution-local-parallelism=1 test ::
+```
+
+A more sustainable solution for shared resources is to use the [`[pytest].execution_slot_var`](../../reference/subsystems/pytest.mdx#execution_slot_var) option, which sets an environment variable that test runs can consume to determine which copy of a resource to use.
+
+## Snap-based Docker
+
+In recent Ubuntu distributions, the Docker service is often installed using [Snap](https://snapcraft.io/docker).
+While it works mostly the same as a normal installation, there is an important difference: it cannot access the `/tmp` directory of the host, because that directory is virtualized when Snap starts the Docker service.
+
+This may cause problems if your code or tests try to create a container with a bind-mount of a directory or file _under the current working directory_. Container creation will fail with `invalid mount config for type "bind": bind source path does not exist`, because Pants's default `local_execution_root_dir` option is `/tmp`, which the Snap-based Docker service cannot access.
+
+You can work around this issue by explicitly setting `[GLOBAL].local_execution_root_dir` to a directory outside the system `/tmp` directory, such as `"%(buildroot)s/tmp"`.
+
+## Using Pants on a self-hosted GitHub Actions runner
+
+Setting up Pants to run with Python executables provided by [setup-python](https://github.com/marketplace/actions/setup-python) will not work on a vanilla Actions runner setup. This is due to a [known limitation](https://github.com/pantsbuild/pants/issues/16565) of Pants: it does not allow leaking arbitrary environment variables (in this case, `LD_LIBRARY_PATH`) when evaluating dependency inference rules. If you hit this situation, you will see an error complaining about missing shared object files, like this:
+
+```
+/home/ubuntu/.cache/python-tools/Python/3.11.3/x64/bin/python3.11: error while loading shared libraries: libpython3.11.so.1.0: cannot open shared object file: No such file or directory
+```
+
+One workaround is to set up the Python tool cache files in the `/opt/hostedtoolcache` directory. This is the default path that the `setup-python` action uses to download relevant files on hosted GitHub Actions runners. You can override the tool cache download directory by following the [setup-python documentation](https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#linux).
diff --git a/versioned_docs/version-2.24/docs/using-pants/using-pants-in-ci.mdx b/versioned_docs/version-2.24/docs/using-pants/using-pants-in-ci.mdx
new file mode 100644
index 000000000..65545d283
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/using-pants-in-ci.mdx
@@ -0,0 +1,249 @@
+---
+ title: Using Pants in CI
+ sidebar_position: 6
+---
+
+Suggestions for how to use Pants to speed up your CI (continuous integration).
+
+---
+
+:::note Examples
+See the example-python repository for an [example GitHub Actions workflow](https://github.com/pantsbuild/example-python/blob/main/.github/workflows/pants.yaml).
+:::
+
+## Directories to cache
+
+:::note The `init-pants` GitHub Action
+If you're using GitHub Actions to run your CI workflows, then you can use our [standard action](https://github.com/pantsbuild/actions/tree/main/init-pants) to set up and cache the Pants bootstrap state. Otherwise, read on to learn how to configure this manually.
+:::
+
+In your CI's config file, we recommend caching these directories:
+
+- `$HOME/.cache/nce` (Linux) or `$HOME/Library/Caches/nce` (macOS)
+ This is the cache directory used by the [Pants launcher binary](../getting-started/installing-pants.mdx) to cache the assets, interpreters and venvs required to run Pants itself. Cache this against the Pants version, as specified in `pants.toml`. See the [pantsbuild/example-python](https://github.com/pantsbuild/example-python/blob/main/.github/workflows/pants.yaml) repo for an example of how to generate an effective cache key for this directory in GitHub Actions. +- `$HOME/.cache/pants/named_caches`
+  Caches used by some underlying tools. Cache this against the inputs to those tools. For the `pants.backend.python` backend, named caches are used by PEX, and therefore its inputs are your lockfiles. Again, see [pantsbuild/example-python](https://github.com/pantsbuild/example-python/blob/main/.github/workflows/pants.yaml) for an example.
+
+If you're not using a fine-grained [remote caching](./remote-caching-and-execution/index.mdx) service, then you may also want to preserve the local Pants cache at `$HOME/.cache/pants/lmdb_store`. This has to be invalidated whenever any file that can affect any process changes, e.g., using `hashFiles('**/*')` on GitHub Actions.
+
+Computing such a coarse hash, and saving and restoring large directories, can be unwieldy. So this may be impractical and slow on medium and large repos.
+
+A [remote cache service](./remote-caching-and-execution/index.mdx) integrates with Pants's fine-grained invalidation and avoids these problems, and is recommended for the best CI performance.
+
+See [Troubleshooting](./troubleshooting-common-issues.mdx#how-to-change-your-cache-directory) for how to change these cache locations.
+
+:::note Nuking the cache when too big
+In CI, the cache must be uploaded and downloaded every run. This takes time, so there is a tradeoff where too large a cache will slow down your CI.
+
+You can use this script to nuke the cache when it gets too big:
+
+```bash
+function nuke_if_too_big() {
+  path=$1
+  limit_mb=$2
+  size_mb=$(du -m -d0 "${path}" | cut -f 1)
+  if (( size_mb > limit_mb )); then
+    echo "${path} is too large (${size_mb}mb), nuking it."
+    nuke_prefix="$(dirname "${path}")/$(basename "${path}").nuke"
+    nuke_path=$(mktemp -d "${nuke_prefix}.XXXXXX")
+    mv "${path}" "${nuke_path}/"
+    # The glob must be outside the quotes so that the shell expands it.
+    rm -rf "${nuke_prefix}".*
+  fi
+}
+
+nuke_if_too_big ~/.cache/nce 512
+nuke_if_too_big ~/.cache/pants/named_caches 1024
+```
+
+:::
+
+:::note Tip: check cache performance with `[stats].log`
+Set the option `[stats].log = true` in `pants.ci.toml` for Pants to print metrics of your cache's performance at the end of the run, including the number of cache hits and the total time saved thanks to caching, e.g.:
+
+```
+  local_cache_requests: 204
+  local_cache_requests_cached: 182
+  local_cache_requests_uncached: 22
+  local_cache_total_time_saved_ms: 307200
+```
+
+You can also add `plugins = ["hdrhistogram"]` to the `[GLOBAL]` section of `pants.ci.toml` for Pants to print histograms of cache performance, e.g. the size of blobs cached.
+:::
+
+:::tip Remote caching
+Rather than storing your cache with your CI provider, remote caching stores the cache in the cloud, using gRPC and the open-source Remote Execution API for low-latency and fine-grained caching.
+
+This brings several benefits over local caching:
+
+- All machines and CI jobs share the same cache.
+- Remote caching downloads precisely what is needed by your run, when it's needed, rather than pessimistically downloading the entire cache at the start of the run.
+  - No download and upload stage for your cache.
+  - No need to "nuke" your cache when it gets too big.
+
+See [Remote Caching and Execution](./remote-caching-and-execution/index.mdx) for more information.
+:::
+
+## Recommended commands
+
+:::caution Autofixing goals
+The goals `fmt` and `fix` will attempt to automatically correct your code and then return zero if they were able to do so. This generally counts as "success" for most CI systems. In contrast, the `lint` goal will not modify code, and will instead exit with a non-zero status if any tool detects a problem.
+In other words, the `lint` goal is the "checking" version of `fmt`/`fix`. Prefer `lint` if you want your CI system to return job failures to enforce linting and formatting rules.
+:::
+
+With both approaches, you may want to shard the input targets into multiple CI jobs for increased parallelism. See [Advanced Target Selection](./advanced-target-selection.mdx#sharding-the-input-targets). (This is typically less necessary when using [remote caching](./remote-caching-and-execution/index.mdx).)
+
+### Approach #1: only run over changed files
+
+Because Pants understands the dependencies of your code, you can use Pants to speed up your CI by only running tests and linters over files that actually made changes.
+
+We recommend running these commands in CI:
+
+```shell
+❯ pants --version  # Bootstrap Pants.
+❯ pants \
+  --changed-since=origin/main \
+  tailor --check \
+  update-build-files --check \
+  lint
+❯ pants \
+  --changed-since=origin/main \
+  --changed-dependents=transitive \
+  check test
+```
+
+Because most linters do not care about a target's dependencies, we lint all changed files and targets, but not any dependents of those changes.
+
+Meanwhile, tests should be rerun when any changes are made to the tests _or_ to dependencies of those tests, so we use the option `--changed-dependents=transitive`. `check` should also run on any transitive changes.
+
+See [Advanced target selection](./advanced-target-selection.mdx) for more information on `--changed-since` and alternative techniques to select targets to run in CI.
+
+:::caution This will not handle all cases, like hooking up a new linter
+For example, if you add a new plugin to Flake8, Pants will still only run over changed files, meaning you may miss some new lint issues.
+
+For absolute correctness, you may want to use Approach #2. Alternatively, add conditional logic to your CI, e.g. so that any changes to `pants.toml` trigger using Approach #2.
+:::
+
+:::note GitHub Actions: use `Checkout`
+To use `--changed-since`, you may want to use the [Checkout action](https://github.com/actions/checkout).
+
+By default, Checkout will only fetch the latest commit; you likely want to set `fetch-depth` to fetch prior commits.
+:::
+
+:::note GitLab CI: disable shallow clones or fetch the main branch
+GitLab's merge pipelines make a shallow clone by default, which only contains recent commits for the feature branch being merged. That severely limits `--changed-since`. There are two possible workarounds:
+
+1. Clone the entire repository by going to the "CI / CD" settings and erasing the number from the "Git shallow clone" field of the "General pipelines" section. Don't forget to "Save changes". This has the advantage of cloning everything, which is also its biggest long-term disadvantage.
+2. A more targeted, and hence lightweight, intervention leaves the shallow clone setting at its default value and instead fetches the `main` branch as well:
+
+```
+git branch -a
+git remote set-branches origin main
+git fetch --depth 1 origin main
+git branch -a
+```
+
+The `git branch` commands are only included to print out all available branches before and after fetching `origin/main`.
+
+:::
+
+:::note Using partial clones in CI
+Shallow clones are fast, but have the disadvantage of breaking `--changed-since` if insufficient depth is fetched from the remote. This is particularly acute for feature branches that are very out-of-date or have a large number of commits.
+
+[Partial clones](https://git-scm.com/docs/partial-clone) are still quite fast, have the advantage of not breaking `--changed-since`, and don't require any depth setting. Unlike shallow clones, Git will fetch trees and blobs on demand as it needs them, without failing.
+
+If your CI does not support partial clones directly, you can define your own custom checkout strategy:
+
+- Treeless: `git clone --filter=tree:0 <url>`
+- Blobless: `git clone --filter=blob:none <url>`
+
+As a workaround to [#20027](https://github.com/pantsbuild/pants/issues/20027) permission errors, you might need to run this after cloning the repo:
+
+`git config core.sshCommand "env SSH_AUTH_SOCK=$SSH_AUTH_SOCK ssh"`
+:::
+
+### Approach #2: run over everything
+
+Alternatively, you can simply run over all your code. Pants's caching means that you will not need to rerun on unchanged files.
+
+```bash
+❯ pants --version  # Bootstrap Pants.
+❯ pants \
+  tailor --check \
+  update-build-files --check \
+  lint check test ::
+```
+
+However, when the cache gets too big, it should be nuked (see "Directories to cache"), so your CI may end up doing more work than Approach #1.
+
+This approach works particularly well if you are using [remote caching](./remote-caching-and-execution/remote-caching.mdx).
+
+## Configuring Pants for CI: `pants.ci.toml` (optional)
+
+Sometimes, you may want config specific to your CI, such as turning on test coverage reports. If you want CI-specific config, create a dedicated `pants.ci.toml` [config file](./key-concepts/options.mdx). For example:
+
+```toml title="pants.ci.toml"
+[GLOBAL]
+# Colors often work in CI, but the shell is usually not a TTY so Pants
+# doesn't attempt to use them by default.
+colors = true
+
+[stats]
+log = true
+
+[test]
+use_coverage = true
+
+[coverage-py]
+report = ["xml"]
+global_report = true
+
+[pytest]
+args = ["-vv", "--no-header"]
+```
+
+Then, in your CI script or config, set the environment variable `PANTS_CONFIG_FILES=pants.ci.toml` to use this new config file, in addition to `pants.toml`.
+
+### Tuning resource consumption (advanced)
+
+Pants allows you to control its resource consumption. These options all have sensible defaults. In most cases, there is no need to change them. However, you may benefit from tuning these options.
+
+Concurrency options:
+
+- [`process_execution_local_parallelism`](../../reference/global-options.mdx#process_execution_local_parallelism): number of concurrent processes that may be executed locally.
+- [`rule_threads_core`](../../reference/global-options.mdx#rule_threads_core): number of threads to keep active to execute `@rule` logic.
+- [`rule_threads_max`](../../reference/global-options.mdx#rule_threads_max): maximum number of threads to use to execute `@rule` logic.
+
+Memory usage options:
+
+- [`pantsd`](../../reference/global-options.mdx#pantsd): enable or disable the Pants daemon, which uses an in-memory cache to speed up subsequent runs after the first run in CI.
+- [`pantsd_max_memory_usage`](../../reference/global-options.mdx#pantsd_max_memory_usage): reduce or increase the size of Pantsd's in-memory cache.
+
+The default test runners for these CI providers have the following resources. If you are using a custom runner, e.g. enterprise, check with your CI provider.
+
+| CI Provider | Cores | RAM | Docs |
+| :---------- | :---- | :-- | :--- |
+| GitHub Actions, Linux | 2 | 7 GB | [link](https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources) |
+| Travis, Linux | 2 | 7.5 GB | [link](https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system) |
+| Circle CI, Linux, free plan | 2 | 4 GB | [link](https://circleci.com/docs/2.0/credits/#free-plan) |
+| GitLab, Linux shared runners | 1 | 3.75 GB | [link](https://docs.gitlab.com/ee/user/gitlab_com/#linux-shared-runners) |
+
+## Tip: automatically retry failed tests
+
+Pants can automatically retry failed tests. This can help keep your builds passing even with flaky tests, like integration tests.
+
+```toml
+[test]
+attempts_default = 3
+```
+
+## Tip: store Pants logs as artifacts
+
+We recommend that you configure your CI system to store the Pants log (`.pants.d/workdir/pants.log`) as a build artifact, so that it is available in case you need to troubleshoot CI issues.
+
+Different CI providers and systems have different ways to configure build artifacts:
+
+- Circle CI - [Storing artifacts](https://circleci.com/docs/2.0/artifacts/)
+- GitHub Actions - [Storing Artifacts](https://docs.github.com/en/actions/guides/storing-workflow-data-as-artifacts) - [example in the pants repo](https://github.com/pantsbuild/pants/pull/11860)
+- Bitbucket pipelines - [Using artifacts](https://support.atlassian.com/bitbucket-cloud/docs/use-artifacts-in-steps/)
+- Jenkins - [Recording artifacts](https://www.jenkins.io/doc/pipeline/tour/tests-and-artifacts/)
+
+It's particularly useful to configure your CI to always upload the log, even if prior steps in your pipeline failed.
diff --git a/versioned_docs/version-2.24/docs/using-pants/validating-dependencies.mdx b/versioned_docs/version-2.24/docs/using-pants/validating-dependencies.mdx
new file mode 100644
index 000000000..f416ef5bd
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/using-pants/validating-dependencies.mdx
@@ -0,0 +1,423 @@
+---
+ title: Validating dependencies
+ sidebar_position: 13
+---
+
+Validating your code's dependencies.
+
+---
+
+Visibility rules are the mechanism for controlling who may depend on whom. They are an implementation of Pants's dependency rules API. With these rules, a dependency between two files (or targets) may be allowed or denied, for entire directory trees down to single files. A target may be selected not only by its file path but also by target type, name, and tags.
+
+NB: Visibility rules are applied at a low level, so they can inspect metadata provided directly on a target and via `__defaults__`, but anything applied via `overrides` is opaque to the visibility rules. For example, a rule that selects tagged targets can find tags provided directly on the target using the `tags` field (like `target(tags=[...])`) or via `__defaults__` (like `__defaults__({target: dict(tags=[...])})`), but any tags in `overrides` (like `target(overrides={"foo": dict(tags=[...])})`) cannot be inspected by the visibility rules (see the sketch at the end of this introduction).
+
+To jump right in, start with [enabling the backend](./validating-dependencies.mdx#enable-visibility-backend) and add some rules to your BUILD files.
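+
+As a footnote to the note above about `__defaults__` vs. `overrides`, here is a small, illustrative sketch (the target names and files are hypothetical) of which tag declarations visibility rules can and cannot see:
+
+```python
+# BUILD -- illustrative sketch only
+
+# Visible to visibility rules: tags set directly on a target...
+python_sources(name="app", tags=["apps"])
+
+# ...and tags set via __defaults__:
+__defaults__({python_sources: dict(tags=["apps"])})
+
+# NOT visible to visibility rules: tags set via `overrides`:
+python_sources(
+    name="lib",
+    overrides={"special.py": {"tags": ["special"]}},
+)
+```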
+
+## Example visibility rules
+
+This example gives a quick introduction to what visibility rules look like; in the rest of this chapter we will break down each part of the example, going over what it does and how it works.
+
+```python
+# example/BUILD
+
+__dependencies_rules__(
+    (
+        {"type": python_sources},
+        "src/a/**",
+        "src/b/lib.py",
+        "!*",
+    ),
+
+    ("*", "*"),
+)
+
+__dependents_rules__(
+    (
+        (
+            {"type": "python_*", "tags": ["any-python"]},
+            {"type": "*", "tags": ["libs"]},
+            {"path": "special-cased.py"}
+        ),
+        (
+            (
+                "!tests/**",
+                "!src/*/*/**",
+            ),
+            (
+                "*",
+            ),
+        ),
+    ),
+
+    ("*", "*"),
+)
+```
+
+First things first, it is a good idea to get familiar with the terminology used here for [dependencies and dependents](./validating-dependencies.mdx#dependencies-and-dependents). The syntax for `__dependencies_rules__` and `__dependents_rules__` is the same. Both directives accept any number of input [rule sets](./validating-dependencies.mdx#rule-sets). In the above example there are two _rule sets_ in each directive; both end with the generic "allow everything" rule set of `("*", "*")`.
+
+The dependencies rules above read: all `python_sources` targets may depend on anything from the subtree rooted in `src/a/` and on the source file `src/b/lib.py`, and nothing else. For all other targets, everything is allowed.
+
+The dependents rules above read: any target whose type begins with `python_` and that has the tag `any-python`, OR any target that has the tag `libs`, OR the source file `special-cased.py`, may _not_ be used by anything from the subtrees rooted in `tests/` or `src/*/*/`; anything else is allowed. For everything else, anything is allowed.
+
+Note that when there are both dependencies rules _and_ dependents rules in play for a dependency, both have to allow the dependency for it to be valid.
+
+## Enable visibility backend
+
+To use the visibility feature, enable the `pants.backend.experimental.visibility` backend by adding it to the list of `backend_packages` in the `[GLOBAL]` section of your `pants.toml` file.
+
+```toml
+[GLOBAL]
+backend_packages.add = [
+  ...
+  "pants.backend.experimental.visibility",
+]
+```
+
+:::note The visibility implementation is marked "experimental"
+This does not mean you should not use it, only that it is in "preview" mode, meaning that things may change between releases without following our deprecation policy as we work on stabilizing this feature. See [the stabilization ticket](https://github.com/pantsbuild/pants/issues/17634) for what remains to be done for the visibility backend to graduate out of preview.
+:::
+
+## Dependencies and dependents
+
+The visibility rules operate on the dependency link between two targets. Dependencies are directional: if target `A` depends on another target `B`, the dependency goes from the "origin" target `A` -> `B`. We say that `B` is the **dependency** of `A`, while `A` is the **dependent** of `B`.
+
+:::note The Direction of Dependency, `A` -> `B`.
+Target `A` may have zero or more dependencies. For each of those dependencies, `A` is their dependent.
+
+Target `B` may be the dependency of zero or more dependents. For each of those dependents, `B` is their dependency.
+:::
+
+Dependency rules are configured in BUILD files along with targets and any other BUILD file configuration. Rules may be provided on either end of a dependency link between two targets. There are two different keywords to use, one for each side of this link.
+As discussed above, any target may have both dependencies and dependents, and these keywords map onto that:
+
+- `__dependencies_rules__` declares the rules that apply to a target's dependencies.
+- `__dependents_rules__` declares the rules that apply to a target's dependents.
+
+  `A` `__dependencies_rules__` -> `__dependents_rules__` `B` `__dependencies_rules__` -> ...
+
+It may help to think about these terms in the context of a sentence. For `__dependencies_rules__` it is "this X may only import from \<...\>", and for `__dependents_rules__` it is "this X may only be imported from \<...\>".
+
+For each dependency there may be up to 2 sets of rules consulted to determine if it should be `allowed` or `denied` (or just `warn`, see [Rule Actions](#rule-actions)), one for each end of the dependency link. The rules themselves are merely [path globs](#glob-syntax) applied in order until a matching rule is found. It is an error for there not to be any matching rule, if any rules are defined. That is, you may have a dependency without any rules, and that will be allowed; but as soon as there are rules in play, there must exist at least one rule that matches the dependency link and dictates the outcome. It is valid to declare either dependency rules or dependent rules; you don't have to have both when using visibility rules.
+
+:::caution There are no default rules
+When you set up a set of rules, it must be comprehensive, or Pants will throw an error when it fails to find a matching rule for a dependency/dependent.
+
+Without explicit rules defined, Pants allows all dependencies. This allows you to incrementally start to introduce rules.
+
+**Warning: Rule sets propagate to their subtrees, unless you override them with new rule sets in a corresponding BUILD file.**
+:::
+
+Let's look at another dependency example, where we have the following BUILD files for the two source files `src/a/main.py` and `src/b/lib.py`:
+
+```python
+# src/a/BUILD
+python_sources(dependencies=["src/b/lib.py"], tags=["apps"])
+
+# src/b/BUILD
+python_sources(tags=["libs"])
+```
+
+The dependency `src/a/main.py` -> `src/b/lib.py` would consult the `__dependencies_rules__` in `src/a/BUILD` for a rule that matches `src/b/lib.py`, and the `__dependents_rules__` in `src/b/BUILD` for a rule that matches `src/a/main.py`. See [rule sets](#rule-sets) for more details on how this works.
+
+When declaring your rules, you not only provide the rules for the current directory, but also set the default rules for all subdirectories. When overriding such default rules in a child BUILD file, there is an `extend=True` kwarg you may use if you want the default rules to still apply after the ones declared in the current BUILD file. (The `<rule>` placeholders below stand in for actual rule sets.)
+
+```python
+# src/BUILD
+# given some parent rules:
+__dependencies_rules__(
+    <parent rule 1>,
+    <parent rule 2>,
+)
+
+# src/subdir/BUILD
+
+# The following rules:
+__dependencies_rules__(
+    <rule 1>,
+    <rule 2>,
+    extend=True,
+)
+
+# are equivalent to:
+__dependencies_rules__(
+    <rule 1>,
+    <rule 2>,
+    <parent rule 1>,
+    <parent rule 2>,
+)
+
+# due to the `extend=True` flag, which inherits the parent rules after those just declared.
+```
+
+:::note Rule globs inherited with `extend=True` are re-anchored
+Any rule globs using the declaration-path anchoring mode that are inherited using `extend=True` will be anchored to the path of the current BUILD file, not the one where the rule was originally declared.
+See [glob syntax](#glob-syntax) for details on anchoring modes.
+:::
+
+For example:
+
+```python
+# src/BUILD
+__dependencies_rules__(
+    (files,
+     "/relative/to/BUILD/file/**",
+     "!*",
+    ),
+)
+
+# src/subdir/BUILD
+__dependencies_rules__(
+    (resources,
+     "/relative/to/BUILD/file/**",
+     "!*",
+    ),
+    extend=True,
+)
+```
+
+The above rules, when applied to `resources` _as well as_ `files` targets in `src/subdir`, will allow dependencies only on other targets in the subtree of `src/subdir/relative/to/BUILD/file/`, despite the inherited rule declared in `src/BUILD`. For `files` targets in other directories in the `src/` subtree (e.g. `src/another/dir`), dependencies will be allowed only on other targets in the subtree of `src/relative/to/BUILD/file/`.
+
+Keep in mind that visibility rules only operate on direct dependencies; they do not validate dependencies transitively. This is because it would otherwise make it impossible to use private modules. For instance, imagine you have an application stored in `src.app.main`. It needs to access the public modules in the shared library `src.library`. The methods in the public module `src.library.public` make calls to the private modules in that shared library, which means that `src.library.public` depends on `src.library._private`. So when we declare that `src.app.main` may not depend on `src.library._private`, we only forbid the application from accessing the private modules directly, since it still needs to access the functionality they provide transitively (but only via the public interface).
+
+If your codebase has a very complex dependency graph, you may need to make sure a given module never reaches some other modules (transitively). For instance, you may need to declare that modules in component `C1` may depend on any module in component `C2` as long as these modules (in `C2`) do not depend (transitively) on any module in component `C3`. This could be necessary if components are deployed separately; for instance, you may package a deployable artifact with components `C1` (full) and `C2` (partial) and another one with components `C2` (partial) and `C3` (full).
+
+In this scenario, you may need to look into alternative solutions to codify these constraints. For example, to check for violations, you could query the dependencies of component `C1` (transitively) using the `dependencies` goal with the `--transitive` option to confirm none of the modules from component `C3` are listed. If there are some, you may find the `paths` goal helpful, as it shows you exactly why a certain module from component `C1` depends on a given module in component `C3`, if that is unclear.
+
+### Rule sets
+
+As there may be many targets and files with dependencies, odds are that they won't all share the same set of rules. The rules keywords accept multiple sets of rules, or "rule sets", along with selectors that are used to select which set to use for each target.
+
+The overall structure is (an example with 2 rule sets, using `<selector>` and `<rule>` as placeholders):
+
+```python
+# BUILD
+
+__dependencies_rules__(
+
+    # Rule set 1
+    (<selector>, <rule>, <rule>, ...),
+
+    # Rule set 2
+    (<selector>, <rule>, ...),
+
+    ...
+)
+```
+
+The selector and rule share a common syntax (referred to as a **target rule spec**): a dictionary value with properties describing what targets it applies to. Together, this pair of selector(s) and rules is called a **rule set**. A rule set may have multiple selectors wrapped in a list/tuple, and the rules may be spread out or grouped in any fashion. Grouping rules like this makes it easier to re-use/insert rules from macros or similar.
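+
+To make this concrete, here is a small, hypothetical sketch (the tag and paths are made up) of a directive with two rule sets:
+
+```python
+# BUILD -- illustrative sketch only
+__dependencies_rules__(
+    # Rule set 1: targets tagged `internal` may only depend on code under src/internal/.
+    (
+        {"tags": ["internal"]},  # selector
+        "src/internal/**",       # allow
+        "!*",                    # deny everything else
+    ),
+
+    # Rule set 2: all other targets may depend on anything.
+    ("*", "*"),
+)
+```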
+
+:::note An empty selector (`{}` or `""`) will never match anything, and as such is pointless and ignored.
+For every dependency link, only a single set of rules will ever be applied. The first rule set
+with a matching selector will be the only one used, and any remaining rule sets are never
+consulted.
+:::
+
+The **target rule spec** has four properties (five for rules): `type`, `name`, `tags`, `path`, and `action` (only rules consult `action`). The `action` is one of `allow` (the default if not specified), `warn`, or `deny`. From the above example, when determining which rule set to apply for the dependencies of `src/a/main.py`, Pants will look for the first selector in `src/a/BUILD` that satisfies the properties `type=python_sources`, `tags=["apps"]`, and `path=src/a/main.py`. The selection is based on exclusion: a selector is only skipped when it has a property value that does not match the target's corresponding property; the lack of a property is considered to match anything. Consequently, an empty target rule spec would match all targets, but this is disallowed and will raise an error if used, because it is conceptually not very clear when reading the rules.
+
+The values of a **target rule spec** support wildcard patterns (or globs) so that a single selector can match multiple different targets, as described in [glob syntax](#glob-syntax). When listing multiple values for the `tags` property, the target must have all of them in order to match. Spread the tags over multiple selectors in order to switch from _AND_ to _OR_ as required. The target `type` to match against is the type used in the BUILD file; as the path (and target address) may refer to a generated target, it is the target generator's type that is used during the selector matching process.
+
+The selectors are matched against the target in the order they are defined in the BUILD file, and the first rule set with a selector that is a match will be selected. The rules from the selected rule set are then matched, in order, against the path of the **target on the other end** of the dependency link. This is worth reading again: using the above example, the rules defined in `src/a/BUILD` will be matched against `src/b/lib.py`, while the `path` selector will be matched against `src/a/main.py`.
+
+Providing some example rule sets for the above example (see [rule actions](#rule-actions) on how to mark a rule as "allow" or "deny"):
+
+```python
+# src/a/BUILD (continued from previous example)
+__dependencies_rules__(
+    (
+        {"type": python_sources},  # We can use the target type unquoted when we don't need glob syntax
+        "src/a/**",  # May depend on anything from src/a/
+        "src/b/lib.py",  # May depend on this specific file
+        "!*",  # May not depend on anything else. This is our "catch all" rule, ensuring there will never be any fall-through, which would've been an error
+    ),
+
+    # We need another rule set, in case we have non-Python sources in here, to avoid fall-through.
+    # Sticking in a generic catch-all allow-all rule.
+    ("*", "*"),
+)
+
+
+# src/b/BUILD (continued from previous example)
+__dependents_rules__(
+    (
+        (  # Using multiple selectors
+            {"type": "python_*", "tags": ["any-python"]},
+            {"type": "*", "tags": ["libs"]},
+            {"path": "special-cased.py"},
+            {"name": "my-target-name"},
+            {"path": "//src/**", "name": "named-*"},
+        ),
+        (  # Grouping rules for readability
+            (  # Deny rules
+                "!tests/**",  # No tests
+                "!src/*/*/**",  # Nothing deeply nested
+            ),
+            (  # Allow rules
+                "*",  # Allow everything else
+            ),
+        )
+    ),
+
+    # We need another rule set, in case we have non-Python sources in here, to avoid fall-through.
+    # Sticking in a generic catch-all allow-all rule.
+    ("*", "*"),
+)
+```
+
+There is some syntactic sugar for **target rule specs** so they may be declared in a more concise text form rather than as a dictionary (we have used this text form for the rules already; it is also the form in which they are presented in messages from Pants, when possible). The syntax is `<type>[path:name](tags, ...)`. Empty parts are optional and can be left out, and if only `path` (and/or `name`) is provided, the enclosing square brackets are optional. For reference, the string form of the selectors in the previous example code block would look like this:
+
+```python
+python_sources            # {"type": python_sources} -- target types work as strings when used bare
+"<python_sources>"        # {"type": "python_sources"}
+"<python_*>(any-python)"  # {"type": "python_*", "tags":["any-python"]}
+"<*>(libs)"               # {"type": "*", "tags":["libs"]}
+"[special-cased.py]"      # {"path": "special-cased.py"}
+# May omit square brackets when only providing a path and/or name:
+"special-cased.py"        # {"path": "special-cased.py"}
+":my-target-name"         # {"name": "my-target-name"}
+"//src/**:named-*"        # {"path": "//src/**", "name": "named-*"}
+```
+
+The previous example, using this alternative syntax for the selectors, would look like:
+
+```python
+    (
+        (  # Using multiple selectors
+            "<python_*>(any-python)",
+            "<*>(libs)",
+            "special-cased.py",
+            ":my-target-name",
+            "//src/**:named-*",
+        ),
+        ...
+    )
+```
+
+Similarly, the rules may also be expressed using the dict syntax:
+
+```python
+    (
+        (  # Selectors block
+            ...
+        ),
+        (  # Grouping rules for readability
+            (  # Deny rules
+                {"path": "tests/**", "action": "deny"},  # No tests
+                {"path": "src/*/*/**", "action": "deny"},  # Nothing deeply nested
+            ),
+            (  # Allow rules
+                {"path": "*"},  # Allow everything else (allow is the default action)
+            ),
+        )
+    )
+```
+
+### Glob syntax
+
+The visibility rules are all about matching globs. There are two wildcards: `*` matches anything except `/`, and `**` matches anything including `/`. (For paths, that is non-recursive and recursive globbing, respectively.)
+
+A glob is matched until the end of the value it is being applied to, so if there is no trailing wildcard (`*` or `**`) at the end of the path glob, it will match to the end of the value. An example where this is useful is for matching on file names regardless of where in the project tree they are:
+
+```
+.py
+my_source.py
+my_*.py
+```
+
+Leading wildcards may be used to emphasize this if desired, but will function the same, so the above is equivalent to (a non-exhaustive list of alternatives):
+
+```
+*.py
+*/my_source.py
+**/my_*.py
+```
+
+When providing a file name like `my_source.py`, it is assumed to be the full file name, so `another_my_source.py` will _not_ be considered a match in that case.
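+
+Because globs are anchored to the end of the matched path, such file-name patterns can be used directly as rules. A small, hypothetical sketch (the tag and pattern are made up):
+
+```python
+# BUILD -- illustrative sketch only
+__dependents_rules__(
+    (
+        {"tags": ["private"]},  # selector: applies to targets tagged `private`
+        "!my_*.py",  # no file named `my_*.py`, anywhere in the tree, may depend on these targets
+        "*",         # everything else may
+    ),
+)
+```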
+
+To match any file in a particular directory:
+
+```
+some/directory/*
+```
+
+So far, the rule globs have been matched from anywhere in the matched path up to its end. To ensure the path begins with a certain pattern, we would have to provide full paths in our rules, like `src/python/proj/lib/file.py`, if we want to make sure that our `file.py` is from the `src/` tree. To avoid lengthy and rigid rule globs hurting refactorings etc., there is a concept of "anchoring" the rule. Anchoring applies the rule glob from a fixed point in the matched path, and there are three such points: the project root, the rule declaration path, and the rule invocation path. The difference between declaration and invocation is explained below.
+
+##### Anchoring mode for path globs
+
+The glob prefix specifies which "anchoring mode" to use:
+
+- `//` - Anchor the glob to the project root.
+  Example: `//src/python/**` will match all files from the `src/python/` tree.
+
+- `/` - Anchor the glob to the declaration path.
+  This is the path of the BUILD file where the rule is declared or extended to, using one of the rules keywords (i.e. `__dependencies_rules__` or `__dependents_rules__`).
+  Example: in `src/python/BUILD` there is a rule `/proj/**`, which will match all files from the `src/python/proj/` tree.
+
+  Note: When a rule is extended (using `extend=True` in a rules keyword), it is treated as declared in the new BUILD file with the `extend=True` argument.
+
+- `.` - Anchor the glob to the invocation path.
+  This is the file path of the target the rule will apply to. Relative paths are supported, so `../../cousin/path` is valid.
+  Example: a rule `./lib/*`, when applied for the file `src/python/proj/main.py`, would match `src/python/proj/lib/*`.
+
+- Any other value is left "floating", as described at the top of this glob syntax section.
+
+:::caution Regardless of anchoring mode, all rules are always anchored to the end of the matched path.
+:::
+
+##### Targeting non-source files
+
+So far, most examples have been about providing rules that match source files. This will likely be the most common scenario, but other targets may just as well need to be considered. In the general case, non-source-file targets are matched using the directory where they are declared (the path containing the BUILD file, in most cases).
+
+Generated targets, such as a `python_requirement` target that has been generated from a `python_requirements` target, are a bit special, and borrow some syntax from their address.
+
+```python
+# example/BUILD
+
+python_requirements(name="reqs", ...)
+
+# Limit who may depend on a certain library using dependents rules
+__dependents_rules__(
+    (
+        (
+            # List the libraries this rule set applies to,
+            # here using various anchoring modes and patterns for illustration purposes.
+            "//example/reqs#click",
+            "/reqs#ansicolors",
+            "#requests",
+            "setuptools",
+            ...
+        ),
+        "src/cli/**",
+        "src/net/**",
+        "!*",
+    ),
+    ("*", "*"),
+)
+```
+
+From the other end, to limit which libraries may be used by some sources:
+
+```python
+# example/BUILD
+
+# Limit which libraries may be depended upon
+__dependencies_rules__(
+    (
+        "*",  # These rules apply to all targets
+        # May only import setuptools and ansicolors, but no other libraries from the example/reqs target
+        "//example/reqs#setuptools",
+        "reqs#ansicolors",
+        "!//example/reqs#*",
+
+        # Any other dependency allowed
+        "*",
+    ),
+)
+```
+
+### Rule actions
+
+When a matching rule is found for a path, the path is either allowed or denied based on the rule's action. The dependency link as a whole is only allowed if both ends of the dependency allow it. By default, a rule's action is `ALLOW`, but it may be changed to `DENY` or `WARN`. The `WARN` action logs a warning message rather than raising an error, but will otherwise allow the dependency.
+
+The rule action is specified as a prefix on the rule glob:
+
+- `!` - Sets the rule's action to `DENY`.
+- `?` - Sets the rule's action to `WARN`.
+- Any other value is part of the [rule glob](#glob-syntax).
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/_category_.json b/versioned_docs/version-2.24/docs/writing-plugins/_category_.json
new file mode 100644
index 000000000..c16bad5ac
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Writing Plugins",
+  "position": 14
+}
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/_category_.json b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/_category_.json
new file mode 100644
index 000000000..7308847dd
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "Common plugin tasks",
+  "position": 5
+}
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-formatter.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-formatter.mdx
new file mode 100644
index 000000000..67f1767c8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-formatter.mdx
@@ -0,0 +1,185 @@
+---
+ title: Add a formatter
+ sidebar_position: 1
+---
+
+How to add a new formatter to the `fmt` and `lint` goals.
+
+---
+
+In Pants, every formatter is also a linter, meaning that if you can run a tool with `pants fmt`, you can run the same tool in check-only mode with `pants lint`. Start by skimming [Add a linter](./add-a-linter.mdx) to familiarize yourself with how linters work.
+
+This guide assumes that you are running a formatter that already exists outside of Pants as a stand-alone binary, such as running Black or Prettier.
+
+If you are instead writing your own formatting logic inline, you can skip Step 1. In Step 3, you will not need to use `Process`.
+
+## 1. Install your formatter
+
+There are several ways for Pants to install your formatter. See [Installing tools](../the-rules-api/installing-tools.mdx). This example will use `ExternalTool` because there is already a pre-compiled binary for shfmt.
+
+You will also likely want to register some options, like `--config`, `--skip`, and `--args`. Options are registered through a [`Subsystem`](../the-rules-api/options-and-subsystems.mdx). If you are using `ExternalTool`, this is already a subclass of `Subsystem`. Otherwise, create a subclass of `Subsystem`.
+Then, set the class property `options_scope` to the name of the tool, e.g. `"shfmt"` or `"prettier"`. Finally, add options from `pants.option.option_types`.
+
+```python
+from pants.core.util_rules.external_tool import ExternalTool
+from pants.engine.platform import Platform
+from pants.option.option_types import ArgsListOption, SkipOption
+
+
+class Shfmt(ExternalTool):
+    """An autoformatter for shell scripts (https://github.com/mvdan/sh)."""
+
+    options_scope = "shfmt"
+    name = "Shfmt"
+    default_version = "v3.2.4"
+    default_known_versions = [
+        "v3.2.4|macos_arm64 |e70fc42e69debe3e400347d4f918630cdf4bf2537277d672bbc43490387508ec|2998546",
+        "v3.2.4|macos_x86_64|43a0461a1b54070ddc04fbbf1b78f7861ee39a65a61f5466d15a39c4aba4f917|2980208",
+        "v3.2.4|linux_arm64 |6474d9cc08a1c9fe2ef4be7a004951998e3067d46cf55a011ddd5ff7bfab3de6|2752512",
+        "v3.2.4|linux_x86_64|3f5a47f8fec27fae3e06d611559a2063f5d27e4b9501171dde9959b8c60a3538|2797568",
+    ]
+
+    # We set this because we need the mapping for both `generate_exe` and `generate_url`.
+    platform_mapping = {
+        "macos_arm64": "darwin_arm64",
+        "macos_x86_64": "darwin_amd64",
+        "linux_arm64": "linux_arm64",
+        "linux_x86_64": "linux_amd64",
+    }
+
+    skip = SkipOption("fmt", "lint")
+    args = ArgsListOption(example="-i 2")
+
+    def generate_url(self, plat: Platform) -> str:
+        plat_str = self.platform_mapping[plat.value]
+        return (
+            f"https://github.com/mvdan/sh/releases/download/{self.version}/"
+            f"shfmt_{self.version}_{plat_str}"
+        )
+
+    def generate_exe(self, plat: Platform) -> str:
+        plat_str = self.platform_mapping[plat.value]
+        return f"./shfmt_{self.version}_{plat_str}"
+```
+
+## 2. Set up a `FieldSet` and `FmtTargetsRequest`
+
+As described in [Rules and the Target API](../the-rules-api/rules-and-the-target-api.mdx), a `FieldSet` is a way to tell Pants which `Field`s you care about targets having for your plugin to work.
+
+Usually, you should add a subclass of `SourcesField` to the class property `required_fields`, such as `ShellSourceField` or `PythonSourceField`. This means that your formatter will run on any target with that sources field or a subclass of it.
+
+Create a new dataclass that subclasses `FieldSet`:
+
+```python
+from dataclasses import dataclass
+
+from pants.engine.target import FieldSet
+
+...
+
+@dataclass(frozen=True)
+class ShfmtFieldSet(FieldSet):
+    required_fields = (ShellSourceField,)
+
+    sources: ShellSourceField
+```
+
+Then, hook this up to a new subclass of `FmtTargetsRequest`.
+
+```python
+from pants.core.goals.fmt import FmtTargetsRequest
+
+
+class ShfmtRequest(FmtTargetsRequest):
+    field_set_type = ShfmtFieldSet
+    tool_subsystem = Shfmt
+```
+
+## 3. Create `fmt` rules
+
+You will need a rule for `fmt` which takes the `FmtTargetsRequest.Batch` from step 2 (e.g. `ShfmtRequest.Batch`) as a parameter and returns a `FmtResult`.
+
+```python
+@rule(desc="Format with shfmt", level=LogLevel.DEBUG)
+async def shfmt_fmt(request: ShfmtRequest.Batch, shfmt: Shfmt, platform: Platform) -> FmtResult:
+    download_shfmt_get = Get(
+        DownloadedExternalTool,
+        ExternalToolRequest,
+        shfmt.get_request(platform),
+    )
+
+    # If the user specified `--shfmt-config`, we must search for the file they specified with
+    # `PathGlobs` to include it in the `input_digest`. We error if the file cannot be found.
+    config_digest_get = Get(
+        Digest,
+        PathGlobs(
+            globs=[shfmt.config] if shfmt.config else [],
+            glob_match_error_behavior=GlobMatchErrorBehavior.error,
+            description_of_origin="the option `--shfmt-config`",
+        ),
+    )
+
+    downloaded_shfmt, config_digest = await MultiGet(
+        download_shfmt_get, config_digest_get
+    )
+
+    input_digest = await Get(
+        Digest,
+        MergeDigests(
+            (request.snapshot.digest, downloaded_shfmt.digest, config_digest)
+        ),
+    )
+
+    argv = [
+        downloaded_shfmt.exe,
+        "-w",
+        *shfmt.args,
+        *request.snapshot.files,
+    ]
+    process = Process(
+        argv=argv,
+        input_digest=input_digest,
+        output_files=request.snapshot.files,
+        description=f"Run shfmt on {pluralize(len(request.snapshot.files), 'file')}.",
+        level=LogLevel.DEBUG,
+    )
+
+    result = await Get(ProcessResult, Process, process)
+    return await FmtResult.create(request, result)
+```
+
+The `ShfmtRequest.Batch` object has `.snapshot`, which stores the list of files and the `Digest` of the source files.
+
+If you used `ExternalTool` in step 1, you will use `Get(DownloadedExternalTool, ExternalToolRequest)` to ensure that the tool is fetched.
+
+Use `Get(Digest, MergeDigests)` to combine the different inputs together, such as merging the source files and downloaded tool.
+
+At the bottom of your file, tell Pants about your rules:
+
+```python
+def rules():
+    return [
+        *collect_rules(),
+        *ShfmtRequest.rules(partitioner_type=PartitionerType.DEFAULT_SINGLE_PARTITION),
+    ]
+```
+
+Finally, update your plugin's `register.py` to activate this file's rules. Note that we must register the rules added in Step 2, as well.
+
+```python title="pants-plugins/shell/register.py"
+from shell import shfmt
+
+
+def rules():
+    return [*shfmt.rules()]
+```
+
+Now, when you run `pants fmt ::` or `pants lint ::`, your new formatter should run.
+
+## 4. Add tests (optional)
+
+Refer to [Testing rules](../the-rules-api/testing-plugins.mdx).
+
+
+## 5. Make the tool exportable (optional)
+
+Refer to [Allowing tool export](allowing-tool-export.mdx) to allow users to export the tool for use in external programs.
\ No newline at end of file
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-linter.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-linter.mdx
new file mode 100644
index 000000000..d63cc1c3e
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-linter.mdx
@@ -0,0 +1,236 @@
+---
+ title: Add a linter
+ sidebar_position: 0
+---
+
+How to add a new linter to the `lint` goal.
+
+---
+
+This guide assumes that you are running a linter that already exists outside of Pants as a stand-alone binary, such as running Shellcheck, Pylint, Checkstyle, or ESLint.
+
+If you are instead writing your own linting logic inline, you can skip Step 1. In Step 3, you will not need to use `Process`. You may find Pants's [`regex-lint` implementation](https://github.com/pantsbuild/pants/blob/main/src/python/pants/backend/project_info/regex_lint.py) helpful for how to integrate custom linting logic into Pants.
+
+## 1. Install your linter
+
+There are several ways for Pants to install your linter. See [Installing tools](../the-rules-api/installing-tools.mdx). This example will use `ExternalTool` because there is already a pre-compiled binary for Shellcheck.
+
+You will also likely want to register some options, like `--config`, `--skip`, and `--args`.
+Options are registered through a [`Subsystem`](../the-rules-api/options-and-subsystems.mdx). If you are using `ExternalTool`, this is already a subclass of `Subsystem`. Otherwise, create a subclass of `Subsystem`. Then, set the class property `options_scope` to the name of the tool, e.g. `"shellcheck"` or `"eslint"`. Finally, add options from `pants.option.option_types`.
+
+```python
+from pants.core.util_rules.external_tool import ExternalTool
+from pants.engine.platform import Platform
+from pants.option.option_types import ArgsListOption, SkipOption
+
+
+class Shellcheck(ExternalTool):
+    """A linter for shell scripts."""
+
+    options_scope = "shellcheck"
+    name = "ShellCheck"
+    default_version = "v0.8.0"
+    default_known_versions = [
+        "v0.8.0|macos_arm64 |e065d4afb2620cc8c1d420a9b3e6243c84ff1a693c1ff0e38f279c8f31e86634|4049756",
+        "v0.8.0|macos_x86_64|e065d4afb2620cc8c1d420a9b3e6243c84ff1a693c1ff0e38f279c8f31e86634|4049756",
+        "v0.8.0|linux_arm64 |9f47bbff5624babfa712eb9d64ece14c6c46327122d0c54983f627ae3a30a4ac|2996468",
+        "v0.8.0|linux_x86_64|ab6ee1b178f014d1b86d1e24da20d1139656c8b0ed34d2867fbb834dad02bf0a|1403852",
+    ]
+
+    skip = SkipOption("lint")
+    args = ArgsListOption(example="-e SC20529")
+
+    def generate_url(self, plat: Platform) -> str:
+        plat_str = {
+            "macos_arm64": "darwin.x86_64",
+            "macos_x86_64": "darwin.x86_64",
+            "linux_arm64": "linux.aarch64",
+            "linux_x86_64": "linux.x86_64",
+        }[plat.value]
+        return (
+            f"https://github.com/koalaman/shellcheck/releases/download/{self.version}/"
+            f"shellcheck-{self.version}.{plat_str}.tar.xz"
+        )
+
+    def generate_exe(self, _: Platform) -> str:
+        return f"./shellcheck-{self.version}/shellcheck"
+
+```
+
+## 2. Set up a `FieldSet` and `LintTargetsRequest`
+
+As described in [Rules and the Target API](../the-rules-api/rules-and-the-target-api.mdx), a `FieldSet` is a way to tell Pants which `Field`s you care about targets having for your plugin to work.
+
+Usually, you should add a subclass of the `Sources` field to the class property `required_fields`, such as `BashSources` or `PythonSources`. This means that your linter will run on any target with that sources field or a subclass of it.
+
+Create a new dataclass that subclasses `FieldSet`:
+
+```python
+from dataclasses import dataclass
+
+from pants.engine.target import Dependencies, FieldSet
+
+...
+
+@dataclass(frozen=True)
+class ShellcheckFieldSet(FieldSet):
+    required_fields = (BashSources,)
+
+    sources: BashSources
+    dependencies: Dependencies
+```
+
+Then, hook this up to a new subclass of `LintTargetsRequest`:
+
+```python
+from pants.core.goals.lint import LintTargetsRequest
+
+...
+
+class ShellcheckRequest(LintTargetsRequest):
+    field_set_type = ShellcheckFieldSet
+    tool_subsystem = Shellcheck
+```
+
+## 3. Create a rule for your linter logic
+
+Your rule should take as parameters `ShellcheckRequest.Batch` and the `Subsystem` (or `ExternalTool`) from step 1 (a `Batch` is an object containing a subset of all the matched field sets for your tool). It should return a `LintResult`:
+
+```python
+from pants.engine.rules import rule
+from pants.core.goals.lint import LintResult
+
+...
+
+@rule
+async def run_shellcheck(
+    request: ShellcheckRequest.Batch, shellcheck: Shellcheck
+) -> LintResult:
+    return LintResult.create(...)
+```
+
+The `ShellcheckRequest.Batch` instance has a property called `.elements`, which, in this case, stores a collection of the `FieldSet`s defined in step 2. Each `FieldSet` corresponds to a single target.
Pants will have already validated that there is at least one valid `FieldSet`.
+
+If you used `ExternalTool` in step 1, you will use `Get(DownloadedExternalTool, ExternalToolRequest)` to install the tool.
+
+Typically, you will use `Get(SourceFiles, SourceFilesRequest)` to get all the sources you want to run your linter on.
+
+If you have a `--config` option, you should use `Get(Digest, PathGlobs)` to find the config file and include it in the `input_digest`.
+
+Use `Get(Digest, MergeDigests)` to combine the different inputs together, such as merging the source files, config file, and downloaded tool.
+
+Usually, you will use `Get(FallibleProcessResult, Process)` to run a subprocess (see [Processes](../the-rules-api/processes.mdx)). We use `Fallible` because Pants should not throw an exception if the linter returns a non-zero exit code. Then, you can use `LintResult.create()` to convert this into a `LintResult`.
+
+```python
+from pants.core.goals.lint import LintResult, LintTargetsRequest
+from pants.core.util_rules.source_files import (
+    SourceFilesRequest,
+    SourceFiles,
+)
+from pants.core.util_rules.external_tool import (
+    DownloadedExternalTool,
+    ExternalTool,
+    ExternalToolRequest,
+)
+from pants.engine.fs import (
+    Digest,
+    GlobMatchErrorBehavior,
+    MergeDigests,
+    PathGlobs,
+)
+from pants.engine.platform import Platform
+from pants.engine.process import FallibleProcessResult, Process
+from pants.engine.rules import Get, MultiGet, rule
+from pants.util.logging import LogLevel
+from pants.util.strutil import pluralize
+
+...
+
+@rule
+async def run_shellcheck(
+    request: ShellcheckRequest.Batch, shellcheck: Shellcheck, platform: Platform
+) -> LintResult:
+    download_shellcheck_request = Get(
+        DownloadedExternalTool,
+        ExternalToolRequest,
+        shellcheck.get_request(platform),
+    )
+
+    sources_request = Get(
+        SourceFiles,
+        SourceFilesRequest(field_set.sources for field_set in request.elements),
+    )
+
+    # If the user specified `--shellcheck-config`, we must search for the file they specified with
+    # `PathGlobs` to include it in the `input_digest`. We error if the file cannot be found.
+    config_digest_request = Get(
+        Digest,
+        PathGlobs(
+            globs=[shellcheck.config] if shellcheck.config else [],
+            glob_match_error_behavior=GlobMatchErrorBehavior.error,
+            description_of_origin="the option `[shellcheck].config`",
+        ),
+    )
+
+    downloaded_shellcheck, sources, config_digest = await MultiGet(
+        download_shellcheck_request, sources_request, config_digest_request
+    )
+
+    # The Process needs one single `Digest`, so we merge everything together.
+    input_digest = await Get(
+        Digest,
+        MergeDigests(
+            (
+                downloaded_shellcheck.digest,
+                sources.snapshot.digest,
+                config_digest,
+            )
+        ),
+    )
+
+    process_result = await Get(
+        FallibleProcessResult,
+        Process(
+            argv=[
+                downloaded_shellcheck.exe,
+                *shellcheck.args,
+                *sources.snapshot.files,
+            ],
+            input_digest=input_digest,
+            description=f"Run Shellcheck on {pluralize(len(request.elements), 'file')}.",
+            level=LogLevel.DEBUG,
+        ),
+    )
+    return LintResult.create(request, process_result)
+```
+
+At the bottom of your file, tell Pants about your rules:
+
+```python
+def rules():
+    return [
+        *collect_rules(),
+        *ShellcheckRequest.rules(partitioner_type=PartitionerType.DEFAULT_SINGLE_PARTITION),
+        UnionRule(ExportableTool, Shellcheck),  # allows exporting the `shellcheck` binary
+    ]
+```
+
+Finally, update your plugin's `register.py` to activate this file's rules.
+
+```python title="pants-plugins/bash/register.py"
+from bash import shellcheck
+
+
+def rules():
+    return [*shellcheck.rules()]
+```
+
+Now, when you run `pants lint ::`, your new linter should run.
+
+## 4. Add tests (optional)
+
+Refer to [Testing rules](../the-rules-api/testing-plugins.mdx).
+
+## 5. Make the tool exportable (optional)
+
+Refer to [Allowing tool export](allowing-tool-export.mdx) to allow users to export the tool for use in external programs.
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-repl.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-repl.mdx
new file mode 100644
index 000000000..59b0d9d32
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-repl.mdx
@@ -0,0 +1,114 @@
+---
+    title: Add a REPL
+    sidebar_position: 5
+---
+
+How to add a new implementation to the `repl` goal.
+
+---
+
+The `repl` goal opens up an interactive Read-Eval-Print Loop that runs in the foreground.
+
+Typically, the REPL is loaded with the transitive closure of the files and targets that the user provided, so that users may import their code and resources in the REPL.
+
+1. Install your REPL
+
+---
+
+There are several ways for Pants to install your REPL. See [Installing tools](../the-rules-api/installing-tools.mdx).
+
+In this example, we simply find the program `bash` on the user's machine, but often you will want to install a tool like Ammonite or IPython instead.
+
+You may also want to add options for your REPL implementation, such as allowing users to change the version of the tool. See [Options and subsystems](../the-rules-api/options-and-subsystems.mdx).
+
+2. Set up a subclass of `ReplImplementation`
+
+---
+
+Subclass `ReplImplementation` and define the class property `name: str` with the name of your REPL, e.g. `"bash"` or `"ipython"`. Users can then set the option `--repl-shell` to this name to choose your REPL implementation.
+
+```python
+from pants.core.goals.repl import ReplImplementation
+
+class BashRepl(ReplImplementation):
+    name = "bash"
+```
+
+Then, register your new `ReplImplementation` with a [`UnionRule`](../the-rules-api/union-rules-advanced.mdx) so that Pants knows your REPL implementation exists:
+
+```python
+from pants.engine.rules import collect_rules
+from pants.engine.unions import UnionRule
+
+...
+
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(ReplImplementation, BashRepl),
+    ]
+```
+
+3. Create a rule for your REPL logic
+
+---
+
+Your rule should take as a parameter the `ReplImplementation` from Step 2, which has a field `targets: Targets` containing the targets specified by the user. It also has a convenience property `addresses: Addresses` with the addresses of what was specified.
+
+Your rule should return `ReplRequest`, which has the fields `digest: Digest`, `args: Iterable[str]`, and `extra_env: Optional[Mapping[str, str]]`.
+
+The `ReplRequest` will get converted into an `InteractiveProcess` that will run in the foreground.
+
+The process will run in a temporary directory in the build root, which means that the script/program can access files that would normally need to be declared by adding a `file` / `files` or `resource` / `resources` target to the `dependencies` field.
+
+The process will not be hermetic, meaning that it will inherit the environment variables used by the `pants` process. Any values you set in `extra_env` will add to or update the specified environment variables.
+
+```python
+from pants.core.goals.repl import ReplRequest
+from pants.core.target_types import FileSourceField, ResourceSourceField
+from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
+from pants.core.util_rules.system_binaries import BinaryPathRequest, BinaryPaths
+from pants.engine.rules import Get, rule
+from pants.engine.target import SourcesField, TransitiveTargets, TransitiveTargetsRequest
+from pants.util.logging import LogLevel
+
+...
+
+@rule(level=LogLevel.DEBUG)
+async def create_bash_repl_request(repl: BashRepl) -> ReplRequest:
+    # First, we find the `bash` program.
+    bash_program_paths = await Get(
+        BinaryPaths, BinaryPathRequest(binary_name="bash", search_path=("/bin", "/usr/bin")),
+    )
+    if not bash_program_paths.first_path:
+        raise EnvironmentError("Could not find the `bash` program on /bin or /usr/bin.")
+    bash_program = bash_program_paths.first_path
+
+    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(repl.addresses))
+    sources = await Get(
+        SourceFiles,
+        SourceFilesRequest(
+            (tgt.get(SourcesField) for tgt in transitive_targets.closure),
+            for_sources_types=(BashSourceField, FileSourceField, ResourceSourceField),
+        ),
+    )
+    return ReplRequest(
+        digest=sources.snapshot.digest, args=(bash_program.path,)
+    )
+```
+
+If you use any relative paths in `args` or `extra_env`, you should call `repl.in_chroot("./example_relative_path")` on the values. This ensures that you run on the correct file in the temporary directory created by Pants.
+
+Finally, update your plugin's `register.py` to activate this file's rules.
+
+```python title="pants-plugins/bash/register.py"
+from bash import repl
+
+
+def rules():
+    return [*repl.rules()]
+```
+
+Now, when you run `pants repl --shell=bash ::`, your new REPL should be used.
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-typechecker.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-typechecker.mdx
new file mode 100644
index 000000000..26df80e04
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-a-typechecker.mdx
@@ -0,0 +1,53 @@
+---
+    title: Add a typechecker
+    sidebar_position: 2
+---
+
+How to add a new typechecker to the `check` goal.
+
+---
+
+Adding a typechecker is almost identical to [adding a linter](./add-a-linter.mdx), except for these differences:
+
+1. Subclass `CheckRequest` from `pants.core.goals.check`, rather than `LintTargetsRequest`.
+2. Register a `UnionRule(CheckRequest, CustomCheckRequest)` in your `rules()`, instead of unpacking `.rules(...)`.
+3. Return `CheckResults` in your rule (a collection of `CheckResult` objects), rather than returning a `LintResult`.
+
+The rule will look like this:
+
+```python
+from dataclasses import dataclass
+
+from pants.backend.python.target_types import PythonSourceField
+from pants.core.goals.check import CheckRequest, CheckResult, CheckResults
+from pants.engine.target import FieldSet
+from pants.engine.rules import collect_rules, rule
+from pants.engine.unions import UnionRule
+from pants.util.logging import LogLevel
+
+
+@dataclass(frozen=True)
+class MyPyFieldSet(FieldSet):
+    required_fields = (PythonSourceField,)
+
+    source: PythonSourceField
+
+
+class MyPyRequest(CheckRequest):
+    field_set_type = MyPyFieldSet
+    name = "mypy"
+
+
+@rule(desc="Typecheck using MyPy", level=LogLevel.DEBUG)
+async def mypy_typecheck(request: MyPyRequest, mypy: MyPy) -> CheckResults:
+    if mypy.skip:
+        return CheckResults([], checker_name=request.name)
+    ...
+    return CheckResults(
+        [CheckResult.from_fallible_process_result(result)], checker_name=request.name
+    )
+
+def rules():
+    return [*collect_rules(), UnionRule(CheckRequest, MyPyRequest)]
+```
+
+Refer to [Add a linter](./add-a-linter.mdx). See [`pants/backend/python/typecheck/mypy/rules.py`](https://github.com/pantsbuild/pants/blob/master/src/python/pants/backend/python/typecheck/mypy/rules.py) for the full MyPy implementation.
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-codegen.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-codegen.mdx
new file mode 100644
index 000000000..b7a89ae34
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/add-codegen.mdx
@@ -0,0 +1,295 @@
+---
+    title: Add codegen
+    sidebar_position: 3
+---
+
+How to add a new code generator to Pants.
+
+---
+
+Pants supports code generators that convert a protocol language like Protobuf into other languages, such as Python or Java. The same protocol source may be used to generate multiple distinct languages.
+
+Pants will not actually write the generated files to disk, except when running `pants export-codegen`. Instead, any targets that depend on the protocol targets will cause their code to be generated, and those generated files will be copied over into the "chroot" (temporary directory) where Pants executes.
+
+:::note Example: Protobuf -> Python
+This guide walks through each step of adding Protobuf to generate Python sources. See [here](https://github.com/pantsbuild/pants/tree/master/src/python/pants/backend/codegen/protobuf) for the final result.
+:::
+
+This guide assumes that you are running a code generator that already exists outside of Pants as a stand-alone binary, such as Protoc or Thrift.
+
+If you are instead writing your own code generation logic inline, you can skip Step 2. In Step 4, rather than running a `Process`, use [`CreateDigest`](../the-rules-api/file-system.mdx).
+
+## 1. Create a target type for the protocol
+
+You will need to define a new target type to allow users to provide metadata for their protocol files, e.g. their `.proto` files. See [Creating new targets](../the-target-api/creating-new-targets.mdx) for a guide on how to do this.
+
+```python
+from pants.engine.target import COMMON_TARGET_FIELDS, Dependencies, SingleSourceField, Target
+
+class ProtobufSourceField(SingleSourceField):
+    expected_file_extensions = (".proto",)
+
+class ProtobufSourceTarget(Target):
+    alias = "protobuf_source"
+    help = "A single Protobuf file."
+    core_fields = (*COMMON_TARGET_FIELDS, Dependencies, ProtobufSourceField)
+```
+
+You should define a subclass of `SourcesField`, like `ProtobufSourceField` or `ThriftSourceField`. This is important for Step 3.
+
+Typically, you will want to register the `Dependencies` field.
+
+### Target type already exists?
+
+If Pants already has a target type for your protocol (for example, a `ProtobufSourceTarget` is already defined), you should not create a new target type.
+
+Instead, you can optionally add any additional fields that you would like through plugin fields. See [Extending pre-existing targets](../the-target-api/extending-existing-targets.mdx).
+
+### Add dependency injection (optional)
+
+Often, generated files will depend on a runtime library to work. For example, Python files generated from Protobuf depend on the `protobuf` library.
+ +Instead of users having to explicitly add this dependency every time, you can dynamically inject this dependency for them. + +To inject dependencies: + +1. Subclass the `Dependencies` field. Register this subclass on your protocol target type. +2. Define a subclass of `InjectDependenciesRequest` and set the class property `inject_for` to the `Dependencies` subclass defined in the previous step. Register this new class with a [`UnionRule`](../the-rules-api/union-rules-advanced.mdx) for `InjectDependenciesRequest`. +3. Create a new rule that takes your new `InjectDependenciesRequest` subclass as a parameter and returns `InjectedDependencies`. + +```python +from pants.engine.addresses import Address +from pants.engine.target import Dependencies, InjectDependenciesRequest, InjectedDependencies +from pants.engine.rules import collect_rules, rule +from pants.engine.unions import UnionRule + +class ProtobufDependencies(Dependencies): + pass + +... + +class ProtobufSourceTarget(Target): + alias = "protobuf_source" + core_fields = (*COMMON_TARGET_FIELDS, ProtobufDependencies, ProtobufSourceField) + +class InjectProtobufDependencies(InjectDependenciesRequest): + inject_for = ProtobufDependencies + +@rule +async def inject_dependencies(_: InjectProtobufDependencies) -> InjectedDependencies: + address = Address("3rdparty/python", target_name="protobuf") + return InjectedDependencies([address]) + +def rules(): + return [ + *collect_rules(), + UnionRule(InjectDependenciesRequest, InjectProtobufDependencies), + ] +``` + +This example hardcodes the injected address. You can instead add logic to your rule to make this dynamic. For example, in Pants's Protobuf implementation, Pants looks for a `python_requirement` target with `protobuf`. See [protobuf/python/python_protobuf_subsystem.py](https://github.com/pantsbuild/pants/blob/main/src/python/pants/backend/codegen/protobuf/python/python_protobuf_subsystem.py). + +## 2. Install your code generator + +There are several ways for Pants to install your tool. See [Installing tools](../the-rules-api/installing-tools.mdx). This example will use `ExternalTool` because there is already a pre-compiled binary for Protoc. + +```python +from pants.core.util_rules.external_tool import ExternalTool +from pants.engine.platform import Platform + +class Protoc(ExternalTool): + options_scope = "protoc" + help = "The protocol buffer compiler (https://developers.google.com/protocol-buffers)." + + default_version = "3.11.4" + default_known_versions = [ + "3.11.4|linux_arm64 |f24c9fa1fc4a7770b8a5da66e515cb8a638d086ad2afa633abb97137c5f029a8|1481946", + "3.11.4|linux_x86_64|6d0f18cd84b918c7b3edd0203e75569e0c8caecb1367bbbe409b45e28514f5be|1591191", + "3.11.4|macos_arm64 |8c6af11e1058efe953830ecb38324c0e0fd2fb67df3891896d138c535932e7db|2482119", + "3.11.4|macos_x86_64|8c6af11e1058efe953830ecb38324c0e0fd2fb67df3891896d138c535932e7db|2482119", + ] + + def generate_url(self, plat: Platform) -> str: + platform_mapping = { + "linux_arm64": "linux-aarch_64", + "linux_x86_64": "linux-x86_64", + "macos_arm64": "osx-x86_64", + "macos_x86_64": "osx-x86_64", + } + plat_str = platform_mapping[plat.value] + return ( + f"https://github.com/protocolbuffers/protobuf/releases/download/" + f"v{self.version}/protoc-{self.version}-{plat_str}.zip" + ) + + def generate_exe(self, _: Platform) -> str: + return "./bin/protoc" +``` + +## 3. 
Create a `GenerateSourcesRequest`
+
+`GenerateSourcesRequest` tells Pants the `input` and the `output` of your code generator, such as going from `ProtobufSourceField -> PythonSourceField`. Pants will use this to determine when to use your code generation implementation.
+
+Subclass `GenerateSourcesRequest`:
+
+```python
+from pants.engine.target import GenerateSourcesRequest
+
+class GeneratePythonFromProtobufRequest(GenerateSourcesRequest):
+    input = ProtobufSourceField
+    output = PythonSourceField
+```
+
+The `input` should be the `SourcesField` class for your protocol target from Step 1.
+
+The `output` should typically be the `SourcesField` class corresponding to the "language" you're generating for, such as `JavaSourceField` or `PythonSourceField`. The `output` type will understand subclasses of what you specify, so, generally, you should specify `PythonSourceField` instead of something more specific like `PythonTestSourceField`.
+
+Note that your rule will not actually return an instance of the `output` type, e.g. `PythonSourceField`. Codegen rules only return a `Snapshot`, rather than a whole `SourcesField`. The `output` field is only used as a signal of intent.
+
+Finally, register your new `GenerateSourcesRequest` with a [`UnionRule`](../the-rules-api/union-rules-advanced.mdx).
+
+```python
+from pants.engine.rules import collect_rules
+from pants.engine.unions import UnionRule
+
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(GenerateSourcesRequest, GeneratePythonFromProtobufRequest),
+    ]
+```
+
+## 4. Create a rule for your codegen logic
+
+Your rule should take as parameters the `GenerateSourcesRequest` from Step 3 and the `Subsystem` (or `ExternalTool`) from Step 2. It should return `GeneratedSources`.
+
+```python
+from pants.engine.rules import rule
+from pants.engine.target import GeneratedSources
+
+...
+
+@rule
+async def generate_python_from_protobuf(
+    request: GeneratePythonFromProtobufRequest, protoc: Protoc
+) -> GeneratedSources:
+    ...
+    return GeneratedSources(result_snapshot)
+```
+
+The `request` parameter will have two fields: `protocol_sources: Snapshot` and `protocol_target: Target`. Often, you will want to include `protocol_sources` in the `input_digest` to the `Process` you use to run the generator. You can use `protocol_target` to look up more information about the input target, such as finding its dependencies.
+
+The rule should return `GeneratedSources`, which takes a [`Snapshot`](../the-rules-api/file-system.mdx) as its only argument. This should be a snapshot of the generated files for the input target.
+
+If you used `ExternalTool` in Step 2, you will use `Get(DownloadedExternalTool, ExternalToolRequest)` to install the tool. Be sure to merge this with the `protocol_sources` and any other relevant input digests via `Get(Digest, MergeDigests)`.
+
+For many code generators, you will need to get the input target's direct or transitive dependencies and include their sources in the `input_digest`. See [Rules and the Target API](../the-rules-api/rules-and-the-target-api.mdx).
+
+You will likely need to add logic for handling [source roots](../../using-pants/key-concepts/source-roots.mdx). For example, the code generator may not understand source roots, so you may need to [strip source roots](../the-rules-api/rules-and-the-target-api.mdx) before putting the sources in the `input_digest`. Likely, you will want to restore a source root after generation because most Pants code will assume that there is a source root present.
In the below example, we restore the original source root, e.g. `src/protobuf/f.proto` becomes `src/protobuf/f_pb2.py`. See [`protobuf/python/rules.py`](https://github.com/pantsbuild/pants/tree/master/src/python/pants/backend/codegen/protobuf/python/rules.py) for a more complex example that allows the user to specify what source root to use through a field on the `protobuf_source` target.
+
+```python
+@rule
+async def generate_python_from_protobuf(
+    request: GeneratePythonFromProtobufRequest, protoc: Protoc, platform: Platform
+) -> GeneratedSources:
+    download_protoc_get = Get(
+        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(platform)
+    )
+
+    # Protoc needs all transitive dependencies on `protobuf_source` targets to work properly. It
+    # won't actually generate those dependencies; it only needs to look at their .proto files to
+    # work with imports.
+    transitive_targets = await Get(
+        TransitiveTargets,
+        TransitiveTargetsRequest([request.protocol_target.address]),
+    )
+
+    # NB: By stripping the source roots, we avoid having to set the value `--proto_path`
+    # for Protobuf imports to be discoverable.
+    all_stripped_sources_get = Get(
+        StrippedSourceFiles,
+        SourceFilesRequest(
+            (tgt.get(SourcesField) for tgt in transitive_targets.closure),
+            for_sources_types=(ProtobufSourceField,),
+        ),
+    )
+    target_stripped_sources_get = Get(
+        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSourceField]]),
+    )
+
+    (
+        downloaded_protoc_binary,
+        all_sources_stripped,
+        target_sources_stripped,
+    ) = await MultiGet(
+        download_protoc_get,
+        all_stripped_sources_get,
+        target_stripped_sources_get,
+    )
+
+    input_digest = await Get(
+        Digest,
+        MergeDigests(
+            (all_sources_stripped.snapshot.digest, downloaded_protoc_binary.digest)
+        ),
+    )
+
+    output_dir = "_generated_files"
+    result = await Get(
+        ProcessResult,
+        Process(
+            (
+                downloaded_protoc_binary.exe,
+                "--python_out",
+                output_dir,
+                *target_sources_stripped.snapshot.files,
+            ),
+            input_digest=input_digest,
+            description=f"Generating Python sources from {request.protocol_target.address}.",
+            output_directories=(output_dir,),
+        ),
+    )
+
+    # We must do some path manipulation on the output digest for it to look like normal sources,
+    # including adding back a source root.
+    source_root_request = SourceRootRequest.for_target(request.protocol_target)
+    normalized_digest, source_root = await MultiGet(
+        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
+        Get(SourceRoot, SourceRootRequest, source_root_request),
+    )
+    source_root_restored = (
+        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
+        if source_root.path != "."
+        else await Get(Snapshot, Digest, normalized_digest)
+    )
+    return GeneratedSources(source_root_restored)
+```
+
+Finally, update your plugin's `register.py` to activate this file's rules.
+
+```python title="pants-plugins/protobuf/register.py"
+from protobuf import python_support
+
+def rules():
+    return [*python_support.rules()]
+```
+
+:::note Tip: use `export-codegen` to test it works
+Run `pants export-codegen path/to/file.ext` to ensure Pants is correctly generating the file. This will write the generated file(s) under the `dist/` directory, using the same path that will be used during Pants runs.
+:::
+
+## 5. Audit call sites to ensure they've enabled codegen
+
+Call sites must opt into using codegen, and they must also specify what types of sources they're expecting. See [Rules and the Target API](../the-rules-api/rules-and-the-target-api.mdx) about `SourcesField`.
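+
+A consuming rule might opt in by hydrating sources with codegen enabled. Here is a sketch; `tgt` and the Python-specific `PythonSourceField` stand in for whatever your own call site uses:
+
+```python
+from pants.backend.python.target_types import PythonSourceField
+from pants.engine.rules import Get
+from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField
+
+...
+
+# Setting `enable_codegen=True` lets codegen implementations satisfy the request
+# when the declared sources don't already match `for_sources_types`.
+hydrated_sources = await Get(
+    HydratedSources,
+    HydrateSourcesRequest(
+        tgt.get(SourcesField),
+        for_sources_types=(PythonSourceField,),
+        enable_codegen=True,
+    ),
+)
+```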
+
+For example, if you added a code generator that goes from `ProtobufSourceField -> JavaSourceField`, then Pants's Python backend would not use your new implementation because it ignores `JavaSourceField`.
+
+You should manually verify that every call site you expect to use your new codegen implementation actually does. Create a new protocol target, add it to the `dependencies` field of a target, and then run goals like `pants package` and `pants test` to make sure that the generated file works correctly.
+
+## 6. Add tests (optional)
+
+Refer to [Testing rules](../the-rules-api/testing-plugins.mdx).
+
+## 7. Make the tool exportable (optional)
+
+Refer to [Allowing tool export](allowing-tool-export.mdx) to allow users to export the tool for use in external programs.
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/allowing-tool-export.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/allowing-tool-export.mdx
new file mode 100644
index 000000000..5595d33c9
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/allowing-tool-export.mdx
@@ -0,0 +1,61 @@
+---
+    title: Making a tool exportable
+    sidebar_position: 10
+---
+
+How to make a tool exportable with the `export` goal.
+
+---
+
+Backends that implement the `export` goal can indicate binaries that should be exported. These will have their contents exported to a subfolder in the `dist/bins` directory, and the binary itself will be linked in `dist/bin`.
+
+## Downloadable tools
+
+Subclasses of `ExternalTool` (including `TemplatedExternalTool`) already implement the export logic. Tools are marked for export as follows:
+
+1. Implement `ExternalTool.generate_exe` if the default is not correct. For instance, a downloaded archive might include a binary, a readme, and a license. This method should point at the binary within the downloaded files.
+
+2. Register a `UnionRule` with `ExportableTool`. For example, `UnionRule(ExportableTool, FortranLint)`.
+
+## Implementing for new backends
+
+Backends need to implement:
+
+1. A subclass of `ExportRequest`
+
+```python
+@dataclass(frozen=True)
+class ExportExternalToolRequest(ExportRequest):
+    pass
+```
+
+2. A rule from this subclass to `ExportResults`
+
+```python
+@rule
+async def export_external_tools(
+    request: ExportExternalToolRequest, export: ExportSubsystem
+) -> ExportResults:
+    ...
+```
+
+3. Inside of that rule, fill in the `ExportResult.exported_binaries` field.
+
+```python
+ExportResult(
+    description=f"Export tool {req.resolve}",
+    reldir=dest,
+    digest=downloaded_tool.digest,
+    resolve=req.resolve,
+    exported_binaries=(ExportedBinary(name=Path(exe).name, path_in_export=exe),),
+)
+```
+
+4. For every tool, mark it for export by registering a `UnionRule` with `ExportableTool`.
+
+```python
+def rules():
+    return [
+        ...,
+        UnionRule(ExportableTool, FortranLint),
+    ]
+```
\ No newline at end of file
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/custom-python-artifact-kwargs.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/custom-python-artifact-kwargs.mdx
new file mode 100644
index 000000000..cb341f285
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/custom-python-artifact-kwargs.mdx
@@ -0,0 +1,223 @@
+---
+    title: Custom `python_artifact()` kwargs
+    sidebar_position: 9
+---
+
+How to add your own logic to `python_artifact()`.
+
+---
+
+Pants can build [Python distributions](../../python/overview/building-distributions.mdx), such as wheels and sdists, from information you provide in a [`python_distribution`](../../../reference/targets/python_distribution.mdx) target.
+
+When doing so, if you don't provide your own `setup.py` file, Pants generates one and passes it the kwargs provided in the `provides=python_artifact(...)` field to the `setup(...)` call. (Pants also generates some of the kwargs, such as `install_requires` and `namespace_packages`, by analyzing your code.)
+
+It's fairly common to want to generate more of the kwargs dynamically. For example, you may want to:
+
+- Reduce boilerplate by not repeating common kwargs across BUILD files.
+- Read from the file system to dynamically determine kwargs, such as the `long_description` or `version`.
+- Run processes like `git` to dynamically determine kwargs like `version`.
+
+You can write a plugin to add custom kwarg generation logic.
+
+Note: at most one applicable plugin may customize the kwargs for the `setup()` function of any given target.
+
+:::note Example
+See [here](https://github.com/pantsbuild/pants/blob/master/pants-plugins/internal_plugins/releases/register.py) for an example that Pants uses internally for its `python_distribution` targets. This plugin demonstrates reading from the file system to set the `version` and `long_description` kwargs, along with adding hardcoded kwargs.
+:::
+
+1. Set up a subclass of `SetupKwargsRequest`
+
+---
+
+Set the class method `is_applicable()` to determine whether your implementation should be used for the particular `python_distribution` target. If `False`, Pants will use the default implementation, which simply uses the explicitly provided `python_artifact` from the BUILD file.
+
+In this example, we will always use our custom implementation:
+
+```python
+from pants.backend.python.util_rules.package_dists import SetupKwargsRequest
+from pants.engine.target import Target
+
+class CustomSetupKwargsRequest(SetupKwargsRequest):
+    @classmethod
+    def is_applicable(cls, _: Target) -> bool:
+        return True
+```
+
+This example would instead use our plugin implementation only for `python_distribution` targets defined in the folder `src/python/project1`:
+
+```python
+class CustomSetupKwargsRequest(SetupKwargsRequest):
+    @classmethod
+    def is_applicable(cls, target: Target) -> bool:
+        return target.address.spec.startswith("src/python/project1")
+```
+
+Then, register your new `SetupKwargsRequest` with a [`UnionRule`](../the-rules-api/union-rules-advanced.mdx) so that Pants knows your implementation exists:
+
+```python
+from pants.engine.rules import collect_rules
+from pants.engine.unions import UnionRule
+
+...
+
+def rules():
+    return [
+        *collect_rules(),
+        UnionRule(SetupKwargsRequest, CustomSetupKwargsRequest),
+    ]
+```
+
+:::note Consider defining custom `python_distribution` target types
+If you don't want to always use a single custom implementation, an effective approach could be to create custom `python_distribution` target types so that your users decide which implementation they want to use in their BUILD files.
+
+For example, a user could do this:
+
+```python
+pants_python_distribution(
+    name="my-dist",
+    dependencies=[...],
+    provides=python_artifact(...)
+)
+
+pants_contrib_python_distribution(
+    name="my-dist",
+    dependencies=[...],
+    provides=python_artifact(...)
+)
+```
+
+To support this workflow, [create new target types](../the-target-api/creating-new-targets.mdx).
+
+```python
+class PantsPythonDistribution(Target):
+    alias = "pants_python_distribution"
+    core_fields = PythonDistribution.core_fields
+
+class PantsContribPythonDistribution(Target):
+    alias = "pants_contrib_python_distribution"
+    core_fields = PythonDistribution.core_fields
+```
+
+Then, for each `SetupKwargsRequest` subclass, check which target type was used:
+
+```python
+class PantsSetupKwargsRequest(SetupKwargsRequest):
+    @classmethod
+    def is_applicable(cls, target: Target) -> bool:
+        return isinstance(target, PantsPythonDistribution)
+```
+
+:::
+
+2. Create a rule with your logic
+
+---
+
+Your rule should take as a parameter the `SetupKwargsRequest` from step 1. This type has two fields: `target: Target` and `explicit_kwargs: dict[str, Any]`. You can use these fields to get more information on the target you are generating a `setup.py` for.
+
+Your rule should return `SetupKwargs`, which takes two arguments: `kwargs: dict[str, Any]` and `address: Address`.
+
+For example, this will simply hardcode a kwarg:
+
+```python
+from pants.backend.python.util_rules.package_dists import SetupKwargs
+from pants.engine.rules import rule
+
+@rule
+async def setup_kwargs_plugin(request: CustomSetupKwargsRequest) -> SetupKwargs:
+    return SetupKwargs(
+        {**request.explicit_kwargs, "plugin_demo": "hello world"}, address=request.target.address
+    )
+```
+
+Update your plugin's `register.py` to activate this file's rules.
+
+```python title="pants-plugins/python_plugins/register.py"
+from python_plugins import custom_python_artifact
+
+def rules():
+    return custom_python_artifact.rules()
+```
+
+Then, run `pants package path/to:python_distribution` and inspect the generated `setup.py` to confirm that your plugin worked correctly.
+
+Often, you will want to read from a file in your project to set kwargs like `version` or `long_description`. Use `await Get(DigestContents, PathGlobs)` to do this (see [File system](../the-rules-api/file-system.mdx)):
+
+```python
+from pants.backend.python.util_rules.package_dists import SetupKwargs
+from pants.engine.fs import DigestContents, GlobMatchErrorBehavior, PathGlobs
+from pants.engine.rules import Get, rule
+
+@rule
+async def setup_kwargs_plugin(request: CustomSetupKwargsRequest) -> SetupKwargs:
+    digest_contents = await Get(
+        DigestContents,
+        PathGlobs(
+            ["project/ABOUT.rst"],
+            description_of_origin="`python_artifact()` plugin",
+            glob_match_error_behavior=GlobMatchErrorBehavior.error,
+        ),
+    )
+    about_page_content = digest_contents[0].content.decode()
+    return SetupKwargs(
+        {**request.explicit_kwargs, "long_description": about_page_content},
+        address=request.target.address
+    )
+```
+
+It can be helpful to allow users to add additional kwargs to their BUILD files for you to consume in your plugin. For example, this plugin adds a custom `long_description_path` field, which gets popped and replaced by the plugin with a normalized `long_description` kwarg:
+
+```python
+python_distribution(
+    name="mydist",
+    dependencies=[...],
+    provides=python_artifact(
+        name="mydist",
+        ...
+        long_description_path="README.md",
+    ),
+    generate_setup = True,
+    sdist = False,
+)
+```
+
+```python
+import os.path
+
+from pants.backend.python.util_rules.package_dists import SetupKwargs
+from pants.engine.fs import DigestContents, GlobMatchErrorBehavior, PathGlobs
+from pants.engine.rules import Get, rule
+
+@rule
+async def setup_kwargs_plugin(request: CustomSetupKwargsRequest) -> SetupKwargs:
+    original_kwargs = request.explicit_kwargs.copy()
+    long_description_relpath = original_kwargs.pop("long_description_path", None)
+    if not long_description_relpath:
+        raise ValueError(
+            f"The python_distribution target {request.target.address} did not include "
+            "`long_description_path` in its python_artifact's kwargs. Our plugin requires this! "
+            "Please set to a path relative to the BUILD file, e.g. `ABOUT.md`."
+        )
+
+    build_file_path = request.target.address.spec_path
+    long_description_path = os.path.join(build_file_path, long_description_relpath)
+    digest_contents = await Get(
+        DigestContents,
+        PathGlobs(
+            [long_description_path],
+            description_of_origin=f"the 'long_description_path' kwarg in {request.target.address}",
+            glob_match_error_behavior=GlobMatchErrorBehavior.error,
+        ),
+    )
+    description_content = digest_contents[0].content.decode()
+    return SetupKwargs(
+        {**original_kwargs, "long_description": description_content},
+        address=request.target.address
+    )
+```
+
+Refer to these guides for additional things you may want to do in your plugin:
+
+- [Read from options](../the-rules-api/options-and-subsystems.mdx). Also see [here](https://github.com/pantsbuild/pants/blob/master/pants-plugins/internal_plugins/releases/register.py) for an example.
+- [Read values from the target](../the-rules-api/rules-and-the-target-api.mdx) using the Target API.
+- [Run a `Process`](../the-rules-api/processes.mdx), such as `git`. Also see [Installing tools](../the-rules-api/installing-tools.mdx).
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/index.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/index.mdx
new file mode 100644
index 000000000..3a45674ff
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/index.mdx
@@ -0,0 +1,16 @@
+---
+    title: Common plugin tasks
+    sidebar_position: 4
+---
+
+---
+
+- [Add a linter](./add-a-linter.mdx)
+- [Add a formatter](./add-a-formatter.mdx)
+- [Add a typechecker](./add-a-typechecker.mdx)
+- [Add codegen](./add-codegen.mdx)
+- [Add a REPL](./add-a-repl.mdx)
+- [Add tests](./run-tests.mdx)
+- [Add lockfile support](./plugin-lockfiles.mdx)
+- [Custom `setup.py` kwargs](./custom-python-artifact-kwargs.mdx)
+- [Plugin upgrade guide](./plugin-upgrade-guide.mdx)
\ No newline at end of file
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-lockfiles.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-lockfiles.mdx
new file mode 100644
index 000000000..87da9be2e
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-lockfiles.mdx
@@ -0,0 +1,149 @@
+---
+    title: "Add lockfiles"
+    sidebar_position: 7
+---
+
+How to add lockfiles and the `generate-lockfiles` goal.
+
+---
+Lockfiles are a way to pin dependencies to exact versions, often including hashes to guarantee integrity between the version pinned and the version downloaded.
+
+This guide will walk you through implementing lockfiles and hooking them into the `generate-lockfiles` goal.
It assumes that your language has a tool that supports generating and using lockfiles, or that you have written code to do so.
+
+## 1. Expose your lockfiles to Pants
+
+Create a subclass of `KnownUserResolveNamesRequest` to inform Pants about which resolves exist, and a subclass of `RequestedUserResolveNames` for Pants to request those resolves later. Implement the resolve-finding logic in a rule from your subclass of `KnownUserResolveNamesRequest` to `KnownUserResolveNames`. Set `KnownUserResolveNames.requested_resolve_names_cls` to your subclass of `RequestedUserResolveNames`.
+
+```python tab={"label": "pants-plugins/fortran/lockfiles.py"}
+from pants.core.goals.generate_lockfiles import KnownUserResolveNamesRequest, RequestedUserResolveNames, KnownUserResolveNames
+from pants.engine.rules import rule
+from pants.engine.target import AllTargets
+
+
+class KnownFortranResolveNamesRequest(KnownUserResolveNamesRequest):
+    pass
+
+
+class RequestedFortranResolveNames(RequestedUserResolveNames):
+    pass
+
+
+@rule
+async def identify_user_resolves_from_fortran_files(
+    _: KnownFortranResolveNamesRequest,
+    all_targets: AllTargets,
+) -> KnownUserResolveNames:
+    ...
+    return KnownUserResolveNames(
+        ...,
+        requested_resolve_names_cls=RequestedFortranResolveNames
+    )
+```
+
+## 2. Connect resolve names to requests to generate lockfiles
+
+Create a subclass of `GenerateLockfile`. Pants will use this to represent a lockfile to generate. Then create a rule from your subclass of `RequestedUserResolveNames` to `UserGenerateLockfiles`. Pants will use this rule to convert a user's request to export a resolve by name into the information needed to export that resolve.
+
+```python tab={"label": "pants-plugins/fortran/lockfiles.py"}
+from dataclasses import dataclass
+
+from pants.backend.fortran.target_types import FortranDeploymentTarget
+from pants.core.goals.generate_lockfiles import GenerateLockfile, UserGenerateLockfiles
+from pants.engine.rules import rule
+
+
+@dataclass(frozen=True)
+class GenerateFortranLockfile(GenerateLockfile):
+    target: FortranDeploymentTarget
+
+
+@rule
+async def setup_user_lockfile_requests(
+    requested: RequestedFortranResolveNames,
+) -> UserGenerateLockfiles:
+    ...
+    return UserGenerateLockfiles(
+        [
+            GenerateFortranLockfile(
+                ...
+            )
+        ]
+    )
+```
+
+## 3. Generate lockfiles
+
+Create a rule from your subclass of `GenerateLockfile` to `GenerateLockfileResult`. This rule generates the lockfile. In the common case that you're running a process to generate the lockfile, you can use `Process.output_files` to gather the generated files from the execution sandbox.
+
+```python tab={"label": "pants-plugins/fortran/lockfiles.py"}
+from pants.backend.fortran.tool import FortranProcess
+from pants.core.goals.generate_lockfiles import GenerateLockfileResult
+from pants.engine.internals.selectors import Get
+from pants.engine.process import ProcessResult
+from pants.engine.rules import rule
+
+
+@rule
+async def generate_lockfile_from_sources(
+    request: GenerateFortranLockfile,
+) -> GenerateLockfileResult:
+    ...
+
+    result = await Get(
+        ProcessResult,
+        FortranProcess(...),
+    )
+
+    return GenerateLockfileResult(result.output_digest, request.resolve_name, request.lockfile_dest)
+```
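+
+For example, the elided `FortranProcess` above might boil down to a plain `Process` along these lines (a sketch; the `fortran-lockgen` binary and its flags are hypothetical):
+
+```python
+result = await Get(
+    ProcessResult,
+    Process(
+        argv=["fortran-lockgen", "--output", request.lockfile_dest],
+        input_digest=input_digest,  # the tool plus any manifests it reads
+        # Capture the generated lockfile from the sandbox so it lands in `output_digest`.
+        output_files=(request.lockfile_dest,),
+        description=f"Generate lockfile for {request.resolve_name}",
+    ),
+)
+```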
+
+## 4. Register rules
+
+At the bottom of the file, let Pants know what your rules and types do. Then update your plugin's `register.py` to tell Pants about them.
+
+```python tab={"label": "pants-plugins/fortran/lockfiles.py"}
+from pants.core.goals.generate_lockfiles import GenerateLockfile, KnownUserResolveNamesRequest, RequestedUserResolveNames
+from pants.engine.rules import collect_rules
+from pants.engine.unions import UnionRule
+
+
+def rules():
+    return (
+        *collect_rules(),
+        UnionRule(GenerateLockfile, GenerateFortranLockfile),
+        UnionRule(KnownUserResolveNamesRequest, KnownFortranResolveNamesRequest),
+        UnionRule(RequestedUserResolveNames, RequestedFortranResolveNames),
+    )
+```
+
+```python tab={"label": "pants-plugins/fortran/register.py"}
+from fortran import lockfiles
+
+def rules():
+    return [
+        ...,
+        *lockfiles.rules()
+    ]
+```
+
+## 5. Use lockfiles for fetching dependencies
+
+If you have a tool that supports lockfiles, the easiest way to get the lockfile to it is to use a glob to pull the file into a digest.
+
+```python
+from pathlib import Path
+
+from pants.engine.fs import PathGlobs
+from pants.engine.internals.native_engine import Snapshot
+from pants.engine.internals.selectors import Get
+from pants.engine.rules import rule
+
+
+@rule
+async def init_fortran(request: FortranInitRequest) -> FortranInitResponse:
+    ...
+    lockfile_snapshot = await Get(
+        Snapshot,
+        PathGlobs([(Path(request.root_module.address.spec_path) / ".fortran.lock").as_posix()]),
+    )
+    ...
+```
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx
new file mode 100644
index 000000000..bbafe2e00
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx
@@ -0,0 +1,837 @@
+---
+    title: Plugin upgrade guide
+    sidebar_position: 9
+---
+
+How to adjust for changes made to the Plugin API.
+
+---
+
+## 2.23
+
+### Deprecated GenerateToolLockfileSentinel
+
+`GenerateToolLockfileSentinel` was used to generate lockfiles. It is deprecated in favor of `ExportableTool`.
+
+```python title=Before
+class JarJar(JvmToolBase):
+    ...
+
+class JarJarGeneratorLockfileSentinel(GenerateToolLockfileSentinel):
+    resolve_name = JarJar.options_scope
+
+
+@rule
+async def generate_jarjar_lockfile_request(
+    _: JarJarGeneratorLockfileSentinel, jarjar: JarJar
+) -> GenerateJvmLockfileFromTool:
+    return GenerateJvmLockfileFromTool.create(jarjar)
+
+
+def rules():
+    return [..., UnionRule(GenerateToolLockfileSentinel, JarJarGeneratorLockfileSentinel)]
+```
+
+```python title=After
+class JarJar(JvmToolBase):
+    ...
+
+def rules():
+    return [..., UnionRule(ExportableTool, JarJar)]
+```
+
+#### JVM: Deprecated `Get(GenerateJvmLockfileFromTool, GenerateToolLockfileSentinel)` for lockfiles
+
+As part of the deprecation, a `GenerateJvmLockfileFromTool` can be created directly from your tool, without a `Get`. You may need to request your `JvmToolBase` subsystem in your rule.
+
+```python title=Before
+@rule
+async def shade_jar(request: ShadeJarRequest, jdk: InternalJdk, jarjar: JarJar) -> ShadedJar:
+    ...
+    lockfile_request = await Get(GenerateJvmLockfileFromTool, JarJarGeneratorLockfileSentinel())
+
+    tool_classpath = await Get(ToolClasspath, ToolClasspathRequest(lockfile=lockfile_request))
+```
+
+```python title=After
+@rule
+async def shade_jar(request: ShadeJarRequest, jdk: InternalJdk, jarjar: JarJar) -> ShadedJar:
+    ...
+    await Get(
+        ToolClasspath, ToolClasspathRequest(lockfile=GenerateJvmLockfileFromTool.create(jarjar))
+    )
+```
+
+### JVM: Migrating from `GenerateJvmLockfileFromTool` to `JvmToolBase`
+
+`JvmToolBase` is the preferred way to define JVM-based tools.
+
+1. Set the `options_scope`, usually to the `resolve_name`.
+2. Set the `help` text.
+3. `default_artifacts` is the `artifact_inputs` converted to a tuple. A version can be replaced by `{version}` and moved to the `default_version` field.
+4. `default_lockfile_resource` is the same.
+
+A `GenerateJvmLockfileFromTool` is created from a `JvmToolBase` with `GenerateJvmLockfileFromTool.create`.
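+
+Put together, a migrated tool definition might look like this (a sketch; the version, coordinates, and lockfile resource are illustrative):
+
+```python
+from pants.jvm.resolve.jvm_tool import GenerateJvmLockfileFromTool, JvmToolBase
+
+
+class JarJar(JvmToolBase):
+    options_scope = "jarjar"
+    help = "The JarJar shading tool."
+
+    default_version = "1.8.1"
+    # `{version}` is interpolated from `default_version` (or the user's override).
+    default_artifacts = ("org.pantsbuild:jarjar:{version}",)
+    default_lockfile_resource = ("pants.jvm.shading", "jarjar.lock")
+
+
+# Later, inside a rule that receives the `JarJar` subsystem:
+# lockfile_request = GenerateJvmLockfileFromTool.create(jarjar)
+```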
+
+## 2.17
+
+### Deprecated some `Request` types in favor of `Get` with only one arg
+
+For several APIs like `pants.core.util_rules.system_binaries`, we had an eager and a lazy version of the same API. You could do either of these two:
+
+```python
+from pants.core.util_rules.system_binaries import ZipBinary, ZipBinaryRequest
+from pants.engine.rules import Get, rule
+
+class MyOutput:
+    pass
+
+@rule
+def my_rule(zip_binary: ZipBinary) -> MyOutput:
+    return MyOutput()
+
+@rule
+async def my_rule_lazy() -> MyOutput:
+    zip_binary = await Get(ZipBinary, ZipBinaryRequest())
+    return MyOutput()
+```
+
+The lazy API is useful, for example, when you only want to `Get` that output type inside an `if` branch.
+
+We added syntax in 2.17 to use `Get(OutputType)`, whereas before you had to do `Get(OutputType, OutputTypeRequest)` or (as of 2.15) `Get(OutputType, {})`. So, these `OutputTypeRequest` types are now redundant and deprecated in favor of simply using `Get(OutputType)`.
+
+### `EnvironmentBehavior.UNMIGRATED` is no longer available
+
+Following the deprecation cycle in 2.15, all `Goal`s need to set `EnvironmentBehavior.LOCAL_ONLY` or `EnvironmentBehavior.USES_ENVIRONMENTS`.
+
+## 2.16
+
+### `RunFieldSet` and `TestRequest` now have a `.rules()` method
+
+These methods should be used to register your run/test plugins:
+
+```python
+
+def rules():
+    return [
+        *MyRunFieldSetSubclass.rules(),
+        *MyTestRequestSubclass.rules(),
+    ]
+```
+
+Additionally, these types now register the rules used for `--debug`/`--debug-adapter` by default. If your plugin doesn't support these flags, simply remove the rules you've declared and let the default ones handle erroring. If your plugin does support these, set the class properties `supports_debug = True`/`supports_debug_adapter = True`, respectively.
+
+### `RunFieldSet` can be used to run targets in the sandbox as part of a build rule
+
+With the new `experimental_run_in_sandbox` target type, targets that implement `RunFieldSet` can be run as a build rule for their side effects.
+
+Many rules that create `RunRequest`s can be used verbatim, but others may make assumptions that they will not be run hermetically. You will need to set `run_in_sandbox_behavior` to one of the following values to generate a rule that allows your targets to be run in the sandbox:
+
+- `RunInSandboxBehavior.RUN_REQUEST_HERMETIC`: Use the existing `RunRequest`-generating rule, and enable caching. Use this if you are confident the behavior of the rule relies only on state that is captured by Pants (e.g. binary paths are found using `EnvironmentVarsRequest`), and that the rule only refers to files in the sandbox.
+- `RunInSandboxBehavior.RUN_REQUEST_NOT_HERMETIC`: Use the existing `RunRequest`-generating rule, and do not enable caching. Use this if your existing rule is mostly suitable for use in the sandbox, but you cannot guarantee reproducible behavior.
+- `RunInSandboxBehavior.CUSTOM`: Opt to write your own rule that returns `RunInSandboxRequest`.
+- `RunInSandboxBehavior.NOT_SUPPORTED`: Opt out of being usable in `experimental_run_in_sandbox`. Attempting to use such a target will result in a runtime exception.
+
+We expect to deprecate `RUN_REQUEST_NOT_HERMETIC` and `NOT_SUPPORTED` in a few versions' time: these options are provided to give you some time to make your existing rules match the semantics of `RUN_REQUEST_HERMETIC`, or to add a `CUSTOM` rule.
+
+### `BinaryShimsRequest` no longer accepts `output_directory`
+
+`BinaryShims` now produces all of its shim scripts in the root of its `digest`, and provides helper methods for use with `immutable_input_digests` and the `PATH` environment variable. It also produces a unique directory name so that multiple rules can be called to populate `PATH`.
+
+Consider using these helper methods in favor of the old behavior:
+
+```python
+process = Process(
+    immutable_input_digests=binary_shims.immutable_input_digests,
+    env={"PATH": binary_shims.path_component},
+)
+```
+
+You can replicate the previous behavior using `AddPrefix`:
+
+```python
+new_digest = await Get(Digest, AddPrefix(binary_shims.digest, output_directory))
+```
+
+## 2.15
+
+### `lint` and `fmt` schema changes
+
+In order to accomplish several goals (namely, targetless formatters and unifying the implementation of `lint`), `lint` and `fmt` have undergone a drastic change of their plugin API.
+
+#### 1. `LintRequest` and `FmtTargetsRequest` now require a `tool_subsystem` class attribute.
+
+Instead of the `name` class attribute, `LintRequest` and `FmtTargetsRequest` require subclasses to provide a `tool_subsystem` class attribute with a value of your tool's `Subsystem` subclass.
+
+#### 2. Your tool subsystem should have a `skip` option.
+
+Although not strictly required by the engine to function correctly, `mypy` will complain if the subsystem type provided to `tool_subsystem` doesn't have a `skip: SkipOption` option registered.
+
+Otherwise, you can `# type: ignore[assignment]` on your `tool_subsystem` declaration.
+
+#### 3. The core goals now use a 2-rule approach
+
+Fmt:
+
+In order to support targetless formatters, `fmt` needs to know which _files_ you'll be operating on. Therefore the plugin API for `fmt` has forked into 2 rules:
+
+1. A rule taking `.PartitionRequest` and returning a `Partitions` object. This is sometimes referred to as the "partitioner" rule.
+2. A rule taking `.Batch` and returning a `FmtResult`. This is sometimes referred to as the "runner" rule.
+
+This way `fmt` can serialize tool runs that operate on the same file(s) while parallelizing tool runs that don't overlap.
+
+(Why support targetless formatters? This allows us to have `BUILD` file formatters, formatters like `Prettier` that run on your codebase _without_ boilerplate targets, as well as Pants running interesting deprecation fixers on its own files.)
+
+The partitioner rule gives you all the matching files (or `FieldSet`s, depending on which class you subclassed), and you'll return a mapping from partition metadata to files (called a `Partition`). The metadata can be anything passable at the rule boundary and is given back to you in your runner rule. The partitioner rule gives you an opportunity to perform expensive `Get`s once for the entire run, to partition the inputs based on metadata to simplify your runner, and to have a place for easily skipping your tool if requested.
+
+The runner rule will mostly remain unchanged, aside from the request type (`.Batch`), which now has a `.files` property.
+
+If you don't require any `Get`s or metadata for your tool in your partitioner rule, Pants has a way to provide a "default" implementation. In your `FmtTargetsRequest` subclass, set the `partitioner_type` class variable to `PartitionerType.DEFAULT_SINGLE_PARTITION` and only provide a runner rule.
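+
+Sketched as skeletons (the request and subsystem names are illustrative, and the module path for `Partitions` is an assumption; check `pants.core.util_rules.partitions` in your Pants version), the two rules look like:
+
+```python
+from pants.core.goals.fmt import FmtResult
+from pants.core.util_rules.partitions import Partitions
+from pants.engine.rules import rule
+
+
+@rule
+async def partition_my_formatter(
+    request: MyFormatterRequest.PartitionRequest, my_formatter: MyFormatterSubsystem
+) -> Partitions:
+    # Return an empty `Partitions` to skip the tool entirely, or map partition
+    # metadata to the files each run should operate on.
+    ...
+
+
+@rule
+async def run_my_formatter(request: MyFormatterRequest.Batch) -> FmtResult:
+    # `request.files` holds the files in this batch.
+    ...
+```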
+
+---
+
+Lint:
+
+Lint plugins are almost identical to format plugins, except in 2 ways:
+
+1. Your partitioner rule still returns a `Partitions` object, but the element type can be anything.
+2. `.Batch` has an `.elements` field instead of `.files`.
+
+---
+
+As always, taking a look at Pants' own plugins can also be very enlightening.
+
+### `test` schema changes
+
+To enable running tests in batches, the plugin API for `test` has significantly changed. The new API largely resembles the `lint`/`fmt` API described above.
+
+#### 1. Test plugins must now define a `skip`-able `Subsystem`.
+
+To hook into the new API, a test runner must declare a subclass of `Subsystem` with a `skip: SkipOption` option. Add `skip = SkipOption("test")` to your existing (or new) subsystems.
+
+#### 2. Test plugins must define a subclass of `TestRequest`.
+
+To define the rules expected by the new `test` API, you will need to define a `TestRequest` subclass. This new type will point at your plugin-specific `TestFieldSet` and `Subsystem` subclasses:
+
+```python
+class CustomTestRequest(TestRequest):
+    field_set_type = CustomTestFieldSet
+    tool_subsystem = CustomSubsystem
+```
+
+After declaring your new type, register its rules:
+
+```python
+def rules():
+    return [
+        # Add to what you already have:
+        *CustomTestRequest.rules(),
+    ]
+```
+
+#### 3. Test execution now uses a 2-rule approach
+
+The plugin API for `test` has forked into 2 rules:
+
+1. A rule taking `.PartitionRequest` and returning a `Partitions` object. This is sometimes referred to as the "partitioner" rule.
+2. A rule taking `.Batch` and returning a `TestResult`. This is sometimes referred to as the "runner" rule.
+
+The "partitioner" rule was introduced to allow plugins to group tests into "compatible" batches, to be executed as a batch within the "runner" rule. The "runner" rule is a replacement for the previous API, which took `TestFieldSet` instances as input.
+
+By default, registering `.rules()` will register a "partitioner" rule that creates a single-element partition per input `TestFieldSet`, replicating the behavior from before Pants 2.15. You can then upgrade your existing "runner" rule to take the new input type.
+
+Before:
+
+```python
+@rule
+async def run_test(field_set: CustomTestFieldSet) -> TestResult:
+    ...
+```
+
+After:
+
+```python
+@rule
+async def run_tests(batch: CustomTestRequest.Batch) -> TestResult:
+    field_set = batch.single_element
+    ...
+```
+
+If you would like to make use of the new support for batched testing, override the `partitioner_type` field in your `TestRequest` subclass:
+
+```python
+class CustomTestRequest(TestRequest):
+    field_set_type = CustomTestFieldSet
+    tool_subsystem = CustomSubsystem
+    partitioner_type = PartitionerType.CUSTOM
+```
+
+This will prevent registration of the default "partitioner" rule, allowing you to implement any partitioning logic you'd like. You'll then need to update your "runner" rule to handle a multi-element `batch`.
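+
+For example (a sketch, reusing the hypothetical names above; `.elements` here is an assumption mirroring the `lint` batch API):
+
+```python
+@rule
+async def run_tests(batch: CustomTestRequest.Batch) -> TestResult:
+    # With a custom partitioner, a batch may contain several compatible field
+    # sets that should be executed together.
+    field_sets = batch.elements
+    ...
+```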
+
+### `EnvironmentName` is now required to run processes, get environment variables, etc.
+
+Pants 2.15 introduces the concept of ["Target Environments"](../../using-pants/environments.mdx), which allow Pants to execute processes in remote or local containerized environments (using Docker), and to specify configuration values for those environments.
+
+In order to support the new environments feature, an `EnvironmentName` parameter is now required in order to:
+
+- Run a `Process`
+- Get environment variables
+- Inspect the current `Platform`
+
+This parameter is often provided automatically via a transitive value from earlier in the call graph. The choice of whether to use a local or alternative environment must be made at a `@goal_rule` level.
+
+In many cases, the local execution environment is sufficient. If so, your rules will not require significant work to migrate, and execution will behave similarly to pre-2.15 versions of Pants.
+
+In cases where the environment needs to be factored into rule execution, you'll need to do some work.
+
+2.15 adds a deprecation warning for all goals that have not considered whether they need to use the execution environment.
+
+#### `Goal.environment_behavior`
+
+2.15 adds the `environment_behavior` property to the `Goal` class, which controls whether an `EnvironmentName` is automatically injected when a `@goal_rule` runs.
+
+When `environment_behavior=Goal.EnvironmentBehavior.UNMIGRATED` (the default), the `QueryRule` that is installed for a `@goal_rule` will include an `EnvironmentName` and will raise a deprecation warning.
+
+If your Goal only ever needs to use the local target environment, use `environment_behavior=Goal.EnvironmentBehavior.LOCAL_ONLY`. The `QueryRule` installed for the `@goal_rule` will include an `EnvironmentName` that refers to a local environment, and will silence the deprecation warning. No further migration work needs to be done for your Goal.
+
+##### For goals that need to respect `EnvironmentField`s
+
+If your goal needs to select the target's specified environment when running underlying rules, set `environment_behavior=Goal.EnvironmentBehavior.USES_ENVIRONMENTS`, which will silence the deprecation. Unlike for the `LOCAL_ONLY` behavior, any rules that require an `EnvironmentName` will need to specify that name directly.
+
+In general, `Goal`s should use `EnvironmentNameRequest` to get `EnvironmentName`s for the targets that they will be operating on:
+
+```python
+Get(
+    EnvironmentName,
+    EnvironmentNameRequest,
+    EnvironmentNameRequest.from_field_set(field_set),
+)
+```
+
+Then, the `EnvironmentName` should be used at `Get` callsites which require an environment:
+
+```python
+Get(TestResult, {field_set: TestFieldSet, environment_name: EnvironmentName})
+```
+
+The multi-parameter `Get` syntax provides the value transitively, and so will need to be used in many `Get` callsites in `@goal_rule`s which transitively run processes, consume the platform, etc. One exception is that (most of) the APIs provided by `pants.engine.target` are pinned to running in the `__local__` environment, and so do not require an `EnvironmentName` to use.
+
+#### `RuleRunner.inherent_environment`
+
+To reduce the number of changes necessary in tests, the `RuleRunner.inherent_environment` argument defaults to injecting an `EnvironmentName` when running `@rule`s in tests.
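+
+As a result, most existing tests need no changes (a sketch; `my_plugin_rules` is a placeholder for your own rules):
+
+```python
+from pants.testutil.rule_runner import RuleRunner
+
+# `inherent_environment` defaults to injecting a local `EnvironmentName`,
+# so most pre-2.15 tests keep working unchanged.
+rule_runner = RuleRunner(rules=my_plugin_rules())
+```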
 + +### `platform` kwarg for `Process` deprecated + +Previously, we assumed processes were platform-agnostic, i.e. they had identical output on all platforms (OS x CPU architecture). You had to opt into platform awareness by setting the kwarg `platform` on the `Process`; otherwise, remote caching could incorrectly use results from a different platform. + +This was not a safe default, and this behavior also breaks the new Docker support. So, now all processes are automatically marked as platform-specific. + +https://github.com/pantsbuild/pants/issues/16873 proposes how you will eventually be able to mark a `Process` as platform-agnostic. + +To fix this deprecation, simply delete the `platform` kwarg. + +### `Environment`, `EnvironmentRequest`, and `CompleteEnvironment` renamed and moved + +The types were moved from `pants.engine.environment` to `pants.engine.env_vars`, and now have `Vars` in their names: + +Before: `pants.engine.environment.{Environment,EnvironmentRequest,CompleteEnvironment}` +After: `pants.engine.env_vars.{EnvironmentVars,EnvironmentVarsRequest,CompleteEnvironmentVars}` + +The old names still exist until Pants 2.16 as deprecated aliases. + +This rename was to avoid ambiguity with the new "environments" mechanism, which lets users specify different options for environments like Linux vs. macOS and running in Docker images. + +### `MockGet` expects `input_types` kwarg, not `input_type` + +It's now possible in Pants 2.15 to use zero arguments or multiple arguments in a `Get`. To support this change, `MockGet` from `run_rule_with_mocks()` now expects the kwarg `input_types: tuple[type, ...]` rather than `input_type: type`. + +Before: + +```python +MockGet( + output_type=LintResult, + input_type=LintTargetsRequest, + mock=lambda _: LintResult(...), +) +``` + +After: + +```python +MockGet( + output_type=LintResult, + input_types=(LintTargetsRequest,), + mock=lambda _: LintResult(...), +) +``` + +### Deprecated `Platform.current` + +The `Platform` to use will soon become dependent on a `@rule`'s position in the `@rule` graph. To get the correct `Platform`, a `@rule` should request a `Platform` as a positional argument. + +### Deprecated `convert_dir_literal_to_address_literal` kwarg + +The `convert_dir_literal_to_address_literal` keyword argument for `RawSpecs.create()` and `SpecsParser.parse_specs()` no longer does anything. It should be deleted. + +## 2.14 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.14.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.14.x.md) for the changelog. + +### Removed second type parameter from `Get` + +`Get` now takes only a single type parameter for the output type: `Get[_Output]`. The input type parameter was unused. + +### `FmtRequest` -> `FmtTargetsRequest` + +In order to support non-target formatting (like `BUILD` files) we'll be introducing additional `fmt` request types. Therefore `FmtRequest` has been renamed to `FmtTargetsRequest` to reflect the behavior. + +This change also matches `lint`, which uses `LintTargetsRequest`. + +### Optional Option flag name + +Pants 2.14 adds support for deducing the flag name from the attribute name when declaring `XOption`s. You can still provide the flag name in case the generated one shouldn't match the attribute name. + +Before: + +```python +my_version = StrOption("--my-version", ...) +_run = BoolOption("--run", ...) +``` + +Now: + +```python +my_version = StrOption(...) # Still uses --my-version +_run = BoolOption(...)
# Still uses --run +``` + +### `InjectDependencies` -> `InferDependencies`, with `InferDependencies` using a `FieldSet` + +`InjectDependenciesRequest` has been folded into `InferDependenciesRequest`, which has also been changed to receive a `FieldSet`. + +If you have an `InjectDependenciesRequest` type/rule, it should be renamed to `Infer...`. + +Then, for each `InferDependenciesRequest`, the `infer_from` class variable should now point to a relevant `FieldSet` subclass type. If you had an `Inject...` request, the `required_fields` will likely include the relevant `Dependencies` subclass. Likewise, for a pre-2.14 `Infer...` request, the `required_fields` will include the relevant `SourcesField` subclass. + +Note that in most cases, you no longer need to request the target in your rule code, and should rely on `FieldSet`'s mechanisms for matching targets and getting field values. + +### `GenerateToolLockfileSentinel` encouraged to use language-specific subclasses + +Rather than directly subclassing `GenerateToolLockfileSentinel`, we encourage you to subclass `GeneratePythonToolLockfileSentinel` and `GenerateJvmToolLockfileSentinel`. This is so that we can distinguish what language a tool belongs to, which is used for options like `[python].resolves_to_constraints_file` to validate which resolve names are recognized. + +Things will still work if you do not make this change, other than the new options not recognizing your tool. + +However, keep the `UnionRule` the same, i.e. with the first argument still `GenerateToolLockfileSentinel`. + +### `matches_filespec()` replaced by `FilespecMatcher` + +Instead, use `FilespecMatcher(includes=[], excludes=[]).matches(paths: Sequence[str])` from `pants.source.filespec`. + +The functionality is the same, but can have better performance because we don't need to parse the same globs each time `.matches()` is called. When possible, reuse the same `FilespecMatcher` object to get these performance benefits. + +## 2.13 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.13.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.13.x.md) for the changelog. + +### `AddressInput` and `UnparsedAddressInputs` require `description_of_origin` + +Both types now require the keyword argument `description_of_origin: str`, which is used to make better error messages when the address cannot be found. + +If the address is hardcoded in your rules, then use a description like `"the rule find_my_binary"`. If the address comes from user inputs, it is helpful to mention where the user defines the value, for example `f"the dependencies field from the target {my_tgt.address}"`. + +You can now also set `UnparsedAddressInputs(skip_invalid_addresses=True, ...)`, which will not error when addresses are invalid. + +### `WrappedTarget` requires `WrappedTargetRequest` + +Before: + +```python +from pants.engine.addresses import Address +from pants.engine.target import WrappedTarget + +await Get(WrappedTarget, Address, my_address) +``` + +After: + +```python +from pants.engine.target import WrappedTarget, WrappedTargetRequest + +await Get(WrappedTarget, WrappedTargetRequest(my_address, description_of_origin="my rule")) +``` + +### Redesign of `Specs` + +Specs, aka command line arguments, were redesigned in Pants 2.13: + +- The globs `::` and `:` now match all files, even if there are no owning targets. +- Directory args like `my_dir/` can be set to match everything in the current directory, rather than the default target `my_dir:my_dir`.
 +- Ignore globs were added with a `-` prefix, like `:: -ignore_me::` + +To support these changes, we redesigned the class `Specs` and its sibling classes like `AddressSpecs`. + +Renames for the `Spec` subclasses: + +- `SiblingAddresses` -> `DirGlobSpec` (vs. `DirLiteralSpec`) +- `DescendantAddresses` -> `RecursiveGlobSpec` +- `AscendantAddresses` -> `AncestorGlobSpec` + +Those classes now have a keyword arg `error_if_no_target_matches`, rather than having a distinct class like `MaybeDescendantAddresses`. + +`AddressSpecs` was renamed to `SpecsWithoutFileOwners`, and `FilesystemSpecs` to `SpecsWithOnlyFileOwners`. But almost always, you should instead use the new `RawSpecs` class because it is simpler. See [Rules API and Target API](../the-rules-api/rules-and-the-target-api.mdx#how-to-resolve-targets) for how to use `Get(Targets, RawSpecs)`, including its keyword arguments. + +If you were directly creating `Specs` objects before, you likely want to change to `RawSpecs`. `Specs` allows us to handle "ignore specs" like `-ignore_me/`, which is usually not necessary in rules. See the above paragraph for how to use `RawSpecs`. + +### `SpecsSnapshot` is now `SpecsPaths` + +`SpecsSnapshot` was replaced with the more performant `SpecsPaths` from `pants.engine.fs`, which avoids digesting any files into the LMDB store. + +Instead of `specs_snapshot.snapshot.files`, use `specs_paths.files` to get a list of all matching files. + +If you still need the `Digest` (`specs_snapshot.snapshot.digest`), use `await Get(Digest, PathGlobs(globs=specs_paths.files))`. + +### Removed `PutativeTargetsSearchPaths` for `tailor` plugins + +Before: + +```python +all_proto_files = await Get(Paths, PathGlobs, req.search_paths.path_globs("*.proto")) +``` + +After: + +```python +all_proto_files = await Get(Paths, PathGlobs, req.path_globs("*.proto")) +``` + +You can also now specify multiple globs, e.g. `req.path_globs("*.py", "*.pyi")`. + +### Banned short option names like `-x` + +You must now use a long option name when [defining options](../the-rules-api/options-and-subsystems.mdx). You can also now only specify a single option name per option. + +(These changes allowed us to introduce ignore specs, like `pants list :: -ignore_me::`.) + +## 2.12 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.12.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.12.x.md) for the changelog. + +### Unified formatters + +Formatters no longer need to be installed in both the `FmtRequest` and `LintTargetsRequest` `@unions`: instead, installing in the `FmtRequest` union is sufficient to act as both a linter and formatter. + +See [Add a formatter](./add-a-formatter.mdx) for more information. + +## 2.11 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.11.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.11.x.md) for the changelog. + +### Deprecated `Subsystem.register_options()` + +Pants 2.11 added "concrete" option types, which you use as class attributes of your subsystem. These are more declarative, simplify accessing options, and work with MyPy! + +Before: + +```python +class MySubsystem(Subsystem): + options_scope = "example" + help = "..." + + @classmethod + def register_options(cls, register): + super().register_options(register) + register( + "--my-opt", + type=bool, + default=True, + help="...", + ) +``` + +Now: + +```python +class MySubsystem(Subsystem): + options_scope = "example" + help = "..."
+ + my_opt = BoolOption( + "--my-opt", + default=True, + help="...", + ) +``` + +To access an option in rules, simply use `my_subsystem.my_opt` rather than `my_subsystem.options.my_opt`. + +See [Options and subsystems](../the-rules-api/options-and-subsystems.mdx) for more information, including the available types. + +### Moved `BinaryPathRequest` to `pants.core.util_rules.system_binaries` + +The new module `pants.core.util_rules.system_binaries` centralizes all discovery of existing binaries on a user's machine. + +The functionality is the same; you only need to change your imports so that types like `BinaryPathRequest` come from `pants.core.util_rules.system_binaries` rather than `pants.engine.process`. + +### Deprecated not implementing `TargetGenerator` in `GenerateTargetsRequest` implementors + +See [https://github.com/pantsbuild/pants/pull/14962](https://github.com/pantsbuild/pants/pull/14962) for an explanation and some examples of how to fix. + +### Replaced `GoalSubsystem.required_union_implementations` with `GoalSubsystem.activated()` + +See [https://github.com/pantsbuild/pants/pull/14313](https://github.com/pantsbuild/pants/pull/14313) for an explanation and some examples of how to fix. + +## 2.10 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.10.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.10.x.md) for the changelog. + +### Rename `LintRequest` to `LintTargetsRequest` + +Pants 2.10 added a new `LintFilesRequest`, which allows you to run linters on code without any owning targets! [https://github.com/pantsbuild/pants/pull/14102](https://github.com/pantsbuild/pants/pull/14102) + +To improve clarity, we renamed `LintRequest` to `LintTargetsRequest`. + +### `FmtRequest`, `CheckRequest`, and `LintTargetsRequest` must set `name` + +You must set the class property `name` on these three types. + +Before: + +```python +class MyPyRequest(CheckRequest): + field_set_type = MyPyFieldSet +``` + +After: + +```python +class MyPyRequest(CheckRequest): + field_set_type = MyPyFieldSet + name = "mypy" +``` + +This change is what allowed us to add the `lint --only=flake8` feature. + +For DRY, it is a good idea to change the `formatter_name`, `linter_name`, and `checker_name` in `FmtResult`, `LintResults`, and `CheckResults`, respectively, to use `request.name` rather than hardcoding the string again. See [https://github.com/pantsbuild/pants/pull/14304](https://github.com/pantsbuild/pants/pull/14304) for examples. + +### Removed `LanguageFmtTargets` for `fmt` + +When setting up a new language to be formatted, you used to have to copy and paste a lot of boilerplate like `ShellFmtTargets`. That's been fixed, thanks to [https://github.com/pantsbuild/pants/pull/14166](https://github.com/pantsbuild/pants/pull/14166). + +To fix your code: + +1. If you defined any new languages to be formatted, delete the copy-and-pasted `LanguageFmtTargets` code. +2. For every formatter, change the `UnionRule` to be `UnionRule(FmtRequest, BlackRequest)`, rather than `UnionRule(PythonFmtRequest, BlackRequest)`, for example. + +### `ReplImplementation` now passes root targets, not transitive closure + +We realized that it's useful to let REPL rules know what was specified vs. what is a transitive dependency: [https://github.com/pantsbuild/pants/pull/14323](https://github.com/pantsbuild/pants/pull/14323). + +To adapt to this, you will want to use `transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))`, then operate on `transitive_targets.closure`.
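 + +Spelled out, that looks roughly like the sketch below, where `request.addresses` stands in for whatever addresses your rule received: + +```python +from pants.engine.rules import Get +from pants.engine.target import TransitiveTargets, TransitiveTargetsRequest + +transitive_targets = await Get( + TransitiveTargets, TransitiveTargetsRequest(request.addresses) +) +roots = transitive_targets.roots # only the targets the user specified +closure = transitive_targets.closure # the roots plus all transitive dependencies +```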
+ +### Removed `PexFromTargetsRequest.additional_requirements` + +Let us know if you were using this, and we can figure out how to add it back: [https://github.com/pantsbuild/pants/pull/14350](https://github.com/pantsbuild/pants/pull/14350). + +### Removed `PexFromTargetsRequest(direct_deps_only: bool)` + +Let us know if you were using this, and we can figure out how to add it back: [https://github.com/pantsbuild/pants/pull/14291](https://github.com/pantsbuild/pants/pull/14291). + +### Renamed `GenerateToolLockfileSentinel.options_scope` to `resolve_name` + +See [https://github.com/pantsbuild/pants/pull/14231](https://github.com/pantsbuild/pants/pull/14231) for more info. + +### Renamed `PythonModule` to `PythonModuleOwnersRequest` + +This type was used to determine the owners of a Python module. The new name makes that more clear. See [https://github.com/pantsbuild/pants/pull/14276](https://github.com/pantsbuild/pants/pull/14276). + +## 2.9 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.9.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.9.x.md) for the changelog. + +### Deprecated `RuleRunner.create_files()`, `.create_file()` and `.add_to_build_file()` + +Instead, for your `RuleRunner` tests, use `.write_files()`. See [https://github.com/pantsbuild/pants/pull/13817](https://github.com/pantsbuild/pants/pull/13817) for some examples. + +## 2.8 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.8.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.8.x.md) for the changelog. + +### Target modeling changes + +Pants 2.8 cleaned up the modeling of targets. Now, there are targets that describe the atom of each language, like `python_test` and `python_source` which correspond to a single file. There are also target generators which exist solely for less boilerplate, like `python_tests` and `python_sources`. + +We recommend re-reading [Targets and BUILD files](../../using-pants/key-concepts/targets-and-build-files.mdx). + +#### `SourcesField` + +The `Sources` class was replaced with `SourcesField`, `SingleSourceField`, and `MultipleSourcesField`. + +When defining new target types with the Target API, you should choose between subclassing `SingleSourceField` and `MultipleSourcesField`, depending on if you want the field to be `source: str` or `sources: list[str]`. + +Wherever you were using `Sources` in your `@rule`s, simply replace with `SourcesField`. + +#### Renames of some `Sources` subclasses + +You should update all references to these classes in your `@rule`s. + +- `FilesSources` -> `FileSourceField` +- `ResourcesSources` -> `ResourceSourceField` +- `PythonSources` -> `PythonSourceField` + +### `OutputPathField.value_or_default()` + +The method `OutputPathField.value_or_default()` no longer takes `Address` as an argument. + +## 2.7 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.7.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.7.x.md) for the changelog. + +### Type hints work properly + +Pants was not using PEP 561 properly, which means that MyPy would not enforce type hints when using Pants APIs. Oops! This is now fixed. + +### Options scopes should not have `_` + +For example, use `my-subsystem` instead of `my_subsystem`. This is to avoid ambiguity with target types. + +## 2.6 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.6.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.6.x.md) for the changelog. 
 + +### `ProcessCacheScope` + +`ProcessCacheScope.NEVER` was renamed to `ProcessCacheScope.PER_SESSION` to better reflect that a rule never runs more than once in a session (i.e. a single Pants run) given the same inputs. + +`ProcessCacheScope.PER_RESTART` was replaced with `ProcessCacheScope.PER_RESTART_ALWAYS` and `ProcessCacheScope.PER_RESTART_SUCCESSFUL`. + +### `PexInterpreterConstraints` + +Now called `InterpreterConstraints` and defined in `pants.backend.python.util_rules.interpreter_constraints`. + +## 2.5 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.5.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.5.x.md) for the changelog. + +### `TriBoolField` + +`BoolField.value` is no longer `bool | None`, but simply `bool`. This means that you must either set `required = True` or set the `default`. + +Use `TriBoolField` if you still want to be able to represent a trinary state: `False`, `True`, and `None`. + +### Added `RuleRunner.write_files()` + +This is a more declarative way to set up files than the older API of `RuleRunner.create_file()`, `.create_files()`, and `.add_to_build_files()`. See [Testing plugins](../the-rules-api/testing-plugins.mdx). + +## 2.4 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.4.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.4.x.md) for the changelog. + +### `PexRequest` changes how entry point is set + +See [https://github.com/pantsbuild/pants/pull/11620](https://github.com/pantsbuild/pants/pull/11620). Instead of setting `entry_point="pytest"` in the `PexRequest` constructor, now you set `main=ConsoleScript("black")` or `main=EntryPoint("pytest")`. + +### Must use `EnvironmentRequest` for accessing environment variables + +See [https://github.com/pantsbuild/pants/pull/11641](https://github.com/pantsbuild/pants/pull/11641). Pants now eagerly purges environment variables from the run, so using `os.environ` in plugins won't work anymore. + +Instead, use `await Get(Environment, EnvironmentRequest(["MY_ENV_VAR"]))`. + +For `RuleRunner` tests, you must now either set `env` or the new `env_inherit` arguments for environment variables to be set. Tests are now hermetic. + +## 2.3 + +There were no substantial changes to the Plugin API in 2.3. See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.3.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.3.x.md) for the changelog. + +## 2.2 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.2.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.2.x.md) for the changelog. + +### `PrimitiveField` and `AsyncField` are removed (2.2.0.dev0) + +Rather than subclassing `PrimitiveField`, subclass `Field` directly. `Field` now behaves like `PrimitiveField` used to, and `PrimitiveField` was removed for simplicity. + +Rather than subclassing `AsyncField` or `AsyncStringSequenceField`, subclass `Field` or a template like `StringField` and also subclass `AsyncFieldMixin`: + +```python +from pants.engine.target import AsyncFieldMixin, StringField + +class MyField(StringField, AsyncFieldMixin): + alias = "my_field" + help = "Description." +``` + +Async fields now access the raw value with the property `.value`, rather than `.sanitized_raw_value`. To override the eager validation, override `compute_value()`, rather than `sanitize_raw_value()`. Both these changes bring async fields into alignment with non-async fields.
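 + +As a hedged illustration of the new validation hook (the `compute_value` signature shown matches recent Pants versions and may differ slightly in 2.2-era releases; `MyField` is a made-up example): + +```python +from typing import Optional + +from pants.engine.addresses import Address +from pants.engine.target import StringField + + +class MyField(StringField): + alias = "my_field" + help = "Description." + + @classmethod + def compute_value(cls, raw_value: Optional[str], address: Address) -> Optional[str]: + value = super().compute_value(raw_value, address) + # Eager validation now happens here, rather than in sanitize_raw_value(). + if value is not None and " " in value: + raise ValueError(f"`{cls.alias}` on {address} must not contain spaces.") + return value +```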
 + +### Set the property `help` with Subsystems, Targets, and Fields (2.2.0.dev3) + +Previously, you were supposed to set the class's docstring for the `pants help` message. Instead, now set a class property `help`, like this: + +```python +class MyField(StringField): + alias = "my_field" + help = "A summary.\n\nOptional extra information." +``` + +Pants will now properly wrap strings and preserve newlines. You may want to run `pants help ${target/subsystem}` to verify things render properly. + +## 2.1 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.1.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.1.x.md) for the changelog. + +### `SourcesSnapshot` is now `SpecsSnapshot` (2.1.0rc0) + +The type was renamed for clarity. Still import it from `pants.engine.fs`. + +## 2.0 + +See [https://github.com/pantsbuild/pants/blob/main/docs/notes/2.0.x.md](https://github.com/pantsbuild/pants/blob/main/docs/notes/2.0.x.md) for the changelog. + +### Use `TransitiveTargetsRequest` as input for resolving `TransitiveTargets` (2.0.0rc0) + +Rather than `await Get(TransitiveTargets, Addresses([addr1]))`, use `await Get(TransitiveTargets, TransitiveTargetsRequest([addr1]))`, from `pants.engine.target`. + +It's no longer possible to include `TransitiveTargets` in your `@rule` signature in order to get the transitive closure of what the user specified on the command line. Instead, put `Addresses` in your rule's signature, and use `await Get(TransitiveTargets, TransitiveTargetsRequest(addresses))`. + +### Codegen implementations: use `DependenciesRequestLite` and `TransitiveTargetsLite` (2.0.0rc0) + +Due to a new cycle in the rule graph, for any codegen implementations, you must use `DependenciesRequestLite` instead of `DependenciesRequest`, and `TransitiveTargetsLite` instead of `TransitiveTargetsRequest`. Both imports are still from `pants.engine.target`. + +These behave identically, except that they do not include dependency inference in the results. Unless you are generating for `input = PythonSources`, this should be fine, as dependency inference is currently only used with Python. + +This is tracked by [https://github.com/pantsbuild/pants/issues/10917](https://github.com/pantsbuild/pants/issues/10917). + +### Dependencies-like fields have more robust support (2.0.0rc0) + +If you have any custom fields that act like the dependencies field, but do not subclass `Dependencies`, there are two new mechanisms for better support. + +1. Instead of subclassing `StringSequenceField`, subclass `SpecialCasedDependencies` from `pants.engine.target`. This will ensure that the dependencies show up with `pants dependencies` and `pants dependents`. +2. You can use `UnparsedAddressInputs` from `pants.engine.addresses` to resolve the addresses: + +```python +from pants.engine.addresses import Address, Addresses, UnparsedAddressInputs +from pants.engine.target import Targets + +... + +addresses = await Get(Addresses, UnparsedAddressInputs(["//:addr1", "project/addr2"], owning_address=None)) + +# Or, use this. +targets = await Get( + Targets, + UnparsedAddressInputs(["//:addr1", "project/addr2"], owning_address=Address("project", target_name="original")) +) +``` + +If you defined a subclass of `SpecialCasedDependencies`, you can use `await Get(Addresses | Targets, UnparsedAddressInputs, my_tgt[MyField].to_unparsed_address_inputs())`. + +(Why would you ever do this? If you have dependencies that you don't treat like normal—e.g.
that you will call the equivalent of `pants package` on those deps—it's often helpful to call out this magic through a dedicated field. For example, Pants's [archive](https://github.com/pantsbuild/pants/blob/969c8dcba6eda0c939918b3bc5157ca45099b4d1/src/python/pants/core/target_types.py#L231-L257) target type has the fields `files` and `packages`, rather than `dependencies`.) + +### `package` implementations may want to add the field `output_path` (2.0.0rc0) + +All of Pants's target types that can be built via `pants package` now have an `output_path` field, which allows the user to override the path used for the created asset. + +You optionally may want to add this `output_path` field to your custom target type for consistency: + +1. Include `OutputPathField` from `pants.core.goals.package` in your target's `core_fields` class property. +2. In your `PackageFieldSet` subclass, include `output_path: OutputPathField`. +3. When computing the filename in your rule, use `my_package_field_set.output_path.value_or_default(field_set.address, file_ending="my_ext")`. diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/run-tests.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/run-tests.mdx new file mode 100644 index 000000000..1d07036b2 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/common-plugin-tasks/run-tests.mdx @@ -0,0 +1,270 @@ +--- + title: Run tests + sidebar_position: 6 +--- + +How to add a new test runner to the `test` goal. + +--- + +1. Set up a test target type + +--- + +Usually, you will want to add a "test" target type for your language, such as `shell_test` or `python_test`. A test target contrasts with a "source" target, such as `shell_source`. A test target is useful so that `pants test ::` doesn't try to run tests on non-test files. + +When creating a test target, you should usually subclass `SingleSourceField`. You may also want to create a `TimeoutField` (which should subclass `IntField`) and a `SkipField` (which should subclass `BoolField`). + +See [Creating new targets](../the-target-api/creating-new-targets.mdx) for a guide on how to define new target types. + +```python +from pants.engine.target import ( + COMMON_TARGET_FIELDS, + Dependencies, + BoolField, + IntField, + SingleSourceField, + Target, +) + + +class ExampleTestSourceField(SingleSourceField): + expected_file_extensions = (".example",) + + +class ExampleTestTimeoutField(IntField): + alias = "timeout" + help = "How many seconds to run before timing out" + + +class SkipExampleTestsField(BoolField): + alias = "skip_example_tests" + default = False + help = "If set, don't run tests on this source" + + +class ExampleTestTarget(Target): + alias = "example_tests" + help = "Example tests run by some tool" + core_fields = ( + *COMMON_TARGET_FIELDS, + Dependencies, + ExampleTestSourceField, + ExampleTestTimeoutField, + SkipExampleTestsField, + ) +``` + +2. Set up a subclass of `TestFieldSet` + +--- + +Your test runner will need access to some or most of the fields defined on your new target to actually execute the tests within. Collect those fields into a new subclass of `TestFieldSet`, and mark at least your source field as required.
 + +If you have a "skip" field, use it in an `opt_out` method of your subclass: + +```python +from dataclasses import dataclass + +from pants.core.goals.test import TestFieldSet +from pants.engine.target import Target + +@dataclass(frozen=True) +class ExampleTestFieldSet(TestFieldSet): + required_fields = (ExampleTestSourceField,) + sources: ExampleTestSourceField + timeout: ExampleTestTimeoutField + + @classmethod + def opt_out(cls, tgt: Target) -> bool: + return tgt.get(SkipExampleTestsField).value +``` + +3. Set up a `Subsystem` for your test runner + +--- + +Test runners are expected to implement (at least) a `skip` option at a subsystem level. + +```python +from pants.option.option_types import SkipOption +from pants.option.subsystem import Subsystem + +class ExampleTestSubsystem(Subsystem): + name = "Example" + options_scope = "example-test" + help = "Some tool to run tests" + + skip = SkipOption("test") +``` + +See [Options and subsystems](../the-rules-api/options-and-subsystems.mdx) for more information about defining new subsystems. + +4. Set up a subclass of `TestRequest` + +--- + +The rules used to drive batching and executing tests come from the `TestRequest` class. To use it, first declare a new subclass pointing at your subclasses of `TestFieldSet` and `Subsystem`: + +```python +from pants.core.goals.test import TestRequest + +@dataclass(frozen=True) +class ExampleTestRequest(TestRequest): + field_set_type = ExampleTestFieldSet + tool_subsystem = ExampleTestSubsystem +``` + +Then register the rules of your subclass: + +```python +def rules(): + return [ + # Add to any other existing rules here: + *ExampleTestRequest.rules() + ] +``` + +In addition to registering your subclass as a valid `TestRequest`, this will automatically register rules to handle splitting your test inputs into single-element batches. If this is the correct behavior for your test runner, you can move on and skip the following section about defining a batching/partitioning rule. On the other hand, if your test runner supports testing multiple files in a single process (i.e. to share expensive setup logic), you can override the default `partitioner_type` on your `TestRequest` subclass: + +```python +from pants.core.goals.test import PartitionerType + +@dataclass(frozen=True) +class ExampleTestRequest(TestRequest): + field_set_type = ExampleTestFieldSet + tool_subsystem = ExampleTestSubsystem + # Changed from the default: + partitioner_type = PartitionerType.CUSTOM +``` + +This will prevent generation of the "default" partitioning rule, allowing you to implement a custom rule for grouping compatible tests into the same process. + +5. Define a batching/partitioning `@rule` + +--- + +:::caution This step is optional +Defining a partitioning rule is only required if you overrode the `partitioner_type` field in your `TestRequest` subclass to be `PartitionerType.CUSTOM`. Skip to the next section if your subclass is using the default `partitioner_type`. +::: +Pants can run tests from multiple targets/files within the same process (for example, to share expensive setup/teardown logic across multiple files). Since it's not always safe or possible to batch test files together, each plugin defining a `test` implementation is expected to define a `@rule` for splitting field sets into appropriate batches: + +```python +from pants.core.goals.test import Partitions +from pants.engine.rules import collect_rules, rule + +@rule +async def partition( + request: ExampleTestRequest.PartitionRequest[ExampleTestFieldSet] +) -> Partitions: + ...
 + +def rules(): + return [ + # If it isn't already in the list: + *collect_rules(), + ] +``` + +The `Partitions` type is a custom collection of `Partition` objects, and a `Partition` is a `dataclass` containing: + +- A `tuple[TestFieldSetSubclass, ...]` of partition `elements` +- An optional `metadata` field + +Partition metadata can be any type implementing: + +```python +@property +def description(self) -> str: + ... +``` + +Any metadata returned by the partitioning rule will be passed back to your test runner as an input to the test execution rule, so it can be useful to declare a custom type modeling everything that's constant for a collection of `TestFieldSet` inputs: + +```python +@dataclass(frozen=True) +class ExampleTestMetadata: + common_property: str + other_common_property: int | None +``` + +6. Define the main test execution `@rule` + +--- + +To actually execute your test runner, define a rule like: + +```python +from pants.core.goals.test import TestResult + +@rule +async def run_example_tests( + batch: ExampleTestRequest.Batch[ExampleTestFieldSet, ExampleTestMetadata], + # Any other subsystems/inputs you need. +) -> TestResult: + ... +``` + +If you didn't define a custom metadata type, you can use `Any` as the second type argument to the `Batch` type: + +```python +from pants.core.goals.test import TestResult + +@rule +async def run_example_tests( + batch: ExampleTestRequest.Batch[ExampleTestFieldSet, Any], + # Any other subsystems/inputs you need. +) -> TestResult: + ... +``` + +The `batch` input will have two properties: + +1. `elements` contains all the field sets that should be tested by your runner +2. `metadata` contains any (optional) common data about the batch returned by your partitioning rule + +If you didn't override the `partitioner_type` in your `TestRequest` subclass, `elements` will be a list of size 1 and `metadata` will be `None`. For convenience, you can use `batch.single_element` in this case to get the single field set. The `single_element` property will raise a `TypeError` if used on a batch with more than one element. + +7. Define `@rule`s for debug testing + +--- + +`pants test` exposes `--debug` and `--debug-adapter` options for interactive execution of tests. To hook into these execution modes, opt in via your `TestRequest` subclass and define one or both additional rules: + +```python +from pants.core.goals.test import TestDebugAdapterRequest, TestDebugRequest +from pants.core.subsystems.debug_adapter import DebugAdapterSubsystem + +@dataclass(frozen=True) +class ExampleTestRequest(TestRequest): + ... # Fields from earlier + supports_debug = True # Supports --debug + supports_debug_adapter = True # Supports --debug-adapter + +@rule +async def setup_example_debug_test( + batch: ExampleTestRequest.Batch[ExampleTestFieldSet, ExampleTestMetadata], +) -> TestDebugRequest: + ... + +@rule +async def setup_example_debug_adapter_test( + batch: ExampleTestRequest.Batch[ExampleTestFieldSet, ExampleTestMetadata], + debug_adapter: DebugAdapterSubsystem, +) -> TestDebugAdapterRequest: + ...
+``` + +## Automatic retries for tests + +Running the process without retries could look like this: + +```python +result = await Get(FallibleProcessResult, Process, my_test_process) +``` + +Simply wrap the process in types that request the retries: + +```python +results = await Get( + ProcessResultWithRetries, ProcessWithRetries(my_test_process, retry_count) +) +last_result = results.last +``` diff --git a/versioned_docs/version-2.24/docs/writing-plugins/common-subsystem-tasks.mdx b/versioned_docs/version-2.24/docs/writing-plugins/common-subsystem-tasks.mdx new file mode 100644 index 000000000..cf24a0893 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/common-subsystem-tasks.mdx @@ -0,0 +1,174 @@ +--- + title: Common subsystem tasks + sidebar_position: 999 +--- + +Common tasks for Subsystems + +--- + +## Skipping individual targets + +Many subsystems allow skipping specific targets. For example, you might have Python files that you don't want to typecheck with mypy. In Pants, this is achieved with a `skip_*` field on the target. This is simple to implement. + +1. Create a field for skipping your tool + +```python +from pants.engine.target import BoolField + +class SkipFortranLintField(BoolField): + alias = "skip_fortran_lint" + default = False + help = "If true, don't run fortran-lint on this target's code." +``` + +2. Register this field on the appropriate targets. + +```python +def rules(): + return [ + FortranSourceTarget.register_plugin_field(SkipFortranLintField), + ] +``` + +3. Use this field in your field set's `opt_out` method: + +```python +from dataclasses import dataclass + +from pants.engine.target import FieldSet, Target + + +@dataclass(frozen=True) +class FortranLintFieldSet(FieldSet): + required_fields = (FortranSourceField,) + + source: FortranSourceField + + @classmethod + def opt_out(cls, tgt: Target) -> bool: + return tgt.get(SkipFortranLintField).value +``` + +## Making subsystems exportable with their default lockfile + +:::note Support depends on language backend of the subsystem +Only some language backends support `pants export`. These include the Python and JVM backends. Only tools which are themselves written to use a backend with this feature can be exported. For example, a Python-based tool which operates on a different language is exportable. +::: + +1. Make the subsystem a subclass of `ExportableTool` + + :::note Language backends may have done this in their Tool base class. + For example, the Python backend with `PythonToolRequirementsBase` and JVM with `JvmToolBase` are already subclasses. + ::: + + ```python + from pants.backend.python.subsystems.python_tool_base import PythonToolBase + from pants.core.goals.resolves import ExportableTool + + class FortranLint(PythonToolBase, ExportableTool): + ... + ``` + +2. Register your class with a `UnionRule` with `ExportableTool` + + ```python + def rules(): + return [ + UnionRule(ExportableTool, FortranLint) + ] + ``` + +## Loading config files + +1. Add an option to toggle config discovery: + + ```python + from pants.option.subsystem import Subsystem + from pants.option.option_types import BoolOption + from pants.util.strutil import softwrap + + class FortranLint(Subsystem): + config_discovery = BoolOption( + default=True, + advanced=True, + help=lambda cls: softwrap( + f""" + If true, Pants will include all relevant config files during runs. + + Use `[{cls.options_scope}].config` and `[{cls.options_scope}].custom_check_dir` instead if your config is in a non-standard location.
+ """ + ), + ) + ``` + +2. Add an option for the configuration file itself. Several options are useful depending on what types of config files you need: `FileOption`, `FileListOption`, `DirOption`, `DirListOption`. + + ```python + from pants.option.subsystem import Subsystem + from pants.option.option_types import FileOption + from pants.util.strutil import softwrap + + class FortranLint(Subsystem): + config = FileOption( + default=None, + advanced=True, + help=lambda cls: softwrap( + """ + Path to the fortran-lint config file. + + Setting this option will disable config discovery for the config file. Use this option if the config is located in a non-standard location. + """ + ), + ) + ``` + +3. Add a helper function to generate the `ConfigFilesRequest`. The `check_existence` field is used for config discovery. `specified` can also be a list for using one of the list options. + + ```python + from pants.core.util_rules.config_files import ConfigFilesRequest + from pants.option.subsystem import Subsystem + + class FortranLint(Subsystem): + def config_request(self) -> ConfigFilesRequest: + return ConfigFilesRequest( + specified=self.config, + specified_option_name=f"[{self.options_scope}].config", + discovery=self.config_discovery, + check_existence=["fortran_lint.ini"], + ) + ``` + +4. Make a request for the config files in a rule for running the tool. Use a `Get(ConfigFiles, ConfigFilesRequest)` to get the config files. This has a snapshot that contains the config files (or will be empty if none are found). You can merge these with the other digests to pass the files to your `Process`. If a custom value was provided for the config file, you may need to pass that as an argument to the `Process`. You may also need to register rules from `pants.core.util_rules.config_files`. + + ```python + from pants.core.goals.lint import LintResult + from pants.core.util_rules import config_files + from pants.core.util_rules.config_files import ConfigFiles, ConfigFilesRequest + from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest + from pants.engine.fs import Digest, MergeDigests + from pants.engine.rules import Get, MultiGet, collect_rules, rule + + @rule + async def run_fortran_lint(request: FortranlintRequest.Batch, subsystem: FortranLint) -> LintResult: + sources, config_file = await MultiGet( + Get(SourceFiles, SourceFilesRequest(fs.sources for fs in request.elements)), + Get(ConfigFiles, ConfigFilesRequest, subsystem.config_request()), + ) + + input_digest = await Get( + Digest, MergeDigests((sources.snapshot.digest, config_file.snapshot.digest)) + ) + + args = [] + if subsystem.config_request: + args.append(f"--config-file={subsystem.config}") + + # run your process with the digest and args + + def rules(): + return [ + *collect_rules(), + *config_files.rules(), + ] + ``` \ No newline at end of file diff --git a/versioned_docs/version-2.24/docs/writing-plugins/macros.mdx b/versioned_docs/version-2.24/docs/writing-plugins/macros.mdx new file mode 100644 index 000000000..4304ab4d1 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/macros.mdx @@ -0,0 +1,147 @@ +--- + title: Macros + sidebar_position: 1 +--- + +Reducing boilerplate in BUILD files. + +--- + +## When to use a macro + +Macros are useful to reduce boilerplate in BUILD files. For example, if you keep using the same value for a field, you can use a macro. 
 + +However, also consider that introducing new symbols to BUILD files adds some indirection to your codebase, such as making it harder to follow along with the Pants docs. As with any tool, macros should be used judiciously. + +Often, you can instead use the [`parametrize`](../using-pants/key-concepts/targets-and-build-files.mdx) mechanism: + +```python title="BUILD" +shell_tests( + name="tests", + shell=parametrize("bash", "zsh"), +) +``` + +If you instead want to add support for a new language, or do something more complex than a macro allows, create a new [target type](./the-target-api/creating-new-targets.mdx). + +If you are already using a target type, but need to store additional metadata for your plugin, [add a new field to the target type](./the-target-api/extending-existing-targets.mdx). + +## How to add a macro + +Macros are defined in Python files that act like a normal BUILD file. They have access to all the symbols you normally have registered in a BUILD file, such as all of your target types. + +Macros cannot import other modules, just like BUILD files cannot have import statements. + +To define a new macro, add a function with `def` and the name of the new symbol. Usually, the last line of the macro will create a new target, like this: + +```python title="pants-plugins/macros.py" +def python2_sources(**kwargs): + kwargs["interpreter_constraints"] = ["==2.7.*"] + python_sources(**kwargs) + +def python3_sources(**kwargs): + kwargs["interpreter_constraints"] = [">=3.5"] + python_sources(**kwargs) +``` + +Then, add this file to the option `[GLOBAL].build_file_prelude_globs`: + +```toml title="pants.toml" +[GLOBAL] +build_file_prelude_globs = ["pants-plugins/macros.py"] +``` + +Now, in BUILD files, you can use the new macros: + +```python title="project/BUILD" +python2_sources( + name="app_py2", + sources=["app_py2.py"], +) + +python3_sources( + name="app_py3", + sources=["app_py3.py"], +) +``` + +A macro can create multiple targets—although often it's better to use [`parametrize`](../using-pants/key-concepts/targets-and-build-files.mdx): + +```python title="pants-plugins/macros.py" +def python23_tests(name, **kwargs): + kwargs.pop("interpreter_constraints", None) + + python_tests( + name=f"{name}_py2", + interpreter_constraints=["==2.7.*"], + **kwargs, + ) + + python_tests( + name=f"{name}_py3", + interpreter_constraints=[">=3.5"], + **kwargs, + ) +``` + +A macro can perform validation: + +```python title="pants-plugins/macros.py" +def custom_python_sources(**kwargs): + if "2.7" in kwargs.get("interpreter_constraints", ""): + raise ValueError("Python 2.7 is banned!") + python_sources(**kwargs) +``` + +A macro can take new parameters to generate the target dynamically. For example: + +```python tab={"label":"pants-plugins/macros.py"} +def custom_python_sources(has_type_hints: bool = True, **kwargs): + if has_type_hints: + kwargs["tags"] = kwargs.get("tags", []) + ["type_checked"] + python_sources(**kwargs) +``` + +```python tab={"label":"project/BUILD"} +custom_python_sources( + has_type_hints=False, +) +``` + +## Documenting your macros + +Use doc-strings to document your macros; Pants will pick these up and present them in the online help text. Global constants can also be documented using `Doc` wrapped in `Annotated`. + +```python title="pants-plugins/macros.py" +OUR_GLOBAL_CONSTANT: Annotated[ + int, + Doc( + """This is our magic number. + + It is useful when you need the answer to the meaning of life, the universe and everything.
+ """ + ) +] = 42 + + +def custom_python_sources(has_type_hints: bool = True, **kwargs): + """Custom target for Python sources. + + This target adds the `type_checked` tag for targets for which `has_type_hints` is true. + """ + # ... +``` + +For CLI help, this information is presented when calling pants with: +``` +pants OUR_GLOBAL_CONSTANT --help +pants custom_python_sources --help +``` + +To list all available symbols, along with the first sentence of the docs for each: +``` +pants symbols --help + +# To also include targets (to list only targets, there is "pants targets --help"): +pants symbols --help-advanced +``` diff --git a/versioned_docs/version-2.24/docs/writing-plugins/overview.mdx b/versioned_docs/version-2.24/docs/writing-plugins/overview.mdx new file mode 100644 index 000000000..981f7b48d --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/overview.mdx @@ -0,0 +1,210 @@ +--- + title: Plugins overview + sidebar_position: 0 +--- + +An intro to the Pants engine's core concepts. + +--- + +Pants is designed for extensibility: you can extend Pants by writing custom _plugins_, using a standard Plugin API. In fact, all of Pants's built-in functionality uses the same API! + +Some of the ways you can extend Pants: + +- Add support for new languages. +- Add new goals, like a `publish` goal or `docker` goal. +- Add new linters, formatters, and type-checkers. +- Add new codegen implementations. +- Define new target types that still work with core Pants. +- Add new forms of dependency inference +- Define macros to reduce boilerplate in BUILD files. + +Thanks to Pants's execution engine, your plugins will automatically bring you the same benefits you get from using core Pants, including: + +- Fine-grained caching. +- Concurrent execution. +- Remote execution. + +:::danger The Plugin API is not yet stable +While we'll try our best to limit changes, the Plugin API does not yet follow the [Deprecation Policy](../releases/deprecation-policy.mdx). Components of the API may change between minor versions—e.g. 2.7 to 2.8—without a deprecation. + +We will document changes at [Plugin upgrade guide](./common-plugin-tasks/plugin-upgrade-guide.mdx). +::: + +## Core concepts + +The plugin API is split into two main interfaces: + +1. [The Target API](./the-target-api/concepts.mdx): a declarative interface for creating new target types and extending existing targets. +2. [The Rules API](./the-rules-api/concepts.mdx): where you define your logic and model each step of your build. + +Plugins are written in typed Python 3 code. You write your logic in Python, and then Pants will run your plugin in the Rust engine. + +## Locating Plugin code + +Plugins can be consumed in either of two ways: + +- From a published package in a repository such as [PyPI](https://pypi.org/). +- Directly from in-repo sources. + +It's often convenient to use in-repo plugins, particularly when the plugin is only relevant to a single repo and you want to iterate on it rapidly. In other cases, you may want to publish the plugin, so it can be reused across multiple repos. + +### Published plugins + +You consume published plugins by adding them to the `plugins` option: + +```toml title="pants.toml" +[GLOBAL] +plugins = ["my.plugin==2.3.4"] +``` + +### In-repo plugins + +Conventionally, in-repo plugins live in a folder called `pants-plugins`, although they may be placed anywhere. 
+ +You must specify the path to your plugin's top-level folder using the `pythonpath` option: + +```toml title="pants.toml" +[GLOBAL] +pythonpath = ["%(buildroot)s/pants-plugins"] +``` + +:::caution In-repo dependencies +In-repo plugin code should not depend on other in-repo code outside of the `pants-plugins` folder. The `pants-plugins` folder helps isolate plugins from regular code, which is necessary due to how Pants's startup sequence works. +::: + +You can depend on third-party dependencies in your in-repo plugin by adding a `requirements.txt` file next to +the plugin `register.py` module: + +``` +ansicolors==1.18.0 +``` + +Or, although less recommended, you can add them to the `plugins` option: + +```toml title="pants.toml" +[GLOBAL] +plugins = ["ansicolors==1.18.0"] +``` + +However, be careful adding third-party dependencies that perform side effects like reading from the filesystem or making network requests, as they will not work properly with the engine's caching model. + +## Enabling Plugins with `register.py` + +A Pants [_backend_](../using-pants/key-concepts/backends.mdx) is a Python package that implements some required functionality and uses hooks to register itself with Pants. + +A plugin will contain one or more backends, with the hooks for each one defined in a file called `register.py`. To enable a custom plugin you add its backends to your `backend_packages` configuration: + +```toml tab={"label":"pants.toml"} +[GLOBAL] +pythonpath = ["%(buildroot)s/pants-plugins"] +backend_packages.add = [ + # This will activate `pants-plugins/plugin1/register.py`. + "plugin1", + # This will activate `pants-plugins/subdir/plugin2/register.py`. + "subdir.plugin2", +] +``` + +```python tab={"label":"pants-plugins/plugin1/register.py"} +from plugin1.lib import CustomTargetType, rule1, rule2 + + +def rules(): + return [rule1, rule2] + + +def target_types(): + return [CustomTargetType] +``` + +## Building in-repo plugins with Pants + +Because plugin code is written in Python, you can optionally use Pants's [Python backend](../python/overview/enabling-python-support.mdx) to build your plugin code. For example, you can use Pants to lint, format, and test your plugin code. This is not required, but it's usually a good idea to improve the quality of your plugin. + +To do so, activate the [Python backend](../python/overview/index.mdx) and `plugin_development` backend, which adds the `pants_requirements` target type. Also add your `pants-plugins` directory as a source root: + +```toml title="pants.toml" +[GLOBAL] +backend_packages = [ + "pants.backend.python", + "pants.backend.plugin_development", +] + +[source] +root_patterns = [ + .., + "pants-plugins", +] +``` + +Then, add the `pants_requirements` target generator. + +```python title="pants-plugins/BUILD" +pants_requirements(name="pants") +``` + +This will generate [`python_requirement` targets](../python/overview/third-party-dependencies.mdx) for the `pantsbuild.pants` and `pantsbuild.pants.testutil` distributions, so that when you build your code—like running MyPy or Pytest on your plugin—the dependency on Pants itself is properly resolved. This isn't used for your plugin to work, only for Pants goals like `test` and `check` to understand how to resolve the dependency. + +The target generator dynamically sets the version downloaded to match your current `pants_version` set in `pants.toml`. 
Pants's [dependency inference](../using-pants/key-concepts/targets-and-build-files.mdx) understands imports of the `pants` module and will automatically add dependencies on the generated `python_requirement` targets where relevant. + +:::caution Plugins resolve +We strongly recommend setting up a dedicated "resolve" (lockfile) for your plugins. Pants ships as a monolithic application with a pinned set of dependencies, which can make it hard to combine with your project's dependencies. +::: + +To set up a dedicated resolve for your plugins, update your `pants.toml` as follows: + +```toml title="pants.toml" +[python] +enable_resolves = true +# The repository's own constraints. +interpreter_constraints = ["==3.12.*"] + +[python.resolves] +pants-plugins = "pants-plugins/lock.txt" +python-default = "3rdparty/python/default_lock.txt" + +[python.resolves_to_interpreter_constraints] +# Pants runs with Python 3.9, so this lets us +# use different interpreter constraints when +# generating the lockfile than the rest of our project. +# +# Warning: it's still necessary to set the `interpreter_constraints` +# field on each `python_sources` and `python_tests` target in +# our plugin! This only impacts how the lockfile is generated. +pants-plugins = ["==3.9.*"] +``` + +Then, update your `pants_requirements` target generator with `resolve="pants-plugins"`, and run `pants generate-lockfiles`. You will also need to update the relevant `python_source` / `python_sources` and `python_test` / `python_tests` targets to set `resolve="pants-plugins"` (along with possibly the `interpreter_constraints` field). +See [Third-party dependencies](../python/overview/third-party-dependencies.mdx) for more information. + +## Publishing a plugin + +Pants plugins can be published to PyPI and consumed by other Pants users. + +As mentioned above: the plugin API is still unstable, and so supporting multiple versions of Pants with a single plugin version may be challenging. Give careful consideration to who you expect to consume the plugin, and what types of maintenance guarantees you hope to provide. + +### Thirdparty dependencies + +When publishing a plugin, ensure that any [`python_requirement` targets](../python/overview/third-party-dependencies.mdx) that the plugin depends on either: + +1. Do not overlap with [the requirements of Pants itself](https://github.com/pantsbuild/pants/blob/aa0932a54e8c1b6ed6f3be8e084a11b2f6c808e5/3rdparty/python/requirements.txt), or +2. Use range requirements that are compatible with Pants' own requirements. + +For example: if a particular version of Pants depends on `requests>=2.25.1` and your plugin must also depend on `requests`, then the safest approach is to specify exactly that range in your plugin's requirements.
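 + +For instance, a plugin's `BUILD` file might hypothetically pin a compatible range like this (the bounds shown are illustrative, not a recommendation for any particular Pants version): + +```python +python_requirement( + name="requests", + requirements=["requests>=2.25.1,<3"], +) +```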
 + +### Adapting to changed plugin APIs + +If a `@rule` API has been added or removed in versions of Pants that you'd like to support with your plugin, you can use conditional imports to register different `@rule`s based on the version: + +```python +from packaging.version import Version + +from pants.version import PANTS_SEMVER + +if PANTS_SEMVER < Version("2.10.0"): + import my.plugin.pants_pre_210 as plugin +else: + import my.plugin.pants_default as plugin + +def rules(): + return plugin.rules() +``` diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/_category_.json b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/_category_.json new file mode 100644 index 000000000..d6dfc1f87 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/_category_.json @@ -0,0 +1,4 @@ +{ + "label": "The Rules API", + "position": 4 +} diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/concepts.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/concepts.mdx new file mode 100644 index 000000000..3f5eb1473 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/concepts.mdx @@ -0,0 +1,336 @@ +--- + title: Concepts + sidebar_position: 0 +--- + +The core concepts of the Rules API. + +--- + +## Rules + +Plugin logic is defined in _rules_: [pure functions](https://en.wikipedia.org/wiki/Pure_function) that map a set of statically-declared input types to a statically-declared output type. + +Each rule is an `async` Python function annotated with the decorator `@rule`, which takes any number of parameters (including zero) and returns a value of one specific type. Rules must be annotated with [type hints](https://www.python.org/dev/peps/pep-0484/). + +For example, this rule maps `(int) -> str`: + +```python +from pants.engine.rules import rule + +@rule +async def int_to_str(i: int) -> str: + return str(i) +``` + +Although any Python type, including builtin types like `int`, can be a parameter or return type of a rule, in almost all cases rules will deal with values of custom Python classes. + +Generally, rules correspond to a step in your build process. For example, when adding a new linter, you may have a rule that maps `(Target, Shellcheck) -> LintResult`: + +```python +@rule +async def run_shellcheck(target: Target, shellcheck: Shellcheck) -> LintResult: + # Your logic. + return LintResult(stdout="", stderr="", exit_code=0) +``` + +You do not call a rule like you would a normal function. In the above examples, you would not say `int_to_str(26)` or `run_shellcheck(tgt, shellcheck)`. Instead, the Pants engine determines when rules are used and calls the rules for you. + +Each rule should be pure; you should not use side effects like `subprocess.run()`, `print()`, or the `requests` library. Instead, the Rules API has its own alternatives that are understood by the Pants engine and which work properly with its caching and parallelism. + +## The rule graph + +All the registered rules create a rule graph, with each type as a node and the edges being dependencies used to compute those types. + +For example, the `list` goal uses this rule definition and results in the below graph: + +```python +@goal_rule +async def list_targets( + console: Console, addresses: Addresses, list_subsystem: ListSubsystem +) -> ListGoal: + ... + return ListGoal(exit_code=0) +``` + +![](https://files.readme.io/7d5163f-Rule_graph_example-2.png) + +At the top of the graph will always be the goals that Pants runs, such as `list` and `test`.
These goals are the entry-point into the graph. When a user runs `pants list`, the engine looks for a special type of rule, called a `@goal_rule`, that implements the respective goal. From there, the `@goal_rule` might request certain types like `Console` and `Addresses`, which will cause other helper `@rule`s to be used. To view the graph for a goal, see: [Visualize the rule graph](./tips-and-debugging.mdx#debugging-visualize-the-rule-graph). + +The graph also has several "roots", such as `Console`, `Specs`, and `OptionsBootstrapper` in this example. Those roots are injected into the graph as the initial input, whereas all other types are derived from those roots. + +The engine will find a path through the rules to satisfy the types that you are requesting. In this example, we do not need to explicitly specify `Specs`; we only specify `Addresses` in our rule's parameters, and the engine finds a path from `Specs` to `Addresses` for us. This is similar to [Dependency Injection](https://www.freecodecamp.org/news/a-quick-intro-to-dependency-injection-what-it-is-and-when-to-use-it-7578c84fa88f/), but with a typed and validated graph. + +If the engine cannot find a path, or if there is ambiguity due to multiple possible paths, the rule graph will fail to compile. This ensures that the rule graph is always unambiguous. + +:::caution Rule graph errors can be confusing +We know that rule graph errors can be intimidating and confusing to understand. We are planning to improve them. In the meantime, please do not hesitate to ask for help in the #plugins channel on [Slack](/community/getting-help). + +Also see [Tips and debugging](./tips-and-debugging.mdx#debugging-rule-graph-issues) for some tips for how to approach these errors. +::: + +## `await Get` - awaiting results in a rule body + +In addition to requesting types in your rule's parameters, you can request types in the body of your rule. + +Add `await Get(OutputType, InputType, input)`, where the output type is what you are requesting and the input is what you're giving the engine for it to be able to compute the output. For example: + +```python +from pants.engine.rules import Get, rule + +@rule +async def run_shellcheck(target: Target, shellcheck: Shellcheck) -> LintResult: + ... + process_request = Process( + ["/bin/echo", str(target.address)], + description=f"Echo {target.address}", + ) + process_result = await Get(ProcessResult, Process, process_request) + return LintResult(stdout=process_result.stdout, stderr=process_result.stderr, exit_code=0) +``` + +Pants will run your rule like normal Python code until encountering the `await`, which will yield execution to the engine. The engine will look in the pre-compiled rule graph to determine how to go from `Process -> ProcessResult`. Once the engine gives back the resulting `ProcessResult` object, control will be returned back to your Python code. + +In this example, we could not have requested the type `ProcessResult` as a parameter to our rule because we needed to dynamically create a `Process` object. 
+
+Thanks to `await Get`, we can write a recursive rule to compute a [Fibonacci number](https://en.wikipedia.org/wiki/Fibonacci_number):
+
+```python
+from dataclasses import dataclass
+
+from pants.engine.rules import Get, rule
+
+@dataclass(frozen=True)
+class Fibonacci:
+    val: int
+
+@rule
+async def compute_fibonacci(n: int) -> Fibonacci:
+    if n < 2:
+        return Fibonacci(n)
+    x = await Get(Fibonacci, int, n - 2)
+    y = await Get(Fibonacci, int, n - 1)
+    return Fibonacci(x.val + y.val)
+```
+
+Another rule could then "call" our Fibonacci rule by using its own `Get`:
+
+```python
+@rule
+async def call_fibonacci(...) -> Foo:
+    fib = await Get(Fibonacci, int, 4)
+    ...
+```
+
+:::note `Get` constructor shorthand
+The verbose constructor for a `Get` object takes three parameters: `Get(OutputType, InputType, input)`, where `OutputType` and `InputType` are both types, and `input` is an instance of `InputType`.
+
+Instead, you can use `Get(OutputType, InputType(constructor arguments))`. These two are equivalent:
+
+- `Get(ProcessResult, Process, Process(["/bin/echo"]))`
+- `Get(ProcessResult, Process(["/bin/echo"]))`
+
+However, the below is invalid because Pants's AST parser will not be able to see what the `InputType` is:
+
+```python
+process = Process(["/bin/echo"])
+Get(ProcessResult, process)
+```
+
+:::
+
+:::note Why only one input?
+Currently, you can only give a single input. It is not possible to do something like `Get(OutputType, InputType1(...), InputType2(...))`.
+
+Instead, it's common for rules to create a "Request" data class, such as `PexRequest` or `SourceFilesRequest`. This request centralizes all the data it needs to operate into one data structure, which allows for call sites to say `await Get(SourceFiles, SourceFilesRequest, my_request)`, for example.
+
+See [https://github.com/pantsbuild/pants/issues/7490](https://github.com/pantsbuild/pants/issues/7490) for the tracking issue.
+:::
+
+### `MultiGet` for concurrency
+
+Every time your rule has the `await` keyword, the engine will pause execution until the result is returned. This means that if you have two `await Get`s, the engine will evaluate them sequentially, rather than concurrently.
+
+You can use `await MultiGet` to instead get multiple results in parallel.
+
+```python
+from pants.engine.rules import Get, MultiGet, rule
+
+@rule
+async def call_fibonacci(...) -> Foo:
+    results = await MultiGet(Get(Fibonacci, int, n) for n in range(100))
+    ...
+```
+
+The result of `MultiGet` is a tuple with each individual result, in the same order as the requests.
+
+You should rarely use a `for` loop with `await Get` - use `await MultiGet` instead, as shown above.
+
+`MultiGet` can either take a single iterable of `Get` objects or take multiple individual arguments of `Get` objects. Thanks to this, we can rewrite our Fibonacci rule to parallelize the two recursive calls:
+
+```python
+from pants.engine.rules import Get, MultiGet, rule
+
+@rule
+async def compute_fibonacci(n: int) -> Fibonacci:
+    if n < 2:
+        return Fibonacci(n)
+    x, y = await MultiGet(
+        Get(Fibonacci, int, n - 2),
+        Get(Fibonacci, int, n - 1),
+    )
+    return Fibonacci(x.val + y.val)
+```
+
+## Valid types
+
+Types used as inputs to `Get`s or `Query`s must be hashable, and therefore should be immutable. Specifically, the type must have implemented `__hash__()` and `__eq__()`. While the engine will not validate that your type is immutable, you should be careful to ensure this so that the cache works properly.
+
+Because you should use immutable types, use these collection types:
+
+- `tuple` instead of `list`.
+- `pants.util.frozendict.FrozenDict` instead of the built-in `dict`.
+- `pants.util.ordered_set.FrozenOrderedSet` instead of the built-in `set`. This will also preserve the insertion order, which is important for determinism.
+
+Unlike Python in general, the engine uses exact type matches, rather than considering inheritance; even if `Truck` subclasses `Vehicle`, the engine will view these types as completely separate when deciding which rules to use.
+
+You cannot use generic Python type hints in a rule's parameters or in a `Get()`. For example, a rule cannot return `Optional[Foo]`, or take as a parameter `Tuple[Foo, ...]`. To express generic type hints, you should instead create a class that stores that value.
+
+To disambiguate between different uses of the same type, you will usually want to "newtype" the types that you use. Rather than using the builtin `str` or `int`, for example, you should define a new, declarative class like `Name` or `Age`.
+
+### Dataclasses
+
+Python 3's [dataclasses](https://docs.python.org/3/library/dataclasses.html) work well with the engine because:
+
+1. If `frozen=True` is set, they are immutable and hashable.
+2. Dataclasses use type hints.
+3. Dataclasses are declarative and ergonomic.
+
+You do not need to use dataclasses. You can use alternatives like `attrs` or normal Python classes. However, dataclasses are a nice default.
+
+You should set `@dataclass(frozen=True)` for Python to autogenerate `__hash__()` and to ensure that the type is immutable.
+
+```python
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+@dataclass(frozen=True)
+class Name:
+    first: str
+    last: str | None
+
+@rule
+async def demo(name: Name) -> Foo:
+    ...
+```
+
+:::caution Don't use `NamedTuple`
+`NamedTuple` behaves similarly to dataclasses, but it should not be used because the `__eq__()` implementation uses structural equality, rather than the nominal equality used by the engine.
+:::
+
+:::note Custom dataclass `__init__()`
+Sometimes, you may want to have a custom `__init__()` constructor. For example, you may want your dataclass to store a `tuple[str, ...]`, but for your constructor to take the more flexible `Iterable[str]`, which you then convert to an immutable `tuple`.
+
+The Python docs suggest using `object.__setattr__` to set attributes in your `__init__` for frozen dataclasses.
+
+```python
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Iterable
+
+@dataclass(frozen=True)
+class Example:
+    args: tuple[str, ...]
+
+    def __init__(self, args: Iterable[str]) -> None:
+        object.__setattr__(self, "args", tuple(args))
+```
+
+:::
+
+### `Collection`: a newtype for `tuple`
+
+If you want a rule to use a homogeneous sequence, you can use `pants.engine.collection.Collection` to "newtype" a tuple. This will behave the same as a tuple, but will have a distinct type.
+
+```python
+from pants.engine.collection import Collection
+
+@dataclass(frozen=True)
+class LintResult:
+    stdout: str
+    stderr: str
+    exit_code: int
+
+
+class LintResults(Collection[LintResult]):
+    pass
+
+
+@rule
+async def demo(results: LintResults) -> Foo:
+    for result in results:
+        print(result.stdout)
+    ...
+```
+
+### `DeduplicatedCollection`: a newtype for `FrozenOrderedSet`
+
+If you want a rule to use a homogeneous set, you can use `pants.engine.collection.DeduplicatedCollection` to "newtype" a `FrozenOrderedSet`. This will behave the same as a `FrozenOrderedSet`, but will have a distinct type.
+ +```python +from pants.engine.collection import DeduplicatedCollection + +class RequirementStrings(DeduplicatedCollection[str]): + sort_input = True + + +@rule +async def demo(requirements: RequirementStrings) -> Foo: + for requirement in requirements: + print(requirement) + ... +``` + +You can optionally set the class property `sort_input`, which will often result in more cache hits with the Pantsd daemon. + +## Registering rules in `register.py` + +To register a new rule, use the `rules()` hook in your [`register.py` file](../overview.mdx). This function expects a list of functions annotated with `@rule`. + +```python title="pants-plugins/plugin1/register.py" +def rules(): + return [rule1, rule2] +``` + +Conventionally, each file will have a function called `rules()` and then `register.py` will re-export them. This is meant to make imports more organized. Within each file, you can use `collect_rules()` to automatically find the rules in the file. + +```python tab={"label":"pants-plugins/fortran/register.py"} +from fortran import fmt, test + +def rules(): + return [*fmt.rules(), *test.rules()] +``` + +```python tab={"label":"pants-plugins/fortran/fmt.py"} +from pants.engine.rules import collect_rules, rule + +@rule +async def setup_formatter(...) -> Formatter: + ... + +@rule +async def fmt_fortran(...) -> FormatResult: + ... + +def rules(): + return collect_rules() +``` + +```python tab={"label":"pants-plugins/fortran/test.py"} +from pants.engine.rules import collect_rules, rule + +@rule +async def run_fotran_test(...) -> TestResult: + ... + +def rules(): + return collect_rules() +``` diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/file-system.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/file-system.mdx new file mode 100644 index 000000000..1097542a9 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/file-system.mdx @@ -0,0 +1,311 @@ +--- + title: File system + sidebar_position: 3 +--- + +How to safely interact with the file system in your plugin. + +--- + +It is not safe to use functions like `open` or the non-pure operations of `pathlib.Path` like you normally might: this will break caching because they do not hook up to Pants's file watcher. + +Instead, Pants has several mechanisms to work with the file system in a safe and concurrent way. + +:::caution Missing certain file operations? +If it would help you to have a certain file operation, please let us know by either opening a new [GitHub issue](https://github.com/pantsbuild/pants/issues) or by messaging us on [Slack](/community/members) in the #plugins room. +::: + +## Core abstractions: `Digest` and `Snapshot` + +The core building block is a `Digest`, which is a lightweight reference to a set of files known about by the engine. + +- The `Digest` is only a reference; the files are stored in the engine's persistent [content-addressable storage (CAS)](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database). +- The files do not need to actually exist on disk. +- Every file uses a relative path. This allows the `Digest` to be passed around in different environments safely, such as running in a temporary directory locally or running through remote execution. +- The files may be binary files and/or text files. +- The `Digest` may refer to 0 - n files. If it's empty, the digest will be equal to `pants.engine.fs.EMPTY_DIGEST`. +- You will never create a `Digest` directly in rules, only in tests. 
Instead, you get a `Digest` by using `CreateDigest` or `PathGlobs`, or from the `output_digest` of a `Process` that you've run.
+
+Most of Pants's operations with the file system either accept a `Digest` as input or return a `Digest`. For example, when running a `Process`, you may provide a `Digest` as input.
+
+A `Snapshot` composes a `Digest` and adds the useful properties `files: tuple[str, ...]` and `dirs: tuple[str, ...]`, which store the sorted file names and directory names, respectively. For example:
+
+```python
+Snapshot(
+    digest=Digest(
+        fingerprint="21bcd9fcf01cc67e9547b7d931050c1c44d668e7c0eda3b5856aa74ad640098b",
+        serialized_bytes_length=162,
+    ),
+    files=("f.txt", "grandparent/parent/c.txt"),
+    dirs=("grandparent", "grandparent/parent"),
+)
+```
+
+A `Snapshot` is useful when you want to know which files a `Digest` refers to. For example, when running a tool, you might set `argv=snapshot.files`, and then pass `snapshot.digest` to the `Process` so that it has access to those files.
+
+Given a `Digest`, you may use the engine to enrich it into a `Snapshot`:
+
+```python
+from pants.engine.fs import Digest, Snapshot
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    snapshot = await Get(Snapshot, Digest, my_digest)
+```
+
+## `CreateDigest`: create new files
+
+`CreateDigest` allows you to create a new digest with whichever files you would like, even if they do not exist on disk.
+
+```python
+from pants.engine.fs import CreateDigest, Digest, FileContent
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest = await Get(Digest, CreateDigest([FileContent("f1.txt", b"hello world")]))
+```
+
+The `CreateDigest` constructor expects an iterable including any of these types:
+
+- `FileContent` objects, which represent a file to create. It takes a `path: str` parameter, a `contents: bytes` parameter, and an optional `is_executable: bool` parameter with a default of `False`.
+- `Directory` objects, which can be used to create empty directories. It takes a single parameter: `path: str`. You do not need to use this when creating a file inside a certain directory; this is only to create empty directories.
+- `FileEntry` objects, which are handles to existing files from `DigestEntries`. Do not manually create these.
+
+This does _not_ write the `Digest` to the build root. Use `Workspace.write_digest()` for that.
+
+## `PathGlobs`: read from filesystem
+
+`PathGlobs` allows you to read from the local file system using globbing, that is, sets of filenames with wildcard characters.
+
+```python
+from pants.engine.fs import Digest, PathGlobs
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest = await Get(Digest, PathGlobs(["**/*.txt", "!ignore_me.txt"]))
+```
+
+- All globs must be relative paths, relative to the build root.
+- `PathGlobs` uses the same syntax as the `sources` field, which is roughly Git's syntax. Use `*` for globs over just the current working directory, `**` for recursive globs over everything below (at any level beneath the current working directory), and prefix with `!` for ignores.
+- `PathGlobs` will ignore all values from the global option `pants_ignore`.
+
+By default, the engine will no-op for any globs that are unmatched. If you want to instead warn or error, set `glob_match_error_behavior=GlobMatchErrorBehavior.warn` or `GlobMatchErrorBehavior.error`.
This will require that you also set `description_of_origin`, which is a human-friendly description of where the `PathGlobs` is coming from so that the error message is helpful. For example: + +```python +from pants.engine.fs import GlobMatchErrorBehavior, PathGlobs + +PathGlobs( + globs=[shellcheck.options.config], + glob_match_error_behavior=GlobMatchErrorBehavior.error, + description_of_origin="the option `--shellcheck-config`", +) +``` + +If you set `glob_match_error_behavior`, you may also want to set `conjunction`. By default, only one glob must match. If you set `conjunction=GlobExpansionConjunction.all_match`, then all globs must match or the engine will warn or error. For example, this would fail, even if the config file existed: + +```python +from pants.engine.fs import GlobExpansionConjunction, GlobMatchErrorBehavior, PathGlobs + +PathGlobs( + globs=[shellcheck.options.config, "does_not_exist.txt"], + glob_match_error_behavior=GlobMatchErrorBehavior.error, + conjunction=GlobExpansionConjunction.all_match, + description_of_origin="the option `--shellcheck-config`", +) +``` + +If you only need to resolve the file names—and don't actually need to use the file content—you can use `await Get(Paths, PathGlobs)` instead of `await Get(Digest, PathGlobs)` or `await Get(Snapshot, PathGlobs)`. This will avoid "digesting" the files to the LMDB Store cache as a performance optimization. `Paths` has two properties: `files: tuple[str, ...]` and `dirs: tuple[str, ...]`. + +```python +from pants.engine.fs import Paths, PathGlobs +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + paths = await Get(Paths, PathGlobs(["**/*.txt", "!ignore_me.txt"])) + logger.info(paths.files) +``` + +## `DigestContents`: read contents of files + +`DigestContents` allows you to get the file contents from a `Digest`. + +```python +from pants.engine.fs import Digest, DigestContents +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + digest_contents = await Get(DigestContents, Digest, my_digest) + for file_content in digest_contents: + logger.info(file_content.path) + logger.info(file_content.content) # This will be `bytes`. +``` + +The result will be a sequence of `FileContent` objects, which each have a property `path: str` and a property `content: bytes`. You may want to call `content.decode()` to convert to `str`. + +:::caution You may not need `DigestContents` +Only use `DigestContents` if you need to read and operate on the content of files directly in your rule. + +- If you are running a `Process`, you only need to pass the `Digest` as input and that process will be able to read all the files in its environment. If you only need a list of files included in the digest, use `Get(Snapshot, Digest)`. + +- If you just need to manipulate the directory structure of a `Digest`, such as renaming files, use `DigestEntries` with `CreateDigest` or use `AddPrefix` and `RemovePrefix`. These avoid reading the file content into memory. + +::: + +:::caution Does not handle empty directories in a `Digest` +`DigestContents` does not have a way to represent empty directories in a `Digest` since it is only a sequence of `FileContent` objects. That is, passing the `FileContent` objects to `CreateDigest` will not result in the original `Digest` if there were empty directories in that original `Digest`. Use `DigestEntries` instead if your rule needs to handle empty directories in a `Digest`. 
+
+:::
+
+## `DigestEntries`: lightweight handles to files
+
+`DigestEntries` allows a rule to obtain the filenames (with content digests) and empty directories from a `Digest`. The value of a `DigestEntries` is a sequence of `FileEntry` and `Directory` objects representing files and empty directories in the `Digest`, respectively. That sequence can be passed to `CreateDigest` to recreate the original `Digest`.
+
+This is useful if you need to manipulate the directory structure of a `Digest` without actually needing to bring the file contents into memory (which is what occurs if you were to use `DigestContents`).
+
+```python
+from pants.engine.fs import Digest, DigestEntries, Directory, FileEntry
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest_entries = await Get(DigestEntries, Digest, my_digest)
+    for entry in digest_entries:
+        if isinstance(entry, FileEntry):
+            logger.info(entry.path)
+            logger.info(entry.file_digest)  # This will be the digest of the content.
+        elif isinstance(entry, Directory):
+            logger.info(f"Empty directory: {entry.path}")
+```
+
+## `MergeDigests`: merge collections of files
+
+Often, you will need to provide a single `Digest` somewhere in your plugin—such as the `input_digest` for a `Process`—but you may have multiple `Digest`s that you want to use. Use `MergeDigests` to combine them all into a single `Digest`.
+
+```python
+from pants.engine.fs import Digest, MergeDigests
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    digest = await Get(
+        Digest,
+        MergeDigests([downloaded_tool_digest, config_file_digest, source_files_snapshot.digest]),
+    )
+```
+
+- It is okay if multiple digests include the same file, so long as they have identical content.
+- If any digests have different content for the same file, the engine will error. Unlike Git, the engine does not attempt to resolve merge conflicts.
+- It is okay if some digests are empty, i.e. `EMPTY_DIGEST`.
+
+## `DigestSubset`: extract certain files from a `Digest`
+
+To get certain files out of a `Digest`, use `DigestSubset`.
+
+```python
+from pants.engine.fs import Digest, DigestSubset, PathGlobs
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    new_digest = await Get(
+        Digest, DigestSubset(original_digest, PathGlobs(["file1.txt"]))
+    )
+```
+
+See the section `PathGlobs` above for more details on how that type works.
+
+## `AddPrefix` and `RemovePrefix`
+
+Use `AddPrefix` and `RemovePrefix` to change the paths of every file in the digest, while keeping the file contents the same.
+
+```python
+from pants.engine.fs import AddPrefix, Digest, RemovePrefix
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    added_prefix = await Get(Digest, AddPrefix(original_digest, "new_prefix/subdir"))
+    removed_prefix = await Get(Digest, RemovePrefix(added_prefix, "new_prefix/subdir"))
+    assert removed_prefix == original_digest
+```
+
+`RemovePrefix` will error if it encounters any files that do not have the requested prefix.
+
+## `Workspace.write_digest()`: save to disk
+
+To write a digest to disk in the build root, request the type `Workspace`, then use its method `.write_digest()`.
+
+```python
+from pants.engine.fs import Workspace
+from pants.engine.rules import goal_rule
+
+@goal_rule
+async def run_my_goal(..., workspace: Workspace) -> MyGoal:
+    ...
+    # Note that this is a normal method; we do not use `await Get`.
+    workspace.write_digest(digest)
+```
+
+- The digest will always be written to the build root; you cannot write to arbitrary locations on your machine.
+- You may set the optional parameter `path_prefix: str` with a relative path.
+
+`Workspace` is a special type that can only be requested in `@goal_rule`s because it is only safe to write to disk in a `@goal_rule`. So, a common pattern is for "downstream" rules to return a `Digest` with the contents they want to write to disk, and then for the `@goal_rule` to aggregate all the results and write them to disk. For example, for the `fmt` goal, each `FmtResult` includes a `digest` field.
+
+For better performance, avoid calling `workspace.write_digest` multiple times, such as in a `for` loop. Instead, first merge all the digests, then write them in a single call.
+
+Bad:
+
+```python
+for digest in all_digests:
+    workspace.write_digest(digest)
+```
+
+Good:
+
+```python
+merged_digest = await Get(Digest, MergeDigests(all_digests))
+workspace.write_digest(merged_digest)
+```
+
+## `DownloadFile`
+
+`DownloadFile` allows you to download an asset using a `GET` request.
+
+```python
+from pants.engine.fs import Digest, DownloadFile, FileDigest
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    ...
+    url = "https://github.com/pex-tool/pex/releases/download/v2.1.14/pex"
+    file_digest = FileDigest(
+        "12937da9ad5ad2c60564aa35cb4b3992ba3cc5ef7efedd44159332873da6fe46",
+        2637138
+    )
+    downloaded = await Get(Digest, DownloadFile(url, file_digest))
+```
+
+`DownloadFile` expects a `url: str` parameter pointing to a stable URL for the asset, along with an `expected_digest: FileDigest` parameter. A `FileDigest` is like a normal `Digest`, but represents a single file, rather than a set of files/directories. To determine the `expected_digest`, manually download the file, then run `shasum -a 256` to compute the fingerprint and `wc -c` to compute the expected length of the downloaded file in bytes.
+
+Often, you will want to download a pre-compiled binary for a tool. When doing this, use `ExternalTool` instead for help with extracting the binary from the download. See [Installing tools](./installing-tools.mdx).
+
+:::caution HTTP requests without digests are unsafe
+It is not safe to use `DownloadFile` for mutable HTTP requests, as it will never ping the server for updates once it is cached. It is also not safe to use the `requests` library or similar because it will not be cached safely.
+
+Instead, you can use a `Process` with uniquely identifying information in its arguments to run `/usr/bin/curl`.
+:::
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/goal-rules.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/goal-rules.mdx
new file mode 100644
index 000000000..1eaa43206
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/goal-rules.mdx
@@ -0,0 +1,208 @@
+---
+    title: Goal rules
+    sidebar_position: 1
+---
+
+How to create new goals.
+
+---
+
+For many [plugin tasks](../common-plugin-tasks/index.mdx), you will be extending existing goals, such as adding a new linter to the `lint` goal. However, you may instead want to create a new goal, such as a `publish` goal. This page explains how to create a new goal.
+
+As explained in [Concepts](./concepts.mdx), `@goal_rule`s are the entry-point into the rule graph. When a user runs `pants my-goal`, the Pants engine will look for the respective `@goal_rule`.
That `@goal_rule` will usually request other types, either as parameters in the `@goal_rule` signature or through `await Get`. But unlike a `@rule`, a `@goal_rule` may also trigger side effects (such as running interactive processes, writing to the filesystem, etc) via `await Effect`. + +Often, you can keep all of your logic inline in the `@goal_rule`. As your `@goal_rule` gets more complex, you may end up factoring out helper `@rule`s, but you do not need to start with writing helper `@rule`s. + +## How to register a new goal + +There are four steps to creating a new [goal](../../using-pants/key-concepts/goals.mdx) with Pants: + +1. Define a subclass of `GoalSubsystem`. This is the API to your goal. + 1. Set the class property `name` to the name of your goal. + 2. Set the class property `help`, which is used by `pants help`. + 3. You may register options through attributes of `pants.option.option_types` types. See [Options and subsystems](./options-and-subsystems.mdx). +2. Define a subclass of `Goal`. When a user runs `pants my-goal`, the engine will request your subclass, which is what causes the `@goal_rule` to run. + 1. Set the class property `subsystem_cls` to the `GoalSubsystem` from the previous step. + 2. A `Goal` takes a single argument in its constructor, `exit_code: int`. Pants will use this to determine what its own exit code should be. +3. Define an `@goal_rule`, which must return the `Goal` from the previous step and set its `exit_code`. + 1. For most goals, simply return `MyGoal(exit_code=0)`. Some goals like `lint` and `test` will instead propagate the error code from the tools they run. +4. Register the `@goal_rule` in a `register.py` file. + +```python tab={"label":"pants-plugins/example/hello_world.py"} +from pants.engine.goal import Goal, GoalSubsystem +from pants.engine.rules import collect_rules, goal_rule + + +class HelloWorldSubsystem(GoalSubsystem): + name = "hello-world" + help = "An example goal." + + +class HelloWorld(Goal): + subsystem_cls = HelloWorldSubsystem + environment_behavior = Goal.EnvironmentBehavior.LOCAL_ONLY + + +@goal_rule +async def hello_world() -> HelloWorld: + return HelloWorld(exit_code=1) + + +def rules(): + return collect_rules() +``` + +```python tab={"label":"pants-plugins/example/register.py"} +from example import hello_world + +def rules(): + return [*hello_world.rules()] +``` + +You may now run `pants hello-world`, which should cause Pants to return with an error code of 1 (run `echo $?` to verify). Precisely, this causes the engine to request the type `HelloWorld`, which results in running the `@goal_rule` `hello_world`. + +## `Console`: output to stdout/stderr + +To output to the user, request the type `Console` as a parameter in your `@goal_rule`. This is a special type that may only be requested in `@goal_rules` and allows you to output to stdout and stderr. + +```python +from pants.engine.console import Console +... + +@goal_rule +async def hello_world(console: Console) -> HelloWorld: + console.print_stdout("Hello!") + console.print_stderr("Uh oh, an error.") + return HelloWorld(exit_code=1) +``` + +### Using colors + +You may output in color by using the methods `.blue()`, `.cyan()`, `.green()`, `.magenta()`, `.red()`, and `.yellow()`. The colors will only be used if the global option `--colors` is True. + +```python +console.print_stderr(f"{console.red('𐄂')} Error encountered.") +``` + +### `Outputting` mixin (optional) + +If your goal's purpose is to emit output, it may be helpful to use the mixin `Outputting`. 
This mixin will register the option `--output-file`, which allows the user to redirect the goal's stdout.
+
+```python
+from pants.engine.goal import Goal, GoalSubsystem, Outputting
+from pants.engine.rules import goal_rule
+
+class HelloWorldSubsystem(Outputting, GoalSubsystem):
+    name = "hello-world"
+    help = "An example goal."
+
+...
+
+@goal_rule
+async def hello_world(
+    console: Console, hello_world_subsystem: HelloWorldSubsystem
+) -> HelloWorld:
+    with hello_world_subsystem.output(console) as write_stdout:
+        write_stdout("Hello world!")
+    return HelloWorld(exit_code=0)
+```
+
+### `LineOriented` mixin (optional)
+
+If your goal's purpose is to emit output -- and that output is naturally split by new lines -- it may be helpful to use the mixin `LineOriented`. This subclasses `Outputting`, so it will register both the options `--output-file` and `--sep`, which allows the user to change the separator to not be `\n`.
+
+```python
+from pants.engine.goal import Goal, GoalSubsystem, LineOriented
+from pants.engine.rules import goal_rule
+
+class HelloWorldSubsystem(LineOriented, GoalSubsystem):
+    name = "hello-world"
+    help = "An example goal."
+
+...
+
+@goal_rule
+async def hello_world(
+    console: Console, hello_world_subsystem: HelloWorldSubsystem
+) -> HelloWorld:
+    with hello_world_subsystem.line_oriented(console) as print_stdout:
+        print_stdout("0")
+        print_stdout("1")
+    return HelloWorld(exit_code=0)
+```
+
+## How to operate on Targets
+
+Most goals will want to operate on targets. To do this, specify `Targets` as a parameter of your goal rule.
+
+```python
+from pants.engine.target import Targets
+...
+
+@goal_rule
+async def hello_world(console: Console, targets: Targets) -> HelloWorld:
+    for target in targets:
+        console.print_stdout(target.address.spec)
+    return HelloWorld(exit_code=0)
+```
+
+This example will print the address of any targets specified by the user, just as the `list` goal behaves.
+
+```bash
+$ pants hello-world helloworld/util::
+helloworld/util
+helloworld/util:tests
+```
+
+See [Rules and the Target API](./rules-and-the-target-api.mdx) for detailed information on how to use these targets in your rules, including accessing the metadata specified in BUILD files.
+
+:::caution Common mistake: requesting the type of target you want in the `@goal_rule` signature
+For example, if you are writing a `publish` goal, and you expect to operate on `python_distribution` targets, you might think to request `PythonDistribution` in your `@goal_rule` signature:
+
+```python
+@goal_rule
+async def publish(distribution: PythonDistribution, console: Console) -> Publish:
+    ...
+```
+
+This will not work because the engine has no path in the rule graph to resolve a `PythonDistribution` type given the initial input types to the rule graph (the "roots").
+
+Instead, request `Targets`, which will give you all the targets that the user specified on the command line. The engine knows how to resolve this type because it can go from `Specs` -> `Addresses` -> `Targets`.
+
+From here, filter out the relevant targets you want using the Target API (see [Rules and the Target API](./rules-and-the-target-api.mdx)).
+
+```python
+from pants.engine.target import Targets
+
+@goal_rule
+async def publish(targets: Targets, console: Console) -> Publish:
+    relevant_targets = [
+        tgt for tgt in targets
+        if tgt.has_field(PythonPublishDestination)
+    ]
+```
+
+:::
+
+### Only care about source files?
+
+If you only care about files, and you don't need any metadata from BUILD files, then you can request `SpecsPaths` instead of `Targets`.
+
+```python
+from pants.engine.fs import SpecsPaths
+...
+
+@goal_rule
+async def hello_world(console: Console, specs_paths: SpecsPaths) -> HelloWorld:
+    for f in specs_paths.files:
+        console.print_stdout(f)
+    return HelloWorld(exit_code=0)
+```
+
+`SpecsPaths.files` will list all files matched by the specs, e.g. `::` will match every file in the project (regardless of whether targets own the files).
+
+To convert `SpecsPaths` into a [`Digest`](./file-system.mdx), use `await Get(Digest, PathGlobs(globs=specs_paths.files))`.
+
+:::note Name clashing
+It is very unlikely, but still possible, that adding a custom goal with an unfortunate name may cause issues when certain existing Pants options are passed in the command line. For instance, executing a goal named `local` with a particular option (in this case, the global `local_cache` option), e.g. `pants --no-local-cache local ...`, would fail since there's no `--no-cache` flag defined for the `local` goal.
:::
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/index.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/index.mdx
new file mode 100644
index 000000000..eb243a93d
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/index.mdx
@@ -0,0 +1,20 @@
+---
+    title: The Rules API
+    sidebar_position: 3
+---
+
+Adding logic to your plugin.
+
+---
+
+- [Concepts](./concepts.mdx)
+- [Goal rules](./goal-rules.mdx)
+- [Options and subsystems](./options-and-subsystems.mdx)
+- [File system](./file-system.mdx)
+- [Processes](./processes.mdx)
+- [Installing tools](./installing-tools.mdx)
+- [Rules and the Target API](./rules-and-the-target-api.mdx)
+- [Union rules (advanced)](./union-rules-advanced.mdx)
+- [Logging and dynamic output](./logging-and-dynamic-output.mdx)
+- [Testing rules](./testing-plugins.mdx)
+- [Tips and debugging](./tips-and-debugging.mdx)
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/installing-tools.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/installing-tools.mdx
new file mode 100644
index 000000000..b7e1ba838
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/installing-tools.mdx
@@ -0,0 +1,227 @@
+---
+    title: Installing tools
+    sidebar_position: 5
+---
+
+Various methods for Pants to access the tools your plugin needs.
+
+---
+
+## `BinaryPaths`: Find already installed binaries
+
+For certain tools that are hard to automatically install—such as Docker or language interpreters—you may want to assume that the user already has the tool installed on their machine.
+
+The simplest approach is to assume that the binary is installed at a fixed absolute path, such as `/bin/echo` or `/usr/bin/perl`. In the `argv` for your `Process`, use this absolute path as your first element.
+
+If you instead want to allow the binary to be located anywhere on a user's machine, you can use `BinaryPaths` to search certain directories—such as a user's `$PATH`—to find the absolute path to the binary.
+
+```python
+from pants.core.util_rules.system_binaries import BinaryPathRequest, BinaryPaths
+from pants.engine.process import Process, ProcessResult
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    docker_paths = await Get(
+        BinaryPaths,
+        BinaryPathRequest(
+            binary_name="docker",
+            search_path=["/usr/bin", "/bin"],
+        )
+    )
+    docker_bin = docker_paths.first_path
+    if docker_bin is None:
+        raise OSError("Could not find 'docker'.")
+    result = await Get(ProcessResult, Process(argv=[docker_bin.path, ...], ...))
+```
+
+`BinaryPaths` has a field called `paths: tuple[BinaryPath, ...]`, which stores all the discovered absolute paths to the specified binary. Each `BinaryPath` object has the fields `path: str`, such as `/usr/bin/docker`, and `fingerprint: str`, which is used to invalidate the cache if the binary changes. The results will be ordered by the order of `search_path`, meaning that earlier entries in `search_path` will show up earlier in the result.
+
+`BinaryPaths` also has a convenience property called `first_path: BinaryPath | None`, which will return the first matching path, if any.
+
+In this example, the `search_path` is hardcoded. Instead, you may want to create a [subsystem](./options-and-subsystems.mdx) to allow users to override the search path through a dedicated option. See [pex_environment.py](https://github.com/pantsbuild/pants/blob/57a47457bda0b0dfb0882d851ccd58a7535f15c1/src/python/pants/backend/python/rules/pex_environment.py#L60-L71) for an example that allows the user to use the special string `<PATH>` to read the user's `$PATH` environment variable.
+
+:::note Checking for valid binaries (recommended)
+When setting up a `BinaryPathRequest`, you can optionally pass the argument `test: BinaryPathTest`. When discovering a binary, Pants will run your test and only use the binary if the return code is 0. Pants will also fingerprint the output and invalidate the cache if the output changes from before, such as because the user upgraded the version of the tool.
+
+Why do this? This is helpful to ensure that all discovered binaries are valid and safe. This is also important for Pants to be able to detect when the user has changed the binary, such as upgrading its version.
+
+`BinaryPathTest` takes the argument `args: Iterable[str]`, which is the arguments that Pants should run on your binary to ensure that it's a valid program. Usually, you'll set `args=["--version"]`.
+
+```python
+from pants.core.util_rules.system_binaries import BinaryPathRequest, BinaryPathTest
+
+BinaryPathRequest(
+    binary_name="docker",
+    search_path=["/usr/bin", "/bin"],
+    test=BinaryPathTest(args=["--version"]),
+)
+```
+
+You can optionally set `fingerprint_stdout=False` in the `BinaryPathTest` constructor, but usually, you should keep the default of `True`.
+:::
+
+## `ExternalTool`: Install pre-compiled binaries
+
+If your tool has a pre-compiled binary available online, Pants can download and use that binary automatically for users. This is often a better user experience than requiring the users to pre-install the tool. This will also make your build more deterministic because everyone will be using the same binary.
+
+First, manually download the file. Typically, the downloaded file will be an archive like a `.zip` or `.tar.xz` file, but it may also be the actual binary. Then, run `shasum -a 256` on the downloaded file to get its digest ID, and `wc -c` to get its number of bytes.
+
+If the downloaded file is an archive, you will also need to find the relative path within the archive to the binary, such as `bin/shellcheck`. You may need to use a tool like `unzip` to inspect the archive.
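+
+If you'd rather compute the fingerprint and length in Python than with `shasum` and `wc`, a minimal sketch follows; the filename is hypothetical and assumes you've already saved the download locally.
+
+```python
+# Sketch: compute the two values that `default_known_versions` entries need.
+import hashlib
+from pathlib import Path
+
+data = Path("shellcheck-v0.7.1.darwin.x86_64.tar.xz").read_bytes()  # hypothetical local file
+print(hashlib.sha256(data).hexdigest())  # the sha256 fingerprint, as from `shasum -a 256`
+print(len(data))                         # the byte length, as from `wc -c`
+```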
+
+With this information, you can define a new `ExternalTool`:
+
+```python
+from pants.core.util_rules.external_tool import ExternalTool
+from pants.engine.platform import Platform
+
+class Shellcheck(ExternalTool):
+    options_scope = "shellcheck"
+    help = "A linter for shell scripts."
+
+    default_version = "v0.7.1"
+    default_known_versions = [
+        "v0.7.1|macos_arm64 |b080c3b659f7286e27004aa33759664d91e15ef2498ac709a452445d47e3ac23|1348272",
+        "v0.7.1|macos_x86_64|b080c3b659f7286e27004aa33759664d91e15ef2498ac709a452445d47e3ac23|1348272",
+        "v0.7.1|linux_arm64 |b50cc31509b354ab5bbfc160bc0967567ed98cd9308fd43f38551b36cccc4446|1432492",
+        "v0.7.1|linux_x86_64|64f17152d96d7ec261ad3086ed42d18232fcb65148b44571b564d688269d36c8|1443836",
+    ]
+
+    def generate_url(self, plat: Platform) -> str:
+        platform_mapping = {
+            "macos_arm64": "darwin.x86_64",
+            "macos_x86_64": "darwin.x86_64",
+            "linux_arm64": "linux.aarch64",
+            "linux_x86_64": "linux.x86_64",
+        }
+        plat_str = platform_mapping[plat.value]
+        return (
+            f"https://github.com/koalaman/shellcheck/releases/download/{self.version}/"
+            f"shellcheck-{self.version}.{plat_str}.tar.xz"
+        )
+
+    def generate_exe(self, _: Platform) -> str:
+        return f"./shellcheck-{self.version}/shellcheck"
+```
+
+You must define the class properties `default_version` and `default_known_versions`. `default_known_versions` is a list of pipe-separated strings in the form `version|platform|sha256|length`. Use the values you found earlier by running `shasum` and `wc` for sha256 and length, respectively. `platform` should be one of `linux_arm64`, `linux_x86_64`, `macos_arm64`, and `macos_x86_64`.
+
+You must also define the methods `generate_url`, which is the URL to make a GET request to download the file, and `generate_exe`, which is the relative path to the binary in the downloaded digest. Both methods take `plat: Platform` as a parameter.
+
+Because an `ExternalTool` is a subclass of [`Subsystem`](./options-and-subsystems.mdx), you must also define an `options_scope`. You may optionally register additional options from `pants.option.option_types`.
+
+In your rules, include the `ExternalTool` as a parameter of the rule, then use `Get(DownloadedExternalTool, ExternalToolRequest)` to download and extract the tool.
+
+```python
+from pants.core.util_rules.external_tool import DownloadedExternalTool, ExternalToolRequest
+from pants.engine.platform import Platform
+
+@rule
+async def demo(shellcheck: Shellcheck, platform: Platform, ...) -> Foo:
+    shellcheck = await Get(
+        DownloadedExternalTool,
+        ExternalToolRequest,
+        shellcheck.get_request(platform)
+    )
+    result = await Get(
+        ProcessResult,
+        Process(argv=[shellcheck.exe, ...], input_digest=shellcheck.digest, ...)
+    )
+```
+
+A `DownloadedExternalTool` object has two fields: `digest: Digest` and `exe: str`. Use the `.exe` field as the first value of a `Process`'s `argv`, and use the `.digest` in the `Process`'s `input_digest`. If you want to use multiple digests for the input, call `Get(Digest, MergeDigests)` with the `DownloadedExternalTool.digest` included.
+
+## `Pex`: Install binaries through pip
+
+If a tool can be installed via `pip` - e.g., Pytest or Black - you can install and run it using `Pex`.
+
+```python
+from pants.backend.python.target_types import ConsoleScript
+from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
+from pants.backend.python.util_rules.pex import (
+    Pex,
+    PexProcess,
+    PexRequest,
+    PexRequirements,
+)
+from pants.engine.process import FallibleProcessResult
+
+@rule
+async def demo(...) -> Foo:
+    pex = await Get(
+        Pex,
+        PexRequest(
+            output_filename="black.pex",
+            internal_only=True,
+            requirements=PexRequirements(["black==19.10b0"]),
+            interpreter_constraints=InterpreterConstraints([">=3.6"]),
+            main=ConsoleScript("black"),
+        )
+    )
+    result = await Get(
+        FallibleProcessResult,
+        PexProcess(pex, argv=["--check", ...], ...),
+    )
+```
+
+When defining a `PexRequest` for a tool, you must give arguments for `output_filename`, `internal_only`, `requirements`, `main`, and usually `interpreter_constraints`.
+
+Set `internal_only` if the PEX is only used as an internal tool, rather than distributed to users (e.g., via the `package` goal). This speeds up performance when building the PEX.
+
+The `main` argument can be one of:
+
+- `ConsoleScript("scriptname")`, where `scriptname` is a [console_script](https://packaging.python.org/specifications/entry-points/) that the tool installs
+- `EntryPoint.parse("module")`, which executes the given module
+- `EntryPoint.parse("module:func")`, which executes the given nullary function in the given module.
+
+There are several other optional parameters that may be helpful.
+
+The resulting `Pex` object has a `digest: Digest` field containing the built `.pex` file. This digest should be included in the `input_digest` to the `Process` you run.
+
+Instead of the normal `Get(ProcessResult, Process)`, you should use `Get(ProcessResult, PexProcess)`, which will set up the environment properly for your Pex to execute. There is a predefined rule to go from `PexProcess -> Process`, so `Get(ProcessResult, PexProcess)` will cause the engine to run `PexProcess -> Process -> ProcessResult`.
+
+`PexProcess` requires arguments for `pex: Pex`, `argv: Iterable[str]`, and `description: str`. It has several optional parameters that mirror the arguments to `Process`. If you specify `input_digest`, be careful to first use `Get(Digest, MergeDigests)` on the `pex.digest` and any of the other input digests.
+
+:::note Use `PythonToolBase` when you need a Subsystem
+Often, you will want to create a [`Subsystem`](./options-and-subsystems.mdx) for your Python tool
+to allow users to set options to configure the tool. You can subclass `PythonToolBase`, which
+subclasses `Subsystem`, to do this:
+
+```python
+from pants.backend.python.subsystems.python_tool_base import PythonToolBase
+from pants.backend.python.target_types import ConsoleScript
+from pants.option.option_types import StrOption
+
+class Black(PythonToolBase):
+    options_scope = "black"
+    help = "The Black Python code formatter (https://black.readthedocs.io/)."
+
+    default_main = ConsoleScript("black")
+
+    register_interpreter_constraints = True
+    default_interpreter_constraints = ["CPython>=3.8,<3.9"]
+
+    default_lockfile_resource = ("pants.backend.python.lint.black", "black.lock")
+
+    config = StrOption(
+        default=None,
+        advanced=True,
+        help="Path to Black's pyproject.toml config file",
+    )
+```
+
+You must define the class properties `options_scope` and `default_main`, and provide a
+default lockfile at the location referenced by `default_lockfile_resource`.
+
+Then, you can set up your `Pex` like this:
+
+```python
+@rule
+async def demo(black: Black, ...) -> Foo:
+    pex = await Get(Pex, PexRequest, black.to_pex_request())
+```
+
+:::
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/logging-and-dynamic-output.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/logging-and-dynamic-output.mdx
new file mode 100644
index 000000000..0a6b73563
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/logging-and-dynamic-output.mdx
@@ -0,0 +1,35 @@
+---
+    title: Logging and dynamic output
+    sidebar_position: 8
+---
+
+How to add logging and influence the dynamic UI.
+
+---
+
+Even though a [`@goal_rule`](./goal-rules.mdx) is the only type of rule that can print to `stdout` (via the special `Console` type), any rule can log to stderr and change how the rule appears in the dynamic UI.
+
+## Adding logging
+
+To add logging, use the [`logging` standard library module](https://docs.python.org/3/library/logging.html).
+
+```python
+import logging
+
+from pants.engine.rules import rule
+
+logger = logging.getLogger(__name__)
+
+@rule
+async def demo(...) -> Foo:
+    logger.info("Inside the demo rule.")
+    ...
+```
+
+You can use `logger.info`, `logger.warning`, `logger.error`, and `logger.debug`. You can then change your log level by setting the `-l`/`--level` option, e.g. `pants -ldebug my-goal`.
+
+## Changing the dynamic UI
+
+## Streaming results (advanced)
+
+When you run `pants fmt`, `pants lint`, and `pants test`, you may notice that we "stream" the results. As soon as an individual process finishes, we print the result, rather than waiting for all the processes to finish and dumping at the end.
+
+We also set the log level dynamically. If something succeeds, we log the result at `INFO`, but if something fails, we use `WARN`.
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/options-and-subsystems.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/options-and-subsystems.mdx
new file mode 100644
index 000000000..77bf09e3b
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/options-and-subsystems.mdx
@@ -0,0 +1,100 @@
+---
+    title: Options and subsystems
+    sidebar_position: 2
+---
+
+How to add options to your plugin.
+
+---
+
+## Defining options
+
+As explained in [Options](../../using-pants/key-concepts/options.mdx), options are partitioned into named scopes, like `[test]` and `[isort]`. Each of these scopes corresponds to a _subsystem_.
+
+To add new options:
+
+1. Define a subclass of `Subsystem` from `pants.option.subsystem`.
+   1. Set the class property `options_scope` with the name of the subsystem.
+      - This value will be prepended to all options in the subsystem, e.g. `--skip` will become `--shellcheck-skip`.
+   2. Set the class property `help`, which is used by `pants help`.
+2. Add new options through `pants.option.option_types` class attributes.
+3. Register the `Subsystem` with `Subsystem.rules()` and `register.py`.
+   - You don't need `Subsystem.rules()` if the `Subsystem` is used in an `@rule` because `collect_rules()` will recognize it. It doesn't hurt to keep this around, though.
+
+```python tab={"label":"pants-plugins/example/shellcheck.py"}
+from pants.option.option_types import BoolOption
+from pants.option.subsystem import Subsystem
+
+
+class ShellcheckSubsystem(Subsystem):
+    options_scope = "shellcheck"
+    help = "The Shellcheck linter."
+
+    config_discovery = BoolOption(
+        default=True,
+        advanced=True,
+        help="Whether Pants should...",
+    )
+
+
+def rules():
+    return [*ShellcheckSubsystem.rules()]
+```
+
+```python tab={"label":"pants-plugins/example/register.py"}
+from example import shellcheck
+
+def rules():
+    return [*shellcheck.rules()]
+```
+
+The subsystem should now show up when you run `pants help shellcheck`.
+
+:::note `GoalSubsystem`
+As explained in [Goal rules](./goal-rules.mdx), goals use a subclass of `Subsystem`: `GoalSubsystem` from `pants.engine.goal`.
+
+`GoalSubsystem` behaves the same way as a normal subsystem, except that you set the class property `name` rather than `options_scope`. The `name` will auto-populate the `options_scope`.
+:::
+
+### Option types
+
+These classes correspond to the option types at [Options](../../using-pants/key-concepts/options.mdx).
+
+Every option type requires that you set the keyword argument `help`.
+
+Most types require that you set `default`. You can optionally set `advanced=True` on any option for it to only show up in `pants help-advanced`.
+
+The option name will default to the class attribute name, e.g. `my_opt = StrOption()` will default to `--my-opt`.
+You can instead pass a string positional argument, e.g. `my_opt = StrOption("--different-name")`.
+
+| Class name | Notes |
+| :--- | :--- |
+| `StrOption` | Must set `default` to a `str` or `None`. |
+| `BoolOption` | Must set `default` to a `bool` or `None`. <br/><br/> Reminder when choosing a flag name: Pants will recognize the command line argument `--no-my-flag-name` as equivalent to `--my-flag-name=false`. |
+| `IntOption` | Must set `default` to an `int` or `None`. |
+| `FloatOption` | Must set `default` to a `float` or `None`. |
+| `EnumOption` | This is like `StrOption`, but with the valid choices constrained to your enum. <br/><br/> To use, define an `enum.Enum`. The values of your enum will be what users can type, e.g. `'kale'` and `'spinach'` below: <br/><br/> `class LeafyGreens(Enum):` <br/> `    KALE = "kale"` <br/> `    SPINACH = "spinach"` <br/><br/> You must either set `default` to a value from your enum or `None`. If you set `default=None`, you must set `enum_type`. |
+| List options: <br/><br/> - `StrListOption` <br/> - `BoolListOption` <br/> - `IntListOption` <br/> - `FloatListOption` <br/> - `EnumListOption` | Default is `[]` if `default` is not set. <br/><br/> For `EnumListOption`, you must set the keyword argument `enum_type`. |
+| `DictOption` | Default is `{}` if `default` is not set. <br/><br/> Currently, Pants does not offer any validation of the dictionary entries, e.g. `dict[str, str]` vs `dict[str, list[str]]`. (Although per TOML specs, the key should always be `str`.) You may want to add eager validation that users are inputting options the correct way. |
+| `ArgsListOption` | Adds an `--args` option, e.g. `--isort-args`. This type is extra useful because it uses a special `shell_str` that lets users type the arguments as a single string with spaces, which Pants will _shlex_ for them. That is, `--args='arg1 arg2'` gets converted to `['arg1', 'arg2']`. <br/><br/> You must set the keyword argument `example`, e.g. `'--arg1 arg2'`. You must also set `tool_name: str`, e.g. `'Black'`. <br/><br/> You can optionally set `passthrough=True` if the user should be able to use the style `pants my-goal :: -- --arg1`, i.e. arguments after `--`. |
+
+## Using options in rules
+
+To use a `Subsystem` or `GoalSubsystem` in your rule, request it as a parameter. Then, use the class attributes to access the option value.
+
+```python
+from pants.engine.rules import rule
+...
+
+@rule
+async def demo(shellcheck: ShellcheckSubsystem) -> LintResults:
+    if shellcheck.skip:
+        return LintResults()
+    config_discovery = shellcheck.config_discovery
+    ...
+```
+
+:::note Name clashing
+When adding custom options, make sure their name does not start with an existing goal name. For instance, passing a boolean option named `check_foobar` as `--check-foobar` in the command line would fail since Pants would think you are trying to pass the `--foobar` flag in the built-in `check` goal scope.
+:::
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/processes.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/processes.mdx
new file mode 100644
index 000000000..eef43b061
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/processes.mdx
@@ -0,0 +1,139 @@
+---
+    title: Processes
+    sidebar_position: 4
+---
+
+How to safely run subprocesses in your plugin.
+
+---
+
+It is not safe to use `subprocess.run()` like you normally would because this can break caching and will not leverage Pants's parallelism. Instead, Pants has safe alternatives with `Process` and `InteractiveProcess`.
+
+## `Process`
+
+### Overview
+
+`Process` is similar to Python's `subprocess.Popen()`. The process will run in the background, and you can run multiple processes in parallel.
+
+```python
+from pants.engine.process import Process, ProcessResult
+from pants.engine.rules import Get, rule
+
+@rule
+async def demo(...) -> Foo:
+    result = await Get(
+        ProcessResult,
+        Process(
+            argv=["/bin/echo", "hello world"],
+            description="Demonstrate processes.",
+        )
+    )
+    logger.info(result.stdout.decode())
+    logger.info(result.stderr.decode())
+```
+
+This will return a `ProcessResult` object, which has the fields `stdout: bytes`, `stderr: bytes`, and `output_digest: Digest`.
+
+The process will run in a temporary directory and is hermetic, meaning that it cannot read any arbitrary file from your project and that it will be stripped of environment variables. This sandbox is important for reproducibility and to allow running your `Process` anywhere, such as through remote execution.
+
+:::note Debugging a `Process`
+Setting the [`--keep-sandboxes=always`](./tips-and-debugging.mdx#debugging-look-inside-the-chroot) flag will cause the sandboxes of `Process`es to be preserved and logged to the console for inspection.
+
+It can be very helpful while editing `Process` definitions!
+:::
+
+### Input Files
+
+To populate the temporary directory with files, use the parameter `input_digest: Digest`. It's common to use [`MergeDigests`](./file-system.mdx) to combine multiple `Digest`s into one single `input_digest`.
+
+### Environment Variables
+
+To set environment variables, use the parameter `env: Mapping[str, str]`. `@rule`s are prevented from accessing `os.environ` (it will always be empty) because this reduces reproducibility and breaks caching. Instead, either hardcode the value, add a [`Subsystem` option](./options-and-subsystems.mdx) for the environment variable in question, or request the `EnvironmentVars` type in your `@rule`.
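+
+For instance, a minimal sketch of hardcoding `env` directly on a `Process`; the variable name and value here are purely illustrative.
+
+```python
+from pants.engine.process import Process, ProcessResult
+from pants.engine.rules import Get, rule
+
+@rule
+async def env_demo(...) -> Foo:
+    result = await Get(
+        ProcessResult,
+        Process(
+            argv=["/bin/sh", "-c", "echo $GREETING"],
+            env={"GREETING": "hello"},  # hardcoded; could instead come from a Subsystem option
+            description="Demonstrate explicit environment variables.",
+        ),
+    )
+    ...
+```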
+ +The `EnvironmentVars` type contains a subset of the environment that Pants was run in, and is requested via an `EnvironmentVarsRequest` that lists the variables to consume. + +```python + +from pants.engine.env_vars import EnvironmentVarsRequest, EnvironmentVars +from pants.engine.rules import Get, rule + + +@rule +async def partial_env(...) -> Foo: + relevant_env_vars = await Get(EnvironmentVars, EnvironmentVarsRequest(["RELEVANT_VAR", "PATH"])) + ... +``` + +### Output Files + +To capture output files from the process, set `output_files: Iterable[str]` and/or `output_directories: Iterable[str]`. Then, you can use the `ProcessResult.output_digest` field to get a [`Digest`](./file-system.mdx) of the result. + +`output_directories` captures each listed directory and everything below it. + +### Timeouts + +To use a timeout, set the `timeout_seconds: int` field. Otherwise, the process will never time out, unless the user cancels Pants. + +:::note `Process` caching +By default, a `Process` will be cached to `~/.cache/pants/lmdb_store` if the `exit_code` is `0`. + +If it's not safe to cache your `Process` -- usually the case when you know that a process accesses files outside its sandbox -- you can change the cacheability of your `Process` using the `cache_scope` parameter: + +```python +from pants.engine.process import Process, ProcessCacheScope, ProcessResult + +@rule +async def demo(...) -> Foo: + process = Process( + argv=["/bin/echo", "hello world"], + description="Not persisted between Pants runs ('sessions').", + cache_scope=ProcessCacheScope.PER_SESSION, + ) + ... +``` + +`ProcessCacheScope` supports other options as well, including `ALWAYS`. +::: + +### FallibleProcessResult + +Normally, requesting a `ProcessResult` will raise an exception if the return code is not `0`. Requesting a `FallibleProcessResult` instead allows for any return code. + +Use `Get(FallibleProcessResult, Process)` if you expect that the process may fail, such as when running a linter or tests. + +Like `ProcessResult`, `FallibleProcessResult` has the attributes `stdout: bytes`, `stderr: bytes`, and `output_digest: Digest`, and it adds `exit_code: int`. + +## `InteractiveProcess` + +`InteractiveProcess` is similar to Python's `subprocess.run()`. The process will run in the foreground, optionally with access to the workspace. + +Because the process is potentially side-effecting, you may only run an `InteractiveProcess` in an [`@goal_rule`](./goal-rules.mdx) as an `Effect`. + +```python +from pants.engine.rules import Effect, goal_rule +from pants.engine.process import InteractiveProcess, InteractiveProcessResult + +@goal_rule +async def hello_world() -> HelloWorld: + # This demonstrates opening a Python REPL. + result = await Effect( + InteractiveProcessResult, + InteractiveProcess(argv=["/usr/bin/python"]), + ) + return HelloWorld(exit_code=result.exit_code) +``` + +You may either set the parameter `input_digest: Digest`, or you may set `run_in_workspace=True`. When running in the workspace, you will have access to any file in the build root. If the process can safely be restarted, set the `restartable=True` flag, which will allow the engine to interrupt and restart the process if its inputs have changed. + +To set environment variables, use the parameter `env: Mapping[str, str]`, like you would with `Process`. You can also set `hermetic_env=False` to inherit the environment variables from the parent `pants` process. + +The `Effect` will return an `InteractiveProcessResult`, which has a single field `exit_code: int`.
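+ +For example, here is a minimal sketch that combines `run_in_workspace=True` with an explicit `env` (the `OpenShell` goal type and the `EDITOR` value are hypothetical, introduced only for illustration): + +```python +from pants.engine.process import InteractiveProcess, InteractiveProcessResult +from pants.engine.rules import Effect, goal_rule + +@goal_rule +async def open_shell() -> OpenShell: # `OpenShell` is a hypothetical `Goal` subclass. + result = await Effect( + InteractiveProcessResult, + InteractiveProcess( + argv=["/bin/bash"], + # The environment is not inherited, so pass anything you need explicitly. + env={"EDITOR": "vim"}, + # Run in the build root rather than a temporary sandbox. + run_in_workspace=True, + ), + ) + return OpenShell(exit_code=result.exit_code) +```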
+ +### ProcessWithRetries + +A `Process` can be retried by wrapping it in a `ProcessWithRetries` and requesting a `ProcessResultWithRetries`. The last result, whether it succeeded or failed, is available via the `last` attribute. For example, the following will allow for up to 5 attempts at running `my_process`: + +```python +results = await Get(ProcessResultWithRetries, ProcessWithRetries(my_process, 5)) +last_result = results.last +``` diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/rules-and-the-target-api.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/rules-and-the-target-api.mdx new file mode 100644 index 000000000..346ed8d95 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/rules-and-the-target-api.mdx @@ -0,0 +1,417 @@ +--- + title: Rules and the Target API + sidebar_position: 6 +--- + +How to use the Target API in rules. + +--- + +Start by reading the [Concepts](../the-target-api/concepts.mdx) of the Target API. + +Note that the engine does not have special knowledge about `Target`s and `Field`s. To the engine, these are like any other types you'd use, and the `@rule`s that work with targets are like any other `@rule`. + +## How to read values from a `Target` + +As explained in [Concepts](../the-target-api/concepts.mdx), a `Target` is an addressable combination of fields, where each field gives some metadata about your code. + +To read a particular `Field` for a `Target`, look it up with the `Field`'s class in square brackets, like you would with a normal Python dictionary: + +```python +from pants.backend.python.target_types import PythonTestsTimeoutField + +timeout_field = target[PythonTestsTimeoutField] +print(timeout_field.value) +``` + +This will return an instance of the `Field` subclass you looked up, which has two properties: `alias: str` and `value`. The type of `value` depends on the particular field. For example, `PythonTestsTimeoutField` subclasses `IntField`, so `value` has an `int` type. + +Looking up a field with `tgt[MyField]` will fail if the field is not registered on the target type. + +If the `Field` might not be registered, and you're okay with using a default value, you can instead use the method `.get()`. When the `Field` is not registered, this will call the constructor for that `Field` with `raw_value=None`, which is equivalent to the user omitting the field from their BUILD file. + +```python +from pants.backend.python.target_types import PythonTestsTimeoutField + +timeout_field = target.get(PythonTestsTimeoutField) +print(timeout_field.value) +``` + +Often, you may want to see if a target type has a particular `Field` registered. This is useful for filtering targets. Use the methods `.has_field()` and `.has_fields()`. + +```python +from pants.backend.python.target_types import PythonTestsTimeoutField, PythonSourceField + +if target.has_field(PythonSourceField): + print("My plugin can work on this target.") + +if target.has_fields([PythonSourceField, PythonTestsTimeoutField]): + print("The target has both Python sources and a timeout field") +``` + +### `Field` subclasses + +As explained in [Concepts](../the-target-api/concepts.mdx), subclassing `Field`s is key to how the Target API works.
+ +The `Target` methods `[MyField]`, `.has_field()` and `.get()` understand when a `Field` is subclassed, as follows: + +```python +>>> docker_tgt.has_field(DockerSourceField) +True +>>> docker_tgt.has_field(SingleSourceField) +True +>>> python_test_tgt.has_field(DockerSourceField) +False +>>> python_test_tgt.has_field(SingleSourceField) +True +``` + +This allows you to express specifically which types of `Field`s you need to work with. For example, the `pants filedeps` goal only needs `SourcesField`, and works with any subclasses. Meanwhile, Black and isort need `PythonSourceField`, and work with any subclasses. Finally, the Pytest runner needs `PythonTestSourceField` (or any subclass). + +### A Target's `Address` + +Every target is identified by its `Address`, from `pants.engine.addresses`. Many types used in the Plugin API will use `Address` objects as fields, and it's also often useful to use the `Address` when writing the description for a `Process` you run. + +A `Target` has a field `address: Address`, e.g. `my_tgt.address`. + +You can also create an `Address` object directly, which is often useful in tests: + +- `project:tgt` -> `Address("project", target_name="tgt")` +- `project/` -> `Address("project")` +- `//:top-level` -> `Address("", target_name="top-level")` +- `project/app.py:tgt` -> `Address("project", target_name="tgt", relative_file_path="app.py")` +- `project:tgt#generated` -> `Address("project", target_name="tgt", generated_name="generated")` +- `project:tgt@shell=zsh` -> `Address("project", target_name="tgt", parameters={"shell": "zsh"})` + +You can use `str(address)` or `address.spec` to get the normalized string representation. `address.spec_path` will give the path to the parent directory of the target's original BUILD file. + +## How to resolve targets + +How do you get `Target`s in the first place in your plugin? + +As explained in [Goal rules](./goal-rules.mdx), to get all the targets specified on the command line by a user, you can request the type `Targets` as a parameter to your `@rule` or `@goal_rule`. From there, you can optionally filter out the targets you want, such as by using `target.has_field()`. + +```python +from pants.engine.target import Targets + +@rule +async def example(targets: Targets) -> Foo: + logger.info(f"User specified these targets: {[tgt.address.spec for tgt in targets]}") + ... +``` + +(You can also request `Addresses` (from `pants.engine.addresses`) as a parameter to your `@rule` if you only need the addresses specified on the command line by a user.) + +Use `AllTargets` to instead get all targets defined in the repository. + +```python +from pants.engine.target import AllTargets + +@rule +async def example(targets: AllTargets) -> Foo: + logger.info(f"All targets: {[tgt.address.spec for tgt in targets]}") + ... +``` + +For most [Common plugin tasks](../common-plugin-tasks/index.mdx), like adding a linter, Pants will have already filtered out the relevant targets for you and will pass you only the targets you care about. + +Given targets, you can find their direct and transitive dependencies. See the section "The `Dependencies` field" below. + +You can also find targets by writing your own `Spec`s, rather than using what the user provided. (The types come from `pants.base.specs`.) + +```python +# Inside an `@rule`, use `await Get` like this. +await Get( + Targets, + RawSpecs( + description_of_origin="my plugin", # Used in error messages for invalid specs. + # Each of these keyword args is optional.
+ address_literals=( + AddressLiteralSpec("my_dir", target_component="tgt"), # `my_dir:tgt` + AddressLiteralSpec("my_dir", target_component="tgt", generated_component="gen"), # `my_dir:tgt#gen` + AddressLiteralSpec("my_dir/f.ext", target_component="tgt"), # `my_dir/f.ext:tgt` + ), + file_literals=(FileLiteralSpec("my_dir/f.ext"),), # `my_dir/f.ext` + file_globs=(FileGlobSpec("my_dir/*.ext"),), # `my_dir/*.ext` + dir_literals=(DirLiteralSpec("my_dir"),), # `my_dir/` + dir_globs=(DirGlobSpec("my_dir"),), # `my_dir:` + recursive_globs=(RecursiveGlobSpec("my_dir"),), # `my_dir::` + ancestor_globs=(AncestorGlobSpec("my_dir"),), # i.e. `my_dir` and all ancestors + ) +) +``` + +Finally, you can look up an `Address` given a raw address string, using `AddressInput`. This is often useful to allow a user to refer to targets in [Options](./options-and-subsystems.mdx) and in `Field`s in your `Target`. For example, this mechanism is how the `dependencies` field works. This will error if the address does not exist. + +```python +from pants.engine.addresses import AddressInput, Address +from pants.engine.rules import Get, rule + +@rule +async def example(...) -> Foo: + address = await Get( + Address, + AddressInput, + AddressInput.parse("project/util:tgt", description_of_origin="my custom rule"), + ) +``` + +Given an `Address`, there are two ways to find its corresponding `Target`: + +```python +from pants.engine.addresses import AddressInput, Address, Addresses +from pants.engine.rules import Get, rule +from pants.engine.target import Targets, WrappedTarget, WrappedTargetRequest + +@rule +async def example(...) -> Foo: + address = Address("project/util", target_name="tgt") + + # Approach #1 + wrapped_target = await Get( + WrappedTarget, + WrappedTargetRequest(address, description_of_origin="my custom rule"), + ) + target = wrapped_target.target + + # Approach #2 + targets = await Get(Targets, Addresses([address])) + target = targets[0] +``` + +## The `Dependencies` field + +The `Dependencies` field is an `AsyncField`, which means that you must use the engine to hydrate its values, rather than using `Dependencies.value` like normal. + +```python +from pants.engine.target import Dependencies, DependenciesRequest, Targets +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + direct_deps = await Get(Targets, DependenciesRequest(target.get(Dependencies))) +``` + +`DependenciesRequest` takes a single argument: `field: Dependencies`. The return type `Targets` is a `Collection` of individual `Target` objects corresponding to each direct dependency of the original target. + +If you only need the addresses of a target's direct dependencies, you can use `Get(Addresses, DependenciesRequest(target.get(Dependencies)))` instead. (`Addresses` is defined in `pants.engine.addresses`.) + +### Transitive dependencies with `TransitiveTargets` + +If you need the transitive dependencies of a target—meaning both the direct dependencies and those dependencies' dependencies—use `Get(TransitiveTargets, TransitiveTargetsRequest)`. + +```python +from pants.engine.target import TransitiveTargets, TransitiveTargetsRequest +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([target.address])) +``` + +`TransitiveTargetsRequest` takes an iterable of `Address`es. + +`TransitiveTargets` has two fields: `roots: tuple[Target, ...]` and `dependencies: tuple[Target, ...]`.
`roots` stores the original input targets, and `dependencies` stores the transitive dependencies of those roots. `TransitiveTargets` also has a property `closure: FrozenOrderedSet[Target]` which merges the roots and dependencies. + +### Dependencies-like fields + +You may want to have a field on your target that's like the normal `dependencies` field, but you do something special with it. For example, Pants's [archive](https://github.com/pantsbuild/pants/blob/969c8dcba6eda0c939918b3bc5157ca45099b4d1/src/python/pants/core/target_types.py#L231-L257) target type has the fields `files` and `packages`, rather than `dependencies`, and it has special logic on those fields like running the equivalent of `pants package` on the `packages` field. + +Instead of subclassing `Dependencies`, you can subclass `SpecialCasedDependencies` from `pants.engine.target`. You must set the `alias` class property to the field's name. + +```python +from pants.engine.target import SpecialCasedDependencies, Target + +class PackagesField(SpecialCasedDependencies): + alias = "packages" + +class MyTarget(Target): + alias = "my_tgt" + core_fields = (..., PackagesField) +``` + +Then, to resolve the addresses, you can use `UnparsedAddressInputs`: + +```python +from pants.engine.addresses import Addresses, UnparsedAddressInputs +from pants.engine.target import Targets +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + addresses = await Get( + Addresses, + UnparsedAddressInputs, + my_tgt[PackagesField].to_unparsed_address_inputs() + ) + # Or, use this: + targets = await Get( + Targets, + UnparsedAddressInputs, + my_tgt[PackagesField].to_unparsed_address_inputs() + ) +``` + +Pants will include your special-cased dependencies with `pants dependencies`, `pants dependents`, and `pants --changed-since`, but the dependencies will not show up when using `await Get(Addresses, DependenciesRequest)`. + +## `SourcesField` + +`SourcesField` is an `AsyncField`, which means that you must use the engine to hydrate its values, rather than using `SourcesField.value` like normal. + +Some Pants targets like `python_test` have the field `source: str`, whereas others like `go_package` have the field `sources: list[str]`. These are represented by the fields `SingleSourceField` and `MultipleSourcesField`. When you're defining a new target type, you should choose which of these to subclass. However, when operating over sources generically in your `@rules`, you can use the common base class `SourcesField` so that your rule works with both formats. + +```python +from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + sources = await Get(HydratedSources, HydrateSourcesRequest(target[SourcesField])) +``` + +`HydrateSourcesRequest` expects a `SourcesField` object. This can be a subclass, such as `PythonSourceField` or `GoPackageSourcesField`. + +`HydratedSources` has a field called `snapshot: Snapshot`, which allows you to see what files were resolved by calling `hydrated_sources.snapshot.files` and to use the resulting [`Digest`](./file-system.mdx) in your plugin with `hydrated_sources.snapshot.digest`. + +Typically, you will want to use the higher-level `Get(SourceFiles, SourceFilesRequest)` utility instead of `Get(HydratedSources, HydrateSourcesRequest)`. This allows you to ergonomically hydrate multiple `SourcesField` objects in the same call, resulting in a single merged snapshot of all the input source fields.
+ +```python +from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest +from pants.engine.target import SourcesField +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + sources = await Get(SourceFiles, SourceFilesRequest([tgt1[SourcesField], tgt2[SourcesField]])) +``` + +`SourceFilesRequest` expects an iterable of `SourcesField` objects. `SourceFiles` has a field `snapshot: Snapshot` with the merged snapshot of all resolved input source fields. + +To convert a list of target addresses to existing source file names, you can request `HydratedSources` for every input target: + +```python +from itertools import chain +from pants.engine.addresses import Addresses +from pants.engine.collection import DeduplicatedCollection +from pants.engine.rules import Get, MultiGet, rule +from pants.engine.target import (HydratedSources, HydrateSourcesRequest, SourcesField, UnexpandedTargets) + + +class ProjectSources(DeduplicatedCollection[str]): + pass + + +@rule +async def addresses_to_source_files(addresses: Addresses) -> ProjectSources: + targets = await Get(UnexpandedTargets, Addresses, addresses) + all_sources = await MultiGet(Get(HydratedSources, HydrateSourcesRequest(tgt.get(SourcesField))) for tgt in targets) + return ProjectSources(chain.from_iterable(sources.snapshot.files for sources in all_sources)) +``` + +This is often useful when you need to pass target addresses to commands that are not Pants goals and would not +be able to interpret them properly. + +### Enabling codegen + +If you want your plugin to work with code generation, you must set the argument `enable_codegen=True`, along with `for_sources_types` listing the types of `SourcesField` you're expecting. + +```python +from pants.backend.python.target_types import PythonSourceField +from pants.core.target_types import ResourceSourceField +from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + sources = await Get( + HydratedSources, + HydrateSourcesRequest( + target.get(SourcesField), + enable_codegen=True, + for_sources_types=(PythonSourceField, ResourceSourceField) + ) + ) +``` + +If the provided `SourcesField` object is already a subclass of one of the `for_sources_types`—or it can be generated into one of those types—then the sources will be hydrated; otherwise, you'll get back a `HydratedSources` object with an empty snapshot and the field `sources_type=None`. + +`SourceFilesRequest` also accepts the `enable_codegen` and `for_sources_types` arguments. This will filter out any input `SourcesField`s that are not compatible with `for_sources_types`. + +```python +from pants.backend.python.target_types import PythonSourceField +from pants.core.target_types import ResourceSourceField +from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest +from pants.engine.target import SourcesField +from pants.engine.rules import Get, rule + +@rule +async def demo(...) -> Foo: + ... + sources = await Get( + SourceFiles, + SourceFilesRequest( + [target.get(SourcesField)], + enable_codegen=True, + for_sources_types=(PythonSourceField, ResourceSourceField) + ) + ) +``` + +### Stripping source roots + +You may sometimes want to remove source roots from files, i.e. go from `src/python/f.py` to `f.py`. This can make it easier to work with tools that would otherwise be confused by the source root.
+ +To strip source roots, use `Get(StrippedSourceFiles, SourceFiles)`. + +```python +from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest +from pants.core.util_rules.stripped_source_files import StrippedSourceFiles +from pants.engine.rules import Get, rule +from pants.engine.target import SourcesField + +@rule +async def demo(...) -> Foo: + ... + unstripped_sources = await Get(SourceFiles, SourceFilesRequest([target.get(SourcesField)])) + stripped_sources = await Get(StrippedSourceFiles, SourceFiles, unstripped_sources) +``` + +`StrippedSourceFiles` has a single field `snapshot: Snapshot`. + +You can also use `Get(StrippedSourceFiles, SourceFilesRequest)`, and the engine will automatically go from `SourceFilesRequest -> SourceFiles -> StrippedSourceFiles`. + +## `FieldSet`s + +A `FieldSet` is a way to specify which `Field`s your rule needs to use in a typed way that is understood by the engine. + +Normally, your rule should simply use `tgt.get()` and `tgt.has_field()` instead of a `FieldSet`. However, for several of the [Common plugin tasks](../common-plugin-tasks/index.mdx), you will instead need to create a `FieldSet` so that the combination of fields you use can be represented by a type understood by the engine. + +To create a `FieldSet`, create a new dataclass with `@dataclass(frozen=True)`. You will sometimes directly subclass `FieldSet`, but will often subclass something like `PackageFieldSet` or `TestFieldSet`. Refer to the instructions in [Common plugin tasks](../common-plugin-tasks/index.mdx). + +List every `Field` that your plugin will use as a field of your dataclass. The type hints you specify will be used by Pants to identify what `Field`s to use, e.g. `PythonSourceField` or `Dependencies`. + +Finally, set the class property `required_fields` as a tuple of the `Field`s that your plugin requires. Pants will use this to filter out irrelevant targets that your plugin does not know how to operate on. Often, this will be the same as the `Field`s that you listed as dataclass fields, but it does not need to be. If a target type does not have one of the dataclass's `Field`s registered, and that `Field` isn't in `required_fields`, then Pants will use a default value as if the user left it out of their BUILD file. + +```python +from dataclasses import dataclass + +from pants.engine.target import Dependencies, FieldSet + +@dataclass(frozen=True) +class ShellcheckFieldSet(FieldSet): + required_fields = (ShellSourceField,) + + source: ShellSourceField + # Because this is not in `required_fields`, this `FieldSet` will still match target types + # that don't have a `Dependencies` field registered. If it's not registered, then a + # default value for `Dependencies` will be used as if the user left off the field from + # their BUILD file. + dependencies: Dependencies +``` + +In your rule, you can access your `FieldSet` like a normal dataclass, e.g. `field_set.source` or `field_set.dependencies`. The object also has a field called `address: Address`. diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/testing-plugins.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/testing-plugins.mdx new file mode 100644 index 000000000..f3e7cc287 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/testing-plugins.mdx @@ -0,0 +1,600 @@ +--- + title: Testing plugins + sidebar_position: 9 +--- + +How to verify your plugin works.
+ +--- + +There are four main approaches to testing your plugin, ranging in scope from unit tests to integration tests. You may mix and match these approaches. + +All approaches use [Pytest](https://docs.pytest.org/en/latest/)-style tests, rather than [`unittest`](https://docs.python.org/3/library/unittest.html)-style tests. + +You must also install the distribution `pantsbuild.pants.testutil`. We recommend using the [`pants_requirements` target to do this](../overview.mdx). + +## Approach 1: normal unit tests + +Often, you can factor out normal Python functions from your plugin that do not use the Rules API. These helpers can be tested like you would test any other Python code. + +For example, some Pants rules take the type `InterpreterConstraints` as input. `InterpreterConstraints` has a factory method `merge_constraint_sets()` that we can test through a normal unit test. + +```python +def test_merge_interpreter_constraints() -> None: + # A & B => A & B + assert InterpreterConstraints.merge_constraint_sets( + [["CPython==2.7.*"], ["CPython==3.6.*"]] + ) == ["CPython==2.7.*,==3.6.*"] + + # A | B => A | B + assert InterpreterConstraints.merge_constraint_sets( + [["CPython==2.7.*", "CPython==3.6.*"]] + ) == ["CPython==2.7.*", "CPython==3.6.*"] +``` + +This approach can be especially useful for testing the Target API, such as testing custom validation you added to a `Field`. + +```python +def test_timeout_validation() -> None: + with pytest.raises(InvalidFieldException): + PythonTestTimeoutField(-100, Address("demo")) + with pytest.raises(InvalidFieldException): + PythonTestTimeoutField(0, Address("demo")) + assert PythonTestTimeoutField(5, Address("demo")).value == 5 +``` + +:::note How to create a `Target` in-memory +For Approaches #1 and #2, you will often want to pass a `Target` instance to your test, such as a `PythonTestTarget` instance. + +To create a `Target` instance, choose which subclass you want, then pass a dictionary of the values you want to use, followed by an `Address` object. The dictionary corresponds to what you'd put in the BUILD file; any values that you leave off will use their default values. + +The `Address` constructor's first argument is the path to the BUILD file; you can optionally define `target_name: str` if it is not the default `name`. + +For example, given this target definition for `project/app:tgt`: + +```python +python_test( + name="tgt", + source="app_test.py", + timeout=120, +) +``` + +We would write: + +```python +tgt = PythonTestTarget( + {"source": "app_test.py", "timeout": 120}, + Address("project/app", target_name="tgt"), +) +``` + +Note that we did not put `"name": "tgt"` in the dictionary. `name` is a special field that does not use the Target API. Instead, pass the `name` to the `target_name` argument in the `Address` constructor. + +For Approach #3, you should instead use `rule_runner.write_files()` to write a BUILD file, followed by `rule_runner.get_target()`. + +For Approach #4, you should use `setup_tmpdir()` to set up BUILD files. +::: + +## Approach 2: `run_rule_with_mocks()` (unit tests for rules) + +`run_rule_with_mocks()` will run your rule's logic, but with each argument to your `@rule` provided explicitly by you and with mocks for any `await Get`s. This means that the test is fully mocked; for example, `run_rule_with_mocks()` will not actually run a `Process`, nor will it perform any filesystem operations. This is useful when you want to test the inlined logic in your rule, but usually, you will want to use Approach #3.
+ +To use `run_rule_with_mocks`, pass the `@rule` as its first arg, then `rule_args=[arg1, arg2, ...]` in the same order as the arguments to the `@rule`. + +For example: + +```python +from pants.engine.rules import rule +from pants.testutil.rule_runner import run_rule_with_mocks + + +@rule +async def int_to_str(i: int) -> str: + return str(i) + + +def test_int_to_str() -> None: + result: str = run_rule_with_mocks(int_to_str, rule_args=[42], mock_gets=[]) + assert result == "42" +``` + +If your `@rule` has any `await Get`s or `await Effect`s, set the argument `mock_gets=[]` with `MockGet`/`MockEffect` objects corresponding to each of them. A `MockGet` takes three arguments: `output_type: type`, `input_types: tuple[type, ...]`, and `mock: Callable[..., OutputType]`, which is a function that takes an instance of each of the `input_types` and returns a single instance of the `output_type`. + +For example, given this contrived rule to find all targets with `sources` with a certain filename included (find a "needle in the haystack"): + +```python +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import PurePath + +from pants.engine.collection import Collection +from pants.engine.rules import Get, MultiGet, rule +from pants.engine.target import HydratedSources, HydrateSourcesRequest, SourcesField, Target + + +@dataclass(frozen=True) +class FindNeedle: + """A request to find all targets with a `sources` file matching the `needle_filename`.""" + targets: tuple[Target, ...] + needle_filename: str + + +# We want to return a sequence of found `Target` objects. Rather than +# returning `Targets`, we create a "newtype" specific to this rule. +class TargetsWithNeedle(Collection[Target]): + pass + + +@rule +async def find_needle_in_haystack(find_needle: FindNeedle) -> TargetsWithNeedle: + all_hydrated_sources = await MultiGet( + [Get(HydratedSources, HydrateSourcesRequest(tgt.get(SourcesField))) for tgt in find_needle.targets] + ) + return TargetsWithNeedle( + tgt + for tgt, hydrated_sources in zip(find_needle.targets, all_hydrated_sources) + if any(PurePath(fp).name == find_needle.needle_filename for fp in hydrated_sources.snapshot.files) + ) +``` + +We can write this test: + +```python +from pants.engine.addresses import Address +from pants.engine.fs import EMPTY_DIGEST, Snapshot +from pants.engine.target import HydratedSources, HydrateSourcesRequest, MultipleSourcesField, Target +from pants.testutil.rule_runner import MockGet, run_rule_with_mocks + +class MockTarget(Target): + alias = "mock_target" + core_fields = (MultipleSourcesField,) + + +def test_find_needle_in_haystack() -> None: + tgt1 = MockTarget({}, Address("", target_name="t1")) + tgt2 = MockTarget({}, Address("", target_name="t2")) + tgt3 = MockTarget({}, Address("", target_name="t3")) + find_needles_request = FindNeedle(targets=(tgt1, tgt2, tgt3), needle_filename="needle.txt") + + def mock_hydrate_sources(request: HydrateSourcesRequest) -> HydratedSources: + # Our rule only looks at `HydratedSources.snapshot.files`, so we mock all other fields. We + # include the file `needle.txt` for the target `:t2`, but no other targets.
+ files = ( + ("needle.txt", "foo.txt") + if request.field.address.target_name == "t2" + else ("foo.txt", "bar.txt") + ) + mock_snapshot = Snapshot(EMPTY_DIGEST, files=files, dirs=()) + return HydratedSources(mock_snapshot, filespec={}, sources_type=None) + + result: TargetsWithNeedle = run_rule_with_mocks( + find_needle_in_haystack, + rule_args=[find_needles_request], + mock_gets=[ + MockGet( + output_type=HydratedSources, + input_types=(HydrateSourcesRequest,), + mock=mock_hydrate_sources, + ) + ], + ) + assert list(result) == [tgt2] +``` + +### How to mock some common types + +See the note above about how to create a `Target` instance. + +If your rule takes a `Subsystem` or `GoalSubsystem` as an argument, you can use the utilities `create_subsystem` and `create_goal_subsystem` like below. Note that you must explicitly provide all options read by your `@rule`; the default values will not be used. + +```python +from pants.backend.python.subsystems.setup import PythonSetup +from pants.core.goals.fmt import FmtSubsystem +from pants.testutil.option_util import create_goal_subsystem, create_subsystem + +mock_subsystem = create_subsystem(PythonSetup, interpreter_constraints=["CPython==3.8.*"]) +mock_goal_subsystem = create_goal_subsystem(FmtSubsystem, sep="\n") +``` + +If your rule takes `Console` as an argument, you can use the `mock_console` context manager like this: + +```python +from pants.testutil.option_util import create_options_bootstrapper +from pants.testutil.rule_runner import mock_console, run_rule_with_mocks + +def test_with_console() -> None: + with mock_console(create_options_bootstrapper()) as (console, stdio_reader): + result: MyOutputType = run_rule_with_mocks(my_rule, [..., console]) + assert stdio_reader.get_stdout() == "expected stdout" + assert not stdio_reader.get_stderr() +``` + +If your rule takes `Workspace` as an argument, first create a `pants.testutil.rule_runner.RuleRunner()` instance in your individual test. Then, create a `Workspace` object with `Workspace(rule_runner.scheduler)`. + +## Approach 3: `RuleRunner` (integration tests for rules) + +`RuleRunner` allows you to run rules in an isolated environment, i.e. where you set up the rule graph and registered target types exactly how you want. `RuleRunner` will set up your rule graph and create a temporary build root. This is useful for integration tests that are more isolated and faster than Approach #4. + +After setting up your isolated environment, you can run `rule_runner.request(Output, [input1, input2])`, e.g. `rule_runner.request(SourceFiles, [SourceFilesRequest([sources_field])])` or `rule_runner.request(TargetsWithNeedle, [FindNeedle(targets, "needle.txt")])`. This will cause Pants to "call" the relevant `@rule` to get the output type. + +### Setting up the `RuleRunner` + +First, you must set up a `RuleRunner` instance and activate the rules and target types you'll use in your tests. Set the argument `target_types` with a list of the `Target` types used in your tests, and set `rules` with a list of all the rules used transitively. + +This means that you must register the rules you directly wrote, and also any rules that they depend on. Pants will automatically register some core rules for you, but leaves off most of them for better isolation of tests. If you're missing some rules, the rule graph will fail to be built. + +:::caution Confusing rule graph error? +It can be confusing figuring out what's wrong when setting up a `RuleRunner`.
We know the error messages are not ideal and are working on improving them. + +Please feel free to reach out on [Slack](/community/members) for help with figuring out how to get things working. +::: + +```python +from pants.backend.python.goals import pytest_runner +from pants.backend.python.goals.pytest_runner import PythonTestFieldSet +from pants.backend.python.util_rules import pex_from_targets +from pants.backend.python.target_types import PythonSourceTarget, PythonTestTarget +from pants.core.goals.test import TestResult +from pants.testutil.rule_runner import QueryRule, RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner( + target_types=[PythonSourceTarget, PythonTestTarget], + rules=[ + *pytest_runner.rules(), + *pex_from_targets.rules(), + QueryRule(TestResult, [PythonTestFieldSet]) + ], + ) +``` + +What's with the `QueryRule`? Normally, we don't use `QueryRule` because we're using the _asynchronous_ version of the Rules API, and Pants is able to parse your Python code to see how your rules are used. However, with tests, we are using the _synchronous_ version of the Rules API, so we need to give a hint to the engine about what requests we're going to make. Don't worry about filling in the `QueryRule` part yet. You'll add it later when writing `rule_runner.request()`. + +Each test should create its own distinct `RuleRunner` instance. This is important for isolation between each test. + +It's often convenient to define a [Pytest fixture](https://docs.pytest.org/en/stable/fixture.html) in each test file. This allows you to share a common `RuleRunner` setup, but get a new instance for each test. + +```python +import pytest + +from pants.testutil.rule_runner import RuleRunner + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner(target_types=[PythonSourceTarget], rules=[rule1, rule2]) + + +def test_example1(rule_runner: RuleRunner) -> None: + rule_runner.write_files(...) + ... + + +def test_example2(rule_runner: RuleRunner) -> None: + rule_runner.write_files(...) + ... +``` + +If you want multiple distinct `RuleRunner` setups in your file, you can define multiple Pytest fixtures. + +```python +import pytest + +from pants.testutil.rule_runner import RuleRunner + +@pytest.fixture +def first_rule_runner() -> RuleRunner: + return RuleRunner(rules=[rule1, rule2]) + + +def test_example1(first_rule_runner: RuleRunner) -> None: + first_rule_runner.write_files(...) + ... + + +def test_example2(first_rule_runner: RuleRunner) -> None: + first_rule_runner.write_files(...) + ... + + +@pytest.fixture +def second_rule_runner() -> RuleRunner: + return RuleRunner(rules=[rule3]) + + +def test_example3(second_rule_runner: RuleRunner) -> None: + second_rule_runner.write_files(...) + ... +``` + +### Setting up the content and BUILD files + +For most tests, you'll want to create files and BUILD files in your temporary build root. Use `rule_runner.write_files(files: dict[str, str])`. + +```python +from pants.testutil.rule_runner import RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner() + rule_runner.write_files( + { + "project/app.py": "print('hello world!')\n", + "project/BUILD": "python_sources()", + } + ) +``` + +This function will write the files to the correct location and also notify the engine that the files were created. + +You can then use `rule_runner.get_target()` to have Pants read the BUILD file and give you back the corresponding `Target`.
+ +```python +from textwrap import dedent + +from pants.engine.addresses import Address +from pants.testutil.rule_runner import RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner() + rule_runner.write_files({ + "project/BUILD": dedent( + """\ + python_source( + name="my_tgt", + source="f.py", + ) + """) + } + ) + tgt = rule_runner.get_target(Address("project", target_name="my_tgt")) +``` + +To read any files that were created, use `rule_runner.build_root` as the first part of the path to ensure that the correct directory is read. + +```python +from pathlib import Path + +from pants.testutil.rule_runner import RuleRunner + +def test_example() -> None: + rule_runner = RuleRunner() + rule_runner.write_files({"project/app.py": "print('hello world!')\n"}) + assert Path(rule_runner.build_root, "project/app.py").read_text() == "print('hello world!')\n" +``` + +### Setting options + +Often, you will want to set Pants options, such as activating a certain backend or setting a `--config` option. + +To set options, call `rule_runner.set_options()` with a list of the arguments, e.g. `rule_runner.set_options(["--pytest-version=pytest>=6.0"])`. Global options will need to be set when constructing the `rule_runner` using the `bootstrap_args` parameter. For example, `bootstrap_args=["--pants-ignore=['!/.normally_ignored/']"]` will allow a test to read from a normally ignored directory, which can be useful for reading config files. + +You can also set the keyword argument `env: dict[str, str]`. If the option starts with `PANTS_`, it will change which options Pants uses. You can include any arbitrary environment variable here; some rules use the parent Pants process to read arbitrary env vars, e.g. the `--test-extra-env-vars` option, so this allows you to mock the environment in your test. Alternatively, use the keyword argument `env_inherit: set[str]` to set the specified environment variables using the test runner's environment, which is useful to set values like `PATH` which may vary across machines. + +:::caution Calling `rule_runner.set_options()` will override any options that were previously set. +You will need to register everything you want in a single call. +::: + +### Running your rules + +Now that you have your `RuleRunner` set up, along with any options and the content/BUILD files for your test, you can test that your rules work correctly. + +Unlike Approach #2, you will not explicitly say which `@rule` you want to run. Instead, look at the return type of your `@rule`. Use `rule_runner.request(MyOutput, [input1, ...])`, where `MyOutput` is the return type. + +`rule_runner.request()` is equivalent to how you would normally use `await Get(MyOutput, Input1, input1_instance)` in a rule (see [Concepts](./concepts.mdx)). For example, if you would normally say `await Get(Digest, MergeDigests([digest1, digest2]))`, you'd instead say `rule_runner.request(Digest, [MergeDigests([digest1, digest2])])`. + +You will also need to add a `QueryRule` to your `RuleRunner` setup, which gives a hint to the engine for what requests you are going to make. The `QueryRule` takes the same form as your `rule_runner.request()`, except that the inputs are types, rather than instances of those types. + +For example, given this rule signature (from the above Approach #2 example): + +```python +@rule +async def find_needle_in_haystack(find_needle: FindNeedle) -> TargetsWithNeedle: + ...
+``` + +We could write this test: + +```python +from pants.core.target_types import FileTarget +from pants.testutil.rule_runner import QueryRule, RuleRunner + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[ + find_needle_in_haystack, + QueryRule(TargetsWithNeedle, [FindNeedle]), + ], + target_types=[FileTarget], + ) + + +def test_find_needle(rule_runner: RuleRunner) -> None: + # Set up the files and targets. + rule_runner.write_files( + { + "project/f1.txt": "", + "project/f2.txt": "", + "project/needle.txt": "", + "project/BUILD": dedent( + """\ + file(name="t1", source="f1.txt") + file(name="t2", source="f2.txt") + file(name="t3", source="needle.txt") + """ + ), + } + ) + tgt1 = rule_runner.get_target(Address("project", target_name="t1")) + tgt2 = rule_runner.get_target(Address("project", target_name="t2")) + tgt3 = rule_runner.get_target(Address("project", target_name="t3")) + + # Run our rule. + find_needle_request = FindNeedle((tgt1, tgt2, tgt3), needle_filename="needle.txt") + result = rule_runner.request(TargetsWithNeedle, [find_needle_request]) + assert list(result) == [tgt3] +``` + +Given this rule signature for running the linter Bandit: + +```python +@rule +async def bandit_lint( + request: BanditRequest, bandit: Bandit, python_setup: PythonSetup +) -> LintResults: + ... +``` + +We can write a test like this: + +```python +from pants.core.goals.lint import LintResult, LintResults +from pants.testutil.rule_runner import QueryRule, RuleRunner + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner( + rules=[ + *bandit_rules(), + QueryRule(LintResults, [BanditRequest]), + ], + target_types=[PythonSourceTarget] + ) + +def test_bandit(rule_runner: RuleRunner) -> None: + # Set up files and targets. + rule_runner.write_files(...) + ... + + # Run Bandit rule. + bandit_request = BanditRequest(...) + lint_results = rule_runner.request(LintResults, [bandit_request]) +``` + +Note that our `@rule` takes 3 parameters, but we only explicitly included `BanditRequest` in the inputs. This is possible because the engine knows how to compute all [Subsystems](./options-and-subsystems.mdx) based on the initial input to the graph. See [Concepts](./concepts.mdx). + +We are happy [to help](/community/members) figure out what rules to register, and what inputs to pass to `rule_runner.request()`. It can also help to [visualize the rule graph](./tips-and-debugging.mdx) when running your code in production. If you're missing an input that you need, the engine will error explaining that there is no way to compute your `OutputType`. + +### Testing `@goal_rule`s + +You can run `@goal_rule`s by using `rule_runner.run_goal_rule()`. The first argument is your `Goal` subclass, such as `Filedeps` or `Lint`. Usually, you will set `args: Iterable[str]` by giving the specs for the targets/files you want to run on, and sometimes passing options for your goal like `--transitive`. If you need to also set global options that do not apply to your specific goal, set `global_args: Iterable[str]`. + +`run_goal_rule()` will return a `GoalRuleResult` object, which has the fields `exit_code: int`, `stdout: str`, and `stderr: str`.
+ +For example, to test the `filedeps` goal: + +```python +import pytest + +from pants.backend.project_info import filedeps +from pants.backend.project_info.filedeps import Filedeps +from pants.engine.target import Dependencies, SingleSourceField, Target +from pants.testutil.rule_runner import RuleRunner + +# We create a mock `Target` for better isolation of our tests. We could have +# instead used a pre-defined target like `PythonSourceTarget` or `FileTarget`. +class MockTarget(Target): + alias = "mock_tgt" + core_fields = (SingleSourceField, Dependencies) + + +@pytest.fixture +def rule_runner() -> RuleRunner: + return RuleRunner(rules=filedeps.rules(), target_types=[MockTarget]) + + +def test_one_target_one_source(rule_runner: RuleRunner) -> None: + rule_runner.write_files( + { + "project/example.ext": "", + "project/BUILD": "mock_tgt(source='example.ext')" + } + ) + result = rule_runner.run_goal_rule(Filedeps, args=["project/example.ext"]) + assert result.stdout.splitlines() == ["project/BUILD", "project/example.ext"] +``` + +Unlike when testing normal `@rules`, you do not need to define a `QueryRule` when using `rule_runner.run_goal_rule()`. This is already set up for you. However, you do need to make sure that your `@goal_rule` and all the rules it depends on are registered with the `RuleRunner` instance. + +## Approach 4: `run_pants()` (integration tests for Pants) + +`pants_integration_test.py` provides functions that allow you to run a full Pants process as it would run on the command line. It's useful for acceptance testing and for testing things that are too difficult to test with Approach #3. + +You will typically use three functions: + +- `setup_tmpdir()`, which is a [context manager](https://book.pythontips.com/en/latest/context_managers.html) that sets up temporary files in the build root to simulate a real project. + - It takes a single parameter `files: Mapping[str, str]`, which is a dictionary of file paths to file content. + - All file paths will be prefixed by the temporary directory. + - File content can include `{tmpdir}`, which will get substituted with the actual temporary directory. + - It yields the temporary directory, relative to the test's current work directory. +- `run_pants()`, which runs Pants using the `list[str]` of arguments you pass, such as `["help"]`. + - It returns a `PantsResult` object, which has the fields `exit_code: int`, `stdout: str`, and `stderr: str`. + - It accepts several other optional arguments, including `config`, `extra_env`, and any keyword argument accepted by `subprocess.Popen()`. +- `PantsResult.assert_success()` or `PantsResult.assert_failure()`, which checks the exit code and prints a nice error message if unexpected. + +For example: + +```python +from pants.testutil.pants_integration_test import run_pants, setup_tmpdir + +def test_build_ignore_dependency() -> None: + sources = { + "dir1/BUILD": "files(sources=[])", + "dir2/BUILD": "files(sources=[], dependencies=['{tmpdir}/dir1'])", + } + with setup_tmpdir(sources) as tmpdir: + ignore_result = run_pants( + [f"--build-ignore={tmpdir}/dir1", "dependencies", f"{tmpdir}/dir2"] + ) + no_ignore_result = run_pants(["dependencies", f"{tmpdir}/dir2"]) + ignore_result.assert_failure() + assert f"{tmpdir}/dir1" not in ignore_result.stderr + no_ignore_result.assert_success() + assert f"{tmpdir}/dir1" in no_ignore_result.stdout + +``` + +`run_pants()` is hermetic by default, meaning that it will not read your `pants.toml`.
As a result, you often need to include the option `--backend-packages` in the arguments to `run_pants()`. You can alternatively set the argument `hermetic=False`, although we discourage this. + +For example: + +```python +from pants.testutil.pants_integration_test import run_pants, setup_tmpdir + + +def test_getting_list_of_files_from_a_target() -> None: + sources = { + "dir/BUILD": "files(sources=['subdir/*.txt'])", + "dir/subdir/file1.txt": "", + "dir/subdir/file2.txt": "", + } + with setup_tmpdir(sources) as tmpdir: + result = run_pants( + [ + "--backend-packages=['pants.backend.python']", + "filedeps", + f"{tmpdir}/dir:", + ], + ) + result.assert_success() + assert all( + filepath in result.stdout + for filepath in ( + f"{tmpdir}/dir/subdir/file1.txt", + f"{tmpdir}/dir/subdir/file2.txt", + ) + ) +``` + +To read any files that were created, use `get_buildroot()` as the first part of the path to ensure that the correct directory is read. + +```python +from pathlib import Path + +from pants.base.build_environment import get_buildroot +from pants.testutil.pants_integration_test import run_pants, setup_tmpdir + +def test_coverage_report() -> None: + with setup_tmpdir(...) as tmpdir: + run_pants(["--coverage-py-report=['json']", "test", ...]).assert_success() + coverage_report = Path(get_buildroot(), "dist", "coverage", "python", "report.json") + assert coverage_report.read_text() == "foo" +``` diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/tips-and-debugging.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/tips-and-debugging.mdx new file mode 100644 index 000000000..9644d9859 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/tips-and-debugging.mdx @@ -0,0 +1,143 @@ +--- + title: Tips and debugging + sidebar_position: 10 +--- + +--- + +:::note Reminder: ask for help +We would love to help you with your plugin. Please reach out through [Slack](/community/members). + +We also appreciate any feedback on the Rules API. If you find certain things confusing or are looking for additional mechanisms, please let us know. +::: + +## Tip: Use `MultiGet` for increased concurrency + +Every time your rule has `await`, Python will yield execution to the engine and not resume until the engine returns the result. So, you can improve concurrency by instead bundling multiple `Get` requests into a single `MultiGet`, which will allow each request to be resolved through a separate thread. + +Okay: + +```python +from pants.core.util_rules.source_files import SourceFilesRequest, SourceFiles +from pants.engine.fs import AddPrefix, Digest +from pants.engine.internals.selectors import Get + +@rule +async def demo(...) -> Foo: + new_digest = await Get(Digest, AddPrefix(original_digest, "new_prefix")) + source_files = await Get(SourceFiles, SourceFilesRequest(sources_fields)) +``` + +Better: + +```python +from pants.core.util_rules.source_files import SourceFilesRequest, SourceFiles +from pants.engine.fs import AddPrefix, Digest +from pants.engine.internals.selectors import Get, MultiGet + +@rule +async def demo(...) -> Foo: + new_digest, source_files = await MultiGet( + Get(Digest, AddPrefix(original_digest, "new_prefix")), + Get(SourceFiles, SourceFilesRequest(sources_fields)), + ) +``` + +## Tip: Add logging + +As explained in [Logging and dynamic output](./logging-and-dynamic-output.mdx), you can add logging to any `@rule` by using Python's `logging` module like you normally would.
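+ +For example, a minimal sketch: + +```python +import logging + +from pants.engine.rules import rule + +logger = logging.getLogger(__name__) + + +@rule +async def demo(...) -> Foo: + logger.info("This shows up in the console by default.") + logger.debug("This shows up when running with `--level=debug`.") + ... +```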
+ +## FYI: Caching semantics + +There are two layers to Pants caching: in-memory memoization and caching written to disk via the [LMDB store](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database). + +Pants will write to the LMDB store—usually at `~/.cache/pants/lmdb_store`—for any `Process` execution and when ["digesting" files](./file-system.mdx), such as downloading a file or reading from the filesystem. The cache is based on inputs; for example, if the input `Process` is identical to a previous run, then the cache will use the corresponding cached `ProcessResult`. Writing to and reading from the LMDB store is very fast, and reads are concurrent. The cache will be occasionally garbage collected by Pantsd, and users may also use `--no-local-cache` or manually delete `~/.cache/pants/lmdb_store`. + +Pants will also memoize in-memory the evaluation of all `@rule`s. This means that once a rule runs, if the inputs are identical to a prior run, the cache will be used instead of re-evaluating the rule. If the user uses Pantsd (the Pants daemon), this memoization will persist across distinct Pants runs, until the daemon is shut down or restarted. This memoization happens automatically. + +## Debugging: Look inside the chroot + +When Pants runs most processes, it runs in a `chroot` (temporary directory). Usually, this gets cleaned up after the `Process` finishes. You can instead pass `--keep-sandboxes=always` to keep those directories for all processes, or `--keep-sandboxes=on_failure` to keep those directories for only processes which have failed. + +Pants will log the path to the chroot, e.g.: + +``` +▶ pants --keep-sandboxes=always test src/python/pants/util/strutil_test.py +... +12:29:45.08 [INFO] preserving local process execution dir `"/private/var/folders/sx/pdpbqz4x5cscn9hhfpbsbqvm0000gn/T/process-executionN9Kdk0"` for "Test binary /Users/pantsbuild/.pyenv/shims/python3." +... +``` + +Inside the preserved sandbox there will be a `__run.sh` script which can be used to inspect or re-run the `Process` precisely as Pants did when creating the sandbox. + +## Debugging: Visualize the rule graph + +You can create a visual representation of the rule graph through the option `--engine-visualize-to=$dir_path $goal`. This will create the files `rule_graph.dot`, `rule_graph.$goal.dot`, and `graph.000.dot`, which are [`.dot` files](https://en.wikipedia.org/wiki/DOT_%28graph_description_language%29). `rule_graph.$goal.dot` contains only the rules used during your run, `rule_graph.dot` contains all rules, and `graph.000.dot` contains the actual runtime results of all rules (it can be quite large!). + +To open up the `.dot` file, you can install the [`graphviz`](https://graphviz.org) program, then run `dot -Tpdf -O $destination`. We recommend opening up the PDF in Google Chrome or macOS Preview, which do a good job of zooming in on large PDF files. + +## Debugging rule graph issues + +Rule graph issues can be particularly hard to figure out - the error messages are noisy and do not make it clear how to fix the issue. We plan to improve this. + +We encourage you to reach out in #plugins on [Slack](/community/getting-help) for help. + +Often the best way to debug a rule graph issue is to isolate where the problem comes from by commenting out code until the graph compiles. The rule graph is formed solely by looking at the types in the signature of your `@rule` and in any `Get` statements - none of the rest of your rules matter. To check if the rule graph can be built, simply run `pants --version`.
We recommend starting by determining which backend—or combination of backends—is causing issues. You can run the below script to find this. Once you find the smallest offending combination, focus on fixing that first by removing all irrelevant backends from `backend_packages` in `pants.toml`—this reduces the surface area of where issues can come from. (You may need to use the option `--no-verify-config` so that Pants doesn't complain about unrecognized options.) + +```python title="find_bad_backend_combos.py" +#!/usr/bin/env python3 + +import itertools +import logging +import subprocess + +BACKENDS = { + # Replace this with the backend_packages from your pants.toml. + # + # Warning: it's easy to get a combinatorial explosion if you + # use lots of backends. In that case, try using a subset of your + # backends and see if you can still get a rule graph failure. + "pants.backend.python", + "pants.backend.shell", +} + + +def backends_load(backends) -> bool: + logging.info(f"Testing {backends}") + result = subprocess.run( + ["pants", f"--backend-packages={repr(list(backends))}", "--version"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + loads = result.returncode == 0 + if not loads: + logging.error(f"Failed! {backends}") + return loads + + +def main() -> None: + all_combos = itertools.chain.from_iterable( + itertools.combinations(BACKENDS, r=r) for r in range(1, len(BACKENDS) + 1) + ) + bad_combos = {repr(combo) for combo in all_combos if not backends_load(combo)} + print("----\nBad combos:\n" + "\n".join(bad_combos)) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") + main() +``` + +Once you've identified the smallest combination of backends that fails, and you have updated `pants.toml`, you can try isolating which rules are problematic by commenting out `Get`s and the parameters to `@rule`s. + +Some common sources of rule graph failures: + +- Dependent rules are not registered. + - This is especially common when only one backend is activated. We recommend trying to get each backend to be valid regardless of what other backends are activated. Use the above script to see if this is happening. + - To fix this, see which types you're using in your `@rule` signatures and `Get`s. If they come from another backend, activate their rules. For example, if you use `await Get(Pex, PexRequest)`, you should activate `pants.backend.python.util_rules.pex.rules()` in your `register.py`. +- Not "newtyping". + - It's possible and sometimes desirable to use types already defined in your plugin or core Pants. For example, you might want to define a new rule that goes from `MyCustomClass -> Process`. However, sometimes this makes the rule graph more complicated than it needs to be. + - It's often helpful to create a result and request type for each of your `@rule`s, e.g. `MyPlugin` and `MyPluginRequest`. + - See [Valid types](./concepts.mdx#valid-types) for more. diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/union-rules-advanced.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/union-rules-advanced.mdx new file mode 100644 index 000000000..635d62972 --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-rules-api/union-rules-advanced.mdx @@ -0,0 +1,84 @@ +--- + title: Union rules (advanced) + sidebar_position: 7 +--- + +Polymorphism for the engine.
+
+---
+
+Union rules solve the same problem that polymorphism solves in general: how to write generic code that operates on types not known about at the time of writing.
+
+For example, Pants has many generic goals like `lint` and `test`. Those `@goal_rule` definitions cannot know about every concrete linter or test implementation ahead of time.
+
+Unions allow a specific linter to be registered with `UnionRule(LintTargetsRequest, ShellcheckRequest)`, and then allow `lint.py` to access its type:
+
+```python title="pants/core/goals/lint.py"
+from pants.engine.rules import Get, MultiGet, goal_rule
+from pants.engine.target import Targets
+from pants.engine.unions import UnionMembership
+
+...
+
+@goal_rule
+async def lint(..., targets: Targets, union_membership: UnionMembership) -> Lint:
+    lint_request_types = union_membership[LintTargetsRequest]
+    concrete_requests = [
+        request_type(
+            request_type.field_set_type.create(target)
+            for target in targets
+            if request_type.field_set_type.is_valid(target)
+        )
+        for request_type in lint_request_types
+    ]
+    results = await MultiGet(
+        Get(LintResults, LintTargetsRequest, concrete_request)
+        for concrete_request in concrete_requests
+    )
+```
+
+```python title="pants-plugins/bash/shellcheck.py"
+from pants.core.goals.lint import LintTargetsRequest
+
+
+class ShellcheckRequest(LintTargetsRequest):
+    ...
+
+
+...
+
+
+def rules():
+    return [*ShellcheckRequest.rules()]
+```
+
+This example will find all registered linter implementations by looking up `union_membership[LintTargetsRequest]`, which returns a tuple of all `LintTargetsRequest` types that were registered with a `UnionRule`, such as `ShellcheckRequest` and `Flake8Request`.
+
+## How to create a new Union
+
+To set up a new union, create a class for the union "base". Typically, this should be an [abstract class](https://docs.python.org/3/library/abc.html) that is subclassed by the union members, but it does not need to be. Mark the class with `@union`.
+
+```python
+from abc import ABC, abstractmethod
+
+from pants.engine.unions import union
+
+@union
+class Vehicle(ABC):
+    @abstractmethod
+    def num_wheels(self) -> int:
+        pass
+```
+
+Then, register every implementation of your union with `UnionRule`:
+
+```python
+from pants.engine.unions import UnionRule
+
+class Truck(Vehicle):
+    def num_wheels(self) -> int:
+        return 4
+
+def rules():
+    return [UnionRule(Vehicle, Truck)]
+```
+
+Now, your rules can request `UnionMembership` as a parameter in the `@rule`, and then look up `union_membership[Vehicle]` to get a tuple of all relevant types that are registered via `UnionRule`.
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/_category_.json b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/_category_.json
new file mode 100644
index 000000000..0a605a8a8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/_category_.json
@@ -0,0 +1,4 @@
+{
+  "label": "The Target API",
+  "position": 3
+}
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/concepts.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/concepts.mdx
new file mode 100644
index 000000000..e99a264eb
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/concepts.mdx
@@ -0,0 +1,236 @@
+---
+ title: Concepts
+ sidebar_position: 0
+---
+
+The core concepts of Targets and Fields.
+
+---
+
+The Target API defines how you interact with targets in your plugin.
For example, you would use the Target API to read the `source` / `sources` field of a target to know which files to run on.
+
+The Target API can also be used to add new target types—such as adding support for a new language. Additionally, the Target API can be used to extend existing target types and even declare synthetic targets as if they came from a BUILD file.
+
+## Targets and Fields - the core building blocks
+
+### Definition of _target_
+
+As described in [Targets and BUILD files](../../using-pants/key-concepts/targets-and-build-files.mdx), a _target_ is an _addressable_ set of metadata describing some of your code.
+
+For example, this BUILD file defines a `PythonTestTarget` target with `Address("project", target_name="app_test")`.
+
+```python title="project/BUILD"
+python_test(
+    name="app_test",
+    source="app_test.py",
+    timeout=120,
+)
+```
+
+### Definition of _field_
+
+A _field_ is a single value of metadata belonging to a target, such as `source` and `timeout` above. (`name` is a special field used to create the target's `Address`.)
+
+Each field has a Python class that defines its BUILD file alias, data type, and optional settings like default values. For example:
+
+```python title="example_fields.py"
+from pants.engine.target import IntField
+
+class PythonTestTimeoutField(IntField):
+    alias = "timeout"
+    default = 60
+```
+
+### Target == alias + combination of fields
+
+Alternatively, you can think of a target as simply an alias and a combination of fields:
+
+```python title="plugin_target_types.py"
+from pants.engine.target import Dependencies, SingleSourceField, Target, Tags
+
+class CustomTarget(Target):
+    alias = "custom_target"
+    core_fields = (SingleSourceField, Dependencies, Tags)
+```
+
+A target's fields should make sense together. For example, it does not make sense for a `python_source` target to have a `haskell_version` field.
+
+Any unrecognized fields will cause an exception when used in a BUILD file.
+
+### Fields may be reused
+
+Because fields are stand-alone Python classes, the same field definition may be reused across multiple different target types.
+
+For example, many target types have the `source` field.
+
+```python title="BUILD"
+resource(
+    name="logo",
+    source="logo.png",
+)
+
+dockerfile(
+    name="docker",
+    source="Dockerfile",
+)
+```
+
+This gives you reuse of code ([DRY](https://en.wikipedia.org/wiki/Don't_repeat_yourself)) and is important for your plugin to work with multiple different target types, as explained below.
+
+## A Field-Driven API
+
+Idiomatic Pants plugins do not care about specific target types; they only care that the target type has the right combination of field types that the plugin needs to operate.
+
+For example, the Python formatter Black does not actually care whether you have a `python_source`, `python_test`, or `custom_target` target; all that it cares about is that your target type has the field `PythonSourceField`.
+
+Targets are only [used by the Rules API](../the-rules-api/rules-and-the-target-api.mdx) to get access to the underlying fields through the methods `.has_field()` and `.get()`:
+
+```python
+if target.has_field(PythonSourceField):
+    print("My plugin can work on this target.")
+
+timeout_field = target.get(PythonTestTimeoutField)
+print(timeout_field.value)
+```
+
+This means that when creating new target types, the fields you choose for your target will determine the functionality it has.
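+
+For example, a helper that decides which targets a plugin can operate on needs only field types, never target aliases. A minimal sketch (the `describe_target` helper is illustrative rather than a Pants API, and it reuses `PythonTestTimeoutField` from `example_fields.py` above):
+
+```python
+from pants.backend.python.target_types import PythonSourceField
+
+def describe_target(target) -> str:
+    # Any target type with a Python source qualifies, regardless of its alias.
+    if not target.has_field(PythonSourceField):
+        return f"{target.address}: skipped (no Python source)"
+    # `.get()` falls back to the field's default (60 above) when the BUILD file omits it.
+    timeout = target.get(PythonTestTimeoutField).value
+    return f"{target.address}: source={target[PythonSourceField].value}, timeout={timeout}"
+```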
+
+## Customizing fields through subclassing
+
+Often, you may like how a field behaves, but want to make some tweaks. For example, you may want to give a default value to the `SingleSourceField` field.
+
+To modify an existing field, simply subclass it.
+
+```python
+from pants.engine.target import SingleSourceField
+
+class DockerSourceField(SingleSourceField):
+    default = "Dockerfile"
+```
+
+The `Target` methods `.has_field()` and `.get()` understand this subclass relationship, as follows:
+
+```python
+>>> docker_tgt.has_field(DockerSourceField)
+True
+>>> docker_tgt.has_field(SingleSourceField)
+True
+>>> python_test_tgt.has_field(DockerSourceField)
+False
+>>> python_test_tgt.has_field(SingleSourceField)
+True
+```
+
+This subclass mechanism is key to how the Target API behaves:
+
+- You can use subclasses of fields—along with `Target.has_field()`—to filter out irrelevant targets. For example, the Black formatter doesn't work with any plain `SourcesField` field; it needs `PythonSourceField`. The Python test runner is even more specific: it needs `PythonTestSourceField`.
+- You can create custom fields and custom target types that still work with pre-existing functionality. For example, you can subclass `PythonSourceField` to create `DjangoSourceField`, and the Black formatter will still be able to operate on your target.
+
+## Synthetic Targets API
+
+Normally, targets are declared in BUILD files to provide metadata about the project's sources, artifacts, etc. Occasionally, some project metadata is not well served by being declared explicitly in a BUILD file - for instance, when the metadata itself is inferred from other sources of information. For these cases, the Target API supports declaring synthetic targets: targets that are not declared in a BUILD file on disk, but instead come from a plugin's rule.
+
+### Example
+
+To declare synthetic targets from a plugin, first subclass the `SyntheticTargetsRequest` union type and register it as a union member with `UnionRule(SyntheticTargetsRequest, SubclassedType)`. Second, add a rule that takes this union member type as input and returns `SyntheticAddressMaps`.
+
+```python
+from dataclasses import dataclass
+from pants.engine.internals.synthetic_targets import (
+    SyntheticAddressMaps,
+    SyntheticTargetsRequest,
+)
+from pants.engine.internals.target_adaptor import TargetAdaptor
+from pants.engine.unions import UnionRule
+from pants.engine.rules import collect_rules, rule
+
+
+@dataclass(frozen=True)
+class SyntheticExampleTargetsRequest(SyntheticTargetsRequest):
+    pass
+
+
+@rule
+async def example_synthetic_targets(request: SyntheticExampleTargetsRequest) -> SyntheticAddressMaps:
+    return SyntheticAddressMaps.for_targets_request(
+        request,
+        [
+            (
+                "BUILD.synthetic-example",
+                (
+                    TargetAdaptor("", "", **target_field_values),
+                    ...
+                ),
+            ),
+            ...
+        ]
+    )
+
+
+def rules():
+    return (
+        *collect_rules(),
+        UnionRule(SyntheticTargetsRequest, SyntheticExampleTargetsRequest),
+        ...
+    )
+```
+
+### Register synthetic targets per directory or globally
+
+Depending on the source information for the synthetic targets, it may make sense to either register them with a request per directory or for all directories at once with a single request.
+
+If the source information is derived from parsing files from the project source tree, then go with the per-directory request style (which is also the default mode of operation), whereas if the information is known up front without consulting the project sources, or otherwise does not depend on which directory is being parsed for BUILD files, it may be more performant to return all synthetic targets in a single request.
+
+The mode of operation is declared per union member (i.e. on the subclass of the `SyntheticTargetsRequest` class) by providing a default value to the `path` field:
+
+```python
+@dataclass(frozen=True)
+class SyntheticExamplePerDirectoryTargetsRequest(SyntheticTargetsRequest):
+    path: str = SyntheticTargetsRequest.REQUEST_TARGETS_PER_DIRECTORY
+
+@dataclass(frozen=True)
+class SyntheticExampleAllTargetsAtOnceRequest(SyntheticTargetsRequest):
+    path: str = SyntheticTargetsRequest.SINGLE_REQUEST_FOR_ALL_TARGETS
+```
+
+Any other default value for `path` should be considered invalid and yields undefined behaviour (that is, it may change without notice in future versions of Pants).
+
+During rule execution, the `path` field of the `request` instance will hold the path currently being parsed in the per-directory mode of operation; otherwise it will be `SyntheticTargetsRequest.SINGLE_REQUEST_FOR_ALL_TARGETS`.
+
+## Adding information to pants peek output
+
+Sometimes you may have metadata for a target that cannot be encompassed by a field, e.g. because it depends on the content of a file or requires some rule resolution to calculate.
+
+You can attach this data to the output of `pants peek` by subclassing the `HasAdditionalTargetDataFieldSet` union type and registering it as a union member with `UnionRule(HasAdditionalTargetDataFieldSet, SubclassedType)`. Then, implement a rule that takes `SubclassedType` as input and returns an `AdditionalTargetData` object.
+
+```python
+from dataclasses import dataclass
+from pants.backend.project_info.peek import AdditionalTargetData, HasAdditionalTargetDataFieldSet
+from pants.engine.unions import UnionRule
+from pants.engine.rules import collect_rules, rule
+
+
+@dataclass(frozen=True)
+class MyCustomTargetFieldSet(HasAdditionalTargetDataFieldSet):
+    ...
+
+
+@rule
+async def attach_custom_target_data(field_set: MyCustomTargetFieldSet) -> AdditionalTargetData:
+    # You can return any json-serializable type for the second field
+    return AdditionalTargetData("my_custom_target_data", {"hello": "world"})
+
+
+def rules():
+    return (*collect_rules(), UnionRule(HasAdditionalTargetDataFieldSet, MyCustomTargetFieldSet))
+```
+
+Then, if you run `pants peek --include-additional-info my/custom:target` you will see an `additional_info` field which will contain the following JSON object:
+
+```json
+{
+  "my_custom_target_data": {
+    "hello": "world"
+  }
+}
+```
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-fields.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-fields.mdx
new file mode 100644
index 000000000..074284b35
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-fields.mdx
@@ -0,0 +1,278 @@
+---
+ title: Creating new fields
+ sidebar_position: 1
+---
+
+How to create a Field, including the available templates.
+
+---
+
+Before creating a new target type, the first step is to create all of the target type's fields.
+
+## Defining a Field
+
+To define a new field:
+
+1. Subclass one of the below field templates, like `IntField` or `BoolField`; or, subclass an existing field, like `SingleSourceField`.
+2. Set the class property `alias`. This is the symbol that people use in BUILD files.
+3. Set the class property `help`. This is used by `pants help`.
+
+For example:
+
+```python
+from pants.engine.target import IntField
+
+class TimeoutField(IntField):
+    alias = "timeout"
+    help = "How long to run until timing out."
+```
+
+### `default`
+
+The `default` is used whenever a user does not explicitly specify the field in a BUILD file.
+
+```python
+class TimeoutField(IntField):
+    alias = "timeout"
+    help = "..."
+    default = 60
+```
+
+If you don't override this property, `default` will be set to `None`, which signals that the value was undefined.
+
+### `required`
+
+Set `required = True` to require explicitly defining the field.
+
+```python
+class TimeoutField(IntField):
+    alias = "timeout"
+    help = "..."
+    required = True
+```
+
+If you set `required = True`, the `default` will be ignored.
+
+:::note Reminder: subclass existing fields to modify their behavior
+If you want to change how an existing field behaves, you should subclass the original field. For example, to change a default value, subclass the original field and override `default`. When doing this, you only need to override the properties you want to change.
+
+See [Concepts](./concepts.mdx) for how subclassing plays a key role in the Target API.
+:::
+
+## Adding custom validation
+
+The field templates will validate that users are using the correct _types_, like ints or strings. But you may want to add additional validation, such as banning certain values.
+
+To do this, override the classmethod `compute_value`:
+
+```python
+from typing import Optional
+
+from pants.engine.addresses import Address
+from pants.engine.target import IntField, InvalidFieldException
+
+class UploadTimeout(IntField):
+    alias = "timeout"
+    help = "..."
+    default = 30
+
+    @classmethod
+    def compute_value(
+        cls, raw_value: Optional[int], *, address: Address
+    ) -> int:
+        value_or_default = super().compute_value(raw_value, address=address)
+        if value_or_default < 10 or value_or_default > 300:
+            raise InvalidFieldException(
+                f"The {repr(cls.alias)} field in target {address} must "
+                f"be between 10 and 300, but was {value_or_default}."
+            )
+        return value_or_default
+```
+
+Be careful to use the same type hint for the parameter `raw_value` as used in the template. This is used to generate the documentation in `pants help my_target`.
+
+:::caution Cannot use new type hint syntax with `compute_value()` and `default`
+You cannot use the [new type hint syntax](https://mypy-lang.blogspot.com/2021/01/) with the Target API, i.e. `list[str] | None` instead of `Optional[List[str]]`. The new syntax breaks `pants help`.
+
+Otherwise, it's safe to use the new syntax when writing plugins.
+:::
+
+## Available templates
+
+All templates are defined in `pants.engine.target`.
+
+### `BoolField`
+
+Use this when the option is a boolean toggle. You must either set `required = True` or set `default` to `False` or `True`.
+
+### `TriBoolField`
+
+This is like `BoolField`, but allows you to use `None` to represent a third state. You do not have to set `required = True` or `default`, as the field template defaults to `None` already.
+
+### `IntField`
+
+Use this when you expect an integer. This will reject floats.
+
+### `FloatField`
+
+Use this when you expect a float. This will reject integers.
+
+### `StringField`
+
+Use this when you expect a single string.
+
+:::note `StringField` can be like an enum
+You can set the class property `valid_choices` to limit what strings are acceptable. This class property can either be a tuple of strings or an `enum.Enum`.
+
+For example:
+
+```python
+class LeafyGreensField(StringField):
+    alias = "leafy_greens"
+    valid_choices = ("kale", "spinach", "chard")
+```
+
+or:
+
+```python
+class LeafyGreens(Enum):
+    KALE = "kale"
+    SPINACH = "spinach"
+    CHARD = "chard"
+
+class LeafyGreensField(StringField):
+    alias = "leafy_greens"
+    valid_choices = LeafyGreens
+```
+
+:::
+
+### `StringSequenceField`
+
+Use this when you expect 0-n strings.
+
+The user may use a tuple, set, or list in their BUILD file; Pants will convert the value to an immutable tuple.
+
+### `SequenceField`
+
+Use this when you expect a homogeneous sequence of values other than strings, such as a sequence of integers.
+
+The user may use a tuple, set, or list in their BUILD file; Pants will convert the value to an immutable tuple.
+
+You must set the class properties `expected_element_type` and `expected_type_description`. You should also change the type signature of the classmethod `compute_value` so that Pants can show the correct types when running `pants help $target_type`.
+
+```python
+from typing import Iterable, Optional, Tuple
+
+from pants.engine.addresses import Address
+from pants.engine.target import SequenceField
+
+class ExampleIntSequence(SequenceField):
+    alias = "int_sequence"
+    expected_element_type = int
+    expected_type_description = "a sequence of integers"
+
+    @classmethod
+    def compute_value(
+        cls, raw_value: Optional[Iterable[int]], *, address: Address
+    ) -> Optional[Tuple[int, ...]]:
+        return super().compute_value(raw_value, address=address)
+```
+
+### `DictStringToStringField`
+
+Use this when you expect a dictionary of string keys with string values, such as `{"k": "v"}`.
+
+The user may use a normal Python dictionary in their BUILD file. Pants will convert this into an instance of `pants.util.frozendict.FrozenDict`, which is a lightweight wrapper around the native `dict` type that simply removes all mechanisms to mutate the dictionary.
+
+### `DictStringToStringSequenceField`
+
+Use this when you expect a dictionary of string keys whose values are sequences of strings, such as `{"k": ["v1", "v2"]}`.
+
+The user may use a normal Python dictionary in their BUILD file, and they may use a tuple, set, or list for the dictionary values. Pants will convert this into an instance of `pants.util.frozendict.FrozenDict`, which is a lightweight wrapper around the native `dict` type that simply removes all mechanisms to mutate the dictionary. Pants will also convert the values into immutable tuples, resulting in a type hint of `FrozenDict[str, Tuple[str, ...]]`.
+
+### `Field` - the fallback class
+
+If none of these templates work for you, you can subclass `Field`, which is the superclass of all of these templates.
+
+You must give a type hint for `value`, define the classmethod `compute_value`, and either set `required = True` or define the class property `default`.
+
+For example, we could define a `StringField` explicitly like this:
+
+```python
+from typing import Optional
+
+from pants.engine.addresses import Address
+from pants.engine.target import Field, InvalidFieldTypeException
+
+
+class VersionField(Field):
+    alias = "version"
+    value: Optional[str]
+    default = None
+    help = "The version to build with."
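+    # Note: plain `Field` does no type validation of its own, so
+    # `compute_value` below checks `isinstance` itself.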
+
+    @classmethod
+    def compute_value(
+        cls, raw_value: Optional[str], *, address: Address
+    ) -> Optional[str]:
+        value_or_default = super().compute_value(raw_value, address=address)
+        if value_or_default is not None and not isinstance(value_or_default, str):
+            # A helper exception to generate nice error messages
+            # automatically. You can use another exception if you prefer.
+            raise InvalidFieldTypeException(
+                address, cls.alias, raw_value, expected_type="a string",
+            )
+        return value_or_default
+```
+
+:::tip Asking for help
+Have a tricky field you're trying to write? We would love to help! See [Getting Help](/community/members).
+:::
+
+## Examples
+
+```python title="plugins/target_types.py"
+from typing import Optional
+
+from pants.engine.addresses import Address
+from pants.engine.target import (
+    BoolField,
+    IntField,
+    InvalidFieldException,
+    MultipleSourcesField,
+    StringField,
+)
+
+
+class FortranVersion(StringField):
+    alias = "fortran_version"
+    required = True
+    valid_choices = ("f95", "f98")
+    help = "Which version of Fortran should this use?"
+
+
+class CompressToggle(BoolField):
+    alias = "compress"
+    default = False
+    help = "Whether to compress the generated file."
+
+
+class UploadTimeout(IntField):
+    alias = "upload_timeout"
+    default = 100
+    help = (
+        "How long to upload (in seconds) before timing out.\n\n"
+        "This must be between 10 and 300 seconds."
+    )
+
+    @classmethod
+    def compute_value(
+        cls, raw_value: Optional[int], *, address: Address
+    ) -> int:
+        value_or_default = super().compute_value(raw_value, address=address)
+        if value_or_default < 10 or value_or_default > 300:
+            raise InvalidFieldException(
+                f"The {repr(cls.alias)} field in target {address} must "
+                f"be between 10 and 300, but was {value_or_default}."
+            )
+        return value_or_default
+
+
+# Example of subclassing an existing field.
+# We don't need to define `alias = "sources"` because the
+# parent class does this already.
+class FortranSources(MultipleSourcesField):
+    default = ("*.f95",)
+```
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-targets.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-targets.mdx
new file mode 100644
index 000000000..ccfee4b0b
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/creating-new-targets.mdx
@@ -0,0 +1,103 @@
+---
+ title: Creating new targets
+ sidebar_position: 2
+---
+
+How to add a custom target type.
+
+---
+
+## When to create a new target type?
+
+Adding new target types is most helpful when you are adding support for a new language.
+
+If you instead want to reduce boilerplate in BUILD files, such as changing default values, use [macros](../macros.mdx).
+
+If you are already using a target type, but need to store additional metadata for your plugin, [add a new field to the target type](./extending-existing-targets.mdx).
+
+## Step 1: Define the target type
+
+To define a new target:
+
+1. Subclass `pants.engine.target.Target`.
+2. Define the class property `alias`. This is the symbol that people use in BUILD files.
+3. Define the class property `core_fields`.
+4. Define the class property `help`. This is used by `pants help`.
+
+For `core_fields`, we recommend including `COMMON_TARGET_FIELDS` to add the useful `tags` and `description` fields. You will also often want to add `Dependencies`, and either `SingleSourceField` or `MultipleSourcesField`.
+
+```python title="plugins/target_types.py"
+from pants.engine.target import (
+    COMMON_TARGET_FIELDS,
+    Dependencies,
+    SingleSourceField,
+    StringField,
+    Target,
+)
+
+
+class CustomField(StringField):
+    alias = "custom_field"
+    help = "A custom field."
+
+
+class CustomTarget(Target):
+    alias = "custom_target"
+    core_fields = (*COMMON_TARGET_FIELDS, Dependencies, SingleSourceField, CustomField)
+    help = (
+        "A custom target to demo the Target API.\n\n"
+        "This docstring will be used in the output of "
+        "`pants help $target_type`."
+    )
+```
+
+:::note Tip: subclass `SingleSourceField` or `MultipleSourcesField`
+Use `SingleSourceField` for `source: str` and `MultipleSourcesField` for `sources: Iterable[str]`.
+
+You will often want to subclass either of these fields to give custom functionality:
+
+- set the `default`
+- set `expected_file_extensions`, e.g. to `(".json", ".txt")`
+- set `expected_num_files`, e.g. to `1` or `range(0, 5)` (i.e. 0 to 4 files)
+
+:::
+
+:::note Using the fields of an existing target type
+Sometimes, you may want to create a new target type that behaves similarly to one that already exists, except for some small changes.
+
+For example, you might like how `pex_binary` behaves in general, but you have a Django application and keep writing `entry_point="manage.py"`. Normally, you should write a [macro](../macros.mdx) to set this default value; but, here, you also want to add new Django-specific fields, so you decide to create a new target type.
+
+Rather than subclassing the original target type, use this pattern:
+
+```python
+from pants.backend.python.target_types import PexBinaryTarget, PexEntryPointField
+from pants.engine.target import Target
+from pants.util.ordered_set import FrozenOrderedSet
+
+class DjangoEntryPointField(PexEntryPointField):
+    default = "manage.py"
+
+
+class DjangoManagePyTarget(Target):
+    alias = "django_manage_py"
+    core_fields = (
+        *(FrozenOrderedSet(PexBinaryTarget.core_fields) - {PexEntryPointField}),
+        DjangoEntryPointField,
+    )
+```
+
+In this example, we register all of the fields of `PexBinaryTarget`, except for the field `PexEntryPointField`. We instead register our custom field `DjangoEntryPointField`.
+:::
+
+## Step 2: Register the target type in `register.py`
+
+Now, in your [`register.py`](../overview.mdx), add the target type to the `def target_types()` entry point.
+
+```python title="plugins/register.py"
+from plugins.target_types import CustomTarget
+
+def target_types():
+    return [CustomTarget]
+```
+
+You can confirm this works by running `pants help custom_target`.
diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/extending-existing-targets.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/extending-existing-targets.mdx
new file mode 100644
index 000000000..f67041ee8
--- /dev/null
+++ b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/extending-existing-targets.mdx
@@ -0,0 +1,39 @@
+---
+ title: Extending existing targets
+ sidebar_position: 3
+---
+
+Adding new fields to target types.
+
+---
+
+## When to add new fields?
+
+Adding new fields is useful when you are already using a target type, but need to store additional metadata for your plugin.
+
+For example, if you're writing a codegen plugin to convert a `protobuf_source` target into Java source files, you may want to add a `jdk_version` field to `protobuf_source`.
+
+If you are instead adding support for a new language, [create a new target type](./creating-new-targets.mdx).
+ +If you want to reduce boilerplate in BUILD files, such as changing default values, use [macros](../macros.mdx). + +## How to add new fields + +First, [define the field](./creating-new-fields.mdx). Then, register it by using `OriginalTarget.register_plugin_field(CustomField)`, like this: + +```python title="plugins/register.py" +from pants.backend.codegen.protobuf.target_types import ProtobufSourceTarget +from pants.engine.target import IntField + + +class ProtobufJdkVersionField(IntField): + alias = "jdk_version" + default = 11 + help = "Which JDK protobuf should target." + + +def rules(): + return [ProtobufSourceTarget.register_plugin_field(ProtobufJdkVersionField)] +``` + +To confirm this worked, run `pants help protobuf_source`. diff --git a/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/index.mdx b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/index.mdx new file mode 100644 index 000000000..d3228610c --- /dev/null +++ b/versioned_docs/version-2.24/docs/writing-plugins/the-target-api/index.mdx @@ -0,0 +1,13 @@ +--- + title: The Target API + sidebar_position: 2 +--- + +A declarative interface for working with targets and their fields. + +--- + +- [Concepts](./concepts.mdx) +- [Creating new fields](./creating-new-fields.mdx) +- [Creating new targets](./creating-new-targets.mdx) +- [Extending existing targets](./extending-existing-targets.mdx) diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/PANTS_VERSION.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/PANTS_VERSION.mdx new file mode 100644 index 000000000..60574213d --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/PANTS_VERSION.mdx @@ -0,0 +1,10 @@ +--- +title: PANTS_VERSION +description: | +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/__defaults__.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/__defaults__.mdx new file mode 100644 index 000000000..892276e8e --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/__defaults__.mdx @@ -0,0 +1,15 @@ +--- +title: __defaults__ +description: | + Provide default field values. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'None'`}> + +Provide default field values. + +Learn more https://www.pantsbuild.org/2.24/docs/using-pants/key-concepts/targets-and-build-files#field-default-values + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/__dependencies_rules__.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/__dependencies_rules__.mdx new file mode 100644 index 000000000..3d3366087 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/__dependencies_rules__.mdx @@ -0,0 +1,13 @@ +--- +title: __dependencies_rules__ +description: | + Declare dependencies rules. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'None'`}> + +Declare dependencies rules. + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/__dependents_rules__.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/__dependents_rules__.mdx new file mode 100644 index 000000000..a50a19e57 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/__dependents_rules__.mdx @@ -0,0 +1,13 @@ +--- +title: __dependents_rules__ +description: | + Declare dependents rules. 
+--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'None'`}> + +Declare dependents rules. + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/_category_.json b/versioned_docs/version-2.24/reference/build-file-symbols/_category_.json new file mode 100644 index 000000000..19a1c84c2 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/_category_.json @@ -0,0 +1,9 @@ +{ + "label": "BUILD file symbols", + "link": { + "type": "generated-index", + "slug": "/reference/build-file-symbols", + "title": "BUILD file symbols" + }, + "position": 5 +} diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/build_file_dir.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/build_file_dir.mdx new file mode 100644 index 000000000..34cd3100d --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/build_file_dir.mdx @@ -0,0 +1,17 @@ +--- +title: build_file_dir +description: | + Returns the path to the directory of the current BUILD file. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'PurePath'`}> + +Returns the path to the directory of the current BUILD file. + +The returned value is an instance of `PurePath` to make path name manipulations easy. + +See: https://docs.python.org/3/library/pathlib.html#pathlib.PurePath + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/duplicate_rule.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/duplicate_rule.mdx new file mode 100644 index 000000000..6b0781673 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/duplicate_rule.mdx @@ -0,0 +1,13 @@ +--- +title: duplicate_rule +description: | + DeployJarDuplicateRule(pattern: 'str', action: 'str') +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +DeployJarDuplicateRule(pattern: 'str', action: 'str') + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/env.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/env.mdx new file mode 100644 index 000000000..7d514bca9 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/env.mdx @@ -0,0 +1,13 @@ +--- +title: env +description: | + Reference environment variable. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'Any'`}> + +Reference environment variable. 
+ + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/http_source.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/http_source.mdx new file mode 100644 index 000000000..eb507ce46 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/http_source.mdx @@ -0,0 +1,13 @@ +--- +title: http_source +description: | + http_source(url: 'str', *, len: 'int', sha256: 'str', filename: 'str' = '') +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + + +http_source(url: 'str', \*, len: 'int', sha256: 'str', filename: 'str' = '') + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/jvm_exclude.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/jvm_exclude.mdx new file mode 100644 index 000000000..527ad1e71 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/jvm_exclude.mdx @@ -0,0 +1,13 @@ +--- +title: jvm_exclude +description: | + JvmArtifactExclusion(group: 'str', artifact: 'str | None' = None) +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +JvmArtifactExclusion(group: 'str', artifact: 'str | None' = None) + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/node_build_script.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/node_build_script.mdx new file mode 100644 index 000000000..d8964ac7a --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/node_build_script.mdx @@ -0,0 +1,15 @@ +--- +title: node_build_script +description: | + A build script, mapped from the `scripts` section of a package.json file. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'NodeBuildScript'`}> + +A build script, mapped from the `scripts` section of a package.json file. + +Either the `output_directories` or the `output_files` argument has to be set to capture the output artifacts of the build. + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/node_test_script.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/node_test_script.mdx new file mode 100644 index 000000000..fa9893731 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/node_test_script.mdx @@ -0,0 +1,15 @@ +--- +title: node_test_script +description: | + The test script for this package, mapped from the `scripts` section of a package.json file. The pointed to script should accept a variadic number of ([ARG]...) path arguments. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'NodeTestScript'`}> + +The test script for this package, mapped from the `scripts` section of a package.json file. The pointed to script should accept a variadic number of ([ARG]...) path arguments. + +This entry point is the "test" script, by default. + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/parametrize.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/parametrize.mdx new file mode 100644 index 000000000..7316c3a27 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/parametrize.mdx @@ -0,0 +1,15 @@ +--- +title: parametrize +description: | + A builtin function/dataclass that can be used to parametrize Targets. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + 'None'`}> + +A builtin function/dataclass that can be used to parametrize Targets. 
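+
+For example, in a BUILD file (a sketch; the resolve names are hypothetical):
+
+```python
+python_test(
+    name="tests",
+    source="tests.py",
+    # Generates one target per value of the parametrized field.
+    resolve=parametrize("lock_a", "lock_b"),
+)
+```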
+
+Parametrization is applied between TargetAdaptor construction and Target instantiation, which means that individual Field instances need not be aware of it.
+
+
diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/per_platform.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/per_platform.mdx
new file mode 100644
index 000000000..916bb9cfe
--- /dev/null
+++ b/versioned_docs/version-2.24/reference/build-file-symbols/per_platform.mdx
@@ -0,0 +1,45 @@
+---
+title: per_platform
+description: |
+  An object containing differing homogeneous platform-dependent values.
+---
+
+import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol";
+
+ None`}>
+
+An object containing differing homogeneous platform-dependent values.
+
+The values should be evaluated for the execution environment, and not the host environment (i.e. it should be evaluated in a `rule` which requests `Platform`).
+
+Expected usage is roughly:
+
+```python
+class MyFieldType(...):
+    value = str | per_platform[str]
+
+    @classmethod
+    def compute_value( # type: ignore[override]
+        cls,
+        raw_value: Optional[Union[str, per_platform[str]]],
+        address: Address,
+    ) -> Optional[Union[str, per_platform[str]]]:
+        if isinstance(raw_value, per_platform):
+            # NOTE: Ensure the values are homogeneous
+            raw_value.check_types(str)
+
+        return raw_value
+
+...
+
+@rule
+async def my_rule(..., platform: Platform) -> ...:
+    field_value = target[MyFieldType].value
+
+    if isinstance(field_value, per_platform):
+        field_value = field_value.get_value_for_platform(platform)
+
+    ...
+```
+
+NOTE: Support for this object should be heavily weighed, as it would be inappropriate to use in certain contexts (such as the `source` field in a `foo_source` target, where the intent is to support differing source files based on platform. The result would be that dependency inference (and therefore the dependencies field) wouldn't be knowable on the host, which is not something the engine can support yet).
+
+
diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/python_artifact.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/python_artifact.mdx
new file mode 100644
index 000000000..b99bfee25
--- /dev/null
+++ b/versioned_docs/version-2.24/reference/build-file-symbols/python_artifact.mdx
@@ -0,0 +1,13 @@
+---
+title: python_artifact
+description: |
+  Represents a Python setup.py-based project.
+---
+
+import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol";
+
+ None`}>
+
+Represents a Python setup.py-based project.
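+
+For example, in a BUILD file (a sketch with placeholder name and version):
+
+```python
+python_distribution(
+    name="dist",
+    provides=python_artifact(
+        name="mypkg",
+        version="1.2.3",
+    ),
+)
+```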
+ + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/scala_exclude.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/scala_exclude.mdx new file mode 100644 index 000000000..94890d053 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/scala_exclude.mdx @@ -0,0 +1,13 @@ +--- +title: scala_exclude +description: | + ScalaArtifactExclusion(group: 'str', artifact: 'str | None' = None, crossversion: 'str' = 'binary') +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +ScalaArtifactExclusion(group: 'str', artifact: 'str | None' = None, crossversion: 'str' = 'binary') + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/setup_py.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/setup_py.mdx new file mode 100644 index 000000000..6b4382b30 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/setup_py.mdx @@ -0,0 +1,13 @@ +--- +title: setup_py +description: | + Represents a Python setup.py-based project. +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +Represents a Python setup.py-based project. + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/shading_keep.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/shading_keep.mdx new file mode 100644 index 000000000..64ae53ec6 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/shading_keep.mdx @@ -0,0 +1,13 @@ +--- +title: shading_keep +description: | + JvmShadingKeepRule(pattern: 'str') +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +JvmShadingKeepRule(pattern: 'str') + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/shading_relocate.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/shading_relocate.mdx new file mode 100644 index 000000000..104c8b7dd --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/shading_relocate.mdx @@ -0,0 +1,13 @@ +--- +title: shading_relocate +description: | + JvmShadingRelocateRule(package: 'str', into: 'str | None' = None) +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +JvmShadingRelocateRule(package: 'str', into: 'str | None' = None) + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/shading_rename.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/shading_rename.mdx new file mode 100644 index 000000000..ca1128dc1 --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/shading_rename.mdx @@ -0,0 +1,13 @@ +--- +title: shading_rename +description: | + JvmShadingRenameRule(pattern: 'str', replacement: 'str') +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +JvmShadingRenameRule(pattern: 'str', replacement: 'str') + + diff --git a/versioned_docs/version-2.24/reference/build-file-symbols/shading_zap.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/shading_zap.mdx new file mode 100644 index 000000000..553179c3b --- /dev/null +++ b/versioned_docs/version-2.24/reference/build-file-symbols/shading_zap.mdx @@ -0,0 +1,13 @@ +--- +title: shading_zap +description: | + JvmShadingZapRule(pattern: 'str') +--- + +import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol"; + + None`}> + +JvmShadingZapRule(pattern: 'str') + + diff --git 
a/versioned_docs/version-2.24/reference/build-file-symbols/stevedore_namespace.mdx b/versioned_docs/version-2.24/reference/build-file-symbols/stevedore_namespace.mdx
new file mode 100644
index 000000000..8a6609f9d
--- /dev/null
+++ b/versioned_docs/version-2.24/reference/build-file-symbols/stevedore_namespace.mdx
@@ -0,0 +1,24 @@
+---
+title: stevedore_namespace
+description: |
+  Tag a namespace in entry_points as a stevedore namespace.
+---
+
+import BuildFileSymbol from "@site/src/components/reference/BuildFileSymbol";
+
+
+
+Tag a namespace in entry_points as a stevedore namespace.
+
+This is required for the entry_point to be visible to dep inference based on the `stevedore_namespaces` field.
+
+For example:
+
+```python
+python_distribution(
+    ...
+    entry_points={
+        stevedore_namespace("a.b.c"): {
+            "plugin_name": "some.entry:point",
+        },
+    },
+)
+```
+
+
diff --git a/versioned_docs/version-2.24/reference/global-options.mdx b/versioned_docs/version-2.24/reference/global-options.mdx
new file mode 100644
index 000000000..42f956ddb
--- /dev/null
+++ b/versioned_docs/version-2.24/reference/global-options.mdx
@@ -0,0 +1,1704 @@
+---
+title: Global options
+description: |
+  Options to control the overall behavior of Pants.
+sidebar_position: 1
+---
+
+import Option from "@site/src/components/reference/Option";
+import styles from "@site/src/components/reference/styles.module.css";
+
+---
+
+Options to control the overall behavior of Pants.
+
+Backend: `pants.backend.python.lint.isort`
+
+Config section: `[GLOBAL]`
+
+
+
+## Basic options
+
+### `colors`
+
+
+
+### `concurrent`
+
+
+
+### `dynamic_ui`
+
+
+
+### `dynamic_ui_renderer`
+
+
+
+### `keep_sandboxes`
+
+
+
+### `level`
+
+
+
+### `local_cache`
+
+
+
+### `loop`
+
+
+
+### `native_options_validation`
+
+
+
+### `pantsd`
+
+
+
+### `remote_cache_read`
+
+
+
+### `remote_cache_write`
+
+
+
+### `remote_execution`
+
+
+
+### `remote_provider`
+
+
+
+### `session_end_tasks_timeout`
+
+
+
+### `spec_files`
+
+
+
+### `tag`
+
+
+
+## Advanced options
+
+### `allow_deprecated_macos_versions`
+
+
+
+### `backend_packages`
+
+
+
+### `build_file_prelude_globs`
+
+
+
+### `build_ignore`
+
+
+
+### `build_patterns`
+
+
+
+### `ca_certs_path`
+
+
+
+### `cache_content_behavior`
+
+
+
+### `docker_execution`
+
+
+
+### `enable_target_origin_sources_blocks`
+
+
+
+### `engine_visualize_to`
+
+
+
+### `file_downloads_max_attempts`
+
+
+
+### `file_downloads_retry_delay`
+
+
+
+### `ignore_warnings`
+
+
+
+### `local_execution_root_dir`
+
+
+
+### `local_store_dir`
+
+
+
+### `local_store_directories_max_size_bytes`
+
+
+
+### `local_store_files_max_size_bytes`
+
+
+
+### `local_store_processes_max_size_bytes`
+
+
+
+### `local_store_shard_count`
+
+
+
+### `log_levels_by_target`
+
+
+
+### `log_show_rust_3rdparty`
+
+
+
+### `logdir`
+
+<Option