From 2f2551bfdbd530afa447d7d13d706fc2c208747d Mon Sep 17 00:00:00 2001 From: Kevin Brolly Date: Thu, 17 Oct 2019 13:55:52 +0100 Subject: [PATCH 01/25] Cache the STACK in use and use it to bust cache if it changes --- bin/compile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bin/compile b/bin/compile index d36967c..6bf9356 100755 --- a/bin/compile +++ b/bin/compile @@ -128,3 +128,6 @@ export | grep -E -e ' (PATH|LD_LIBRARY_PATH|LIBRARY_PATH|INCLUDE_PATH|CPATH|CPPP topic "Rewrite package-config files" find $BUILD_DIR/.apt -type f -ipath '*/pkgconfig/*.pc' | xargs --no-run-if-empty -n 1 sed -i -e 's!^prefix=\(.*\)$!prefix='"$BUILD_DIR"'/.apt\1!g' + +# Store which STACK we are running on in the cache to bust the cache if it changes +echo "$STACK" > "$CACHE_DIR/.apt/STACK" From f094cf022e6e2e4714fcccc644e32f85e329c994 Mon Sep 17 00:00:00 2001 From: Kevin Brolly Date: Thu, 17 Oct 2019 13:58:24 +0100 Subject: [PATCH 02/25] Refactored tests and added fixtures to make future tests easier --- test/compile_test.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/compile_test.sh b/test/compile_test.sh index f5c4a40..bf75aa8 100644 --- a/test/compile_test.sh +++ b/test/compile_test.sh @@ -21,7 +21,7 @@ testStackChange() { #Set the cached STACK value to a non-existent stack, so it is guaranteed to change. mkdir -p "$CACHE_DIR/.apt/" echo "cedar-10" > "$CACHE_DIR/.apt/STACK" - + #Load the Aptfile into the cache, to exclusively test the stack changes mkdir -p "$CACHE_DIR/apt/cache" cp $BUILD_DIR/Aptfile "$CACHE_DIR/apt/cache" @@ -56,4 +56,4 @@ testStackCached() { loadFixture() { cp -a $BUILDPACK_HOME/test/fixtures/$1/. ${BUILD_DIR} -} \ No newline at end of file +} From 25af516bd3cad643b444a6d0e377ec11b1e6ef56 Mon Sep 17 00:00:00 2001 From: Kevin Brolly Date: Thu, 17 Oct 2019 14:22:55 +0100 Subject: [PATCH 03/25] Fixed issue with stack not getting cached --- bin/compile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bin/compile b/bin/compile index 6bf9356..caaa997 100755 --- a/bin/compile +++ b/bin/compile @@ -36,6 +36,9 @@ if [ -f $CACHE_DIR/.apt/STACK ]; then CACHED_STACK=$(cat "$CACHE_DIR/.apt/STACK") else CACHED_STACK=$STACK + # Store the STACK in the cache for next time. + mkdir -p "$CACHE_DIR/.apt" + echo "$STACK" > "$CACHE_DIR/.apt/STACK" fi # Ensure we store the STACK in the cache for next time. From 57994aaf5a0235fb4add4970ac07c3ebdce62034 Mon Sep 17 00:00:00 2001 From: Kevin Brolly Date: Thu, 17 Oct 2019 15:53:02 +0100 Subject: [PATCH 04/25] Fixed issue where STACK was not always cached --- bin/compile | 3 --- test/compile_test.sh | 4 ++++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/bin/compile b/bin/compile index caaa997..6bf9356 100755 --- a/bin/compile +++ b/bin/compile @@ -36,9 +36,6 @@ if [ -f $CACHE_DIR/.apt/STACK ]; then CACHED_STACK=$(cat "$CACHE_DIR/.apt/STACK") else CACHED_STACK=$STACK - # Store the STACK in the cache for next time. - mkdir -p "$CACHE_DIR/.apt" - echo "$STACK" > "$CACHE_DIR/.apt/STACK" fi # Ensure we store the STACK in the cache for next time. 
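For reference, the stack check that patches 01 through 04 converge on can be sketched as follows. This is a condensed reading of the bin/compile fragments quoted in this series (the CACHED_STACK comparison, the "Reusing cache" and "Detected Aptfile or Stack changes, flushing cache" messages, and the write of $STACK into the cache), not the literal final file; the cache flush command itself is assumed, since only its log message appears in the tests, and APT_CACHE_DIR / APT_FILE_MANIFEST are the variable names used later in the series.

    # Recover the stack recorded by the previous build, if any.
    if [[ -f "$CACHE_DIR/.apt/STACK" ]]; then
      CACHED_STACK=$(cat "$CACHE_DIR/.apt/STACK")
    else
      CACHED_STACK=$STACK
    fi

    # Reuse the cache only when both the Aptfile and the stack are unchanged.
    if [ -f "$APT_CACHE_DIR/$APT_FILE_MANIFEST" ] &&
       cmp -s "$BUILD_DIR/$APT_FILE_MANIFEST" "$APT_CACHE_DIR/$APT_FILE_MANIFEST" &&
       [[ $CACHED_STACK == $STACK ]]; then
      topic "Reusing cache"
    else
      topic "Detected Aptfile or Stack changes, flushing cache"
      rm -rf "$APT_CACHE_DIR"   # assumed flush step; not shown in the hunks above
    fi

    # Always record the current stack for the next build (the point of patch 04).
    mkdir -p "$CACHE_DIR/.apt"
    echo "$STACK" > "$CACHE_DIR/.apt/STACK"
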
diff --git a/test/compile_test.sh b/test/compile_test.sh index bf75aa8..71e841b 100644 --- a/test/compile_test.sh +++ b/test/compile_test.sh @@ -46,12 +46,16 @@ testStackNoChange() { } testStackCached() { + # Test that we are correctly storing the value of STACK in the cache loadFixture "Aptfile" compile assertCapturedSuccess assertTrue 'STACK not cached' "[ -e $CACHE_DIR/.apt/STACK ]" + + CACHED_STACK=$(cat "$CACHE_DIR/.apt/STACK") + assertTrue 'STACK not cached' "[[ $CACHED_STACK == $STACK ]]" } loadFixture() { From c783231388c60fec703c88fb17656a314739162e Mon Sep 17 00:00:00 2001 From: Kevin Brolly Date: Thu, 17 Oct 2019 16:13:56 +0100 Subject: [PATCH 05/25] Fixed test to ensure STACK is cached --- test/compile_test.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/test/compile_test.sh b/test/compile_test.sh index 71e841b..82d2f15 100644 --- a/test/compile_test.sh +++ b/test/compile_test.sh @@ -46,7 +46,6 @@ testStackNoChange() { } testStackCached() { - # Test that we are correctly storing the value of STACK in the cache loadFixture "Aptfile" compile From a11d9104eba9fbed7edf2eafb42c9dc674e6ea4e Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Fri, 15 Jan 2021 18:33:46 +0000 Subject: [PATCH 06/25] Add a changelog (#78) Since for the check changelog check (added in #56) to pass, we actually need a changelog to which we can add entries. I've backfilled recent changes based on the Git history. The buildpack doesn't tag versions, so I've used dates only. Closes W-8726144. --- CHANGELOG.md | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 295492e..5daaff2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,20 +1,28 @@ -# Changelog +# APT Buildpack Changelog -## To Be Released +## Unreleased -* Add support for comments in `Aptfile` ([#24](https://github.com/heroku/heroku-buildpack-apt/pull/24)). -* Prevent APT using source lists from `/etc/apt/sources.list.d/` ([#46](https://github.com/heroku/heroku-buildpack-apt/pull/46)). -* Stop using `force-yes` with newer version of apt-get ([#51](https://github.com/heroku/heroku-buildpack-apt/pull/51)). -* Flush the cache on stack change ([#58](https://github.com/heroku/heroku-buildpack-apt/pull/58)). -* Fail the build if `apt-get` or `curl` errors ([#79](https://github.com/heroku/heroku-buildpack-apt/pull/79)). -* Only try to add custom repositories when some are defined in `Aptfile` ([#79](https://github.com/heroku/heroku-buildpack-apt/pull/79)). -* Output a helpful error message when no `Aptfile` is found ([#87](https://github.com/heroku/heroku-buildpack-apt/pull/87)). -## Version 1.1 +## 2019-10-17 -* Add `$HOME/.apt/usr/sbin` into application PATH (`profile.d` script) -* Add `APT_FILE_MANIFEST` environment variable to use another file than `Aptfile` from build directory root +- Flush the cache on stack change ([#58](https://github.com/heroku/heroku-buildpack-apt/pull/58)). -## Version 1.0 +## 2019-09-06 -Initial Version +- Stop using `force-yes` with newer version of apt-get ([#51](https://github.com/heroku/heroku-buildpack-apt/pull/51)). + +## 2019-06-11 + +- Prevent APT using source lists from `/etc/apt/sources.list.d/` ([#46](https://github.com/heroku/heroku-buildpack-apt/pull/46)). + +## 2019-06-10 + +- Add support for comments in `Aptfile` ([#24](https://github.com/heroku/heroku-buildpack-apt/pull/24)). 
+ +## 2017-09-13 + +- Add support for custom repositories ([#18](https://github.com/heroku/heroku-buildpack-apt/pull/18)). + +## 2016 and earlier + +See the [Git log](https://github.com/heroku/heroku-buildpack-apt/commits/40883f0cb8e8ddb2876ca8be5d25ade4ff9617b1). From 4ac5ebb9c373f6079524e88a1ed2485fb2509de2 Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Fri, 15 Jan 2021 18:50:39 +0000 Subject: [PATCH 07/25] Fail the build if apt-get or curl errors (#79) Enables the bash `pipefail` mode, which ensures that a failure in a command prior to a pipe correctly causes the script to exit 1. Without this, failures during the `apt-get` and `curl` invocations were ignored and the compile marked as a success. At best this leads to confusing errors in later buildpacks (if build time dependencies are missing), and at worst this could cause runtime failures for packages not used during the build, but required by the app at runtime. Enabling `pipefail` mode required a change to the custom repositories feature, to prevent the build exiting 1 when `grep -s -e "^:repo:"` found no matches (ie when no custom repositories are specified). In addition, the `--show-error` and `--fail` flags have been added to the `curl` call, otherwise non-HTTP 200 exit codes are ignored and the compile similarly marked as successful when it should not have been. Fixes #47. Fixes W-8722791. --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5daaff2..326c20d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,9 @@ # APT Buildpack Changelog -## Unreleased +## 2021-01-15 +- Fail the build if `apt-get` or `curl` errors ([#79](https://github.com/heroku/heroku-buildpack-apt/pull/79)). +- Only try to add custom repositories when some are defined in `Aptfile` ([#79](https://github.com/heroku/heroku-buildpack-apt/pull/79)). ## 2019-10-17 From a65c917f2f6f88d89cc44085b562b287c9ef4dbb Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Thu, 21 Jan 2021 16:04:45 +0000 Subject: [PATCH 08/25] Docs: Make the example more obviously an example (#80) In a recent support ticket, a user had copied the example `:repo:` line into their `Aptfile`, which previously referred to the now obsolete "artful" Ubuntu LTS release. This caused the build to fail after #79, when previously the error was ignored. This PR makes the example more clearly just an example, and emphasises that using a custom repository is only necessary if using packages from outside the standard Ubuntu repository. In addition, the bogus `pg` Gem example was removed, since `libpq-dev` is part of the stack image, so we should not be encouraging people to install it unnecessarily. --- README.md | 25 ++++++------------------- 1 file changed, 6 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index c33b919..07f3e2e 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ Include a list of apt package names to be installed in a file named `Aptfile`. To find out what packages are available, see: +<<<<<<< HEAD See the [Scalingo stacks](https://doc.scalingo.com/platform/internals/stacks/stacks) documentation for which Ubuntu LTS version is used by each stack. #### Setup @@ -37,27 +38,13 @@ Note that the order of the buildpacks in the `.buildpacks` file matters. 
#### Aptfile # you can list packages - libpq-dev - + libexample-dev + # or include links to specific .deb files - http://downloads.sourceforge.net/project/wkhtmltopdf/0.12.1/wkhtmltox-0.12.1_linux-precise-amd64.deb - + https://downloads.example.com/example.deb + # or add custom apt repos (only required if using packages outside of the standard Ubuntu APT repositories) - :repo:deb http://cz.archive.ubuntu.com/ubuntu artful main universe - -#### Gemfile - - source "https://rubygems.org" - gem "pg" - -### Check out the PG library version - - $ scalingo run bash -a apt-pg-test - ~ $ irb - irb(main):001:0> require "pg" - => true - irb(main):002:0> PG::version_string - => "PG 0.15.1" + :repo:deb https://apt.example.com/ example-distro main ## License From 68fb5a677ae4f958cde8c8be2a492d9b65ab0e9c Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Wed, 10 Mar 2021 10:38:56 +0000 Subject: [PATCH 09/25] Output a helpful error message when no Aptfile is found (#87) The error message is output to `stderr` otherwise it won't be shown. Closes GUS-W-8799411. Refs #86. --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 326c20d..c70da68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # APT Buildpack Changelog +## Unreleased + +- Output a helpful error message when no `Aptfile` is found ([#87](https://github.com/heroku/heroku-buildpack-apt/pull/87)). + ## 2021-01-15 - Fail the build if `apt-get` or `curl` errors ([#79](https://github.com/heroku/heroku-buildpack-apt/pull/79)). From ceffc13b0cd8aa5925ba026c7b64e7d7e1c4d7be Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Wed, 10 Mar 2021 11:05:03 +0000 Subject: [PATCH 10/25] Update changelog for 2021-03-10 release (#88) To pick up #87. Refs GUS-W-8799411. --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c70da68..b24fd59 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ ## Unreleased + +## 2021-03-10 + - Output a helpful error message when no `Aptfile` is found ([#87](https://github.com/heroku/heroku-buildpack-apt/pull/87)). 
## 2021-01-15 From f7b0b072cc57c3c9105209721f5755d521f22e32 Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Tue, 16 Jan 2024 12:55:40 +0000 Subject: [PATCH 11/25] Modernise the check changelog check (#109) Switches to the newer label based approach used in eg: https://github.com/heroku/buildpacks-python/blob/main/.github/workflows/check_changelog.yml --- .github/workflows/check_changelog.yml | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/workflows/check_changelog.yml b/.github/workflows/check_changelog.yml index 282bf46..c0cc43b 100644 --- a/.github/workflows/check_changelog.yml +++ b/.github/workflows/check_changelog.yml @@ -2,16 +2,19 @@ name: Check Changelog on: pull_request: - types: [opened, reopened, edited, synchronize] + types: [opened, reopened, labeled, unlabeled, synchronize] + +permissions: + contents: read jobs: check-changelog: runs-on: ubuntu-latest - if: | - !contains(github.event.pull_request.body, '[skip changelog]') && - !contains(github.event.pull_request.body, '[changelog skip]') && - !contains(github.event.pull_request.body, '[skip ci]') + if: (!contains(github.event.pull_request.labels.*.name, 'skip changelog')) steps: - - uses: actions/checkout@v1 + - name: Checkout + uses: actions/checkout@v4 - name: Check that CHANGELOG is touched - run: git diff remotes/origin/${{ github.base_ref }} --name-only | grep CHANGELOG.md + run: | + git fetch origin ${{ github.base_ref }} --depth 1 && \ + git diff remotes/origin/${{ github.base_ref }} --name-only | grep CHANGELOG.md From 6d1fa4a6030972f1738cb1c1882103fbbcafca6e Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Tue, 16 Jan 2024 12:58:20 +0000 Subject: [PATCH 12/25] Add CODEOWNERS file (#108) https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners GUS-W-14825333. 
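For readers unfamiliar with the format: a CODEOWNERS file maps gitignore-style path patterns to the users or teams that are automatically requested to review changes touching those paths, and the single `*` entry added below applies to every file in the repository. A narrower, purely hypothetical variant would look like:

    # hypothetical example: request review only for the buildpack scripts
    bin/ @heroku/languages
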
--- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..25f6404 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @heroku/languages From 31acd201a6fed3f121e3465813a32661d757a056 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Fri, 1 Mar 2024 10:18:30 -0400 Subject: [PATCH 13/25] Add `bin/report` script to capture buildpack metadata (#110) * Add `bin/report` script to capture buildpack metadata [W-15039947](https://gus.lightning.force.com/lightning/r/a07EE00001k7v4BYAQ/view) --- CHANGELOG.md | 1 + bin/report | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100755 bin/report diff --git a/CHANGELOG.md b/CHANGELOG.md index b24fd59..8fa85ac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ## Unreleased +- Add `bin/report` script to capture buildpack metadata ## 2021-03-10 diff --git a/bin/report b/bin/report new file mode 100755 index 0000000..7c7b723 --- /dev/null +++ b/bin/report @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +# bin/report + +### Configure environment + +set -o errexit # always exit on error +set -o pipefail # don't ignore exit codes when piping output + +BUILD_DIR=${1:-} + +packages=() +custom_packages=() +custom_repositories=() + +while IFS= read -r line; do + if grep --silent -e "^:repo:" <<< "${line}"; then + custom_repositories+=("${line//:repo:/}") + elif [[ $line == *deb ]]; then + custom_packages+=("${line}") + else + packages+=("${line}") + fi +done < <(grep --invert-match -e "^#" -e "^\s*$" "${BUILD_DIR}/Aptfile") + +output_key_value() { + local key value + key="$1" + shift + # sort & join the array values with a ',' then escape both '\' and '"' characters + value=$(printf '%s\n' "$@" | sort | tr '\n' ',' | sed 's/,$/\n/' | sed 's/\\/\\\\/g' | sed 's/"/\\"/g') + if [[ -n "${value}" ]]; then + echo "$key: \"$value\"" + fi +} + +output_key_value "packages" "${packages[@]}" +output_key_value "custom_packages" "${custom_packages[@]}" +output_key_value "custom_repositories" "${custom_repositories[@]}" From 5349c463be3316c22442720ee83acf529d6f6025 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Fri, 1 Mar 2024 11:34:38 -0400 Subject: [PATCH 14/25] Prepare release v7 (#111) Includes: - #110 --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8fa85ac..c4e85ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,9 @@ ## Unreleased -- Add `bin/report` script to capture buildpack metadata +## 2024-03-01 + +- Add `bin/report` script to capture buildpack metadata ([#110](https://github.com/heroku/heroku-buildpack-apt/pull/110)) ## 2021-03-10 From 64ca4dffbec2db922daf29d2ffd6398a3a3ca0b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Z=C3=BClke?= Date: Thu, 14 Mar 2024 18:07:03 +0100 Subject: [PATCH 15/25] Shell hardening (#115) * Fix variable quoting throughout Guards against glob expansion from user input, and works with spaces etc in any path names. Double square brackets do not technically need quoting, but doing it for consistency, as the lines have been touched anyway. Variable assignments, on the other hand, do not require quoting, but I did not remove existing quotes to aid future 'git blame's. 
* Fix .deb globbing Iterating over 'ls' output breaks with spaces in path names * Use nul byte separators with find/xargs Guards against spaces etc in path/file names --- bin/compile | 43 ++++++++++++++++++++----------------------- 1 file changed, 20 insertions(+), 23 deletions(-) diff --git a/bin/compile b/bin/compile index 6bf9356..1691eea 100755 --- a/bin/compile +++ b/bin/compile @@ -12,7 +12,7 @@ fi # parse and derive params BUILD_DIR=$1 CACHE_DIR=$2 -LP_DIR=$(cd $(dirname $0); cd ..; pwd) +LP_DIR=$(cd "$(dirname "$0")"; cd ..; pwd) function error() { echo " ! $*" >&2 @@ -32,7 +32,7 @@ function indent() { } # Store which STACK we are running on in the cache to bust the cache if it changes -if [ -f $CACHE_DIR/.apt/STACK ]; then +if [[ -f "$CACHE_DIR/.apt/STACK" ]]; then CACHED_STACK=$(cat "$CACHE_DIR/.apt/STACK") else CACHED_STACK=$STACK @@ -52,8 +52,8 @@ APT_FILE_MANIFEST="${APT_FILE_MANIFEST:-Aptfile}" APT_VERSION=$(apt-get -v | awk 'NR == 1{ print $2 }') case "$APT_VERSION" in - 0* | 1.0*) APT_FORCE_YES="--force-yes";; - *) APT_FORCE_YES="--allow-downgrades --allow-remove-essential --allow-change-held-packages";; + 0* | 1.0*) APT_FORCE_YES=("--force-yes");; + *) APT_FORCE_YES=("--allow-downgrades" "--allow-remove-essential" "--allow-change-held-packages");; esac if [ -f $APT_CACHE_DIR/$APT_FILE_MANIFEST ] && cmp -s $BUILD_DIR/$APT_FILE_MANIFEST $APT_CACHE_DIR/$APT_FILE_MANIFEST && [[ $CACHED_STACK == $STACK ]] ; then @@ -76,39 +76,39 @@ else fi fi -APT_OPTIONS="-o debug::nolocking=true -o dir::cache=$APT_CACHE_DIR -o dir::state=$APT_STATE_DIR" +APT_OPTIONS=("-o" "debug::nolocking=true" "-o" "dir::cache=$APT_CACHE_DIR" "-o" "dir::state=$APT_STATE_DIR") # Override the use of /etc/apt/sources.list (sourcelist) and /etc/apt/sources.list.d/* (sourceparts). 
-APT_OPTIONS="$APT_OPTIONS -o dir::etc::sourcelist=$APT_SOURCES -o dir::etc::sourceparts=/dev/null" +APT_OPTIONS+=("-o" "dir::etc::sourcelist=$APT_SOURCES" "-o" "dir::etc::sourceparts=/dev/null") topic "Updating apt caches" -apt-get $APT_OPTIONS update | indent +apt-get "${APT_OPTIONS[@]}" update | indent for PACKAGE in $(cat $BUILD_DIR/$APT_FILE_MANIFEST | grep -v -s -e '^#' | grep -v -s -e "^:repo:"); do if [[ $PACKAGE == *deb ]]; then - PACKAGE_NAME=$(basename $PACKAGE .deb) + PACKAGE_NAME=$(basename "$PACKAGE" .deb) PACKAGE_FILE=$APT_CACHE_DIR/archives/$PACKAGE_NAME.deb topic "Fetching $PACKAGE" - curl --silent --show-error --fail -L -z $PACKAGE_FILE -o $PACKAGE_FILE $PACKAGE 2>&1 | indent + curl --silent --show-error --fail -L -z "$PACKAGE_FILE" -o "$PACKAGE_FILE" "$PACKAGE" 2>&1 | indent else topic "Fetching .debs for $PACKAGE" - apt-get $APT_OPTIONS -y $APT_FORCE_YES -d install --reinstall $PACKAGE | indent + apt-get "${APT_OPTIONS[@]}" -y "${APT_FORCE_YES[@]}" -d install --reinstall "$PACKAGE" | indent fi done -mkdir -p $BUILD_DIR/.apt +mkdir -p "$BUILD_DIR/.apt" -for DEB in $(ls -1 $APT_CACHE_DIR/archives/*.deb); do - topic "Installing $(basename $DEB)" - dpkg -x $DEB $BUILD_DIR/.apt/ +for DEB in "$APT_CACHE_DIR/archives/"*.deb; do + topic "Installing $(basename "$DEB")" + dpkg -x "$DEB" "$BUILD_DIR/.apt/" done topic "Writing profile script" -mkdir -p $BUILD_DIR/.profile.d -cat <$BUILD_DIR/.profile.d/000_apt.sh -export PATH="\$HOME/.apt/usr/bin:\$HOME/.apt/usr/sbin:\$PATH" -export LD_LIBRARY_PATH="\$HOME/.apt/lib/x86_64-linux-gnu:\$HOME/.apt/usr/lib/x86_64-linux-gnu:\$HOME/.apt/usr/lib/i386-linux-gnu:\$HOME/.apt/lib:\$HOME/.apt/usr/lib:\$LD_LIBRARY_PATH" -export LIBRARY_PATH="\$HOME/.apt/lib/x86_64-linux-gnu:\$HOME/.apt/usr/lib/x86_64-linux-gnu:\$HOME/.apt/usr/lib/i386-linux-gnu:\$HOME/.apt/lib:\$HOME/.apt/usr/lib:\$LIBRARY_PATH" +mkdir -p "$BUILD_DIR/.profile.d" +cat <"$BUILD_DIR/.profile.d/000_apt.sh" +export PATH="\$HOME/.apt/usr/bin:\$PATH" +export LD_LIBRARY_PATH="\$HOME/.apt/usr/lib/x86_64-linux-gnu:\$HOME/.apt/usr/lib/i386-linux-gnu:\$HOME/.apt/usr/lib:\$LD_LIBRARY_PATH" +export LIBRARY_PATH="\$HOME/.apt/usr/lib/x86_64-linux-gnu:\$HOME/.apt/usr/lib/i386-linux-gnu:\$HOME/.apt/usr/lib:\$LIBRARY_PATH" export INCLUDE_PATH="\$HOME/.apt/usr/include:\$HOME/.apt/usr/include/x86_64-linux-gnu:\$INCLUDE_PATH" export CPATH="\$INCLUDE_PATH" export CPPPATH="\$INCLUDE_PATH" @@ -127,7 +127,4 @@ export PKG_CONFIG_PATH="$BUILD_DIR/.apt/usr/lib/x86_64-linux-gnu/pkgconfig:$BUIL export | grep -E -e ' (PATH|LD_LIBRARY_PATH|LIBRARY_PATH|INCLUDE_PATH|CPATH|CPPPATH|PKG_CONFIG_PATH)=' > "$LP_DIR/export" topic "Rewrite package-config files" -find $BUILD_DIR/.apt -type f -ipath '*/pkgconfig/*.pc' | xargs --no-run-if-empty -n 1 sed -i -e 's!^prefix=\(.*\)$!prefix='"$BUILD_DIR"'/.apt\1!g' - -# Store which STACK we are running on in the cache to bust the cache if it changes -echo "$STACK" > "$CACHE_DIR/.apt/STACK" +find "$BUILD_DIR/.apt" -type f -ipath '*/pkgconfig/*.pc' -print0 | xargs -0 --no-run-if-empty -n 1 sed -i -e 's!^prefix=\(.*\)$!prefix='"$BUILD_DIR"'/.apt\1!g' From 289c0c22d51ac2617efc368eae197698ade901c9 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 14 Mar 2024 14:59:20 -0300 Subject: [PATCH 16/25] Instrumentation: Handle when multiple packages are declared on a single line (#112) * Handle when multiple packages are declared on a single line. 
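To illustrate the effect of this change together with the bin/report script added in patch 13, consider a purely hypothetical Aptfile (example package names and URLs only):

    # build libraries, two on one line
    libxml2-dev libxslt1-dev
    xmlsec1
    https://downloads.example.com/example.deb
    :repo:deb https://apt.example.com/ example-distro main

With the splitting added here, each space-separated package name becomes its own entry, so bin/report should emit roughly:

    packages: "libxml2-dev,libxslt1-dev,xmlsec1"
    custom_packages: "https://downloads.example.com/example.deb"
    custom_repositories: "deb https://apt.example.com/ example-distro main"

Previously the first package line would have been reported as the single entry "libxml2-dev libxslt1-dev".
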
--- bin/report | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/bin/report b/bin/report index 7c7b723..1558b6c 100755 --- a/bin/report +++ b/bin/report @@ -18,7 +18,13 @@ while IFS= read -r line; do elif [[ $line == *deb ]]; then custom_packages+=("${line}") else - packages+=("${line}") + # while this is not documented behavior, the Aptfile format technically + # does allow for multiple packages separated by spaces to be specified + # on a single line due to how the download command is implemented + IFS=$' \t' read -ra package_names <<< "${line}" + for package_name in "${package_names[@]}"; do + packages+=("${package_name}") + done fi done < <(grep --invert-match -e "^#" -e "^\s*$" "${BUILD_DIR}/Aptfile") From d9f268960e0f855b3d6d2e3eccc8f767cfaa0388 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 14 Mar 2024 16:44:20 -0300 Subject: [PATCH 17/25] Prepare release v8 (#116) * Update CHANGELOG.md --------- Co-authored-by: Rune Soerensen --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c4e85ff..2bb8b57 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ ## Unreleased +## 2024-03-14 + +- Shell hardening ([#115](https://github.com/heroku/heroku-buildpack-apt/pull/115)) +- Handle multi-package lines when capturing buildpack metadata ([#112](https://github.com/heroku/heroku-buildpack-apt/pull/112)) + ## 2024-03-01 - Add `bin/report` script to capture buildpack metadata ([#110](https://github.com/heroku/heroku-buildpack-apt/pull/110)) From 48b2fed17a8f86edc95685bbad130666086a3e76 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Fri, 22 Mar 2024 12:14:40 -0300 Subject: [PATCH 18/25] Added test harness (#121) --- .github/workflows/ci.yml | 25 + .gitignore | 1 + Makefile | 11 + test/compile_test.sh | 62 - test/fixtures/Aptfile/Aptfile | 3 - .../custom-package-url-heroku-20/Aptfile | 1 + .../custom-package-url-heroku-22/Aptfile | 1 + .../custom-repository-heroku-20/Aptfile | 2 + .../custom-repository-heroku-22/Aptfile | 2 + test/fixtures/package-names/Aptfile | 5 + test/run | 112 ++ test/shunit2 | 1343 +++++++++++++++++ test/utils | 212 +++ 13 files changed, 1715 insertions(+), 65 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 Makefile delete mode 100644 test/compile_test.sh delete mode 100644 test/fixtures/Aptfile/Aptfile create mode 100644 test/fixtures/custom-package-url-heroku-20/Aptfile create mode 100644 test/fixtures/custom-package-url-heroku-22/Aptfile create mode 100644 test/fixtures/custom-repository-heroku-20/Aptfile create mode 100644 test/fixtures/custom-repository-heroku-22/Aptfile create mode 100644 test/fixtures/package-names/Aptfile create mode 100755 test/run create mode 100644 test/shunit2 create mode 100644 test/utils diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..1ef2c49 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,25 @@ +name: CI + +on: + push: + branches: ["main"] + pull_request: + +permissions: + contents: read + +jobs: + functional-test: + runs-on: ubuntu-22.04 + container: + image: heroku/heroku:${{ matrix.stack_number }}-build + strategy: + matrix: + stack_number: ["20", "22"] + env: + STACK: heroku-${{ matrix.stack_number }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Functional tests on heroku:${{ matrix.stack_number }}-build + run: test/run diff --git a/.gitignore b/.gitignore index 7ee0c0f..94c93ea 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ .anvil 
+.idea diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..31bb73c --- /dev/null +++ b/Makefile @@ -0,0 +1,11 @@ +test: heroku-22-build heroku-20-build + +heroku-22-build: + @echo "Running tests in docker (heroku-22-build)..." + @docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-22" heroku/heroku:22-build bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;' + @echo "" + +heroku-20-build: + @echo "Running tests in docker (heroku-20-build)..." + @docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-20" heroku/heroku:20-build bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;' + @echo "" diff --git a/test/compile_test.sh b/test/compile_test.sh deleted file mode 100644 index 82d2f15..0000000 --- a/test/compile_test.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env bash - -. ${BUILDPACK_TEST_RUNNER_HOME}/lib/test_utils.sh - -testCompile() { - loadFixture "Aptfile" - - compile - - assertCapturedSuccess - - assertCaptured "Fetching .debs for s3cmd" - assertCaptured "Installing s3cmd_" - assertCaptured "Fetching .debs for wget" - assertCaptured "Installing wget_" -} - -testStackChange() { - loadFixture "Aptfile" - - #Set the cached STACK value to a non-existent stack, so it is guaranteed to change. - mkdir -p "$CACHE_DIR/.apt/" - echo "cedar-10" > "$CACHE_DIR/.apt/STACK" - - #Load the Aptfile into the cache, to exclusively test the stack changes - mkdir -p "$CACHE_DIR/apt/cache" - cp $BUILD_DIR/Aptfile "$CACHE_DIR/apt/cache" - - compile - - assertCapturedSuccess - - assertCaptured "Detected Aptfile or Stack changes, flushing cache" -} - -testStackNoChange() { - loadFixture "Aptfile" - - #Load the Aptfile into the cache, to exclusively test the stack changes - mkdir -p "$CACHE_DIR/apt/cache" - cp $BUILD_DIR/Aptfile "$CACHE_DIR/apt/cache" - - compile - - assertCaptured "Reusing cache" -} - -testStackCached() { - loadFixture "Aptfile" - - compile - assertCapturedSuccess - - assertTrue 'STACK not cached' "[ -e $CACHE_DIR/.apt/STACK ]" - - CACHED_STACK=$(cat "$CACHE_DIR/.apt/STACK") - assertTrue 'STACK not cached' "[[ $CACHED_STACK == $STACK ]]" -} - -loadFixture() { - cp -a $BUILDPACK_HOME/test/fixtures/$1/. 
${BUILD_DIR} -} diff --git a/test/fixtures/Aptfile/Aptfile b/test/fixtures/Aptfile/Aptfile deleted file mode 100644 index b24b956..0000000 --- a/test/fixtures/Aptfile/Aptfile +++ /dev/null @@ -1,3 +0,0 @@ -# Test comment -s3cmd -wget \ No newline at end of file diff --git a/test/fixtures/custom-package-url-heroku-20/Aptfile b/test/fixtures/custom-package-url-heroku-20/Aptfile new file mode 100644 index 0000000..0a2a322 --- /dev/null +++ b/test/fixtures/custom-package-url-heroku-20/Aptfile @@ -0,0 +1 @@ +https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb diff --git a/test/fixtures/custom-package-url-heroku-22/Aptfile b/test/fixtures/custom-package-url-heroku-22/Aptfile new file mode 100644 index 0000000..3808ef9 --- /dev/null +++ b/test/fixtures/custom-package-url-heroku-22/Aptfile @@ -0,0 +1 @@ +https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb diff --git a/test/fixtures/custom-repository-heroku-20/Aptfile b/test/fixtures/custom-repository-heroku-20/Aptfile new file mode 100644 index 0000000..8136ade --- /dev/null +++ b/test/fixtures/custom-repository-heroku-20/Aptfile @@ -0,0 +1,2 @@ +:repo:deb http://us.archive.ubuntu.com/ubuntu/ focal multiverse +fasttracker2 diff --git a/test/fixtures/custom-repository-heroku-22/Aptfile b/test/fixtures/custom-repository-heroku-22/Aptfile new file mode 100644 index 0000000..e595b46 --- /dev/null +++ b/test/fixtures/custom-repository-heroku-22/Aptfile @@ -0,0 +1,2 @@ +:repo:deb http://us.archive.ubuntu.com/ubuntu/ jammy multiverse +fasttracker2 diff --git a/test/fixtures/package-names/Aptfile b/test/fixtures/package-names/Aptfile new file mode 100644 index 0000000..9ab34b4 --- /dev/null +++ b/test/fixtures/package-names/Aptfile @@ -0,0 +1,5 @@ +# single package +xmlsec1 + +# globbed package +mysql-client-* diff --git a/test/run b/test/run new file mode 100755 index 0000000..843f6d3 --- /dev/null +++ b/test/run @@ -0,0 +1,112 @@ +#!/usr/bin/env bash + +testCompilePackageNames() { + compile "package-names" + assertCaptured "Updating apt caches" + assertCaptured "Fetching .debs for xmlsec1" + assertCaptured "Fetching .debs for mysql-client-*" + assertCaptured "Installing xmlsec1" + assertCaptured "Installing mysql-client" + assertCaptured "Installing mysql-client-core" + assertCaptured "Writing profile script" + assertCaptured "Rewrite package-config files" +} + +testReportPackageNames() { + report "package-names" + assertCaptured "packages: \"mysql-client-*,xmlsec1\"" + assertNotCaptured "custom_packages" + assertNotCaptured "custom_repositories" + assertCapturedSuccess +} + +testCompileCustomPackageUrl() { + declare -A download_urls=( + [heroku-20]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb" + [heroku-22]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb" + ) + compile "custom-package-url-$STACK" + assertCaptured "Updating apt caches" + assertCaptured "Fetching ${download_urls[$STACK]}" + assertCaptured "Installing wkhtmltox" + assertCaptured "Writing profile script" + assertCaptured "Rewrite package-config files" +} + +testReportCustomPackageUrl() { + declare -A download_urls=( + [heroku-20]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb" + [heroku-22]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb" + ) + report 
"custom-package-url-$STACK" + assertNotCaptured "^packages" + assertCaptured "custom_packages: \"${download_urls[$STACK]}\"" + assertNotCaptured "custom_repositories" +} + +testCompileCustomRepository() { + declare -A ubuntu_release_names=( + [heroku-20]="focal" + [heroku-22]="jammy" + ) + compile "custom-repository-$STACK" + assertCaptured "Adding custom repositories" + assertCaptured "Updating apt caches" + assertCaptured "http://us.archive.ubuntu.com/ubuntu ${ubuntu_release_names[$STACK]}/multiverse amd64 Packages" + assertCaptured "Fetching .debs for fasttracker2" + assertCaptured "Installing fasttracker2" + assertCaptured "Writing profile script" + assertCaptured "Rewrite package-config files" +} + +testReportCustomRepository() { + declare -A ubuntu_release_names=( + [heroku-20]="focal" + [heroku-22]="jammy" + ) + report "custom-repository-$STACK" + assertCaptured "packages: \"fasttracker2\"" + assertNotCaptured "custom_packages" + assertCaptured "custom_repositories: \"deb http://us.archive.ubuntu.com/ubuntu/ ${ubuntu_release_names[$STACK]} multiverse\"" +} + +pushd "$(dirname 0)" >/dev/null || exit 1 +popd >/dev/null || exit 1 + +source "$(pwd)"/test/utils + +compile() { + default_process_types_cleanup + bp_dir=$(mktmpdir) + compile_dir=$(mktmpdir) + cp -a "$(pwd)"/* "${bp_dir}" + cp -a "${bp_dir}"/test/fixtures/"$1"/. "${compile_dir}" + capture "${bp_dir}"/bin/compile "${compile_dir}" "${2:-$(mktmpdir)}" "$3" +} + +report() { + default_process_types_cleanup + compile_dir=${1:-$(mktmpdir)} + cache_dir=${2:-$(mktmpdir)} + env_dir=${3:-$(mktmpdir)} + bp_dir=$(mktmpdir) + cp -a "$(pwd)"/* "${bp_dir}" + cp -a "${bp_dir}"/test/fixtures/"$1"/. "${compile_dir}" + capture "${bp_dir}"/bin/report "${compile_dir}" "${cache_dir}" "${env_dir}" +} + +mktmpdir() { + dir=$(mktemp -t testXXXXX) + rm -rf "$dir" + mkdir "$dir" + echo "$dir" +} + +default_process_types_cleanup() { + file="/tmp/default_process_types" + if [ -f "$file" ]; then + rm "$file" + fi +} + +source "$(pwd)"/test/shunit2 diff --git a/test/shunit2 b/test/shunit2 new file mode 100644 index 0000000..6239683 --- /dev/null +++ b/test/shunit2 @@ -0,0 +1,1343 @@ +#! /bin/sh +# vim:et:ft=sh:sts=2:sw=2 +# +# Copyright 2008-2020 Kate Ward. All Rights Reserved. +# Released under the Apache 2.0 license. +# http://www.apache.org/licenses/LICENSE-2.0 +# +# shUnit2 -- Unit testing framework for Unix shell scripts. +# https://github.com/kward/shunit2 +# +# Author: kate.ward@forestent.com (Kate Ward) +# +# shUnit2 is a xUnit based unit test framework for Bourne shell scripts. It is +# based on the popular JUnit unit testing framework for Java. +# +# $() are not fully portable (POSIX != portable). +# shellcheck disable=SC2006 +# expr may be antiquated, but it is the only solution in some cases. +# shellcheck disable=SC2003 + +# Return if shunit2 already loaded. +command [ -n "${SHUNIT_VERSION:-}" ] && exit 0 +SHUNIT_VERSION='2.1.8' + +# Return values that scripts can use. +SHUNIT_TRUE=0 +SHUNIT_FALSE=1 +SHUNIT_ERROR=2 + +# Logging functions. +_shunit_warn() { + ${__SHUNIT_CMD_ECHO_ESC} \ + "${__shunit_ansi_yellow}shunit2:WARN${__shunit_ansi_none} $*" >&2 +} +_shunit_error() { + ${__SHUNIT_CMD_ECHO_ESC} \ + "${__shunit_ansi_red}shunit2:ERROR${__shunit_ansi_none} $*" >&2 +} +_shunit_fatal() { + ${__SHUNIT_CMD_ECHO_ESC} \ + "${__shunit_ansi_red}shunit2:FATAL${__shunit_ansi_none} $*" >&2 + exit ${SHUNIT_ERROR} +} + +# Determine some reasonable command defaults. 
+__SHUNIT_CMD_ECHO_ESC='echo -e' +# shellcheck disable=SC2039 +command [ "`echo -e test`" = '-e test' ] && __SHUNIT_CMD_ECHO_ESC='echo' + +__SHUNIT_UNAME_S=`uname -s` +case "${__SHUNIT_UNAME_S}" in + BSD) __SHUNIT_CMD_EXPR='gexpr' ;; + *) __SHUNIT_CMD_EXPR='expr' ;; +esac +__SHUNIT_CMD_TPUT='tput' + +# Commands a user can override if needed. +SHUNIT_CMD_EXPR=${SHUNIT_CMD_EXPR:-${__SHUNIT_CMD_EXPR}} +SHUNIT_CMD_TPUT=${SHUNIT_CMD_TPUT:-${__SHUNIT_CMD_TPUT}} + +# Enable color output. Options are 'never', 'always', or 'auto'. +SHUNIT_COLOR=${SHUNIT_COLOR:-auto} + +# Specific shell checks. +if command [ -n "${ZSH_VERSION:-}" ]; then + setopt |grep "^shwordsplit$" >/dev/null + if command [ $? -ne ${SHUNIT_TRUE} ]; then + _shunit_fatal 'zsh shwordsplit option is required for proper operation' + fi + if command [ -z "${SHUNIT_PARENT:-}" ]; then + _shunit_fatal "zsh does not pass \$0 through properly. please declare \ +\"SHUNIT_PARENT=\$0\" before calling shUnit2" + fi +fi + +# +# Constants +# + +__SHUNIT_MODE_SOURCED='sourced' +__SHUNIT_MODE_STANDALONE='standalone' +__SHUNIT_PARENT=${SHUNIT_PARENT:-$0} + +# User provided test prefix to display in front of the name of the test being +# executed. Define by setting the SHUNIT_TEST_PREFIX variable. +__SHUNIT_TEST_PREFIX=${SHUNIT_TEST_PREFIX:-} + +# ANSI colors. +__SHUNIT_ANSI_NONE='\033[0m' +__SHUNIT_ANSI_RED='\033[1;31m' +__SHUNIT_ANSI_GREEN='\033[1;32m' +__SHUNIT_ANSI_YELLOW='\033[1;33m' +__SHUNIT_ANSI_CYAN='\033[1;36m' + +# Set the constants readonly. +__shunit_constants=`set |grep '^__SHUNIT_' |cut -d= -f1` +echo "${__shunit_constants}" |grep '^Binary file' >/dev/null && \ + __shunit_constants=`set |grep -a '^__SHUNIT_' |cut -d= -f1` +for __shunit_const in ${__shunit_constants}; do + if command [ -z "${ZSH_VERSION:-}" ]; then + readonly "${__shunit_const}" + else + case ${ZSH_VERSION} in + [123].*) readonly "${__shunit_const}" ;; + *) readonly -g "${__shunit_const}" # Declare readonly constants globally. + esac + fi +done +unset __shunit_const __shunit_constants + +# +# Internal variables. +# + +# Variables. +__shunit_lineno='' # Line number of executed test. +__shunit_mode=${__SHUNIT_MODE_SOURCED} # Operating mode. +__shunit_reportGenerated=${SHUNIT_FALSE} # Is report generated. +__shunit_script='' # Filename of unittest script (standalone mode). +__shunit_skip=${SHUNIT_FALSE} # Is skipping enabled. +__shunit_suite='' # Suite of tests to execute. +__shunit_clean=${SHUNIT_FALSE} # _shunit_cleanup() was already called. + +# ANSI colors (populated by _shunit_configureColor()). +__shunit_ansi_none='' +__shunit_ansi_red='' +__shunit_ansi_green='' +__shunit_ansi_yellow='' +__shunit_ansi_cyan='' + +# Counts of tests. +__shunit_testSuccess=${SHUNIT_TRUE} +__shunit_testsTotal=0 +__shunit_testsPassed=0 +__shunit_testsFailed=0 + +# Counts of asserts. +__shunit_assertsTotal=0 +__shunit_assertsPassed=0 +__shunit_assertsFailed=0 +__shunit_assertsSkipped=0 + +# +# Macros. +# + +# shellcheck disable=SC2016,SC2089 +_SHUNIT_LINENO_='eval __shunit_lineno=""; if command [ "${1:-}" = "--lineno" ]; then command [ -n "$2" ] && __shunit_lineno="[$2] "; shift 2; fi' + +#----------------------------------------------------------------------------- +# Assertion functions. +# + +# Assert that two values are equal to one another. 
+# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertEquals() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertEquals() requires two or three arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_expected_=$1 + shunit_actual_=$2 + + shunit_return=${SHUNIT_TRUE} + if command [ "${shunit_expected_}" = "${shunit_actual_}" ]; then + _shunit_assertPass + else + failNotEquals "${shunit_message_}" "${shunit_expected_}" "${shunit_actual_}" + shunit_return=${SHUNIT_FALSE} + fi + + unset shunit_message_ shunit_expected_ shunit_actual_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_EQUALS_='eval assertEquals --lineno "${LINENO:-}"' + +# Assert that two values are not equal to one another. +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNotEquals() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertNotEquals() requires two or three arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_expected_=$1 + shunit_actual_=$2 + + shunit_return=${SHUNIT_TRUE} + if command [ "${shunit_expected_}" != "${shunit_actual_}" ]; then + _shunit_assertPass + else + failSame "${shunit_message_}" "${shunit_expected_}" "${shunit_actual_}" + shunit_return=${SHUNIT_FALSE} + fi + + unset shunit_message_ shunit_expected_ shunit_actual_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_NOT_EQUALS_='eval assertNotEquals --lineno "${LINENO:-}"' + +# Assert that a container contains a content. +# +# Args: +# message: string: failure message [optional] +# container: string: container to analyze +# content: string: content to find +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertContains() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertContains() requires two or three arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_container_=$1 + shunit_content_=$2 + + shunit_return=${SHUNIT_TRUE} + if echo "$shunit_container_" | grep -F -- "$shunit_content_" > /dev/null; then + _shunit_assertPass + else + failNotFound "${shunit_message_}" "${shunit_content_}" + shunit_return=${SHUNIT_FALSE} + fi + + unset shunit_message_ shunit_container_ shunit_content_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_CONTAINS_='eval assertContains --lineno "${LINENO:-}"' + +# Assert that a container does not contain a content. 
+# +# Args: +# message: string: failure message [optional] +# container: string: container to analyze +# content: string: content to look for +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNotContains() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertNotContains() requires two or three arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_container_=$1 + shunit_content_=$2 + + shunit_return=${SHUNIT_TRUE} + if echo "$shunit_container_" | grep -F -- "$shunit_content_" > /dev/null; then + failFound "${shunit_message_}" "${shunit_content_}" + shunit_return=${SHUNIT_FALSE} + else + _shunit_assertPass + fi + + unset shunit_message_ shunit_container_ shunit_content_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_NOT_CONTAINS_='eval assertNotContains --lineno "${LINENO:-}"' + +# Assert that a value is null (i.e. an empty string) +# +# Args: +# message: string: failure message [optional] +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNull() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 1 -o $# -gt 2 ]; then + _shunit_error "assertNull() requires one or two arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + assertTrue "${shunit_message_}" "[ -z '$1' ]" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_NULL_='eval assertNull --lineno "${LINENO:-}"' + +# Assert that a value is not null (i.e. a non-empty string) +# +# Args: +# message: string: failure message [optional] +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNotNull() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -gt 2 ]; then # allowing 0 arguments as $1 might actually be null + _shunit_error "assertNotNull() requires one or two arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_actual_=`_shunit_escapeCharactersInString "${1:-}"` + test -n "${shunit_actual_}" + assertTrue "${shunit_message_}" $? + shunit_return=$? + + unset shunit_actual_ shunit_message_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_NOT_NULL_='eval assertNotNull --lineno "${LINENO:-}"' + +# Assert that two values are the same (i.e. equal to one another). 
+# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertSame() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertSame() requires two or three arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + assertEquals "${shunit_message_}" "$1" "$2" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_SAME_='eval assertSame --lineno "${LINENO:-}"' + +# Assert that two values are not the same (i.e. not equal to one another). +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNotSame() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertNotSame() requires two or three arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_:-}$1" + shift + fi + assertNotEquals "${shunit_message_}" "$1" "$2" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_NOT_SAME_='eval assertNotSame --lineno "${LINENO:-}"' + +# Assert that a value or shell test condition is true. +# +# In shell, a value of 0 is true and a non-zero value is false. Any integer +# value passed can thereby be tested. +# +# Shell supports much more complicated tests though, and a means to support +# them was needed. As such, this function tests that conditions are true or +# false through evaluation rather than just looking for a true or false. +# +# The following test will succeed: +# assertTrue 0 +# assertTrue "[ 34 -gt 23 ]" +# The following test will fail with a message: +# assertTrue 123 +# assertTrue "test failed" "[ -r '/non/existent/file' ]" +# +# Args: +# message: string: failure message [optional] +# condition: string: integer value or shell conditional statement +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertTrue() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 1 -o $# -gt 2 ]; then + _shunit_error "assertTrue() takes one or two arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_condition_=$1 + + # See if condition is an integer, i.e. a return value. + shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'` + shunit_return=${SHUNIT_TRUE} + if command [ -z "${shunit_condition_}" ]; then + # Null condition. + shunit_return=${SHUNIT_FALSE} + elif command [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ] + then + # Possible return value. Treating 0 as true, and non-zero as false. + command [ "${shunit_condition_}" -ne 0 ] && shunit_return=${SHUNIT_FALSE} + else + # Hopefully... a condition. + ( eval "${shunit_condition_}" ) >/dev/null 2>&1 + command [ $? 
-ne 0 ] && shunit_return=${SHUNIT_FALSE} + fi + + # Record the test. + if command [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then + _shunit_assertPass + else + _shunit_assertFail "${shunit_message_}" + fi + + unset shunit_message_ shunit_condition_ shunit_match_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_TRUE_='eval assertTrue --lineno "${LINENO:-}"' + +# Assert that a value or shell test condition is false. +# +# In shell, a value of 0 is true and a non-zero value is false. Any integer +# value passed can thereby be tested. +# +# Shell supports much more complicated tests though, and a means to support +# them was needed. As such, this function tests that conditions are true or +# false through evaluation rather than just looking for a true or false. +# +# The following test will succeed: +# assertFalse 1 +# assertFalse "[ 'apples' = 'oranges' ]" +# The following test will fail with a message: +# assertFalse 0 +# assertFalse "test failed" "[ 1 -eq 1 -a 2 -eq 2 ]" +# +# Args: +# message: string: failure message [optional] +# condition: string: integer value or shell conditional statement +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertFalse() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 1 -o $# -gt 2 ]; then + _shunit_error "assertFalse() requires one or two arguments; $# given" + _shunit_assertFail + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_condition_=$1 + + # See if condition is an integer, i.e. a return value. + shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'` + shunit_return=${SHUNIT_TRUE} + if command [ -z "${shunit_condition_}" ]; then + # Null condition. + shunit_return=${SHUNIT_FALSE} + elif command [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ] + then + # Possible return value. Treating 0 as true, and non-zero as false. + command [ "${shunit_condition_}" -eq 0 ] && shunit_return=${SHUNIT_FALSE} + else + # Hopefully... a condition. + ( eval "${shunit_condition_}" ) >/dev/null 2>&1 + command [ $? -eq 0 ] && shunit_return=${SHUNIT_FALSE} + fi + + # Record the test. + if command [ "${shunit_return}" -eq "${SHUNIT_TRUE}" ]; then + _shunit_assertPass + else + _shunit_assertFail "${shunit_message_}" + fi + + unset shunit_message_ shunit_condition_ shunit_match_ + return "${shunit_return}" +} +# shellcheck disable=SC2016,SC2034 +_ASSERT_FALSE_='eval assertFalse --lineno "${LINENO:-}"' + +#----------------------------------------------------------------------------- +# Failure functions. +# + +# Records a test failure. +# +# Args: +# message: string: failure message [optional] +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +fail() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -gt 1 ]; then + _shunit_error "fail() requires zero or one arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 1 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + + _shunit_assertFail "${shunit_message_}" + + unset shunit_message_ + return ${SHUNIT_FALSE} +} +# shellcheck disable=SC2016,SC2034 +_FAIL_='eval fail --lineno "${LINENO:-}"' + +# Records a test failure, stating two values were not equal. 
+# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failNotEquals() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "failNotEquals() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_expected_=$1 + shunit_actual_=$2 + + shunit_message_=${shunit_message_%% } + _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected:<${shunit_expected_}> but was:<${shunit_actual_}>" + + unset shunit_message_ shunit_expected_ shunit_actual_ + return ${SHUNIT_FALSE} +} +# shellcheck disable=SC2016,SC2034 +_FAIL_NOT_EQUALS_='eval failNotEquals --lineno "${LINENO:-}"' + +# Records a test failure, stating a value was found. +# +# Args: +# message: string: failure message [optional] +# content: string: found value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failFound() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 1 -o $# -gt 2 ]; then + _shunit_error "failFound() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + + shunit_message_=${shunit_message_%% } + _shunit_assertFail "${shunit_message_:+${shunit_message_} }Found" + + unset shunit_message_ + return ${SHUNIT_FALSE} +} +# shellcheck disable=SC2016,SC2034 +_FAIL_FOUND_='eval failFound --lineno "${LINENO:-}"' + +# Records a test failure, stating a content was not found. +# +# Args: +# message: string: failure message [optional] +# content: string: content not found +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failNotFound() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 1 -o $# -gt 2 ]; then + _shunit_error "failNotFound() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_content_=$1 + + shunit_message_=${shunit_message_%% } + _shunit_assertFail "${shunit_message_:+${shunit_message_} }Not found:<${shunit_content_}>" + + unset shunit_message_ shunit_content_ + return ${SHUNIT_FALSE} +} +# shellcheck disable=SC2016,SC2034 +_FAIL_NOT_FOUND_='eval failNotFound --lineno "${LINENO:-}"' + +# Records a test failure, stating two values should have been the same. 
+# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failSame() +{ + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "failSame() requires two or three arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + + shunit_message_=${shunit_message_%% } + _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected not same" + + unset shunit_message_ + return ${SHUNIT_FALSE} +} +# shellcheck disable=SC2016,SC2034 +_FAIL_SAME_='eval failSame --lineno "${LINENO:-}"' + +# Records a test failure, stating two values were not equal. +# +# This is functionally equivalent to calling failNotEquals(). +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failNotSame() { + # shellcheck disable=SC2090 + ${_SHUNIT_LINENO_} + if command [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "failNotSame() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if command [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + failNotEquals "${shunit_message_}" "$1" "$2" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +# shellcheck disable=SC2016,SC2034 +_FAIL_NOT_SAME_='eval failNotSame --lineno "${LINENO:-}"' + +#----------------------------------------------------------------------------- +# Skipping functions. +# + +# Force remaining assert and fail functions to be "skipped". +# +# This function forces the remaining assert and fail functions to be "skipped", +# i.e. they will have no effect. Each function skipped will be recorded so that +# the total of asserts and fails will not be altered. +# +# Args: +# None +startSkipping() { __shunit_skip=${SHUNIT_TRUE}; } + +# Resume the normal recording behavior of assert and fail calls. +# +# Args: +# None +endSkipping() { __shunit_skip=${SHUNIT_FALSE}; } + +# Returns the state of assert and fail call skipping. +# +# Args: +# None +# Returns: +# boolean: (TRUE/FALSE constant) +isSkipping() { return ${__shunit_skip}; } + +#----------------------------------------------------------------------------- +# Suite functions. +# + +# Stub. This function should contains all unit test calls to be made. +# +# DEPRECATED (as of 2.1.0) +# +# This function can be optionally overridden by the user in their test suite. +# +# If this function exists, it will be called when shunit2 is sourced. If it +# does not exist, shunit2 will search the parent script for all functions +# beginning with the word 'test', and they will be added dynamically to the +# test suite. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#suite() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +# Adds a function name to the list of tests schedule for execution. +# +# This function should only be called from within the suite() function. 
+# +# Args: +# function: string: name of a function to add to current unit test suite +suite_addTest() { + shunit_func_=${1:-} + + __shunit_suite="${__shunit_suite:+${__shunit_suite} }${shunit_func_}" + __shunit_testsTotal=`expr ${__shunit_testsTotal} + 1` + + unset shunit_func_ +} + +# Stub. This function will be called once before any tests are run. +# +# Common one-time environment preparation tasks shared by all tests can be +# defined here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#oneTimeSetUp() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +# Stub. This function will be called once after all tests are finished. +# +# Common one-time environment cleanup tasks shared by all tests can be defined +# here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#oneTimeTearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +# Stub. This function will be called before each test is run. +# +# Common environment preparation tasks shared by all tests can be defined here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#setUp() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +# Note: see _shunit_mktempFunc() for actual implementation +# Stub. This function will be called after each test is run. +# +# Common environment cleanup tasks shared by all tests can be defined here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#tearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +#------------------------------------------------------------------------------ +# Internal shUnit2 functions. +# + +# Create a temporary directory to store various run-time files in. +# +# This function is a cross-platform temporary directory creation tool. Not all +# OSes have the `mktemp` function, so one is included here. +# +# Args: +# None +# Outputs: +# string: the temporary directory that was created +_shunit_mktempDir() { + # Try the standard `mktemp` function. + ( exec mktemp -dqt shunit.XXXXXX 2>/dev/null ) && return + + # The standard `mktemp` didn't work. Use our own. + # shellcheck disable=SC2039 + if command [ -r '/dev/urandom' -a -x '/usr/bin/od' ]; then + _shunit_random_=`/usr/bin/od -vAn -N4 -tx4 "${_shunit_file_}" +#! /bin/sh +exit ${SHUNIT_TRUE} +EOF + command chmod +x "${_shunit_file_}" + done + + unset _shunit_file_ +} + +# Final cleanup function to leave things as we found them. +# +# Besides removing the temporary directory, this function is in charge of the +# final exit code of the unit test. The exit code is based on how the script +# was ended (e.g. normal exit, or via Ctrl-C). +# +# Args: +# name: string: name of the trap called (specified when trap defined) +_shunit_cleanup() { + _shunit_name_=$1 + + case "${_shunit_name_}" in + EXIT) ;; + INT) _shunit_signal_=130 ;; # 2+128 + TERM) _shunit_signal_=143 ;; # 15+128 + *) + _shunit_error "unrecognized trap value (${_shunit_name_})" + _shunit_signal_=0 + ;; + esac + if command [ "${_shunit_name_}" != 'EXIT' ]; then + _shunit_warn "trapped and now handling the (${_shunit_name_}) signal" + fi + + # Do our work. + if command [ ${__shunit_clean} -eq ${SHUNIT_FALSE} ]; then + # Ensure tear downs are only called once. 
+ __shunit_clean=${SHUNIT_TRUE} + + tearDown + command [ $? -eq ${SHUNIT_TRUE} ] \ + || _shunit_warn "tearDown() returned non-zero return code." + oneTimeTearDown + command [ $? -eq ${SHUNIT_TRUE} ] \ + || _shunit_warn "oneTimeTearDown() returned non-zero return code." + + command rm -fr "${__shunit_tmpDir}" + fi + + if command [ "${_shunit_name_}" != 'EXIT' ]; then + # Handle all non-EXIT signals. + trap - 0 # Disable EXIT trap. + exit ${_shunit_signal_} + elif command [ ${__shunit_reportGenerated} -eq ${SHUNIT_FALSE} ]; then + _shunit_assertFail 'unknown failure encountered running a test' + _shunit_generateReport + exit ${SHUNIT_ERROR} + fi + + unset _shunit_name_ _shunit_signal_ +} + +# configureColor based on user color preference. +# +# Args: +# color: string: color mode (one of `always`, `auto`, or `none`). +_shunit_configureColor() { + _shunit_color_=${SHUNIT_FALSE} # By default, no color. + case $1 in + 'always') _shunit_color_=${SHUNIT_TRUE} ;; + 'auto') + command [ "`_shunit_colors`" -ge 8 ] && _shunit_color_=${SHUNIT_TRUE} + ;; + 'none') ;; + *) _shunit_fatal "unrecognized color option '$1'" ;; + esac + + case ${_shunit_color_} in + ${SHUNIT_TRUE}) + __shunit_ansi_none=${__SHUNIT_ANSI_NONE} + __shunit_ansi_red=${__SHUNIT_ANSI_RED} + __shunit_ansi_green=${__SHUNIT_ANSI_GREEN} + __shunit_ansi_yellow=${__SHUNIT_ANSI_YELLOW} + __shunit_ansi_cyan=${__SHUNIT_ANSI_CYAN} + ;; + ${SHUNIT_FALSE}) + __shunit_ansi_none='' + __shunit_ansi_red='' + __shunit_ansi_green='' + __shunit_ansi_yellow='' + __shunit_ansi_cyan='' + ;; + esac + + unset _shunit_color_ _shunit_tput_ +} + +# colors returns the number of supported colors for the TERM. +_shunit_colors() { + _shunit_tput_=`${SHUNIT_CMD_TPUT} colors 2>/dev/null` + if command [ $? -eq 0 ]; then + echo "${_shunit_tput_}" + else + echo 16 + fi + unset _shunit_tput_ +} + +# The actual running of the tests happens here. +# +# Args: +# None +_shunit_execSuite() { + for _shunit_test_ in ${__shunit_suite}; do + __shunit_testSuccess=${SHUNIT_TRUE} + + # Disable skipping. + endSkipping + + # Execute the per-test setup function. + setUp + command [ $? -eq ${SHUNIT_TRUE} ] \ + || _shunit_fatal "setup() returned non-zero return code." + + # Execute the test. + echo "${__SHUNIT_TEST_PREFIX}${_shunit_test_}" + eval "${_shunit_test_}" + if command [ $? -ne ${SHUNIT_TRUE} ]; then + _shunit_error "${_shunit_test_}() returned non-zero return code." + __shunit_testSuccess=${SHUNIT_ERROR} + _shunit_incFailedCount + fi + + # Execute the per-test tear-down function. + tearDown + command [ $? -eq ${SHUNIT_TRUE} ] \ + || _shunit_fatal "tearDown() returned non-zero return code." + + # Update stats. + if command [ ${__shunit_testSuccess} -eq ${SHUNIT_TRUE} ]; then + __shunit_testsPassed=`expr ${__shunit_testsPassed} + 1` + else + __shunit_testsFailed=`expr ${__shunit_testsFailed} + 1` + fi + done + + unset _shunit_test_ +} + +# Generates the user friendly report with appropriate OK/FAILED message. +# +# Args: +# None +# Output: +# string: the report of successful and failed tests, as well as totals. +_shunit_generateReport() { + command [ "${__shunit_reportGenerated}" -eq ${SHUNIT_TRUE} ] && return + + _shunit_ok_=${SHUNIT_TRUE} + + # If no exit code was provided, determine an appropriate one. 
+ command [ "${__shunit_testsFailed}" -gt 0 \ + -o ${__shunit_testSuccess} -eq ${SHUNIT_FALSE} ] \ + && _shunit_ok_=${SHUNIT_FALSE} + + echo + _shunit_msg_="Ran ${__shunit_ansi_cyan}${__shunit_testsTotal}${__shunit_ansi_none}" + if command [ "${__shunit_testsTotal}" -eq 1 ]; then + ${__SHUNIT_CMD_ECHO_ESC} "${_shunit_msg_} test." + else + ${__SHUNIT_CMD_ECHO_ESC} "${_shunit_msg_} tests." + fi + + if command [ ${_shunit_ok_} -eq ${SHUNIT_TRUE} ]; then + _shunit_msg_="${__shunit_ansi_green}OK${__shunit_ansi_none}" + command [ "${__shunit_assertsSkipped}" -gt 0 ] \ + && _shunit_msg_="${_shunit_msg_} (${__shunit_ansi_yellow}skipped=${__shunit_assertsSkipped}${__shunit_ansi_none})" + else + _shunit_msg_="${__shunit_ansi_red}FAILED${__shunit_ansi_none}" + _shunit_msg_="${_shunit_msg_} (${__shunit_ansi_red}failures=${__shunit_assertsFailed}${__shunit_ansi_none}" + command [ "${__shunit_assertsSkipped}" -gt 0 ] \ + && _shunit_msg_="${_shunit_msg_},${__shunit_ansi_yellow}skipped=${__shunit_assertsSkipped}${__shunit_ansi_none}" + _shunit_msg_="${_shunit_msg_})" + fi + + echo + ${__SHUNIT_CMD_ECHO_ESC} "${_shunit_msg_}" + __shunit_reportGenerated=${SHUNIT_TRUE} + + unset _shunit_msg_ _shunit_ok_ +} + +# Test for whether a function should be skipped. +# +# Args: +# None +# Returns: +# boolean: whether the test should be skipped (TRUE/FALSE constant) +_shunit_shouldSkip() { + command [ ${__shunit_skip} -eq ${SHUNIT_FALSE} ] && return ${SHUNIT_FALSE} + _shunit_assertSkip +} + +# Records a successful test. +# +# Args: +# None +_shunit_assertPass() { + __shunit_assertsPassed=`expr ${__shunit_assertsPassed} + 1` + __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1` +} + +# Records a test failure. +# +# Args: +# message: string: failure message to provide user +_shunit_assertFail() { + __shunit_testSuccess=${SHUNIT_FALSE} + _shunit_incFailedCount + + \[ $# -gt 0 ] && ${__SHUNIT_CMD_ECHO_ESC} \ + "${__shunit_ansi_red}ASSERT:${__shunit_ansi_none}$*" +} + +# Increment the count of failed asserts. +# +# Args: +# none +_shunit_incFailedCount() { + __shunit_assertsFailed=`expr "${__shunit_assertsFailed}" + 1` + __shunit_assertsTotal=`expr "${__shunit_assertsTotal}" + 1` +} + + +# Records a skipped test. +# +# Args: +# None +_shunit_assertSkip() { + __shunit_assertsSkipped=`expr "${__shunit_assertsSkipped}" + 1` + __shunit_assertsTotal=`expr "${__shunit_assertsTotal}" + 1` +} + +# Prepare a script filename for sourcing. +# +# Args: +# script: string: path to a script to source +# Returns: +# string: filename prefixed with ./ (if necessary) +_shunit_prepForSourcing() { + _shunit_script_=$1 + case "${_shunit_script_}" in + /*|./*) echo "${_shunit_script_}" ;; + *) echo "./${_shunit_script_}" ;; + esac + unset _shunit_script_ +} + +# Escape a character in a string. +# +# Args: +# c: string: unescaped character +# s: string: to escape character in +# Returns: +# string: with escaped character(s) +_shunit_escapeCharInStr() { + command [ -n "$2" ] || return # No point in doing work on an empty string. + + # Note: using shorter variable names to prevent conflicts with + # _shunit_escapeCharactersInString(). + _shunit_c_=$1 + _shunit_s_=$2 + + # Escape the character. + # shellcheck disable=SC1003,SC2086 + echo ''${_shunit_s_}'' |command sed 's/\'${_shunit_c_}'/\\\'${_shunit_c_}'/g' + + unset _shunit_c_ _shunit_s_ +} + +# Escape a character in a string. 
+# +# Args: +# str: string: to escape characters in +# Returns: +# string: with escaped character(s) +_shunit_escapeCharactersInString() { + command [ -n "$1" ] || return # No point in doing work on an empty string. + + _shunit_str_=$1 + + # Note: using longer variable names to prevent conflicts with + # _shunit_escapeCharInStr(). + for _shunit_char_ in '"' '$' "'" '`'; do + _shunit_str_=`_shunit_escapeCharInStr "${_shunit_char_}" "${_shunit_str_}"` + done + + echo "${_shunit_str_}" + unset _shunit_char_ _shunit_str_ +} + +# Extract list of functions to run tests against. +# +# Args: +# script: string: name of script to extract functions from +# Returns: +# string: of function names +_shunit_extractTestFunctions() { + _shunit_script_=$1 + + # Extract the lines with test function names, strip of anything besides the + # function name, and output everything on a single line. + _shunit_regex_='^\s*((function test[A-Za-z0-9_-]*)|(test[A-Za-z0-9_-]* *\(\)))' + # shellcheck disable=SC2196 + egrep "${_shunit_regex_}" "${_shunit_script_}" \ + |command sed 's/^[^A-Za-z0-9_-]*//;s/^function //;s/\([A-Za-z0-9_-]*\).*/\1/g' \ + |xargs + + unset _shunit_regex_ _shunit_script_ +} + +#------------------------------------------------------------------------------ +# Main. +# + +# Determine the operating mode. +if command [ $# -eq 0 -o "${1:-}" = '--' ]; then + __shunit_script=${__SHUNIT_PARENT} + __shunit_mode=${__SHUNIT_MODE_SOURCED} +else + __shunit_script=$1 + command [ -r "${__shunit_script}" ] || \ + _shunit_fatal "unable to read from ${__shunit_script}" + __shunit_mode=${__SHUNIT_MODE_STANDALONE} +fi + +# Create a temporary storage location. +__shunit_tmpDir=`_shunit_mktempDir` + +# Provide a public temporary directory for unit test scripts. +# TODO(kward): document this. +SHUNIT_TMPDIR="${__shunit_tmpDir}/tmp" +command mkdir "${SHUNIT_TMPDIR}" + +# Setup traps to clean up after ourselves. +trap '_shunit_cleanup EXIT' 0 +trap '_shunit_cleanup INT' 2 +trap '_shunit_cleanup TERM' 15 + +# Create phantom functions to work around issues with Cygwin. +_shunit_mktempFunc +PATH="${__shunit_tmpDir}:${PATH}" + +# Make sure phantom functions are executable. This will bite if `/tmp` (or the +# current `$TMPDIR`) points to a path on a partition that was mounted with the +# 'noexec' option. The noexec command was created with `_shunit_mktempFunc()`. +noexec 2>/dev/null || _shunit_fatal \ + 'Please declare TMPDIR with path on partition with exec permission.' + +# We must manually source the tests in standalone mode. +if command [ "${__shunit_mode}" = "${__SHUNIT_MODE_STANDALONE}" ]; then + # shellcheck disable=SC1090 + command . "`_shunit_prepForSourcing \"${__shunit_script}\"`" +fi + +# Configure default output coloring behavior. +_shunit_configureColor "${SHUNIT_COLOR}" + +# Execute the oneTimeSetUp function (if it exists). +oneTimeSetUp +command [ $? -eq ${SHUNIT_TRUE} ] \ + || _shunit_fatal "oneTimeSetUp() returned non-zero return code." + +# Command line selected tests or suite selected tests +if command [ "$#" -ge 2 ]; then + # Argument $1 is either the filename of tests or '--'; either way, skip it. + shift + # Remaining arguments ($2 .. $#) are assumed to be test function names. + # Interate through all remaining args in "$@" in a POSIX (likely portable) way. 
+ # Helpful tip: https://unix.stackexchange.com/questions/314032/how-to-use-arguments-like-1-2-in-a-for-loop + for _shunit_arg_ do + suite_addTest "${_shunit_arg_}" + done + unset _shunit_arg_ +else + # Execute the suite function defined in the parent test script. + # DEPRECATED as of 2.1.0. + suite +fi + +# If no tests or suite specified, dynamically build a list of functions. +if command [ -z "${__shunit_suite}" ]; then + shunit_funcs_=`_shunit_extractTestFunctions "${__shunit_script}"` + for shunit_func_ in ${shunit_funcs_}; do + suite_addTest "${shunit_func_}" + done +fi +unset shunit_func_ shunit_funcs_ + +# Execute the suite of unit tests. +_shunit_execSuite + +# Execute the oneTimeTearDown function (if it exists). +oneTimeTearDown +command [ $? -eq ${SHUNIT_TRUE} ] \ + || _shunit_fatal "oneTimeTearDown() returned non-zero return code." + +# Generate a report summary. +_shunit_generateReport + +# That's it folks. +command [ "${__shunit_testsFailed}" -eq 0 ] +exit $? diff --git a/test/utils b/test/utils new file mode 100644 index 0000000..966bc2b --- /dev/null +++ b/test/utils @@ -0,0 +1,212 @@ +#!/bin/sh + +# taken from +# https://github.com/ryanbrainard/heroku-buildpack-testrunner/blob/master/lib/test_utils.sh + +oneTimeSetUp() +{ + TEST_SUITE_CACHE="$(mktemp -d ${SHUNIT_TMPDIR}/test_suite_cache.XXXX)" +} + +oneTimeTearDown() +{ + rm -rf ${TEST_SUITE_CACHE} +} + +setUp() +{ + OUTPUT_DIR="$(mktemp -d ${SHUNIT_TMPDIR}/output.XXXX)" + STD_OUT="${OUTPUT_DIR}/stdout" + STD_ERR="${OUTPUT_DIR}/stderr" + BUILD_DIR="${OUTPUT_DIR}/build" + CACHE_DIR="${OUTPUT_DIR}/cache" + mkdir -p ${OUTPUT_DIR} + mkdir -p ${BUILD_DIR} + mkdir -p ${CACHE_DIR} +} + +tearDown() +{ + rm -rf ${OUTPUT_DIR} +} + +capture() +{ + resetCapture + + LAST_COMMAND="$@" + + "$@" >${STD_OUT} 2>${STD_ERR} + RETURN=$? + rtrn=${RETURN} # deprecated +} + +resetCapture() +{ + if [ -f ${STD_OUT} ]; then + rm ${STD_OUT} + fi + + if [ -f ${STD_ERR} ]; then + rm ${STD_ERR} + fi + + unset LAST_COMMAND + unset RETURN + unset rtrn # deprecated +} + +detect() +{ + capture ${BUILDPACK_HOME}/bin/detect ${BUILD_DIR} +} + +compile() +{ + capture ${BUILDPACK_HOME}/bin/compile ${BUILD_DIR} ${CACHE_DIR} +} + +release() +{ + capture ${BUILDPACK_HOME}/bin/release ${BUILD_DIR} +} + +assertCapturedEquals() +{ + assertEquals "$@" "$(cat ${STD_OUT})" +} + +assertCapturedNotEquals() +{ + assertNotEquals "$@" "$(cat ${STD_OUT})" +} + +assertCaptured() +{ + assertFileContains "$@" "${STD_OUT}" +} + +assertNotCaptured() +{ + assertFileNotContains "$@" "${STD_OUT}" +} + +assertCapturedSuccess() +{ + assertEquals "Expected captured exit code to be 0; was <${RETURN}>" "0" "${RETURN}" + assertEquals "Expected STD_ERR to be empty; was <$(cat ${STD_ERR})>" "" "$(cat ${STD_ERR})" +} + +# assertCapturedError [[expectedErrorCode] expectedErrorMsg] +assertCapturedError() +{ + if [ $# -gt 1 ]; then + local expectedErrorCode=${1} + shift + fi + + local expectedErrorMsg=${1:-""} + + if [ -z ${expectedErrorCode} ]; then + assertTrue "Expected captured exit code to be greater than 0; was <${RETURN}>" "[ ${RETURN} -gt 0 ]" + else + assertTrue "Expected captured exit code to be <${expectedErrorCode}>; was <${RETURN}>" "[ ${RETURN} -eq ${expectedErrorCode} ]" + fi + + if [ "${expectedErrorMsg}" != "" ]; then + assertFileContains "Expected STD_ERR to contain error <${expectedErrorMsg}>" "${expectedErrorMsg}" "${STD_ERR}" + fi +} + +_assertContains() +{ + if [ 5 -eq $# ]; then + local msg=$1 + shift + elif [ ! 
4 -eq $# ]; then + fail "Expected 4 or 5 parameters; Receieved $# parameters" + fi + + local needle=$1 + local haystack=$2 + local expectation=$3 + local haystack_type=$4 + + case "${haystack_type}" in + "file") grep -q -F -e "${needle}" ${haystack} ;; + "text") echo "${haystack}" | grep -q -F -e "${needle}" ;; + esac + + if [ "${expectation}" != "$?" ]; then + case "${expectation}" in + 0) default_msg="Expected <${haystack}> to contain <${needle}>" ;; + 1) default_msg="Did not expect <${haystack}> to contain <${needle}>" ;; + esac + + fail "${msg:-${default_msg}}" + fi +} + +assertFileContains() +{ + _assertContains "$@" 0 "file" +} + +assertFileNotContains() +{ + _assertContains "$@" 1 "file" +} + +assertFileContainsMatch() +{ + local needle=$1 + local haystack=$2 + + grep -q -E -e "${needle}" ${haystack} + if [ "$?" != 0 ]; then + fail "Expected <${haystack}> to contain <${needle}>" + fi +} + +command_exists () { + type "$1" > /dev/null 2>&1 ; +} + +assertFileMD5() +{ + expectedHash=$1 + filename=$2 + + if command_exists "md5sum"; then + md5_cmd="md5sum ${filename}" + expected_md5_cmd_output="${expectedHash} ${filename}" + elif command_exists "md5"; then + md5_cmd="md5 ${filename}" + expected_md5_cmd_output="MD5 (${filename}) = ${expectedHash}" + else + fail "no suitable MD5 hashing command found on this system" + fi + + assertEquals "${expected_md5_cmd_output}" "$(${md5_cmd})" +} + +assertDirectoryExists() { + if [[ ! -e "$1" ]]; then + fail "$1 does not exist" + fi + if [[ ! -d $1 ]]; then + fail "$1 is not a directory" + fi +} + +assertFileExists() +{ + filename=$1 + assertTrue "$filename doesn't exist" "[[ -e $filename ]]" +} + +assertFileDoesNotExist() +{ + filename=$1 + assertTrue "$filename exists" "[[ ! -e $filename ]]" +} From 35d00ed22d596ee6a878520b06596094bd494b74 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 28 Mar 2024 10:44:41 -0300 Subject: [PATCH 19/25] Redirect stderr to stdout for apt-get update (#122) --- bin/compile | 2 +- test/run | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/bin/compile b/bin/compile index 1691eea..ab759af 100755 --- a/bin/compile +++ b/bin/compile @@ -81,7 +81,7 @@ APT_OPTIONS=("-o" "debug::nolocking=true" "-o" "dir::cache=$APT_CACHE_DIR" "-o" APT_OPTIONS+=("-o" "dir::etc::sourcelist=$APT_SOURCES" "-o" "dir::etc::sourceparts=/dev/null") topic "Updating apt caches" -apt-get "${APT_OPTIONS[@]}" update | indent +apt-get "${APT_OPTIONS[@]}" update 2>&1 | indent for PACKAGE in $(cat $BUILD_DIR/$APT_FILE_MANIFEST | grep -v -s -e '^#' | grep -v -s -e "^:repo:"); do if [[ $PACKAGE == *deb ]]; then diff --git a/test/run b/test/run index 843f6d3..4b90157 100755 --- a/test/run +++ b/test/run @@ -10,6 +10,7 @@ testCompilePackageNames() { assertCaptured "Installing mysql-client-core" assertCaptured "Writing profile script" assertCaptured "Rewrite package-config files" + assertCapturedSuccess } testReportPackageNames() { @@ -31,6 +32,7 @@ testCompileCustomPackageUrl() { assertCaptured "Installing wkhtmltox" assertCaptured "Writing profile script" assertCaptured "Rewrite package-config files" + assertCapturedSuccess } testReportCustomPackageUrl() { @@ -42,6 +44,7 @@ testReportCustomPackageUrl() { assertNotCaptured "^packages" assertCaptured "custom_packages: \"${download_urls[$STACK]}\"" assertNotCaptured "custom_repositories" + assertCapturedSuccess } testCompileCustomRepository() { @@ -57,6 +60,7 @@ testCompileCustomRepository() { assertCaptured "Installing fasttracker2" assertCaptured "Writing profile script" assertCaptured 
"Rewrite package-config files" + assertCapturedSuccess } testReportCustomRepository() { @@ -68,6 +72,7 @@ testReportCustomRepository() { assertCaptured "packages: \"fasttracker2\"" assertNotCaptured "custom_packages" assertCaptured "custom_repositories: \"deb http://us.archive.ubuntu.com/ubuntu/ ${ubuntu_release_names[$STACK]} multiverse\"" + assertCapturedSuccess } pushd "$(dirname 0)" >/dev/null || exit 1 From 187bf6b74132499057a27eb3e0811dc6466aa77f Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 28 Mar 2024 10:47:56 -0300 Subject: [PATCH 20/25] Enable shellcheck and fix warnings (#123) --- .github/workflows/ci.yml | 9 +++++++++ Makefile | 3 +++ bin/compile | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1ef2c49..aaf0c67 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,3 +23,12 @@ jobs: uses: actions/checkout@v4 - name: Functional tests on heroku:${{ matrix.stack_number }}-build run: test/run + + shell-lint: + runs-on: ubuntu-22.04 + container: + image: koalaman/shellcheck-alpine:v0.9.0 + steps: + - uses: actions/checkout@v4 + - name: shellcheck + run: shellcheck -x bin/compile bin/detect bin/release bin/report diff --git a/Makefile b/Makefile index 31bb73c..cc13917 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,8 @@ test: heroku-22-build heroku-20-build +shellcheck: + @shellcheck -x bin/compile bin/detect bin/release bin/report + heroku-22-build: @echo "Running tests in docker (heroku-22-build)..." @docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-22" heroku/heroku:22-build bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;' diff --git a/bin/compile b/bin/compile index ab759af..df9c631 100755 --- a/bin/compile +++ b/bin/compile @@ -94,7 +94,7 @@ for PACKAGE in $(cat $BUILD_DIR/$APT_FILE_MANIFEST | grep -v -s -e '^#' | grep - topic "Fetching .debs for $PACKAGE" apt-get "${APT_OPTIONS[@]}" -y "${APT_FORCE_YES[@]}" -d install --reinstall "$PACKAGE" | indent fi -done +done < <(grep --invert-match -e "^#" -e "^\s*$" -e "^:repo:" "${BUILD_DIR}/Aptfile") mkdir -p "$BUILD_DIR/.apt" From 3e6f212b4707ed1911bab88a0da6f36a9496f14f Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 28 Mar 2024 10:50:21 -0300 Subject: [PATCH 21/25] Allow multiple packages on a single Aptfile line (#124) --- bin/compile | 7 ++++++- bin/report | 3 --- test/fixtures/package-names/Aptfile | 3 +++ test/run | 5 ++++- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/bin/compile b/bin/compile index df9c631..17a78cd 100755 --- a/bin/compile +++ b/bin/compile @@ -92,7 +92,12 @@ for PACKAGE in $(cat $BUILD_DIR/$APT_FILE_MANIFEST | grep -v -s -e '^#' | grep - curl --silent --show-error --fail -L -z "$PACKAGE_FILE" -o "$PACKAGE_FILE" "$PACKAGE" 2>&1 | indent else topic "Fetching .debs for $PACKAGE" - apt-get "${APT_OPTIONS[@]}" -y "${APT_FORCE_YES[@]}" -d install --reinstall "$PACKAGE" | indent + # while this is not documented behavior, the Aptfile format technically + # did allow for multiple packages separated by spaces to be specified + # on a single line due to how the download command was implemented so we + # should respect that behavior since users are doing this + IFS=$' \t' read -ra PACKAGE_NAMES <<< "$PACKAGE" + apt-get "${APT_OPTIONS[@]}" -y "${APT_FORCE_YES[@]}" -d install --reinstall "${PACKAGE_NAMES[@]}" | indent fi done < <(grep --invert-match -e "^#" -e "^\s*$" -e "^:repo:" "${BUILD_DIR}/Aptfile") diff --git a/bin/report b/bin/report index 
1558b6c..ce294dc 100755 --- a/bin/report +++ b/bin/report @@ -18,9 +18,6 @@ while IFS= read -r line; do elif [[ $line == *deb ]]; then custom_packages+=("${line}") else - # while this is not documented behavior, the Aptfile format technically - # does allow for multiple packages separated by spaces to be specified - # on a single line due to how the download command is implemented IFS=$' \t' read -ra package_names <<< "${line}" for package_name in "${package_names[@]}"; do packages+=("${package_name}") diff --git a/test/fixtures/package-names/Aptfile b/test/fixtures/package-names/Aptfile index 9ab34b4..6c5c25e 100644 --- a/test/fixtures/package-names/Aptfile +++ b/test/fixtures/package-names/Aptfile @@ -3,3 +3,6 @@ xmlsec1 # globbed package mysql-client-* + +# multiple packages on single line +s3cmd wget diff --git a/test/run b/test/run index 4b90157..2039756 100755 --- a/test/run +++ b/test/run @@ -4,8 +4,11 @@ testCompilePackageNames() { compile "package-names" assertCaptured "Updating apt caches" assertCaptured "Fetching .debs for xmlsec1" + assertCaptured "Fetching .debs for s3cmd wget" assertCaptured "Fetching .debs for mysql-client-*" assertCaptured "Installing xmlsec1" + assertCaptured "Installing s3cmd" + assertCaptured "Installing wget" assertCaptured "Installing mysql-client" assertCaptured "Installing mysql-client-core" assertCaptured "Writing profile script" @@ -15,7 +18,7 @@ testCompilePackageNames() { testReportPackageNames() { report "package-names" - assertCaptured "packages: \"mysql-client-*,xmlsec1\"" + assertCaptured "packages: \"mysql-client-*,s3cmd,wget,xmlsec1\"" assertNotCaptured "custom_packages" assertNotCaptured "custom_repositories" assertCapturedSuccess From abe48c62ad524ad599a1b098f1f1f5c8f9234bb5 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 28 Mar 2024 10:51:54 -0300 Subject: [PATCH 22/25] Improved comment line check (#125) --- bin/compile | 2 +- bin/report | 2 +- test/fixtures/package-names/Aptfile | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/bin/compile b/bin/compile index 17a78cd..7e258dc 100755 --- a/bin/compile +++ b/bin/compile @@ -99,7 +99,7 @@ for PACKAGE in $(cat $BUILD_DIR/$APT_FILE_MANIFEST | grep -v -s -e '^#' | grep - IFS=$' \t' read -ra PACKAGE_NAMES <<< "$PACKAGE" apt-get "${APT_OPTIONS[@]}" -y "${APT_FORCE_YES[@]}" -d install --reinstall "${PACKAGE_NAMES[@]}" | indent fi -done < <(grep --invert-match -e "^#" -e "^\s*$" -e "^:repo:" "${BUILD_DIR}/Aptfile") +done < <(grep --invert-match -e "^\s*#" -e "^\s*$" -e "^:repo:" "${BUILD_DIR}/Aptfile") mkdir -p "$BUILD_DIR/.apt" diff --git a/bin/report b/bin/report index ce294dc..1421bba 100755 --- a/bin/report +++ b/bin/report @@ -23,7 +23,7 @@ while IFS= read -r line; do packages+=("${package_name}") done fi -done < <(grep --invert-match -e "^#" -e "^\s*$" "${BUILD_DIR}/Aptfile") +done < <(grep --invert-match -e "^\s*#" -e "^\s*$" "${BUILD_DIR}/Aptfile") output_key_value() { local key value diff --git a/test/fixtures/package-names/Aptfile b/test/fixtures/package-names/Aptfile index 6c5c25e..15fa750 100644 --- a/test/fixtures/package-names/Aptfile +++ b/test/fixtures/package-names/Aptfile @@ -6,3 +6,5 @@ mysql-client-* # multiple packages on single line s3cmd wget + + # comment with bad indent From 22a9ebd9345302826b961a1e362dc28986dd9513 Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 28 Mar 2024 10:53:20 -0300 Subject: [PATCH 23/25] Warn and exit when no packages are listed in Aptfile (#126) --- CHANGELOG.md | 2 + bin/compile | 10 +++++ 
.../custom-repository-no-packages/Aptfile | 1 + test/fixtures/empty/Aptfile | 0 test/fixtures/only-comments/Aptfile | 4 ++ test/run | 45 +++++++++++++++++++ 6 files changed, 62 insertions(+) create mode 100644 test/fixtures/custom-repository-no-packages/Aptfile create mode 100644 test/fixtures/empty/Aptfile create mode 100644 test/fixtures/only-comments/Aptfile diff --git a/CHANGELOG.md b/CHANGELOG.md index 2bb8b57..1cda33f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ ## Unreleased +- Warn when Aptfile contains no packages ([#126](https://github.com/heroku/heroku-buildpack-apt/pull/126)) + ## 2024-03-14 - Shell hardening ([#115](https://github.com/heroku/heroku-buildpack-apt/pull/115)) diff --git a/bin/compile b/bin/compile index 7e258dc..8de64f3 100755 --- a/bin/compile +++ b/bin/compile @@ -31,6 +31,16 @@ function indent() { esac } +if ! grep --invert-match -e "^\s*#" -e "^\s*$" -e "^:repo:" -q "${BUILD_DIR}/Aptfile"; then + echo " +! You have no packages listed in your Aptfile. If you don't need custom Apt packages, +! delete your Aptfile and remove the buildpack with: +! +! $ heroku buildpacks:remove heroku-community/apt +" + exit 0 +fi + # Store which STACK we are running on in the cache to bust the cache if it changes if [[ -f "$CACHE_DIR/.apt/STACK" ]]; then CACHED_STACK=$(cat "$CACHE_DIR/.apt/STACK") diff --git a/test/fixtures/custom-repository-no-packages/Aptfile b/test/fixtures/custom-repository-no-packages/Aptfile new file mode 100644 index 0000000..003c0c6 --- /dev/null +++ b/test/fixtures/custom-repository-no-packages/Aptfile @@ -0,0 +1 @@ +:repo:deb http://us.archive.ubuntu.com/ubuntu/ jammy multiverse diff --git a/test/fixtures/empty/Aptfile b/test/fixtures/empty/Aptfile new file mode 100644 index 0000000..e69de29 diff --git a/test/fixtures/only-comments/Aptfile b/test/fixtures/only-comments/Aptfile new file mode 100644 index 0000000..349c538 --- /dev/null +++ b/test/fixtures/only-comments/Aptfile @@ -0,0 +1,4 @@ +# no packages + # only comments + +# and whitespace diff --git a/test/run b/test/run index 2039756..6718ba7 100755 --- a/test/run +++ b/test/run @@ -78,6 +78,51 @@ testReportCustomRepository() { assertCapturedSuccess } +testCompileEmpty() { + compile "empty" + assertCaptured "You have no packages listed in your Aptfile" + assertNotCaptured "Updating apt caches" + assertCapturedSuccess +} + +testReportEmpty() { + report "empty" + assertNotCaptured "^packages" + assertNotCaptured "custom_packages" + assertNotCaptured "custom_repositories" + assertCapturedSuccess +} + +testCompileOnlyComments() { + compile "only-comments" + assertCaptured "You have no packages listed in your Aptfile" + assertNotCaptured "Updating apt caches" + assertCapturedSuccess +} + +testReportOnlyComments() { + report "only-comments" + assertNotCaptured "^packages" + assertNotCaptured "custom_packages" + assertNotCaptured "custom_repositories" + assertCapturedSuccess +} + +testCompileCustomRepositoryNoPackages() { + compile "custom-repository-no-packages" + assertCaptured "You have no packages listed in your Aptfile" + assertNotCaptured "Updating apt caches" + assertCapturedSuccess +} + +testReportCustomRepositoryNoPackages() { + report "custom-repository-no-packages" + assertNotCaptured "^packages" + assertNotCaptured "custom_packages" + assertCaptured "custom_repositories: \"deb http://us.archive.ubuntu.com/ubuntu/ jammy multiverse\"" + assertCapturedSuccess +} + pushd "$(dirname 0)" >/dev/null || exit 1 popd >/dev/null || exit 1 From bb15c586c3ff07f21c72dc652979c43d35709ae4 Mon 
Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 28 Mar 2024 14:57:36 -0300 Subject: [PATCH 24/25] Support source parts directory (#119) --- .github/workflows/ci.yml | 3 ++- CHANGELOG.md | 1 + Makefile | 7 ++++++- bin/compile | 4 +++- test/fixtures/custom-package-url-heroku-24/Aptfile | 2 ++ test/fixtures/custom-repository-heroku-24/Aptfile | 2 ++ test/run | 6 ++++++ 7 files changed, 22 insertions(+), 3 deletions(-) create mode 100644 test/fixtures/custom-package-url-heroku-24/Aptfile create mode 100644 test/fixtures/custom-repository-heroku-24/Aptfile diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aaf0c67..958dad1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,9 +13,10 @@ jobs: runs-on: ubuntu-22.04 container: image: heroku/heroku:${{ matrix.stack_number }}-build + options: --user root strategy: matrix: - stack_number: ["20", "22"] + stack_number: ["20", "22", "24"] env: STACK: heroku-${{ matrix.stack_number }} steps: diff --git a/CHANGELOG.md b/CHANGELOG.md index 1cda33f..fa7e501 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ ## Unreleased - Warn when Aptfile contains no packages ([#126](https://github.com/heroku/heroku-buildpack-apt/pull/126)) +- Support sources parts directory for Heroku-24 compatibility ([#119](https://github.com/heroku/heroku-buildpack-apt/pull/119)) ## 2024-03-14 diff --git a/Makefile b/Makefile index cc13917..5dd5e97 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,13 @@ -test: heroku-22-build heroku-20-build +test: heroku-24-build heroku-22-build heroku-20-build shellcheck: @shellcheck -x bin/compile bin/detect bin/release bin/report +heroku-24-build: + @echo "Running tests in docker (heroku-24-build)..." + @docker run --user root -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-24" heroku/heroku:24-build bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;' + @echo "" + heroku-22-build: @echo "Running tests in docker (heroku-22-build)..." @docker run -v $(shell pwd):/buildpack:ro --rm -it -e "STACK=heroku-22" heroku/heroku:22-build bash -c 'cp -r /buildpack /buildpack_test; cd /buildpack_test/; test/run;' diff --git a/bin/compile b/bin/compile index 8de64f3..895f54a 100755 --- a/bin/compile +++ b/bin/compile @@ -55,6 +55,7 @@ echo "$STACK" > "$CACHE_DIR/.apt/STACK" APT_CACHE_DIR="$CACHE_DIR/apt/cache" APT_STATE_DIR="$CACHE_DIR/apt/state" APT_SOURCELIST_DIR="$CACHE_DIR/apt/sources" # place custom sources.list here +APT_SOURCEPARTS_DIR="$APT_SOURCELIST_DIR/sources.list.d" APT_SOURCES="$APT_SOURCELIST_DIR/sources.list" APT_FILE_MANIFEST="${APT_FILE_MANIFEST:-Aptfile}" @@ -78,6 +79,7 @@ else mkdir -p "$APT_SOURCELIST_DIR" # make dir for sources cp -f "$BUILD_DIR/$APT_FILE_MANIFEST" "$APT_CACHE_DIR/$APT_FILE_MANIFEST" cat "/etc/apt/sources.list" > "$APT_SOURCES" # no cp here + cp -R "/etc/apt/sources.list.d" "$APT_SOURCEPARTS_DIR" # add custom repositories from Aptfile to sources.list # like>> :repo:deb http://cz.archive.ubuntu.com/ubuntu artful main universe if grep -q -e "^:repo:" $BUILD_DIR/$APT_FILE_MANIFEST; then @@ -88,7 +90,7 @@ fi APT_OPTIONS=("-o" "debug::nolocking=true" "-o" "dir::cache=$APT_CACHE_DIR" "-o" "dir::state=$APT_STATE_DIR") # Override the use of /etc/apt/sources.list (sourcelist) and /etc/apt/sources.list.d/* (sourceparts). 
-APT_OPTIONS+=("-o" "dir::etc::sourcelist=$APT_SOURCES" "-o" "dir::etc::sourceparts=/dev/null") +APT_OPTIONS+=("-o" "dir::etc::sourcelist=$APT_SOURCES" "-o" "dir::etc::sourceparts=$APT_SOURCEPARTS_DIR") topic "Updating apt caches" apt-get "${APT_OPTIONS[@]}" update 2>&1 | indent diff --git a/test/fixtures/custom-package-url-heroku-24/Aptfile b/test/fixtures/custom-package-url-heroku-24/Aptfile new file mode 100644 index 0000000..dc351b5 --- /dev/null +++ b/test/fixtures/custom-package-url-heroku-24/Aptfile @@ -0,0 +1,2 @@ +# no noble package for wkhtmltopdf yet, so using jammy package +https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb diff --git a/test/fixtures/custom-repository-heroku-24/Aptfile b/test/fixtures/custom-repository-heroku-24/Aptfile new file mode 100644 index 0000000..6f33d3e --- /dev/null +++ b/test/fixtures/custom-repository-heroku-24/Aptfile @@ -0,0 +1,2 @@ +:repo:deb http://us.archive.ubuntu.com/ubuntu/ noble multiverse +fasttracker2 diff --git a/test/run b/test/run index 6718ba7..243237e 100755 --- a/test/run +++ b/test/run @@ -28,6 +28,8 @@ testCompileCustomPackageUrl() { declare -A download_urls=( [heroku-20]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb" [heroku-22]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb" + # no noble package for wkhtmltopdf yet, so using jammy package + [heroku-24]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb" ) compile "custom-package-url-$STACK" assertCaptured "Updating apt caches" @@ -42,6 +44,8 @@ testReportCustomPackageUrl() { declare -A download_urls=( [heroku-20]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb" [heroku-22]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb" + # no noble package for wkhtmltopdf yet, so using jammy package + [heroku-24]="https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb" ) report "custom-package-url-$STACK" assertNotCaptured "^packages" @@ -54,6 +58,7 @@ testCompileCustomRepository() { declare -A ubuntu_release_names=( [heroku-20]="focal" [heroku-22]="jammy" + [heroku-24]="noble" ) compile "custom-repository-$STACK" assertCaptured "Adding custom repositories" @@ -70,6 +75,7 @@ testReportCustomRepository() { declare -A ubuntu_release_names=( [heroku-20]="focal" [heroku-22]="jammy" + [heroku-24]="noble" ) report "custom-repository-$STACK" assertCaptured "packages: \"fasttracker2\"" From de0d1470a697084d3fde0aba59ebce75f7bfa9bc Mon Sep 17 00:00:00 2001 From: Colin Casey Date: Thu, 28 Mar 2024 15:20:22 -0300 Subject: [PATCH 25/25] Prepare release v9 (#127) --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa7e501..6666e72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ ## Unreleased +## 2024-03-28 + - Warn when Aptfile contains no packages ([#126](https://github.com/heroku/heroku-buildpack-apt/pull/126)) - Support sources parts directory for Heroku-24 compatibility ([#119](https://github.com/heroku/heroku-buildpack-apt/pull/119))
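
Taken together, the patches above exercise the full Aptfile syntax the buildpack understands: plain package names (including apt glob patterns such as mysql-client-*), several space-separated packages on one line, comment lines starting with #, direct .deb download URLs, and :repo: lines that append custom APT repositories to the generated sources.list (and, since PATCH 24/25, are used alongside a copy of /etc/apt/sources.list.d as the source-parts directory). An illustrative Aptfile assembled from the test fixtures shown above — the combination is an example only; no single fixture contains all of these lines:

    # tools
    xmlsec1
    s3cmd wget

    # globbed package
    mysql-client-*

    # direct .deb download
    https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6.1-2/wkhtmltox_0.12.6.1-2.jammy_amd64.deb

    # custom repository
    :repo:deb http://us.archive.ubuntu.com/ubuntu/ jammy multiverse

The following minimal bash sketch shows how such a file is consumed after PATCH 21/25 and PATCH 22/25. The grep filter, the *deb test, and the IFS/read splitting are copied from those patches; the while/read wrapper mirrors the loop used in bin/report, and the echo statements stand in for the real curl and apt-get calls, so this is a sketch of the parsing behaviour rather than the buildpack itself:

    #!/usr/bin/env bash
    set -euo pipefail

    while IFS= read -r PACKAGE; do
      if [[ $PACKAGE == *deb ]]; then
        # a line ending in "deb" is treated as a URL and downloaded directly
        echo "would download: $PACKAGE"
      else
        # split space/tab separated names into individual apt-get arguments
        IFS=$' \t' read -ra PACKAGE_NAMES <<< "$PACKAGE"
        echo "would fetch .debs for: ${PACKAGE_NAMES[*]}"
      fi
    done < <(grep --invert-match -e "^\s*#" -e "^\s*$" -e "^:repo:" "Aptfile")

Run against the example Aptfile above, the sketch prints one "would fetch .debs for" line per package line (with s3cmd and wget split into two arguments) and one "would download" line for the wkhtmltox URL, while the comments, blank lines, and the :repo: line are filtered out before the loop ever sees them.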