diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 12c5dd1e21..f6bf8674a3 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -41,6 +41,20 @@ jobs: steps: - name: Checkout uses: actions/checkout@v3 + - name: Install OpenCL + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt update + sudo apt install -y opencl-headers ocl-icd-opencl-dev + - name: Install OpenMP + if: matrix.os == 'macos-latest' + run: | + brew install libomp + brew link libomp --force + - name: Install OpenCL + if: matrix.os == 'macos-latest' + run: | + brew install opencl-clhpp-headers - name: Prepare MSVC if: matrix.os == 'windows-latest' uses: ilammy/msvc-dev-cmd@v1 @@ -52,8 +66,9 @@ jobs: - name: Build package mlrl-common uses: pypa/cibuildwheel@v2 env: - CIBW_BEFORE_ALL_MACOS: brew install libomp && brew link libomp --force - CIBW_BEFORE_BUILD: make clean install_cpp install_cython + CIBW_BEFORE_BUILD_LINUX: ./build --clean && ./build install + CIBW_BEFORE_BUILD_MACOS: ./build --clean && CPLUS_INCLUDE_PATH=/usr/local/opt/opencl-clhpp-headers/include ./build install + CIBW_BEFORE_BUILD_WINDOWS: ./build.bat --clean && ./build.bat install CIBW_BUILD_FRONTEND: build CIBW_ARCHS: auto64 CIBW_SKIP: 'pp* *musllinux*' @@ -62,8 +77,9 @@ jobs: - name: Build package mlrl-boosting uses: pypa/cibuildwheel@v2 env: - CIBW_BEFORE_ALL_MACOS: brew install libomp && brew link libomp --force - CIBW_BEFORE_BUILD: make clean install_cpp install_cython + CIBW_BEFORE_BUILD_LINUX: ./build --clean && ./build install + CIBW_BEFORE_BUILD_MACOS: ./build --clean && CPLUS_INCLUDE_PATH=/usr/local/opt/opencl-clhpp-headers/include ./build install + CIBW_BEFORE_BUILD_WINDOWS: ./build.bat --clean && ./build.bat install CIBW_BUILD_FRONTEND: build CIBW_ARCHS: auto64 CIBW_SKIP: 'pp* *musllinux*' @@ -101,7 +117,7 @@ jobs: - name: Build package mlrl-common uses: pypa/cibuildwheel@v2 env: - CIBW_BEFORE_BUILD: make clean install_cpp install_cython + CIBW_BEFORE_BUILD: ./build --clean && ./build install CIBW_BUILD_FRONTEND: build CIBW_ARCHS_LINUX: aarch64 CIBW_SKIP: 'pp* *musllinux*' @@ -110,7 +126,7 @@ jobs: - name: Build package mlrl-boosting uses: pypa/cibuildwheel@v2 env: - CIBW_BEFORE_BUILD: make clean install_cpp install_cython + CIBW_BEFORE_BUILD: ./build --clean && ./build install CIBW_BUILD_FRONTEND: build CIBW_ARCHS_LINUX: aarch64 CIBW_SKIP: 'pp* *musllinux*' diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index 3a0eaac994..326ef9525c 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -8,8 +8,8 @@ on: - '**/*.pyx' - '**/*.py' - '**/*.build' - - 'Makefile' - - 'python/requirements.txt' + - 'build' + - '**/requirements.txt' - 'doc/**' - 'python/subprojects/testbed/tests/**' - '.github/workflows/test_build.yml' @@ -26,15 +26,18 @@ jobs: sudo apt install -y opencl-headers ocl-icd-opencl-dev - name: Compile via GCC run: | - make compile + ./build compile - name: Run Tests run: | - make tests + ./build tests - name: Install Doxygen uses: ssciwr/doxygen-install@v1 + - name: Install Roboto font + run: | + sudo apt install -y fonts-roboto - name: Generate Documentation run: | - make doc + ./build doc macos_build: name: Test MacOS build runs-on: macos-latest @@ -50,7 +53,7 @@ jobs: brew install opencl-clhpp-headers - name: Compile via Clang run: | - CPLUS_INCLUDE_PATH=/usr/local/opt/opencl-clhpp-headers/include make compile + CPLUS_INCLUDE_PATH=/usr/local/opt/opencl-clhpp-headers/include ./build compile windows_build: name: Test 
Windows build runs-on: windows-latest @@ -73,4 +76,4 @@ jobs: run: | $env:INCLUDE += ";$($pwd.Path)\vcpkg\packages\opencl_x64-windows\include" $env:LIB += ";$($pwd.Path)\vcpkg\packages\opencl_x64-windows\lib" - make compile + ./build.bat compile diff --git a/.github/workflows/test_format.yml b/.github/workflows/test_format.yml index 104d6a9d34..b736bfeddc 100644 --- a/.github/workflows/test_format.yml +++ b/.github/workflows/test_format.yml @@ -5,7 +5,7 @@ on: - '**/*.hpp' - '**/*.cpp' - '**/*.py' - - 'Makefile' + - 'build' - '.clang-format' - '.isort.cfg' - '.style.yapf' @@ -19,7 +19,7 @@ jobs: uses: actions/checkout@v3 - name: Check C++ code style run: | - make test_format_cpp + ./build test_format_cpp - name: Check Python code style run: | - make test_format_python + ./build test_format_python diff --git a/.gitignore b/.gitignore index e1dc86f2b2..bb6f86eee4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # Build files __pycache__/ +scons/build/ python/**/build/ python/**/dist/ python/**/*egg-info/ @@ -10,7 +11,6 @@ python/**/cython/*.lib python/**/cython/*.pyd python/**/tests/res/tmp/ cpp/build/ -.cpp_files.tmp # Documentation files doc/_build/ diff --git a/.isort.cfg b/.isort.cfg index 4f384833b2..e42dcea7db 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -3,8 +3,8 @@ supported_extensions=py,pxd,pyx line_length=120 group_by_package=true known_first_party=mlrl -known_third_party=sklearn,scipy,numpy,tabulate,arff -forced_separate=mlrl.common,mlrl.boosting,mlrl.seco,mlrl.testbed +known_third_party=sklearn,scipy,numpy,tabulate,arff,SCons +forced_separate=mlrl.common,mlrl.boosting,mlrl.seco,mlrl.testbed,SCons lines_between_types=1 order_by_type=true multi_line_output=2 diff --git a/CHANGELOG.md b/CHANGELOG.md index f4fa2a0658..c09a353867 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ A major update to the BOOMER algorithm that introduces the following changes. * The documentation has been updated to a more modern theme supporting light and dark theme variants. * A build option that allows to disable multi-threading support via OpenMP at compile-time has been added. * The Python code is now checked for common issues by applying `pylint` via continuous integration. +* The Makefile has been replaced with wrapper scripts triggering a [SCons](https://scons.org/) build. ## Version 0.9.0 (Jul. 2nd, 2023) diff --git a/Makefile b/Makefile deleted file mode 100644 index f81172a2e1..0000000000 --- a/Makefile +++ /dev/null @@ -1,246 +0,0 @@ -default_target: install -.PHONY: clean_venv clean_cpp clean_cython clean_compile clean_cpp_install clean_cython_install clean_wheel \ - clean_install clean_doc clean test_format_cpp test_format_python test_format format_cpp format_python format \ - compile_cpp compile_cython compile install_cpp install_cython wheel install apidoc_cpp apidoc_python doc - -UNAME = $(if $(filter Windows_NT,${OS}),Windows,$(shell uname)) -IS_WIN = $(filter Windows,${UNAME}) - -VENV_DIR = venv -CPP_SRC_DIR = cpp -CPP_BUILD_DIR = ${CPP_SRC_DIR}/build -CPP_PACKAGE_DIR = ${CPP_SRC_DIR}/subprojects -PYTHON_SRC_DIR = python -PYTHON_BUILD_DIR = ${PYTHON_SRC_DIR}/build -PYTHON_PACKAGE_DIR = ${PYTHON_SRC_DIR}/subprojects -DIST_DIR = dist -DOC_DIR = doc -DOC_API_DIR = ${DOC_DIR}/apidoc -DOC_TMP_DIR = ${DOC_DIR}/python -DOC_BUILD_DIR = ${DOC_DIR}/_build - -PS = powershell -Command -PYTHON = $(if ${IS_WIN},python,python3) -VENV_CREATE = ${PYTHON} -m venv ${VENV_DIR} -VENV_ACTIVATE = $(if ${IS_WIN},${PS} "${VENV_DIR}/Scripts/activate.bat;",. 
${VENV_DIR}/bin/activate) -VENV_DEACTIVATE = $(if ${IS_WIN},${PS} "${VENV_DIR}/Scripts/deactivate.bat;",deactivate) -PIP_INSTALL = python -m pip install --prefer-binary -ISORT = isort --settings-path . --virtual-env ${VENV_DIR} --skip-gitignore -ISORT_DRYRUN = ${ISORT} --check -ISORT_INPLACE = ${ISORT} --overwrite-in-place -YAPF = yapf -r -p --style=.style.yapf --verbose --exclude '**/build/*.py' -YAPF_DRYRUN = ${YAPF} --diff -YAPF_INPLACE = ${YAPF} -i -PYLINT = pylint --jobs=0 --recursive=y --ignore=build --rcfile=.pylintrc -CLANG_FORMAT = clang-format --style=file --verbose -CLANG_FORMAT_DRYRUN = ${CLANG_FORMAT} -n --Werror -CLANG_FORMAT_INPLACE = ${CLANG_FORMAT} -i -MESON_SETUP = meson setup -MESON_COMPILE = meson compile -MESON_INSTALL = meson install -WHEEL_BUILD = python -m build --wheel -WHEEL_INSTALL = python -m pip install --force-reinstall --no-deps -PYTHON_UNITTEST = python -m unittest discover -v -f -s -DOXYGEN = $(if ${IS_WIN},for /f %%i in (./../VERSION) do set PROJECT_NUMBER=%%i && doxygen,PROJECT_NUMBER=${file < VERSION} doxygen) -SPHINX_APIDOC = sphinx-apidoc --tocfile index -f -SPHINX_BUILD = sphinx-build -M html - -define delete_dir - $(if ${IS_WIN},\ - ${PS} "if (Test-Path ${1}) {rm ${1} -Recurse -Force}",\ - rm -rf ${1}) -endef - -define delete_files_recursively - $(if ${IS_WIN},\ - ${PS} "rm ${1} -Recurse -Force -Include ${2}",\ - rm -f ${1}/**/${2}) -endef - -define delete_dirs_recursively - $(if ${IS_WIN},\ - ${PS} "rm ${1} -Recurse -Force -Include ${2}",\ - rm -rf ${1}/**/${2}) -endef - -define install_wheels - $(if ${IS_WIN},\ - ${PS} "${WHEEL_INSTALL} (Get-ChildItem -Path ${1} | Where Name -Match '\.whl' | Select-Object -ExpandProperty FullName);",\ - ${WHEEL_INSTALL} ${1}/*.whl) -endef - -define create_dir - $(if ${IS_WIN},\ - ${PS} "New-Item -Path ${1} -ItemType "directory" -Force",\ - mkdir -p ${1}) -endef - -define clang_format_dryrun_recursively - $(if ${IS_WIN},\ - (${PS} "Get-ChildItem -Path ${1} -Recurse | Where Name -Match '\.hpp|\.cpp' | Select-Object -ExpandProperty FullName | Out-File .cpp_files.tmp -Encoding utf8";\ - ${CLANG_FORMAT_DRYRUN} --files=.cpp_files.tmp;\ - ${PS} "rm .cpp_files.tmp -Force"),\ - find ${1} -type f \( -iname "*.hpp" -o -iname "*.cpp" \) -exec ${CLANG_FORMAT_DRYRUN} {} +) -endef - -define clang_format_inplace_recursively - $(if ${IS_WIN},\ - (${PS} "Get-ChildItem -Path ${1} -Recurse | Where Name -Match '\.hpp|\.cpp' | Select-Object -ExpandProperty FullName | Out-File .cpp_files.tmp -Encoding utf8";\ - ${CLANG_FORMAT_INPLACE} --files=.cpp_files.tmp;\ - ${PS} "rm .cpp_files.tmp -Force"),\ - find ${1} -type f \( -iname "*.hpp" -o -iname "*.cpp" \) -exec ${CLANG_FORMAT_INPLACE} {} +) -endef - -clean_venv: - @echo Removing virtual Python environment... - $(call delete_dir,${VENV_DIR}) - -clean_cpp: - @echo Removing C++ compilation files... - $(call delete_dir,${CPP_BUILD_DIR}) - -clean_cython: - @echo Removing Cython compilation files... - $(call delete_dir,${PYTHON_BUILD_DIR}) - -clean_compile: clean_cpp clean_cython - -clean_install: - @echo Removing shared libraries and extension modules from source tree... - $(call delete_files_recursively,${PYTHON_PACKAGE_DIR},*.so*) - $(call delete_files_recursively,${PYTHON_PACKAGE_DIR},*.dylib) - $(call delete_files_recursively,${PYTHON_PACKAGE_DIR},*.dll) - $(call delete_files_recursively,${PYTHON_PACKAGE_DIR},*.lib) - $(call delete_files_recursively,${PYTHON_PACKAGE_DIR},*.pyd) - -clean_wheel: - @echo Removing Python build files... 
- $(call delete_dirs_recursively,${PYTHON_PACKAGE_DIR},build) - $(call delete_dirs_recursively,${PYTHON_PACKAGE_DIR},${DIST_DIR}) - $(call delete_dirs_recursively,${PYTHON_PACKAGE_DIR},*.egg-info) - -clean_doc: - @echo Removing documentation... - $(call delete_dir,${DOC_BUILD_DIR}) - $(call delete_dir,${DOC_API_DIR}) - $(call delete_files_recursively,${DOC_TMP_DIR},*.rst) - -clean: clean_doc clean_wheel clean_compile clean_install clean_venv - -venv: - @echo Creating virtual Python environment... - ${VENV_CREATE} - ${VENV_ACTIVATE} \ - && ${PIP_INSTALL} -r ${PYTHON_SRC_DIR}/requirements.txt \ - && ${VENV_DEACTIVATE} - -test_format_python: venv - @echo Checking Python code style... - ${VENV_ACTIVATE} \ - && ${ISORT_DRYRUN} ${PYTHON_PACKAGE_DIR} \ - && ${YAPF_DRYRUN} ${PYTHON_PACKAGE_DIR} \ - && ${PYLINT} ${PYTHON_PACKAGE_DIR} \ - && ${VENV_DEACTIVATE} - -test_format_cpp: venv - @echo Checking C++ code style... - ${VENV_ACTIVATE} \ - && $(call clang_format_dryrun_recursively,${CPP_PACKAGE_DIR}) \ - && ${VENV_DEACTIVATE} - -test_format: test_format_python test_format_cpp - -format_python: venv - @echo Formatting Python code... - ${VENV_ACTIVATE} \ - && ${ISORT_INPLACE} ${PYTHON_PACKAGE_DIR} \ - && ${YAPF_INPLACE} ${PYTHON_PACKAGE_DIR} \ - && ${VENV_DEACTIVATE} - -format_cpp: venv - @echo Formatting C++ code... - ${VENV_ACTIVATE} \ - && $(call clang_format_inplace_recursively,${CPP_PACKAGE_DIR}) \ - && ${VENV_DEACTIVATE} - -format: format_python format_cpp - -compile_cpp: venv - @echo Compiling C++ code... - ${VENV_ACTIVATE} \ - && ${MESON_SETUP} ${CPP_BUILD_DIR} ${CPP_SRC_DIR} \ - && ${MESON_COMPILE} -C ${CPP_BUILD_DIR} \ - && ${VENV_DEACTIVATE} - -compile_cython: venv - @echo Compiling Cython code... - ${VENV_ACTIVATE} \ - && ${MESON_SETUP} ${PYTHON_BUILD_DIR} ${PYTHON_SRC_DIR} \ - && ${MESON_COMPILE} -C ${PYTHON_BUILD_DIR} \ - && ${VENV_DEACTIVATE} - -compile: compile_cpp compile_cython - -install_cpp: compile_cpp - @echo Installing shared libraries into source tree... - ${VENV_ACTIVATE} \ - && ${MESON_INSTALL} -C ${CPP_BUILD_DIR} \ - && ${VENV_DEACTIVATE} - -install_cython: compile_cython - @echo Installing extension modules into source tree... - ${VENV_ACTIVATE} \ - && ${MESON_INSTALL} -C ${PYTHON_BUILD_DIR} \ - && ${VENV_DEACTIVATE} - -wheel: install_cpp install_cython - @echo Building wheel packages... - ${VENV_ACTIVATE} \ - && ${WHEEL_BUILD} ${PYTHON_PACKAGE_DIR}/common \ - && ${WHEEL_BUILD} ${PYTHON_PACKAGE_DIR}/boosting \ - && ${WHEEL_BUILD} ${PYTHON_PACKAGE_DIR}/seco \ - && ${WHEEL_BUILD} ${PYTHON_PACKAGE_DIR}/testbed \ - && ${VENV_DEACTIVATE} - -install: wheel - @echo Installing wheel packages into virtual environment... - ${VENV_ACTIVATE} \ - && $(call install_wheels,${PYTHON_PACKAGE_DIR}/common/${DIST_DIR}) \ - && $(call install_wheels,${PYTHON_PACKAGE_DIR}/boosting/${DIST_DIR}) \ - && $(call install_wheels,${PYTHON_PACKAGE_DIR}/seco/${DIST_DIR}) \ - && $(call install_wheels,${PYTHON_PACKAGE_DIR}/testbed/${DIST_DIR}) \ - && ${VENV_DEACTIVATE} - -tests: install - @echo Running integration tests... - ${VENV_ACTIVATE} \ - && ${PYTHON_UNITTEST} ${PYTHON_PACKAGE_DIR}/testbed/tests \ - && ${VENV_DEACTIVATE} - -apidoc_cpp: - @echo Generating C++ API documentation via Doxygen... - $(call create_dir,${DOC_API_DIR}/api/cpp/common) - cd ${DOC_DIR} && ${DOXYGEN} Doxyfile_common - $(call create_dir,${DOC_API_DIR}/api/cpp/boosting) - cd ${DOC_DIR} && ${DOXYGEN} Doxyfile_boosting - -apidoc_python: install - @echo Installing documentation dependencies into virtual environment... 
- ${VENV_ACTIVATE} \ - && ${PIP_INSTALL} -r ${DOC_DIR}/requirements.txt \ - && ${VENV_DEACTIVATE} - @echo Generating Python API documentation via Sphinx-Apidoc... - ${VENV_ACTIVATE} \ - && ${SPHINX_APIDOC} -o ${DOC_TMP_DIR}/common ${PYTHON_PACKAGE_DIR}/common/mlrl **/cython \ - && ${SPHINX_BUILD} ${DOC_TMP_DIR}/common ${DOC_API_DIR}/api/python/common \ - && ${SPHINX_APIDOC} -o ${DOC_TMP_DIR}/boosting ${PYTHON_PACKAGE_DIR}/boosting/mlrl **/cython \ - && ${SPHINX_BUILD} ${DOC_TMP_DIR}/boosting ${DOC_API_DIR}/api/python/boosting \ - && ${SPHINX_APIDOC} -o ${DOC_TMP_DIR}/testbed ${PYTHON_PACKAGE_DIR}/testbed/mlrl \ - && ${SPHINX_BUILD} ${DOC_TMP_DIR}/testbed ${DOC_API_DIR}/api/python/testbed \ - && ${VENV_DEACTIVATE} - -doc: apidoc_cpp apidoc_python - @echo Generating Sphinx documentation... - ${VENV_ACTIVATE} \ - && ${SPHINX_BUILD} ${DOC_DIR} ${DOC_BUILD_DIR} \ - && ${VENV_DEACTIVATE} diff --git a/build b/build new file mode 100755 index 0000000000..3223efbcba --- /dev/null +++ b/build @@ -0,0 +1,36 @@ +#!/bin/sh + +VENV_DIR="venv" +SCONS_DIR="scons" +CLEAN=false +EXIT_CODE=0 + +if [ $# -eq 1 ]; then + if [ $1 = "--clean" ]; then + CLEAN=true + fi + if [ $1 = "-c" ]; then + CLEAN=true + fi +fi + +if [ ! -d $VENV_DIR ] && [ $CLEAN = false ]; then + echo "Creating virtual Python environment..." + python3 -m venv ${VENV_DIR} +fi + +if [ -d "$VENV_DIR" ]; then + . $VENV_DIR/bin/activate + python3 -c "import sys; sys.path.append('$SCONS_DIR'); import run; run.install_build_dependencies('scons')" + scons --silent --file $SCONS_DIR/sconstruct.py $@ + EXIT_CODE=$? + deactivate +fi + +if [ $CLEAN = true ] && [ -d $VENV_DIR ]; then + echo "Removing virtual Python environment..." + rm -rf $VENV_DIR + rm -rf $SCONS_DIR/build +fi + +exit $EXIT_CODE diff --git a/build.bat b/build.bat new file mode 100644 index 0000000000..36c8b9d15c --- /dev/null +++ b/build.bat @@ -0,0 +1,39 @@ +@echo off + +set "VENV_DIR=venv" +set "SCONS_DIR=scons" +set "CLEAN=false" +set "EXIT_CODE=0" + +if not "%1"=="" if "%2"=="" ( + if "%1"=="--clean" ( + set "CLEAN=true" + ) + if "%1"=="-c" ( + set "CLEAN=true" + ) +) + +if not exist "%VENV_DIR%" if "%CLEAN%"=="false" ( + echo Creating virtual Python environment... + python -m venv "%VENV_DIR%" +) + +if exist "%VENV_DIR%" ( + call %VENV_DIR%\Scripts\activate + python -c "import sys;sys.path.append('%SCONS_DIR%'); import run; run.install_build_dependencies('scons')" + scons --silent --file %SCONS_DIR%\sconstruct.py %* + set "EXIT_CODE=%ERRORLEVEL%" + call deactivate +) + +if "%CLEAN%"=="true" if exist "%VENV_DIR%" ( + echo Removing virtual Python environment... + rd /s /q "%VENV_DIR%" + + if exist "%SCONS_DIR%\build" ( + rd /s /q "%SCONS_DIR%\build" + ) +) + +exit /b "%EXIT_CODE%" diff --git a/doc/Doxyfile_boosting b/doc/Doxyfile_boosting index 9aac1eb085..6e017e88c1 100644 --- a/doc/Doxyfile_boosting +++ b/doc/Doxyfile_boosting @@ -68,7 +68,7 @@ PROJECT_LOGO = # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. -OUTPUT_DIRECTORY = apidoc/api/cpp/boosting/ +OUTPUT_DIRECTORY = doc/apidoc/api/cpp/boosting/ # If the CREATE_SUBDIRS tag is set to YES then doxygen will create up to 4096 # sub-directories (in 2 levels) under the output directory of each output format @@ -943,7 +943,7 @@ WARN_LOGFILE = # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. 
-INPUT = ../cpp/subprojects/boosting/ +INPUT = cpp/subprojects/boosting/ # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses diff --git a/doc/Doxyfile_common b/doc/Doxyfile_common index 2ecc170179..2337f1cd91 100644 --- a/doc/Doxyfile_common +++ b/doc/Doxyfile_common @@ -68,7 +68,7 @@ PROJECT_LOGO = # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. -OUTPUT_DIRECTORY = apidoc/api/cpp/common/ +OUTPUT_DIRECTORY = doc/apidoc/api/cpp/common/ # If the CREATE_SUBDIRS tag is set to YES then doxygen will create up to 4096 # sub-directories (in 2 levels) under the output directory of each output format @@ -943,7 +943,7 @@ WARN_LOGFILE = # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. -INPUT = ../cpp/subprojects/common/ +INPUT = cpp/subprojects/common/ # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses diff --git a/doc/api/codestyle.inc.rst b/doc/api/codestyle.inc.rst index c8193537ef..eec38971dc 100644 --- a/doc/api/codestyle.inc.rst +++ b/doc/api/codestyle.inc.rst @@ -5,20 +5,48 @@ Code Style We aim to enforce a consistent code style across the entire project. For formatting the C++ code, we employ `clang-format `__. The desired C++ code style is defined in the file ``.clang-format`` in project's root directory. Accordingly, we use `YAPF `__ to enforce the Python code style defined in the file ``.style.yapf``. In addition, `isort `__ is used to keep the ordering of imports in Python and Cython source files consistent according to the configuration file ``.isort.cfg`` and `pylint `__ is used to check for common issues in the Python code according to the configuration file ``.pylintrc``. If you have modified the project's source code, you can check whether it adheres to our coding standards via the following command: -.. code-block:: text +.. tab:: Linux - make test_format + .. code-block:: text + + ./build test_format + +.. tab:: MacOS + + .. code-block:: text + + ./build test_format + +.. tab:: Windows + + .. code-block:: text + + build.bat test_format .. note:: - If you want to check for compliance with the C++ or Python code style independently, you can alternatively use the command ``make test_format_cpp`` or ``make test_format_python``. + If you want to check for compliance with the C++ or Python code style independently, you can use the build target ``test_format_cpp`` or ``test_format_python`` instead of ``test_format``. In order to automatically format the project's source files according to our style guidelines, the following command can be used: -.. code-block:: text +.. tab:: Linux + + .. code-block:: text + + ./build format + +.. tab:: MacOS + + .. code-block:: text + + ./build format + +.. tab:: Windows + + .. code-block:: text - make format + build.bat format .. note:: - If you want to format only the C++ source files, you can run the command ``make format_cpp`` instead. Accordingly, the command ``make format_python`` may be used to format only the Python source files. + If you want to format only the C++ source files, you can specify the build target ``format_cpp`` instead of ``format``. Accordingly, the target ``format_python`` may be used to format only the Python source files. 
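+Internally, these build targets are thin wrappers around the respective tools: ``isort``, ``yapf`` and ``pylint`` are applied to the Python sources and ``clang-format`` to the C++ sources, using the configuration files mentioned above. For orientation, the checks performed by ``test_format`` roughly correspond to the following manual invocations (the directory ``python/`` is only an example):
+
+.. code-block:: text
+
+   isort --settings-path . --skip-gitignore --check python/
+   yapf -r -p --style=.style.yapf --diff python/
+   pylint --recursive=y --rcfile=.pylintrc python/
+   clang-format --style=file -n --Werror <C++ source files>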
Whenever any source files have been modified, a `Github Action `__ is run automatically to verify if they adhere to our code style guidelines. The result of these runs can be found in the `Github repository `__. diff --git a/doc/api/compilation.inc.rst b/doc/api/compilation.inc.rst index 54ca621e85..54f86f8bda 100644 --- a/doc/api/compilation.inc.rst +++ b/doc/api/compilation.inc.rst @@ -3,106 +3,288 @@ Building from Source -------------------- -As discussed in the previous section :ref:`structure`, the algorithm that is provided by this project is mostly implemented in `C++ `__ to ensure maximum efficiency (requires C++ 14 or newer). In addition, a `Python `__ wrapper that integrates the algorithm with the `scikit-learn `__ framework is provided (requires Python 3.8 or newer). To make the underlying C++ implementation accessible from within the Python code, `Cython `__ is used (requires Cython 3.0 or newer). +As discussed in the previous section :ref:`structure`, the algorithm that is provided by this project is implemented in `C++ `__ to ensure maximum efficiency (requires C++ 14 or newer). In addition, a `Python `__ wrapper that integrates the algorithm with the `scikit-learn `__ framework is provided (requires Python 3.8 or newer). To make the underlying C++ implementation accessible from within the Python code, `Cython `__ is used (requires Cython 3.0 or newer). -Unlike pure Python programs, the C++ and Cython source files must be compiled for a particular target platform. To ease the process of compiling the source code, the project comes with a `Makefile `__ that automates the necessary steps. In the following, we discuss the individual steps that are necessary for building the project from scratch. This is necessary if you intend to modify the library's source code. If you want to use the algorithm without any custom modifications, the :ref:`installation` of pre-built packages is usually a better choice. +Unlike pure Python programs, the C++ and Cython source files must be compiled for a particular target platform. To ease the process of compiling the source code, the project comes with a `SCons `__ build that automates the necessary steps. In the following, we discuss the individual steps that are necessary for building the project from scratch. This is necessary if you intend to modify the library's source code. If you want to use the algorithm without any custom modifications, the :ref:`installation` of pre-built packages is usually a better choice. **Prerequisites** -As a prerequisite, a supported version of Python, a suitable C++ compiler, an implementation of the Make build automation tool, as well as libraries for multi-threading and GPU support, must be installed on the host system. The installation of these software components depends on the operation system at hand. In the following, we provide installation instructions for the supported platforms. +As a prerequisite, a supported version of Python, a suitable C++ compiler, as well as optional libraries for multi-threading and GPU support, must be available on the host system. The installation of these software components depends on the operation system at hand. In the following, we provide installation instructions for the supported platforms. -* **Linux:** Nowadays, most Linux distributions include a pre-installed version of Python 3. If this is not the case, instructions on how to install a recent Python version can be found in Python's `Beginners Guide `__. 
As noted in this guide, Python should be installed via the distribution's package manager if possible. The most common Linux distributions do also ship with `GNU Make `__ and the `GNU Compiler Collection `__ (GCC) by default. If this is not the case, these software packages can typically be installed via the distribution's default package manager. `OpenMP `__ and `OpenCL `__, which are optionally required for multi-threading and GPU support, should be installable via the package manager as well. -* **MacOS:** Recent versions of MacOS do not include Python by default. A suitable Python version can manually be downloaded from the `project's website `__. Alternatively, the package manager `Homebrew `__ can be used for installation via the command ``brew install python``. MacOS relies on the `Clang `__ compiler for building C++ code. It is part of the `Xcode `__ developer toolset. In addition, if the project should be compiled with multi-threading support enabled, the `OpenMP `__ library must be installed. We recommend to install it via Homebrew by running the command ``brew install libomp``. The `Xcode `__ developer toolset should also include `OpenCL `__, which is needed for GPU support. However, the `OpenCL C++ headers `__ must be installed manually. The easiest way to do so is via the Homebrew command ``brew install opencl-clhpp-headers``. -* **Windows:** Python releases for Windows are available at the `project's website `__. In addition, an implementation of the Make tool must be installed. We recommend to use `GNU Make for Windows `__. For the compilation of the project's source code, the MSVC compiler must be used. It is included in the `Build Tools for Visual Studio `__, which also includes the `OpenMP `__ library. Finally, `Powershell `__ must be used to run the project's Makefile. It should be included by default on modern Windows systems. If you intend to compile the project with GPU support enabled, `OpenCL `__ must be installed manually. In order to do so, we recommend to install the package ``opencl`` via the package manager `vcpkg `__. +.. tab:: Linux -Additional compile- or build-time dependencies will automatically be installed when following the instructions below and must not be installed manually. + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **Python** | Nowadays, most Linux distributions include a pre-installed version of Python 3. If this is not the case, instructions on how to install a recent Python version can be found in Python’s `Beginners Guide `__. As noted in this guide, Python should be installed via the distribution’s package manager if possible. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **C++ compiler** | Most Linux distributions provide the `GNU Compiler Collection `__ (GCC), which includes a C++ compiler, as part of their software repositories. 
If this is the case, it can be installed via the distribution's package manager. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **OpenMP** | `OpenMP `__, which is optionally required for multi-threading support, should be installable via your Linux distribution's package manager. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **OpenCL** | If the project should be compiled with GPU support enabled, `OpenCL `__ must be available. On Linux, it should be installable via your distribution's package manager. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + + +.. tab:: MacOS + + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **Python** | Recent versions of MacOS do not include Python by default. A suitable Python version can manually be downloaded from the `project's website `__. Alternatively, the package manager `Homebrew `__ can be used for installation via the command ``brew install python``. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **C++ compiler** | MacOS relies on the `Clang `__ compiler for building C++ code. It is part of the `Xcode `__ developer toolset. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **OpenMP** | If the project should be compiled with multi-threading support enabled, the `OpenMP `__ library must be installed. We recommend to install it via Homebrew by running the command ``brew install libomp``. 
| + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **OpenCL** | The `Xcode `__ developer toolset should include `OpenCL `__, which is needed for GPU support. However, the `OpenCL C++ headers `__ must be installed manually. The easiest way to do so is via the Homebrew command ``brew install opencl-clhpp-headers``. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + +.. tab:: Windows + + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **Python** | Python releases for Windows are available at the `project's website `__, where you can download an installer. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **C++ compiler** | For the compilation of the project's source code, the MSVC compiler must be used. It is included in the `Build Tools for Visual Studio `__. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **OpenMP** | The `Build Tools for Visual Studio `__ also include the `OpenMP `__ library, which is utilized by the project for multi-theading support. | + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + | **OpenCL** | If you intend to compile the project with GPU support enabled, `OpenCL `__ must be installed manually. In order to do so, we recommend to install the package ``opencl`` via the package manager `vcpkg `__. 
| + +------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ + +Additional build- or run-time dependencies will automatically be installed when following the instructions below and must not be installed manually. **Step 1: Creating a virtual environment** -The build process is based on creating a virtual Python environment that allows to install build-time dependencies in an isolated manner and independently from the host system. Once all packages have successfully been built, they are installed into the virtual environment. To create new virtual environment and install all necessarily build-time dependencies, the following command must be executed: +The build process is based on an virtual Python environment that allows to install build- and run-time dependencies in an isolated manner and independently from the host system. Once the build process was completed, the resulting Python packages are installed into the virtual environment. To create new virtual environment and install all necessarily run-time dependencies, the following command must be executed: + +.. tab:: Linux + + .. code-block:: text + + ./build venv + +.. tab:: MacOS + + .. code-block:: text -.. code-block:: text + ./build venv - make venv +.. tab:: Windows -All compile-time dependencies (`numpy`, `scipy`, `cython`, `meson`, `ninja`, etc.) that are required for building the project should automatically be installed into the virtual environment when executing the above command. As a result, a subdirectory `venv/` should have been created in the project's root directory. + .. code-block:: text + + build.bat venv + +All run-time dependencies (`numpy`, `scipy`, etc.) that are required for running the algorithms that are provided by the project should automatically be installed into the virtual environment when executing the above command. As a result, a subdirectory `venv/` should have been created in the project's root directory. **Step 2: Compiling the C++ code** Once a new virtual environment has successfully been created, the compilation of the C++ code can be started by executing the following command: -.. code-block:: text +.. tab:: Linux + + .. code-block:: text + + ./build compile_cpp + +.. tab:: MacOS - make compile_cpp + .. code-block:: text -Compilation is based on the build system `Meson `_ and uses `Ninja `_ as a backend. After the above command has been completed, a new directory `cpp/build/` should have been created. It contains the shared libraries ("libmlrlcommon", "libmlrlboosting" and possibly others) that provide the basic functionality of the project's algorithms. + ./build compile_cpp + +.. tab:: Windows + + .. code-block:: text + + build.bat compile_cpp + +The compilation is based on the build system `Meson `_ and uses `Ninja `_ as a backend. After the above command has terminated, a new directory `cpp/build/` should have been created. It contains the shared libraries ("libmlrlcommon", "libmlrlboosting" and possibly others) that provide the basic functionality of the project's algorithms. 
**Step 3: Compiling the Cython code** -Once the compilation of the C++ code has completed, the Cython code that allows to access the corresponding shared libraries from within Python can be compiled in the next step. Again, Meson and Ninja are used for compilation. It can be started via the following command: +Once the compilation of the C++ code has completed, the Cython code, which allows to access the corresponding shared libraries from within Python, can be compiled in the next step. Again, Meson and Ninja are used for compilation. It can be started via the following command: + +.. tab:: Linux -.. code-block:: text + .. code-block:: text - make compile_cython + ./build compile_cython + +.. tab:: MacOS + + .. code-block:: text + + ./build compile_cython + +.. tab:: Windows + + .. code-block:: text + + build.bat compile_cython As a result of executing the above command, the directory `python/build` should have been created. It contains Python extension modules for the respective target platform. .. note:: - Instead of performing the previous steps one after the other, the command ``make compile`` can be used to compile the C++ and Cython source files in a single step. + Instead of performing the previous steps one after the other, the build target ``compile`` can be specified instead of ``compile_cpp`` and ``compile_cython`` to build the C++ and Cython source files in a single step. + +**Step 4: Copying shared libraries into the Python source tree** + +The shared libraries that have been created in the previous steps from the C++ source files must afterwards be copied into the Python source tree. This can be achieved by executing the following command: + +.. tab:: Linux + + .. code-block:: text + + ./build install_cpp + +.. tab:: MacOS + + .. code-block:: text + + ./build install_cpp + +.. tab:: Windows + + .. code-block:: text + + build.bat install_cpp + +This should result in the compilation files, which were previously located in the `cpp/build/` directory, being copied into the `cython/` subdirectories that are contained by each Python module (e.g., into the directory `python/subprojects/common/mlrl/common/cython/`). + +**Step 5: Copying extension modules into the Python source tree** + +Similar to the previous step, the Python extension modules that have been built from the project's Cython code must be copied into the Python source tree via the following command: + +.. tab:: Linux -**Step 4: Copying compilation files into the Python source tree** + .. code-block:: text -The shared library files and Python extension modules that have been created in the previous steps must afterwards be copied into the source tree that contains the Python code. This can be achieved by executing the following commands: + ./build install_cython -.. code-block:: text +.. tab:: MacOS - make install_cpp - make install_cython + .. code-block:: text -This should result in the compilation files, which were previously located in the `cpp/build/` and `python/build/` directories, to be copied into the `cython/` subdirectories that are contained by each Python module (e.g., into the directory `python/subprojects/common/mlrl/common/cython/`). -**Step 5: Building wheel packages** +.. tab:: Windows + + .. code-block:: text + + build.bat install_cython + +As a result, the compilation files that can be found in the `python/build/` directories should have been copied into the `cython/` subdirectories of each Python module. + +.. 
note:: + Instead of executing the above commands one after the other, the build target ``install`` can be used instead of ``install_cpp`` and ``install_cython`` to copy both, the shared libraries and the extension modules, into the source tree. + +**Step 6: Building wheel packages** Once the compilation files have been copied into the Python source tree, wheel packages can be built for the individual Python modules via the following command: -.. code-block:: text +.. tab:: Linux + + .. code-block:: text + + ./build build_wheels + +.. tab:: MacOS + + .. code-block:: text + + ./build build_wheels - make wheel +.. tab:: Windows + + .. code-block:: text + + build.bat build_wheels This should result in .whl files being created in a new `dist/` subdirectory inside the directories that correspond to the individual Python modules (e.g., in the directory `python/subprojects/common/dist/`). -**Step 6: Installing the wheel packages into the virtual environment** +**Step 7: Installing the wheel packages into the virtual environment** + +The wheel packages that have previously been created can finally be installed into the virtual environment via the following command: + +.. tab:: Linux + + .. code-block:: text -The wheel packages that have previously been created, as well as its runtime-dependencies (e.g., `scikit-learn` or `liac-arff`), can finally be installed into the virtual environment via the following command: + ./build install_wheels -.. code-block:: text +.. tab:: MacOS - make install + .. code-block:: text -After this final step has completed, the Python packages can be used from within the virtual environment. To ensure that the installation of the wheel packages was successful, check if a `mlrl/` directory has been created in the `lib/` directory of the virtual environment (depending on the Python version, it should be located at `venv/lib/python3.9/site-packages/mlrl/` or similar). If this is the case, the algorithm can be used from within your own Python code. Alternatively, the command line API can be used to start an experiment (see :ref:`experiments`). + ./build install_wheels -.. warning:: - Whenever any C++, Cython or Python source files have been modified, they must be recompiled and updated wheel packages must be installed into the virtual environment by executing the command ``make install``. If any compilation files do already exist, this will only result in the affected parts of the code to be rebuilt. +.. tab:: Windows + + .. code-block:: text + + build.bat install_wheels + +After this final step has completed, the Python packages can be used from within the virtual environment once it has been `activated `__. To ensure that the installation of the wheel packages was successful, check if a `mlrl/` directory has been created in the `lib/` directory of the virtual environment (depending on the Python version, it should be located at `venv/lib/python3.9/site-packages/mlrl/` or similar). If this is the case, the algorithm can be used from within your own Python code. Alternatively, the command line API can be used to start an experiment (see :ref:`experiments`). + +.. note:: + Instead of following the above instructions step by step, the following command, which automatically executes all necessary steps, can be used for simplicity: + + .. tab:: Linux + + .. code-block:: text + + ./build + + .. tab:: MacOS + + .. code-block:: text + + ./build + + .. tab:: Windows + + .. 
code-block:: text + + build.bat + + Whenever any C++, Cython or Python source files have been modified, the above command must be run again in order to rebuild modified files and install updated wheel packages into the virtual environment. If any compilation files already exist, this will only result in the affected parts of the code being rebuilt. **Cleanup** -The Makefile allows to delete the files that result from the individual steps that have been described above. To delete the wheel packages that have been created via the command ``make wheel`` the following command can be used: +It is possible to delete the compilation files that result from an individual step of the build process mentioned above by using the command line argument ``--clean`` or ``-c``. This may be useful if you want to repeat one or more steps of the build process from scratch in case anything went wrong. For example, to delete the C++ compilation files, the following command can be used: -.. code-block:: text +.. tab:: Linux - make clean_wheel + .. code-block:: text -The following command allows to remove the shared library files and Python extension modules that have been copied into the Python source tree via the commands ``make install_cpp`` and ``make install_cython``: + ./build --clean compile_cpp -.. code-block:: text +.. tab:: MacOS - make clean_install + .. code-block:: text -The commands ``make clean_cython`` and ``make clean_cpp`` remove the Cython or C++ compilation files that have been created via the command ``make compile_cython`` or ``make compile_cpp`` from the respective `build/` directories. If you want to delete both, the Cython and C++ compilation files, the following command can be used: + ./build --clean compile_cpp -.. code-block:: text +.. tab:: Windows - make clean_compile + .. code-block:: text -.. note:: - If you want to delete all compilation files that have been created via the Makefile, including the virtual environment, you should use the command ``make clean``. + build.bat --clean compile_cpp + +If you want to delete all compilation files that have previously been created, including the virtual environment, you should use the following command, where no build target is specified: + +.. tab:: Linux + + .. code-block:: text + + ./build --clean + +.. tab:: MacOS + + .. code-block:: text + + ./build --clean + +.. tab:: Windows + + .. code-block:: text + + build.bat --clean diff --git a/doc/api/documentation.inc.rst b/doc/api/documentation.inc.rst index 3b4f8fb5ba..74f1084f89 100644 --- a/doc/api/documentation.inc.rst +++ b/doc/api/documentation.inc.rst @@ -3,16 +3,83 @@ Generating the Documentation ---------------------------- -In order to generate the documentation (this document), `Doxygen `_ must be installed on the host system beforehand. It is used to generate an API documentation from the C++ source files. By running the following command, the C++ API documentation is generated via Doxygen, the Python API documentation is created via `sphinx-apidoc `_ and the documentation's HTML files are generated via `sphinx `_: +**Prerequisites** -.. code-block:: text +In order to generate the documentation (this document), `Doxygen `__ must be installed on the host system beforehand. It is used to generate API documentation from the C++ source files. In addition, the `Roboto `__ font should be available on your system. If this is not the case, another font will be used as a fallback. 
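+.. note::
+   How Doxygen and the Roboto font are installed depends on your operating system. On Ubuntu, for example, both should be available from the default package repositories and can be installed as follows:
+
+   .. code-block:: text
+
+      sudo apt install doxygen fonts-roboto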
- make doc **Step 1: Generating the C++ API documentation** -Afterwards, the generated files can be found in the directory `doc/build_/html/`. +By running the following command, the C++ API documentation is generated via Doxygen: + +.. tab:: Linux + + .. code-block:: text + + ./build apidoc_cpp + +.. tab:: MacOS + + .. code-block:: text + + ./build apidoc_cpp + +.. tab:: Windows + + .. code-block:: text + + build.bat apidoc_cpp + +The resulting HTML files should be located in the directory `doc/apidoc/api/cpp/`. + +**Step 2: Generating the Python API documentation** + +Similarly, the following command generates API documentation from the project's Python code via `sphinx-apidoc `__: + +.. tab:: Linux + + .. code-block:: text + + ./build apidoc_python + +.. tab:: MacOS + + .. code-block:: text -To clean up the generated documentation files, the following command can be used: + ./build apidoc_python + +.. tab:: Windows + + .. code-block:: text + + build.bat apidoc_python + +.. note:: + If you want to generate the API documentation for the C++ and Python code simultaneously, it is possible to use the build target ``apidoc`` instead of ``apidoc_cpp`` and ``apidoc_python``. + +**Step 3: Generating the final documentation** + +To generate the final documentation's HTML files via `sphinx `__, the following command can be used: + +.. tab:: Linux + + .. code-block:: text + + ./build doc + +.. tab:: MacOS + + .. code-block:: text + + ./build doc + +.. tab:: Windows + + .. code-block:: text + + build.bat doc + +Afterwards, the generated files can be found in the directory `doc/build/html/`. -.. code-block:: text +It should further be noted that it is not necessary to run the above steps one after the other. Executing a single command with the build target ``doc`` should suffice to create the entire documentation, including files that describe the C++ and Python API. - make clean_doc +Files that have been generated via the above steps can be removed by invoking the respective commands with the command line argument ``--clean``. A more detailed description can be found under :ref:`compilation`. diff --git a/doc/api/testing.inc.rst b/doc/api/testing.inc.rst index 76510dd8c8..9b6714d113 100644 --- a/doc/api/testing.inc.rst +++ b/doc/api/testing.inc.rst @@ -5,8 +5,23 @@ Testing the Code To be able to detect problems with the project's source code early during development, it comes with a large number of integration tests. Each of these tests runs a different configuration of the project's algorithms via the command line API and checks for unexpected results. If you want to execute the integrations tests on your own system, you can use the following command: -.. code-block:: text +.. tab:: Linux + + .. code-block:: text + + ./build tests + +.. tab:: MacOS + + .. code-block:: text + + ./build tests + +.. tab:: Windows + + .. code-block:: text + + build.bat tests - make tests The integration tests are also run automatically on a `CI server `__ whenever relevant parts of the source code have been modified. For this purpose, we rely on the infrastructure provided by `Github Actions `__. A track record of past test runs can be found in the `Github repository `__. 
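+.. note::
+   The ``tests`` target runs the integration tests via Python's ``unittest`` module. Assuming the packages have already been built and installed into the virtual environment, a roughly equivalent manual invocation (matching what the former Makefile's ``tests`` target executed) is:
+
+   .. code-block:: text
+
+      python -m unittest discover -v -f -s python/subprojects/testbed/tests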
diff --git a/python/requirements.txt b/python/requirements.txt index ebe2cdb075..46658ec398 100644 --- a/python/requirements.txt +++ b/python/requirements.txt @@ -1,13 +1,5 @@ -cython >= 3.0, < 3.1 -meson >= 1.2, < 1.3 -ninja >= 1.11, < 1.12 -build >= 1.0, < 1.1 -pylint >= 2.17, < 2.18 -yapf >= 0.40, < 0.41 -isort >= 5.12, < 5.13 -clang-format >= 16.0, < 16.1 +liac-arff >= 2.5, < 2.6 numpy >= 1.25, < 1.26 -scipy >= 1.11, < 1.12 scikit-learn >=1.3, < 1.4 -liac-arff >= 2.5, < 2.6 +scipy >= 1.11, < 1.12 tabulate >= 0.9, < 0.10 diff --git a/scons/code_style.py b/scons/code_style.py new file mode 100644 index 0000000000..cf115982ff --- /dev/null +++ b/scons/code_style.py @@ -0,0 +1,84 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for checking and enforcing code style definitions. +""" +from glob import glob +from os import path + +from modules import BUILD_MODULE, CPP_MODULE, PYTHON_MODULE +from run import run_venv_program + + +def __isort(directory: str, enforce_changes: bool = False): + args = ['--settings-path', '.', '--virtual-env', 'venv', '--skip-gitignore'] + + if not enforce_changes: + args.append('--check') + + run_venv_program('isort', *args, directory) + + +def __yapf(directory: str, enforce_changes: bool = False): + args = ['-r', '-p', '--style=.style.yapf', '--exclude', '**/build/*.py', '-i' if enforce_changes else '--diff'] + run_venv_program('yapf', *args, directory) + + +def __pylint(directory: str): + args = ['--jobs=0', '--recursive=y', '--ignore=build', '--rcfile=.pylintrc', '--score=n'] + run_venv_program('pylint', *args, directory) + + +def __clang_format(directory: str, enforce_changes: bool = True): + cpp_header_files = glob(path.join(directory, '**', '*.hpp'), recursive=True) + cpp_source_files = glob(path.join(directory, '**', '*.cpp'), recursive=True) + args = ['--style=file'] + + if enforce_changes: + args.append('-i') + else: + args.append('-n') + args.append('--Werror') + + run_venv_program('clang-format', *args, *cpp_header_files, *cpp_source_files) + + +def check_python_code_style(**_): + """ + Checks if the Python source files adhere to the code style definitions. If this is not the case, an error is raised. + """ + for module in [BUILD_MODULE, PYTHON_MODULE]: + directory = module.root_dir + print('Checking Python code style in directory "' + directory + '"...') + __isort(directory) + __yapf(directory) + __pylint(directory) + + +def enforce_python_code_style(**_): + """ + Enforces the Python source files to adhere to the code style definitions. + """ + for module in [BUILD_MODULE, PYTHON_MODULE]: + directory = module.root_dir + print('Formatting Python code in directory "' + directory + '"...') + __isort(directory, enforce_changes=True) + __yapf(directory, enforce_changes=True) + + +def check_cpp_code_style(**_): + """ + Checks if the C++ source files adhere to the code style definitions. If this is not the case, an error is raised. + """ + directory = CPP_MODULE.root_dir + print('Checking C++ code style in directory "' + directory + '"...') + __clang_format(directory) + + +def enforce_cpp_code_style(**_): + """ + Enforces the C++ source files to adhere to the code style definitions. 
+ """ + directory = CPP_MODULE.root_dir + print('Formatting C++ code in directory "' + directory + '"...') + __clang_format(directory, enforce_changes=True) diff --git a/scons/compilation.py b/scons/compilation.py new file mode 100644 index 0000000000..5b3dcaff73 --- /dev/null +++ b/scons/compilation.py @@ -0,0 +1,68 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for compiling C++ and Cython code. +""" +from typing import List, Optional + +from modules import CPP_MODULE, PYTHON_MODULE +from run import run_venv_program + + +def __meson_setup(root_dir: str, build_dir: str, dependencies: Optional[List[str]] = None): + print('Setting up build directory "' + build_dir + '"...') + run_venv_program('meson', 'setup', build_dir, root_dir, print_args=True, additional_dependencies=dependencies) + + +def __meson_compile(build_dir: str): + run_venv_program('meson', 'compile', '-C', build_dir, print_args=True) + + +def __meson_install(build_dir: str): + run_venv_program('meson', 'install', '--no-rebuild', '--only-changed', '-C', build_dir, print_args=True) + + +def setup_cpp(**_): + """ + Sets up the build system for compiling the C++ code. + """ + __meson_setup(CPP_MODULE.root_dir, CPP_MODULE.build_dir, dependencies=['ninja']) + + +def compile_cpp(**_): + """ + Compiles the C++ code. + """ + print('Compiling C++ code...') + __meson_compile(CPP_MODULE.build_dir) + + +def install_cpp(**_): + """ + Installs shared libraries into the source tree. + """ + print('Installing shared libraries into source tree...') + __meson_install(CPP_MODULE.build_dir) + + +def setup_cython(**_): + """ + Sets up the build system for compiling the Cython code. + """ + __meson_setup(PYTHON_MODULE.root_dir, PYTHON_MODULE.build_dir, dependencies=['cython']) + + +def compile_cython(**_): + """ + Compiles the Cython code. + """ + print('Compiling Cython code...') + __meson_compile(PYTHON_MODULE.build_dir) + + +def install_cython(**_): + """ + Installs extension modules into the source tree. + """ + print('Installing extension modules into source tree...') + __meson_install(PYTHON_MODULE.build_dir) diff --git a/scons/documentation.py b/scons/documentation.py new file mode 100644 index 0000000000..8558a2ccd9 --- /dev/null +++ b/scons/documentation.py @@ -0,0 +1,90 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for generating the documentation. +""" +from os import makedirs, path +from typing import List, Optional + +from modules import DOC_MODULE +from run import run_program, run_venv_program + + +def __doxygen(config_file: str, output_dir: str): + makedirs(output_dir, exist_ok=True) + run_program('doxygen', config_file, print_args=True) + + +def __sphinx_apidoc(source_dir: str, output_dir: str): + run_venv_program('sphinx-apidoc', + '--tocfile', + 'index', + '-f', + '-o', + output_dir, + source_dir, + '**/cython', + print_args=True, + additional_dependencies=['sphinx', 'furo'], + requirements_file=DOC_MODULE.requirements_file) + + +def __sphinx_build(source_dir: str, output_dir: str, additional_dependencies: Optional[List[str]] = None): + run_venv_program('sphinx-build', + '-M', + 'html', + source_dir, + output_dir, + print_args=True, + additional_dependencies=additional_dependencies, + requirements_file=DOC_MODULE.requirements_file) + + +# pylint: disable=unused-argument +def apidoc_cpp(env, target, source): + """ + Builds the API documentation for a single C++ subproject. 
+ + :param env: The scons environment + :param target: The path of the files that belong to the API documentation, if it has already been built, or the + path of the directory, where the API documentation should be stored + :param source: The paths of the source files from which the API documentation should be built + """ + if target: + apidoc_subproject = DOC_MODULE.find_cpp_apidoc_subproject(target[0].path) + config_file = apidoc_subproject.config_file + + if path.isfile(config_file): + print('Generating C++ API documentation for subproject "' + apidoc_subproject.name + '"...') + __doxygen(config_file=config_file, output_dir=path.join(apidoc_subproject.apidoc_dir)) + + +# pylint: disable=unused-argument +def apidoc_python(env, target, source): + """ + Builds the API documentation for a single Python subproject. + + :param env: The scons environment + :param target: The path of the files that belong to the API documentation, if it has already been built, or the + path of the directory, where the API documentation should be stored + :param source: The paths of the source files from which the API documentation should be built + """ + if target: + apidoc_subproject = DOC_MODULE.find_python_apidoc_subproject(target[0].path) + tmp_dir = apidoc_subproject.build_dir + + if path.isdir(tmp_dir): + print('Generating Python API documentation for subproject "' + apidoc_subproject.name + '"...') + __sphinx_apidoc(source_dir=apidoc_subproject.source_subproject.source_dir, output_dir=tmp_dir) + __sphinx_build(source_dir=tmp_dir, output_dir=apidoc_subproject.apidoc_dir) + + +def doc(**_): + """ + Builds the documentation. + """ + print('Generating documentation...') + __sphinx_build( + source_dir=DOC_MODULE.root_dir, + output_dir=DOC_MODULE.build_dir, + additional_dependencies=['sphinxext-opengraph', 'sphinx-inline-tabs', 'sphinx-copybutton', 'myst-parser']) diff --git a/scons/modules.py b/scons/modules.py new file mode 100644 index 0000000000..470756d98a --- /dev/null +++ b/scons/modules.py @@ -0,0 +1,455 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides access to directories and files belonging to different modules that are part of the project. +""" +from abc import ABC, abstractmethod +from glob import glob +from os import path, walk +from typing import Callable, List + + +def find_files_recursively(directory: str, + directory_filter: Callable[[str], bool] = lambda _: True, + file_filter: Callable[[str], bool] = lambda _: True) -> List[str]: + """ + Finds and returns files in a directory and its subdirectories that match a given filter. + + :param directory: The directory to be searched + :param directory_filter: A function to be used for filtering subdirectories + :param file_filter: A function to be used for filtering files + :return: A list that contains the paths of all files that have been found + """ + result = [] + + for root_directory, subdirectories, files in walk(directory, topdown=True): + subdirectories[:] = [subdirectory for subdirectory in subdirectories if directory_filter(subdirectory)] + + for file in files: + if file_filter(file): + result.append(path.join(root_directory, file)) + + return result + + +class Module(ABC): + """ + An abstract base class for all classes that provide access to directories and files that belong to a module. + """ + + @property + @abstractmethod + def root_dir(self) -> str: + """ + The path to the module's root directory. + """ + + @property + def build_dir(self) -> str: + """ + The path to the directory, where build files are stored. 
+ """ + return path.join(self.root_dir, 'build') + + @property + def requirements_file(self) -> str: + """ + The path to the requirements.txt file that specifies dependencies required by a module. + """ + return path.join(self.root_dir, 'requirements.txt') + + +class SourceModule(Module, ABC): + """ + An abstract base class for all classes that provide access to directories and files that belong to a module, which + contains source code. + """ + + class Subproject(ABC): + """ + An abstract base class for all classes that provide access to directories and files that belong to an individual + subproject that is part of a module, which contains source files. + """ + + def __init__(self, parent_module: 'SourceModule', root_dir: str): + """ + :param parent_module: The `SourceModule`, the subproject belongs to + :param root_dir: The root directory of the suproject + """ + self.parent_module = parent_module + self.root_dir = root_dir + + @property + def name(self) -> str: + """ + The name of the subproject. + """ + return path.basename(self.root_dir) + + +class PythonModule(SourceModule): + """ + Provides access to directories and files that belong to the project's Python code. + """ + + class Subproject(SourceModule.Subproject): + """ + Provides access to directories and files that belong to an individual subproject that is part of the project's + Python code. + """ + + @staticmethod + def __filter_pycache_directories(directory: str) -> bool: + return directory != '__pycache__' + + @property + def source_dir(self) -> str: + """ + The directory that contains the subproject's source code. + """ + return path.join(self.root_dir, 'mlrl') + + @property + def test_dir(self) -> str: + """ + The directory that contains the subproject's automated tests. + """ + return path.join(self.root_dir, 'tests') + + @property + def dist_dir(self) -> str: + """ + The directory that contains all wheel packages that have been built for the subproject. + """ + return path.join(self.root_dir, 'dist') + + @property + def build_dirs(self) -> List[str]: + """ + A list that contains all directories, where the subproject's build files are stored. + """ + return [self.dist_dir, path.join(self.root_dir, 'build')] + glob(path.join(self.root_dir, '*.egg-info')) + + def find_wheels(self) -> List[str]: + """ + Finds and returns all wheel packages that have been built for the subproject. + + :return: A list that contains the paths of the wheel packages that have been found + """ + return glob(path.join(self.dist_dir, '*.whl')) + + def find_source_files(self) -> List[str]: + """ + Finds and returns all source files that are contained by the subproject. + + :return: A list that contains the paths of the source files that have been found + """ + return find_files_recursively(self.source_dir, directory_filter=self.__filter_pycache_directories) + + def find_shared_libraries(self) -> List[str]: + """ + Finds and returns all shared libraries that are contained in the subproject's source tree. 
+ + :return: A list that contains all shared libraries that have been found + """ + + def file_filter(file) -> bool: + return (file.startswith('lib') and file.find('.so') >= 0) \ + or file.endswith('.dylib') \ + or (file.startswith('mlrl') and file.endswith('.lib')) \ + or file.endswith('.dll') + + return find_files_recursively(self.source_dir, + directory_filter=self.__filter_pycache_directories, + file_filter=file_filter) + + def find_extension_modules(self) -> List[str]: + """ + Finds and returns all extension modules that are contained in the subproject's source tree. + + :return: A list that contains all extension modules that have been found + """ + + def file_filter(file) -> bool: + return (not file.startswith('lib') and file.endswith('.so')) \ + or file.endswith('.pyd') \ + or (not file.startswith('mlrl') and file.endswith('.lib')) + + return find_files_recursively(self.source_dir, + directory_filter=self.__filter_pycache_directories, + file_filter=file_filter) + + @property + def root_dir(self) -> str: + return 'python' + + def find_subprojects(self) -> List[Subproject]: + """ + Finds and returns all subprojects that are part of the Python code. + + :return: A list that contains all subprojects that have been found + """ + return [ + PythonModule.Subproject(self, file) for file in glob(path.join(self.root_dir, 'subprojects', '*')) + if path.isdir(file) + ] + + def find_subproject(self, file: str) -> Subproject: + """ + Finds and returns the subproject to which a given file belongs. + + :param file: The path of the file + :return: The subproject to which the given file belongs + """ + for subproject in self.find_subprojects(): + if file.startswith(subproject.root_dir): + return subproject + + raise ValueError('File "' + file + '" does not belong to a Python subproject') + + +class CppModule(SourceModule): + """ + Provides access to directories and files that belong to the project's C++ code. + """ + + class Subproject(SourceModule.Subproject): + """ + Provides access to directories and files that belong to an individual subproject that is part of the project's + C++ code. + """ + + def find_source_files(self) -> List[str]: + """ + Finds and returns all source files that are contained by the subproject. + + :return: A list that contains the paths of the source files that have been found + """ + + def file_filter(file) -> bool: + return file.endswith('.hpp') or file.endswith('.cpp') + + return find_files_recursively(self.root_dir, file_filter=file_filter) + + @property + def root_dir(self) -> str: + return 'cpp' + + def find_subprojects(self) -> List[Subproject]: + """ + Finds and returns all subprojects that are part of the C++ code. + + :return: A list that contains all subprojects that have been found + """ + return [ + CppModule.Subproject(self, file) for file in glob(path.join(self.root_dir, 'subprojects', '*')) + if path.isdir(file) + ] + + +class BuildModule(Module): + """ + Provides access to directories and files that belong to the build system. + """ + + @property + def root_dir(self) -> str: + return 'scons' + + +class DocumentationModule(Module): + """ + Provides access to directories and files that belong to the project's documentation. + """ + + class ApidocSubproject(ABC): + """ + An abstract base class for all classes that provide access to directories and files that are needed for building + the API documentation of a certain C++ or Python subproject.
+ """ + + def __init__(self, parent_module: 'DocumentationModule', source_subproject: SourceModule.Subproject): + """ + :param parent_module: The `DocumentationModule` this subproject belongs to + :param source_subproject: The subproject of which the API documentation should be built + """ + self.parent_module = parent_module + self.source_subproject = source_subproject + + @property + def name(self) -> str: + """ + The name of the subproject of which the API documentation should be built. + """ + return self.source_subproject.name + + @property + def apidoc_dir(self) -> str: + """ + The directory, where the API documentation should be stored. + """ + return path.join(self.parent_module.apidoc_dir, 'api', self.source_subproject.parent_module.root_dir, + self.name) + + def find_apidoc_files(self) -> List[str]: + """ + Finds and returns all files that belong to the API documentation that has been built. + + :return: A list that contains the paths of the build files that have been found + """ + return find_files_recursively(self.apidoc_dir) + + def find_build_files(self) -> List[str]: + """ + Finds and returns all build files that have been created when building the API documentation. + + :return: A list that contains the paths of all build files that have been found + """ + return [self.apidoc_dir] + + class CppApidocSubproject(ApidocSubproject): + """ + Provides access to the directories and files that are necessary for building the API documentation of a certain + C++ subproject. + """ + + @property + def config_file(self) -> str: + """ + The config file, which should be used for building the API documentation. + """ + return path.join(self.parent_module.root_dir, 'Doxyfile_' + self.name) + + class PythonApidocSubproject(ApidocSubproject): + """ + Provides access to the directories and files that are necessary for building the API documentation of a certain + Python subproject. + """ + + @property + def config_file(self) -> str: + """ + The config file, which should be used for building the API documentation. + """ + return path.join(self.build_dir, 'conf.py') + + @property + def build_dir(self) -> str: + """ + The directory, where build files should be stored. + """ + return path.join(self.parent_module.root_dir, 'python', self.name) + + def find_build_files(self) -> List[str]: + + def file_filter(file) -> bool: + return file.endswith('.rst') + + return find_files_recursively(self.build_dir, file_filter=file_filter) + super().find_build_files() + + @property + def root_dir(self) -> str: + return 'doc' + + @property + def config_file(self) -> str: + """ + The config file that should be used for building the documentation. + """ + return path.join(self.root_dir, 'conf.py') + + @property + def apidoc_dir(self) -> str: + """ + The directory, where API documentations should be stored. + """ + return path.join(self.root_dir, 'apidoc') + + @property + def build_dir(self) -> str: + """ + The directory, where the documentation should be stored. + """ + return path.join(self.root_dir, '_build') + + def find_build_files(self) -> List[str]: + """ + Finds and returns all files that belong to the documentation that has been built. + + :return: A list that contains the paths of the build files that have been found + """ + return find_files_recursively(self.build_dir) + + def find_source_files(self) -> List[str]: + """ + Finds and returns all source files from which the documentation is built. 
+ + :return: A list that contains the paths of the source files that have been found + """ + + def directory_filter(directory: str) -> bool: + return directory != path.basename(self.build_dir) \ + and directory != path.basename(self.apidoc_dir) \ + and directory != 'python' + + def file_filter(file: str) -> bool: + return not file.startswith('Doxyfile') and not file == 'requirements.txt' and not file == 'conf.py' + + return find_files_recursively(self.root_dir, directory_filter=directory_filter, file_filter=file_filter) + + def get_cpp_apidoc_subproject(self, cpp_subproject: CppModule.Subproject) -> CppApidocSubproject: + """ + Returns a `CppApidocSubproject` for building the API documentation of a given C++ subproject. + + :param cpp_subproject: The C++ subproject of which the API documentation should be built + :return: A `CppApidocSubproject` + """ + return DocumentationModule.CppApidocSubproject(self, cpp_subproject) + + def get_python_apidoc_subproject(self, python_subproject: PythonModule.Subproject) -> PythonApidocSubproject: + """ + Returns a `PythonApidocSubproject` for building the API documentation of a given Python subproject. + + :param python_subproject: The Python subproject of which the API documentation should be built + :return: A `PythonApidocSubproject` + """ + return DocumentationModule.PythonApidocSubproject(self, python_subproject) + + def find_cpp_apidoc_subproject(self, file: str) -> CppApidocSubproject: + """ + Finds and returns the `CppApidocSubproject` to which a given file belongs. + + :param file: The path of the file + :return: The `CppApidocSubproject` to which the given file belongs + """ + for subproject in CPP_MODULE.find_subprojects(): + apidoc_subproject = self.get_cpp_apidoc_subproject(subproject) + + if file.startswith(apidoc_subproject.apidoc_dir): + return apidoc_subproject + + raise ValueError('File "' + file + '" does not belong to a C++ API documentation subproject') + + def find_python_apidoc_subproject(self, file: str) -> PythonApidocSubproject: + """ + Finds and returns the `PythonApidocSubproject` to which a given file belongs. + + :param file: The path of the file + :return: The `PythonApidocSubproject` to which the given file belongs + """ + for subproject in PYTHON_MODULE.find_subprojects(): + apidoc_subproject = self.get_python_apidoc_subproject(subproject) + + if file.startswith(apidoc_subproject.apidoc_dir): + return apidoc_subproject + + raise ValueError('File "' + file + '" does not belong to a Python API documentation subproject') + + +BUILD_MODULE = BuildModule() + +PYTHON_MODULE = PythonModule() + +CPP_MODULE = CppModule() + +DOC_MODULE = DocumentationModule() diff --git a/scons/packaging.py b/scons/packaging.py new file mode 100644 index 0000000000..be8e2a2910 --- /dev/null +++ b/scons/packaging.py @@ -0,0 +1,48 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for building and installing Python wheel packages. +""" +from typing import List + +from modules import PYTHON_MODULE +from run import run_python_program + + +def __build_python_wheel(package_dir: str): + run_python_program('build', '--wheel', package_dir, print_args=True) + + +def __install_python_wheels(wheels: List[str]): + run_python_program('pip', 'install', '--force-reinstall', '--no-deps', *wheels, print_args=True) + + +# pylint: disable=unused-argument +def build_python_wheel(env, target, source): + """ + Builds a Python wheel package for a single subproject.
+ + :param env: The scons environment + :param target: The path of the wheel package to be built, if it does already exist, or the path of the directory, + where the wheel package should be stored + :param source: The source files from which the wheel package should be built + """ + if target: + subproject = PYTHON_MODULE.find_subproject(target[0].path) + print('Building Python wheels for subproject "' + subproject.name + '"...') + __build_python_wheel(subproject.root_dir) + + +# pylint: disable=unused-argument +def install_python_wheels(env, target, source): + """ + Installs all Python wheel packages that have been built for a single subproject. + + :param env: The scons environment + :param target: The path of the subproject's root directory + :param source: The paths of the wheel packages to be installed + """ + if source: + subproject = PYTHON_MODULE.find_subproject(source[0].path) + print('Installing Python wheels for subproject "' + subproject.name + '"...') + __install_python_wheels(subproject.find_wheels()) diff --git a/scons/requirements.txt b/scons/requirements.txt new file mode 100644 index 0000000000..09ce76feb0 --- /dev/null +++ b/scons/requirements.txt @@ -0,0 +1,9 @@ +build >= 1.0, < 1.1 +clang-format >= 16.0, < 16.1 +cython >= 3.0, < 3.1 +isort >= 5.12, < 5.13 +meson >= 1.2, < 1.3 +ninja >= 1.11, < 1.12 +pylint >= 2.17, < 2.18 +scons >= 4.5, < 4.6 +yapf >= 0.40, < 0.41 diff --git a/scons/run.py b/scons/run.py new file mode 100644 index 0000000000..4b607a7d25 --- /dev/null +++ b/scons/run.py @@ -0,0 +1,177 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for installing and running external programs during the build process. +""" +import subprocess +import sys + +from functools import reduce +from os import path +from typing import List, Optional, Tuple + +from modules import BUILD_MODULE, CPP_MODULE, PYTHON_MODULE +from pkg_resources import DistributionNotFound, VersionConflict, parse_requirements, require + + +def __run_command(cmd: str, *args, print_args: bool = False): + cmd_formatted = path.basename(cmd) + (reduce(lambda aggr, argument: aggr + ' ' + argument, args, '') + if print_args else '') + print('Running external command "' + cmd_formatted + '"...') + cmd_args = [cmd] + + for arg in args: + cmd_args.append(str(arg)) + + out = subprocess.run(cmd_args, check=False) + exit_code = out.returncode + + if exit_code != 0: + print('External command "' + cmd_formatted + '" terminated with non-zero exit code ' + str(exit_code)) + sys.exit(exit_code) + + +def __is_dependency_missing(dependency: str) -> bool: + try: + require(dependency) + return False + except DistributionNotFound: + return True + except VersionConflict: + return False + + +def __is_dependency_outdated(dependency: str) -> bool: + try: + require(dependency) + return False + except DistributionNotFound: + return False + except VersionConflict: + return True + + +def __find_dependencies(requirements_file: str, *dependencies: str) -> List[str]: + with open(requirements_file, mode='r', encoding='utf-8') as file: + dependency_dict = {dependency.key: str(dependency) for dependency in parse_requirements(file.read())} + + if dependencies: + return [dependency_dict[dependency] for dependency in dependencies if dependency in dependency_dict] + + return list(dependency_dict.values()) + + +def __find_missing_and_outdated_dependencies(requirements_file: str, *dependencies: str) -> Tuple[List[str], List[str]]: + dependencies = __find_dependencies(requirements_file, *dependencies) + 
missing_dependencies = [dependency for dependency in dependencies if __is_dependency_missing(dependency)] + outdated_dependencies = [dependency for dependency in dependencies if __is_dependency_outdated(dependency)] + return missing_dependencies, outdated_dependencies + + +def __pip_install(dependencies: List[str], force_reinstall: bool = False): + args = ['--prefer-binary'] + + if force_reinstall: + args.append('--force-reinstall') + + __run_command('python', '-m', 'pip', 'install', *args, *dependencies, print_args=True) + + +def __install_dependencies(requirements_file: str, *dependencies: str): + missing_dependencies, outdated_dependencies = __find_missing_and_outdated_dependencies( + requirements_file, *dependencies) + + if missing_dependencies: + __pip_install(missing_dependencies) + + if outdated_dependencies: + __pip_install(outdated_dependencies, force_reinstall=True) + + +def install_build_dependencies(*dependencies: str): + """ + Installs one or several dependencies that are required by the build system. + + :param dependencies: The names of the dependencies that should be installed + """ + __install_dependencies(BUILD_MODULE.requirements_file, *dependencies) + + +def install_runtime_dependencies(**_): + """ + Installs all runtime dependencies that are required by the Python and C++ modules. + """ + for module in [PYTHON_MODULE, CPP_MODULE]: + requirements_file = module.requirements_file + + if path.isfile(requirements_file): + __install_dependencies(requirements_file) + + +def run_program(program: str, + *args, + print_args: bool = False, + additional_dependencies: Optional[List[str]] = None, + requirements_file: str = BUILD_MODULE.requirements_file): + """ + Runs an external program. + + :param program: The name of the program to be run + :param args: Optional arguments that should be passed to the program + :param print_args: True, if the arguments should be included in log statements, False otherwise + :param additional_dependencies: The names of dependencies that should be installed before running the program + :param requirements_file: The path of the requirements.txt file that specifies the dependency versions + """ + dependencies = [program] + + if additional_dependencies: + dependencies.extend(additional_dependencies) + + __install_dependencies(requirements_file, *dependencies) + __run_command(program, *args, print_args=print_args) + + +def run_venv_program(program: str, + *args, + print_args: bool = False, + additional_dependencies: Optional[List[str]] = None, + requirements_file: str = BUILD_MODULE.requirements_file): + """ + Runs an external program that has been installed into the virtual environment.
+ + :param program: The name of the program to be run + :param args: Optional arguments that should be passed to the program + :param print_args: True, if the arguments should be included in log statements, False otherwise + :param additional_dependencies: The names of dependencies that should be installed before running the program + :param requirements_file: The path of the requirements.txt file that specifies the dependency versions + """ + dependencies = [program] + + if additional_dependencies: + dependencies.extend(additional_dependencies) + + __install_dependencies(requirements_file, *dependencies) + __run_command(path.join(path.dirname(sys.executable), program), *args, print_args=print_args) + + +def run_python_program(program: str, + *args, + print_args: bool = False, + additional_dependencies: Optional[List[str]] = None, + requirements_file: str = BUILD_MODULE.requirements_file): + """ + Runs an external Python program. + + :param program: The name of the program to be run + :param args: Optional arguments that should be passed to the program + :param print_args: True, if the arguments should be included in log statements, False otherwise + :param additional_dependencies: The names of dependencies that should be installed before running the program + :param requirements_file: The path of the requirements.txt file that specifies the dependency versions + """ + dependencies = [program] + + if additional_dependencies: + dependencies.extend(additional_dependencies) + + __install_dependencies(requirements_file, *dependencies) + __run_command(path.join(path.dirname(sys.executable), 'python'), '-m', program, *args, print_args=print_args) diff --git a/scons/sconstruct.py b/scons/sconstruct.py new file mode 100644 index 0000000000..e494a7dbed --- /dev/null +++ b/scons/sconstruct.py @@ -0,0 +1,247 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines the individual targets of the build process. +""" +import sys + +from functools import reduce +from os import path + +from code_style import check_cpp_code_style, check_python_code_style, enforce_cpp_code_style, enforce_python_code_style +from compilation import compile_cpp, compile_cython, install_cpp, install_cython, setup_cpp, setup_cython +from documentation import apidoc_cpp, apidoc_python, doc +from modules import BUILD_MODULE, CPP_MODULE, DOC_MODULE, PYTHON_MODULE +from packaging import build_python_wheel, install_python_wheels +from run import install_runtime_dependencies +from testing import run_tests + +from SCons.Script import COMMAND_LINE_TARGETS +from SCons.Script.SConscript import SConsEnvironment + + +def __create_phony_target(environment, target, action=None): + return environment.AlwaysBuild(environment.Alias(target, None, action)) + + +def __print_if_clean(environment, message: str): + if environment.GetOption('clean'): + print(message) + + +# Define target names... 
+TARGET_NAME_TEST_FORMAT = 'test_format' +TARGET_NAME_TEST_FORMAT_PYTHON = TARGET_NAME_TEST_FORMAT + '_python' +TARGET_NAME_TEST_FORMAT_CPP = TARGET_NAME_TEST_FORMAT + '_cpp' +TARGET_NAME_FORMAT = 'format' +TARGET_NAME_FORMAT_PYTHON = TARGET_NAME_FORMAT + '_python' +TARGET_NAME_FORMAT_CPP = TARGET_NAME_FORMAT + '_cpp' +TARGET_NAME_VENV = 'venv' +TARGET_NAME_COMPILE = 'compile' +TARGET_NAME_COMPILE_CPP = TARGET_NAME_COMPILE + '_cpp' +TARGET_NAME_COMPILE_CYTHON = TARGET_NAME_COMPILE + '_cython' +TARGET_NAME_INSTALL = 'install' +TARGET_NAME_INSTALL_CPP = TARGET_NAME_INSTALL + '_cpp' +TARGET_NAME_INSTALL_CYTHON = TARGET_NAME_INSTALL + '_cython' +TARGET_NAME_BUILD_WHEELS = 'build_wheels' +TARGET_NAME_INSTALL_WHEELS = 'install_wheels' +TARGET_NAME_TESTS = 'tests' +TARGET_NAME_APIDOC = 'apidoc' +TARGET_NAME_APIDOC_CPP = TARGET_NAME_APIDOC + '_cpp' +TARGET_NAME_APIDOC_PYTHON = TARGET_NAME_APIDOC + '_python' +TARGET_NAME_DOC = 'doc' + +VALID_TARGETS = { + TARGET_NAME_TEST_FORMAT, TARGET_NAME_TEST_FORMAT_PYTHON, TARGET_NAME_TEST_FORMAT_CPP, TARGET_NAME_FORMAT, + TARGET_NAME_FORMAT_PYTHON, TARGET_NAME_FORMAT_CPP, TARGET_NAME_VENV, TARGET_NAME_COMPILE, TARGET_NAME_COMPILE_CPP, + TARGET_NAME_COMPILE_CYTHON, TARGET_NAME_INSTALL, TARGET_NAME_INSTALL_CPP, TARGET_NAME_INSTALL_CYTHON, + TARGET_NAME_BUILD_WHEELS, TARGET_NAME_INSTALL_WHEELS, TARGET_NAME_TESTS, TARGET_NAME_APIDOC, TARGET_NAME_APIDOC_CPP, + TARGET_NAME_APIDOC_PYTHON, TARGET_NAME_DOC +} + +DEFAULT_TARGET = TARGET_NAME_INSTALL_WHEELS + +# Raise an error if any invalid targets are given... +invalid_targets = [target for target in COMMAND_LINE_TARGETS if target not in VALID_TARGETS] + +if invalid_targets: + print('The following targets are unknown: ' + + reduce(lambda aggr, target: aggr + (', ' if len(aggr) > 0 else '') + target, invalid_targets, '')) + sys.exit(-1) + +# Create temporary file ".sconsign.dblite" in the build directory... +env = SConsEnvironment() +env.SConsignFile(name=path.relpath(path.join(BUILD_MODULE.build_dir, '.sconsign'), BUILD_MODULE.root_dir)) + +# Define targets for checking code style definitions... +target_test_format_python = __create_phony_target(env, TARGET_NAME_TEST_FORMAT_PYTHON, action=check_python_code_style) +target_test_format_cpp = __create_phony_target(env, TARGET_NAME_TEST_FORMAT_CPP, action=check_cpp_code_style) +target_test_format = __create_phony_target(env, TARGET_NAME_TEST_FORMAT) +env.Depends(target_test_format, [target_test_format_python, target_test_format_cpp]) + +# Define targets for enforcing code style definitions... +target_format_python = __create_phony_target(env, TARGET_NAME_FORMAT_PYTHON, action=enforce_python_code_style) +target_format_cpp = __create_phony_target(env, TARGET_NAME_FORMAT_CPP, action=enforce_cpp_code_style) +target_format = __create_phony_target(env, TARGET_NAME_FORMAT) +env.Depends(target_format, [target_format_python, target_format_cpp]) + +# Define target for installing runtime dependencies... +target_venv = __create_phony_target(env, TARGET_NAME_VENV, action=install_runtime_dependencies) + +# Define targets for compiling the C++ and Cython code... 
+env.Command(CPP_MODULE.build_dir, None, action=setup_cpp) +target_compile_cpp = __create_phony_target(env, TARGET_NAME_COMPILE_CPP, action=compile_cpp) +env.Depends(target_compile_cpp, [target_venv, CPP_MODULE.build_dir]) + +env.Command(PYTHON_MODULE.build_dir, None, action=setup_cython) +target_compile_cython = __create_phony_target(env, TARGET_NAME_COMPILE_CYTHON, action=compile_cython) +env.Depends(target_compile_cython, [target_compile_cpp, PYTHON_MODULE.build_dir]) + +target_compile = __create_phony_target(env, TARGET_NAME_COMPILE) +env.Depends(target_compile, [target_compile_cpp, target_compile_cython]) + +# Define targets for cleaning up C++ and Cython build directories... +if not COMMAND_LINE_TARGETS \ + or TARGET_NAME_COMPILE_CPP in COMMAND_LINE_TARGETS \ + or TARGET_NAME_COMPILE in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing C++ build files...') + env.Clean([target_compile_cpp, DEFAULT_TARGET], CPP_MODULE.build_dir) + +if not COMMAND_LINE_TARGETS \ + or TARGET_NAME_COMPILE_CYTHON in COMMAND_LINE_TARGETS \ + or TARGET_NAME_COMPILE in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing Cython build files...') + env.Clean([target_compile_cython, DEFAULT_TARGET], PYTHON_MODULE.build_dir) + +# Define targets for installing shared libraries and extension modules into the source tree... +target_install_cpp = __create_phony_target(env, TARGET_NAME_INSTALL_CPP, action=install_cpp) +env.Depends(target_install_cpp, target_compile_cpp) + +target_install_cython = __create_phony_target(env, TARGET_NAME_INSTALL_CYTHON, action=install_cython) +env.Depends(target_install_cython, target_compile_cython) + +target_install = env.Alias(TARGET_NAME_INSTALL, None, None) +env.Depends(target_install, [target_install_cpp, target_install_cython]) + +# Define targets for removing shared libraries and extension modules from the source tree... +if not COMMAND_LINE_TARGETS \ + or TARGET_NAME_INSTALL_CPP in COMMAND_LINE_TARGETS \ + or TARGET_NAME_INSTALL in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing shared libraries from source tree...') + + for subproject in PYTHON_MODULE.find_subprojects(): + env.Clean([target_install_cpp, DEFAULT_TARGET], subproject.find_shared_libraries()) + +if not COMMAND_LINE_TARGETS \ + or TARGET_NAME_INSTALL_CYTHON in COMMAND_LINE_TARGETS \ + or TARGET_NAME_INSTALL in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing extension modules from source tree...') + + for subproject in PYTHON_MODULE.find_subprojects(): + env.Clean([target_install_cython, DEFAULT_TARGET], subproject.find_extension_modules()) + +# Define targets for building and installing Python wheels... 
+commands_build_wheels = [] +commands_install_wheels = [] + +for subproject in PYTHON_MODULE.find_subprojects(): + wheels = subproject.find_wheels() + targets_build_wheels = wheels if wheels else subproject.dist_dir + + command_build_wheels = env.Command(targets_build_wheels, subproject.find_source_files(), action=build_python_wheel) + commands_build_wheels.append(command_build_wheels) + + command_install_wheels = env.Command(subproject.root_dir, targets_build_wheels, action=install_python_wheels) + env.Depends(command_install_wheels, command_build_wheels) + commands_install_wheels.append(command_install_wheels) + +target_build_wheels = env.Alias(TARGET_NAME_BUILD_WHEELS, None, None) +env.Depends(target_build_wheels, [target_install] + commands_build_wheels) + +target_install_wheels = env.Alias(TARGET_NAME_INSTALL_WHEELS, None, None) +env.Depends(target_install_wheels, [target_install] + commands_install_wheels) + +# Define target for cleaning up Python wheels and associated build directories... +if not COMMAND_LINE_TARGETS or TARGET_NAME_BUILD_WHEELS in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing Python wheels...') + + for subproject in PYTHON_MODULE.find_subprojects(): + env.Clean([target_build_wheels, DEFAULT_TARGET], subproject.build_dirs) + +# Define targets for running automated tests... +target_test = __create_phony_target(env, TARGET_NAME_TESTS, action=run_tests) +env.Depends(target_test, target_install_wheels) + +# Define targets for generating the documentation... +commands_apidoc_cpp = [] +commands_apidoc_python = [] + +for subproject in CPP_MODULE.find_subprojects(): + apidoc_subproject = DOC_MODULE.get_cpp_apidoc_subproject(subproject) + config_file = apidoc_subproject.config_file + + if path.isfile(config_file): + apidoc_files = apidoc_subproject.find_apidoc_files() + targets_apidoc_cpp = apidoc_files if apidoc_files else apidoc_subproject.apidoc_dir + source_files = [config_file] + subproject.find_source_files() + command_apidoc_cpp = env.Command(targets_apidoc_cpp, source_files, action=apidoc_cpp) + commands_apidoc_cpp.append(command_apidoc_cpp) + +target_apidoc_cpp = env.Alias(TARGET_NAME_APIDOC_CPP, None, None) +env.Depends(target_apidoc_cpp, commands_apidoc_cpp) + +for subproject in PYTHON_MODULE.find_subprojects(): + apidoc_subproject = DOC_MODULE.get_python_apidoc_subproject(subproject) + config_file = apidoc_subproject.config_file + + if path.isfile(config_file): + apidoc_files = apidoc_subproject.find_apidoc_files() + targets_apidoc_python = apidoc_files if apidoc_files else apidoc_subproject.apidoc_dir + source_files = [config_file] + subproject.find_source_files() + command_apidoc_python = env.Command(targets_apidoc_python, source_files, action=apidoc_python) + env.Depends(command_apidoc_python, target_install_wheels) + commands_apidoc_python.append(command_apidoc_python) + +target_apidoc_python = env.Alias(TARGET_NAME_APIDOC_PYTHON, None, None) +env.Depends(target_apidoc_python, commands_apidoc_python) + +target_apidoc = env.Alias(TARGET_NAME_APIDOC, None, None) +env.Depends(target_apidoc, [target_apidoc_cpp, target_apidoc_python]) + +doc_files = DOC_MODULE.find_build_files() +targets_doc = doc_files if doc_files else DOC_MODULE.build_dir +command_doc = env.Command(targets_doc, [DOC_MODULE.config_file] + DOC_MODULE.find_source_files(), action=doc) +env.Depends(command_doc, target_apidoc) +target_doc = env.Alias(TARGET_NAME_DOC, None, None) +env.Depends(target_doc, command_doc) + +# Define target for cleaning up the documentation and associated build 
directories... +if not COMMAND_LINE_TARGETS \ + or TARGET_NAME_APIDOC_CPP in COMMAND_LINE_TARGETS \ + or TARGET_NAME_APIDOC in COMMAND_LINE_TARGETS \ + or TARGET_NAME_DOC in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing C++ API documentation...') + + for subproject in CPP_MODULE.find_subprojects(): + apidoc_subproject = DOC_MODULE.get_cpp_apidoc_subproject(subproject) + env.Clean([target_apidoc_cpp, DEFAULT_TARGET], apidoc_subproject.find_build_files()) + +if not COMMAND_LINE_TARGETS \ + or TARGET_NAME_APIDOC_PYTHON in COMMAND_LINE_TARGETS \ + or TARGET_NAME_APIDOC in COMMAND_LINE_TARGETS \ + or TARGET_NAME_DOC in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing Python API documentation...') + + for subproject in PYTHON_MODULE.find_subprojects(): + apidoc_subproject = DOC_MODULE.get_python_apidoc_subproject(subproject) + env.Clean([target_apidoc_python, DEFAULT_TARGET], apidoc_subproject.find_build_files()) + +if not COMMAND_LINE_TARGETS or TARGET_NAME_APIDOC in COMMAND_LINE_TARGETS or TARGET_NAME_DOC in COMMAND_LINE_TARGETS: + env.Clean([target_apidoc, DEFAULT_TARGET], DOC_MODULE.apidoc_dir) + +if not COMMAND_LINE_TARGETS or TARGET_NAME_DOC in COMMAND_LINE_TARGETS: + __print_if_clean(env, 'Removing documentation...') + env.Clean([target_doc, DEFAULT_TARGET], DOC_MODULE.build_dir) + +# Set the default target... +env.Default(DEFAULT_TARGET) diff --git a/scons/testing.py b/scons/testing.py new file mode 100644 index 0000000000..30a5b9d753 --- /dev/null +++ b/scons/testing.py @@ -0,0 +1,25 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for running automated tests. +""" +from os import path + +from modules import PYTHON_MODULE +from run import run_python_program + + +def __run_python_tests(directory: str): + run_python_program('unittest', 'discover', '-v', '-f', '-s', directory) + + +def run_tests(**_): + """ + Runs all automated tests. + """ + for subproject in PYTHON_MODULE.find_subprojects(): + test_dir = subproject.test_dir + + if path.isdir(test_dir): + print('Running automated tests for subpackage "' + subproject.name + '"...') + __run_python_tests(test_dir)
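Taken together, scons/modules.py is the backbone that the other build scripts introduced in this patch (code_style.py, compilation.py, documentation.py, packaging.py, testing.py and sconstruct.py) use to locate source files, wheels and documentation output. The short Python sketch below is illustrative only and not part of the patch; it assumes it is run from the repository root with the scons/ directory on the Python path, which is how the SCons build itself imports these modules.

# Illustrative sketch only: prints the locations that the build scripts derive
# from the module classes defined in scons/modules.py. Assumes the working
# directory is the repository root and scons/ is on the Python path.
from modules import CPP_MODULE, DOC_MODULE, PYTHON_MODULE

for subproject in PYTHON_MODULE.find_subprojects():
    # Python subprojects live under python/subprojects/*; their wheels are collected from <root>/dist
    print(subproject.name, subproject.dist_dir, subproject.find_wheels())

for subproject in CPP_MODULE.find_subprojects():
    apidoc_subproject = DOC_MODULE.get_cpp_apidoc_subproject(subproject)
    # The Doxygen config is expected at doc/Doxyfile_<name>; output goes to doc/apidoc/api/cpp/<name>
    print(apidoc_subproject.config_file, apidoc_subproject.apidoc_dir)

sconstruct.py wires exactly these lookups into the SCons targets listed in VALID_TARGETS (compile, install, build_wheels, tests, apidoc, doc, the code style targets, and so on), with install_wheels as the default target.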