diff --git a/.changelog-bugfix.md b/.changelog-bugfix.md index 48f5d4c026..d846aebe23 100644 --- a/.changelog-bugfix.md +++ b/.changelog-bugfix.md @@ -1,5 +1,6 @@ # Quality-of-Life Improvements +- The build system now uses a lightweight custom implementation instead of [SCons](https://scons.org/) and is better modularized to avoid unnecessary runs of Continuous Integration jobs when only certain parts of it are modified. - Releases are now automated via Continuous Integration, including the update of the project's changelog. - The presentation of algorithmic parameters in the documentation has been improved. - Outdated GitHub Actions can now be printed via the build target `check_github_actions`. Alternatively, the build target `update_github_actions` may be used to update them automatically. diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml index c136d4e476..be2f773729 100644 --- a/.github/workflows/test_build.yml +++ b/.github/workflows/test_build.yml @@ -35,9 +35,16 @@ jobs: - '.github/workflows/test_build.yml' - 'build' - 'build.bat' - - 'scons/**' + - 'build_system/main.py' + - 'build_system/core/**' + - 'build_system/util/**' + - 'build_system/targets/paths.py' + - 'build_system/targets/compilation/*' + - 'build_system/targets/testing/*' cpp: &cpp - *build_files + - 'build_system/targets/compilation/cpp/*' + - 'build_system/targets/testing/cpp/*' - 'cpp/**/include/**' - 'cpp/**/src/**' - '**/*.pxd' @@ -48,6 +55,8 @@ jobs: - 'cpp/**/test/**' python: &python - *build_files + - 'build_system/targets/compilation/cython/*' + - 'build_system/targets/testing/python/*' - 'python/requirements.txt' - 'python/**/mlrl/**' python_tests: &python_tests diff --git a/.github/workflows/test_changelog.yml b/.github/workflows/test_changelog.yml index 2679713e24..92221b3b40 100644 --- a/.github/workflows/test_changelog.yml +++ b/.github/workflows/test_changelog.yml @@ -33,7 +33,11 @@ jobs: - '.github/workflows/test_changelog.yml' - 'build' - 
'build.bat' - - 'scons/**' + - 'build_system/main.py' + - 'build_system/core/**' + - 'build_system/util/**' + - 'build_system/targets/paths.py' + - 'build_system/targets/versioning/*' bugfix: - *build_files - '.changelog-bugfix.md' diff --git a/.github/workflows/test_doc.yml b/.github/workflows/test_doc.yml index 84836393d8..509159edfd 100644 --- a/.github/workflows/test_doc.yml +++ b/.github/workflows/test_doc.yml @@ -34,15 +34,21 @@ jobs: - '.github/workflows/test_doc.yml' - 'build' - 'build.bat' - - 'scons/**' + - 'build_system/main.py' + - 'build_system/core/**' + - 'build_system/util/**' + - 'build_system/targets/paths.py' cpp: &cpp - *build_files + - 'build_system/targets/documentation/cpp/*' - 'cpp/**/include/**' python: &python - *build_files + - 'build_system/targets/documentation/python/*' - 'python/**/mlrl/**' doc: &doc - *build_files + - 'build_system/targets/documentation/*' - 'doc/**' any: - *cpp diff --git a/.github/workflows/test_format.yml b/.github/workflows/test_format.yml index 936dd78259..1360870ee3 100644 --- a/.github/workflows/test_format.yml +++ b/.github/workflows/test_format.yml @@ -33,23 +33,31 @@ jobs: - '.github/workflows/test_format.yml' - 'build' - 'build.bat' - - 'scons/**' + - 'build_system/main.py' + - 'build_system/core/**' + - 'build_system/util/**' + - 'build_system/targets/paths.py' cpp: - *build_files + - 'build_system/targets/code_style/*' + - 'build_system/targets/code_style/cpp/*' + - '.cpplint.cfg' - '**/*.hpp' - '**/*.cpp' - - '.clang-format' python: - *build_files + - 'build_system/targets/code_style/*' + - 'build_system/targets/code_style/python/*' - '**/*.py' - - '.isort.cfg' - - '.pylintrc' - - '.style.yapf' md: - *build_files + - 'build_system/targets/code_style/*' + - 'build_system/targets/code_style/markdown/*' - '**/*.md' yaml: - *build_files + - 'build_system/targets/code_style/*' + - 'build_system/targets/code_style/yaml/*' - '**/*.y*ml' - name: Check C++ code style if: steps.filter.outputs.cpp == 'true' diff 
--git a/.github/workflows/test_publish.yml b/.github/workflows/test_publish.yml index 5d64d7b06d..1dec1fbc8d 100644 --- a/.github/workflows/test_publish.yml +++ b/.github/workflows/test_publish.yml @@ -36,7 +36,11 @@ jobs: - '.github/workflows/template_publish_pure.yml' - 'build' - 'build.bat' - - 'scons/**' + - 'build_system/main.py' + - 'build_system/core/**' + - 'build_system/util/**' + - 'build_system/targets/paths.py' + - 'build_system/targets/packaging/*' - name: Read Python version uses: juliangruber/read-file-action@v1 id: python_version diff --git a/.gitignore b/.gitignore index 5cded93418..b9b950e538 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ # Build files __pycache__/ -scons/build/ +build_system/build/ python/**/build/ python/**/dist/ python/**/*egg-info/ diff --git a/build b/build index 1b9f3cceaa..d42154e363 100755 --- a/build +++ b/build @@ -1,7 +1,7 @@ #!/bin/sh VENV_DIR="venv" -SCONS_DIR="scons" +BUILD_SYSTEM_DIR="build_system" CLEAN=false set -e @@ -22,13 +22,12 @@ fi if [ -d "$VENV_DIR" ]; then . $VENV_DIR/bin/activate - python3 -c "import sys; sys.path.append('$SCONS_DIR'); import dependencies; dependencies.install_build_dependencies('scons')" - scons --silent --file $SCONS_DIR/sconstruct.py $@ + python3 $BUILD_SYSTEM_DIR/main.py $@ deactivate fi if [ $CLEAN = true ] && [ -d $VENV_DIR ]; then echo "Removing virtual Python environment..." 
class BuildUnit:
    """
    An independent unit of the build system that may come with its own build-time dependencies.
    """

    BUILD_SYSTEM_DIRECTORY = 'build_system'

    BUILD_DIRECTORY_NAME = 'build'

    def __init__(self, root_directory: str = BUILD_SYSTEM_DIRECTORY):
        """
        :param root_directory: The root directory of this unit
        """
        self.root_directory = root_directory

    @staticmethod
    def for_file(file) -> 'BuildUnit':
        """
        Creates and returns a `BuildUnit` for a given file.

        The unit's root directory is the file's directory, expressed relative to the parent of the build system
        directory (i.e., relative to the project's root).

        :param file: The file for which a `BuildUnit` should be created
        :return: The `BuildUnit` that has been created
        """
        return BuildUnit(path.relpath(path.dirname(file), path.dirname(BuildUnit.BUILD_SYSTEM_DIRECTORY)))

    @property
    def build_directory(self) -> str:
        """
        The path to the build directory of this unit.
        """
        return path.join(self.root_directory, self.BUILD_DIRECTORY_NAME)

    def find_requirements_files(self) -> List[str]:
        """
        Finds and returns all requirements files that specify the build-time dependencies of this unit.

        Walks up the directory hierarchy, starting at the unit's root directory and stopping at the top-level
        directory of the build system, and collects any "requirements.txt" files encountered along the way.

        :return: A list that contains the paths of the requirements files that have been found
        """
        requirements_files = []
        current_directory = self.root_directory

        while path.basename(current_directory) != self.BUILD_SYSTEM_DIRECTORY:
            requirements_file = path.join(current_directory, 'requirements.txt')

            if path.isfile(requirements_file):
                requirements_files.append(requirements_file)

            current_directory = path.dirname(current_directory)

        return requirements_files
+ """ + + @cached_property + def json(self) -> Dict: + """ + The content of the JSON file as a dictionary. + """ + lines = self.lines + + if lines: + return json.loads('\n'.join(lines)) + + return {} + + def write_json(self, dictionary: Dict): + """ + Writes a given dictionary to the JSON file. + + :param dictionary: The dictionary to be written + """ + self.write_lines(json.dumps(dictionary, indent=4)) + + def write_lines(self, *lines: str): + super().write_lines(*lines) + + try: + del self.json + except AttributeError: + pass + + +class ChangeDetection: + """ + Allows to detect changes in tracked files. + """ + + class CacheFile(JsonFile): + """ + A JSON file that stores checksums for tracked files. + """ + + @staticmethod + def __checksum(file: str) -> str: + return str(path.getmtime(file)) + + def __init__(self, file: str): + """ + :param file: The path to the JSON file + """ + super().__init__(file, accept_missing=True) + create_directories(path.dirname(file)) + + def update(self, module_name: str, files: Set[str]): + """ + Updates the checksums of given files. + + :param module_name: The name of the module, the files belong to + :param files: A set that contains the paths of the files to be updated + """ + cache = self.json + module_cache = cache.setdefault(module_name, {}) + + for invalid_key in [file for file in module_cache.keys() if file not in files]: + del module_cache[invalid_key] + + for file in files: + module_cache[file] = self.__checksum(file) + + if module_cache: + cache[module_name] = module_cache + else: + del cache[module_name] + + if cache: + self.write_json(cache) + else: + self.delete() + + def has_changed(self, module_name: str, file: str) -> bool: + """ + Returns whether a file has changed according to the cache or not. 
+ + :param module_name: The name of the module, the file belongs to + :param file: The file to be checked + :return: True, if the file has changed, False otherwise + """ + module_cache = self.json.get(module_name, {}) + return file not in module_cache or module_cache[file] != self.__checksum(file) + + def __init__(self, cache_file: str): + """ + :param cache_file: The path to the file that should be used for tracking files + """ + self.cache_file = ChangeDetection.CacheFile(cache_file) + + def track_files(self, module: Module, *files: str): + """ + Updates the cache to keep track of given files. + + :param module: The module, the files belong to + :param files: The files to be tracked + """ + self.cache_file.update(str(module), set(files)) + + def get_changed_files(self, module: Module, *files: str) -> List[str]: + """ + Filters given files and returns only those that have changed. + + :param module: The module, the files belong to + :param files: The files to be filtered + :return: A list that contains the files that have changed + """ + module_name = str(module) + return [file for file in files if self.cache_file.has_changed(module_name, file)] diff --git a/build_system/core/modules.py b/build_system/core/modules.py new file mode 100644 index 0000000000..bc0bbe2d40 --- /dev/null +++ b/build_system/core/modules.py @@ -0,0 +1,63 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that provide information about files and directories that belong to individual modules of the project +to be dealt with by the targets of the build system. +""" +from abc import ABC +from functools import reduce +from typing import List + + +class Module(ABC): + """ + An abstract base class for all modules. + """ + + class Filter(ABC): + """ + An abstract base class for all classes that allow to filter modules. + """ + + def matches(self, module: 'Module') -> bool: + """ + Returns whether the filter matches a given module or not. 
class Module(ABC):
    """
    An abstract base class for all modules.
    """

    class Filter(ABC):
        """
        An abstract base class for all classes that allow to filter modules.
        """

        def matches(self, module: 'Module') -> bool:
            """
            Returns whether the filter matches a given module or not.

            :param module: The module to be matched
            :return: True, if the filter matches the given module, False otherwise
            """

    def match(self, module_filter: Filter) -> List['Module']:
        """
        Returns a list that contains all submodules in this module that match a given filter.

        :param module_filter: The filter
        :return: A list that contains all matching submodules
        """
        if module_filter.matches(self):
            return [self]

        return []


class ModuleRegistry:
    """
    Allows to look up modules that have previously been registered.
    """

    def __init__(self):
        # All modules that have been registered so far, in order of registration.
        self.modules = []

    def register(self, module: Module):
        """
        Registers a new module.

        :param module: The module to be registered
        """
        self.modules.append(module)

    def lookup(self, module_filter: Module.Filter) -> List[Module]:
        """
        Looks up and returns all modules that match a given filter.

        :param module_filter: The filter
        :return: A list that contains all modules matching the given filter
        """
        matching_modules = []

        for registered_module in self.modules:
            matching_modules.extend(registered_module.match(module_filter))

        return matching_modules
+ """ + + @dataclass + class Dependency: + """ + A single dependency of a parent target. + + Attributes: + target_name: The name of the target, the parent target depends on + clean_dependency: True, if the output files of the dependency should also be cleaned when cleaning the + output files of the parent target, False otherwise + """ + target_name: str + clean_dependency: bool = True + + def __str__(self) -> str: + return self.target_name + + def __eq__(self, other) -> bool: + return isinstance(other, type(self)) and self.target_name == other.target_name + + class Builder(ABC): + """ + An abstract base class for all builders that allow to configure and create targets. + """ + + def __init__(self): + self.dependency_names = set() + self.dependencies = [] + + def depends_on(self, *target_names: str, clean_dependencies: bool = False) -> 'Target.Builder': + """ + Adds on or several targets, this target should depend on. + + :param target_names: The names of the targets, this target should depend on + :param clean_dependencies: True, if output files of the dependencies should also be cleaned when cleaning + the output files of this target, False otherwise + :return: The `Target.Builder` itself + """ + for target_name in target_names: + if not target_name in self.dependency_names: + self.dependency_names.add(target_name) + self.dependencies.append( + Target.Dependency(target_name=target_name, clean_dependency=clean_dependencies)) + + return self + + @abstractmethod + def build(self, build_unit: BuildUnit) -> 'Target': + """ + Creates and returns the target that has been configured via the builder. 
+ + :param build_unit: The build unit, the target belongs to + :return: The target that has been created + """ + + def __init__(self, name: str, dependencies: List['Target.Dependency']): + """ + :param name: The name of the target + :param dependencies: A list that contains all dependencies of the target + """ + self.name = name + self.dependencies = dependencies + + @abstractmethod + def run(self, module_registry: ModuleRegistry): + """ + Must be implemented by subclasses in order to run this target. + + :param module_registry: The `ModuleRegistry` that can be used by the target for looking up modules + """ + + def clean(self, module_registry: ModuleRegistry): + """ + May be overridden by subclasses in order to clean up this target. + + :param module_registry: The `ModuleRegistry` that can be used by the target for looking up modules + """ + + def __str__(self) -> str: + result = type(self).__name__ + '{name="' + self.name + '"' + + if self.dependencies: + result += ', dependencies={' + format_iterable(self.dependencies, delimiter='"') + '}' + + return result + '}' + + +class BuildTarget(Target): + """ + A build target, which produces one or several output files from given input files. + """ + + class Runnable(ABC): + """ + An abstract base class for all classes that can be run via a build target. + """ + + def __init__(self, module_filter: Module.Filter): + """ + :param module_filter: A filter that matches the modules, the target should be applied to + """ + self.module_filter = module_filter + + def run_all(self, build_unit: BuildUnit, modules: List[Module]): + """ + May be overridden by subclasses in order to apply the target to all modules that match the filter. 
class BuildTarget(Target):
    """
    A build target, which produces one or several output files from given input files.
    """

    class Runnable(ABC):
        """
        An abstract base class for all classes that can be run via a build target.
        """

        def __init__(self, module_filter: Module.Filter):
            """
            :param module_filter: A filter that matches the modules, the target should be applied to
            """
            self.module_filter = module_filter

        def run_all(self, build_unit: BuildUnit, modules: List[Module]):
            """
            May be overridden by subclasses in order to apply the target to all modules that match the filter.

            :param build_unit: The build unit, the target belongs to
            :param modules:    A list that contains the modules, the target should be applied to
            """
            raise NotImplementedError('Class ' + type(self).__name__ + ' does not implement the "run_all" method')

        def run(self, build_unit: BuildUnit, module: Module):
            """
            May be overridden by subclasses in order to apply the target to an individual module that matches the
            filter.

            :param build_unit: The build unit, the target belongs to
            :param module:     The module, the target should be applied to
            """
            raise NotImplementedError('Class ' + type(self).__name__ + ' does not implement the "run" method')

        # pylint: disable=unused-argument
        def get_input_files(self, module: Module) -> List[str]:
            """
            May be overridden by subclasses in order to return the input files required by the target.

            :param module: The module, the target should be applied to
            :return: A list that contains the input files
            """
            return []

        # pylint: disable=unused-argument
        def get_output_files(self, module: Module) -> List[str]:
            """
            May be overridden by subclasses in order to return the output files produced by the target.

            :param module: The module, the target should be applied to
            :return: A list that contains the output files
            """
            return []

        def get_clean_files(self, module: Module) -> List[str]:
            """
            May be overridden by subclasses in order to return the output files produced by the target that must be
            cleaned.

            :param module: The module, the target should be applied to
            :return: A list that contains the files to be cleaned
            """
            return self.get_output_files(module)

    class Builder(Target.Builder):
        """
        A builder that allows to configure and create build targets.
        """

        def __init__(self, parent_builder: 'TargetBuilder', target_name: str):
            """
            :param parent_builder: The builder, this builder has been created from
            :param target_name:    The name of the target that is configured via the builder
            """
            super().__init__()
            self.parent_builder = parent_builder
            self.target_name = target_name
            self.runnables = []

        def set_runnables(self, *runnables: 'BuildTarget.Runnable') -> Any:
            """
            Sets one or several `Runnable` objects to be run by the target.

            :param runnables: The `Runnable` objects to be set
            :return: The builder, this builder has been created from
            """
            self.runnables = list(runnables)
            return self.parent_builder

        def build(self, build_unit: BuildUnit) -> Target:
            return BuildTarget(self.target_name, self.dependencies, self.runnables, build_unit)

    def __get_missing_output_files(self, runnable: Runnable, module: Module) -> Tuple[List[str], List[str]]:
        """
        Returns the output files of a runnable for a given module, as well as the subset of those files that do not
        exist yet and therefore require the target to be run.
        """
        output_files = runnable.get_output_files(module)
        missing_output_files = [output_file for output_file in output_files if not path.exists(output_file)]

        if output_files:
            if missing_output_files:
                Log.verbose(
                    'Target "%s" must be applied to module "%s", because the following output files do not exist:\n',
                    self.name, str(module))

                for missing_output_file in missing_output_files:
                    Log.verbose(' - %s', missing_output_file)

                Log.verbose('')
            else:
                Log.verbose('Target "%s" must not be applied to module "%s", because all output files already exist:\n',
                            self.name, str(module))

                for output_file in output_files:
                    Log.verbose(' - %s', output_file)

                Log.verbose('')

        return output_files, missing_output_files

    def __get_changed_input_files(self, runnable: Runnable, module: Module) -> Tuple[List[str], List[str]]:
        """
        Returns the input files of a runnable for a given module, as well as the subset of those files that have
        changed since the last run and therefore require the target to be run.
        """
        input_files = runnable.get_input_files(module)
        # BUGFIX: The previous comprehension tested the truthiness of `get_changed_files(module, *input_files)` once
        # per file without ever referring to the individual file, so it reported either ALL input files as changed or
        # NONE of them. Ask the change detection for the actual subset of changed files instead.
        changed_input_files = self.change_detection.get_changed_files(module, *input_files)

        if input_files:
            if changed_input_files:
                Log.verbose(
                    'Target "%s" must be applied to module "%s", because the following input files have changed:\n',
                    self.name, str(module))

                for changed_input_file in changed_input_files:
                    Log.verbose(' - %s', changed_input_file)

                Log.verbose('')
            else:
                Log.verbose('Target "%s" must not be applied to module "%s", because no input files have changed:\n',
                            self.name, str(module))

                for input_file in input_files:
                    Log.verbose(' - %s', input_file)

                Log.verbose('')

        return input_files, changed_input_files

    def __init__(self, name: str, dependencies: List[Target.Dependency], runnables: List[Runnable],
                 build_unit: BuildUnit):
        """
        :param name:         The name of the target or None, if the target does not have a name
        :param dependencies: A list that contains all dependencies of the target
        :param runnables:    The `BuildTarget.Runnable` to be run by the target
        :param build_unit:   The `BuildUnit`, the target belongs to
        """
        super().__init__(name, dependencies)
        self.runnables = runnables
        self.build_unit = build_unit
        # NOTE(review): the change-detection cache is deliberately placed in the top-level build directory
        # (`BuildUnit()`), not in `build_unit.build_directory` — confirm this is intended.
        self.change_detection = ChangeDetection(path.join(BuildUnit().build_directory, self.name + '.json'))

    def run(self, module_registry: ModuleRegistry):
        for runnable in self.runnables:
            modules = module_registry.lookup(runnable.module_filter)
            modules_to_be_run = []
            input_files_per_module = []

            for module in modules:
                output_files, missing_output_files = self.__get_missing_output_files(runnable, module)
                input_files, changed_input_files = self.__get_changed_input_files(runnable, module)

                # Run the target if it tracks no files at all, or if any output file is missing, or if any input file
                # has changed.
                if (not output_files and not input_files) or missing_output_files or changed_input_files:
                    modules_to_be_run.append(module)
                    input_files_per_module.append(input_files)

            try:
                runnable.run_all(self.build_unit, modules_to_be_run)
            except NotImplementedError:
                try:
                    for module in modules_to_be_run:
                        runnable.run(self.build_unit, module)
                except NotImplementedError as error:
                    raise RuntimeError('Class ' + type(runnable).__name__
                                       + ' must implement either the "run_all" or "run" method') from error

            # Only after a successful run, remember the input files so that unchanged files are skipped next time.
            for i, module in enumerate(modules_to_be_run):
                self.change_detection.track_files(module, *input_files_per_module[i])

    def clean(self, module_registry: ModuleRegistry):
        for runnable in self.runnables:
            modules = module_registry.lookup(runnable.module_filter)

            for module in modules:
                clean_files = runnable.get_clean_files(module)
                delete_files(*clean_files, accept_missing=True)
+ """ + + def __init__(self, parent_builder: 'TargetBuilder', target_name: str): + """ + :param parent_builder: The builder, this builder has been created from + :param target_name: The name of the target that is configured via the builder + """ + super().__init__() + self.parent_builder = parent_builder + self.target_name = target_name + self.functions = [] + self.runnables = [] + + def nop(self) -> Any: + """ + Instructs the target to not execute any action. + + :return: The `TargetBuilder`, this builder has been created from + """ + return self.parent_builder + + def set_functions(self, *functions: 'PhonyTarget.Function') -> Any: + """ + Sets one or several functions to be run by the target. + + :param functions: The functions to be set + :return: The builder, this builder has been created from + """ + self.functions = list(functions) + return self.parent_builder + + def set_runnables(self, *runnables: 'PhonyTarget.Runnable') -> Any: + """ + Sets one or several `Runnable` objects to be run by the target. 
+ + :param runnables: The `Runnable` objects to be set + :return: The builder, this builder has been created from + """ + self.runnables = list(runnables) + return self.parent_builder + + def build(self, build_unit: BuildUnit) -> Target: + + def action(module_registry: ModuleRegistry): + for function in self.functions: + function() + + for runnable in self.runnables: + modules = module_registry.lookup(runnable.module_filter) + + try: + runnable.run_all(build_unit, modules) + except NotImplementedError: + try: + for module in modules: + runnable.run(build_unit, module) + except NotImplementedError as error: + raise RuntimeError('Class ' + type(runnable).__name__ + + ' must implement either the "run_all" or "run" method') from error + + return PhonyTarget(self.target_name, self.dependencies, action) + + def __init__(self, name: str, dependencies: List[Target.Dependency], action: Callable[[ModuleRegistry], None]): + """ + :param name: The name of the target + :param dependencies: A list that contains all dependencies of the target + :param action: The action to be executed by the target + """ + super().__init__(name, dependencies) + self.action = action + + def run(self, module_registry: ModuleRegistry): + self.action(module_registry) + + +class TargetBuilder: + """ + A builder that allows to configure and create multiple targets. + """ + + def __init__(self, build_unit: BuildUnit): + """ + :param build_unit: The build unit, the targets belong to + """ + self.build_unit = build_unit + self.target_builders = [] + + def add_build_target(self, name: str) -> BuildTarget.Builder: + """ + Adds a build target. + + :param name: The name of the target + :return: A `BuildTarget.Builder` that allows to configure the target + """ + target_builder = BuildTarget.Builder(self, name) + self.target_builders.append(target_builder) + return target_builder + + def add_phony_target(self, name: str) -> PhonyTarget.Builder: + """ + Adds a phony target. 
class TargetBuilder:
    """
    A builder that allows to configure and create multiple targets.
    """

    def __init__(self, build_unit: BuildUnit):
        """
        :param build_unit: The build unit, the targets belong to
        """
        self.build_unit = build_unit
        self.target_builders = []

    def __register(self, target_builder):
        # Remembers a newly created target builder and hands it back to the caller for further configuration.
        self.target_builders.append(target_builder)
        return target_builder

    def add_build_target(self, name: str) -> BuildTarget.Builder:
        """
        Adds a build target.

        :param name: The name of the target
        :return: A `BuildTarget.Builder` that allows to configure the target
        """
        return self.__register(BuildTarget.Builder(self, name))

    def add_phony_target(self, name: str) -> PhonyTarget.Builder:
        """
        Adds a phony target.

        :param name: The name of the target
        :return: A `PhonyTarget.Builder` that allows to configure the target
        """
        return self.__register(PhonyTarget.Builder(self, name))

    def build(self) -> List[Target]:
        """
        Creates and returns the targets that have been configured via the builder.

        :return: A list that stores the targets that have been created
        """
        return [target_builder.build(self.build_unit) for target_builder in self.target_builders]
+ + :param targets_by_name: A dictionary that stores all available targets by their names + :param target_name: The name of the target, the node should correspond to + :param graph_type: The type of the dependency graph + :return: The node that has been created + """ + target = targets_by_name[target_name] + return DependencyGraph.RunNode( + target) if graph_type == DependencyGraph.Type.RUN else DependencyGraph.CleanNode(target) + + @staticmethod + def from_dependency(targets_by_name: Dict[str, Target], dependency: Target.Dependency, + graph_type: 'DependencyGraph.Type') -> Optional['DependencyGraph.Node']: + """ + Creates and returns a new node of a dependency graph corresponding to the target referred to by a + `Target.Dependency`. + + :param targets_by_name: A dictionary that stores all available targets by their names + :param dependency: The dependency referring to the target, the node should correspond to + :param graph_type: The type of the dependency graph + :return: The node that has been created or None, if the dependency does not require a node to + be created + """ + if graph_type == DependencyGraph.Type.RUN \ + or graph_type == DependencyGraph.Type.CLEAN_ALL \ + or dependency.clean_dependency: + target = targets_by_name[dependency.target_name] + return DependencyGraph.RunNode( + target) if graph_type == DependencyGraph.Type.RUN else DependencyGraph.CleanNode(target) + + return None + + @abstractmethod + def execute(self, module_registry: ModuleRegistry): + """ + Must be implemented by subclasses in order to execute the node. + + :param module_registry: A `ModuleRegistry` that may be used for looking up modules + """ + + @abstractmethod + def copy(self) -> 'DependencyGraph.Node': + """ + Must be implemented by subclasses in order to create a shallow copy of the node. 
+ + :return: The copy that has been created + """ + + def __str__(self) -> str: + return '[' + self.target.name + ']' + + def __eq__(self, other) -> bool: + return isinstance(other, type(self)) and self.target == other.target + + class RunNode(Node): + """ + A node in the dependency graph that runs one or several targets. + """ + + def execute(self, module_registry: ModuleRegistry): + Log.verbose('Running target "%s"...', self.target.name) + self.target.run(module_registry) + + def copy(self) -> 'DependencyGraph.Node': + return DependencyGraph.RunNode(self.target) + + class CleanNode(Node): + """ + A node in the dependency graph that cleans one or several targets. + """ + + def execute(self, module_registry: ModuleRegistry): + Log.verbose('Cleaning target "%s"...', self.target.name) + self.target.clean(module_registry) + + def copy(self) -> 'DependencyGraph.Node': + return DependencyGraph.CleanNode(self.target) + + @dataclass + class Sequence: + """ + A sequence consisting of several nodes in a dependency graph. + + Attributes: + first: The first node in the sequence + last: The last node in the sequence + """ + first: 'DependencyGraph.Node' + last: 'DependencyGraph.Node' + + @staticmethod + def from_node(node: 'DependencyGraph.Node') -> 'DependencyGraph.Sequence': + """ + Creates and returns a new path that consists of a single node. + + :param node: The node + :return: The path that has been created + """ + node.parent = None + node.child = None + return DependencyGraph.Sequence(first=node, last=node) + + def prepend(self, node: 'DependencyGraph.Node'): + """ + Adds a new node at the start of the sequence. + + :param node: The node to be added + """ + first_node = self.first + first_node.parent = node + node.parent = None + node.child = first_node + self.first = node + + def copy(self) -> 'DependencyGraph.Sequence': + """ + Creates a deep copy of the sequence. 
+ + :return: The copy that has been created + """ + current_node = self.last + copy = DependencyGraph.Sequence.from_node(current_node.copy()) + current_node = current_node.parent + + while current_node: + copy.prepend(current_node.copy()) + current_node = current_node.parent + + return copy + + def execute(self, module_registry: ModuleRegistry): + """ + Executes all nodes in the sequence. + + :param module_registry: A `ModuleRegistry` that may be used for looking up modules + """ + current_node = self.first + + while current_node: + current_node.execute(module_registry) + current_node = current_node.child + + def __str__(self) -> str: + current_node = self.first + result = ' → ' + str(current_node) + current_node = current_node.child + indent = 1 + + while current_node: + result += '\n' + reduce(lambda aggr, _: aggr + ' ', range(indent), '') + ' ↳ ' + str(current_node) + current_node = current_node.child + indent += 1 + + return result + + @staticmethod + def __expand_sequence(targets_by_name: Dict[str, Target], sequence: Sequence, + graph_type: 'DependencyGraph.Type') -> List[Sequence]: + sequences = [] + dependencies = sequence.first.target.dependencies + + if dependencies: + for dependency in dependencies: + new_node = DependencyGraph.Node.from_dependency(targets_by_name, dependency, graph_type) + + if new_node: + new_sequence = sequence.copy() + new_sequence.prepend(new_node) + sequences.extend(DependencyGraph.__expand_sequence(targets_by_name, new_sequence, graph_type)) + else: + sequences.append(sequence) + else: + sequences.append(sequence) + + return sequences + + @staticmethod + def __create_sequence(targets_by_name: Dict[str, Target], target_name: str, + graph_type: 'DependencyGraph.Type') -> List[Sequence]: + node = DependencyGraph.Node.from_name(targets_by_name, target_name, graph_type) + sequence = DependencyGraph.Sequence.from_node(node) + return DependencyGraph.__expand_sequence(targets_by_name, sequence, graph_type) + + @staticmethod + def 
__find_in_parents(node: Node, parent: Optional[Node]) -> Optional[Node]: + while parent: + if parent == node: + return parent + + parent = parent.parent + + return None + + @staticmethod + def __merge_two_sequences(first_sequence: Sequence, second_sequence: Sequence) -> Sequence: + first_node = first_sequence.last + second_node = second_sequence.last + + while second_node: + overlapping_node = DependencyGraph.__find_in_parents(second_node, first_node) + + if overlapping_node: + first_node = overlapping_node.parent + else: + new_node = second_node.copy() + + if first_node: + new_node.parent = first_node + + if first_node.child: + new_node.child = first_node.child + first_node.child.parent = new_node + + first_node.child = new_node + + if first_node == first_sequence.last: + first_sequence.last = new_node + else: + first_sequence.prepend(new_node) + + second_node = second_node.parent + + return first_sequence + + @staticmethod + def __merge_multiple_sequences(sequences: List[Sequence]) -> Sequence: + while len(sequences) > 1: + second_sequence = sequences.pop() + first_sequence = sequences.pop() + merged_sequence = DependencyGraph.__merge_two_sequences(first_sequence, second_sequence) + sequences.append(merged_sequence) + + return sequences[0] + + def __init__(self, targets_by_name: Dict[str, Target], *target_names: str, graph_type: 'DependencyGraph.Type'): + """ + :param targets_by_name: A dictionary that stores all available targets by their names + :param target_names: The names of the targets to be included in the graph + :param graph_type: The type of the dependency graph + """ + self.sequence = self.__merge_multiple_sequences( + reduce(lambda aggr, target_name: aggr + self.__create_sequence(targets_by_name, target_name, graph_type), + target_names, [])) + + def execute(self, module_registry: ModuleRegistry): + """ + Executes all targets in the graph in the pre-determined order. 
+ + :param module_registry: The `ModuleRegistry` that should be used by targets for looking up modules + """ + self.sequence.execute(module_registry) + + def __str__(self) -> str: + return str(self.sequence) + + +class TargetRegistry: + """ + Allows to register targets. + """ + + def __init__(self): + self.targets_by_name = {} + + def register(self, target: Target): + """ + Registers a new target. + + :param target: The target to be registered + """ + existing = self.targets_by_name.get(target.name) + + if existing: + raise ValueError('Failed to register target ' + str(target) + + ', because a target with the same name has already been registered: ' + str(existing)) + + self.targets_by_name[target.name] = target + + def create_dependency_graph(self, + *target_names: str, + graph_type: 'DependencyGraph.Type' = DependencyGraph.Type.RUN) -> DependencyGraph: + """ + Creates and returns a `DependencyGraph` for the given targets. + + :param target_names: The names of the targets to be included in the graph + :param graph_type: The type of the dependency graph + :return: The dependency graph that has been created + """ + if not target_names: + Log.error('No targets given') + + invalid_targets = [target_name for target_name in target_names if target_name not in self.targets_by_name] + + if invalid_targets: + Log.error('The following targets are invalid: %s', format_iterable(invalid_targets)) + + return DependencyGraph(self.targets_by_name, *target_names, graph_type=graph_type) diff --git a/build_system/main.py b/build_system/main.py new file mode 100644 index 0000000000..362498b0e6 --- /dev/null +++ b/build_system/main.py @@ -0,0 +1,161 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Initializes the build system and runs targets specified via command line arguments. 
+""" +import sys + +from argparse import ArgumentParser +from importlib.util import module_from_spec, spec_from_file_location +from types import ModuleType +from typing import List, Optional + +from core.build_unit import BuildUnit +from core.modules import Module, ModuleRegistry +from core.targets import DependencyGraph, Target, TargetRegistry +from util.files import FileSearch +from util.format import format_iterable +from util.log import Log + + +def __parse_command_line_arguments(): + parser = ArgumentParser(description='The build system of the project "MLRL-Boomer"') + parser.add_argument('--verbose', action='store_true', help='Enables verbose logging.') + parser.add_argument('--clean', action='store_true', help='Cleans the specified targets.') + parser.add_argument('targets', nargs='*') + return parser.parse_args() + + +def __configure_log(args): + log_level = Log.Level.VERBOSE if args.verbose else Log.Level.INFO + Log.configure(log_level) + + +def __find_init_files() -> List[str]: + return FileSearch() \ + .set_recursive(True) \ + .filter_by_name('__init__.py') \ + .list(BuildUnit().root_directory) + + +def __import_source_file(source_file: str) -> ModuleType: + try: + spec = spec_from_file_location(source_file, source_file) + module = module_from_spec(spec) + sys.modules[source_file] = module + spec.loader.exec_module(module) + return module + except FileNotFoundError as error: + raise ImportError('Source file "' + source_file + '" not found') from error + + +def __register_modules(init_files: List[str]) -> ModuleRegistry: + Log.verbose('Registering modules...') + module_registry = ModuleRegistry() + num_modules = 0 + + for init_file in init_files: + modules = [ + module for module in getattr(__import_source_file(init_file), 'MODULES', []) if isinstance(module, Module) + ] + + if modules: + Log.verbose('Registering %s modules defined in file "%s":\n', str(len(modules)), init_file) + + for module in modules: + Log.verbose(' - %s', str(module)) + 
module_registry.register(module) + + Log.verbose('') + num_modules += len(modules) + + Log.verbose('Successfully registered %s modules.\n', str(num_modules)) + return module_registry + + +def __register_targets(init_files: List[str]) -> TargetRegistry: + Log.verbose('Registering targets...') + target_registry = TargetRegistry() + num_targets = 0 + + for init_file in init_files: + targets = [ + target for target in getattr(__import_source_file(init_file), 'TARGETS', []) if isinstance(target, Target) + ] + + if targets: + Log.verbose('Registering %s targets defined in file "%s":\n', str(len(targets)), init_file) + + for target in targets: + Log.verbose(' - %s', str(target)) + target_registry.register(target) + + Log.verbose('') + num_targets += len(targets) + + Log.verbose('Successfully registered %s targets.\n', str(num_targets)) + return target_registry + + +def __find_default_target(init_files: List[str]) -> Optional[str]: + Log.verbose('Searching for default target...') + default_targets = [] + + for init_file in init_files: + default_target = getattr(__import_source_file(init_file), 'DEFAULT_TARGET', None) + + if default_target and isinstance(default_target, str): + Log.verbose('Found default target "%s" defined in file "%s"', default_target, init_file) + default_targets.append(default_target) + + if len(default_targets) > 1: + raise RuntimeError('Only one default target may be specified, but found: ' + format_iterable(default_targets)) + + if default_targets: + return default_targets[0] + + Log.verbose('Found no default target.') + return None + + +def __create_dependency_graph(target_registry: TargetRegistry, args, default_target: Optional[str]) -> DependencyGraph: + targets = args.targets + clean = args.clean + graph_type = DependencyGraph.Type.CLEAN if clean else DependencyGraph.Type.RUN + + if not targets and default_target: + targets = [default_target] + + if clean: + graph_type = DependencyGraph.Type.CLEAN_ALL + + Log.verbose('Creating dependency graph 
for %s targets [%s]...', 'cleaning' if clean else 'running', + format_iterable(targets)) + dependency_graph = target_registry.create_dependency_graph(*targets, graph_type=graph_type) + Log.verbose('Successfully created dependency graph:\n\n%s\n', str(dependency_graph)) + return dependency_graph + + +def __execute_dependency_graph(dependency_graph: DependencyGraph, module_registry: ModuleRegistry): + Log.verbose('Executing dependency graph...') + dependency_graph.execute(module_registry) + Log.verbose('Successfully executed dependency graph.') + + +def main(): + """ + The main function to be executed when the build system is invoked. + """ + args = __parse_command_line_arguments() + __configure_log(args) + + init_files = __find_init_files() + module_registry = __register_modules(init_files) + target_registry = __register_targets(init_files) + default_target = __find_default_target(init_files) + dependency_graph = __create_dependency_graph(target_registry, args, default_target=default_target) + __execute_dependency_graph(dependency_graph, module_registry) + + +if __name__ == '__main__': + main() diff --git a/build_system/targets/__init__.py b/build_system/targets/__init__.py new file mode 100644 index 0000000000..18b8fd2616 --- /dev/null +++ b/build_system/targets/__init__.py @@ -0,0 +1,8 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines the build system's default target. +""" +from targets.packaging import INSTALL_WHEELS + +DEFAULT_TARGET = INSTALL_WHEELS diff --git a/build_system/targets/code_style/__init__.py b/build_system/targets/code_style/__init__.py new file mode 100644 index 0000000000..2939cd87c4 --- /dev/null +++ b/build_system/targets/code_style/__init__.py @@ -0,0 +1,21 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets for checking and enforcing code style definitions. 
+""" +from core.build_unit import BuildUnit +from core.targets import TargetBuilder + +from targets.code_style.cpp import FORMAT_CPP, TEST_FORMAT_CPP +from targets.code_style.markdown import FORMAT_MARKDOWN, TEST_FORMAT_MARKDOWN +from targets.code_style.python import FORMAT_PYTHON, TEST_FORMAT_PYTHON +from targets.code_style.yaml import FORMAT_YAML, TEST_FORMAT_YAML + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target('format') \ + .depends_on(FORMAT_PYTHON, FORMAT_CPP, FORMAT_MARKDOWN, FORMAT_YAML) \ + .nop() \ + .add_phony_target('test_format') \ + .depends_on(TEST_FORMAT_PYTHON, TEST_FORMAT_CPP, TEST_FORMAT_MARKDOWN, TEST_FORMAT_YAML) \ + .nop() \ + .build() diff --git a/.clang-format b/build_system/targets/code_style/cpp/.clang-format similarity index 100% rename from .clang-format rename to build_system/targets/code_style/cpp/.clang-format diff --git a/build_system/targets/code_style/cpp/__init__.py b/build_system/targets/code_style/cpp/__init__.py new file mode 100644 index 0000000000..e75a92b16a --- /dev/null +++ b/build_system/targets/code_style/cpp/__init__.py @@ -0,0 +1,29 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for checking and enforcing code style definitions for C++ files. 
+""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder +from util.files import FileType + +from targets.code_style.cpp.targets import CheckCppCodeStyle, EnforceCppCodeStyle +from targets.code_style.modules import CodeModule +from targets.paths import Project + +FORMAT_CPP = 'format_cpp' + +TEST_FORMAT_CPP = 'test_format_cpp' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target(FORMAT_CPP).set_runnables(EnforceCppCodeStyle()) \ + .add_phony_target(TEST_FORMAT_CPP).set_runnables(CheckCppCodeStyle()) \ + .build() + +MODULES = [ + CodeModule( + file_type=FileType.cpp(), + root_directory=Project.Cpp.root_directory, + source_file_search=Project.Cpp.file_search(), + ), +] diff --git a/build_system/targets/code_style/cpp/clang_format.py b/build_system/targets/code_style/cpp/clang_format.py new file mode 100644 index 0000000000..1c86e16ac8 --- /dev/null +++ b/build_system/targets/code_style/cpp/clang_format.py @@ -0,0 +1,29 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "clang-format". +""" +from os import path + +from core.build_unit import BuildUnit +from util.run import Program + +from targets.code_style.modules import CodeModule + + +class ClangFormat(Program): + """ + Allows to run the external program "clang-format". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CodeModule, enforce_changes: bool = False): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + :param enforce_changes: True, if changes should be applied to files, False otherwise + """ + super().__init__('clang-format', '--style=file:' + path.join(build_unit.root_directory, '.clang-format')) + self.add_conditional_arguments(enforce_changes, '-i') + self.add_conditional_arguments(not enforce_changes, '--dry-run', '--Werror') + self.add_arguments(*module.find_source_files()) + self.set_build_unit(build_unit) diff --git a/build_system/targets/code_style/cpp/cpplint.py b/build_system/targets/code_style/cpp/cpplint.py new file mode 100644 index 0000000000..eba93b6e44 --- /dev/null +++ b/build_system/targets/code_style/cpp/cpplint.py @@ -0,0 +1,23 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "cpplint". +""" +from core.build_unit import BuildUnit +from util.run import Program + +from targets.code_style.modules import CodeModule + + +class CppLint(Program): + """ + Allows to run the external program "cpplint". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CodeModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__('cpplint', '--quiet', '--config=.cpplint.cfg', *module.find_source_files()) + self.set_build_unit(build_unit) diff --git a/build_system/targets/code_style/cpp/requirements.txt b/build_system/targets/code_style/cpp/requirements.txt new file mode 100644 index 0000000000..3f5670a408 --- /dev/null +++ b/build_system/targets/code_style/cpp/requirements.txt @@ -0,0 +1,2 @@ +cpplint >= 2.0, < 2.1 +clang-format >= 19.1, < 19.2 diff --git a/build_system/targets/code_style/cpp/targets.py b/build_system/targets/code_style/cpp/targets.py new file mode 100644 index 0000000000..3bfce02868 --- /dev/null +++ b/build_system/targets/code_style/cpp/targets.py @@ -0,0 +1,43 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for checking and enforcing code style definitions for C++ files. +""" +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import PhonyTarget +from util.files import FileType +from util.log import Log + +from targets.code_style.cpp.clang_format import ClangFormat +from targets.code_style.cpp.cpplint import CppLint +from targets.code_style.modules import CodeModule + +MODULE_FILTER = CodeModule.Filter(FileType.cpp()) + + +class CheckCppCodeStyle(PhonyTarget.Runnable): + """ + Checks if C++ source files adhere to the code style definitions. If this is not the case, an error is raised. 
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Checking C++ code style in directory "%s"...', module.root_directory) + ClangFormat(build_unit, module).run() + CppLint(build_unit, module).run() + + +class EnforceCppCodeStyle(PhonyTarget.Runnable): + """ + Enforces C++ source files to adhere to the code style definitions. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Formatting C++ code in directory "%s"...', module.root_directory) + ClangFormat(build_unit, module, enforce_changes=True).run() diff --git a/build_system/targets/code_style/markdown/__init__.py b/build_system/targets/code_style/markdown/__init__.py new file mode 100644 index 0000000000..b9717b2655 --- /dev/null +++ b/build_system/targets/code_style/markdown/__init__.py @@ -0,0 +1,39 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for checking and enforcing code style definitions for Markdown files. 
+""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder +from util.files import FileSearch, FileType + +from targets.code_style.markdown.targets import CheckMarkdownCodeStyle, EnforceMarkdownCodeStyle +from targets.code_style.modules import CodeModule +from targets.paths import Project + +FORMAT_MARKDOWN = 'format_md' + +TEST_FORMAT_MARKDOWN = 'test_format_md' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target(FORMAT_MARKDOWN).set_runnables(EnforceMarkdownCodeStyle()) \ + .add_phony_target(TEST_FORMAT_MARKDOWN).set_runnables(CheckMarkdownCodeStyle()) \ + .build() + +MODULES = [ + CodeModule( + file_type=FileType.markdown(), + root_directory=Project.root_directory, + source_file_search=FileSearch().set_recursive(False), + ), + CodeModule( + file_type=FileType.markdown(), + root_directory=Project.Python.root_directory, + source_file_search=Project.Python.file_search(), + ), + CodeModule( + file_type=FileType.markdown(), + root_directory=Project.Documentation.root_directory, + source_file_search=Project.Documentation.file_search(), + ), +] diff --git a/build_system/targets/code_style/markdown/mdformat.py b/build_system/targets/code_style/markdown/mdformat.py new file mode 100644 index 0000000000..40d8c16548 --- /dev/null +++ b/build_system/targets/code_style/markdown/mdformat.py @@ -0,0 +1,27 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "mdformat". +""" +from core.build_unit import BuildUnit +from util.run import Program + +from targets.code_style.modules import CodeModule + + +class MdFormat(Program): + """ + Allows to run the external program "mdformat". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CodeModule, enforce_changes: bool = False): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + :param enforce_changes: True, if changes should be applied to files, False otherwise + """ + super().__init__('mdformat', '--number', '--wrap', 'no', '--end-of-line', 'lf') + self.add_conditional_arguments(not enforce_changes, '--check') + self.add_arguments(*module.find_source_files()) + self.set_build_unit(build_unit) + self.add_dependencies('mdformat-myst') diff --git a/build_system/targets/code_style/markdown/requirements.txt b/build_system/targets/code_style/markdown/requirements.txt new file mode 100644 index 0000000000..8feb1dcaef --- /dev/null +++ b/build_system/targets/code_style/markdown/requirements.txt @@ -0,0 +1,2 @@ +mdformat >= 0.7, < 0.8 +mdformat-myst >= 0.2, < 0.3 diff --git a/build_system/targets/code_style/markdown/targets.py b/build_system/targets/code_style/markdown/targets.py new file mode 100644 index 0000000000..e52735bd4a --- /dev/null +++ b/build_system/targets/code_style/markdown/targets.py @@ -0,0 +1,41 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for checking and enforcing code style definitions for Markdown files. +""" +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import PhonyTarget +from util.files import FileType +from util.log import Log + +from targets.code_style.markdown.mdformat import MdFormat +from targets.code_style.modules import CodeModule + +MODULE_FILTER = CodeModule.Filter(FileType.markdown()) + + +class CheckMarkdownCodeStyle(PhonyTarget.Runnable): + """ + Checks if Markdown files adhere to the code style definitions. If this is not the case, an error is raised. 
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Checking Markdown code style in the directory "%s"...', module.root_directory) + MdFormat(build_unit, module).run() + + +class EnforceMarkdownCodeStyle(PhonyTarget.Runnable): + """ + Enforces Markdown files to adhere to the code style definitions. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Formatting Markdown files in the directory "%s"...', module.root_directory) + MdFormat(build_unit, module, enforce_changes=True).run() diff --git a/build_system/targets/code_style/modules.py b/build_system/targets/code_style/modules.py new file mode 100644 index 0000000000..9c922d493f --- /dev/null +++ b/build_system/targets/code_style/modules.py @@ -0,0 +1,54 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to source code. +""" +from typing import List + +from core.modules import Module +from util.files import FileSearch, FileType + + +class CodeModule(Module): + """ + A module that provides access to source code. + """ + + class Filter(Module.Filter): + """ + A filter that matches modules of type `CodeModule`. 
+ """ + + def __init__(self, *file_types: FileType): + """ + :param file_types: The file types of the code modules to be matched or None, if no restrictions should be + imposed on the file types + """ + self.file_types = set(file_types) + + def matches(self, module: Module) -> bool: + return isinstance(module, CodeModule) and (not self.file_types or module.file_type in self.file_types) + + def __init__(self, + file_type: FileType, + root_directory: str, + source_file_search: FileSearch = FileSearch().set_recursive(True)): + """ + :param file_type: The `FileType` of the source files that belongs to the module + :param root_directory: The path to the module's root directory + :param source_file_search: The `FileSearch` that should be used to search for source files + """ + self.file_type = file_type + self.root_directory = root_directory + self.source_file_search = source_file_search + + def find_source_files(self) -> List[str]: + """ + Finds and returns all source files that belong to the module. 
+ + :return: A list that contains the paths of the source files that have been found + """ + return self.source_file_search.filter_by_file_type(self.file_type).list(self.root_directory) + + def __str__(self) -> str: + return 'CodeModule {file_type="' + str(self.file_type) + '", root_directory="' + self.root_directory + '"}' diff --git a/.isort.cfg b/build_system/targets/code_style/python/.isort.cfg similarity index 73% rename from .isort.cfg rename to build_system/targets/code_style/python/.isort.cfg index e42dcea7db..0dbfa6c521 100644 --- a/.isort.cfg +++ b/build_system/targets/code_style/python/.isort.cfg @@ -1,10 +1,9 @@ [settings] -supported_extensions=py,pxd,pyx line_length=120 group_by_package=true known_first_party=mlrl -known_third_party=sklearn,scipy,numpy,tabulate,arff,SCons -forced_separate=mlrl.common,mlrl.boosting,mlrl.seco,mlrl.testbed,SCons +known_third_party=sklearn,scipy,numpy,tabulate,arff +forced_separate=mlrl.common,mlrl.boosting,mlrl.seco,mlrl.testbed lines_between_types=1 order_by_type=true multi_line_output=2 diff --git a/.pylintrc b/build_system/targets/code_style/python/.pylintrc similarity index 96% rename from .pylintrc rename to build_system/targets/code_style/python/.pylintrc index a72297b30b..bab33ed6b5 100644 --- a/.pylintrc +++ b/build_system/targets/code_style/python/.pylintrc @@ -39,7 +39,8 @@ disable=no-name-in-module, too-many-locals, too-many-lines, too-many-public-methods, - too-many-statements + too-many-statements, + wrong-import-order [STRING] diff --git a/.style.yapf b/build_system/targets/code_style/python/.style.yapf similarity index 100% rename from .style.yapf rename to build_system/targets/code_style/python/.style.yapf diff --git a/build_system/targets/code_style/python/__init__.py b/build_system/targets/code_style/python/__init__.py new file mode 100644 index 0000000000..e30a72280e --- /dev/null +++ b/build_system/targets/code_style/python/__init__.py @@ -0,0 +1,45 @@ +""" +Author: Michael Rapp 
(michael.rapp.ml@gmail.com) + +Defines targets and modules for checking and enforcing code style definitions for Python and Cython files. +""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder +from util.files import FileType + +from targets.code_style.modules import CodeModule +from targets.code_style.python.targets import CheckCythonCodeStyle, CheckPythonCodeStyle, EnforceCythonCodeStyle, \ + EnforcePythonCodeStyle +from targets.paths import Project + +FORMAT_PYTHON = 'format_python' + +TEST_FORMAT_PYTHON = 'test_format_python' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target(FORMAT_PYTHON).set_runnables(EnforcePythonCodeStyle(), EnforceCythonCodeStyle()) \ + .add_phony_target(TEST_FORMAT_PYTHON).set_runnables(CheckPythonCodeStyle(), CheckCythonCodeStyle()) \ + .build() + +MODULES = [ + CodeModule( + file_type=FileType.python(), + root_directory=Project.BuildSystem.root_directory, + source_file_search=Project.BuildSystem.file_search(), + ), + CodeModule( + file_type=FileType.python(), + root_directory=Project.Python.root_directory, + source_file_search=Project.Python.file_search(), + ), + CodeModule( + file_type=FileType.cython(), + root_directory=Project.Python.root_directory, + source_file_search=Project.Python.file_search(), + ), + CodeModule( + file_type=FileType.python(), + root_directory=Project.Documentation.root_directory, + source_file_search=Project.Documentation.file_search(), + ), +] diff --git a/build_system/targets/code_style/python/isort.py b/build_system/targets/code_style/python/isort.py new file mode 100644 index 0000000000..b67bf3bcad --- /dev/null +++ b/build_system/targets/code_style/python/isort.py @@ -0,0 +1,26 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "isort". 
+""" +from core.build_unit import BuildUnit +from util.run import Program + +from targets.code_style.modules import CodeModule + + +class ISort(Program): + """ + Allows to run the external program "isort". + """ + + def __init__(self, build_unit: BuildUnit, module: CodeModule, enforce_changes: bool = False): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + :param enforce_changes: True, if changes should be applied to files, False otherwise + """ + super().__init__('isort', '--settings-path', build_unit.root_directory, '--virtual-env', 'venv', + '--skip-gitignore', *module.find_source_files()) + self.add_conditional_arguments(not enforce_changes, '--check') + self.set_build_unit(build_unit) diff --git a/build_system/targets/code_style/python/pylint.py b/build_system/targets/code_style/python/pylint.py new file mode 100644 index 0000000000..ad1ae2bf74 --- /dev/null +++ b/build_system/targets/code_style/python/pylint.py @@ -0,0 +1,26 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "isort". +""" +from os import path + +from core.build_unit import BuildUnit +from util.run import Program + +from targets.code_style.modules import CodeModule + + +class PyLint(Program): + """ + Allows to run the external program "pylint". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CodeModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__('pylint', *module.find_source_files(), '--jobs=0', '--ignore=build', + '--rcfile=' + path.join(build_unit.root_directory, '.pylintrc'), '--score=n') + self.set_build_unit(build_unit) diff --git a/build_system/targets/code_style/python/requirements.txt b/build_system/targets/code_style/python/requirements.txt new file mode 100644 index 0000000000..ef0b290c85 --- /dev/null +++ b/build_system/targets/code_style/python/requirements.txt @@ -0,0 +1,3 @@ +isort >= 5.13, < 5.14 +pylint >= 3.3, < 3.4 +yapf >= 0.43, < 0.44 diff --git a/build_system/targets/code_style/python/targets.py b/build_system/targets/code_style/python/targets.py new file mode 100644 index 0000000000..b10e8fe8bb --- /dev/null +++ b/build_system/targets/code_style/python/targets.py @@ -0,0 +1,74 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for checking and enforcing code style definitions for Python and Cython files. +""" +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import PhonyTarget +from util.files import FileType +from util.log import Log + +from targets.code_style.modules import CodeModule +from targets.code_style.python.isort import ISort +from targets.code_style.python.pylint import PyLint +from targets.code_style.python.yapf import Yapf + +PYTHON_MODULE_FILTER = CodeModule.Filter(FileType.python()) + +CYTHON_MODULE_FILTER = CodeModule.Filter(FileType.cython()) + + +class CheckPythonCodeStyle(PhonyTarget.Runnable): + """ + Checks if Python source files adhere to the code style definitions. If this is not the case, an error is raised. 
+ """ + + def __init__(self): + super().__init__(PYTHON_MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Checking Python code style in directory "%s"...', module.root_directory) + ISort(build_unit, module).run() + Yapf(build_unit, module).run() + PyLint(build_unit, module).run() + + +class EnforcePythonCodeStyle(PhonyTarget.Runnable): + """ + Enforces Python source files to adhere to the code style definitions. + """ + + def __init__(self): + super().__init__(PYTHON_MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Formatting Python code in directory "%s"...', module.root_directory) + ISort(build_unit, module, enforce_changes=True).run() + Yapf(build_unit, module, enforce_changes=True).run() + + +class CheckCythonCodeStyle(PhonyTarget.Runnable): + """ + Checks if Cython source files adhere to the code style definitions. If this is not the case, an error is raised. + """ + + def __init__(self): + super().__init__(CYTHON_MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Checking Cython code style in directory "%s"...', module.root_directory) + ISort(build_unit, module).run() + + +class EnforceCythonCodeStyle(PhonyTarget.Runnable): + """ + Enforces Cython source files to adhere to the code style definitions. + """ + + def __init__(self): + super().__init__(CYTHON_MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Formatting Cython code in directory "%s"...', module.root_directory) + ISort(build_unit, module, enforce_changes=True).run() diff --git a/build_system/targets/code_style/python/yapf.py b/build_system/targets/code_style/python/yapf.py new file mode 100644 index 0000000000..e18b77198e --- /dev/null +++ b/build_system/targets/code_style/python/yapf.py @@ -0,0 +1,27 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "yapf". 
+""" +from os import path + +from core.build_unit import BuildUnit +from util.run import Program + +from targets.code_style.modules import CodeModule + + +class Yapf(Program): + """ + Allows to run the external program "yapf". + """ + + def __init__(self, build_unit: BuildUnit, module: CodeModule, enforce_changes: bool = False): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + :param enforce_changes: True, if changes should be applied to files, False otherwise + """ + super().__init__('yapf', '--parallel', '--style=' + path.join(build_unit.root_directory, '.style.yapf'), + '--in-place' if enforce_changes else '--diff', *module.find_source_files()) + self.set_build_unit(build_unit) diff --git a/.yamlfix.toml b/build_system/targets/code_style/yaml/.yamlfix.toml similarity index 100% rename from .yamlfix.toml rename to build_system/targets/code_style/yaml/.yamlfix.toml diff --git a/build_system/targets/code_style/yaml/__init__.py b/build_system/targets/code_style/yaml/__init__.py new file mode 100644 index 0000000000..c24cfc3dcd --- /dev/null +++ b/build_system/targets/code_style/yaml/__init__.py @@ -0,0 +1,33 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for checking and enforcing code style definitions for YAML files. 
+""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder +from util.files import FileSearch, FileType + +from targets.code_style.modules import CodeModule +from targets.code_style.yaml.targets import CheckYamlCodeStyle, EnforceYamlCodeStyle +from targets.paths import Project + +FORMAT_YAML = 'format_yaml' + +TEST_FORMAT_YAML = 'test_format_yaml' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target(FORMAT_YAML).set_runnables(EnforceYamlCodeStyle()) \ + .add_phony_target(TEST_FORMAT_YAML).set_runnables(CheckYamlCodeStyle()) \ + .build() + +MODULES = [ + CodeModule( + file_type=FileType.yaml(), + root_directory=Project.root_directory, + source_file_search=FileSearch().set_recursive(False).set_hidden(True), + ), + CodeModule( + file_type=FileType.yaml(), + root_directory=Project.Github.root_directory, + ), +] diff --git a/build_system/targets/code_style/yaml/requirements.txt b/build_system/targets/code_style/yaml/requirements.txt new file mode 100644 index 0000000000..70c03b21f4 --- /dev/null +++ b/build_system/targets/code_style/yaml/requirements.txt @@ -0,0 +1 @@ +yamlfix >= 1.17, < 1.18 diff --git a/build_system/targets/code_style/yaml/targets.py b/build_system/targets/code_style/yaml/targets.py new file mode 100644 index 0000000000..1e1670d06e --- /dev/null +++ b/build_system/targets/code_style/yaml/targets.py @@ -0,0 +1,41 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for checking and enforcing code style definitions for YAML files. 
+""" +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import PhonyTarget +from util.files import FileType +from util.log import Log + +from targets.code_style.modules import CodeModule +from targets.code_style.yaml.yamlfix import YamlFix + +MODULE_FILTER = CodeModule.Filter(FileType.yaml()) + + +class CheckYamlCodeStyle(PhonyTarget.Runnable): + """ + Checks if YAML files adhere to the code style definitions. If this is not the case, an error is raised. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Checking YAML files in the directory "%s"...', module.root_directory) + YamlFix(build_unit, module).run() + + +class EnforceYamlCodeStyle(PhonyTarget.Runnable): + """ + Enforces YAML files to adhere to the code style definitions. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Formatting YAML files in the directory "%s"...', module.root_directory) + YamlFix(build_unit, module, enforce_changes=True).run() diff --git a/build_system/targets/code_style/yaml/yamlfix.py b/build_system/targets/code_style/yaml/yamlfix.py new file mode 100644 index 0000000000..6becdfafc4 --- /dev/null +++ b/build_system/targets/code_style/yaml/yamlfix.py @@ -0,0 +1,29 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "yamlfix". +""" +from os import path + +from core.build_unit import BuildUnit +from util.run import Program + +from targets.code_style.modules import CodeModule + + +class YamlFix(Program): + """ + Allows to run the external program "yamlfix". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CodeModule, enforce_changes: bool = False): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + :param enforce_changes: True, if changes should be applied to files, False otherwise + """ + super().__init__('yamlfix', '--config-file', path.join(build_unit.root_directory, '.yamlfix.toml')) + self.add_conditional_arguments(not enforce_changes, '--check') + self.add_arguments(*module.find_source_files()) + self.set_build_unit(build_unit) + self.print_arguments(True) diff --git a/build_system/targets/compilation/__init__.py b/build_system/targets/compilation/__init__.py new file mode 100644 index 0000000000..e741590691 --- /dev/null +++ b/build_system/targets/compilation/__init__.py @@ -0,0 +1,21 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets for compiling code. +""" +from core.build_unit import BuildUnit +from core.targets import TargetBuilder + +from targets.compilation.cpp import COMPILE_CPP, INSTALL_CPP +from targets.compilation.cython import COMPILE_CYTHON, INSTALL_CYTHON + +INSTALL = 'install' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target('compile') \ + .depends_on(COMPILE_CPP, COMPILE_CYTHON, clean_dependencies=True) \ + .nop() \ + .add_phony_target(INSTALL) \ + .depends_on(INSTALL_CPP, INSTALL_CYTHON, clean_dependencies=True) \ + .nop() \ + .build() diff --git a/build_system/targets/compilation/build_options.py b/build_system/targets/compilation/build_options.py new file mode 100644 index 0000000000..0cab43c8d6 --- /dev/null +++ b/build_system/targets/compilation/build_options.py @@ -0,0 +1,96 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to configure build options. 
+""" +from abc import ABC, abstractmethod +from os import environ +from typing import Iterable, Optional + +from util.env import get_env + + +class BuildOption(ABC): + """ + An abstract base class for all build options. + """ + + def __init__(self, name: str, subpackage: Optional[str]): + """ + name: The name of the build option + subpackage: The subpackage, the build option corresponds to, or None, if it is a global option + """ + self.name = name + self.subpackage = subpackage + + @property + def key(self) -> str: + """ + The key to be used for setting the build option. + """ + return (self.subpackage + ':' if self.subpackage else '') + self.name + + @property + @abstractmethod + def value(self) -> Optional[str]: + """ + Returns the value of the build option. + + :return: The value or None, if no value is set + """ + + def __eq__(self, other: 'BuildOption') -> bool: + return self.key == other.key + + def __hash__(self) -> int: + return hash(self.key) + + def __bool__(self) -> bool: + return self.value is not None + + +class EnvBuildOption(BuildOption): + """ + A build option, whose value is obtained from an environment variable. + """ + + def __init__(self, name: str, subpackage: Optional[str] = None): + super().__init__(name, subpackage) + + @property + def value(self) -> Optional[str]: + value = get_env(environ, self.name.upper(), None) + + if value: + value = value.strip() + + return value + + +class BuildOptions(Iterable): + """ + Stores multiple build options. + """ + + def __init__(self): + self.build_options = set() + + def add(self, build_option: BuildOption) -> 'BuildOptions': + """ + Adds a build option. 
+ + :param build_option: The build option to be added + :return: The `BuildOptions` itself + """ + self.build_options.add(build_option) + return self + + def __iter__(self): + return iter(self.build_options) + + def __bool__(self) -> bool: + for build_option in self.build_options: + if build_option: + return True + + return False diff --git a/build_system/targets/compilation/cpp/__init__.py b/build_system/targets/compilation/cpp/__init__.py new file mode 100644 index 0000000000..6f931f7968 --- /dev/null +++ b/build_system/targets/compilation/cpp/__init__.py @@ -0,0 +1,41 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for compiling C++ code. +""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder +from util.files import FileType + +from targets.compilation.cpp.targets import CompileCpp, InstallCpp, SetupCpp +from targets.compilation.modules import CompilationModule +from targets.dependencies.python import VENV +from targets.paths import Project + +SETUP_CPP = 'setup_cpp' + +COMPILE_CPP = 'compile_cpp' + +INSTALL_CPP = 'install_cpp' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_build_target(SETUP_CPP) \ + .depends_on(VENV) \ + .set_runnables(SetupCpp()) \ + .add_phony_target(COMPILE_CPP) \ + .depends_on(SETUP_CPP, clean_dependencies=True) \ + .set_runnables(CompileCpp()) \ + .add_build_target(INSTALL_CPP) \ + .depends_on(COMPILE_CPP) \ + .set_runnables(InstallCpp()) \ + .build() + +MODULES = [ + CompilationModule( + file_type=FileType.cpp(), + root_directory=Project.Cpp.root_directory, + build_directory_name=Project.Cpp.build_directory_name, + install_directory=Project.Python.root_directory, + installed_file_search=Project.Cpp.file_search().filter_by_file_type(FileType.shared_library()), + ), +] diff --git a/build_system/targets/compilation/cpp/targets.py b/build_system/targets/compilation/cpp/targets.py new file mode 100644 index 0000000000..3d0aee3898 --- 
/dev/null +++ b/build_system/targets/compilation/cpp/targets.py @@ -0,0 +1,74 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for compiling C++ code. +""" +from typing import List + +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import BuildTarget, PhonyTarget +from util.files import FileType +from util.log import Log + +from targets.compilation.build_options import BuildOptions, EnvBuildOption +from targets.compilation.meson import MesonCompile, MesonConfigure, MesonInstall, MesonSetup +from targets.compilation.modules import CompilationModule + +MODULE_FILTER = CompilationModule.Filter(FileType.cpp()) + +BUILD_OPTIONS = BuildOptions() \ + .add(EnvBuildOption(name='subprojects')) \ + .add(EnvBuildOption(name='test_support', subpackage='common')) \ + .add(EnvBuildOption(name='multi_threading_support', subpackage='common')) \ + .add(EnvBuildOption(name='gpu_support', subpackage='common')) + + +class SetupCpp(BuildTarget.Runnable): + """ + Sets up the build system for compiling C++ code. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + MesonSetup(build_unit, module, build_options=BUILD_OPTIONS).run() + + def get_output_files(self, module: Module) -> List[str]: + return [module.build_directory] + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing C++ build files from directory "%s"...', module.root_directory) + return super().get_clean_files(module) + + +class CompileCpp(PhonyTarget.Runnable): + """ + Compiles C++ code. 
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Compiling C++ code in directory "%s"...', module.root_directory) + MesonConfigure(build_unit, module, BUILD_OPTIONS).run() + MesonCompile(build_unit, module).run() + + +class InstallCpp(BuildTarget.Runnable): + """ + Installs shared libraries into the source tree. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Installing shared libraries from directory "%s" into source tree...', module.root_directory) + MesonInstall(build_unit, module).run() + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing shared libraries installed from directory "%s" from source tree...', module.root_directory) + return module.find_installed_files() diff --git a/build_system/targets/compilation/cython/__init__.py b/build_system/targets/compilation/cython/__init__.py new file mode 100644 index 0000000000..1d3bce4ae9 --- /dev/null +++ b/build_system/targets/compilation/cython/__init__.py @@ -0,0 +1,40 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for compiling Cython code. 
+""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder +from util.files import FileType + +from targets.compilation.cpp import COMPILE_CPP +from targets.compilation.cython.targets import CompileCython, InstallCython, SetupCython +from targets.compilation.modules import CompilationModule +from targets.paths import Project + +SETUP_CYTHON = 'setup_cython' + +COMPILE_CYTHON = 'compile_cython' + +INSTALL_CYTHON = 'install_cython' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_build_target(SETUP_CYTHON) \ + .depends_on(COMPILE_CPP) \ + .set_runnables(SetupCython()) \ + .add_phony_target(COMPILE_CYTHON) \ + .depends_on(SETUP_CYTHON, clean_dependencies=True) \ + .set_runnables(CompileCython()) \ + .add_build_target(INSTALL_CYTHON) \ + .depends_on(COMPILE_CYTHON) \ + .set_runnables(InstallCython()) \ + .build() + +MODULES = [ + CompilationModule( + file_type=FileType.cython(), + root_directory=Project.Python.root_directory, + build_directory_name=Project.Python.build_directory_name, + installed_file_search=Project.Python.file_search().filter_by_file_type(FileType.extension_module()), + ), +] diff --git a/build_system/targets/compilation/cython/requirements.txt b/build_system/targets/compilation/cython/requirements.txt new file mode 100644 index 0000000000..14997b3294 --- /dev/null +++ b/build_system/targets/compilation/cython/requirements.txt @@ -0,0 +1 @@ +cython >= 3.0, < 3.1 diff --git a/build_system/targets/compilation/cython/targets.py b/build_system/targets/compilation/cython/targets.py new file mode 100644 index 0000000000..d981b7a7c2 --- /dev/null +++ b/build_system/targets/compilation/cython/targets.py @@ -0,0 +1,73 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for compiling Cython code. 
+""" +from typing import List + +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import BuildTarget, PhonyTarget +from util.files import FileType +from util.log import Log + +from targets.compilation.build_options import BuildOptions, EnvBuildOption +from targets.compilation.meson import MesonCompile, MesonConfigure, MesonInstall, MesonSetup +from targets.compilation.modules import CompilationModule + +MODULE_FILTER = CompilationModule.Filter(FileType.cython()) + +BUILD_OPTIONS = BuildOptions() \ + .add(EnvBuildOption(name='subprojects')) + + +class SetupCython(BuildTarget.Runnable): + """ + Sets up the build system for compiling the Cython code. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + MesonSetup(build_unit, module, build_options=BUILD_OPTIONS) \ + .add_dependencies('cython') \ + .run() + + def get_output_files(self, module: Module) -> List[str]: + return [module.build_directory] + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing Cython build files from directory "%s"...', module.root_directory) + return super().get_clean_files(module) + + +class CompileCython(PhonyTarget.Runnable): + """ + Compiles the Cython code. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Compiling Cython code in directory "%s"...', module.root_directory) + MesonConfigure(build_unit, module, build_options=BUILD_OPTIONS).run() + MesonCompile(build_unit, module).run() + + +class InstallCython(BuildTarget.Runnable): + """ + Installs extension modules into the source tree. 
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Installing extension modules from directory "%s" into source tree...', module.root_directory) + MesonInstall(build_unit, module).run() + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing extension modules installed from directory "%s" from source tree...', module.root_directory) + return module.find_installed_files() diff --git a/build_system/targets/compilation/meson.py b/build_system/targets/compilation/meson.py new file mode 100644 index 0000000000..93cbf4c2af --- /dev/null +++ b/build_system/targets/compilation/meson.py @@ -0,0 +1,112 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "meson". +""" +from abc import ABC +from typing import List + +from core.build_unit import BuildUnit +from util.log import Log +from util.run import Program + +from targets.compilation.build_options import BuildOptions +from targets.compilation.modules import CompilationModule + + +def build_options_as_meson_arguments(build_options: BuildOptions) -> List[str]: + """ + Returns a list of arguments that can be passed to meson for setting build options. + + :param build_options: The build options + :return: A list of arguments + """ + arguments = [] + + for build_option in build_options: + if build_option: + arguments.append('-D') + arguments.append(build_option.key + '=' + build_option.value) + + return arguments + + +class Meson(Program, ABC): + """ + An abstract base class for all classes that allow to run the external program "meson". 
+ """ + + def __init__(self, build_unit: BuildUnit, meson_command: str, *arguments: str): + """ + :param build_unit: The build unit from which the program should be run + :param program: The meson command to be run + :param arguments: Optional arguments to be passed to meson + """ + super().__init__('meson', meson_command, *arguments) + self.print_arguments(True) + self.set_build_unit(build_unit) + + +class MesonSetup(Meson): + """ + Allows to run the external program "meson setup". + """ + + def __init__(self, build_unit: BuildUnit, module: CompilationModule, build_options: BuildOptions = BuildOptions()): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + :param build_options: The build options to be used + """ + super().__init__(build_unit, 'setup', *build_options_as_meson_arguments(build_options), module.build_directory, + module.root_directory) + self.add_dependencies('ninja') + + +class MesonConfigure(Meson): + """ + Allows to run the external program "meson configure". + """ + + def __init__(self, build_unit: BuildUnit, module: CompilationModule, build_options: BuildOptions = BuildOptions()): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + :param build_options: The build options to be used + """ + super().__init__(build_unit, 'configure', *build_options_as_meson_arguments(build_options), + module.build_directory) + self.build_options = build_options + + def _should_be_skipped(self) -> bool: + return not self.build_options + + def _before(self): + Log.info('Configuring build options according to environment variables...') + + +class MesonCompile(Meson): + """ + Allows to run the external program "meson compile". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CompilationModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__(build_unit, 'compile', '-C', module.build_directory) + + +class MesonInstall(Meson): + """ + Allows to run the external program "meson install". + """ + + def __init__(self, build_unit: BuildUnit, module: CompilationModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__(build_unit, 'install', '--no-rebuild', '--only-changed', '-C', module.build_directory) + self.install_program(False) diff --git a/build_system/targets/compilation/modules.py b/build_system/targets/compilation/modules.py new file mode 100644 index 0000000000..7ba5097a4e --- /dev/null +++ b/build_system/targets/compilation/modules.py @@ -0,0 +1,78 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to source code that must be compiled. +""" +from os import path +from typing import List, Optional + +from core.modules import Module +from util.files import FileSearch, FileType + + +class CompilationModule(Module): + """ + A module that provides access to source code that must be compiled. + """ + + class Filter(Module.Filter): + """ + A filter that matches modules of type `CompilationModule`. 
+ """ + + def __init__(self, *file_types: FileType): + """ + :param file_types: The file types of the source files contained by the modules to be matched or None, if no + restrictions should be imposed on the file types + """ + self.file_types = set(file_types) + + def matches(self, module: Module) -> bool: + return isinstance(module, CompilationModule) and (not self.file_types + or module.file_type in self.file_types) + + def __init__(self, + file_type: FileType, + root_directory: str, + build_directory_name: str, + install_directory: Optional[str] = None, + installed_file_search: Optional[FileSearch] = None): + """ + :param file_type: The file types of the source files that belongs to the module + :param root_directory: The path to the module's root directory + :param build_directory_name: The name of the module's build directory + :param install_directory: The path to the directory into which files are installed or None, if the files + are installed into the root directory + :param installed_file_search: The `FileSearch` that should be used to search for installed files or None, if + the module does never contain any installed files + """ + self.file_type = file_type + self.root_directory = root_directory + self.build_directory_name = build_directory_name + self.install_directory = install_directory if install_directory else root_directory + self.installed_file_search = installed_file_search + + @property + def build_directory(self) -> str: + """ + The path to the directory, where build files should be stored. + """ + return path.join(self.root_directory, self.build_directory_name) + + def find_installed_files(self) -> List[str]: + """ + Finds and returns all installed files that belong to the module. 
+ + :return: A list that contains the paths of the requirements files that have been found + """ + if self.installed_file_search: + return self.installed_file_search \ + .set_recursive(True) \ + .exclude_subdirectories_by_name(path.basename(self.build_directory)) \ + .list(self.install_directory) + + return [] + + def __str__(self) -> str: + return 'CompilationModule {file_type="' + str( + self.file_type) + '", root_directory=' + self.root_directory + '"}' diff --git a/build_system/targets/compilation/requirements.txt b/build_system/targets/compilation/requirements.txt new file mode 100644 index 0000000000..8611b61e24 --- /dev/null +++ b/build_system/targets/compilation/requirements.txt @@ -0,0 +1,2 @@ +meson >= 1.6, < 1.7 +ninja >= 1.11, < 1.12 diff --git a/build_system/targets/dependencies/__init__.py b/build_system/targets/dependencies/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/build_system/targets/dependencies/github/__init__.py b/build_system/targets/dependencies/github/__init__.py new file mode 100644 index 0000000000..32d11d8295 --- /dev/null +++ b/build_system/targets/dependencies/github/__init__.py @@ -0,0 +1,20 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets for updating the project's GitHub Actions. 
+""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder + +from targets.dependencies.github.modules import GithubWorkflowModule +from targets.dependencies.github.targets import CheckGithubActions, UpdateGithubActions +from targets.paths import Project + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target('check_github_actions').set_runnables(CheckGithubActions()) \ + .add_phony_target('update_github_actions').set_runnables(UpdateGithubActions()) \ + .build() + +MODULES = [ + GithubWorkflowModule(root_directory=Project.Github.root_directory), +] diff --git a/build_system/targets/dependencies/github/actions.py b/build_system/targets/dependencies/github/actions.py new file mode 100644 index 0000000000..61ca1c1ab4 --- /dev/null +++ b/build_system/targets/dependencies/github/actions.py @@ -0,0 +1,350 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for checking the project's GitHub workflows for outdated Actions. +""" +from dataclasses import dataclass, replace +from functools import cached_property, reduce +from os import environ +from typing import Dict, List, Optional, Set + +from core.build_unit import BuildUnit +from util.env import get_env +from util.log import Log + +from targets.dependencies.github.modules import GithubWorkflowModule +from targets.dependencies.github.pygithub import GithubApi +from targets.dependencies.github.pyyaml import YamlFile + + +@dataclass +class ActionVersion: + """ + The version of a GitHub Action. + + Attributes: + version: The full version string + """ + version: str + + SEPARATOR = '.' + + @staticmethod + def from_version_numbers(*version_numbers: int) -> 'ActionVersion': + """ + Creates and returns the version of a GitHub Action from one or several version numbers. 
+ + :param version_numbers: The version numbers + :return: The version that has been created + """ + return ActionVersion(ActionVersion.SEPARATOR.join([str(version_number) for version_number in version_numbers])) + + @property + def version_numbers(self) -> List[int]: + """ + A list that stores the individual version numbers, the full version consists of. + """ + return [int(version_number) for version_number in str(self).split(self.SEPARATOR)] + + def __str__(self) -> str: + return self.version.lstrip('v') + + def __lt__(self, other: 'ActionVersion') -> bool: + first_version_numbers = self.version_numbers + second_version_numbers = other.version_numbers + + for i in range(min(len(first_version_numbers), len(second_version_numbers))): + first_version_number = first_version_numbers[i] + second_version_number = second_version_numbers[i] + + if first_version_number > second_version_number: + return False + if first_version_number < second_version_number: + return True + + return False + + +@dataclass +class Action: + """ + A GitHub Action. + + Attributes: + name: The name of the Action + version: The version of the Action + """ + name: str + version: ActionVersion + + SEPARATOR = '@' + + @staticmethod + def from_uses_clause(uses_clause: str) -> 'Action': + """ + Creates and returns a GitHub Action from the uses-clause of a workflow. + + :param uses_clause: The uses-clause + :return: The GitHub Action that has been created + """ + parts = uses_clause.split(Action.SEPARATOR) + + if len(parts) != 2: + raise ValueError('Uses-clause must contain the symbol + "' + Action.SEPARATOR + '", but got "' + uses_clause + + '"') + + return Action(name=parts[0], version=ActionVersion(parts[1])) + + @property + def repository(self) -> str: + """ + The name of the repository, where the GitHub Action is hosted. 
+ """ + repository = self.name + separator = '/' + parts = repository.split(separator) + return separator.join(parts[:2]) if len(parts) > 2 else repository + + def __str__(self) -> str: + return self.name + self.SEPARATOR + str(self.version) + + def __eq__(self, other: 'Action') -> bool: + return str(self) == str(other) + + def __hash__(self) -> int: + return hash(str(self)) + + +class Workflow(YamlFile): + """ + A GitHub workflow. + """ + + TAG_USES = 'uses' + + @cached_property + def uses_clauses(self) -> List[str]: + """ + A list that contains all uses-clauses in the workflow. + """ + uses_clauses = [] + + for job in self.yaml_dict.get('jobs', {}).values(): + for step in job.get('steps', []): + uses_clause = step.get(self.TAG_USES, None) + + if uses_clause: + uses_clauses.append(uses_clause) + + return uses_clauses + + @cached_property + def actions(self) -> Set[Action]: + """ + A set that contains all GitHub Actions used in the workflow. + """ + actions = set() + + for uses_clause in self.uses_clauses: + try: + actions.add(Action.from_uses_clause(uses_clause)) + except ValueError as error: + raise RuntimeError('Failed to parse uses-clause in workflow "' + self.file + '"') from error + + return actions + + def update_actions(self, *updated_actions: Action): + """ + Updates given Actions in the workflow definition file. 
+ + :param updated_actions: The actions to be updated + """ + updated_actions_by_name = reduce(lambda aggr, x: dict(aggr, **{x.name: x}), updated_actions, {}) + uses_prefix = self.TAG_USES + ':' + updated_lines = [] + + for line in self.lines: + updated_lines.append(line) + line_stripped = line.strip() + + if line_stripped.startswith(uses_prefix): + uses_clause = line_stripped[len(uses_prefix):].strip() + action = Action.from_uses_clause(uses_clause) + updated_action = updated_actions_by_name.get(action.name) + + if updated_action: + updated_lines[-1] = line.replace(str(action.version), str(updated_action.version)) + + self.write_lines(*updated_lines) + + def write_lines(self, *lines: str): + super().write_lines(lines) + + try: + del self.uses_clauses + except AttributeError: + pass + + try: + del self.actions + except AttributeError: + pass + + def __eq__(self, other: 'Workflow') -> bool: + return self.file == other.file + + def __hash__(self) -> int: + return hash(self.file) + + +class WorkflowUpdater: + """ + Allows checking the versions of GitHub Actions used in multiple workflows and updating outdated ones. + """ + + ENV_GITHUB_TOKEN = 'GITHUB_TOKEN' + + @dataclass + class OutdatedAction: + """ + An outdated GitHub Action. + + Attributes: + action: The outdated Action + latest_version: The latest version of the Action + """ + action: Action + latest_version: ActionVersion + + def __str__(self) -> str: + return str(self.action) + + def __eq__(self, other: 'WorkflowUpdater.OutdatedAction') -> bool: + return self.action == other.action + + def __hash__(self) -> int: + return hash(self.action) + + @dataclass + class UpdatedAction: + """ + A GitHub Action that has been updated. 
+ + Attributes: + previous: The previous Action + updated: The updated Action + """ + previous: 'WorkflowUpdater.OutdatedAction' + updated: Action + + def __str__(self) -> str: + return str(self.updated) + + def __eq__(self, other: 'WorkflowUpdater.UpdatedAction') -> bool: + return self.updated == other.updated + + def __hash__(self) -> int: + return hash(self.updated) + + @staticmethod + def __get_github_token() -> Optional[str]: + github_token = get_env(environ, WorkflowUpdater.ENV_GITHUB_TOKEN) + + if not github_token: + Log.warning('No GitHub API token is set. You can specify it via the environment variable %s.', + WorkflowUpdater.ENV_GITHUB_TOKEN) + + return github_token + + def __query_latest_action_version(self, action: Action) -> ActionVersion: + repository_name = action.repository + + try: + latest_tag = GithubApi(self.build_unit) \ + .set_token(self.__get_github_token()) \ + .open_repository(repository_name) \ + .get_latest_release_tag() + + if not latest_tag: + raise RuntimeError('No releases available') + + return ActionVersion(latest_tag) + except RuntimeError as error: + raise RuntimeError('Unable to determine latest version of action "' + str(action) + + '" hosted in repository "' + repository_name + '"') from error + + def __get_latest_action_version(self, action: Action) -> ActionVersion: + latest_version = self.version_cache.get(action.name) + + if not latest_version: + Log.info('Checking version of GitHub Action "%s"...', action.name) + latest_version = self.__query_latest_action_version(action) + self.version_cache[action.name] = latest_version + + return latest_version + + def __init__(self, build_unit: BuildUnit, module: GithubWorkflowModule): + """ + :param build_unit: The build unit from which workflow definition files should be read + :param module: The module, that contains the workflow definition files + """ + self.build_unit = build_unit + self.module = module + self.version_cache = {} + self.github_token = 
WorkflowUpdater.__get_github_token() + + @cached_property + def workflows(self) -> Set[Workflow]: + """ + All GitHub workflows that are defined in the directory where workflow definition files are located. + """ + workflows = set() + + for workflow_file in self.module.find_workflow_files(): + Log.info('Searching for GitHub Actions in workflow "%s"...', workflow_file) + workflows.add(Workflow(self.build_unit, workflow_file)) + + return workflows + + def find_outdated_workflows(self) -> Dict[Workflow, Set[OutdatedAction]]: + """ + Finds and returns all workflows with outdated GitHub actions. + + :return: A dictionary that contains for each workflow a set of outdated Actions + """ + outdated_workflows = {} + + for workflow in self.workflows: + for action in workflow.actions: + latest_version = self.__get_latest_action_version(action) + + if action.version < latest_version: + outdated_actions = outdated_workflows.setdefault(workflow, set()) + outdated_actions.add(WorkflowUpdater.OutdatedAction(action, latest_version)) + + return outdated_workflows + + def update_outdated_workflows(self) -> Dict[Workflow, Set[UpdatedAction]]: + """ + Updates all workflows with outdated GitHub Actions. 
+ + :return: A dictionary that contains for each workflow a set of updated Actions + """ + updated_workflows = {} + + for workflow, outdated_actions in self.find_outdated_workflows().items(): + updated_actions = set() + + for outdated_action in outdated_actions: + previous_version = outdated_action.action.version + previous_version_numbers = previous_version.version_numbers + latest_version_numbers = outdated_action.latest_version.version_numbers + max_version_numbers = min(len(previous_version_numbers), len(latest_version_numbers)) + updated_version = ActionVersion.from_version_numbers(*latest_version_numbers[:max_version_numbers]) + updated_actions = updated_workflows.setdefault(workflow, updated_actions) + updated_action = replace(outdated_action.action, version=updated_version) + updated_actions.add(WorkflowUpdater.UpdatedAction(previous=outdated_action, updated=updated_action)) + + workflow.update_actions(*[updated_action.updated for updated_action in updated_actions]) + + return updated_workflows diff --git a/build_system/targets/dependencies/github/modules.py b/build_system/targets/dependencies/github/modules.py new file mode 100644 index 0000000000..5f4b31d6fe --- /dev/null +++ b/build_system/targets/dependencies/github/modules.py @@ -0,0 +1,42 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to GitHub workflows. +""" +from typing import List + +from core.modules import Module +from util.files import FileSearch, FileType + + +class GithubWorkflowModule(Module): + """ + A module that provides access to GitHub workflows. + """ + + class Filter(Module.Filter): + """ + A filter that matches modules of type `GithubWorkflowModule`. 
+ """ + + def matches(self, module: Module) -> bool: + return isinstance(module, GithubWorkflowModule) + + def __init__(self, root_directory: str, workflow_file_search: FileSearch = FileSearch().set_recursive(True)): + """ + :param root_directory: The path to the module's root directory + :param workflow_file_search: The `FileSearch` that should be used to search for workflow definition files + """ + self.root_directory = root_directory + self.workflow_file_search = workflow_file_search + + def find_workflow_files(self) -> List[str]: + """ + Finds and returns all workflow definition files that belong to the module. + + :return: A list that contains the paths of the workflow definition files that have been found + """ + return self.workflow_file_search.filter_by_file_type(FileType.yaml()).list(self.root_directory) + + def __str__(self) -> str: + return 'GithubWorkflowModule {root_directory="' + self.root_directory + '"}' diff --git a/build_system/targets/dependencies/github/pygithub.py b/build_system/targets/dependencies/github/pygithub.py new file mode 100644 index 0000000000..3b7773cab3 --- /dev/null +++ b/build_system/targets/dependencies/github/pygithub.py @@ -0,0 +1,75 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes for accessing the GitHub API via "pygithub". +""" +from typing import Optional + +from core.build_unit import BuildUnit +from util.pip import Pip + + +class GithubApi: + """ + Allows to access the GitHub API. + """ + + class Repository: + """ + Allows to query information about a single GitHub repository. 
+ """ + + def __init__(self, repository_name: str, authentication): + """ + :param repository_name: The name of the repository + :param authentication: The authentication to be used for accessing the repository or None, if no + authentication should be used + """ + self.repository_name = repository_name + self.authentication = authentication + + def get_latest_release_tag(self) -> Optional[str]: + """ + Returns the tag of the repository's latest release, if any. + + :return: The tag of the latest release or None, if no release is available + """ + # pylint: disable=import-outside-toplevel + from github import Github, UnknownObjectException + + with Github(auth=self.authentication) as client: + try: + repository = client.get_repo(self.repository_name) + latest_release = repository.get_latest_release() + return latest_release.tag_name + except UnknownObjectException as error: + raise RuntimeError('Failed to query latest release of GitHub repository "' + self.repository_name + + '"') from error + + def __init__(self, build_unit: BuildUnit): + """ + :param build_unit: The build unit to access the GitHub API from + """ + Pip.for_build_unit(build_unit).install_packages('pygithub') + self.authentication = None + + def set_token(self, token: Optional[str]) -> 'GithubApi': + """ + Sets a token to be used for authentication. + + :param token: The token to be set or None, if no token should be used + :return: The `GithubApi` itself + """ + # pylint: disable=import-outside-toplevel + from github import Auth + self.authentication = Auth.Token(token) if token else None + return self + + def open_repository(self, repository_name: str) -> Repository: + """ + Specifies the name of a GitHub repository about which information should be queried. 
class YamlFile(TextFile):
    """
    A YAML file.
    """

    def __init__(self, build_unit: BuildUnit, file: str):
        """
        :param build_unit: The build unit from which the YAML file is read
        :param file:       The path to the YAML file
        """
        super().__init__(file)
        self.build_unit = build_unit

    @cached_property
    def yaml_dict(self) -> Dict:
        """
        A dictionary that stores the content of the YAML file.
        """
        # "pyyaml" is installed lazily, as it is only needed once the file's content is actually accessed.
        Pip.for_build_unit(self.build_unit).install_packages('pyyaml')
        # pylint: disable=import-outside-toplevel
        import yaml
        with read_file(self.file) as file:
            return yaml.load(file.read(), Loader=yaml.CLoader)

    def write_lines(self, *lines: str):
        """
        Writes the given lines to the YAML file and invalidates the cached YAML content.

        :param lines: The lines to be written to the file
        """
        # BUG FIX: the variadic arguments must be unpacked when delegating to the superclass. Its signature is
        # `write_lines(self, *lines: str)`, so passing the tuple itself would write a single tuple-valued "line".
        super().write_lines(*lines)

        # Invalidate the cached YAML content, as the file has changed. Deleting a cached_property that has not been
        # computed yet raises an AttributeError, which can safely be ignored.
        try:
            del self.yaml_dict
        except AttributeError:
            pass
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + outdated_workflows = WorkflowUpdater(build_unit, module).find_outdated_workflows() + + if outdated_workflows: + table = Table(build_unit, 'Workflow', 'Action', 'Current version', 'Latest version') + + for workflow, outdated_actions in outdated_workflows.items(): + for outdated_action in outdated_actions: + table.add_row(workflow.file, str(outdated_action.action.name), str(outdated_action.action.version), + str(outdated_action.latest_version)) + + table.sort_rows(0, 1) + Log.info('The following GitHub Actions are outdated:\n\n%s', str(table)) + else: + Log.info('All GitHub Actions are up-to-date!') + + +class UpdateGithubActions(PhonyTarget.Runnable): + """ + Updates and prints all outdated Actions used in the project's GitHub workflows. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + updated_workflows = WorkflowUpdater(build_unit, module).update_outdated_workflows() + + if updated_workflows: + table = Table(build_unit, 'Workflow', 'Action', 'Previous version', 'Updated version') + + for workflow, updated_actions in updated_workflows.items(): + for updated_action in updated_actions: + table.add_row(workflow.file, updated_action.updated.name, + str(updated_action.previous.action.version), str(updated_action.updated.version)) + + table.sort_rows(0, 1) + Log.info('The following GitHub Actions have been updated:\n\n%s', str(table)) + else: + Log.info('No GitHub Actions have been updated.') diff --git a/build_system/targets/dependencies/python/__init__.py b/build_system/targets/dependencies/python/__init__.py new file mode 100644 index 0000000000..bc8ce0bc3e --- /dev/null +++ b/build_system/targets/dependencies/python/__init__.py @@ -0,0 +1,36 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for installing Python dependencies that are 
class PythonDependencyModule(Module):
    """
    A module that provides access to Python requirements files.
    """

    class Filter(Module.Filter):
        """
        A filter that matches modules of type `PythonDependencyModule`.
        """

        def __init__(self, *dependency_types: DependencyType):
            """
            :param dependency_types: The type of the Python dependencies of the modules to be matched or None, if no
                                     restrictions should be imposed on the types of dependencies
            """
            self.dependency_types = set(dependency_types)

        def matches(self, module: Module) -> bool:
            if not isinstance(module, PythonDependencyModule):
                return False

            # An empty set of dependency types matches any module.
            return not self.dependency_types or module.dependency_type in self.dependency_types

    def __init__(self,
                 dependency_type: DependencyType,
                 root_directory: str,
                 requirements_file_search: FileSearch = FileSearch()):
        """
        :param dependency_type:          The type of the Python dependencies
        :param root_directory:           The path to the module's root directory
        :param requirements_file_search: The `FileSearch` that should be used to search for requirements files
        """
        self.dependency_type = dependency_type
        self.root_directory = root_directory
        self.requirements_file_search = requirements_file_search

    def find_requirements_files(self) -> List[str]:
        """
        Finds and returns all requirements files that belong to the module.

        :return: A list that contains the paths of the requirements files that have been found
        """
        search = self.requirements_file_search
        return search.filter_by_name('requirements.txt').list(self.root_directory)

    def __str__(self) -> str:
        return (f'PythonDependencyModule {{dependency_type="{self.dependency_type.value}", '
                f'root_directory="{self.root_directory}"}}')
+""" +from dataclasses import dataclass +from typing import Set + +from util.pip import Package, Pip, Requirement + + +@dataclass +class Dependency: + """ + Provides information about a dependency. + + Attributes: + installed: The version of the dependency that is currently installed + latest: The latest version of the dependency + """ + installed: Requirement + latest: Requirement + + def __eq__(self, other: 'Dependency') -> bool: + return self.installed == other.installed + + def __hash__(self) -> int: + return hash(self.installed) + + +class PipList(Pip): + """ + Allows to list installed Python packages via pip. + """ + + class ListCommand(Pip.Command): + """ + Allows to list information about installed packages via the command `pip list`. + """ + + def __init__(self, outdated: bool = False): + """ + :param outdated: True, if only outdated packages should be listed, False otherwise + """ + super().__init__('list') + self.add_conditional_arguments(outdated, '--outdated') + + def install_all_packages(self): + """ + Installs all dependencies in the requirements file. + """ + for requirement in self.requirements.requirements: + Pip.install_requirement(requirement, dry_run=True) + + def list_outdated_dependencies(self) -> Set[Dependency]: + """ + Returns all outdated Python dependencies that are currently installed. 
+ """ + stdout = PipList.ListCommand(outdated=True).print_command(False).capture_output() + stdout_lines = stdout.strip().split('\n') + i = 0 + + for line in stdout_lines: + i += 1 + + if line.startswith('----'): + break + + outdated_dependencies = set() + + for line in stdout_lines[i:]: + parts = line.split() + + if len(parts) < 3: + raise ValueError( + 'Output of command "pip list" is expected to be a table with at least three columns, but got:' + + line) + + package = Package(parts[0]) + requirement = self.requirements.lookup_requirement(package, accept_missing=True) + + if requirement and requirement.version: + installed_version = parts[1] + latest_version = parts[2] + outdated_dependencies.add( + Dependency(installed=Requirement(package, version=installed_version), + latest=Requirement(package, version=latest_version))) + + return outdated_dependencies diff --git a/build_system/targets/dependencies/python/targets.py b/build_system/targets/dependencies/python/targets.py new file mode 100644 index 0000000000..bdb7b0d752 --- /dev/null +++ b/build_system/targets/dependencies/python/targets.py @@ -0,0 +1,58 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for installing runtime requirements that are required by the project's source code. +""" +from functools import reduce +from typing import List + +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import PhonyTarget +from util.log import Log + +from targets.dependencies.python.modules import DependencyType, PythonDependencyModule +from targets.dependencies.python.pip import PipList +from targets.dependencies.table import Table + + +class InstallRuntimeDependencies(PhonyTarget.Runnable): + """ + Installs all runtime dependencies that are required by the project's source code. 
+ """ + + def __init__(self): + super().__init__(PythonDependencyModule.Filter(DependencyType.RUNTIME)) + + def run_all(self, _: BuildUnit, modules: List[Module]): + requirements_files = reduce(lambda aggr, module: aggr + module.find_requirements_files(), modules, []) + PipList(*requirements_files).install_all_packages() + + +class CheckPythonDependencies(PhonyTarget.Runnable): + """ + Installs all Python dependencies used by the project and checks for outdated ones. + """ + + def __init__(self): + super().__init__(PythonDependencyModule.Filter()) + + def run_all(self, build_unit: BuildUnit, modules: List[Module]): + requirements_files = reduce(lambda aggr, module: aggr + module.find_requirements_files(), modules, []) + pip = PipList(*requirements_files) + Log.info('Installing all dependencies...') + pip.install_all_packages() + Log.info('Checking for outdated dependencies...') + outdated_dependencies = pip.list_outdated_dependencies() + + if outdated_dependencies: + table = Table(build_unit, 'Dependency', 'Installed version', 'Latest version') + + for outdated_dependency in outdated_dependencies: + table.add_row(str(outdated_dependency.installed.package), outdated_dependency.installed.version, + outdated_dependency.latest.version) + + table.sort_rows(0, 1) + Log.info('The following dependencies are outdated:\n\n%s', str(table)) + else: + Log.info('All dependencies are up-to-date!') diff --git a/build_system/targets/dependencies/requirements.txt b/build_system/targets/dependencies/requirements.txt new file mode 100644 index 0000000000..8cd36735df --- /dev/null +++ b/build_system/targets/dependencies/requirements.txt @@ -0,0 +1 @@ +tabulate >= 0.9, < 0.10 diff --git a/build_system/targets/dependencies/table.py b/build_system/targets/dependencies/table.py new file mode 100644 index 0000000000..cb7a0f01d0 --- /dev/null +++ b/build_system/targets/dependencies/table.py @@ -0,0 +1,45 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes for 
class Table:
    """
    A table with optional headers.
    """

    def __init__(self, build_unit: BuildUnit, *headers: str):
        """
        :param build_unit: The build unit, the table is created for
        :param headers:    The headers of the table
        """
        self.build_unit = build_unit
        self.headers = list(headers) if headers else None
        self.rows = []

    def add_row(self, *entries: str):
        """
        Adds a new row to the end of the table.

        :param entries: The entries of the row to be added
        """
        self.rows.append(list(entries))

    def sort_rows(self, column_index: int, *additional_column_indices: int):
        """
        Sorts the rows in the table.

        :param column_index:              The index of the column to sort by
        :param additional_column_indices: Additional indices of columns to sort by
        """
        sort_columns = [column_index, *additional_column_indices]
        self.rows.sort(key=lambda row: tuple(row[column] for column in sort_columns))

    def __str__(self) -> str:
        # "tabulate" is installed lazily, as it is only needed when the table is actually rendered.
        Pip.for_build_unit(self.build_unit).install_packages('tabulate')
        # pylint: disable=import-outside-toplevel
        from tabulate import tabulate
        return tabulate(self.rows, headers=self.headers)
+""" +from os import path + +from core.build_unit import BuildUnit +from core.targets import TargetBuilder + +from targets.documentation.cpp import APIDOC_CPP, APIDOC_CPP_INDEX +from targets.documentation.modules import SphinxModule +from targets.documentation.python import APIDOC_PYTHON, APIDOC_PYTHON_INDEX +from targets.documentation.targets import BuildDocumentation +from targets.paths import Project + +APIDOC_INDEX = 'apidoc_index' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target('apidoc') \ + .depends_on(APIDOC_CPP, APIDOC_PYTHON, clean_dependencies=True) \ + .nop() \ + .add_phony_target(APIDOC_INDEX) \ + .depends_on(APIDOC_CPP_INDEX, APIDOC_PYTHON_INDEX, clean_dependencies=True) \ + .nop() \ + .add_build_target('doc') \ + .depends_on(APIDOC_INDEX, clean_dependencies=True) \ + .set_runnables(BuildDocumentation()) \ + .build() + +MODULES = [ + SphinxModule( + root_directory=Project.Documentation.root_directory, + output_directory=path.join(Project.Documentation.root_directory, Project.Documentation.build_directory_name), + source_file_search=Project.Documentation.file_search(), + ), +] diff --git a/doc/Doxyfile b/build_system/targets/documentation/cpp/Doxyfile similarity index 100% rename from doc/Doxyfile rename to build_system/targets/documentation/cpp/Doxyfile diff --git a/build_system/targets/documentation/cpp/__init__.py b/build_system/targets/documentation/cpp/__init__.py new file mode 100644 index 0000000000..4ad6c586f1 --- /dev/null +++ b/build_system/targets/documentation/cpp/__init__.py @@ -0,0 +1,36 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for generating API documentations for C++ code. 
+""" +from os import path + +from core.build_unit import BuildUnit +from core.targets import TargetBuilder + +from targets.documentation.cpp.modules import CppApidocModule +from targets.documentation.cpp.targets import ApidocCpp, ApidocIndexCpp +from targets.paths import Project + +APIDOC_CPP = 'apidoc_cpp' + +APIDOC_CPP_INDEX = 'apidoc_cpp_index' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_build_target(APIDOC_CPP) \ + .set_runnables(ApidocCpp()) \ + .add_build_target(APIDOC_CPP_INDEX) \ + .depends_on(APIDOC_CPP) \ + .set_runnables(ApidocIndexCpp()) \ + .build() + +MODULES = [ + CppApidocModule( + root_directory=path.dirname(meson_file), + output_directory=path.join(Project.Documentation.apidoc_directory, 'cpp', + path.basename(path.dirname(meson_file))), + project_name=path.basename(path.dirname(meson_file)), + include_directory_name='include', + ) for meson_file in Project.Cpp.file_search().filter_by_name('meson.build').list(Project.Cpp.root_directory) + if path.isdir(path.join(path.dirname(meson_file), 'include')) +] diff --git a/build_system/targets/documentation/cpp/breathe_apidoc.py b/build_system/targets/documentation/cpp/breathe_apidoc.py new file mode 100644 index 0000000000..7e54cd5a55 --- /dev/null +++ b/build_system/targets/documentation/cpp/breathe_apidoc.py @@ -0,0 +1,30 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "breathe-apidoc". +""" +from os import path + +from core.build_unit import BuildUnit +from util.run import Program + +from targets.documentation.cpp.modules import CppApidocModule + + +class BreatheApidoc(Program): + """ + Allows to run the external program "breathe-apidoc". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CppApidocModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__('breathe-apidoc', '--members', '--project', module.project_name, '-g', 'file', '-o', + module.output_directory, path.join(module.output_directory, 'xml')) + self.module = module + self.print_arguments(True) + self.install_program(False) + self.add_dependencies('breathe') + self.set_build_unit(build_unit) diff --git a/build_system/targets/documentation/cpp/doxygen.py b/build_system/targets/documentation/cpp/doxygen.py new file mode 100644 index 0000000000..4561a274b2 --- /dev/null +++ b/build_system/targets/documentation/cpp/doxygen.py @@ -0,0 +1,44 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "doxygen". +""" +from os import environ, path +from typing import Dict + +from core.build_unit import BuildUnit +from util.env import set_env +from util.io import create_directories +from util.run import Program + +from targets.documentation.cpp.modules import CppApidocModule + + +class Doxygen(Program): + """ + Allows to run the external program "doxygen". 
+ """ + + @staticmethod + def __create_environment(module: CppApidocModule) -> Dict: + env = environ.copy() + set_env(env, 'DOXYGEN_PROJECT_NAME', 'libmlrl' + module.project_name) + set_env(env, 'DOXYGEN_INPUT_DIR', module.include_directory) + set_env(env, 'DOXYGEN_OUTPUT_DIR', module.output_directory) + set_env(env, 'DOXYGEN_PREDEFINED', 'MLRL' + module.project_name.upper() + '_API=') + return env + + def __init__(self, build_unit: BuildUnit, module: CppApidocModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__('doxygen', path.join(build_unit.root_directory, 'Doxyfile')) + self.module = module + self.print_arguments(True) + self.install_program(False) + self.use_environment(self.__create_environment(module)) + self.set_build_unit(build_unit) + + def _before(self): + create_directories(self.module.output_directory) diff --git a/build_system/targets/documentation/cpp/modules.py b/build_system/targets/documentation/cpp/modules.py new file mode 100644 index 0000000000..69d0bb5b6b --- /dev/null +++ b/build_system/targets/documentation/cpp/modules.py @@ -0,0 +1,69 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to C++ code for which an API documentation can be generated. +""" +from os import path +from typing import List + +from core.modules import Module +from util.files import FileSearch, FileType + +from targets.documentation.modules import ApidocModule + + +class CppApidocModule(ApidocModule): + """ + A module that provides access to C++ code for which an API documentation can be generated. + """ + + class Filter(ApidocModule.Filter): + """ + A filter that matches modules of type `CppApidocModule`. 
+ """ + + def matches(self, module: Module) -> bool: + return isinstance(module, CppApidocModule) + + def __init__(self, + root_directory: str, + output_directory: str, + project_name: str, + include_directory_name: str, + header_file_search: FileSearch = FileSearch().set_recursive(True)): + """ + :param root_directory: The path to the module's root directory + :param output_directory: The path to the directory where the API documentation should be stored + :param project_name: The name of the C++ project to be documented + :param include_directory_name: The name of the directory that contains the header files to be included in the + API documentation + :param header_file_search: The `FileSearch` that should be used to search for the header files to be + included in the API documentation + """ + super().__init__(output_directory) + self.root_directory = root_directory + self.project_name = project_name + self.include_directory_name = include_directory_name + self.header_file_search = header_file_search + + @property + def include_directory(self) -> str: + """ + The path to the directory that contains the header files to be included in the API documentation. + """ + return path.join(self.root_directory, self.include_directory_name) + + def find_header_files(self) -> List[str]: + """ + Finds and returns the header files to be included in the API documentation. 
+ + :return: A list that contains the header files that have been found + """ + return self.header_file_search.filter_by_file_type(FileType.cpp()).list(self.include_directory) + + def create_reference(self) -> str: + return 'Library libmlrl' + self.project_name + ' <' + path.join(path.basename(self.output_directory), + 'filelist.rst') + '>' + + def __str__(self) -> str: + return 'CppApidocModule {root_directory="' + self.root_directory + '"}' diff --git a/build_system/targets/documentation/cpp/requirements.txt b/build_system/targets/documentation/cpp/requirements.txt new file mode 100644 index 0000000000..7e3a3f719a --- /dev/null +++ b/build_system/targets/documentation/cpp/requirements.txt @@ -0,0 +1 @@ +breathe >= 4.35, < 4.36 diff --git a/build_system/targets/documentation/cpp/targets.py b/build_system/targets/documentation/cpp/targets.py new file mode 100644 index 0000000000..44ca0cc69a --- /dev/null +++ b/build_system/targets/documentation/cpp/targets.py @@ -0,0 +1,51 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for generating API documentations for C++ code. +""" +from typing import List + +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import BuildTarget +from util.log import Log + +from targets.documentation.cpp.breathe_apidoc import BreatheApidoc +from targets.documentation.cpp.doxygen import Doxygen +from targets.documentation.cpp.modules import CppApidocModule +from targets.documentation.targets import ApidocIndex + +MODULE_FILTER = CppApidocModule.Filter() + + +class ApidocCpp(BuildTarget.Runnable): + """ + Generates API documentations for C++ code. 
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Generating C++ API documentation for directory "%s"...', module.root_directory) + Doxygen(build_unit, module).run() + BreatheApidoc(build_unit, module).run() + + def get_output_files(self, module: Module) -> List[str]: + return [module.output_directory] + + def get_input_files(self, module: Module) -> List[str]: + return module.find_header_files() + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing C++ API documentation for directory "%s"...', module.root_directory) + return super().get_clean_files(module) + + +class ApidocIndexCpp(ApidocIndex): + """ + Generates index files referencing API documentations for C++ code. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) diff --git a/build_system/targets/documentation/modules.py b/build_system/targets/documentation/modules.py new file mode 100644 index 0000000000..00c24d9ae0 --- /dev/null +++ b/build_system/targets/documentation/modules.py @@ -0,0 +1,75 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that provide access to a Sphinx documentation. +""" +from abc import ABC, abstractmethod +from typing import List + +from core.modules import Module +from util.files import FileSearch + + +class ApidocModule(Module, ABC): + """ + An abstract base class for all modules that provide access to source code for which an API documentation can be + generated. + """ + + class Filter(Module.Filter, ABC): + """ + A filter that matches modules of type `ApidocModule`. + """ + + def __init__(self, output_directory: str): + """ + :param output_directory: The path to the directory where the API documentation should be stored + """ + self.output_directory = output_directory + + @abstractmethod + def create_reference(self) -> str: + """ + Must be implemented by subclasses in order to create a reference to API documentation. 
+ + :return: The reference that has been created + """ + + +class SphinxModule(Module): + """ + A module that provides access to a Sphinx documentation. + """ + + class Filter(Module.Filter): + """ + A filter that matches modules of type `SphinxModule`. + """ + + def matches(self, module: Module) -> bool: + return isinstance(module, SphinxModule) + + def __init__(self, + root_directory: str, + output_directory: str, + source_file_search: FileSearch = FileSearch().set_recursive(True)): + """ + :param root_directory: The path to the module's root directory + :param output_directory: The path to the directory where the documentation should be stored + :param source_file_search: The `FileSearch` that should be used to search for the source files of the + documentation + """ + self.root_directory = root_directory + self.output_directory = output_directory + self.source_file_search = source_file_search + + def find_source_files(self) -> List[str]: + """ + Finds and returns all source files of the documentation. + + :return: A list that contains the source files that have been found + """ + return self.source_file_search.list(self.root_directory) + + def __str__(self) -> str: + return 'SphinxModule {root_directory="' + self.root_directory + '"}' diff --git a/build_system/targets/documentation/python/__init__.py b/build_system/targets/documentation/python/__init__.py new file mode 100644 index 0000000000..ec1df9e115 --- /dev/null +++ b/build_system/targets/documentation/python/__init__.py @@ -0,0 +1,36 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for generating API documentations for Python code. 
+""" +from os import path + +from core.build_unit import BuildUnit +from core.targets import TargetBuilder + +from targets.documentation.python.modules import PythonApidocModule +from targets.documentation.python.targets import ApidocIndexPython, ApidocPython +from targets.packaging import INSTALL_WHEELS +from targets.paths import Project + +APIDOC_PYTHON = 'apidoc_python' + +APIDOC_PYTHON_INDEX = 'apidoc_python_index' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_build_target(APIDOC_PYTHON) \ + .depends_on(INSTALL_WHEELS) \ + .set_runnables(ApidocPython()) \ + .add_build_target(APIDOC_PYTHON_INDEX) \ + .depends_on(APIDOC_PYTHON) \ + .set_runnables(ApidocIndexPython()) \ + .build() + +MODULES = [ + PythonApidocModule(root_directory=path.dirname(setup_file), + output_directory=path.join(Project.Documentation.apidoc_directory, 'python', + path.basename(path.dirname(setup_file))), + source_directory_name='mlrl', + source_file_search=Project.Python.file_search()) + for setup_file in Project.Python.file_search().filter_by_name('setup.py').list(Project.Python.root_directory) +] diff --git a/build_system/targets/documentation/python/modules.py b/build_system/targets/documentation/python/modules.py new file mode 100644 index 0000000000..9c81fbbbe3 --- /dev/null +++ b/build_system/targets/documentation/python/modules.py @@ -0,0 +1,67 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to Python code for which an API documentation can be generated. +""" +from os import path +from typing import List + +from core.modules import Module +from util.files import FileSearch, FileType + +from targets.documentation.modules import ApidocModule + + +class PythonApidocModule(ApidocModule): + """ + A module that provides access to Python code for which an API documentation can be generated. + """ + + class Filter(ApidocModule.Filter): + """ + A filter that matches modules of type `PythonApidocModule`. 
+ """ + + def matches(self, module: Module) -> bool: + return isinstance(module, PythonApidocModule) + + def __init__(self, + root_directory: str, + output_directory: str, + source_directory_name: str, + source_file_search: FileSearch = FileSearch().set_recursive(True)): + """ + :param root_directory: The path to the module's root directory + :param output_directory: The path to the directory where the API documentation should be stored + :param source_directory_name: The name of the directory that contains the Python source files to be included + in the API documentation + :param source_file_search: The `FileSearch` that should be used to search for the header files to be + included in the API documentation + """ + super().__init__(output_directory) + self.root_directory = root_directory + self.source_directory_name = source_directory_name + self.source_file_search = source_file_search + + @property + def source_directory(self) -> str: + """ + The path to the directory that contains the Python source files to be included in the API documentation. + """ + return path.join(self.root_directory, self.source_directory_name) + + def find_source_files(self) -> List[str]: + """ + Finds and returns the Python source files to be included in the API documentation. + + :return: A list that contains the source files that have been found + """ + return self.source_file_search.filter_by_file_type(FileType.python()).list(self.source_directory) + + def create_reference(self) -> str: + project_name = path.basename(self.output_directory) + return 'Package mlrl-' + path.basename(self.output_directory) + ' <' + path.join( + project_name, self.source_directory_name + '.' 
+ project_name + '.rst') + '>' + + def __str__(self) -> str: + return 'PythonApidocModule {root_directory="' + self.root_directory + '"}' diff --git a/build_system/targets/documentation/python/requirements.txt b/build_system/targets/documentation/python/requirements.txt new file mode 100644 index 0000000000..d35e56e846 --- /dev/null +++ b/build_system/targets/documentation/python/requirements.txt @@ -0,0 +1 @@ +sphinx >= 7.4, < 7.5 diff --git a/build_system/targets/documentation/python/sphinx_apidoc.py b/build_system/targets/documentation/python/sphinx_apidoc.py new file mode 100644 index 0000000000..2827b09f98 --- /dev/null +++ b/build_system/targets/documentation/python/sphinx_apidoc.py @@ -0,0 +1,41 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "sphinx-apidoc". +""" +from os import path + +from core.build_unit import BuildUnit +from util.files import FileType +from util.io import create_directories, delete_files +from util.run import Program + +from targets.documentation.python.modules import PythonApidocModule + + +class SphinxApidoc(Program): + """ + Allows to run the external program "sphinx-apidoc". + """ + + def __init__(self, build_unit: BuildUnit, module: PythonApidocModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__('sphinx-apidoc', '--separate', '--module-first', '--no-toc', '-o', module.output_directory, + module.source_directory, + *['*.' + suffix + '*' for suffix in FileType.extension_module().suffixes], + *['*.' 
+ suffix + '*' for suffix in FileType.shared_library().suffixes]) + self.module = module + self.print_arguments(True) + self.install_program(False) + self.add_dependencies('sphinx') + self.set_build_unit(build_unit) + + def _before(self): + create_directories(self.module.output_directory) + + def _after(self): + root_rst_file = path.join(self.module.output_directory, path.basename(self.module.source_directory) + '.rst') + delete_files(root_rst_file, accept_missing=False) diff --git a/build_system/targets/documentation/python/targets.py b/build_system/targets/documentation/python/targets.py new file mode 100644 index 0000000000..8cc20ae380 --- /dev/null +++ b/build_system/targets/documentation/python/targets.py @@ -0,0 +1,49 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for generating API documentations for Python code. +""" +from typing import List + +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import BuildTarget +from util.log import Log + +from targets.documentation.python.modules import PythonApidocModule +from targets.documentation.python.sphinx_apidoc import SphinxApidoc +from targets.documentation.targets import ApidocIndex + +MODULE_FILTER = PythonApidocModule.Filter() + + +class ApidocPython(BuildTarget.Runnable): + """ + Generates API documentations for Python code. 
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Generating Python API documentation for directory "%s"...', module.root_directory) + SphinxApidoc(build_unit, module).run() + + def get_output_files(self, module: Module) -> List[str]: + return [module.output_directory] + + def get_input_files(self, module: Module) -> List[str]: + return module.find_source_files() + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing Python API documentation for directory "%s"...', module.root_directory) + return super().get_clean_files(module) + + +class ApidocIndexPython(ApidocIndex): + """ + Generates index files referencing API documentations for Python code. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) diff --git a/doc/requirements.txt b/build_system/targets/documentation/requirements.txt similarity index 88% rename from doc/requirements.txt rename to build_system/targets/documentation/requirements.txt index d7d7050e55..21d52227e2 100644 --- a/doc/requirements.txt +++ b/build_system/targets/documentation/requirements.txt @@ -1,4 +1,3 @@ -breathe >= 4.35, < 4.36 furo == 2024.8.6 myst-parser >= 4.0, < 4.1 sphinx >= 7.4, < 7.5 diff --git a/build_system/targets/documentation/sphinx_build.py b/build_system/targets/documentation/sphinx_build.py new file mode 100644 index 0000000000..238488637c --- /dev/null +++ b/build_system/targets/documentation/sphinx_build.py @@ -0,0 +1,38 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run the external program "sphinx-build". +""" +from os import path + +from core.build_unit import BuildUnit +from util.run import Program + +from targets.documentation.cpp.modules import CppApidocModule + + +class SphinxBuild(Program): + """ + Allows to run the external program "sphinx-build". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: CppApidocModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__('sphinx-build', '--jobs', 'auto', module.root_directory, + path.join(module.output_directory, 'html')) + self.module = module + self.print_arguments(True) + self.install_program(False) + self.add_dependencies( + 'furo', + 'myst-parser', + 'sphinx', + 'sphinx-copybutton', + 'sphinx-favicon', + 'sphinx-inline-tabs', + 'sphinxext-opengraph', + ) + self.set_build_unit(build_unit) diff --git a/build_system/targets/documentation/targets.py b/build_system/targets/documentation/targets.py new file mode 100644 index 0000000000..41c6921ad7 --- /dev/null +++ b/build_system/targets/documentation/targets.py @@ -0,0 +1,101 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for generating documentations. +""" +from abc import ABC +from os import path +from typing import Dict, List, Optional + +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import BuildTarget +from util.io import TextFile +from util.log import Log + +from targets.documentation.modules import ApidocModule, SphinxModule +from targets.documentation.sphinx_build import SphinxBuild + + +class ApidocIndex(BuildTarget.Runnable, ABC): + """ + An abstract base class for all targets that generate index files referencing API documentations. 
+ """ + + @staticmethod + def __get_template(module: ApidocModule) -> Optional[str]: + parent_directory = path.dirname(module.output_directory) + template = path.join(parent_directory, 'index.md.template') + return template if path.isfile(template) else None + + @staticmethod + def __get_templates_and_modules(modules: List[ApidocModule]) -> Dict[str, List[ApidocModule]]: + modules_by_template = {} + + for module in modules: + template = ApidocIndex.__get_template(module) + + if template: + modules_in_directory = modules_by_template.setdefault(template, []) + modules_in_directory.append(module) + + return modules_by_template + + @staticmethod + def __index_file(template: str) -> str: + return path.join(path.dirname(template), 'index.md') + + def __init__(self, module_filter: ApidocModule.Filter): + """ + :param module_filter: A filter that matches the modules, the target should be applied to + """ + super().__init__(module_filter) + + def run_all(self, _: BuildUnit, modules: List[Module]): + for template, modules_in_directory in self.__get_templates_and_modules(modules).items(): + Log.info('Generating index file referencing API documentations from template "%s"...', template) + references = [module.create_reference() + '\n' for module in modules_in_directory] + new_lines = [] + + for line in TextFile(template).lines: + if line.strip() == '%s': + new_lines.extend(references) + else: + new_lines.append(line) + + TextFile(self.__index_file(template), accept_missing=True).write_lines(*new_lines) + + def get_input_files(self, module: Module) -> List[str]: + template = self.__get_template(module) + return [template] if template else [] + + def get_output_files(self, module: Module) -> List[str]: + template = self.__get_template(module) + return [self.__index_file(template)] if template else [] + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing index file referencing API documentation in directory "%s"', module.output_directory) + return 
super().get_clean_files(module) + + +class BuildDocumentation(BuildTarget.Runnable): + """ + Generates documentations. + """ + + def __init__(self): + super().__init__(SphinxModule.Filter()) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Generating documentation for directory "%s"...', module.root_directory) + SphinxBuild(build_unit, module).run() + + def get_input_files(self, module: Module) -> List[str]: + return module.find_source_files() + + def get_output_files(self, module: Module) -> List[str]: + return [module.output_directory] + + def get_clean_files(self, module: Module) -> List[str]: + Log.info('Removing documentation generated for directory "%s"...', module.root_directory) + return super().get_clean_files(module) diff --git a/build_system/targets/packaging/__init__.py b/build_system/targets/packaging/__init__.py new file mode 100644 index 0000000000..c7a16eed34 --- /dev/null +++ b/build_system/targets/packaging/__init__.py @@ -0,0 +1,34 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for building and installing Python wheel packages. 
+""" +from os import path + +from core.build_unit import BuildUnit +from core.targets import TargetBuilder + +from targets.compilation import INSTALL +from targets.packaging.modules import PythonPackageModule +from targets.packaging.targets import BuildPythonWheels, InstallPythonWheels +from targets.paths import Project + +BUILD_WHEELS = 'build_wheels' + +INSTALL_WHEELS = 'install_wheels' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_build_target(BUILD_WHEELS) \ + .depends_on(INSTALL) \ + .set_runnables(BuildPythonWheels()) \ + .add_build_target(INSTALL_WHEELS) \ + .depends_on(BUILD_WHEELS) \ + .set_runnables(InstallPythonWheels()) \ + .build() + +MODULES = [ + PythonPackageModule( + root_directory=path.dirname(setup_file), + wheel_directory_name=Project.Python.wheel_directory_name, + ) for setup_file in Project.Python.file_search().filter_by_name('setup.py').list(Project.Python.root_directory) +] diff --git a/build_system/targets/packaging/build.py b/build_system/targets/packaging/build.py new file mode 100644 index 0000000000..bf67ae54ea --- /dev/null +++ b/build_system/targets/packaging/build.py @@ -0,0 +1,25 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to build wheel packages via the external program "build". +""" +from core.build_unit import BuildUnit +from util.run import PythonModule + +from targets.packaging.modules import PythonPackageModule + + +class Build(PythonModule): + """ + Allows to run the external program "build". 
+ """ + + def __init__(self, build_unit: BuildUnit, module: PythonPackageModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__('build', '--no-isolation', '--wheel', module.root_directory) + self.print_arguments(True) + self.add_dependencies('wheel', 'setuptools') + self.set_build_unit(build_unit) diff --git a/build_system/targets/packaging/modules.py b/build_system/targets/packaging/modules.py new file mode 100644 index 0000000000..e3015a098f --- /dev/null +++ b/build_system/targets/packaging/modules.py @@ -0,0 +1,50 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to Python code that can be built as wheel packages. +""" +from os import path +from typing import List + +from core.modules import Module +from util.files import FileSearch + + +class PythonPackageModule(Module): + """ + A module that provides access to Python code that can be built as wheel packages. + """ + + class Filter(Module.Filter): + """ + A filter that matches modules of type `PythonPackageModule`. + """ + + def matches(self, module: Module) -> bool: + return isinstance(module, PythonPackageModule) + + def __init__(self, root_directory: str, wheel_directory_name: str): + """ + :param root_directory: The path to the module's root directory + :param wheel_directory_name: The name of the directory that contains wheel packages + """ + self.root_directory = root_directory + self.wheel_directory_name = wheel_directory_name + + @property + def wheel_directory(self) -> str: + """ + Returns the path of the directory that contains the wheel packages that have been built for the module. + """ + return path.join(self.root_directory, self.wheel_directory_name) + + def find_wheels(self) -> List[str]: + """ + Finds and returns all wheel packages that have been built for the module. 
+ + :return: A list that contains the paths to the wheel packages + """ + return FileSearch().filter_by_suffix('whl').list(self.wheel_directory) + + def __str__(self) -> str: + return 'PythonPackageModule {root_directory="' + self.root_directory + '"}' diff --git a/build_system/targets/packaging/pip.py b/build_system/targets/packaging/pip.py new file mode 100644 index 0000000000..ac4456743f --- /dev/null +++ b/build_system/targets/packaging/pip.py @@ -0,0 +1,29 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes for installing wheel packages via pip. +""" +from util.pip import Pip + + +class PipInstallWheel(Pip): + """ + Allows to install wheel packages via pip. + """ + + class InstallWheelCommand(Pip.Command): + """ + Allows to install wheel packages via the command `pip install`. + """ + + def __init__(self, *wheels: str): + """ + :param wheels: The paths to the wheel packages to be installed + """ + super().__init__('install', '--force-reinstall', '--no-deps', *wheels) + + def install_wheels(self, *wheels: str): + """ + Installs several wheel packages. + """ + PipInstallWheel.InstallWheelCommand(*wheels).print_arguments(True).run() diff --git a/build_system/targets/packaging/requirements.txt b/build_system/targets/packaging/requirements.txt new file mode 100644 index 0000000000..a2cb5b645e --- /dev/null +++ b/build_system/targets/packaging/requirements.txt @@ -0,0 +1,3 @@ +build >= 1.2, < 1.3 +setuptools +wheel >= 0.45, < 0.46 diff --git a/build_system/targets/packaging/targets.py b/build_system/targets/packaging/targets.py new file mode 100644 index 0000000000..5f4c758f74 --- /dev/null +++ b/build_system/targets/packaging/targets.py @@ -0,0 +1,70 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for building and installing wheel packages. 
+""" +from typing import List + +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import BuildTarget +from util.files import DirectorySearch, FileType +from util.log import Log + +from targets.packaging.build import Build +from targets.packaging.modules import PythonPackageModule +from targets.packaging.pip import PipInstallWheel +from targets.paths import Project + +MODULE_FILTER = PythonPackageModule.Filter() + + +class BuildPythonWheels(BuildTarget.Runnable): + """ + Builds Python wheel packages. + """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, build_unit: BuildUnit, module: Module): + Log.info('Building Python wheels for directory "%s"...', module.root_directory) + Build(build_unit, module).run() + + def get_input_files(self, module: Module) -> List[str]: + file_search = Project.Python.file_search() \ + .set_symlinks(False) \ + .exclude_subdirectories_by_name(Project.Python.test_directory_name) \ + .filter_by_file_type(FileType.python(), FileType.extension_module(), FileType.shared_library()) + return file_search.list(module.root_directory) + + def get_output_files(self, module: Module) -> List[str]: + return [module.wheel_directory] + + def get_clean_files(self, module: Module) -> List[str]: + clean_files = [] + Log.info('Removing Python wheels from directory "%s"...', module.root_directory) + clean_files.append(module.wheel_directory) + clean_files.extend( + DirectorySearch() \ + .filter_by_name(Project.Python.build_directory_name) \ + .filter_by_substrings(ends_with=Project.Python.wheel_metadata_directory_suffix) \ + .list(module.root_directory) + ) + return clean_files + + +class InstallPythonWheels(BuildTarget.Runnable): + """ + Installs Python wheel packages. 
+ """ + + def __init__(self): + super().__init__(MODULE_FILTER) + + def run(self, _: BuildUnit, module: Module): + Log.info('Installing Python wheels for directory "%s"...', module.root_directory) + PipInstallWheel().install_wheels(*module.find_wheels()) + + def get_input_files(self, module: Module) -> List[str]: + return module.find_wheels() diff --git a/build_system/targets/paths.py b/build_system/targets/paths.py new file mode 100644 index 0000000000..4da0f9aa37 --- /dev/null +++ b/build_system/targets/paths.py @@ -0,0 +1,139 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides paths within the project that are important for the build system. +""" +from os import path + +from core.build_unit import BuildUnit +from util.files import FileSearch + + +class Project: + """ + Provides paths within the project. + + Attributes: + root_directory: The path to the project's root directory + """ + + root_directory = '.' + + class BuildSystem: + """ + Provides paths within the project's build system. + + Attributes: + root_directory: The path to the build system's root directory + build_directory_name: The name of the build system's build directory + """ + + root_directory = BuildUnit.BUILD_SYSTEM_DIRECTORY + + build_directory_name = BuildUnit.BUILD_DIRECTORY_NAME + + @staticmethod + def file_search() -> FileSearch: + """ + Creates and returns a `FileSearch` that allows searching for files within the build system. + + :return: The `FileSearch` that has been created + """ + return FileSearch() \ + .set_recursive(True) \ + .exclude_subdirectories_by_name(Project.BuildSystem.build_directory_name) + + class Python: + """ + Provides paths within the project's Python code. 
+ + Attributes: + root_directory: The path to the Python code's root directory + build_directory_name: The name of the Python code's build directory + test_directory_name: The name of the directory that contains tests + wheel_directory_name: The name of the directory that contains wheel packages + wheel_metadata_directory_suffix: The suffix of the directory that contains the metadata of wheel packages + """ + + root_directory = 'python' + + build_directory_name = 'build' + + test_directory_name = 'tests' + + wheel_directory_name = 'dist' + + wheel_metadata_directory_suffix = '.egg-info' + + @staticmethod + def file_search() -> FileSearch: + """ + Creates and returns a `FileSearch` that allows searching for files within the Python code. + + :return: The `FileSearch` that has been created + """ + return FileSearch() \ + .set_recursive(True) \ + .exclude_subdirectories_by_name(Project.Python.build_directory_name) \ + .exclude_subdirectories_by_name(Project.Python.wheel_directory_name) \ + .exclude_subdirectories_by_name('__pycache__') \ + .exclude_subdirectories_by_substrings(ends_with=Project.Python.wheel_metadata_directory_suffix) + + class Cpp: + """ + Provides paths within the project's C++ code. + + Attributes: + root_directory: The path to the C++ code's root directory + build_directory_name: The name of the C++ code's build directory + """ + + root_directory = 'cpp' + + build_directory_name = 'build' + + @staticmethod + def file_search() -> FileSearch: + """ + Creates and returns a `FileSearch` that allows searching for files within the C++ code. + + :return: The `FileSearch` that has been created + """ + return FileSearch() \ + .set_recursive(True) \ + .exclude_subdirectories_by_name(Project.Cpp.build_directory_name) + + class Documentation: + """ + Provides paths within the project's documentation. 
+ + Attributes: + root_directory: The path to the documentation's root directory + """ + + root_directory = 'doc' + + apidoc_directory = path.join(root_directory, 'developer_guide', 'api') + + build_directory_name = '_build' + + @staticmethod + def file_search() -> FileSearch: + """ + Creates and returns a `FileSearch` that allows searching for files within the documentation. + + :return: The `FileSearch` that has been created + """ + return FileSearch() \ + .set_recursive(True) \ + .exclude_subdirectories_by_name(Project.Documentation.build_directory_name) + + class Github: + """ + Provides paths within the project's GitHub-related files. + + Attributes: + root_directory: The path to the root directory that contains all GitHub-related files + """ + + root_directory = '.github' diff --git a/build_system/targets/testing/__init__.py b/build_system/targets/testing/__init__.py new file mode 100644 index 0000000000..e1a567b3ec --- /dev/null +++ b/build_system/targets/testing/__init__.py @@ -0,0 +1,16 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets for testing code. +""" +from core.build_unit import BuildUnit +from core.targets import TargetBuilder + +from targets.testing.cpp import TESTS_CPP +from targets.testing.python import TESTS_PYTHON + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target('tests') \ + .depends_on(TESTS_CPP, TESTS_PYTHON) \ + .nop() \ + .build() diff --git a/build_system/targets/testing/cpp/__init__.py b/build_system/targets/testing/cpp/__init__.py new file mode 100644 index 0000000000..f06278511d --- /dev/null +++ b/build_system/targets/testing/cpp/__init__.py @@ -0,0 +1,27 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for testing C++ code. 
+""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder + +from targets.compilation.cpp import COMPILE_CPP +from targets.paths import Project +from targets.testing.cpp.modules import CppTestModule +from targets.testing.cpp.targets import TestCpp + +TESTS_CPP = 'tests_cpp' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target(TESTS_CPP) \ + .depends_on(COMPILE_CPP) \ + .set_runnables(TestCpp()) \ + .build() + +MODULES = [ + CppTestModule( + root_directory=Project.Cpp.root_directory, + build_directory_name=Project.Cpp.build_directory_name, + ), +] diff --git a/build_system/targets/testing/cpp/meson.py b/build_system/targets/testing/cpp/meson.py new file mode 100644 index 0000000000..8c8a01b9aa --- /dev/null +++ b/build_system/targets/testing/cpp/meson.py @@ -0,0 +1,24 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run automated tests via the external program "meson". +""" +from core.build_unit import BuildUnit + +from targets.compilation.meson import Meson +from targets.testing.cpp.modules import CppTestModule + + +class MesonTest(Meson): + """ + Allows to run the external program "meson test". + """ + + def __init__(self, build_unit: BuildUnit, module: CppTestModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + super().__init__(build_unit, 'test', '-C', module.build_directory, '--verbose') + self.add_conditional_arguments(module.fail_fast, '--maxfail', '1') + self.install_program(False) diff --git a/build_system/targets/testing/cpp/modules.py b/build_system/targets/testing/cpp/modules.py new file mode 100644 index 0000000000..104ea8133f --- /dev/null +++ b/build_system/targets/testing/cpp/modules.py @@ -0,0 +1,42 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to automated tests for C++ code. 
+""" +from os import path + +from core.modules import Module + +from targets.testing.modules import TestModule + + +class CppTestModule(TestModule): + """ + A module that provides access to automated tests for C++ code. + """ + + class Filter(Module.Filter): + """ + A filter that matches modules of type `CppTestModule`. + """ + + def matches(self, module: Module) -> bool: + return isinstance(module, CppTestModule) + + def __init__(self, root_directory: str, build_directory_name: str): + """ + :param root_directory: The path to the module's root directory + :param build_directory_name: The name of the module's build directory + """ + self.root_directory = root_directory + self.build_directory_name = build_directory_name + + @property + def build_directory(self) -> str: + """ + The path to the directory, where build files are stored. + """ + return path.join(self.root_directory, self.build_directory_name) + + def __str__(self) -> str: + return 'CppTestModule {root_directory="' + self.root_directory + '"}' diff --git a/build_system/targets/testing/cpp/targets.py b/build_system/targets/testing/cpp/targets.py new file mode 100644 index 0000000000..1a572260ed --- /dev/null +++ b/build_system/targets/testing/cpp/targets.py @@ -0,0 +1,23 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for testing C++ code. +""" +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import PhonyTarget + +from targets.testing.cpp.meson import MesonTest +from targets.testing.cpp.modules import CppTestModule + + +class TestCpp(PhonyTarget.Runnable): + """ + Runs automated tests for C++ code. 
+ """ + + def __init__(self): + super().__init__(CppTestModule.Filter()) + + def run(self, build_unit: BuildUnit, module: Module): + MesonTest(build_unit, module).run() diff --git a/build_system/targets/testing/modules.py b/build_system/targets/testing/modules.py new file mode 100644 index 0000000000..7e1a4d82a6 --- /dev/null +++ b/build_system/targets/testing/modules.py @@ -0,0 +1,23 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to automated tests. +""" +from abc import ABC +from os import environ + +from core.modules import Module +from util.env import get_env_bool + + +class TestModule(Module, ABC): + """ + An abstract base class for all modules that provide access to automated tests. + """ + + @property + def fail_fast(self) -> bool: + """ + True, if all tests should be skipped as soon as a single test fails, False otherwise + """ + return get_env_bool(environ, 'FAIL_FAST') diff --git a/build_system/targets/testing/python/__init__.py b/build_system/targets/testing/python/__init__.py new file mode 100644 index 0000000000..1af719d689 --- /dev/null +++ b/build_system/targets/testing/python/__init__.py @@ -0,0 +1,29 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Defines targets and modules for testing Python code. 
+""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder + +from targets.packaging import INSTALL_WHEELS +from targets.paths import Project +from targets.testing.python.modules import PythonTestModule +from targets.testing.python.targets import TestPython + +TESTS_PYTHON = 'tests_python' + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target(TESTS_PYTHON) \ + .depends_on(INSTALL_WHEELS) \ + .set_runnables(TestPython()) \ + .build() + +MODULES = [ + PythonTestModule( + root_directory=Project.Python.root_directory, + build_directory_name=Project.Python.build_directory_name, + test_file_search=Project.Python.file_search() \ + .filter_subdirectories_by_name(Project.Python.test_directory_name), + ), +] diff --git a/build_system/targets/testing/python/modules.py b/build_system/targets/testing/python/modules.py new file mode 100644 index 0000000000..d3a55dc23e --- /dev/null +++ b/build_system/targets/testing/python/modules.py @@ -0,0 +1,62 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements modules that provide access to automated tests for Python code. +""" +from os import path +from typing import List + +from core.modules import Module +from util.files import FileSearch, FileType + +from targets.testing.modules import TestModule + + +class PythonTestModule(TestModule): + """ + A module that provides access to automated tests for Python code. + """ + + class Filter(Module.Filter): + """ + A filter that matches modules of type `PythonTestModule`. 
+ """ + + def matches(self, module: Module) -> bool: + return isinstance(module, PythonTestModule) + + def __init__(self, + root_directory: str, + build_directory_name: str, + test_file_search: FileSearch = FileSearch().set_recursive(True)): + """ + :param root_directory: The path to the module's root directory + :param build_directory_name: The name of the module's build directory + :param test_file_search: The `FilesSearch` that should be used to search for test files + """ + self.root_directory = root_directory + self.build_directory_name = build_directory_name + self.test_file_search = test_file_search + + @property + def test_result_directory(self) -> str: + """ + The path of the directory where tests results should be stored. + """ + return path.join(self.root_directory, self.build_directory_name, 'test-results') + + def find_test_directories(self) -> List[str]: + """ + Finds and returns all directories that contain automated tests that belong to the module. + + :return: A list that contains the paths of the directories that have been found + """ + test_files = self.test_file_search \ + .exclude_subdirectories_by_name(self.build_directory_name) \ + .filter_by_substrings(starts_with='test_') \ + .filter_by_file_type(FileType.python()) \ + .list(self.root_directory) + return list({path.dirname(test_file) for test_file in test_files}) + + def __str__(self) -> str: + return 'PythonTestModule {root_directory="' + self.root_directory + '"}' diff --git a/build_system/targets/testing/python/requirements.txt b/build_system/targets/testing/python/requirements.txt new file mode 100644 index 0000000000..b582722d5a --- /dev/null +++ b/build_system/targets/testing/python/requirements.txt @@ -0,0 +1 @@ +unittest-xml-reporting >= 3.2, < 3.3 diff --git a/build_system/targets/testing/python/targets.py b/build_system/targets/testing/python/targets.py new file mode 100644 index 0000000000..9290267ce4 --- /dev/null +++ b/build_system/targets/testing/python/targets.py @@ -0,0 
+1,23 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Implements targets for testing Python code. +""" +from core.build_unit import BuildUnit +from core.modules import Module +from core.targets import PhonyTarget + +from targets.testing.python.modules import PythonTestModule +from targets.testing.python.unittest import UnitTest + + +class TestPython(PhonyTarget.Runnable): + """ + Runs automated tests for Python code. + """ + + def __init__(self): + super().__init__(PythonTestModule.Filter()) + + def run(self, build_unit: BuildUnit, module: Module): + UnitTest(build_unit, module).run() diff --git a/build_system/targets/testing/python/unittest.py b/build_system/targets/testing/python/unittest.py new file mode 100644 index 0000000000..b506a5729a --- /dev/null +++ b/build_system/targets/testing/python/unittest.py @@ -0,0 +1,37 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides classes that allow to run automated tests via the external program "unittest". +""" +from core.build_unit import BuildUnit +from util.run import PythonModule + +from targets.testing.python.modules import PythonTestModule + + +class UnitTest: + """ + Allows to run the external program "unittest". + """ + + def __init__(self, build_unit: BuildUnit, module: PythonTestModule): + """ + :param build_unit: The build unit from which the program should be run + :param module: The module, the program should be applied to + """ + self.build_unit = build_unit + self.module = module + + def run(self): + """ + Runs the program. 
+ """ + for test_directory in self.module.find_test_directories(): + PythonModule('xmlrunner', 'discover', '--verbose', '--start-directory', test_directory, '--output', + self.module.test_result_directory) \ + .add_conditional_arguments(self.module.fail_fast, '--failfast') \ + .print_arguments(True) \ + .install_program(False) \ + .add_dependencies('unittest-xml-reporting') \ + .set_build_unit(self.build_unit) \ + .run() diff --git a/build_system/targets/versioning/__init__.py b/build_system/targets/versioning/__init__.py new file mode 100644 index 0000000000..1e3122f770 --- /dev/null +++ b/build_system/targets/versioning/__init__.py @@ -0,0 +1,28 @@ +""" +Defines build targets for updating the project's version and changelog. +""" +from core.build_unit import BuildUnit +from core.targets import PhonyTarget, TargetBuilder + +from targets.versioning.changelog import print_latest_changelog, update_changelog_bugfix, update_changelog_feature, \ + update_changelog_main, validate_changelog_bugfix, validate_changelog_feature, validate_changelog_main +from targets.versioning.versioning import apply_development_version, increment_development_version, \ + increment_major_version, increment_minor_version, increment_patch_version, print_current_version, \ + reset_development_version + +TARGETS = TargetBuilder(BuildUnit.for_file(__file__)) \ + .add_phony_target('increment_development_version').set_functions(increment_development_version) \ + .add_phony_target('reset_development_version').set_functions(reset_development_version) \ + .add_phony_target('apply_development_version').set_functions(apply_development_version) \ + .add_phony_target('increment_patch_version').set_functions(increment_patch_version) \ + .add_phony_target('increment_minor_version').set_functions(increment_minor_version) \ + .add_phony_target('increment_major_version').set_functions(increment_major_version) \ + .add_phony_target('validate_changelog_bugfix').set_functions(validate_changelog_bugfix) \ + 
.add_phony_target('validate_changelog_feature').set_functions(validate_changelog_feature) \ + .add_phony_target('validate_changelog_main').set_functions(validate_changelog_main) \ + .add_phony_target('update_changelog_bugfix').set_functions(update_changelog_bugfix) \ + .add_phony_target('update_changelog_feature').set_functions(update_changelog_feature) \ + .add_phony_target('update_changelog_main').set_functions(update_changelog_main) \ + .add_phony_target('print_version').set_functions(print_current_version) \ + .add_phony_target('print_latest_changelog').set_functions(print_latest_changelog) \ + .build() diff --git a/build_system/targets/versioning/changelog.py b/build_system/targets/versioning/changelog.py new file mode 100644 index 0000000000..39026e6a4e --- /dev/null +++ b/build_system/targets/versioning/changelog.py @@ -0,0 +1,400 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides actions for validating and updating the project's changelog. +""" +from dataclasses import dataclass, field +from datetime import date +from enum import Enum, auto +from functools import cached_property +from typing import List, Optional + +from util.io import TextFile +from util.log import Log + +from targets.versioning.versioning import Version, get_current_version + +CHANGESET_FILE_MAIN = '.changelog-main.md' + +CHANGESET_FILE_FEATURE = '.changelog-feature.md' + +CHANGESET_FILE_BUGFIX = '.changelog-bugfix.md' + + +class LineType(Enum): + """ + Represents different types of lines that may occur in a changeset. + """ + BLANK = auto() + HEADER = auto() + ENUMERATION = auto() + + @staticmethod + def parse(line: str) -> Optional['LineType']: + """ + Parses a given line and returns its type. 
+ + :return: The type of the given line or None, if the line is invalid + """ + if not line or line.isspace(): + return LineType.BLANK + if line.startswith(Line.PREFIX_HEADER): + return LineType.HEADER + if line.startswith(Line.PREFIX_DASH) or line.startswith(Line.PREFIX_ASTERISK): + return LineType.ENUMERATION + return None + + +@dataclass +class Line: + """ + A single line in a changeset. + + Attributes: + line_number: The line number, starting at 1 + line_type: The type of the line + line: The original content of the line + content: The content of the line with Markdown keywords being stripped away + """ + line_number: int + line_type: LineType + line: str + content: str + + PREFIX_HEADER = '# ' + + PREFIX_DASH = '- ' + + PREFIX_ASTERISK = '* ' + + @staticmethod + def parse(line: str, line_number: int) -> 'Line': + """ + Parses and returns a single line in a changeset. + + :param line: The line to be parsed + :param line_number: The number of the line to parsed (starting at 1) + :return: The `Line` that has been created + """ + line = line.strip('\n') + line_type = LineType.parse(line) + + if not line_type: + raise ValueError('Line ' + str(line_number) + + ' is invalid: Must be blank, a top-level header (starting with "' + Line.PREFIX_HEADER + + '"), or an enumeration (starting with "' + Line.PREFIX_DASH + '" or "' + + Line.PREFIX_ASTERISK + '"), but is "' + line + '"') + + content = line + + if line_type != LineType.BLANK: + content = line.lstrip(Line.PREFIX_HEADER).lstrip(Line.PREFIX_DASH).lstrip(Line.PREFIX_ASTERISK) + + if not content or content.isspace(): + raise ValueError('Line ' + str(line_number) + ' is is invalid: Content must not be blank, but is "' + + line + '"') + + return Line(line_number=line_number, line_type=line_type, line=line, content=content) + + +@dataclass +class Changeset: + """ + A changeset, consisting of a header and textual descriptions of several changes. 
+ + Attributes: + header: The header of the changeset + changes: A list that stores the textual descriptions of the changes + """ + header: str + changes: List[str] = field(default_factory=list) + + def __str__(self) -> str: + changeset = '### ' + self.header + '\n\n' + + for content in self.changes: + changeset += Line.PREFIX_DASH + content + '\n' + + return changeset + + +class ChangesetFile(TextFile): + """ + A file that stores several changesets. + """ + + def __validate_line(self, current_line: Optional[Line], previous_line: Optional[Line]): + current_line_is_enumeration = current_line and current_line.line_type == LineType.ENUMERATION + + if current_line_is_enumeration and not previous_line: + raise ValueError('File "' + self.file + '" must start with a top-level header (starting with "' + + Line.PREFIX_HEADER + '")') + + current_line_is_header = current_line and current_line.line_type == LineType.HEADER + previous_line_is_header = previous_line and previous_line.line_type == LineType.HEADER + + if (current_line_is_header and previous_line_is_header) or (not current_line and previous_line_is_header): + raise ValueError('Header "' + previous_line.line + '" at line ' + str(previous_line.line_number) + + ' of file "' + self.file + '" is not followed by any content') + + @cached_property + def parsed_lines(self) -> List[Line]: + """ + The lines in the changelog as `Line` objects. + """ + parsed_lines = [] + + for i, line in enumerate(self.lines): + current_line = Line.parse(line, line_number=i + 1) + + if current_line.line_type != LineType.BLANK: + parsed_lines.append(current_line) + + return parsed_lines + + @cached_property + def changesets(self) -> List[Changeset]: + """ + A list that contains all changesets in the changelog. 
+ """ + changesets = [] + + for line in self.parsed_lines: + if line.line_type == LineType.HEADER: + changesets.append(Changeset(header=line.content)) + elif line.line_type == LineType.ENUMERATION: + current_changeset = changesets[-1] + current_changeset.changes.append(line.content) + + return changesets + + def validate(self): + """ + Validates the changelog. + """ + previous_line = None + + for current_line in self.parsed_lines: + if current_line.line_type != LineType.BLANK: + self.__validate_line(current_line=current_line, previous_line=previous_line) + previous_line = current_line + + self.__validate_line(current_line=None, previous_line=previous_line) + + def write_lines(self, *lines: str): + super().write_lines(lines) + + try: + del self.parsed_lines + except AttributeError: + pass + + try: + del self.changesets + except AttributeError: + pass + + +class ReleaseType(Enum): + """ + Represents the type of a release. + """ + MAJOR = 'major' + MINOR = 'feature' + PATCH = 'bugfix' + + +@dataclass +class Release: + """ + A release, consisting of a version, a release date, a type, and several changesets. + + Attributes: + version: The version + release_date: The release date + release_type: The type of the release + changesets: A list that stores the changesets + """ + version: Version + release_date: date + release_type: ReleaseType + changesets: List[Changeset] = field(default_factory=list) + + URL_DOCUMENTATION = 'https://mlrl-boomer.readthedocs.io/en/' + + PREFIX_SUB_HEADER = '## ' + + @staticmethod + def __format_release_month(month: int) -> str: + return ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][month - 1] + + @staticmethod + def __format_release_day(day: int) -> str: + if 11 <= (day % 100) <= 13: + suffix = 'th' + else: + suffix = ['th', 'st', 'nd', 'rd', 'th'][min(day % 10, 4)] + + return str(day) + suffix + + def __format_release_date(self) -> str: + return self.__format_release_month(self.release_date.month) + '. 
' + self.__format_release_day( + self.release_date.day) + ', ' + str(self.release_date.year) + + def __format_disclaimer(self) -> str: + if [changeset for changeset in self.changesets if changeset.header.lower() == 'api changes']: + return ('```{warning}\nThis release comes with API changes. For an updated overview of the available ' + + 'parameters and command line arguments, please refer to the ' + '[documentation](' + + self.URL_DOCUMENTATION + str(self.version) + ').\n```\n\n') + return '' + + def __str__(self) -> str: + release = self.PREFIX_SUB_HEADER + 'Version ' + str( + self.version) + ' (' + self.__format_release_date() + ')\n\n' + release += 'A ' + self.release_type.value + ' release that comes with the following changes.\n\n' + release += self.__format_disclaimer() + + for i, changeset in enumerate(self.changesets): + release += str(changeset) + ('\n' if i < len(self.changesets) else '\n\n') + + return release + + +class ChangelogFile(TextFile): + """ + The file that stores the project's changelog. + """ + + def __init__(self): + super().__init__('CHANGELOG.md') + + def add_release(self, release: Release): + """ + Adds a new release to the project's changelog. + + :param release: The release to be added + """ + formatted_release = str(release) + Log.info('Adding new release to changelog file "%s":\n\n%s', self.file, formatted_release) + original_lines = self.lines + modified_lines = [] + offset = 0 + + for offset, line in enumerate(original_lines): + if line.startswith(Release.PREFIX_SUB_HEADER): + break + + modified_lines.append(line) + + modified_lines.append(formatted_release) + modified_lines.extend(original_lines[offset:]) + self.write_lines(*modified_lines) + + @property + def latest(self) -> str: + """ + The latest release in the changelog. 
+ """ + release = '' + lines = self.lines + offset = 0 + + for offset, line in enumerate(lines): + if line.startswith(Release.PREFIX_SUB_HEADER): + break + + for line in lines[offset + 2:]: + if line.startswith(Release.PREFIX_SUB_HEADER): + break + + if line.startswith('```{'): + release += '***' + elif line.startswith('```'): + release = release.rstrip('\n') + release += '***\n' + else: + release += line + + return release.rstrip('\n') + + +def __validate_changeset(changeset_file: str): + try: + Log.info('Validating changeset file "%s"...', changeset_file) + ChangesetFile(changeset_file, accept_missing=True).validate() + except ValueError as error: + Log.error('Changeset file "%s" is malformed!\n\n%s', changeset_file, str(error)) + + +def __merge_changesets(*changeset_files) -> List[Changeset]: + changesets_by_header = {} + + for changeset_file in changeset_files: + for changeset in ChangesetFile(changeset_file).changesets: + merged_changeset = changesets_by_header.setdefault(changeset.header.lower(), changeset) + + if merged_changeset != changeset: + merged_changeset.changes.extend(changeset.changes) + + return list(changesets_by_header.values()) + + +def __update_changelog(release_type: ReleaseType, *changeset_files): + merged_changesets = __merge_changesets(*changeset_files) + new_release = Release(version=get_current_version(), + release_date=date.today(), + release_type=release_type, + changesets=merged_changesets) + ChangelogFile().add_release(new_release) + + for changeset_file in changeset_files: + ChangesetFile(changeset_file).clear() + + +def validate_changelog_bugfix(): + """ + Validates the changelog file that lists bugfixes. + """ + __validate_changeset(CHANGESET_FILE_BUGFIX) + + +def validate_changelog_feature(): + """ + Validates the changelog file that lists new features. + """ + __validate_changeset(CHANGESET_FILE_FEATURE) + + +def validate_changelog_main(): + """ + Validates the changelog file that lists major updates. 
+ """ + __validate_changeset(CHANGESET_FILE_MAIN) + + +def update_changelog_main(): + """ + Updates the projects changelog when releasing bugfixes. + """ + __update_changelog(ReleaseType.MAJOR, CHANGESET_FILE_MAIN, CHANGESET_FILE_FEATURE, CHANGESET_FILE_BUGFIX) + + +def update_changelog_feature(): + """ + Updates the project's changelog when releasing new features. + """ + __update_changelog(ReleaseType.MINOR, CHANGESET_FILE_FEATURE, CHANGESET_FILE_BUGFIX) + + +def update_changelog_bugfix(): + """ + Updates the project's changelog when releasing major updates. + """ + __update_changelog(ReleaseType.PATCH, CHANGESET_FILE_BUGFIX) + + +def print_latest_changelog(): + """ + Prints the changelog of the latest release. + """ + Log.info('%s', ChangelogFile().latest) diff --git a/build_system/targets/versioning/versioning.py b/build_system/targets/versioning/versioning.py new file mode 100644 index 0000000000..58c364d861 --- /dev/null +++ b/build_system/targets/versioning/versioning.py @@ -0,0 +1,228 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides actions for updating the project's version. +""" +from dataclasses import dataclass, replace +from functools import cached_property +from typing import Optional + +from util.io import TextFile +from util.log import Log + + +@dataclass +class Version: + """ + Represents a semantic version. + + Attributes: + major: The major version number + minor: The minor version number + patch: The patch version number + dev: The development version number + """ + major: int + minor: int + patch: int + dev: Optional[int] = None + + @staticmethod + def parse_version_number(version_number: str) -> int: + """ + Parses and returns a single version number from a given string. 
+ + :param version_number: The string to be parsed + :return: The version number that has been parsed + """ + try: + number = int(version_number) + + if number < 0: + raise ValueError() + + return number + except ValueError as error: + raise ValueError('Version numbers must be non-negative integers, but got: ' + version_number) from error + + @staticmethod + def parse(version: str) -> 'Version': + """ + Parses and returns a version from a given string. + + :param version: The string to be parsed + :return: The version that has been parsed + """ + parts = version.split('.') + + if len(parts) != 3: + raise ValueError('Version must be given in format MAJOR.MINOR.PATCH, but got: ' + version) + + major = Version.parse_version_number(parts[0]) + minor = Version.parse_version_number(parts[1]) + patch = Version.parse_version_number(parts[2]) + return Version(major=major, minor=minor, patch=patch) + + def __str__(self) -> str: + version = str(self.major) + '.' + str(self.minor) + '.' + str(self.patch) + + if self.dev: + version += '.dev' + str(self.dev) + + return version + + +class VersionFile(TextFile): + """ + The file that stores the project's version. + """ + + def __init__(self): + super().__init__('.version') + + @cached_property + def version(self) -> Version: + """ + The version that is stored in the file. + """ + lines = self.lines + + if len(lines) != 1: + raise ValueError('File "' + self.file + '" must contain exactly one line') + + return Version.parse(lines[0]) + + def update(self, version: Version): + """ + Updates the version that is stored in the file. + + :param version: The version to be stored + """ + self.write_lines(str(version)) + Log.info('Updated version to "%s"', str(version)) + + def write_lines(self, *lines: str): + super().write_lines(lines) + + try: + del self.version + except AttributeError: + pass + + +class DevelopmentVersionFile(TextFile): + """ + The file that stores the project's development version. 
+ """ + + def __init__(self): + super().__init__('.version-dev') + + @cached_property + def development_version(self) -> int: + """ + The development version that is stored in the file. + """ + lines = self.lines + + if len(lines) != 1: + raise ValueError('File "' + self.file + '" must contain exactly one line') + + return Version.parse_version_number(lines[0]) + + def update(self, development_version: int): + """ + Updates the development version that is stored in the file. + + :param development_version: The development version to be stored + """ + self.write_lines(str(development_version)) + Log.info('Updated development version to "%s"', str(development_version)) + + def write_lines(self, *lines: str): + super().write_lines(lines) + + try: + del self.development_version + except AttributeError: + pass + + +def __get_version_file() -> VersionFile: + version_file = VersionFile() + Log.info('Current version is "%s"', str(version_file.version)) + return version_file + + +def __get_development_version_file() -> DevelopmentVersionFile: + version_file = DevelopmentVersionFile() + Log.info('Current development version is "%s"', str(version_file.development_version)) + return version_file + + +def get_current_version() -> Version: + """ + Returns the project's current version. + + :return: The project's current version + """ + return VersionFile().version + + +def print_current_version(): + """ + Prints the project's current version. + """ + return Log.info('%s', str(get_current_version())) + + +def increment_development_version(): + """ + Increments the development version. + """ + version_file = __get_development_version_file() + version_file.update(version_file.development_version + 1) + + +def reset_development_version(): + """ + Resets the development version. + """ + version_file = __get_development_version_file() + version_file.update(0) + + +def apply_development_version(): + """ + Appends the development version to the current semantic version. 
+ """ + version_file = __get_version_file() + development_version = __get_development_version_file().development_version + version_file.update(replace(version_file.version, dev=development_version)) + + +def increment_patch_version(): + """ + Increments the patch version. + """ + version_file = __get_version_file() + version = version_file.version + version_file.update(replace(version, patch=version.patch + 1)) + + +def increment_minor_version(): + """ + Increments the minor version. + """ + version_file = __get_version_file() + version = version_file.version + version_file.update(replace(version, minor=version.minor + 1, patch=0)) + + +def increment_major_version(): + """ + Increments the major version. + """ + version_file = __get_version_file() + version = version_file.version + version_file.update(replace(version, major=version.major + 1, minor=0, patch=0)) diff --git a/build_system/util/__init__.py b/build_system/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/build_system/util/cmd.py b/build_system/util/cmd.py new file mode 100644 index 0000000000..9f73c8d4a4 --- /dev/null +++ b/build_system/util/cmd.py @@ -0,0 +1,224 @@ +""" +Author: Michael Rapp (michael.rapp.ml@gmail.com) + +Provides utility functions for running command line programs during the build process. +""" +import subprocess +import sys + +from os import path +from subprocess import CompletedProcess + +from util.format import format_iterable +from util.log import Log + + +class Command: + """ + Allows to run command line programs. + """ + + class PrintOptions: + """ + Allows to customize how command line programs are presented in log statements. + """ + + def __init__(self): + self.print_arguments = False + + def format(self, command: 'Command') -> str: + """ + Creates and returns a textual representation of a given command line program. 
+ + :param command: The command line program + :return: The textual representation that has been created + """ + result = command.command + + if self.print_arguments: + result += ' ' + format_iterable(command.arguments, separator=' ') + + return result + + class RunOptions: + """ + Allows to customize options for running command line programs. + """ + + @staticmethod + def __in_virtual_environment() -> bool: + return sys.prefix != sys.base_prefix + + def __get_executable(self, command: 'Command') -> str: + if self.__in_virtual_environment(): + # On Windows, we use the relative path to the command's executable within the virtual environment, if + # such an executable exists. This circumvents situations where the PATH environment variable has not + # been updated after activating the virtual environment. This can prevent the executables from being + # found or can lead to the wrong executable, from outside the virtual environment, being executed. + executable = path.join(sys.prefix, 'Scripts', command.command + '.exe') + + if path.isfile(executable): + return executable + + return command.command + + def __init__(self): + self.print_command = True + self.exit_on_error = True + self.environment = None + + def run(self, command: 'Command', capture_output: bool) -> CompletedProcess: + """ + Runs a given command line program. 
+ + :param command: The command line program to be run + :param capture_output: True, if the output of the program should be captured, False otherwise + :return: The output of the program + """ + if self.print_command: + Log.info('Running external command "%s"...', command.print_options.format(command)) + + output = subprocess.run([self.__get_executable(command)] + command.arguments, + check=False, + text=capture_output, + capture_output=capture_output, + env=self.environment) + exit_code = output.returncode + + if exit_code != 0: + message = ('External command "' + str(command) + '" terminated with non-zero exit code ' + + str(exit_code)) + + if self.exit_on_error: + if capture_output: + Log.info('%s', str(output.stderr).strip()) + + Log.error(message, exit_code=exit_code) + else: + raise RuntimeError(message) + + return output + + def __init__(self, + command: str, + *arguments: str, + print_options: PrintOptions = PrintOptions(), + run_options: RunOptions = RunOptions()): + """ + :param command: The name of the command line program + :param arguments: Optional arguments to be passed to the command line program + :param run_options: The options that should eb used for running the command line program + :param print_options: The options that should be used for creating textual representations of the command line + program + """ + self.command = command + self.arguments = list(arguments) + self.print_options = print_options + self.run_options = run_options + + def add_arguments(self, *arguments: str) -> 'Command': + """ + Adds one or several arguments to be passed to the command line program. + + :param arguments: The arguments to be added + :return: The `Command` itself + """ + self.arguments.extend(arguments) + return self + + def add_conditional_arguments(self, condition: bool, *arguments: str) -> 'Command': + """ + Adds one or several arguments to be passed to the command line program, if a certain condition is True. 
+ + :param condition: The condition + :param arguments: The arguments to be added + :return: The `Command` itself + """ + if condition: + self.arguments.extend(arguments) + return self + + def print_arguments(self, print_arguments: bool) -> 'Command': + """ + Sets whether the arguments of the command line program should be included in log statements or not. + + :param print_arguments: True, if the arguments should be included, False otherwise + :return: The `Command` itself + """ + self.print_options.print_arguments = print_arguments + return self + + def print_command(self, print_command: bool) -> 'Command': + """ + Sets whether the command line program should be printed on the console when being run or not. + + :param print_command: True, if the command line program should be printed, False otherwise + :return: The `Command` itself + """ + self.run_options.print_command = print_command + return self + + def exit_on_error(self, exit_on_error: bool) -> 'Command': + """ + Sets whether the build system should be terminated if the program exits with a non-zero exit code or not. + + :param exit_on_error: True, if the build system should be terminated, False, if a `RuntimeError` should be + raised instead + :return: The `Command` itself + """ + self.run_options.exit_on_error = exit_on_error + return self + + def use_environment(self, environment) -> 'Command': + """ + Sets the environment to be used for running the command line program. + + :param environment: The environment to be set or None, if the default environment should be used + :return: The `Command` itself + """ + self.run_options.environment = environment + return self + + def _should_be_skipped(self) -> bool: + """ + May be overridden by subclasses in order to determine whether the command should be skipped or not. + """ + return False + + def _before(self): + """ + May be overridden by subclasses in order to perform some operations before the command is run. 
+ """ + + def run(self): + """ + Runs the command line program. + """ + if not self._should_be_skipped(): + self._before() + self.run_options.run(self, capture_output=False) + self._after() + + def capture_output(self) -> str: + """ + Runs the command line program and returns its output. + + :return: The output of the program + """ + if not self._should_be_skipped(): + self._before() + output = self.run_options.run(self, capture_output=True) + self._after() + return output.stdout + + return '' + + def _after(self): + """ + May be overridden by subclasses in order to perform some operations after the command has been run. + """ + + def __str__(self) -> str: + print_options = Command.PrintOptions() + print_options.print_arguments = True + return print_options.format(self) diff --git a/scons/environment.py b/build_system/util/env.py similarity index 79% rename from scons/environment.py rename to build_system/util/env.py index 9d48141791..614a8a21a4 100644 --- a/scons/environment.py +++ b/build_system/util/env.py @@ -3,10 +3,12 @@ Provides utility functions for accessing environment variables. """ -from typing import List, Optional +from typing import Dict, List, Optional +from util.log import Log -def get_env(env, name: str, default: Optional[str] = None) -> Optional[str]: + +def get_env(env: Dict, name: str, default: Optional[str] = None) -> Optional[str]: """ Returns the value of the environment variable with a given name. @@ -18,7 +20,7 @@ def get_env(env, name: str, default: Optional[str] = None) -> Optional[str]: return env.get(name, default) -def get_env_bool(env, name: str, default: bool = False) -> bool: +def get_env_bool(env: Dict, name: str, default: bool = False) -> bool: """ Returns the value of the environment variable with a given name as a boolean value. 
"""
Author: Michael Rapp (michael.rapp.ml@gmail.com)

Provides classes for listing files and directories.
"""
from functools import partial, reduce
from glob import glob
from os import path
from typing import Callable, List, Optional, Set


def _matches_substrings(name: str,
                        starts_with: Optional[str] = None,
                        not_starts_with: Optional[str] = None,
                        ends_with: Optional[str] = None,
                        not_ends_with: Optional[str] = None,
                        contains: Optional[str] = None,
                        not_contains: Optional[str] = None) -> bool:
    """
    Returns whether a file or directory name satisfies several optional substring restrictions. Restrictions that are
    None are ignored.

    :param name:             The name to be checked
    :param starts_with:      A substring, the name must start with or None, if no restrictions should be imposed
    :param not_starts_with:  A substring, the name must not start with or None, if no restrictions should be imposed
    :param ends_with:        A substring, the name must end with or None, if no restrictions should be imposed
    :param not_ends_with:    A substring, the name must not end with or None, if no restrictions should be imposed
    :param contains:         A substring, the name must contain or None, if no restrictions should be imposed
    :param not_contains:     A substring, the name must not contain or None, if no restrictions should be imposed
    :return:                 True, if the name satisfies all restrictions, False otherwise
    """
    # Bugfix: the original predicates checked `name.endswith(not_ends_with)` without negation, which *included*
    # exactly the names the parameter documentation says must be excluded.
    return (not starts_with or name.startswith(starts_with)) \
        and (not not_starts_with or not name.startswith(not_starts_with)) \
        and (not ends_with or name.endswith(ends_with)) \
        and (not not_ends_with or not name.endswith(not_ends_with)) \
        and (not contains or name.find(contains) >= 0) \
        and (not not_contains or name.find(not_contains) < 0)


class DirectorySearch:
    """
    Allows to search for subdirectories.
    """

    # A filter function that is given the parent path and the directory name and returns whether the directory matches.
    Filter = Callable[[str, str], bool]

    def __init__(self):
        self.recursive = False
        self.excludes = []
        self.filters = []

    def set_recursive(self, recursive: bool) -> 'DirectorySearch':
        """
        Sets whether the search should be recursive or not.

        :param recursive:   True, if the search should be recursive, False otherwise
        :return:            The `DirectorySearch` itself
        """
        self.recursive = recursive
        return self

    def add_filters(self, *filter_functions: Filter) -> 'DirectorySearch':
        """
        Adds one or several filters that match subdirectories to be included.

        :param filter_functions:    The filters to be added
        :return:                    The `DirectorySearch` itself
        """
        self.filters.extend(filter_functions)
        return self

    def filter_by_name(self, *names: str) -> 'DirectorySearch':
        """
        Adds one or several filters that match subdirectories to be included based on their name.

        :param names:   The names of the subdirectories that should be included
        :return:        The `DirectorySearch` itself
        """

        # Bugfix: the original tested `directory_name in filtered_name`, i.e., a substring match against a single
        # name, instead of the equality test used by `FileSearch.filter_by_name` and `exclude_by_name`.
        def filter_directory(filtered_name: str, _: str, directory_name: str) -> bool:
            return directory_name == filtered_name

        return self.add_filters(*[partial(filter_directory, name) for name in names])

    def filter_by_substrings(self,
                             starts_with: Optional[str] = None,
                             not_starts_with: Optional[str] = None,
                             ends_with: Optional[str] = None,
                             not_ends_with: Optional[str] = None,
                             contains: Optional[str] = None,
                             not_contains: Optional[str] = None) -> 'DirectorySearch':
        """
        Adds a filter that matches subdirectories based on whether their name contains specific substrings.

        :param starts_with:     A substring, names must start with or None, if no restrictions should be imposed
        :param not_starts_with: A substring, names must not start with or None, if no restrictions should be imposed
        :param ends_with:       A substring, names must end with or None, if no restrictions should be imposed
        :param not_ends_with:   A substring, names must not end with or None, if no restrictions should be imposed
        :param contains:        A substring, names must contain or None, if no restrictions should be imposed
        :param not_contains:    A substring, names must not contain or None, if no restrictions should be imposed
        :return:                The `DirectorySearch` itself
        """

        def filter_directory(_: str, directory_name: str) -> bool:
            return _matches_substrings(directory_name,
                                       starts_with=starts_with,
                                       not_starts_with=not_starts_with,
                                       ends_with=ends_with,
                                       not_ends_with=not_ends_with,
                                       contains=contains,
                                       not_contains=not_contains)

        return self.add_filters(filter_directory)

    def exclude(self, *excludes: Filter) -> 'DirectorySearch':
        """
        Adds one or several filters that should be used for excluding subdirectories.

        :param excludes:    The filters to be set
        :return:            The `DirectorySearch` itself
        """
        self.excludes.extend(excludes)
        return self

    def exclude_by_name(self, *names: str) -> 'DirectorySearch':
        """
        Adds one or several filters that should be used for excluding subdirectories by their names.

        :param names:   The names of the subdirectories to be excluded
        :return:        The `DirectorySearch` itself
        """

        def filter_directory(excluded_name: str, _: str, directory_name: str) -> bool:
            return directory_name == excluded_name

        return self.exclude(*[partial(filter_directory, name) for name in names])

    def exclude_by_substrings(self,
                              starts_with: Optional[str] = None,
                              not_starts_with: Optional[str] = None,
                              ends_with: Optional[str] = None,
                              not_ends_with: Optional[str] = None,
                              contains: Optional[str] = None,
                              not_contains: Optional[str] = None) -> 'DirectorySearch':
        """
        Adds a filter that should be used for excluding subdirectories based on whether their name contains specific
        substrings.

        :param starts_with:     A substring, names must start with or None, if no restrictions should be imposed
        :param not_starts_with: A substring, names must not start with or None, if no restrictions should be imposed
        :param ends_with:       A substring, names must end with or None, if no restrictions should be imposed
        :param not_ends_with:   A substring, names must not end with or None, if no restrictions should be imposed
        :param contains:        A substring, names must contain or None, if no restrictions should be imposed
        :param not_contains:    A substring, names must not contain or None, if no restrictions should be imposed
        :return:                The `DirectorySearch` itself
        """

        def filter_directory(_: str, directory_name: str) -> bool:
            return _matches_substrings(directory_name,
                                       starts_with=starts_with,
                                       not_starts_with=not_starts_with,
                                       ends_with=ends_with,
                                       not_ends_with=not_ends_with,
                                       contains=contains,
                                       not_contains=not_contains)

        return self.exclude(filter_directory)

    def list(self, *directories: str) -> List[str]:
        """
        Lists all subdirectories that can be found in given directories.

        :param directories: The directories to search for subdirectories
        :return:            A list that contains all subdirectories that have been found
        """
        result = []

        def is_excluded(file: str) -> bool:
            parent = path.dirname(file)
            file_name = path.basename(file)
            return reduce(lambda aggr, exclude: aggr or exclude(parent, file_name), self.excludes, False)

        def matches_filters(subdirectory: str) -> bool:
            parent = path.dirname(subdirectory)
            directory_name = path.basename(subdirectory)
            return reduce(lambda aggr, dir_filter: aggr or dir_filter(parent, directory_name), self.filters, False)

        for directory in directories:
            subdirectories = [
                file for file in glob(path.join(directory, '*')) if path.isdir(file) and not is_excluded(file)
            ]

            # Recursion descends into all non-excluded subdirectories, even those that do not match the filters.
            if self.recursive:
                result.extend(self.list(*subdirectories))

            if self.filters:
                subdirectories = [subdirectory for subdirectory in subdirectories if matches_filters(subdirectory)]

            result.extend(subdirectories)

        return result


class FileSearch:
    """
    Allows to search for files.
    """

    # A filter function that is given the parent path and the file name and returns whether the file matches.
    Filter = Callable[[str, str], bool]

    def __init__(self):
        self.hidden = False
        self.symlinks = True
        self.excludes = []
        self.filters = []
        self.directory_search = DirectorySearch()

    def set_recursive(self, recursive: bool) -> 'FileSearch':
        """
        Sets whether the search should be recursive or not.

        :param recursive:   True, if the search should be recursive, False otherwise
        :return:            The `FileSearch` itself
        """
        self.directory_search.set_recursive(recursive)
        return self

    def add_subdirectory_filters(self, *filter_functions: DirectorySearch.Filter) -> 'FileSearch':
        """
        Adds one or several filters that match subdirectories to be included.

        :param filter_functions:    The filters to be added
        :return:                    The `FileSearch` itself
        """
        self.directory_search.add_filters(*filter_functions)
        return self

    def filter_subdirectories_by_name(self, *names: str) -> 'FileSearch':
        """
        Adds one or several filters that match subdirectories to be included based on their name.

        :param names:   The names of the subdirectories that should be included
        :return:        The `FileSearch` itself
        """
        self.directory_search.filter_by_name(*names)
        return self

    def exclude_subdirectories(self, *excludes: DirectorySearch.Filter) -> 'FileSearch':
        """
        Adds one or several filters that should be used for excluding subdirectories. Does only have an effect if the
        search is recursive.

        :param excludes:    The filters to be set
        :return:            The `FileSearch` itself
        """
        self.directory_search.exclude(*excludes)
        return self

    def exclude_subdirectories_by_name(self, *names: str) -> 'FileSearch':
        """
        Adds one or several filters that should be used for excluding subdirectories by their names. Does only have an
        effect if the search is recursive.

        :param names:   The names of the subdirectories to be excluded
        :return:        The `FileSearch` itself
        """
        self.directory_search.exclude_by_name(*names)
        return self

    def exclude_subdirectories_by_substrings(self,
                                             starts_with: Optional[str] = None,
                                             not_starts_with: Optional[str] = None,
                                             ends_with: Optional[str] = None,
                                             not_ends_with: Optional[str] = None,
                                             contains: Optional[str] = None,
                                             not_contains: Optional[str] = None) -> 'FileSearch':
        """
        Adds a filter that should be used for excluding subdirectories based on whether their name contains specific
        substrings.

        :param starts_with:     A substring, names must start with or None, if no restrictions should be imposed
        :param not_starts_with: A substring, names must not start with or None, if no restrictions should be imposed
        :param ends_with:       A substring, names must end with or None, if no restrictions should be imposed
        :param not_ends_with:   A substring, names must not end with or None, if no restrictions should be imposed
        :param contains:        A substring, names must contain or None, if no restrictions should be imposed
        :param not_contains:    A substring, names must not contain or None, if no restrictions should be imposed
        :return:                The `FileSearch` itself
        """
        self.directory_search.exclude_by_substrings(starts_with=starts_with,
                                                    not_starts_with=not_starts_with,
                                                    ends_with=ends_with,
                                                    not_ends_with=not_ends_with,
                                                    contains=contains,
                                                    not_contains=not_contains)
        return self

    def set_hidden(self, hidden: bool) -> 'FileSearch':
        """
        Sets whether hidden files should be included or not.

        :param hidden:  True, if hidden files should be included, False otherwise
        :return:        The `FileSearch` itself
        """
        self.hidden = hidden
        return self

    def set_symlinks(self, symlinks: bool) -> 'FileSearch':
        """
        Sets whether symbolic links should be included or not.

        :param symlinks:    True, if symbolic links should be included, False otherwise
        :return:            The `FileSearch` itself
        """
        self.symlinks = symlinks
        return self

    def add_filters(self, *filter_functions: Filter) -> 'FileSearch':
        """
        Adds one or several filters that match files to be included.

        :param filter_functions:    The filters to be added
        :return:                    The `FileSearch` itself
        """
        self.filters.extend(filter_functions)
        return self

    def filter_by_name(self, *names: str) -> 'FileSearch':
        """
        Adds one or several filters that match files to be included based on their name.

        :param names:   The names of the files that should be included (including their suffix)
        :return:        The `FileSearch` itself
        """

        def filter_file(filtered_name: str, _: str, file_name: str) -> bool:
            return file_name == filtered_name

        return self.add_filters(*[partial(filter_file, name) for name in names])

    def filter_by_substrings(self,
                             starts_with: Optional[str] = None,
                             not_starts_with: Optional[str] = None,
                             ends_with: Optional[str] = None,
                             not_ends_with: Optional[str] = None,
                             contains: Optional[str] = None,
                             not_contains: Optional[str] = None) -> 'FileSearch':
        """
        Adds a filter that matches files based on whether their name contains specific substrings.

        :param starts_with:     A substring, names must start with or None, if no restrictions should be imposed
        :param not_starts_with: A substring, names must not start with or None, if no restrictions should be imposed
        :param ends_with:       A substring, names must end with or None, if no restrictions should be imposed
        :param not_ends_with:   A substring, names must not end with or None, if no restrictions should be imposed
        :param contains:        A substring, names must contain or None, if no restrictions should be imposed
        :param not_contains:    A substring, names must not contain or None, if no restrictions should be imposed
        :return:                The `FileSearch` itself
        """

        def filter_file(_: str, file_name: str) -> bool:
            return _matches_substrings(file_name,
                                       starts_with=starts_with,
                                       not_starts_with=not_starts_with,
                                       ends_with=ends_with,
                                       not_ends_with=not_ends_with,
                                       contains=contains,
                                       not_contains=not_contains)

        return self.add_filters(filter_file)

    def filter_by_suffix(self, *suffixes: str) -> 'FileSearch':
        """
        Adds one or several filters that match files to be included based on their suffix.

        :param suffixes:    The suffixes of the files that should be included (without starting dot)
        :return:            The `FileSearch` itself
        """

        def filter_file(filtered_suffixes: List[str], _: str, file_name: str) -> bool:
            return reduce(lambda aggr, suffix: aggr or file_name.endswith(suffix), filtered_suffixes, False)

        return self.add_filters(partial(filter_file, list(suffixes)))

    def filter_by_file_type(self, *file_types: 'FileType') -> 'FileSearch':
        """
        Adds one or several filters that match files to be included based on a `FileType`.

        :param file_types:  The `FileType` of the files that should be included
        :return:            The `FileSearch` itself
        """
        for file_type in file_types:
            file_type.file_search_decorator(self)

        return self

    def exclude(self, *excludes: Filter) -> 'FileSearch':
        """
        Adds one or several filters that should be used for excluding files.

        :param excludes:    The filters to be set
        :return:            The `FileSearch` itself
        """
        self.excludes.extend(excludes)
        return self

    def exclude_by_name(self, *names: str) -> 'FileSearch':
        """
        Adds one or several filters that should be used for excluding files by their names.

        :param names:   The names of the files to be excluded
        :return:        The `FileSearch` itself
        """

        def filter_file(excluded_name: str, _: str, file_name: str) -> bool:
            return file_name == excluded_name

        return self.exclude(*[partial(filter_file, name) for name in names])

    def list(self, *directories: str) -> List[str]:
        """
        Lists all files that can be found in given directories.

        :param directories: The directories to search for files
        :return:            A list that contains all files that have been found
        """
        result = []
        subdirectories = self.directory_search.list(*directories) if self.directory_search.recursive else []

        def filter_file(file: str) -> bool:
            if path.isfile(file) and (self.symlinks or not path.islink(file)):
                parent = path.dirname(file)
                file_name = path.basename(file)
                # If no filters have been added, every file matches.
                match = not self.filters or reduce(
                    lambda aggr, file_filter: aggr or file_filter(parent, file_name), self.filters, False)
                excluded = reduce(lambda aggr, file_filter: aggr or file_filter(parent, file_name), self.excludes,
                                  False)
                return match and not excluded

            return False

        for directory in list(directories) + subdirectories:
            files = [file for file in glob(path.join(directory, '*')) if filter_file(file)]

            # `glob` with the pattern '*' does not match hidden files, hence the separate '.*' pattern.
            if self.hidden:
                files.extend(file for file in glob(path.join(directory, '.*')) if filter_file(file))

            result.extend(files)

        return result


class FileType:
    """
    Represents different types of files.
    """

    def __init__(self,
                 name: str,
                 suffixes: Set[str],
                 file_search_decorator: Optional[Callable[['FileSearch'], None]] = None):
        """
        :param name:                    The name of the file type
        :param suffixes:                The suffixes that correspond to this file type (without leading dot)
        :param file_search_decorator:   A function that adds a filter for this file type to a `FileSearch` or None, if
                                        a filter should automatically be created from the suffixes
        """
        self.name = name
        self.suffixes = suffixes
        self.file_search_decorator = file_search_decorator

        if not self.file_search_decorator:
            self.file_search_decorator = lambda file_search: file_search.filter_by_suffix(*suffixes)

    @staticmethod
    def python() -> 'FileType':
        """
        Creates and returns a `FileType` that corresponds to Python source files.

        :return: The `FileType` that has been created
        """
        return FileType(name='Python', suffixes={'py'})

    @staticmethod
    def cpp() -> 'FileType':
        """
        Creates and returns a `FileType` that corresponds to C++ source files.

        :return: The `FileType` that has been created
        """
        return FileType(name='C++', suffixes={'cpp', 'hpp'})

    @staticmethod
    def cython() -> 'FileType':
        """
        Creates and returns a `FileType` that corresponds to Cython source files.

        :return: The `FileType` that has been created
        """
        return FileType(name='Cython', suffixes={'pyx', 'pxd'})

    @staticmethod
    def markdown() -> 'FileType':
        """
        Creates and returns a `FileType` that corresponds to Markdown files.

        :return: The `FileType` that has been created
        """
        return FileType(name='Markdown', suffixes={'md', 'md.template'})

    @staticmethod
    def yaml() -> 'FileType':
        """
        Creates and returns a `FileType` that corresponds to YAML files.

        :return: The `FileType` that has been created
        """
        return FileType(name='YAML', suffixes={'yaml', 'yml'})

    @staticmethod
    def extension_module() -> 'FileType':
        """
        Creates and returns a `FileType` that corresponds to extension modules.

        :return: The `FileType` that has been created
        """
        return FileType(
            name='Extension module',
            suffixes={'so', 'pyd', 'lib'},
            file_search_decorator=lambda file_search: file_search \
                .filter_by_substrings(not_starts_with='lib', ends_with='.so') \
                .filter_by_substrings(ends_with='.pyd') \
                .filter_by_substrings(not_starts_with='mlrl', ends_with='.lib'),
        )

    @staticmethod
    def shared_library() -> 'FileType':
        """
        Creates and returns a `FileType` that corresponds to shared libraries.

        :return: The `FileType` that has been created
        """
        return FileType(
            name='Shared library',
            suffixes={'so', 'dylib', 'lib', 'dll'},
            file_search_decorator=lambda file_search: file_search \
                .filter_by_substrings(starts_with='lib', contains='.so') \
                .filter_by_substrings(ends_with='.dylib') \
                .filter_by_substrings(starts_with='mlrl', ends_with='.lib') \
                .filter_by_substrings(ends_with='.dll'),
        )

    def __str__(self) -> str:
        return self.name

    def __eq__(self, other: 'FileType') -> bool:
        return self.name == other.name

    def __hash__(self) -> int:
        return hash(self.name)
"""
Author: Michael Rapp (michael.rapp.ml@gmail.com)

Provides utility functions for creating textual representations, for reading and writing files, and classes for
writing log messages. (In the repository these reside in util/format.py, util/log.py and util/io.py.)
"""
import logging
import sys

from enum import Enum
from functools import cached_property
from os import makedirs, path, remove
from shutil import rmtree
from typing import Any, Callable, Iterable, List, Optional

ENCODING_UTF8 = 'utf-8'


def format_iterable(objects: Iterable[Any],
                    separator: str = ', ',
                    delimiter: str = '',
                    mapping: Callable[[Any], Any] = lambda x: x) -> str:
    """
    Creates and returns a textual representation of objects in an iterable.

    :param objects:     The iterable of objects to be formatted
    :param separator:   The string that should be used as a separator
    :param delimiter:   The string that should be added at the beginning and end of each object
    :param mapping:     An optional function that maps each object in the iterable to another one
    :return:            The textual representation that has been created
    """
    # `str.join` replaces the original reduce-based string concatenation; output is identical, but linear in the
    # total length and easier to read.
    return separator.join(delimiter + str(mapping(obj)) + delimiter for obj in objects)


class Log:
    """
    Allows to write log messages.
    """

    class Level(Enum):
        """
        The log levels supported by the build system.
        """
        NONE = logging.NOTSET
        ERROR = logging.ERROR
        WARNING = logging.WARNING
        INFO = logging.INFO
        VERBOSE = logging.DEBUG

    @staticmethod
    def configure(log_level: Level = Level.INFO):
        """
        Configures the logger to be used by the build system.

        NOTE(review): calling this more than once adds another handler to the root logger and duplicates every log
        line — confirm it is only invoked once during startup.

        :param log_level: The log level to be used
        """
        root = logging.getLogger()
        root.setLevel(log_level.value)
        out_handler = logging.StreamHandler(sys.stdout)
        out_handler.setLevel(log_level.value)
        out_handler.setFormatter(logging.Formatter('%(message)s'))
        root.addHandler(out_handler)

    @staticmethod
    def error(message: str, *args, error: Optional[Exception] = None, exit_code: int = 1):
        """
        Writes a log message at level `Log.Level.ERROR` and terminates the build system.

        :param message:     The log message to be written
        :param args:        Optional arguments to be included in the log message
        :param error:       An optional error to be included in the log message
        :param exit_code:   The exit code to be returned when terminating the build system
        """
        if error:
            logging.error(message + ': %s', *args, error)
        else:
            logging.error(message, *args)

        sys.exit(exit_code)

    @staticmethod
    def warning(message: str, *args):
        """
        Writes a log message at level `Log.Level.WARNING`.

        :param message: The log message to be written
        :param args:    Optional arguments to be included in the log message
        """
        logging.warning(message, *args)

    @staticmethod
    def info(message: str, *args):
        """
        Writes a log message at level `Log.Level.INFO`.

        :param message: The log message to be written
        :param args:    Optional arguments to be included in the log message
        """
        logging.info(message, *args)

    @staticmethod
    def verbose(message: str, *args):
        """
        Writes a log message at level `Log.Level.VERBOSE`.

        :param message: The log message to be written
        :param args:    Optional arguments to be included in the log message
        """
        logging.debug(message, *args)


def read_file(file: str):
    """
    Opens a file to read from. The caller is responsible for closing it, e.g., via a `with` statement.

    :param file: The file to be opened
    """
    return open(file, mode='r', encoding=ENCODING_UTF8)


def write_file(file: str):
    """
    Opens a file to be written to. The caller is responsible for closing it, e.g., via a `with` statement.

    :param file: The file to be opened
    """
    return open(file, mode='w', encoding=ENCODING_UTF8)


def delete_files(*files: str, accept_missing: bool = True):
    """
    Deletes one or several files or directories.

    :param files:           The files or directories to be deleted
    :param accept_missing:  True, if no error should be raised if the file is missing, False otherwise
    """
    for file in files:
        if path.isdir(file):
            Log.verbose('Deleting directory "%s"...', file)
            rmtree(file)
        else:
            # When `accept_missing` is False, `remove` is called unconditionally so that a missing file raises.
            if not accept_missing or path.isfile(file):
                Log.verbose('Deleting file "%s"...', file)
                remove(file)


def create_directories(*directories: str):
    """
    Creates one or several directories, if they do not already exist.

    :param directories: The directories to be created
    """
    for directory in directories:
        if not path.isdir(directory):
            Log.verbose('Creating directory "%s"...', directory)
            makedirs(directory)


class TextFile:
    """
    Allows to read and write the content of a text file.
    """

    def __init__(self, file: str, accept_missing: bool = False):
        """
        :param file:            The path to the text file
        :param accept_missing:  True, if no errors should be raised if the text file is missing, False otherwise
        """
        self.file = file
        self.accept_missing = accept_missing

    @cached_property
    def lines(self) -> List[str]:
        """
        The lines in the text file (cached after the first read).
        """
        if self.accept_missing and not path.isfile(self.file):
            return []

        with read_file(self.file) as file:
            return file.readlines()

    def write_lines(self, *lines: str):
        """
        Overwrites all lines in the text file. Lines are written as given; no newlines are appended.

        :param lines: The lines to be written
        """
        with write_file(self.file) as file:
            file.writelines(lines)

        # Invalidate the cached `lines` property, so the next access re-reads the file.
        try:
            del self.lines
        except AttributeError:
            pass

    def clear(self):
        """
        Clears the text file.
        """
        Log.info('Clearing file "%s"...', self.file)
        self.write_lines('')

    def delete(self):
        """
        Deletes the text file.
        """
        delete_files(self.file, accept_missing=self.accept_missing)

    def __str__(self) -> str:
        return self.file
"""
Author: Michael Rapp (michael.rapp.ml@gmail.com)

Provides classes for installing Python packages via pip.
"""
from abc import ABC, abstractmethod
from dataclasses import dataclass
from functools import reduce
from typing import Dict, Optional, Set

from core.build_unit import BuildUnit
from util.cmd import Command as Cmd
from util.io import TextFile
from util.log import Log


@dataclass
class Package:
    """
    A Python package.

    Attributes:
        name: The name of the package
    """
    name: str

    @property
    def normalized_name(self) -> str:
        """
        The normalized name of the package in lower-case and with invalid characters being replaced.
        """
        return self.name.replace('_', '-').lower()

    def __str__(self) -> str:
        return self.normalized_name

    def __eq__(self, other) -> bool:
        # Guard against comparisons with arbitrary objects instead of raising an AttributeError.
        if not isinstance(other, Package):
            return NotImplemented

        return self.normalized_name == other.normalized_name

    def __hash__(self) -> int:
        return hash(self.normalized_name)


@dataclass
class Requirement:
    """
    A single requirement included in a requirements file, consisting of a Python package and an optional version.

    Attributes:
        package:    The package
        version:    The version of the package or None, if no version is specified
    """
    package: Package
    version: Optional[str] = None

    @staticmethod
    def parse(requirement: str) -> 'Requirement':
        """
        Parses and returns a single requirement included in a requirements file.

        :param requirement: The requirement to be parsed
        :return:            The requirement that has been parsed
        """
        parts = requirement.split()
        package = Package(name=parts[0].strip())
        version = ' '.join(parts[1:]).strip() if len(parts) > 1 else None
        return Requirement(package, version)

    def __str__(self) -> str:
        return str(self.package) + (self.version if self.version else '')

    def __eq__(self, other) -> bool:
        # Two requirements are considered equal if they refer to the same package, regardless of their version.
        if not isinstance(other, Requirement):
            return NotImplemented

        return self.package == other.package

    def __hash__(self) -> int:
        return hash(self.package)


class Requirements(ABC):
    """
    An abstract base class for all classes that provide access to requirements.
    """

    @property
    @abstractmethod
    def requirements_by_package(self) -> Dict[Package, Requirement]:
        """
        A dictionary that contains all requirements by their package.
        """

    @property
    def requirements(self) -> Set[Requirement]:
        """
        A set that contains all requirements in the requirements file.
        """
        return set(self.requirements_by_package.values())

    def lookup_requirements(self, *packages: Package, accept_missing: bool = False) -> Set[Requirement]:
        """
        Looks up the requirements for given packages in the requirements file.

        :param packages:        The packages that should be looked up
        :param accept_missing:  False, if an error should be raised if a package is not listed in the requirements
                                file, True, if it should simply be ignored
        :return:                A set that contains the requirements for the given packages
        """
        requirements = set()

        for package in packages:
            requirement = self.requirements_by_package.get(package)

            if requirement:
                requirements.add(requirement)
            elif not accept_missing:
                raise RuntimeError('Requirement for package "' + str(package) + '" not found')

        return requirements

    def lookup_requirement(self, package: Package, accept_missing: bool = False) -> Optional[Requirement]:
        """
        Looks up the requirement for a given package in the requirements file.

        :param package:         The package that should be looked up
        :param accept_missing:  False, if an error should be raised if the package is not listed in the requirements
                                file, True, if it should simply be ignored
        :return:                The requirement for the given package
        """
        requirements = self.lookup_requirements(package, accept_missing=accept_missing)
        return requirements.pop() if requirements else None


class RequirementsFile(TextFile, Requirements):
    """
    Represents a specific requirements.txt file.
    """

    @property
    def requirements_by_package(self) -> Dict[Package, Requirement]:
        return {
            requirement.package: requirement
            for requirement in [Requirement.parse(line) for line in self.lines if line.strip('\n').strip()]
        }


class RequirementsFiles(Requirements):
    """
    Represents multiple requirements.txt files.
    """

    def __init__(self, *requirements_files: str):
        """
        :param requirements_files: The paths to the requirements files
        """
        self.requirements_files = [RequirementsFile(requirements_file) for requirements_file in requirements_files]

    @property
    def requirements_by_package(self) -> Dict[Package, Requirement]:
        # Later files take precedence over earlier ones via the dict union operator.
        return reduce(lambda aggr, requirements_file: aggr | requirements_file.requirements_by_package,
                      self.requirements_files, {})


class Pip:
    """
    Allows to install Python packages via pip.
    """

    class Command(Cmd, ABC):
        """
        An abstract base class for all classes that allow to run pip on the command line.
        """

        def __init__(self, pip_command: str, *arguments: str):
            """
            :param pip_command: The pip command to be run, e.g., "install"
            :param arguments:   Optional arguments to be passed to pip
            """
            super().__init__('python', '-m', 'pip', pip_command, *arguments, '--disable-pip-version-check')

    class InstallCommand(Command):
        """
        Allows to install requirements via the command `pip install`.
        """

        def __init__(self, requirement: Requirement, dry_run: bool = False):
            """
            :param requirement: The requirement to be installed
            :param dry_run:     True, if the --dry-run flag should be set, False otherwise
            """
            super().__init__('install', str(requirement), '--upgrade', '--upgrade-strategy', 'eager',
                             '--prefer-binary')
            self.add_conditional_arguments(dry_run, '--dry-run')

    @staticmethod
    def __would_install_requirement(requirement: Requirement, stdout: str) -> bool:
        # Parses the output of `pip install --dry-run` for a "Would install" line mentioning the package.
        prefix = 'Would install'

        for line in stdout.split('\n'):
            if line.strip().startswith(prefix):
                package = Package(line[len(prefix):].strip())

                if package.normalized_name.find(requirement.package.normalized_name) >= 0:
                    return True

        return False

    @staticmethod
    def install_requirement(requirement: Requirement, dry_run: bool = False):
        """
        Installs a requirement.

        :param requirement: The requirement to be installed
        :param dry_run:     True, if a dry run should be performed first and the actual installation should only take
                            place if the dry run indicates that the package would be installed, False otherwise
        """
        try:
            stdout = Pip.InstallCommand(requirement, dry_run=dry_run) \
                .print_command(False) \
                .exit_on_error(not dry_run) \
                .capture_output()

            if Pip.__would_install_requirement(requirement, stdout):
                if dry_run:
                    Pip.InstallCommand(requirement) \
                        .print_arguments(True) \
                        .run()
                else:
                    Log.info(stdout)
        except RuntimeError:
            # A failed dry run raises a `RuntimeError` (`exit_on_error` is False in that case); retry with an actual
            # installation, which terminates the build system on failure.
            Pip.install_requirement(requirement)

    def __init__(self, *requirements_files: str):
        """
        :param requirements_files: The paths to the requirements files that specify the versions of the packages to be
                                   installed
        """
        self.requirements = RequirementsFiles(*requirements_files)

    @staticmethod
    def for_build_unit(build_unit: BuildUnit = BuildUnit.for_file(__file__)):
        """
        Creates and returns a new `Pip` instance for installing packages for a specific build unit.

        :param build_unit:  The build unit for which packages should be installed
        :return:            The `Pip` instance that has been created
        """
        return Pip(*build_unit.find_requirements_files())

    def install_packages(self, *package_names: str, accept_missing: bool = False):
        """
        Installs one or several dependencies in the requirements file.

        :param package_names:   The names of the packages that should be installed
        :param accept_missing:  False, if an error should be raised if a package is not listed in the requirements
                                file, True, if it should simply be ignored
        """
        packages = [Package(package_name) for package_name in package_names]
        requirements = self.requirements.lookup_requirements(*packages, accept_missing=accept_missing)

        for requirement in requirements:
            self.install_requirement(requirement, dry_run=True)
+ """ + + def __init__(self, build_unit: BuildUnit = BuildUnit.for_file(__file__)): + """ + :param build_unit: The build unit from which the program should be run + """ + super().__init__() + self.build_unit = build_unit + self.install_program = True + self.dependencies = set() + + def run(self, command: Command, capture_output: bool) -> CompletedProcess: + dependencies = [] + + if self.install_program: + dependencies.append(command.command) + + dependencies.extend(self.dependencies) + Pip.for_build_unit(self.build_unit).install_packages(*dependencies) + return super().run(command, capture_output) + + def __init__(self, program: str, *arguments: str): + """ + :param program: The name of the program to be run + :param arguments: Optional arguments to be passed to the program + """ + super().__init__(program, *arguments, run_options=Program.RunOptions()) + + def set_build_unit(self, build_unit: BuildUnit) -> 'Program': + """ + Sets the build unit from which the program should be run. + + :param build_unit: The build unit to be set + :return: The `Program` itself + """ + self.run_options.build_unit = build_unit + return self + + def install_program(self, install_program: bool) -> 'Program': + """ + Sets whether the program should be installed via pip before being run or not. + + :param install_program: True, if the program should be installed before being run, False otherwise + :return: The `Program` itself + """ + self.run_options.install_program = install_program + return self + + def add_dependencies(self, *dependencies: str) -> 'Program': + """ + Adds one or several Python packages that should be installed before running the program. + + :param dependencies: The names of the Python packages to be added + :return: The `Program` itself + """ + self.run_options.dependencies.update(dependencies) + return self + + +class PythonModule(Program): + """ + Allows to run a Python module. 
+ """ + + def __init__(self, module: str, *arguments: str): + """ + :param module: The name of the module to be run + :param arguments: Optional arguments to be passed to the module + """ + super().__init__('python', '-m', module, *arguments) + self.module = module + self.install_program(True) + + def install_program(self, install_program: bool) -> Program: + super().install_program(False) + + if install_program: + super().add_dependencies(self.module) + else: + self.run_options.dependencies.remove(self.module) + + return self diff --git a/CPPLINT.cfg b/cpp/.cpplint.cfg similarity index 95% rename from CPPLINT.cfg rename to cpp/.cpplint.cfg index c68d29bd44..732d9bec77 100644 --- a/CPPLINT.cfg +++ b/cpp/.cpplint.cfg @@ -1,3 +1,5 @@ +set noparent + filter=-build/include_subdir filter=-build/include_order filter=-build/include_what_you_use diff --git a/cpp/subprojects/boosting/test/.cpplint.cfg b/cpp/subprojects/boosting/test/.cpplint.cfg new file mode 100644 index 0000000000..657274cdd3 --- /dev/null +++ b/cpp/subprojects/boosting/test/.cpplint.cfg @@ -0,0 +1 @@ +filter=-build/include diff --git a/cpp/subprojects/common/test/.cpplint.cfg b/cpp/subprojects/common/test/.cpplint.cfg new file mode 100644 index 0000000000..657274cdd3 --- /dev/null +++ b/cpp/subprojects/common/test/.cpplint.cfg @@ -0,0 +1 @@ +filter=-build/include diff --git a/cpp/subprojects/seco/test/.cpplint.cfg b/cpp/subprojects/seco/test/.cpplint.cfg new file mode 100644 index 0000000000..657274cdd3 --- /dev/null +++ b/cpp/subprojects/seco/test/.cpplint.cfg @@ -0,0 +1 @@ +filter=-build/include diff --git a/doc/conf.py b/doc/conf.py index 6148a575fb..a7652d7caf 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,12 +1,13 @@ +""" +Configuration file for the Sphinx documentation builder. + +This file only contains a selection of the most common options. 
For a full list see the documentation: +https://www.sphinx-doc.org/en/master/usage/configuration.html +""" +# pylint: disable=redefined-builtin,invalid-name from os import listdir from pathlib import Path -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, @@ -40,7 +41,7 @@ ] # Favicons -favicons = [{"href": 'favicon.svg'}] +favicons = [{'href': 'favicon.svg'}] # Intersphinx configuration intersphinx_mapping = { diff --git a/doc/developer_guide/api/cpp/index.md.template b/doc/developer_guide/api/cpp/index.md.template index 72136801f6..6be8b2d613 100644 --- a/doc/developer_guide/api/cpp/index.md.template +++ b/doc/developer_guide/api/cpp/index.md.template @@ -5,7 +5,8 @@ For those who are interested in modifying the project's source code or want to use it in their own C++ code, the API documentation of the following C++ libraries provides valuable insights into the classes, functions, etc., that are available (see {ref}`project-structure`): ```{toctree} -:maxdepth: 1 - +--- +maxdepth: 1 +--- %s ``` diff --git a/doc/developer_guide/api/python/index.md.template b/doc/developer_guide/api/python/index.md.template index f6781a709f..4c15c60971 100644 --- a/doc/developer_guide/api/python/index.md.template +++ b/doc/developer_guide/api/python/index.md.template @@ -5,7 +5,8 @@ For those who want to use the algorithms provided by this project in their own Python code, we provide a documentation of all classes, functions, etc., that can be used. 
Currently, the project includes the following Python packages (see {ref}`project-structure`): ```{toctree} -:maxdepth: 1 - +--- +maxdepth: 1 +--- %s ``` diff --git a/doc/developer_guide/coding_standards.md b/doc/developer_guide/coding_standards.md index 84203ebd06..64badb0255 100644 --- a/doc/developer_guide/coding_standards.md +++ b/doc/developer_guide/coding_standards.md @@ -28,23 +28,23 @@ To be able to detect problems with the project's source code early during develo ``` ```` -This will result in all tests being run and their results being reported. If the execution should be aborted as soon as a single test fails, the environment variable `SKIP_EARLY` can be used as shown below: +This will result in all tests being run and their results being reported. If the execution should be aborted as soon as a single test fails, the environment variable `FAIL_FAST` can be used as shown below: ````{tab} Linux ```text - SKIP_EARLY=true ./build tests + FAIL_FAST=true ./build tests ``` ```` ````{tab} macOS ```text - SKIP_EARLY=true ./build tests + FAIL_FAST=true ./build tests ``` ```` ````{tab} Windows ```text - $env:SKIP_EARLY = "true" + $env:FAIL_FAST = "true" build.bat tests ``` ```` @@ -65,10 +65,10 @@ The unit and integration tests are run automatically via {ref}`Continuous Integr We aim to enforce a consistent code style across the entire project. For this purpose, we employ the following tools: -- For formatting the C++ code, we use [clang-format](https://clang.llvm.org/docs/ClangFormat.html). The desired C++ code style is defined in the file `.clang-format` in the project's root directory. In addition, [cpplint](https://github.com/cpplint/cpplint) is used for static code analysis. It uses the configuration file `CPPLINT.cfg`. -- We use [YAPF](https://github.com/google/yapf) to enforce the Python code style defined in the file `.style.yapf`. 
 In addition, [isort](https://github.com/PyCQA/isort) is used to keep the ordering of imports in Python and Cython source files consistent according to the configuration file `.isort.cfg` and [pylint](https://pylint.org/) is used to check for common issues in the Python code according to the configuration file `.pylintrc`. +- For formatting the C++ code, we use [clang-format](https://clang.llvm.org/docs/ClangFormat.html). The desired C++ code style is defined in the file `build_system/targets/code_style/cpp/.clang-format`. In addition, [cpplint](https://github.com/cpplint/cpplint) is used for static code analysis. It is configured according to the `.cpplint.cfg` files located in the directory `cpp` and its subdirectories. +- We use [YAPF](https://github.com/google/yapf) to enforce the Python code style defined in the file `build_system/targets/code_style/python/.style.yapf`. In addition, [isort](https://github.com/PyCQA/isort) is used to keep the ordering of imports in Python and Cython source files consistent according to the configuration file `build_system/targets/code_style/python/.isort.cfg` and [pylint](https://pylint.org/) is used to check for common issues in the Python code according to the configuration file `build_system/targets/code_style/python/.pylintrc`. - For applying a consistent style to Markdown files, including those used for writing the documentation, we use [mdformat](https://github.com/executablebooks/mdformat). +- We apply [yamlfix](https://github.com/lyz-code/yamlfix) to YAML files to enforce the code style defined in the file `build_system/targets/code_style/yaml/.yamlfix.toml`.
If you have modified the project's source code, you can check whether it adheres to our coding standards via the following command: diff --git a/doc/developer_guide/compilation.md b/doc/developer_guide/compilation.md index d70bbdd27f..f0ab54e6d2 100644 --- a/doc/developer_guide/compilation.md +++ b/doc/developer_guide/compilation.md @@ -4,7 +4,7 @@ As discussed in the previous section {ref}`project-structure`, the algorithms that are provided by this project are implemented in [C++](https://en.wikipedia.org/wiki/C%2B%2B) to ensure maximum efficiency (requires C++ 20 or newer). In addition, a [Python]() wrapper that integrates with the [scikit-learn](https://scikit-learn.org) framework is provided (requires Python 3.10 or newer). To make the underlying C++ implementation accessible from within the Python code, [Cython](https://en.wikipedia.org/wiki/Cython) is used (requires Cython 3.0 or newer). -Unlike pure Python programs, the C++ and Cython source files must be compiled for a particular target platform. To ease the process of compiling the source code, the project comes with a [SCons](https://scons.org/) build that automates the necessary steps. In the following, we discuss the individual steps that are necessary for building the project from scratch. This is necessary if you intend to modify the library's source code. If you want to use the algorithm without any custom modifications, the {ref}`installation ` of pre-built packages is usually a better choice. +Unlike pure Python programs, the C++ and Cython source files must be compiled for a particular target platform. To ease the process of compiling the source code, the project comes with a build system that automates the necessary steps. In the following, we discuss the individual steps that are necessary for building the project from scratch. This is necessary if you intend to modify the library's source code. 
If you want to use the algorithm without any custom modifications, the {ref}`installation ` of pre-built packages is usually a better choice. ## Prerequisites diff --git a/scons/changelog.py b/scons/changelog.py deleted file mode 100644 index 8c7a5ab1b7..0000000000 --- a/scons/changelog.py +++ /dev/null @@ -1,370 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for validating and updating the project's changelog. -""" -import sys - -from dataclasses import dataclass, field -from datetime import date -from enum import Enum, auto -from os.path import isfile -from typing import List, Optional - -from versioning import Version, get_current_version - -PREFIX_HEADER = '# ' - -PREFIX_SUB_HEADER = '## ' - -PREFIX_SUB_SUB_HEADER = '### ' - -PREFIX_DASH = '- ' - -PREFIX_ASTERISK = '* ' - -URL_DOCUMENTATION = 'https://mlrl-boomer.readthedocs.io/en/' - -CHANGELOG_FILE_MAIN = '.changelog-main.md' - -CHANGELOG_FILE_FEATURE = '.changelog-feature.md' - -CHANGELOG_FILE_BUGFIX = '.changelog-bugfix.md' - -CHANGELOG_FILE = 'CHANGELOG.md' - -CHANGELOG_ENCODING = 'utf-8' - - -class LineType(Enum): - """ - Represents different types of lines that may occur in a changelog. - """ - BLANK = auto() - HEADER = auto() - ENUMERATION = auto() - - @staticmethod - def parse(line: str) -> Optional['LineType']: - """ - Parses a given line and returns its type. - - :return: The type of the given line or None, if the line is invalid - """ - if not line or line.isspace(): - return LineType.BLANK - if line.startswith(PREFIX_HEADER): - return LineType.HEADER - if line.startswith(PREFIX_DASH) or line.startswith(PREFIX_ASTERISK): - return LineType.ENUMERATION - return None - - -@dataclass -class Line: - """ - A single line in a changelog. 
- - Attributes: - line_number: The line number, starting at 1 - line_type: The type of the line - line: The original content of the line - content: The content of the line with Markdown keywords being stripped away - """ - line_number: int - line_type: LineType - line: str - content: str - - -@dataclass -class Changeset: - """ - A changeset, consisting of a header and textual descriptions of several changes. - - Attributes: - header: The header of the changeset - changes: A list that stores the textual descriptions of the changes - """ - header: str - changes: List[str] = field(default_factory=list) - - def __str__(self) -> str: - changeset = PREFIX_SUB_SUB_HEADER + self.header + '\n\n' - - for content in self.changes: - changeset += PREFIX_DASH + content + '\n' - - return changeset - - -class ReleaseType(Enum): - """ - Represents the type of a release. - """ - MAJOR = 'major' - MINOR = 'feature' - PATCH = 'bugfix' - - -@dataclass -class Release: - """ - A release, consisting of a version, a release date, a type, and several changesets. - - Attributes: - version: The version - release_date: The release date - release_type: The type of the release - changesets: A list that stores the changesets - """ - version: Version - release_date: date - release_type: ReleaseType - changesets: List[Changeset] = field(default_factory=list) - - @staticmethod - def __format_release_month(month: int) -> str: - return ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][month - 1] - - @staticmethod - def __format_release_day(day: int) -> str: - if 11 <= (day % 100) <= 13: - suffix = 'th' - else: - suffix = ['th', 'st', 'nd', 'rd', 'th'][min(day % 10, 4)] - - return str(day) + suffix - - def __format_release_date(self) -> str: - return self.__format_release_month(self.release_date.month) + '. 
' + self.__format_release_day( - self.release_date.day) + ', ' + str(self.release_date.year) - - def __format_disclaimer(self) -> str: - if [changeset for changeset in self.changesets if changeset.header.lower() == 'api changes']: - return ('```{warning}\nThis release comes with API changes. For an updated overview of the available ' - + 'parameters and command line arguments, please refer to the ' + '[documentation](' - + URL_DOCUMENTATION + str(self.version) + ').\n```\n\n') - return '' - - def __str__(self) -> str: - release = PREFIX_SUB_HEADER + 'Version ' + str(self.version) + ' (' + self.__format_release_date() + ')\n\n' - release += 'A ' + self.release_type.value + ' release that comes with the following changes.\n\n' - release += self.__format_disclaimer() - - for i, changeset in enumerate(self.changesets): - release += str(changeset) + ('\n' if i < len(self.changesets) else '\n\n') - - return release - - -def __read_lines(changelog_file: str, skip_if_missing: bool = False) -> List[str]: - if skip_if_missing and not isfile(changelog_file): - return [] - - with open(changelog_file, mode='r', encoding=CHANGELOG_ENCODING) as file: - return file.readlines() - - -def __write_lines(changelog_file: str, lines: List[str]): - with open(changelog_file, mode='w', encoding=CHANGELOG_ENCODING) as file: - file.writelines(lines) - - -def __parse_line(changelog_file: str, line_number: int, line: str) -> Line: - line = line.strip('\n') - line_type = LineType.parse(line) - - if not line_type: - print('Line ' + str(line_number) + ' of file "' + changelog_file - + '" is invalid: Must be blank, a top-level header (starting with "' + PREFIX_HEADER - + '"), or an enumeration (starting with "' + PREFIX_DASH + '" or "' + PREFIX_ASTERISK + '"), but is "' - + line + '"') - sys.exit(-1) - - content = line - - if line_type != LineType.BLANK: - content = line.lstrip(PREFIX_HEADER).lstrip(PREFIX_DASH).lstrip(PREFIX_ASTERISK) - - if not content or content.isspace(): - print('Line ' + 
str(line_number) + ' of file "' + changelog_file - + '" is is invalid: Content must not be blank, but is "' + line + '"') - sys.exit(-1) - - return Line(line_number=line_number, line_type=line_type, line=line, content=content) - - -def __validate_line(changelog_file: str, current_line: Optional[Line], previous_line: Optional[Line]): - current_line_is_enumeration = current_line and current_line.line_type == LineType.ENUMERATION - - if current_line_is_enumeration and not previous_line: - print('File "' + changelog_file + '" must start with a top-level header (starting with "' + PREFIX_HEADER - + '")') - sys.exit(-1) - - current_line_is_header = current_line and current_line.line_type == LineType.HEADER - previous_line_is_header = previous_line and previous_line.line_type == LineType.HEADER - - if (current_line_is_header and previous_line_is_header) or (not current_line and previous_line_is_header): - print('Header "' + previous_line.line + '" at line ' + str(previous_line.line_number) + ' of file "' - + changelog_file + '" is not followed by any content') - sys.exit(-1) - - -def __parse_lines(changelog_file: str, lines: List[str]) -> List[Line]: - previous_line = None - parsed_lines = [] - - for i, line in enumerate(lines): - current_line = __parse_line(changelog_file=changelog_file, line_number=(i + 1), line=line) - - if current_line.line_type != LineType.BLANK: - __validate_line(changelog_file=changelog_file, current_line=current_line, previous_line=previous_line) - previous_line = current_line - parsed_lines.append(current_line) - - __validate_line(changelog_file=changelog_file, current_line=None, previous_line=previous_line) - return parsed_lines - - -def __parse_changesets(changelog_file: str, skip_if_missing: bool = False) -> List[Changeset]: - changesets = [] - lines = __parse_lines(changelog_file, __read_lines(changelog_file, skip_if_missing=skip_if_missing)) - - for line in lines: - if line.line_type == LineType.HEADER: - 
changesets.append(Changeset(header=line.content)) - elif line.line_type == LineType.ENUMERATION: - current_changeset = changesets[-1] - current_changeset.changes.append(line.content) - - return changesets - - -def __validate_changelog(changelog_file: str): - print('Validating changelog file "' + changelog_file + '"...') - __parse_changesets(changelog_file, skip_if_missing=True) - - -def __merge_changesets(*changelog_files) -> List[Changeset]: - changesets_by_header = {} - - for changelog_file in changelog_files: - for changeset in __parse_changesets(changelog_file): - merged_changeset = changesets_by_header.setdefault(changeset.header.lower(), changeset) - - if merged_changeset != changeset: - merged_changeset.changes.extend(changeset.changes) - - return list(changesets_by_header.values()) - - -def __create_release(release_type: ReleaseType, *changelog_files) -> Release: - return Release(version=get_current_version(), - release_date=date.today(), - release_type=release_type, - changesets=__merge_changesets(*changelog_files)) - - -def __add_release_to_changelog(changelog_file: str, new_release: Release): - formatted_release = str(new_release) - print('Adding new release to changelog file "' + changelog_file + '":\n\n' + formatted_release) - original_lines = __read_lines(changelog_file) - modified_lines = [] - offset = 0 - - for offset, line in enumerate(original_lines): - if line.startswith(PREFIX_SUB_HEADER): - break - - modified_lines.append(line) - - modified_lines.append(formatted_release) - modified_lines.extend(original_lines[offset:]) - __write_lines(changelog_file, modified_lines) - - -def __clear_changelogs(*changelog_files): - for changelog_file in changelog_files: - print('Clearing changelog file "' + changelog_file + '"...') - __write_lines(changelog_file, ['']) - - -def __update_changelog(release_type: ReleaseType, *changelog_files): - new_release = __create_release(release_type, *changelog_files) - __add_release_to_changelog(CHANGELOG_FILE, 
new_release) - __clear_changelogs(*changelog_files) - - -def __get_latest_changelog() -> str: - changelog = '' - lines = __read_lines(CHANGELOG_FILE) - offset = 0 - - for offset, line in enumerate(lines): - if line.startswith(PREFIX_SUB_HEADER): - break - - for line in lines[offset + 2:]: - if line.startswith(PREFIX_SUB_HEADER): - break - - if line.startswith('```{'): - changelog += '***' - elif line.startswith('```'): - changelog = changelog.rstrip('\n') - changelog += '***\n' - else: - changelog += line - - return changelog.rstrip('\n') - - -def validate_changelog_bugfix(**_): - """ - Validates the changelog file that lists bugfixes. - """ - __validate_changelog(CHANGELOG_FILE_BUGFIX) - - -def validate_changelog_feature(**_): - """ - Validates the changelog file that lists new features. - """ - __validate_changelog(CHANGELOG_FILE_FEATURE) - - -def validate_changelog_main(**_): - """ - Validates the changelog file that lists major updates. - """ - __validate_changelog(CHANGELOG_FILE_MAIN) - - -def update_changelog_main(**_): - """ - Updates the projects changelog when releasing bugfixes. - """ - __update_changelog(ReleaseType.MAJOR, CHANGELOG_FILE_MAIN, CHANGELOG_FILE_FEATURE, CHANGELOG_FILE_BUGFIX) - - -def update_changelog_feature(**_): - """ - Updates the project's changelog when releasing new features. - """ - __update_changelog(ReleaseType.MINOR, CHANGELOG_FILE_FEATURE, CHANGELOG_FILE_BUGFIX) - - -def update_changelog_bugfix(**_): - """ - Updates the project's changelog when releasing major updates. - """ - __update_changelog(ReleaseType.PATCH, CHANGELOG_FILE_BUGFIX) - - -def print_latest_changelog(**_): - """ - Prints the changelog of the latest release. 
- """ - print(__get_latest_changelog()) diff --git a/scons/code_style.py b/scons/code_style.py deleted file mode 100644 index 769336f43e..0000000000 --- a/scons/code_style.py +++ /dev/null @@ -1,158 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for checking and enforcing code style definitions. -""" -from glob import glob -from os import path - -from modules import BUILD_MODULE, CPP_MODULE, DOC_MODULE, PYTHON_MODULE -from run import run_program - -MD_DIRS = [('.', False), (DOC_MODULE.root_dir, True), (PYTHON_MODULE.root_dir, True)] - -YAML_DIRS = [('.', False), ('.github', True)] - - -def __isort(directory: str, enforce_changes: bool = False): - args = ['--settings-path', '.', '--virtual-env', 'venv', '--skip-gitignore'] - - if not enforce_changes: - args.append('--check') - - run_program('isort', *args, directory) - - -def __yapf(directory: str, enforce_changes: bool = False): - run_program('yapf', '-r', '-p', '--style=.style.yapf', '--exclude', '**/build/*.py', - '-i' if enforce_changes else '--diff', directory) - - -def __pylint(directory: str): - run_program('pylint', '--jobs=0', '--recursive=y', '--ignore=build', '--rcfile=.pylintrc', '--score=n', directory) - - -def __clang_format(directory: str, enforce_changes: bool = False): - cpp_header_files = glob(path.join(directory, '**', '*.hpp'), recursive=True) - cpp_source_files = glob(path.join(directory, '**', '*.cpp'), recursive=True) - args = ['--style=file'] - - if enforce_changes: - args.append('-i') - else: - args.append('-n') - args.append('--Werror') - - run_program('clang-format', *args, *cpp_header_files, *cpp_source_files) - - -def __cpplint(directory: str): - run_program('cpplint', '--quiet', '--recursive', directory) - - -def __mdformat(directory: str, recursive: bool = False, enforce_changes: bool = False): - suffix_md = '*.md' - glob_path = path.join(directory, '**', '**', suffix_md) if recursive else path.join(directory, suffix_md) - md_files = 
glob(glob_path, recursive=recursive) - args = ['--number', '--wrap', 'no', '--end-of-line', 'lf'] - - if not enforce_changes: - args.append('--check') - - run_program('mdformat', *args, *md_files, additional_dependencies=['mdformat-myst']) - - -def __yamlfix(directory: str, recursive: bool = False, enforce_changes: bool = False): - glob_path = path.join(directory, '**', '*') if recursive else path.join(directory, '*') - glob_path_hidden = path.join(directory, '**', '.*') if recursive else path.join(directory, '.*') - yaml_files = [ - file for file in glob(glob_path) + glob(glob_path_hidden) - if path.basename(file).endswith('.yml') or path.basename(file).endswith('.yaml') - ] - args = ['--config-file', '.yamlfix.toml'] - - if not enforce_changes: - args.append('--check') - - run_program('yamlfix', *args, *yaml_files, print_args=True) - - -def check_python_code_style(**_): - """ - Checks if the Python source files adhere to the code style definitions. If this is not the case, an error is raised. - """ - for module in [BUILD_MODULE, PYTHON_MODULE]: - directory = module.root_dir - print('Checking Python code style in directory "' + directory + '"...') - __isort(directory) - __yapf(directory) - __pylint(directory) - - -def enforce_python_code_style(**_): - """ - Enforces the Python source files to adhere to the code style definitions. - """ - for module in [BUILD_MODULE, PYTHON_MODULE, DOC_MODULE]: - directory = module.root_dir - print('Formatting Python code in directory "' + directory + '"...') - __isort(directory, enforce_changes=True) - __yapf(directory, enforce_changes=True) - - -def check_cpp_code_style(**_): - """ - Checks if the C++ source files adhere to the code style definitions. If this is not the case, an error is raised. 
- """ - root_dir = CPP_MODULE.root_dir - print('Checking C++ code style in directory "' + root_dir + '"...') - __clang_format(root_dir) - - for subproject in CPP_MODULE.find_subprojects(): - for directory in [subproject.include_dir, subproject.src_dir]: - __cpplint(directory) - - -def enforce_cpp_code_style(**_): - """ - Enforces the C++ source files to adhere to the code style definitions. - """ - root_dir = CPP_MODULE.root_dir - print('Formatting C++ code in directory "' + root_dir + '"...') - __clang_format(root_dir, enforce_changes=True) - - -def check_md_code_style(**_): - """ - Checks if the Markdown files adhere to the code style definitions. If this is not the case, an error is raised. - """ - for directory, recursive in MD_DIRS: - print('Checking Markdown code style in the directory "' + directory + '"...') - __mdformat(directory, recursive=recursive) - - -def enforce_md_code_style(**_): - """ - Enforces the Markdown files to adhere to the code style definitions. - """ - for directory, recursive in MD_DIRS: - print('Formatting Markdown files in the directory "' + directory + '"...') - __mdformat(directory, recursive=recursive, enforce_changes=True) - - -def check_yaml_code_style(**_): - """ - Checks if the YAML files adhere to the code style definitions. If this is not the case, an error is raised. - """ - for directory, recursive in YAML_DIRS: - print('Checking YAML files in the directory "' + directory + '"...') - __yamlfix(directory, recursive=recursive) - - -def enforce_yaml_code_style(**_): - """ - Enforces the YAML files to adhere to the code style definitions. 
- """ - for directory, recursive in YAML_DIRS: - print('Formatting YAML files in the directory "' + directory + '"...') - __yamlfix(directory, recursive=recursive, enforce_changes=True) diff --git a/scons/command_line.py b/scons/command_line.py deleted file mode 100644 index 257eb3df7b..0000000000 --- a/scons/command_line.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for running command line programs during the build process. -""" -import subprocess -import sys - -from functools import reduce -from os import path - - -def __format_command(cmd: str, *args, format_args: bool = True) -> str: - return cmd + (reduce(lambda aggr, argument: aggr + ' ' + argument, args, '') if format_args else '') - - -def __is_virtual_environment() -> bool: - return sys.prefix != sys.base_prefix - - -def __get_qualified_command(cmd: str) -> str: - if __is_virtual_environment(): - # On Windows, we use the relative path to the command's executable within the virtual environment, if such an - # executable exists. This circumvents situations where the PATH environment variable has not been updated after - # activating the virtual environment. This can prevent the executables from being found or can lead to the wrong - # executable, from outside the virtual environment, being executed. - executable = path.join(sys.prefix, 'Scripts', cmd + '.exe') - - if path.isfile(executable): - return executable - - return cmd - - -def run_command(cmd: str, - *args, - print_cmd: bool = True, - print_args: bool = False, - capture_output: bool = False, - exit_on_error: bool = True, - env=None): - """ - Runs a command line program. 
- - :param cmd: The name of the program to be run - :param args: Optional arguments that should be passed to the program - :param print_cmd: True, if the name of the program should be included in log statements, False otherwise - :param print_args: True, if the arguments should be included in log statements, False otherwise - :param capture_output: True, if the output of the program should be captured and returned, False otherwise - :param exit_on_error: True, if the build system should be terminated when an error occurs, False otherwise - :param env: The environment variables to be passed to the program - """ - cmd = __get_qualified_command(cmd) - - if print_cmd: - print('Running external command "' + __format_command(cmd, *args, format_args=print_args) + '"...') - - out = subprocess.run([cmd] + list(args), check=False, text=capture_output, capture_output=capture_output, env=env) - exit_code = out.returncode - - if exit_code != 0: - message = ('External command "' + __format_command(cmd, *args) + '" terminated with non-zero exit code ' - + str(exit_code)) - - if exit_on_error: - if capture_output: - print(str(out.stderr).strip()) - - print(message) - sys.exit(exit_code) - else: - raise RuntimeError(message) - - return out diff --git a/scons/compilation.py b/scons/compilation.py deleted file mode 100644 index 1651e3de9e..0000000000 --- a/scons/compilation.py +++ /dev/null @@ -1,166 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for compiling C++ and Cython code. -""" -from os import environ -from typing import List, Optional - -from environment import get_env -from modules import CPP_MODULE, PYTHON_MODULE -from run import run_program - - -class BuildOptions: - """ - Allows to obtain build options from environment variables. - """ - - class BuildOption: - """ - A single build option. 
- """ - - def __init__(self, name: str, subpackage: Optional[str] = None): - """ - :param name: The name of the build option - :param subpackage: The subpackage, the build option corresponds to, or None, if it is a global option - """ - self.name = name - self.subpackage = subpackage - - @property - def key(self) -> str: - """ - The key to be used for setting the build option. - """ - return (self.subpackage + ':' if self.subpackage else '') + self.name - - @property - def value(self) -> Optional[str]: - """ - Returns the value to be set for the build option. - - :return: The value to be set or None, if no value should be set - """ - value = get_env(environ, self.name.upper(), None) - - if value: - value = value.strip() - - return value - - def __init__(self): - self.build_options = [] - - def add(self, name: str, subpackage: Optional[str] = None) -> 'BuildOptions': - """ - Adds a build option. - - :param name: The name of the build option - :param subpackage: The subpackage, the build option corresponds to, or None, if it is a global option - :return: The `BuildOptions` itself - """ - self.build_options.append(BuildOptions.BuildOption(name=name, subpackage=subpackage)) - return self - - def to_args(self) -> List[str]: - """ - Returns a list of arguments to be passed to the command "meson configure" for setting the build options. 
- - :return: A list of arguments - """ - args = [] - - for build_option in self.build_options: - value = build_option.value - - if value: - args.append('-D') - args.append(build_option.key + '=' + value) - - return args - - -CPP_BUILD_OPTIONS = BuildOptions() \ - .add(name='subprojects') \ - .add(name='test_support', subpackage='common') \ - .add(name='multi_threading_support', subpackage='common') \ - .add(name='gpu_support', subpackage='common') - - -CYTHON_BUILD_OPTIONS = BuildOptions() \ - .add(name='subprojects') - - -def __meson_setup(root_dir: str, - build_dir: str, - build_options: BuildOptions = BuildOptions(), - dependencies: Optional[List[str]] = None): - print('Setting up build directory "' + build_dir + '"...') - args = build_options.to_args() - run_program('meson', 'setup', *args, build_dir, root_dir, print_args=True, additional_dependencies=dependencies) - - -def __meson_configure(build_dir: str, build_options: BuildOptions): - args = build_options.to_args() - - if args: - print('Configuring build options according to environment variables...') - run_program('meson', 'configure', *args, build_dir, print_args=True) - - -def __meson_compile(build_dir: str): - run_program('meson', 'compile', '-C', build_dir, print_args=True) - - -def __meson_install(build_dir: str): - run_program('meson', 'install', '--no-rebuild', '--only-changed', '-C', build_dir, print_args=True) - - -def setup_cpp(**_): - """ - Sets up the build system for compiling the C++ code. - """ - __meson_setup(CPP_MODULE.root_dir, CPP_MODULE.build_dir, CPP_BUILD_OPTIONS, dependencies=['ninja']) - - -def compile_cpp(**_): - """ - Compiles the C++ code. - """ - __meson_configure(CPP_MODULE.build_dir, CPP_BUILD_OPTIONS) - print('Compiling C++ code...') - __meson_compile(CPP_MODULE.build_dir) - - -def install_cpp(**_): - """ - Installs shared libraries into the source tree. 
- """ - print('Installing shared libraries into source tree...') - __meson_install(CPP_MODULE.build_dir) - - -def setup_cython(**_): - """ - Sets up the build system for compiling the Cython code. - """ - __meson_setup(PYTHON_MODULE.root_dir, PYTHON_MODULE.build_dir, CYTHON_BUILD_OPTIONS, dependencies=['cython']) - - -def compile_cython(**_): - """ - Compiles the Cython code. - """ - __meson_configure(PYTHON_MODULE.build_dir, CYTHON_BUILD_OPTIONS) - print('Compiling Cython code...') - __meson_compile(PYTHON_MODULE.build_dir) - - -def install_cython(**_): - """ - Installs extension modules into the source tree. - """ - print('Installing extension modules into source tree...') - __meson_install(PYTHON_MODULE.build_dir) diff --git a/scons/dependencies.py b/scons/dependencies.py deleted file mode 100644 index 36b1f6b359..0000000000 --- a/scons/dependencies.py +++ /dev/null @@ -1,170 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for dealing with dependencies. -""" - -from dataclasses import dataclass -from functools import reduce -from os import path -from typing import List, Optional - -from command_line import run_command -from modules import ALL_MODULES, BUILD_MODULE, CPP_MODULE, PYTHON_MODULE, Module - - -@dataclass -class Requirement: - """ - Specifies the supported version(s) of a specific dependency. 
- - Attributes: - dependency: The name of the dependency - version: The supported version(s) of the dependency or None, if there are no restrictions - """ - dependency: str - version: Optional[str] = None - - def __str__(self): - return self.dependency + (self.version if self.version else '') - - -def __run_pip_command(*args, **kwargs): - return run_command('python', '-m', 'pip', *args, **kwargs) - - -def __run_pip_install_command(requirement: Requirement, *args, **kwargs): - return __run_pip_command('install', str(requirement), '--upgrade', '--upgrade-strategy', 'eager', '--prefer-binary', - '--disable-pip-version-check', *args, **kwargs) - - -def __pip_install(requirement: Requirement, dry_run: bool = False): - try: - args = ['--dry-run'] if dry_run else [] - out = __run_pip_install_command(requirement, - *args, - print_cmd=False, - capture_output=True, - exit_on_error=not dry_run) - stdout = str(out.stdout).strip() - stdout_lines = stdout.split('\n') - - if reduce( - lambda aggr, line: aggr | line.startswith('Would install') and __normalize_dependency(line).find( - requirement.dependency) >= 0, stdout_lines, False): - if dry_run: - __run_pip_install_command(requirement, print_args=True) - else: - print(stdout) - except RuntimeError: - __pip_install(requirement) - - -def __normalize_dependency(dependency: str): - return dependency.replace('_', '-').lower() - - -def __find_requirements(requirements_file: str, *dependencies: str, raise_error: bool = True) -> List[Requirement]: - with open(requirements_file, mode='r', encoding='utf-8') as file: - lines = [line.split(' ') for line in file.readlines()] - requirements = [ - Requirement(dependency=__normalize_dependency(parts[0].strip()), - version=' '.join(parts[1:]).strip() if len(parts) > 1 else None) for parts in lines - ] - requirements = {requirement.dependency: requirement for requirement in requirements} - - if dependencies: - found_requirements = [] - - for dependency in dependencies: - if 
__normalize_dependency(dependency) in requirements: - found_requirements.append(requirements[dependency]) - elif raise_error: - raise RuntimeError('Dependency "' + dependency + '" not found in requirements file "' - + requirements_file + '"') - - return found_requirements - - return list(requirements.values()) - - -def __install_module_dependencies(module: Module, *dependencies: str): - requirements_file = module.requirements_file - - if path.isfile(requirements_file): - install_dependencies(requirements_file, *dependencies) - - -def install_dependencies(requirements_file: str, *dependencies: str): - """ - Installs one or several dependencies if they are listed in a given requirements.txt file. - - :param requirements_file: The path of the requirements.txt file that specifies the dependency versions - :param dependencies: The names of the dependencies that should be installed - """ - for requirement in __find_requirements(requirements_file, *dependencies): - __pip_install(requirement, dry_run=True) - - -def install_build_dependencies(*dependencies: str): - """ - Installs one or several dependencies that are required by the build system. - - :param dependencies: The names of the dependencies that should be installed - """ - __install_module_dependencies(BUILD_MODULE, *dependencies) - - -def install_runtime_dependencies(**_): - """ - Installs all runtime dependencies that are required by the Python and C++ module. - """ - __install_module_dependencies(PYTHON_MODULE) - __install_module_dependencies(CPP_MODULE) - - -def check_dependency_versions(**_): - """ - Installs all dependencies used by the project and checks for outdated dependencies. 
- """ - print('Installing all dependencies...') - for module in ALL_MODULES: - __install_module_dependencies(module) - - print('Checking for outdated dependencies...') - out = __run_pip_command('list', '--outdated', print_cmd=False, capture_output=True) - stdout = str(out.stdout).strip() - stdout_lines = stdout.split('\n') - i = 0 - - for line in stdout_lines: - i += 1 - - if line.startswith('----'): - break - - outdated_dependencies = [] - - for line in stdout_lines[i:]: - dependency = __normalize_dependency(line.split()[0]) - - for module in ALL_MODULES: - requirements_file = module.requirements_file - - if path.isfile(requirements_file): - requirements = __find_requirements(requirements_file, dependency, raise_error=False) - - if requirements and requirements[0].version: - outdated_dependencies.append(line) - break - - if outdated_dependencies: - print('The following dependencies are outdated:\n') - - for header_line in stdout_lines[:i]: - print(header_line) - - for outdated_dependency in outdated_dependencies: - print(outdated_dependency) - else: - print('All dependencies are up-to-date!') diff --git a/scons/documentation.py b/scons/documentation.py deleted file mode 100644 index d2bca9f950..0000000000 --- a/scons/documentation.py +++ /dev/null @@ -1,181 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for generating the documentation. 
-""" -from os import environ, makedirs, path, remove -from typing import List - -from environment import set_env -from modules import CPP_MODULE, DOC_MODULE, PYTHON_MODULE -from run import run_program - - -def __doxygen(project_name: str, input_dir: str, output_dir: str): - makedirs(output_dir, exist_ok=True) - env = environ.copy() - set_env(env, 'DOXYGEN_PROJECT_NAME', 'libmlrl' + project_name) - set_env(env, 'DOXYGEN_INPUT_DIR', input_dir) - set_env(env, 'DOXYGEN_OUTPUT_DIR', output_dir) - set_env(env, 'DOXYGEN_PREDEFINED', 'MLRL' + project_name.upper() + '_API=') - run_program('doxygen', DOC_MODULE.doxygen_config_file, print_args=True, install_program=False, env=env) - - -def __breathe_apidoc(source_dir: str, output_dir: str, project: str): - run_program('breathe-apidoc', - '--members', - '--project', - project, - '-g', - 'file', - '-o', - output_dir, - source_dir, - print_args=True, - additional_dependencies=['breathe'], - requirements_file=DOC_MODULE.requirements_file, - install_program=False) - - -def __sphinx_apidoc(source_dir: str, output_dir: str): - run_program('sphinx-apidoc', - '--separate', - '--module-first', - '--no-toc', - '-o', - output_dir, - source_dir, - '*.so*', - print_args=True, - additional_dependencies=['sphinx'], - requirements_file=DOC_MODULE.requirements_file, - install_program=False) - - root_rst_file = path.join(output_dir, 'mlrl.rst') - - if path.isfile(root_rst_file): - remove(root_rst_file) - - -def __sphinx_build(source_dir: str, output_dir: str): - run_program('sphinx-build', - '--jobs', - 'auto', - source_dir, - output_dir, - print_args=True, - additional_dependencies=[ - 'furo', - 'myst-parser', - 'sphinxext-opengraph', - 'sphinx-inline-tabs', - 'sphinx-copybutton', - 'sphinx-favicon', - ], - requirements_file=DOC_MODULE.requirements_file, - install_program=False) - - -def __read_tocfile_template(directory: str) -> List[str]: - with open(path.join(directory, 'index.md.template'), mode='r', encoding='utf-8') as file: - return 
file.readlines() - - -def __write_tocfile(directory: str, tocfile_entries: List[str]): - tocfile_template = __read_tocfile_template(directory) - tocfile = [] - - for line in tocfile_template: - if line.strip() == '%s': - tocfile.extend(tocfile_entries) - else: - tocfile.append(line) - - with open(path.join(directory, 'index.md'), mode='w', encoding='utf-8') as file: - file.writelines(tocfile) - - -# pylint: disable=unused-argument -def apidoc_cpp(env, target, source): - """ - Builds the API documentation for a single C++ subproject. - - :param env: The scons environment - :param target: The path of the files that belong to the API documentation, if it has already been built, or the - path of the directory, where the API documentation should be stored - :param source: The paths of the source files from which the API documentation should be built - """ - if target: - apidoc_subproject = DOC_MODULE.find_cpp_apidoc_subproject(target[0].path) - - if apidoc_subproject: - subproject_name = apidoc_subproject.name - print('Generating C++ API documentation for subproject "' + subproject_name + '"...') - include_dir = path.join(apidoc_subproject.source_subproject.root_dir, 'include') - build_dir = apidoc_subproject.build_dir - __doxygen(project_name=subproject_name, input_dir=include_dir, output_dir=build_dir) - __breathe_apidoc(source_dir=path.join(build_dir, 'xml'), output_dir=build_dir, project=subproject_name) - - -def apidoc_cpp_tocfile(**_): - """ - Generates a tocfile referencing the C++ API documentation for all existing subprojects. 
- """ - print('Generating tocfile referencing the C++ API documentation for all subprojects...') - tocfile_entries = [] - - for subproject in CPP_MODULE.find_subprojects(): - apidoc_subproject = DOC_MODULE.get_cpp_apidoc_subproject(subproject) - root_file = apidoc_subproject.root_file - - if path.isfile(root_file): - tocfile_entries.append('Library libmlrl' + apidoc_subproject.name + ' <' - + path.relpath(root_file, DOC_MODULE.apidoc_dir_cpp) + '>\n') - - __write_tocfile(DOC_MODULE.apidoc_dir_cpp, tocfile_entries) - - -# pylint: disable=unused-argument -def apidoc_python(env, target, source): - """ - Builds the API documentation for a single Python subproject. - - :param env: The scons environment - :param target: The path of the files that belong to the API documentation, if it has already been built, or the - path of the directory, where the API documentation should be stored - :param source: The paths of the source files from which the API documentation should be built - """ - if target: - apidoc_subproject = DOC_MODULE.find_python_apidoc_subproject(target[0].path) - - if apidoc_subproject: - print('Generating Python API documentation for subproject "' + apidoc_subproject.name + '"...') - build_dir = apidoc_subproject.build_dir - makedirs(build_dir, exist_ok=True) - __sphinx_apidoc(source_dir=apidoc_subproject.source_subproject.source_dir, output_dir=build_dir) - - -def apidoc_python_tocfile(**_): - """ - Generates a tocfile referencing the Python API documentation for all existing subprojects. 
- """ - print('Generating tocfile referencing the Python API documentation for all subprojects...') - tocfile_entries = [] - - for subproject in PYTHON_MODULE.find_subprojects(): - apidoc_subproject = DOC_MODULE.get_python_apidoc_subproject(subproject) - root_file = apidoc_subproject.root_file - - if path.isfile(root_file): - tocfile_entries.append('Package mlrl-' + apidoc_subproject.name + ' <' - + path.relpath(root_file, DOC_MODULE.apidoc_dir_python) + '>\n') - - __write_tocfile(DOC_MODULE.apidoc_dir_python, tocfile_entries) - - -def doc(**_): - """ - Builds the documentation. - """ - print('Generating documentation...') - __sphinx_build(source_dir=DOC_MODULE.root_dir, output_dir=DOC_MODULE.build_dir) diff --git a/scons/github_actions.py b/scons/github_actions.py deleted file mode 100644 index d2bd3a9814..0000000000 --- a/scons/github_actions.py +++ /dev/null @@ -1,354 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for checking the project's GitHub workflows for outdated Actions. -""" -import sys - -from dataclasses import dataclass, field -from functools import reduce -from glob import glob -from os import environ, path -from typing import List, Optional, Set - -from dependencies import install_build_dependencies -from environment import get_env - -ENV_GITHUB_TOKEN = 'GITHUB_TOKEN' - -WORKFLOW_ENCODING = 'utf-8' - - -@dataclass -class ActionVersion: - """ - The version of a GitHub Action. - - Attributes: - version: The full version string - """ - version: str - - SEPARATOR = '.' - - @staticmethod - def from_version_numbers(*version_numbers: int) -> 'ActionVersion': - """ - Creates and returns the version of a GitHub Action from one or several version numbers. 
- - :param version_numbers: The version numbers - :return: The version that has been created - """ - return ActionVersion(ActionVersion.SEPARATOR.join([str(version_number) for version_number in version_numbers])) - - @property - def version_numbers(self) -> List[int]: - """ - A list that stores the individual version numbers, the full version consists of. - """ - return [int(version_number) for version_number in str(self).split(self.SEPARATOR)] - - def __str__(self) -> str: - return self.version.lstrip('v') - - def __lt__(self, other: 'ActionVersion') -> bool: - first_version_numbers = self.version_numbers - second_version_numbers = other.version_numbers - - for i in range(min(len(first_version_numbers), len(second_version_numbers))): - first_version_number = first_version_numbers[i] - second_version_number = second_version_numbers[i] - - if first_version_number > second_version_number: - return False - if first_version_number < second_version_number: - return True - - return False - - -@dataclass -class Action: - """ - A GitHub Action. - - Attributes: - name: The name of the Action - version: The version of the Action - latest_version: The latest version of the Action, if known - """ - name: str - version: ActionVersion - latest_version: Optional[ActionVersion] = None - - SEPARATOR = '@' - - @staticmethod - def from_uses_clause(uses_clause: str) -> 'Action': - """ - Creates and returns a GitHub Action from the uses-clause of a workflow. - - :param uses_clause: The uses-clause - :return: The GitHub Action that has been created - """ - parts = uses_clause.split(Action.SEPARATOR) - - if len(parts) != 2: - raise ValueError('Uses-clause must contain the symbol + "' + Action.SEPARATOR + '", but got "' + uses_clause - + '"') - - return Action(name=parts[0], version=ActionVersion(parts[1])) - - @property - def repository(self) -> str: - """ - The name of the repository, where the GitHub Action is hosted. 
- """ - repository = self.name - separator = '/' - parts = repository.split(separator) - return separator.join(parts[:2]) if len(parts) > 2 else repository - - @property - def is_outdated(self) -> bool: - """ - True, if the GitHub Action is known to be outdated, False otherwise. - """ - return self.latest_version and self.version < self.latest_version - - def __str__(self) -> str: - return self.name + self.SEPARATOR + str(self.version) - - def __eq__(self, other: 'Action') -> bool: - return str(self) == str(other) - - def __hash__(self): - return hash(str(self)) - - -@dataclass -class Workflow: - """ - A GitHub workflow. - - Attributes: - workflow_file: The path of the workflow definition file - yaml_dict: A dictionary that stores the YAML structure of the workflow definition file - actions: A set that stores all Actions in the workflow - """ - workflow_file: str - yaml_dict: dict - actions: Set[Action] = field(default_factory=set) - - TAG_USES = 'uses' - - @property - def uses_clauses(self) -> List[str]: - """ - A list that contains all uses-clauses in the workflow. - """ - uses_clauses = [] - - for job in self.yaml_dict.get('jobs', {}).values(): - for step in job.get('steps', []): - uses_clause = step.get(self.TAG_USES, None) - - if uses_clause: - uses_clauses.append(uses_clause) - - return uses_clauses - - @property - def outdated_actions(self) -> Set[Action]: - """ - A set that stores all Actions in the workflow that are known to be outdated. 
- """ - return {action for action in self.actions if action.is_outdated} - - def __eq__(self, other: 'Workflow') -> bool: - return self.workflow_file == other.workflow_file - - def __hash__(self): - return hash(self.workflow_file) - - -def __read_workflow(workflow_file: str) -> Workflow: - install_build_dependencies('pyyaml') - # pylint: disable=import-outside-toplevel - import yaml - with open(workflow_file, mode='r', encoding=WORKFLOW_ENCODING) as file: - yaml_dict = yaml.load(file.read(), Loader=yaml.CLoader) - return Workflow(workflow_file=workflow_file, yaml_dict=yaml_dict) - - -def __read_workflow_lines(workflow_file: str) -> List[str]: - with open(workflow_file, mode='r', encoding=WORKFLOW_ENCODING) as file: - return file.readlines() - - -def __write_workflow_lines(workflow_file: str, lines: List[str]): - with open(workflow_file, mode='w', encoding=WORKFLOW_ENCODING) as file: - file.writelines(lines) - - -def __update_workflow(workflow_file: str, *updated_actions: Action): - updated_actions_by_name = reduce(lambda aggr, x: dict(aggr, **{x.name: x}), updated_actions, {}) - lines = __read_workflow_lines(workflow_file) - uses_prefix = Workflow.TAG_USES + ':' - updated_lines = [] - - for line in lines: - updated_lines.append(line) - line_stripped = line.strip() - - if line_stripped.startswith(uses_prefix): - uses_clause = line_stripped[len(uses_prefix):].strip() - action = Action.from_uses_clause(uses_clause) - updated_action = updated_actions_by_name.get(action.name) - - if updated_action: - updated_lines[-1] = line.replace(str(action.version), str(updated_action.version)) - - __write_workflow_lines(workflow_file, updated_lines) - - -def __parse_workflow(workflow_file: str) -> Workflow: - print('Searching for GitHub Actions in workflow "' + workflow_file + '"...') - workflow = __read_workflow(workflow_file) - - for uses_clause in workflow.uses_clauses: - try: - workflow.actions.add(Action.from_uses_clause(uses_clause)) - except ValueError as error: - 
print('Failed to parse uses-clause in workflow "' + workflow_file + '": ' + str(error)) - sys.exit(-1) - - return workflow - - -def __parse_workflows(*workflow_files: str) -> Set[Workflow]: - return {__parse_workflow(workflow_file) for workflow_file in workflow_files} - - -def __query_latest_action_version(action: Action, github_token: Optional[str] = None) -> Optional[ActionVersion]: - repository_name = action.repository - install_build_dependencies('pygithub') - # pylint: disable=import-outside-toplevel - from github import Auth, Github, UnknownObjectException - - try: - github_auth = Auth.Token(github_token) if github_token else None - github_client = Github(auth=github_auth) - github_repository = github_client.get_repo(repository_name) - latest_release = github_repository.get_latest_release() - latest_tag = latest_release.tag_name - return ActionVersion(latest_tag) - except UnknownObjectException as error: - print('Query to GitHub API failed for action "' + str(action) + '" hosted in repository "' + repository_name - + '": ' + str(error)) - sys.exit(-1) - - -def __get_github_token() -> Optional[str]: - github_token = get_env(environ, ENV_GITHUB_TOKEN) - - if not github_token: - print('No GitHub API token is set. 
You can specify it via the environment variable ' + ENV_GITHUB_TOKEN + '.') - - return github_token - - -def __determine_latest_action_versions(*workflows: Workflow) -> Set[Workflow]: - github_token = __get_github_token() - version_cache = {} - - for workflow in workflows: - for action in workflow.actions: - latest_version = version_cache.get(action.name) - - if not latest_version: - print('Checking version of GitHub Action "' + action.name + '"...') - latest_version = __query_latest_action_version(action, github_token=github_token) - version_cache[action.name] = latest_version - - action.latest_version = latest_version - - return set(workflows) - - -def __parse_all_workflows() -> Set[Workflow]: - workflow_directory = path.join('.github', 'workflows') - workflow_files = glob(path.join(workflow_directory, '*.y*ml')) - return __determine_latest_action_versions(*__parse_workflows(*workflow_files)) - - -def __print_table(header: List[str], rows: List[List[str]]): - install_build_dependencies('tabulate') - # pylint: disable=import-outside-toplevel - from tabulate import tabulate - print(tabulate(rows, headers=header)) - - -def __print_outdated_actions(*workflows: Workflow): - rows = [] - - for workflow in workflows: - for action in workflow.outdated_actions: - rows.append([workflow.workflow_file, str(action.name), str(action.version), str(action.latest_version)]) - - if rows: - rows.sort(key=lambda row: (row[0], row[1])) - header = ['Workflow', 'Action', 'Current version', 'Latest version'] - print('The following GitHub Actions are outdated:\n') - __print_table(header=header, rows=rows) - else: - print('All GitHub Actions are up-to-date!') - - -def __update_outdated_actions(*workflows: Workflow) -> Set[Workflow]: - rows = [] - - for workflow in workflows: - outdated_actions = workflow.outdated_actions - - if outdated_actions: - workflow_file = workflow.workflow_file - updated_actions = set() - - for action in outdated_actions: - previous_version = action.version - 
previous_version_numbers = previous_version.version_numbers - latest_version_numbers = action.latest_version.version_numbers - max_version_numbers = min(len(previous_version_numbers), len(latest_version_numbers)) - updated_version = ActionVersion.from_version_numbers(*latest_version_numbers[:max_version_numbers]) - rows.append([workflow_file, action.name, str(previous_version), str(updated_version)]) - action.version = updated_version - updated_actions.add(action) - - __update_workflow(workflow_file, *updated_actions) - - if rows: - rows.sort(key=lambda row: (row[0], row[1])) - header = ['Workflow', 'Action', 'Previous version', 'Updated version'] - print('The following GitHub Actions have been updated:\n') - __print_table(header=header, rows=rows) - else: - print('No GitHub Actions have been updated.') - - return set(workflows) - - -def check_github_actions(**_): - """ - Checks the project's GitHub workflows for outdated Actions. - """ - workflows = __parse_all_workflows() - __print_outdated_actions(*workflows) - - -def update_github_actions(**_): - """ - Updates the versions of outdated GitHub Actions in the project's workflows. - """ - workflows = __parse_all_workflows() - __update_outdated_actions(*workflows) diff --git a/scons/modules.py b/scons/modules.py deleted file mode 100644 index 7746ac88a1..0000000000 --- a/scons/modules.py +++ /dev/null @@ -1,517 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides access to directories and files belonging to different modules that are part of the project. 
-""" -from abc import ABC, abstractmethod -from glob import glob -from os import environ, path, walk -from typing import Callable, List, Optional - -from environment import get_env_array - - -def find_files_recursively(directory: str, - directory_filter: Callable[[str, str], bool] = lambda *_: True, - file_filter: Callable[[str, str], bool] = lambda *_: True) -> List[str]: - """ - Finds and returns files in a directory and its subdirectories that match a given filter. - - :param directory: The directory to be searched - :param directory_filter: A function to be used for filtering subdirectories - :param file_filter: A function to be used for filtering files - :return: A list that contains the paths of all files that have been found - """ - result = [] - - for parent_directory, subdirectories, files in walk(directory, topdown=True): - subdirectories[:] = [ - subdirectory for subdirectory in subdirectories if directory_filter(parent_directory, subdirectory) - ] - - for file in files: - if file_filter(parent_directory, file): - result.append(path.join(parent_directory, file)) - - return result - - -class Module(ABC): - """ - An abstract base class for all classes that provide access to directories and files that belong to a module. - """ - - @property - @abstractmethod - def root_dir(self) -> str: - """ - The path to the module's root directory. - """ - - @property - def build_dir(self) -> str: - """ - The path to the directory, where build files are stored. - """ - return path.join(self.root_dir, 'build') - - @property - def requirements_file(self) -> str: - """ - The path to the requirements.txt file that specifies dependencies required by a module. - """ - return path.join(self.root_dir, 'requirements.txt') - - -class SourceModule(Module, ABC): - """ - An abstract base class for all classes that provide access to directories and files that belong to a module, which - contains source code. 
- """ - - class Subproject(ABC): - """ - An abstract base class for all classes that provide access to directories and files that belong to an individual - subproject that is part of a module, which contains source files. - """ - - def __init__(self, parent_module: 'SourceModule', root_dir: str): - """ - :param parent_module: The `SourceModule`, the subproject belongs to - :param root_dir: The root directory of the suproject - """ - self.parent_module = parent_module - self.root_dir = root_dir - - @property - def name(self) -> str: - """ - The name of the subproject. - """ - return path.basename(self.root_dir) - - def is_enabled(self) -> bool: - """ - Returns whether the subproject is enabled or not. - - :return: True, if the subproject is enabled, False otherwise - """ - enabled_subprojects = get_env_array(environ, 'SUBPROJECTS') - return not enabled_subprojects or self.name in enabled_subprojects - - -class PythonModule(SourceModule): - """ - Provides access to directories and files that belong to the project's Python code. - """ - - class Subproject(SourceModule.Subproject): - """ - Provides access to directories and files that belong to an individual subproject that is part of the project's - Python code. - """ - - @staticmethod - def __filter_pycache_directories(_: str, directory: str) -> bool: - return directory != '__pycache__' - - @property - def source_dir(self) -> str: - """ - The directory that contains the subproject's source code. - """ - return path.join(self.root_dir, 'mlrl') - - @property - def test_dir(self) -> str: - """ - The directory that contains the subproject's automated tests. - """ - return path.join(self.root_dir, 'tests') - - @property - def dist_dir(self) -> str: - """ - The directory that contains all wheel packages that have been built for the subproject. 
- """ - return path.join(self.root_dir, 'dist') - - @property - def build_dirs(self) -> List[str]: - """ - A list that contains all directories, where the subproject's build files are stored. - """ - return [self.dist_dir, path.join(self.root_dir, 'build')] + glob(path.join(self.root_dir, '*.egg-info')) - - def find_wheels(self) -> List[str]: - """ - Finds and returns all wheel packages that have been built for the subproject. - - :return: A list that contains the paths of the wheel packages that have been found - """ - return glob(path.join(self.dist_dir, '*.whl')) - - def find_source_files(self) -> List[str]: - """ - Finds and returns all source files that are contained by the subproject. - - :return: A list that contains the paths of the source files that have been found - """ - return find_files_recursively(self.source_dir, directory_filter=self.__filter_pycache_directories) - - def find_shared_libraries(self) -> List[str]: - """ - Finds and returns all shared libraries that are contained in the subproject's source tree. - - :return: A list that contains all shared libraries that have been found - """ - - def file_filter(_: str, file: str) -> bool: - return (file.startswith('lib') and file.find('.so') >= 0) \ - or file.endswith('.dylib') \ - or (file.startswith('mlrl') and file.endswith('.lib')) \ - or file.endswith('.dll') - - return find_files_recursively(self.source_dir, - directory_filter=self.__filter_pycache_directories, - file_filter=file_filter) - - def find_extension_modules(self) -> List[str]: - """ - Finds and returns all extension modules that are contained in the subproject's source tree. 
- - :return: A list that contains all extension modules that have been found - """ - - def file_filter(_: str, file: str) -> bool: - return (not file.startswith('lib') and file.endswith('.so')) \ - or file.endswith('.pyd') \ - or (not file.startswith('mlrl') and file.endswith('.lib')) - - return find_files_recursively(self.source_dir, - directory_filter=self.__filter_pycache_directories, - file_filter=file_filter) - - @property - def root_dir(self) -> str: - return 'python' - - def find_subprojects(self, return_all: bool = False) -> List[Subproject]: - """ - Finds and returns all subprojects that are part of the Python code. - - :param return_all: True, if all subprojects should be returned, even if they are disabled, False otherwise - :return: A list that contains all subrojects that have been found - """ - subprojects = [ - PythonModule.Subproject(self, file) for file in glob(path.join(self.root_dir, 'subprojects', '*')) - if path.isdir(file) - ] - return subprojects if return_all else [subproject for subproject in subprojects if subproject.is_enabled()] - - def find_subproject(self, file: str) -> Optional[Subproject]: - """ - Finds and returns the subproject to which a given file belongs. - - :param file: The path of the file - :return: The subproject to which the given file belongs or None, if no such subproject is available - """ - for subproject in self.find_subprojects(): - if file.startswith(subproject.root_dir): - return subproject - - return None - - -class CppModule(SourceModule): - """ - Provides access to directories and files that belong to the project's C++ code. - """ - - class Subproject(SourceModule.Subproject): - """ - Provides access to directories and files that belong to an individual subproject that is part of the project's - C++ code. - """ - - @property - def include_dir(self) -> str: - """ - The directory that contains the header files. 
- """ - return path.join(self.root_dir, 'include') - - @property - def src_dir(self) -> str: - """ - The directory that contains the source files. - """ - return path.join(self.root_dir, 'src') - - @property - def test_dir(self) -> str: - """ - The directory that contains the source code for automated tests. - """ - return path.join(self.root_dir, 'test') - - def find_source_files(self) -> List[str]: - """ - Finds and returns all source files that are contained by the subproject. - - :return: A list that contains the paths of the source files that have been found - """ - - def file_filter(_: str, file: str) -> bool: - return file.endswith('.hpp') or file.endswith('.cpp') - - return find_files_recursively(self.root_dir, file_filter=file_filter) - - @property - def root_dir(self) -> str: - return 'cpp' - - def find_subprojects(self, return_all: bool = False) -> List[Subproject]: - """ - Finds and returns all subprojects that are part of the C++ code. - - - :param return_all: True, if all subprojects should be returned, even if they are disabled, False otherwise - :return: A list that contains all subprojects that have been found - """ - subprojects = [ - CppModule.Subproject(self, file) for file in glob(path.join(self.root_dir, 'subprojects', '*')) - if path.isdir(file) - ] - return subprojects if return_all else [subproject for subproject in subprojects if subproject.is_enabled()] - - -class BuildModule(Module): - """ - Provides access to directories and files that belong to the build system. - """ - - @property - def root_dir(self) -> str: - return 'scons' - - -class DocumentationModule(Module): - """ - Provides access to directories and files that belong to the project's documentation. - """ - - class ApidocSubproject(ABC): - """ - An abstract base class for all classes that provide access to directories and files that are needed for building - the API documentation of a certain C++ or Python subproject. 
- """ - - def __init__(self, parent_module: 'DocumentationModule', source_subproject: SourceModule.Subproject): - """ - :param parent_module: The `DocumentationModule` this subproject belongs to - :param source_subproject: The subproject of which the API documentation should be built - """ - self.parent_module = parent_module - self.source_subproject = source_subproject - - @property - def name(self) -> str: - """ - The name of the subproject of which the API documentation should be built. - """ - return self.source_subproject.name - - @property - @abstractmethod - def build_dir(self) -> str: - """ - The directory, where build files should be stored. - """ - - @property - @abstractmethod - def root_file(self) -> str: - """ - The path of the root file of the API documentation. - """ - - def find_build_files(self) -> List[str]: - """ - Finds and returns all build files that have been created when building the API documentation. - - :return: A list that contains the paths of all build files that have been found - """ - return find_files_recursively(self.build_dir) - - class CppApidocSubproject(ApidocSubproject): - """ - Provides access to the directories and files that are necessary for building the API documentation of a certain - C++ subproject. - """ - - @property - def build_dir(self) -> str: - return path.join(self.parent_module.apidoc_dir_cpp, self.name) - - @property - def root_file(self) -> str: - return path.join(self.build_dir, 'filelist.rst') - - class PythonApidocSubproject(ApidocSubproject): - """ - Provides access to the directories and files that are necessary for building the API documentation of a certain - Python subproject. - """ - - @property - def build_dir(self) -> str: - return path.join(self.parent_module.apidoc_dir_python, self.name) - - @property - def root_file(self) -> str: - return path.join(self.build_dir, 'mlrl.' 
+ self.name + '.rst') - - @property - def root_dir(self) -> str: - return 'doc' - - @property - def doxygen_config_file(self) -> str: - """ - The Doxygen config file. - """ - return path.join(self.root_dir, 'Doxyfile') - - @property - def config_file(self) -> str: - """ - The config file that should be used for building the documentation. - """ - return path.join(self.root_dir, 'conf.py') - - @property - def apidoc_dir(self) -> str: - """ - The directory, where API documentations should be stored. - """ - return path.join(self.root_dir, 'developer_guide', 'api') - - @property - def apidoc_dir_python(self) -> str: - """ - The directory, where Python API documentations should be stored. - """ - return path.join(self.apidoc_dir, 'python') - - @property - def apidoc_tocfile_python(self) -> str: - """ - The tocfile referencing all Python API documentations. - """ - return path.join(self.apidoc_dir_python, 'index.md') - - @property - def apidoc_dir_cpp(self) -> str: - """ - The directory, where C++ API documentations should be stored. - """ - return path.join(self.apidoc_dir, 'cpp') - - @property - def apidoc_tocfile_cpp(self) -> str: - """ - The tocfile referencing all C++ API documentations. - """ - return path.join(self.apidoc_dir_cpp, 'index.md') - - @property - def build_dir(self) -> str: - """ - The directory, where the documentation should be stored. - """ - return path.join(self.root_dir, '_build', 'html') - - def find_build_files(self) -> List[str]: - """ - Finds and returns all files that belong to the documentation that has been built. - - :return: A list that contains the paths of the build files that have been found - """ - return find_files_recursively(self.build_dir) - - def find_source_files(self) -> List[str]: - """ - Finds and returns all source files from which the documentation is built. 
- - :return: A list that contains the paths of the source files that have been found - """ - - def directory_filter(parent_directory: str, directory: str) -> bool: - return path.join(parent_directory, directory) != self.build_dir - - def file_filter(_: str, file: str) -> bool: - return file == 'conf.py' or file.endswith('.rst') or file.endswith('.svg') or file.endswith('.md') - - return find_files_recursively(self.root_dir, directory_filter=directory_filter, file_filter=file_filter) - - def get_cpp_apidoc_subproject(self, cpp_subproject: CppModule.Subproject) -> CppApidocSubproject: - """ - Returns a `CppApidocSubproject` for building the API documentation of a given C++ subproject. - - :param cpp_subproject: The C++ subproject of which the API documentation should be built - :return: A `CppApidocSubproject` - """ - return DocumentationModule.CppApidocSubproject(self, cpp_subproject) - - def get_python_apidoc_subproject(self, python_subproject: PythonModule.Subproject) -> PythonApidocSubproject: - """ - Returns a `PythonApidocSubproject` for building the API documentation of a given Python subproject. - - :param python_subproject: The Python subproject of which the API documentation should be built - :return: A `PythonApidocSubproject` - """ - return DocumentationModule.PythonApidocSubproject(self, python_subproject) - - def find_cpp_apidoc_subproject(self, file: str) -> Optional[CppApidocSubproject]: - """ - Finds and returns the `CppApidocSubproject` to which a given file belongs. 
- - :param file: The path of the file - :return: The `CppApiSubproject` to which the given file belongs or None, if no such subproject is - available - """ - for subproject in CPP_MODULE.find_subprojects(): - apidoc_subproject = self.get_cpp_apidoc_subproject(subproject) - - if file.startswith(apidoc_subproject.build_dir): - return apidoc_subproject - - return None - - def find_python_apidoc_subproject(self, file: str) -> Optional[PythonApidocSubproject]: - """ - Finds and returns the `PythonApidocSubproject` to which a given file belongs. - - :param file: The path of the file - :return: The `PythonApidocSubproject` to which the given file belongs or None, if no such subproject is - available - """ - for subproject in PYTHON_MODULE.find_subprojects(): - apidoc_subproject = self.get_python_apidoc_subproject(subproject) - - if file.startswith(apidoc_subproject.build_dir): - return apidoc_subproject - - return None - - -BUILD_MODULE = BuildModule() - -PYTHON_MODULE = PythonModule() - -CPP_MODULE = CppModule() - -DOC_MODULE = DocumentationModule() - -ALL_MODULES = [BUILD_MODULE, PYTHON_MODULE, CPP_MODULE, DOC_MODULE] diff --git a/scons/packaging.py b/scons/packaging.py deleted file mode 100644 index 926dc9167b..0000000000 --- a/scons/packaging.py +++ /dev/null @@ -1,64 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for building and installing Python wheel packages. 
-""" -from typing import List - -from modules import PYTHON_MODULE -from run import run_python_program - - -def __build_python_wheel(package_dir: str): - run_python_program('build', - '--no-isolation', - '--wheel', - package_dir, - print_args=True, - additional_dependencies=['wheel', 'setuptools']) - - -def __install_python_wheels(wheels: List[str]): - run_python_program('pip', - 'install', - '--force-reinstall', - '--no-deps', - '--disable-pip-version-check', - *wheels, - print_args=True, - install_program=False) - - -# pylint: disable=unused-argument -def build_python_wheel(env, target, source): - """ - Builds a Python wheel package for a single subproject. - - :param env: The scons environment - :param target: The path of the wheel package to be built, if it does already exist, or the path of the directory, - where the wheel package should be stored - :param source: The source files from which the wheel package should be built - """ - if target: - subproject = PYTHON_MODULE.find_subproject(target[0].path) - - if subproject: - print('Building Python wheels for subproject "' + subproject.name + '"...') - __build_python_wheel(subproject.root_dir) - - -# pylint: disable=unused-argument -def install_python_wheels(env, target, source): - """ - Installs all Python wheel packages that have been built for a single subproject. 
- - :param env: The scons environment - :param target: The path of the subproject's root directory - :param source: The paths of the wheel packages to be installed - """ - if source: - subproject = PYTHON_MODULE.find_subproject(source[0].path) - - if subproject: - print('Installing Python wheels for subproject "' + subproject.name + '"...') - __install_python_wheels(subproject.find_wheels()) diff --git a/scons/requirements.txt b/scons/requirements.txt deleted file mode 100644 index 46cc120e2d..0000000000 --- a/scons/requirements.txt +++ /dev/null @@ -1,19 +0,0 @@ -build >= 1.2, < 1.3 -cpplint >= 2.0, < 2.1 -clang-format >= 19.1, < 19.2 -cython >= 3.0, < 3.1 -isort >= 5.13, < 5.14 -mdformat >= 0.7, < 0.8 -mdformat-myst >= 0.2, < 0.3 -meson >= 1.6, < 1.7 -ninja >= 1.11, < 1.12 -pygithub >= 2.5, < 2.6 -pylint >= 3.3, < 3.4 -pyyaml >= 6.0, < 6.1 -setuptools -scons >= 4.8, < 4.9 -tabulate >= 0.9, < 0.10 -unittest-xml-reporting >= 3.2, < 3.3 -wheel >= 0.45, < 0.46 -yamlfix >= 1.17, < 1.18 -yapf >= 0.43, < 0.44 diff --git a/scons/run.py b/scons/run.py deleted file mode 100644 index 25cb1c3bc7..0000000000 --- a/scons/run.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for running external programs during the build process. -""" -from typing import List, Optional - -from command_line import run_command -from dependencies import install_dependencies -from modules import BUILD_MODULE - - -def run_program(program: str, - *args, - print_args: bool = False, - additional_dependencies: Optional[List[str]] = None, - requirements_file: str = BUILD_MODULE.requirements_file, - install_program: bool = True, - env=None): - """ - Runs an external program that has been installed into the virtual environment. 
- - :param program: The name of the program to be run - :param args: Optional arguments that should be passed to the program - :param print_args: True, if the arguments should be included in log statements, False otherwise - :param additional_dependencies: The names of dependencies that should be installed before running the program - :param requirements_file: The path of the requirements.txt file that specifies the dependency versions - :param install_program: True, if the program should be installed before being run, False otherwise - :param env: The environment variables to be passed to the program - """ - dependencies = [] - - if install_program: - dependencies.append(program) - - if additional_dependencies: - dependencies.extend(additional_dependencies) - - install_dependencies(requirements_file, *dependencies) - run_command(program, *args, print_args=print_args, env=env) - - -def run_python_program(program: str, - *args, - print_args: bool = False, - additional_dependencies: Optional[List[str]] = None, - requirements_file: str = BUILD_MODULE.requirements_file, - install_program: bool = True, - env=None): - """ - Runs an external Python program. 
- - :param program: The name of the program to be run - :param args: Optional arguments that should be passed to the program - :param print_args: True, if the arguments should be included in log statements, False otherwise - :param additional_dependencies: The names of dependencies that should be installed before running the program - :param requirements_file: The path of the requirements.txt file that specifies the dependency versions - :param install_program: True, if the program should be installed before being run, False otherwise - :param env: The environment variable to be passed to the program - """ - dependencies = [] - - if install_program: - dependencies.append(program) - - if additional_dependencies: - dependencies.extend(additional_dependencies) - - run_program('python', - '-m', - program, - *args, - print_args=print_args, - additional_dependencies=dependencies, - requirements_file=requirements_file, - install_program=False, - env=env) diff --git a/scons/sconstruct.py b/scons/sconstruct.py deleted file mode 100644 index b8b6b8ecf6..0000000000 --- a/scons/sconstruct.py +++ /dev/null @@ -1,322 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Defines the individual targets of the build process. 
-""" -import sys - -from functools import reduce -from os import path - -from changelog import print_latest_changelog, update_changelog_bugfix, update_changelog_feature, \ - update_changelog_main, validate_changelog_bugfix, validate_changelog_feature, validate_changelog_main -from code_style import check_cpp_code_style, check_md_code_style, check_python_code_style, check_yaml_code_style, \ - enforce_cpp_code_style, enforce_md_code_style, enforce_python_code_style, enforce_yaml_code_style -from compilation import compile_cpp, compile_cython, install_cpp, install_cython, setup_cpp, setup_cython -from dependencies import check_dependency_versions, install_runtime_dependencies -from documentation import apidoc_cpp, apidoc_cpp_tocfile, apidoc_python, apidoc_python_tocfile, doc -from github_actions import check_github_actions, update_github_actions -from modules import BUILD_MODULE, CPP_MODULE, DOC_MODULE, PYTHON_MODULE -from packaging import build_python_wheel, install_python_wheels -from testing import tests_cpp, tests_python -from versioning import apply_development_version, increment_development_version, increment_major_version, \ - increment_minor_version, increment_patch_version, print_current_version, reset_development_version - -from SCons.Script import COMMAND_LINE_TARGETS -from SCons.Script.SConscript import SConsEnvironment - - -def __create_phony_target(environment, target, action=None): - return environment.AlwaysBuild(environment.Alias(target, None, action)) - - -def __print_if_clean(environment, message: str): - if environment.GetOption('clean'): - print(message) - - -# Define target names... 
-TARGET_NAME_INCREMENT_DEVELOPMENT_VERSION = 'increment_development_version' -TARGET_NAME_RESET_DEVELOPMENT_VERSION = 'reset_development_version' -TARGET_NAME_APPLY_DEVELOPMENT_VERSION = 'apply_development_version' -TARGET_NAME_INCREMENT_PATCH_VERSION = 'increment_patch_version' -TARGET_NAME_INCREMENT_MINOR_VERSION = 'increment_minor_version' -TARGET_NAME_INCREMENT_MAJOR_VERSION = 'increment_major_version' -TARGET_NAME_VALIDATE_CHANGELOG_BUGFIX = 'validate_changelog_bugfix' -TARGET_NAME_VALIDATE_CHANGELOG_FEATURE = 'validate_changelog_feature' -TARGET_NAME_VALIDATE_CHANGELOG_MAIN = 'validate_changelog_main' -TARGET_NAME_UPDATE_CHANGELOG_BUGFIX = 'update_changelog_bugfix' -TARGET_NAME_UPDATE_CHANGELOG_FEATURE = 'update_changelog_feature' -TARGET_NAME_UPDATE_CHANGELOG_MAIN = 'update_changelog_main' -TARGET_NAME_PRINT_VERSION = 'print_version' -TARGET_NAME_PRINT_LATEST_CHANGELOG = 'print_latest_changelog' -TARGET_NAME_TEST_FORMAT = 'test_format' -TARGET_NAME_TEST_FORMAT_PYTHON = TARGET_NAME_TEST_FORMAT + '_python' -TARGET_NAME_TEST_FORMAT_CPP = TARGET_NAME_TEST_FORMAT + '_cpp' -TARGET_NAME_TEST_FORMAT_MD = TARGET_NAME_TEST_FORMAT + '_md' -TARGET_NAME_TEST_FORMAT_YAML = TARGET_NAME_TEST_FORMAT + '_yaml' -TARGET_NAME_FORMAT = 'format' -TARGET_NAME_FORMAT_PYTHON = TARGET_NAME_FORMAT + '_python' -TARGET_NAME_FORMAT_CPP = TARGET_NAME_FORMAT + '_cpp' -TARGET_NAME_FORMAT_MD = TARGET_NAME_FORMAT + '_md' -TARGET_NAME_FORMAT_YAML = TARGET_NAME_FORMAT + '_yaml' -TARGET_NAME_DEPENDENCIES_CHECK = 'check_dependencies' -TARGET_NAME_GITHUB_ACTIONS_CHECK = 'check_github_actions' -TARGET_NAME_GITHUB_ACTIONS_UPDATE = 'update_github_actions' -TARGET_NAME_VENV = 'venv' -TARGET_NAME_COMPILE = 'compile' -TARGET_NAME_COMPILE_CPP = TARGET_NAME_COMPILE + '_cpp' -TARGET_NAME_COMPILE_CYTHON = TARGET_NAME_COMPILE + '_cython' -TARGET_NAME_INSTALL = 'install' -TARGET_NAME_INSTALL_CPP = TARGET_NAME_INSTALL + '_cpp' -TARGET_NAME_INSTALL_CYTHON = TARGET_NAME_INSTALL + '_cython' 
-TARGET_NAME_BUILD_WHEELS = 'build_wheels' -TARGET_NAME_INSTALL_WHEELS = 'install_wheels' -TARGET_NAME_TESTS = 'tests' -TARGET_NAME_TESTS_CPP = TARGET_NAME_TESTS + '_cpp' -TARGET_NAME_TESTS_PYTHON = TARGET_NAME_TESTS + '_python' -TARGET_NAME_APIDOC = 'apidoc' -TARGET_NAME_APIDOC_CPP = TARGET_NAME_APIDOC + '_cpp' -TARGET_NAME_APIDOC_PYTHON = TARGET_NAME_APIDOC + '_python' -TARGET_NAME_DOC = 'doc' - -VALID_TARGETS = { - TARGET_NAME_INCREMENT_DEVELOPMENT_VERSION, TARGET_NAME_RESET_DEVELOPMENT_VERSION, - TARGET_NAME_APPLY_DEVELOPMENT_VERSION, TARGET_NAME_INCREMENT_PATCH_VERSION, TARGET_NAME_INCREMENT_MINOR_VERSION, - TARGET_NAME_INCREMENT_MAJOR_VERSION, TARGET_NAME_VALIDATE_CHANGELOG_BUGFIX, TARGET_NAME_VALIDATE_CHANGELOG_FEATURE, - TARGET_NAME_VALIDATE_CHANGELOG_MAIN, TARGET_NAME_UPDATE_CHANGELOG_BUGFIX, TARGET_NAME_UPDATE_CHANGELOG_FEATURE, - TARGET_NAME_UPDATE_CHANGELOG_MAIN, TARGET_NAME_PRINT_VERSION, TARGET_NAME_PRINT_LATEST_CHANGELOG, - TARGET_NAME_TEST_FORMAT, TARGET_NAME_TEST_FORMAT_PYTHON, TARGET_NAME_TEST_FORMAT_CPP, TARGET_NAME_TEST_FORMAT_MD, - TARGET_NAME_TEST_FORMAT_YAML, TARGET_NAME_FORMAT, TARGET_NAME_FORMAT_PYTHON, TARGET_NAME_FORMAT_CPP, - TARGET_NAME_FORMAT_MD, TARGET_NAME_FORMAT_YAML, TARGET_NAME_DEPENDENCIES_CHECK, TARGET_NAME_GITHUB_ACTIONS_CHECK, - TARGET_NAME_GITHUB_ACTIONS_UPDATE, TARGET_NAME_VENV, TARGET_NAME_COMPILE, TARGET_NAME_COMPILE_CPP, - TARGET_NAME_COMPILE_CYTHON, TARGET_NAME_INSTALL, TARGET_NAME_INSTALL_CPP, TARGET_NAME_INSTALL_CYTHON, - TARGET_NAME_BUILD_WHEELS, TARGET_NAME_INSTALL_WHEELS, TARGET_NAME_TESTS, TARGET_NAME_TESTS_CPP, - TARGET_NAME_TESTS_PYTHON, TARGET_NAME_APIDOC, TARGET_NAME_APIDOC_CPP, TARGET_NAME_APIDOC_PYTHON, TARGET_NAME_DOC -} - -DEFAULT_TARGET = TARGET_NAME_INSTALL_WHEELS - -# Raise an error if any invalid targets are given... 
-invalid_targets = [target for target in COMMAND_LINE_TARGETS if target not in VALID_TARGETS] - -if invalid_targets: - print('The following targets are unknown: ' - + reduce(lambda aggr, target: aggr + (', ' if len(aggr) > 0 else '') + target, invalid_targets, '')) - sys.exit(-1) - -# Create temporary file ".sconsign.dblite" in the build directory... -env = SConsEnvironment() -env.SConsignFile(name=path.relpath(path.join(BUILD_MODULE.build_dir, '.sconsign'), BUILD_MODULE.root_dir)) - -# Defines targets for updating the project's version... -__create_phony_target(env, TARGET_NAME_INCREMENT_DEVELOPMENT_VERSION, action=increment_development_version) -__create_phony_target(env, TARGET_NAME_RESET_DEVELOPMENT_VERSION, action=reset_development_version) -__create_phony_target(env, TARGET_NAME_APPLY_DEVELOPMENT_VERSION, action=apply_development_version) -__create_phony_target(env, TARGET_NAME_INCREMENT_PATCH_VERSION, action=increment_patch_version) -__create_phony_target(env, TARGET_NAME_INCREMENT_MINOR_VERSION, action=increment_minor_version) -__create_phony_target(env, TARGET_NAME_INCREMENT_MAJOR_VERSION, action=increment_major_version) - -# Define targets for validating changelogs... -__create_phony_target(env, TARGET_NAME_VALIDATE_CHANGELOG_BUGFIX, action=validate_changelog_bugfix) -__create_phony_target(env, TARGET_NAME_VALIDATE_CHANGELOG_FEATURE, action=validate_changelog_feature) -__create_phony_target(env, TARGET_NAME_VALIDATE_CHANGELOG_MAIN, action=validate_changelog_main) - -# Define targets for updating the project's changelog... -__create_phony_target(env, TARGET_NAME_UPDATE_CHANGELOG_BUGFIX, action=update_changelog_bugfix) -__create_phony_target(env, TARGET_NAME_UPDATE_CHANGELOG_FEATURE, action=update_changelog_feature) -__create_phony_target(env, TARGET_NAME_UPDATE_CHANGELOG_MAIN, action=update_changelog_main) - -# Define targets for printing information about the project... 
-__create_phony_target(env, TARGET_NAME_PRINT_VERSION, action=print_current_version) -__create_phony_target(env, TARGET_NAME_PRINT_LATEST_CHANGELOG, action=print_latest_changelog) - -# Define targets for checking code style definitions... -target_test_format_python = __create_phony_target(env, TARGET_NAME_TEST_FORMAT_PYTHON, action=check_python_code_style) -target_test_format_cpp = __create_phony_target(env, TARGET_NAME_TEST_FORMAT_CPP, action=check_cpp_code_style) -target_test_format_md = __create_phony_target(env, TARGET_NAME_TEST_FORMAT_MD, action=check_md_code_style) -target_test_format_yaml = __create_phony_target(env, TARGET_NAME_TEST_FORMAT_YAML, action=check_yaml_code_style) -target_test_format = __create_phony_target(env, TARGET_NAME_TEST_FORMAT) -env.Depends(target_test_format, - [target_test_format_python, target_test_format_cpp, target_test_format_md, target_test_format_yaml]) - -# Define targets for enforcing code style definitions... -target_format_python = __create_phony_target(env, TARGET_NAME_FORMAT_PYTHON, action=enforce_python_code_style) -target_format_cpp = __create_phony_target(env, TARGET_NAME_FORMAT_CPP, action=enforce_cpp_code_style) -target_format_md = __create_phony_target(env, TARGET_NAME_FORMAT_MD, action=enforce_md_code_style) -target_format_yaml = __create_phony_target(env, TARGET_NAME_FORMAT_YAML, action=enforce_yaml_code_style) -target_format = __create_phony_target(env, TARGET_NAME_FORMAT) -env.Depends(target_format, [target_format_python, target_format_cpp, target_format_md, target_format_yaml]) - -# Define target for checking dependency versions... -__create_phony_target(env, TARGET_NAME_DEPENDENCIES_CHECK, action=check_dependency_versions) - -# Define target for checking and updating the versions of GitHub Actions... 
-__create_phony_target(env, TARGET_NAME_GITHUB_ACTIONS_CHECK, action=check_github_actions) -__create_phony_target(env, TARGET_NAME_GITHUB_ACTIONS_UPDATE, action=update_github_actions) - -# Define target for installing runtime dependencies... -target_venv = __create_phony_target(env, TARGET_NAME_VENV, action=install_runtime_dependencies) - -# Define targets for compiling the C++ and Cython code... -env.Command(CPP_MODULE.build_dir, None, action=setup_cpp) -target_compile_cpp = __create_phony_target(env, TARGET_NAME_COMPILE_CPP, action=compile_cpp) -env.Depends(target_compile_cpp, [target_venv, CPP_MODULE.build_dir]) - -env.Command(PYTHON_MODULE.build_dir, None, action=setup_cython) -target_compile_cython = __create_phony_target(env, TARGET_NAME_COMPILE_CYTHON, action=compile_cython) -env.Depends(target_compile_cython, [target_compile_cpp, PYTHON_MODULE.build_dir]) - -target_compile = __create_phony_target(env, TARGET_NAME_COMPILE) -env.Depends(target_compile, [target_compile_cpp, target_compile_cython]) - -# Define targets for cleaning up C++ and Cython build directories... -if not COMMAND_LINE_TARGETS \ - or TARGET_NAME_COMPILE_CPP in COMMAND_LINE_TARGETS \ - or TARGET_NAME_COMPILE in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing C++ build files...') - env.Clean([target_compile_cpp, DEFAULT_TARGET], CPP_MODULE.build_dir) - -if not COMMAND_LINE_TARGETS \ - or TARGET_NAME_COMPILE_CYTHON in COMMAND_LINE_TARGETS \ - or TARGET_NAME_COMPILE in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing Cython build files...') - env.Clean([target_compile_cython, DEFAULT_TARGET], PYTHON_MODULE.build_dir) - -# Define targets for installing shared libraries and extension modules into the source tree... 
-target_install_cpp = __create_phony_target(env, TARGET_NAME_INSTALL_CPP, action=install_cpp) -env.Depends(target_install_cpp, target_compile_cpp) - -target_install_cython = __create_phony_target(env, TARGET_NAME_INSTALL_CYTHON, action=install_cython) -env.Depends(target_install_cython, target_compile_cython) - -target_install = env.Alias(TARGET_NAME_INSTALL, None, None) -env.Depends(target_install, [target_install_cpp, target_install_cython]) - -# Define targets for removing shared libraries and extension modules from the source tree... -if not COMMAND_LINE_TARGETS \ - or TARGET_NAME_INSTALL_CPP in COMMAND_LINE_TARGETS \ - or TARGET_NAME_INSTALL in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing shared libraries from source tree...') - - for subproject in PYTHON_MODULE.find_subprojects(return_all=True): - env.Clean([target_install_cpp, DEFAULT_TARGET], subproject.find_shared_libraries()) - -if not COMMAND_LINE_TARGETS \ - or TARGET_NAME_INSTALL_CYTHON in COMMAND_LINE_TARGETS \ - or TARGET_NAME_INSTALL in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing extension modules from source tree...') - - for subproject in PYTHON_MODULE.find_subprojects(return_all=True): - env.Clean([target_install_cython, DEFAULT_TARGET], subproject.find_extension_modules()) - -# Define targets for building and installing Python wheels... 
-commands_build_wheels = [] -commands_install_wheels = [] - -for subproject in PYTHON_MODULE.find_subprojects(): - wheels = subproject.find_wheels() - targets_build_wheels = wheels if wheels else subproject.dist_dir - - command_build_wheels = env.Command(targets_build_wheels, subproject.find_source_files(), action=build_python_wheel) - commands_build_wheels.append(command_build_wheels) - - command_install_wheels = env.Command(subproject.root_dir, targets_build_wheels, action=install_python_wheels) - env.Depends(command_install_wheels, command_build_wheels) - commands_install_wheels.append(command_install_wheels) - -target_build_wheels = env.Alias(TARGET_NAME_BUILD_WHEELS, None, None) -env.Depends(target_build_wheels, [target_install] + commands_build_wheels) - -target_install_wheels = env.Alias(TARGET_NAME_INSTALL_WHEELS, None, None) -env.Depends(target_install_wheels, [target_install] + commands_install_wheels) - -# Define target for cleaning up Python wheels and associated build directories... -if not COMMAND_LINE_TARGETS or TARGET_NAME_BUILD_WHEELS in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing Python wheels...') - - for subproject in PYTHON_MODULE.find_subprojects(return_all=True): - env.Clean([target_build_wheels, DEFAULT_TARGET], subproject.build_dirs) - -# Define targets for running automated tests... -target_tests_cpp = __create_phony_target(env, TARGET_NAME_TESTS_CPP, action=tests_cpp) -env.Depends(target_tests_cpp, target_compile_cpp) - -target_tests_python = __create_phony_target(env, TARGET_NAME_TESTS_PYTHON, action=tests_python) -env.Depends(target_tests_python, target_install_wheels) - -target_tests = __create_phony_target(env, TARGET_NAME_TESTS) -env.Depends(target_tests, [target_tests_cpp, target_tests_python]) - -# Define targets for generating the documentation... 
-commands_apidoc_cpp = [] -commands_apidoc_python = [] - -for subproject in CPP_MODULE.find_subprojects(): - apidoc_subproject = DOC_MODULE.get_cpp_apidoc_subproject(subproject) - build_files = apidoc_subproject.find_build_files() - targets_apidoc_cpp = build_files if build_files else apidoc_subproject.build_dir - command_apidoc_cpp = env.Command(targets_apidoc_cpp, subproject.find_source_files(), action=apidoc_cpp) - env.NoClean(command_apidoc_cpp) - commands_apidoc_cpp.append(command_apidoc_cpp) - -command_apidoc_cpp_tocfile = env.Command(DOC_MODULE.apidoc_tocfile_cpp, None, action=apidoc_cpp_tocfile) -env.NoClean(command_apidoc_cpp_tocfile) -env.Depends(command_apidoc_cpp_tocfile, commands_apidoc_cpp) - -target_apidoc_cpp = env.Alias(TARGET_NAME_APIDOC_CPP, None, None) -env.Depends(target_apidoc_cpp, command_apidoc_cpp_tocfile) - -for subproject in PYTHON_MODULE.find_subprojects(): - apidoc_subproject = DOC_MODULE.get_python_apidoc_subproject(subproject) - build_files = apidoc_subproject.find_build_files() - targets_apidoc_python = build_files if build_files else apidoc_subproject.build_dir - command_apidoc_python = env.Command(targets_apidoc_python, subproject.find_source_files(), action=apidoc_python) - env.NoClean(command_apidoc_python) - env.Depends(command_apidoc_python, target_install_wheels) - commands_apidoc_python.append(command_apidoc_python) - -command_apidoc_python_tocfile = env.Command(DOC_MODULE.apidoc_tocfile_python, None, action=apidoc_python_tocfile) -env.NoClean(command_apidoc_python_tocfile) -env.Depends(command_apidoc_python_tocfile, commands_apidoc_python) - -target_apidoc_python = env.Alias(TARGET_NAME_APIDOC_PYTHON, None, None) -env.Depends(target_apidoc_python, command_apidoc_python_tocfile) - -target_apidoc = env.Alias(TARGET_NAME_APIDOC, None, None) -env.Depends(target_apidoc, [target_apidoc_cpp, target_apidoc_python]) - -doc_files = DOC_MODULE.find_build_files() -targets_doc = doc_files if doc_files else DOC_MODULE.build_dir 
-command_doc = env.Command(targets_doc, DOC_MODULE.find_source_files(), action=doc) -env.Depends(command_doc, target_apidoc) -target_doc = env.Alias(TARGET_NAME_DOC, None, None) -env.Depends(target_doc, command_doc) - -# Define target for cleaning up the documentation and associated build directories... -if not COMMAND_LINE_TARGETS \ - or TARGET_NAME_APIDOC_CPP in COMMAND_LINE_TARGETS \ - or TARGET_NAME_APIDOC in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing C++ API documentation...') - env.Clean([target_apidoc_cpp, DEFAULT_TARGET], DOC_MODULE.apidoc_tocfile_cpp) - - for subproject in CPP_MODULE.find_subprojects(return_all=True): - apidoc_subproject = DOC_MODULE.get_cpp_apidoc_subproject(subproject) - env.Clean([target_apidoc_cpp, DEFAULT_TARGET], apidoc_subproject.build_dir) - -if not COMMAND_LINE_TARGETS \ - or TARGET_NAME_APIDOC_PYTHON in COMMAND_LINE_TARGETS \ - or TARGET_NAME_APIDOC in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing Python API documentation...') - env.Clean([target_apidoc_python, DEFAULT_TARGET], DOC_MODULE.apidoc_tocfile_python) - - for subproject in PYTHON_MODULE.find_subprojects(return_all=True): - apidoc_subproject = DOC_MODULE.get_python_apidoc_subproject(subproject) - env.Clean([target_apidoc_python, DEFAULT_TARGET], apidoc_subproject.build_dir) - -if not COMMAND_LINE_TARGETS or TARGET_NAME_DOC in COMMAND_LINE_TARGETS: - __print_if_clean(env, 'Removing documentation...') - env.Clean([target_doc, DEFAULT_TARGET], DOC_MODULE.build_dir) - -# Set the default target... -env.Default(DEFAULT_TARGET) diff --git a/scons/testing.py b/scons/testing.py deleted file mode 100644 index c50e766709..0000000000 --- a/scons/testing.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for running automated tests. 
-""" -from os import environ, path - -from environment import get_env_bool -from modules import CPP_MODULE, PYTHON_MODULE -from run import run_program, run_python_program - - -def __meson_test(build_dir: str): - run_program('meson', 'test', '-C', build_dir, '-v', print_args=True) - - -def __python_unittest(directory: str, fail_fast: bool = False): - args = [ - 'discover', - '--verbose', - '--start-directory', - directory, - '--output', - path.join(PYTHON_MODULE.build_dir, 'test-results'), - ] - - if fail_fast: - args.append('--failfast') - - run_python_program('xmlrunner', - *args, - print_args=True, - install_program=False, - additional_dependencies=['unittest-xml-reporting']) - - -def tests_cpp(**_): - """ - Runs all automated tests of C++ code. - """ - __meson_test(CPP_MODULE.build_dir) - - -def tests_python(**_): - """ - Runs all automated tests of Python code. - """ - fail_fast = get_env_bool(environ, 'FAIL_FAST') - - for subproject in PYTHON_MODULE.find_subprojects(): - test_dir = subproject.test_dir - - if path.isdir(test_dir): - print('Running automated tests for subpackage "' + subproject.name + '"...') - __python_unittest(test_dir, fail_fast=fail_fast) diff --git a/scons/versioning.py b/scons/versioning.py deleted file mode 100644 index d8f0aee314..0000000000 --- a/scons/versioning.py +++ /dev/null @@ -1,178 +0,0 @@ -""" -Author: Michael Rapp (michael.rapp.ml@gmail.com) - -Provides utility functions for updating the project's version. -""" -import sys - -from dataclasses import dataclass -from typing import Optional - -VERSION_FILE = '.version' - -DEV_VERSION_FILE = '.version-dev' - -VERSION_FILE_ENCODING = 'utf-8' - - -@dataclass -class Version: - """ - Represents a semantic version. 
- - Attributes: - major: The major version number - minor: The minor version number - patch: The patch version number - dev: The development version number - """ - major: int - minor: int - patch: int - dev: Optional[int] = None - - def __str__(self) -> str: - version = str(self.major) + '.' + str(self.minor) + '.' + str(self.patch) - - if self.dev: - version += '.dev' + str(self.dev) - - return version - - -def __read_version_file(version_file) -> str: - with open(version_file, mode='r', encoding=VERSION_FILE_ENCODING) as file: - lines = file.readlines() - - if len(lines) != 1: - print('File "' + version_file + '" must contain exactly one line') - sys.exit(-1) - - return lines[0] - - -def __write_version_file(version_file, version: str): - with open(version_file, mode='w', encoding=VERSION_FILE_ENCODING) as file: - file.write(version) - - -def __parse_version_number(version_number: str) -> int: - try: - number = int(version_number) - - if number < 0: - raise ValueError() - - return number - except ValueError: - print('Version numbers must only consist of non-negative integers, but got: ' + version_number) - sys.exit(-1) - - -def __get_current_development_version() -> int: - current_version = __read_version_file(DEV_VERSION_FILE) - print('Current development version is "' + current_version + '"') - return __parse_version_number(current_version) - - -def __update_development_version(dev: int): - updated_version = str(dev) - print('Updated version to "' + updated_version + '"') - __write_version_file(DEV_VERSION_FILE, updated_version) - - -def __parse_version(version: str) -> Version: - parts = version.split('.') - - if len(parts) != 3: - print('Version must be given in format MAJOR.MINOR.PATCH or MAJOR.MINOR.PATCH.devN, but got: ' + version) - sys.exit(-1) - - major = __parse_version_number(parts[0]) - minor = __parse_version_number(parts[1]) - patch = __parse_version_number(parts[2]) - return Version(major=major, minor=minor, patch=patch) - - -def 
__get_current_version() -> Version: - current_version = __read_version_file(VERSION_FILE) - print('Current version is "' + current_version + '"') - return __parse_version(current_version) - - -def __update_version(version: Version): - updated_version = str(version) - print('Updated version to "' + updated_version + '"') - __write_version_file(VERSION_FILE, updated_version) - - -def get_current_version() -> Version: - """ - Returns the project's current version. - - :return: The project's current version - """ - return __parse_version(__read_version_file(VERSION_FILE)) - - -def print_current_version(**_): - """ - Prints the project's current version. - """ - return print(str(get_current_version())) - - -def increment_development_version(**_): - """ - Increments the development version. - """ - dev = __get_current_development_version() - dev += 1 - __update_development_version(dev) - - -def reset_development_version(**_): - """ - Resets the development version. - """ - __get_current_development_version() - __update_development_version(0) - - -def apply_development_version(**_): - """ - Appends the development version to the current semantic version. - """ - version = __get_current_version() - version.dev = __get_current_development_version() - __update_version(version) - - -def increment_patch_version(**_): - """ - Increments the patch version. - """ - version = __get_current_version() - version.patch += 1 - __update_version(version) - - -def increment_minor_version(**_): - """ - Increments the minor version. - """ - version = __get_current_version() - version.minor += 1 - version.patch = 0 - __update_version(version) - - -def increment_major_version(**_): - """ - Increments the major version. - """ - version = __get_current_version() - version.major += 1 - version.minor = 0 - version.patch = 0 - __update_version(version)