diff --git a/.gitignore b/.gitignore index 9fd85da1..e4f65d63 100644 --- a/.gitignore +++ b/.gitignore @@ -4,9 +4,9 @@ __pycache__/ *$py.class # Build directory for documentation -docs/build -docs/source/api -docs/source/apidoc +Documentation/build +Documentation/source/api +Documentation/source/apidoc # C extensions *.so diff --git a/run_configs/gcom/build_gcom_ar.py b/run_configs/gcom/build_gcom_ar.py index f89b4380..c52a2048 100755 --- a/run_configs/gcom/build_gcom_ar.py +++ b/run_configs/gcom/build_gcom_ar.py @@ -15,7 +15,7 @@ if __name__ == '__main__': with BuildConfig(project_label='gcom object archive $compiler', - tool_box=ToolBox()) as state: + mpi=True, openmp=False, tool_box=ToolBox()) as state: common_build_steps(state) archive_objects(state, output_fpath='$output/libgcom.a') cleanup_prebuilds(state, all_unused=True) diff --git a/run_configs/gcom/build_gcom_so.py b/run_configs/gcom/build_gcom_so.py index 09a97af1..a5110536 100755 --- a/run_configs/gcom/build_gcom_so.py +++ b/run_configs/gcom/build_gcom_so.py @@ -20,7 +20,7 @@ parsed_args = arg_parser.parse_args() with BuildConfig(project_label='gcom shared library $compiler', - tool_box=ToolBox()) as state: + mpi=True, openmp=False, tool_box=ToolBox()) as state: common_build_steps(state, fpic=True) - link_shared_object(state, output_fpath='$output/libgcom.so'), + link_shared_object(state, output_fpath='$output/libgcom.so') cleanup_prebuilds(state, all_unused=True) diff --git a/run_configs/jules/build_jules.py b/run_configs/jules/build_jules.py index f3fc983c..aba22c7c 100755 --- a/run_configs/jules/build_jules.py +++ b/run_configs/jules/build_jules.py @@ -42,12 +42,15 @@ def __init__(self): tool_box.add_tool(Linker(compiler=fc)) with BuildConfig(project_label=f'jules {revision} $compiler', - tool_box=tool_box) as state: - # grab the source. todo: use some checkouts instead of exports in these configs. - fcm_export(state, src='fcm:jules.xm_tr/src', revision=revision, dst_label='src') - fcm_export(state, src='fcm:jules.xm_tr/utils', revision=revision, dst_label='utils') + mpi=False, openmp=False, tool_box=tool_box) as state: + # grab the source. todo: use some checkouts instead of exports + # in these configs. 
+ fcm_export(state, src='fcm:jules.xm_tr/src', revision=revision, + dst_label='src') + fcm_export(state, src='fcm:jules.xm_tr/utils', revision=revision, + dst_label='utils') - grab_pre_build(state, path='/not/a/real/folder', allow_fail=True), + grab_pre_build(state, path='/not/a/real/folder', allow_fail=True) # find the source files find_source_files(state, path_filters=[ @@ -61,9 +64,11 @@ def __init__(self): # move inc files to the root for easy tool use root_inc_files(state) - preprocess_fortran(state, common_flags=['-P', '-DMPI_DUMMY', '-DNCDF_DUMMY', '-I$output']) + preprocess_fortran(state, common_flags=['-P', '-DMPI_DUMMY', + '-DNCDF_DUMMY', '-I$output']) - analyse(state, root_symbol='jules', unreferenced_deps=['imogen_update_carb']) + analyse(state, root_symbol='jules', + unreferenced_deps=['imogen_update_carb']) compile_fortran(state) diff --git a/run_configs/lfric/atm.py b/run_configs/lfric/atm.py index f1c31017..3f93c588 100755 --- a/run_configs/lfric/atm.py +++ b/run_configs/lfric/atm.py @@ -172,7 +172,7 @@ def file_filtering(config): gpl_utils_source = gpl_utils_source_config.source_root / 'gpl_utils' with BuildConfig(project_label='atm $compiler $two_stage', - tool_box=ToolBox()) as state: + mpi=False, openmp=False, tool_box=ToolBox()) as state: # todo: use different dst_labels because they all go into the same folder, # making it hard to see what came from where? diff --git a/run_configs/lfric/grab_lfric.py b/run_configs/lfric/grab_lfric.py index c649ada2..82a18897 100755 --- a/run_configs/lfric/grab_lfric.py +++ b/run_configs/lfric/grab_lfric.py @@ -16,10 +16,12 @@ # these configs are interrogated by the build scripts # todo: doesn't need two separate configs, they use the same project workspace tool_box = ToolBox() -lfric_source_config = BuildConfig(project_label=f'lfric source {LFRIC_REVISION}', - tool_box=tool_box) -gpl_utils_source_config = BuildConfig(project_label=f'lfric source {LFRIC_REVISION}', - tool_box=tool_box) +lfric_source_config = BuildConfig( + project_label=f'lfric source {LFRIC_REVISION}', + tool_box=tool_box) +gpl_utils_source_config = BuildConfig( + project_label=f'lfric source {LFRIC_REVISION}', + tool_box=tool_box) if __name__ == '__main__': diff --git a/run_configs/lfric/gungho.py b/run_configs/lfric/gungho.py index caf59216..7f075c10 100755 --- a/run_configs/lfric/gungho.py +++ b/run_configs/lfric/gungho.py @@ -33,7 +33,7 @@ gpl_utils_source = gpl_utils_source_config.source_root / 'gpl_utils' with BuildConfig(project_label='gungho $compiler $two_stage', - tool_box=ToolBox()) as state: + mpi=True, openmp=True, tool_box=ToolBox()) as state: grab_folder(state, src=lfric_source / 'infrastructure/source/', dst_label='') grab_folder(state, src=lfric_source / 'components/driver/source/', dst_label='') grab_folder(state, src=lfric_source / 'components' / 'inventory' / 'source', dst_label='') @@ -87,7 +87,7 @@ state, common_flags=[ '-c', - '-ffree-line-length-none', '-fopenmp', + '-ffree-line-length-none', '-g', '-std=f2008', @@ -104,8 +104,6 @@ link_exe( state, flags=[ - '-fopenmp', - '-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf', '-lhdf5', # EXTERNAL_DYNAMIC_LIBRARIES '-lxios', # EXTERNAL_STATIC_LIBRARIES '-lstdc++', diff --git a/run_configs/lfric/mesh_tools.py b/run_configs/lfric/mesh_tools.py index f49aa43b..fde5b793 100755 --- a/run_configs/lfric/mesh_tools.py +++ b/run_configs/lfric/mesh_tools.py @@ -25,7 +25,7 @@ psyclone_overrides = Path(__file__).parent / 'mesh_tools_overrides' with BuildConfig(project_label='mesh tools $compiler $two_stage', - 
tool_box=ToolBox()) as state: + mpi=True, openmp=False, tool_box=ToolBox()) as state: grab_folder(state, src=lfric_source / 'infrastructure/source/', dst_label='') grab_folder(state, src=lfric_source / 'mesh_tools/source/', dst_label='') grab_folder(state, src=lfric_source / 'components/science/source/', dst_label='') diff --git a/run_configs/tiny_fortran/build_tiny_fortran.py b/run_configs/tiny_fortran/build_tiny_fortran.py index 17907cdd..09a6ad49 100755 --- a/run_configs/tiny_fortran/build_tiny_fortran.py +++ b/run_configs/tiny_fortran/build_tiny_fortran.py @@ -33,13 +33,13 @@ def __init__(self): with BuildConfig(project_label='tiny_fortran $compiler', tool_box=tool_box) as state: git_checkout(state, src='https://github.com/metomi/fab-test-data.git', - revision='main', dst_label='src'), + revision='main', dst_label='src') - find_source_files(state), + find_source_files(state) - preprocess_fortran(state), + preprocess_fortran(state) - analyse(state, root_symbol='my_prog'), + analyse(state, root_symbol='my_prog') - compile_fortran(state), - link_exe(state), + compile_fortran(state) + link_exe(state) diff --git a/run_configs/um/build_um.py b/run_configs/um/build_um.py index 05177bd2..4cf38e4c 100755 --- a/run_configs/um/build_um.py +++ b/run_configs/um/build_um.py @@ -124,8 +124,11 @@ def replace_in_file(inpath, outpath, find, replace): revision = 'vn12.1' um_revision = revision.replace('vn', 'um') - state = BuildConfig(project_label=f'um atmos safe {revision} $compiler $two_stage', - tool_box=ToolBox()) + # The original build script disabled openmp, so for now + # we keep this disabled. + state = BuildConfig( + project_label=f'um atmos safe {revision} $compiler $two_stage', + mpi=True, openmp=False, tool_box=ToolBox()) # compiler-specific flags compiler = state.tool_box[Category.FORTRAN_COMPILER] diff --git a/source/fab/build_config.py b/source/fab/build_config.py index 614c4328..5dfd1309 100644 --- a/source/fab/build_config.py +++ b/source/fab/build_config.py @@ -22,7 +22,8 @@ from fab.artefacts import ArtefactSet, ArtefactStore from fab.constants import BUILD_OUTPUT, SOURCE_ROOT, PREBUILD -from fab.metrics import send_metric, init_metrics, stop_metrics, metrics_summary +from fab.metrics import (send_metric, init_metrics, stop_metrics, + metrics_summary) from fab.tools.category import Category from fab.tools.tool_box import ToolBox from fab.steps.cleanup_prebuilds import CLEANUP_COUNT, cleanup_prebuilds @@ -41,36 +42,50 @@ class BuildConfig(): """ def __init__(self, project_label: str, tool_box: ToolBox, - multiprocessing: bool = True, n_procs: Optional[int] = None, + mpi: bool = False, + openmp: bool = False, + multiprocessing: bool = True, + n_procs: Optional[int] = None, reuse_artefacts: bool = False, - fab_workspace: Optional[Path] = None, two_stage=False, - verbose=False): + fab_workspace: Optional[Path] = None, + two_stage: bool = False, + verbose: bool = False): """ :param project_label: - Name of the build project. The project workspace folder is created from this name, with spaces replaced - by underscores. + Name of the build project. The project workspace folder is + created from this name, with spaces replaced by underscores. :param tool_box: The ToolBox with all tools to use in the build. + :param mpi: whether the project uses MPI or not. This is used to + pick a default compiler (if not explicitly set in the ToolBox), + and controls PSyclone parameters. + :param openmp: whether the project should use OpenMP or not. 
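For illustration, a minimal usage sketch (the project label here is made up; the pattern mirrors the run_configs changes above): MPI and OpenMP are declared once on the config, and the build steps read them back through config.mpi and config.openmp instead of being handed compiler-specific flags.

    from fab.build_config import BuildConfig
    from fab.tools.tool_box import ToolBox

    # Parallelism is a property of the build, not of individual steps:
    # the compile and link steps add the compiler's own OpenMP flag
    # (e.g. -fopenmp) and pick an MPI-capable default compiler as needed.
    with BuildConfig(project_label='demo $compiler', mpi=True, openmp=True,
                     tool_box=ToolBox()) as state:
        ...  # grab, find_source_files, preprocess, analyse, compile, link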
:param multiprocessing: An option to disable multiprocessing to aid debugging. :param n_procs: - The number of cores to use for multiprocessing operations. Defaults to the number of available cores. + The number of cores to use for multiprocessing operations. + Defaults to the number of available cores. :param reuse_artefacts: A flag to avoid reprocessing certain files on subsequent runs. - WARNING: Currently unsophisticated, this flag should only be used by Fab developers. - The logic behind flag will soon be improved, in a work package called "incremental build". + WARNING: Currently unsophisticated, this flag should only be + used by Fab developers. The logic behind this flag will soon be + improved, in a work package called "incremental build". :param fab_workspace: Overrides the FAB_WORKSPACE environment variable. - If not set, and FAB_WORKSPACE is not set, the fab workspace defaults to *~/fab-workspace*. + If not set, and FAB_WORKSPACE is not set, the fab workspace + defaults to *~/fab-workspace*. :param two_stage: - Compile .mod files first in a separate pass. Theoretically faster in some projects.. + Compile .mod files first in a separate pass. Theoretically faster + in some projects. :param verbose: DEBUG level logging. """ self._tool_box = tool_box + self._mpi = mpi + self._openmp = openmp self.two_stage = two_stage self.verbose = verbose - compiler = tool_box[Category.FORTRAN_COMPILER] + compiler = tool_box.get_tool(Category.FORTRAN_COMPILER, mpi=mpi) project_label = Template(project_label).safe_substitute( compiler=compiler.name, two_stage=f'{int(two_stage)+1}stage') @@ -83,7 +98,8 @@ def __init__(self, project_label: str, logger.info(f"fab workspace is {fab_workspace}") self.project_workspace: Path = fab_workspace / self.project_label - self.metrics_folder: Path = self.project_workspace / 'metrics' / self.project_label + self.metrics_folder: Path = (self.project_workspace / 'metrics' / + self.project_label) # source config self.source_root: Path = self.project_workspace / SOURCE_ROOT @@ -93,7 +109,8 @@ def __init__(self, project_label: str, self.multiprocessing = multiprocessing # turn off multiprocessing when debugging - # todo: turn off multiprocessing when running tests, as a good test runner will run using mp + # todo: turn off multiprocessing when running tests, as a good test + # runner will run using mp if 'pydevd' in str(sys.gettrace()): logger.info('debugger detected, running without multiprocessing') self.multiprocessing = False @@ -129,7 +146,8 @@ def __enter__(self): self._start_time = datetime.now().replace(microsecond=0) self._run_prep() - with TimerLogger(f'running {self.project_label} build steps') as build_timer: + with TimerLogger(f'running {self.project_label} ' + f'build steps') as build_timer: # this will return to the build script self._build_timer = build_timer return self @@ -138,10 +156,12 @@ def __exit__(self, exc_type, exc_val, exc_tb): if not exc_type: # None if there's no error.
if CLEANUP_COUNT not in self.artefact_store: - logger.info("no housekeeping step was run, using a default hard cleanup") + logger.info("no housekeeping step was run, using a " + "default hard cleanup") cleanup_prebuilds(config=self, all_unused=True) - logger.info(f"Building '{self.project_label}' took {datetime.now() - self._start_time}") + logger.info(f"Building '{self.project_label}' took " + f"{datetime.now() - self._start_time}") # always self._finalise_metrics(self._start_time, self._build_timer) @@ -164,9 +184,20 @@ def build_output(self) -> Path: ''' return self.project_workspace / BUILD_OUTPUT + @property + def mpi(self) -> bool: + ''':returns: whether MPI is requested or not in this config.''' + return self._mpi + + @property + def openmp(self) -> bool: + ''':returns: whether OpenMP is requested or not in this config.''' + return self._openmp + def add_current_prebuilds(self, artefacts: Iterable[Path]): """ - Mark the given file paths as being current prebuilds, not to be cleaned during housekeeping. + Mark the given file paths as being current prebuilds, not to be + cleaned during housekeeping. """ self.artefact_store[ArtefactSet.CURRENT_PREBUILDS].update(artefacts) @@ -193,7 +224,8 @@ def _prep_folders(self): def _init_logging(self): # add a file logger for our run self.project_workspace.mkdir(parents=True, exist_ok=True) - log_file_handler = RotatingFileHandler(self.project_workspace / 'log.txt', backupCount=5, delay=True) + log_file_handler = RotatingFileHandler( + self.project_workspace / 'log.txt', backupCount=5, delay=True) log_file_handler.doRollover() logging.getLogger('fab').addHandler(log_file_handler) @@ -207,9 +239,11 @@ def _init_logging(self): def _finalise_logging(self): # remove our file logger fab_logger = logging.getLogger('fab') - log_file_handlers = list(by_type(fab_logger.handlers, RotatingFileHandler)) + log_file_handlers = list(by_type(fab_logger.handlers, + RotatingFileHandler)) if len(log_file_handlers) != 1: - warnings.warn(f'expected to find 1 RotatingFileHandler for removal, found {len(log_file_handlers)}') + warnings.warn(f'expected to find 1 RotatingFileHandler for ' + f'removal, found {len(log_file_handlers)}') fab_logger.removeHandler(log_file_handlers[0]) def _finalise_metrics(self, start_time, steps_timer): @@ -249,14 +283,16 @@ def __init__(self, match: str, flags: List[str]): # For source in the um folder, add an absolute include path AddFlags(match="$source/um/*", flags=['-I$source/include']), - # For source in the um folder, add an include path relative to each source file. + # For source in the um folder, add an include path relative to + # each source file. AddFlags(match="$source/um/*", flags=['-I$relative/include']), """ self.match: str = match self.flags: List[str] = flags - # todo: we don't need the project_workspace, we could just pass in the output folder + # todo: we don't need the project_workspace, we could just pass in the + # output folder def run(self, fpath: Path, input_flags: List[str], config): """ Check if our filter matches a given file. If it does, add our flags. @@ -269,12 +305,16 @@ def run(self, fpath: Path, input_flags: List[str], config): Contains the folders for templating `$source` and `$output`. """ - params = {'relative': fpath.parent, 'source': config.source_root, 'output': config.build_output} + params = {'relative': fpath.parent, + 'source': config.source_root, + 'output': config.build_output} # does the file path match our filter? 
- if not self.match or fnmatch(str(fpath), Template(self.match).substitute(params)): + if not self.match or fnmatch(str(fpath), + Template(self.match).substitute(params)): # use templating to render any relative paths in our flags - add_flags = [Template(flag).substitute(params) for flag in self.flags] + add_flags = [Template(flag).substitute(params) + for flag in self.flags] # add our flags input_flags += add_flags @@ -284,15 +324,18 @@ class FlagsConfig(): """ Return command-line flags for a given path. - Simply allows appending flags but may evolve to also replace and remove flags. + Simply allows appending flags but may evolve to also replace and + remove flags. """ - def __init__(self, common_flags: Optional[List[str]] = None, path_flags: Optional[List[AddFlags]] = None): + def __init__(self, common_flags: Optional[List[str]] = None, + path_flags: Optional[List[AddFlags]] = None): """ :param common_flags: List of flags to apply to all files. E.g `['-O2']`. :param path_flags: - List of :class:`~fab.build_config.AddFlags` objects which apply flags to selected paths. + List of :class:`~fab.build_config.AddFlags` objects which apply + flags to selected paths. """ self.common_flags = common_flags or [] @@ -311,8 +354,8 @@ def flags_for_path(self, path: Path, config): """ # We COULD make the user pass these template params to the constructor - # but we have a design requirement to minimise the config burden on the user, - # so we take care of it for them here instead. + # but we have a design requirement to minimise the config burden on + # the user, so we take care of it for them here instead. params = {'source': config.source_root, 'output': config.build_output} flags = [Template(i).substitute(params) for i in self.common_flags] diff --git a/source/fab/cli.py b/source/fab/cli.py index 07154eec..ae3b626c 100644 --- a/source/fab/cli.py +++ b/source/fab/cli.py @@ -34,7 +34,7 @@ def _generic_build_config(folder: Path, kwargs=None) -> BuildConfig: # Set the default Fortran compiler as linker (otherwise e.g. the # C compiler might be used in linking, requiring additional flags) tr = ToolRepository() - fc = tr.get_default(Category.FORTRAN_COMPILER) + fc = tr.get_default(Category.FORTRAN_COMPILER, mpi=False) # TODO: This assumes a mapping of compiler name to the corresponding # linker name (i.e. `linker-gfortran` or `linker-ifort`). Still, that's # better than hard-coding gnu here. @@ -44,7 +44,7 @@ def _generic_build_config(folder: Path, kwargs=None) -> BuildConfig: tool_box.add_tool(linker) # Within the fab workspace, we'll create a project workspace. # Ideally we'd just use folder.name, but to avoid clashes, we'll use the full absolute path. 
- with BuildConfig(project_label=project_label, + with BuildConfig(project_label=project_label, mpi=False, openmp=False, tool_box=tool_box, **kwargs) as config: grab_folder(config, folder) find_source_files(config) diff --git a/source/fab/steps/compile_c.py b/source/fab/steps/compile_c.py index 8ac03f65..320b3d72 100644 --- a/source/fab/steps/compile_c.py +++ b/source/fab/steps/compile_c.py @@ -13,7 +13,8 @@ from typing import List, Dict, Optional, Tuple from fab import FabException -from fab.artefacts import ArtefactsGetter, ArtefactSet, FilterBuildTrees +from fab.artefacts import (ArtefactsGetter, ArtefactSet, ArtefactStore, + FilterBuildTrees) from fab.build_config import BuildConfig, FlagsConfig from fab.metrics import send_metric from fab.parse.c import AnalysedC @@ -36,9 +37,11 @@ class MpCommonArgs: @step def compile_c(config, common_flags: Optional[List[str]] = None, - path_flags: Optional[List] = None, source: Optional[ArtefactsGetter] = None): + path_flags: Optional[List] = None, + source: Optional[ArtefactsGetter] = None): """ - Compiles all C files in all build trees, creating or extending a set of compiled files for each target. + Compiles all C files in all build trees, creating or extending a set of + compiled files for each target. This step uses multiprocessing. All C files are compiled in a single pass. @@ -47,22 +50,22 @@ def compile_c(config, common_flags: Optional[List[str]] = None, Uses multiprocessing, unless disabled in the *config*. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param common_flags: - A list of strings to be included in the command line call, for all files. + A list of strings to be included in the command line call, for all + files. :param path_flags: - A list of :class:`~fab.build_config.AddFlags`, defining flags to be included in the command line call - for selected files. + A list of :class:`~fab.build_config.AddFlags`, defining flags to be + included in the command line call for selected files. :param source: - An :class:`~fab.artefacts.ArtefactsGetter` which give us our c files to process. + An :class:`~fab.artefacts.ArtefactsGetter` which gives us our c files + to process. """ # todo: tell the compiler (and other steps) which artefact name to create?
- compiler = config.tool_box[Category.C_COMPILER] - logger.info(f'C compiler is {compiler}') - env_flags = os.getenv('CFLAGS', '').split() common_flags = env_flags + (common_flags or []) @@ -74,6 +77,13 @@ def compile_c(config, common_flags: Optional[List[str]] = None, to_compile: list = sum(build_lists.values(), []) logger.info(f"compiling {len(to_compile)} c files") + if len(to_compile) == 0: + # No need to look for compiler etc if there is nothing to do + return + + compiler = config.tool_box.get_tool(Category.C_COMPILER, config.mpi) + logger.info(f'C compiler is {compiler}') + mp_payload = MpCommonArgs(config=config, flags=flags) mp_items = [(fpath, mp_payload) for fpath in to_compile] @@ -83,7 +93,8 @@ def compile_c(config, common_flags: Optional[List[str]] = None, compiled_c = list(by_type(compilation_results, CompiledFile)) logger.info(f"compiled {len(compiled_c)} c files") - # record the prebuild files as being current, so the cleanup knows not to delete them + # record the prebuild files as being current, so the cleanup knows not + # to delete them prebuild_files = {r.output_fpath for r in compiled_c} config.add_current_prebuilds(prebuild_files) @@ -92,9 +103,12 @@ def compile_c(config, common_flags: Optional[List[str]] = None, # todo: very similar code in fortran compiler -def store_artefacts(compiled_files: List[CompiledFile], build_lists: Dict[str, List], artefact_store): +def store_artefacts(compiled_files: List[CompiledFile], + build_lists: Dict[str, List], + artefact_store: ArtefactStore): """ - Create our artefact collection; object files for each compiled file, per root symbol. + Create our artefact collection; object files for each compiled file, + per root symbol. """ # add the new object files to the artefact store, by target @@ -117,25 +131,31 @@ def _compile_file(arg: Tuple[AnalysedC, MpCommonArgs]): config=config)) obj_combo_hash = _get_obj_combo_hash(compiler, analysed_file, flags) - obj_file_prebuild = config.prebuild_folder / f'{analysed_file.fpath.stem}.{obj_combo_hash:x}.o' + obj_file_prebuild = (config.prebuild_folder / + f'{analysed_file.fpath.stem}.' + f'{obj_combo_hash:x}.o') # prebuild available? 
if obj_file_prebuild.exists(): - log_or_dot(logger, f'CompileC using prebuild: {analysed_file.fpath}') + log_or_dot(logger, f'CompileC using prebuild: ' + f'{analysed_file.fpath}') else: obj_file_prebuild.parent.mkdir(parents=True, exist_ok=True) log_or_dot(logger, f'CompileC compiling {analysed_file.fpath}') try: compiler.compile_file(analysed_file.fpath, obj_file_prebuild, + openmp=config.openmp, add_flags=flags) except Exception as err: - return FabException(f"error compiling {analysed_file.fpath}:\n{err}") + return FabException(f"error compiling " + f"{analysed_file.fpath}:\n{err}") send_metric( group="compile c", name=str(analysed_file.fpath), value={'time_taken': timer.taken, 'start': timer.start}) - return CompiledFile(input_fpath=analysed_file.fpath, output_fpath=obj_file_prebuild) + return CompiledFile(input_fpath=analysed_file.fpath, + output_fpath=obj_file_prebuild) def _get_obj_combo_hash(compiler, analysed_file, flags: Flags): @@ -146,6 +166,7 @@ def _get_obj_combo_hash(compiler, analysed_file, flags: Flags): flags.checksum(), compiler.get_hash(), ]) - except TypeError: - raise ValueError("could not generate combo hash for object file") + except TypeError as err: + raise ValueError("could not generate combo hash for " + "object file") from err return obj_combo_hash diff --git a/source/fab/steps/compile_fortran.py b/source/fab/steps/compile_fortran.py index 734abad9..4b065811 100644 --- a/source/fab/steps/compile_fortran.py +++ b/source/fab/steps/compile_fortran.py @@ -33,7 +33,8 @@ @dataclass class MpCommonArgs: - """Arguments to be passed into the multiprocessing function, alongside the filenames.""" + """Arguments to be passed into the multiprocessing function, + alongside the filenames.""" config: BuildConfig flags: FlagsConfig mod_hashes: Dict[str, int] @@ -41,50 +42,61 @@ class MpCommonArgs: @step -def compile_fortran(config: BuildConfig, common_flags: Optional[List[str]] = None, - path_flags: Optional[List] = None, source: Optional[ArtefactsGetter] = None): +def compile_fortran(config: BuildConfig, + common_flags: Optional[List[str]] = None, + path_flags: Optional[List] = None, + source: Optional[ArtefactsGetter] = None): """ - Compiles all Fortran files in all build trees, creating/extending a set of compiled files for each build target. + Compiles all Fortran files in all build trees, creating/extending a set + of compiled files for each build target. - Files are compiled in multiple passes, with each pass enabling further files to be compiled in the next pass. + Files are compiled in multiple passes, with each pass enabling further + files to be compiled in the next pass. Uses multiprocessing, unless disabled in the config. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param common_flags: - A list of strings to be included in the command line call, for all files. + A list of strings to be included in the command line call, for + all files. :param path_flags: - A list of :class:`~fab.build_config.AddFlags`, defining flags to be included in the command line call - for selected files. + A list of :class:`~fab.build_config.AddFlags`, defining flags to be + included in the command line call for selected files. :param source: - An :class:`~fab.artefacts.ArtefactsGetter` which gives us our Fortran files to process. 
+ An :class:`~fab.artefacts.ArtefactsGetter` which gives us our Fortran + files to process. """ - compiler, flags_config = handle_compiler_args(config, common_flags, - path_flags) - # Set module output folder: - compiler.set_module_output_path(config.build_output) - source_getter = source or DEFAULT_SOURCE_GETTER mod_hashes: Dict[str, int] = {} # get all the source to compile, for all build trees, into one big lump build_lists: Dict[str, List] = source_getter(config.artefact_store) + # compile everything in multiple passes + compiled: Dict[Path, CompiledFile] = {} + uncompiled: Set[AnalysedFortran] = set(sum(build_lists.values(), [])) + logger.info(f"compiling {len(uncompiled)} fortran files") + + # No need to do anything else if there are no files to compile + if len(uncompiled) == 0: + return + + compiler, flags_config = handle_compiler_args(config, common_flags, + path_flags) + # Set module output folder: + compiler.set_module_output_path(config.build_output) + syntax_only = compiler.has_syntax_only and config.two_stage # build the arguments passed to the multiprocessing function mp_common_args = MpCommonArgs( config=config, flags=flags_config, mod_hashes=mod_hashes, syntax_only=syntax_only) - # compile everything in multiple passes - compiled: Dict[Path, CompiledFile] = {} - uncompiled: Set[AnalysedFortran] = set(sum(build_lists.values(), [])) - logger.info(f"compiling {len(uncompiled)} fortran files") - if syntax_only: logger.info("Starting two-stage compile: mod files, multiple passes") elif config.two_stage: @@ -92,16 +104,19 @@ def compile_fortran(config: BuildConfig, common_flags: Optional[List[str]] = Non f"disabling two-stage compile.") while uncompiled: - uncompiled = compile_pass(config=config, compiled=compiled, uncompiled=uncompiled, - mp_common_args=mp_common_args, mod_hashes=mod_hashes) + uncompiled = compile_pass(config=config, compiled=compiled, + uncompiled=uncompiled, + mp_common_args=mp_common_args, + mod_hashes=mod_hashes) log_or_dot_finish(logger) if syntax_only: logger.info("Finalising two-stage compile: object files, single pass") mp_common_args.syntax_only = False - # a single pass should now compile all the object files in one go - uncompiled = set(sum(build_lists.values(), [])) # todo: order by last compile duration + # A single pass should now compile all the object files in one go + # todo: order by last compile duration + uncompiled = set(sum(build_lists.values(), [])) mp_args = [(fpath, mp_common_args) for fpath in uncompiled] results_this_pass = run_mp(config, items=mp_args, func=process_file) log_or_dot_finish(logger) @@ -127,29 +142,36 @@ def handle_compiler_args(config: BuildConfig, common_flags=None, # Collate the flags from 1) flags env and 2) parameters. env_flags = os.getenv('FFLAGS', '').split() common_flags = env_flags + (common_flags or []) - flags_config = FlagsConfig(common_flags=common_flags, path_flags=path_flags) + flags_config = FlagsConfig(common_flags=common_flags, + path_flags=path_flags) return compiler, flags_config -def compile_pass(config, compiled: Dict[Path, CompiledFile], uncompiled: Set[AnalysedFortran], +def compile_pass(config, compiled: Dict[Path, CompiledFile], + uncompiled: Set[AnalysedFortran], mp_common_args: MpCommonArgs, mod_hashes: Dict[str, int]): # what can we compile next? 
compile_next = get_compile_next(compiled, uncompiled) # compile - logger.info(f"\ncompiling {len(compile_next)} of {len(uncompiled)} remaining files") + logger.info(f"\ncompiling {len(compile_next)} of {len(uncompiled)} " + f"remaining files") mp_args = [(fpath, mp_common_args) for fpath in compile_next] results_this_pass = run_mp(config, items=mp_args, func=process_file) - # there's a compilation result and a list of prebuild files for each compiled file - compilation_results, prebuild_files = zip(*results_this_pass) if results_this_pass else (tuple(), tuple()) + # there's a compilation result and a list of prebuild files for each + # compiled file + compilation_results, prebuild_files = (zip(*results_this_pass) + if results_this_pass + else (tuple(), tuple())) check_for_errors(compilation_results, caller_label="compile_pass") compiled_this_pass = list(by_type(compilation_results, CompiledFile)) logger.debug(f"compiled {len(compiled_this_pass)} files") - # record the prebuild files as being current, so the cleanup knows not to delete them + # record the prebuild files as being current, so the cleanup knows + # not to delete them config.add_current_prebuilds(chain(*prebuild_files)) # hash the modules we just created @@ -164,15 +186,19 @@ def compile_pass(config, compiled: Dict[Path, CompiledFile], uncompiled: Set[Ana return uncompiled -def get_compile_next(compiled: Dict[Path, CompiledFile], uncompiled: Set[AnalysedFortran]) \ - -> Set[AnalysedFortran]: - - # find what to compile next +def get_compile_next(compiled: Dict[Path, CompiledFile], + uncompiled: Set[AnalysedFortran]) -> Set[AnalysedFortran]: + '''Find what to compile next. + :param compiled: A dictionary with already compiled files. + :param uncompiled: The set of still to be compiled files. + :returns: A set with all files that can now be compiled. + ''' compile_next = set() not_ready: Dict[Path, List[Path]] = {} for af in uncompiled: # all deps ready? - unfulfilled = [dep for dep in af.file_deps if dep not in compiled and dep.suffix == '.f90'] + unfulfilled = [dep for dep in af.file_deps + if dep not in compiled and dep.suffix == '.f90'] if unfulfilled: not_ready[af.fpath] = unfulfilled else: @@ -195,7 +221,8 @@ def store_artefacts(compiled_files: Dict[Path, CompiledFile], build_lists: Dict[str, List], artefact_store: ArtefactStore): """ - Create our artefact collection; object files for each compiled file, per root symbol. + Create our artefact collection; object files for each compiled file, per + root symbol. """ # add the new object files to the artefact store, by target @@ -208,32 +235,40 @@ def store_artefacts(compiled_files: Dict[Path, CompiledFile], def process_file(arg: Tuple[AnalysedFortran, MpCommonArgs]) \ -> Union[Tuple[CompiledFile, List[Path]], Tuple[Exception, None]]: """ - Prepare to compile a fortran file, and compile it if anything has changed since it was last compiled. + Prepare to compile a fortran file, and compile it if anything has changed + since it was last compiled. Object files are created directly as artefacts in the prebuild folder. - Mod files are created in the module folder and copied as artefacts into the prebuild folder. - If nothing has changed, prebuilt mod files are copied *from* the prebuild folder into the module folder. + Mod files are created in the module folder and copied as artefacts into + the prebuild folder. If nothing has changed, prebuilt mod files are copied + *from* the prebuild folder into the module folder. .. 
note:: - Prebuild filenames include a "combo-hash" of everything that, if changed, must trigger a recompile. - For mod and object files, this includes a checksum of: *source code, compiler*. - For object files, this also includes a checksum of: *compiler flags, modules on which we depend*. + Prebuild filenames include a "combo-hash" of everything that, if + changed, must trigger a recompile. For mod and object files, this + includes a checksum of: *source code, compiler*. For object files, + this also includes a checksum of: *compiler flags, modules on which + we depend*. - Before compiling a file, we calculate the combo hashes and see if the output files already exists. + Before compiling a file, we calculate the combo hashes and see if the + output files already exist. - Returns a compilation result, regardless of whether it was compiled or prebuilt. + Returns a compilation result, regardless of whether it was compiled or + prebuilt. """ with Timer() as timer: analysed_file, mp_common_args = arg config = mp_common_args.config - compiler = config.tool_box[Category.FORTRAN_COMPILER] + compiler = config.tool_box.get_tool(Category.FORTRAN_COMPILER, + config.mpi) if not isinstance(compiler, FortranCompiler): raise RuntimeError(f"Unexpected tool '{compiler.name}' of type " f"'{type(compiler)}' instead of " f"FortranCompiler") - flags = Flags(mp_common_args.flags.flags_for_path(path=analysed_file.fpath, config=config)) + flags = Flags(mp_common_args.flags.flags_for_path( + path=analysed_file.fpath, config=config)) mod_combo_hash = _get_mod_combo_hash(analysed_file, compiler=compiler) obj_combo_hash = _get_obj_combo_hash(analysed_file, @@ -241,14 +276,18 @@ def process_file(arg: Tuple[AnalysedFortran, MpCommonArgs]) \ compiler=compiler, flags=flags) # calculate the incremental/prebuild artefact filenames - obj_file_prebuild = mp_common_args.config.prebuild_folder / f'{analysed_file.fpath.stem}.{obj_combo_hash:x}.o' + obj_file_prebuild = ( + mp_common_args.config.prebuild_folder / + f'{analysed_file.fpath.stem}.{obj_combo_hash:x}.o') mod_file_prebuilds = [ - mp_common_args.config.prebuild_folder / f'{mod_def}.{mod_combo_hash:x}.mod' + (mp_common_args.config.prebuild_folder / + f'{mod_def}.{mod_combo_hash:x}.mod') for mod_def in analysed_file.module_defs ] # have we got all the prebuilt artefacts we need to avoid a recompile?
- prebuilds_exist = list(map(lambda f: f.exists(), [obj_file_prebuild] + mod_file_prebuilds)) + prebuilds_exist = list(map(lambda f: f.exists(), + [obj_file_prebuild] + mod_file_prebuilds)) if not all(prebuilds_exist): # compile try: @@ -257,28 +296,34 @@ def process_file(arg: Tuple[AnalysedFortran, MpCommonArgs]) \ output_fpath=obj_file_prebuild, mp_common_args=mp_common_args) except Exception as err: - return Exception(f"Error compiling {analysed_file.fpath}:\n{err}"), None + return Exception(f"Error compiling {analysed_file.fpath}:\n" + f"{err}"), None # copy the mod files to the prebuild folder as artefacts for reuse - # note: perhaps we could sometimes avoid these copies because mods can change less frequently than obj + # note: perhaps we could sometimes avoid these copies because mods + # can change less frequently than obj for mod_def in analysed_file.module_defs: shutil.copy2( mp_common_args.config.build_output / f'{mod_def}.mod', - mp_common_args.config.prebuild_folder / f'{mod_def}.{mod_combo_hash:x}.mod', + (mp_common_args.config.prebuild_folder / + f'{mod_def}.{mod_combo_hash:x}.mod'), ) else: - log_or_dot(logger, f'CompileFortran using prebuild: {analysed_file.fpath}') + log_or_dot(logger, + f'CompileFortran using prebuild: {analysed_file.fpath}') # copy the prebuilt mod files from the prebuild folder for mod_def in analysed_file.module_defs: shutil.copy2( - mp_common_args.config.prebuild_folder / f'{mod_def}.{mod_combo_hash:x}.mod', + (mp_common_args.config.prebuild_folder + / f'{mod_def}.{mod_combo_hash:x}.mod'), mp_common_args.config.build_output / f'{mod_def}.mod', ) # return the results - compiled_file = CompiledFile(input_fpath=analysed_file.fpath, output_fpath=obj_file_prebuild) + compiled_file = CompiledFile(input_fpath=analysed_file.fpath, + output_fpath=obj_file_prebuild) artefacts = [obj_file_prebuild] + mod_file_prebuilds metric_name = "compile fortran" @@ -298,7 +343,8 @@ def _get_obj_combo_hash(analysed_file, mp_common_args: MpCommonArgs, # get a combo hash of things which matter to the object file we define # todo: don't just silently use 0 for a missing dep hash mod_deps_hashes = { - mod_dep: mp_common_args.mod_hashes.get(mod_dep, 0) for mod_dep in analysed_file.module_deps} + mod_dep: mp_common_args.mod_hashes.get(mod_dep, 0) + for mod_dep in analysed_file.module_deps} try: obj_combo_hash = sum([ analysed_file.file_hash, @@ -306,8 +352,9 @@ def _get_obj_combo_hash(analysed_file, mp_common_args: MpCommonArgs, sum(mod_deps_hashes.values()), compiler.get_hash(), ]) - except TypeError: - raise ValueError("could not generate combo hash for object file") + except TypeError as err: + raise ValueError("Could not generate combo hash " + "for object file") from err return obj_combo_hash @@ -318,8 +365,9 @@ def _get_mod_combo_hash(analysed_file, compiler: Compiler): analysed_file.file_hash, compiler.get_hash(), ]) - except TypeError: - raise ValueError("could not generate combo hash for mod files") + except TypeError as err: + raise ValueError("Could not generate combo " + "hash for mod files") from err return mod_combo_hash @@ -340,11 +388,13 @@ def compile_file(analysed_file, flags, output_fpath, mp_common_args): compiler = config.tool_box[Category.FORTRAN_COMPILER] compiler.compile_file(input_file=analysed_file, output_file=output_fpath, + openmp=config.openmp, add_flags=flags, syntax_only=mp_common_args.syntax_only) -def get_mod_hashes(analysed_files: Set[AnalysedFortran], config) -> Dict[str, int]: +def get_mod_hashes(analysed_files: Set[AnalysedFortran], + config: 
BuildConfig) -> Dict[str, int]: """ Get the hash of every module file defined in the list of analysed files. diff --git a/source/fab/steps/link.py b/source/fab/steps/link.py index 5c6d15ce..78146ef6 100644 --- a/source/fab/steps/link.py +++ b/source/fab/steps/link.py @@ -22,8 +22,9 @@ class DefaultLinkerSource(ArtefactsGetter): """ A source getter specifically for linking. - Looks for the default output from archiving objects, falls back to default compiler output. - This allows a link step to work with or without a preceding object archive step. + Looks for the default output from archiving objects, falls back to + default compiler output. This allows a link step to work with or without + a preceding object archive step. """ def __call__(self, artefact_store): @@ -36,15 +37,18 @@ def link_exe(config, flags=None, source: Optional[ArtefactsGetter] = None): """ Link object files into an executable for every build target. - Expects one or more build targets from its artefact getter, of the form Dict[name, object_files]. + Expects one or more build targets from its artefact getter, of the form + Dict[name, object_files]. - The default artefact getter, :py:const:`~fab.steps.link_exe.DefaultLinkerSource`, looks for any output - from an :class:`~fab.steps.archive_objects.ArchiveObjects` step, and falls back to using output from - compiler steps. + The default artefact getter, + :py:const:`~fab.steps.link_exe.DefaultLinkerSource`, looks for any output + from an :class:`~fab.steps.archive_objects.ArchiveObjects` step, and + falls back to using output from compiler steps. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param flags: A list of flags to pass to the linker. :param source: @@ -52,7 +56,7 @@ def link_exe(config, flags=None, source: Optional[ArtefactsGetter] = None): output from compiler steps, which typically is the expected behaviour. """ - linker = config.tool_box[Category.LINKER] + linker = config.tool_box.get_tool(Category.LINKER, config.mpi) logger.info(f'Linker is {linker.name}') flags = flags or [] @@ -61,25 +65,29 @@ def link_exe(config, flags=None, source: Optional[ArtefactsGetter] = None): target_objects = source_getter(config.artefact_store) for root, objects in target_objects.items(): exe_path = config.project_workspace / f'{root}' - linker.link(objects, exe_path, flags) + linker.link(objects, exe_path, openmp=config.openmp, add_libs=flags) config.artefact_store.add(ArtefactSet.EXECUTABLES, exe_path) -# todo: the bit about Dict[None, object_files] seems too obscure - try to rethink this. +# todo: the bit about Dict[None, object_files] seems too obscure - try to +# rethink this. @step def link_shared_object(config, output_fpath: str, flags=None, source: Optional[ArtefactsGetter] = None): """ Produce a shared object (*.so*) file from the given build target. - Expects a *single build target* from its artefact getter, of the form Dict[None, object_files]. - We can assume the list of object files is the entire project source, compiled. + Expects a *single build target* from its artefact getter, of the form + Dict[None, object_files]. We can assume the list of object files is the + entire project source, compiled. 
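For illustration, a sketch of a link step under the new scheme (the library flags are copied from the gungho config above and may not apply elsewhere): '-fopenmp' is no longer passed explicitly, because link_exe forwards config.openmp to the linker, which appends the compiler's own OpenMP flag.

    from fab.steps.link import link_exe

    # Only libraries are listed; OpenMP and the MPI-aware linker choice
    # come from the BuildConfig (config.openmp / config.mpi).
    link_exe(state, flags=['-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf'])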
- Params are as for :class:`~fab.steps.link_exe.LinkerBase`, with the addition of: + Params are as for :class:`~fab.steps.link_exe.LinkerBase`, with the + addition of: :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param output_fpath: File path of the shared object to create. :param flags: @@ -100,10 +108,11 @@ def link_shared_object(config, output_fpath: str, flags=None, if f not in flags: flags.append(f) - # We expect a single build target containing the whole codebase, with no name (as it's not a root symbol). + # We expect a single build target containing the whole codebase, with no + # name (as it's not a root symbol). target_objects = source_getter(config.artefact_store) assert list(target_objects.keys()) == [None] objects = target_objects[None] out_name = Template(output_fpath).substitute(output=config.build_output) - linker.link(objects, out_name, add_libs=flags) + linker.link(objects, out_name, openmp=config.openmp, add_libs=flags) diff --git a/source/fab/tools/__init__.py b/source/fab/tools/__init__.py index f30cf7fa..ed5850c5 100644 --- a/source/fab/tools/__init__.py +++ b/source/fab/tools/__init__.py @@ -11,7 +11,8 @@ from fab.tools.category import Category from fab.tools.compiler import (CCompiler, Compiler, FortranCompiler, Gcc, Gfortran, GnuVersionHandling, Icc, Ifort, - IntelVersionHandling) + IntelVersionHandling, MpiGcc, MpiGfortran, + MpiIcc, MpiIfort) from fab.tools.flags import Flags from fab.tools.linker import Linker from fab.tools.psyclone import Psyclone @@ -42,6 +43,10 @@ "Ifort", "IntelVersionHandling", "Linker", + "MpiGcc", + "MpiGfortran", + "MpiIcc", + "MpiIfort", "Preprocessor", "Psyclone", "Rsync", diff --git a/source/fab/tools/compiler.py b/source/fab/tools/compiler.py index 13e458ae..52c5a0cb 100644 --- a/source/fab/tools/compiler.py +++ b/source/fab/tools/compiler.py @@ -11,6 +11,7 @@ import os import re from pathlib import Path +import warnings from typing import List, Optional, Tuple, Union import zlib @@ -30,11 +31,12 @@ class Compiler(CompilerSuiteTool): :param exec_name: name of the executable to start. :param suite: name of the compiler suite this tool belongs to. :param category: the Category (C_COMPILER or FORTRAN_COMPILER). + :param mpi: whether the compiler or linker support MPI. :param compile_flag: the compilation flag to use when only requesting compilation (not linking). 
:param output_flag: the compilation flag to use to indicate the name of the output file - :param omp_flag: the flag to use to enable OpenMP + :param openmp_flag: the flag to use to enable OpenMP ''' # pylint: disable=too-many-arguments @@ -42,14 +44,15 @@ def __init__(self, name: str, exec_name: Union[str, Path], suite: str, category: Category, + mpi: bool = False, compile_flag: Optional[str] = None, output_flag: Optional[str] = None, - omp_flag: Optional[str] = None): - super().__init__(name, exec_name, suite, category) + openmp_flag: Optional[str] = None): + super().__init__(name, exec_name, suite, mpi=mpi, category=category) self._version: Union[Tuple[int, ...], None] = None self._compile_flag = compile_flag if compile_flag else "-c" self._output_flag = output_flag if output_flag else "-o" - self._omp_flag = omp_flag + self._openmp_flag = openmp_flag if openmp_flag else "" self.flags.extend(os.getenv("FFLAGS", "").split()) def get_hash(self) -> int: @@ -58,7 +61,15 @@ def get_hash(self) -> int: return (zlib.crc32(self.name.encode()) + zlib.crc32(self.get_version_string().encode())) - def compile_file(self, input_file: Path, output_file: Path, + @property + def openmp_flag(self) -> str: + ''':returns: The flag to enable OpenMP for this compiler. + ''' + return self._openmp_flag + + def compile_file(self, input_file: Path, + output_file: Path, + openmp: bool, add_flags: Union[None, List[str]] = None): '''Compiles a file. It will add the flag for compilation-only automatically, as well as the output directives. The current working @@ -68,12 +79,20 @@ def compile_file(self, input_file: Path, output_file: Path, them to have different checksums depending on where they live. :param input_file: the path of the input file. - :param outpout_file: the path of the output file. + :param output_file: the path of the output file. + :param openmp: whether OpenMP should be used or not. :param add_flags: additional compiler flags. ''' params: List[Union[Path, str]] = [self._compile_flag] + if openmp: + params.append(self._openmp_flag) if add_flags: + if self._openmp_flag in add_flags: + warnings.warn( + f"OpenMP flag '{self._openmp_flag}' explicitly provided. " + f"OpenMP should be enabled in the BuildConfiguration " + f"instead.") params += add_flags params.extend([input_file.name, @@ -191,18 +210,24 @@ class CCompiler(Compiler): :param name: name of the compiler. :param exec_name: name of the executable to start. :param suite: name of the compiler suite. + :param mpi: whether the compiler or linker support MPI. :param compile_flag: the compilation flag to use when only requesting compilation (not linking).
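A short sketch of the new compile_file behaviour (the file paths are hypothetical, and compile_file invokes the real compiler, so this assumes gfortran is installed): the OpenMP flag comes from the compiler definition, and passing it again by hand triggers a warning.

    from pathlib import Path
    from fab.tools import Gfortran

    fc = Gfortran()  # its openmp_flag is '-fopenmp'

    # '-fopenmp' is appended automatically because openmp=True:
    fc.compile_file(Path('util.f90'), Path('util.o'), openmp=True)

    # Passing the flag explicitly still works, but emits a warning that
    # OpenMP should be enabled via the BuildConfig instead:
    fc.compile_file(Path('util.f90'), Path('util.o'), openmp=True,
                    add_flags=['-fopenmp'])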
:param output_flag: the compilation flag to use to indicate the name of the output file - :param omp_flag: the flag to use to enable OpenMP + :param openmp_flag: the flag to use to enable OpenMP ''' # pylint: disable=too-many-arguments def __init__(self, name: str, exec_name: str, suite: str, - compile_flag=None, output_flag=None, omp_flag=None): - super().__init__(name, exec_name, suite, Category.C_COMPILER, - compile_flag, output_flag, omp_flag) + mpi: bool = False, + compile_flag: Optional[str] = None, + output_flag: Optional[str] = None, + openmp_flag: Optional[str] = None): + super().__init__(name, exec_name, suite, + category=Category.C_COMPILER, mpi=mpi, + compile_flag=compile_flag, output_flag=output_flag, + openmp_flag=openmp_flag) # ============================================================================ @@ -214,27 +239,36 @@ class FortranCompiler(Compiler): :param name: name of the compiler. :param exec_name: name of the executable to start. :param suite: name of the compiler suite. - :param module_folder_flag: the compiler flag to indicate where to - store created module files. - :param syntax_only_flag: flag to indicate to only do a syntax check. - The side effect is that the module files are created. + :param mpi: whether the compiler or linker support MPI. :param compile_flag: the compilation flag to use when only requesting compilation (not linking). :param output_flag: the compilation flag to use to indicate the name of the output file - :param omp_flag: the flag to use to enable OpenMP + :param module_folder_flag: the compiler flag to indicate where to + store created module files. + :param openmp_flag: the flag to use to enable OpenMP + :param syntax_only_flag: flag to indicate to only do a syntax check. + The side effect is that the module files are created. ''' # pylint: disable=too-many-arguments def __init__(self, name: str, exec_name: str, suite: str, - module_folder_flag: str, syntax_only_flag=None, - compile_flag=None, output_flag=None, omp_flag=None): - - super().__init__(name, exec_name, suite, Category.FORTRAN_COMPILER, - compile_flag, output_flag, omp_flag) - self._module_folder_flag = module_folder_flag - self._module_output_path = "" + mpi: bool = False, + compile_flag: Optional[str] = None, + output_flag: Optional[str] = None, + openmp_flag: Optional[str] = None, + module_folder_flag: Optional[str] = None, + syntax_only_flag: Optional[str] = None, + ): + + super().__init__(name=name, exec_name=exec_name, suite=suite, + category=Category.FORTRAN_COMPILER, + mpi=mpi, compile_flag=compile_flag, + output_flag=output_flag, openmp_flag=openmp_flag) + self._module_folder_flag = (module_folder_flag if module_folder_flag + else "") self._syntax_only_flag = syntax_only_flag + self._module_output_path = "" @property def has_syntax_only(self) -> bool: @@ -248,7 +282,9 @@ def set_module_output_path(self, path: Path): ''' self._module_output_path = str(path) - def compile_file(self, input_file: Path, output_file: Path, + def compile_file(self, input_file: Path, + output_file: Path, + openmp: bool, add_flags: Union[None, List[str]] = None, syntax_only: bool = False): '''Compiles a file. 
@@ -274,7 +310,8 @@ def compile_file(self, input_file: Path, output_file: Path, if self._module_folder_flag and self._module_output_path: params.append(self._module_folder_flag) params.append(self._module_output_path) - super().compile_file(input_file, output_file, params) + super().compile_file(input_file, output_file, openmp=openmp, + add_flags=params) # ============================================================================ @@ -321,11 +358,26 @@ class Gcc(GnuVersionHandling, CCompiler): :param name: name of this compiler. :param exec_name: name of the executable. + :param mpi: whether the compiler supports MPI. ''' def __init__(self, name: str = "gcc", - exec_name: str = "gcc"): - super().__init__(name, exec_name, "gnu", omp_flag="-fopenmp") + exec_name: str = "gcc", + mpi: bool = False): + super().__init__(name, exec_name, suite="gnu", mpi=mpi, + openmp_flag="-fopenmp") + + +# ============================================================================ +class MpiGcc(Gcc): + '''Class for a simple wrapper around gcc that supports MPI. + It calls `mpicc`. + ''' + + def __init__(self): + super().__init__(name="mpicc-gcc", + exec_name="mpicc", + mpi=True) # ============================================================================ @@ -334,16 +386,31 @@ class Gfortran(GnuVersionHandling, FortranCompiler): :param name: name of this compiler. :param exec_name: name of the executable. + :param mpi: whether the compiler supports MPI. ''' + def __init__(self, name: str = "gfortran", - exec_name: str = "gfortran"): - super().__init__(name, exec_name, "gnu", + exec_name: str = "gfortran", + mpi: bool = False): + super().__init__(name, exec_name, suite="gnu", mpi=mpi, + openmp_flag="-fopenmp", module_folder_flag="-J", - omp_flag="-fopenmp", syntax_only_flag="-fsyntax-only") +# ============================================================================ +class MpiGfortran(Gfortran): + '''Class for a simple wrapper around gfortran that supports MPI. + It calls `mpif90`. + ''' + + def __init__(self): + super().__init__(name="mpif90-gfortran", + exec_name="mpif90", + mpi=True) + + # ============================================================================ class IntelVersionHandling(): '''Mixin to handle version information from Intel compilers''' @@ -384,12 +451,26 @@ class Icc(IntelVersionHandling, CCompiler): :param name: name of this compiler. :param exec_name: name of the executable. + :param mpi: whether the compiler supports MPI. ''' def __init__(self, name: str = "icc", - exec_name: str = "icc"): - super().__init__(name, exec_name, "intel-classic", - omp_flag="-qopenmp") + exec_name: str = "icc", + mpi: bool = False): + super().__init__(name, exec_name, suite="intel-classic", mpi=mpi, + openmp_flag="-qopenmp") + + +# ============================================================================ +class MpiIcc(Icc): + '''Class for a simple wrapper around icc that supports MPI. + It calls `mpicc`. + ''' + + def __init__(self): + super().__init__(name="mpicc-icc", + exec_name="mpicc", + mpi=True) # ============================================================================ @@ -398,11 +479,26 @@ class Ifort(IntelVersionHandling, FortranCompiler): :param name: name of this compiler. :param exec_name: name of the executable. + :param mpi: whether the compiler supports MPI. 
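As a brief aside, a sketch of how the MPI wrappers relate to their serial base classes (using only names defined in this change): each wrapper switches the executable to the MPI compiler wrapper and reports MPI support, while inheriting the suite and OpenMP flag.

    from fab.tools import Gfortran, MpiGfortran

    serial = Gfortran()       # runs 'gfortran', serial.mpi is False
    parallel = MpiGfortran()  # runs 'mpif90',   parallel.mpi is True

    assert serial.suite == parallel.suite == 'gnu'
    assert parallel.openmp_flag == '-fopenmp'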
''' + def __init__(self, name: str = "ifort", - exec_name: str = "ifort"): - super().__init__(name, exec_name, "intel-classic", + exec_name: str = "ifort", + mpi: bool = False): + super().__init__(name, exec_name, suite="intel-classic", mpi=mpi, module_folder_flag="-module", - omp_flag="-qopenmp", + openmp_flag="-qopenmp", syntax_only_flag="-syntax-only") + + +# ============================================================================ +class MpiIfort(Ifort): + '''Class for a simple wrapper around ifort that supports MPI. + It calls `mpif90`. + ''' + + def __init__(self): + super().__init__(name="mpif90-ifort", + exec_name="mpif90", + mpi=True) diff --git a/source/fab/tools/linker.py b/source/fab/tools/linker.py index 06bb5cfa..02932a18 100644 --- a/source/fab/tools/linker.py +++ b/source/fab/tools/linker.py @@ -62,12 +62,14 @@ def check_available(self) -> bool: return super().check_available() def link(self, input_files: List[Path], output_file: Path, + openmp: bool, add_libs: Optional[List[str]] = None) -> str: '''Executes the linker with the specified input files, creating `output_file`. :param input_files: list of input files to link. :param output_file: output file. + :param openmp: whether OpenMP is requested or not. :param add_libs: additional linker flags. :returns: the stdout of the link command @@ -75,6 +77,8 @@ def link(self, input_files: List[Path], output_file: Path, if self._compiler: # Create a copy: params = self._compiler.flags[:] + if openmp: + params.append(self._compiler.openmp_flag) else: params = [] # TODO: why are the .o files sorted? That shouldn't matter diff --git a/source/fab/tools/tool.py b/source/fab/tools/tool.py index af9b8bfb..9eaa42e1 100644 --- a/source/fab/tools/tool.py +++ b/source/fab/tools/tool.py @@ -181,13 +181,20 @@ class CompilerSuiteTool(Tool): :param exec_name: name of the executable to start. :param suite: name of the compiler suite. :param category: the Category to which this tool belongs. + :param mpi: whether the compiler or linker support MPI. ''' def __init__(self, name: str, exec_name: Union[str, Path], suite: str, - category: Category): + category: Category, mpi: bool = False): super().__init__(name, exec_name, category) self._suite = suite + self._mpi = mpi @property def suite(self) -> str: ''':returns: the compiler suite of this tool.''' return self._suite + + @property + def mpi(self) -> bool: + ''':returns: whether this tool supports MPI or not.''' + return self._mpi diff --git a/source/fab/tools/tool_box.py b/source/fab/tools/tool_box.py index 7704feeb..b1aafb10 100644 --- a/source/fab/tools/tool_box.py +++ b/source/fab/tools/tool_box.py @@ -8,7 +8,7 @@ ''' import warnings -from typing import Dict +from typing import Dict, Optional from fab.tools.category import Category from fab.tools.tool import Tool @@ -46,19 +46,29 @@ def add_tool(self, tool: Tool, f"'{tool}'.") self._all_tools[tool.category] = tool - def get_tool(self, category: Category) -> Tool: + def get_tool(self, category: Category, mpi: Optional[bool] = None) -> Tool: '''Returns the tool for the specified category. :param category: the name of the category in which to look for the tool. + :param mpi: if no compiler or linker is specified when requesting one, + use the MPI setting to find an appropriate default. :raises KeyError: if the category is not known. ''' if category in self._all_tools: + # TODO: Should we test if the compiler has MPI support if + # required?
The original LFRic setup compiled files without + # MPI support (and used an mpi wrapper at link time), so for + # now we don't raise an exception here to ease porting - but + # we probably should raise one eventually. return self._all_tools[category] # No tool was specified for this category, get the default tool - # from the ToolRepository: + # from the ToolRepository, and add it, so we don't need to look + # it up again later. tr = ToolRepository() - return tr.get_default(category) + tool = tr.get_default(category, mpi=mpi) + self._all_tools[category] = tool + return tool diff --git a/source/fab/tools/tool_repository.py b/source/fab/tools/tool_repository.py index 36aaa514..944b421c 100644 --- a/source/fab/tools/tool_repository.py +++ b/source/fab/tools/tool_repository.py @@ -12,7 +12,7 @@ from __future__ import annotations import logging -from typing import Any, Type +from typing import Any, Optional, Type from fab.tools.tool import Tool from fab.tools.category import Category @@ -43,6 +43,7 @@ def __init__(self): # time the instance is requested (since we overwrite __new__). But # we only want to initialise the instance once, so let the constructor # not do anything if the singleton already exists: + # pylint: disable=too-many-locals if ToolRepository._singleton: return @@ -59,9 +60,11 @@ def __init__(self): # We get circular dependencies if imported at top of the file: # pylint: disable=import-outside-toplevel from fab.tools import (Ar, Cpp, CppFortran, Gcc, Gfortran, - Icc, Ifort, Psyclone, Rsync) + Icc, Ifort, MpiGcc, MpiGfortran, + MpiIcc, MpiIfort, Psyclone, Rsync) for cls in [Gcc, Icc, Gfortran, Ifort, Cpp, CppFortran, + MpiGcc, MpiGfortran, MpiIcc, MpiIfort, Fcm, Git, Subversion, Ar, Psyclone, Rsync]: self.add_tool(cls) @@ -117,26 +120,50 @@ def set_default_compiler_suite(self, suite: str): ''' for category in [Category.FORTRAN_COMPILER, Category.C_COMPILER, Category.LINKER]: - all_members = [tool for tool in self[category] - if tool.suite == suite] - if len(all_members) == 0: + # Now sort the tools in this category to have all tools with the + # right suite at the front. We use the stable sorted function with + # the key being tool.suite != suite --> all tools with the right + # suite use False as key, all other tools True. Since False < True, + # this results in all suite tools being at the front of the list. + self[category] = sorted(self[category], + key=lambda x: x.suite != suite) + if len(self[category]) > 0 and self[category][0].suite != suite: raise RuntimeError(f"Cannot find '{category}' " f"in the suite '{suite}'.") - tool = all_members[0] - if tool != self[category][0]: - self[category].remove(tool) - self[category].insert(0, tool) - def get_default(self, category: Category): - '''Returns the default tool for a given category, which is just - the first tool in the category. + def get_default(self, category: Category, + mpi: Optional[bool] = None): + '''Returns the default tool for a given category. For most tools + that will be the first entry in the list of tools. The exceptions + are compilers and linkers: in this case it must be specified if + MPI support is required or not. The default is then the first + tool with the requested level of MPI support. :param category: the category for which to return the default tool. + :param mpi: whether the compiler or linker must support MPI. :raises KeyError: if the category does not exist. + :raises RuntimeError: if no compiler/linker is found with the + requested level of MPI support (yes or no). 
''' if not isinstance(category, Category): raise RuntimeError(f"Invalid category type " f"'{type(category).__name__}'.") - return self[category][0] + + # If not a compiler or linker, return the first tool + if not category.is_compiler and category != Category.LINKER: + return self[category][0] + + if not isinstance(mpi, bool): + raise RuntimeError(f"Invalid or missing mpi specification " + f"for '{category}'.") + + for tool in self[category]: + # If the tool supports/does not support MPI, return the first one + if mpi == tool.mpi: + return tool + + # Don't bother returning an MPI enabled tool if no-MPI is requested - + # that seems to be an unlikely scenario. + raise RuntimeError(f"Could not find '{category}' that supports MPI.") diff --git a/tests/conftest.py b/tests/conftest.py index 55d948fd..835cd294 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,6 +23,7 @@ def fixture_mock_c_compiler(): mock_compiler._version = (1, 2, 3) mock_compiler._name = "mock_c_compiler" mock_compiler._exec_name = "mock_c_compiler.exe" + mock_compiler._openmp_flag = "-fopenmp" return mock_compiler @@ -32,7 +33,7 @@ def fixture_mock_fortran_compiler(): mock_compiler = FortranCompiler("mock_fortran_compiler", "mock_exec", "suite", module_folder_flag="", syntax_only_flag=None, compile_flag=None, - output_flag=None, omp_flag=None) + output_flag=None, openmp_flag=None) mock_compiler.run = mock.Mock() mock_compiler._name = "mock_fortran_compiler" mock_compiler._exec_name = "mock_fortran_compiler.exe" diff --git a/tests/system_tests/incremental_fortran/test_incremental_fortran.py b/tests/system_tests/incremental_fortran/test_incremental_fortran.py index bc4c39eb..acde2066 100644 --- a/tests/system_tests/incremental_fortran/test_incremental_fortran.py +++ b/tests/system_tests/incremental_fortran/test_incremental_fortran.py @@ -38,13 +38,15 @@ class TestIncremental(): def config(self, tmp_path): # tmp_path is a pytest fixture which differs per test, per run logging.getLogger('fab').setLevel(logging.WARNING) - with BuildConfig(project_label=PROJECT_LABEL, - tool_box=ToolBox(), fab_workspace=tmp_path, + with BuildConfig(project_label=PROJECT_LABEL, tool_box=ToolBox(), + fab_workspace=tmp_path, multiprocessing=False) as grab_config: - grab_folder(grab_config, Path(__file__).parent / 'project-source', dst_label='src') + grab_folder(grab_config, Path(__file__).parent / 'project-source', + dst_label='src') build_config = BuildConfig(project_label=PROJECT_LABEL, - tool_box=ToolBox(), fab_workspace=tmp_path, + tool_box=ToolBox(), + fab_workspace=tmp_path, multiprocessing=False) return build_config @@ -244,8 +246,7 @@ class TestCleanupPrebuilds(): @pytest.mark.parametrize("kwargs,expect", in_out) def test_clean(self, tmp_path, kwargs, expect): - with BuildConfig(project_label=PROJECT_LABEL, - tool_box=ToolBox(), + with BuildConfig(project_label=PROJECT_LABEL, tool_box=ToolBox(), fab_workspace=tmp_path, multiprocessing=False) as config: remaining = self._prune(config, kwargs=kwargs) @@ -255,8 +256,8 @@ def test_prune_unused(self, tmp_path): # pruning everything not current current_prebuilds = ArtefactSet.CURRENT_PREBUILDS - with BuildConfig(project_label=PROJECT_LABEL, - tool_box=ToolBox(), fab_workspace=tmp_path, + with BuildConfig(project_label=PROJECT_LABEL, tool_box=ToolBox(), + fab_workspace=tmp_path, multiprocessing=False) as config: config._artefact_store = {current_prebuilds: { tmp_path / PROJECT_LABEL / BUILD_OUTPUT / PREBUILD / 'a.123.foo', diff --git a/tests/unit_tests/parse/c/test_c_analyser.py 
b/tests/unit_tests/parse/c/test_c_analyser.py index 934c8641..b4f84c94 100644 --- a/tests/unit_tests/parse/c/test_c_analyser.py +++ b/tests/unit_tests/parse/c/test_c_analyser.py @@ -16,7 +16,8 @@ def test_simple_result(tmp_path): c_analyser = CAnalyser() - c_analyser._config = BuildConfig('proj', ToolBox(), fab_workspace=tmp_path) + c_analyser._config = BuildConfig('proj', ToolBox(), mpi=False, + openmp=False, fab_workspace=tmp_path) with mock.patch('fab.parse.AnalysedFile.save'): fpath = Path(__file__).parent / "test_c_analyser.c" diff --git a/tests/unit_tests/steps/test_analyse.py b/tests/unit_tests/steps/test_analyse.py index 79d0ef50..2cec86df 100644 --- a/tests/unit_tests/steps/test_analyse.py +++ b/tests/unit_tests/steps/test_analyse.py @@ -6,8 +6,8 @@ from fab.build_config import BuildConfig from fab.dep_tree import AnalysedDependent from fab.parse.fortran import AnalysedFortran, FortranParserWorkaround -from fab.steps.analyse import _add_manual_results, _add_unreferenced_deps, _gen_file_deps, _gen_symbol_table, \ - _parse_files +from fab.steps.analyse import (_add_manual_results, _add_unreferenced_deps, + _gen_file_deps, _gen_symbol_table, _parse_files) from fab.tools import ToolBox from fab.util import HashedFile @@ -16,8 +16,10 @@ class Test_gen_symbol_table(object): @pytest.fixture def analysed_files(self): - return [AnalysedDependent(fpath=Path('foo.c'), symbol_defs=['foo_1', 'foo_2'], file_hash=0), - AnalysedDependent(fpath=Path('bar.c'), symbol_defs=['bar_1', 'bar_2'], file_hash=0)] + return [AnalysedDependent(fpath=Path('foo.c'), + symbol_defs=['foo_1', 'foo_2'], file_hash=0), + AnalysedDependent(fpath=Path('bar.c'), + symbol_defs=['bar_1', 'bar_2'], file_hash=0)] def test_vanilla(self, analysed_files): result = _gen_symbol_table(analysed_files=analysed_files) @@ -58,12 +60,14 @@ def test_vanilla(self): analysed_files = [ mock.Mock( - spec=AnalysedDependent, fpath=my_file, symbol_deps={'my_func', 'dep1_mod', 'dep2'}, file_deps=set()), + spec=AnalysedDependent, fpath=my_file, + symbol_deps={'my_func', 'dep1_mod', 'dep2'}, file_deps=set()), ] _gen_file_deps(analysed_files=analysed_files, symbols=symbols) - assert analysed_files[0].file_deps == {symbols['dep1_mod'], symbols['dep2']} + assert analysed_files[0].file_deps == {symbols['dep1_mod'], + symbols['dep2']} # todo: this is fortran-ey, move it? 
@@ -86,19 +90,26 @@ def test_vanilla(self): Path('root_dep.f90'): AnalysedFortran(fpath=Path(), file_hash=0), } - # we want to force this symbol into the build (because it's not used via modules) + # we want to force this symbol into the build (because it's not used + # via modules) unreferenced_deps = ['util'] # the stuff to add to the build tree will be found in here all_analysed_files = { - # root.f90 and root_util.f90 would also be in here but the test doesn't need them - Path('util.f90'): AnalysedFortran(fpath=Path('util.f90'), file_deps={Path('util_dep.f90')}, file_hash=0), - Path('util_dep.f90'): AnalysedFortran(fpath=Path('util_dep.f90'), file_hash=0), + # root.f90 and root_util.f90 would also be in here but the test + # doesn't need them + Path('util.f90'): AnalysedFortran(fpath=Path('util.f90'), + file_deps={Path('util_dep.f90')}, + file_hash=0), + Path('util_dep.f90'): AnalysedFortran(fpath=Path('util_dep.f90'), + file_hash=0), } _add_unreferenced_deps( unreferenced_deps=unreferenced_deps, - symbol_table=symbol_table, all_analysed_files=all_analysed_files, build_tree=build_tree) + symbol_table=symbol_table, + all_analysed_files=all_analysed_files, + build_tree=build_tree) assert Path('util.f90') in build_tree assert Path('util_dep.f90') in build_tree @@ -111,33 +122,46 @@ def test_vanilla(self): class Test_parse_files(object): - # todo: test the correct artefacts are marked as current for the cleanup step + # todo: test the correct artefacts are marked as current for the + # cleanup step # todo: this method should be tested a bit more thoroughly def test_exceptions(self, tmp_path): # make sure parse exceptions do not stop the build - with mock.patch('fab.steps.run_mp', return_value=[(Exception('foo'), None)]), \ + with mock.patch('fab.steps.run_mp', + return_value=[(Exception('foo'), None)]), \ pytest.warns(UserWarning, match="deprecated 'DEPENDS ON:'"): - # The warning "deprecated 'DEPENDS ON:' comment found in fortran code" - # is in "def _parse_files" in "source/steps/analyse.py" + # The warning "deprecated 'DEPENDS ON:' comment found in fortran + # code" is in "def _parse_files" in "source/steps/analyse.py" config = BuildConfig('proj', ToolBox(), fab_workspace=tmp_path) - # the exception should be suppressed (and logged) and this step should run to completion - _parse_files(config, files=[], fortran_analyser=mock.Mock(), c_analyser=mock.Mock()) + # the exception should be suppressed (and logged) and this step + # should run to completion + _parse_files(config, files=[], fortran_analyser=mock.Mock(), + c_analyser=mock.Mock()) -class Test_add_manual_results(object): - # test user-specified analysis results, for when fparser fails to parse a valid file. +class TestAddManualResults: + '''test user-specified analysis results, for when fparser fails to parse a + valid file. 
+ ''' def test_vanilla(self): # test normal usage of manual analysis results - workaround = FortranParserWorkaround(fpath=Path('foo.f'), symbol_defs={'foo', }) + workaround = FortranParserWorkaround(fpath=Path('foo.f'), + symbol_defs={'foo', }) analysed_files = set() - with mock.patch('fab.parse.fortran.file_checksum', return_value=HashedFile(None, 123)), \ - pytest.warns(UserWarning, match="SPECIAL MEASURE: injecting user-defined analysis results"): - # This warning "UserWarning: SPECIAL MEASURE: injecting user-defined analysis results" - # is in "def _add_manual_results" in "source/steps/analyse.py" - _add_manual_results(special_measure_analysis_results=[workaround], analysed_files=analysed_files) - - assert analysed_files == {AnalysedFortran(fpath=Path('foo.f'), file_hash=123, symbol_defs={'foo', })} + with mock.patch('fab.parse.fortran.file_checksum', + return_value=HashedFile(None, 123)), \ + pytest.warns(UserWarning, match="SPECIAL MEASURE: injecting user-" + "defined analysis results"): + # This warning "UserWarning: SPECIAL MEASURE: injecting + # user-defined analysis results" is in "def _add_manual_results" + # in "source/steps/analyse.py" + _add_manual_results(special_measure_analysis_results=[workaround], + analysed_files=analysed_files) + + assert analysed_files == {AnalysedFortran(fpath=Path('foo.f'), + file_hash=123, + symbol_defs={'foo', })} diff --git a/tests/unit_tests/steps/test_archive_objects.py b/tests/unit_tests/steps/test_archive_objects.py index d366f422..30e41781 100644 --- a/tests/unit_tests/steps/test_archive_objects.py +++ b/tests/unit_tests/steps/test_archive_objects.py @@ -83,7 +83,7 @@ def test_incorrect_tool(self): config = BuildConfig('proj', ToolBox()) tool_box = config.tool_box - cc = tool_box[Category.C_COMPILER] + cc = tool_box.get_tool(Category.C_COMPILER, config.mpi) # And set its category to C_COMPILER cc._category = Category.AR # So overwrite the C compiler with the re-categories Fortran compiler diff --git a/tests/unit_tests/steps/test_link.py b/tests/unit_tests/steps/test_link.py index f015bb27..a675f54c 100644 --- a/tests/unit_tests/steps/test_link.py +++ b/tests/unit_tests/steps/test_link.py @@ -22,9 +22,12 @@ def test_run(self, tool_box): config = SimpleNamespace( project_workspace=Path('workspace'), artefact_store=ArtefactStore(), - tool_box=tool_box + tool_box=tool_box, + mpi=False, + openmp=False, ) - config.artefact_store[ArtefactSet.OBJECT_FILES] = {'foo': {'foo.o', 'bar.o'}} + config.artefact_store[ArtefactSet.OBJECT_FILES] = \ + {'foo': {'foo.o', 'bar.o'}} with mock.patch('os.getenv', return_value='-L/foo1/lib -L/foo2/lib'): # We need to create a linker here to pick up the env var: @@ -35,8 +38,9 @@ def test_run(self, tool_box): mock_result = mock.Mock(returncode=0, stdout="abc\ndef".encode()) with mock.patch('fab.tools.tool.subprocess.run', return_value=mock_result) as tool_run, \ - pytest.warns(UserWarning, match="_metric_send_conn not " - "set, cannot send metrics"): + pytest.warns(UserWarning, + match="_metric_send_conn not " + "set, cannot send metrics"): link_exe(config, flags=['-fooflag', '-barflag']) tool_run.assert_called_with( diff --git a/tests/unit_tests/steps/test_link_shared_object.py b/tests/unit_tests/steps/test_link_shared_object.py index 224dda19..700a3de3 100644 --- a/tests/unit_tests/steps/test_link_shared_object.py +++ b/tests/unit_tests/steps/test_link_shared_object.py @@ -26,6 +26,7 @@ def test_run(tool_box): project_workspace=Path('workspace'), build_output=Path("workspace"), artefact_store=ArtefactStore(), + 
openmp=False, tool_box=tool_box ) config.artefact_store[ArtefactSet.OBJECT_FILES] = \ diff --git a/tests/unit_tests/steps/test_preprocess.py b/tests/unit_tests/steps/test_preprocess.py index 32e7e09f..38376503 100644 --- a/tests/unit_tests/steps/test_preprocess.py +++ b/tests/unit_tests/steps/test_preprocess.py @@ -50,7 +50,7 @@ def source_getter(artefact_store): cpp._category = Category.FORTRAN_PREPROCESSOR # Now overwrite the Fortran preprocessor with the re-categorised # C preprocessor: - tool_box.add_tool(cpp) + tool_box.add_tool(cpp, silent_replace=True) with pytest.raises(RuntimeError) as err: preprocess_fortran(config=config) diff --git a/tests/unit_tests/test_config.py b/tests/unit_tests/test_config.py index 12357c37..201aa0bb 100644 --- a/tests/unit_tests/test_config.py +++ b/tests/unit_tests/test_config.py @@ -8,8 +8,9 @@ class TestAddFlags: def test_run(self): - add_flags = AddFlags(match="$source/foo/*", flags=['-I', '$relative/include']) - config = BuildConfig('proj', ToolBox(), + add_flags = AddFlags(match="$source/foo/*", + flags=['-I', '$relative/include']) + config = BuildConfig('proj', ToolBox(), mpi=False, openmp=False, fab_workspace=Path("/fab_workspace")) # anything in $source/foo should get the include folder @@ -18,7 +19,8 @@ def test_run(self): fpath=Path(f"/fab_workspace/proj/{SOURCE_ROOT}/foo/bar.c"), input_flags=my_flags, config=config) - assert my_flags == ['-foo', '-I', f'/fab_workspace/proj/{SOURCE_ROOT}/foo/include'] + assert my_flags == ['-foo', '-I', + f'/fab_workspace/proj/{SOURCE_ROOT}/foo/include'] # anything in $source/bar should NOT get the include folder my_flags = ["-foo"] diff --git a/tests/unit_tests/tools/test_compiler.py b/tests/unit_tests/tools/test_compiler.py index 28d41f2f..23544f16 100644 --- a/tests/unit_tests/tools/test_compiler.py +++ b/tests/unit_tests/tools/test_compiler.py @@ -14,29 +14,35 @@ import pytest -from fab.tools import (Category, CCompiler, FortranCompiler, Gcc, Gfortran, - Icc, Ifort) +from fab.tools import (Category, CCompiler, FortranCompiler, + Gcc, Gfortran, Icc, Ifort, MpiGcc, MpiGfortran, + MpiIcc, MpiIfort) def test_compiler(): '''Test the compiler constructor.''' - cc = CCompiler("gcc", "gcc", "gnu") + cc = CCompiler("gcc", "gcc", "gnu", openmp_flag="-fopenmp") assert cc.category == Category.C_COMPILER assert cc._compile_flag == "-c" assert cc._output_flag == "-o" assert cc.flags == [] assert cc.suite == "gnu" + assert not cc.mpi + assert cc.openmp_flag == "-fopenmp" with pytest.raises(NotImplementedError) as err: cc.parse_version_output(Category.FORTRAN_COMPILER, "NOT NEEDED") assert ("The method `parse_version_output` must be provided using a mixin." in str(err.value)) - fc = FortranCompiler("gfortran", "gfortran", "gnu", "-J") + fc = FortranCompiler("gfortran", "gfortran", "gnu", openmp_flag="-fopenmp", + module_folder_flag="-J") assert fc._compile_flag == "-c" assert fc._output_flag == "-o" assert fc.category == Category.FORTRAN_COMPILER assert fc.suite == "gnu" assert fc.flags == [] + assert not fc.mpi + assert fc.openmp_flag == "-fopenmp" with pytest.raises(NotImplementedError) as err: fc.parse_version_output(Category.FORTRAN_COMPILER, "NOT NEEDED") assert ("The method `parse_version_output` must be provided using a mixin." 
@@ -98,7 +104,8 @@ def test_compiler_hash_invalid_version(): with mock.patch.object(cc, "run", mock.Mock(return_value='foo v1')): with pytest.raises(RuntimeError) as err: cc.get_hash() - assert "Unexpected version output format for compiler 'gcc'" in str(err.value) + assert ("Unexpected version output format for compiler 'gcc'" + in str(err.value)) def test_compiler_with_env_fflags(): @@ -112,25 +119,55 @@ def test_compiler_with_env_fflags(): def test_compiler_syntax_only(): '''Tests handling of syntax only flags.''' - fc = FortranCompiler("gfortran", "gfortran", "gnu", "-J") + fc = FortranCompiler("gfortran", "gfortran", "gnu", + openmp_flag="-fopenmp", module_folder_flag="-J") + # Empty since no flag is defined assert not fc.has_syntax_only - fc = FortranCompiler("gfortran", "gfortran", "gnu", "-J", - syntax_only_flag=None) + + fc = FortranCompiler("gfortran", "gfortran", "gnu", openmp_flag="-fopenmp", + module_folder_flag="-J", syntax_only_flag=None) + # Empty since no flag is defined assert not fc.has_syntax_only - fc = FortranCompiler("gfortran", "gfortran", "gnu", "-J", + fc = FortranCompiler("gfortran", "gfortran", "gnu", + openmp_flag="-fopenmp", + module_folder_flag="-J", syntax_only_flag="-fsyntax-only") - fc.set_module_output_path("/tmp") assert fc.has_syntax_only assert fc._syntax_only_flag == "-fsyntax-only" + + +def test_compiler_without_openmp(): + '''Tests that the openmp flag is not used when openmp is not enabled. ''' + fc = FortranCompiler("gfortran", "gfortran", "gnu", + openmp_flag="-fopenmp", + module_folder_flag="-J", + syntax_only_flag="-fsyntax-only") + fc.set_module_output_path("/tmp") fc.run = mock.Mock() - fc.compile_file(Path("a.f90"), "a.o", syntax_only=True) + fc.compile_file(Path("a.f90"), "a.o", openmp=False, syntax_only=True) fc.run.assert_called_with(cwd=Path('.'), additional_parameters=['-c', '-fsyntax-only', "-J", '/tmp', 'a.f90', '-o', 'a.o', ]) +def test_compiler_with_openmp(): + '''Tests that the openmp flag is used as expected if openmp is enabled. 
+ ''' + fc = FortranCompiler("gfortran", "gfortran", "gnu", + openmp_flag="-fopenmp", + module_folder_flag="-J", + syntax_only_flag="-fsyntax-only") + fc.set_module_output_path("/tmp") + fc.run = mock.Mock() + fc.compile_file(Path("a.f90"), "a.o", openmp=True, syntax_only=False) + fc.run.assert_called_with(cwd=Path('.'), + additional_parameters=['-c', '-fopenmp', + "-J", '/tmp', 'a.f90', + '-o', 'a.o', ]) + + def test_compiler_module_output(): '''Tests handling of module output_flags.''' fc = FortranCompiler("gfortran", "gfortran", suite="gnu", @@ -138,7 +175,7 @@ def test_compiler_module_output(): fc.set_module_output_path("/module_out") assert fc._module_output_path == "/module_out" fc.run = mock.MagicMock() - fc.compile_file(Path("a.f90"), "a.o", syntax_only=True) + fc.compile_file(Path("a.f90"), "a.o", openmp=False, syntax_only=True) fc.run.assert_called_with(cwd=PosixPath('.'), additional_parameters=['-c', '-J', '/module_out', 'a.f90', '-o', 'a.o']) @@ -146,26 +183,32 @@ def test_compiler_with_add_args(): def test_compiler_with_add_args(): '''Tests that additional arguments are handled as expected.''' - fc = FortranCompiler("gfortran", "gfortran", "gnu", + fc = FortranCompiler("gfortran", "gfortran", suite="gnu", + openmp_flag="-fopenmp", module_folder_flag="-J") fc.set_module_output_path("/module_out") assert fc._module_output_path == "/module_out" fc.run = mock.MagicMock() with pytest.warns(UserWarning, match="Removing managed flag"): fc.compile_file(Path("a.f90"), "a.o", add_flags=["-J/b", "-O3"], - syntax_only=True) + openmp=False, syntax_only=True) # Notice that "-J/b" has been removed fc.run.assert_called_with(cwd=PosixPath('.'), additional_parameters=['-c', "-O3", '-J', '/module_out', 'a.f90', '-o', 'a.o']) + with pytest.warns(UserWarning, + match="explicitly provided. OpenMP should be enabled in " + "the BuildConfiguration"): + fc.compile_file(Path("a.f90"), "a.o", + add_flags=["-fopenmp", "-O3"], + openmp=True, syntax_only=True) def test_get_version_string(): '''Tests the get_version_string() method. ''' full_output = 'GNU Fortran (gcc) 6.1.0' - c = Gfortran() with mock.patch.object(c, "run", mock.Mock(return_value=full_output)): assert c.get_version_string() == "6.1.0" @@ -328,11 +371,22 @@ def test_gcc(): assert gcc.name == "gcc" assert isinstance(gcc, CCompiler) assert gcc.category == Category.C_COMPILER + assert not gcc.mpi + +def test_mpi_gcc(): + '''Tests the MPI enabled gcc class.''' + mpi_gcc = MpiGcc() + assert mpi_gcc.name == "mpicc-gcc" + assert isinstance(mpi_gcc, CCompiler) + assert mpi_gcc.category == Category.C_COMPILER + assert mpi_gcc.mpi -def test_gcc_get_version(): + +@pytest.mark.parametrize("compiler", [Gcc, MpiGcc]) +def test_gcc_get_version(compiler): '''Tests the gcc class get_version method.''' - gcc = Gcc() + gcc = compiler() full_output = dedent(""" gcc (GCC) 8.5.0 20210514 (Red Hat 8.5.0-20) Copyright (C) 2018 Free Software Foundation, Inc. @@ -341,9 +395,10 @@ def test_gcc_get_version(): assert gcc.get_version() == (8, 5, 0) -def test_gcc_get_version_with_icc_string(): +@pytest.mark.parametrize("compiler", [Gcc, MpiGcc]) +def test_gcc_get_version_with_icc_string(compiler): '''Tests the gcc class with an icc version output.''' - gcc = Gcc() + gcc = compiler() full_output = dedent(""" icc (ICC) 2021.10.0 20230609 Copyright (C) 1985-2023 Intel Corporation. All rights reserved. 
@@ -362,6 +417,16 @@ def test_gfortran(): assert gfortran.name == "gfortran" assert isinstance(gfortran, FortranCompiler) assert gfortran.category == Category.FORTRAN_COMPILER + assert not gfortran.mpi + + +def test_mpi_gfortran(): + '''Tests the MPI enabled gfortran class.''' + mpi_gfortran = MpiGfortran() + assert mpi_gfortran.name == "mpif90-gfortran" + assert isinstance(mpi_gfortran, FortranCompiler) + assert mpi_gfortran.category == Category.FORTRAN_COMPILER + assert mpi_gfortran.mpi # Possibly overkill to cover so many gfortran versions but I had to go @@ -369,7 +434,8 @@ def test_gfortran(): # Note: different sources, e.g conda, change the output slightly... -def test_gfortran_get_version_4(): +@pytest.mark.parametrize("compiler", [Gfortran, MpiGfortran]) +def test_gfortran_get_version_4(compiler): '''Test gfortran 4.8.5 version detection.''' full_output = dedent(""" GNU Fortran (GCC) 4.8.5 20150623 (Red Hat 4.8.5-44) @@ -381,12 +447,13 @@ def test_gfortran_get_version_4(): For more information about these matters, see the file named COPYING """) - gfortran = Gfortran() + gfortran = compiler() with mock.patch.object(gfortran, "run", mock.Mock(return_value=full_output)): assert gfortran.get_version() == (4, 8, 5) -def test_gfortran_get_version_6(): +@pytest.mark.parametrize("compiler", [Gfortran, MpiGfortran]) +def test_gfortran_get_version_6(compiler): '''Test gfortran 6.1.0 version detection.''' full_output = dedent(""" GNU Fortran (GCC) 6.1.0 @@ -395,12 +462,13 @@ def test_gfortran_get_version_6(): warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. """) - gfortran = Gfortran() + gfortran = compiler() with mock.patch.object(gfortran, "run", mock.Mock(return_value=full_output)): assert gfortran.get_version() == (6, 1, 0) -def test_gfortran_get_version_8(): +@pytest.mark.parametrize("compiler", [Gfortran, MpiGfortran]) +def test_gfortran_get_version_8(compiler): '''Test gfortran 8.5.0 version detection.''' full_output = dedent(""" GNU Fortran (conda-forge gcc 8.5.0-16) 8.5.0 @@ -409,12 +477,13 @@ def test_gfortran_get_version_8(): warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. """) - gfortran = Gfortran() + gfortran = compiler() with mock.patch.object(gfortran, "run", mock.Mock(return_value=full_output)): assert gfortran.get_version() == (8, 5, 0) -def test_gfortran_get_version_10(): +@pytest.mark.parametrize("compiler", [Gfortran, MpiGfortran]) +def test_gfortran_get_version_10(compiler): '''Test gfortran 10.4.0 version detection.''' full_output = dedent(""" GNU Fortran (conda-forge gcc 10.4.0-16) 10.4.0 @@ -423,12 +492,13 @@ def test_gfortran_get_version_10(): warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. """) - gfortran = Gfortran() + gfortran = compiler() with mock.patch.object(gfortran, "run", mock.Mock(return_value=full_output)): assert gfortran.get_version() == (10, 4, 0) -def test_gfortran_get_version_12(): +@pytest.mark.parametrize("compiler", [Gfortran, MpiGfortran]) +def test_gfortran_get_version_12(compiler): '''Test gfortran 12.1.0 version detection.''' full_output = dedent(""" GNU Fortran (conda-forge gcc 12.1.0-16) 12.1.0 @@ -437,19 +507,20 @@ def test_gfortran_get_version_12(): warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
""") - gfortran = Gfortran() + gfortran = compiler() with mock.patch.object(gfortran, "run", mock.Mock(return_value=full_output)): assert gfortran.get_version() == (12, 1, 0) -def test_gfortran_get_version_with_ifort_string(): +@pytest.mark.parametrize("compiler", [Gfortran, MpiGfortran]) +def test_gfortran_get_version_with_ifort_string(compiler): '''Tests the gfortran class with an ifort version output.''' full_output = dedent(""" ifort (IFORT) 14.0.3 20140422 Copyright (C) 1985-2014 Intel Corporation. All rights reserved. """) - gfortran = Gfortran() + gfortran = compiler() with mock.patch.object(gfortran, "run", mock.Mock(return_value=full_output)): with pytest.raises(RuntimeError) as err: gfortran.get_version() @@ -463,27 +534,39 @@ def test_icc(): assert icc.name == "icc" assert isinstance(icc, CCompiler) assert icc.category == Category.C_COMPILER + assert not icc.mpi + +def test_mpi_icc(): + '''Tests the MPI enabled icc class.''' + mpi_icc = MpiIcc() + assert mpi_icc.name == "mpicc-icc" + assert isinstance(mpi_icc, CCompiler) + assert mpi_icc.category == Category.C_COMPILER + assert mpi_icc.mpi -def test_icc_get_version(): + +@pytest.mark.parametrize("compiler", [Icc, MpiIcc]) +def test_icc_get_version(compiler): '''Tests the icc class get_version method.''' full_output = dedent(""" icc (ICC) 2021.10.0 20230609 Copyright (C) 1985-2023 Intel Corporation. All rights reserved. """) - icc = Icc() + icc = compiler() with mock.patch.object(icc, "run", mock.Mock(return_value=full_output)): assert icc.get_version() == (2021, 10, 0) -def test_icc_get_version_with_gcc_string(): +@pytest.mark.parametrize("compiler", [Icc, MpiIcc]) +def test_icc_get_version_with_gcc_string(compiler): '''Tests the icc class with a GCC version output.''' full_output = dedent(""" gcc (GCC) 8.5.0 20210514 (Red Hat 8.5.0-20) Copyright (C) 2018 Free Software Foundation, Inc. """) - icc = Icc() + icc = compiler() with mock.patch.object(icc, "run", mock.Mock(return_value=full_output)): with pytest.raises(RuntimeError) as err: icc.get_version() @@ -497,75 +580,91 @@ def test_ifort(): assert ifort.name == "ifort" assert isinstance(ifort, FortranCompiler) assert ifort.category == Category.FORTRAN_COMPILER + assert not ifort.mpi -def test_ifort_get_version_14(): +def test_mpi_ifort(): + '''Tests the MPI enabled ifort class.''' + mpi_ifort = MpiIfort() + assert mpi_ifort.name == "mpif90-ifort" + assert isinstance(mpi_ifort, FortranCompiler) + assert mpi_ifort.category == Category.FORTRAN_COMPILER + assert mpi_ifort.mpi + + +@pytest.mark.parametrize("compiler", [Ifort, MpiIfort]) +def test_ifort_get_version_14(compiler): '''Test ifort 14.0.3 version detection.''' full_output = dedent(""" ifort (IFORT) 14.0.3 20140422 Copyright (C) 1985-2014 Intel Corporation. All rights reserved. """) - ifort = Ifort() + ifort = compiler() with mock.patch.object(ifort, "run", mock.Mock(return_value=full_output)): assert ifort.get_version() == (14, 0, 3) -def test_ifort_get_version_15(): +@pytest.mark.parametrize("compiler", [Ifort, MpiIfort]) +def test_ifort_get_version_15(compiler): '''Test ifort 15.0.2 version detection.''' full_output = dedent(""" ifort (IFORT) 15.0.2 20150121 Copyright (C) 1985-2015 Intel Corporation. All rights reserved. 
""") - ifort = Ifort() + ifort = compiler() with mock.patch.object(ifort, "run", mock.Mock(return_value=full_output)): assert ifort.get_version() == (15, 0, 2) -def test_ifort_get_version_17(): +@pytest.mark.parametrize("compiler", [Ifort, MpiIfort]) +def test_ifort_get_version_17(compiler): '''Test ifort 17.0.7 version detection.''' full_output = dedent(""" ifort (IFORT) 17.0.7 20180403 Copyright (C) 1985-2018 Intel Corporation. All rights reserved. """) - ifort = Ifort() + ifort = compiler() with mock.patch.object(ifort, "run", mock.Mock(return_value=full_output)): assert ifort.get_version() == (17, 0, 7) -def test_ifort_get_version_19(): +@pytest.mark.parametrize("compiler", [Ifort, MpiIfort]) +def test_ifort_get_version_19(compiler): '''Test ifort 19.0.0.117 version detection.''' full_output = dedent(""" ifort (IFORT) 19.0.0.117 20180804 Copyright (C) 1985-2018 Intel Corporation. All rights reserved. """) - ifort = Ifort() + ifort = compiler() with mock.patch.object(ifort, "run", mock.Mock(return_value=full_output)): assert ifort.get_version() == (19, 0, 0, 117) -def test_ifort_get_version_with_icc_string(): +@pytest.mark.parametrize("compiler", [Ifort, MpiIfort]) +def test_ifort_get_version_with_icc_string(compiler): '''Tests the ifort class with an icc version output.''' full_output = dedent(""" icc (ICC) 2021.10.0 20230609 Copyright (C) 1985-2023 Intel Corporation. All rights reserved. """) - ifort = Ifort() + ifort = compiler() with mock.patch.object(ifort, "run", mock.Mock(return_value=full_output)): with pytest.raises(RuntimeError) as err: ifort.get_version() assert "Unexpected version output format for compiler" in str(err.value) +@pytest.mark.parametrize("compiler", [Ifort, MpiIfort]) @pytest.mark.parametrize("version", ["5.15f.2", ".0.5.1", "0.5.1.", "0.5..1"]) -def test_ifort_get_version_invalid_version(version): +def test_ifort_get_version_invalid_version(compiler, version): '''Tests the icc class with an icc version string that contains an invalid version number.''' full_output = dedent(f""" @@ -573,7 +672,7 @@ def test_ifort_get_version_invalid_version(version): Copyright (C) 1985-2023 Intel Corporation. All rights reserved. 
""") - icc = Icc() + icc = compiler() with mock.patch.object(icc, "run", mock.Mock(return_value=full_output)): with pytest.raises(RuntimeError) as err: icc.get_version() @@ -589,8 +688,13 @@ def __init__(self): super().__init__(name="mpif90-intel", exec_name="mpif90") + @property + def mpi(self): + return True + mpif90 = MpiF90() assert mpif90.suite == "intel-classic" assert mpif90.category == Category.FORTRAN_COMPILER assert mpif90.name == "mpif90-intel" assert mpif90.exec_name == "mpif90" + assert mpif90.mpi diff --git a/tests/unit_tests/tools/test_linker.py b/tests/unit_tests/tools/test_linker.py index 927cd008..772cd7ec 100644 --- a/tests/unit_tests/tools/test_linker.py +++ b/tests/unit_tests/tools/test_linker.py @@ -75,30 +75,38 @@ def test_linker_check_available(mock_c_compiler): ["ld", "--version"], capture_output=True, env=None, cwd=None, check=False) - # Third test: assume the tool does not exist, run will raise - # runtime error: + # Third test: assume the tool does not exist, check_available + # will return False (and not raise an exception) + linker._is_available = None with mock.patch("fab.tools.tool.Tool.run", side_effect=RuntimeError("")) as tool_run: - linker.check_available() + assert linker.check_available() is False def test_linker_c(mock_c_compiler): - '''Test the link command line.''' + '''Test the link command line when no additional libraries are + specified.''' linker = Linker(compiler=mock_c_compiler) mock_result = mock.Mock(returncode=0) with mock.patch('fab.tools.tool.subprocess.run', return_value=mock_result) as tool_run: - linker.link([Path("a.o")], Path("a.out")) + linker.link([Path("a.o")], Path("a.out"), openmp=False) tool_run.assert_called_with( ["mock_c_compiler.exe", 'a.o', '-o', 'a.out'], capture_output=True, env=None, cwd=None, check=False) + +def test_linker_c_with_libraries(mock_c_compiler): + '''Test the link command line when additional libraries are specified.''' + linker = Linker(compiler=mock_c_compiler) with mock.patch.object(linker, "run") as link_run: - linker.link([Path("a.o")], Path("a.out"), add_libs=["-L", "/tmp"]) - link_run.assert_called_with(['a.o', '-L', '/tmp', '-o', 'a.out']) + linker.link([Path("a.o")], Path("a.out"), add_libs=["-L", "/tmp"], + openmp=True) + link_run.assert_called_with(['-fopenmp', 'a.o', '-L', '/tmp', + '-o', 'a.out']) -def test_linker_add_compiler_flag(mock_c_compiler): +def test_compiler_linker_add_compiler_flag(mock_c_compiler): '''Test that a flag added to the compiler will be automatically added to the link line (even if the flags are modified after creating the linker ... 
in case that the user specifies additional @@ -109,19 +117,22 @@ def test_linker_add_compiler_flag(mock_c_compiler): mock_result = mock.Mock(returncode=0) with mock.patch('fab.tools.tool.subprocess.run', return_value=mock_result) as tool_run: - linker.link([Path("a.o")], Path("a.out")) + linker.link([Path("a.o")], Path("a.out"), openmp=False) tool_run.assert_called_with( ['mock_c_compiler.exe', '-my-flag', 'a.o', '-o', 'a.out'], capture_output=True, env=None, cwd=None, check=False) - # Make also sure the code works if a linker is created without - # a compiler: + +def test_linker_add_compiler_flag(): + '''Make sure linker flags work if a linker is created without + a compiler: + ''' linker = Linker("no-compiler", "no-compiler.exe", "suite") linker.flags.append("-some-other-flag") mock_result = mock.Mock(returncode=0) with mock.patch('fab.tools.tool.subprocess.run', return_value=mock_result) as tool_run: - linker.link([Path("a.o")], Path("a.out")) + linker.link([Path("a.o")], Path("a.out"), openmp=False) tool_run.assert_called_with( ['no-compiler.exe', '-some-other-flag', 'a.o', '-o', 'a.out'], capture_output=True, env=None, cwd=None, check=False) diff --git a/tests/unit_tests/tools/test_tool_box.py b/tests/unit_tests/tools/test_tool_box.py index 5ac55ac4..b8e2e903 100644 --- a/tests/unit_tests/tools/test_tool_box.py +++ b/tests/unit_tests/tools/test_tool_box.py @@ -24,15 +24,16 @@ def test_tool_box_get_tool(): '''Tests get_tool.''' tb = ToolBox() # No tool is defined, so the default Fortran compiler must be returned: - default_compiler = tb.get_tool(Category.FORTRAN_COMPILER) + default_compiler = tb.get_tool(Category.FORTRAN_COMPILER, mpi=False) tr = ToolRepository() - assert default_compiler is tr.get_default(Category.FORTRAN_COMPILER) + assert default_compiler is tr.get_default(Category.FORTRAN_COMPILER, + mpi=False) # Check that dictionary-like access works as expected: assert tb[Category.FORTRAN_COMPILER] == default_compiler # Now add gfortran as Fortran compiler to the tool box tr_gfortran = tr.get_tool(Category.FORTRAN_COMPILER, "gfortran") - tb.add_tool(tr_gfortran) + tb.add_tool(tr_gfortran, silent_replace=True) gfortran = tb.get_tool(Category.FORTRAN_COMPILER) assert gfortran is tr_gfortran diff --git a/tests/unit_tests/tools/test_tool_repository.py b/tests/unit_tests/tools/test_tool_repository.py index 4a315150..e16ad00d 100644 --- a/tests/unit_tests/tools/test_tool_repository.py +++ b/tests/unit_tests/tools/test_tool_repository.py @@ -7,10 +7,12 @@ '''This module tests the ToolRepository. 
''' +from unittest import mock import pytest -from fab.tools import Category, Gcc, Gfortran, Ifort, Linker, ToolRepository +from fab.tools import (Ar, Category, Gcc, Gfortran, Ifort, Linker, + ToolRepository) def test_tool_repository_get_singleton_new(): @@ -57,38 +59,64 @@ def test_tool_repository_get_tool_error(): def test_tool_repository_get_default(): '''Tests get_default.''' tr = ToolRepository() - gfortran = tr.get_default(Category.FORTRAN_COMPILER) + gfortran = tr.get_default(Category.FORTRAN_COMPILER, mpi=False) assert isinstance(gfortran, Gfortran) - gcc_linker = tr.get_default(Category.LINKER) + gcc_linker = tr.get_default(Category.LINKER, mpi=False) assert isinstance(gcc_linker, Linker) assert gcc_linker.name == "linker-gcc" - gcc = tr.get_default(Category.C_COMPILER) + gcc = tr.get_default(Category.C_COMPILER, mpi=False) assert isinstance(gcc, Gcc) + # Test a non-compiler + ar = tr.get_default(Category.AR) + assert isinstance(ar, Ar) -def test_tool_repository_get_default_error(): - '''Tests error handling in get_default.''' + +def test_tool_repository_get_default_error_invalid_category(): + '''Tests error handling in get_default, the category + must be a Category, not e.g. a string.''' tr = ToolRepository() with pytest.raises(RuntimeError) as err: - tr.get_default("unknown-category") + tr.get_default("unknown-category-type") assert "Invalid category type 'str'." in str(err.value) +def test_tool_repository_get_default_error_missing_mpi(): + '''Tests error handling in get_default when the optional MPI + parameter is missing (which is required for a compiler).''' + tr = ToolRepository() + with pytest.raises(RuntimeError) as err: + tr.get_default(Category.FORTRAN_COMPILER) + assert ("Invalid or missing mpi specification for 'FORTRAN_COMPILER'" + in str(err.value)) + + +def test_tool_repository_get_default_error_missing_compiler(): + '''Tests error handling in get_default when there is no compiler + that fulfils the requirements.''' + tr = ToolRepository() + with mock.patch.dict(tr, {Category.FORTRAN_COMPILER: []}), \ + pytest.raises(RuntimeError) as err: + tr.get_default(Category.FORTRAN_COMPILER, mpi=True) + assert ("Could not find 'FORTRAN_COMPILER' that supports MPI." + in str(err.value)) + + def test_tool_repository_default_compiler_suite(): '''Tests the setting of default suite for compiler and linker.''' tr = ToolRepository() tr.set_default_compiler_suite("gnu") for cat in [Category.C_COMPILER, Category.FORTRAN_COMPILER, Category.LINKER]: - def_tool = tr.get_default(cat) + def_tool = tr.get_default(cat, mpi=False) assert def_tool.suite == "gnu" tr.set_default_compiler_suite("intel-classic") for cat in [Category.C_COMPILER, Category.FORTRAN_COMPILER, Category.LINKER]: - def_tool = tr.get_default(cat) + def_tool = tr.get_default(cat, mpi=False) assert def_tool.suite == "intel-classic" with pytest.raises(RuntimeError) as err: tr.set_default_compiler_suite("does-not-exist")
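Reviewer note, not part of the patch: a minimal usage sketch of how the new mpi/openmp settings and the MPI wrapper tools introduced above fit together. The project label and the explicit choice of the "mpif90-gfortran" wrapper are illustrative assumptions, not anything mandated by this change.

# Hypothetical sketch (not part of the patch), assuming fab from this branch
# is installed.
from fab.build_config import BuildConfig
from fab.tools import Category, ToolBox, ToolRepository

tool_box = ToolBox()
# Optionally pin the MPI-enabled gfortran wrapper registered by this change;
# if nothing is added, get_tool() falls back to a repository default that
# matches the config's mpi setting.
mpif90 = ToolRepository().get_tool(Category.FORTRAN_COMPILER, "mpif90-gfortran")
tool_box.add_tool(mpif90)

# The build config now carries the MPI/OpenMP requirements for all steps:
with BuildConfig(project_label='example $compiler', mpi=True, openmp=True,
                 tool_box=tool_box) as state:
    ...  # grab, preprocess, analyse, compile and link steps go here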