diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..0741815 --- /dev/null +++ b/LICENSE @@ -0,0 +1,11 @@ +Copyright 2024 Gregory Tucker, European Spallation Source ERIC + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 3aaf69e..a85b27e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,20 +1,25 @@ [build-system] -requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"] +requires = [ + "setuptools>=45", + "setuptools_scm[toml]>=6.2", + "antlr4-tools>=0.2.1", + 'antlr4-python3-runtime>=4.13.2' +] build-backend = "setuptools.build_meta" [project] name = "mccode-antlr" dependencies = [ - 'antlr4-python3-runtime==4.13.2', # should match the version of antlr4 used + 'antlr4-python3-runtime>=4.13.2', 'numpy>=2', 'pooch>=1.7.0', 'confuse>=2.0.1', 'loguru>=0.7.2', 'gitpython>=3.1.43', - "importlib_metadata; python_version<'3.8'", ] description = "ANTLR4 grammars for McStas and McXtrace" readme = "README.md" +license = {text = "BSD-3-Clause"} requires-python = ">=3.9" authors = [ { name = "Gregory Tucker", email = "gregory.tucker@ess.eu" }, @@ -34,7 +39,6 @@ dynamic = ["version"] [project.optional-dependencies] test = ["gputil==1.4.0", 'pytest'] hdf5 = ["h5py>=3.11.0"] -antlr = ['antlr4-tools==0.2.1'] [project.urls] "Homepage" = "https://github.com/McStasMcXtrace/mccode-antlr" diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..77287a0 --- /dev/null +++ b/setup.py @@ -0,0 +1,164 @@ +from __future__ import annotations + +from setuptools import Command, setup +import setuptools.command.build + +from pathlib import Path +from enum import Enum + +class Target(Enum): + python = 0 + cpp = 1 + def __str__(self): + if self == Target.python: + return 'Python3' + elif self == Target.cpp: + return 'Cpp' + raise ValueError(f'Unknown target {self}') + + +class Feature(Enum): + listener = 0 + visitor = 1 + lexer = 2 + parser = 3 + def __str__(self): + if self == Feature.listener: + return 'Listener' + if self == Feature.visitor: + return 'Visitor' + if self == Feature.lexer: + return 'Lexer' + if self == Feature.parser: + return 
'Parser'
+
+
+def BuildANTLRCommand(source: Path, destination: str, grammars):
+
+    def antlr4_runtime_version():
+        """Retrieve the ANTLR4 version used by the available antlr4-python3-runtime"""
+        from importlib import metadata
+        try:
+            return metadata.metadata('antlr4-python3-runtime').get('version')
+        except metadata.PackageNotFoundError:
+            raise RuntimeError("antlr4-python3-runtime is a build dependency!")
+
+    def build_language(grammar_file: Path,
+                       target: Target,
+                       features: list[Feature],
+                       output=None,
+                       ):
+        from subprocess import run
+        from antlr4_tool_runner import initialize_paths, install_jre_and_antlr
+        args = [
+            f'-Dlanguage={target}',
+            '-visitor' if Feature.visitor in features else '-no-visitor',
+            '-listener' if Feature.listener in features else '-no-listener',
+            '-o', output.resolve(),
+            grammar_file.name
+        ]
+        print(f"Generating ANTLR {target} {' '.join(str(f) for f in features)} in {output} for {grammar_file}")
+        # The following copies the implementation of antlr4_tool_runner.tool,
+        # which pulls `args` from the system argv list
+        initialize_paths()
+        jar, java = install_jre_and_antlr(antlr4_runtime_version())
+        run([java, '-cp', jar, 'org.antlr.v4.Tool'] + args, cwd=grammar_file.parent)
+
+    class BuildANTLR(Command):
+        def __init__(self, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+            self.build_lib = None
+            self.editable_mode = False
+
+        def initialize_options(self):
+            """Initialize command state to defaults"""
+            ...
+
+        def finalize_options(self):
+            """
+            Populate the command state, which happens after initialization but
+            before the command is run.
+            The self.set_undefined_options call is used to inherit the `build_lib`
+            attribute from the `build_py` command.
+            """
+            self.set_undefined_options("build_py", ("build_lib", "build_lib"))
+
+        def run(self):
+            """
+            Perform actions with side-effects, such as invoking ANTLR to generate the Python parser modules.
+            The directory to which outputs are written depends on the `editable_mode` attribute.
+            When editable_mode == False, the outputs are written to the directory pointed to by build_lib.
+            When editable_mode == True, the outputs are written in-place,
+            i.e. into the directory containing the sources.
+            The `run` method is not executed during sdist builds.
+ """ + dest = Path(self.build_lib) / destination + for grammar, options in grammars.items(): + build_language(source/f'{grammar}.g4', + target=options['target'], + features=options['features'], + output=dest) + + def get_output_mapping(self): + """ + Return dict mapping output file paths to input file paths + Example: + dict = { "build/lib/output.py": "src/grammar/grammar.g4" } + """ + files = {} + dest = Path(self.build_lib) / destination + for grammar, options in grammars.items(): + for feature in [Feature.lexer, Feature.parser] + options['features']: + files[dest / f"{grammar}{feature}.py"] = source / f"{grammar}.g4" + if deps := options.get("deps"): + for dep in deps: + files[dest / f"{dep}{feature}.py"] = source / f"{dep}.g4" + return {str(k): str(v) for k, v in files.items()} + + def get_outputs(self): + """Return list containing paths to output files (generated *.py files)""" + files = [] + dest = Path(self.build_lib) / destination + for grammar, options in grammars.items(): + for feature in [Feature.lexer, Feature.parser] + options['features']: + files.append(dest / f"{grammar}{feature}.py") + if deps := options.get("deps"): + files.extend(dest / f"{dep}{feature}.py" for dep in deps) + return [str(file) for file in files] + + def get_source_files(self): + """Returns list containing paths to input files (*.g4 ANTLR grammars)""" + files = [] + for grammar, options in grammars.items(): + files.append(source / f"{grammar}.g4") + if deps := options.get("deps"): + files.extend(source / f"{dep}.g4" for dep in deps) + return [str(file) for file in files] + + return BuildANTLR + + +setuptools.command.build.build.sub_commands.append(("build_antlr", None)) + +setup(cmdclass={ + "build_antlr": BuildANTLRCommand( + source=Path() / "src" / "grammar", # grammar file loc relative to this file + destination="mccode_antlr/grammar", # generated file loc in build dir + grammars={ + 'McComp': { + 'target': Target.python, + 'features': [Feature.visitor], + 'deps': ('McCommon', 'c99'), + }, + 'McInstr': { + 'target': Target.python, + 'features': [Feature.visitor], + 'deps': ('McCommon', 'c99'), + }, + 'C': { + 'target': Target.python, + 'features': [Feature.visitor, Feature.listener], + }, + }, + ) +}) diff --git a/src/grammar/README.md b/src/grammar/README.md index da99259..888a87a 100644 --- a/src/grammar/README.md +++ b/src/grammar/README.md @@ -4,15 +4,18 @@ contents of this folder are generated by `ANTLR` from the grammar files, `C.g4`, `cpp.g4`, `McCommon.g4`, `McComp.g4` and `McInstr.g4`. +As such, the generated files should not be edited by hand. +Instead, modify the grammar files, and possibly any +functions which make use of `Mc*Visitor` or `CListener`. + + +## Automatic generation + The (re)generation of the various language files can be performed by calling `builder.py`, e.g., ```console python builder.py --verbose ``` -Or, if working with the installed package, -```console -mccode-antlr-builder --verbose -``` Building the parser, visitor, and listener implementations requires `antlr4-tools` to be installed. This can be done via @@ -22,6 +25,19 @@ pip install "antlr4-tools=0.2.1" Other versions of `antlr4-tools` may work, but unannounced changes in its API have been known to break the build process. -As such, the generated files should not be edited by hand. -Instead, modify the grammar files, and possibly any -functions which make use of `Mc*Visitor` or `CListener`. 
+### ANTLR version
+The version of ANTLR used to generate the required files can be
+specified as a command line argument, `-v` or `--version`.
+This version **must** match the version of the `antlr4-python3-runtime`
+package that `mccode-antlr` depends on.
+
+If no version is specified, the version of the installed `antlr4-python3-runtime`
+is used, so that the generated files work with the current Python environment.
+
+If no version is specified and `antlr4-python3-runtime` is not installed,
+the version is determined, in order, from:
+the `${ANTLR4_TOOLS_ANTLR_VERSION}` environment variable,
+a query for the latest available `antlr` version on
+[maven](https://mvnrepository.com/artifact/org.antlr/antlr4),
+or a version already present on the running system.
+
diff --git a/src/grammar/builder.py b/src/grammar/builder.py
index 01d21c0..6750de5 100644
--- a/src/grammar/builder.py
+++ b/src/grammar/builder.py
@@ -1,7 +1,41 @@
 from __future__ import annotations
+
+from importlib.metadata import PackageNotFoundError
+
 from loguru import logger
 from enum import Enum
-from pathlib import Path
+from functools import cache
+
+@cache
+def antlr4_version(version: str | None = None):
+    """Query maven for the most recent antlr4 version.
+
+    Note
+    ----
+    This method copies part of the antlr4-tools argument parser to allow
+    overriding any discovery by specifying an environment variable
+    `$ANTLR4_TOOLS_ANTLR_VERSION`. If that environment variable is unset,
+    maven is queried for the most up-to-date antlr4 version. If the query
+    times out, the local maven folders are checked to find a suitable version
+    (presumably the most recent one there).
+    Since the maven query takes time and could time out, and we only want
+    one version for all generated files, this method is cached to always
+    give the same output during a single runtime.
+    """
+    from os import environ
+    from antlr4_tool_runner import latest_version
+    return version or environ.get("ANTLR4_TOOLS_ANTLR_VERSION") or latest_version()
+
+
+def antlr4_runtime_version():
+    """Retrieve the ANTLR4 version used by the available antlr4-python3-runtime"""
+    from importlib import metadata
+    try:
+        return metadata.metadata('antlr4-python3-runtime').get('version')
+    except PackageNotFoundError:
+        logger.warning('The ANTLR4 Python runtime must match the ANTLR4 version, but is not installed')
+        logger.info(f'Install antlr4-python3-runtime=={antlr4_version()} to use the generated files')
+        return None


 class Target(Enum):
@@ -24,15 +58,13 @@ def rebuild_language(grammar_file, target: Target, features: list[Feature],
                      verbose=False,
-                     output=None
+                     output=None,
+                     dryrun=False,
+                     version: str | None = None,
                      ):
     from pathlib import Path
     from subprocess import Popen, PIPE
     from antlr4_tool_runner import initialize_paths, install_jre_and_antlr
-    from antlr4_tool_runner import process_args
-
-    def antlr4_version():
-        return process_args()[1]

     if not isinstance(grammar_file, Path):
         grammar_file = Path(grammar_file)
@@ -51,8 +83,11 @@ def antlr4_version():
     # The following copies the implementation of antlr4_tool_runner.tool, which pulls `args` from the system argv list
     # Setup:
     initialize_paths()
-    version = antlr4_version()
-    jar, java = install_jre_and_antlr(version)
+
+    if dryrun:
+        return
+
+    jar, java = install_jre_and_antlr(antlr4_version(version))
     # Call antlr4
     p = Popen([java, '-cp', jar, 'org.antlr.v4.Tool'] + args, stdout=PIPE, stderr=PIPE)
     out, err = [x.decode('UTF-8') for x in p.communicate()]
@@ -62,8 +97,13 @@
     print(out, end='')


-def language_present_and_up_to_date(grammar_file, newest, features, path, verbose=False):
+def language_missing_or_outdated(grammar_file, newest, features, path,
+                                 verbose=False,
+                                 dryrun=False,
+                                 version: str | None = None,
+                                 ):
     from pathlib import Path
+    import re

     if not isinstance(grammar_file, Path):
         grammar_file = Path(grammar_file)
@@ -84,16 +124,33 @@ def language_present_and_up_to_date(grammar_file, newest, features, path, verbos
     if not all(x.exists() for x in generated_files):
         if verbose:
             logger.info(f'Not all language files exist for {grammar_file}')
-        return False
+        return True
     if any(x.stat().st_mtime < newest for x in generated_files):
         if verbose:
             logger.info(f'Not all language files up-to-date for {grammar_file}')
-        return False
+        return True
+
+    # Finally, check that the ANTLR4 version recorded in the generated files matches the requested one
+    version = antlr4_version(version)
+    # some antlr components use `self.checkVersion("{hard coded version string}")`
+    # to verify that they correspond to the same version as the runtime.
+    r_checkversion = r'checkVersion\(\"(?P<version>[0-9]+\.[0-9]+\.[0-9]+)\"\)'
+    # others (all?) have a comment string on their first line with ANTLR {version}
+    r_antlr_version = r'ANTLR (?P<version>[0-9]+\.[0-9]+\.[0-9]+)'
+    for file in generated_files:
+        with file.open('r') as f:
+            contents = f.read()
+        checkversion_matches = re.findall(r_checkversion, contents, re.MULTILINE)
+        antlr_version_matches = re.findall(r_antlr_version, contents, re.MULTILINE)
+        if any(v != version for v in checkversion_matches + antlr_version_matches):
+            if verbose:
+                logger.info(f'Not all language files match requested ANTLR version {version}')
+            return True

     if verbose:
         logger.info(f'Language files for {grammar_file} are up-to-date')
-    return True
+    return False


 # def rebuild_speedy_language(grammar_file, features: list[Feature], output: Path, verbose=False):
@@ -114,7 +171,7 @@ def ensure_language_up_to_date(
         target: Target,
         features: list[Feature],
         deps=None,
-        verbose=False,
+        **kwargs
 ):
     """Ensure the ANTLR parsed language files are up-to-date."""
     from pathlib import Path
@@ -122,28 +179,43 @@
     # which the grammar defining files are under
     grammar_file = Path(__file__).parent / f'{grammar}.g4'
     # and we want to put Python files under src/mccode_antlr/grammar
-    output_path = Path(__file__).parent.parent / "mccode_antlr" / "grammar"
+    output = Path(__file__).parent.parent / "mccode_antlr" / "grammar"

     newest = grammar_file.stat().st_mtime
-    if deps:
-        for dep in deps:
-            newest = max(newest, Path(__file__).parent.joinpath(f'{dep}.g4').stat().st_mtime)
+    for dep in deps or []:
+        newest = max(newest, Path(__file__).parent.joinpath(f'{dep}.g4').stat().st_mtime)

-    if not language_present_and_up_to_date(grammar_file, newest, features, output_path, verbose=verbose):
-        rebuild_language(grammar_file, target, features, output=output_path, verbose=verbose)
+    if language_missing_or_outdated(grammar_file, newest, features, output, **kwargs):
+        rebuild_language(grammar_file, target, features, output=output, **kwargs)


 def main():
     from argparse import ArgumentParser
-    parser = ArgumentParser(prog="mccode-antlr-build", description='Ensure ANTLR files are up-to-date')
-    parser.add_argument('-v ', '--verbose', action='store_true', help='Print out more information')
+    parser = ArgumentParser(prog="mccode-antlr-build", description='Ensure ANTLR files are up-to-date', allow_abbrev=False)
+    parser.add_argument('-v', '--version', type=str, default=None, help='Version of ANTLR to build with')
+    parser.add_argument('--verbose', action='store_true', help='Print out more information')
+    parser.add_argument('--dryrun', action='store_true', help='Set up but do not execute the build')
     args = parser.parse_args()
-    verbose = args.verbose
-
-    ensure_language_up_to_date('McComp', target=Target.python, features=[Feature.visitor], deps=('McCommon', 'c99'), verbose=verbose)
-    ensure_language_up_to_date('McInstr', target=Target.python, features=[Feature.visitor], deps=('McCommon', 'c99'), verbose=verbose)
-    ensure_language_up_to_date('C', target=Target.python, features=[Feature.visitor, Feature.listener], verbose=verbose)
+    kwargs = {
+        'version': args.version, 'verbose': args.verbose, 'dryrun': args.dryrun
+    }
+    mc_kwargs = {
+        'target': Target.python,
+        'features': [Feature.visitor],
+        'deps': ('McCommon', 'c99'),
+    }
+    c_kwargs = {
+        'target': Target.python,
+        'features': [Feature.visitor, Feature.listener],
+    }
+    if kwargs['version'] is None:
+        kwargs['version'] = antlr4_runtime_version()
+
+    ensure_language_up_to_date('McComp', **mc_kwargs, **kwargs)
+    ensure_language_up_to_date('McInstr', **mc_kwargs, **kwargs)
+    
ensure_language_up_to_date('C', **c_kwargs, **kwargs) if __name__ == '__main__': + # __name__ = 'builder.py' main() diff --git a/src/mccode_antlr/compiler/c.py b/src/mccode_antlr/compiler/c.py index 1778746..1915872 100644 --- a/src/mccode_antlr/compiler/c.py +++ b/src/mccode_antlr/compiler/c.py @@ -5,6 +5,7 @@ from mccode_antlr.instr import Instr from mccode_antlr.translators.c import CTargetVisitor from loguru import logger +from .check import compiled, gpu_only, mpi_only class CBinaryTarget: @@ -90,8 +91,38 @@ def instrument_source(instrument: Instr, generator: dict, config: dict, verbose: return visitor.contents() -def compile_instrument(instrument: Instr, target: CBinaryTarget, output: Union[str, Path] = None, - recompile: bool = False, replace: bool = True, dump_source: bool = False, **kwargs): +def get_compiler_linker_flags(instrument: Instr, target: CBinaryTarget): + # the type of binary requested determines (some of) the required flags: + compiler_flags = target.flags + target.extra_flags + linker_flags = target.linker_flags + # the instrument-defined flags are always(?) linker flags: + # the flags in an instrument *might* contain ENV, CMD, GETPATH directives which need to be expanded via decode: + linker_flags.extend( + [word for flag in instrument.decoded_flags() for word in flag.split()]) + + # Why is this addition necessary? + if any('OPENACC' in word for word in compiler_flags) and any( + 'NeXus' in word for word in compiler_flags): + compiler_flags.append('-D__GNUC__') + return compiler_flags, linker_flags + + +def _compile_instrument( + instrument: Instr, + target: CBinaryTarget, + output: Union[str, Path] = None, + replace: bool = False, + dump_source: bool = False, + **kwargs +): + """Do the actual compilation -- should not be called directly by users + + Note + ---- + If you are a user of the mccode-antlr module, call the `compile_instrument` + gateway method instead to enable a cached check that your system compiler is + configured correctly. + """ from os import R_OK, access from subprocess import run, CalledProcessError from mccode_antlr.config import config @@ -107,26 +138,10 @@ def compile_instrument(instrument: Instr, target: CBinaryTarget, output: Union[s # allow for the user to specify only the output *directory* output = output.joinpath(instrument.name).with_suffix(config['ext'].get(str)) - if output.exists() and not recompile: - raise RuntimeError(f"Output {output} exists but recompile is not requested.") if output.exists() and not replace: return output - logger.info(f'Sort out flags for compilation') - - # the type of binary requested determines (some of) the required flags: - compiler_flags = target.flags + target.extra_flags - linker_flags = target.linker_flags - # the instrument-defined flags are always(?) linker flags: - # the flags in an instrument *might* contain ENV, CMD, GETPATH directives which need to be expanded via decode: - linker_flags.extend([word for flag in instrument.decoded_flags() for word in flag.split()]) - - logger.info(f'{compiler_flags = }') - logger.info(f'{linker_flags = }') - - # Why is this addition necessary? 
- if any('OPENACC' in word for word in compiler_flags) and any('NeXus' in word for word in compiler_flags): - compiler_flags.append('-D__GNUC__') + compiler_flags, linker_flags = get_compiler_linker_flags(instrument, target) # The solitary '-' specifies *where* the stdin source should be processed, which is critical for getting # linking flags right on (some) Linux systems @@ -140,20 +155,59 @@ def compile_instrument(instrument: Instr, target: CBinaryTarget, output: Union[s result = run(command, input=source, text=True, capture_output=True) if result.returncode: raise RuntimeError(f"Compilation\n{command}\nfailed with output\n{result.stdout}\nand error\n{result.stderr}") - # - # try: - # run(command, input=instrument_source(instrument, **kwargs), text=True, check=True) - # except CalledProcessError as error: - # raise RuntimeError(f'Compilation failed, raising error {error}') - if not output.exists(): raise RuntimeError(f"Compilation should have produced {output}, but it does not appear to exist") if not access(output, R_OK): raise RuntimeError(f"{output} exists but is not an executable") - return output +@gpu_only +def compile_acc_instrument(*args, **kwargs): + return _compile_instrument(*args, **kwargs) + + +@mpi_only +def compile_mpi_instrument(*args, **kwargs): + return _compile_instrument(*args, **kwargs) + + +@compiled +def compile_c_instrument(*args, **kwargs): + return _compile_instrument(*args, **kwargs) + + +def compile_instrument( + instrument: Instr, + target: CBinaryTarget, + output: Union[str, Path] = None, + replace: bool = False, + dump_source: bool = False, + **kwargs +): + """Compile an Instr object to one of the possible C targets + + Parameters + ---------- + instrument: Instr + The Instr to turn into a compiled binary + target: CBinaryTarget + The type of binary to produce: C99, OpenACC, MPI, etc. 
+ output: + The path (and optionally name) where to store the produced binary + replace: + If true compilation will proceed even if a same-named path exists already + dump_source: + A diagnostic mode that outputs the generated C code into the current working + directory + """ + if target.type & CBinaryTarget.Type.acc: + return compile_acc_instrument(instrument, target, output, replace, dump_source, **kwargs) + if target.type & CBinaryTarget.Type.mpi: + return compile_mpi_instrument(instrument, target, output, replace, dump_source, **kwargs) + return compile_c_instrument(instrument, target, output, replace, dump_source, **kwargs) + + def run_compiled_instrument(binary: Path, target: CBinaryTarget, options: str, capture=False, dry_run: bool = False): from subprocess import run, CalledProcessError from platform import system diff --git a/src/mccode_antlr/compiler/check.py b/src/mccode_antlr/compiler/check.py new file mode 100644 index 0000000..47e2da5 --- /dev/null +++ b/src/mccode_antlr/compiler/check.py @@ -0,0 +1,112 @@ +from __future__ import annotations +from functools import cache + +def subprocess_fails(args: list[str]): + import subprocess + try: + subprocess.run(args, check=True) + return False + except FileNotFoundError: + pass + except RuntimeError: + pass + return True + + +@cache +def check_for_mccode_antlr_compiler(which: str) -> bool: + from loguru import logger + from ..config import config + cc = config + for key in which.split('/'): + cc = cc[key] + cc = cc.get(str) + + # different compilers support different 'version' or 'help' command line options + options = '--version', '/?', '--help' + if all(subprocess_fails([cc, opt]) for opt in options): + logger.info(f"Compiler '{cc}' not found.") + which = which.replace('/','_') + logger.info(f'Provide alternate via MCCODE_ANTLR_{which} environment variable') + return False + return True + + +def compiles(compiler: str, instr): + from os import access, R_OK + from loguru import logger + from pathlib import Path + from tempfile import TemporaryDirectory + from subprocess import run + from mccode_antlr.translators.target import MCSTAS_GENERATOR + from .c import CBinaryTarget, get_compiler_linker_flags, instrument_source + from ..config import config as module_config + + target = CBinaryTarget(mpi='mpi' in compiler, acc=compiler == 'acc', count=1, nexus=False) + + compile_config = dict(default_main=True, enable_trace=False, portable=False, + include_runtime=True, embed_instrument_file=False, verbose=False) + + compiler_flags, linker_flags = get_compiler_linker_flags(instr, target) + + with TemporaryDirectory() as directory: + binary = Path(directory) / f"output{module_config['ext'].get(str)}" + command = [target.compiler, *compiler_flags, '-o', str(binary), '-', *linker_flags] + source = instrument_source(instr, generator=MCSTAS_GENERATOR, config=compile_config) + result = run(command, input=source, text=True, capture_output=True) + + if result.returncode: + logger.info(f'Failed compiling simple instrument with error: {result.stderr}') + logger.info(f"Verify compiler {target.compiler} accepts {compiler_flags} and linker accepts {linker_flags}") + return False + if not binary.exists() or not binary.is_file() or not access(binary, R_OK): + logger.info(f"Compilation did not produce an executable output file, check that {target.compiler} works") + return False + + return True + + +@cache +def simple_instr_compiles(which: str) -> bool: + from subprocess import CalledProcessError + if not check_for_mccode_antlr_compiler(which): + return 
False + try: + from mccode_antlr.loader import parse_mcstas_instr + instr = parse_mcstas_instr("define instrument check() trace component a = Arm() at (0,0,0) absolute end") + return compiles(which, instr) + except RuntimeError: + return False + except FileNotFoundError: + return False + except CalledProcessError: + return False + + +def compiled(method, compiler: str | None = None): + from unittest import TestCase + if compiler is None: + # Basic compiled instruments only need the 'cc' compiler specified in the config file + compiler = 'cc' + + def wrapper(*args, **kwargs): + if simple_instr_compiles(compiler): + return method(*args, **kwargs) + elif isinstance(args[0], TestCase): + args[0].skipTest(f'Skipping due to lack of working ${compiler}') + else: + raise RuntimeError(f'A working compiler is required to use function {method.__name__}') + + return wrapper + + +def gpu_only(method): + from loguru import logger + # GPU compiled instruments need the specific OpenACC compiler + # **PLUS** they need to _actually_ have the openACC header (macOS and Windows don't use different compilers) + return compiled(method, 'acc') + + +def mpi_only(method): + # MPI compiled instruments need the specified compiler + return compiled(method, 'mpi/cc') diff --git a/src/mccode_antlr/config/__init__.py b/src/mccode_antlr/config/__init__.py index a6aa32c..0b4926b 100644 --- a/src/mccode_antlr/config/__init__.py +++ b/src/mccode_antlr/config/__init__.py @@ -1,12 +1,13 @@ import confuse +from os import environ # Try and simplify handling configuration values need for, e.g., compiling different versions of the runtimes # under different operating systems, while allowing a user to 'easily' override defaults if necessary. # # Any platform independent configuration settings can go in 'config_default.yaml' -config = confuse.LazyConfig('mccode_antlr', __name__) +config = confuse.LazyConfig('mccodeantlr', __name__) -# use environment variables specified as 'MCCODE_XYZ' as configuration entries 'xyz' +# use environment variables specified as 'MCCODEANTLR_XYZ' as configuration entries 'xyz' config.set_env() @@ -72,3 +73,8 @@ def version_macro(): config.add(_platform_defaults()) # config.add(_common_defaults()) + +# Allow overriding with pseudo-standard environment variables: +for env in ('CC',): + if env in environ: + config[env.lower()] = environ[env] diff --git a/src/mccode_antlr/run/range.py b/src/mccode_antlr/run/range.py index 0e0869d..b56f36b 100644 --- a/src/mccode_antlr/run/range.py +++ b/src/mccode_antlr/run/range.py @@ -93,7 +93,7 @@ def __eq__(self, other): return self.value == other.value and self.maximum == other.maximum def __str__(self): - return f'{self.value}(up to {self.maximum} times)' + return f'{self.value}' def __repr__(self): return f'Singular({self.value}, {self.maximum})' @@ -201,14 +201,15 @@ def parse_command_line_parameters(unparsed: list[str]) -> dict[str, Union[Singul :parameter unparsed: A list of parameters. 
""" + # TODO work out why the keys for ranges were .lower()'ed before ranges = {} index = 0 while index < len(unparsed): if '=' in unparsed[index]: k, v = unparsed[index].split('=', 1) - ranges[k.lower()] = _MRange_or_Singular(v) + ranges[k] = _MRange_or_Singular(v) elif index + 1 < len(unparsed) and '=' not in unparsed[index + 1]: - ranges[unparsed[index].lower()] = _MRange_or_Singular(unparsed[index + 1]) + ranges[unparsed[index]] = _MRange_or_Singular(unparsed[index + 1]) index += 1 else: raise ValueError(f'Invalid parameter: {unparsed[index]}') @@ -224,7 +225,7 @@ def parse_scan_parameters(unparsed: list[str]) -> dict[str, MRange | Singular]: maximum iterations of all the ranges to avoid infinite iterations. """ ranges = parse_command_line_parameters(unparsed) - max_length = max(len(v) if isinstance(v, MRange) else 1 for v in ranges.values()) + max_length = max(len(v) if isinstance(v, MRange) else 1 for v in ranges.values()) if len(ranges) else 1 for k, v in ranges.items(): if isinstance(v, Singular) and v.maximum is None: ranges[k] = Singular(v.value, max_length) diff --git a/src/mccode_antlr/run/runner.py b/src/mccode_antlr/run/runner.py index e4bde6a..4810abb 100644 --- a/src/mccode_antlr/run/runner.py +++ b/src/mccode_antlr/run/runner.py @@ -2,7 +2,6 @@ from pathlib import Path from mccode_antlr.reader import Registry - def regular_mccode_runtime_dict(args: dict) -> dict: def insert_best_of(src: dict, snk: dict, names: tuple): def get_best_of(): @@ -97,7 +96,6 @@ def resolvable(name: str): aa('-n', '--ncount', nargs=1, type=int, default=None, help='Number of neutrons to simulate') aa('-m', '--mesh', action='store_true', default=False, help='N-dimensional mesh scan') aa('-s', '--seed', nargs=1, type=int, default=None, help='Random number generator seed') - aa('-t', '--trace', action='store_true', default=False, help='Enable tracing') aa('-g', '--gravitation', action='store_true', default=False, help='Enable gravitation for all trajectories') aa('--bufsiz', nargs=1, type=int, default=None, help='Monitor_nD list/buffer-size') @@ -142,12 +140,9 @@ def mccode_compile(instr, directory, generator, target: dict | None = None, conf try: binary = compile_instrument(instr, def_target, directory, generator=generator, config=def_config, **kwargs) - except RuntimeError as e: - logger.error(f'Failed to compile instrument: {e}') - raise e - # binary = Path(directory).joinpath(f'{instr.name}{module_config["ext"].get(str)}') - # if not binary.exists() or not binary.is_file() or not access(binary, R_OK): - # raise FileNotFoundError(f"No executable binary, {binary}, produced") + except RuntimeError as compilation_error: + logger.error(f'Failed to compile instrument: {compilation_error}') + raise compilation_error return binary, def_target @@ -188,6 +183,7 @@ def mccode_run_scan(name: str, binary, target, parameters, directory, grid: bool return results else: directory.parent.mkdir(parents=True, exist_ok=True) + print(parameters) pars = mccode_runtime_parameters(args, parameters) return mccode_run_compiled(binary, target, directory, pars, capture=capture, dry_run=dry_run) @@ -237,11 +233,12 @@ def mccode_run_cmd(flavor: str, registry: Registry, generator: dict): from os import R_OK, access args, parameters = parse_mccode_run_script(flavor) + filename = args.filename if isinstance(args.filename, str) else next(iter(args.filename)) config = dict( enable_trace=args.trace if args.trace is not None else False, embed_instrument_file=args.source if args.source is not None else False, verbose=args.verbose if 
args.verbose is not None else False, - output=args.output_file if args.output_file is not None else args.filename.with_suffix('.c') + output=args.output_file if args.output_file is not None else filename.with_suffix('.c') ) target = dict( mpi=args.parallel, @@ -260,9 +257,9 @@ def mccode_run_cmd(flavor: str, registry: Registry, generator: dict): capture=(not args.verbose) if args.verbose is not None else False, ) # check if the filename is actually a compiled instrument already: - if args.output_file is None and args.filename.exists() and access(args.filename, R_OK): - binary = args.filename - name = args.filename.stem + if args.output_file is None and filename.exists() and access(filename, R_OK): + binary = filename + name = filename.stem else: # McCode always requires access to a remote Pooch repository: registries = [registry] @@ -274,12 +271,18 @@ def mccode_run_cmd(flavor: str, registry: Registry, generator: dict): # Construct the object which will read the instrument and component files, producing Python objects reader = Reader(registries=registries) # Read the provided .instr file, including all specified .instr and .comp files along the way - instrument = reader.get_instrument(args.filename) + instrument = reader.get_instrument(filename) name = instrument.name # Generate the C binary for the instrument -- will output to, e.g., {instrument.name}.out, in the current directory # unless if output_file was specified binary, target = mccode_compile(instrument, args.output_file, generator, target=target, config=config) + if not len(parameters): + from loguru import logger + logger.error("Interactive parameter entry does not currently work") + logger.info(f"Execute `{binary} --list-parameters` to check expected parameters") + return + mccode_run_scan(name, binary, target, parameters, args.directory, args.mesh, **runtime) diff --git a/tests/runtime/compiled.py b/tests/runtime/compiled.py index 7ad90ce..e4c99fa 100644 --- a/tests/runtime/compiled.py +++ b/tests/runtime/compiled.py @@ -1,65 +1,7 @@ from __future__ import annotations from functools import cache from unittest import TestCase - - -@cache -def check_for_mccode_antlr_compiler(which: str) -> bool: - import subprocess - from loguru import logger - from mccode_antlr.config import config - cc = config - for key in which.split('/'): - cc = cc[key] - cc = cc.get(str) - - try: - subprocess.run([cc, '--version'], check=True) - return True - except FileNotFoundError: - logger.info(f'Expected compiler {cc} not found.') - logger.info('Provide alternate C compiler via MCCODE_ANTLR_CC environment variable') - return False - - -@cache -def simple_instr_compiles(which: str) -> bool: - if check_for_mccode_antlr_compiler(which): - try: - from mccode_antlr.loader import parse_mcstas_instr - instr = parse_mcstas_instr("define instrument check() trace component a = Arm() at (0,0,0) absolute end") - compile_and_run(instr, "-n 1", run=False, target={'acc': which == 'acc'}) - except RuntimeError: - return False - except FileNotFoundError: - return False - return True - - -def compiled(method, compiler: str | None = None): - if compiler is None: - # Basic compiled instruments only need the 'cc' compiler specified in the config file - compiler = 'cc' - - def wrapper(*args, **kwargs): - if simple_instr_compiles(compiler): - method(*args, **kwargs) - elif isinstance(args[0], TestCase): - args[0].skipTest(f'Skipping due to lack of working ${compiler}') - - return wrapper - - -def gpu_only(method): - # GPU compiled instruments need the specific OpenACC 
compiler - # **PLUS** they need to _actually_ have the openACC header (macOS and Windows don't use different compilers) - return compiled(method, 'acc') - - -def mpi_only(method): - # MPI compiled instruments need the specified compiler - return compiled(method, 'mpi/cc') - +from mccode_antlr.compiler.check import simple_instr_compiles, compiled, gpu_only, mpi_only @cache def mcpl_config_available():