Skip to content

Commit

Permalink
Incremental PSyclone processing (#201)
Browse files Browse the repository at this point in the history
  • Loading branch information
bblay authored Feb 24, 2023
1 parent 4414985 commit 35c6d98
Show file tree
Hide file tree
Showing 46 changed files with 1,287 additions and 221 deletions.
14 changes: 12 additions & 2 deletions envs/docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,10 +1,20 @@
# Usage:
# docker build -t fab .
# docker run --env PYTHONPATH=/fab/source -v /home/byron/git/fab:/fab -v /home/byron:/home/byron -it fab bash

FROM ubuntu:20.04

RUN apt update && apt install -y gcc gfortran libclang-dev python-clang python3-pip rsync
RUN apt update && apt install -y gcc gfortran libclang-dev python-clang python3-pip rsync git

RUN mkdir -p ~/.local/lib/python3.8/site-packages
RUN cp -vr /usr/lib/python3/dist-packages/clang ~/.local/lib/python3.8/site-packages/

RUN pip install flake8 fparser matplotlib mypy pytest sphinx sphinx_rtd_theme
RUN pip install pytest pytest-cov pytest-mock flake8 mypy
RUN pip install sphinx sphinx_rtd_theme sphinx-autodoc-typehints
RUN pip install svn GitPython matplotlib
RUN pip install fparser psyclone==2.1.0

RUN mkdir /usr/share/psyclone
RUN ln -s /usr/local/share/psyclone/psyclone.cfg /usr/share/psyclone/psyclone.cfg

CMD [ "python3", "--version" ]
12 changes: 9 additions & 3 deletions run_configs/lfric/atm.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,12 @@
from fab.steps.grab.folder import GrabFolder
from fab.steps.link import LinkExe
from fab.steps.preprocess import fortran_preprocessor, c_preprocessor
from fab.steps.psyclone import Psyclone, psyclone_preprocessor
from fab.steps.root_inc_files import RootIncFiles
from fab.steps.find_source_files import FindSourceFiles, Exclude, Include

from grab_lfric import lfric_source_config, gpl_utils_source_config
from lfric_common import Configurator, FparserWorkaround_StopConcatenation, psyclone_preprocessor, Psyclone
from lfric_common import Configurator, FparserWorkaround_StopConcatenation

logger = logging.getLogger('fab')

Expand Down Expand Up @@ -103,9 +104,14 @@ def atm_config(two_stage=False, opt='Og'):
],
),

psyclone_preprocessor(set_um_physics=True),
# todo: put this inside the psyclone step, no need for it to be separate, there's nothing required between them
psyclone_preprocessor(common_flags=['-DUM_PHYSICS', '-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DCOUPLED']),

Psyclone(kernel_roots=[config.build_output]),
Psyclone(
kernel_roots=[config.build_output],
transformation_script=lfric_source / 'lfric_atm/optimisation/meto-spice/global.py',
cli_args=[],
),

# todo: do we need this one in here?
FparserWorkaround_StopConcatenation(name='fparser stop bug workaround'),
Expand Down
11 changes: 8 additions & 3 deletions run_configs/lfric/gungho.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,10 @@
from fab.steps.link import LinkExe
from fab.steps.preprocess import fortran_preprocessor
from fab.steps.find_source_files import FindSourceFiles, Exclude
from fab.steps.psyclone import Psyclone, psyclone_preprocessor

from grab_lfric import lfric_source_config, gpl_utils_source_config
from lfric_common import Configurator, FparserWorkaround_StopConcatenation, psyclone_preprocessor, Psyclone
from lfric_common import Configurator, FparserWorkaround_StopConcatenation

logger = logging.getLogger('fab')

Expand Down Expand Up @@ -66,9 +67,13 @@ def gungho_config(two_stage=False, opt='Og'):
'-DRDEF_PRECISION=64', '-DR_SOLVER_PRECISION=64', '-DR_TRAN_PRECISION=64', '-DUSE_XIOS',
]),

psyclone_preprocessor(),
psyclone_preprocessor(common_flags=['-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DCOUPLED']),

Psyclone(kernel_roots=[config.build_output]),
Psyclone(
kernel_roots=[config.build_output],
transformation_script=lfric_source / 'gungho/optimisation/meto-spice/global.py',
cli_args=[],
),

FparserWorkaround_StopConcatenation(name='fparser stop bug workaround'),

Expand Down
80 changes: 1 addition & 79 deletions run_configs/lfric/lfric_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,7 @@
from pathlib import Path
from typing import Dict

from fab.artefacts import SuffixFilter
from fab.steps import Step, check_for_errors
from fab.steps.preprocess import PreProcessor
from fab.util import log_or_dot, input_to_output_fpath
from fab.steps import Step
from fab.tools import run_command

logger = logging.getLogger('fab')
Expand Down Expand Up @@ -110,78 +107,3 @@ def run(self, artefact_store, config):

open(feign_config_mod_fpath, 'wt').write(
open(broken_version, 'rt').read().replace(bad, good))


def psyclone_preprocessor(set_um_physics=False):
    """
    Create a preprocessing step which runs cpp over the .x90 source files.

    Reads the 'all_source' collection, filtered to '.x90' files, and writes the
    results into the 'preprocessed_x90' collection, keeping the '.x90' suffix.

    :param set_um_physics:
        If True, the UM physics macro is defined for the preprocessor.

    """
    flags = ['-P', '-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DCOUPLED']
    if set_um_physics:
        flags.append('-DUM_PHYSICS')

    return PreProcessor(
        preprocessor='cpp -traditional-cpp',

        source=SuffixFilter('all_source', '.x90'),
        output_collection='preprocessed_x90',

        output_suffix='.x90',
        name='preprocess x90',
        common_flags=flags,
    )


class Psyclone(Step):
    """
    Build step which runs the PSyclone code generator over preprocessed .x90 files.

    Reads the 'preprocessed_x90' collection and records the generated and
    modified Fortran files in the 'psyclone_output' collection.

    """
    def __init__(self, name=None, kernel_roots=None):
        """
        :param name: Optional step label; defaults to 'psyclone'.
        :param kernel_roots: Folders containing kernel source, passed to psyclone via -d.
        """
        super().__init__(name=name or 'psyclone')
        self.kernel_roots = kernel_roots or []

    def run(self, artefact_store: Dict, config):
        super().run(artefact_store=artefact_store, config=config)

        results = self.run_mp(artefact_store['preprocessed_x90'], self.do_one_file)
        check_for_errors(results, caller_label=self.name)

        # each successful result is a list of output files; flatten them into one collection
        successes = [result for result in results if not isinstance(result, Exception)]
        logger.info(f"success with {len(successes)} files")
        output_files = []
        for file_list in successes:
            output_files.extend(file_list)
        artefact_store['psyclone_output'] = output_files

    def do_one_file(self, x90_file):
        """Run psyclone over a single file, returning the output paths or an Exception."""
        log_or_dot(logger=logger, msg=str(x90_file))

        psy_file = x90_file.parent / (str(x90_file.stem) + '_psy.f90')
        alg_file = x90_file.with_suffix('.f90')

        # generate into the build output, not the source
        psy_file = input_to_output_fpath(config=self._config, input_path=psy_file)
        alg_file = input_to_output_fpath(config=self._config, input_path=alg_file)
        psy_file.parent.mkdir(parents=True, exist_ok=True)

        # -d specifies "a root directory structure containing kernel source"
        kernel_args = [arg for root in self.kernel_roots for arg in ('-d', root)]

        command = [
            'psyclone', '-api', 'dynamo0.3',
            '-l', 'all',
            *kernel_args,
            '-opsy', psy_file,  # filename of generated PSy code
            '-oalg', alg_file,  # filename of transformed algorithm code
            x90_file,
        ]

        # skip the run when reusing artefacts and the output already exists
        if not (self._config.reuse_artefacts and Path(alg_file).exists()):
            try:
                run_command(command)
            except Exception as err:
                logger.error(err)
                return err

        result = [alg_file]
        if Path(psy_file).exists():
            result.append(psy_file)
        return result
5 changes: 3 additions & 2 deletions run_configs/lfric/mesh_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@
from fab.steps.link import LinkExe
from fab.steps.preprocess import fortran_preprocessor
from fab.steps.find_source_files import FindSourceFiles, Exclude
from fab.steps.psyclone import Psyclone, psyclone_preprocessor

from lfric_common import Configurator, psyclone_preprocessor, Psyclone, FparserWorkaround_StopConcatenation
from lfric_common import Configurator, FparserWorkaround_StopConcatenation
from grab_lfric import lfric_source_config, gpl_utils_source_config


Expand Down Expand Up @@ -44,7 +45,7 @@ def mesh_tools_config(two_stage=False, opt='Og'):

fortran_preprocessor(preprocessor='cpp -traditional-cpp', common_flags=['-P']),

psyclone_preprocessor(),
psyclone_preprocessor(common_flags=['-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DCOUPLED']),

Psyclone(kernel_roots=[config.build_output]),

Expand Down
16 changes: 15 additions & 1 deletion run_configs/um/build_um.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,12 @@
import re
import warnings
from argparse import ArgumentParser
from pathlib import Path

from fab.artefacts import CollectionGetter
from fab.build_config import AddFlags, BuildConfig
from fab.constants import PRAGMAD_C
from fab.parse.fortran import FortranParserWorkaround
from fab.steps import Step
from fab.steps.analyse import Analyse
from fab.steps.archive_objects import ArchiveObjects
Expand Down Expand Up @@ -128,7 +130,19 @@ def um_atmos_safe_config(revision, two_stage=False):
],
),

Analyse(root_symbol='um_main'),
Analyse(
root_symbol='um_main',

# depending on environment, fparser2 can fail to parse this file but it does compile.
special_measure_analysis_results=[
FortranParserWorkaround(
fpath=Path(config.build_output / "casim/lookup.f90"),
symbol_defs={'lookup'},
symbol_deps={'mphys_die', 'variable_precision', 'mphys_switches', 'mphys_parameters', 'special',
'passive_fields', 'casim_moments_mod', 'yomhook', 'parkind1'},
)
]
),

CompileC(compiler='gcc', common_flags=['-c', '-std=c99']),

Expand Down
3 changes: 2 additions & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@
else:
raise RuntimeError('Cannot determine package version.')


tests = ['pytest', 'pytest-cov', 'pytest-mock', 'flake8', 'mypy']
docs = ['sphinx', 'sphinx_rtd_theme', 'sphinx-autodoc-typehints']
features = ['GitPython', 'matplotlib']
features = ['GitPython', 'matplotlib', 'jinja2', 'psyclone==2.1.0']

setuptools.setup(
name='sci-fab',
Expand Down
2 changes: 1 addition & 1 deletion source/fab/artefacts.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
from typing import Iterable, Union, Dict, List

from fab.constants import BUILD_TREES

from fab.dep_tree import filter_source_tree, AnalysedDependent
from fab.util import suffix_filter

Expand Down Expand Up @@ -86,6 +85,7 @@ def __init__(self, collections: Iterable[Union[str, ArtefactsGetter]]):
# todo: ensure the labelled values are iterables
def __call__(self, artefact_store: Dict):
super().__call__(artefact_store)
# todo: this should be a set, in case a file appears in multiple collections
result = []
for collection in self.collections:
if isinstance(collection, str):
Expand Down
12 changes: 7 additions & 5 deletions source/fab/build_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,8 +119,6 @@ def add_current_prebuilds(self, artefacts: Iterable[Path]):
Mark the given file paths as being current prebuilds, not to be cleaned during housekeeping.
"""
if not self._artefact_store.get(CURRENT_PREBUILDS):
self.init_artefact_store()
self._artefact_store[CURRENT_PREBUILDS].update(artefacts)

def run(self):
Expand Down Expand Up @@ -151,16 +149,16 @@ def run(self):
self._finalise_logging()

def _run_prep(self):
self._init_logging()

logger.info('')
logger.info('------------------------------------------------------------')
logger.info(f'running {self.project_label}')
logger.info('------------------------------------------------------------')
logger.info('')

self.build_output.mkdir(parents=True, exist_ok=True)
self.prebuild_folder.mkdir(parents=True, exist_ok=True)
self._prep_output_folders()

self._init_logging()
init_metrics(metrics_folder=self.metrics_folder)

# note: initialising here gives a new set of artefacts each run
Expand All @@ -172,6 +170,10 @@ def _run_prep(self):
logger.info("no housekeeping specified, adding a default hard cleanup")
self.steps.append(CleanupPrebuilds(all_unused=True))

def _prep_output_folders(self):
self.build_output.mkdir(parents=True, exist_ok=True)
self.prebuild_folder.mkdir(parents=True, exist_ok=True)

def _init_logging(self):
# add a file logger for our run
self.project_workspace.mkdir(parents=True, exist_ok=True)
Expand Down
6 changes: 4 additions & 2 deletions source/fab/dep_tree.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
"""

# todo: we've since adopted the term "source tree", so we should probably rename this module to match.
import logging
from abc import ABC
import logging
from pathlib import Path
from typing import Set, Dict, Iterable, List, Union, Optional, Any

Expand Down Expand Up @@ -87,13 +87,15 @@ def to_dict(self) -> Dict[str, Any]:

@classmethod
def from_dict(cls, d):
return cls(
result = cls(
fpath=Path(d["fpath"]),
file_hash=d["file_hash"],
symbol_defs=set(d["symbol_defs"]),
symbol_deps=set(d["symbol_deps"]),
file_deps=set(map(Path, d["file_deps"])),
)
assert result.file_hash is not None
return result


def extract_sub_tree(source_tree: Dict[Path, AnalysedDependent], root: Path, verbose=False)\
Expand Down
14 changes: 10 additions & 4 deletions source/fab/parse/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,8 @@ def file_hash(self):
return self._file_hash

def __eq__(self, other):
return vars(self) == vars(other) and type(self) == type(other)
# todo: better to use self.field_names() instead of vars(self) in order to evaluate any lazy attributes?
return vars(self) == vars(other)

# persistence
def to_dict(self) -> Dict[str, Any]:
Expand Down Expand Up @@ -104,13 +105,13 @@ def __repr__(self):
return f'{self.__class__.__name__}({params})'

# We need to be hashable before we can go into a set, which is useful for our subclasses.
# Note, the result will change with each Python invocation.
# Note, the numerical result will change with each Python invocation.
def __hash__(self):
# Build up a list of things to hash, from our attributes.
# We use self.field_names() rather than vars(self) because we want to evaluate any lazy attributes.
# We turn dicts and sets into sorted tuples for hashing.
# todo: There's a good reason dicts and sets aren't hashable, so we should be sure we're happy doing this.
# Discuss.
# todo: There's a good reason dicts and sets aren't supposed to be hashable.
# Please see https://github.com/metomi/fab/issues/229
things = set()
for field_name in self.field_names():
thing = getattr(self, field_name)
Expand Down Expand Up @@ -139,3 +140,8 @@ def __init__(self, fpath: Union[str, Path]):
"""
super().__init__(fpath=fpath)

    @classmethod
    def from_dict(cls, d):
        """
        Deserialisation is deliberately unsupported for this class.

        :raises NotImplementedError: always.

        """
        # todo: load & save should be implemented here and used by the calling code, to save reanalysis.
        raise NotImplementedError
7 changes: 5 additions & 2 deletions source/fab/parse/c.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,14 @@
from pathlib import Path
from typing import List, Optional, Union, Tuple

from fab.dep_tree import AnalysedDependent

try:
import clang # type: ignore
import clang.cindex # type: ignore
except ImportError:
clang = None

from fab.dep_tree import AnalysedDependent
from fab.util import log_or_dot, file_checksum

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -109,15 +110,17 @@ def run(self, fpath: Path) \
msg = 'clang not available, C analysis disabled'
warnings.warn(msg, ImportWarning)
return ImportWarning(msg), None
log_or_dot(logger, f"analysing {fpath}")

# do we already have analysis results for this file?
# todo: dupe - probably best in a parser base class
file_hash = file_checksum(fpath).file_hash
analysis_fpath = Path(self._config.prebuild_folder / f'{fpath.stem}.{file_hash}.an')
if analysis_fpath.exists():
log_or_dot(logger, f"found analysis prebuild for {fpath}")
return AnalysedC.load(analysis_fpath), analysis_fpath

log_or_dot(logger, f"analysing {fpath}")

analysed_file = AnalysedC(fpath=fpath, file_hash=file_hash)

# parse the file
Expand Down
Loading

0 comments on commit 35c6d98

Please sign in to comment.