diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 211b25cc..3e6a4f51 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -13,7 +13,7 @@ jobs: needs: [setup_test] runs-on: ubuntu-latest container: - image: precice/precice:latest + image: precice/precice:develop options: --user root steps: - name: Checkout Repository @@ -37,7 +37,7 @@ jobs: name: Run setup install runs-on: ubuntu-latest container: - image: precice/precice:latest + image: precice/precice:develop options: --user root steps: - name: Checkout Repository @@ -55,20 +55,23 @@ jobs: - name: Run setup install run: python3 setup.py install --user - name: Test install - run: python3 -c "import precice" + run: | + export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH + python3 -c "import precice" setup_install_single_version_externally_managed: name: Run setup install --single-version-externally-managed (for spack) needs: [setup_install] runs-on: ubuntu-latest container: - image: precice/precice:latest + image: precice/precice:develop options: --user root steps: - name: Checkout Repository uses: actions/checkout@v2 - name: Install pip3, pkgconfig and upgrade pip3 run: | + su root apt-get -yy update apt-get install -y python3-pip pkg-config rm -rf /var/lib/apt/lists/* @@ -97,12 +100,14 @@ jobs: packages: toml - name: Checkout precice and make required files discoverable run: | - git clone -b v2.5.0 https://github.com/precice/precice.git precice-core + git clone --branch develop https://github.com/precice/precice.git precice-core mkdir -p precice - cp precice-core/src/precice/SolverInterface.hpp precice/SolverInterface.hpp + cp precice-core/src/precice/Participant.hpp precice/Participant.hpp + cp precice-core/src/precice/Tooling.hpp precice/Tooling.hpp + cp precice-core/src/precice/Tooling.hpp precice/Tooling.cpp cd precice-core mkdir build && cd build - cmake .. 
-DPRECICE_MPICommunication=OFF -DPRECICE_PETScMapping=OFF -DPRECICE_PythonActions=OFF -DBUILD_TESTING=OFF + cmake .. -DPRECICE_FEATURE_MPI_COMMUNICATION=OFF -DPRECICE_FEATURE_PETSC_MAPPING=OFF -DPRECICE_FEATURE_PYTHON_ACTIONS=OFF -DBUILD_TESTING=OFF - name: Install dependencies run: | python3 -c 'import toml; c = toml.load("pyproject.toml"); print("\n".join(c["build-system"]["requires"]))' | pip3 install -r /dev/stdin @@ -119,7 +124,7 @@ jobs: needs: [setup_test] runs-on: ubuntu-latest container: - image: precice/precice:latest + image: precice/precice:develop options: --user root steps: - name: Checkout Repository @@ -133,14 +138,16 @@ jobs: - name: Run pip install run: pip3 install --user . - name: Run pip install - run: python3 -c "import precice" + run: | + export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH + python3 -c "import precice" solverdummy_test: name: Run solverdummy needs: [setup_install, setup_test] runs-on: ubuntu-latest container: - image: precice/precice:latest + image: precice/precice:develop options: --user root steps: - name: Checkout Repository @@ -155,6 +162,6 @@ jobs: run: pip3 install --user . 
- name: Run solverdummy run: | + export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH cd examples/solverdummy/ python3 solverdummy.py precice-config.xml SolverOne MeshOne & python3 solverdummy.py precice-config.xml SolverTwo MeshTwo - diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index c9c4ea09..ea12b8cd 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -1,10 +1,19 @@ name: Update docker image on: - workflow_dispatch: # Trigger by hand from the UI + workflow_dispatch: # Trigger by hand from the UI + inputs: + branch: + type: choice + description: branch to build the image from + options: + - develop + - master push: branches: - develop + - master + jobs: build-and-release-docker-image: @@ -13,10 +22,26 @@ jobs: env: docker_username: precice steps: - - name: Get branch name - if: github.event_name != 'pull_request' + - name: Set branch name for manual triggering + if: github.event_name == 'workflow_dispatch' + shell: bash + run: echo "BINDINGS_REF=${{ inputs.branch }}" >> $GITHUB_ENV + - name: Set branch name for "on pull request" triggering + if: github.event_name != 'pull_request' && github.event_name != 'workflow_dispatch' + shell: bash + run: echo "BINDINGS_REF=${{ github.ref_name }}" >> $GITHUB_ENV + - name: Set PRECICE_TAG and the TAG depending on branch shell: bash - run: echo "branch=$(echo ${GITHUB_REF#refs/heads/} | tr / -)" >> $GITHUB_ENV + run: | + if [[ '${{ env.BINDINGS_REF }}' == 'master' ]]; then + echo "PRECICE_TAG=latest" >> "$GITHUB_ENV" + echo "TAG=latest" >> "$GITHUB_ENV" + echo "Building TAG: latest" + else + echo "PRECICE_TAG=${{ env.BINDINGS_REF }}" >> "$GITHUB_ENV" + echo "TAG=${{ env.BINDINGS_REF }}" >> "$GITHUB_ENV" + echo "Building TAG: ${{ env.BINDINGS_REF }}" + fi - name: Checkout Repository uses: actions/checkout@v2 - name: Set up Docker Buildx @@ -31,6 +56,7 @@ jobs: with: push: true file: "./tools/releasing/packaging/docker/Dockerfile" - tags: ${{ 
env.docker_username }}/python-bindings:${{ env.branch }},${{ env.docker_username }}/python-bindings:latest + tags: ${{ env.docker_username }}/python-bindings:${{ env.TAG }} build-args: | - branch=${{ env.branch }} + PRECICE_TAG=${{ env.PRECICE_TAG }} + PYTHON_BINDINGS_REF=${{ env.BINDINGS_REF }} diff --git a/.github/workflows/check-markdown.yml b/.github/workflows/check-markdown.yml new file mode 100644 index 00000000..63a0df9a --- /dev/null +++ b/.github/workflows/check-markdown.yml @@ -0,0 +1,14 @@ +name: Lint docs +on: [push, pull_request] +jobs: + check_md: + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v2 + - name: Lint markdown files (markdownlint) + uses: articulate/actions-markdownlint@v1 + with: + config: .markdownlint.json + files: '.' + ignore: changelog-entries diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 18aa3722..66e7be4e 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -27,8 +27,8 @@ jobs: pkgconfig - name: Build and publish env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} run: | python setup.py sdist twine upload dist/* diff --git a/.github/workflows/run-solverdummy.yml b/.github/workflows/run-solverdummy.yml index 08b04d51..1704a108 100644 --- a/.github/workflows/run-solverdummy.yml +++ b/.github/workflows/run-solverdummy.yml @@ -11,7 +11,7 @@ jobs: name: Run solverdummies runs-on: ubuntu-latest container: - image: precice/precice:latest + image: precice/precice:develop options: --user root steps: - name: Checkout Repository @@ -25,7 +25,7 @@ jobs: - name: Install bindings run: pip3 install --user . 
- name: Check whether preCICE was built with MPI # reformat version information as a dict and check whether preCICE was compiled with MPI - run: python3 -c "import precice; assert({item.split('=')[0]:item.split('=')[-1] for item in str(precice.get_version_information()).split(';')}['PRECICE_MPICommunication']=='Y')" + run: python3 -c "import precice; assert({item.split('=')[0]:item.split('=')[-1] for item in str(precice.get_version_information()).split(';')}['PRECICE_FEATURE_MPI_COMMUNICATION']=='Y')" - name: Run solverdummies run: | cd examples/solverdummy/ diff --git a/.markdownlint.json b/.markdownlint.json new file mode 100644 index 00000000..e53595a9 --- /dev/null +++ b/.markdownlint.json @@ -0,0 +1,7 @@ +{ + "MD013": false, + "MD014": false, + "MD024": false, + "MD034": false, + "MD033": false +} diff --git a/CHANGELOG.md b/CHANGELOG.md index d09c07f7..65e257ea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ All notable changes to this project will be documented in this file. +## 3.0.0.0 + +* Add Cython as build time dependency https://github.com/precice/python-bindings/pull/177 +* Update CMake configuration flags for preCICE source installation in Actions. https://github.com/precice/python-bindings/commit/23a840144c2647d6cf09c0ed87be3b768a22feb7 +* Remove API functions `has_mesh` and `has_data` and rename `get_mesh_vertices_and_ids` to `get_mesh_vertices_and_coordinates`. https://github.com/precice/python-bindings/commit/cd446d2807b841d81a4cf5c9dd6656ab43c278c3 +* Update API according to preCICE v3.0.0 https://github.com/precice/python-bindings/pull/179 + ## 2.5.0.4 * Add `tag_prefix = v` in versioneer configuration of `setup.cfg`. diff --git a/README.md b/README.md index f07343dc..bbbd5a54 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,16 @@ -⚠️ The latest version of the documentation for the python bindings can be found on [precice.org](https://precice.org/installation-bindings-python.html). 
The information from this `README` is currently under revision and will be moved ⚠️ +# Python language bindings for the C++ library preCICE -Python language bindings for the C++ library preCICE ----------------------------------------------------- +⚠️ The latest version of the documentation for the python bindings can be found on [precice.org](https://precice.org/installation-bindings-python.html). The information from this `README` is currently under revision and will be moved ⚠️ [![Upload Python Package](https://github.com/precice/python-bindings/workflows/Upload%20Python%20Package/badge.svg?branch=master)](https://pypi.org/project/pyprecice/) This package provides python language bindings for the C++ library [preCICE](https://github.com/precice/precice). Note that the first three digits of the version number of the bindings indicate the preCICE version that the bindings support. The last digit represents the version of the bindings. Example: `v2.0.0.1` and `v2.0.0.2` of the bindings represent versions `1` and `2` of the bindings that are compatible with preCICE `v2.0.0`. -# User documentation +## User documentation Please refer to [the preCICE documentation](https://www.precice.org/installation-bindings-python.html) for information on how to install and use the python bindings. Information below is intended for advanced users and developers. -# Required dependencies +## Required dependencies **preCICE**: Refer to [the preCICE documentation](https://precice.org/installation-overview.html) for information on building and installation. @@ -19,143 +18,168 @@ Please refer to [the preCICE documentation](https://www.precice.org/installation **MPI**: `mpi4py` requires MPI to be installed on your system. -# Installing the package +## Installing the package We recommend using pip3 (version 19.0.0 or newer required) for the sake of simplicity. You can check your pip3 version via `pip3 --version`. 
To update pip3, use the following line: -``` +```bash $ pip3 install --user --upgrade pip ``` -## Using pip3 +### Using pip3 -### preCICE system installs +#### preCICE system installs For system installs of preCICE, installation works out of the box. There are different ways how pip can be used to install pyprecice. pip will fetch cython and other build-time dependencies, compile the bindings and finally install the package pyprecice. * (recommended) install [pyprecice from PyPI](https://pypi.org/project/pyprecice/) - ``` + ```bash $ pip3 install --user pyprecice ``` * provide the link to this repository to pip (replace `` with the branch you want to use, preferably `master` or `develop`) - ``` + ```bash $ pip3 install --user https://github.com/precice/python-bindings/archive/.zip ``` * if you already cloned this repository, execute the following command from this directory: - ``` + ```bash $ pip3 install --user . ``` + *note the dot at the end of the line* -### preCICE at custom location (setting PATHS) +#### preCICE at custom location (setting PATHS) If preCICE (the C++ library) was installed in a custom prefix, or only built but not installed at all, you have to extend the following environment variables: -- `LIBRARY_PATH`, `LD_LIBRARY_PATH` to the library location, or `$prefix/lib` -- `CPATH` either to the `src` directory or the `$prefix/include` +* `LIBRARY_PATH`, `LD_LIBRARY_PATH` to the library location, or `$prefix/lib` +* `CPATH` either to the `src` directory or the `$prefix/include` The preCICE documentation provides more informaiton on [linking preCICE](https://precice.org/installation-linking.html). -## Using Spack +### Using Spack You can also install the python language bindings for preCICE via Spack by installing the Spack package `py-pyprecice`. Refer to [our installation guide for preCICE via Spack](https://precice.org/installation-spack.html) for getting started with Spack. 
-## Using setup.py +### Using setup.py -### preCICE system installs +#### preCICE system installs In this directory, execute: -``` + +```bash $ python3 setup.py install --user ``` -### preCICE at custom location (setting PATHS) +#### preCICE at custom location (setting PATHS) see above. Then run -``` + +```bash $ python3 setup.py install --user ``` -### preCICE at custom location (explicit include path, library path) +#### preCICE at custom location (explicit include path, library path) 1. Install cython and other dependencies via pip3 - ``` + + ```bash $ pip3 install --user setuptools wheel cython packaging numpy ``` + 2. Open terminal in this folder. 3. Build the bindings - ``` + + ```bash $ python3 setup.py build_ext --include-dirs=$PRECICE_ROOT/src --library-dirs=$PRECICE_ROOT/build/last ``` - **Options:** - - `--include-dirs=`, default: `''` - Path to the headers of preCICE, point to the sources `$PRECICE_ROOT/src`, or the your custom install prefix `$prefix/include`. - - **NOTES:** - - - If you have built preCICE using CMake, you can pass the path to the CMake binary directory using `--library-dirs`. - - It is recommended to use preCICE as a shared library here. + **Options:** + * `--include-dirs=`, default: `''` + Path to the headers of preCICE, point to the sources `$PRECICE_ROOT/src`, or the your custom install prefix `$prefix/include`. + + **NOTES:** + + * If you have built preCICE using CMake, you can pass the path to the CMake binary directory using `--library-dirs`. + * It is recommended to use preCICE as a shared library here. 4. Install the bindings - ``` + + ```bash $ python3 setup.py install --user ``` -5. Clean-up _optional_ - ``` +5. 
Clean-up *optional* + + ```bash $ python3 setup.py clean --all ``` -# Test the installation +## Test the installation Update `LD_LIBRARY_PATH` such that python can find `precice.so` -``` +```bash $ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$PRECICE_ROOT/build/last ``` Run the following to test the installation: -``` +```bash $ python3 -c "import precice" ``` -## Unit tests +### Unit tests -1. Clean-up __mandatory__ (because we must not link against the real `precice.so`, but we use a mocked version) - ``` +1. Clean-up **mandatory** (because we must not link against the real `precice.so`, but we use a mocked version) + + ```bash $ python3 setup.py clean --all ``` 2. Set `CPLUS_INCLUDE_PATH` (we cannot use `build_ext` and the `--include-dirs` option here) - ``` + + ```bash $ export CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:$PRECICE_ROOT/src ``` 3. Run tests with - ``` + + ```bash $ python3 setup.py test ``` -# Usage +## Usage -You can find the documentation of the implemented interface in the file `precice.pyx`. For an example of how `pyprecice` can be used please refer to the [1D elastic tube example](https://precice.org/tutorials-elastic-tube-1d.html#python). +You can find the documentation of the implemented interface in the file `precice.pyx`. For an example of how `pyprecice` can be used please refer to the [1D elastic tube example](https://precice.org/tutorials-elastic-tube-1d.html#python). **Note** The python package that is installed is called `pyprecice`. It provides the python module `precice` that can be use in your code via `import precice`, for example. -# Troubleshooting & miscellaneous +## Troubleshooting & miscellaneous ### preCICE is not found The following error shows up during installation, if preCICE is not found: +```bash + /tmp/pip-install-d_fjyo1h/pyprecice/precice.cpp:643:10: fatal error: precice/Participant.hpp: No such file or directory + 643 | #include "precice/Participant.hpp" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + compilation terminated. 
+ error: command 'x86_64-linux-gnu-gcc' failed with exit status 1 + ---------------------------------------- + ERROR: Failed building wheel for pyprecice +Failed to build pyprecice +ERROR: Could not build wheels for pyprecice which use PEP 517 and cannot be installed directly ``` + +Or, for preCICE v2: + +```bash /tmp/pip-install-d_fjyo1h/pyprecice/precice.cpp:643:10: fatal error: precice/SolverInterface.hpp: No such file or directory 643 | #include "precice/SolverInterface.hpp" | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -178,7 +202,7 @@ In case the compilation fails with `shared_ptr.pxd not found` messages, check if ### `Python.h` missing -``` +```bash $ python3 -m pip install pyprecice Collecting pyprecice ... @@ -187,14 +211,14 @@ Collecting pyprecice error: command 'x86_64-linux-gnu-gcc' failed with exit status 1 ---------------------------------------- - Failed building wheel for pyprecice + Failed building wheel for pyprecice ``` Please try to install `python3-dev`. E.g. via `apt install python3-dev`. Please make sure that you use the correct version (e.g. `python3.5-dev` or `python3.6-dev`). You can check your version via `python3 --version`. ### `libprecice.so` is not found at runtime -``` +```bash $ python3 -c "import precice" Traceback (most recent call last): File "", line 1, in @@ -216,43 +240,53 @@ If you want to use the old experimental python bindings (released with preCICE v This guide provides steps to install python bindings for precice-1.6.1 for a conda environment Python 2.7.17 on the CoolMUC. Note that preCICE no longer supports Python 2 after v1.4.0. Hence, some modifications to the python setup code was necessary. Most steps are similar if not identical to the basic guide without petsc or python above. This guide assumes that the Eigen dependencies have already been installed. 
Load the prerequisite libraries: -``` + +```bash module load gcc/7 module unload mpi.intel module load mpi.intel/2018_gcc module load cmake/3.12.1 ``` + At the time of this writing `module load boost/1.68.0` is no longer available. Instead -boost 1.65.1 was installed per the `boost and yaml-cpp` guide above. +boost 1.65.1 was installed per the `boost and yaml-cpp` guide above. In order to have the right python dependencies, a packaged conda environment was transferred to SuperMUC. The following dependencies were installed: -- numpy -- mpi4py + +* numpy +* mpi4py With the python environment active, we have to feed the right python file directories to the cmake command. Note that -DPYTHON_LIBRARY expects a python shared library. You can likely modify the version to fit what is required. -``` + +```bash mkdir build && cd build -cmake -DBUILD_SHARED_LIBS=ON -DPRECICE_PETScMapping=OFF -DPRECICE_PythonActions=ON -DCMAKE_INSTALL_PREFIX=/path/to/precice -DCMAKE_BUILD_TYPE=Debug .. -DPYTHON_INCLUDE_DIR=$(python -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())") -DPYTHON_LIBRARY=$(python -c "import distutils.sysconfig as sysconfig; print(sysconfig.get_config_var('LIBDIR')+'/libpython2.7.so')") -DNumPy_INCLUDE_DIR=$(python -c "import numpy; print(numpy.get_include())") +cmake -DBUILD_SHARED_LIBS=ON -DPRECICE_FEATURE_PETSC_MAPPING=OFF -DPRECICE_FEATURE_PYTHON_ACTIONS=ON -DCMAKE_INSTALL_PREFIX=/path/to/precice -DCMAKE_BUILD_TYPE=Debug .. 
-DPYTHON_INCLUDE_DIR=$(python -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())") -DPYTHON_LIBRARY=$(python -c "import distutils.sysconfig as sysconfig; print(sysconfig.get_config_var('LIBDIR')+'/libpython2.7.so')") -DNumPy_INCLUDE_DIR=$(python -c "import numpy; print(numpy.get_include())") make -j 12 make install ``` + After installing, make sure you add the preCICE installation paths to your `.bashrc`, so that other programs can find it: -``` + +```bash export PRECICE_ROOT="path/to/precice_install" export PKG_CONFIG_PATH="path/to/precice_install/lib/pkgconfig:${PKG_CONFIG_PATH}" export CPLUS_INCLUDE_PATH="path/to/precice_install/include:${CPLUS_INCLUDE_PATH}" export LD_LIBRARY_PATH="path/to/precice_install/lib:${LD_LIBRARY_PATH}" ``` + Then, navigate to the python_future bindings script. -``` + +```bash cd /path/to/precice/src/precice/bindings/python_future ``` + Append the following to the head of the file to allow Python2 to run Python3 code. Note that -importing `unicode_literals` from `future` will cause errors in `setuptools` methods as string literals +importing `unicode_literals` from `future` will cause errors in `setuptools` methods as string literals in code are interpreted as `unicode` with this import. -``` + +```python from __future__ import (absolute_import, division, print_function) from builtins import ( @@ -261,14 +295,18 @@ from builtins import ( pow, round, super, filter, map, zip) ``` + Modify `mpicompiler_default = "mpic++"` to `mpicompiler_default = "mpicxx"` in line 100. Run the setup file using the default Python 2.7.17. -``` + +```bash python setup.py install --user ``` + ### ValueError while importing preCICE + If you face the error: ```bash @@ -281,8 +319,7 @@ make sure that you are using an up-to-date version of NumPy. 
You can update NumP pip3 install numpy --upgrade ``` - -# Contributors +## Contributors * [Benjamin Rodenberg](https://github.com/BenjaminRodenberg) * [Ishaan Desai](https://github.com/IshaanDesai) diff --git a/cyprecice/Participant.pxd b/cyprecice/Participant.pxd new file mode 100644 index 00000000..ffe9783b --- /dev/null +++ b/cyprecice/Participant.pxd @@ -0,0 +1,88 @@ +from libcpp cimport bool +from libcpp.set cimport set +from libcpp.string cimport string +from libcpp.vector cimport vector + +cdef extern from "precice/Participant.hpp" namespace "precice": + cdef cppclass Participant: + # construction and configuration + + Participant (const string&, const string&, int, int) except + + + Participant (const string&, const string&, int, int, void*) except + + + void configure (const string&) + + # steering methods + + void initialize () + + void advance (double computedTimestepLength) + + void finalize() + + # status queries + + int getMeshDimensions(const string& meshName) const + + int getDataDimensions(const string& meshName, const string& dataName) const + + bool isCouplingOngoing() const + + bool isTimeWindowComplete() const + + double getMaxTimeStepSize() const + + bool requiresInitialData() + + bool requiresWritingCheckpoint() + + bool requiresReadingCheckpoint() + + # mesh access + + bool requiresMeshConnectivityFor (const string& meshName) const + + int setMeshVertex (const string& meshName, vector[double] position) + + int getMeshVertexSize (const string& meshName) const + + void setMeshVertices (const string& meshName, vector[double] positions, vector[int]& ids) + + void setMeshEdge (const string& meshName, int firstVertexID, int secondVertexID) + + void setMeshEdges (const string& meshName, vector[int] vertices) + + void setMeshTriangle (const string& meshName, int firstVertexID, int secondVertexID, int thirdVertexID) + + void setMeshTriangles (const string& meshName, vector[int] vertices) + + void setMeshQuad (const string& meshName, int 
firstVertexID, int secondVertexID, int thirdVertexID, int fourthVertexID) + + void setMeshQuads (const string& meshName, vector[int] vertices) + + void setMeshTetrahedron (const string& meshName, int firstVertexID, int secondVertexID, int thirdVertexID, int fourthVertexID) + + void setMeshTetrahedra (const string& meshName, vector[int] vertices) + + # data access + + void writeData (const string& meshName, const string& dataName, vector[int] vertices, vector[double] values) + + void readData (const string& meshName, const string& dataName, vector[int] vertices, const double relativeReadTime, vector[double]& values) const + + # direct access + + void setMeshAccessRegion (const string& meshName, vector[double] boundingBox) const + + void getMeshVertexIDsAndCoordinates (const string& meshName, vector[int]& ids, vector[double]& coordinates) const + + # Gradient related API + + bool requiresGradientDataFor(const string& meshName, const string& dataName) const + + void writeGradientData(const string& meshName, const string& dataName, vector[int] vertices, vector[double] gradientValues) + + +cdef extern from "precice/Tooling.hpp" namespace "precice": + string getVersionInformation() diff --git a/cyprecice/SolverInterface.pxd b/cyprecice/SolverInterface.pxd deleted file mode 100644 index 633442d0..00000000 --- a/cyprecice/SolverInterface.pxd +++ /dev/null @@ -1,136 +0,0 @@ -from libcpp cimport bool -from libcpp.set cimport set -from libcpp.string cimport string - - -cdef extern from "precice/SolverInterface.hpp" namespace "precice": - cdef cppclass SolverInterface: - # construction and configuration - - SolverInterface (const string&, const string&, int, int) except + - - SolverInterface (const string&, const string&, int, int, void*) except + - - void configure (const string&) - - # steering methods - - double initialize () - - void initializeData () - - double advance (double computedTimestepLength) - - void finalize() - - # status queries - - int getDimensions() const - 
- bool isCouplingOngoing() const - - bool isReadDataAvailable() const - - bool isWriteDataRequired (double computedTimestepLength) const - - bool isTimeWindowComplete() const - - bool hasToEvaluateSurrogateModel () const - - bool hasToEvaluateFineModel () const - - # action methods - - bool isActionRequired (const string& action) const - - void markActionFulfilled (const string& action) - - # mesh access - - bool hasMesh (const string& meshName ) const - - int getMeshID (const string& meshName) const - - set[int] getMeshIDs () - - bool isMeshConnectivityRequired (int meshID) const - - int setMeshVertex (int meshID, const double* position) - - int getMeshVertexSize (int meshID) const - - void setMeshVertices (int meshID, int size, const double* positions, int* ids) - - void getMeshVertices (int meshID, int size, const int* ids, double* positions) const - - void getMeshVertexIDsFromPositions (int meshID, int size, double* positions, int* ids) const - - int setMeshEdge (int meshID, int firstVertexID, int secondVertexID) - - void setMeshTriangle (int meshID, int firstEdgeID, int secondEdgeID, int thirdEdgeID) - - void setMeshTriangleWithEdges (int meshID, int firstVertexID, int secondVertexID, int thirdVertexID) - - void setMeshQuad (int meshID, int firstEdgeID, int secondEdgeID, int thirdEdgeID, int fourthEdgeID) - - void setMeshQuadWithEdges (int meshID, int firstVertexID, int secondVertexID, int thirdVertexID, int fourthVertexID) - - # data access - - bool hasData (const string& dataName, int meshID) const - - int getDataID (const string& dataName, int meshID) const - - void mapReadDataTo (int toMeshID) - - void mapWriteDataFrom (int fromMeshID) - - void writeBlockVectorData (const int dataID, const int size, const int* valueIndices, const double* values) - - void writeVectorData (const int dataID, const int valueIndex, const double* value) - - void writeBlockScalarData (const int dataID, const int size, const int* valueIndices, const double* values) - - void 
writeScalarData (const int dataID, const int valueIndex, const double value) - - void readBlockVectorData (const int dataID, const int size, const int* valueIndices, double* values) const - - void readBlockVectorData (const int dataID, const int size, const int* valueIndices, double relativeReadTime, double* values) const - - void readVectorData (const int dataID, const int valueIndex, double* value) const - - void readVectorData (const int dataID, const int valueIndex, double relativeReadTime, double* value) const - - void readBlockScalarData (const int dataID, const int size, const int* valueIndices, double* values) const - - void readBlockScalarData (const int dataID, const int size, const int* valueIndices, double relativeReadTime, double* values) const - - void readScalarData (const int dataID, const int valueIndex, double& value) const - - void readScalarData (const int dataID, const int valueIndex, double relativeReadTime, double& value) const - - # Gradient related API - - bool isGradientDataRequired(int dataID) const; - - void writeBlockVectorGradientData(int dataID, int size, const int* valueIndices, const double* gradientValues); - - void writeScalarGradientData(int dataID, int valueIndex, const double* gradientValues); - - void writeVectorGradientData(int dataID, int valueIndex, const double* gradientValues); - - void writeBlockScalarGradientData(int dataID, int size, const int* valueIndices, const double* gradientValues); - - # direct mesh access - - void setMeshAccessRegion (const int meshID, const double* boundingBox) const - - void getMeshVerticesAndIDs (const int meshID, const int size, int* ids, double* coordinates) const - -cdef extern from "precice/SolverInterface.hpp" namespace "precice": - string getVersionInformation() - -cdef extern from "precice/SolverInterface.hpp" namespace "precice::constants": - const string& actionWriteInitialData() - const string& actionWriteIterationCheckpoint() - const string& actionReadIterationCheckpoint() diff 
--git a/cyprecice/cyprecice.pxd b/cyprecice/cyprecice.pxd index 0e6953db..32e819e4 100644 --- a/cyprecice/cyprecice.pxd +++ b/cyprecice/cyprecice.pxd @@ -5,10 +5,10 @@ The python module precice offers python language bindings to the C++ coupling li cimport numpy as np cimport cython -cimport SolverInterface +cimport Participant as CppParticipant from cpython.version cimport PY_MAJOR_VERSION # important for determining python version in order to properly normalize string input. See http://docs.cython.org/en/latest/src/tutorial/strings.html#general-notes-about-c-strings and https://github.com/precice/precice/issues/68 . @cython.embedsignature(True) -cdef class Interface: - cdef SolverInterface.SolverInterface *thisptr # hold a C++ instance being wrapped +cdef class Participant: + cdef CppParticipant.Participant *thisptr # hold a C++ instance being wrapped diff --git a/cyprecice/cyprecice.pyx b/cyprecice/cyprecice.pyx index 056f9b2a..a2897186 100644 --- a/cyprecice/cyprecice.pyx +++ b/cyprecice/cyprecice.pyx @@ -10,6 +10,8 @@ cimport numpy import numpy as np from mpi4py import MPI import warnings +from libcpp.string cimport string +from libcpp.vector cimport vector from cpython.version cimport PY_MAJOR_VERSION # important for determining python version in order to properly normalize string input. See http://docs.cython.org/en/latest/src/tutorial/strings.html#general-notes-about-c-strings and https://github.com/precice/precice/issues/68 . @@ -33,15 +35,14 @@ def check_array_like(argument, argument_name, function_name): raise TypeError("{} requires array_like input for {}, but was provided the following input type: {}".format( function_name, argument_name, type(argument))) from None -cdef class Interface: +cdef class Participant: """ Main Application Programming Interface of preCICE. 
To adapt a solver to preCICE, follow the following main structure: - - Create an object of SolverInterface with Interface() - - Configure the object with Interface::configure() - - Initialize preCICE with Interface::initialize() - - Advance to the next (time)step with Interface::advance() - - Finalize preCICE with Interface::finalize() + - Create an object of Participant with Participant() + - Initialize preCICE with Participant::initialize() + - Advance to the next (time)step with Participant::advance() + - Finalize preCICE with Participant::finalize() - We use solver, simulation code, and participant as synonyms. - The preferred name in the documentation is participant. """ @@ -49,7 +50,7 @@ cdef class Interface: # fake __init__ needed to display docstring for __cinit__ (see https://stackoverflow.com/a/42733794/5158031) def __init__(self, solver_name, configuration_file_name, solver_process_index, solver_process_size, communicator=None): """ - Constructor of Interface class. + Constructor of Participant class. 
Parameters ---------- @@ -66,12 +67,12 @@ cdef class Interface: Returns ------- - SolverInterface : object - Object pointing to the defined coupling interface + Participant : object + Object pointing to the defined participant Example ------- - >>> interface = precice.Interface("SolverOne", "precice-config.xml", 0, 1) + >>> participant = precice.Participant("SolverOne", "precice-config.xml", 0, 1) preCICE: This is preCICE version X.X.X preCICE: Revision info: vX.X.X-X-XXXXXXXXX preCICE: Configuring preCICE with configuration: "precice-config.xml" @@ -83,22 +84,26 @@ cdef class Interface: cdef void* communicator_ptr if communicator: communicator_ptr = communicator - self.thisptr = new SolverInterface.SolverInterface (convert(solver_name), convert(configuration_file_name), solver_process_index, solver_process_size, communicator_ptr) + self.thisptr = new CppParticipant.Participant (convert(solver_name), convert(configuration_file_name), solver_process_index, solver_process_size, communicator_ptr) else: - self.thisptr = new SolverInterface.SolverInterface (convert(solver_name), convert(configuration_file_name), solver_process_index, solver_process_size) + self.thisptr = new CppParticipant.Participant (convert(solver_name), convert(configuration_file_name), solver_process_index, solver_process_size) pass def __dealloc__ (self): """ - Destructor of Interface class + Destructor of Participant class """ del self.thisptr + # steering methods def initialize (self): """ - Fully initializes preCICE. + Fully initializes preCICE and initializes coupling data. The starting values for coupling data are zero by + default. To provide custom values, first set the data using the Data Access methods before calling this + method to finally exchange the data. + This function handles: - Parallel communication to the coupling partner/s is setup. - Meshes are exchanged between coupling partners and the parallel partitions are created. 
@@ -110,33 +115,7 @@ cdef class Interface: max_timestep : double Maximum length of first timestep to be computed by the solver. """ - return self.thisptr.initialize () - - def initialize_data (self): - """ - Initializes coupling data. The starting values for coupling data are zero by default. - To provide custom values, first set the data using the Data Access methods and - call this method to finally exchange the data. - - Serial Coupling Scheme: Only the first participant has to call this method, the second participant - receives the values on calling initialize(). - - Parallel Coupling Scheme: - - Values in both directions are exchanged. - - Both participants need to call initializeData(). - - Notes - ----- - Previous calls: - initialize() has been called successfully. - The action WriteInitialData is required - advance() has not yet been called. - finalize() has not yet been called. - - Tasks completed: - Initial coupling data was exchanged. - """ - self.thisptr.initializeData () + self.thisptr.initialize () def advance (self, double computed_timestep_length): @@ -148,11 +127,6 @@ cdef class Interface: computed_timestep_length : double Length of timestep used by the solver. - Returns - ------- - max_timestep : double - Maximum length of next timestep to be computed by solver. - Notes ----- Previous calls: @@ -169,7 +143,7 @@ cdef class Interface: [Second Participant] Configured post processing schemes are applied. Meshes with data are exported to files if configured. """ - return self.thisptr.advance (computed_timestep_length) + self.thisptr.advance (computed_timestep_length) def finalize (self): @@ -187,90 +161,66 @@ cdef class Interface: """ self.thisptr.finalize () + # status queries - def get_dimensions (self): + def get_mesh_dimensions (self, mesh_name): """ - Returns the number of spatial dimensions configured. Currently, two and three dimensional problems - can be solved using preCICE. The dimension is specified in the XML configuration. 
+ Returns the spatial dimensionality of the given mesh. + + Parameters + ---------- + mesh_name : string + Name of the mesh. Returns ------- dimension : int - The configured dimension. - """ - return self.thisptr.getDimensions () - - - def is_coupling_ongoing (self): + The dimensions of the given mesh. """ - Checks if the coupled simulation is still ongoing. - A coupling is ongoing as long as - - the maximum number of timesteps has not been reached, and - - the final time has not been reached. - The user should call finalize() after this function returns false. - - Returns - ------- - tag : bool - Whether the coupling is ongoing. - Notes - ----- - Previous calls: - initialize() has been called successfully. - """ - return self.thisptr.isCouplingOngoing () + return self.thisptr.getMeshDimensions (convert(mesh_name)) - def is_read_data_available (self): + def get_data_dimensions (self, mesh_name, data_name): """ - Checks if new data to be read is available. Data is classified to be new, if it has been received - while calling initialize() and before calling advance(), or in the last call of advance(). - This is always true, if a participant does not make use of subcycling, i.e. choosing smaller - timesteps than the limits returned in intitialize() and advance(). + Returns the spatial dimensionality of the given data on the given mesh. - It is allowed to read data even if this function returns false. This is not recommended - due to performance reasons. Use this function to prevent unnecessary reads. + Parameters + ---------- + mesh_name : string + Name of the mesh. + data_name : string + Name of the data. Returns ------- - tag : bool - Whether new data is available to be read. - - Notes - ----- - Previous calls: - initialize() has been called successfully. + dimension : int + The dimensions of the given data. 
""" - return self.thisptr.isReadDataAvailable () + return self.thisptr.getDataDimensions (convert(mesh_name), convert(data_name)) - def is_write_data_required (self, double computed_timestep_length): - """ - Checks if new data has to be written before calling advance(). - This is always true, if a participant does not make use of subcycling, i.e. choosing smaller - timesteps than the limits returned in intitialize() and advance(). - - It is allowed to write data even if this function returns false. This is not recommended - due to performance reasons. Use this function to prevent unnecessary writes. - Parameters - ---------- - computed_timestep_length : double - Length of timestep used by the solver. + def is_coupling_ongoing (self): + """ + Checks if the coupled simulation is still ongoing. + A coupling is ongoing as long as + - the maximum number of timesteps has not been reached, and + - the final time has not been reached. + The user should call finalize() after this function returns false. Returns ------- tag : bool - Whether new data has to be written. + Whether the coupling is ongoing. Notes ----- Previous calls: - initialize() has been called successfully. + initialize() has been called successfully. """ - return self.thisptr.isWriteDataRequired (computed_timestep_length) + return self.thisptr.isCouplingOngoing () def is_time_window_complete (self): @@ -293,131 +243,85 @@ cdef class Interface: return self.thisptr.isTimeWindowComplete () - def has_to_evaluate_surrogate_model (self): - """ - Returns whether the solver has to evaluate the surrogate model representation. - The solver may still have to evaluate the fine model representation. - DEPRECATED: Only necessary for deprecated manifold mapping. - - Returns - ------- - tag : bool - Whether the surrogate model has to be evaluated. + def get_max_time_step_size (self): """ - return self.thisptr.hasToEvaluateSurrogateModel () + Get the maximum allowed time step size of the current window. 
- - def has_to_evaluate_fine_model (self): - """ - Checks if the solver has to evaluate the fine model representation. - The solver may still have to evaluate the surrogate model representation. - DEPRECATED: Only necessary for deprecated manifold mapping. + Allows the user to query the maximum allowed time step size in the current window. + This should be used to compute the actual time step that the solver uses. Returns ------- - tag : bool - Whether the fine model has to be evaluated. + tag : double + Maximum size of time step to be computed by solver. + + Notes + ----- + Previous calls: + initialize() has been called successfully. """ - return self.thisptr.hasToEvaluateFineModel () + return self.thisptr.getMaxTimeStepSize () - # action methods - def is_action_required (self, action): + def requires_initial_data (self): """ - Checks if the provided action is required. - Some features of preCICE require a solver to perform specific actions, in order to be - in valid state for a coupled simulation. A solver is made eligible to use those features, - by querying for the required actions, performing them on demand, and calling markActionfulfilled() - to signalize preCICE the correct behavior of the solver. - - Parameters - ---------- - action : preCICE action - Name of the action. + Checks if the participant is required to provide initial data. + If true, then the participant needs to write initial data to defined vertices + prior to calling initialize(). Returns ------- tag : bool - Returns True if action is required. - """ - return self.thisptr.isActionRequired (action) - - - def mark_action_fulfilled (self, action): - """ - Indicates preCICE that a required action has been fulfilled by a solver. - - Parameters - ---------- - action : preCICE action - Name of the action. + Returns True if initial data is required. Notes ----- Previous calls: - The solver fulfilled the specified action.
+ initialize() has not yet been called """ - self.thisptr.markActionFulfilled (action) - - # mesh access + return self.thisptr.requiresInitialData () - def has_mesh(self, mesh_name): + def requires_writing_checkpoint (self): """ - Checks if the mesh with the given name is used by a solver. - - Parameters - ---------- - mesh_name : string - Name of the mesh. + Checks if the participant is required to write an iteration checkpoint. + + If true, the participant is required to write an iteration checkpoint before + calling advance(). + + preCICE refuses to proceed if writing a checkpoint is required, + but this method isn't called prior to advance(). - Returns - ------- - tag : bool - Returns true is the mesh is used. + Notes + ----- + Previous calls: + initialize() has been called """ - return self.thisptr.hasMesh (convert(mesh_name)) - + return self.thisptr.requiresWritingCheckpoint () - def get_mesh_id (self, mesh_name): + def requires_reading_checkpoint (self): """ - Returns the ID belonging to the mesh with given name. - - Parameters - ---------- - mesh_name : string - Name of the mesh. - - Returns - ------- - id : int - ID of the corresponding mesh. - - Example - ------- - >>> mesh_id = interface.get_mesh_id("MeshOne") - >>> mesh_id - 0 + Checks if the participant is required to read an iteration checkpoint. - """ - return self.thisptr.getMeshID (convert(mesh_name)) + If true, the participant is required to read an iteration checkpoint before + calling advance(). + preCICE refuses to proceed if reading a checkpoint is required, + but this method isn't called prior to advance(). - def get_mesh_ids (self): - """ - Returns the ID-set of all used meshes by this participant. + Notes + ----- + This function returns false before the first call to advance(). - Returns - ------- - id_array : numpy.ndarray - Numpy array containing all IDs. 
+ Previous calls: + initialize() has been called """ - return self.thisptr.getMeshIDs () + return self.thisptr.requiresReadingCheckpoint () + # mesh access - def get_mesh_handle(self, mesh_name): + def requires_mesh_connectivity_for (self, mesh_name): """ - Returns a handle to a created mesh. - WARNING: This function is not yet available for the Python bindings + Checks if the given mesh requires connectivity. Parameters ---------- @@ -426,20 +330,20 @@ cdef class Interface: Returns ------- - tag : object - Handle to the mesh. + tag : bool + True if mesh connectivity is required. """ - raise Exception("The API method get_mesh_handle is not yet available for the Python bindings.") + return self.thisptr.requiresMeshConnectivityFor(convert(mesh_name)) - def set_mesh_vertex(self, mesh_id, position): + def set_mesh_vertex(self, mesh_name, position): """ Creates a mesh vertex Parameters ---------- - mesh_id : int - ID of the mesh to add the vertex to. + mesh_name : str + Name of the mesh to add the vertex to. position : array_like The coordinates of the vertex. @@ -457,38 +361,43 @@ cdef class Interface: if len(position) > 0: dimensions = len(position) - assert dimensions == self.get_dimensions(), "Dimensions of vertex coordinate in set_mesh_vertex does not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions()) + assert dimensions == self.get_mesh_dimensions(mesh_name), "Dimensions of vertex coordinate in set_mesh_vertex does not match with dimensions in problem definition. 
Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_mesh_dimensions(mesh_name)) elif len(position) == 0: - dimensions = self.get_dimensions() + dimensions = self.get_mesh_dimensions(mesh_name) + + cdef vector[double] cpp_position = position + + vertex_id = self.thisptr.setMeshVertex(convert(mesh_name), cpp_position) - cdef np.ndarray[double, ndim=1] _position = np.ascontiguousarray(position, dtype=np.double) - vertex_id = self.thisptr.setMeshVertex(mesh_id, _position.data) return vertex_id - def get_mesh_vertex_size (self, mesh_id): + + def get_mesh_vertex_size (self, mesh_name): """ Returns the number of vertices of a mesh Parameters ---------- - mesh_id : int - ID of the mesh. + mesh_name : str + Name of the mesh. Returns ------- sum : int Number of vertices of the mesh. """ - return self.thisptr.getMeshVertexSize(mesh_id) - def set_mesh_vertices (self, mesh_id, positions): + return self.thisptr.getMeshVertexSize(convert(mesh_name)) + + + def set_mesh_vertices (self, mesh_name, positions): """ Creates multiple mesh vertices Parameters ---------- - mesh_id : int - ID of the mesh to add the vertices to. + mesh_name : str + Name of the mesh to add the vertices to. positions : array_like The coordinates of the vertices in a numpy array [N x D] where N = number of vertices and D = dimensions of geometry. @@ -509,21 +418,21 @@ cdef class Interface: -------- Set mesh vertices for a 2D problem with 5 mesh vertices. - >>> mesh_id = interface.get_mesh_id("MeshOne") >>> positions = np.array([[1, 1], [2, 2], [3, 3], [4, 4], [5, 5]]) >>> positions.shape (5, 2) - >>> vertex_ids = interface.set_mesh_vertices(mesh_id, positions) + >>> mesh_name = "MeshOne" + >>> vertex_ids = participant.set_mesh_vertices(mesh_name, positions) >>> vertex_ids.shape (5,) Set mesh vertices for a 3D problem with 5 mesh vertices. 
- >>> mesh_id = interface.get_mesh_id("MeshOne") >>> positions = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5]]) >>> positions.shape (5, 3) - >>> vertex_ids = interface.set_mesh_vertices(mesh_id, positions) + >>> mesh_name = "MeshOne" + >>> vertex_ids = participant.set_mesh_vertices(mesh_name, positions) >>> vertex_ids.shape (5,) """ @@ -534,137 +443,32 @@ cdef class Interface: if len(positions) > 0: size, dimensions = positions.shape - assert dimensions == self.get_dimensions(), "Dimensions of vertex coordinates in set_mesh_vertices does not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions()) + assert dimensions == self.get_mesh_dimensions(mesh_name), "Dimensions of vertex coordinates in set_mesh_vertices does not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_mesh_dimensions(mesh_name)) elif len(positions) == 0: - size = positions.shape[0] - dimensions = self.get_dimensions() - - cdef np.ndarray[double, ndim=1] _positions = np.ascontiguousarray(positions.flatten(), dtype=np.double) - cdef np.ndarray[int, ndim=1] vertex_ids = np.empty(size, dtype=np.int32) - self.thisptr.setMeshVertices (mesh_id, size, _positions.data, vertex_ids.data) - return vertex_ids - - def get_mesh_vertices(self, mesh_id, vertex_ids): - """ - Get vertex positions for multiple vertex ids from a given mesh - - Parameters - ---------- - mesh_id : int - ID of the mesh to read the vertices from. - vertex_ids : array_like - IDs of the vertices to lookup. 
- - Returns - ------- - positions : numpy.ndarray - The coordinates of the vertices in a numpy array [N x D] where - N = number of vertices and D = dimensions of geometry - - Notes - ----- - Previous calls: - count of available elements at positions matches the configured dimension * size - count of available elements at ids matches size - - Examples - -------- - Return data structure for a 2D problem with 5 vertices: - >>> mesh_id = interface.get_mesh_id("MeshOne") - >>> vertex_ids = [1, 2, 3, 4, 5] - >>> positions = interface.get_mesh_vertices(mesh_id, vertex_ids) - >>> positions.shape - (5, 2) - - Return data structure for a 3D problem with 5 vertices: - >>> mesh_id = interface.get_mesh_id("MeshOne") - >>> vertex_ids = [1, 2, 3, 4, 5] - >>> positions = interface.get_mesh_vertices(mesh_id, vertex_ids) - >>> positions.shape - (5, 3) - """ - check_array_like(vertex_ids, "vertex_ids", "get_mesh_vertices") - - cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) - size = _vertex_ids.size - cdef np.ndarray[double, ndim=1] _positions = np.empty(size * self.get_dimensions(), dtype=np.double) - self.thisptr.getMeshVertices (mesh_id, size, _vertex_ids.data, _positions.data) - return _positions.reshape((size, self.get_dimensions())) - - def get_mesh_vertex_ids_from_positions (self, mesh_id, positions): - """ - Gets mesh vertex IDs from positions. - prefer to reuse the IDs returned from calls to set_mesh_vertex() and set_mesh_vertices(). - - Parameters - ---------- - mesh_id : int - ID of the mesh to retrieve positions from. - positions : array_like - The coordinates of the vertices. Coordinates of vertices are stored in a - numpy array [N x D] where N = number of vertices and D = dimensions of geometry - - Returns - ------- - vertex_ids : numpy.ndarray - IDs of mesh vertices. 
- - Notes - ----- - Previous calls: - count of available elements at positions matches the configured dimension * size - count of available elements at ids matches size - - Examples - -------- - Get mesh vertex ids from positions for a 2D (D=2) problem with 5 (N=5) mesh vertices. - - >>> mesh_id = interface.get_mesh_id("MeshOne") - >>> positions = np.array([[1, 1], [2, 2], [3, 3], [4, 4], [5, 5]]) - >>> positions.shape - (5, 2) - >>> vertex_ids = interface.get_mesh_vertex_ids_from_positions(mesh_id, positions) - >>> vertex_ids - array([1, 2, 3, 4, 5]) + size = 0 + dimensions = self.get_mesh_dimensions(mesh_name) - Get mesh vertex ids from positions for a 3D problem with 5 vertices. + cdef vector[double] cpp_positions = positions.flatten() + cdef vector[int] cpp_ids = [-1 for _ in range(size)] - >>> mesh_id = interface.get_mesh_id("MeshOne") - >>> positions = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5]]) - >>> positions.shape - (5, 3) - >>> vertex_ids = interface.get_mesh_vertex_ids_from_positions(mesh_id, positions) - >>> vertex_ids - array([1, 2, 3, 4, 5]) - """ - check_array_like(positions, "positions", "get_mesh_vertex_ids_from_positions") + self.thisptr.setMeshVertices (convert(mesh_name), cpp_positions, cpp_ids) - if not isinstance(positions, np.ndarray): - positions = np.asarray(positions) + cdef np.ndarray[int, ndim=1] np_ids = np.array(cpp_ids, dtype=np.int32) - if len(positions) > 0: - size, dimensions = positions.shape - assert dimensions == self.get_dimensions(), "Dimensions of position coordinates in get_mesh_vertex_ids_from_positions does not match with dimensions in problem definition. 
Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions()) - elif len(positions) == 0: - size = positions.shape[0] - dimensions = self.get_dimensions() + return np_ids - cdef np.ndarray[double, ndim=1] _positions = np.ascontiguousarray(positions.flatten(), dtype=np.double) - cdef np.ndarray[int, ndim=1] vertex_ids = np.empty(int(size), dtype=np.int32) - self.thisptr.getMeshVertexIDsFromPositions (mesh_id, size, _positions.data, vertex_ids.data) - return vertex_ids - def set_mesh_edge (self, mesh_id, first_vertex_id, second_vertex_id): + def set_mesh_edge (self, mesh_name, first_vertex_id, second_vertex_id): """ Sets mesh edge from vertex IDs, returns edge ID. Parameters ---------- - mesh_id : int - ID of the mesh to add the edge to. - firstVertexID : int + mesh_name : str + Name of the mesh to add the edge to. + first_vertex_id : int ID of the first vertex of the edge. - secondVertexID : int + second_vertex_id : int ID of the second vertex of the edge. Returns @@ -675,384 +479,321 @@ cdef class Interface: Notes ----- Previous calls: - vertices with firstVertexID and secondVertexID were added to the mesh with the ID meshID + vertices with firstVertexID and secondVertexID were added to the mesh with name mesh_name """ - return self.thisptr.setMeshEdge (mesh_id, first_vertex_id, second_vertex_id) - def set_mesh_triangle (self, mesh_id, first_edge_id, second_edge_id, third_edge_id): + self.thisptr.setMeshEdge (convert(mesh_name), first_vertex_id, second_vertex_id) + + + def set_mesh_edges (self, mesh_name, vertices): """ - Sets mesh triangle from edge IDs + Creates multiple mesh edges Parameters ---------- - mesh_id : int - ID of the mesh to add the triangle to. - first_edge_id : int - ID of the first edge of the triangle. - second_edge_id : int - ID of the second edge of the triangle. - third_edge_id : int - ID of the third edge of the triangle. + mesh_name : str + Name of the mesh to add the vertices to. 
+ vertices : array_like + The IDs of the vertices in a numpy array [N x 2] where + N = number of edges and D = dimensions of geometry. - Notes - ----- - Previous calls: - edges with first_edge_id, second_edge_id, and third_edge_id were added to the mesh with the ID meshID - """ - self.thisptr.setMeshTriangle (mesh_id, first_edge_id, second_edge_id, third_edge_id) + Examples + -------- + Set mesh edges for a problem with 4 mesh vertices in the form of a square with both diagonals which are fully interconnected. - def set_mesh_triangle_with_edges (self, mesh_id, first_vertex_id, second_vertex_id, third_vertex_id): + >>> vertices = np.array([[1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4]]) + >>> vertices.shape + (6, 2) + >>> participant.set_mesh_edges(mesh_name, vertices) """ - Sets mesh triangle from vertex IDs. - WARNING: This routine is supposed to be used, when no edge information is available per se. - Edges are created on the fly within preCICE. This routine is significantly slower than the one - using edge IDs, since it needs to check, whether an edge is created already or not. + check_array_like(vertices, "vertices", "set_mesh_edges") - Parameters - ---------- - mesh_id : int - ID of the mesh to add the triangle to. - first_vertex_id : int - ID of the first vertex of the triangle. - second_vertex_id : int - ID of the second vertex of the triangle. - third_vertex_id ID : int - ID of the third vertex of the triangle. 
+ if not isinstance(vertices, np.ndarray): + vertices = np.asarray(vertices) - Notes - ----- - Previous calls: - edges with first_vertex_id, second_vertex_id, and third_vertex_id were added to the mesh with the ID meshID - """ - self.thisptr.setMeshTriangleWithEdges (mesh_id, first_vertex_id, second_vertex_id, third_vertex_id) + if len(vertices) > 0: + _, n = vertices.shape + assert n == 2, "Provided vertices are not of a [N x 2] format, but instead of a [N x {}]".format(n) + elif len(vertices) == 0: + dimensions = self.get_mesh_dimensions(mesh_name) - def set_mesh_quad (self, mesh_id, first_edge_id, second_edge_id, third_edge_id, fourth_edge_id): - """ - Sets mesh Quad from edge IDs. - WARNING: Quads are not fully implemented yet. + cdef vector[int] cpp_vertices = vertices.flatten() - Parameters - ---------- - mesh_id : int - ID of the mesh to add the Quad to. - first_edge_id : int - ID of the first edge of the Quad. - second_edge_id : int - ID of the second edge of the Quad. - third_edge_id : int - ID of the third edge of the Quad. - fourth_edge_id : int - ID of the forth edge of the Quad. + self.thisptr.setMeshEdges (convert(mesh_name), cpp_vertices) - Notes - ----- - Previous calls: - edges with first_edge_id, second_edge_id, third_edge_id, and fourth_edge_id were added - to the mesh with the ID mesh_id - """ - self.thisptr.setMeshQuad (mesh_id, first_edge_id, second_edge_id, third_edge_id, fourth_edge_id) - def set_mesh_quad_with_edges (self, mesh_id, first_vertex_id, second_vertex_id, third_vertex_id, fourth_vertex_id): + def set_mesh_triangle (self, mesh_name, first_vertex_id, second_vertex_id, third_vertex_id): """ - Sets surface mesh quadtriangle from vertex IDs. - WARNING: This routine is supposed to be used, when no edge information is available per se. Edges are - created on the fly within preCICE. This routine is significantly slower than the one using - edge IDs, since it needs to check, whether an edge is created already or not. 
+ Set a mesh triangle from vertex IDs Parameters ---------- - mesh_id : int - ID of the mesh to add the triangle to. + mesh_name : str + Name of the mesh to add the triangle to. first_vertex_id : int - ID of the first vertex of the Quad. + ID of the first vertex of the triangle. second_vertex_id : int - ID of the second vertex of the Quad. + ID of the second vertex of the triangle. third_vertex_id : int - ID of the third vertex of the Quad. - fourth_vertex_id : int - ID of the fourth vertex of the Quad. + ID of the third vertex of the triangle. Notes ----- Previous calls: - edges with first_vertex_id, second_vertex_id, third_vertex_id, and fourth_vertex_id were added - to the mesh with the ID mesh_id + vertices with first_vertex_id, second_vertex_id, and third_vertex_id were added to the mesh with the name mesh_name """ - self.thisptr.setMeshQuadWithEdges (mesh_id, first_vertex_id, second_vertex_id, third_vertex_id, fourth_vertex_id) - # data access + self.thisptr.setMeshTriangle (convert(mesh_name), first_vertex_id, second_vertex_id, third_vertex_id) - def is_mesh_connectivity_required (self, mesh_id): + + def set_mesh_triangles (self, mesh_name, vertices): """ - Checks if the given mesh requires connectivity. + Creates multiple mesh triangles Parameters ---------- - mesh_id : int - ID of the associated mesh. + mesh_name : str + Name of the mesh to add the triangles to. + vertices : array_like + The IDs of the vertices in a numpy array [N x 3] where + N = number of triangles and D = dimensions of geometry. - Returns - ------- - tag : bool - True if mesh connectivity is required. - """ - return self.thisptr.isMeshConnectivityRequired(mesh_id) + Examples + -------- + Set mesh triangles for a problem with 4 mesh vertices in the form of a square with both diagonals which are fully interconnected.
- def has_data (self, str data_name, mesh_id): + >>> vertices = np.array([[1, 2, 3], [1, 3, 4], [1, 2, 4], [2, 3, 4]]) + >>> vertices.shape + (4, 3) + >>> participant.set_mesh_triangles(mesh_name, vertices) """ - Checks if the data with given name is used by a solver and mesh. + check_array_like(vertices, "vertices", "set_mesh_triangles") - Parameters - ---------- - data_name : string - Name of the data. - mesh_id : int - ID of the associated mesh. + if not isinstance(vertices, np.ndarray): + vertices = np.asarray(vertices) - Returns - ------- - tag : bool - True if the mesh is already used. - """ - return self.thisptr.hasData(convert(data_name), mesh_id) + if len(vertices) > 0: + _, n = vertices.shape + assert n == 3, "Provided vertices are not of a [N x 3] format, but instead of a [N x {}]".format(n) + elif len(vertices) == 0: + dimensions = self.get_mesh_dimensions(mesh_name) - def get_data_id (self, str data_name, mesh_id): - """ - Returns the ID of the data associated with the given name and mesh. + cdef vector[int] cpp_vertices = vertices.flatten() - Parameters - ---------- - data_name : string - Name of the data - mesh_id : int - ID of the associated mesh. + self.thisptr.setMeshTriangles (convert(mesh_name), cpp_vertices) - Returns - ------- - data_id : int - ID of the corresponding data. - """ - return self.thisptr.getDataID (convert(data_name), mesh_id) - def map_read_data_to (self, to_mesh_id): + def set_mesh_quad (self, mesh_name, first_vertex_id, second_vertex_id, third_vertex_id, fourth_vertex_id): """ - Computes and maps all read data mapped to the mesh with given ID. - This is an explicit request to map read data to the Mesh associated with toMeshID. - It also computes the mapping if necessary. + Set a mesh Quad from vertex IDs. Parameters ---------- - to_mesh_id : int - ID of mesh to map the read data to. + mesh_name : str + Name of the mesh to add the quad to.
+ first_vertex_id : int + ID of the first vertex of the quad. + second_vertex_id : int + ID of the second vertex of the quad. + third_vertex_id : int + ID of the third vertex of the quad. + fourth_vertex_id : int + ID of the fourth vertex of the quad. Notes ----- Previous calls: - A mapping to to_mesh_id was configured. - """ - self.thisptr.mapReadDataTo (to_mesh_id) - - def map_write_data_from (self, from_mesh_id): + vertices with first_vertex_id, second_vertex_id, third_vertex_id, and fourth_vertex_id were added + to the mesh with the name mesh_name """ - Computes and maps all write data mapped from the mesh with given ID. This is an explicit request - to map write data from the Mesh associated with fromMeshID. It also computes the mapping if necessary. - Parameters - ---------- - from_mesh_id : int - ID from which to map write data. + self.thisptr.setMeshQuad (convert(mesh_name), first_vertex_id, second_vertex_id, third_vertex_id, fourth_vertex_id) - Notes - ----- - Previous calls: - A mapping from from_mesh_id was configured. - """ - self.thisptr.mapWriteDataFrom (from_mesh_id) - def write_block_vector_data (self, data_id, vertex_ids, values): + def set_mesh_quads (self, mesh_name, vertices): """ - Writes vector data given as block. This function writes values of specified vertices to a dataID. - Values are provided as a block of continuous memory. Values are stored in a numpy array [N x D] where N = number - of vertices and D = dimensions of geometry + Creates multiple mesh quads Parameters ---------- - data_id : int - Data ID to write to. - vertex_ids : array_like - Indices of the vertices. - values : array_like - Vector values of data - - Notes - ----- - Previous calls: - count of available elements at values matches the configured dimension * size - count of available elements at vertex_ids matches the given size - initialize() has been called + mesh_name : str + Name of the mesh to add the quads to.
+ vertices : array_like + The IDs of the vertices in a numpy array [N x 4] where + N = number of quads and D = dimensions of geometry. Examples -------- - Write block vector data for a 2D problem with 5 vertices: - >>> data_id = 1 - >>> vertex_ids = [1, 2, 3, 4, 5] - >>> values = np.array([[v1_x, v1_y], [v2_x, v2_y], [v3_x, v3_y], [v4_x, v4_y], [v5_x, v5_y]]) - >>> interface.write_block_vector_data(data_id, vertex_ids, values) + Set mesh quads for a problem with 4 mesh vertices in the form of a square with both diagonals which are fully interconnected. - Write block vector data for a 3D (D=3) problem with 5 (N=5) vertices: - >>> data_id = 1 - >>> vertex_ids = [1, 2, 3, 4, 5] - >>> values = np.array([[v1_x, v1_y, v1_z], [v2_x, v2_y, v2_z], [v3_x, v3_y, v3_z], [v4_x, v4_y, v4_z], [v5_x, v5_y, v5_z]]) - >>> interface.write_block_vector_data(data_id, vertex_ids, values) + >>> vertices = np.array([[1, 2, 3, 4]]) + >>> vertices.shape + (1, 4) + >>> participant.set_mesh_quads(mesh_name, vertices) """ - check_array_like(vertex_ids, "vertex_ids", "write_block_vector_data") - check_array_like(values, "values", "write_block_vector_data") + check_array_like(vertices, "vertices", "set_mesh_quads") - if not isinstance(values, np.ndarray): - values = np.asarray(values) + if not isinstance(vertices, np.ndarray): + vertices = np.asarray(vertices) - if len(values) > 0: - size, dimensions = values.shape - assert dimensions == self.get_dimensions(), "Dimensions of vector data in write_block_vector_data does not match with dimensions in problem definition.
Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions()) - if len(values) == 0: - size = 0 + if len(vertices) > 0: + _, n = vertices.shape + assert n == 4, "Provided vertices are not of a [N x 4] format, but instead of a [N x {}]".format(n) + elif len(vertices) == 0: + dimensions = self.get_mesh_dimensions(mesh_name) - cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) - cdef np.ndarray[double, ndim=1] _values = np.ascontiguousarray(values.flatten(), dtype=np.double) + cdef vector[int] cpp_vertices = vertices.flatten() - assert _values.size == size * self.get_dimensions(), "Vector data is not provided for all vertices in write_block_vector_data. Check length of input data provided. Provided size: {}, expected size: {}".format(_values.size, size * self.get_dimensions()) - assert _vertex_ids.size == size, "Vertex IDs are of incorrect length in write_block_vector_data. Check length of vertex ids input. Provided size: {}, expected size: {}".format(_vertex_ids.size, size) + self.thisptr.setMeshQuads (convert(mesh_name), cpp_vertices) - self.thisptr.writeBlockVectorData (data_id, size, _vertex_ids.data, _values.data) - def write_vector_data (self, data_id, vertex_id, value): + def set_mesh_tetrahedron (self, mesh_name, first_vertex_id, second_vertex_id, third_vertex_id, fourth_vertex_id): """ - Writes vector data to a vertex. This function writes a value of a specified vertex to a dataID. - Values are provided as a block of continuous memory. - The 2D-format of value is a numpy array of shape 2 - The 3D-format of value is a numpy array of shape 3 + Sets a mesh tetrahedron from vertex IDs. Parameters ---------- - data_id : int - ID to write to. - vertex_id : int - Index of the vertex. - value : array_like - Single vector value + mesh_name : str + Name of the mesh to add the tetrahedron to. + first_vertex_id : int + ID of the first vertex of the tetrahedron. 
+ second_vertex_id : int + ID of the second vertex of the tetrahedron. + third_vertex_id : int + ID of the third vertex of the tetrahedron. + fourth_vertex_id : int + ID of the fourth vertex of the tetrahedron. Notes ----- Previous calls: - count of available elements at value matches the configured dimension - initialize() has been called + vertices with first_vertex_id, second_vertex_id, third_vertex_id, and fourth_vertex_id were added + to the mesh with the name mesh_name + """ + + self.thisptr.setMeshTetrahedron (convert(mesh_name), first_vertex_id, second_vertex_id, third_vertex_id, fourth_vertex_id) + + + def set_mesh_tetrahedra (self, mesh_name, vertices): + """ + Creates multiple mesh tetrahedrons + + Parameters + ---------- + mesh_name : str + Name of the mesh to add the tetrahedrons to. + vertices : array_like + The IDs of the vertices in a numpy array [N x 4] where + N = number of tetrahedrons. Examples -------- - Write vector data for a 2D problem with 5 vertices: - >>> data_id = 1 - >>> vertex_id = 5 - >>> value = np.array([v5_x, v5_y]) - >>> interface.write_vector_data(data_id, vertex_id, value) + Set mesh tetrahedrons for a problem with 4 mesh vertices. - Write vector data for a 3D (D=3) problem with 5 (N=5) vertices: - >>> data_id = 1 - >>> vertex_id = 5 - >>> value = np.array([v5_x, v5_y, v5_z]) - >>> interface.write_vector_data(data_id, vertex_id, value) + >>> vertices = np.array([[1, 2, 3, 4]]) + >>> vertices.shape + (1, 4) + >>> participant.set_mesh_tetrahedra(mesh_name, vertices) """ - check_array_like(value, "value", "write_vector_data") - assert len(value) > 0, "Input vector data is empty in write_vector_data" + check_array_like(vertices, "vertices", "set_mesh_tetrahedra") - dimensions = len(value) + if not isinstance(vertices, np.ndarray): + vertices = np.asarray(vertices) - assert dimensions == self.get_dimensions(), "Dimensions of vector data in write_vector_data does not match with dimensions in problem definition. 
Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions()) + if len(vertices) > 0: + _, n = vertices.shape + assert n == 4, "Provided vertices are not of a [N x 4] format, but instead of a [N x {}]".format(n) + elif len(vertices) == 0: + dimensions = self.get_mesh_dimensions(mesh_name) - cdef np.ndarray[np.double_t, ndim=1] _value = np.ascontiguousarray(value, dtype=np.double) + cdef vector[int] cpp_vertices = vertices.flatten() - self.thisptr.writeVectorData (data_id, vertex_id, _value.data) + self.thisptr.setMeshTetrahedra (convert(mesh_name), cpp_vertices) - def write_block_scalar_data (self, data_id, vertex_ids, values): + # data access + + def write_data (self, mesh_name, data_name, vertex_ids, values): """ - Writes scalar data given as a block. This function writes values of specified vertices to a dataID. + This function writes values of specified vertices to data of a mesh. + Values are provided as a block of continuous memory defined by values. Values are stored in a numpy array [N x D] where N = number of vertices and D = dimensions of geometry. + The order of the provided data follows the order specified by vertices. Parameters ---------- - data_id : int - ID to write to. + mesh_name : str + name of the mesh to write to. + data_name : str + Data name to write to. vertex_ids : array_like Indices of the vertices. 
values : array_like - Values to be written + Values of data Notes ----- Previous calls: - count of available elements at values matches the given size + count of available elements at values matches the configured dimension * size count of available elements at vertex_ids matches the given size initialize() has been called Examples -------- - Write block scalar data for a 2D and 3D problem with 5 (N=5) vertices: - >>> data_id = 1 + Write scalar data for a 2D problem with 5 vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" >>> vertex_ids = [1, 2, 3, 4, 5] >>> values = np.array([v1, v2, v3, v4, v5]) - >>> interface.write_block_scalar_data(data_id, vertex_ids, values) + >>> participant.write_data(mesh_name, data_name, vertex_ids, values) + + Write vector data for a 2D problem with 5 vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" + >>> vertex_ids = [1, 2, 3, 4, 5] + >>> values = np.array([[v1_x, v1_y], [v2_x, v2_y], [v3_x, v3_y], [v4_x, v4_y], [v5_x, v5_y]]) + >>> participant.write_data(mesh_name, data_name, vertex_ids, values) + + Write vector data for a 3D (D=3) problem with 5 (N=5) vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" + >>> vertex_ids = [1, 2, 3, 4, 5] + >>> values = np.array([[v1_x, v1_y, v1_z], [v2_x, v2_y, v2_z], [v3_x, v3_y, v3_z], [v4_x, v4_y, v4_z], [v5_x, v5_y, v5_z]]) + >>> participant.write_data(mesh_name, data_name, vertex_ids, values) """ - check_array_like(vertex_ids, "vertex_ids", "write_block_scalar_data") - check_array_like(values, "values", "write_block_scalar_data") + check_array_like(vertex_ids, "vertex_ids", "write_data") + check_array_like(values, "values", "write_data") + + if not isinstance(values, np.ndarray): + values = np.asarray(values) - if len(values) > 0: - assert(len(vertex_ids) == len(values)) - size = len(vertex_ids) if len(values) == 0: size = 0 + elif self.get_data_dimensions(mesh_name, data_name) == 1: + size = values.flatten().shape[0] + dimensions = 1 + else: + 
assert len(values.shape) == 2, "Vector valued data has to be provided as a numpy array of shape [N x D] where N = number of vertices and D = number of dimensions." + size, dimensions = values.shape - cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) - cdef np.ndarray[double, ndim=1] _values = np.ascontiguousarray(values, dtype=np.double) - - assert _values.size == size, "Scalar data is not provided for all vertices in write_block_scalar_data. Check size of input data provided. Provided size: {}, expected size: {}".format(_values.size, size) - assert _vertex_ids.size == size, "Vertex IDs are of incorrect length in write_block_scalar_data. Check size of vertex ids input. Provided size: {}, expected size: {}".format(_vertex_ids.size, size) - self.thisptr.writeBlockScalarData (data_id, size, _vertex_ids.data, _values.data) + assert dimensions == self.get_data_dimensions(mesh_name, data_name), "Dimensions of vector data in write_data do not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_data_dimensions(mesh_name, data_name)) - def write_scalar_data (self, data_id, vertex_id, double value): - """ - Writes scalar data to a vertex - This function writes a value of a specified vertex to a dataID. + assert len(vertex_ids) == size, "Vertex IDs are of incorrect length in write_data. Check length of vertex ids input. Provided size: {}, expected size: {}".format(vertex_ids.size, size) - Parameters - ---------- - data_id : int - ID to write to. - vertex_id : int - Index of the vertex. - value : double - The value to write. 
+ cdef vector[int] cpp_ids = vertex_ids + cdef vector[double] cpp_values = values.flatten() - Notes - ----- - Previous calls: - initialize() has been called + self.thisptr.writeData (convert(mesh_name), convert(data_name), cpp_ids, cpp_values) - Examples - -------- - Write scalar data for a 2D or 3D problem with 5 vertices: - >>> data_id = 1 - >>> vertex_id = 5 - >>> value = v5 - >>> interface.write_scalar_data(data_id, vertex_id, value) - """ - self.thisptr.writeScalarData (data_id, vertex_id, value) - def read_block_vector_data (self, data_id, vertex_ids, relative_read_time=None): + def read_data (self, mesh_name, data_name, vertex_ids, relative_read_time): """ - Reads vector data into a provided block. This function reads values of specified vertices + Reads data into a provided block. This function reads values of specified vertices from a dataID. Values are read into a block of continuous memory. Parameters ---------- - data_id : int + mesh_name : str + Name of the mesh to read from. + data_name : str ID to read from. vertex_ids : array_like Indices of the vertices. 
@@ -1073,180 +814,72 @@ cdef class Interface: Examples -------- - Read block vector data for a 2D problem with 5 vertices: - >>> data_id = 1 + Read scalar data for a 2D problem with 5 vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" >>> vertex_ids = [1, 2, 3, 4, 5] - >>> values = read_block_vector_data(data_id, vertex_ids) + >>> values = read_data(mesh_name, data_name, vertex_ids) >>> values.shape - >>> (5, 2) + >>> (5, ) - Read block vector data for a 3D system with 5 vertices: - >>> data_id = 1 + Read vector data for a 2D problem with 5 vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" >>> vertex_ids = [1, 2, 3, 4, 5] - >>> values = read_block_vector_data(data_id, vertex_ids) + >>> values = read_data(mesh_name, data_name, vertex_ids) >>> values.shape - >>> (5, 3) - """ - check_array_like(vertex_ids, "vertex_ids", "read_block_vector_data") - - cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) - size = _vertex_ids.size - dimensions = self.get_dimensions() - cdef np.ndarray[np.double_t, ndim=1] _values = np.empty(size * dimensions, dtype=np.double) - if relative_read_time is None: - self.thisptr.readBlockVectorData (data_id, size, _vertex_ids.data, _values.data) - else: - self.thisptr.readBlockVectorData (data_id, size, _vertex_ids.data, relative_read_time, _values.data) - return _values.reshape((size, dimensions)) - - def read_vector_data (self, data_id, vertex_id, relative_read_time=None): - """ - Reads vector data form a vertex. This function reads a value of a specified vertex - from a dataID. - - Parameters - ---------- - data_id : int - ID to read from. - vertex_id : int - Index of the vertex. - relative_read_time : double - Point in time where data is read relative to the beginning of the current time step - - Returns - ------- - value : numpy.ndarray - Contains the read data. 
- - Notes - ----- - Previous calls: - count of available elements at value matches the configured dimension - initialize() has been called - - Examples - -------- - Read vector data for 2D problem: - >>> data_id = 1 - >>> vertex_id = 5 - >>> value = interface.read_vector_data(data_id, vertex_id) - >>> value.shape - (1, 2) - - Read vector data for 2D problem: - >>> data_id = 1 - >>> vertex_id = 5 - >>> value = interface.read_vector_data(data_id, vertex_id) - >>> value.shape - (1, 3) - """ - dimensions = self.get_dimensions() - cdef np.ndarray[double, ndim=1] _value = np.empty(dimensions, dtype=np.double) - if relative_read_time == None: - self.thisptr.readVectorData (data_id, vertex_id, _value.data) - else: - self.thisptr.readVectorData (data_id, vertex_id, relative_read_time, _value.data) - return _value - - def read_block_scalar_data (self, data_id, vertex_ids, relative_read_time=None): - """ - Reads scalar data as a block. This function reads values of specified vertices from a dataID. - Values are provided as a block of continuous memory. - - Parameters - ---------- - data_id : int - ID to read from. - vertex_ids : array_like - Indices of the vertices. - relative_read_time : double - Point in time where data is read relative to the beginning of the current time step - - Returns - ------- - values : numpy.ndarray - Contains the read data. 
- - Notes - ----- - Previous calls: - count of available elements at values matches the given size - count of available elements at vertex_ids matches the given size - initialize() has been called + >>> (5, 2) - Examples - -------- - Read block scalar data for 2D and 3D problems with 5 vertices: - >>> data_id = 1 + Read vector data for a 3D system with 5 vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" >>> vertex_ids = [1, 2, 3, 4, 5] - >>> values = interface.read_block_scalar_data(data_id, vertex_ids) - >>> values.size - >>> 5 - + >>> values = read_data(mesh_name, data_name, vertex_ids) + >>> values.shape + >>> (5, 3) """ - check_array_like(vertex_ids, "vertex_ids", "read_block_scalar_data") + check_array_like(vertex_ids, "vertex_ids", "read_data") - cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) - size = _vertex_ids.size - cdef np.ndarray[double, ndim=1] _values = np.empty(size, dtype=np.double) - if relative_read_time == None: - self.thisptr.readBlockScalarData (data_id, size, _vertex_ids.data, _values.data) + if len(vertex_ids) == 0: + size = 0 + dimensions = self.get_data_dimensions(mesh_name, data_name) + elif self.get_data_dimensions(mesh_name, data_name) == 1: + size = len(vertex_ids) + dimensions = 1 else: - self.thisptr.readBlockScalarData (data_id, size, _vertex_ids.data, relative_read_time, _values.data) - - return _values - - def read_scalar_data (self, data_id, vertex_id, relative_read_time=None): - """ - Reads scalar data of a vertex. This function needs a value of a specified vertex from a dataID. + size = len(vertex_ids) + dimensions = self.get_data_dimensions(mesh_name, data_name) - Parameters - ---------- - data_id : int - ID to read from. - vertex_id : int - Index of the vertex. 
- relative_read_time : double - Point in time where data is read relative to the beginning of the current time step + cdef vector[int] cpp_ids = vertex_ids + cdef vector[double] cpp_values = [-1 for _ in range(size * dimensions)] - Returns - ------- - value : double - Contains the read value + self.thisptr.readData (convert(mesh_name), convert(data_name), cpp_ids, relative_read_time, cpp_values) - Notes - ----- - Previous calls: - initialize() has been called. + cdef np.ndarray[double, ndim=1] np_values = np.array(cpp_values, dtype=np.double) - Examples - -------- - Read scalar data for 2D and 3D problems: - >>> data_id = 1 - >>> vertex_id = 5 - >>> value = interface.read_scalar_data(data_id, vertex_id) - """ - cdef double _value - if relative_read_time == None: - self.thisptr.readScalarData (data_id, vertex_id, _value) + if len(vertex_ids) == 0: + return np_values.reshape((size)) + elif self.get_data_dimensions(mesh_name, data_name) == 1: + return np_values.reshape((size)) else: - self.thisptr.readScalarData (data_id, vertex_id, relative_read_time, _value) + return np_values.reshape((size, dimensions)) - return _value - def write_block_vector_gradient_data (self, data_id, vertex_ids, gradientValues): + def write_gradient_data (self, mesh_name, data_name, vertex_ids, gradients): """ - Writes vector gradient data given as block. This function writes gradient values of specified vertices to a dataID. + Writes gradient data given as block. This function writes gradient values of specified vertices to a dataID. Values are provided as a block of continuous memory. Values are stored in a numpy array [N x D] where N = number of vertices and D = number of gradient components. Parameters ---------- - data_id : int - Data ID to write to. + mesh_name : str + Name of the mesh to write to. + data_name : str + Data name to write to. vertex_ids : array_like Indices of the vertices. 
- gradientValues : array_like + gradients : array_like Gradient values differentiated in the spacial direction (dx, dy) for 2D space, (dx, dy, dz) for 3D space Notes @@ -1259,221 +892,69 @@ cdef class Interface: Examples -------- - Write block gradient vector data for a 2D problem with 2 vertices: - >>> data_id = 1 + Write gradient vector data for a 2D problem with 2 vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" >>> vertex_ids = [1, 2] - >>> gradientValues = np.array([[v1x_dx, v1y_dx, v1x_dy, v1y_dy], [v2x_dx, v2y_dx, v2x_dy, v2y_dy]]) - >>> interface.write_block_vector_gradient_data(data_id, vertex_ids, gradientValues) + >>> gradients = np.array([[v1x_dx, v1y_dx, v1x_dy, v1y_dy], [v2x_dx, v2y_dx, v2x_dy, v2y_dy]]) + >>> participant.write_gradient_data(mesh_name, data_name, vertex_ids, gradients) - Write block vector data for a 3D (D=3) problem with 2 (N=2) vertices: - >>> data_id = 1 + Write vector data for a 3D problem with 2 vertices: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" >>> vertex_ids = [1, 2] - >>> gradientValues = np.array([[v1x_dx, v1y_dx, v1z_dx, v1x_dy, v1y_dy, v1z_dy, v1x_dz, v1y_dz, v1z_dz], [v2x_dx, v2y_dx, v2z_dx, v2x_dy, v2y_dy, v2z_dy, v2x_dz, v2y_dz, v2z_dz]]) - >>> interface.write_block_vector_gradient_data(data_id, vertex_ids, gradientValues) + >>> gradients = np.array([[v1x_dx, v1y_dx, v1z_dx, v1x_dy, v1y_dy, v1z_dy, v1x_dz, v1y_dz, v1z_dz], [v2x_dx, v2y_dx, v2z_dx, v2x_dy, v2y_dy, v2z_dy, v2x_dz, v2y_dz, v2z_dz]]) + >>> participant.write_gradient_data(mesh_name, data_name, vertex_ids, gradients) """ - check_array_like(vertex_ids, "vertex_ids", "write_block_vector_gradient_data") - check_array_like(gradientValues, "gradientValues", "write_block_vector_gradient_data") + check_array_like(vertex_ids, "vertex_ids", "write_gradient_data") + check_array_like(gradients, "gradients", "write_gradient_data") - if not isinstance(gradientValues, np.ndarray): - gradientValues = np.asarray(gradientValues) + if not 
isinstance(gradients, np.ndarray): + gradients = np.asarray(gradients) - if len(gradientValues) > 0: - size, dimensions = gradientValues.shape - assert dimensions == self.get_dimensions() * self.get_dimensions(), "Dimensions of vector data in write_block_vector_gradient_data does not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions() * self.get_dimensions()) - if len(gradientValues) == 0: + if len(gradients) > 0: + size, dimensions = gradients.shape + assert dimensions == self.get_mesh_dimensions(mesh_name) * self.get_data_dimensions(mesh_name, data_name), "Dimensions of vector data in write_gradient_data does not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_mesh_dimensions(mesh_name) * self.get_data_dimensions (mesh_name, data_name)) + if len(gradients) == 0: size = 0 - cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) - cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) - - assert _gradientValues.size == size * self.get_dimensions() * self.get_dimensions(), "Dimension of vector gradient data provided in write_block_vector_gradient_data does not match problem definition. Check length of input data provided. Provided size: {}, expected size: {}".format(_gradientValues.size, size * self.get_dimensions() * self.get_dimensions()) - assert _vertex_ids.size == size, "Vertex IDs are of incorrect length in write_block_vector_gradient_data. Check length of vertex ids input. 
Provided size: {}, expected size: {}".format(_vertex_ids.size, size) + cdef vector[int] cpp_vertex_ids = vertex_ids + cdef vector[double] cpp_gradients = gradients.flatten() - self.thisptr.writeBlockVectorGradientData (data_id, size, _vertex_ids.data, _gradientValues.data) + assert cpp_gradients.size() == size * self.get_mesh_dimensions(mesh_name) * self.get_data_dimensions (mesh_name, data_name), "Dimension of gradient data provided in write_gradient_data does not match problem definition. Check length of input data provided. Provided size: {}, expected size: {}".format(cpp_gradients.size(), size * self.get_mesh_dimensions(mesh_name) * self.get_data_dimensions (mesh_name, data_name)) + assert cpp_vertex_ids.size() == size, "Vertex IDs are of incorrect length in write_gradient_data. Check length of vertex ids input. Provided size: {}, expected size: {}".format(cpp_vertex_ids.size(), size) - def write_scalar_gradient_data (self, data_id, vertex_id, gradientValues): - """ - Writes scalar gradient data to a vertex - This function writes the corresponding gradient matrix value of a specified vertex to a dataID. - - The gradients need to be provided in the following format: - - The 2D-format of gradientValues is (v_dx, v_dy) vector corresponding to the data block v = (v) - differentiated respectively in x-direction dx and y-direction dy - - The 3D-format of gradientValues is (v_dx, v_dy, v_dz) vector - corresponding to the data block v = (v) differentiated respectively in spatial directions x-direction dx and y-direction dy and z-direction dz - - Parameters - ---------- - data_id : int - ID to write to. - vertex_id : int - Index of the vertex. - gradientValue : array_like - A vector of the gradient values. 
- - Notes - ----- - Count of available elements at value matches the configured dimension - Vertex with dataID exists and contains data - Data with dataID has attribute hasGradient = true - - Previous calls: - initialize() has been called - - Examples - -------- - Write scalar data for a 2D problem: - >>> data_id = 1 - >>> vertex_id = 5 - >>> gradientValue = [v5_dx, v5_dy] - >>> interface.write_scalar_gradient_data(data_id, vertex_id, gradientValue) - """ - - check_array_like(gradientValues, "gradientValues", "write_scalar_gradient_data") - - if not isinstance(gradientValues, np.ndarray): - gradientValues = np.asarray(gradientValues) + self.thisptr.writeGradientData (convert(mesh_name), convert(data_name), cpp_vertex_ids, cpp_gradients) - cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) - assert _gradientValues.size == self.get_dimensions(), "Vector data provided for vertex {} in write_scalar_gradient_data does not match problem definition. Check length of input data provided. Provided size: {}, expected size: {}".format(_gradientValues.size, self.get_dimensions()) - - self.thisptr.writeScalarGradientData(data_id, vertex_id, _gradientValues.data) - - def write_vector_gradient_data (self, data_id, vertex_id, gradientValues): - """ - Writes vector gradient data to a vertex - This function writes the corresponding gradient matrix value of a specified vertex to a dataID. 
- - The gradients need to be provided in the following format: - - The 2D-format of \p gradientValues is (vx_dx, vy_dx, vx_dy, vy_dy) vector corresponding to the data block v = (vx, vy) - differentiated respectively in x-direction dx and y-direction dy - - The 3D-format of \p gradientValues is (vx_dx, vy_dx, vz_dx, vx_dy, vy_dy, vz_dy, vx_dz, vy_dz, vz_dz) vector - corresponding to the data block v = (vx, vy, vz) differentiated respectively in spatial directions x-direction dx and y-direction dy and z-direction dz - - Parameters - ---------- - data_id : int - ID to write to. - vertex_id : int - Index of the vertex. - gradientValue : array_like - A vector of the gradient values. - - Notes - ----- - Count of available elements at value matches the configured dimension - Vertex with dataID exists and contains data - Data with dataID has attribute hasGradient = true - - Previous calls: - initialize() has been called - - Examples - -------- - Write scalar data for a 2D problem: - >>> data_id = 1 - >>> vertex_id = 5 - >>> gradientValue = [v5x_dx, v5y_dx, v5x_dy,v5y_dy] - >>> interface.write_vector_gradient_data(data_id, vertex_id, gradientValue) - """ - - check_array_like(gradientValues, "gradientValues", "write_vector_gradient_data") - - if not isinstance(gradientValues, np.ndarray): - gradientValues = np.asarray(gradientValues) - - cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) - - assert _gradientValues.size == self.get_dimensions() * self.get_dimensions(), "Dimensions of vector gradient data provided for vertex {} in write_vector_gradient_data does not match problem definition. Check length of input data provided. 
Provided size: {}, expected size: {}".format(_gradientValues.size, self.get_dimensions() * self.get_dimensions()) - - self.thisptr.writeVectorGradientData(data_id, vertex_id, _gradientValues.data) - - def write_block_scalar_gradient_data (self, data_id, vertex_ids, gradientValues): - """ - Writes scalar gradient data given as block. This function writes values of specified vertices to a dataID. - Values are provided as a block of continuous memory. Values are stored in a numpy array [N x D] where N = number - of vertices and D = dimensions of geometry. - - Parameters - ---------- - data_id : int - Data ID to write to. - vertex_ids : array_like - Indices of the vertices. - gradientValues : array_like - Gradient values differentiated in the spacial direction (dx, dy) for 2D space, (dx, dy, dz) for 3D space - - Notes - ----- - Previous calls: - Count of available elements at values matches the configured dimension - Count of available elements at vertex_ids matches the given size - Initialize() has been called - Data with dataID has attribute hasGradient = true - - Examples - -------- - Write block gradient scalar data for a 2D problem with 2 vertices: - >>> data_id = 1 - >>> vertex_ids = [1, 2] - >>> gradientValues = np.array([[v1_dx, v1_dy], [v2_dx, v2_dy]]) - >>> interface.write_block_scalar_gradient_data(data_id, vertex_ids, gradientValues) - - Write block scalar data for a 3D (D=3) problem with 2 (N=2) vertices: - >>> data_id = 1 - >>> vertex_ids = [1, 2] - >>> values = np.array([[v1_dx, v1_dy, v1x_dz], [v2_dx, v2_dy, v2_dz]]) - >>> interface.write_block_scalar_gradient_data(data_id, vertex_ids, values) - """ - check_array_like(vertex_ids, "vertex_ids", "write_block_scalar_gradient_data") - check_array_like(gradientValues, "gradientValues", "write_block_sclar_gradient_data") - - if not isinstance(gradientValues, np.ndarray): - gradientValues = np.asarray(gradientValues) - - if len(gradientValues) > 0: - size, dimensions = gradientValues.shape - assert dimensions 
== self.get_dimensions() , "Dimensions of scalar gradient data provided in write_block_scalar_gradient_data does not match with dimensions in problem definition. Provided dimensions: {}, expected dimensions: {}".format(dimensions, self.get_dimensions()) - if len(gradientValues) == 0: - size = 0 - - cdef np.ndarray[int, ndim=1] _vertex_ids = np.ascontiguousarray(vertex_ids, dtype=np.int32) - cdef np.ndarray[double, ndim=1] _gradientValues = np.ascontiguousarray(gradientValues.flatten(), dtype=np.double) - - assert _gradientValues.size == size * self.get_dimensions(), "Scalar gradient data is not provided for all vertices in write_block_scalar_gradient_data. Check length of input data provided. Provided size: {}, expected size: {}".format(_gradientValues.size, size * self.get_dimensions()) - assert _vertex_ids.size == size, "Vertex IDs are of incorrect length in write_block_scalar_gradient_data. Check length of vertex ids input. Provided size: {}, expected size: {}".format(_vertex_ids.size, size) - - self.thisptr.writeBlockScalarGradientData (data_id, size, _vertex_ids.data, _gradientValues.data) - - def is_gradient_data_required(self,data_id): + def requires_gradient_data_for(self, mesh_name, data_name): """ Checks if the given data set requires gradient data. We check if the data object has been intialized with the gradient flag. Parameters ---------- - data_id : int - Data ID to check. + mesh_name : str + Mesh name to check. + data_name : str + Data name to check. Returns ------- bool - True if gradient data is required for a dataID. + True if gradient data is required for a data. 
 Examples -------- - Check if gradient data is required for a dataID: - >>> data_id = 1 - >>> interface.is_gradient_data_required(data_id) + Check if gradient data is required for a data: + >>> mesh_name = "MeshOne" + >>> data_name = "DataOne" + >>> participant.requires_gradient_data_for(mesh_name, data_name) """ - return self.thisptr.isGradientDataRequired(data_id) + return self.thisptr.requiresGradientDataFor(convert(mesh_name), convert(data_name)) - def set_mesh_access_region (self, mesh_id, bounding_box): + + def set_mesh_access_region (self, mesh_name, bounding_box): """ This function is required if you don't want to use the mapping schemes in preCICE, but rather want to use your own solver for data mapping. As opposed to the usual preCICE mapping, only a @@ -1486,8 +967,8 @@ cdef class Interface: Parameters ---------- - mesh_id : int - ID of the mesh you want to access through the bounding box + mesh_name : str + Name of the mesh you want to access through the bounding box bounding_box : array_like Axis aligned bounding box. Example for 3D the format: [x_min, x_max, y_min, y_max, z_min, z_max] @@ -1516,8 +997,6 @@ cdef class Interface: 0.5, i.e. the defined access region as computed through the involved provided mesh is by 50% enlarged. """ - warnings.warn("The function set_mesh_access_region is still experimental.") - check_array_like(bounding_box, "bounding_box", "set_mesh_access_region") if not isinstance(bounding_box, np.ndarray): @@ -1525,39 +1004,42 @@ cdef class Interface: assert len(bounding_box) > 0, "Bounding box cannot be empty." - assert len(bounding_box) == (self.get_dimensions() * 2), "Dimensions of bounding box in set_mesh_access_region does not match with dimensions in problem definition." + assert len(bounding_box) == (self.get_mesh_dimensions(mesh_name) * 2), "Dimensions of bounding box in set_mesh_access_region does not match with dimensions in problem definition." 
- cdef np.ndarray[double, ndim=1] _bounding_box = np.ascontiguousarray(bounding_box, dtype=np.double) + cdef vector[double] cpp_bounding_box = list(bounding_box) - self.thisptr.setMeshAccessRegion(mesh_id, _bounding_box.data) + self.thisptr.setMeshAccessRegion(convert(mesh_name), cpp_bounding_box) - def get_mesh_vertices_and_ids (self, mesh_id): + + def get_mesh_vertex_ids_and_coordinates (self, mesh_name): """ Iterating over the region of interest defined by bounding boxes and reading the corresponding coordinates omitting the mapping. This function is still experimental. Parameters ---------- - mesh_id : int - Corresponding mesh ID + mesh_name : str + Corresponding mesh name Returns ------- ids : numpy.ndarray - Vertex IDs correspdoning to the coordinates + Vertex IDs corresponding to the coordinates coordinates : numpy.ndarray he coordinates associated to the IDs and corresponding data values (dim * size) """ - warnings.warn("The function get_mesh_vertices_and_ids is still experimental.") + size = self.get_mesh_vertex_size(mesh_name) + dimensions = self.get_mesh_dimensions(mesh_name) + + cdef vector[int] cpp_ids = [-1 for _ in range(size)] + cdef vector[double] cpp_coordinates = [-1 for _ in range(size * dimensions)] - size = self.get_mesh_vertex_size(mesh_id) - cdef np.ndarray[int, ndim=1] _ids = np.empty(size, dtype=np.int32) - dimensions = self.get_dimensions() - cdef np.ndarray[double, ndim=1] _coordinates = np.empty(size*dimensions, dtype=np.double) + self.thisptr.getMeshVertexIDsAndCoordinates(convert(mesh_name), cpp_ids, cpp_coordinates) - self.thisptr.getMeshVerticesAndIDs(mesh_id, size, _ids.data, _coordinates.data) + cdef np.ndarray[int, ndim=1] np_ids = np.array(cpp_ids, dtype=np.int32) + cdef np.ndarray[double, ndim=1] np_coordinates = np.array(cpp_coordinates, dtype=np.double) - return _ids, _coordinates.reshape((size, dimensions)) + return np_ids, np_coordinates.reshape((size, dimensions)) def get_version_information (): """ @@ -1565,28 +1047,4 @@ 
def get_version_information (): ------- Current preCICE version information """ - return SolverInterface.getVersionInformation() - -def action_write_initial_data (): - """ - Returns - ------- - Name of action for writing initial data - """ - return SolverInterface.actionWriteInitialData() - -def action_write_iteration_checkpoint (): - """ - Returns - ------- - Name of action for writing iteration checkpoint - """ - return SolverInterface.actionWriteIterationCheckpoint() - -def action_read_iteration_checkpoint (): - """ - Returns - ------- - Name of action for reading iteration checkpoint - """ - return SolverInterface.actionReadIterationCheckpoint() + return CppParticipant.getVersionInformation() diff --git a/docs/MigrationGuide.md b/docs/MigrationGuide.md index b0824070..46d7d610 100644 --- a/docs/MigrationGuide.md +++ b/docs/MigrationGuide.md @@ -1,11 +1,10 @@ -Migration Guide for Python language bindings for preCICE version 2.0 ------------------------------------- +# Migration Guide for Python language bindings for preCICE version 2.0 -# Steps to move from old Python API to the new API +## Steps to move from old Python API to the new API ### 1. Python language bindings moved to a new repository in the preCICE Project -Previously, the Python language bindings were part of the repository [`precice/precice`](https://github.com/precice/precice). +Previously, the Python language bindings were part of the repository [`precice/precice`](https://github.com/precice/precice). The bindings have now been moved to the independent repository [`precice/python-bindings`](https://github.com/precice/python-bindings). The installation procedure is the same as before. Please refer to the [README](https://github.com/precice/python-bindings/blob/develop/README.md). @@ -16,13 +15,15 @@ The initialization of the `Interface` object now initializes the solver and also file provided by the user. 
**Old:** Before preCICE Version 2 you had to call: -``` + +```python interface = precice.Interface(solverName, processRank, processSize) interface.configure(configFileName) ``` **New:** The two commands have now been combined into a single one: -``` + +```python interface = precice.Interface(solverName, configFileName, processRank, processSize) ``` @@ -33,14 +34,17 @@ Unlike the old bindings, API calls now do not need the array size to be passed a For example let us consider the call `write_block_vector_data`: **Old:** The previous call was: -``` + +```python interface.write_block_vector_data(writeDataID, writeDataSize, vertexIDs, writeDataArray) ``` **New:** The new function call is: -``` + +```python interface.write_block_vector_data(writeDataID, vertexIDs, writeDataArray) ``` + The same change is applied for all other calls which work with arrays of data. ### 4. API functions use a return value, if appropriate @@ -50,16 +54,20 @@ In older versions of the python bindings arrays were modified by the API in a ca For example let us consider the interface function `set_mesh_vertices`. `set_mesh_vertices` is used to register vertices for a mesh and it returns an array of `vertexIDs`. **Old:** The old signature of this function was: -``` + +```python vertexIDs = np.zeros(numberofVertices) interface.set_mesh_vertices(meshID, numberofVertices, grid, vertexIDs) ``` + Note that `vertexIDs` is passed as an argument to the function. **New:** This has now been changed to: -``` + +```python vertexIDs = interface.set_mesh_vertices(meshID, grid) ``` + Here, `vertexIDs` is directly returned by `set_mesh_vertices`. The same change has been applied to the functions `read_block_scalar_data` and `read_block_vector_data`. 
diff --git a/docs/ReleaseGuide.md b/docs/ReleaseGuide.md index e0c0be86..23c952fa 100644 --- a/docs/ReleaseGuide.md +++ b/docs/ReleaseGuide.md @@ -1,4 +1,5 @@ -## Guide to release new version of python-bindings +# Guide to release new version of python-bindings + The developer who is releasing a new version of the python-bindings is expected to follow this work flow: The release of the `python-bindings` repository is made directly from a release branch called `python-bindings-v2.1.1.1`. This branch is mainly needed to help other developers with testing. @@ -12,7 +13,7 @@ The release of the `python-bindings` repository is made directly from a release * `CHANGELOG.md` on `python-bindings-v2.1.1.1`. * There is no need to bump the version anywhere else, since we use the [python-versioneer](https://github.com/python-versioneer/python-versioneer/) for maintaining the version everywhere else. -4. [Draft a New Release](https://github.com/precice/python-bindings/releases/new) in the `Releases` section of the repository page in a web browser. The release tag needs to be the exact version number (i.e.`v2.1.1.1` or `v2.1.1.1rc1`, compare to [existing tags](https://github.com/precice/python-bindings/tags)). Use `@target:master`. Release title is also the version number (i.e. `v2.1.1.1` or `v2.1.1.1rc1`, compare to [existing releases](https://github.com/precice/python-bindings/tags)). +4. [Draft a New Release](https://github.com/precice/python-bindings/releases/new) in the `Releases` section of the repository page in a web browser. The release tag needs to be the exact version number (i.e.`v2.1.1.1` or `v2.1.1.1rc1`, compare to [existing tags](https://github.com/precice/python-bindings/tags)). Use `@target:master`. Release title is also the version number (i.e. `v2.1.1.1` or `v2.1.1.1rc1`, compare to [existing releases](https://github.com/precice/python-bindings/tags)). 
* *Note:* We use the [python-versioneer](https://github.com/python-versioneer/python-versioneer/) for maintaining the version. Therefore the tag directly defines the version in all relevant places. * *Note:* If it is a pre-release then the option *This is a pre-release* needs to be selected at the bottom of the page. Use `@target:python-bindings-v2.1.1.1` for a pre-release, since we will never merge a pre-release into master. @@ -28,23 +29,23 @@ The release of the `python-bindings` repository is made directly from a release 7. Add an empty commit on master via `git checkout master`, then `git commit --allow-empty -m "post-tag bump"`. Check that everything is in order via `git log`. Important: The `tag` and `origin/master` should not point to the same commit. For example: -``` -commit 44b715dde4e3194fa69e61045089ca4ec6925fe3 (HEAD -> master, origin/master) -Author: Benjamin Rodenberg -Date: Wed Oct 20 10:52:41 2021 +0200 + ```bash + commit 44b715dde4e3194fa69e61045089ca4ec6925fe3 (HEAD -> master, origin/master) + Author: Benjamin Rodenberg + Date: Wed Oct 20 10:52:41 2021 +0200 + + post-tag bump - post-tag bump + commit d2645cc51f84ad5eda43b9c673400aada8e1505a (tag: v2.3.0.1) + Merge: 2039557 aca2354 + Author: Benjamin Rodenberg + Date: Tue Oct 19 12:57:24 2021 +0200 -commit d2645cc51f84ad5eda43b9c673400aada8e1505a (tag: v2.3.0.1) -Merge: 2039557 aca2354 -Author: Benjamin Rodenberg -Date: Tue Oct 19 12:57:24 2021 +0200 + Merge pull request #132 from precice/python-bindings-v2.3.0.1 - Merge pull request #132 from precice/python-bindings-v2.3.0.1 - - Release v2.3.0.1 -``` + Release v2.3.0.1 + ``` -For more details refer to https://github.com/precice/python-bindings/issues/109 and https://github.com/python-versioneer/python-versioneer/issues/217. + For more details refer to https://github.com/precice/python-bindings/issues/109 and https://github.com/python-versioneer/python-versioneer/issues/217. 8. Update Spack package (refer to `python-bindings/spack/README.md`). 
diff --git a/examples/solverdummy/README.md b/examples/solverdummy/README.md index db565ada..c1abc86b 100644 --- a/examples/solverdummy/README.md +++ b/examples/solverdummy/README.md @@ -1,15 +1,18 @@ -# Install Dependencies +# Solverdummies + +## Install Dependencies * [preCICE](https://github.com/precice/precice) * [python bindings](https://github.com/precice/python-bindings) * Run in this directory `pip3 install --user -r requirements.txt` -# Run +## Run You can test the dummy solver by coupling two instances with each other. Open two terminals and run - * `python3 solverdummy.py precice-config.xml SolverOne` - * `python3 solverdummy.py precice-config.xml SolverTwo` -# Next Steps +* `python3 solverdummy.py precice-config.xml SolverOne` +* `python3 solverdummy.py precice-config.xml SolverTwo` + +## Next Steps If you want to couple any other solver against this dummy solver be sure to adjust the preCICE configuration (participant names, mesh names, data names etc.) to the needs of your solver, compare our [step-by-step guide for new adapters](https://github.com/precice/precice/wiki/Adapter-Example). 
diff --git a/examples/solverdummy/precice-config.xml b/examples/solverdummy/precice-config.xml index d8e45221..28fd5246 100644 --- a/examples/solverdummy/precice-config.xml +++ b/examples/solverdummy/precice-config.xml @@ -1,52 +1,59 @@ - - + - - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/solverdummy/solverdummy.py b/examples/solverdummy/solverdummy.py index 887f3eb5..7143617d 100644 --- a/examples/solverdummy/solverdummy.py +++ b/examples/solverdummy/solverdummy.py @@ -20,69 +20,59 @@ participant_name = args.participantName if participant_name == 'SolverOne': - write_data_name = 'dataOne' - read_data_name = 'dataTwo' - mesh_name = 'MeshOne' + write_data_name = 'Data-One' + read_data_name = 'Data-Two' + mesh_name = 'SolverOne-Mesh' if participant_name == 'SolverTwo': - read_data_name = 'dataOne' - write_data_name = 'dataTwo' - mesh_name = 'MeshTwo' + read_data_name = 'Data-One' + write_data_name = 'Data-Two' + mesh_name = 'SolverTwo-Mesh' num_vertices = 3 # Number of vertices solver_process_index = 0 solver_process_size = 1 -interface = precice.Interface(participant_name, configuration_file_name, - solver_process_index, solver_process_size) +participant = precice.Participant(participant_name, configuration_file_name, + solver_process_index, solver_process_size) -mesh_id = interface.get_mesh_id(mesh_name) +assert (participant.requires_mesh_connectivity_for(mesh_name) is False) -assert (interface.is_mesh_connectivity_required(mesh_id) is False) - -dimensions = interface.get_dimensions() - -vertices = np.zeros((num_vertices, dimensions)) -read_data = np.zeros((num_vertices, dimensions)) -write_data = np.zeros((num_vertices, dimensions)) +vertices = np.zeros((num_vertices, participant.get_mesh_dimensions(mesh_name))) +read_data = np.zeros((num_vertices, participant.get_data_dimensions(mesh_name, read_data_name))) +write_data = 
np.zeros((num_vertices, participant.get_data_dimensions(mesh_name, write_data_name))) for x in range(num_vertices): - for y in range(0, dimensions): + for y in range(participant.get_mesh_dimensions(mesh_name)): vertices[x, y] = x + + for y in range(participant.get_data_dimensions(mesh_name, read_data_name)): read_data[x, y] = x + + for y in range(participant.get_data_dimensions(mesh_name, write_data_name)): write_data[x, y] = x -vertex_ids = interface.set_mesh_vertices(mesh_id, vertices) -read_data_id = interface.get_data_id(read_data_name, mesh_id) -write_data_id = interface.get_data_id(write_data_name, mesh_id) +vertex_ids = participant.set_mesh_vertices(mesh_name, vertices) -dt = interface.initialize() +participant.initialize() -while interface.is_coupling_ongoing(): - if interface.is_action_required( - precice.action_write_iteration_checkpoint()): +while participant.is_coupling_ongoing(): + if participant.requires_writing_checkpoint(): print("DUMMY: Writing iteration checkpoint") - interface.mark_action_fulfilled( - precice.action_write_iteration_checkpoint()) - if interface.is_read_data_available(): - read_data = interface.read_block_vector_data(read_data_id, vertex_ids) + dt = participant.get_max_time_step_size() + read_data = participant.read_data(mesh_name, read_data_name, vertex_ids, dt) write_data = read_data + 1 - if interface.is_write_data_required(dt): - interface.write_block_vector_data( - write_data_id, vertex_ids, write_data) + participant.write_data(mesh_name, write_data_name, vertex_ids, write_data) print("DUMMY: Advancing in time") - dt = interface.advance(dt) + participant.advance(dt) - if interface.is_action_required( - precice.action_read_iteration_checkpoint()): + if participant.requires_reading_checkpoint(): print("DUMMY: Reading iteration checkpoint") - interface.mark_action_fulfilled( - precice.action_read_iteration_checkpoint()) -interface.finalize() +participant.finalize() print("DUMMY: Closing python solver dummy...") diff --git 
a/precice/__init__.py b/precice/__init__.py index 70ffa0a0..b5df19cf 100644 --- a/precice/__init__.py +++ b/precice/__init__.py @@ -1,15 +1,12 @@ +from ._version import get_versions __version__ = "unknown" import warnings -from cyprecice import Interface, action_read_iteration_checkpoint, action_write_iteration_checkpoint, action_write_initial_data, get_version_information +from cyprecice import Participant, get_version_information -def SolverInterface(*args): - """ - This is just a dummy function to avoid wrong usage of the interface. Please use precice.Interface, if you want to establish a connection to preCICE. See https://github.com/precice/python-bindings/issues/92 for more information. - """ - warnings.warn("please use precice.Interface to create the interface to C++ preCICE. Note that this function (precice.SolverInterface) does not do anything but throwing this warning. See https://github.com/precice/python-bindings/issues/92 for more information.") - +__version__ = get_versions()['version'] +del get_versions from . 
import _version __version__ = _version.get_versions()['version'] diff --git a/setup.py b/setup.py index 29484ddf..a82e8569 100644 --- a/setup.py +++ b/setup.py @@ -64,7 +64,7 @@ def get_extensions(is_test): compile_args = [] link_args = [] - compile_args.append("-std=c++11") + compile_args.append("-std=c++17") compile_args.append("-I{}".format(numpy.get_include())) bindings_sources = [os.path.join(PYTHON_BINDINGS_PATH, "cyprecice", @@ -76,7 +76,7 @@ def get_extensions(is_test): link_args += pkgconfig.libs('libprecice').split() if is_test: bindings_sources.append(os.path.join(PYTHON_BINDINGS_PATH, "test", - "SolverInterface.cpp")) + "Participant.cpp")) return [ Extension( @@ -148,7 +148,7 @@ def initialize_options(self): author_email='info@precice.org', license='LGPL-3.0', python_requires='>=3', - install_requires=['numpy', 'mpi4py'], + install_requires=['numpy', 'mpi4py', 'Cython'], # mpi4py is only needed, if preCICE was compiled with MPI # see https://github.com/precice/python-bindings/issues/8 packages=['precice'], diff --git a/spack/repo/packages/py-pyprecice/package.py b/spack/repo/packages/py-pyprecice/package.py index e4af040e..56a744f9 100644 --- a/spack/repo/packages/py-pyprecice/package.py +++ b/spack/repo/packages/py-pyprecice/package.py @@ -1,4 +1,4 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -15,10 +15,11 @@ class PyPyprecice(PythonPackage): homepage = "https://precice.org" git = "https://github.com/precice/python-bindings.git" url = "https://github.com/precice/python-bindings/archive/v2.4.0.0.tar.gz" - maintainers = ["ajaust", "BenjaminRodenberg", "IshaanDesai"] + maintainers("ajaust", "BenjaminRodenberg", "IshaanDesai") # Always prefer final version of release candidate version("develop", branch="develop") + version("2.5.0.2", sha256="6d7b78da830db6c5133b44617196ee90be8c7d6c8e14c8994a4800b3d4856416") version("2.5.0.1", sha256="e2602f828d4f907ea93e34f7d4adb8db086044a75a446592a4099423d56ed62c") version("2.5.0.0", sha256="9f55a22594bb602cde8a5987217728569f16d9576ea53ed00497e9046a2e1794") version("2.4.0.0", sha256="e80d16417b8ce1fdac80c988cb18ae1e16f785c5eb1035934d8b37ac18945242") diff --git a/test/Participant.cpp b/test/Participant.cpp new file mode 100644 index 00000000..8a36f2af --- /dev/null +++ b/test/Participant.cpp @@ -0,0 +1,323 @@ +#include "precice/Participant.hpp" +#include "precice/Tooling.hpp" +#include +#include +#include +#include + +std::string fake_version; +std::vector fake_read_write_buffer; +int fake_mesh_dimensions; +int fake_scalar_data_dimensions; +int fake_vector_data_dimensions; +std::vector fake_ids; +int n_fake_vertices; +std::string fake_mesh_name; +std::string fake_scalar_data_name; +std::string fake_vector_data_name; +int fake_data_id; +std::vector fake_bounding_box; +std::vector fake_coordinates; + +namespace precice { + +namespace impl{ +class ParticipantImpl{}; +} + +Participant:: Participant +( + precice::string_view participantName, + precice::string_view configurationFileName, + int solverProcessIndex, + int solverProcessSize ) +{ + fake_version = "dummy"; + fake_read_write_buffer = std::vector(); + fake_mesh_dimensions = 3; + fake_scalar_data_dimensions = 1; + fake_vector_data_dimensions = 3; + fake_data_id = 15; + fake_mesh_name = "FakeMesh"; + fake_scalar_data_name 
= "FakeScalarData"; + fake_vector_data_name = "FakeVectorData"; + n_fake_vertices = 3; + fake_ids.resize(n_fake_vertices); + std::iota(fake_ids.begin(), fake_ids.end(), 0); + fake_bounding_box.resize(fake_mesh_dimensions*2); + std::iota(fake_bounding_box.begin(), fake_bounding_box.end(), 0); + fake_coordinates.resize(n_fake_vertices*fake_mesh_dimensions); + std::iota(fake_coordinates.begin(), fake_coordinates.end(), 0); +} + +Participant::Participant( + precice::string_view participantName, + precice::string_view configurationFileName, + int solverProcessIndex, + int solverProcessSize, + void * communicator) +{ + fake_version = "dummy"; + fake_read_write_buffer = std::vector(); + fake_mesh_dimensions = 3; + fake_scalar_data_dimensions = 1; + fake_vector_data_dimensions = 3; + fake_data_id = 15; + fake_mesh_name = "FakeMesh"; + fake_scalar_data_name = "FakeScalarData"; + fake_vector_data_name = "FakeVectorData"; + n_fake_vertices = 3; + fake_ids.resize(n_fake_vertices); + std::iota(fake_ids.begin(), fake_ids.end(), 0); + fake_bounding_box.resize(fake_mesh_dimensions*2); + std::iota(fake_bounding_box.begin(), fake_bounding_box.end(), 0); +} + +Participant::~Participant() = default; + +void Participant:: initialize() +{ +} + +void Participant:: advance +( + double computedTimestepLength) +{ +} + +void Participant:: finalize() +{} + +int Participant:: getMeshDimensions +( + precice::string_view meshName) const +{ + return fake_mesh_dimensions; +} + +int Participant:: getDataDimensions +( + precice::string_view meshName, + precice::string_view dataName) const +{ + if (dataName.data() == fake_scalar_data_name) { + return fake_scalar_data_dimensions; + } else if (dataName.data() == fake_vector_data_name) { + return fake_vector_data_dimensions; + } else { + return -1; + } +} + +bool Participant:: isCouplingOngoing() const +{ + return 0; +} + +bool Participant:: isTimeWindowComplete() const +{ + return 0; +} + +double Participant:: getMaxTimeStepSize() const +{ + return 
-1.0; +} + +bool Participant:: requiresInitialData() +{ + return 0; +} + +bool Participant:: requiresReadingCheckpoint() +{ + return 0; +} + +bool Participant:: requiresWritingCheckpoint() +{ + return 0; +} + +bool Participant:: requiresMeshConnectivityFor +( + precice::string_view meshName) const +{ + return 0; +} + +bool Participant:: requiresGradientDataFor +( + precice::string_view meshName, + precice::string_view dataName) const +{ + return 0; +} + +int Participant:: setMeshVertex +( + precice::string_view meshName, + precice::span position ) +{ + return 0; +} + +int Participant:: getMeshVertexSize +( + precice::string_view meshName) const +{ + return n_fake_vertices; +} + +void Participant:: setMeshVertices +( + precice::string_view meshName, + precice::span positions, + precice::span ids) +{ + if(ids.size() > 0) { + assert (ids.size() == fake_ids.size()); + std::copy(fake_ids.begin(), fake_ids.end(), ids.data()); + } +} + +void Participant:: setMeshEdge +( + precice::string_view meshName, + int firstVertexID, + int secondVertexID) +{} + +void Participant::setMeshEdges( + precice::string_view meshName, + precice::span vertices) +{} + +void Participant:: setMeshTriangle +( + precice::string_view meshName, + int firstVertexID, + int secondVertexID, + int thirdVertexID ) +{} + +void Participant:: setMeshTriangles +( + precice::string_view meshName, + precice::span vertices ) +{} + +void Participant:: setMeshQuad +( + precice::string_view meshName, + int firstVertexID, + int secondVertexID, + int thirdVertexID, + int fourthVertexID ) +{} + +void Participant:: setMeshQuads +( + precice::string_view meshName, + precice::span vertices) +{} + +void Participant::setMeshTetrahedron +( + precice::string_view meshName, + int firstVertexID, + int secondVertexID, + int thirdVertexID, + int fourthVertexID) +{} + +void Participant::setMeshTetrahedra +( + precice::string_view meshName, + precice::span vertices) +{} + +void Participant:: writeData +( + precice::string_view 
meshName, + precice::string_view dataName, + precice::span vertices, + precice::span values) +{ + fake_read_write_buffer.clear(); + + for(const double value: values) { + fake_read_write_buffer.push_back(value); + } +} + +void Participant:: readData +( + precice::string_view meshName, + precice::string_view dataName, + precice::span vertices, + double relativeReadTime, + precice::span values) const +{ + if (dataName.data() == fake_scalar_data_name) { + for(const int id: vertices) { + values[id] = fake_read_write_buffer[id]; + } + } else if (dataName.data() == fake_vector_data_name) { + for(const int id: vertices) { + for(int d = 0; d < fake_vector_data_dimensions; d++) { + const int linearized_id = fake_vector_data_dimensions * id + d; + values[linearized_id] = fake_read_write_buffer[linearized_id]; + } + } + } +} + +void Participant:: setMeshAccessRegion +( + precice::string_view meshName, + precice::span boundingBox ) const +{ + assert(meshName == fake_mesh_name); + + for(std::size_t i = 0; i < fake_bounding_box.size(); i++){ + assert(boundingBox[i] == fake_bounding_box[i]); + } +} + +void Participant:: getMeshVertexIDsAndCoordinates +( + precice::string_view meshName, + precice::span valueIndices, + precice::span coordinates ) const +{ + assert(meshName == fake_mesh_name); + assert(valueIndices.size() == fake_ids.size()); + assert(coordinates.size() == fake_coordinates.size()); + + for(std::size_t i = 0; i < fake_ids.size(); i++){ + valueIndices[i] = fake_ids[i]; + } + for(std::size_t i = 0; i < fake_coordinates.size(); i++){ + coordinates[i] = fake_coordinates[i]; + } +} + +void Participant::writeGradientData( + precice::string_view meshName, + precice::string_view dataName, + precice::span vertices, + precice::span gradients) +{ + fake_read_write_buffer.clear(); + for (const double gradient: gradients) { + fake_read_write_buffer.push_back(gradient); + } +} + +std::string getVersionInformation() +{ + return fake_version; +} + +} // namespace precice \ No newline 
at end of file diff --git a/test/SolverInterface.cpp b/test/SolverInterface.cpp deleted file mode 100644 index 722820fc..00000000 --- a/test/SolverInterface.cpp +++ /dev/null @@ -1,524 +0,0 @@ -#include "precice/SolverInterface.hpp" -#include -#include -#include - -std::vector fake_read_write_buffer; -int fake_dimensions; -int fake_mesh_id; -std::vector fake_ids; -int n_fake_vertices; -std::string fake_data_name; -int fake_data_id; -std::vector fake_bounding_box; -std::vector fake_coordinates; - -namespace precice { - -namespace impl{ -class SolverInterfaceImpl{}; -} - -SolverInterface:: SolverInterface -( - const std::string& participantName, - const std::string& configurationFileName, - int solverProcessIndex, - int solverProcessSize ) -{ - fake_read_write_buffer = std::vector(); - fake_dimensions = 3; - fake_mesh_id = 0; - fake_data_id = 15; - fake_data_name = "FakeData"; - n_fake_vertices = 3; - fake_ids.resize(n_fake_vertices); - std::iota(fake_ids.begin(), fake_ids.end(), 0); - fake_bounding_box.resize(fake_dimensions*2); - std::iota(fake_bounding_box.begin(), fake_bounding_box.end(), 0); - fake_coordinates.resize(n_fake_vertices*fake_dimensions); - std::iota(fake_coordinates.begin(), fake_coordinates.end(), 0); -} - -SolverInterface::SolverInterface( - const std::string& participantName, - const std::string& configurationFileName, - int solverProcessIndex, - int solverProcessSize, - void * communicator) -{ - fake_read_write_buffer = std::vector(); - fake_dimensions = 3; - fake_mesh_id = 0; - fake_data_id = 15; - fake_data_name = "FakeData"; - n_fake_vertices = 3; - fake_ids.resize(n_fake_vertices); - std::iota(fake_ids.begin(), fake_ids.end(), 0); - fake_bounding_box.resize(fake_dimensions*2); - std::iota(fake_bounding_box.begin(), fake_bounding_box.end(), 0); -} - -SolverInterface::~SolverInterface() = default; - -double SolverInterface:: initialize(){return -1;} - -void SolverInterface:: initializeData() -{} - -double SolverInterface:: advance -( - double 
computedTimestepLength ) -{return -1;} - -void SolverInterface:: finalize() -{} - -int SolverInterface:: getDimensions() const -{return fake_dimensions;} - -bool SolverInterface:: isCouplingOngoing() const -{ - return 0; -} - -bool SolverInterface:: isReadDataAvailable() const -{ - return 0; -} - -bool SolverInterface:: isWriteDataRequired -( - double computedTimestepLength ) const -{ - return 0; -} - -bool SolverInterface:: isTimeWindowComplete() const -{ - return 0; -} - -bool SolverInterface:: isActionRequired -( - const std::string& action ) const -{ - return 0; -} - -void SolverInterface:: markActionFulfilled -( - const std::string& action ) -{} - -bool SolverInterface:: hasMesh -( - const std::string& meshName ) const -{ - return 0; -} - -int SolverInterface:: getMeshID -( - const std::string& meshName ) const -{ - return fake_mesh_id; -} - -std::set SolverInterface:: getMeshIDs() const -{ - return std::set(); -} - -bool SolverInterface:: hasData -( - const std::string& dataName, int meshID ) const -{ - return 0; -} - -int SolverInterface:: getDataID -( - const std::string& dataName, int meshID ) const -{ - if(meshID == fake_mesh_id && dataName == fake_data_name) - { - return fake_data_id; - } - else - { - return -1; - } -} - -bool SolverInterface::hasToEvaluateSurrogateModel() const -{ - return 0; -} - -bool SolverInterface::hasToEvaluateFineModel() const -{ - return 0; -} - -bool SolverInterface:: isMeshConnectivityRequired -( - int meshID ) const -{ - return 0; -} - -int SolverInterface:: setMeshVertex -( - int meshID, - const double* position ) -{ - return 0; -} - -int SolverInterface:: getMeshVertexSize -( - int meshID) const -{ - return n_fake_vertices; -} - -void SolverInterface:: setMeshVertices -( - int meshID, - int size, - const double* positions, - int* ids ) -{ - assert (size == fake_ids.size()); - std::copy(fake_ids.begin(), fake_ids.end(), ids); -} - -void SolverInterface:: getMeshVertices -( - int meshID, - int size, - const int* ids, - 
double* positions ) const -{ - for(int i = 0; i < size; i++){ - positions[fake_dimensions * i] = i; - positions[fake_dimensions * i + 1] = i + n_fake_vertices; - positions[fake_dimensions * i + 2] = i + 2 * n_fake_vertices; - } -} - -void SolverInterface:: getMeshVertexIDsFromPositions -( - int meshID, - int size, - const double* positions, - int* ids ) const -{ - assert (size == fake_ids.size()); - std::copy(fake_ids.begin(), fake_ids.end(), ids); -} - -int SolverInterface:: setMeshEdge -( - int meshID, - int firstVertexID, - int secondVertexID ) -{ - return -1; -} - -void SolverInterface:: setMeshTriangle -( - int meshID, - int firstEdgeID, - int secondEdgeID, - int thirdEdgeID ) -{} - -void SolverInterface:: setMeshTriangleWithEdges -( - int meshID, - int firstVertexID, - int secondVertexID, - int thirdVertexID ) -{} - -void SolverInterface:: setMeshQuad -( - int meshID, - int firstEdgeID, - int secondEdgeID, - int thirdEdgeID, - int fourthEdgeID ) -{} - -void SolverInterface:: setMeshQuadWithEdges -( - int meshID, - int firstVertexID, - int secondVertexID, - int thirdVertexID, - int fourthVertexID ) -{} - -void SolverInterface:: mapReadDataTo -( - int toMeshID ) -{} - -void SolverInterface:: mapWriteDataFrom -( - int fromMeshID ) -{} - - -void SolverInterface:: writeBlockVectorData -( - int dataID, - int size, - const int* valueIndices, - const double* values ) -{ - fake_read_write_buffer.clear(); - for(int i = 0; i < size * this->getDimensions(); i++){ - fake_read_write_buffer.push_back(values[i]); - } -} - -void SolverInterface:: writeVectorData -( - int dataID, - int valueIndex, - const double* value ) -{ - fake_read_write_buffer.clear(); - for(int i = 0; i < this->getDimensions(); i++){ - fake_read_write_buffer.push_back(value[i]); - } -} - -void SolverInterface:: writeBlockScalarData -( - int dataID, - int size, - const int* valueIndices, - const double* values ) -{ - fake_read_write_buffer.clear(); - for(int i = 0; i < size; i++){ - 
fake_read_write_buffer.push_back(values[i]); - } -} - -void SolverInterface:: writeScalarData -( - int dataID, - int valueIndex, - double value ) -{ - fake_read_write_buffer.clear(); - fake_read_write_buffer.push_back(value); -} - -void SolverInterface:: readBlockVectorData -( - int dataID, - int size, - const int* valueIndices, - double* values ) const -{ - for(int i = 0; i < size * this->getDimensions(); i++){ - values[i] = fake_read_write_buffer[i]; - } -} - -void SolverInterface:: readBlockVectorData -( - int dataID, - int size, - const int* valueIndices, - double relativeReadTime, - double* values ) const -{ - for(int i = 0; i < size * this->getDimensions(); i++){ - values[i] = fake_read_write_buffer[i]; - } -} - -void SolverInterface:: readVectorData -( - int dataID, - int valueIndex, - double* value ) const -{ - for(int i = 0; i < this->getDimensions(); i++){ - value[i] = fake_read_write_buffer[i]; - } -} - -void SolverInterface:: readVectorData -( - int dataID, - int valueIndex, - double relativeReadTime, - double* value ) const -{ - for(int i = 0; i < this->getDimensions(); i++){ - value[i] = fake_read_write_buffer[i]; - } -} - -void SolverInterface:: readBlockScalarData -( - int dataID, - int size, - const int* valueIndices, - double* values ) const -{ - for(int i = 0; i < size; i++){ - values[i] = fake_read_write_buffer[i]; - } -} - -void SolverInterface:: readBlockScalarData -( - int dataID, - int size, - const int* valueIndices, - double relativeReadTime, - double* values ) const -{ - for(int i = 0; i < size; i++){ - values[i] = fake_read_write_buffer[i]; - } -} - -void SolverInterface:: readScalarData -( - int dataID, - int valueIndex, - double& value ) const -{ - value = fake_read_write_buffer[0]; -} - -void SolverInterface:: readScalarData -( - int dataID, - int valueIndex, - double relativeReadTime, - double& value ) const -{ - value = fake_read_write_buffer[0]; -} - -void SolverInterface:: setMeshAccessRegion -( - const int meshID, - const double* 
boundingBox ) const -{ - assert(meshID == fake_mesh_id); - - for(int i = 0; i < fake_bounding_box.size(); i++){ - assert(boundingBox[i] == fake_bounding_box[i]); - } -} - -void SolverInterface:: getMeshVerticesAndIDs -( - const int meshID, - const int size, - int* valueIndices, - double* coordinates ) const -{ - assert(meshID == fake_mesh_id); - assert(size == fake_ids.size()); - - for(int i = 0; i < fake_ids.size(); i++){ - valueIndices[i] = fake_ids[i]; - } - for(int i = 0; i < fake_coordinates.size(); i++){ - coordinates[i] = fake_coordinates[i]; - } -} - -bool SolverInterface::isGradientDataRequired(int dataID) const -{ - return 0; -} - -void SolverInterface::writeBlockVectorGradientData( - int dataID, - int size, - const int *valueIndices, - const double *gradientValues) -{ - fake_read_write_buffer.clear(); - for (int i = 0; i < size * this->getDimensions() * this->getDimensions(); i++) { - fake_read_write_buffer.push_back(gradientValues[i]); - } -} - -void SolverInterface::writeScalarGradientData( - int dataID, - int valueIndex, - const double *gradientValues) -{ - fake_read_write_buffer.clear(); - for (int i = 0; i < this->getDimensions(); i++) { - fake_read_write_buffer.push_back(gradientValues[i]); - } -} -void SolverInterface::writeBlockScalarGradientData( - int dataID, - int size, - const int *valueIndices, - const double *gradientValues) -{ - fake_read_write_buffer.clear(); - for (int i = 0; i < size * this->getDimensions(); i++) { - fake_read_write_buffer.push_back(gradientValues[i]); - } -} - -void SolverInterface::writeVectorGradientData( - int dataID, - int valueIndex, - const double *gradientValues) -{ - fake_read_write_buffer.clear(); - for (int i = 0; i < this->getDimensions() * this->getDimensions(); i++) { - fake_read_write_buffer.push_back(gradientValues[i]); - } -} - -std::string getVersionInformation() -{ - std::string dummy ("dummy"); - return dummy; -} - -namespace constants { - -const std::string& actionWriteInitialData() -{ - static 
std::string dummy ("dummy_write_initial_data"); - return dummy; -} - -const std::string& actionWriteIterationCheckpoint() -{ - static std::string dummy ("dummy_write_iteration"); - return dummy; -} - -const std::string& actionReadIterationCheckpoint() -{ - static std::string dummy ("dummy_read_iteration"); - return dummy; -} - -} // namespace precice, constants - -} // namespace precice \ No newline at end of file diff --git a/test/test_bindings_module.py b/test/test_bindings_module.py index e20094b6..a34c3983 100644 --- a/test/test_bindings_module.py +++ b/test/test_bindings_module.py @@ -10,246 +10,188 @@ class TestBindings(TestCase): """ def test_constructor(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) self.assertTrue(True) def test_constructor_custom_mpi_comm(self): - solver_interface = precice.Interface( + participant = precice.Participant( "test", "dummy.xml", 0, 1, MPI.COMM_WORLD) self.assertTrue(True) def test_version(self): precice.__version__ - def test_get_dimensions(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + def test_get_mesh_dimensions(self): + participant = precice.Participant("test", "dummy.xml", 0, 1) # TODO: it would be nice to be able to mock the output of the interface - # directly in the test, not in test/SolverInterface.hpp - fake_dimension = 3 # compare to test/SolverInterface.hpp, fake_dimensions + # directly in the test, not in test/Participant.hpp + fake_mesh_dimension = 3 # compare to test/Participant.hpp, fake_mesh_dimension # TODO: it would be nice to be able to mock the output of the interface - # directly in the test, not in test/SolverInterface.hpp - self.assertEqual(fake_dimension, solver_interface.get_dimensions()) - - def test_is_mesh_connectivity_required(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + # directly in the test, not in test/Participant.hpp + 
self.assertEqual(fake_mesh_dimension, participant.get_mesh_dimensions("dummy")) + + def test_get_data_dimensions(self): + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_scalar_data_dimension = 1 # compare to test/Participant.hpp, fake_scalar_data_dimension + self.assertEqual(fake_scalar_data_dimension, participant.get_data_dimensions("dummy", "FakeScalarData")) + fake_vector_data_dimension = 3 # compare to test/Participant.hpp, fake_vector_data_dimension + self.assertEqual(fake_vector_data_dimension, participant.get_data_dimensions("dummy", "FakeVectorData")) + + def test_requires_mesh_connectivity_for(self): + participant = precice.Participant("test", "dummy.xml", 0, 1) fake_bool = 0 # compare to output in test/SolverInterface.cpp - fake_mesh_id = 0 - self.assertEqual(fake_bool, solver_interface.is_mesh_connectivity_required(fake_mesh_id)) - - def test_get_mesh_id(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - # TODO: it would be nice to be able to mock the output of the interface - # directly in the test, not in test/SolverInterface.hpp - fake_mesh_id = 0 # compare to test/SolverInterface.hpp, fake_mesh_id - actual_output = solver_interface.get_mesh_id("testMesh") - self.assertEqual(fake_mesh_id, actual_output) + fake_mesh_name = "FakeMesh" + self.assertEqual(fake_bool, participant.requires_mesh_connectivity_for(fake_mesh_name)) def test_set_mesh_vertices(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices positions = np.random.rand(n_fake_vertices, fake_dimension) expected_output = np.array(range(n_fake_vertices)) - actual_output = 
solver_interface.set_mesh_vertices(fake_mesh_id, positions) + actual_output = participant.set_mesh_vertices(fake_mesh_name, positions) self.assertTrue(np.array_equal(expected_output, actual_output)) def test_set_mesh_vertices_empty(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions n_fake_vertices = 0 # compare to test/SolverInterface.cpp, n_fake_vertices - positions = np.random.rand(n_fake_vertices, fake_dimension) + positions = np.zeros((n_fake_vertices, fake_dimension)) expected_output = np.array(range(n_fake_vertices)) - actual_output = solver_interface.set_mesh_vertices(fake_mesh_id, positions) + actual_output = participant.set_mesh_vertices(fake_mesh_name, positions) self.assertTrue(np.array_equal(expected_output, actual_output)) def test_set_mesh_vertices_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices positions = np.random.rand(n_fake_vertices, fake_dimension) positions = list(list(positions[i, j] for j in range( positions.shape[1])) for i in range(positions.shape[0])) expected_output = np.array(range(n_fake_vertices)) - actual_output = solver_interface.set_mesh_vertices(fake_mesh_id, positions) + actual_output = participant.set_mesh_vertices(fake_mesh_name, positions) self.assertTrue(np.array_equal(expected_output, actual_output)) def 
test_set_mesh_vertices_empty_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name positions = [] n_fake_vertices = 0 expected_output = np.array(range(n_fake_vertices)) - actual_output = solver_interface.set_mesh_vertices(fake_mesh_id, positions) + actual_output = participant.set_mesh_vertices(fake_mesh_name, positions) self.assertTrue(np.array_equal(expected_output, actual_output)) def test_set_mesh_vertices_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices positions = np.random.rand(n_fake_vertices, fake_dimension) positions = tuple(tuple(positions[i, j] for j in range( positions.shape[1])) for i in range(positions.shape[0])) expected_output = np.array(range(n_fake_vertices)) - actual_output = solver_interface.set_mesh_vertices(fake_mesh_id, positions) + actual_output = participant.set_mesh_vertices(fake_mesh_name, positions) self.assertTrue(np.array_equal(expected_output, actual_output)) def test_set_mesh_vertices_empty_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name positions = () n_fake_vertices = 0 expected_output = np.array(range(n_fake_vertices)) - actual_output = 
solver_interface.set_mesh_vertices(fake_mesh_id, positions) + actual_output = participant.set_mesh_vertices(fake_mesh_name, positions) self.assertTrue(np.array_equal(expected_output, actual_output)) def test_set_mesh_vertices_mixed(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices positions = np.random.rand(n_fake_vertices, fake_dimension) positions = list(tuple(positions[i, j] for j in range( positions.shape[1])) for i in range(positions.shape[0])) expected_output = np.array(range(n_fake_vertices)) - actual_output = solver_interface.set_mesh_vertices(fake_mesh_id, positions) + actual_output = participant.set_mesh_vertices(fake_mesh_name, positions) self.assertTrue(np.array_equal(expected_output, actual_output)) def test_set_mesh_vertex(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions position = np.random.rand(fake_dimension) - vertex_id = solver_interface.set_mesh_vertex(fake_mesh_id, position) + vertex_id = participant.set_mesh_vertex(fake_mesh_name, position) self.assertTrue(0 == vertex_id) def test_set_mesh_vertex_empty(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to 
test/SolverInterface.cpp, fake_mesh_name fake_dimension = 0 # compare to test/SolverInterface.cpp, fake_dimensions position = np.random.rand(fake_dimension) - vertex_id = solver_interface.set_mesh_vertex(fake_mesh_id, position) + vertex_id = participant.set_mesh_vertex(fake_mesh_name, position) self.assertTrue(0 == vertex_id) def test_set_mesh_vertex_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions position = list(np.random.rand(fake_dimension)) - vertex_id = solver_interface.set_mesh_vertex(fake_mesh_id, position) + vertex_id = participant.set_mesh_vertex(fake_mesh_name, position) self.assertTrue(0 == vertex_id) def test_set_mesh_vertex_empty_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name position = [] - vertex_id = solver_interface.set_mesh_vertex(fake_mesh_id, position) + vertex_id = participant.set_mesh_vertex(fake_mesh_name, position) self.assertTrue(0 == vertex_id) def test_set_mesh_vertex_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions position = tuple(np.random.rand(fake_dimension)) - vertex_id = solver_interface.set_mesh_vertex(fake_mesh_id, position) + vertex_id = 
participant.set_mesh_vertex(fake_mesh_name, position) self.assertTrue(0 == vertex_id) def test_set_mesh_vertex_empty_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name position = () - vertex_id = solver_interface.set_mesh_vertex(fake_mesh_id, position) + vertex_id = participant.set_mesh_vertex(fake_mesh_name, position) self.assertTrue(0 == vertex_id) - def test_get_mesh_vertex_ids_from_positions(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id - fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions - n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices - positions = np.random.rand(n_fake_vertices, fake_dimension) - fake_vertex_ids = range(n_fake_vertices) - vertex_ids = solver_interface.get_mesh_vertex_ids_from_positions(fake_mesh_id, positions) - self.assertTrue(np.array_equal(fake_vertex_ids, vertex_ids)) - - def test_get_mesh_vertex_ids_from_positions_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id - fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions - n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices - positions = np.random.rand(n_fake_vertices, fake_dimension) - positions = list(list(positions[i, j] for j in range( - positions.shape[1])) for i in range(positions.shape[0])) - fake_vertex_ids = range(n_fake_vertices) - vertex_ids = solver_interface.get_mesh_vertex_ids_from_positions(fake_mesh_id, positions) - self.assertTrue(np.array_equal(fake_vertex_ids, vertex_ids)) - - def test_get_mesh_vertex_ids_from_positions_tuple(self): - 
solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id - fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions - n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices - positions = np.random.rand(n_fake_vertices, fake_dimension) - positions = tuple(tuple(positions[i, j] for j in range( - positions.shape[1])) for i in range(positions.shape[0])) - fake_vertex_ids = range(n_fake_vertices) - vertex_ids = solver_interface.get_mesh_vertex_ids_from_positions(fake_mesh_id, positions) - self.assertTrue(np.array_equal(fake_vertex_ids, vertex_ids)) - - def test_get_mesh_vertex_ids_from_positions_mixed(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id - fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions - n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices - positions = np.random.rand(n_fake_vertices, fake_dimension) - positions = list(tuple(positions[i, j] for j in range( - positions.shape[1])) for i in range(positions.shape[0])) - fake_vertex_ids = range(n_fake_vertices) - vertex_ids = solver_interface.get_mesh_vertex_ids_from_positions(fake_mesh_id, positions) - self.assertTrue(np.array_equal(fake_vertex_ids, vertex_ids)) - def test_get_mesh_vertex_size(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices - n_vertices = solver_interface.get_mesh_vertex_size(fake_mesh_id) + n_vertices = participant.get_mesh_vertex_size(fake_mesh_name) self.assertTrue(n_fake_vertices == n_vertices) - def 
test_get_mesh_vertices(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id - n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices - fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions - fake_vertices = np.zeros((n_fake_vertices, fake_dimension)) - for i in range(n_fake_vertices): - fake_vertices[i, 0] = i - fake_vertices[i, 1] = i + n_fake_vertices - fake_vertices[i, 2] = i + 2 * n_fake_vertices - vertices = solver_interface.get_mesh_vertices(fake_mesh_id, range(n_fake_vertices)) - self.assertTrue(np.array_equal(fake_vertices, vertices)) - def test_read_write_block_scalar_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = np.array([3, 7, 8], dtype=np.double) - solver_interface.write_block_scalar_data(1, np.array([1, 2, 3]), write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array([1, 2, 3])) + participant.write_data("FakeMesh", "FakeScalarData", np.array([0, 1, 2]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", np.array([0, 1, 2]), dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_block_scalar_data_single_float(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = 8 with self.assertRaises(TypeError): - solver_interface.write_block_scalar_data(1, 1, write_data) + participant.write_data("FakeMesh", "FakeScalarData", 1, write_data) with self.assertRaises(TypeError): - solver_interface.read_block_scalar_data(1, 1) + participant.read_data("FakeMesh", "FakeScalarData", 1) def test_read_write_block_scalar_data_empty(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) 
write_data = np.array([]) - solver_interface.write_block_scalar_data(1, [], write_data) - read_data = solver_interface.read_block_scalar_data(1, []) + participant.write_data("FakeMesh", "FakeScalarData", [], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", [], dt) self.assertTrue(len(read_data) == 0) def test_read_write_block_scalar_data_non_contiguous(self): @@ -258,55 +200,62 @@ def test_read_write_block_scalar_data_non_contiguous(self): Note: Check whether np.ndarray is contiguous via np.ndarray.flags. """ - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) dummy_array = np.random.rand(3, 3) write_data = dummy_array[:, 1] - assert(write_data.flags["C_CONTIGUOUS"] is False) - solver_interface.write_block_scalar_data(1, np.array([1, 2, 3]), write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array([1, 2, 3])) + assert (write_data.flags["C_CONTIGUOUS"] is False) + participant.write_data("FakeMesh", "FakeScalarData", np.array([0, 1, 2]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", np.array([0, 1, 2]), dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_scalar_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = 3 - solver_interface.write_scalar_data(1, 1, write_data) - read_data = solver_interface.read_scalar_data(1, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) + write_data = [3] + participant.write_data("FakeMesh", "FakeScalarData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", [0], dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_block_vector_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = np.array([[3, 7, 8], [7, 6, 5]], 
dtype=np.double) - solver_interface.write_block_vector_data(1, np.array([1, 2]), write_data) - read_data = solver_interface.read_block_vector_data(1, np.array([1, 2])) + participant.write_data("FakeMesh", "FakeVectorData", np.array([0, 1]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array([0, 1]), dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_block_vector_data_empty(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = np.array([]) - solver_interface.write_block_vector_data(1, [], write_data) - read_data = solver_interface.read_block_vector_data(1, []) + participant.write_data("FakeMesh", "FakeVectorData", [], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", [], dt) self.assertTrue(len(read_data) == 0) def test_read_write_block_vector_data_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = [[3, 7, 8], [7, 6, 5]] - solver_interface.write_block_vector_data(1, np.array([1, 2]), write_data) - read_data = solver_interface.read_block_vector_data(1, np.array([1, 2])) + participant.write_data("FakeMesh", "FakeVectorData", np.array([0, 1]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array([0, 1]), dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_block_vector_data_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = ((3, 7, 8), (7, 6, 5)) - solver_interface.write_block_vector_data(1, np.array([1, 2]), write_data) - read_data = solver_interface.read_block_vector_data(1, np.array([1, 2])) + participant.write_data("FakeMesh", "FakeVectorData", np.array([0, 1]), write_data) + dt = 1 + 
read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array([0, 1]), dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_block_vector_data_mixed(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = [(3, 7, 8), (7, 6, 5)] - solver_interface.write_block_vector_data(1, np.array([1, 2]), write_data) - read_data = solver_interface.read_block_vector_data(1, np.array([1, 2])) + participant.write_data("FakeMesh", "FakeVectorData", np.array([0, 1]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array([0, 1]), dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_block_vector_data_non_contiguous(self): @@ -315,35 +264,39 @@ def test_read_write_block_vector_data_non_contiguous(self): Note: Check whether np.ndarray is contiguous via np.ndarray.flags. """ - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) size = 6 dummy_array = np.random.rand(size, 5) write_data = dummy_array[:, 1:4] - assert(write_data.flags["C_CONTIGUOUS"] is False) + assert (write_data.flags["C_CONTIGUOUS"] is False) vertex_ids = np.arange(size) - solver_interface.write_block_vector_data(1, vertex_ids, write_data) - read_data = solver_interface.read_block_vector_data(1, vertex_ids) + participant.write_data("FakeMesh", "FakeVectorData", vertex_ids, write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", vertex_ids, dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_vector_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = np.array([1, 2, 3], dtype=np.double) - solver_interface.write_vector_data(1, 1, write_data) - read_data = solver_interface.read_vector_data(1, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) + 
write_data = np.array([[0, 1, 2]], dtype=np.double) + participant.write_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", [0], dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_vector_data_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = [1, 2, 3] - solver_interface.write_vector_data(1, 1, write_data) - read_data = solver_interface.read_vector_data(1, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) + write_data = [[0, 1, 2]] + participant.write_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", [0], dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_vector_data_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = (1, 2, 3) - solver_interface.write_vector_data(1, 1, write_data) - read_data = solver_interface.read_vector_data(1, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) + write_data = [(1, 2, 3)] + participant.write_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", [0], dt) self.assertTrue(np.array_equal(write_data, read_data)) def test_read_write_vector_data_non_contiguous(self): @@ -352,52 +305,31 @@ def test_read_write_vector_data_non_contiguous(self): Note: Check whether np.ndarray is contiguous via np.ndarray.flags. 
""" - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) dummy_array = np.random.rand(3, 3) write_data = dummy_array[:, 1] - assert(write_data.flags["C_CONTIGUOUS"] is False) - solver_interface.write_vector_data(1, 1, write_data) - read_data = solver_interface.read_vector_data(1, 1) + assert (write_data.flags["C_CONTIGUOUS"] is False) + write_data = [write_data] + participant.write_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", [0], dt) self.assertTrue(np.array_equal(write_data, read_data)) - def test_get_data_id(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id - fake_data_name = "FakeData" # compare to test/SolverInterface.cpp, fake_data_name - fake_data_id = 15 # compare to test/SolverInterface.cpp, fake_data_ID - data_id = solver_interface.get_data_id(fake_data_name, fake_mesh_id) - self.assertTrue(data_id == fake_data_id) - def test_get_version_information(self): version_info = precice.get_version_information() fake_version_info = b"dummy" # compare to test/SolverInterface.cpp self.assertEqual(version_info, fake_version_info) - def test_action_write_initial_data(self): - return_constant = precice.action_write_initial_data() - dummy_constant = b"dummy_write_initial_data" # compare to test/SolverInterface.cpp - self.assertEqual(return_constant, dummy_constant) - - def test_action_write_iteration_checkpoint(self): - return_constant = precice.action_write_iteration_checkpoint() - dummy_constant = b"dummy_write_iteration" # compare to test/SolverInterface.cpp - self.assertEqual(return_constant, dummy_constant) - - def test_action_read_iteration_checkpoint(self): - return_constant = precice.action_read_iteration_checkpoint() - dummy_constant = b"dummy_read_iteration" # compare to test/SolverInterface.cpp - 
self.assertEqual(return_constant, dummy_constant) - def test_set_mesh_access_region(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions fake_bounding_box = np.arange(fake_dimension * 2) - solver_interface.set_mesh_access_region(fake_mesh_id, fake_bounding_box) + participant.set_mesh_access_region(fake_mesh_name, fake_bounding_box) - def test_get_mesh_vertices_and_ids(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - fake_mesh_id = 0 # compare to test/SolverInterface.cpp, fake_mesh_id + def test_get_mesh_vertex_ids_and_coordinates(self): + participant = precice.Participant("test", "dummy.xml", 0, 1) + fake_mesh_name = "FakeMesh" # compare to test/SolverInterface.cpp, fake_mesh_name n_fake_vertices = 3 # compare to test/SolverInterface.cpp, n_fake_vertices fake_dimension = 3 # compare to test/SolverInterface.cpp, fake_dimensions vertex_ids = np.arange(n_fake_vertices) @@ -406,146 +338,162 @@ def test_get_mesh_vertices_and_ids(self): coordinates[i, 0] = i * fake_dimension coordinates[i, 1] = i * fake_dimension + 1 coordinates[i, 2] = i * fake_dimension + 2 - fake_ids, fake_coordinates = solver_interface.get_mesh_vertices_and_ids(fake_mesh_id) + fake_ids, fake_coordinates = participant.get_mesh_vertex_ids_and_coordinates(fake_mesh_name) self.assertTrue(np.array_equal(fake_ids, vertex_ids)) self.assertTrue(np.array_equal(fake_coordinates, coordinates)) - def test_is_gradient_data_required(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + def test_requires_gradient_data_for(self): + participant = precice.Participant("test", "dummy.xml", 0, 1) fake_bool = 0 # compare to output in test/SolverInterface.cpp - 
fake_data_id = 0 - self.assertEqual(fake_bool, solver_interface.is_gradient_data_required(fake_data_id)) + fake_mesh_name = "FakeMesh" + fake_data_name = "FakeName" + self.assertEqual(fake_bool, participant.requires_gradient_data_for(fake_mesh_name, fake_data_name)) def test_write_block_scalar_gradient_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = np.array([[1, 2, 3], [6, 7, 8], [9, 10, 11]], dtype=np.double) - solver_interface.write_block_scalar_gradient_data(1, np.array([1, 2, 3]), write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + participant = precice.Participant("test", "dummy.xml", 0, 1) + write_data = np.array([[0, 1, 2], [6, 7, 8], [9, 10, 11]], dtype=np.double) + participant.write_gradient_data("FakeMesh", "FakeScalarData", np.array([0, 1, 2]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", np.array(range(9)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_block_scalar_gradient_data_single_float(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) fake_dimension = 3 - n_fake_vertices = 4 + n_fake_vertices = 1 vertex_ids = np.arange(n_fake_vertices) write_data = np.random.rand(n_fake_vertices, fake_dimension) - solver_interface.write_block_scalar_gradient_data(1, vertex_ids, write_data) - read_data = solver_interface.read_block_vector_data(1, vertex_ids) - self.assertTrue(np.array_equal(write_data, read_data)) + participant.write_gradient_data("FakeMesh", "FakeScalarData", vertex_ids, write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", np.arange(n_fake_vertices * fake_dimension), dt) + self.assertTrue(np.array_equal(write_data.flatten(), read_data)) def test_write_block_scalar_gradient_data_empty(self): - solver_interface = precice.Interface("test", 
"dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = np.array([]) - solver_interface.write_block_scalar_gradient_data(1, [], write_data) - read_data = solver_interface.read_block_scalar_data(1, []) + participant.write_gradient_data("FakeMesh", "FakeScalarData", [], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", [], dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_block_scalar_gradient_data_non_contiguous(self): """ - Tests behaviour of solver interface, if a non contiguous array is passed to the interface. - + Tests behavior of solver interface, if a non contiguous array is passed to the interface. Note: Check whether np.ndarray is contiguous via np.ndarray.flags. """ - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) dummy_array = np.random.rand(3, 9) write_data = dummy_array[:, 3:6] assert write_data.flags["C_CONTIGUOUS"] is False - solver_interface.write_block_scalar_gradient_data(1, np.array([1, 2, 3]), write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + participant.write_gradient_data("FakeMesh", "FakeScalarData", np.array([0, 1, 2]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeScalarData", np.array(range(9)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_scalar_gradient_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) fake_dimension = 3 - write_data = np.random.rand(fake_dimension) - solver_interface.write_scalar_gradient_data(1, 1, write_data) - read_data = solver_interface.read_vector_data(1, 1) + write_data = [np.random.rand(fake_dimension)] + participant.write_gradient_data("FakeMesh", "FakeScalarData", [0], write_data) + dt = 1 
+ # Gradient data is essentially vector data, hence the appropriate data name is used here + read_data = participant.read_data("FakeMesh", "FakeVectorData", [0], dt) self.assertTrue(np.array_equiv(write_data, read_data)) def test_write_block_vector_gradient_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) fake_dimension = 3 n_fake_vertices = 4 vertex_ids = np.arange(n_fake_vertices) write_data = np.random.rand(n_fake_vertices, fake_dimension * fake_dimension) - solver_interface.write_block_vector_gradient_data(1, vertex_ids, write_data) - read_data = solver_interface.read_block_vector_data(1, np.array(range(n_fake_vertices * fake_dimension))) + participant.write_gradient_data("FakeMesh", "FakeVectorData", vertex_ids, write_data) + dt = 1 + read_data = participant.read_data( + "FakeMesh", "FakeVectorData", np.array(range(n_fake_vertices * fake_dimension)), dt) self.assertTrue(np.array_equiv(write_data.flatten(), read_data.flatten())) def test_write_block_vector_gradient_data_empty(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = np.array([]) - solver_interface.write_block_vector_gradient_data(1, [], write_data) - read_data = solver_interface.read_block_scalar_data(1, []) + participant.write_gradient_data("FakeMesh", "FakeVectorData", [], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", [], dt) self.assertTrue(len(read_data) == 0) def test_write_block_vector_gradient_data_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = [[3.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0], [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 6.0, 5.0]] - solver_interface.write_block_vector_gradient_data(1, np.array([1, 2]), write_data) - read_data =
solver_interface.read_block_scalar_data(1, np.array(range(18))) + participant.write_gradient_data("FakeMesh", "FakeVectorData", np.array([0, 1]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(6)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_block_vector_gradient_data_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = ((1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 3.0, 7.0, 8.0), (1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 6.0, 5.0)) - solver_interface.write_block_vector_gradient_data(1, np.array([1, 2]), write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(18))) + participant.write_gradient_data("FakeMesh", "FakeVectorData", np.array([0, 1]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(6)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_block_vector_gradient_data_mixed(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) write_data = [(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 3.0, 7.0, 8.0), (4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 7.0, 6.0, 5.0)] - solver_interface.write_block_vector_gradient_data(1, np.array([1, 2]), write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(18))) + participant.write_gradient_data("FakeMesh", "FakeVectorData", np.array([0, 1]), write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(6)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_block_vector_gradient_data_non_contiguous(self): """ - Tests behaviour of solver interface, if a non contiguous array is passed to the interface. 
- + Tests behavior of solver interface, if a non contiguous array is passed to the interface. Note: Check whether np.ndarray is contiguous via np.ndarray.flags. """ - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) dummy_array = np.random.rand(3, 15) write_data = dummy_array[:, 2:11] assert write_data.flags["C_CONTIGUOUS"] is False vertex_ids = np.arange(3) - solver_interface.write_block_vector_gradient_data(1, vertex_ids, write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(27))) + participant.write_gradient_data("FakeMesh", "FakeVectorData", vertex_ids, write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(9)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_vector_gradient_data(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = np.arange(0, 9, dtype=np.double) - solver_interface.write_vector_gradient_data(1, 1, write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + participant = precice.Participant("test", "dummy.xml", 0, 1) + write_data = [np.arange(0, 9, dtype=np.double)] + participant.write_gradient_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(3)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_vector_gradient_data_list(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0] - solver_interface.write_vector_gradient_data(1, 1, write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + participant = precice.Participant("test", "dummy.xml", 0, 1) + write_data = [[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]] + 
participant.write_gradient_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(3)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_vector_gradient_data_tuple(self): - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) - write_data = (1.0, 2.0, 3.0, 9.0, 8.0, 7.0, 6.0, 5.0, 4.0) - solver_interface.write_vector_gradient_data(1, 1, write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + participant = precice.Participant("test", "dummy.xml", 0, 1) + write_data = [(1.0, 2.0, 3.0, 9.0, 8.0, 7.0, 6.0, 5.0, 4.0)] + participant.write_gradient_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(3)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) def test_write_vector_gradient_data_non_contiguous(self): """ - Tests behaviour of solver interface, if a non contiguous array is passed to the interface. - + Tests behavior of solver interface, if a non contiguous array is passed to the interface. Note: Check whether np.ndarray is contiguous via np.ndarray.flags. 
""" - solver_interface = precice.Interface("test", "dummy.xml", 0, 1) + participant = precice.Participant("test", "dummy.xml", 0, 1) dummy_array = np.random.rand(9, 3) write_data = dummy_array[:, 1] assert write_data.flags["C_CONTIGUOUS"] is False - solver_interface.write_vector_gradient_data(1, 1, write_data) - read_data = solver_interface.read_block_scalar_data(1, np.array(range(9))) + write_data = [write_data] + participant.write_gradient_data("FakeMesh", "FakeVectorData", [0], write_data) + dt = 1 + read_data = participant.read_data("FakeMesh", "FakeVectorData", np.array(range(3)), dt) self.assertTrue(np.array_equiv(np.array(write_data).flatten(), read_data.flatten())) diff --git a/tools/releasing/packaging/docker/Dockerfile b/tools/releasing/packaging/docker/Dockerfile index 496390f1..6eb6254c 100644 --- a/tools/releasing/packaging/docker/Dockerfile +++ b/tools/releasing/packaging/docker/Dockerfile @@ -1,6 +1,6 @@ # Dockerfile to build a ubuntu image containing the installed Debian package of a release -ARG branch=develop -ARG from=precice/precice:latest +ARG PRECICE_TAG=develop +ARG from=precice/precice:${PRECICE_TAG} FROM $from USER root @@ -16,14 +16,15 @@ RUN apt-get -qq update && apt-get -qq install \ pkg-config && \ rm -rf /var/lib/apt/lists/* -## Needed, because precice/precice:latest does not create a user? See also https://github.com/precice/precice/pull/1090 +## Needed, if base image does not create a user? 
See also https://github.com/precice/precice/pull/1090 +## At the moment: precice/precice:latest does not create a user, but benjaminrodenberg/precice:develop creates a user ## ------> # Create user precice -ARG uid=1000 -ARG gid=1000 -RUN groupadd -g ${gid} precice \ - && useradd -u ${uid} -g ${gid} -m -s /bin/bash precice \ - && sudo usermod -a -G sudo precice +# ARG uid=1000 +# ARG gid=1000 +# RUN groupadd -g ${gid} precice \ +# && useradd -u ${uid} -g ${gid} -m -s /bin/bash precice \ +# && sudo usermod -a -G sudo precice # Setup passwordless sudo RUN echo "ALL ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers @@ -41,8 +42,7 @@ SHELL ["/bin/bash", "-c"] RUN python3 -m pip install --user --upgrade pip # Rebuild image if force_rebuild after that command -ARG CACHEBUST -ARG branch=develop +ARG PYTHON_BINDINGS_REF=develop # Builds the precice python bindings for python3 -RUN pip3 install --user git+https://github.com/precice/python-bindings.git@$branch +RUN pip3 install --user git+https://github.com/precice/python-bindings.git@${PYTHON_BINDINGS_REF}