From 97918e4773e51743395a9bdbdba8932b63e827c3 Mon Sep 17 00:00:00 2001 From: Trygve Aspelien Date: Tue, 25 Apr 2023 19:19:03 +0200 Subject: [PATCH] Restructure. Enhance unit testing and use poetry (#18) * Use poetry. Import files directly, re-structure * More rewrite and use pytest and mockings * Finally linting ok. Resstructuring. More unit tests. Gridpp only in interpolation * Much more testing and re-writing * Improve coverage and unit testing * Linting mostly * Lost changes * Add a general obs2json entry point to be used for e.g obsoul * Need all values in ncgen now * Correct linting * Correct name and increase version * Split dependencies * Adapt to pep517 * Try commenting out groups * Correct workflow * Correct typo * Correct typo * Assume yes * Add deps * Install ncgen * Use only toml and update README/INSTALL * Install coveralls * Disable auto * Move coveralls group * Do as I'm told * Change coveralls tool * Make is not in poetry * Try docs * Try docs * Try docs * Call sphinx-build from poetry --- .flakeheaven.toml | 49 + .github/workflows/linting.yaml | 69 + .github/workflows/python-package.yml | 180 +- INSTALL | 16 +- README.rst | 41 +- auto_sphinx.py | 47 +- bin/FirstGuess4gridpp | 18 - bin/bufr2json | 18 - bin/create_forcing | 18 - bin/create_lsm_file | 18 - bin/create_namelist | 18 - bin/cryoclim_pseudoobs | 17 - bin/dump_environ | 8 - bin/gridpp | 17 - bin/hm2pysurfex | 17 - bin/json_gui2toml | 76 - bin/masterodb | 18 - bin/merge_json_namelist_settings | 19 - bin/merge_qc_data | 17 - bin/merge_toml_files | 18 - bin/modify_forcing | 18 - bin/offline | 18 - bin/oi2soda | 18 - bin/perturbed_offline | 18 - bin/pgd | 19 - bin/plot_field | 20 - bin/plot_points | 19 - bin/plot_timeseries | 17 - bin/prep | 17 - bin/qc2obsmon | 18 - bin/sentinel_obs | 17 - bin/set_domain | 61 - bin/set_geo_from_obs_set | 21 - bin/set_geo_from_stationlist | 21 - bin/shape2ign | 18 - bin/soda | 19 - bin/timeseries2json | 17 - bin/titan | 19 - conf.py | 38 +- docs/Makefile | 2 +- docs/example.rst | 7 +- examples/find_subset.py | 43 +- index.rst | 927 +++--- prepare_testdata.sh | 40 - pyproject.toml | 153 + setup.py | 205 -- surfex/__init__.py | 234 +- surfex/assim.py | 124 - surfex/binary_input.py | 904 ++++++ surfex/bufr.py | 281 +- surfex/cache.py | 31 +- surfex/cli.py | 2959 ++++++++------------ surfex/cmd_parsing.py | 2183 +++++++++++++++ surfex/configuration.py | 365 ++- surfex/datetime_utils.py | 51 + surfex/ecoclimap.py | 254 ++ surfex/fa.py | 70 +- surfex/file.py | 831 +++--- surfex/forcing.py | 562 ++-- surfex/geo.py | 465 +-- surfex/grib.py | 548 ++-- surfex/input_methods.py | 397 +++ surfex/interpolation.py | 514 +++- surfex/namelist.py | 2837 ++++++++----------- surfex/netcdf.py | 394 ++- surfex/obs.py | 866 +++--- surfex/observation.py | 159 ++ surfex/obsmon.py | 182 +- surfex/obsoul.py | 174 ++ surfex/platform.py | 437 +++ surfex/plot.py | 1 + surfex/read.py | 221 +- surfex/run.py | 298 +- surfex/timeseries.py | 169 +- surfex/titan.py | 809 ++++-- surfex/util.py | 160 +- surfex/variable.py | 209 +- test/bin/CANARI | 31 - test/bin/MASTERODB | 24 - test/bin/MASTERODB_CANARI | 1 - test/bin/OFFLINE.exe | 1 - test/bin/OFFLINE_NC | 1 - test/bin/PGD.exe | 8 - test/bin/PGD_NC | 1 - test/bin/PREP.exe | 1 - test/bin/PREP_NC | 1 - test/bin/SODA.exe | 1 - test/bin/SODA_NC | 1 - test/bin/SURFEX | 56 - test/fixtures/config.yml | 165 -- test/nam/POLYNOMES_ISBA | 48 - test/nam/constants.json | 6 - test/nam/cv.json | 23 - test/nam/cv_sg.json | 24 - test/nam/flake.json | 8 - test/nam/gridpp.json | 13 - 
test/nam/io.json | 14 - test/nam/meb_settings.json | 13 - test/nam/offline.json | 38 - test/nam/prep.json | 14 - test/nam/prep_from_namelist_values.json | 36 - test/nam/prep_sice.json | 6 - test/nam/prep_snow.json | 5 - test/nam/rsmin.json | 23 - test/nam/rsmin_mod.json | 13 - test/nam/rsmin_sg.json | 24 - test/nam/rsmin_sg_mod.json | 15 - test/nam/sea.json | 5 - test/nam/selected_output.json | 131 - test/nam/sice.json | 7 - test/nam/soda.json | 9 - test/nam/soda_isba_oi.json | 14 - test/nam/treedrag.json | 5 - test/settings/conf_proj_test.json | 19 - test/settings/domains.json | 96 - test/settings/hm_env.json | 229 -- test/settings/nc.json | 8 - test/settings/nc.toml | 3 - test/settings/test_system.json | 6 - test/test_converter.py | 123 - test/test_firstguess4oi.py | 52 - test/test_forcing.py | 79 - test/test_geo.py | 275 -- test/test_grib.py | 96 - test/test_gridpp.py | 58 - test/test_hm2pysurfex.py | 23 - test/test_obs.py | 140 - test/test_obsmon.py | 35 - test/test_oi2soda.py | 44 - test/test_plot.py | 78 - test/test_run_binary.py | 381 --- test/test_titan.py | 338 --- test/test_variable.py | 109 - {test => tests}/__init__.py | 0 tests/conftest.py | 817 ++++++ tests/smoke/test_cli_fg_titan_oi_obsmon.py | 623 +++++ tests/smoke/test_cli_forcing.py | 63 + tests/smoke/test_cli_hm2pysurfex.py | 250 ++ tests/smoke/test_cli_misc.py | 212 ++ tests/smoke/test_cli_namelist.py | 67 + tests/smoke/test_cli_obs.py | 50 + tests/smoke/test_cli_plot.py | 106 + tests/smoke/test_cli_run_binary.py | 400 +++ tests/smoke/test_cli_set_geo.py | 103 + tests/unit/test_binary_input_data.py | 110 + tests/unit/test_bufr.py | 34 + tests/unit/test_converter.py | 40 + tests/unit/test_fa.py | 0 tests/unit/test_file.py | 793 ++++++ tests/unit/test_frost.py | 21 + tests/unit/test_geo.py | 245 ++ tests/unit/test_grib.py | 163 ++ tests/unit/test_interpolation.py | 0 tests/unit/test_namelist.py | 80 + tests/unit/test_netatmo.py | 85 + tests/unit/test_netcdf.py | 10 + tests/unit/test_obsoul.py | 65 + tests/unit/test_obsset.py | 60 + tests/unit/test_oi2soda.py | 21 + tests/unit/test_timeseries.py | 59 + tests/unit/test_titan.py | 140 + tests/unit/test_variable.py | 273 ++ 162 files changed, 16861 insertions(+), 11185 deletions(-) create mode 100644 .flakeheaven.toml create mode 100644 .github/workflows/linting.yaml delete mode 100755 bin/FirstGuess4gridpp delete mode 100755 bin/bufr2json delete mode 100755 bin/create_forcing delete mode 100755 bin/create_lsm_file delete mode 100755 bin/create_namelist delete mode 100755 bin/cryoclim_pseudoobs delete mode 100755 bin/dump_environ delete mode 100755 bin/gridpp delete mode 100755 bin/hm2pysurfex delete mode 100755 bin/json_gui2toml delete mode 100755 bin/masterodb delete mode 100755 bin/merge_json_namelist_settings delete mode 100755 bin/merge_qc_data delete mode 100755 bin/merge_toml_files delete mode 100755 bin/modify_forcing delete mode 100755 bin/offline delete mode 100755 bin/oi2soda delete mode 100755 bin/perturbed_offline delete mode 100755 bin/pgd delete mode 100755 bin/plot_field delete mode 100755 bin/plot_points delete mode 100755 bin/plot_timeseries delete mode 100755 bin/prep delete mode 100755 bin/qc2obsmon delete mode 100755 bin/sentinel_obs delete mode 100755 bin/set_domain delete mode 100755 bin/set_geo_from_obs_set delete mode 100755 bin/set_geo_from_stationlist delete mode 100755 bin/shape2ign delete mode 100755 bin/soda delete mode 100755 bin/timeseries2json delete mode 100755 bin/titan delete mode 100755 prepare_testdata.sh create mode 100644 pyproject.toml 
delete mode 100644 setup.py delete mode 100644 surfex/assim.py create mode 100644 surfex/binary_input.py create mode 100644 surfex/cmd_parsing.py create mode 100644 surfex/datetime_utils.py create mode 100644 surfex/ecoclimap.py create mode 100644 surfex/input_methods.py create mode 100644 surfex/observation.py create mode 100644 surfex/obsoul.py create mode 100644 surfex/platform.py create mode 100644 surfex/plot.py delete mode 100755 test/bin/CANARI delete mode 100755 test/bin/MASTERODB delete mode 120000 test/bin/MASTERODB_CANARI delete mode 120000 test/bin/OFFLINE.exe delete mode 120000 test/bin/OFFLINE_NC delete mode 100755 test/bin/PGD.exe delete mode 120000 test/bin/PGD_NC delete mode 120000 test/bin/PREP.exe delete mode 120000 test/bin/PREP_NC delete mode 120000 test/bin/SODA.exe delete mode 120000 test/bin/SODA_NC delete mode 100755 test/bin/SURFEX delete mode 100644 test/fixtures/config.yml delete mode 100644 test/nam/POLYNOMES_ISBA delete mode 100644 test/nam/constants.json delete mode 100644 test/nam/cv.json delete mode 100644 test/nam/cv_sg.json delete mode 100644 test/nam/flake.json delete mode 100644 test/nam/gridpp.json delete mode 100644 test/nam/io.json delete mode 100644 test/nam/meb_settings.json delete mode 100644 test/nam/offline.json delete mode 100644 test/nam/prep.json delete mode 100644 test/nam/prep_from_namelist_values.json delete mode 100644 test/nam/prep_sice.json delete mode 100644 test/nam/prep_snow.json delete mode 100644 test/nam/rsmin.json delete mode 100644 test/nam/rsmin_mod.json delete mode 100644 test/nam/rsmin_sg.json delete mode 100644 test/nam/rsmin_sg_mod.json delete mode 100644 test/nam/sea.json delete mode 100644 test/nam/selected_output.json delete mode 100644 test/nam/sice.json delete mode 100644 test/nam/soda.json delete mode 100644 test/nam/soda_isba_oi.json delete mode 100644 test/nam/treedrag.json delete mode 100644 test/settings/conf_proj_test.json delete mode 100644 test/settings/domains.json delete mode 100644 test/settings/hm_env.json delete mode 100644 test/settings/nc.json delete mode 100644 test/settings/nc.toml delete mode 100644 test/settings/test_system.json delete mode 100644 test/test_converter.py delete mode 100644 test/test_firstguess4oi.py delete mode 100644 test/test_forcing.py delete mode 100644 test/test_geo.py delete mode 100644 test/test_grib.py delete mode 100644 test/test_gridpp.py delete mode 100644 test/test_hm2pysurfex.py delete mode 100644 test/test_obs.py delete mode 100644 test/test_obsmon.py delete mode 100644 test/test_oi2soda.py delete mode 100644 test/test_plot.py delete mode 100644 test/test_run_binary.py delete mode 100644 test/test_titan.py delete mode 100644 test/test_variable.py rename {test => tests}/__init__.py (100%) create mode 100644 tests/conftest.py create mode 100644 tests/smoke/test_cli_fg_titan_oi_obsmon.py create mode 100644 tests/smoke/test_cli_forcing.py create mode 100644 tests/smoke/test_cli_hm2pysurfex.py create mode 100644 tests/smoke/test_cli_misc.py create mode 100644 tests/smoke/test_cli_namelist.py create mode 100644 tests/smoke/test_cli_obs.py create mode 100644 tests/smoke/test_cli_plot.py create mode 100644 tests/smoke/test_cli_run_binary.py create mode 100644 tests/smoke/test_cli_set_geo.py create mode 100644 tests/unit/test_binary_input_data.py create mode 100644 tests/unit/test_bufr.py create mode 100644 tests/unit/test_converter.py create mode 100644 tests/unit/test_fa.py create mode 100644 tests/unit/test_file.py create mode 100644 tests/unit/test_frost.py create mode 100644 
tests/unit/test_geo.py create mode 100644 tests/unit/test_grib.py create mode 100644 tests/unit/test_interpolation.py create mode 100644 tests/unit/test_namelist.py create mode 100644 tests/unit/test_netatmo.py create mode 100644 tests/unit/test_netcdf.py create mode 100644 tests/unit/test_obsoul.py create mode 100644 tests/unit/test_obsset.py create mode 100644 tests/unit/test_oi2soda.py create mode 100644 tests/unit/test_timeseries.py create mode 100644 tests/unit/test_titan.py create mode 100644 tests/unit/test_variable.py diff --git a/.flakeheaven.toml b/.flakeheaven.toml new file mode 100644 index 0000000..f73cd9c --- /dev/null +++ b/.flakeheaven.toml @@ -0,0 +1,49 @@ +[tool.flakeheaven] + exclude = [".*/", "tmp/", "*/tmp/", "*.ipynb"] + # Group output by file. Colored. + format = "grouped" + # Show line of source code in output, with syntax highlighting + show_source = true + # flake8-darglint params + docstring_style = "google" + strictness = "short" + + # list of plugins and rules for them + [tool.flakeheaven.plugins] + # Activate all rules for all plugins by default + "*" = ["+*"] + # Remove from flake8-bandit: + # "S403": Consider possible security implications associated with pickle + # "S404": Consider possible security implications associated with subprocess + # "S603": To allow using subprocess.call/run + # "S606": To allow using os.startfile + flake8-bandit = ["+*", "-S403", "-S404", "-S603", "-S606"] + # Remove C408 from flake8-comprehensions because I think sometimes the "dict" syntax + # looks cleaner than literal "{}". Dict creation performance is not an issue here. + flake8-comprehensions = ["+*", "-C408"] + flake8-docstrings = ["+*", "-D105"] # Remove "D105: Missing docstring in magic method" + # Exclude some errors from pycodestyle for compatibility with black. + # "E501" is for max_line_length violations. Leave this for black to handle. + # For the other excluded errors, see: + # + # + pycodestyle = ["+*", "-W503", "-E203", "-E501"] + # Disable pylint plugin at the moment. pylint will be run separately. + pylint = ["-*"] + + [tool.flakeheaven.exceptions."*/wsgi.py"] + # Ignore "F401 (imported but unused)" in this case + pyflakes = ["+*", "-F401"] + + [tool.flakeheaven.exceptions."tests/*.py"] + # Disable some flake8-bandit checks in tests: + # "S101": To allow assert use + # "S301": To Allow testing pickle/unpickle + flake8-bandit = ["+*", "-S101", "-S301"] + # Ignore "-D105" and "-D102" (Missing docstring in public class/method) in unit tests. + # The unit tests class and method names are supposed to be self-explanatory. + flake8-docstrings = ["+*", "-D105", "-D101", "-D102", "-D103"] + + [tool.flakeheaven.exceptions."deode/templates/*.py"] + # "E265" is for block comments starting with "# ". It may break ecflow envsubst. 
+ pycodestyle = ["-E265"] diff --git a/.github/workflows/linting.yaml b/.github/workflows/linting.yaml new file mode 100644 index 0000000..9bde1c7 --- /dev/null +++ b/.github/workflows/linting.yaml @@ -0,0 +1,69 @@ +#.github/workflows/linting.yaml +name: Linting Checks + +on: + pull_request: + branches: + - master + - develop + paths: + - '**.py' + push: + branches: + - '**' # Every branch + paths: + - '**.py' + +jobs: + linting: + if: github.repository_owner == 'metno' + name: Run Linters + runs-on: ubuntu-latest + steps: + #---------------------------------------------- + # check-out repo and set-up python + #---------------------------------------------- + - name: Check out repository + uses: actions/checkout@v3 + - name: Set up python + id: setup-python + uses: actions/setup-python@v4 + with: + python-version: '3.8' + + #---------------------------------------------- + # --- configure poetry & install project ---- + #---------------------------------------------- + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + + - name: Load cached venv (if cache exists) + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/pyproject.toml') }} + + - name: Install dependencies (if venv cache is not found) + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root --only linting + + - name: Install the project itself + run: poetry install --no-interaction --only-root + + #---------------------------------------------- + # Run the linting checks + #---------------------------------------------- + - name: isort + run: | + poetry run isort --check-only . + - name: black + run: | + poetry run black --check . + - name: flakeheaven + run: | + export FLAKEHEAVEN_CACHE="${TMPDIR:-${TEMP:-${TMP:-/tmp}}}/flakeheaven_cache.$(openssl rand -hex 12)" + poetry run flakeheaven lint . 
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 6c6fc9d..0f2f2fb 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -11,104 +11,92 @@ on: jobs: build: - runs-on: ubuntu-20.04 + if: github.repository_owner == 'metno' strategy: + fail-fast: true matrix: - python-version: [3.8] + os: [ "ubuntu-latest" ] + env: [ "pytest" ] + python-version: [ "3.8" ] + + name: "${{ matrix.os }}, python=${{ matrix.python-version }}" + runs-on: ${{ matrix.os }} + + container: + image: python:${{ matrix.python-version }}-bullseye + env: + COVERAGE_FILE: ".coverage.${{ matrix.env }}.${{ matrix.python-version }}" steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Check config - run: | - cat > sshd_config < - ssh-keygen -t rsa -b 4096 -N '' -f ~/.ssh/id_rsa - - name: Add key to auth file - run: > - cat ~/.ssh/id_rsa.pub | tee -a ~/.ssh/authorized_keys - - name: Ensure the owner of the key is correct - run: | - chmod 600 ~/.ssh/authorized_keys - chmod 700 ~/.ssh - sudo chmod -c 0755 ~/ - ls -la ~/.ssh - - name: Test SSH connection to localhost - run: > - ssh -vvv -i ~/.ssh/id_rsa -o BatchMode=yes -o StrictHostKeyChecking=no $(whoami)@localhost - - name: Install dependencies - run: | - sudo apt update && sudo apt install libudunits2-dev libboost-dev libproj-dev libeccodes0 libeccodes-dev \ - libarmadillo-dev libgsl-dev python3-setuptools python3-nose python3-numpy python3-pyproj + #---------------------------------------------- + # check-out repo + #---------------------------------------------- + - name: Check out repository + uses: actions/checkout@v3 + + - name: Install dependencies + run: | + apt-get update + apt-get install -y libudunits2-dev libproj-dev libeccodes0 libeccodes-dev libnetcdf-dev netcdf-bin + + #---------------------------------------------- + # --- configure poetry & install project ---- + #---------------------------------------------- + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + virtualenvs-create: true + virtualenvs-in-project: true + + - name: Load cached venv (if cache exists) + id: cached-poetry-dependencies + uses: actions/cache@v3 + with: + path: .venv + key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml') }} + + - name: Install dependencies (if venv cache is not found) + if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' + run: poetry install --no-interaction --no-root --only main,test + + - name: Install the project itself + run: poetry install --no-interaction --only-root + + #---------------------------------------------- + # run test suite and report coverage + #---------------------------------------------- + - name: Run tests + run: | + poetry run pytest + + - name: Coveralls + if: ${{ matrix.python-version == 3.8 }} + run: | + export COVERALLS_REPO_TOKEN=${{secrets.COVERALLS_REPO_TOKEN}} + git config --global --add safe.directory $PWD + poetry run coveralls - python -m pip install --upgrade pip - python --version - pip install numpy - python -c "import numpy" - pip install flake8 pytest - pip install sphinx - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - #- name: Lint with flake8 - # run: | - # # stop the build if there are Python syntax errors or undefined names - # flake8 . 
--count --select=E9,F63,F7,F82 --show-source --statistics - # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Install package - run: | - pip install -e .[test,plot] - - name: Test and create coverage - run: | - wget --no-check-certificate --load-cookies /tmp/cookies.txt "https://docs.google.com/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate 'https://docs.google.com/uc?export=download&id=16XcmpkaIRki2-F5D16j133kbYdZo_Ws9' -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\1\n/p')&id=16XcmpkaIRki2-F5D16j133kbYdZo_Ws9" -O testdata.zip && rm -rf /tmp/cookies.txt - unzip -o testdata.zip - echo "Testdata" - ./prepare_testdata.sh - export PATH=$PWD/test/bin:/usr/bin/:$PATH - export CLIENTID=${{secrets.CLIENTID}} - coverage run --source=. -m unittest discover - - name: Coveralls - if: ${{ matrix.python-version == 3.8 }} - run: | - export CLIENTID=${{ secrets.CLIENTID }} - # coverage html -d coverage - COVERALLS_REPO_TOKEN=${{ secrets.COVERALLS_REPO_TOKEN }} coveralls - - name: Create documentation - if: ${{ matrix.python-version == 3.8 }} - run: | - cd docs - make html - - name: Commit documentation changes - if: ${{ matrix.python-version == 3.8 }} - run: | - git clone https://github.com/metno/pysurfex.git --branch gh-pages --single-branch gh-pages - cp -r docs/build/html/* gh-pages/ - cd gh-pages - git config --local user.email "action@github.com" - git config --local user.name "GitHub Action" - git add . - git commit -m "Update documentation" -a || true - # The above command will fail if no changes were present, so we ignore - # the return code. - - name: Push changes - if: ${{ matrix.python-version == 3.8 }} && github.repository == 'metno/pysurfex' - uses: ad-m/github-push-action@master - with: - branch: gh-pages - directory: gh-pages - github_token: ${{ secrets.GITHUB_TOKEN }} + - name: Create documentation + if: ${{ matrix.python-version == 3.8 }} + run: | + cd docs + make html + - name: Commit documentation changes + if: ${{ matrix.python-version == 3.8 }} + run: | + git clone https://github.com/metno/pysurfex.git --branch gh-pages --single-branch gh-pages + cp -r docs/build/html/* gh-pages/ + cd gh-pages + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add . + git commit -m "Update documentation" -a || true + # The above command will fail if no changes were present, so we ignore + # the return code. + - name: Push changes + if: ${{ matrix.python-version == 3.8 && github.event_name != 'pull_request' }} + uses: ad-m/github-push-action@master + with: + branch: gh-pages + directory: gh-pages + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/INSTALL b/INSTALL index 82397bc..c24bce9 100644 --- a/INSTALL +++ b/INSTALL @@ -1,17 +1,3 @@ - -# PPI centos7 -module load Python/3.6.8 -mv ~/.local ~/.local.old -pip3 install titanlib==0.2.1 --user --global-option=build_ext --global-option="-I/modules/centos7/boost/1.69.0/include:/modules/centos7/gsl/2.5/include" --global-option=build_ext --global-option="-L/modules/centos7/gsl/2.5/lib" -pip3 install pysurfex --user -pip3 install tomlkit --user -# Remove enum -mkdir /modules/centos7/user-apps/suv/pysurfex/0.0.1a8 - -# Create module -mv ~/.local/* /modules/centos7/user-apps/suv/pysurfex/0.0.1a8/. 
-cp -i /modules/MET/centos7/user-modules/suv/pysurfex/0.0.1-dev /modules/MET/centos7/user-modules/suv/pysurfex/0.0.1a8 -# Modify /modules/MET/centos7/user-modules/suv/pysurfex/0.0.1a8 - +poetry install diff --git a/README.rst b/README.rst index 9d29a34..f4955ca 100644 --- a/README.rst +++ b/README.rst @@ -23,13 +23,13 @@ Installation of pregenerated packages from pypi (pip) .. code-block:: bash - pip3 install pysurfex --use-feature=2020-resolver + pip3 install pysurfex User installation: .. code-block:: bash - pip3 install pysurfex --user --use-feature=2020-resolver + pip3 install pysurfex --user @@ -42,18 +42,7 @@ Install the required pacakges (some might be obsolete if the pip packages contai .. code-block:: bash sudo apt-get update - # Python tools - sudo apt-get install python3-setuptools python3-numpy python3-nose - # Cfunits - sudo apt-get install libudunits2-dev - # Projection - sudo apt-get install python3-pyproj - # Eccodes for bufr/grib1/grib2 - sudo apt-get install libeccodes0 libeccodes-dev - # Ecflow for user experiements - sudo apt-get install ecflow-server ecflow-client python3-ecflow - # Titanlib - sudo apt-get install libboost-dev libproj-dev libarmadillo-dev libgsl-dev + sudo apt-get install -y libudunits2-dev libproj-dev libeccodes0 libeccodes-dev libnetcdf-dev netcdf-bin The following depencies are needed. Install the non-standard ones e.g. with pip or your system installation system. @@ -63,20 +52,14 @@ General dependencies (from pypi) .. code-block:: bash numpy - scipy netCDF4 cfunits pyproj pyyaml toml netCDF4 - datetime f90nml requests - json; python_version < '3' - StringIO; python_version < '3' - eccodes - db-sqlite3 To read NetCDF files: @@ -97,7 +80,6 @@ To plot: .. code-block:: bash matplotlib - cartopy To get observations from frost.met.no API: @@ -110,6 +92,7 @@ For Quality control of observations .. code-block:: bash titanlib + db-sqlite3 For optimal interpolation and observation operators @@ -121,9 +104,7 @@ For testing: .. code-block:: bash - unittest - nose - Testdata from https://docs.google.com/uc?export=download&id=1FSNRQE998-ulBq8GZ0zZ40cP-TLrQulV + pytest Download the source code, then install ``pysurfex`` by executing the following inside the extracted folder: @@ -132,13 +113,7 @@ Install pysurfex ------------------------------------------- .. code-block:: bash - sudo pip install -e . - -or - -.. code-block:: bash - - sudo pip install -e . --user + poetry install Create documentation --------------------------------------------- @@ -148,10 +123,6 @@ Create documentation cd docs # Create html documentation make html - # Create latex documentation - make latex - # Create a pdf documentation - make latexpdf Examples diff --git a/auto_sphinx.py b/auto_sphinx.py index b9684a6..2be6e7b 100755 --- a/auto_sphinx.py +++ b/auto_sphinx.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +"""Create auto documentation.""" import os classes = [] @@ -6,17 +7,11 @@ methods = [] code_dirs = ["scheduler", "surfex"] for code_dir in code_dirs: - for root, dirs, files in os.walk("./" + code_dir): - # print(files) + for root, __, files in os.walk("./" + code_dir): for f in files: f = f.strip() - # print(f, root) if f.endswith(".py"): root = root.replace("./", "") - # print("root", root) - # print("dirs", dirs) - # print("item", f) - ff = f.replace(".py", "") fname = root + "/" + f with open(fname, "r") as fh: @@ -27,26 +22,42 @@ if line.find("(") > 0 and line.find(":") == (len(line) - 1): cl = line.split(" ")[1] cl = cl.split("(")[0] - cl = root + "." + cl + cl = root + "." 
+ ff + "." + cl classes.append(cl) elif " def " in line: if line.find("(") > 0 and line.find(":") == (len(line) - 1): line = line.lstrip() - # print(line.split(" ")) m = line.split(" ")[1] m = m.split("(")[0] if cl is not None: class_methods.append(cl + "." + m) else: if "def " in line: - if line.find("(") > 0 and line.find(":") == (len(line) - 1): - # print(line) + if line.find("(") > 0 and line.find(":") == ( + len(line) - 1 + ): line = line.lstrip() m = line.split(" ")[1] m = m.split("(")[0] - methods.append(root + "." + m) + methods.append(root + "." + ff + "." + m) + -print("\nClasses") +print(".. SURFEX Python API documentation master file, created by") +print(" sphinx-quickstart on Mon Mar 2 18:25:38 2020.") +print(" You can adapt this file completely to your liking, but it should at least") +print(" contain the root `toctree` directive.") +print("") +print("PYSURFEX documentation") +print("=============================================") +print("") +print(".. toctree::") +print(" :maxdepth: 3") +print(" :caption: Contents:") +print("") +print(".. include:: README.rst") +print(".. include:: docs/example.rst") +print("") +print("Classes") print("---------------------------------------------") for cl in classes: print(".. autoclass:: " + cl) @@ -54,9 +65,17 @@ print("\nClass methods") print("---------------------------------------------") for m in class_methods: - print(".. autofunction:: " + m) + print(".. automethod:: " + m) print("\nMethods") print("---------------------------------------------") for m in methods: print(".. autofunction:: " + m) +print("") +print("* :ref: `README`") +print("") +print("Indices and tables") +print("==================") +print("") +print("* :ref:`genindex`") +print("* :ref:`search`") diff --git a/bin/FirstGuess4gridpp b/bin/FirstGuess4gridpp deleted file mode 100755 index c19c65f..0000000 --- a/bin/FirstGuess4gridpp +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Create a first guess to be used for gridpp.""" -import logging -import sys -import surfex - - -if __name__ == "__main__": - kwargs = surfex.parse_args_first_guess_for_oi(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ FirstGuess4gridpp ******************") - surfex.first_guess_for_oi(**kwargs) diff --git a/bin/bufr2json b/bin/bufr2json deleted file mode 100755 index 76a0018..0000000 --- a/bin/bufr2json +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Convert bufr to json observation set.""" -import logging -import sys -import surfex - - -if __name__ == "__main__": - kwargs = surfex.parse_args_bufr2json(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ bufr2json ******************") - surfex.run_bufr2json(**kwargs) diff --git a/bin/create_forcing b/bin/create_forcing deleted file mode 100755 index 14772e8..0000000 --- a/bin/create_forcing +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Create forcing.""" -import logging -import sys -import surfex - -if __name__ == '__main__': - kwargs = surfex.parse_args_create_forcing(sys.argv[1:]) - debug = kwargs.get("debug") - - 
if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ create_forcing ******************") - options, var_objs, att_objs = surfex.forcing.set_forcing_config(**kwargs) - surfex.forcing.run_time_loop(options, var_objs, att_objs) diff --git a/bin/create_lsm_file b/bin/create_lsm_file deleted file mode 100755 index 520e5b7..0000000 --- a/bin/create_lsm_file +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Create a LSM mask.""" -import logging -import sys -import surfex - - -if __name__ == "__main__": - kwargs = surfex.parse_args_lsm_file_assim(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ create_lsm_fil ******************") - surfex.run_lsm_file_assim(**kwargs) diff --git a/bin/create_namelist b/bin/create_namelist deleted file mode 100755 index 173d65a..0000000 --- a/bin/create_namelist +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Running offline binary.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_args_create_namelist(sys.argv[1:]) - debug = kwargs.get("debug") - mode = kwargs.get("mode") - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ %s ******************", mode) - surfex.run_create_namelist(**kwargs) diff --git a/bin/cryoclim_pseudoobs b/bin/cryoclim_pseudoobs deleted file mode 100755 index 5dac4c4..0000000 --- a/bin/cryoclim_pseudoobs +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Create pseudo observations from cryoclim.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_cryoclim_pseudoobs(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ cryoclim_pseudoobs ******************") - surfex.run_cryoclim_pseuodoobs(**kwargs) diff --git a/bin/dump_environ b/bin/dump_environ deleted file mode 100755 index 9fc7860..0000000 --- a/bin/dump_environ +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python3 -"""Dump environment.""" -import json -import os - -if __name__ == "__main__": - with open("rte.json", mode="w", encoding="utf-8") as file_handler: - json.dump(os.environ.copy(), file_handler) diff --git a/bin/gridpp b/bin/gridpp deleted file mode 100755 index f02d80d..0000000 --- a/bin/gridpp +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Do horizontal OI with gridpp.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_args_gridpp(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - 
logging.info("************ gridpp ******************") - surfex.run_gridpp(**kwargs) diff --git a/bin/hm2pysurfex b/bin/hm2pysurfex deleted file mode 100755 index bac49a1..0000000 --- a/bin/hm2pysurfex +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Convert Harmonie environment to pysurfex config.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_args_hm2pysurfex(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ hm2pysurfex ******************") - surfex.hm2pysurfex(**kwargs) diff --git a/bin/json_gui2toml b/bin/json_gui2toml deleted file mode 100755 index 00c8f7d..0000000 --- a/bin/json_gui2toml +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env python3 -"""Convert json settings from gui to toml files.""" -import sys -import os -import logging -import json -import argparse -import surfex - - -def parse(): - """Parse the command line input arguments.""" - parser = argparse.ArgumentParser("Creating the TOML settings file to run SURFEX from " - "JSON files from GUI") - - parser.add_argument('--version', action='version', - version=f'surfex {surfex.__version__}') - parser.add_argument('--input', '-i', type=str, nargs="?", required=True, - help="Input TOML file if wanted") - parser.add_argument('--output', '-o', required=True, nargs='?') - parser.add_argument('--debug', help="Show debug information", action="store_true") - - if len(sys.argv) == 1: - parser.print_help() - sys.exit() - - args = parser.parse_args() - - return args.input, args.output, args.debug - - -def recursive_items(dictionary): - """Recursive. - - Args: - dictionary (_type_): _description_ - - Yields: - _type_: _description_ - """ - for my_key, my_value in dictionary.items(): - if isinstance(my_value, dict): - yield from recursive_items(my_value) - else: - yield my_key, my_value - - -if __name__ == "__main__": - - input_file, output_file, debug = parse() - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ json_gui2toml ******************") - - logging.info("Writing settings to %s", output_file) - ofh = open(output_file, mode="w", encoding="utf-8") - if os.path.exists(input_file): - print("Read toml settings from " + input_file) - with open(input_file, mode="r", encoding="utf-8") as ifile: - input_data = json.load(ifile) - for key, value in recursive_items(input_data): - if isinstance(value, bool): - if value: - val = ".TRUE." - else: - val = ".FALSE." 
- else: - val = value - logging.debug('%s=%s', key, val) - ofh.write(key + '=' + val + '\n') - else: - raise FileNotFoundError(f"Input file does not exist: {input_file}") - ofh.close() diff --git a/bin/masterodb b/bin/masterodb deleted file mode 100755 index 9aea2e3..0000000 --- a/bin/masterodb +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Setup of namelist and output files for MASTERODB binary.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_args_masterodb(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ masterodb ******************") - surfex.run_masterodb(**kwargs) diff --git a/bin/merge_json_namelist_settings b/bin/merge_json_namelist_settings deleted file mode 100755 index c5a0d4c..0000000 --- a/bin/merge_json_namelist_settings +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Merge json files with namelist settings.""" -import sys -import logging -import surfex - - -if __name__ == "__main__": - - kwargs = surfex.parse_merge_namelist_settings(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ merge_json_namelist_settings ******************") - surfex.run_merge_namelist_settings(kwargs["json"], kwargs["output"], indent=kwargs["indent"]) diff --git a/bin/merge_qc_data b/bin/merge_qc_data deleted file mode 100755 index f82724a..0000000 --- a/bin/merge_qc_data +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Merge json QC dataset files.""" -import sys -import logging -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_args_merge_qc_data(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ merge_qc_data ******************") - surfex.merge_qc_data(kwargs["validtime"], kwargs["filenames"], ["output"], indent=["indent"]) diff --git a/bin/merge_toml_files b/bin/merge_toml_files deleted file mode 100755 index 7349e1c..0000000 --- a/bin/merge_toml_files +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Merge toml config files.""" -import sys -import logging -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_merge_toml_settings(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ merge_toml_files ******************") - surfex.run_merge_toml_settings(**kwargs) diff --git a/bin/modify_forcing b/bin/modify_forcing deleted file mode 100755 index 74db8ea..0000000 --- a/bin/modify_forcing +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Modify the forcing setting first time step from previous forcing.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - 
kwargs = surfex.parse_args_modify_forcing(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ modify_forcing ******************") - surfex.modify_forcing(**kwargs) diff --git a/bin/offline b/bin/offline deleted file mode 100755 index e53bff4..0000000 --- a/bin/offline +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Running offline binary.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_args_surfex_binary(sys.argv[1:], "offline") - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ offline ******************") - surfex.run_surfex_binary("offline", **kwargs) diff --git a/bin/oi2soda b/bin/oi2soda deleted file mode 100755 index a478039..0000000 --- a/bin/oi2soda +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Convert horizontal analysis files to an ascii file used in SODA.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_args_oi2soda(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ oi2soda ******************") - surfex.run_oi2soda(**kwargs) diff --git a/bin/perturbed_offline b/bin/perturbed_offline deleted file mode 100755 index c1add61..0000000 --- a/bin/perturbed_offline +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Run a perturbed offline forecast.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_args_surfex_binary(sys.argv[1:], "perturbed") - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ offline ******************") - surfex.run_surfex_binary("perturbed", **kwargs) diff --git a/bin/pgd b/bin/pgd deleted file mode 100755 index ed97277..0000000 --- a/bin/pgd +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Run PGD.""" -import logging -import sys -import surfex - - -if __name__ == "__main__": - - kwargs = surfex.parse_args_surfex_binary(sys.argv[1:], "pgd") - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ pgd ******************") - surfex.run_surfex_binary("pgd", **kwargs) diff --git a/bin/plot_field b/bin/plot_field deleted file mode 100755 index 883b824..0000000 --- a/bin/plot_field +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 -"""Plot a field. 
Use plot_points instead.""" -# import logging -# import sys -# import surfex - - -if __name__ == "__main__": - - raise NotImplementedError("Not working yet") - # kwargs = surfex.parse_args_plot_field(sys.argv[1:]) - # debug = kwargs.get("debug") - # - # if debug: - # logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - # level=logging.DEBUG) - # else: - # logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - # logging.info("************ plot_field ******************") - # surfex.run_plot_field(**kwargs) diff --git a/bin/plot_points b/bin/plot_points deleted file mode 100755 index 899e9c7..0000000 --- a/bin/plot_points +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Plot points using a converter.""" -import logging -import sys -import surfex - - -if __name__ == "__main__": - - kwargs = surfex.parse_args_plot_points(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ plot_points ******************") - surfex.run_plot_points(**kwargs) diff --git a/bin/plot_timeseries b/bin/plot_timeseries deleted file mode 100755 index 1db8d5f..0000000 --- a/bin/plot_timeseries +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Plot a timeseries using a converter.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_plot_timeseries_args(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ plot_timeseries ******************") - surfex.run_plot_timeseries_from_json(**kwargs) diff --git a/bin/prep b/bin/prep deleted file mode 100755 index 1a8539c..0000000 --- a/bin/prep +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Run PREP.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_args_surfex_binary(sys.argv[1:], "prep") - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ prep ******************") - surfex.run_surfex_binary("prep", **kwargs) diff --git a/bin/qc2obsmon b/bin/qc2obsmon deleted file mode 100755 index ff37835..0000000 --- a/bin/qc2obsmon +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Convert quality control and analysis data to a SQLite file to be used by obsmon.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_args_qc2obsmon(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ qc2obsmon ******************") - surfex.write_obsmon_sqlite_file(**kwargs) diff --git a/bin/sentinel_obs b/bin/sentinel_obs deleted file mode 100755 index 084ed4e..0000000 --- a/bin/sentinel_obs +++ 
/dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Create observation data set from sentinel data.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_sentinel_obs(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ sentinel_obs ******************") - surfex.run_sentinel_obs(**kwargs) diff --git a/bin/set_domain b/bin/set_domain deleted file mode 100755 index 60ba1bc..0000000 --- a/bin/set_domain +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python3 -"""Set the domain from a json file containing definitions.""" -import argparse -import sys -import json -import os -import logging -import surfex - - -def parse(argv): - """Parse the command line input arguments.""" - parser = argparse.ArgumentParser() - - parser.add_argument('--version', action='version', - version=f'surfex {surfex.__version__}') - parser.add_argument('--domain', '-d', required=True, type=str, help="Name of domain") - parser.add_argument('--domains', required=True, type=str, help="Domain definitions") - parser.add_argument('--harmonie', action="store_true", help="Domain in harmonie definition") - parser.add_argument('--indent', required=False, default=2, type=int, help="Indented output") - parser.add_argument('--output', '-o', required=True, nargs='?') - parser.add_argument('--debug', help="Show debug information", action="store_true") - - if len(sys.argv) == 1: - parser.print_help() - sys.exit() - - return parser.parse_args(argv) - - -if __name__ == "__main__": - - try: - args = parse(sys.argv[1:]) - debug = args.debug - - if debug: - logging.basicConfig( - format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ set_domain ******************") - domain = args.domain - domains = args.domains - output = args.output - indent = args.indent - harmonie_mode = args.harmonie - if os.path.exists(domains): - with open(domains, mode="r", encoding="utf-8") as file_handler: - domains = json.load(file_handler) - domain_json = surfex.set_domain(domains, domain, hm_mode=harmonie_mode) - if domain_json is not None: - with open(output, mode="w", encoding="utf-8") as file_handler: - json.dump(domain_json, file_handler, indent=indent) - else: - raise Exception - else: - raise FileNotFoundError - except Exception as ex: - raise f"Could not set domain: {str(ex)}" diff --git a/bin/set_geo_from_obs_set b/bin/set_geo_from_obs_set deleted file mode 100755 index e626479..0000000 --- a/bin/set_geo_from_obs_set +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python3 -"""Set the geometry definition (LonLatVal) from a observations data set.""" -import logging -import sys -import json -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_args_set_geo_from_obs_set(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ set_geo_from_obs_set ******************") - geo = surfex.set_geo_from_obs_set(**kwargs) - output = kwargs["output"] - with 
open(output, mode="w", encoding="utf-8") as file_handler: - json.dump(geo.json, file_handler) diff --git a/bin/set_geo_from_stationlist b/bin/set_geo_from_stationlist deleted file mode 100755 index fe5e0ed..0000000 --- a/bin/set_geo_from_stationlist +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python3 -"""Set geometry from a stations list.""" -import sys -import logging -import json -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_args_set_geo_from_stationlist(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ set_geo_from_stationlist ******************") - geo = surfex.set_geo_from_stationlist(**kwargs) - output = kwargs["output"] - with open(output, mode="w", encoding="utf-8") as file_handler: - json.dump(geo.json, file_handler) diff --git a/bin/shape2ign b/bin/shape2ign deleted file mode 100755 index 60ae702..0000000 --- a/bin/shape2ign +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python3 -"""Set a IGN geometry from a shape file.""" -import logging -import sys -import surfex - -if __name__ == "__main__": - - kwargs = surfex.parse_args_shape2ign(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ shape2ign ******************") - surfex.run_shape2ign(**kwargs) diff --git a/bin/soda b/bin/soda deleted file mode 100755 index 74fdec6..0000000 --- a/bin/soda +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Run the SODA binary.""" -import sys -import logging -import surfex - - -if __name__ == "__main__": - - kwargs = surfex.parse_args_surfex_binary(sys.argv[1:], "soda") - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ soda ******************") - surfex.run_surfex_binary("soda", **kwargs) diff --git a/bin/timeseries2json b/bin/timeseries2json deleted file mode 100755 index 58eba6a..0000000 --- a/bin/timeseries2json +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -"""Convert a time series to json.""" -import sys -import logging -import surfex - -if __name__ == "__main__": - kwargs = surfex.parse_timeseries2json(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ timeseries2json ******************") - surfex.run_timeseries2json(**kwargs) diff --git a/bin/titan b/bin/titan deleted file mode 100755 index 926b570..0000000 --- a/bin/titan +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Run quality control using titan.""" -import sys -import logging -import surfex - - -if __name__ == "__main__": - - kwargs = surfex.parse_args_titan(sys.argv[1:]) - debug = kwargs.get("debug") - - if debug: - logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s 
%(message)s', - level=logging.DEBUG) - else: - logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - logging.info("************ titan ******************") - surfex.run_titan(**kwargs) diff --git a/conf.py b/conf.py index 462b8b3..fc50992 100644 --- a/conf.py +++ b/conf.py @@ -1,3 +1,4 @@ +"""Configuration file for documentation with sphinx.""" # Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full @@ -10,16 +11,12 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - # -- Project information ----------------------------------------------------- -project = 'SURFEX Python API (pysurfex)' -copyright = '2020, Trygve Aspelien' -author = 'Trygve Aspelien' +project = "SURFEX Python API (pysurfex)" +copyright = "2020, Trygve Aspelien" # noqa A001 +author = "Trygve Aspelien" # -- General configuration --------------------------------------------------- @@ -27,20 +24,26 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -#extensions = [ -#] -#extensions = ['sphinx.ext.autodoc'] -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.imgmath', - 'sphinx.ext.ifconfig', 'sphinx.ext.imgconverter', 'sphinx.ext.napoleon'] + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "sphinx.ext.imgmath", + "sphinx.ext.ifconfig", + "sphinx.ext.imgconverter", + "sphinx.ext.napoleon", +] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # -- Options for HTML output ------------------------------------------------- @@ -48,10 +51,9 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -#html_theme = 'alabaster' -html_theme = 'default' +html_theme = "default" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['docs/_static'] +html_static_path = ["docs/_static"] diff --git a/docs/Makefile b/docs/Makefile index e1eb95f..c9d62b5 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -4,7 +4,7 @@ # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build +SPHINXBUILD ?= poetry run sphinx-build SOURCEDIR = .. 
BUILDDIR = build diff --git a/docs/example.rst b/docs/example.rst index be0ff6e..4d3bff4 100644 --- a/docs/example.rst +++ b/docs/example.rst @@ -2,7 +2,7 @@ Examples ======================================================= -First you must install pysurfex and make sure you have it in PYTHONPATH and the bin directory in your path +First you must install pysurfex for example with poetry Create forcing from MET-Nordic analysis @@ -190,6 +190,11 @@ Run surfex from the module suv/surfex/cy43-dev on PPI (cy43 development version) +Plot MEPS data from thredds +======================================================= + +.. code-block:: bash + plot_points -v air_temperature_2m -g examples/domains/drammen.json -it netcdf -i https://thredds.met.no/thredds/dodsC/meps25epsarchive/2023/04/13/meps_det_2_5km_20230413T06Z.nc -t 2023041307 diff --git a/examples/find_subset.py b/examples/find_subset.py index 1f090b8..acabd1f 100755 --- a/examples/find_subset.py +++ b/examples/find_subset.py @@ -1,11 +1,17 @@ #!/usr/bin/env python3 - -import surfex -import pyproj +"""Example.""" import json + import numpy as np +import pyproj + +import surfex -from_json = json.load(open("/home/trygveasp/revision_control/pysurfex/examples/domains/met_nordic.json", "r")) +from_json = json.load( + open( + "/home/trygveasp/revision_control/pysurfex/examples/domains/met_nordic.json", "r" + ) +) geo = surfex.get_geo_object(from_json) wanted_lon = 10.0 @@ -13,9 +19,19 @@ wanted_lat = 60.0 ny = 100 -earth = 6.37122e+6 -proj_string = "+proj=lcc +lat_0=" + str(geo.xlat0) + " +lon_0=" + str(geo.xlon0) + " +lat_1=" + \ - str(geo.xlat0) + " +lat_2=" + str(geo.xlat0) + " +units=m +no_defs +R=" + str(earth) +earth = 6.37122e6 +proj_string = ( + "+proj=lcc +lat_0=" + + str(geo.xlat0) + + " +lon_0=" + + str(geo.xlon0) + + " +lat_1=" + + str(geo.xlat0) + + " +lat_2=" + + str(geo.xlat0) + + " +units=m +no_defs +R=" + + str(earth) +) proj = pyproj.CRS.from_string(proj_string) wgs84 = pyproj.CRS.from_string("EPSG:4326") @@ -29,17 +45,22 @@ x0 = np.empty([geo.nimax]) y0 = np.empty([geo.njmax]) for i in range(0, geo.nimax): - # print(i, j, lons[j][i], wanted_lon, lats[j][i], wanted_lat) x0[i] = float(geo.x[i]) - (0.5 * ((float(nx) - 1.0) * geo.xdx)) for j in range(0, geo.njmax): y0[j] = float(geo.y[j]) - (0.5 * ((float(ny) - 1.0) * geo.xdy)) x0v, y0v = np.meshgrid(x0, y0) -# print(x0v, y0v) lonc, latc = pyproj.Transformer.from_crs(proj, wgs84, always_xy=True).transform(x0v, y0v) for i in range(0, geo.nimax): for j in range(0, geo.njmax): - # print(i, j, lonc[j][i], wanted_lon, latc[j][i], wanted_lat) if abs(lonc[j][i] - wanted_lon) < 0.01 and abs(latc[j][i] - wanted_lat) < 0.01: - print("Possible subset centre points", i, j, xv[j, i], lonc[j][i], yv[j, i], latc[j][i]) + print( + "Possible subset centre points", + i, + j, + xv[j, i], + lonc[j][i], + yv[j, i], + latc[j][i], + ) diff --git a/index.rst b/index.rst index 6bc94ce..997ccec 100644 --- a/index.rst +++ b/index.rst @@ -15,470 +15,479 @@ PYSURFEX documentation Classes --------------------------------------------- -.. autoclass:: surfex.Grib -.. autoclass:: surfex.Grib1Variable -.. autoclass:: surfex.Grib2Variable -.. autoclass:: surfex.QualityControl -.. autoclass:: surfex.Plausibility -.. autoclass:: surfex.FirstGuess -.. autoclass:: surfex.Fraction -.. autoclass:: surfex.Sct -.. autoclass:: surfex.Buddy -.. autoclass:: surfex.Climatology -.. autoclass:: surfex.Redundancy -.. autoclass:: surfex.Blacklist -.. autoclass:: surfex.DomainCheck -.. autoclass:: surfex.NoMeta -.. 
autoclass:: surfex.QCDataSet -.. autoclass:: surfex.TitanDataSet -.. autoclass:: surfex.ObsOperator -.. autoclass:: surfex.Departure -.. autoclass:: surfex.Observation -.. autoclass:: surfex.ObservationSet -.. autoclass:: surfex.NetatmoObservationSet -.. autoclass:: surfex.MetKlappObservations -.. autoclass:: surfex.MetFrostObservations -.. autoclass:: surfex.JsonObservationSet -.. autoclass:: surfex.ObservationFromTitanJsonFile -.. autoclass:: surfex.YamlReaderError -.. autoclass:: surfex.Cache -.. autoclass:: surfex.Geo -.. autoclass:: surfex.SurfexGeo -.. autoclass:: surfex.ConfProj -.. autoclass:: surfex.LonLatVal -.. autoclass:: surfex.Cartesian -.. autoclass:: surfex.LonLatReg -.. autoclass:: surfex.IGN -.. autoclass:: surfex.Interpolation -.. autoclass:: surfex.NearestNeighbour -.. autoclass:: surfex.Linear -.. autoclass:: surfex.NoInterpolation -.. autoclass:: surfex.Fa -.. autoclass:: surfex.SurfexIO -.. autoclass:: surfex.SurfexSurfIO -.. autoclass:: surfex.PGDFile -.. autoclass:: surfex.PREPFile -.. autoclass:: surfex.SURFFile -.. autoclass:: surfex.SurfexFileVariable -.. autoclass:: surfex.AsciiSurfexFile -.. autoclass:: surfex.NCSurfexFile -.. autoclass:: surfex.FaSurfexFile -.. autoclass:: surfex.SurfFileTypeExtension -.. autoclass:: surfex.NetCDFSurfexFile -.. autoclass:: surfex.TexteSurfexFile -.. autoclass:: surfex.ForcingFileNetCDF -.. autoclass:: surfex.ReadData -.. autoclass:: surfex.Converter -.. autoclass:: surfex.ConvertedInput -.. autoclass:: surfex.ConstantValue -.. autoclass:: surfex.Configuration -.. autoclass:: surfex.ConfigurationFromHarmonie -.. autoclass:: surfex.Variable -.. autoclass:: surfex.NetcdfVariable -.. autoclass:: surfex.GribVariable -.. autoclass:: surfex.SurfexVariable -.. autoclass:: surfex.FaVariable -.. autoclass:: surfex.ObservationVariable -.. autoclass:: surfex.LoadFromFile -.. autoclass:: surfex.SystemFilePaths -.. autoclass:: surfex.SystemFilePathsFromFile -.. autoclass:: surfex.ExternalSurfexInputFile -.. autoclass:: surfex.BaseNamelist -.. autoclass:: surfex.Cy43 -.. autoclass:: surfex.Cy40 -.. autoclass:: surfex.Namelist -.. autoclass:: surfex.Ecoclimap -.. autoclass:: surfex.EcoclimapSG -.. autoclass:: surfex.PgdInputData -.. autoclass:: surfex.PrepInputData -.. autoclass:: surfex.OfflineInputData -.. autoclass:: surfex.InlineForecastInputData -.. autoclass:: surfex.SodaInputData -.. autoclass:: surfex.SurfexForcing -.. autoclass:: surfex.NetCDFOutput -.. autoclass:: surfex.AsciiOutput -.. autoclass:: surfex.BufrObservationSet -.. autoclass:: surfex.TimeSeries -.. autoclass:: surfex.TimeSeriesFromJson -.. autoclass:: surfex.TimeSeriesFromConverter -.. autoclass:: surfex.Netcdf -.. autoclass:: surfex.Axis -.. autoclass:: surfex.NetCDFFileVariable -.. autoclass:: surfex.BatchJob -.. autoclass:: surfex.SURFEXBinary -.. autoclass:: surfex.PerturbedOffline -.. autoclass:: surfex.Masterodb -.. autoclass:: surfex.InputDataToSurfexBinaries -.. autoclass:: surfex.OutputDataFromSurfexBinaries -.. autoclass:: surfex.JsonOutputData -.. autoclass:: surfex.JsonOutputDataFromFile -.. autoclass:: surfex.JsonInputData -.. autoclass:: surfex.JsonInputDataFromFile +.. autoclass:: surfex.geo.Geo +.. autoclass:: surfex.geo.SurfexGeo +.. autoclass:: surfex.geo.ConfProj +.. autoclass:: surfex.geo.LonLatVal +.. autoclass:: surfex.geo.Cartesian +.. autoclass:: surfex.geo.LonLatReg +.. autoclass:: surfex.geo.IGN +.. autoclass:: surfex.ecoclimap.ExternalSurfexInputFile +.. autoclass:: surfex.ecoclimap.Ecoclimap +.. autoclass:: surfex.ecoclimap.EcoclimapSG +.. 
autoclass:: surfex.bufr.BufrObservationSet +.. autoclass:: surfex.titan.QualityControl +.. autoclass:: surfex.titan.Plausibility +.. autoclass:: surfex.titan.FirstGuess +.. autoclass:: surfex.titan.Fraction +.. autoclass:: surfex.titan.Sct +.. autoclass:: surfex.titan.Buddy +.. autoclass:: surfex.titan.Climatology +.. autoclass:: surfex.titan.Redundancy +.. autoclass:: surfex.titan.Blacklist +.. autoclass:: surfex.titan.DomainCheck +.. autoclass:: surfex.titan.NoMeta +.. autoclass:: surfex.titan.QCDataSet +.. autoclass:: surfex.titan.TitanDataSet +.. autoclass:: surfex.titan.Departure +.. autoclass:: surfex.configuration.Configuration +.. autoclass:: surfex.configuration.ConfigurationFromHarmonie +.. autoclass:: surfex.configuration.ConfigurationFromHarmonieAndConfigFile +.. autoclass:: surfex.configuration.ConfigurationFromTomlFile +.. autoclass:: surfex.read.ReadData +.. autoclass:: surfex.read.ConvertedInput +.. autoclass:: surfex.read.ConstantValue +.. autoclass:: surfex.read.Converter +.. autoclass:: surfex.fa.Fa +.. autoclass:: surfex.observation.Observation +.. autoclass:: surfex.namelist.BaseNamelist +.. autoclass:: surfex.namelist.Namelist +.. autoclass:: surfex.timeseries.TimeSeries +.. autoclass:: surfex.timeseries.TimeSeriesFromConverter +.. autoclass:: surfex.obs.ObservationSet +.. autoclass:: surfex.obs.NetatmoObservationSet +.. autoclass:: surfex.obs.MetFrostObservations +.. autoclass:: surfex.obs.JsonObservationSet +.. autoclass:: surfex.obs.ObservationFromTitanJsonFile +.. autoclass:: surfex.netcdf.Netcdf +.. autoclass:: surfex.netcdf.Axis +.. autoclass:: surfex.netcdf.NetCDFReadVariable +.. autoclass:: surfex.netcdf.NetCDFFileVariable +.. autoclass:: surfex.interpolation.Interpolation +.. autoclass:: surfex.interpolation.ObsOperator +.. autoclass:: surfex.variable.Variable +.. autoclass:: surfex.binary_input.InputDataToSurfexBinaries +.. autoclass:: surfex.binary_input.OutputDataFromSurfexBinaries +.. autoclass:: surfex.binary_input.JsonOutputData +.. autoclass:: surfex.binary_input.JsonOutputDataFromFile +.. autoclass:: surfex.binary_input.JsonInputData +.. autoclass:: surfex.binary_input.JsonInputDataFromFile +.. autoclass:: surfex.binary_input.PgdInputData +.. autoclass:: surfex.binary_input.PrepInputData +.. autoclass:: surfex.binary_input.OfflineInputData +.. autoclass:: surfex.binary_input.InlineForecastInputData +.. autoclass:: surfex.binary_input.SodaInputData +.. autoclass:: surfex.platform.SystemFilePaths +.. autoclass:: surfex.platform.SystemFilePathsFromFile +.. autoclass:: surfex.cmd_parsing.LoadFromFile +.. autoclass:: surfex.run.BatchJob +.. autoclass:: surfex.run.SURFEXBinary +.. autoclass:: surfex.run.PerturbedOffline +.. autoclass:: surfex.run.Masterodb +.. autoclass:: surfex.file.SurfexIO +.. autoclass:: surfex.file.SurfexSurfIO +.. autoclass:: surfex.file.PGDFile +.. autoclass:: surfex.file.PREPFile +.. autoclass:: surfex.file.SURFFile +.. autoclass:: surfex.file.SurfexFileVariable +.. autoclass:: surfex.file.AsciiSurfexFile +.. autoclass:: surfex.file.NCSurfexFile +.. autoclass:: surfex.file.FaSurfexFile +.. autoclass:: surfex.file.SurfFileTypeExtension +.. autoclass:: surfex.file.NetCDFSurfexFile +.. autoclass:: surfex.file.TexteSurfexFile +.. autoclass:: surfex.file.ForcingFileNetCDF +.. autoclass:: surfex.grib.Grib +.. autoclass:: surfex.grib.Grib2Variable +.. autoclass:: surfex.forcing.SurfexForcing +.. autoclass:: surfex.forcing.SurfexNetCDFForcing +.. autoclass:: surfex.forcing.SurfexOutputForcing +.. autoclass:: surfex.forcing.NetCDFOutput +.. 
autoclass:: surfex.forcing.AsciiOutput +.. autoclass:: surfex.obsoul.ObservationDataSetFromObsoul +.. autoclass:: surfex.obsoul.ObservationDataSetFromObsoulFile Class methods --------------------------------------------- -.. automethod:: surfex.Grib.__init__ -.. automethod:: surfex.Grib.field -.. automethod:: surfex.Grib.points -.. automethod:: surfex.Grib1Variable.__init__ -.. automethod:: surfex.Grib1Variable.is_accumulated -.. automethod:: surfex.Grib1Variable.matches -.. automethod:: surfex.Grib1Variable.print_keys -.. automethod:: surfex.Grib2Variable.__init__ -.. automethod:: surfex.Grib2Variable.matches -.. automethod:: surfex.Grib2Variable.is_accumulated -.. automethod:: surfex.Grib2Variable.print_keys -.. automethod:: surfex.QualityControl.__init__ -.. automethod:: surfex.QualityControl.set_input -.. automethod:: surfex.QualityControl.test -.. automethod:: surfex.QualityControl.set_flags -.. automethod:: surfex.Plausibility.__init__ -.. automethod:: surfex.Plausibility.set_input -.. automethod:: surfex.Plausibility.test -.. automethod:: surfex.FirstGuess.set_input -.. automethod:: surfex.FirstGuess.test -.. automethod:: surfex.Fraction.set_input -.. automethod:: surfex.Fraction.test -.. automethod:: surfex.Sct.set_input -.. automethod:: surfex.Sct.test -.. automethod:: surfex.Buddy.set_input -.. automethod:: surfex.Buddy.test -.. automethod:: surfex.Climatology.__init__ -.. automethod:: surfex.Climatology.set_input -.. automethod:: surfex.Climatology.test -.. automethod:: surfex.Redundancy.__init__ -.. automethod:: surfex.Redundancy.set_input -.. automethod:: surfex.Redundancy.test -.. automethod:: surfex.Blacklist.__init__ -.. automethod:: surfex.Blacklist.set_input -.. automethod:: surfex.Blacklist.test -.. automethod:: surfex.DomainCheck.__init__ -.. automethod:: surfex.DomainCheck.set_input -.. automethod:: surfex.DomainCheck.test -.. automethod:: surfex.NoMeta.__init__ -.. automethod:: surfex.NoMeta.set_input -.. automethod:: surfex.NoMeta.test -.. automethod:: surfex.QCDataSet.get_stid_index -.. automethod:: surfex.QCDataSet.get_pos_index -.. automethod:: surfex.QCDataSet.perform_tests -.. automethod:: surfex.QCDataSet.write_output -.. automethod:: surfex.QCDataSet.normalize_ci -.. automethod:: surfex.TitanDataSet.__init__ -.. automethod:: surfex.TitanDataSet.perform_tests -.. automethod:: surfex.ObsOperator.__init__ -.. automethod:: surfex.ObsOperator.get_obs_value -.. automethod:: surfex.ObsOperator.is_in_grid -.. automethod:: surfex.Departure.__init__ -.. automethod:: surfex.Departure.get_departure -.. automethod:: surfex.Departure.get_values -.. automethod:: surfex.Observation.__init__ -.. automethod:: surfex.Observation.print_obs -.. automethod:: surfex.Observation.vectors2obs -.. automethod:: surfex.Observation.obs2vectors -.. automethod:: surfex.Observation.format_lon -.. automethod:: surfex.Observation.format_lat -.. automethod:: surfex.Observation.get_pos_from_stid -.. automethod:: surfex.Observation.get_stid_from_stationlist -.. automethod:: surfex.ObservationSet.__init__ -.. automethod:: surfex.ObservationSet.get_stid_index -.. automethod:: surfex.ObservationSet.get_pos_index -.. automethod:: surfex.ObservationSet.get_obs -.. automethod:: surfex.ObservationSet.matching_obs -.. automethod:: surfex.ObservationSet.points -.. automethod:: surfex.ObservationSet.write_json_file -.. automethod:: surfex.JsonObservationSet.__init__ -.. automethod:: surfex.ObservationFromTitanJsonFile.__init__ -.. automethod:: surfex.Cache.__init__ -.. 
automethod:: surfex.Cache.set_file_handler -.. automethod:: surfex.Cache.get_file_handler -.. automethod:: surfex.Cache.file_open -.. automethod:: surfex.Cache.interpolator_is_set -.. automethod:: surfex.Cache.get_interpolator -.. automethod:: surfex.Cache.update_interpolator -.. automethod:: surfex.Cache.save_field -.. automethod:: surfex.Cache.clean_fields -.. automethod:: surfex.Cache.is_saved -.. automethod:: surfex.Cache.generate_grib_id -.. automethod:: surfex.Cache.generate_netcdf_id -.. automethod:: surfex.Cache.generate_surfex_id -.. automethod:: surfex.Cache.generate_obs_id -.. automethod:: surfex.Geo.__init__ -.. automethod:: surfex.Geo.identifier -.. automethod:: surfex.Geo.is_identical -.. automethod:: surfex.SurfexGeo.__init__ -.. automethod:: surfex.SurfexGeo.update_namelist -.. automethod:: surfex.ConfProj.__init__ -.. automethod:: surfex.ConfProj.update_namelist -.. automethod:: surfex.LonLatVal.__init__ -.. automethod:: surfex.LonLatVal.update_namelist -.. automethod:: surfex.Cartesian.__init__ -.. automethod:: surfex.Cartesian.update_namelist -.. automethod:: surfex.LonLatReg.__init__ -.. automethod:: surfex.LonLatReg.update_namelist -.. automethod:: surfex.IGN.__init__ -.. automethod:: surfex.IGN.get_coord -.. automethod:: surfex.IGN.ign_mask -.. automethod:: surfex.IGN.update_namelist -.. automethod:: surfex.Interpolation.__init__ -.. automethod:: surfex.Interpolation.interpolate -.. automethod:: surfex.Interpolation.rotate_wind_to_geographic -.. automethod:: surfex.Interpolation.distance -.. automethod:: surfex.Interpolation.alpha_grid_rot -.. automethod:: surfex.NearestNeighbour.__init__ -.. automethod:: surfex.NearestNeighbour.interpolate -.. automethod:: surfex.Linear.__init__ -.. automethod:: surfex.Linear.interpolate -.. automethod:: surfex.NoInterpolation.__init__ -.. automethod:: surfex.NoInterpolation.interpolate -.. automethod:: surfex.Fa.__init__ -.. automethod:: surfex.Fa.field -.. automethod:: surfex.Fa.points -.. automethod:: surfex.SurfexIO.__init__ -.. automethod:: surfex.SurfexIO.field -.. automethod:: surfex.SurfexIO.points -.. automethod:: surfex.SurfexIO.interpolate_field -.. automethod:: surfex.SurfexSurfIO.__init__ -.. automethod:: surfex.SurfexSurfIO.symlink_input -.. automethod:: surfex.SurfexSurfIO.copy_input -.. automethod:: surfex.SurfexSurfIO.archive_output_file -.. automethod:: surfex.SurfexFileVariable.__init__ -.. automethod:: surfex.SurfexFileVariable.print_var -.. automethod:: surfex.AsciiSurfexFile.__init__ -.. automethod:: surfex.AsciiSurfexFile.get_geo -.. automethod:: surfex.AsciiSurfexFile.read -.. automethod:: surfex.AsciiSurfexFile.field -.. automethod:: surfex.AsciiSurfexFile.points -.. automethod:: surfex.NCSurfexFile.__init__ -.. automethod:: surfex.NCSurfexFile.get_geo -.. automethod:: surfex.NCSurfexFile.field -.. automethod:: surfex.NCSurfexFile.points -.. automethod:: surfex.FaSurfexFile.__init__ -.. automethod:: surfex.FaSurfexFile.field -.. automethod:: surfex.FaSurfexFile.points -.. automethod:: surfex.SurfFileTypeExtension.__init__ -.. automethod:: surfex.NetCDFSurfexFile.__init__ -.. automethod:: surfex.NetCDFSurfexFile.read -.. automethod:: surfex.NetCDFSurfexFile.field -.. automethod:: surfex.NetCDFSurfexFile.points -.. automethod:: surfex.TexteSurfexFile.__init__ -.. automethod:: surfex.TexteSurfexFile.read -.. automethod:: surfex.TexteSurfexFile.field -.. automethod:: surfex.TexteSurfexFile.points -.. automethod:: surfex.ForcingFileNetCDF.__init__ -.. automethod:: surfex.ForcingFileNetCDF.read_field -.. 
automethod:: surfex.ForcingFileNetCDF.field -.. automethod:: surfex.ForcingFileNetCDF.points -.. automethod:: surfex.ReadData.__init__ -.. automethod:: surfex.ReadData.read_time_step -.. automethod:: surfex.ReadData.print_info -.. automethod:: surfex.Converter.__init__ -.. automethod:: surfex.Converter.print_info -.. automethod:: surfex.Converter.create_variable -.. automethod:: surfex.ConvertedInput.__init__ -.. automethod:: surfex.ConvertedInput.read_time_step -.. automethod:: surfex.ConvertedInput.print_info -.. automethod:: surfex.ConstantValue.__init__ -.. automethod:: surfex.ConstantValue.read_time_step -.. automethod:: surfex.ConstantValue.print_info -.. automethod:: surfex.Configuration.__init__ -.. automethod:: surfex.Configuration.max_fc_length -.. automethod:: surfex.Configuration.has_sfc_analysis -.. automethod:: surfex.Configuration.setting_is -.. automethod:: surfex.Configuration.setting_is_not -.. automethod:: surfex.Configuration.value_is_one_of -.. automethod:: surfex.Configuration.value_is_not_one_of -.. automethod:: surfex.Configuration.setting_is_one_of -.. automethod:: surfex.Configuration.setting_is_not_one_of -.. automethod:: surfex.Configuration.get_setting -.. automethod:: surfex.Configuration.update_setting -.. automethod:: surfex.Configuration.get_total_unique_hh_list -.. automethod:: surfex.Configuration.get_fcint -.. automethod:: surfex.Configuration.get_hh_list -.. automethod:: surfex.Configuration.get_ll_list -.. automethod:: surfex.ConfigurationFromHarmonie.__init__ -.. automethod:: surfex.Variable.__init__ -.. automethod:: surfex.Variable.read_variable -.. automethod:: surfex.Variable.print_variable_info -.. automethod:: surfex.Variable.deaccumulate -.. automethod:: surfex.Variable.open_new_file -.. automethod:: surfex.Variable.rotate_geographic_wind -.. automethod:: surfex.NetcdfVariable.__init__ -.. automethod:: surfex.NetcdfVariable.read_variable -.. automethod:: surfex.NetcdfVariable.print_variable_info -.. automethod:: surfex.GribVariable.__init__ -.. automethod:: surfex.GribVariable.read_variable -.. automethod:: surfex.GribVariable.print_variable_info -.. automethod:: surfex.SurfexVariable.__init__ -.. automethod:: surfex.SurfexVariable.read_variable -.. automethod:: surfex.SurfexVariable.print_variable_info -.. automethod:: surfex.FaVariable.__init__ -.. automethod:: surfex.FaVariable.read_variable -.. automethod:: surfex.FaVariable.print_variable_info -.. automethod:: surfex.ObservationVariable.__init__ -.. automethod:: surfex.ObservationVariable.read_variable -.. automethod:: surfex.ObservationVariable.print_variable_info -.. automethod:: surfex.LoadFromFile.__call__ -.. automethod:: surfex.SystemFilePaths.__init__ -.. automethod:: surfex.SystemFilePaths.get_system_path -.. automethod:: surfex.SystemFilePaths.find_matching_data_dir -.. automethod:: surfex.SystemFilePaths.get_system_file -.. automethod:: surfex.SystemFilePaths.parse_setting -.. automethod:: surfex.SystemFilePaths.substitute_string -.. automethod:: surfex.SystemFilePaths.add_system_file_path -.. automethod:: surfex.SystemFilePathsFromFile.__init__ -.. automethod:: surfex.ExternalSurfexInputFile.__init__ -.. automethod:: surfex.ExternalSurfexInputFile.set_input_data_from_format -.. automethod:: surfex.BaseNamelist.__init__ -.. automethod:: surfex.BaseNamelist.prolog -.. automethod:: surfex.BaseNamelist.set_pgd_namelist -.. automethod:: surfex.BaseNamelist.set_prep_namelist -.. automethod:: surfex.BaseNamelist.set_offline_namelist -.. 
automethod:: surfex.BaseNamelist.set_soda_namelist -.. automethod:: surfex.BaseNamelist.epilog -.. automethod:: surfex.BaseNamelist.override -.. automethod:: surfex.BaseNamelist.set_direct_data_namelist -.. automethod:: surfex.BaseNamelist.set_dirtyp_data_namelist -.. automethod:: surfex.BaseNamelist.capitalize_namelist_dict -.. automethod:: surfex.BaseNamelist.lower_case_namelist_dict -.. automethod:: surfex.BaseNamelist.merge_namelist_dicts -.. automethod:: surfex.BaseNamelist.ascii2nml -.. automethod:: surfex.BaseNamelist.ascii_file2nml -.. automethod:: surfex.BaseNamelist.nml2ascii -.. automethod:: surfex.BaseNamelist.merge_json_namelist_file -.. automethod:: surfex.BaseNamelist.get_namelist -.. automethod:: surfex.Cy43.__init__ -.. automethod:: surfex.Cy40.__init__ -.. automethod:: surfex.Namelist.__init__ -.. automethod:: surfex.Ecoclimap.__init__ -.. automethod:: surfex.Ecoclimap.set_input -.. automethod:: surfex.Ecoclimap.set_bin_files -.. automethod:: surfex.EcoclimapSG.__init__ -.. automethod:: surfex.EcoclimapSG.set_bin_files -.. automethod:: surfex.EcoclimapSG.set_input -.. automethod:: surfex.EcoclimapSG.parse_fnames -.. automethod:: surfex.PgdInputData.__init__ -.. automethod:: surfex.PrepInputData.__init__ -.. automethod:: surfex.OfflineInputData.__init__ -.. automethod:: surfex.InlineForecastInputData.__init__ -.. automethod:: surfex.SodaInputData.__init__ -.. automethod:: surfex.SodaInputData.set_input_observations -.. automethod:: surfex.SodaInputData.set_input_sea_assimilation -.. automethod:: surfex.SodaInputData.set_input_vertical_soil_oi -.. automethod:: surfex.SodaInputData.set_input_vertical_soil_ekf -.. automethod:: surfex.SurfexForcing.__init__ -.. automethod:: surfex.SurfexForcing._check_sanity -.. automethod:: surfex.SurfexForcing.write_forcing -.. automethod:: surfex.NetCDFOutput.__init__ -.. automethod:: surfex.NetCDFOutput.write_forcing -.. automethod:: surfex.NetCDFOutput._define_forcing -.. automethod:: surfex.NetCDFOutput.finalize -.. automethod:: surfex.AsciiOutput.__init__ -.. automethod:: surfex.AsciiOutput.write_forcing -.. automethod:: surfex.AsciiOutput._define_forcing -.. automethod:: surfex.AsciiOutput.finalize -.. automethod:: surfex.BufrObservationSet.__init__ -.. automethod:: surfex.BufrObservationSet.td2rh -.. automethod:: surfex.BufrObservationSet.inside_window -.. automethod:: surfex.TimeSeries.__init__ -.. automethod:: surfex.TimeSeries.update_stids_from_file -.. automethod:: surfex.TimeSeries.write_json -.. automethod:: surfex.TimeSeriesFromJson.__init__ -.. automethod:: surfex.Netcdf.__init__ -.. automethod:: surfex.Netcdf.num_height -.. automethod:: surfex.Netcdf.num_time -.. automethod:: surfex.Netcdf.field -.. automethod:: surfex.NetCDFFileVariable.__init__ -.. automethod:: surfex.NetCDFFileVariable.is_level -.. automethod:: surfex.BatchJob.__init__ -.. automethod:: surfex.BatchJob.run -.. automethod:: surfex.SURFEXBinary.__init__ -.. automethod:: surfex.Masterodb.archive_output -.. automethod:: surfex.InputDataToSurfexBinaries.__init__ -.. automethod:: surfex.InputDataToSurfexBinaries.prepare_input -.. automethod:: surfex.OutputDataFromSurfexBinaries.__init__ -.. automethod:: surfex.OutputDataFromSurfexBinaries.archive_files -.. automethod:: surfex.JsonOutputData.__init__ -.. automethod:: surfex.JsonOutputData.archive_files -.. automethod:: surfex.JsonOutputDataFromFile.__init__ -.. automethod:: surfex.JsonOutputDataFromFile.archive_files -.. automethod:: surfex.JsonInputData.__init__ -.. 
automethod:: surfex.JsonInputData.prepare_input -.. automethod:: surfex.JsonInputData.add_data -.. automethod:: surfex.JsonInputDataFromFile.__init__ -.. automethod:: surfex.JsonInputDataFromFile.prepare_input +.. automethod:: surfex.geo.Geo.__init__ +.. automethod:: surfex.geo.Geo.identifier +.. automethod:: surfex.geo.Geo.is_identical +.. automethod:: surfex.geo.Geo.write_proj_info +.. automethod:: surfex.geo.SurfexGeo.__init__ +.. automethod:: surfex.geo.SurfexGeo.update_namelist +.. automethod:: surfex.geo.SurfexGeo.subset +.. automethod:: surfex.geo.ConfProj.__init__ +.. automethod:: surfex.geo.ConfProj.update_namelist +.. automethod:: surfex.geo.ConfProj.subset +.. automethod:: surfex.geo.LonLatVal.__init__ +.. automethod:: surfex.geo.LonLatVal.update_namelist +.. automethod:: surfex.geo.LonLatVal.subset +.. automethod:: surfex.geo.Cartesian.__init__ +.. automethod:: surfex.geo.Cartesian.update_namelist +.. automethod:: surfex.geo.Cartesian.subset +.. automethod:: surfex.geo.LonLatReg.__init__ +.. automethod:: surfex.geo.LonLatReg.update_namelist +.. automethod:: surfex.geo.LonLatReg.subset +.. automethod:: surfex.geo.IGN.__init__ +.. automethod:: surfex.geo.IGN.get_coord +.. automethod:: surfex.geo.IGN.ign_mask +.. automethod:: surfex.geo.IGN.update_namelist +.. automethod:: surfex.geo.IGN.subset +.. automethod:: surfex.ecoclimap.ExternalSurfexInputFile.__init__ +.. automethod:: surfex.ecoclimap.Ecoclimap.__init__ +.. automethod:: surfex.ecoclimap.Ecoclimap.set_input +.. automethod:: surfex.ecoclimap.Ecoclimap.set_bin_files +.. automethod:: surfex.ecoclimap.EcoclimapSG.__init__ +.. automethod:: surfex.ecoclimap.EcoclimapSG.set_bin_files +.. automethod:: surfex.ecoclimap.EcoclimapSG.set_input +.. automethod:: surfex.ecoclimap.EcoclimapSG.parse_fnames +.. automethod:: surfex.bufr.BufrObservationSet.td2rh +.. automethod:: surfex.bufr.BufrObservationSet.inside_window +.. automethod:: surfex.titan.QualityControl.__init__ +.. automethod:: surfex.titan.QualityControl.set_input +.. automethod:: surfex.titan.QualityControl.test +.. automethod:: surfex.titan.QualityControl.set_flags +.. automethod:: surfex.titan.Plausibility.__init__ +.. automethod:: surfex.titan.Plausibility.set_input +.. automethod:: surfex.titan.Plausibility.test +.. automethod:: surfex.titan.FirstGuess.set_input +.. automethod:: surfex.titan.FirstGuess.test +.. automethod:: surfex.titan.Fraction.set_input +.. automethod:: surfex.titan.Fraction.test +.. automethod:: surfex.titan.Sct.set_input +.. automethod:: surfex.titan.Sct.test +.. automethod:: surfex.titan.Buddy.test +.. automethod:: surfex.titan.Climatology.__init__ +.. automethod:: surfex.titan.Climatology.set_input +.. automethod:: surfex.titan.Climatology.test +.. automethod:: surfex.titan.Redundancy.__init__ +.. automethod:: surfex.titan.Redundancy.set_input +.. automethod:: surfex.titan.Redundancy.test +.. automethod:: surfex.titan.Blacklist.__init__ +.. automethod:: surfex.titan.Blacklist.set_input +.. automethod:: surfex.titan.Blacklist.test +.. automethod:: surfex.titan.DomainCheck.__init__ +.. automethod:: surfex.titan.DomainCheck.set_input +.. automethod:: surfex.titan.DomainCheck.test +.. automethod:: surfex.titan.NoMeta.__init__ +.. automethod:: surfex.titan.NoMeta.set_input +.. automethod:: surfex.titan.NoMeta.test +.. automethod:: surfex.titan.QCDataSet.get_stid_index +.. automethod:: surfex.titan.QCDataSet.get_pos_index +.. automethod:: surfex.titan.QCDataSet.perform_tests +.. automethod:: surfex.titan.QCDataSet.write_output +.. 
automethod:: surfex.titan.QCDataSet.normalize_ci +.. automethod:: surfex.titan.QCDataSet.ecdf +.. automethod:: surfex.titan.QCDataSet._ecdf +.. automethod:: surfex.titan.TitanDataSet.perform_tests +.. automethod:: surfex.titan.Departure.__init__ +.. automethod:: surfex.titan.Departure.get_departure +.. automethod:: surfex.titan.Departure.get_values +.. automethod:: surfex.configuration.Configuration.__init__ +.. automethod:: surfex.configuration.Configuration.dump_json +.. automethod:: surfex.configuration.Configuration.update_setting +.. automethod:: surfex.configuration.ConfigurationFromHarmonie.__init__ +.. automethod:: surfex.configuration.ConfigurationFromHarmonieAndConfigFile.__init__ +.. automethod:: surfex.configuration.ConfigurationFromTomlFile.__init__ +.. automethod:: surfex.read.ReadData.__init__ +.. automethod:: surfex.read.ReadData.read_time_step +.. automethod:: surfex.read.ReadData.print_info +.. automethod:: surfex.read.ConvertedInput.__init__ +.. automethod:: surfex.read.ConvertedInput.read_time_step +.. automethod:: surfex.read.ConvertedInput.print_info +.. automethod:: surfex.read.ConstantValue.__init__ +.. automethod:: surfex.read.ConstantValue.read_time_step +.. automethod:: surfex.read.ConstantValue.print_info +.. automethod:: surfex.read.Converter.__init__ +.. automethod:: surfex.read.Converter.print_info +.. automethod:: surfex.read.Converter.create_variable +.. automethod:: surfex.read.Converter.mslp2ps +.. automethod:: surfex.read.Converter.read_time_step +.. automethod:: surfex.fa.Fa.__init__ +.. automethod:: surfex.fa.Fa.field +.. automethod:: surfex.fa.Fa.points +.. automethod:: surfex.observation.Observation.__init__ +.. automethod:: surfex.observation.Observation.print_obs +.. automethod:: surfex.observation.Observation.vectors2obs +.. automethod:: surfex.observation.Observation.obs2vectors +.. automethod:: surfex.observation.Observation.format_lon +.. automethod:: surfex.observation.Observation.format_lat +.. automethod:: surfex.observation.Observation.get_pos_from_stid +.. automethod:: surfex.observation.Observation.get_stid_from_stationlist +.. automethod:: surfex.namelist.BaseNamelist.prolog +.. automethod:: surfex.namelist.BaseNamelist.set_pgd_namelist +.. automethod:: surfex.namelist.BaseNamelist.set_offline_namelist +.. automethod:: surfex.namelist.BaseNamelist.set_soda_namelist +.. automethod:: surfex.namelist.BaseNamelist.epilog +.. automethod:: surfex.namelist.BaseNamelist.override +.. automethod:: surfex.namelist.BaseNamelist.set_direct_data_namelist +.. automethod:: surfex.namelist.BaseNamelist.capitalize_namelist_dict +.. automethod:: surfex.namelist.BaseNamelist.lower_case_namelist_dict +.. automethod:: surfex.namelist.BaseNamelist.merge_namelist_dicts +.. automethod:: surfex.namelist.BaseNamelist.ascii2nml +.. automethod:: surfex.namelist.BaseNamelist.ascii_file2nml +.. automethod:: surfex.namelist.BaseNamelist.nml2ascii +.. automethod:: surfex.namelist.BaseNamelist.merge_json_namelist_file +.. automethod:: surfex.namelist.BaseNamelist.get_namelist +.. automethod:: surfex.namelist.Namelist.prolog +.. automethod:: surfex.namelist.Namelist.set_pgd_namelist +.. automethod:: surfex.namelist.Namelist.set_offline_namelist +.. automethod:: surfex.namelist.Namelist.prepare_offline_perturbation +.. automethod:: surfex.namelist.Namelist.set_obs +.. automethod:: surfex.namelist.Namelist.set_soda_namelist +.. automethod:: surfex.namelist.Namelist.epilog +.. automethod:: surfex.namelist.Namelist.override +.. automethod:: surfex.namelist.Namelist.sub +.. 
automethod:: surfex.namelist.Namelist.delete +.. automethod:: surfex.namelist.Namelist.get_filetype_from_suffix +.. automethod:: surfex.namelist.Namelist.set_direct_data_namelist +.. automethod:: surfex.namelist.Namelist.capitalize_namelist_dict +.. automethod:: surfex.namelist.Namelist.lower_case_namelist_dict +.. automethod:: surfex.namelist.Namelist.merge_namelist_dicts +.. automethod:: surfex.namelist.Namelist.ascii2nml +.. automethod:: surfex.namelist.Namelist.ascii_file2nml +.. automethod:: surfex.namelist.Namelist.nml2ascii +.. automethod:: surfex.namelist.Namelist.merge_json_namelist_file +.. automethod:: surfex.namelist.Namelist.get_namelist +.. automethod:: surfex.timeseries.TimeSeries.__init__ +.. automethod:: surfex.timeseries.TimeSeries.write_json +.. automethod:: surfex.obs.ObservationSet.__init__ +.. automethod:: surfex.obs.ObservationSet.get_stid_index +.. automethod:: surfex.obs.ObservationSet.get_pos_index +.. automethod:: surfex.obs.ObservationSet.get_obs +.. automethod:: surfex.obs.ObservationSet.matching_obs +.. automethod:: surfex.obs.ObservationSet.points +.. automethod:: surfex.obs.ObservationSet.write_json_file +.. automethod:: surfex.obs.JsonObservationSet.__init__ +.. automethod:: surfex.obs.ObservationFromTitanJsonFile.__init__ +.. automethod:: surfex.netcdf.Netcdf.__init__ +.. automethod:: surfex.netcdf.Netcdf.field +.. automethod:: surfex.netcdf.Netcdf.points +.. automethod:: surfex.netcdf.NetCDFReadVariable.__init__ +.. automethod:: surfex.netcdf.NetCDFFileVariable.__init__ +.. automethod:: surfex.netcdf.NetCDFFileVariable.axis_types +.. automethod:: surfex.netcdf.NetCDFFileVariable.dim_names +.. automethod:: surfex.netcdf.NetCDFFileVariable.units +.. automethod:: surfex.netcdf.NetCDFFileVariable.lats +.. automethod:: surfex.netcdf.NetCDFFileVariable.lons +.. automethod:: surfex.netcdf.NetCDFFileVariable.datetimes +.. automethod:: surfex.netcdf.NetCDFFileVariable.times +.. automethod:: surfex.netcdf.NetCDFFileVariable.members +.. automethod:: surfex.netcdf.NetCDFFileVariable.levels +.. automethod:: surfex.netcdf.NetCDFFileVariable.is_level +.. automethod:: surfex.netcdf.NetCDFFileVariable.check_input_to_soda_dimensions +.. automethod:: surfex.interpolation.Interpolation.__init__ +.. automethod:: surfex.interpolation.Interpolation.interpolate +.. automethod:: surfex.interpolation.Interpolation.rotate_wind_to_geographic +.. automethod:: surfex.interpolation.Interpolation.distance +.. automethod:: surfex.interpolation.Interpolation.alpha_grid_rot +.. automethod:: surfex.interpolation.ObsOperator.__init__ +.. automethod:: surfex.interpolation.ObsOperator.get_obs_value +.. automethod:: surfex.interpolation.ObsOperator.is_in_grid +.. automethod:: surfex.interpolation.ObsOperator.obs2vectors +.. automethod:: surfex.variable.Variable.__init__ +.. automethod:: surfex.variable.Variable.get_filename +.. automethod:: surfex.variable.Variable.get_filehandler +.. automethod:: surfex.variable.Variable.read_var_points +.. automethod:: surfex.variable.Variable.set_var +.. automethod:: surfex.variable.Variable.read_variable +.. automethod:: surfex.variable.Variable.print_variable_info +.. automethod:: surfex.variable.Variable.deaccumulate +.. automethod:: surfex.variable.Variable.get_basetime +.. automethod:: surfex.variable.Variable.rotate_geographic_wind +.. automethod:: surfex.binary_input.InputDataToSurfexBinaries.__init__ +.. automethod:: surfex.binary_input.InputDataToSurfexBinaries.prepare_input +.. 
automethod:: surfex.binary_input.OutputDataFromSurfexBinaries.__init__ +.. automethod:: surfex.binary_input.OutputDataFromSurfexBinaries.archive_files +.. automethod:: surfex.binary_input.JsonOutputData.__init__ +.. automethod:: surfex.binary_input.JsonOutputData.archive_files +.. automethod:: surfex.binary_input.JsonOutputDataFromFile.__init__ +.. automethod:: surfex.binary_input.JsonOutputDataFromFile.archive_files +.. automethod:: surfex.binary_input.JsonInputData.__init__ +.. automethod:: surfex.binary_input.JsonInputData.prepare_input +.. automethod:: surfex.binary_input.JsonInputData.add_data +.. automethod:: surfex.binary_input.JsonInputDataFromFile.__init__ +.. automethod:: surfex.binary_input.JsonInputDataFromFile.prepare_input +.. automethod:: surfex.binary_input.PgdInputData.__init__ +.. automethod:: surfex.binary_input.OfflineInputData.__init__ +.. automethod:: surfex.binary_input.InlineForecastInputData.__init__ +.. automethod:: surfex.binary_input.SodaInputData.set_input_observations +.. automethod:: surfex.binary_input.SodaInputData.set_input_sea_assimilation +.. automethod:: surfex.binary_input.SodaInputData.set_input_vertical_soil_oi +.. automethod:: surfex.platform.SystemFilePaths.__init__ +.. automethod:: surfex.platform.SystemFilePaths.substitute_string +.. automethod:: surfex.platform.SystemFilePathsFromFile.__init__ +.. automethod:: surfex.cmd_parsing.LoadFromFile.__call__ +.. automethod:: surfex.run.BatchJob.__init__ +.. automethod:: surfex.run.BatchJob.run +.. automethod:: surfex.run.SURFEXBinary.__init__ +.. automethod:: surfex.run.Masterodb.archive_output +.. automethod:: surfex.file.SurfexIO.__init__ +.. automethod:: surfex.file.SurfexIO.field +.. automethod:: surfex.file.SurfexIO.points +.. automethod:: surfex.file.SurfexIO.interpolate_field +.. automethod:: surfex.file.SurfexSurfIO.symlink_input +.. automethod:: surfex.file.SurfexSurfIO.copy_input +.. automethod:: surfex.file.SurfexSurfIO.archive_output_file +.. automethod:: surfex.file.SurfexFileVariable.print_var +.. automethod:: surfex.file.AsciiSurfexFile.__init__ +.. automethod:: surfex.file.AsciiSurfexFile.get_geo +.. automethod:: surfex.file.AsciiSurfexFile.read +.. automethod:: surfex.file.AsciiSurfexFile.field +.. automethod:: surfex.file.AsciiSurfexFile.points +.. automethod:: surfex.file.NCSurfexFile.__init__ +.. automethod:: surfex.file.NCSurfexFile.get_geo +.. automethod:: surfex.file.NCSurfexFile.field +.. automethod:: surfex.file.NCSurfexFile.points +.. automethod:: surfex.file.FaSurfexFile.__init__ +.. automethod:: surfex.file.FaSurfexFile.field +.. automethod:: surfex.file.FaSurfexFile.points +.. automethod:: surfex.file.SurfFileTypeExtension.__init__ +.. automethod:: surfex.file.NetCDFSurfexFile.__init__ +.. automethod:: surfex.file.NetCDFSurfexFile.read +.. automethod:: surfex.file.NetCDFSurfexFile.field +.. automethod:: surfex.file.NetCDFSurfexFile.points +.. automethod:: surfex.file.TexteSurfexFile.__init__ +.. automethod:: surfex.file.TexteSurfexFile.read +.. automethod:: surfex.file.TexteSurfexFile.field +.. automethod:: surfex.file.TexteSurfexFile.points +.. automethod:: surfex.file.ForcingFileNetCDF.__init__ +.. automethod:: surfex.file.ForcingFileNetCDF.read_field +.. automethod:: surfex.file.ForcingFileNetCDF.field +.. automethod:: surfex.file.ForcingFileNetCDF.points +.. automethod:: surfex.grib.Grib.__init__ +.. automethod:: surfex.grib.Grib.field +.. automethod:: surfex.grib.Grib.read_geo_info +.. automethod:: surfex.grib.Grib.read_field_in_message +.. 
automethod:: surfex.grib.Grib.points +.. automethod:: surfex.grib.Grib.__init__ +.. automethod:: surfex.grib.Grib.is_accumulated +.. automethod:: surfex.grib.Grib.matches +.. automethod:: surfex.grib.Grib.print_keys +.. automethod:: surfex.grib.Grib.generate_grib_id +.. automethod:: surfex.grib.Grib2Variable.__init__ +.. automethod:: surfex.grib.Grib2Variable.matches +.. automethod:: surfex.grib.Grib2Variable.is_accumulated +.. automethod:: surfex.grib.Grib2Variable.print_keys +.. automethod:: surfex.grib.Grib2Variable.generate_grib_id +.. automethod:: surfex.forcing.SurfexForcing.__init__ +.. automethod:: surfex.forcing.SurfexNetCDFForcing.__init__ +.. automethod:: surfex.forcing.SurfexOutputForcing.__init__ +.. automethod:: surfex.forcing.SurfexOutputForcing._check_sanity +.. automethod:: surfex.forcing.SurfexOutputForcing.write_forcing +.. automethod:: surfex.forcing.NetCDFOutput.write_forcing +.. automethod:: surfex.forcing.NetCDFOutput._define_forcing +.. automethod:: surfex.forcing.NetCDFOutput.finalize +.. automethod:: surfex.forcing.AsciiOutput.write_forcing +.. automethod:: surfex.forcing.AsciiOutput._define_forcing +.. automethod:: surfex.forcing.AsciiOutput.finalize Methods --------------------------------------------- -.. autofunction:: surfex.print_grib_id -.. autofunction:: surfex.define_quality_control -.. autofunction:: surfex.dataset_from_file -.. autofunction:: surfex.dataset_from_json -.. autofunction:: surfex.merge_json_qc_data_sets -.. autofunction:: surfex.get_datasources -.. autofunction:: surfex.error -.. autofunction:: surfex.info -.. autofunction:: surfex.warning -.. autofunction:: surfex.unixtime_to_datenum -.. autofunction:: surfex.data_merge -.. autofunction:: surfex.get_geo_object -.. autofunction:: surfex.set_domain -.. autofunction:: surfex.get_surfex_io_object -.. autofunction:: surfex.guess_file_format -.. autofunction:: surfex.parse_filepattern -.. autofunction:: surfex.remove_existing_file -.. autofunction:: surfex.toml_load -.. autofunction:: surfex.toml_dump -.. autofunction:: surfex.process_merged_settings -.. autofunction:: surfex.merge_toml_env -.. autofunction:: surfex.merge_toml_env_from_files -.. autofunction:: surfex.merge_toml_env_from_file -.. autofunction:: surfex.merge_toml_env_from_config_dicts -.. autofunction:: surfex.flatten -.. autofunction:: surfex.get_member_settings -.. autofunction:: surfex.deep_update -.. autofunction:: surfex.open_db -.. autofunction:: surfex.close_db -.. autofunction:: surfex.create_db -.. autofunction:: surfex.populate_usage_db -.. autofunction:: surfex.rmse -.. autofunction:: surfex.bias -.. autofunction:: surfex.absbias -.. autofunction:: surfex.mean -.. autofunction:: surfex.calculate_statistics -.. autofunction:: surfex.populate_obsmon_db -.. autofunction:: surfex.write_obsmon_sqlite_file -.. autofunction:: surfex.parse_args_create_forcing -.. autofunction:: surfex.run_create_forcing -.. autofunction:: surfex.parse_args_qc2obsmon -.. autofunction:: surfex.parse_args_create_surfex_json_namelist -.. autofunction:: surfex.create_surfex_json_namelist -.. autofunction:: surfex.parse_args_create_surfex_json_input -.. autofunction:: surfex.create_surfex_json_input -.. autofunction:: surfex.parse_args_first_guess_for_oi -.. autofunction:: surfex.first_guess_for_oi -.. autofunction:: surfex.parse_args_masterodb -.. autofunction:: surfex.run_masterodb -.. autofunction:: surfex.parse_args_surfex_binary -.. autofunction:: surfex.run_surfex_binary -.. autofunction:: surfex.parse_args_gridpp -.. 
autofunction:: surfex.run_gridpp -.. autofunction:: surfex.parse_args_titan -.. autofunction:: surfex.run_titan -.. autofunction:: surfex.parse_args_oi2soda -.. autofunction:: surfex.run_oi2soda -.. autofunction:: surfex.parse_lsm_file_assim -.. autofunction:: surfex.lsm_file_assim -.. autofunction:: surfex.hm2pysurfex -.. autofunction:: surfex.write_formatted_array -.. autofunction:: surfex.run_time_loop -.. autofunction:: surfex.set_forcing_config -.. autofunction:: surfex.create_netcdf_first_guess_template -.. autofunction:: surfex.read_first_guess_netcdf_file -.. autofunction:: surfex.write_analysis_netcdf_file -.. autofunction:: surfex.oi2soda -.. autofunction:: surfex.create_working_dir -.. autofunction:: surfex.clean_working_dir - +.. autofunction:: surfex.obsmon.open_db +.. autofunction:: surfex.obsmon.close_db +.. autofunction:: surfex.obsmon.create_db +.. autofunction:: surfex.obsmon.populate_usage_db +.. autofunction:: surfex.obsmon.rmse +.. autofunction:: surfex.obsmon.bias +.. autofunction:: surfex.obsmon.absbias +.. autofunction:: surfex.obsmon.mean +.. autofunction:: surfex.obsmon.calculate_statistics +.. autofunction:: surfex.obsmon.populate_obsmon_db +.. autofunction:: surfex.obsmon.write_obsmon_sqlite_file +.. autofunction:: surfex.geo.get_geo_object +.. autofunction:: surfex.geo.set_domain +.. autofunction:: surfex.geo.shape2ign +.. autofunction:: surfex.cli.get_geo_and_config_from_cmd +.. autofunction:: surfex.cli.run_first_guess_for_oi +.. autofunction:: surfex.cli.run_masterodb +.. autofunction:: surfex.cli.run_surfex_binary +.. autofunction:: surfex.cli.run_create_namelist +.. autofunction:: surfex.cli.run_gridpp +.. autofunction:: surfex.cli.run_titan +.. autofunction:: surfex.cli.run_oi2soda +.. autofunction:: surfex.cli.run_hm2pysurfex +.. autofunction:: surfex.cli.run_plot_points +.. autofunction:: surfex.cli.set_geo_from_stationlist +.. autofunction:: surfex.cli.sentinel_obs +.. autofunction:: surfex.cli.qc2obsmon +.. autofunction:: surfex.cli.prep +.. autofunction:: surfex.cli.plot_points +.. autofunction:: surfex.cli.pgd +.. autofunction:: surfex.cli.perturbed_offline +.. autofunction:: surfex.cli.offline +.. autofunction:: surfex.cli.cli_oi2soda +.. autofunction:: surfex.cli.cli_modify_forcing +.. autofunction:: surfex.cli.cli_merge_qc_data +.. autofunction:: surfex.cli.masterodb +.. autofunction:: surfex.cli.hm2pysurfex +.. autofunction:: surfex.cli.gridpp +.. autofunction:: surfex.cli.dump_environ +.. autofunction:: surfex.cli.first_guess_for_oi +.. autofunction:: surfex.cli.cryoclim_pseudoobs +.. autofunction:: surfex.cli.create_namelist +.. autofunction:: surfex.cli.create_lsm_file_assim +.. autofunction:: surfex.cli.create_forcing +.. autofunction:: surfex.cli.bufr2json +.. autofunction:: surfex.cli.obs2json +.. autofunction:: surfex.cli.cli_set_domain +.. autofunction:: surfex.cli.cli_set_geo_from_obs_set +.. autofunction:: surfex.cli.cli_set_geo_from_stationlist +.. autofunction:: surfex.cli.cli_shape2ign +.. autofunction:: surfex.cli.soda +.. autofunction:: surfex.cli.titan +.. autofunction:: surfex.input_methods.get_datasources +.. autofunction:: surfex.titan.define_quality_control +.. autofunction:: surfex.titan.merge_json_qc_data_sets +.. autofunction:: surfex.util.merge_toml_env +.. autofunction:: surfex.util.merge_toml_env_from_files +.. autofunction:: surfex.util.deep_update +.. autofunction:: surfex.util.remove_existing_file +.. autofunction:: surfex.util.parse_filepattern +.. autofunction:: surfex.netcdf.read_first_guess_netcdf_file +.. 
autofunction:: surfex.netcdf.oi2soda +.. autofunction:: surfex.netcdf.read_cryoclim_nc +.. autofunction:: surfex.netcdf.read_sentinel_nc +.. autofunction:: surfex.interpolation.fill_field +.. autofunction:: surfex.interpolation.grid2points +.. autofunction:: surfex.interpolation.inside_grid +.. autofunction:: surfex.cmd_parsing.parse_args_create_forcing +.. autofunction:: surfex.cmd_parsing.parse_args_modify_forcing +.. autofunction:: surfex.cmd_parsing.parse_args_qc2obsmon +.. autofunction:: surfex.cmd_parsing.parse_args_first_guess_for_oi +.. autofunction:: surfex.cmd_parsing.parse_args_masterodb +.. autofunction:: surfex.cmd_parsing.parse_args_surfex_binary +.. autofunction:: surfex.cmd_parsing.parse_args_create_namelist +.. autofunction:: surfex.cmd_parsing.parse_args_gridpp +.. autofunction:: surfex.cmd_parsing.parse_args_titan +.. autofunction:: surfex.cmd_parsing.parse_args_oi2soda +.. autofunction:: surfex.cmd_parsing.parse_args_lsm_file_assim +.. autofunction:: surfex.cmd_parsing.parse_args_hm2pysurfex +.. autofunction:: surfex.cmd_parsing.parse_args_bufr2json +.. autofunction:: surfex.cmd_parsing.parse_args_obs2json +.. autofunction:: surfex.cmd_parsing.parse_args_plot_points +.. autofunction:: surfex.cmd_parsing.parse_args_set_geo_from_obs_set +.. autofunction:: surfex.cmd_parsing.parse_args_set_geo_from_stationlist +.. autofunction:: surfex.cmd_parsing.parse_args_merge_qc_data +.. autofunction:: surfex.cmd_parsing.parse_timeseries2json +.. autofunction:: surfex.cmd_parsing.parse_cryoclim_pseudoobs +.. autofunction:: surfex.cmd_parsing.parse_sentinel_obs +.. autofunction:: surfex.cmd_parsing.parse_args_shape2ign +.. autofunction:: surfex.cmd_parsing.parse_set_domain +.. autofunction:: surfex.datetime_utils.as_datetime +.. autofunction:: surfex.datetime_utils.as_datetime_string +.. autofunction:: surfex.datetime_utils.as_timedelta +.. autofunction:: surfex.datetime_utils.fromtimestamp +.. autofunction:: surfex.datetime_utils.utcfromtimestamp +.. autofunction:: surfex.datetime_utils.isdatetime +.. autofunction:: surfex.datetime_utils.as_datetime_args +.. autofunction:: surfex.run.create_working_dir +.. autofunction:: surfex.run.clean_working_dir +.. autofunction:: surfex.file.guess_file_format +.. autofunction:: surfex.forcing.write_formatted_array +.. autofunction:: surfex.forcing.run_time_loop +.. autofunction:: surfex.forcing.set_forcing_config +.. autofunction:: surfex.forcing.modify_forcing * :ref: `README` @@ -487,5 +496,3 @@ Indices and tables * :ref:`genindex` * :ref:`search` - - diff --git a/prepare_testdata.sh b/prepare_testdata.sh deleted file mode 100755 index a13ffa3..0000000 --- a/prepare_testdata.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -rm -rf /tmp/host0 /tmp/host1 -mkdir -p /tmp/host0/job -mkdir -p /tmp/host1 -ln -s /tmp/host0/job /tmp/host1/job - -cp -r testdata /tmp/host1/. 
-mkdir -p /tmp/host1/testdata/input_paths/ecoclimap_bin_dir -touch /tmp/host1/testdata/input_paths/ecoclimap_bin_dir/ecoclimapI_covers_param.bin -touch /tmp/host1/testdata/input_paths/ecoclimap_bin_dir/ecoclimapII_af_covers_param.bin -touch /tmp/host1/testdata/input_paths/ecoclimap_bin_dir/ecoclimapII_eu_covers_param.bin -mkdir -p /tmp/host1/testdata/input_paths/flake_dir/ -touch /tmp/host1/testdata/input_paths/flake_dir/GlobalLakeDepth_V3.0.dir -touch /tmp/host1/testdata/input_paths/flake_dir/GlobalLakeStatus_V3.0.dir -touch /tmp/host1/testdata/input_paths/flake_dir/LAKE_LTA_NEW.nc -mkdir -p /tmp/host1/testdata/input_paths/sand_dir -touch /tmp/host1/testdata/input_paths/sand_dir/sand_fao.dir -touch /tmp/host1/testdata/input_paths/sand_dir/sand_fao.hdr -mkdir -p /tmp/host1/testdata/input_paths/clay_dir -touch /tmp/host1/testdata/input_paths/clay_dir/clay_fao.dir -touch /tmp/host1/testdata/input_paths/clay_dir/clay_fao.hdr -mkdir -p /tmp/host1/testdata/input_paths/soc_top_dir -touch /tmp/host1/testdata/input_paths/soc_top_dir/soc_top.dir -touch /tmp/host1/testdata/input_paths/soc_top_dir/soc_top.hdr -mkdir -p /tmp/host1/testdata/input_paths/soc_sub_dir -touch /tmp/host1/testdata/input_paths/soc_sub_dir/soc_sub.dir -touch /tmp/host1/testdata/input_paths/soc_sub_dir/soc_sub.hdr -mkdir -p /tmp/host1/testdata/input_paths/ecoclimap_cover_dir -touch /tmp/host1/testdata/input_paths/ecoclimap_cover_dir/ECOCLIMAP_2_5_p.dir -mkdir -p /tmp/host1/testdata/input_paths/oro_dir -touch /tmp/host1/testdata/input_paths/oro_dir/gmted2010.dir -touch /tmp/host1/testdata/input_paths/oro_dir/gmted2010.hdr - -rm -f /tmp/host1/scratch/hm_home/test_start_and_run/unittest_ok -mkdir -p /tmp/host1/scratch/hm_home/prep_task/climate -touch /tmp/host1/scratch/hm_home/prep_task/climate/PGD.nc -mkdir -p /tmp/host1/scratch/hm_home/quality_control_sd_task/archive/2020/11/13/06/ -cp testdata/unittest_FirstGuess4gridpp_grib2.nc /tmp/host1/scratch/hm_home/quality_control_sd_task/archive/2020/11/13/06/raw.nc - diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..9d90d3d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,153 @@ +[tool.poetry] + name = "surfex" + version = "0.0.3" + description = "Python API to SURFEX" + authors = ["Trygve Aspelien"] + license = "MIT" + readme = "README.rst" + repository = "https://github.com/metno/pysurfex" + documentation = "https://metno.github.io/pysurfex/" + +[tool.poetry.scripts] + bufr2json = "surfex.cli:bufr2json" + create_forcing = "surfex.cli:create_forcing" + create_lsm_file = "surfex.cli:create_lsm_file_assim" + create_namelist = "surfex.cli:create_namelist" + cryoclim_pseudoobs = "surfex.cli:cryoclim_pseudoobs" + FirstGuess4gridpp = "surfex.cli:first_guess_for_oi" + dump_environ = "surfex.cli:dump_environ" + gridpp = "surfex.cli:gridpp" + hm2pysurfex = "surfex.cli:hm2pysurfex" + masterodb = "surfex.cli:masterodb" + merge_qc_data = "surfex.cli:cli_merge_qc_data" + modify_forcing = "surfex.cli:cli_modify_forcing" + obs2json = "surfex.cli:obs2json" + oi2soda = "surfex.cli:cli_oi2soda" + offline = "surfex.cli:offline" + perturbed_offline = "surfex.cli:perturbed_offline" + pgd = "surfex.cli:pgd" + plot_points = "surfex.cli:plot_points" + prep = "surfex.cli:prep" + qc2obsmon = "surfex.cli:qc2obsmon" + sentinel_obs = "surfex.cli:sentinel_obs" + set_domain = "surfex.cli:cli_set_domain" + set_geo_from_obs_set = "surfex.cli:cli_set_geo_from_obs_set" + set_geo_from_stationlist = "surfex.cli:set_geo_from_stationlist" + shape2ign = "surfex.cli:cli_shape2ign" + soda = 
"surfex.cli:soda" + titan = "surfex.cli:titan" + +[build-system] + build-backend = "poetry.core.masonry.api" + requires = ["poetry-core > 1.2.0"] + +[tool.poetry.dependencies] + python = "^3.8" + dateutils = "^0.6.12" + fastjsonschema = "2.16.2" + f90nml = "^1.4.3" + humanize = "^3.14.0" + numexpr = "^2.8.4" + numpy = "^1.22.4" + pandas = "^1.4.0" + pyproj = "^3.3.0" + pyyaml = "^6.0" + toml = "^0.10.2" + +#[tool.poetry.group.formats.dependencies] + netcdf4 = "^1.6.3" + cfunits = "^3.3.5" + eccodes = "^1.5.1" + +#[tool.poetry.group.points.dependencies] + requests = "^2.28.2" + gridpp = "^0.6.0" + titanlib = "^0.3.3" + +#[tool.poetry.group.plot.dependencies] + matplotlib = "^3.7.1" + +[tool.poetry.group.dev.dependencies] + jupyterlab = "^3.6.1" + nb-clean = "^2.4.0" + poetryup = "^0.12.7" + pre-commit = "^3.0.4" + +[tool.poetry.group.test.dependencies] + pytest = "^7.2.2" + pytest-cov = "^3.0.0" + pytest-mock = "^3.7.0" + pytest-profiling = "^1.7.0" + pytest-timeout = "^2.1.0" + pytest-xdist = "^3.2.0" + coveralls = "^3.3.1" + sphinx = "^6.2.1" + +[tool.poetry.group.linting.dependencies] + black = "^22.1.0" + flake8 = "^4.0.1" + flakeheaven = "^0.11.1" + isort = "^5.10.1" + pyment = "0.3.3" + pylint = "^2.12.2" + # flake8 plugins + darglint = "^1.8.1" + flake8-bandit = "^3.0.0" + flake8-bugbear = "^22.1.11" + flake8-builtins = "^1.5.3" + flake8-comprehensions = "^3.8.0" + flake8-docstrings = "^1.6.0" + flake8-eradicate = "^1.2.0" + flake8-logging-format = "^0.9.0" + flake8-mutable = "^1.2.0" + flake8-pytest-style = "^1.6.0" + mccabe = "^0.6.1" + pep8-naming = "^0.12.1" + +[tool.pytest.ini_options] + testpaths = ["tests/unit", "tests/smoke"] + addopts = "-v --cov-report=term-missing --cov-report=term:skip-covered --cov-report=xml:.coverage.xml --cov=./" + #addopts = "-v --cov-report=term-missing --cov-report=term:skip-covered -cov-report=html --cov=./" + log_cli_level = "INFO" + +[tool.black] + line-length = 90 + +[tool.isort] + profile = "black" + line_length = 90 + +[tool.pylint.MASTER] + # See + load-plugins = "pylint.extensions.docparams" + +[tool.pylint.BASIC] + # We'll use pylint separate from flakheaven. Some pylint options don't seem to be + # respected when using it via flakeheaven (e.g., fail-under). + # For a description of the config opts used here, see: + # + # + # + # Also, run "pylint --generate-rcfile" to see all applied pylint opts. + fail-under = "8.0" + good-names = ["i", "j", "n", "x", "y", "df", "db", "dt", "g0"] + # Default value for the rgx below: "[a-z_][a-z0-9_]{2,30}$" + attr-rgx = "[a-z_][a-z0-9_]{1,30}$" + +[tool.pylint.messages_control] + # Disable C0116 (Missing function or method docstring). Leave it to flake8-docstrings + # Disable C0326 and C0330 to avoid clashing with black + # Disable C0301 (Line too long). Leave it for black to control. + # See + # Disable E0402 due to false positives + disable = "C0116, C0301, C0326, C0330, E0402" + # Fix "no-name-in-module" error when linting pydantic-based code. 
+ # See + extension-pkg-whitelist = "pydantic" + +[tool.pylint.format] + # Set max-line-length compatible with black + max-line-length = "90" + +[tool.flakeheaven] + base = ".flakeheaven.toml" diff --git a/setup.py b/setup.py deleted file mode 100644 index 70734f8..0000000 --- a/setup.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -"""Setup for pip.""" - -from os import path -import codecs -from setuptools import setup, find_packages -# To use a consistent encoding - -here = path.abspath(path.dirname(__file__)) - -# Get the long description from the relevant file -with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f: - long_description = f.read() - # print(long_description) - - -def read(rel_path): - """Read path. - - Args: - rel_path (str): Relative path. - - Returns: - str: Relattive path. - - """ - with codecs.open(path.join(here, rel_path), 'r') as f_p: - return f_p.read() - - -def get_version(rel_path): - """Get the version. - - Args: - rel_path (str): Realtive path - - Raises: - RuntimeError: If not found. - - Returns: - str: Version - - """ - for line in read(rel_path).splitlines(): - if line.startswith('__version__'): - delim = '"' if '"' in line else "'" - return line.split(delim)[1] - raise RuntimeError("Unable to find version string.") - - -setup( - name='pysurfex', - - # Versions should comply with PEP440. For a discussion on single-sourcing - # the version across setup.py and the project code, see - # https://packaging.python.org/en/latest/single_source_version.html - version=get_version("surfex/__init__.py"), - - description='Python API to SURFEX', - long_description='Python API to SURFEX', - - # The project's main homepage. - url='https://github.com/metno/forcing-offline-surfex', - - # Author details - author='Trygve Aspelien', - author_email='trygve.aspelien@met.no', - - # Choose your license - license='BSD-3', - - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 4 - Beta', - - # Indicate who your project is intended for - 'Intended Audience :: Science/Research', - 'Topic :: Scientific/Engineering :: Atmospheric Science', - 'Topic :: Scientific/Engineering :: Information Analysis', - - # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: BSD License', - - # Specify the Python versions you support here. In particular, ensure - # that you indicate whether you support Python 2, Python 3 or both. - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - ], - - # What does your project relate to? - keywords='meteorology weather prediction offline surfex forcing', - - # You can just specify the packages manually here if your project is - # simple. Or you can use find_packages(). - packages=find_packages(exclude=['contrib', 'docs', '*tests*']), - - # List run-time dependencies here. These will be installed by pip when - # your project is installed. 
For an analysis of "install_requires" vs pip's - # requirements files see: - # https://packaging.python.org/en/latest/requirements.html - install_requires=[ - "numpy", - "netCDF4", - "cfunits", - "pyproj", - "pyyaml", - "toml", - "netCDF4", - "datetime", - "f90nml", - "cfunits", - "requests", - "json; python_version < '3'", - "StringIO; python_version < '3'", - "eccodes", - "db-sqlite3", - "titanlib >= 0.3.0.dev3", - "gridpp >= 0.6.0.dev16" - ], - - # Not on pypi - # epygram - - # List additional groups of dependencies here (e.g. development - # dependencies). You can install these using the following syntax, - # for example: - # $ pip install -e .[dev,test] - extras_require={ - 'test': [ - "coverage", - "coveralls", - "pep8", - "tomlkit" - ], - 'plot': ['matplotlib'], - 'plot_on_map': ['cartopy'], - }, - - test_suite="surfex.tests", - - # If there are data files included in your packages that need to be - # installed, specify them here. If using Python 2.6 or less, then these - # have to be included in MANIFEST.in as well. - # package_dir={'forcing': 'forcing', "config": "config"}, - package_data={ - 'surfex': ['cfg/config.yml', 'cfg/area.yml', 'cfg/user.yml', 'cfg/first_guess.yml', - "cfg/config_exp.toml", "cfg/config_exp_surfex.toml"], - }, - # include_package_data=True, - - # Although 'package_data' is the preferred approach, in some case you may - # need to place data files outside of your packages. See: - # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa - # In this case, 'data_file' will be installed into '/my_data' - # data_files=[('forcing', ['cfg/config.yml','cfg/area.yml','cfg/user.yml'])], - - # To provide executable scripts, use entry points in preference to the - # "scripts" keyword. Entry points provide cross-platform support and allow - # pip to create the appropriate form of executable for the target platform. 
- - # entry_points={ - # 'console_scripts': [ - # 'create_forcing=forcing:create_forcing', - # 'plot_offline=forcing:plot_offline', - # ], - # }, - scripts=[ - 'bin/create_forcing', - 'bin/qc2obsmon', - 'bin/create_forcing', - 'bin/create_namelist', - 'bin/modify_forcing', - 'bin/FirstGuess4gridpp', - 'bin/gridpp', - 'bin/json_gui2toml', - 'bin/masterodb', - 'bin/merge_json_namelist_settings', - 'bin/merge_toml_files', - 'bin/merge_qc_data', - 'bin/offline', - 'bin/oi2soda', - 'bin/perturbed_offline', - 'bin/pgd', - 'bin/plot_field', - 'bin/plot_points', - 'bin/plot_timeseries', - 'bin/prep', - 'bin/set_geo_from_obs_set', - 'bin/set_geo_from_stationlist', - 'bin/set_domain', - 'bin/soda', - 'bin/titan', - 'bin/bufr2json', - 'bin/cryoclim_pseudoobs', - 'bin/sentinel_obs', - 'bin/dump_environ', - 'bin/shape2ign' - ], -) diff --git a/surfex/__init__.py b/surfex/__init__.py index 456adc2..195f545 100644 --- a/surfex/__init__.py +++ b/surfex/__init__.py @@ -1,232 +1,6 @@ +#!/usr/bin/env python3 """pysurfex module.""" -__version__ = '0.0.2' +from importlib.metadata import version -from .file import SurfexIO, SurfexSurfIO, AsciiSurfexFile, NetCDFSurfexFile, ForcingFileNetCDF, \ - SurfFileTypeExtension, FaSurfexFile, SurfexFileVariable, NCSurfexFile, TexteSurfexFile, \ - PGDFile, PREPFile, SURFFile, read_surfex_points, guess_file_format, parse_filepattern, \ - read_surfex_field, get_surfex_io_object -from .geo import Geo, SurfexGeo, Cartesian, ConfProj, IGN, LonLatReg, LonLatVal, \ - get_geo_object, set_domain, shape2ign -from .read import Converter, ConvertedInput, ConstantValue, ReadData, \ - remove_existing_file -from .run import BatchJob, InputDataToSurfexBinaries, JsonInputData, JsonInputDataFromFile, \ - OutputDataFromSurfexBinaries, JsonOutputData, JsonOutputDataFromFile, Masterodb, SURFEXBinary, \ - PerturbedOffline, clean_working_dir, create_working_dir -from .assim import horizontal_oi -from .obs import Observation, ObservationSet, ObservationFromTitanJsonFile, JsonObservationSet, \ - MetFrostObservations, NetatmoObservationSet, sm_obs_sentinel, snow_pseudo_obs_cryoclim, \ - get_datasources, set_geo_from_obs_set -from .forcing import SurfexForcing, SurfexOutputForcing, AsciiOutput, NetCDFOutput, \ - SurfexNetCDFForcing, modify_forcing, write_formatted_array, set_forcing_config, \ - set_input_object, run_time_loop -from .netcdf import NetCDFReadVariable, NetCDFFileVariable, Netcdf, \ - create_netcdf_first_guess_template, read_cryoclim_nc, read_sentinel_nc, \ - read_first_guess_netcdf_file, write_analysis_netcdf_file, oi2soda -from .grib import Grib, Grib1Variable, Grib2Variable -from .bufr import BufrObservationSet -from .variable import Variable -from .util import deep_update, data_merge, merge_toml_env, merge_toml_env_from_files -from .cache import Cache -from .interpolation import Interpolation, fill_field, grid2points, get_num_neighbours -from .titan import QCDataSet, QualityControl, Departure, TitanDataSet, DomainCheck, \ - Climatology, Sct, Blacklist, Buddy, Plausibility, FirstGuess, Fraction, NoMeta, ObsOperator, \ - Redundancy, dataset_from_file, dataset_from_json, define_quality_control, \ - merge_json_qc_data_sets -from .timeseries import TimeSeriesFromConverter, TimeSeriesFromJson, TimeSeries -from .obsmon import write_obsmon_sqlite_file -from .fa import Fa -from .cli import parse_args_bufr2json, parse_args_create_forcing, parse_args_first_guess_for_oi, \ - parse_args_gridpp, parse_args_hm2pysurfex, parse_args_lsm_file_assim, parse_args_masterodb, \ - parse_args_merge_qc_data, 
parse_args_modify_forcing, parse_args_oi2soda, \ - parse_args_plot_points, parse_args_qc2obsmon, parse_args_set_geo_from_obs_set, \ - parse_args_set_geo_from_stationlist, parse_args_shape2ign, parse_args_surfex_binary, \ - parse_args_titan, parse_cryoclim_pseudoobs, parse_merge_namelist_settings, \ - parse_merge_toml_settings, parse_plot_timeseries_args, parse_sentinel_obs, \ - parse_args_create_namelist, \ - parse_timeseries2json, run_plot_points, run_bufr2json, run_cryoclim_pseuodoobs, run_gridpp, \ - run_lsm_file_assim, run_masterodb, run_merge_namelist_settings, run_merge_toml_settings, \ - run_oi2soda, run_plot_timeseries_from_json, run_sentinel_obs, run_shape2ign, \ - run_surfex_binary, run_timeseries2json, run_titan, first_guess_for_oi, run_create_namelist, \ - set_geo_from_stationlist, hm2pysurfex, merge_qc_data, LoadFromFile -from .namelist import Namelist, BaseNamelist, PgdInputData, PrepInputData, SodaInputData, \ - OfflineInputData, InlineForecastInputData, Ecoclimap, EcoclimapSG, ExternalSurfexInputFile, \ - SystemFilePaths, SystemFilePathsFromFile -from .configuration import Configuration, ConfigurationFromHarmonie, \ - ConfigurationFromHarmonieAndConfigFile, ConfigurationFromJsonFile - -__all__ = ["AsciiOutput", - "AsciiSurfexFile", - "BaseNamelist", - "BatchJob", - "Blacklist", - "Buddy", - "BufrObservationSet", - "Cache", - "Cartesian", - "Climatology", - "ConfProj", - "Configuration", - "ConfigurationFromHarmonie", - "ConfigurationFromHarmonieAndConfigFile", - "ConfigurationFromJsonFile", - "ConstantValue", - "Converter", - "ConvertedInput", - "Departure", - "DomainCheck", - "Ecoclimap", - "EcoclimapSG", - "ExternalSurfexInputFile", - "Fa", - "FaSurfexFile", - "FirstGuess", - "ForcingFileNetCDF", - "Fraction", - "Geo", - "Grib", - "Grib1Variable", - "Grib2Variable", - "IGN", - "InlineForecastInputData", - "InputDataToSurfexBinaries", - "Interpolation", - "JsonInputData", - "JsonInputDataFromFile", - "JsonObservationSet", - "JsonOutputData", - "JsonOutputDataFromFile", - "LoadFromFile", - "LonLatReg", - "LonLatVal", - "Masterodb", - "MetFrostObservations", - "Namelist", - "NetatmoObservationSet", - "Netcdf", - "NetCDFSurfexFile", - "NCSurfexFile", - "NetCDFOutput", - "NetCDFReadVariable", - "NetCDFFileVariable", - "NoMeta", - "Observation", - "ObservationSet", - "ObservationFromTitanJsonFile", - "ObsOperator", - "OfflineInputData", - "OutputDataFromSurfexBinaries", - "QCDataSet", - "QualityControl", - "PerturbedOffline", - "PGDFile", - "PgdInputData", - "PREPFile", - "Plausibility", - "PrepInputData", - "ReadData", - "Redundancy", - "Sct", - "SodaInputData", - "SURFEXBinary", - "SurfexFileVariable", - "SurfexForcing", - "SurfexGeo", - "SurfexIO", - "SurfexSurfIO", - "SurfexOutputForcing", - "SurfexNetCDFForcing", - "SURFFile", - "SurfFileTypeExtension", - "SystemFilePaths", - "SystemFilePathsFromFile", - "TexteSurfexFile", - "TimeSeriesFromConverter", - "TimeSeriesFromJson", - "TimeSeries", - "TitanDataSet", - "Variable", - # - "clean_working_dir", - "create_netcdf_first_guess_template", - "create_working_dir", - "data_merge", - "deep_update", - "dataset_from_file", - "dataset_from_json", - "define_quality_control", - "horizontal_oi", - "get_surfex_io_object", - "get_geo_object", - "get_datasources", - "get_num_neighbours", - "guess_file_format", - "fill_field", - "first_guess_for_oi", - "grid2points", - "hm2pysurfex", - "merge_json_qc_data_sets", - "merge_toml_env", - "merge_toml_env_from_files", - "merge_qc_data", - "modify_forcing", - "oi2soda", - 
"parse_args_bufr2json", - "parse_args_create_forcing", - "parse_args_create_namelist", - "parse_args_first_guess_for_oi", - "parse_args_gridpp", - "parse_args_hm2pysurfex", - "parse_args_lsm_file_assim", - "parse_args_masterodb", - "parse_args_merge_qc_data", - "parse_args_modify_forcing", - "parse_args_oi2soda", - "parse_args_plot_points", - "parse_args_qc2obsmon", - "parse_args_set_geo_from_obs_set", - "parse_args_set_geo_from_stationlist", - "parse_args_shape2ign", - "parse_args_surfex_binary", - "parse_args_titan", - "parse_cryoclim_pseudoobs", - "parse_merge_namelist_settings", - "parse_merge_toml_settings", - "parse_plot_timeseries_args", - "parse_sentinel_obs", - "parse_timeseries2json", - "parse_filepattern", - "read_cryoclim_nc", - "read_first_guess_netcdf_file", - "read_sentinel_nc", - "read_surfex_field", - "read_surfex_points", - "remove_existing_file", - "run_plot_points", - "run_bufr2json", - "run_cryoclim_pseuodoobs", - "run_create_namelist", - "run_gridpp", - "run_lsm_file_assim", - "run_masterodb", - "run_merge_namelist_settings", - "run_merge_toml_settings", - "run_oi2soda", - "run_plot_timeseries_from_json", - "run_sentinel_obs", - "run_shape2ign", - "run_surfex_binary", - "run_timeseries2json", - "run_titan", - "run_time_loop", - "set_domain", - "set_forcing_config", - "set_geo_from_obs_set", - "set_geo_from_stationlist", - "set_input_object", - "shape2ign", - "sm_obs_sentinel", - "snow_pseudo_obs_cryoclim", - "write_analysis_netcdf_file", - "write_formatted_array", - "write_obsmon_sqlite_file", - ] +PACKAGE_NAME = __name__ +__version__ = version(__name__) diff --git a/surfex/assim.py b/surfex/assim.py deleted file mode 100644 index 25f9497..0000000 --- a/surfex/assim.py +++ /dev/null @@ -1,124 +0,0 @@ -"""gridpp.""" -import logging -import numpy as np -try: - import gridpp -except ImportError: - gridpp = None - - -def horizontal_oi(geo, background, observations, gelevs, hlength=10000., - vlength=10000., wlength=0.5, elev_gradient=0, structure_function="Barnes", - max_locations=50, epsilon=0.5, minvalue=None, maxvalue=None, - interpol="bilinear", only_diff=False): - """Do horizontal OI. - - Args: - geo (_type_): _description_ - background (_type_): _description_ - observations (_type_): _description_ - gelevs (_type_): _description_ - hlength (_type_, optional): _description_. Defaults to 10000.. - vlength (_type_, optional): _description_. Defaults to 10000.. - wlength (float, optional): _description_. Defaults to 0.5. - elev_gradient (int, optional): _description_. Defaults to 0. - structure_function (str, optional): _description_. Defaults to "Barnes". - max_locations (int, optional): _description_. Defaults to 50. - epsilon (float, optional): _description_. Defaults to 0.5. - minvalue (_type_, optional): _description_. Defaults to None. - maxvalue (_type_, optional): _description_. Defaults to None. - interpol (str, optional): _description_. Defaults to "bilinear". - only_diff (bool, optional): _description_. Defaults to False. 
- - Raises: - Exception: _description_ - NotImplementedError: _description_ - NotImplementedError: _description_ - NotImplementedError: _description_ - - Returns: - _type_: _description_ - - """ - if gridpp is None: - raise Exception("You need gridpp to perform OI") - - logging.debug(gridpp.__file__) - logging.debug(gridpp.__version__) - glats = geo.lats - glons = geo.lons - - def obs2vectors(my_obs): - return my_obs.lons, my_obs.lats, my_obs.stids, my_obs.elevs, \ - my_obs.values, my_obs.cis, my_obs.lafs - - vectors = np.vectorize(obs2vectors) - lons, lats, __, elevs, values, __, __ = vectors(observations) - - glats = np.transpose(glats) - glons = np.transpose(glons) - background = np.transpose(background) - gelevs = np.transpose(gelevs) - - logging.debug("glats.shape=%s glons.shape=%s gelevs.shape=%s", glats.shape, - glons.shape, gelevs.shape) - bgrid = gridpp.Grid(glats, glons, gelevs) - points = gridpp.Points(lats, lons, elevs) - if interpol == "bilinear": - pbackground = gridpp.simple_gradient(bgrid, points, background, elev_gradient, - gridpp.Bilinear) - elif interpol == "nearest": - pbackground = gridpp.simple_gradient(bgrid, points, background, elev_gradient, - gridpp.Nearest) - else: - raise NotImplementedError(f"Interpolation method {interpol} not implemented") - - # Remove undefined backgrounds - if any(np.isnan(pbackground)): - print("Found undefined backgrounds. Remove them") - lons2 = [] - lats2 = [] - elevs2 = [] - values2 = [] - for point in range(0, len(lons)): - if np.isnan(pbackground[point]): - logging.info("Undefined background in lon=%s lat=%s value=%s", - lons[point], lats[point], values[point]) - else: - lons2.append(lons[point]) - lats2.append(lats[point]) - elevs2.append(elevs[point]) - values2.append(values[point]) - values = values2 - points = gridpp.Points(lats2, lons2, elevs2) - if interpol == "bilinear": - # TODO - pbackground = gridpp.bilinear(bgrid, points, background) - # pbackground = gridpp.simple_gradient(bgrid, points, background, elev_gradient, - # gridpp.Bilinear) - - elif interpol == "nearest": - # pbackground = gridpp.nearest(bgrid, points, background) - pbackground = gridpp.simple_gradient(bgrid, points, background, elev_gradient, - gridpp.Nearest) - else: - raise NotImplementedError - - variance_ratios = np.full(points.size(), epsilon) - - if structure_function == "Barnes": - structure = gridpp.BarnesStructure(hlength, vlength, wlength) - else: - raise NotImplementedError - - field = gridpp.optimal_interpolation(bgrid, background, points, values, variance_ratios, - pbackground, structure, - max_locations) - field = np.asarray(field) - if minvalue is not None: - field[field < minvalue] = minvalue - if maxvalue is not None: - field[field > maxvalue] = maxvalue - if only_diff: - field[field == background] = np.nan - return np.transpose(field) diff --git a/surfex/binary_input.py b/surfex/binary_input.py new file mode 100644 index 0000000..73b78ca --- /dev/null +++ b/surfex/binary_input.py @@ -0,0 +1,904 @@ +"""Input data for surfex binaries.""" +import json +import logging +import os +import subprocess +from abc import ABC, abstractmethod + +from .datetime_utils import as_datetime, as_timedelta +from .ecoclimap import Ecoclimap, EcoclimapSG, ExternalSurfexInputFile +from .file import AsciiSurfexFile, FaSurfexFile, NCSurfexFile + + +class InputDataToSurfexBinaries(ABC): + """Abstract input data.""" + + @abstractmethod + def __init__(self): + """Construct.""" + return NotImplementedError + + @abstractmethod + def prepare_input(self): + """Prepare 
input.""" + return NotImplementedError + + +class OutputDataFromSurfexBinaries(ABC): + """Abstract output data.""" + + @abstractmethod + def __init__(self): + """Construct.""" + return NotImplementedError + + @abstractmethod + def archive_files(self): + """Archive files.""" + return NotImplementedError + + +class JsonOutputData(OutputDataFromSurfexBinaries): + """Output data.""" + + def __init__(self, data): + """Output data from dict. + + Args: + data (dict): Output data. + + """ + OutputDataFromSurfexBinaries.__init__(self) + self.data = data + + def archive_files(self): + """Archive files.""" + for output_file, target in self.data.items(): + + logging.info("%s -> %s", output_file, target) + command = "mv" + if isinstance(target, dict): + for key in target: + logging.debug("%s %s %s", output_file, key, target[key]) + command = target[key] + target = key + + cmd = command + " " + output_file + " " + target + try: + logging.info(cmd) + subprocess.check_call(cmd, shell=True) # noqaS602 + except IOError: + logging.error("%s failed", cmd) + raise RuntimeError(cmd + " failed") from IOError + + +class JsonOutputDataFromFile(JsonOutputData): + """JSON output data.""" + + def __init__(self, file): + """Construct from json file.""" + with open(file, mode="r", encoding="utf-8") as file_handler: + data = json.load(file_handler) + JsonOutputData.__init__(self, data) + + def archive_files(self): + """Archive files.""" + JsonOutputData.archive_files(self) + + +class JsonInputData(InputDataToSurfexBinaries): + """JSON input data.""" + + def __init__(self, data): + """Construct input data. + + Args: + data (dict): Input data. + """ + InputDataToSurfexBinaries.__init__(self) + self.data = data + + def prepare_input(self): + """Prepare input.""" + for target, input_file in self.data.items(): + + logging.info("%s -> %s", target, input_file) + logging.debug(os.path.realpath(target)) + command = None + if isinstance(input_file, dict): + for key in input_file: + logging.debug(key) + logging.debug(input_file[key]) + command = str(input_file[key]) + input_file = str(key) + command = command.replace("@INPUT@", input_file) + command = command.replace("@TARGET@", target) + + if os.path.realpath(target) == os.path.realpath(input_file): + logging.info("Target and input file is the same file") + else: + if command is None: + cmd = "ln -sf " + input_file + " " + target + else: + cmd = command + try: + logging.info(cmd) + subprocess.check_call(cmd, shell=True) # noqaS602 + except IOError: + raise (cmd + " failed") from IOError + + def add_data(self, data): + """Add data. + + Args: + data (dict): Data to add + """ + for key in data: + value = data[key] + self.data.update({key: value}) + + +class JsonInputDataFromFile(JsonInputData): + """JSON input data.""" + + def __init__(self, file): + """Construct JSON input data. + + Args: + file (str): JSON file name + + """ + with open(file, mode="r", encoding="utf-8") as file_handler: + data = json.load(file_handler) + JsonInputData.__init__(self, data) + + def prepare_input(self): + """Prepare input.""" + JsonInputData.prepare_input(self) + + +class PgdInputData(JsonInputData): + """PGD input.""" + + def __init__(self, config, system_file_paths, check_existence=True): + """Construct PD input. + + Args: + config (Configuration): Surfex configuration + system_file_paths (SystemFilePaths): System file paths + check_existence (bool, optional): Check if input files exist. Defaults to True. 
+ + """ + # Ecoclimap settings + eco_sg = config.get_setting("SURFEX#COVER#SG") + if eco_sg: + ecoclimap = EcoclimapSG(config, system_file_paths=system_file_paths) + else: + ecoclimap = Ecoclimap(config, system_file_paths=system_file_paths) + + data = ecoclimap.set_input(check_existence=check_existence) + + ext_data = ExternalSurfexInputFile(system_file_paths) + # Set direct input files + if config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": + version = config.get_setting("SURFEX#FLAKE#LDB_VERSION") + if version != "": + version = "_V" + version + datadir = "flake_dir" + fname = "GlobalLakeDepth" + version + ".dir" + linkbasename = "GlobalLakeDepth" + data.update( + ext_data.set_input_data_from_format( + datadir, + fname, + default_dir="climdir", + linkbasename=linkbasename, + check_existence=check_existence, + ) + ) + fname = "GlobalLakeStatus" + version + ".dir" + linkbasename = "GlobalLakeStatus" + data.update( + ext_data.set_input_data_from_format( + datadir, + fname, + default_dir="climdir", + linkbasename=linkbasename, + check_existence=check_existence, + ) + ) + + possible_direct_data = { + "ISBA": { + "YSAND": "sand_dir", + "YCLAY": "clay_dir", + "YSOC_TOP": "soc_top_dir", + "YSOC_SUB": "soc_sub_dir", + }, + "COVER": {"YCOVER": ecoclimap.cover_dir}, + "ZS": {"YZS": "oro_dir"}, + } + for namelist_section, ftypes in possible_direct_data.items(): + for ftype, data_dir in ftypes.items(): + fname = str( + config.get_setting("SURFEX#" + namelist_section + "#" + ftype) + ) + data.update( + ext_data.set_input_data_from_format( + data_dir, + fname, + default_dir="climdir", + check_existence=check_existence, + ) + ) + + # Treedrag + if config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") != "": + fname = config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") + data_dir = "tree_height_dir" + data.update( + ext_data.set_input_data_from_format( + data_dir, + fname, + default_dir="climdir", + check_existence=check_existence, + ) + ) + + JsonInputData.__init__(self, data) + + +class PrepInputData(JsonInputData): + """Input data for PREP.""" + + def __init__( + self, + config, + system_file_paths, + check_existence=True, + prep_file=None, + prep_pgdfile=None, + ): + """Construct input data for PREP. + + Args: + config (Configuration): Surfex configuration + system_file_paths (SystemFilePaths): System file paths + check_existence (bool, optional): Check if input files exist. Defaults to True. + prep_file (str, optional): Prep input file. Defaults to None. + prep_pgdfile (str, optional): Filetype for prep input. Defaults to None. 
+ + """ + data = {} + # Ecoclimap settings + eco_sg = config.get_setting("SURFEX#COVER#SG") + if not eco_sg: + ecoclimap = Ecoclimap(config, system_file_paths) + data.update(ecoclimap.set_bin_files(check_existence=check_existence)) + + logging.debug("prep class %s", system_file_paths.__class__) + ext_data = ExternalSurfexInputFile(system_file_paths) + if prep_file is not None: + if not prep_file.endswith(".json"): + fname = os.path.basename(prep_file) + if fname != prep_file: + data.update({fname: prep_file}) + if prep_pgdfile is not None: + fname = os.path.basename(prep_pgdfile) + if fname != prep_pgdfile: + data.update({fname: prep_pgdfile}) + + if config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": + data_dir = "flake_dir" + fname = "LAKE_LTA_NEW.nc" + data.update( + ext_data.set_input_data_from_format( + data_dir, + fname, + default_dir="climdir", + check_existence=check_existence, + ) + ) + + JsonInputData.__init__(self, data) + + +class OfflineInputData(JsonInputData): + """Input data for offline.""" + + def __init__(self, config, system_file_paths, check_existence=True): + """Construct input data for offline. + + Args: + config (Configuration): Surfex configuration + system_file_paths (SystemFilePaths): System file paths + check_existence (bool, optional): Check if input files exist. Defaults to True. + + Raises: + NotImplementedError: Filetype not implemented + + """ + data = {} + # Ecoclimap settings + eco_sg = config.get_setting("SURFEX#COVER#SG") + if not eco_sg: + ecoclimap = Ecoclimap(config, system_file_paths) + data.update(ecoclimap.set_bin_files(check_existence=check_existence)) + + data_dir = "forcing_dir" + if config.get_setting("SURFEX#IO#CFORCING_FILETYPE") == "NETCDF": + fname = "FORCING.nc" + data.update( + { + fname: system_file_paths.get_system_file( + data_dir, fname, default_dir=None + ) + } + ) + else: + raise NotImplementedError + + JsonInputData.__init__(self, data) + + +class InlineForecastInputData(JsonInputData): + """Inline forecast input data.""" + + def __init__(self, config, system_file_paths, check_existence=True): + """Construct input data for inline forecast. + + Args: + config (Configuration): Surfex configuration + system_file_paths (SystemFilePaths): System file paths + check_existence (bool, optional): Check if input files exist. Defaults to True. + + """ + data = {} + # Ecoclimap settings + eco_sg = config.get_setting("SURFEX#COVER#SG") + if not eco_sg: + ecoclimap = Ecoclimap(config, system_file_paths) + data.update(ecoclimap.set_bin_files(check_existence=check_existence)) + + JsonInputData.__init__(self, data) + + +class SodaInputData(JsonInputData): + """Input data for SODA.""" + + def __init__( + self, + config, + system_file_paths, + check_existence=True, + masterodb=True, + perturbed_file_pattern=None, + dtg=None, + ): + """Construct input data for SODA. + + Args: + config (Configuration): Surfex configuration + system_file_paths (SystemFilePaths): System file paths + check_existence (bool, optional): Check if input files exist. Defaults to True. + masterodb (bool, optional): Files produced with masterodb. Defaults to True. + perturbed_file_pattern (str, optional): File pattern for perturbed files. Defaults to None. + dtg (datetime, optional): Basetime. Defaults to None. 
+ + """ + self.config = config + self.system_file_paths = system_file_paths + self.file_paths = ExternalSurfexInputFile(self.system_file_paths) + if dtg is not None: + if isinstance(dtg, str): + dtg = as_datetime(dtg) + self.dtg = dtg + JsonInputData.__init__(self, {}) + + # Ecoclimap settings + eco_sg = self.config.get_setting("SURFEX#COVER#SG") + if not eco_sg: + ecoclimap = Ecoclimap(self.config, self.system_file_paths) + self.add_data(ecoclimap.set_bin_files(check_existence=check_existence)) + + # OBS + nnco = self.config.get_setting("SURFEX#ASSIM#OBS#NNCO") + need_obs = False + for pobs in nnco: + if pobs == 1: + need_obs = True + if need_obs: + self.add_data(self.set_input_observations(check_existence=check_existence)) + + # SEA + if self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA") != "NONE": + if self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA") == "INPUT": + self.add_data( + self.set_input_sea_assimilation(check_existence=check_existence) + ) + + # WATER + if self.config.get_setting("SURFEX#ASSIM#SCHEMES#INLAND_WATER") != "NONE": + pass + + # NATURE + if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") != "NONE": + if self.config.setting_is("SURFEX#ASSIM#SCHEMES#ISBA", "EKF"): + data = self.set_input_vertical_soil_ekf( + check_existence=check_existence, + masterodb=masterodb, + pert_fp=perturbed_file_pattern, + ) + self.add_data(data) + if self.config.setting_is("SURFEX#ASSIM#SCHEMES#ISBA", "OI"): + self.add_data(self.set_input_vertical_soil_oi()) + if self.config.setting_is("SURFEX#ASSIM#SCHEMES#ISBA", "ENKF"): + self.add_data( + self.set_input_vertical_soil_enkf( + check_existence=check_existence, + masterodb=masterodb, + pert_fp=perturbed_file_pattern, + ) + ) + + # Town + if self.config.get_setting("SURFEX#ASSIM#SCHEMES#TEB") != "NONE": + pass + + def set_input_observations(self, check_existence=True): + """Input data for observations. + + Args: + check_existence (bool, optional): Check if input files exist. Defaults to True. + + Raises: + NotImplementedError: File format not implemented + RuntimeError: Obs ASCII file needs DTG information + + Returns: + obssettings: Input observations. + + """ + cfile_format_obs = self.config.get_setting("SURFEX#ASSIM#OBS#CFILE_FORMAT_OBS") + if cfile_format_obs == "ASCII": + if self.dtg is None: + raise RuntimeError("Obs ASCII file needs DTG information") + cyy = self.dtg.strftime("%y") + cmm = self.dtg.strftime("%m") + cdd = self.dtg.strftime("%d") + chh = self.dtg.strftime("%H") + target = "OBSERVATIONS_" + cyy + cmm + cdd + "H" + chh + ".DAT" + elif cfile_format_obs == "FA": + target = "ICMSHANAL+0000" + else: + raise NotImplementedError(cfile_format_obs) + + data_dir = "obs_dir" + obsfile = self.system_file_paths.get_system_file( + data_dir, + target, + default_dir="assim_dir", + check_existence=check_existence, + basedtg=self.dtg, + ) + obssettings = {target: obsfile} + return obssettings + + def set_input_sea_assimilation(self, check_existence=True): + """Input data for sea assimilation. + + Args: + check_existence (bool, optional): Check if input files are existing. Defaults to True. 
+ + Raises: + NotImplementedError: File format not implemented + + Returns: + sea_settings(dict): Input filed for sea assimilation + + """ + cfile_format_sst = self.config.get_setting("SURFEX#ASSIM#SEA#CFILE_FORMAT_SST") + if cfile_format_sst.upper() == "ASCII": + target = "SST_SIC.DAT" + elif cfile_format_sst.upper() == "FA": + target = "SST_SIC" + else: + raise NotImplementedError(cfile_format_sst) + + data_dir = "sst_file_dir" + sstfile = self.system_file_paths.get_system_file( + data_dir, + target, + basedtg=self.dtg, + check_existence=check_existence, + default_dir="assim_dir", + ) + sea_settings = {target: sstfile} + return sea_settings + + def set_input_vertical_soil_oi(self): + """Input data for OI in soil. + + Raises: + NotImplementedError: File format not implemented + RuntimeError: You must set DTG + + Returns: + oi_settings(dict): Input files for OI + + """ + oi_settings = {} + # Climate + cfile_format_clim = self.config.get_setting( + "SURFEX#ASSIM#ISBA#OI#CFILE_FORMAT_CLIM" + ) + if cfile_format_clim.upper() == "ASCII": + target = "CLIMATE.DAT" + elif cfile_format_clim.upper() == "FA": + target = "clim_isba" + else: + raise NotImplementedError(cfile_format_clim) + + data_dir = "climdir" + climfile = self.system_file_paths.get_system_file( + data_dir, target, default_dir="assim_dir", check_existence=True + ) + oi_settings.update({target: climfile}) + + # First guess for SURFEX + cfile_format_fg = self.config.get_setting("SURFEX#ASSIM#ISBA#OI#CFILE_FORMAT_FG") + if cfile_format_fg.upper() == "ASCII": + if self.dtg is None: + raise RuntimeError("First guess in ASCII format needs DTG information") + cyy = self.dtg.strftime("%y") + cmm = self.dtg.strftime("%m") + cdd = self.dtg.strftime("%d") + chh = self.dtg.strftime("%H") + target = "FIRST_GUESS_" + cyy + cmm + cdd + "H" + chh + ".DAT" + elif cfile_format_fg.upper() == "FA": + target = "FG_OI_MAIN" + else: + raise NotImplementedError(cfile_format_fg) + + data_dir = "first_guess_dir" + first_guess = self.system_file_paths.get_system_file( + data_dir, + target, + default_dir="assim_dir", + basedtg=self.dtg, + check_existence=True, + ) + oi_settings.update({target: first_guess}) + + data_dir = "ascat_dir" + ascatfile = self.system_file_paths.get_system_file( + data_dir, + target, + default_dir="assim_dir", + basedtg=self.dtg, + check_existence=True, + ) + oi_settings.update({"ASCAT_SM.DAT": ascatfile}) + + # OI coefficients + data_dir = "oi_coeffs_dir" + oi_coeffs = self.config.get_setting("SURFEX#ASSIM#ISBA#OI#COEFFS") + oi_coeffs = self.system_file_paths.get_system_file( + data_dir, oi_coeffs, default_dir="assim_dir", check_existence=True + ) + oi_settings.update({"fort.61": oi_coeffs}) + + # LSM + cfile_format_lsm = self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM") + if cfile_format_lsm.upper() == "ASCII": + target = "LSM.DAT" + elif cfile_format_lsm.upper() == "FA": + target = "FG_OI_MAIN" + else: + raise NotImplementedError(cfile_format_lsm) + + data_dir = "lsm_dir" + lsmfile = self.system_file_paths.get_system_file( + data_dir, + target, + default_dir="assim_dir", + basedtg=self.dtg, + check_existence=True, + ) + oi_settings.update({target: lsmfile}) + return oi_settings + + def set_input_vertical_soil_ekf( + self, check_existence=True, masterodb=True, pert_fp=None, geo=None + ): + """Input data for EKF in soil. + + Args: + check_existence (bool, optional): Check if files exist. Defaults to True. + masterodb (bool, optional): Files produced with masterodb. Defaults to True. 
+ pert_fp (str, optional): File pattern for perturbed files. Defaults to None. + geo (surfex.geo.Geo, optional): Geometry. Defaults to None. + + Raises: + NotImplementedError: File type not implmented + RuntimeError: You must set DTG + + Returns: + ekf_settings(dict): EKF input files + + """ + if self.dtg is None: + raise RuntimeError("You must set DTG") + + cyy = self.dtg.strftime("%y") + cmm = self.dtg.strftime("%m") + cdd = self.dtg.strftime("%d") + chh = self.dtg.strftime("%H") + ekf_settings = {} + + # TODO + fcint = 3 + fg_dtg = self.dtg - as_timedelta(seconds=fcint * 3600.0) + data_dir = "first_guess_dir" + first_guess = self.system_file_paths.get_system_path( + data_dir, + default_dir="assim_dir", + validtime=self.dtg, + basedtg=fg_dtg, + check_existence=check_existence, + ) + # First guess for SURFEX + csurf_filetype = self.config.get_setting("SURFEX#IO#CSURF_FILETYPE").lower() + fgf = self.config.get_setting( + "SURFEX#IO#CSURFFILE", validtime=self.dtg, basedtg=fg_dtg + ) + first_guess = first_guess + "/" + fgf + if csurf_filetype == "ascii": + fg_file = AsciiSurfexFile(first_guess, geo=geo) + fgf = fg_file.filename + elif csurf_filetype == "nc": + logging.debug("%s", fgf) + fg_file = NCSurfexFile(first_guess, geo=geo) + fgf = fg_file.filename + elif csurf_filetype == "fa": + lfagmap = self.config.get_setting("SURFEX#IO#LFAGMAP") + # TODO for now assume that first guess always is a inline forecast with FA format + fg_file = FaSurfexFile(first_guess, lfagmap=lfagmap, masterodb=masterodb) + fgf = fg_file.filename + else: + raise NotImplementedError + + # We never run inline model for perturbations or in SODA + extension = fg_file.extension + if csurf_filetype == "fa": + extension = "fa" + + ekf_settings.update({"PREP_INIT." + extension: fgf}) + ekf_settings.update( + {"PREP_" + cyy + cmm + cdd + "H" + chh + "." + extension: fgf} + ) + + nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#NNCV") + llincheck = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#LLINCHECK") + lnncv = len(nncv) + 1 + if llincheck: + lnncv = (len(nncv) * 2) + 1 + pert_ekf = 0 + pert_input = 0 + for ppp in range(0, lnncv): + exists = False + if ppp > 0: + p_p = ppp + if llincheck and ppp > len(nncv): + p_p = ppp - len(nncv) + if nncv[p_p - 1] == 1: + exists = True + pert_input = ppp + else: + exists = True + + if exists: + data_dir = "perturbed_run_dir" + if pert_fp is None: + logging.info("Use default CSURFFILE for perturbed file names") + pert_fp = ( + self.config.get_setting( + "SURFEX#IO#CSURFFILE", check_parsing=False + ) + + "." + + extension + ) + + # TODO depending on when perturbations are run + pert_run = self.system_file_paths.get_system_file( + data_dir, + pert_fp, + validtime=self.dtg, + basedtg=fg_dtg, + check_existence=check_existence, + default_dir="assim_dir", + pert=pert_input, + ) + + target = ( + "PREP_" + + cyy + + cmm + + cdd + + "H" + + chh + + "_EKF_PERT" + + str(pert_ekf) + + "." 
+ + extension + ) + ekf_settings.update({target: pert_run}) + pert_ekf = pert_ekf + 1 + + # LSM + # Fetch first_guess needed for LSM for extrapolations + if self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER"): + cfile_format_lsm = self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM") + if cfile_format_lsm.upper() == "ASCII": + target = "LSM.DAT" + elif cfile_format_lsm.upper() == "FA": + target = "FG_OI_MAIN" + else: + raise NotImplementedError(cfile_format_lsm) + + data_dir = "lsm_dir" + lsmfile = self.system_file_paths.get_system_file( + data_dir, + target, + default_dir="assim_dir", + validtime=self.dtg, + basedtg=fg_dtg, + check_existence=check_existence, + ) + ekf_settings.update({target: lsmfile}) + return ekf_settings + + def set_input_vertical_soil_enkf( + self, check_existence=True, masterodb=True, pert_fp=None, geo=None + ): + """Input data for ENKF in soil. + + Args: + check_existence (bool, optional): Check if files exist. Defaults to True. + masterodb (bool, optional): Files produced with masterodb. Defaults to True. + pert_fp (str, optional): File pattern for perturbed files. Defaults to None. + geo (surfex.geo.Geo, optional): Geometry. Defaults to None. + + Returns: + enkf_settings(dict): ENKF input data + + Raises: + NotImplementedError: File type not implemented + RuntimeError: You must set DTG + + """ + if self.dtg is None: + raise RuntimeError("You must set DTG") + + cyy = self.dtg.strftime("%y") + cmm = self.dtg.strftime("%m") + cdd = self.dtg.strftime("%d") + chh = self.dtg.strftime("%H") + enkf_settings = {} + + # First guess for SURFEX + csurf_filetype = self.config.get_setting("SURFEX#IO#CSURF_FILETYPE").lower() + + # TODO + fcint = 3 + fg_dtg = self.dtg - as_timedelta(seconds=fcint * 3600) + fgf = self.config.get_setting( + "SURFEX#IO#CSURFFILE", validtime=self.dtg, basedtg=fg_dtg + ) + if csurf_filetype == "ascii": + fg_file = AsciiSurfexFile(fgf, geo=geo) + fgf = fg_file.filename + elif csurf_filetype == "nc": + fg_file = NCSurfexFile(fgf, geo=geo) + fgf = fg_file.filename + elif csurf_filetype == "fa": + lfagmap = self.config.get_setting("SURFEX#IO#LFAGMAP") + # TODO for now assume that first guess always is a inline forecast with FA format + fg_file = FaSurfexFile(fgf, lfagmap=lfagmap, geo=geo, masterodb=masterodb) + fgf = fg_file.filename + else: + raise NotImplementedError + + data_dir = "first_guess_dir" + first_guess = self.system_file_paths.get_system_file( + data_dir, + fgf, + default_dir="assim_dir", + validtime=self.dtg, + basedtg=fg_dtg, + check_existence=check_existence, + ) + + # We newer run inline model for perturbations or in SODA + extension = fg_file.extension + if csurf_filetype == "fa": + extension = "fa" + + nens_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NENS_M") + + enkf_settings.update({"PREP_INIT." + extension: first_guess}) + enkf_settings.update( + {"PREP_" + cyy + cmm + cdd + "H" + chh + "." + extension: first_guess} + ) + enkf_settings.update( + { + "PREP_" + + cyy + + cmm + + cdd + + "H" + + chh + + "_EKF_ENS" + + str(nens_m) + + "." + + extension: first_guess + } + ) + + for ppp in range(0, nens_m): + data_dir = "perturbed_run_dir" + if pert_fp is None: + logging.info("Use default CSURFFILE for perturbed file names") + perturbed_file_pattern = ( + self.config.get_setting("SURFEX#IO#CSURFFILE", check_parsing=False) + + "." 
+ + extension + ) + + # TODO depending on when perturbations are run + perturbed_run = self.system_file_paths.get_system_file( + data_dir, + perturbed_file_pattern, + validtime=self.dtg, + basedtg=fg_dtg, + check_existence=check_existence, + default_dir="assim_dir", + pert=ppp, + ) + + target = ( + "PREP_" + + cyy + + cmm + + cdd + + "H" + + chh + + "_EKF_ENS" + + str(ppp) + + "." + + extension + ) + enkf_settings.update({target: perturbed_run}) + + # LSM + # Fetch first_guess needed for LSM for extrapolations + if self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER"): + cfile_format_lsm = self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM") + if cfile_format_lsm.upper() == "ASCII": + target = "LSM.DAT" + elif cfile_format_lsm.upper() == "FA": + target = "FG_OI_MAIN" + else: + raise NotImplementedError(cfile_format_lsm) + + data_dir = "lsm_dir" + lsmfile = self.system_file_paths.get_system_file( + data_dir, + target, + default_dir="assim_dir", + validtime=self.dtg, + basedtg=fg_dtg, + check_existence=check_existence, + ) + enkf_settings.update({target: lsmfile}) + return enkf_settings diff --git a/surfex/bufr.py b/surfex/bufr.py index aaa0d83..2c637d1 100644 --- a/surfex/bufr.py +++ b/surfex/bufr.py @@ -1,10 +1,10 @@ """bufr treatment.""" -from datetime import datetime -from math import exp -import sys import logging -import surfex +import sys +from math import exp + import numpy as np + try: import eccodes # type: ignore except ImportError: @@ -14,16 +14,30 @@ eccodes = None logging.warning("ECCODES not found. Needed for bufr reading") # Needed in Python 3.5 -except Exception as ex: - logging.warning("Could not load eccodes %s", str(ex)) +except Exception: + logging.warning("Could not load eccodes") eccodes = None -class BufrObservationSet(surfex.obs.ObservationSet): +from .datetime_utils import as_datetime_args +from .obs import ObservationSet +from .observation import Observation + + +class BufrObservationSet(ObservationSet): """Create observation data set from bufr observations.""" - def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, latrange=None, - label="bufr", use_first=False): + def __init__( + self, + bufrfile, + variables, + valid_dtg, + valid_range, + lonrange=None, + latrange=None, + label="bufr", + use_first=False, + ): """Initialize a bufr observation set. Args: @@ -36,6 +50,10 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l label (str): A label for the resulting observations set use_first (bool): Use only the first valid observation for a point if more are found + Raises: + RuntimeError: ECCODES not found. Needed for bufr reading + NotImplementedError: Not implemented + """ if lonrange is None: lonrange = [-180, 180] @@ -43,7 +61,7 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l latrange = [-90, 90] if eccodes is None: - raise Exception("ECCODES not found. Needed for bufr reading") + raise RuntimeError("ECCODES not found. 
Needed for bufr reading") logging.debug(eccodes.__file__) # open bufr file @@ -51,19 +69,19 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l # define the keys to be printed keys = [ - 'latitude', - 'localLatitude', - 'longitude', - 'localLongitude', - 'year', - 'month', - 'day', - 'hour', - 'minute', - 'heightOfStationGroundAboveMeanSeaLevel', - 'heightOfStation', - 'stationNumber', - 'blockNumber' + "latitude", + "localLatitude", + "longitude", + "localLongitude", + "year", + "month", + "day", + "hour", + "minute", + "heightOfStationGroundAboveMeanSeaLevel", + "heightOfStation", + "stationNumber", + "blockNumber", ] nerror = {} ntime = {} @@ -74,20 +92,32 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l if var == "relativeHumidityAt2M": keys.append("airTemperatureAt2M") keys.append("dewpointTemperatureAt2M") - keys.append("/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" - "/airTemperature") - keys.append("/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" - "/airTemperature") - keys.append("/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" - "/dewpointTemperature") - keys.append("/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" - "/dewpointTemperature") + keys.append( + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" + "/airTemperature" + ) + keys.append( + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" + "/airTemperature" + ) + keys.append( + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" + "/dewpointTemperature" + ) + keys.append( + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" + "/dewpointTemperature" + ) elif var == "airTemperatureAt2M": keys.append("airTemperatureAt2M") - keys.append("/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" - "/airTemperature") - keys.append("/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" - "/airTemperature") + keys.append( + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" + "/airTemperature" + ) + keys.append( + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" + "/airTemperature" + ) else: keys.append(var) nerror.update({var: 0}) @@ -96,18 +126,13 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l ndomain.update({var: 0}) nobs.update({var: 0}) - print("Reading " + bufrfile) - print("Looking for keys: " + str(keys)) + logging.info("Reading %s", bufrfile) + logging.info("Looking for keys: %s", str(keys)) cnt = 0 observations = list() # loop for the messages in the file - # nerror = 0 - # ndomain = 0 - # nundef = 0 - # ntime = 0 not_decoded = 0 - # removed = 0 registry = {} while 1: # get handle for message @@ -115,12 +140,10 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l if bufr is None: break - # print("message: %s" % cnt) - # we need to instruct ecCodes to expand all the descriptors # i.e. 
unpack the data values try: - eccodes.codes_set(bufr, 'unpack', 1) + eccodes.codes_set(bufr, "unpack", 1) decoded = True except eccodes.CodesInternalError as err: not_decoded = not_decoded + 1 @@ -154,7 +177,10 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l logging.debug("Decode: %s", key) val = eccodes.codes_get(bufr, key) logging.debug("Got: %s=%s", key, val) - if val == eccodes.CODES_MISSING_DOUBLE or val == eccodes.CODES_MISSING_LONG: + if ( + val == eccodes.CODES_MISSING_DOUBLE + or val == eccodes.CODES_MISSING_LONG + ): val = np.nan if key == "latitude": lat = val @@ -194,17 +220,25 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l block_number = val if key == "airTemperatureAt2M": t2m = val - if key == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2"\ - "/airTemperature" or \ - key == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5"\ - "/airTemperature": + if ( + key + == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" + "/airTemperature" + or key + == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" + "/airTemperature" + ): temp = val if key == "dewpointTemperatureAt2M": td2m = val - if key == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2"\ - "/dewpointTemperature" or \ - key == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5"\ - "/dewpointTemperature": + if ( + key + == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2" + "/dewpointTemperature" + or key + == "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5" + "/dewpointTemperature" + ): t_d = val if key == "totalSnowDepth": s_d = val @@ -213,8 +247,6 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l except eccodes.CodesInternalError: logging.debug('Report does not contain key="%s"', key) - # all_found = False - # print('Report does not contain key="%s" : %s' % (key, err.msg)) got_pos = True if np.isnan(lat): @@ -246,17 +278,19 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l try: value = self.td2rh(td2m, t2m) value = value * 0.01 - except Exception as conv_ex: - logging.debug("Got exception for %s:%s", var, str(conv_ex)) + except Exception: + logging.debug("Got exception for %s:", var) value = np.nan else: if not np.isnan(temp) and not np.isnan(t_d): try: value = self.td2rh(t_d, temp) value = value * 0.01 - except Exception as conv_ex: - logging.debug("Got exception for %s:%s", var, - str(conv_ex)) + except Exception: + logging.debug( + "Got exception for %s", + var, + ) value = np.nan elif var == "airTemperatureAt2M": if np.isnan(t2m): @@ -269,7 +303,9 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l elif var == "heightOfBaseOfCloud": value = c_b else: - raise NotImplementedError(f"Var {var} is not coded! Please do it!") + raise NotImplementedError( + f"Var {var} is not coded! Please do it!" 
+ ) else: + logging.debug("Pos already exists %s %s", pos, var) @@ -295,24 +331,48 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l if not all_found: nerror.update({var: nerror[var] + 1}) - logging.debug("Check on position in space and time %s %s %s %s %s %s", - lon, lonrange[0], lonrange[1], lat, - latrange[0], latrange[1]) - if latrange[0] <= lat <= latrange[1] and lonrange[0] <= lon <= lonrange[1]: - obs_dtg = datetime(year=year, month=month, day=day, hour=hour, - minute=minute) - # print(value) + logging.debug( + "Check on position in space and time %s %s %s %s %s %s", + lon, + lonrange[0], + lonrange[1], + lat, + latrange[0], + latrange[1], + ) + if ( + latrange[0] <= lat <= latrange[1] + and lonrange[0] <= lon <= lonrange[1] + ): + obs_dtg = as_datetime_args( + year=year, month=month, day=day, hour=hour, minute=minute + ) if not np.isnan(value): if self.inside_window(obs_dtg, valid_dtg, valid_range): - logging.debug("Valid DTG for station %s %s %s %s %s %s %s %s", - obs_dtg, valid_dtg, valid_range, lon, lat, - value, elev, stid) + logging.debug( + "Valid DTG for station %s %s %s %s %s %s %s %s", + obs_dtg, + valid_dtg, + valid_range, + lon, + lat, + value, + elev, + stid, + ) if station_number > 0 and block_number > 0: stid = str((block_number * 1000) + station_number) - observations.append(surfex.obs.Observation(obs_dtg, lon, lat, - value, - elev=elev, stid=stid, - varname=var)) + observations.append( + Observation( + obs_dtg, + lon, + lat, + value, + elev=elev, + stid=stid, + varname=var, + ) + ) if use_first: registry[pos].update({var: True}) nobs.update({var: nobs[var] + 1}) @@ -326,37 +386,46 @@ def __init__(self, bufrfile, variables, valid_dtg, valid_range, lonrange=None, l cnt += 1 if (cnt % 1000) == 0: - print('.', end='') + logging.info(".") sys.stdout.flush() # delete handle eccodes.codes_release(bufr) - print("\nFound " + str(len(observations)) + "/" + str(cnt)) - print("Not decoded: " + str(not_decoded)) + logging.info("\nFound %s/%s", str(len(observations)), str(cnt)) + logging.info("Not decoded: %s", str(not_decoded)) for var in variables: - print("\nObservations for var=" + var + ": " + str(nobs[var])) - print("Observations removed because of domain check: " + str(ndomain[var])) - print("Observations removed because of not being defined/found: " + str(nundef[var])) - print("Observations removed because of time window: " + str(ntime[var])) - print("Messages not containing information on all keys: " + str(nerror[var])) + logging.info("\nObservations for var=%s: %s", var, str(nobs[var])) + logging.info( + "Observations removed because of domain check: %s", str(ndomain[var]) + ) + logging.info( + "Observations removed because of not being defined/found: %s", + str(nundef[var]), + ) + logging.info( + "Observations removed because of time window: %s", str(ntime[var]) + ) + logging.info( + "Messages not containing information on all keys: %s", str(nerror[var]) + ) # close the file file_handler.close() - surfex.obs.ObservationSet.__init__(self, observations, label=label) + ObservationSet.__init__(self, observations, label=label) @staticmethod def td2rh(t_d, temp, kelvin=True): """Convert dew point to temperature. Args: - td (int): _description_ - t (int): _description_ - kelvin (bool, optional): _description_. Defaults to True. + t_d (float): Dew point temperature + temp (float): Temperature + kelvin (bool, optional): Input temperatures are in Kelvin. Defaults to True. 
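+
+        Example:
+            A rough sanity check, with values chosen for illustration::
+
+                BufrObservationSet.td2rh(283.15, 293.15)  # approx. 52.5 (percent)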
Raises: - Exception: _description_ - Exception: _description_ + RuntimeError: Dew point temperature is probably not Kelvin + RuntimeError: Temperature is probably not Kelvin Returns: float: Relative humidity (percent) @@ -364,20 +433,30 @@ def td2rh(t_d, temp, kelvin=True): """ if kelvin: if t_d < 100: - raise Exception("Dew point temperature is probably not Kelvin") + raise RuntimeError("Dew point temperature is probably not Kelvin") if temp < 100: - raise Exception("Temperature is probably not Kelvin") + raise RuntimeError("Temperature is probably not Kelvin") t_d = t_d - 273.15 temp = temp - 273.15 - r_h = 100 * (exp((17.625 * t_d) / (243.04 + t_d)) / exp((17.625 * temp) / (243.04 + temp))) + r_h = 100 * ( + exp((17.625 * t_d) / (243.04 + t_d)) / exp((17.625 * temp) / (243.04 + temp)) + ) if r_h > 110 or r_h < 1: - logging.warning("\nWARNING: Calculated rh to %s from %s and %s. Set it to missing", - str(r_h), str(t_d), str(temp)) + logging.warning( + "\nWARNING: Calculated rh to %s from %s and %s. Set it to missing", + str(r_h), + str(t_d), + str(temp), + ) r_h = np.nan elif r_h > 100: - logging.warning("\nWARNING: Calculated rh to %s from %s and %s.", - str(r_h), str(t_d), str(temp) + " Truncate to 100%") + logging.warning( + "\nWARNING: Calculated rh to %s from %s and %s.", + str(r_h), + str(t_d), + str(temp) + " Truncate to 100%", + ) r_h = 100 return r_h @@ -386,12 +465,12 @@ def inside_window(obs_dtg, valid_dtg, valid_range): """Check if inside window. Args: - obs_dtg (_type_): _description_ - valid_dtg (_type_): _description_ - valid_range (_type_): _description_ + obs_dtg (as_datetime): Observation datetime + valid_dtg (as_datetime): Valid datetime + valid_range (as_timedelta): Window Returns: - _type_: _description_ + bool: True if inside window """ if valid_dtg is None: diff --git a/surfex/cache.py b/surfex/cache.py index 7a86442..5de1388 100644 --- a/surfex/cache.py +++ b/surfex/cache.py @@ -1,6 +1,7 @@ """Cache.""" import logging -import datetime + +from .datetime_utils import as_datetime_args class Cache: @@ -88,7 +89,9 @@ def interpolator_is_set(self, inttype, geo_in, geo_out): logging.debug("identifier_out: %s", self.interpolators[inttype]) if identifier_out in self.interpolators[inttype]: logging.debug("identifier_out: %s", identifier_out) - logging.debug("interpolators: %s", self.interpolators[inttype][identifier_out]) + logging.debug( + "interpolators: %s", self.interpolators[inttype][identifier_out] + ) if identifier_in in self.interpolators[inttype][identifier_out]: return True else: @@ -132,7 +135,9 @@ def update_interpolator(self, inttype, geo_in, geo_out, value): identifier_in = geo_in.identifier() identifier_out = geo_out.identifier() - logging.debug("Update interpolator %s %s %s", inttype, identifier_in, identifier_out) + logging.debug( + "Update interpolator %s %s %s", inttype, identifier_in, identifier_out + ) if inttype in self.interpolators: out_geos = {} if identifier_out in self.interpolators[inttype]: @@ -140,13 +145,19 @@ def update_interpolator(self, inttype, geo_in, geo_out, value): in_geos = {} for fint in self.interpolators[inttype][out_grid]: logging.debug("Found %s for grid %s", fint, out_grid) - in_geos.update({fint: self.interpolators[inttype][out_grid][fint]}) + in_geos.update( + {fint: self.interpolators[inttype][out_grid][fint]} + ) if identifier_out == out_grid: - logging.debug("Update: %s for out geo %s", identifier_in, identifier_out) + logging.debug( + "Update: %s for out geo %s", identifier_in, identifier_out + ) 
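+                        # keep the other cached input geometries and only
+                        # overwrite the entry for this input/output geometry pair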
in_geos.update({identifier_in: value}) out_geos.update({out_grid: in_geos}) else: - logging.debug("Setting new: %s for out geo %s", identifier_in, identifier_out) + logging.debug( + "Setting new: %s for out geo %s", identifier_in, identifier_out + ) out_geos.update({identifier_out: {identifier_in: value}}) self.interpolators.update({inttype: out_geos}) else: @@ -172,7 +183,7 @@ def clean_fields(self, this_time): month = int(key[-6:-4]) day = int(key[-4:-2]) hour = int(float(key[-2:])) - field_time = datetime.datetime(year, month, day, hour) + field_time = as_datetime_args(year=year, month=month, day=day, hour=hour) time_duration = (this_time - field_time).total_seconds() if time_duration > self.max_age: del_keys.append(key) @@ -258,8 +269,10 @@ def generate_surfex_id(varname, patches, layers, filename, validtime): _type_: _description_ """ - return f"{varname}{patches}{layers}{filename.split('/')[-1]}"\ - f"{validtime.strftime('%Y%m%d%H')}" + return ( + f"{varname}{patches}{layers}{filename.split('/')[-1]}" + f"{validtime.strftime('%Y%m%d%H')}" + ) @staticmethod def generate_obs_id(varname, filename, validtime): diff --git a/surfex/cli.py b/surfex/cli.py index e6fd032..6628066 100644 --- a/surfex/cli.py +++ b/surfex/cli.py @@ -1,414 +1,140 @@ """Command line interfaces.""" -import sys -from argparse import ArgumentParser, Action -from datetime import datetime, timedelta import json -import os import logging -import yaml -import toml +import os +import sys + import numpy as np -import surfex +import toml +import yaml + try: import matplotlib.pyplot as plt except ModuleNotFoundError: plt = None -class LoadFromFile (Action): - """Load arguments from a file.""" - - def __call__(self, parser, namespace, values, option_string=None): - """Override __call__ method.""" - with values as f_h: - # parse arguments in the file and store them in the target namespace - parser.parse_args(f_h.read().split(), namespace) - - -def parse_args_create_forcing(argv): - """Parse arguments to create forcing. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
- - """ - parser = ArgumentParser(description="Create offline forcing") - parser.add_argument('dtg_start', type=str, help="Start DTG", nargs="?") - parser.add_argument('dtg_stop', type=str, help="Stop DTG", nargs="?") - parser.add_argument('-d', dest="domain", type=str, - help="Domain file describing the points or locations", - nargs="?", required=False, default=None) - parser.add_argument('--harmonie', action="store_true", default=False, - help="Surfex configuration (domain) created from Harmonie environment") - parser.add_argument('--config_exp_surfex', dest="config_exp_surfex", type=str, - help="Toml configuration file for surfex settings potentially " - + "used if --harmonie is set", - default=None, nargs="?") - parser.add_argument('-fb', type=str, help="First base time unless equal to dtg_start", - default=None) - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('-c', '--config', dest="user_config", type=str, help="Configuration file " - + "in yaml format describing customized variable setup", - default=None, nargs="?") - parser.add_argument('-t', '--timestep', type=int, help="Surfex time step", - default=3600, nargs="?") - parser.add_argument('-ci', '--cache_interval', type=int, help="clear cached fields after..", - default=3600, nargs="?") - parser.add_argument('-i', '--input_format', type=str, help="Default input file format", - default="netcdf", - choices=["netcdf", "grib1", "grib2", "surfex", "fa"]) - parser.add_argument('-ig', '--input_geo', dest="geo_input", type=str, - help="Default input geometry if needed", - default=None, required=False) - parser.add_argument('-o', '--output_format', type=str, help="Output file format", default="nc4", - choices=["netcdf", "nc4", "ascii"], nargs="?") - parser.add_argument('-a', dest="analysis", action="store_true", default=False) - parser.add_argument('--interpolation', dest="interpolation", required=False, default="bilinear", - choices=["nearest", "bilinear"]) - parser.add_argument('-of', type=str, help="Output file name", default=None, nargs="?") - parser.add_argument('-p', '--pattern', type=str, help="Filepattern", default=None, nargs="?") - parser.add_argument('--zref', type=str, help="Temperature/humidity reference height", - default="ml", choices=["ml", "screen"]) - parser.add_argument('--uref', type=str, help="Wind reference height: screen/ml/", default="ml", - choices=["ml", "screen"]) - parser.add_argument('--debug', help="Show debug information", action="store_true") - parser.add_argument('--single', help="Print single time step twice", action="store_true") - parser.add_argument('--version', action="version", version=surfex.__version__) - - group_ta = parser.add_argument_group('TA', description="Air temperature [K]") - group_ta.add_argument("--ta", type=str, help="Input format", default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex"]) - group_ta.add_argument("--ta_converter", type=str, help="Converter function to air temperature", - default="none", choices=["none"]) - - group_qa = parser.add_argument_group('QA', description="Specific humidity") - group_qa.add_argument("--qa", type=str, help="Input format", default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex"]) - group_qa.add_argument("--qa_converter", type=str, - help="Converter function to specific humidity", - default="none", choices=["none", "rh2q", "rh2q_mslp"]) - - group_ps = parser.add_argument_group('PS', description="Surface air pressure [Pa]") - 
group_ps.add_argument('--ps', type=str, help="Surface air pressure input format", - default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex", "constant"]) - group_ps.add_argument("--ps_converter", type=str, - help="Converter function to surface air pressure", - default="none", choices=["none", "mslp2ps"]) - - group_dir_sw = parser.add_argument_group('DIR_SW', description="Direct shortwave radiation") - group_dir_sw.add_argument('--dir_sw', type=str, help="Direct short wave radiation input format", - default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex", "constant"]) - group_dir_sw.add_argument("--dir_sw_converter", type=str, - help="Converter function to direct short wave radiation", - default="none", choices=["none", "analysis"]) - - group_sca_sw = parser.add_argument_group('SCA_SW', - description="Scattered short wave radiation flux") - group_sca_sw.add_argument('--sca_sw', type=str, - help="Scattered short wave radiation input format", - default="default", - choices=["netcdf", "grib1", "grib2", "surfex", "constant"]) - group_sca_sw.add_argument("--sca_sw_converter", type=str, - help="Converter function to scattered shortwave radiation flux", - default="none", choices=["none"]) - - group_lw = parser.add_argument_group('LW', description="Long wave radiation flux") - group_lw.add_argument('--lw', type=str, help="Long wave radiation input format", - default="default", - choices=["netcdf", "grib1", "grib2", "surfex", "constant"]) - group_lw.add_argument("--lw_converter", type=str, - help="Converter function to long wave radiation flux", - default="none", choices=["none", "analysis"]) - - group_rain = parser.add_argument_group('RAIN', description="Rainfall rate") - group_rain.add_argument("--rain", type=str, help="Input format", default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex"]) - group_rain.add_argument("--rain_converter", type=str, - help="Converter function to rainfall rate", - default="totalprec", choices=["none", "totalprec", "calcrain"]) - - group_snow = parser.add_argument_group('SNOW', description="Snowfall rate") - group_snow.add_argument("--snow", type=str, help="Input format", default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex"]) - group_snow.add_argument("--snow_converter", type=str, - help="Converter function to snowfall rate", default="none", - choices=["none", "calcsnow", "snowplusgraupel"]) - - group_wind = parser.add_argument_group('WIND', description="Wind speed") - group_wind.add_argument("--wind", type=str, help="Input format", default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex"]) - group_wind.add_argument("--wind_converter", type=str, help="Converter function to windspeed", - default="windspeed", choices=["none", "windspeed"]) - - group_wind_dir = parser.add_argument_group('WIND_DIR', description="Wind direction") - group_wind_dir.add_argument("--wind_dir", type=str, help="Input format", default="default", - choices=["default", "netcdf", "grib1", "grib2", "surfex"]) - group_wind_dir.add_argument("--wind_dir_converter", type=str, - help="Converter function to wind direction", - default="winddir", choices=["none", "winddir"]) - - group_co2 = parser.add_argument_group('CO2', description="Carbon dioxide") - group_co2.add_argument('--co2', type=str, help="CO2 input format", default="default", - choices=["netcdf", "grib1", "constant", "grib2", "surfex"]) - group_co2.add_argument("--co2_converter", type=str, - help="Converter function to carbon dioxide", 
default="none", - choices=["none"]) - - group_zs = parser.add_argument_group('ZS', description="Surface geopotential") - group_zs.add_argument('--zsoro', type=str, help="ZS input format", default="default", - choices=["netcdf", "grib1", "grib2", "surfex", "constant"]) - group_zs.add_argument("--zsoro_converter", type=str, help="Converter function to ZS", - default="none", choices=["none", "phi2m"]) - - group_zval = parser.add_argument_group('ZREF', description="Reference height for temperature " - "and humidity") - group_zval.add_argument('--zval', type=str, help="ZREF input format", default="default", - choices=["netcdf", "grib1", "grib2", "surfex", "constant"]) - group_zval.add_argument("--zval_converter", type=str, help="Converter function to ZREF", - default="none", - choices=["none"]) - - group_uval = parser.add_argument_group('UREF', description="Reference height for wind") - group_uval.add_argument('--uval', type=str, help="UREF input format", default="default", - choices=["netcdf", "grib1", "grib2", "surfex", "constant"]) - group_uval.add_argument("--uval_converter", type=str, help="Converter function to UREF", - default="none", - choices=["none"]) - - if len(argv) < 4: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - - user_config = {} - if "user_config" in kwargs and kwargs["user_config"] is not None: - user_config = yaml.safe_load(open(kwargs["user_config"], mode="r", encoding="utf-8")) or {} - kwargs.update({"user_config": user_config}) - - # Find name of global config file - root = __file__ - if os.path.islink(root): - root = os.path.realpath(root) - base = os.path.dirname(os.path.abspath(root)) - yaml_config = base + "/cfg/config.yml" - - default_conf = yaml.safe_load(open(yaml_config, mode="r", encoding="utf-8")) or sys.exit(1) - kwargs.update({"config": default_conf}) - return kwargs - - -def parse_args_modify_forcing(argv): - """Parse arguments to modify forcing. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser(description="Modify offline forcing NetCDF file") - parser.add_argument('-i', '--input_file', type=str, help="Input forcing file", nargs="?", - required=True) - parser.add_argument('-t', '--time_step', type=str, help="Time step ", nargs="?", - required=False, default=-1) - parser.add_argument('-o', '--output_file', type=str, help="Output forcing file", nargs="?", - required=True) - parser.add_argument('variables', type=str, nargs="+", help="Variables to substitute") - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def parse_args_qc2obsmon(argv): - """Parse arguments for qc2obsmon. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
- - """ - parser = ArgumentParser("Create SQLite data base for obsmon") - parser.add_argument('dtg', type=str, help="YYYYMMDDHH") - parser.add_argument('varname', type=str, help="Variable name") - parser.add_argument('qc', type=str, help="QC dataset JSONfile") - parser.add_argument('--options', type=open, action=LoadFromFile, - help="Load options from file") - parser.add_argument('--operator', type=str, help="Obs operator", - choices=["bilinear", "nearest"], - default="bilinear", required=False) - parser.add_argument('--fg_file', type=str, help="First guess file", required=True) - parser.add_argument('--an_file', type=str, help="Analysis file", required=True) - parser.add_argument('--file_var', type=str, help="File variable", required=True) - parser.add_argument('-o', dest="output", type=str, nargs='?', help="output file", - default="ecma.db") - parser.add_argument('--debug', action="store_true", help="Debug", - required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def parse_args_first_guess_for_oi(argv): - """Parse arguments for firstguess4oi. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser(description="Create first guess file for gridpp") - parser.add_argument('--options', type=open, action=LoadFromFile, - help="Load options from file") - parser.add_argument('-dtg', dest="dtg", type=str, help="Date (YYYYMMDDHH)", required=True) - parser.add_argument('-i', "--inputfile", type=str, default=None, help="Default input file", - nargs="?") - parser.add_argument('-if', dest="inputformat", type=str, help="Input file format", - default="grib2") - parser.add_argument('-d', dest="domain", type=str, help="Domain", required=False, default=None) - parser.add_argument('--harmonie', action="store_true", default=False, - help="Surfex configuration (domain) created from Harmonie environment") - - parser.add_argument('-t2m_file', type=str, default=None, help="File with T2M", nargs="?") - parser.add_argument('-t2m_format', type=str, default=None, - help="File format for file with T2M", nargs="?", - choices=["grib1", "grib2", "netcdf", "surfex", "fa"]) - parser.add_argument('-t2m_converter', type=str, default="none", - help="Converter for T2M", nargs="?", - choices=["none", "tap"]) - parser.add_argument('-rh2m_file', type=str, default=None, help="File with RH2M", nargs="?") - parser.add_argument('-rh2m_format', type=str, default=None, - help="File format for file with RH2M", nargs="?", - choices=["grib1", "grib2", "netcdf", "surfex", "fa"]) - parser.add_argument('-rh2m_converter', type=str, default="none", - help="Converter for RH2M", nargs="?", - choices=["none", "rhp"]) - - parser.add_argument('-sd_file', type=str, default=None, help="Snow depth file", nargs="?") - parser.add_argument('-sd_format', type=str, default=None, - help="Snow depth file format", nargs="?", - choices=["grib1", "grib2", "netcdf", "surfex", "fa"]) - parser.add_argument('--sd_converter', type=str, default="none", help="", nargs="?", - choices=["none", "sweclim", "swe2sd", "sdp"]) - - parser.add_argument('-cb_file', type=str, default=None, help="Cloud base file", nargs="?") - parser.add_argument('-cb_format', type=str, default=None, - help="Cloud base file format", nargs="?", - choices=["grib1", "grib2", "netcdf", 
"surfex", "fa"]) - parser.add_argument('--cb_converter', type=str, default="cloud_base", help="", nargs="?", - choices=["cloud_base"]) - - parser.add_argument('-sm_file', type=str, default=None, help="Soil moisture file", nargs="?") - parser.add_argument('-sm_format', type=str, default=None, - help="Soil moisture file format", nargs="?", - choices=["grib1", "grib2", "netcdf", "surfex", "fa"]) - parser.add_argument('--sm_converter', type=str, default="none", help="", nargs="?", - choices=["none", "smp"]) - - parser.add_argument('-laf_file', type=str, default=None, - help="Land area fraction grib file", nargs="?") - parser.add_argument('-laf_format', type=str, default=None, - help="Snow depth file format", nargs="?", - choices=["grib1", "grib2", "netcdf", "surfex", "fa"]) - parser.add_argument('--laf_converter', type=str, default="nature_town", help="", nargs="?", - choices=["none", "sea2land", "nature_town"]) - - parser.add_argument('-altitude_file', type=str, default=None, - help="SURFEX grib file", nargs="?") - parser.add_argument('-altitude_format', type=str, default=None, - help="Snow depth file format", nargs="?", - choices=["grib1", "grib2", "netcdf", "surfex", "fa"]) - parser.add_argument('--altitude_converter', type=str, default="phi2m", help="", nargs="?", - choices=["none", "phi2m"]) - - parser.add_argument('-o', dest="output", type=str, help="Output file", default="raw.nc") - parser.add_argument('--config', '-c', dest="config", type=str, help="YAML config file", - default="first_guess.yml", nargs="?") - parser.add_argument('variables', nargs="+", - choices=["air_temperature_2m", "relative_humidity_2m", - "surface_snow_thickness", "cloud_base", "surface_soil_moisture"], - help="Variables to create first guess for") - parser.add_argument('--debug', action="store_true", help="Debug", - required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def first_guess_for_oi(**kwargs): - """Run first guess for oi.""" +from .binary_input import ( + InlineForecastInputData, + JsonOutputDataFromFile, + OfflineInputData, + PgdInputData, + PrepInputData, + SodaInputData, +) +from .cache import Cache +from .cmd_parsing import ( + parse_args_bufr2json, + parse_args_create_forcing, + parse_args_create_namelist, + parse_args_first_guess_for_oi, + parse_args_gridpp, + parse_args_hm2pysurfex, + parse_args_lsm_file_assim, + parse_args_masterodb, + parse_args_merge_qc_data, + parse_args_modify_forcing, + parse_args_obs2json, + parse_args_oi2soda, + parse_args_plot_points, + parse_args_qc2obsmon, + parse_args_set_geo_from_obs_set, + parse_args_set_geo_from_stationlist, + parse_args_shape2ign, + parse_args_surfex_binary, + parse_args_titan, + parse_cryoclim_pseudoobs, + parse_sentinel_obs, + parse_set_domain, +) +from .configuration import ( + ConfigurationFromHarmonieAndConfigFile, + ConfigurationFromTomlFile, +) +from .datetime_utils import as_datetime, as_datetime_args, as_timedelta +from .file import PGDFile, PREPFile, SurfexFileVariable, SURFFile +from .forcing import modify_forcing, run_time_loop, set_forcing_config +from .geo import LonLatVal, get_geo_object, set_domain, shape2ign +from .grib import Grib1Variable, Grib2Variable +from .input_methods import create_obsset_file, get_datasources, set_geo_from_obs_set +from .interpolation import horizontal_oi 
+from .namelist import BaseNamelist, Namelist +from .netcdf import ( + create_netcdf_first_guess_template, + oi2soda, + read_cryoclim_nc, + read_first_guess_netcdf_file, + read_sentinel_nc, + write_analysis_netcdf_file, +) +from .obs import Observation, sm_obs_sentinel, snow_pseudo_obs_cryoclim +from .obsmon import write_obsmon_sqlite_file +from .platform import SystemFilePathsFromFile +from .read import ConvertedInput, Converter +from .run import BatchJob, Masterodb, PerturbedOffline, SURFEXBinary +from .titan import ( + TitanDataSet, + dataset_from_file, + define_quality_control, + merge_json_qc_data_sets, +) + + +def get_geo_and_config_from_cmd(**kwargs): + """Get geo and config from cmd.""" if "harmonie" in kwargs and kwargs["harmonie"]: config_exp = None if "config_exp" in kwargs: if kwargs["config_exp"] is not None: config_exp = kwargs["config_exp"] if config_exp is None: - config_exp = surfex.__path__[0] + "/cfg/config_exp_surfex.toml" + config_exp = ( + f"{os.path.abspath(os.path.dirname(__file__))}/cfg/config_exp_surfex.toml" + ) logging.info("Using default config from: %s", config_exp) - input_data = toml.load(open(config_exp, "r", encoding="utf-8")) - config = surfex.ConfigurationFromHarmonie(os.environ, input_data) + config = ConfigurationFromHarmonieAndConfigFile(os.environ, config_exp) geo = config.geo else: if "domain" in kwargs: domain = kwargs["domain"] if os.path.exists(domain): - geo = surfex.geo.get_geo_object(json.load(open(domain, "r", encoding="utf-8"))) + with open(domain, mode="r", encoding="utf-8") as fhandler: + geo = get_geo_object(json.load(fhandler)) else: raise FileNotFoundError(domain) else: - raise Exception("Domain is needed") + geo = None - config_file = kwargs["config"] + if "config" in kwargs: + config = kwargs["config"] + if os.path.exists(config): + config = ConfigurationFromTomlFile(config) + else: + raise FileNotFoundError("File not found: " + config) + else: + config = None + return config, geo + + +def run_first_guess_for_oi(**kwargs): + """Run first guess for oi.""" + config, geo = get_geo_and_config_from_cmd(**kwargs) + + config_file = kwargs["input_config"] if not os.path.exists(config_file): raise FileNotFoundError(config_file) if "output" in kwargs: output = kwargs["output"] else: - raise Exception("No output file provided") + raise RuntimeError("No output file provided") dtg = kwargs["dtg"] - validtime = datetime.strptime(dtg, "%Y%m%d%H") + validtime = as_datetime(dtg) variables = kwargs["variables"] variables = variables + ["altitude", "land_area_fraction"] - cache = surfex.cache.Cache(3600) + cache = Cache(3600) f_g = None for var in variables: @@ -458,7 +184,10 @@ def first_guess_for_oi(**kwargs): inputfile = kwargs["altitude_file"] if "altitude_format" in kwargs and kwargs["altitude_format"] is not None: fileformat = kwargs["altitude_format"] - if "altitude_converter" in kwargs and kwargs["altitude_converter"] is not None: + if ( + "altitude_converter" in kwargs + and kwargs["altitude_converter"] is not None + ): converter = kwargs["altitude_converter"] elif var == "land_area_fraction": if "laf_file" in kwargs and kwargs["laf_file"] is not None: @@ -471,10 +200,10 @@ def first_guess_for_oi(**kwargs): raise NotImplementedError("Variable not implemented " + var) if inputfile is None: - raise Exception("You must set input file") + raise RuntimeError("You must set input file") if fileformat is None: - raise Exception("You must set file format") + raise RuntimeError("You must set file format") logging.debug(inputfile) 
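# Illustrative sketch, not part of the patch: how the new get_geo_and_config_from_cmd()
# helper above is meant to serve the run_* entry points; the file names are placeholders.
config, geo = get_geo_and_config_from_cmd(
    harmonie=False, domain="domain.json", config="config.toml"
)
# "domain.json" is read with json.load() and handed to get_geo_object(), while
# "config.toml" becomes a ConfigurationFromTomlFile.  With harmonie=True the
# configuration is instead built from os.environ and the bundled
# cfg/config_exp_surfex.toml, and geo is taken from config.geo.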
logging.debug(fileformat) @@ -489,32 +218,34 @@ def first_guess_for_oi(**kwargs): if converter not in config[var][fileformat]["converter"]: logging.debug("config_file: %s", config_file) logging.debug("config: %s", config) - raise Exception(f"No converter {converter} definition found in {config}!") + raise RuntimeError(f"No converter {converter} definition found in {config}!") - initial_basetime = validtime - timedelta(seconds=10800) - converter = surfex.read.Converter(converter, initial_basetime, defs, converter_conf, - fileformat) - field = surfex.read.ConvertedInput(geo, var, converter).read_time_step(validtime, cache) + initial_basetime = validtime - as_timedelta(seconds=10800) + converter = Converter( + converter, initial_basetime, defs, converter_conf, fileformat + ) + field = ConvertedInput(geo, var, converter).read_time_step(validtime, cache) field = np.reshape(field, [geo.nlons, geo.nlats]) # Create file if f_g is None: n_x = geo.nlons n_y = geo.nlats - f_g = surfex.create_netcdf_first_guess_template(variables, n_x, n_y, output, geo=geo) - epoch = float((validtime - datetime(1970, 1, 1)).total_seconds()) + f_g = create_netcdf_first_guess_template(variables, n_x, n_y, output, geo=geo) + epoch = float( + (validtime - as_datetime_args(year=1970, month=1, day=1)).total_seconds() + ) f_g.variables["time"][:] = epoch f_g.variables["longitude"][:] = np.transpose(geo.lons) f_g.variables["latitude"][:] = np.transpose(geo.lats) - f_g.variables["x"][:] = [i for i in range(0, n_x)] - f_g.variables["y"][:] = [i for i in range(0, n_y)] + f_g.variables["x"][:] = list(range(0, n_x)) + f_g.variables["y"][:] = list(range(0, n_y)) if var == "altitude": field[field < 0] = 0 if np.isnan(np.sum(field)): fill_nan_value = f_g.variables[var].getncattr("_FillValue") - # fill_nan_value = fg.variables[var]._FillValue logging.info("Field %s got Nan. Fill with: %s", var, str(fill_nan_value)) field[np.where(np.isnan(field))] = fill_nan_value @@ -524,104 +255,17 @@ def first_guess_for_oi(**kwargs): f_g.close() -def parse_args_masterodb(argv): - """Parse the command line input arguments for masterodb. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
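# Illustrative sketch, not part of the patch: the converter pattern used by
# run_first_guess_for_oi above (the same pattern appears in the run_lsm_file_assim
# code removed further down).  The DTG, file name, variable and format are
# placeholders; the defs/converter_conf layout mirrors what the first-guess
# YAML config provides per variable.
validtime = as_datetime("2023042506")
defs = {"filepattern": "fg.grib2", "fileformat": "grib2", "fcint": 10800, "offset": 0}
converter_conf = {"none": {"name": "air_temperature_2m"}}
converter = Converter("none", validtime - as_timedelta(seconds=10800), defs, converter_conf, "grib2")
field = ConvertedInput(geo, "air_temperature_2m", converter).read_time_step(validtime, Cache(3600))
field = np.reshape(field, [geo.nlons, geo.nlats])
# geo is assumed to come from get_geo_and_config_from_cmd() or get_geo_object();
# numpy is already imported as np in this module.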
- - """ - parser = ArgumentParser(description="SURFEX for MASTERRODB") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('--version', action='version', - version=f'surfex {surfex.__version__}') - parser.add_argument('--debug', action="store_true", help="Debug", - required=False, default=False) - parser.add_argument('--wrapper', '-w', type=str, default="", help="Execution wrapper command") - parser.add_argument('--harmonie', action="store_true", default=False, - help="Surfex configuration created from Harmonie environment") - parser.add_argument('--pgd', type=str, nargs="?", required=True, help="Name of the PGD file") - parser.add_argument('--prep', type=str, nargs="?", required=True, help="Name of the PREP file") - parser.add_argument('--force', '-f', action="store_true", default=False, - help="Force re-creation") - parser.add_argument('--rte', '-r', required=True, nargs='?') - parser.add_argument('--config', '-c', required=False, nargs='?') - parser.add_argument('--system_file_paths', '-s', required=True, nargs='?', - help="Input file paths on your system") - parser.add_argument('--namelist_path', '-n', required=True, nargs='?') - parser.add_argument('--domain', type=str, required=False, help="JSON file with domain") - parser.add_argument('--dtg', type=str, required=False, default=None) - parser.add_argument('--output', '-o', type=str, required=False, default=None) - parser.add_argument('--only_archive', action="store_true", - default=False, help="Only call archiving") - parser.add_argument('--tolerate_missing', action="store_true", - default=False, help="Tolerate missing files") - parser.add_argument('--print_namelist', action="store_true", - default=False, help="Print namelsist used") - parser.add_argument('--mode', '-m', type=str, required=True, choices=["forecast", "canari"]) - parser.add_argument('--archive', '-a', required=False, default=None, nargs='?', - help="JSON file with archive output") - parser.add_argument('--binary', '-b', required=False, default=None, nargs='?', - help="Full path of MASTERODB binary") - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - def run_masterodb(**kwargs): """Run masterodb.""" logging.debug("ARGS: %s", kwargs) - if "harmonie" in kwargs and kwargs["harmonie"]: - config_exp = None - if "config" in kwargs: - if kwargs["config"] is not None: - config_exp = kwargs["config"] - if config_exp is None: - config_exp = surfex.__path__[0] + "/cfg/config_exp_surfex.toml" - logging.info("Using default config from: %s", config_exp) - with open(config_exp, mode="r", encoding="utf-8") as file_handler: - input_data = toml.load(file_handler) - config = surfex.ConfigurationFromHarmonie(os.environ, input_data) - geo = config.geo - else: - if "domain" not in kwargs: - raise Exception("Missing domain definition") - if "config" not in kwargs: - raise Exception("Missing config") - - domain = kwargs["domain"] - if os.path.exists(domain): - with open(domain, mode="r", encoding="utf-8") as file_handler: - domain_json = json.load(file_handler) - geo = surfex.geo.get_geo_object(domain_json) - else: - raise FileNotFoundError("File not found: " + domain) - - config = kwargs["config"] - if os.path.exists(config): - with open(config, mode="r", encoding="utf-8") as file_handler: - logging.debug("config %s", config) - input_data = toml.load(file_handler) - config = 
surfex.Configuration(input_data) - else: - raise FileNotFoundError("File not found: " + config) + config, geo = get_geo_and_config_from_cmd(**kwargs) if "config" in kwargs: del kwargs["config"] system_file_paths = kwargs["system_file_paths"] if os.path.exists(system_file_paths): - system_file_paths = surfex.SystemFilePathsFromFile(system_file_paths) + system_file_paths = SystemFilePathsFromFile(system_file_paths) else: raise FileNotFoundError("File not found: " + system_file_paths) del kwargs["system_file_paths"] @@ -645,7 +289,7 @@ def run_masterodb(**kwargs): dtg = None if "dtg" in kwargs: if kwargs["dtg"] is not None and isinstance(kwargs["dtg"], str): - dtg = datetime.strptime(kwargs["dtg"], "%Y%m%d%H") + dtg = as_datetime(kwargs["dtg"]) kwargs.update({"dtg": dtg}) # TODO @@ -659,37 +303,43 @@ def run_masterodb(**kwargs): if os.path.exists(rte): with open(rte, mode="r", encoding="utf-8") as file_handler: rte = json.load(file_handler) - my_batch = surfex.BatchJob(rte, wrapper=wrapper) + my_batch = BatchJob(rte, wrapper=wrapper) else: raise FileNotFoundError my_archive = None if archive is not None: if os.path.exists(archive): - my_archive = surfex.JsonOutputDataFromFile(archive) + my_archive = JsonOutputDataFromFile(archive) else: raise FileNotFoundError if mode == "forecast": - input_data = surfex.InlineForecastInputData(config, system_file_paths, - check_existence=check_existence) + input_data = InlineForecastInputData( + config, system_file_paths, check_existence=check_existence + ) mode = "offline" elif mode == "canari": - input_data = surfex.SodaInputData(config, system_file_paths, - check_existence=check_existence, - perturbed_file_pattern=perturbed_file_pattern, - dtg=dtg) + input_data = SodaInputData( + config, + system_file_paths, + check_existence=check_existence, + perturbed_file_pattern=perturbed_file_pattern, + dtg=dtg, + ) mode = "soda" else: raise NotImplementedError(mode + " is not implemented!") blocks = False if blocks: - my_settings = surfex.Namelist(mode, config, namelist_path, - dtg=dtg, fcint=3).get_namelist() + my_settings = Namelist( + mode, config, namelist_path, dtg=dtg, fcint=3 + ).get_namelist() else: - my_settings = surfex.BaseNamelist(mode, config, namelist_path, - dtg=dtg, fcint=3).get_namelist() + my_settings = BaseNamelist( + mode, config, namelist_path, dtg=dtg, fcint=3 + ).get_namelist() geo.update_namelist(my_settings) # Create input @@ -714,16 +364,39 @@ def run_masterodb(**kwargs): if binary is None: my_batch = None - my_pgdfile = surfex.file.PGDFile(my_format, my_pgdfile, input_file=pgd_file_path, - lfagmap=lfagmap, masterodb=True) - my_prepfile = surfex.PREPFile(my_format, my_prepfile, input_file=prep_file_path, - lfagmap=lfagmap, masterodb=True) - surffile = surfex.SURFFile(my_format, my_surffile, archive_file=output, - lfagmap=lfagmap, masterodb=True) - - masterodb = surfex.Masterodb(my_pgdfile, my_prepfile, surffile, my_settings, - input_data, binary=binary, print_namelist=print_namelist, - batch=my_batch, archive_data=my_archive) + my_pgdfile = PGDFile( + my_format, + my_pgdfile, + input_file=pgd_file_path, + lfagmap=lfagmap, + masterodb=True, + ) + my_prepfile = PREPFile( + my_format, + my_prepfile, + input_file=prep_file_path, + lfagmap=lfagmap, + masterodb=True, + ) + surffile = SURFFile( + my_format, + my_surffile, + archive_file=output, + lfagmap=lfagmap, + masterodb=True, + ) + + masterodb = Masterodb( + my_pgdfile, + my_prepfile, + surffile, + my_settings, + input_data, + binary=binary, + print_namelist=print_namelist, + batch=my_batch, 
+ archive_data=my_archive, + ) else: logging.info("%s already exists!", output) @@ -735,132 +408,14 @@ def run_masterodb(**kwargs): logging.info("Masterodb is None") -def parse_args_surfex_binary(argv, mode): - """Parse the command line input arguments for surfex binary. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - pert = False - need_pgd = True - need_prep = True - if mode == "pgd": - need_pgd = False - need_prep = False - desc = "Create physiography for SURFEX (PGD)" - elif mode == "prep": - need_prep = False - desc = "Prepare initial conditions for SURFEX" - elif mode == "offline": - desc = "Run Offline SURFEX" - elif mode == "soda": - desc = "Run SURFEX data assimilation (SODA)" - elif mode == "perturbed": - pert = True - desc = "Run perturbed Offline SURFEX" - else: - raise NotImplementedError(mode + " is not implemented!") - - parser = ArgumentParser(description=desc) - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('--version', action='version', version=surfex.__version__) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--wrapper', '-w', type=str, default="", help="Execution wrapper command") - if need_pgd: - parser.add_argument('--pgd', type=str, nargs="?", required=True, - help="Name of the PGD file") - if need_prep: - parser.add_argument('--prep', type=str, nargs="?", required=True, - help="Name of the PREP file") - if mode == "prep": - parser.add_argument('--prep_file', required=False, default=None, nargs='?') - parser.add_argument('--prep_filetype', required=False, default=None, nargs='?') - parser.add_argument('--prep_pgdfile', required=False, default=None, nargs='?') - parser.add_argument('--prep_pgdfiletype', required=False, default=None, nargs='?') - if mode == "offline" or mode == "perturbed": - parser.add_argument('--forc_zs', action="store_true", default=False, - help="Set model ZS to forcing ZS") - parser.add_argument('--forcing_dir', required=False, default=None, nargs='?') - parser.add_argument('--force', '-f', action="store_true", help="Force re-creation") - parser.add_argument('--harmonie', action="store_true", default=False, - help="Surfex configuration created from Harmonie environment") - parser.add_argument('--print_namelist', action="store_true", default=False, - help="Print namelist used") - parser.add_argument('--tolerate_missing', action="store_true", default=False, - help="Tolerate missing files") - parser.add_argument('--masterodb', action="store_true", default=False, - help="Input file written by masterodb") - parser.add_argument('--rte', '-r', required=True, nargs='?') - parser.add_argument('--config', '-c', required=False, nargs='?') - parser.add_argument('--system_file_paths', '-s', required=True, nargs='?', - help="Input file paths on your system") - parser.add_argument('--namelist_path', '-n', required=True, nargs='?') - parser.add_argument('--domain', type=str, required=False, help="JSON file with domain") - parser.add_argument('--output', '-o', type=str, required=True) - parser.add_argument('--dtg', type=str, required=False, default=None) - if pert: - parser.add_argument('--pert', '-p', type=int, required=False, default=None) - parser.add_argument('--negpert', action="store_true", default=False, - help="Negative perturbation") - parser.add_argument('--archive', '-a', type=str, required=False, default=None, nargs='?', - help="JSON file with archive output") 
- parser.add_argument('binary', type=str, help="Command to run") - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - def run_surfex_binary(mode, **kwargs): """Run a surfex binary.""" logging.debug("ARGS: %s", kwargs) - if "harmonie" in kwargs and kwargs["harmonie"]: - config_exp = None - if "config" in kwargs: - if kwargs["config"] is not None: - config_exp = kwargs["config"] - if config_exp is None: - config_exp = surfex.__path__[0] + "/cfg/config_exp_surfex.toml" - logging.info("Using default config from: %s", config_exp) - input_data = toml.load(open(config_exp, mode="r", encoding="utf-8")) - config = surfex.ConfigurationFromHarmonie(os.environ, input_data) - geo = config.geo - else: - if "domain" not in kwargs: - raise Exception("Missing domain definition") - if "config" not in kwargs: - raise Exception("Missing config") - - domain = kwargs["domain"] - if os.path.exists(domain): - with open(domain, mode="r", encoding="utf-8") as file_handler: - domain_json = json.load(file_handler) - geo = surfex.geo.get_geo_object(domain_json) - else: - raise FileNotFoundError("File not found: " + domain) - - config = kwargs["config"] - if os.path.exists(config): - with open(config, mode="r", encoding="utf-8") as file_handler: - logging.debug(config) - input_data = toml.load(file_handler) - config = surfex.Configuration(input_data) - else: - raise FileNotFoundError("File not found: " + config) + config, geo = get_geo_and_config_from_cmd(**kwargs) system_file_paths = kwargs["system_file_paths"] if os.path.exists(system_file_paths): - system_file_paths = surfex.SystemFilePathsFromFile(system_file_paths) + system_file_paths = SystemFilePathsFromFile(system_file_paths) else: raise FileNotFoundError("File not found: " + system_file_paths) @@ -898,7 +453,7 @@ def run_surfex_binary(mode, **kwargs): dtg = None if "dtg" in kwargs: if kwargs["dtg"] is not None and isinstance(kwargs["dtg"], str): - dtg = datetime.strptime(kwargs["dtg"], "%Y%m%d%H") + dtg = as_datetime(kwargs["dtg"]) kwargs.update({"dtg": dtg}) logging.debug("kwargs: %s", str(kwargs)) @@ -906,28 +461,37 @@ def run_surfex_binary(mode, **kwargs): pgd = True need_pgd = False need_prep = False - input_data = surfex.PgdInputData(config, system_file_paths, - check_existence=check_existence) + input_data = PgdInputData( + config, system_file_paths, check_existence=check_existence + ) elif mode == "prep": prep = True need_prep = False - input_data = surfex.PrepInputData(config, system_file_paths, - check_existence=check_existence, - prep_file=prep_input_file, - prep_pgdfile=prep_input_pgdfile) + input_data = PrepInputData( + config, + system_file_paths, + check_existence=check_existence, + prep_file=prep_input_file, + prep_pgdfile=prep_input_pgdfile, + ) elif mode == "offline": - input_data = surfex.OfflineInputData(config, system_file_paths, - check_existence=check_existence) + input_data = OfflineInputData( + config, system_file_paths, check_existence=check_existence + ) elif mode == "soda": - input_data = surfex.SodaInputData(config, system_file_paths, - check_existence=check_existence, - masterodb=kwargs["masterodb"], - perturbed_file_pattern=perturbed_file_pattern, - dtg=dtg) + input_data = SodaInputData( + config, + system_file_paths, + check_existence=check_existence, + masterodb=kwargs["masterodb"], + perturbed_file_pattern=perturbed_file_pattern, + dtg=dtg, + ) elif mode == "perturbed": perturbed = True 
- input_data = surfex.OfflineInputData(config, system_file_paths, - check_existence=check_existence) + input_data = OfflineInputData( + config, system_file_paths, check_existence=check_existence + ) else: raise NotImplementedError(mode + " is not implemented!") @@ -963,33 +527,45 @@ def run_surfex_binary(mode, **kwargs): if os.path.exists(rte): with open(rte, mode="r", encoding="utf-8") as file_handler: rte = json.load(file_handler) - my_batch = surfex.BatchJob(rte, wrapper=wrapper) + my_batch = BatchJob(rte, wrapper=wrapper) else: raise FileNotFoundError("File not found: " + rte) my_archive = None if archive is not None: if os.path.exists(archive): - my_archive = surfex.JsonOutputDataFromFile(archive) + my_archive = JsonOutputDataFromFile(archive) else: raise FileNotFoundError("File not found: " + archive) if not os.path.exists(output) or force: blocks = False if blocks: - my_settings = surfex.Namelist(mode, config, namelist_path, forc_zs=forc_zs, - prep_file=prep_input_file, - prep_filetype=prep_input_filetype, - prep_pgdfile=prep_input_pgdfile, - prep_pgdfiletype=prep_input_pgdfiletype, - dtg=dtg, fcint=3).get_namelist() + my_settings = Namelist( + mode, + config, + namelist_path, + forc_zs=forc_zs, + prep_file=prep_input_file, + prep_filetype=prep_input_filetype, + prep_pgdfile=prep_input_pgdfile, + prep_pgdfiletype=prep_input_pgdfiletype, + dtg=dtg, + fcint=3, + ).get_namelist() else: - my_settings = surfex.BaseNamelist(mode, config, namelist_path, forc_zs=forc_zs, - prep_file=prep_input_file, - prep_filetype=prep_input_filetype, - prep_pgdfile=prep_input_pgdfile, - prep_pgdfiletype=prep_input_pgdfiletype, - dtg=dtg, fcint=3).get_namelist() + my_settings = BaseNamelist( + mode, + config, + namelist_path, + forc_zs=forc_zs, + prep_file=prep_input_file, + prep_filetype=prep_input_filetype, + prep_pgdfile=prep_input_pgdfile, + prep_pgdfiletype=prep_input_pgdfiletype, + dtg=dtg, + fcint=3, + ).get_namelist() geo.update_namelist(my_settings) # Create input @@ -1004,135 +580,111 @@ def run_surfex_binary(mode, **kwargs): logging.debug("pgdfile=%s lfagmap=%s %s", my_pgdfile, lfagmap, pgd_file_path) if need_pgd: logging.debug("Need pgd") - my_pgdfile = surfex.file.PGDFile(my_format, my_pgdfile, input_file=pgd_file_path, - lfagmap=lfagmap, - masterodb=masterodb) + my_pgdfile = PGDFile( + my_format, + my_pgdfile, + input_file=pgd_file_path, + lfagmap=lfagmap, + masterodb=masterodb, + ) if need_prep: logging.debug("Need prep") - my_prepfile = surfex.PREPFile(my_format, my_prepfile, input_file=prep_file_path, - lfagmap=lfagmap, - masterodb=masterodb) + my_prepfile = PREPFile( + my_format, + my_prepfile, + input_file=prep_file_path, + lfagmap=lfagmap, + masterodb=masterodb, + ) surffile = None if need_prep and need_pgd: logging.debug("Need pgd and prep") - surffile = surfex.SURFFile(my_format, my_surffile, archive_file=output, - lfagmap=lfagmap, - masterodb=masterodb) + surffile = SURFFile( + my_format, + my_surffile, + archive_file=output, + lfagmap=lfagmap, + masterodb=masterodb, + ) if perturbed: - surfex.PerturbedOffline(binary, my_batch, my_prepfile, pert, my_settings, input_data, - pgdfile=my_pgdfile, surfout=surffile, archive_data=my_archive, - print_namelist=print_namelist, negpert=negpert) + PerturbedOffline( + binary, + my_batch, + my_prepfile, + pert, + my_settings, + input_data, + pgdfile=my_pgdfile, + surfout=surffile, + archive_data=my_archive, + print_namelist=print_namelist, + negpert=negpert, + ) elif pgd: - my_pgdfile = surfex.file.PGDFile(my_format, my_pgdfile, 
input_file=pgd_file_path, - archive_file=output, lfagmap=lfagmap, - masterodb=masterodb) - surfex.SURFEXBinary(binary, my_batch, my_pgdfile, my_settings, input_data, - archive_data=my_archive, print_namelist=print_namelist) + my_pgdfile = PGDFile( + my_format, + my_pgdfile, + input_file=pgd_file_path, + archive_file=output, + lfagmap=lfagmap, + masterodb=masterodb, + ) + SURFEXBinary( + binary, + my_batch, + my_pgdfile, + my_settings, + input_data, + archive_data=my_archive, + print_namelist=print_namelist, + ) elif prep: - my_prepfile = surfex.PREPFile(my_format, my_prepfile, archive_file=output, - lfagmap=lfagmap, - masterodb=masterodb) - surfex.SURFEXBinary(binary, my_batch, my_prepfile, my_settings, input_data, - pgdfile=my_pgdfile, - archive_data=my_archive, print_namelist=print_namelist) + my_prepfile = PREPFile( + my_format, + my_prepfile, + archive_file=output, + lfagmap=lfagmap, + masterodb=masterodb, + ) + SURFEXBinary( + binary, + my_batch, + my_prepfile, + my_settings, + input_data, + pgdfile=my_pgdfile, + archive_data=my_archive, + print_namelist=print_namelist, + ) else: - surfex.SURFEXBinary(binary, my_batch, my_prepfile, my_settings, input_data, - pgdfile=my_pgdfile, - surfout=surffile, archive_data=my_archive, - print_namelist=print_namelist) + SURFEXBinary( + binary, + my_batch, + my_prepfile, + my_settings, + input_data, + pgdfile=my_pgdfile, + surfout=surffile, + archive_data=my_archive, + print_namelist=print_namelist, + ) else: logging.info("%s already exists!", output) -def parse_args_create_namelist(argv): - """Parse the command line input arguments for creating a namelist. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser(description="Create namelist") - parser.add_argument('--version', action='version', version=surfex.__version__) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--wrapper', '-w', type=str, default="", help="Execution wrapper command") - parser.add_argument('mode', type=str, help="Type of namelist") - parser.add_argument('--method', required=False, default="blocks", nargs='?') - parser.add_argument('--prep_file', required=False, default=None, nargs='?') - parser.add_argument('--prep_filetype', required=False, default=None, nargs='?') - parser.add_argument('--prep_pgdfile', required=False, default=None, nargs='?') - parser.add_argument('--prep_pgdfiletype', required=False, default=None, nargs='?') - parser.add_argument('--forc_zs', action="store_true", default=False, - help="Set model ZS to forcing ZS") - parser.add_argument('--forcing_dir', required=False, default=None, nargs='?') - parser.add_argument('--harmonie', action="store_true", default=False, - help="Surfex configuration created from Harmonie environment") - parser.add_argument('--system_file_paths', '-s', required=True, nargs='?', - help="Input file paths on your system") - parser.add_argument('--config', '-c', required=False, nargs='?') - parser.add_argument('--namelist_path', '-n', required=True, nargs='?') - parser.add_argument('--domain', type=str, required=False, help="JSON file with domain") - parser.add_argument('--output', '-o', type=str, required=False) - parser.add_argument('--dtg', type=str, required=False, default=None) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - def 
run_create_namelist(**kwargs): """Create a namelist.""" logging.debug("ARGS: %s", kwargs) - mode = kwargs.get('mode') - if "harmonie" in kwargs and kwargs["harmonie"]: - config_exp = None - if "config" in kwargs: - if kwargs["config"] is not None: - config_exp = kwargs["config"] - if config_exp is None: - config_exp = surfex.__path__[0] + "/cfg/config_exp_surfex.toml" - logging.info("Using default config from: %s", config_exp) - input_data = toml.load(open(config_exp, mode="r", encoding="utf-8")) - config = surfex.ConfigurationFromHarmonie(os.environ, input_data) - geo = config.geo - else: - if "domain" not in kwargs: - raise Exception("Missing domain definition") - if "config" not in kwargs: - raise Exception("Missing config") - - domain = kwargs["domain"] - if os.path.exists(domain): - with open(domain, mode="r", encoding="utf-8") as file_handler: - domain_json = json.load(file_handler) - geo = surfex.geo.get_geo_object(domain_json) - else: - raise FileNotFoundError("File not found: " + domain) - - config = kwargs["config"] - if os.path.exists(config): - with open(config, mode="r", encoding="utf-8") as file_handler: - logging.debug(config) - input_data = toml.load(file_handler) - config = surfex.Configuration(input_data) - else: - raise FileNotFoundError("File not found: " + config) + config, geo = get_geo_and_config_from_cmd(**kwargs) + mode = kwargs.get("mode") system_file_paths = kwargs["system_file_paths"] if os.path.exists(system_file_paths): - system_file_paths = surfex.SystemFilePathsFromFile(system_file_paths) + system_file_paths = SystemFilePathsFromFile(system_file_paths) else: raise FileNotFoundError("File not found: " + system_file_paths) @@ -1163,7 +715,7 @@ def run_create_namelist(**kwargs): dtg = None if "dtg" in kwargs: if kwargs["dtg"] is not None and isinstance(kwargs["dtg"], str): - dtg = datetime.strptime(kwargs["dtg"], "%Y%m%d%H") + dtg = as_datetime(kwargs["dtg"]) kwargs.update({"dtg": dtg}) logging.debug("kwargs: %s", str(kwargs)) @@ -1174,71 +726,39 @@ def run_create_namelist(**kwargs): forc_zs = kwargs["forc_zs"] if kwargs.get("method") == "blocks": - my_settings = surfex.Namelist(mode, config, namelist_path, forc_zs=forc_zs, - prep_file=prep_input_file, geo=geo, - prep_filetype=prep_input_filetype, - prep_pgdfile=prep_input_pgdfile, - prep_pgdfiletype=prep_input_pgdfiletype, - dtg=dtg, fcint=3).get_namelist() + my_settings = Namelist( + mode, + config, + namelist_path, + forc_zs=forc_zs, + prep_file=prep_input_file, + geo=geo, + prep_filetype=prep_input_filetype, + prep_pgdfile=prep_input_pgdfile, + prep_pgdfiletype=prep_input_pgdfiletype, + dtg=dtg, + fcint=3, + ).get_namelist() else: - my_settings = surfex.BaseNamelist(mode, config, namelist_path, forc_zs=forc_zs, - prep_file=prep_input_file, - prep_filetype=prep_input_filetype, - prep_pgdfile=prep_input_pgdfile, - prep_pgdfiletype=prep_input_pgdfiletype, geo=geo, - dtg=dtg, fcint=3).get_namelist() + my_settings = BaseNamelist( + mode, + config, + namelist_path, + forc_zs=forc_zs, + prep_file=prep_input_file, + prep_filetype=prep_input_filetype, + prep_pgdfile=prep_input_pgdfile, + prep_pgdfiletype=prep_input_pgdfiletype, + geo=geo, + dtg=dtg, + fcint=3, + ).get_namelist() geo.update_namelist(my_settings) if os.path.exists(output): os.remove(output) my_settings.write(output) -def parse_args_gridpp(argv): - """Parse the command line input arguments for gridpp. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
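# Illustrative sketch, not part of the patch: the namelist step shared by
# run_create_namelist and the binary drivers above.  mode, config, namelist_path,
# dtg and geo are assumed to be resolved as in the surrounding code, and
# "OPTIONS.nam" is a placeholder output name.
nml = BaseNamelist(mode, config, namelist_path, dtg=dtg, fcint=3).get_namelist()
geo.update_namelist(nml)
nml.write("OPTIONS.nam")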
- - """ - parser = ArgumentParser(description="Create horisontal OI analysis") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('-i', '--input_file', type=str, - help="Input NetCDF file with all variables", required=True) - parser.add_argument('-obs', '--obs_file', type=str, - help="Input JSON file with QC observations", required=True) - parser.add_argument('-o', '--output_file', type=str, - help="Output NetCDF file with all variables", required=True) - parser.add_argument('-v', '--var', type=str, help="Variable", required=True) - parser.add_argument('-hor', dest='hlength', type=float, required=True) - parser.add_argument('-vert', dest='vlength', type=float, default=100000, required=False) - parser.add_argument('--wlength', dest='wlength', type=float, default=0., required=False) - parser.add_argument('--maxLocations', dest='max_locations', type=int, default=20, - required=False) - parser.add_argument('--elevGradient', dest='elev_gradient', type=float, default=0, - required=False, choices=[0, -0.0065]) - parser.add_argument('--epsilon', dest='epsilon', type=float, default=0.25, required=False) - parser.add_argument('--minvalue', dest='minvalue', type=float, default=None, required=False) - parser.add_argument('--maxvalue', dest='maxvalue', type=float, default=None, required=False) - parser.add_argument('--only_diff', action="store_true", - help="Only write differences to file", required=False, - default=False) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - def run_gridpp(**kwargs): """Gridpp.""" var = kwargs["var"] @@ -1260,7 +780,7 @@ def run_gridpp(**kwargs): if "elev_gradient" in kwargs: elev_gradient = kwargs["elev_gradient"] if elev_gradient != -0.0065 and elev_gradient != 0: - raise Exception("Not a valid elevation gradient") + raise RuntimeError("Not a valid elevation gradient") epsilon = 0.25 if "epsilon" in kwargs: epsilon = kwargs["epsilon"] @@ -1276,94 +796,47 @@ def run_gridpp(**kwargs): obs_file = kwargs["obs_file"] # Get input fields - geo, validtime, background, glafs, gelevs = surfex.read_first_guess_netcdf_file(input_file, var) + geo, validtime, background, glafs, gelevs = read_first_guess_netcdf_file( + input_file, var + ) an_time = validtime # Read OK observations - observations = surfex.dataset_from_file(an_time, obs_file, qc_flag=0) + observations = dataset_from_file(an_time, obs_file, qc_flag=0) logging.info("Found %s observations with QC flag == 0", str(len(observations.lons))) - field = surfex.horizontal_oi(geo, background, observations, gelevs, hlength=hlength, - vlength=vlength, wlength=wlength, structure_function="Barnes", - max_locations=max_locations, elev_gradient=elev_gradient, - epsilon=epsilon, minvalue=minvalue, maxvalue=maxvalue, - interpol="bilinear", only_diff=only_diff) + field = horizontal_oi( + geo, + background, + observations, + gelevs, + hlength=hlength, + vlength=vlength, + wlength=wlength, + structure_function="Barnes", + max_locations=max_locations, + elev_gradient=elev_gradient, + epsilon=epsilon, + minvalue=minvalue, + maxvalue=maxvalue, + interpol="bilinear", + only_diff=only_diff, + ) if output_file is not None: - 
surfex.write_analysis_netcdf_file(output_file, field, var, validtime, gelevs, glafs, - new_file=True, geo=geo) - - -def parse_args_titan(argv): - """Parse the command line input arguments for titan. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser(description="Do quality control of observations") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('-i', '--input_file', type=str, - help="Input json file with observation sets and test settings", - required=True) - parser.add_argument('-o', '--output_file', type=str, - help="Output json file with quality checked observations", - required=False, default="qc_obs.json") - parser.add_argument('-v', '--variable', type=str, required=True, help="Observation variable") - parser.add_argument('--indent', type=int, default=None, help="Indent") - parser.add_argument('-dtg', type=str, help="Date time group YYYYMMDDHH", required=True) - parser.add_argument('--harmonie', action="store_true", default=False, - help="Surfex configuration created from Harmonie environment") - parser.add_argument('tests', nargs='+', type=str, help="Which tests to run and order to run") - parser.add_argument('--blacklist', dest="blacklist_file", type=str, required=False, - default=None, - help="JSON file with blacklist") - parser.add_argument('--domain', type=str, required=False, default=None, - help="JSON file with domain") - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs + write_analysis_netcdf_file( + output_file, field, var, validtime, gelevs, glafs, new_file=True, geo=geo + ) def run_titan(**kwargs): """Titan.""" - geo = None - if "harmonie" in kwargs and kwargs["harmonie"]: - config_exp = None - if "config" in kwargs: - if kwargs["config"] is not None: - config_exp = kwargs["config"] - if config_exp is None: - config_exp = surfex.__path__[0] + "/cfg/config_exp_surfex.toml" - logging.info("Using default config from: %s", config_exp) - input_data = toml.load(open(config_exp, "r", encoding="utf-8")) - config = surfex.ConfigurationFromHarmonie(os.environ, input_data) - geo = config.geo - elif "domain" in kwargs: - if kwargs["domain"] is not None: - geo = surfex.get_geo_object(json.load(open(kwargs["domain"], "r", encoding="utf-8"))) - - # Set domain geo if set + __, domain_geo = get_geo_and_config_from_cmd(**kwargs) if "domain_geo" in kwargs: - if geo is not None: + if domain_geo is not None: logging.info("Override domain with domain_geo") - geo = kwargs["domain_geo"] - - if geo is None: - raise Exception("You must set domain geometry!") - domain_geo = geo + with open(kwargs["domain_geo"], mode="r", encoding="utf-8") as fhandler: + domain_geo = json.load(fhandler) blacklist = None if "blacklist" in kwargs: @@ -1371,7 +844,6 @@ def run_titan(**kwargs): elif "blacklist_file" in kwargs: if kwargs["blacklist_file"] is not None: blacklist = json.load(open(kwargs["blacklist_file"], "r", encoding="utf-8")) - # kwargs.update({"blacklist": blacklist}) if "input_file" in kwargs: input_file = kwargs["input_file"] @@ -1383,7 +855,7 @@ def run_titan(**kwargs): if "input_data" in kwargs: settings = kwargs["input_data"] else: - raise 
Exception("You must specify input_file or input_data") + raise RuntimeError("You must specify input_file or input_data") tests = kwargs["tests"] output_file = None @@ -1395,66 +867,21 @@ def run_titan(**kwargs): an_time = kwargs["dtg"] if isinstance(an_time, str): - an_time = datetime.strptime(an_time, "%Y%m%d%H") - # kwargs.update({"an_time": an_time}) + an_time = as_datetime(an_time) var = kwargs["variable"] - tests = surfex.define_quality_control(tests, settings[var], an_time, domain_geo=domain_geo, - blacklist=blacklist) + tests = define_quality_control( + tests, settings[var], an_time, domain_geo=domain_geo, blacklist=blacklist + ) logging.debug("Settings: %s", settings) - datasources = surfex.get_datasources(an_time, settings[var]["sets"]) - data_set = surfex.TitanDataSet(var, settings[var], tests, datasources, an_time) + datasources = get_datasources(an_time, settings[var]["sets"]) + data_set = TitanDataSet(var, settings[var], tests, datasources, an_time) data_set.perform_tests() if output_file is not None: data_set.write_output(output_file, indent=indent) -def parse_args_oi2soda(argv): - """Parse the command line input arguments for oi2soda. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser(description="Create ASCII input for SODA from gridpp files") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('--t2m_file', type=str, help="NetCDF file for T2M", - required=False, default=None) - parser.add_argument('--t2m_var', type=str, help="NetCDF variable name for T2M", required=False, - default="air_temperature_2m") - parser.add_argument('--rh2m_file', type=str, help="NetCDF file for RH2M", - required=False, default=None) - parser.add_argument('--rh2m_var', type=str, help="NetCDF variable name for RH2M", - required=False, default="relative_humidity_2m") - parser.add_argument('--sd_file', type=str, help="NetCDF file for SD", required=False, - default=None) - parser.add_argument('--sd_var', type=str, help="NetCDF variable name for SD", required=False, - default="surface_snow_thickness") - parser.add_argument('--sm_file', type=str, help="NetCDF file for SM", required=False, - default=None) - parser.add_argument('--sm_var', type=str, help="NetCDF variable name for SM", required=False, - default="surface_soil_moisture") - parser.add_argument('dtg', nargs="?", type=str, help="DTG", default=None) - parser.add_argument("-o", dest="output", type=str, help="Output file", default=None) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, - default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) < 3: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - def run_oi2soda(**kwargs): """Oi2soda.""" t2m_file = kwargs["t2m_file"] @@ -1476,135 +903,13 @@ def run_oi2soda(**kwargs): if sm_file is not None: s_m = {"file": sm_file, "var": kwargs["sm_var"]} - dtg = datetime.strptime(kwargs["dtg"], "%Y%m%d%H") - surfex.oi2soda(dtg, t2m=t2m, rh2m=rh2m, s_d=s_d, s_m=s_m, output=output) - - -def parse_args_lsm_file_assim(argv): - """Parse the command line input arguments for land-sea-mask for assimilation. + dtg = as_datetime(kwargs["dtg"]) + oi2soda(dtg, t2m=t2m, rh2m=rh2m, s_d=s_d, s_m=s_m, output=output) - Args: - argv (list): List with arguments. 
- - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser(description="Create ASCII LSM input for SODA") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('--file', type=str, help="Input file name", required=True) - parser.add_argument('--fileformat', type=str, help="Input fileformat", required=True) - parser.add_argument('--var', type=str, help="Variable in input file", required=False, - default="air_temperature_2m") - parser.add_argument('--converter', type=str, help="Converter for variable", required=False, - default="none") - parser.add_argument('--dtg', type=str, help="DTG", default=None, required=False) - parser.add_argument('--domain', type=str, help="Domain", required=True) - parser.add_argument("-o", dest="output", type=str, help="Output file", default=None) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) < 3: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - domain = kwargs["domain"] - logging.debug("domain=%s", domain) - if os.path.exists(domain): - domain_json = json.load(open(domain, "r", encoding="utf-8")) - kwargs.update({"geo": surfex.get_geo_object(domain_json)}) - else: - raise FileNotFoundError(domain) - dtg = kwargs["dtg"] - if dtg is not None and isinstance(dtg, str): - kwargs.update({"dtg": datetime.strptime(dtg, "%Y%m%d%H")}) - return kwargs - - -def run_lsm_file_assim(**kwargs): - """Create LSM file for assimilation.""" - validtime = kwargs["dtg"] - cache = surfex.cache.Cache(3600) - - geo = kwargs["geo"] - inputfile = kwargs["file"] - fileformat = kwargs["fileformat"] - converter = kwargs["converter"] - output = kwargs["output"] - - var = kwargs["var"] - - defs = { - "filepattern": inputfile, - "fileformat": fileformat, - "fcint": 10800, - "offset": 0, - } - - logging.debug("%s %s", var, fileformat) - converter_conf = { - "none": { - "name": var - } - } - - var = "LSM" - initial_basetime = validtime - timedelta(seconds=10800) - converter = surfex.read.Converter(converter, initial_basetime, defs, converter_conf, fileformat) - field = surfex.read.ConvertedInput(geo, var, converter).read_time_step(validtime, cache) - field = np.reshape(field, [geo.nlons, geo.nlats]) - field = np.transpose(field) - - file_handler = open(output, "w", encoding="utf-8") - for lat in range(0, geo.nlats): - for lon in range(0, geo.nlons): - file_handler.write(str(field[lat, lon]) + "\n") - file_handler.close() - - -def parse_args_hm2pysurfex(argv): - """Parse the command line input arguments for hm2pysurfex. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
- - """ - parser = ArgumentParser("hm2pysurfex") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument("-c", dest="config", type=str, required=True, help="PySurfex config file") - parser.add_argument("-e", dest="environment", type=str, required=False, default=None, - help="Environment if not taken from running environment") - parser.add_argument("-o", dest="output", type=str, required=False, default=None, - help="Output toml file") - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def hm2pysurfex(**kwargs): +def run_hm2pysurfex(**kwargs): """Harmonie to pysurfex.""" pysurfex_config = kwargs["config"] - if os.path.exists(pysurfex_config): - pysurfex_config = toml.load(open(pysurfex_config, "r", encoding="utf-8")) - else: - raise FileNotFoundError("Could not find " + pysurfex_config) output = None if "output" in kwargs: @@ -1616,148 +921,13 @@ def hm2pysurfex(**kwargs): environment.update(json.load(open(environment_file, "r", encoding="utf-8"))) # Create configuration - config = surfex.ConfigurationFromHarmonie(environment, pysurfex_config) + config = ConfigurationFromHarmonieAndConfigFile(environment, pysurfex_config) if output is None: logging.info("Config settings %s", config.settings) else: - toml.dump(config.settings, open(output, "w", encoding="utf-8")) - - -def parse_args_bufr2json(argv): - """Parse the command line input arguments for bufr2json. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
- - """ - parser = ArgumentParser("bufr2json") - parser.add_argument('--options', type=open, action=LoadFromFile, - help="Load options from file") - parser.add_argument("-b", dest="bufr", type=str, required=True, help="Bufr file") - parser.add_argument("-v", dest="vars", nargs="+", type=str, required=True, - help="Variables") - parser.add_argument("-o", dest="output", type=str, required=True, help="Output JSON file") - parser.add_argument("-dtg", dest="dtg", type=str, required=True, help="DTG (YYYYMMDHH)") - parser.add_argument("--indent", dest="indent", type=int, required=False, default=None, - help="Indent") - parser.add_argument("-range", dest="valid_range", type=str, help="Valid range in seconds", - default=3600) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, - default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def run_bufr2json(**kwargs): - """Run bufr to a json file.""" - variables = kwargs["vars"] - bufrfile = kwargs["bufr"] - output = kwargs["output"] - valid_dtg = kwargs["dtg"] - valid_range = kwargs["valid_range"] - indent = None - if "indent" in kwargs: - indent = kwargs["indent"] - lonrange = None - if "lonrange" in kwargs: - lonrange = kwargs["lonrange"] - latrange = None - if "latrange" in kwargs: - latrange = kwargs["latrange"] - - valid_dtg = datetime.strptime(valid_dtg, "%Y%m%d%H") - valid_range = timedelta(seconds=int(valid_range)) - bufr_set = surfex.BufrObservationSet(bufrfile, variables, valid_dtg, valid_range, - lonrange=lonrange, latrange=latrange, label="bufr") - - bufr_set.write_json_file(output, indent=indent) - - -def parse_args_plot_points(argv): - """Parse the command line input arguments for plotting points. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
- - """ - parser = ArgumentParser("Plot points") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('-g', '--geo', dest="geo", type=str, - help="Domain/points json geometry definition file", - default=None, required=False) - parser.add_argument('-v', '--variable', dest="variable", type=str, help="Variable name", - required=False) - parser.add_argument('-i', '--inputfile', dest="inputfile", type=str, help="Input file", - default=None, required=False) - parser.add_argument('-it', '--inputtype', dest="inputtype", type=str, help="Filetype", - default="surfex", required=False, - choices=["netcdf", "grib1", "grib2", "surfex", "obs"]) - parser.add_argument('-t', '--validtime', dest="validtime", type=str, help="Valid time", - default=None, required=False) - parser.add_argument('-o', '--output', dest="output", type=str, help="Output file", default=None, - required=False) - parser.add_argument("--no-contour", dest="no_contour", action="store_true") - parser.add_argument("--interpolator", type=str, default="nearest", required=False, - help="Interpolator") - grib = parser.add_argument_group('grib', 'Grib1/2 settings (-it grib1 or -it grib2)') - grib.add_argument('--indicatorOfParameter', type=int, help="Indicator of parameter [grib1]", - default=None) - grib.add_argument('--timeRangeIndicator', type=int, help="Time range indicator [grib1]", - default=0) - grib.add_argument('--levelType', type=str, help="Level type [grib1/grib2]", default="sfc") - grib.add_argument('--level', type=int, help="Level [grib1/grib2]", default=0) - grib.add_argument('--discipline', type=int, help="Discipline [grib2]", default=None) - grib.add_argument('--parameterCategory', type=int, help="Parameter category [grib2]", - default=None) - grib.add_argument('--parameterNumber', type=int, help="ParameterNumber [grib2]", default=None) - grib.add_argument('--typeOfStatisticalProcessing', type=int, - help="TypeOfStatisticalProcessing [grib2]", - default=-1) - - sfx = parser.add_argument_group('Surfex', 'Surfex settings (-it surfex)') - sfx.add_argument('--sfx_type', type=str, help="Surfex file type", default=None, - choices=[None, "forcing", "ascii", "nc", "netcdf", "texte"]) - - sfx.add_argument('--sfx_patches', type=int, help="Patches [ascii/texte]", default=-1) - sfx.add_argument('--sfx_layers', type=int, help="Layers [ascii/texte]", default=-1) - sfx.add_argument('--sfx_datatype', type=str, help="Datatype [ascii]", - choices=["string", "float", "integer"], default="float") - sfx.add_argument('--sfx_interval', type=str, help="Interval [texte]", default=None) - sfx.add_argument('--sfx_basetime', type=str, help="Basetime [texte]", default=None) - sfx.add_argument('--sfx_geo_input', type=str, default=None, - help="JSON file with domain defintion [forcing/netcdf/texte]") - - obs = parser.add_argument_group('Observations', 'Observation settings (scatter plot)') - obs.add_argument('--obs_type', type=str, help="Observation source type (-it obs)", - choices=[None, "json", "bufr", "frost", "netatmo"], default=None) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit() - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs + with open(output, "w", encoding="utf-8") as fhandler: + toml.dump(config.settings, 
fhandler) def run_plot_points(**kwargs): @@ -1767,7 +937,7 @@ def run_plot_points(**kwargs): geo_file = kwargs["geo"] validtime = None if kwargs["validtime"] is not None: - validtime = datetime.strptime(kwargs["validtime"], "%Y%m%d%H") + validtime = as_datetime(kwargs["validtime"]) variable = None if "variable" in kwargs: variable = kwargs["variable"] @@ -1783,7 +953,7 @@ def run_plot_points(**kwargs): geo = None if geo_file is not None: domain_json = json.load(open(geo_file, "r", encoding="utf-8")) - geo = surfex.geo.get_geo_object(domain_json) + geo = get_geo_object(domain_json) contour = True if "no_contour" in kwargs: @@ -1795,15 +965,17 @@ def run_plot_points(**kwargs): if inputtype == "grib1": if filepattern is None: - raise Exception("You must provide a filepattern") + raise RuntimeError("You must provide a filepattern") par = kwargs["indicatorOfParameter"] ltp = kwargs["levelType"] lev = kwargs["level"] tri = kwargs["timeRangeIndicator"] - gribvar = surfex.Grib1Variable(par, ltp, lev, tri) - title = "grib1:" + gribvar.generate_grib_id() + " " + validtime.strftime("%Y%m%d%H") + gribvar = Grib1Variable(par, ltp, lev, tri) + title = ( + "grib1:" + gribvar.generate_grib_id() + " " + validtime.strftime("%Y%m%d%H") + ) var_dict = { "filepattern": filepattern, "fcint": 10800, @@ -1813,13 +985,13 @@ def run_plot_points(**kwargs): "type": ltp, "level": lev, "tri": tri, - "interpolator": interpolator + "interpolator": interpolator, } elif inputtype == "grib2": if filepattern is None: - raise Exception("You must provide a filepattern") + raise RuntimeError("You must provide a filepattern") discipline = kwargs["discipline"] parameter_category = kwargs["parameterCategory"] @@ -1828,12 +1000,20 @@ def run_plot_points(**kwargs): level = kwargs["level"] type_of_statistical_processing = kwargs["typeOfStatisticalProcessing"] - gribvar = surfex.grib.Grib2Variable(discipline, parameter_category, parameter_number, - level_type, level, tsp=type_of_statistical_processing) + gribvar = Grib2Variable( + discipline, + parameter_category, + parameter_number, + level_type, + level, + tsp=type_of_statistical_processing, + ) logging.debug(inputtype) logging.debug(gribvar) logging.debug(validtime) - title = f"{inputtype}: {gribvar.generate_grib_id()} {validtime.strftime('%Y%m%d%H')}" + title = ( + f"{inputtype}: {gribvar.generate_grib_id()} {validtime.strftime('%Y%m%d%H')}" + ) var_dict = { "fcint": 10800, @@ -1845,15 +1025,15 @@ def run_plot_points(**kwargs): "parameterNumber": parameter_number, "levelType": level_type, "level": level, - "typeOfStatisticalProcessing": type_of_statistical_processing + "typeOfStatisticalProcessing": type_of_statistical_processing, } elif inputtype == "netcdf": if variable is None: - raise Exception("You must provide a variable") + raise RuntimeError("You must provide a variable") if filepattern is None: - raise Exception("You must provide a filepattern") + raise RuntimeError("You must provide a filepattern") title = "netcdf: " + variable + " " + validtime.strftime("%Y%m%d%H") var_dict = { @@ -1862,15 +1042,15 @@ def run_plot_points(**kwargs): "fcint": 10800, "file_inc": 10800, "offset": 0, - "interpolator": interpolator + "interpolator": interpolator, } elif inputtype == "surfex": if variable is None: - raise Exception("You must provide a variable") + raise RuntimeError("You must provide a variable") if filepattern is None: - raise Exception("You must provide a filepattern") + raise RuntimeError("You must provide a filepattern") basetime = kwargs["sfx_basetime"] patches = 
kwargs["sfx_patches"] @@ -1881,11 +1061,17 @@ def run_plot_points(**kwargs): geo_input = None if geo_sfx_input is not None: domain_json = json.load(open(geo_sfx_input, "r", encoding="utf-8")) - geo_input = surfex.geo.get_geo_object(domain_json) - - sfx_var = surfex.SurfexFileVariable(variable, validtime=validtime, patches=patches, - layers=layers, basetime=basetime, interval=interval, - datatype=datatype) + geo_input = get_geo_object(domain_json) + + sfx_var = SurfexFileVariable( + variable, + validtime=validtime, + patches=patches, + layers=layers, + basetime=basetime, + interval=interval, + datatype=datatype, + ) title = inputtype + ": " + sfx_var.print_var() var_dict = { @@ -1900,25 +1086,26 @@ def run_plot_points(**kwargs): "fcint": 10800, "file_inc": 10800, "offset": 0, - "interpolator": interpolator + "interpolator": interpolator, } elif inputtype == "obs": contour = False if variable is None: - raise Exception("You must provide a variable") + raise RuntimeError("You must provide a variable") obs_input_type = kwargs["obs_type"] if obs_input_type is None: - raise Exception("You must provide an obs type") + raise RuntimeError("You must provide an obs type") if geo is None: - obs_time = datetime.strptime(kwargs["validtime"], "%Y%m%d%H") + obs_time = as_datetime(kwargs["validtime"]) varname = variable inputfile = kwargs["inputfile"] - geo = surfex.set_geo_from_obs_set(obs_time, obs_input_type, varname, inputfile, - lonrange=None, latrange=None) + geo = set_geo_from_obs_set( + obs_time, obs_input_type, varname, inputfile, lonrange=None, latrange=None + ) var_dict = { "filetype": obs_input_type, @@ -1927,38 +1114,35 @@ def run_plot_points(**kwargs): "filenames": [filepattern], "fcint": 10800, "file_inc": 10800, - "offset": 0 + "offset": 0, } title = inputtype + ": var=" + variable + " type=" + obs_input_type else: raise NotImplementedError - defs = { - var: { - inputtype: { - "converter": { - "none": var_dict - } - } - - } - } + defs = {var: {inputtype: {"converter": {"none": var_dict}}}} converter_conf = defs[var][inputtype]["converter"] if geo is None: - raise Exception("No geo is set") + raise RuntimeError("No geo is set") - cache = surfex.Cache(-1) + cache = Cache(-1) converter = "none" - converter = surfex.read.Converter(converter, validtime, defs, converter_conf, inputtype) - field = surfex.ConvertedInput(geo, var, converter).read_time_step(validtime, cache) + converter = Converter(converter, validtime, defs, converter_conf, inputtype) + field = ConvertedInput(geo, var, converter).read_time_step(validtime, cache) if field is None: - raise Exception("No field read") - - logging.debug("npoints=%s nlons=%s nlats=%s contour=%s field.shape=%s", geo.npoints, - geo.nlons, geo.nlats, contour, field.shape) + raise RuntimeError("No field read") + + logging.debug( + "npoints=%s nlons=%s nlats=%s contour=%s field.shape=%s", + geo.npoints, + geo.nlons, + geo.nlats, + contour, + field.shape, + ) if geo.npoints != geo.nlons and geo.npoints != geo.nlats: if contour: field = np.reshape(field, [geo.nlons, geo.nlats]) @@ -1966,9 +1150,13 @@ def run_plot_points(**kwargs): contour = False if plt is None: - raise Exception("Matplotlib is needed to plot") - logging.debug("lons.shape=%s lats.shape=%s field.shape=%s", geo.lons.shape, - geo.lats.shape, field.shape) + raise ModuleNotFoundError("Matplotlib is needed to plot") + logging.debug( + "lons.shape=%s lats.shape=%s field.shape=%s", + geo.lons.shape, + geo.lats.shape, + field.shape, + ) if contour: plt.contourf(geo.lons, geo.lats, field) else: @@ 
-1983,162 +1171,6 @@ def run_plot_points(**kwargs): plt.savefig(output) -def parse_plot_timeseries_args(argv): - """Parse the command line input arguments for plotting time series. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser("Plot timeseries from JSON time series file") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('filename', type=str, default=None, help="JSON time series file") - parser.add_argument('-lon', type=float, default=None, help="Longitude", required=False) - parser.add_argument('-lat', type=float, default=None, help="Latitude", required=False) - parser.add_argument('-stid', type=str, default=None, help="Station id", required=False) - parser.add_argument('-stationlist', type=str, default=None, help="Station list", - required=False) - parser.add_argument('-start', type=str, default=None, help="Start time (YYYYMMDDHH)", - required=False) - parser.add_argument('-end', type=str, default=None, help="End time (YYYYMMDDHH)", - required=False) - parser.add_argument('-interval', type=int, default=None, help="Interval", required=False) - parser.add_argument('-o', '--output', dest="output", type=str, help="Input format", - default=None, required=False) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) < 3: - parser.print_help() - sys.exit() - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def run_plot_timeseries_from_json(**kwargs): - """Plot time series from json.""" - lon = kwargs["lon"] - lat = kwargs["lat"] - stid = kwargs["stid"] - stationlist = kwargs["stationlist"] - starttime = kwargs["start"] - if starttime is not None: - starttime = datetime.strptime(kwargs["start"], "%Y%m%d%H") - endtime = kwargs["end"] - if endtime is not None: - endtime = datetime.strptime(kwargs["end"], "%Y%m%d%H") - interval = kwargs["interval"] - filename = kwargs["filename"] - output = kwargs["output"] - - if lon is None and lat is None: - if stid is None: - raise Exception("You must provide lon and lat or stid") - if stationlist is None: - raise Exception("You must provide a stationlist with the stid") - lons, lats = surfex.Observation.get_pos_from_stid(stationlist, [stid]) - lon = lons[0] - lat = lats[0] - - tseries = surfex.TimeSeriesFromJson(filename, lons=[lon], lats=[lat], starttime=starttime, - endtime=endtime, interval=interval) - - ntimes = len(tseries.times) - vals = np.zeros(ntimes) - for i, tseries_t_val in enumerate(tseries.times): - vals[i] = tseries_t_val[0] - - ts_stid = str(tseries.stids[0]) - if ts_stid == "NA" and stid is not None: - ts_stid = stid - - if plt is None: - raise Exception("Matplotlib is needed to plot") - - plt.title(f"var= {tseries.varname} lon: {str(lon)} lat: {str(lat)} stid: {ts_stid}") - plt.plot(tseries.times, vals) - if output is None: - plt.show() - else: - plt.savefig(output) - - -def parse_args_set_geo_from_obs_set(argv): - """Parse the command line input arguments for setting geo from obs set. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. 
- - """ - parser = ArgumentParser("Set a point geometry from an observation set") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument("-v", type=str, dest="variable", help="Variable name", required=True) - parser.add_argument("-t", dest="validtime", help="Validtime (YYYYMMDDHH)", required=True) - parser.add_argument("-i", type=str, dest="inputfile", help="Input file", required=False) - parser.add_argument("-it", type=str, dest="obs_type", help="Input type", required=True, - choices=["netatmo", "frost", "bufr", "json"]) - parser.add_argument("--lonrange", type=str, dest="lonrange", help="Longitude range", - default=None, required=False) - parser.add_argument("--latrange", type=str, dest="latrange", help="Latitude range", - default=None, required=False) - parser.add_argument("-o", type=str, dest="output", help="Output file", required=True) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, - default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def parse_args_set_geo_from_stationlist(argv): - """Parse the command line input arguments for setting geo from station list. - - Args: - argv (list): List with arguments. - - Returns: - dict: Parsed arguments. - - """ - parser = ArgumentParser("Set a point geometry from a stationlist") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('stationlist', type=str, help="Station list") - parser.add_argument("--lonrange", type=str, dest="lonrange", help="Longitude range", - default=None, required=False) - parser.add_argument("--latrange", type=str, dest="latrange", help="Latitude range", - default=None, required=False) - parser.add_argument("-o", type=str, dest="output", help="Output file", required=True) - parser.add_argument('--debug', action="store_true", help="Debug", required=False, default=False) - parser.add_argument('--version', action='version', version=surfex.__version__) - - if len(argv) == 0: - parser.print_help() - sys.exit(1) - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - def set_geo_from_stationlist(**kwargs): """Set geometry from station list.""" stationlist = kwargs["stationlist"] @@ -2161,7 +1193,7 @@ def set_geo_from_stationlist(**kwargs): raise FileNotFoundError("Station list does not exist!") for stid in stids: - lon, lat = surfex.Observation.get_pos_from_stid(stationlist, [stid]) + lon, lat = Observation.get_pos_from_stid(stationlist, [stid]) lon = lon[0] lat = lat[0] if lonrange[0] <= lon <= lonrange[1] and latrange[0] <= lat <= latrange[1]: @@ -2172,369 +1204,397 @@ def set_geo_from_stationlist(**kwargs): d_x = ["0.3"] * len(lons) geo_json = { - "nam_pgd_grid": { - "cgrid": "LONLATVAL" - }, - "nam_lonlatval": { - "xx": lons, - "xy": lats, - "xdx": d_x, - "xdy": d_x - } + "nam_pgd_grid": {"cgrid": "LONLATVAL"}, + "nam_lonlatval": {"xx": lons, "xy": lats, "xdx": d_x, "xdy": d_x}, } - return surfex.LonLatVal(geo_json) + return LonLatVal(geo_json) -def parse_merge_namelist_settings(argv): - """Parse the command line input arguments for merging namelist settings. +def sentinel_obs(argv=None): + """Command line interface. 
Args: - argv (list): List with arguments. + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_sentinel_obs(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ sentinel_obs ******************") + fg_file = kwargs["fg_file"] + infiles = kwargs["infiles"] + step = kwargs["thinning"] + output = kwargs["output"] + varname = kwargs["varname"] + indent = kwargs["indent"] + + grid_lons, grid_lats, grid_sm_class = read_sentinel_nc(infiles) + fg_geo, validtime, grid_sm_fg, __, __ = read_first_guess_netcdf_file(fg_file, varname) + q_c = sm_obs_sentinel( + validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo, grid_sm_fg + ) + q_c.write_output(output, indent=indent) + - Returns: - dict: Parsed arguments. +def qc2obsmon(argv=None): + """Command line interface. + Args: + argv(list, optional): Arguments. Defaults to None. """ - parser = ArgumentParser("Merge namelist settings") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('--version', action='version', - version=f'surfex {surfex.__version__}') - parser.add_argument('--json', '-j', type=str, nargs="+", required=True, - help="A JSON file with run options") - parser.add_argument('--indent', required=False, default=2, type=int, help="Indented output") - parser.add_argument('--output', '-o', required=True, nargs='?') - - if len(argv) == 1: - parser.print_help() - sys.exit() - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def run_merge_namelist_settings(my_files, output, indent=None): - """Merge namelist settings.""" - json_settings = {} - for fname in my_files: - if os.path.exists(fname): - surfex.Namelist.merge_json_namelist_file(json_settings, fname) - else: - raise FileNotFoundError + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_qc2obsmon(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ qc2obsmon ******************") + write_obsmon_sqlite_file(**kwargs) - surfex.Namelist.nml2ascii(json_settings, output, indent=indent) +def prep(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_surfex_binary(argv, "prep") + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ prep ******************") + run_surfex_binary("prep", **kwargs) -def parse_merge_toml_settings(argv): - """Parse the command line input arguments for merging toml settings. + +def plot_points(argv=None): + """Command line interface. Args: - argv (list): List with arguments. + argv(list, optional): Arguments. Defaults to None. 
+ """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_plot_points(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ plot_points ******************") + run_plot_points(**kwargs) + - Returns: - dict: Parsed arguments. +def pgd(argv=None): + """Command line interface. + Args: + argv(list, optional): Arguments. Defaults to None. """ - parser = ArgumentParser("Merge toml files") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('--toml', '-t', type=str, nargs="+", required=True, - help="TOML files with run options") - parser.add_argument('--output', '-o', required=True, nargs='?') + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_surfex_binary(argv, "pgd") + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ pgd ******************") + run_surfex_binary("pgd", **kwargs) - if len(sys.argv) == 1: - parser.print_help() - sys.exit() - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs +def perturbed_offline(argv=None): + """Command line interface. + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_surfex_binary(argv, "perturbed") + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ offline ******************") + run_surfex_binary("perturbed", **kwargs) -def run_merge_toml_settings(**kwargs): - """Merge toml settings from files.""" - my_files = kwargs["toml"] - my_output = kwargs["output"] - merged_settings = surfex.merge_toml_env_from_files(my_files) +def offline(argv=None): + """Command line interface. - # Write merged settigns - toml.dump(merged_settings, open(my_output, "w", encoding="utf-8")) + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_surfex_binary(argv, "offline") + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ offline ******************") + run_surfex_binary("offline", **kwargs) -def parse_args_merge_qc_data(argv): - """Parse the command line input arguments for merge of qc data. +def cli_oi2soda(argv=None): + """Command line interface. Args: - argv (list): List with arguments. + argv(list, optional): Arguments. Defaults to None. 
+ """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_oi2soda(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ oi2soda ******************") + run_oi2soda(**kwargs) - Returns: - dict: Parsed arguments. +def cli_modify_forcing(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. """ - parser = ArgumentParser() - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument("-i", type=str, nargs="+", dest="filenames", help="Input QC JSON files", - required=True) - parser.add_argument("-t", dest="validtime", help="Validtime (YYYYMMDDHH)", required=True) - parser.add_argument("--indent", type=int, help="Indent in output", default=None) - parser.add_argument("-o", type=str, dest="output", help="Output file", required=True) + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_modify_forcing(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ modify_forcing ******************") + modify_forcing(**kwargs) - if len(argv) == 0: - parser.print_help() - sys.exit(1) - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs +def cli_merge_qc_data(argv=None): + """Command line interface. + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_merge_qc_data(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ merge_qc_data ******************") -def merge_qc_data(an_time, filenames, output, indent=None): - """Merge the qc data.""" - qc_data = surfex.merge_json_qc_data_sets(an_time, filenames) - qc_data.write_output(output, indent=indent) + qc_data = merge_json_qc_data_sets(kwargs.get("validtime"), kwargs.get("filenames")) + qc_data.write_output(kwargs.get("output"), indent=kwargs.get("indent")) -def parse_timeseries2json(argv): - """Parse the command line input arguments for time series to json. +def masterodb(argv=None): + """Command line interface. Args: - argv (list): List with arguments. + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_masterodb(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ masterodb ******************") + run_masterodb(**kwargs) - Returns: - dict: Parsed arguments. 
- """ - parser = ArgumentParser("Convert a time series to json") - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('-v', '--varname', dest="varname", type=str, help="Variable name", - required=True) - parser.add_argument('-lons', dest="lons", type=float, nargs="+", help="Longitudes", - default=None, required=False) - parser.add_argument('-lats', dest="lats", type=float, nargs="+", help="Latitudes", - default=None, required=False) - parser.add_argument('-stids', dest="stations", type=str, nargs="+", help="Longitudes", - default=None, required=False) - parser.add_argument('-stations', dest="stationlist", type=str, help="Longitudes", default=None, - required=False) - parser.add_argument('-i', '--filepattern', dest="filepattern", type=str, help="Input file", - default="", required=False) - parser.add_argument('-it', '--inputtype', dest="inputtype", type=str, - help="Input type (format)", default="surfex", required=False, - choices=["netcdf", "grib1", "grib2", "surfex", "obs"]) - parser.add_argument('-start', dest="start", type=str, help="Start time (YYYYMMDDHH)", - required=True) - parser.add_argument('-end', dest="end", type=str, help="End time (YYYYMMDDHH)", required=True) - parser.add_argument('-int', dest="interval", type=int, help="Interval in seconds", - required=False, default=3600) - parser.add_argument('-indent', dest="indent", type=int, help="Indent", required=False, - default=None) - parser.add_argument('-fcint', dest="fcint", type=int, - help="Interval between analysis in seconds", required=False, - default=3 * 3600) - parser.add_argument('-file_inc', dest="file_inc", type=int, - help="Interval between analysis in seconds", - required=False, default=3 * 3600) - parser.add_argument('-offset', dest="offset", type=int, - help="Offset into next forecast by seconds", - required=False, default=0) - parser.add_argument('-sfx', dest="sfx_type", type=str, help="Input type for surfex files", - default=None, required=False, - choices=[None, "forcing", "ascii", "nc", "netcdf", "texte"]) - parser.add_argument('-geo', dest="geo_in", type=str, - help="JSON file with geometry needed for some surfex file types", - required=False, default=None) - parser.add_argument('-obs', dest="obs_set", type=str, help="Input type", default=None, - required=False, - choices=[None, "json", "bufr", "frost", "netatmo", "titan"]) - parser.add_argument('-o', '--output', dest="output", type=str, help="Output image", - default=None, required=False) - - if len(argv) == 0: - parser.print_help() - sys.exit() - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def run_timeseries2json(**kwargs): - """Run timeseries to json.""" - lons = kwargs["lons"] - lats = kwargs["lats"] - stations = kwargs["stations"] - stationlist = kwargs["stationlist"] - starttime = kwargs["start"] - endtime = kwargs["end"] - interval = kwargs["interval"] - varname = kwargs["varname"] - inputtype = kwargs["inputtype"] - file_inc = kwargs["file_inc"] - fcint = kwargs["fcint"] - offset = kwargs["offset"] - filepattern = kwargs["filepattern"] - indent = kwargs["indent"] - sfx_type = kwargs["sfx_type"] - obs_set = kwargs["obs_set"] - start = datetime.strptime(starttime, "%Y%m%d%H") - end = datetime.strptime(endtime, "%Y%m%d%H") - geo_in = None - if "geo_in" in kwargs: - geo_in = kwargs["geo_in"] - if isinstance(geo_in, str): - geo_in = json.load(open(geo_in, "r", encoding="utf-8")) - - # Get lon and 
lats from station list - if lons is None and lats is None: - if stations is None: - raise Exception("You must provide a station list if no stations are provided") - lons, lats = surfex.Observation.get_pos_from_stid(stationlist, stations) - - if len(lons) != len(lats): - raise Exception("Mismatch in longitudes and latitudes") - - delta = [0.1] * len(lons) - geo_json = { - "nam_pgd_grid": { - "cgrid": "LONLATVAL" - }, - "nam_lonlatval": { - "xx": lons, - "xy": lats, - "xdx": delta, - "xdy": delta - } - } - geo = surfex.LonLatVal(geo_json) - - settings = {} - if inputtype == "surfex": - settings.update({ - "varname": varname, - "filetype": sfx_type - }) - elif inputtype == "obs": - settings.update({ - "varname": varname, - "filetype": obs_set, - "fcint": fcint, - "file_inc": file_inc, - "offset": offset, - "filepattern": filepattern - }) - - conf = { - varname: { - inputtype: { - "converter": { - "none": settings - } - } - } - } +def hm2pysurfex(argv=None): + """Command line interface. - cache = surfex.Cache(7200) + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_hm2pysurfex(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ hm2pysurfex ******************") + run_hm2pysurfex(**kwargs) - # Create var - converter = "none" - if geo_in is not None: - geo_in = surfex.get_geo_object(geo_in) - ts1 = surfex.TimeSeriesFromConverter(varname, inputtype, conf, geo, converter, start, end, - cache=cache, - interval=interval, geo_in=geo_in, - stids_file=stationlist) +def gridpp(argv=None): + """Command line interface. - ts1.write_json("ts.json", indent=indent) + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_gridpp(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ gridpp ******************") + run_gridpp(**kwargs) -def parse_cryoclim_pseudoobs(argv): - """Parse the command line input arguments for cryoclim pseudo obs. +def dump_environ(argv=None): + """Command line interface. Args: - argv (list): List with arguments. + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + with open("rte.json", mode="w", encoding="utf-8") as file_handler: + json.dump(os.environ.copy(), file_handler) - Returns: - dict: Parsed arguments. 
- """ - parser = ArgumentParser("Create CRYOCLIM pseudo-obs") - parser.add_argument('--debug', action="store_true", help="Debug", - required=False, default=False) - parser.add_argument('--options', type=open, action=LoadFromFile, - help="Load options from file") - parser.add_argument('-v', '--varname', dest="varname", type=str, help="Variable name", - default="surface_snow_thickness", required=False) - parser.add_argument('-fg', dest="fg_file", type=str, help="First guess file", - default=None, required=True) - parser.add_argument('-i', dest="infiles", type=str, nargs="+", help="Infiles", - default=None, required=True) - parser.add_argument('-step', dest="thinning", type=int, help="Thinning step", - required=False, default=4) - parser.add_argument('-indent', dest="indent", type=int, help="Indent", - required=False, default=None) - parser.add_argument('-o', '--output', dest="output", type=str, help="Output image", - default=None, required=False) - - if len(argv) == 0: - parser.print_help() - sys.exit() - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def parse_sentinel_obs(argv): - """Parse the command line input arguments for sentinel observations. +def first_guess_for_oi(argv=None): + """Command line interface. Args: - argv (list): List with arguments. + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + + kwargs = parse_args_first_guess_for_oi(argv) + debug = kwargs.get("debug") - Returns: - dict: Parsed arguments. + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ FirstGuess4gridpp ******************") + run_first_guess_for_oi(**kwargs) + +def cryoclim_pseudoobs(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. 
""" - parser = ArgumentParser("Create Sentinel-1 obs") - parser.add_argument('--debug', action="store_true", help="Debug", - required=False, default=False) - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('-v', '--varname', dest="varname", type=str, help="Variable name", - default="surface_soil_moisture", required=False) - parser.add_argument('-fg', dest="fg_file", type=str, help="First guess file", - default=None, required=True) - parser.add_argument('-i', dest="infiles", type=str, nargs="+", help="Infiles", - default=None, required=True) - parser.add_argument('-step', dest="thinning", type=int, help="Thinning step", - required=False, default=4) - parser.add_argument('-indent', dest="indent", type=int, help="Indent", - required=False, default=None) - parser.add_argument('-o', '--output', dest="output", type=str, help="Output image", - default=None, required=False) - - if len(argv) == 0: - parser.print_help() - sys.exit() - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def run_cryoclim_pseuodoobs(**kwargs): - """Create pseudo obs from cryoclim.""" + if argv is None: + argv = sys.argv[1:] + kwargs = parse_cryoclim_pseudoobs(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ cryoclim_pseudoobs ******************") + fg_file = kwargs["fg_file"] infiles = kwargs["infiles"] step = kwargs["thinning"] @@ -2542,75 +1602,424 @@ def run_cryoclim_pseuodoobs(**kwargs): varname = kwargs["varname"] indent = kwargs["indent"] - grid_lons, grid_lats, grid_snow_class = surfex.read_cryoclim_nc(infiles) - fg_geo, validtime, grid_snow_fg, __, __ = surfex.read_first_guess_netcdf_file(fg_file, - varname) - q_c = surfex.snow_pseudo_obs_cryoclim(validtime, grid_snow_class, grid_lons, grid_lats, step, - fg_geo, grid_snow_fg) + grid_lons, grid_lats, grid_snow_class = read_cryoclim_nc(infiles) + fg_geo, validtime, grid_snow_fg, __, __ = read_first_guess_netcdf_file( + fg_file, varname + ) + q_c = snow_pseudo_obs_cryoclim( + validtime, grid_snow_class, grid_lons, grid_lats, step, fg_geo, grid_snow_fg + ) q_c.write_output(output, indent=indent) -def run_sentinel_obs(**kwargs): - """Create pseudo obs from cryoclim.""" - fg_file = kwargs["fg_file"] - infiles = kwargs["infiles"] - step = kwargs["thinning"] +def create_namelist(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_create_namelist(argv) + debug = kwargs.get("debug") + mode = kwargs.get("mode") + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ %s ******************", mode) + run_create_namelist(**kwargs) + + +def create_lsm_file_assim(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. 
+ + Raises: + FileNotFoundError: Domain file not found + + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_lsm_file_assim(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ create_lsm_fil ******************") + + domain = kwargs["domain"] + logging.debug("domain=%s", domain) + if os.path.exists(domain): + domain_json = json.load(open(domain, "r", encoding="utf-8")) + geo = get_geo_object(domain_json) + else: + raise FileNotFoundError(domain) + validtime = as_datetime(kwargs["dtg"]) + + # TODO Move to a method outside cli + cache = Cache(3600) + inputfile = kwargs["file"] + fileformat = kwargs["fileformat"] + converter = kwargs["converter"] output = kwargs["output"] - varname = kwargs["varname"] - indent = kwargs["indent"] - grid_lons, grid_lats, grid_sm_class = surfex.read_sentinel_nc(infiles) - fg_geo, validtime, grid_sm_fg, __, __ = surfex.read_first_guess_netcdf_file(fg_file, - varname) - q_c = surfex.sm_obs_sentinel(validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo, - grid_sm_fg) - q_c.write_output(output, indent=indent) + var = kwargs["var"] + + defs = { + "filepattern": inputfile, + "fileformat": fileformat, + "filetype": "surf", + "fcint": 10800, + "offset": 0, + } + + logging.debug("%s %s", var, fileformat) + converter_conf = {"none": {"name": var, "varname": var}} + var = "LSM" + initial_basetime = validtime - as_timedelta(seconds=10800) + converter = Converter(converter, initial_basetime, defs, converter_conf, fileformat) + field = ConvertedInput(geo, var, converter).read_time_step(validtime, cache) + field = np.reshape(field, [geo.nlons, geo.nlats]) + field = np.transpose(field) -def parse_args_shape2ign(argv): - """Parse the command line input arguments for shape fiel to ign. + with open(output, mode="w", encoding="utf-8") as file_handler: + for lat in range(0, geo.nlats): + for lon in range(0, geo.nlons): + file_handler.write(str(field[lat, lon]) + "\n") + + +def create_forcing(argv=None): + """Command line interface. Args: - argv (list): List with arguments. + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_create_forcing(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ create_forcing ******************") + options, var_objs, att_objs = set_forcing_config(**kwargs) + run_time_loop(options, var_objs, att_objs) - Returns: - dict: Parsed arguments. +def bufr2json(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. 
""" - parser = ArgumentParser("Convert NVE shape files to IGN geometry") - parser.add_argument('--debug', action="store_true", help="Debug", - required=False, default=False) - parser.add_argument('--options', type=open, action=LoadFromFile, help="Load options from file") - parser.add_argument('-c', '--catchment', dest="catchment", type=str, help="Catchment name", - default="None", required=False) - parser.add_argument('-i', dest="infile", type=str, help="Infile/directory", - default=None, required=True) - parser.add_argument('-r', dest="ref_proj", type=str, - help="Reference projection (domain file)", - default=None, required=True) - parser.add_argument('--indent', dest="indent", type=str, help="Indent", default=None, - required=False) - parser.add_argument('-o', '--output', dest="output", type=str, - help="Output json geometry file", - default=None, required=False) - - if len(argv) == 0: - parser.print_help() - sys.exit() - - args = parser.parse_args(argv) - kwargs = {} - for arg in vars(args): - kwargs.update({arg: getattr(args, arg)}) - return kwargs - - -def run_shape2ign(**kwargs): - """Shape2ign.""" + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_bufr2json(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ bufr2json ******************") + logging.warning("This method is depreciated. Please use create_obsset_file") + variables = kwargs.get("vars") + bufrfile = kwargs.get("bufr") + output = kwargs.get("output") + valid_dtg = as_datetime(kwargs.get("dtg")) + valid_range = as_timedelta(seconds=kwargs.get("valid_range")) + label = kwargs.get("label") + indent = kwargs.get("indent") + lonrange = kwargs.get("lonrange") + latrange = kwargs.get("latrange") + + create_obsset_file( + valid_dtg, + "bufr", + variables, + bufrfile, + output, + pos_t_range=valid_range, + lonrange=lonrange, + latrange=latrange, + label=label, + indent=indent, + ) + + +def obs2json(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. 
+ """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_obs2json(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ obs2json ******************") + obs_type = kwargs.get("obs_type") + variables = kwargs.get("vars") + inputfile = kwargs.get("inputfile") + output = kwargs.get("output") + obs_time = as_datetime(kwargs.get("obs_time")) + label = kwargs.get("label") + unit = kwargs.get("unit") + level = kwargs.get("level") + obtypes = kwargs.get("obtypes") + subtypes = kwargs.get("subtypes") + pos_t_range = kwargs.get("pos_t_range") + neg_t_range = kwargs.get("neg_t_range") + indent = kwargs.get("indent") + lonrange = kwargs.get("lonrange") + latrange = kwargs.get("latrange") + if pos_t_range is not None: + pos_t_range = as_timedelta(seconds=pos_t_range) + if neg_t_range is not None: + neg_t_range = as_timedelta(seconds=neg_t_range) + + create_obsset_file( + obs_time, + obs_type, + variables, + inputfile, + output, + pos_t_range=pos_t_range, + neg_t_range=neg_t_range, + lonrange=lonrange, + latrange=latrange, + label=label, + unit=unit, + level=level, + indent=indent, + obtypes=obtypes, + subtypes=subtypes, + ) + + +def cli_set_domain(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. + + Raises: + FileNotFoundError: File not found + RuntimeError: Domain not provided + + """ + if argv is None: + argv = sys.argv[1:] + + args = parse_set_domain(argv) + debug = args.debug + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ set_domain ******************") + domain = args.domain + domains = args.domains + output = args.output + indent = args.indent + harmonie_mode = args.harmonie + if os.path.exists(domains): + with open(domains, mode="r", encoding="utf-8") as file_handler: + domains = json.load(file_handler) + domain_json = set_domain(domains, domain, hm_mode=harmonie_mode) + if domain_json is not None: + with open(output, mode="w", encoding="utf-8") as file_handler: + json.dump(domain_json, file_handler, indent=indent) + else: + raise RuntimeError("Domain not provided") + else: + raise FileNotFoundError + + +def cli_set_geo_from_obs_set(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. 
+ """ + if argv is None: + argv = sys.argv[1:] + + kwargs = parse_args_set_geo_from_obs_set(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ set_geo_from_obs_set ******************") + + validtime = as_datetime(kwargs["validtime"]) + geo = set_geo_from_obs_set( + validtime, + kwargs["obs_type"], + kwargs["variable"], + kwargs["inputfile"], + kwargs["lonrange"], + kwargs["latrange"], + ) + output = kwargs["output"] + with open(output, mode="w", encoding="utf-8") as file_handler: + json.dump(geo.json, file_handler) + + +def cli_set_geo_from_stationlist(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + + kwargs = parse_args_set_geo_from_stationlist(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ set_geo_from_stationlist ******************") + geo = set_geo_from_stationlist(**kwargs) + output = kwargs["output"] + with open(output, mode="w", encoding="utf-8") as file_handler: + json.dump(geo.json, file_handler) + + +def cli_shape2ign(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + + kwargs = parse_args_shape2ign(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ shape2ign ******************") catchment = kwargs.get("catchment") infile = kwargs.get("infile") output = kwargs.get("output") indent = kwargs.get("indent") ref_proj = kwargs.get("ref_proj") - surfex.shape2ign(catchment, infile, output, ref_proj, indent=indent) + shape2ign(catchment, infile, output, ref_proj, indent=indent) + + +def soda(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. + """ + if argv is None: + argv = sys.argv[1:] + + kwargs = parse_args_surfex_binary(argv, "soda") + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ soda ******************") + run_surfex_binary("soda", **kwargs) + + +def titan(argv=None): + """Command line interface. + + Args: + argv(list, optional): Arguments. Defaults to None. 
+ """ + if argv is None: + argv = sys.argv[1:] + kwargs = parse_args_titan(argv) + debug = kwargs.get("debug") + + if debug: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s", + level=logging.DEBUG, + ) + else: + logging.basicConfig( + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + ) + logging.info("************ titan ******************") + run_titan(**kwargs) diff --git a/surfex/cmd_parsing.py b/surfex/cmd_parsing.py new file mode 100644 index 0000000..79bc32b --- /dev/null +++ b/surfex/cmd_parsing.py @@ -0,0 +1,2183 @@ +"""Command line interfaces.""" +import os +import sys +from argparse import Action, ArgumentParser + +import yaml + +try: + import matplotlib.pyplot as plt +except ModuleNotFoundError: + plt = None + + +from . import __version__ + + +class LoadFromFile(Action): + """Load arguments from a file.""" + + def __call__(self, parser, namespace, values, option_string=None): + """Override __call__ method.""" + with values as f_h: + # parse arguments in the file and store them in the target namespace + parser.parse_args(f_h.read().split(), namespace) + + +def parse_args_create_forcing(argv): + """Parse arguments to create forcing. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser(description="Create offline forcing") + parser.add_argument("dtg_start", type=str, help="Start DTG", nargs="?") + parser.add_argument("dtg_stop", type=str, help="Stop DTG", nargs="?") + parser.add_argument( + "-d", + dest="domain", + type=str, + help="Domain file describing the points or locations", + nargs="?", + required=False, + default=None, + ) + parser.add_argument( + "--harmonie", + action="store_true", + default=False, + help="Surfex configuration (domain) created from Harmonie environment", + ) + parser.add_argument( + "--config_exp_surfex", + dest="config_exp_surfex", + type=str, + help="Toml configuration file for surfex settings potentially " + + "used if --harmonie is set", + default=None, + nargs="?", + ) + parser.add_argument( + "-fb", type=str, help="First base time unless equal to dtg_start", default=None + ) + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-c", + "--config", + dest="user_config", + type=str, + help="Configuration file " + + "in yaml format describing customized variable setup", + default=None, + nargs="?", + ) + parser.add_argument( + "-t", "--timestep", type=int, help="Surfex time step", default=3600, nargs="?" 
+ ) + parser.add_argument( + "-ci", + "--cache_interval", + type=int, + help="clear cached fields after..", + default=3600, + nargs="?", + ) + parser.add_argument( + "-i", + "--input_format", + type=str, + help="Default input file format", + default="netcdf", + choices=["netcdf", "grib1", "grib2", "surfex", "fa"], + ) + parser.add_argument( + "-ig", + "--input_geo", + dest="geo_input", + type=str, + help="Default input geometry if needed", + default=None, + required=False, + ) + parser.add_argument( + "-o", + "--output_format", + type=str, + help="Output file format", + default="nc4", + choices=["netcdf", "nc4", "ascii"], + nargs="?", + ) + parser.add_argument("-a", dest="analysis", action="store_true", default=False) + parser.add_argument( + "--interpolation", + dest="interpolation", + required=False, + default="bilinear", + choices=["nearest", "bilinear"], + ) + parser.add_argument("-of", type=str, help="Output file name", default=None, nargs="?") + parser.add_argument( + "-p", "--pattern", type=str, help="Filepattern", default=None, nargs="?" + ) + parser.add_argument( + "--zref", + type=str, + help="Temperature/humidity reference height", + default="ml", + choices=["ml", "screen"], + ) + parser.add_argument( + "--uref", + type=str, + help="Wind reference height: screen/ml/", + default="ml", + choices=["ml", "screen"], + ) + parser.add_argument("--debug", help="Show debug information", action="store_true") + parser.add_argument( + "--single", help="Print single time step twice", action="store_true" + ) + parser.add_argument("--version", action="version", version=__version__) + + group_ta = parser.add_argument_group("TA", description="Air temperature [K]") + group_ta.add_argument( + "--ta", + type=str, + help="Input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex"], + ) + group_ta.add_argument( + "--ta_converter", + type=str, + help="Converter function to air temperature", + default="none", + choices=["none"], + ) + + group_qa = parser.add_argument_group("QA", description="Specific humidity") + group_qa.add_argument( + "--qa", + type=str, + help="Input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex"], + ) + group_qa.add_argument( + "--qa_converter", + type=str, + help="Converter function to specific humidity", + default="none", + choices=["none", "rh2q", "rh2q_mslp"], + ) + + group_ps = parser.add_argument_group("PS", description="Surface air pressure [Pa]") + group_ps.add_argument( + "--ps", + type=str, + help="Surface air pressure input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex", "constant"], + ) + group_ps.add_argument( + "--ps_converter", + type=str, + help="Converter function to surface air pressure", + default="none", + choices=["none", "mslp2ps"], + ) + + group_dir_sw = parser.add_argument_group( + "DIR_SW", description="Direct shortwave radiation" + ) + group_dir_sw.add_argument( + "--dir_sw", + type=str, + help="Direct short wave radiation input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex", "constant"], + ) + group_dir_sw.add_argument( + "--dir_sw_converter", + type=str, + help="Converter function to direct short wave radiation", + default="none", + choices=["none", "analysis"], + ) + + group_sca_sw = parser.add_argument_group( + "SCA_SW", description="Scattered short wave radiation flux" + ) + group_sca_sw.add_argument( + "--sca_sw", + type=str, + help="Scattered short wave radiation input format", + 
default="default", + choices=["netcdf", "grib1", "grib2", "surfex", "constant"], + ) + group_sca_sw.add_argument( + "--sca_sw_converter", + type=str, + help="Converter function to scattered shortwave radiation flux", + default="none", + choices=["none"], + ) + + group_lw = parser.add_argument_group("LW", description="Long wave radiation flux") + group_lw.add_argument( + "--lw", + type=str, + help="Long wave radiation input format", + default="default", + choices=["netcdf", "grib1", "grib2", "surfex", "constant"], + ) + group_lw.add_argument( + "--lw_converter", + type=str, + help="Converter function to long wave radiation flux", + default="none", + choices=["none", "analysis"], + ) + + group_rain = parser.add_argument_group("RAIN", description="Rainfall rate") + group_rain.add_argument( + "--rain", + type=str, + help="Input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex"], + ) + group_rain.add_argument( + "--rain_converter", + type=str, + help="Converter function to rainfall rate", + default="totalprec", + choices=["none", "totalprec", "calcrain"], + ) + + group_snow = parser.add_argument_group("SNOW", description="Snowfall rate") + group_snow.add_argument( + "--snow", + type=str, + help="Input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex"], + ) + group_snow.add_argument( + "--snow_converter", + type=str, + help="Converter function to snowfall rate", + default="none", + choices=["none", "calcsnow", "snowplusgraupel"], + ) + + group_wind = parser.add_argument_group("WIND", description="Wind speed") + group_wind.add_argument( + "--wind", + type=str, + help="Input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex"], + ) + group_wind.add_argument( + "--wind_converter", + type=str, + help="Converter function to windspeed", + default="windspeed", + choices=["none", "windspeed"], + ) + + group_wind_dir = parser.add_argument_group("WIND_DIR", description="Wind direction") + group_wind_dir.add_argument( + "--wind_dir", + type=str, + help="Input format", + default="default", + choices=["default", "netcdf", "grib1", "grib2", "surfex"], + ) + group_wind_dir.add_argument( + "--wind_dir_converter", + type=str, + help="Converter function to wind direction", + default="winddir", + choices=["none", "winddir"], + ) + + group_co2 = parser.add_argument_group("CO2", description="Carbon dioxide") + group_co2.add_argument( + "--co2", + type=str, + help="CO2 input format", + default="default", + choices=["netcdf", "grib1", "constant", "grib2", "surfex"], + ) + group_co2.add_argument( + "--co2_converter", + type=str, + help="Converter function to carbon dioxide", + default="none", + choices=["none"], + ) + + group_zs = parser.add_argument_group("ZS", description="Surface geopotential") + group_zs.add_argument( + "--zsoro", + type=str, + help="ZS input format", + default="default", + choices=["netcdf", "grib1", "grib2", "surfex", "constant"], + ) + group_zs.add_argument( + "--zsoro_converter", + type=str, + help="Converter function to ZS", + default="none", + choices=["none", "phi2m"], + ) + + group_zval = parser.add_argument_group( + "ZREF", description="Reference height for temperature " "and humidity" + ) + group_zval.add_argument( + "--zval", + type=str, + help="ZREF input format", + default="default", + choices=["netcdf", "grib1", "grib2", "surfex", "constant"], + ) + group_zval.add_argument( + "--zval_converter", + type=str, + help="Converter function to ZREF", + default="none", + 
choices=["none"], + ) + + group_uval = parser.add_argument_group( + "UREF", description="Reference height for wind" + ) + group_uval.add_argument( + "--uval", + type=str, + help="UREF input format", + default="default", + choices=["netcdf", "grib1", "grib2", "surfex", "constant"], + ) + group_uval.add_argument( + "--uval_converter", + type=str, + help="Converter function to UREF", + default="none", + choices=["none"], + ) + + if len(argv) < 4: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + + user_config = {} + if "user_config" in kwargs and kwargs["user_config"] is not None: + user_config = ( + yaml.safe_load(open(kwargs["user_config"], mode="r", encoding="utf-8")) or {} + ) + kwargs.update({"user_config": user_config}) + + # Find name of global config file + root = __file__ + if os.path.islink(root): + root = os.path.realpath(root) + base = os.path.dirname(os.path.abspath(root)) + yaml_config = base + "/cfg/config.yml" + + default_conf = yaml.safe_load( + open(yaml_config, mode="r", encoding="utf-8") + ) or sys.exit(1) + kwargs.update({"config": default_conf}) + return kwargs + + +def parse_args_modify_forcing(argv): + """Parse arguments to modify forcing. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser(description="Modify offline forcing NetCDF file") + parser.add_argument( + "-i", + "--input_file", + type=str, + help="Input forcing file", + nargs="?", + required=True, + ) + parser.add_argument( + "-t", + "--time_step", + type=str, + help="Time step ", + nargs="?", + required=False, + default=-1, + ) + parser.add_argument( + "-o", + "--output_file", + type=str, + help="Output forcing file", + nargs="?", + required=True, + ) + parser.add_argument("variables", type=str, nargs="+", help="Variables to substitute") + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_qc2obsmon(argv): + """Parse arguments for qc2obsmon. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
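+
+    Example:
+        A minimal, illustrative call only; the file names below are
+        placeholders and not part of the API:
+
+            kwargs = parse_args_qc2obsmon(
+                ["2020033006", "air_temperature_2m", "qc_obs.json",
+                 "--fg_file", "first_guess.nc", "--an_file", "analysis.nc",
+                 "--file_var", "air_temperature_2m"]
+            )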
+ + """ + parser = ArgumentParser("Create SQLite data base for obsmon") + parser.add_argument("dtg", type=str, help="YYYYMMDDHH") + parser.add_argument("varname", type=str, help="Variable name") + parser.add_argument("qc", type=str, help="QC dataset JSONfile") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "--operator", + type=str, + help="Obs operator", + choices=["bilinear", "nearest"], + default="bilinear", + required=False, + ) + parser.add_argument("--fg_file", type=str, help="First guess file", required=True) + parser.add_argument("--an_file", type=str, help="Analysis file", required=True) + parser.add_argument("--file_var", type=str, help="File variable", required=True) + parser.add_argument( + "-o", dest="output", type=str, nargs="?", help="output file", default="ecma.db" + ) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_first_guess_for_oi(argv): + """Parse arguments for firstguess4oi. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser(description="Create first guess file for gridpp") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-dtg", dest="dtg", type=str, help="Date (YYYYMMDDHH)", required=True + ) + parser.add_argument( + "-i", "--inputfile", type=str, default=None, help="Default input file", nargs="?" + ) + parser.add_argument( + "-if", dest="inputformat", type=str, help="Input file format", default="grib2" + ) + parser.add_argument( + "-d", dest="domain", type=str, help="Domain", required=False, default=None + ) + parser.add_argument( + "--harmonie", + action="store_true", + default=False, + help="Surfex configuration (domain) created from Harmonie environment", + ) + + parser.add_argument( + "-t2m_file", type=str, default=None, help="File with T2M", nargs="?" + ) + parser.add_argument( + "-t2m_format", + type=str, + default=None, + help="File format for file with T2M", + nargs="?", + choices=["grib1", "grib2", "netcdf", "surfex", "fa"], + ) + parser.add_argument( + "-t2m_converter", + type=str, + default="none", + help="Converter for T2M", + nargs="?", + choices=["none", "tap"], + ) + parser.add_argument( + "-rh2m_file", type=str, default=None, help="File with RH2M", nargs="?" + ) + parser.add_argument( + "-rh2m_format", + type=str, + default=None, + help="File format for file with RH2M", + nargs="?", + choices=["grib1", "grib2", "netcdf", "surfex", "fa"], + ) + parser.add_argument( + "-rh2m_converter", + type=str, + default="none", + help="Converter for RH2M", + nargs="?", + choices=["none", "rhp"], + ) + + parser.add_argument( + "-sd_file", type=str, default=None, help="Snow depth file", nargs="?" 
+    )
+    parser.add_argument(
+        "-sd_format",
+        type=str,
+        default=None,
+        help="Snow depth file format",
+        nargs="?",
+        choices=["grib1", "grib2", "netcdf", "surfex", "fa"],
+    )
+    parser.add_argument(
+        "--sd_converter",
+        type=str,
+        default="none",
+        help="Converter for snow depth",
+        nargs="?",
+        choices=["none", "sweclim", "swe2sd", "sdp"],
+    )
+
+    parser.add_argument(
+        "-cb_file", type=str, default=None, help="Cloud base file", nargs="?"
+    )
+    parser.add_argument(
+        "-cb_format",
+        type=str,
+        default=None,
+        help="Cloud base file format",
+        nargs="?",
+        choices=["grib1", "grib2", "netcdf", "surfex", "fa"],
+    )
+    parser.add_argument(
+        "--cb_converter",
+        type=str,
+        default="cloud_base",
+        help="Converter for cloud base",
+        nargs="?",
+        choices=["cloud_base"],
+    )
+
+    parser.add_argument(
+        "-sm_file", type=str, default=None, help="Soil moisture file", nargs="?"
+    )
+    parser.add_argument(
+        "-sm_format",
+        type=str,
+        default=None,
+        help="Soil moisture file format",
+        nargs="?",
+        choices=["grib1", "grib2", "netcdf", "surfex", "fa"],
+    )
+    parser.add_argument(
+        "--sm_converter",
+        type=str,
+        default="none",
+        help="Converter for soil moisture",
+        nargs="?",
+        choices=["none", "smp"],
+    )
+
+    parser.add_argument(
+        "-laf_file",
+        type=str,
+        default=None,
+        help="Land area fraction grib file",
+        nargs="?",
+    )
+    parser.add_argument(
+        "-laf_format",
+        type=str,
+        default=None,
+        help="Land area fraction file format",
+        nargs="?",
+        choices=["grib1", "grib2", "netcdf", "surfex", "fa"],
+    )
+    parser.add_argument(
+        "--laf_converter",
+        type=str,
+        default="nature_town",
+        help="Converter for land area fraction",
+        nargs="?",
+        choices=["none", "sea2land", "nature_town"],
+    )
+
+    parser.add_argument(
+        "-altitude_file", type=str, default=None, help="SURFEX grib file", nargs="?"
+    )
+    parser.add_argument(
+        "-altitude_format",
+        type=str,
+        default=None,
+        help="Altitude file format",
+        nargs="?",
+        choices=["grib1", "grib2", "netcdf", "surfex", "fa"],
+    )
+    parser.add_argument(
+        "--altitude_converter",
+        type=str,
+        default="phi2m",
+        help="Converter for altitude",
+        nargs="?",
+        choices=["none", "phi2m"],
+    )
+
+    parser.add_argument(
+        "-o", dest="output", type=str, help="Output file", default="raw.nc"
+    )
+    parser.add_argument(
+        "--config",
+        "-c",
+        dest="input_config",
+        type=str,
+        help="YAML config file",
+        default="first_guess.yml",
+        nargs="?",
+    )
+    parser.add_argument(
+        "variables",
+        nargs="+",
+        choices=[
+            "air_temperature_2m",
+            "relative_humidity_2m",
+            "surface_snow_thickness",
+            "cloud_base",
+            "surface_soil_moisture",
+        ],
+        help="Variables to create first guess for",
+    )
+    parser.add_argument(
+        "--debug", action="store_true", help="Debug", required=False, default=False
+    )
+    parser.add_argument("--version", action="version", version=__version__)
+
+    if len(argv) == 0:
+        parser.print_help()
+        sys.exit(1)
+
+    args = parser.parse_args(argv)
+    kwargs = {}
+    for arg in vars(args):
+        kwargs.update({arg: getattr(args, arg)})
+    return kwargs
+
+
+def parse_args_masterodb(argv):
+    """Parse the command line input arguments for masterodb.
+
+    Args:
+        argv (list): List with arguments.
+
+    Returns:
+        dict: Parsed arguments.
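+
+    Example:
+        A minimal, illustrative call; all file and directory names below are
+        placeholders:
+
+            kwargs = parse_args_masterodb(
+                ["--pgd", "PGD.nc", "--prep", "PREP.nc", "-r", "rte.json",
+                 "-s", "system.json", "-n", "nam_dir", "--mode", "forecast"]
+            )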
+
+    """
+    parser = ArgumentParser(description="SURFEX for MASTERODB")
+    parser.add_argument(
+        "--options", type=open, action=LoadFromFile, help="Load options from file"
+    )
+    parser.add_argument("--version", action="version", version=f"surfex {__version__}")
+    parser.add_argument(
+        "--debug", action="store_true", help="Debug", required=False, default=False
+    )
+    parser.add_argument(
+        "--wrapper", "-w", type=str, default="", help="Execution wrapper command"
+    )
+    parser.add_argument(
+        "--harmonie",
+        action="store_true",
+        default=False,
+        help="Surfex configuration created from Harmonie environment",
+    )
+    parser.add_argument(
+        "--pgd", type=str, nargs="?", required=True, help="Name of the PGD file"
+    )
+    parser.add_argument(
+        "--prep", type=str, nargs="?", required=True, help="Name of the PREP file"
+    )
+    parser.add_argument(
+        "--force", "-f", action="store_true", default=False, help="Force re-creation"
+    )
+    parser.add_argument("--rte", "-r", required=True, nargs="?")
+    parser.add_argument("--config", "-c", required=False, nargs="?")
+    parser.add_argument(
+        "--system_file_paths",
+        "-s",
+        required=True,
+        nargs="?",
+        help="Input file paths on your system",
+    )
+    parser.add_argument("--namelist_path", "-n", required=True, nargs="?")
+    parser.add_argument(
+        "--domain", type=str, required=False, help="JSON file with domain"
+    )
+    parser.add_argument("--dtg", type=str, required=False, default=None)
+    parser.add_argument("--output", "-o", type=str, required=False, default=None)
+    parser.add_argument(
+        "--only_archive", action="store_true", default=False, help="Only call archiving"
+    )
+    parser.add_argument(
+        "--tolerate_missing",
+        action="store_true",
+        default=False,
+        help="Tolerate missing files",
+    )
+    parser.add_argument(
+        "--print_namelist",
+        action="store_true",
+        default=False,
+        help="Print namelist used",
+    )
+    parser.add_argument(
+        "--mode", "-m", type=str, required=True, choices=["forecast", "canari"]
+    )
+    parser.add_argument(
+        "--archive",
+        "-a",
+        required=False,
+        default=None,
+        nargs="?",
+        help="JSON file with archive output",
+    )
+    parser.add_argument(
+        "--binary",
+        "-b",
+        required=False,
+        default=None,
+        nargs="?",
+        help="Full path of MASTERODB binary",
+    )
+
+    if len(argv) == 0:
+        parser.print_help()
+        sys.exit(1)
+
+    args = parser.parse_args(argv)
+    kwargs = {}
+    for arg in vars(args):
+        kwargs.update({arg: getattr(args, arg)})
+    return kwargs
+
+
+def parse_args_surfex_binary(argv, mode):
+    """Parse the command line input arguments for surfex binary.
+
+    Args:
+        argv (list): List with arguments.
+        mode (str): Type of surfex binary
+
+    Raises:
+        NotImplementedError: Mode not implemented
+
+    Returns:
+        dict: Parsed arguments.
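+
+    Example:
+        An illustrative call only; paths are placeholders. With mode="pgd"
+        neither --pgd nor --prep is expected:
+
+            kwargs = parse_args_surfex_binary(
+                ["-r", "rte.json", "-s", "system.json", "-n", "nam_dir",
+                 "-o", "PGD.nc", "PGD.exe"],
+                "pgd",
+            )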
+ + """ + pert = False + need_pgd = True + need_prep = True + if mode == "pgd": + need_pgd = False + need_prep = False + desc = "Create physiography for SURFEX (PGD)" + elif mode == "prep": + need_prep = False + desc = "Prepare initial conditions for SURFEX" + elif mode == "offline": + desc = "Run Offline SURFEX" + elif mode == "soda": + desc = "Run SURFEX data assimilation (SODA)" + elif mode == "perturbed": + pert = True + desc = "Run perturbed Offline SURFEX" + else: + raise NotImplementedError(mode + " is not implemented!") + + parser = ArgumentParser(description=desc) + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument("--version", action="version", version=__version__) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument( + "--wrapper", "-w", type=str, default="", help="Execution wrapper command" + ) + if need_pgd: + parser.add_argument( + "--pgd", type=str, nargs="?", required=True, help="Name of the PGD file" + ) + if need_prep: + parser.add_argument( + "--prep", type=str, nargs="?", required=True, help="Name of the PREP file" + ) + if mode == "prep": + parser.add_argument("--prep_file", required=False, default=None, nargs="?") + parser.add_argument("--prep_filetype", required=False, default=None, nargs="?") + parser.add_argument("--prep_pgdfile", required=False, default=None, nargs="?") + parser.add_argument("--prep_pgdfiletype", required=False, default=None, nargs="?") + if mode == "offline" or mode == "perturbed": + parser.add_argument( + "--forc_zs", + action="store_true", + default=False, + help="Set model ZS to forcing ZS", + ) + parser.add_argument("--forcing_dir", required=False, default=None, nargs="?") + parser.add_argument("--force", "-f", action="store_true", help="Force re-creation") + parser.add_argument( + "--harmonie", + action="store_true", + default=False, + help="Surfex configuration created from Harmonie environment", + ) + parser.add_argument( + "--print_namelist", action="store_true", default=False, help="Print namelist used" + ) + parser.add_argument( + "--tolerate_missing", + action="store_true", + default=False, + help="Tolerate missing files", + ) + parser.add_argument( + "--masterodb", + action="store_true", + default=False, + help="Input file written by masterodb", + ) + parser.add_argument("--rte", "-r", required=True, nargs="?") + parser.add_argument("--config", "-c", required=False, nargs="?") + parser.add_argument( + "--system_file_paths", + "-s", + required=True, + nargs="?", + help="Input file paths on your system", + ) + parser.add_argument("--namelist_path", "-n", required=True, nargs="?") + parser.add_argument( + "--domain", type=str, required=False, help="JSON file with domain" + ) + parser.add_argument("--output", "-o", type=str, required=True) + parser.add_argument("--dtg", type=str, required=False, default=None) + if pert: + parser.add_argument("--pert", "-p", type=int, required=False, default=None) + parser.add_argument( + "--negpert", action="store_true", default=False, help="Negative perturbation" + ) + parser.add_argument( + "--archive", + "-a", + type=str, + required=False, + default=None, + nargs="?", + help="JSON file with archive output", + ) + parser.add_argument("binary", type=str, help="Command to run") + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return 
kwargs + + +def parse_args_create_namelist(argv): + """Parse the command line input arguments for creating a namelist. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser(description="Create namelist") + parser.add_argument("--version", action="version", version=__version__) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument( + "--wrapper", "-w", type=str, default="", help="Execution wrapper command" + ) + parser.add_argument("mode", type=str, help="Type of namelist") + parser.add_argument("--method", required=False, default="blocks", nargs="?") + parser.add_argument("--prep_file", required=False, default=None, nargs="?") + parser.add_argument("--prep_filetype", required=False, default=None, nargs="?") + parser.add_argument("--prep_pgdfile", required=False, default=None, nargs="?") + parser.add_argument("--prep_pgdfiletype", required=False, default=None, nargs="?") + parser.add_argument( + "--forc_zs", action="store_true", default=False, help="Set model ZS to forcing ZS" + ) + parser.add_argument("--forcing_dir", required=False, default=None, nargs="?") + parser.add_argument( + "--harmonie", + action="store_true", + default=False, + help="Surfex configuration created from Harmonie environment", + ) + parser.add_argument( + "--system_file_paths", + "-s", + required=True, + nargs="?", + help="Input file paths on your system", + ) + parser.add_argument("--config", "-c", required=False, nargs="?") + parser.add_argument("--namelist_path", "-n", required=True, nargs="?") + parser.add_argument( + "--domain", type=str, required=False, help="JSON file with domain" + ) + parser.add_argument("--output", "-o", type=str, required=False) + parser.add_argument("--dtg", type=str, required=False, default=None) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_gridpp(argv): + """Parse the command line input arguments for gridpp. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
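+
+    Example:
+        An illustrative call only; the file names below are placeholders:
+
+            kwargs = parse_args_gridpp(
+                ["-i", "raw.nc", "-obs", "qc_obs.json", "-o", "an.nc",
+                 "-v", "air_temperature_2m", "-hor", "30000"]
+            )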
+
+    """
+    parser = ArgumentParser(description="Create horizontal OI analysis")
+    parser.add_argument(
+        "--options", type=open, action=LoadFromFile, help="Load options from file"
+    )
+    parser.add_argument(
+        "-i",
+        "--input_file",
+        type=str,
+        help="Input NetCDF file with all variables",
+        required=True,
+    )
+    parser.add_argument(
+        "-obs",
+        "--obs_file",
+        type=str,
+        help="Input JSON file with QC observations",
+        required=True,
+    )
+    parser.add_argument(
+        "-o",
+        "--output_file",
+        type=str,
+        help="Output NetCDF file with all variables",
+        required=True,
+    )
+    parser.add_argument("-v", "--var", type=str, help="Variable", required=True)
+    parser.add_argument("-hor", dest="hlength", type=float, required=True)
+    parser.add_argument(
+        "-vert", dest="vlength", type=float, default=100000, required=False
+    )
+    parser.add_argument(
+        "--wlength", dest="wlength", type=float, default=0.0, required=False
+    )
+    parser.add_argument(
+        "--maxLocations", dest="max_locations", type=int, default=20, required=False
+    )
+    parser.add_argument(
+        "--elevGradient",
+        dest="elev_gradient",
+        type=float,
+        default=0,
+        required=False,
+        choices=[0, -0.0065],
+    )
+    parser.add_argument(
+        "--epsilon", dest="epsilon", type=float, default=0.25, required=False
+    )
+    parser.add_argument(
+        "--minvalue", dest="minvalue", type=float, default=None, required=False
+    )
+    parser.add_argument(
+        "--maxvalue", dest="maxvalue", type=float, default=None, required=False
+    )
+    parser.add_argument(
+        "--only_diff",
+        action="store_true",
+        help="Only write differences to file",
+        required=False,
+        default=False,
+    )
+    parser.add_argument(
+        "--debug", action="store_true", help="Debug", required=False, default=False
+    )
+    parser.add_argument("--version", action="version", version=__version__)
+
+    if len(argv) == 0:
+        parser.print_help()
+        sys.exit(1)
+
+    args = parser.parse_args(argv)
+    kwargs = {}
+    for arg in vars(args):
+        kwargs.update({arg: getattr(args, arg)})
+    return kwargs
+
+
+def parse_args_titan(argv):
+    """Parse the command line input arguments for titan.
+
+    Args:
+        argv (list): List with arguments.
+
+    Returns:
+        dict: Parsed arguments.
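+
+    Example:
+        An illustrative call only; the settings file name and the list of
+        tests are placeholders, not a recommended QC chain:
+
+            kwargs = parse_args_titan(
+                ["-i", "obs_settings.json", "-v", "air_temperature_2m",
+                 "-dtg", "2020033006", "domain", "blacklist", "plausibility"]
+            )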
+ + """ + parser = ArgumentParser(description="Do quality control of observations") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-i", + "--input_file", + type=str, + help="Input json file with observation sets and test settings", + required=True, + ) + parser.add_argument( + "-o", + "--output_file", + type=str, + help="Output json file with quality checked observations", + required=False, + default="qc_obs.json", + ) + parser.add_argument( + "-v", "--variable", type=str, required=True, help="Observation variable" + ) + parser.add_argument("--indent", type=int, default=None, help="Indent") + parser.add_argument( + "-dtg", type=str, help="Date time group YYYYMMDDHH", required=True + ) + parser.add_argument( + "--harmonie", + action="store_true", + default=False, + help="Surfex configuration created from Harmonie environment", + ) + parser.add_argument( + "tests", nargs="+", type=str, help="Which tests to run and order to run" + ) + parser.add_argument( + "--blacklist", + dest="blacklist_file", + type=str, + required=False, + default=None, + help="JSON file with blacklist", + ) + parser.add_argument( + "--domain", type=str, required=False, default=None, help="JSON file with domain" + ) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_oi2soda(argv): + """Parse the command line input arguments for oi2soda. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
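+
+    Example:
+        An illustrative call only; the NetCDF and output file names are
+        placeholders:
+
+            kwargs = parse_args_oi2soda(
+                ["--t2m_file", "an_t2m.nc", "--rh2m_file", "an_rh2m.nc",
+                 "-o", "obs_soda.dat", "2020033006"]
+            )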
+ + """ + parser = ArgumentParser(description="Create ASCII input for SODA from gridpp files") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "--t2m_file", type=str, help="NetCDF file for T2M", required=False, default=None + ) + parser.add_argument( + "--t2m_var", + type=str, + help="NetCDF variable name for T2M", + required=False, + default="air_temperature_2m", + ) + parser.add_argument( + "--rh2m_file", type=str, help="NetCDF file for RH2M", required=False, default=None + ) + parser.add_argument( + "--rh2m_var", + type=str, + help="NetCDF variable name for RH2M", + required=False, + default="relative_humidity_2m", + ) + parser.add_argument( + "--sd_file", type=str, help="NetCDF file for SD", required=False, default=None + ) + parser.add_argument( + "--sd_var", + type=str, + help="NetCDF variable name for SD", + required=False, + default="surface_snow_thickness", + ) + parser.add_argument( + "--sm_file", type=str, help="NetCDF file for SM", required=False, default=None + ) + parser.add_argument( + "--sm_var", + type=str, + help="NetCDF variable name for SM", + required=False, + default="surface_soil_moisture", + ) + parser.add_argument("dtg", nargs="?", type=str, help="DTG", default=None) + parser.add_argument("-o", dest="output", type=str, help="Output file", default=None) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) < 3: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_lsm_file_assim(argv): + """Parse the command line input arguments for land-sea-mask for assimilation. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser(description="Create ASCII LSM input for SODA") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument("--file", type=str, help="Input file name", required=True) + parser.add_argument("--fileformat", type=str, help="Input fileformat", required=True) + parser.add_argument( + "--var", + type=str, + help="Variable in input file", + required=False, + default="air_temperature_2m", + ) + parser.add_argument( + "--converter", + type=str, + help="Converter for variable", + required=False, + default="none", + ) + parser.add_argument("--dtg", type=str, help="DTG", default=None, required=False) + parser.add_argument("--domain", type=str, help="Domain", required=True) + parser.add_argument("-o", dest="output", type=str, help="Output file", default=None) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) < 3: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + + return kwargs + + +def parse_args_hm2pysurfex(argv): + """Parse the command line input arguments for hm2pysurfex. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
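+
+    Example:
+        An illustrative call only; the config file names are placeholders:
+
+            kwargs = parse_args_hm2pysurfex(
+                ["-c", "config_exp_surfex.toml", "-o", "surfex_config.toml"]
+            )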
+ + """ + parser = ArgumentParser("hm2pysurfex") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-c", dest="config", type=str, required=True, help="PySurfex config file" + ) + parser.add_argument( + "-e", + dest="environment", + type=str, + required=False, + default=None, + help="Environment if not taken from running environment", + ) + parser.add_argument( + "-o", + dest="output", + type=str, + required=False, + default=None, + help="Output toml file", + ) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_bufr2json(argv): + """Parse the command line input arguments for bufr2json. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser("bufr2json") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument("-b", dest="bufr", type=str, required=True, help="Bufr file") + parser.add_argument( + "-v", dest="vars", nargs="+", type=str, required=True, help="Variables" + ) + parser.add_argument( + "-o", dest="output", type=str, required=True, help="Output JSON file" + ) + parser.add_argument( + "-dtg", dest="dtg", type=str, required=True, help="DTG (YYYYMMDHH)" + ) + parser.add_argument( + "--indent", dest="indent", type=int, required=False, default=None, help="Indent" + ) + parser.add_argument( + "-range", + dest="valid_range", + type=int, + help="Valid range in seconds", + default=3600, + ) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_obs2json(argv): + """Parse the command line input arguments for obs2json. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
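+
+    Example:
+        An illustrative call only; the BUFR file name and the variable name
+        are placeholders:
+
+            kwargs = parse_args_obs2json(
+                ["-t", "bufr", "-i", "ob2020033006.bufr",
+                 "-v", "airTemperatureAt2M",
+                 "-o", "obs.json", "-dtg", "2020033006"]
+            )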
+ + """ + parser = ArgumentParser("obs2json") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-t", + dest="obs_type", + type=str, + required=True, + help="Observations type", + choices=["bufr", "netatmo", "frost", "obsoul", "json"], + ) + parser.add_argument( + "-i", dest="inputfile", type=str, nargs="+", required=True, help="inputfile(s)" + ) + parser.add_argument( + "-v", dest="vars", nargs="+", type=str, required=True, help="Variables" + ) + parser.add_argument( + "-o", dest="output", type=str, required=True, help="Output JSON file" + ) + parser.add_argument( + "-dtg", dest="obs_time", type=str, required=True, help="DTG (YYYYMMDHH)" + ) + parser.add_argument( + "--indent", dest="indent", type=int, required=False, default=None, help="Indent" + ) + parser.add_argument( + "--pos_t_range", + dest="pos_t_range", + type=int, + help="Valid range in seconds after obs_time", + default=3600, + ) + parser.add_argument( + "--neg_t_range", + dest="neg_t_range", + type=int, + help="Valid range in seconds before obs_time", + default=3600, + ) + parser.add_argument( + "--label", dest="label", type=str, required=False, default=None, help="Label" + ) + parser.add_argument( + "--unit", dest="unit", type=str, required=False, default=None, help="Unit (FROST)" + ) + parser.add_argument( + "--level", + dest="level", + type=str, + required=False, + default=None, + help="Level (FROST)", + ) + parser.add_argument( + "--obtypes", + dest="obtypes", + type=str, + required=False, + default=None, + help="Obtypes (obsoul)", + ) + parser.add_argument( + "--subtypes", + dest="subtypes", + type=str, + required=False, + default=None, + help="Subtypes (obsoul)", + ) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_plot_points(argv): + """Parse the command line input arguments for plotting points. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
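+
+    Example:
+        An illustrative call for a NetCDF field; the file names are
+        placeholders:
+
+            kwargs = parse_args_plot_points(
+                ["-g", "domain.json", "-v", "air_temperature_2m",
+                 "-i", "forecast.nc", "-it", "netcdf",
+                 "-t", "2020033006", "-o", "t2m.png"]
+            )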
+ + """ + parser = ArgumentParser("Plot points") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-g", + "--geo", + dest="geo", + type=str, + help="Domain/points json geometry definition file", + default=None, + required=False, + ) + parser.add_argument( + "-v", + "--variable", + dest="variable", + type=str, + help="Variable name", + required=False, + ) + parser.add_argument( + "-i", + "--inputfile", + dest="inputfile", + type=str, + help="Input file", + default=None, + required=False, + ) + parser.add_argument( + "-it", + "--inputtype", + dest="inputtype", + type=str, + help="Filetype", + default="surfex", + required=False, + choices=["netcdf", "grib1", "grib2", "surfex", "obs"], + ) + parser.add_argument( + "-t", + "--validtime", + dest="validtime", + type=str, + help="Valid time", + default=None, + required=False, + ) + parser.add_argument( + "-o", + "--output", + dest="output", + type=str, + help="Output file", + default=None, + required=False, + ) + parser.add_argument("--no-contour", dest="no_contour", action="store_true") + parser.add_argument( + "--interpolator", type=str, default="nearest", required=False, help="Interpolator" + ) + grib = parser.add_argument_group("grib", "Grib1/2 settings (-it grib1 or -it grib2)") + grib.add_argument( + "--indicatorOfParameter", + type=int, + help="Indicator of parameter [grib1]", + default=None, + ) + grib.add_argument( + "--timeRangeIndicator", type=int, help="Time range indicator [grib1]", default=0 + ) + grib.add_argument( + "--levelType", type=str, help="Level type [grib1/grib2]", default="sfc" + ) + grib.add_argument("--level", type=int, help="Level [grib1/grib2]", default=0) + grib.add_argument("--discipline", type=int, help="Discipline [grib2]", default=None) + grib.add_argument( + "--parameterCategory", type=int, help="Parameter category [grib2]", default=None + ) + grib.add_argument( + "--parameterNumber", type=int, help="ParameterNumber [grib2]", default=None + ) + grib.add_argument( + "--typeOfStatisticalProcessing", + type=int, + help="TypeOfStatisticalProcessing [grib2]", + default=-1, + ) + + sfx = parser.add_argument_group("Surfex", "Surfex settings (-it surfex)") + sfx.add_argument( + "--sfx_type", + type=str, + help="Surfex file type", + default=None, + choices=[None, "forcing", "ascii", "nc", "netcdf", "texte"], + ) + + sfx.add_argument("--sfx_patches", type=int, help="Patches [ascii/texte]", default=-1) + sfx.add_argument("--sfx_layers", type=int, help="Layers [ascii/texte]", default=-1) + sfx.add_argument( + "--sfx_datatype", + type=str, + help="Datatype [ascii]", + choices=["string", "float", "integer"], + default="float", + ) + sfx.add_argument("--sfx_interval", type=str, help="Interval [texte]", default=None) + sfx.add_argument("--sfx_basetime", type=str, help="Basetime [texte]", default=None) + sfx.add_argument( + "--sfx_geo_input", + type=str, + default=None, + help="JSON file with domain defintion [forcing/netcdf/texte]", + ) + + obs = parser.add_argument_group("Observations", "Observation settings (scatter plot)") + obs.add_argument( + "--obs_type", + type=str, + help="Observation source type (-it obs)", + choices=[None, "json", "bufr", "frost", "netatmo"], + default=None, + ) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit() + + args = 
parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_set_geo_from_obs_set(argv): + """Parse the command line input arguments for setting geo from obs set. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser("Set a point geometry from an observation set") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-v", type=str, dest="variable", help="Variable name", required=True + ) + parser.add_argument( + "-t", dest="validtime", help="Validtime (YYYYMMDDHH)", required=True + ) + parser.add_argument( + "-i", type=str, dest="inputfile", help="Input file", required=False + ) + parser.add_argument( + "-it", + type=str, + dest="obs_type", + help="Input type", + required=True, + choices=["netatmo", "frost", "bufr", "json"], + ) + parser.add_argument( + "--lonrange", + type=str, + dest="lonrange", + help="Longitude range", + default=None, + required=False, + ) + parser.add_argument( + "--latrange", + type=str, + dest="latrange", + help="Latitude range", + default=None, + required=False, + ) + parser.add_argument("-o", type=str, dest="output", help="Output file", required=True) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_set_geo_from_stationlist(argv): + """Parse the command line input arguments for setting geo from station list. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser("Set a point geometry from a stationlist") + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument("stationlist", type=str, help="Station list") + parser.add_argument( + "--lonrange", + type=str, + dest="lonrange", + help="Longitude range", + default=None, + required=False, + ) + parser.add_argument( + "--latrange", + type=str, + dest="latrange", + help="Latitude range", + default=None, + required=False, + ) + parser.add_argument("-o", type=str, dest="output", help="Output file", required=True) + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument("--version", action="version", version=__version__) + + if len(argv) == 0: + parser.print_help() + sys.exit(1) + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_merge_qc_data(argv): + """Parse the command line input arguments for merge of qc data. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
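+
+    Example:
+        An illustrative call only; the QC file names are placeholders:
+
+            kwargs = parse_args_merge_qc_data(
+                ["-i", "qc_t2m.json", "qc_rh2m.json",
+                 "-t", "2020033006", "-o", "qc_merged.json"]
+            )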
+
+    """
+    parser = ArgumentParser()
+    parser.add_argument(
+        "--options", type=open, action=LoadFromFile, help="Load options from file"
+    )
+    parser.add_argument(
+        "-i",
+        type=str,
+        nargs="+",
+        dest="filenames",
+        help="Input QC JSON files",
+        required=True,
+    )
+    parser.add_argument(
+        "-t", dest="validtime", help="Validtime (YYYYMMDDHH)", required=True
+    )
+    parser.add_argument("--indent", type=int, help="Indent in output", default=None)
+    parser.add_argument("-o", type=str, dest="output", help="Output file", required=True)
+
+    if len(argv) == 0:
+        parser.print_help()
+        sys.exit(1)
+
+    args = parser.parse_args(argv)
+    kwargs = {}
+    for arg in vars(args):
+        kwargs.update({arg: getattr(args, arg)})
+    return kwargs
+
+
+def parse_timeseries2json(argv):
+    """Parse the command line input arguments for time series to json.
+
+    Args:
+        argv (list): List with arguments.
+
+    Returns:
+        dict: Parsed arguments.
+
+    """
+    parser = ArgumentParser("Convert a time series to json")
+    parser.add_argument(
+        "--options", type=open, action=LoadFromFile, help="Load options from file"
+    )
+    parser.add_argument(
+        "-v", "--varname", dest="varname", type=str, help="Variable name", required=True
+    )
+    parser.add_argument(
+        "-lons",
+        dest="lons",
+        type=float,
+        nargs="+",
+        help="Longitudes",
+        default=None,
+        required=False,
+    )
+    parser.add_argument(
+        "-lats",
+        dest="lats",
+        type=float,
+        nargs="+",
+        help="Latitudes",
+        default=None,
+        required=False,
+    )
+    parser.add_argument(
+        "-stids",
+        dest="stations",
+        type=str,
+        nargs="+",
+        help="Station ids",
+        default=None,
+        required=False,
+    )
+    parser.add_argument(
+        "-stations",
+        dest="stationlist",
+        type=str,
+        help="Station list file",
+        default=None,
+        required=False,
+    )
+    parser.add_argument(
+        "-i",
+        "--filepattern",
+        dest="filepattern",
+        type=str,
+        help="Input file",
+        default="",
+        required=False,
+    )
+    parser.add_argument(
+        "-it",
+        "--inputtype",
+        dest="inputtype",
+        type=str,
+        help="Input type (format)",
+        default="surfex",
+        required=False,
+        choices=["netcdf", "grib1", "grib2", "surfex", "obs"],
+    )
+    parser.add_argument(
+        "-start", dest="start", type=str, help="Start time (YYYYMMDDHH)", required=True
+    )
+    parser.add_argument(
+        "-end", dest="end", type=str, help="End time (YYYYMMDDHH)", required=True
+    )
+    parser.add_argument(
+        "-int",
+        dest="interval",
+        type=int,
+        help="Interval in seconds",
+        required=False,
+        default=3600,
+    )
+    parser.add_argument(
+        "-indent", dest="indent", type=int, help="Indent", required=False, default=None
+    )
+    parser.add_argument(
+        "-fcint",
+        dest="fcint",
+        type=int,
+        help="Interval between analysis in seconds",
+        required=False,
+        default=3 * 3600,
+    )
+    parser.add_argument(
+        "-file_inc",
+        dest="file_inc",
+        type=int,
+        help="Interval between files in seconds",
+        required=False,
+        default=3 * 3600,
+    )
+    parser.add_argument(
+        "-offset",
+        dest="offset",
+        type=int,
+        help="Offset into next forecast by seconds",
+        required=False,
+        default=0,
+    )
+    parser.add_argument(
+        "-sfx",
+        dest="sfx_type",
+        type=str,
+        help="Input type for surfex files",
+        default=None,
+        required=False,
+        choices=[None, "forcing", "ascii", "nc", "netcdf", "texte"],
+    )
+    parser.add_argument(
+        "-geo",
+        dest="geo_in",
+        type=str,
+        help="JSON file with geometry needed for some surfex file types",
+        required=False,
+        default=None,
+    )
+    parser.add_argument(
+        "-obs",
+        dest="obs_set",
+        type=str,
+        help="Input type",
+        default=None,
+        required=False,
+        choices=[None, "json", "bufr", "frost", "netatmo", "titan"],
+
) + parser.add_argument( + "-o", + "--output", + dest="output", + type=str, + help="Output image", + default=None, + required=False, + ) + + if len(argv) == 0: + parser.print_help() + sys.exit() + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_cryoclim_pseudoobs(argv): + """Parse the command line input arguments for cryoclim pseudo obs. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser("Create CRYOCLIM pseudo-obs") + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-v", + "--varname", + dest="varname", + type=str, + help="Variable name", + default="surface_snow_thickness", + required=False, + ) + parser.add_argument( + "-fg", + dest="fg_file", + type=str, + help="First guess file", + default=None, + required=True, + ) + parser.add_argument( + "-i", + dest="infiles", + type=str, + nargs="+", + help="Infiles", + default=None, + required=True, + ) + parser.add_argument( + "-step", + dest="thinning", + type=int, + help="Thinning step", + required=False, + default=4, + ) + parser.add_argument( + "-indent", dest="indent", type=int, help="Indent", required=False, default=None + ) + parser.add_argument( + "-o", + "--output", + dest="output", + type=str, + help="Output observation set", + default=None, + required=False, + ) + + if len(argv) == 0: + parser.print_help() + sys.exit() + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_sentinel_obs(argv): + """Parse the command line input arguments for sentinel observations. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. + + """ + parser = ArgumentParser("Create Sentinel-1 obs") + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-v", + "--varname", + dest="varname", + type=str, + help="Variable name", + default="surface_soil_moisture", + required=False, + ) + parser.add_argument( + "-fg", + dest="fg_file", + type=str, + help="First guess file", + default=None, + required=True, + ) + parser.add_argument( + "-i", + dest="infiles", + type=str, + nargs="+", + help="Infiles", + default=None, + required=True, + ) + parser.add_argument( + "-step", + dest="thinning", + type=int, + help="Thinning step", + required=False, + default=4, + ) + parser.add_argument( + "-indent", dest="indent", type=int, help="Indent", required=False, default=None + ) + parser.add_argument( + "-o", + "--output", + dest="output", + type=str, + help="Output image", + default=None, + required=False, + ) + + if len(argv) == 0: + parser.print_help() + sys.exit() + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_args_shape2ign(argv): + """Parse the command line input arguments for shape fiel to ign. + + Args: + argv (list): List with arguments. + + Returns: + dict: Parsed arguments. 
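+
+    Example:
+        An illustrative call only; the catchment name and paths are
+        placeholders:
+
+            kwargs = parse_args_shape2ign(
+                ["-c", "my_catchment", "-i", "shapes", "-r", "domain.json",
+                 "-o", "ign_geo.json"]
+            )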
+ + """ + parser = ArgumentParser("Convert NVE shape files to IGN geometry") + parser.add_argument( + "--debug", action="store_true", help="Debug", required=False, default=False + ) + parser.add_argument( + "--options", type=open, action=LoadFromFile, help="Load options from file" + ) + parser.add_argument( + "-c", + "--catchment", + dest="catchment", + type=str, + help="Catchment name", + default="None", + required=False, + ) + parser.add_argument( + "-i", + dest="infile", + type=str, + help="Infile/directory", + default=None, + required=True, + ) + parser.add_argument( + "-r", + dest="ref_proj", + type=str, + help="Reference projection (domain file)", + default=None, + required=True, + ) + parser.add_argument( + "--indent", dest="indent", type=str, help="Indent", default=None, required=False + ) + parser.add_argument( + "-o", + "--output", + dest="output", + type=str, + help="Output json geometry file", + default=None, + required=False, + ) + + if len(argv) == 0: + parser.print_help() + sys.exit() + + args = parser.parse_args(argv) + kwargs = {} + for arg in vars(args): + kwargs.update({arg: getattr(args, arg)}) + return kwargs + + +def parse_set_domain(argv): + """Parse the command line input arguments.""" + parser = ArgumentParser() + + parser.add_argument("--version", action="version", version=f"surfex {__version__}") + parser.add_argument("--domain", "-d", required=True, type=str, help="Name of domain") + parser.add_argument("--domains", required=True, type=str, help="Domain definitions") + parser.add_argument( + "--harmonie", action="store_true", help="Domain in harmonie definition" + ) + parser.add_argument( + "--indent", required=False, default=2, type=int, help="Indented output" + ) + parser.add_argument("--output", "-o", required=True, nargs="?") + parser.add_argument("--debug", help="Show debug information", action="store_true") + + if len(argv) == 1: + parser.print_help() + sys.exit() + + return parser.parse_args(argv) diff --git a/surfex/configuration.py b/surfex/configuration.py index e2bf689..ba8097b 100644 --- a/surfex/configuration.py +++ b/surfex/configuration.py @@ -1,9 +1,13 @@ """Configuration.""" -import os -import logging import json +import logging +import os + import toml -import surfex + +from .geo import ConfProj +from .platform import SystemFilePaths +from .util import merge_toml_env class Configuration(object): @@ -34,22 +38,29 @@ def dump_json(self, filename, indent=None): """Dump configuration to json file. Args: - filename (_type_): _description_ - indent (_type_, optional): _description_. Defaults to None. - - Raises: - Exception: _description_ + filename (str): Filename + indent (int, optional): Indentation. Defaults to None. """ - if json is None: - raise Exception("json module not loaded") - logging.debug("settings %s", self.settings) json.dump(self.settings, open(filename, "w", encoding="utf-8"), indent=indent) - def setting_is(self, setting, value, sep="#", abort=True, default=None, - system_variables=None, check_parsing=True, validtime=None, basedtg=None, - mbr=None, tstep=None, pert=None, var=None): + def setting_is( + self, + setting, + value, + sep="#", + abort=True, + default=None, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): """Check if setting is value. Args: @@ -68,19 +79,45 @@ def setting_is(self, setting, value, sep="#", abort=True, default=None, var (_type_, optional): _description_. Defaults to None. 
Returns: - _type_: _description_ + bool: True if found """ - if self.get_setting(setting, sep=sep, abort=abort, default=default, - system_variables=system_variables, check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, - var=var) == value: + if ( + self.get_setting( + setting, + sep=sep, + abort=abort, + default=default, + system_variables=system_variables, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + == value + ): return True else: return False - def setting_is_not(self, setting, value, sep="#", abort=True, default=None, - system_variables=None, check_parsing=True, validtime=None, basedtg=None, - mbr=None, tstep=None, pert=None, var=None): + def setting_is_not( + self, + setting, + value, + sep="#", + abort=True, + default=None, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): """Check if setting is not value. Args: @@ -102,10 +139,23 @@ def setting_is_not(self, setting, value, sep="#", abort=True, default=None, _type_: _description_ """ found = False - if self.get_setting(setting, sep=sep, abort=abort, default=default, - system_variables=system_variables, check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, - var=var) == value: + if ( + self.get_setting( + setting, + sep=sep, + abort=abort, + default=default, + system_variables=system_variables, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + == value + ): found = True if found: @@ -113,21 +163,33 @@ def setting_is_not(self, setting, value, sep="#", abort=True, default=None, else: return True - def value_is_one_of(self, settings, value, sep="#", abort=True, - system_variables=None, check_parsing=True, validtime=None, basedtg=None, - mbr=None, tstep=None, pert=None, var=None): + def value_is_one_of( + self, + settings, + value, + sep="#", + abort=True, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): """Check if value is one of the settings. Args: - settings (list): - value: - sep (str): - abort (bool): + settings (list): Settings + value(any): Value + sep (str, optional): Separator. Defaults to "#" + abort (bool, optional): Abort. 
Defaults to True system_variables (dict): Arbitrary settings to substitute @NAME@ = system_variables={"NAME": "Value"} check_parsing (bool): Check if all @@ pairs were parsed - validtime (datetime.daetime): Parse setting with this as validtime - basedtg (datetime.datetime): Parse setting with this as base time + validtime (as_datetime): Parse setting with this as validtime + basedtg (as_datetime): Parse setting with this as base time mbr (int): Parse setting with this as ensemble member number (@E@/@EE@/@EEE@) tstep (int): Parse setting with this as timestep to get step number (@TTT@/@TTTT@) pert (int): Parse setting with this as perturbation number @PERT@ @@ -136,6 +198,9 @@ def value_is_one_of(self, settings, value, sep="#", abort=True, Returns: found (bool): True if value is found in any of the settings + Raises: + ValueError: Expected a list as input + See Also: self.get_setting() surfex.SystemFilePaths.parse_setting() @@ -143,22 +208,43 @@ def value_is_one_of(self, settings, value, sep="#", abort=True, """ if not isinstance(settings, list): - raise Exception("Expected a list as input, got ", type(settings)) + raise ValueError("Expected a list as input, got ", type(settings)) + found = False for check_s in settings: - setting = self.get_setting(check_s, sep=sep, abort=abort, - system_variables=system_variables, - check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, mbr=mbr, tstep=tstep, - pert=pert, var=var) + setting = self.get_setting( + check_s, + sep=sep, + abort=abort, + system_variables=system_variables, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) if setting == value: return True return found - def value_is_not_one_of(self, setting, value, sep="#", abort=True, - system_variables=None, check_parsing=True, validtime=None, - basedtg=None, mbr=None, tstep=None, pert=None, var=None): + def value_is_not_one_of( + self, + setting, + value, + sep="#", + abort=True, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): """Check if value is not one of. Args: @@ -179,20 +265,40 @@ def value_is_not_one_of(self, setting, value, sep="#", abort=True, _type_: _description_ """ - found = self.value_is_one_of(setting, value, sep=sep, abort=abort, - system_variables=system_variables, - check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, mbr=mbr, tstep=tstep, - pert=pert, var=var) + found = self.value_is_one_of( + setting, + value, + sep=sep, + abort=abort, + system_variables=system_variables, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) if found: return False else: return True - def setting_is_one_of(self, setting, values, sep="#", abort=True, - system_variables=None, - check_parsing=True, validtime=None, basedtg=None, mbr=None, tstep=None, - pert=None, var=None): + def setting_is_one_of( + self, + setting, + values, + sep="#", + abort=True, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): """Check if setting is one of values. Args: @@ -210,28 +316,49 @@ def setting_is_one_of(self, setting, values, sep="#", abort=True, var (_type_, optional): _description_. Defaults to None. 
Raises: - Exception: _description_ + ValueError: Excpected a list as input Returns: - _type_: _description_ + bool: True if found """ found = False - setting = self.get_setting(setting, sep=sep, abort=abort, - system_variables=system_variables, check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, mbr=mbr, tstep=tstep, - pert=pert, var=var) + setting = self.get_setting( + setting, + sep=sep, + abort=abort, + system_variables=system_variables, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) if not isinstance(values, list): - raise Exception("Excpected a list as input, got ", type(values)) + raise ValueError("Excpected a list as input, got ", type(values)) + for val in values: if setting == val: found = True return found - def setting_is_not_one_of(self, setting, values, sep="#", abort=True, - system_variables=None, check_parsing=True, validtime=None, - basedtg=None, mbr=None, - tstep=None, pert=None, var=None): + def setting_is_not_one_of( + self, + setting, + values, + sep="#", + abort=True, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): """Check if setting is not one of values. Args: @@ -249,22 +376,43 @@ def setting_is_not_one_of(self, setting, values, sep="#", abort=True, var (_type_, optional): _description_. Defaults to None. Returns: - _type_: _description_ + bool: _description_ """ - found = self.setting_is_one_of(setting, values, sep=sep, abort=abort, - system_variables=system_variables, - check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, mbr=mbr, tstep=tstep, - pert=pert, var=var) + found = self.setting_is_one_of( + setting, + values, + sep=sep, + abort=abort, + system_variables=system_variables, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) if found: return False else: return True - def get_setting(self, setting, sep="#", abort=True, default=None, system_variables=None, - check_parsing=True, validtime=None, basedtg=None, mbr=None, tstep=None, - pert=None, var=None): + def get_setting( + self, + setting, + sep="#", + abort=True, + default=None, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): """Get configurations setting. Settings are nested in blocks. 
@@ -273,8 +421,8 @@ def get_setting(self, setting, sep="#", abort=True, default=None, system_variabl E.g setting = "SURFEX#ASSIM#ASSIM_SCHEMES" Args: - setting: The requested setting - default: A fallback setting in case setting is not found + setting (str): The requested setting + default (any): A fallback setting in case setting is not found sep (str): A separation character between different configuration blocks abort (bool): Abort if setting is not found and default not set system_variables (dict): Arbitrary settings to substitute @@ -295,8 +443,8 @@ def get_setting(self, setting, sep="#", abort=True, default=None, system_variabl surfex.SystemFilePaths.parse_setting() surfex.SystemFilePaths.substitute_string() - Raise: - KeyError + Raises: + KeyError: Key not found """ settings = self.settings @@ -314,11 +462,19 @@ def get_setting(self, setting, sep="#", abort=True, default=None, system_variabl if key in this_setting: this_setting = this_setting[key] # Time information - this_setting = surfex.SystemFilePaths.substitute_string( - this_setting, system_variables=system_variables) - this_setting = surfex.SystemFilePaths.parse_setting( - this_setting, check_parsing=check_parsing, validtime=validtime, - basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, var=var) + this_setting = SystemFilePaths.substitute_string( + this_setting, system_variables=system_variables + ) + this_setting = SystemFilePaths.parse_setting( + this_setting, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) else: if default is not None: this_setting = default @@ -355,7 +511,7 @@ def update_setting(self, setting, value, sep="#"): for key in reversed(keys[0:-1]): dsetting = {key: dsetting} - self.settings = surfex.merge_toml_env(self.settings, dsetting) + self.settings = merge_toml_env(self.settings, dsetting) class ConfigurationFromHarmonie(Configuration): @@ -375,6 +531,9 @@ def __init__(self, env, conf): env (dict): System environment e.g. os.environ conf (dict): The default configuration for this deterministic run/ensemble member + Raises: + NotImplementedError: soil_texture not implemented + """ Configuration.__init__(self, conf) @@ -389,13 +548,11 @@ def __init__(self, env, conf): gsize = float(env["GSIZE"]) if "LGSIZE" in env: gsize = float(env["LGSIZE"]) - trunc = 2 # linear + trunc = 2 # linear if "TRUNC" in env: trunc = int(env["TRUNC"]) domain_dict = { - "nam_pgd_grid": { - "cgrid": "CONF PROJ" - }, + "nam_pgd_grid": {"cgrid": "CONF PROJ"}, "nam_conf_proj": { "xlat0": float(env["LAT0"]), "xlon0": float(env["LON0"]), @@ -410,9 +567,9 @@ def __init__(self, env, conf): "xdx": gsize, "xdy": gsize, "xtrunc": trunc, - } + }, } - geo = surfex.ConfProj(domain_dict) + geo = ConfProj(domain_dict) self.geo = geo logging.debug("GEO: %s", self.geo) @@ -451,7 +608,7 @@ def __init__(self, env, conf): # LISBA_CANOPY Activates surface boundary multi layer scheme over land in SURFEX # (must be .FALSE. for NPATCH>1) - canopy = env["LISBA_CANOPY"].replace('.','') + canopy = env["LISBA_CANOPY"].replace(".", "") if canopy.strip().lower()[0] == "t": canopy = True else: @@ -508,7 +665,7 @@ def __init__(self, env, conf): self.update_setting("SURFEX#ISBA#YSAND", ysand + ".dir") self.update_setting("SURFEX#ISBA#YCLAY", yclay + ".dir") - # LDB_VERSION = 3.0 # Lake database version. + # Lake database version. 
self.update_setting("SURFEX#FLAKE#LDB_VERSION", env["LDB_VERSION"]) # Treeheight @@ -521,7 +678,7 @@ def __init__(self, env, conf): # XSCALE_H_TREE Scale the tree height with this factor self.update_setting("SURFEX#TREEDRAG#XSCALE_H_TREE", env["XSCALE_H_TREE"]) if "LFAKETREE" in env: - if env["LFAKETREE"].replace('.','').strip().lower()[0] == "t": + if env["LFAKETREE"].replace(".", "").strip().lower()[0] == "t": lfaketree = True else: lfaketree = False @@ -547,7 +704,7 @@ def __init__(self, env, conf): self.update_setting("SURFEX#ASSIM#SCHEMES#SEA", ana_sea) if "LECSST" in env: - if env["LECSST"].replace('.','').strip().lower()[0] == "t": + if env["LECSST"].replace(".", "").strip().lower()[0] == "t": lecsst = True else: lecsst = False @@ -664,12 +821,12 @@ def __init__(self, env, conf): if (env["SNOW_CYCLES"]) == "": snow_cycles = [] else: - snow_cycles = (str(env["SNOW_CYCLES"]).split(" ")) + snow_cycles = str(env["SNOW_CYCLES"]).split(" ") self.update_setting("SURFEX#ASSIM#ISBA#UPDATE_SNOW_CYCLES", snow_cycles) lswepsini = False if "LSWEPSINI" in env: - if env["LSWEPSINI"].replace('.','').strip().lower()[0] == "t": + if env["LSWEPSINI"].replace(".", "").strip().lower()[0] == "t": lswepsini = True else: lswepsini = False @@ -680,7 +837,7 @@ def __init__(self, env, conf): self.update_setting("SURFEX#ASSIM#ISBA#XSWEPSINI", xswepsini) lswepsmin = False if "LSWEPSMIN" in env: - if env["LSWEPSMIN"].replace('.','').strip().lower()[0] == "t": + if env["LSWEPSMIN"].replace(".", "").strip().lower()[0] == "t": lswepsmin = True else: lswepsmin = False @@ -692,7 +849,7 @@ def __init__(self, env, conf): lpatch1 = False if "LPATCH1" in env: - if env["LPATCH1"].replace('.','').strip().lower()[0] == "t": + if env["LPATCH1"].replace(".", "").strip().lower()[0] == "t": lpatch1 = True else: lpatch1 = False @@ -707,13 +864,13 @@ def __init__(self, env, conf): self.update_setting("SURFEX#SEA#PERTFLUX", False) if env["PERTSURF"] == "model": if "LPERTSURF" in env: - if env["LPERTSURF"].replace('.','').strip().lower()[0] == "t": + if env["LPERTSURF"].replace(".", "").strip().lower()[0] == "t": self.update_setting("SURFEX#ISBA#PERTSURF", True) self.update_setting("SURFEX#SEA#PERTFLUX", True) # Volatile sea ice (climate mode) if "LVOLATILE_SIC" in env: - if env["LVOLATILE_SIC"].replace('.','').strip().lower()[0] == "t": + if env["LVOLATILE_SIC"].replace(".", "").strip().lower()[0] == "t": self.update_setting("SURFEX.SEA.LVOLATILE_SIC", True) else: self.update_setting("SURFEX.SEA.LVOLATILE_SIC", False) @@ -730,12 +887,13 @@ def __init__(self, env, conf_file): conf_file (str): Filename with configuration """ - conf = toml.load(open(conf_file, "r", encoding="utf-8")) + with open(conf_file, "r", encoding="utf-8") as fhandler: + conf = toml.load(fhandler) ConfigurationFromHarmonie.__init__(self, env, conf) -class ConfigurationFromJsonFile(Configuration): - """Configuration from a json file.""" +class ConfigurationFromTomlFile(Configuration): + """Configuration from a TOML file.""" def __init__(self, filename): """Construct the configuration. 
@@ -744,5 +902,6 @@ def __init__(self, filename): filename (str): File name """ - settings = json.load(open(filename, "r", encoding="utf-8")) + with open(filename, mode="r", encoding="utf-8") as fhandler: + settings = toml.load(fhandler) Configuration.__init__(self, settings) diff --git a/surfex/datetime_utils.py b/surfex/datetime_utils.py new file mode 100644 index 0000000..b6d5657 --- /dev/null +++ b/surfex/datetime_utils.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +"""Implement helper routines to deal with dates and times.""" +from datetime import date, datetime, timedelta + + +# TODO use ISO times +def as_datetime(dtg): + """Convert string to datetime.""" + if len(dtg) == 10: + fmt = "%Y%m%d%H" + elif len(dtg) == 12: + fmt = "%Y%m%d%H%M" + elif len(dtg) == 14: + fmt = "%Y%m%d%H%M%S" + else: + raise RuntimeError(f"dtg={dtg} len(dtg) is {len(dtg)}") + + return datetime.strptime(dtg, fmt) + + +def as_datetime_string(dtg): + """Convert string to datetime.""" + fmt = "%Y%m%d%H%M%S" + return dtg.strftime(fmt) + + +def as_timedelta(seconds=0): + """Convert seconds to timedelta.""" + return timedelta(seconds=seconds) + + +def fromtimestamp(validtime): + """Convert timestamp to validtime.""" + return datetime.fromtimestamp(validtime) + + +def utcfromtimestamp(epochtime): + """Convert timestamp to validtime.""" + return datetime.utcfromtimestamp(epochtime) + + +def isdatetime(obj): + """Check if is a datetime objects.""" + return isinstance(obj, date) + + +def as_datetime_args(year=None, month=None, day=None, hour=0, minute=0, second=0): + """Set datetime object from args.""" + return datetime( + year=year, month=month, day=day, hour=hour, minute=minute, second=second + ) diff --git a/surfex/ecoclimap.py b/surfex/ecoclimap.py new file mode 100644 index 0000000..299f0d9 --- /dev/null +++ b/surfex/ecoclimap.py @@ -0,0 +1,254 @@ +"""Ecoclimap handling.""" +import logging +import os + + +class ExternalSurfexInputFile(object): + """Wrapper around external input data to surfex. + + Can have special treatment for each format. + Uses internally the SystemFilePaths class + """ + + def __init__(self, system_file_paths): + """Construct ExternalSurfexInputFile. + + Args: + system_file_paths (surfex.SystemFilePaths): Match system specific files. + + """ + self.system_file_paths = system_file_paths + + def set_input_data_from_format( + self, + dtype, + fname, + default_dir=None, + check_existence=False, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + system_variables=None, + linkbasename=None, + ): + """Set input data based on format. + + Args: + dtype (_type_): _description_ + fname (_type_): _description_ + default_dir (_type_, optional): _description_. Defaults to None. + check_existence (bool, optional): _description_. Defaults to False. + check_parsing (bool, optional): _description_. Defaults to True. + validtime (_type_, optional): _description_. Defaults to None. + basedtg (_type_, optional): _description_. Defaults to None. + mbr (_type_, optional): _description_. Defaults to None. + tstep (_type_, optional): _description_. Defaults to None. + pert (_type_, optional): _description_. Defaults to None. + var (_type_, optional): _description_. Defaults to None. + system_variables (_type_, optional): _description_. Defaults to None. + linkbasename (_type_, optional): _description_. Defaults to None. + + Returns: + dict: File name mappings. 
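To make the returned mapping concrete, here is a made-up example for a ".dir" input file; the directory and file names are hypothetical:

# Hypothetical illustration of the ".dir"/".hdr" mapping built below.
import os

fname = "sand_fao.dir"
basedir = "/climdata/ecoclimap"
basename = os.path.splitext(os.path.basename(fname))[0]  # -> "sand_fao"
mapping = {
    basename + ".hdr": basedir + "/" + basename + ".hdr",
    basename + ".dir": basedir + "/" + basename + ".dir",
}
# -> {"sand_fao.hdr": "/climdata/ecoclimap/sand_fao.hdr",
#     "sand_fao.dir": "/climdata/ecoclimap/sand_fao.dir"}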
+ + """ + fname_with_path = self.system_file_paths.get_system_file( + dtype, + fname, + default_dir=default_dir, + check_existence=check_existence, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + system_variables=system_variables, + ) + + if fname.endswith(".dir"): + basename = os.path.splitext(os.path.basename(fname))[0] + + basedir = self.system_file_paths.get_system_path( + dtype, + default_dir=default_dir, + check_existence=check_existence, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + logging.debug("%s %s %s", basename, basedir, fname_with_path) + hdr_file = basedir + "/" + basename + ".hdr" + dir_file = basedir + "/" + basename + ".dir" + if linkbasename is None: + linkbasename = basename + return {linkbasename + ".hdr": hdr_file, linkbasename + ".dir": dir_file} + elif fname.endswith(".json"): + return {} + else: + return {fname: fname_with_path} + + +class Ecoclimap(object): + """Ecoclimap.""" + + def __init__(self, config, system_file_paths=None): + """Construct ecoclimap data object. + + Args: + config (surfex.Configuration): Surfex configuration. + system_file_paths (surfex.SystemFilePaths, optional): Mapping of local file structure + to look for inut files. + Defaults to None. + + """ + self.config = config + self.system_file_paths = system_file_paths + self.cover_dir = "ecoclimap_cover_dir" + self.bin_dir = "ecoclimap_bin_dir" + self.ecoclimap_files = [ + "ecoclimapI_covers_param.bin", + "ecoclimapII_af_covers_param.bin", + "ecoclimapII_eu_covers_param.bin", + ] + self.decadal_data_types = None + + def set_input(self, check_existence=True): + """Set input. + + Args: + check_existence (bool, optional): _description_. Defaults to True. + + Raises: + RuntimeError: System file path must be set for this method + + Returns: + dict: File mappings. + + """ + if self.system_file_paths is None: + raise RuntimeError("System file path must be set for this method") + + data = {} + for fname in self.ecoclimap_files: + fname_data = self.system_file_paths.get_system_file( + self.bin_dir, + fname, + default_dir="climdir", + check_existence=check_existence, + ) + data.update({fname: fname_data}) + return data + + def set_bin_files(self, check_existence=True): + """Set bin files. + + Args: + check_existence (bool, optional): Check if files exist. Defaults to True. + + Returns: + dict: File mappings. + + """ + return self.set_input(check_existence=check_existence) + + +class EcoclimapSG(Ecoclimap): + """Ecoclimap SG.""" + + def __init__(self, config, system_file_paths=None, veg_types=20, decades=36): + """Construct ecoclimap SG. + + Args: + config (_type_): _description_ + system_file_paths (_type_, optional): _description_. Defaults to None. + veg_types (int, optional): _description_. Defaults to 20. + decades (int, optional): _description_. Defaults to 36. + + """ + Ecoclimap.__init__(self, config, system_file_paths=system_file_paths) + self.veg_types = veg_types + self.decades = decades + self.cover_file = self.config.get_setting("SURFEX#COVER#SG") + self.cover_dir = "ecoclimap_sg_cover_dir" + self.decadal_data_types = [ + "ALBNIR_SOIL", + "ALBNIR_VEG", + "ALBVIS_SOIL", + "ALBVIS_VEG", + "LAI", + ] + + def set_bin_files(self, check_existence=True): + """set_bin_files not used for SG.""" + + def set_input(self, check_existence=True): + """Set input data. + + Args: + check_existence (bool, optional): Check if files are existing. 
Defaults to True. + + Raises: + RuntimeError: System file path must be set for this method + + Returns: + dict: Mapping of files. + + """ + if self.system_file_paths is None: + raise RuntimeError("System file path must be set for this method") + + data = {} + tree_height_dir = "tree_height_dir" + fname = self.config.get_setting("SURFEX#COVER#H_TREE") + if fname != "" and fname is not None: + ext_data = ExternalSurfexInputFile(self.system_file_paths) + data.update( + ext_data.set_input_data_from_format( + tree_height_dir, fname, check_existence=check_existence + ) + ) + + decadal_data_types = [ + "ALBNIR_SOIL", + "ALBNIR_VEG", + "ALBVIS_SOIL", + "ALBVIS_VEG", + "LAI", + ] + for decadal_data_type in decadal_data_types: + for __ in range(1, self.veg_types + 1): + for decade in range(1, self.decades + 1): + filepattern = self.config.get_setting( + "SURFEX#COVER#" + decadal_data_type, check_parsing=False + ) + fname = self.parse_fnames(filepattern, decade) + dtype = decadal_data_type.lower() + "_dir" + ext_data = ExternalSurfexInputFile(self.system_file_paths) + dat = ext_data.set_input_data_from_format( + dtype, fname, check_existence=check_existence + ) + data.update(dat) + return data + + @staticmethod + def parse_fnames(filepattern, decade): + """Parse file names.""" + filename = filepattern + decade = decade - 1 + mmm = int(decade / 3) + 1 + cmm = f"{mmm:02d}" + cdd = ((decade % 3) * 10) + 5 + cdd = f"{cdd:02d}" + filename = filename.replace("@MM@", str(cmm)) + filename = filename.replace("@CDD@", str(cdd)) + return filename diff --git a/surfex/fa.py b/surfex/fa.py index b409bc2..dbf4a2b 100644 --- a/surfex/fa.py +++ b/surfex/fa.py @@ -1,25 +1,25 @@ """FA support.""" -import numpy as np import logging -import pyproj -import surfex + try: - import epygram # type: ignore + from epygram.formats import resource except ImportError: - epygram = None + resource = None + + +from .geo import ConfProj +from .interpolation import Interpolation class Fa(object): """Fichier Arpege.""" - def __init__(self, fname, debug=False): + def __init__(self, fname): """Construct a FA object. Args: fname (str): filename - debug (bool, optional): _description_. Defaults to False. """ - self.debug = debug self.fname = fname self.projection = None self.lons = None @@ -35,33 +35,37 @@ def field(self, varname, validtime): validtime (_type_): _description_ Raises: - Exception: _description_ - NotImplementedError: _description_ + ModuleNotFoundError: You need epygram to read FA files + NotImplementedError: Geometry not implemented Returns: tuple: np.field, surfex.Geometry """ - if epygram is None: - raise Exception("You need epygram to read FA files") + if resource is None: + raise ModuleNotFoundError("You need epygram to read FA files") else: - resource = epygram.formats.resource(self.fname, openmode='r') - field = resource.readfield(varname) + fa_file = resource(self.fname, openmode="r") + field = fa_file.readfield(varname) # TODO this might not work with forcing... 
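The field() method below, like ConfigurationFromHarmonie earlier, assembles a configuration-projection geometry; for reference, a minimal domain dictionary of the kind handed to ConfProj looks like this (all numbers are made up):

# Minimal, made-up example of the domain dictionary consumed by ConfProj.
from surfex.geo import ConfProj

domain = {
    "nam_conf_proj": {"xlon0": 15.0, "xlat0": 63.0},
    "nam_conf_proj_grid": {
        "xloncen": 15.0,
        "xlatcen": 63.0,
        "nimax": 50,
        "njmax": 60,
        "xdx": 2500.0,
        "xdy": 2500.0,
        "ilone": 0,
        "ilate": 0,
    },
}
geo = ConfProj(domain)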
zone = "CI" crnrs = field.geometry.gimme_corners_ij(subzone=zone) - - range_x = slice(crnrs['ll'][0], crnrs['lr'][0] + 1) - range_y = slice(crnrs['lr'][1], crnrs['ur'][1] + 1) + + range_x = slice(crnrs["ll"][0], crnrs["lr"][0] + 1) + range_y = slice(crnrs["lr"][1], crnrs["ur"][1] + 1) # TODO: check time - logging.info("Not checking validtime for FA variable at the moment: %s", str(validtime)) + logging.info( + "Not checking validtime for FA variable at the moment: %s", str(validtime) + ) - if field.geometry.name == "lambert" or field.geometry.name == "polar_stereographic": + if ( + field.geometry.name == "lambert" + or field.geometry.name == "polar_stereographic" + ): n_y = field.geometry.dimensions["Y_CIzone"] n_x = field.geometry.dimensions["X_CIzone"] - ll_lon, ll_lat = field.geometry.gimme_corners_ll()["ll"] - lon0 = field.geometry.projection['reference_lon'].get('degrees') - lat0 = field.geometry.projection['reference_lat'].get('degrees') + lon0 = field.geometry.projection["reference_lon"].get("degrees") + lat0 = field.geometry.projection["reference_lat"].get("degrees") c0, c1 = field.geometry.getcenter() lonc = c0.get("degrees") latc = c1.get("degrees") @@ -71,10 +75,7 @@ def field(self, varname, validtime): ilate = field.geometry.dimensions["Y"] - n_y domain = { - "nam_conf_proj": { - "xlon0": lon0, - "xlat0": lat0 - }, + "nam_conf_proj": {"xlon0": lon0, "xlat0": lat0}, "nam_conf_proj_grid": { "xloncen": lonc, "xlatcen": latc, @@ -83,10 +84,10 @@ def field(self, varname, validtime): "xdx": d_x, "xdy": d_y, "ilone": ilone, - "ilate": ilate - } + "ilate": ilate, + }, } - geo_out = surfex.geo.ConfProj(domain) + geo_out = ConfProj(domain) if field.geometry.name == "polar_stereographic": data = field.data[range_y, range_x].T else: @@ -100,15 +101,16 @@ def points(self, varname, geo, validtime=None, interpolation="nearest"): Args: varname (str): Variable name - geo (surfex.Geo): Geometry - validtime (datetime.datetime): Validtime - interpolation (str): Interpoaltion method + geo (surfex.geo.Geo): Geometry + validtime (as_datetime): Validtime + interpolation (str, optional): Interpoaltion method. Defaults to "nearest". 
+ Returns: - np.array: vector with inpterpolated values + tuple: field, interpolator """ field, geo_in = self.field(varname, validtime) - interpolator = surfex.interpolation.Interpolation(interpolation, geo_in, geo) + interpolator = Interpolation(interpolation, geo_in, geo) field = interpolator.interpolate(field) return field, interpolator diff --git a/surfex/file.py b/surfex/file.py index 217d101..c4829ad 100644 --- a/surfex/file.py +++ b/surfex/file.py @@ -1,15 +1,19 @@ """Surfex file related stuff.""" -import os -import shutil +import abc import logging +import os import re -import abc -from datetime import timedelta, datetime -# from netCDF4 import Dataset, num2date, chartostring +import shutil + import netCDF4 -import pyproj import numpy as np -import surfex +import pyproj + +from .datetime_utils import as_datetime, as_datetime_args, as_timedelta, isdatetime +from .fa import Fa +from .geo import IGN, ConfProj, LonLatReg, LonLatVal +from .interpolation import Interpolation +from .util import remove_existing_file class SurfexIO(object): @@ -75,7 +79,7 @@ def interpolate_field(field, geo_in, geo_out, interpolation="bilinear"): tuple: (np.array, surfex.Interpolator) """ - interpolator = surfex.interpolation.Interpolation(interpolation, geo_in, geo_out) + interpolator = Interpolation(interpolation, geo_in, geo_out) field = interpolator.interpolate(field) return field, interpolator @@ -87,8 +91,9 @@ class SurfexSurfIO(object): """ - def __init__(self, surfexfile, csurf_filetype, input_file=None, symlink=True, - archive_file=None): + def __init__( + self, surfexfile, csurf_filetype, input_file=None, symlink=True, archive_file=None + ): """Construct the surfex surf file. Args: @@ -117,18 +122,18 @@ def symlink_input(self): if self.input_file is not None: f_out = os.getcwd() + "/" + self.filename logging.debug("input_file: %s file_out: %s", self.input_file, f_out) - surfex.read.remove_existing_file(self.input_file, f_out) + remove_existing_file(self.input_file, f_out) if os.path.abspath(self.input_file) != f_out: - logging.info("Symlink " + self.input_file + " -> " + f_out) + logging.info("Symlink %s -> %s", self.input_file, f_out) os.symlink(self.input_file, f_out) def copy_input(self): """Copy the input file.""" if self.input_file is not None: f_out = os.getcwd() + "/" + self.filename - surfex.read.remove_existing_file(self.input_file, f_out) + remove_existing_file(self.input_file, f_out) if os.path.abspath(self.input_file) != f_out: - logging.info("Copy " + self.input_file + " -> " + f_out) + logging.info("Copy %s -> %s", self.input_file, f_out) shutil.copy2(self.input_file, f_out) def archive_output_file(self): @@ -140,10 +145,8 @@ def archive_output_file(self): if os.path.abspath(self.archive_file) != f_in: logging.info("Move %s to %s", f_in, self.archive_file) if os.path.islink(self.archive_file): - # print("is link") os.unlink(self.archive_file) if os.path.isfile(self.archive_file): - # print("is file") os.remove(self.archive_file) if os.path.isdir(self.archive_file): shutil.rmtree(self.archive_file) @@ -153,8 +156,16 @@ def archive_output_file(self): class PGDFile(SurfexSurfIO): """PGD file.""" - def __init__(self, csurf_filetype, cpgdfile, input_file=None, symlink=True, - archive_file=None, lfagmap=False, masterodb=False): + def __init__( + self, + csurf_filetype, + cpgdfile, + input_file=None, + symlink=True, + archive_file=None, + lfagmap=False, + masterodb=False, + ): """Construct PGD file object. 
Args: @@ -170,20 +181,38 @@ def __init__(self, csurf_filetype, cpgdfile, input_file=None, symlink=True, logging.debug("PGDFile") logging.debug("%s %s %s", cpgdfile, csurf_filetype, masterodb) - cpgdfile = get_surfex_io_object(cpgdfile, filetype="surf", - fileformat=csurf_filetype, - lfagmap=lfagmap, masterodb=masterodb) - - SurfexSurfIO.__init__(self, cpgdfile, csurf_filetype, input_file=input_file, - archive_file=archive_file, symlink=symlink) + cpgdfile = get_surfex_io_object( + cpgdfile, + filetype="surf", + fileformat=csurf_filetype, + lfagmap=lfagmap, + masterodb=masterodb, + ) + + SurfexSurfIO.__init__( + self, + cpgdfile, + csurf_filetype, + input_file=input_file, + archive_file=archive_file, + symlink=symlink, + ) self.need_pgd = False class PREPFile(SurfexSurfIO): """PREP file.""" - def __init__(self, csurf_filetype, cprepfile, input_file=None, symlink=True, - archive_file=None, lfagmap=False, masterodb=False): + def __init__( + self, + csurf_filetype, + cprepfile, + input_file=None, + symlink=True, + archive_file=None, + lfagmap=False, + masterodb=False, + ): """Construct PREP file object. Args: @@ -197,20 +226,37 @@ def __init__(self, csurf_filetype, cprepfile, input_file=None, symlink=True, """ logging.debug("PREPFile %s", input_file) - cprepfile = get_surfex_io_object(cprepfile, filetype="surf", - fileformat=csurf_filetype, - lfagmap=lfagmap, masterodb=masterodb,) - - SurfexSurfIO.__init__(self, cprepfile, csurf_filetype, input_file=input_file, - archive_file=archive_file, symlink=symlink) + cprepfile = get_surfex_io_object( + cprepfile, + filetype="surf", + fileformat=csurf_filetype, + lfagmap=lfagmap, + masterodb=masterodb, + ) + + SurfexSurfIO.__init__( + self, + cprepfile, + csurf_filetype, + input_file=input_file, + archive_file=archive_file, + symlink=symlink, + ) self.need_pgd = True class SURFFile(SurfexSurfIO): """SURFOUT file.""" - def __init__(self, csurf_filetype, csurffile, archive_file=None, input_file=None, - lfagmap=False, masterodb=False): + def __init__( + self, + csurf_filetype, + csurffile, + archive_file=None, + input_file=None, + lfagmap=False, + masterodb=False, + ): """Construct SURFOUT file object. Result of a surfex binary. @@ -219,27 +265,44 @@ def __init__(self, csurf_filetype, csurffile, archive_file=None, input_file=None csurf_filetype (str): File type csurffile (str): Name of the PREP file input_file (str, optional): Input file. Defaults to None. - symlink (bool, optional): Symlink input_file to surfexfile. Defaults to True. archive_file (str, optional): Location to store the result. Defaults to None. lfagmap (bool, optional): File use LFAGMAP. Defaults to False. masterodb (bool, optional): File produced by masterodb. Defaults to False. 
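A hedged usage sketch of these surf-file wrappers; the csurf_filetype value, file names and archive paths below are hypothetical:

# Hypothetical usage of the PGD/PREP/SURFOUT wrappers defined in this module.
from surfex.file import PGDFile, PREPFile, SURFFile

pgd = PGDFile("NC", "PGD", input_file="/archive/PGD.nc")
prep = PREPFile("NC", "PREP", input_file="/archive/PREP.nc")
surfout = SURFFile("NC", "SURFOUT", archive_file="/archive/SURFOUT.nc")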
""" logging.debug("SURFFile") - csurffile = get_surfex_io_object(csurffile, filetype="surf", - fileformat=csurf_filetype, - lfagmap=lfagmap, masterodb=masterodb) - - SurfexSurfIO.__init__(self, csurffile, csurf_filetype, input_file=input_file, - archive_file=archive_file) + csurffile = get_surfex_io_object( + csurffile, + filetype="surf", + fileformat=csurf_filetype, + lfagmap=lfagmap, + masterodb=masterodb, + ) + + SurfexSurfIO.__init__( + self, + csurffile, + csurf_filetype, + input_file=input_file, + archive_file=archive_file, + ) self.need_pgd = True class SurfexFileVariable(object): """Surfex Variable.""" - def __init__(self, varname, validtime=None, patches=1, layers=1, basetime=None, - interval=None, datatype="float"): + def __init__( + self, + varname, + validtime=None, + patches=1, + layers=1, + basetime=None, + interval=None, + datatype="float", + tiletype="FULL", + ): """Construct a surfex file variable. Args: @@ -250,6 +313,7 @@ def __init__(self, varname, validtime=None, patches=1, layers=1, basetime=None, basetime (datetime.datetime, optional): _description_. Defaults to None. interval (int, optional): Interval. Defaults to None. datatype (str, optional): Data type for variable. Defaults to "float". + tiletype (str, optional): Tiletype """ self.varname = varname @@ -259,14 +323,17 @@ def __init__(self, varname, validtime=None, patches=1, layers=1, basetime=None, self.basetime = basetime self.interval = interval self.validtime = validtime + self.datatype = datatype + self.tiletype = tiletype def print_var(self): """Print variable information.""" return self.varname -def get_surfex_io_object(fname, filetype="surf", fileformat=None, geo=None, lfagmap=False, - masterodb=False): +def get_surfex_io_object( + fname, filetype="surf", fileformat=None, geo=None, lfagmap=False, masterodb=False +): """Get the surfexIO object. Args: @@ -277,15 +344,25 @@ def get_surfex_io_object(fname, filetype="surf", fileformat=None, geo=None, lfag lfagmap (bool, optional): File use LFAGMAP. Defaults to False. masterodb (bool, optional): File produced by masterodb. Defaults to False. + Raises: + RuntimeError: Invalid filetype + NotImplementedError: Filetype not implemented + RuntimeError: Format needs a geometry + Returns: SurfexIO: SurfexIO object. 
""" logging.debug("get_surfex_io_object") if filetype is not None: - if filetype.lower() != "surf" and filetype.lower() != "ts" \ - and filetype.lower() != "forcing": - raise Exception("Invalid filetype: " + filetype + " Allowed: surf/ts/forcing") + if ( + filetype.lower() != "surf" + and filetype.lower() != "ts" + and filetype.lower() != "forcing" + ): + raise RuntimeError( + "Invalid filetype: " + filetype + " Allowed: surf/ts/forcing" + ) if fileformat is None: fileformat, filetype = guess_file_format(fname, filetype) @@ -306,28 +383,25 @@ def get_surfex_io_object(fname, filetype="surf", fileformat=None, geo=None, lfag elif fileformat.lower() == "netcdf": if filetype.lower() == "ts": if geo is None: - raise Exception("Format NetCDF needs a geometry") + raise RuntimeError("Format NetCDF needs a geometry") obj = NetCDFSurfexFile(fname, geo) elif filetype.lower() == "forcing": if geo is None: - raise Exception("Format NetCDF needs a geometry for reading forcing files") + raise RuntimeError( + "Format NetCDF needs a geometry for reading forcing files" + ) obj = ForcingFileNetCDF(fname, geo) else: raise NotImplementedError elif fileformat.lower() == "texte": if geo is None: - raise Exception("Format TEXTE needs a geometry") + raise RuntimeError("Format TEXTE needs a geometry") obj = TexteSurfexFile(fname, geo) elif fileformat.lower() == "fa": if filetype.lower() == "surf": obj = FaSurfexFile(fname, geo=geo, lfagmap=lfagmap, masterodb=masterodb) else: raise NotImplementedError - # elif fileformat.lower() == "sfx": - # if filetype.lower() == "surf": - # obj = FaSurfexFile(fname, geo=geo, lfagmap=True, masterodb=masterodb) - # else: - # raise NotImplementedError else: raise NotImplementedError("Format not implemented: " + fileformat) @@ -343,8 +417,8 @@ def guess_file_format(fname, ftype=None): ftype (str, optional): Filetype if known/wished. Defaults to None. Raises: - Exception: _description_ - Exception: _description_ + RuntimeError: Can not-auto decide filetype for files + RuntimeError: Filetype and/or format not set Returns: tuple: fileformat, filetype @@ -365,7 +439,7 @@ def guess_file_format(fname, ftype=None): for needle in needles: if re.search(needle, f_n): ftype = "ts" - for needle in needles: + for __ in needles: if ext.endswith("TXT"): ftype = "ts" needles = ["Forc_.*", "FORCING.*"] @@ -374,8 +448,10 @@ def guess_file_format(fname, ftype=None): ftype = "forcing" if re.search("SURFOUT.*", f_n) and ftype is None: - raise Exception("Can not-auto decide filetype for files called SURFOUT.*.txt. " - + "Specify either surf or ts") + raise RuntimeError( + "Can not-auto decide filetype for files called SURFOUT.*.txt. " + + "Specify either surf or ts" + ) fileformat = None logging.info("Trying to guess the file format from extension: %s", ext) @@ -394,7 +470,9 @@ def guess_file_format(fname, ftype=None): fileformat = "fa" if ftype is None or fileformat is None: - raise Exception("Filetype and/or format not set: " + str(ftype) + " & " + str(fileformat)) + raise RuntimeError( + "Filetype and/or format not set: " + str(ftype) + " & " + str(fileformat) + ) logging.info("Filetype: %s format: %s", ftype, fileformat) return fileformat, ftype @@ -408,6 +486,7 @@ def __init__(self, filename, geo=None): Args: filename (str): Filename + geo(surfex.geo.Geo, optional): Geometry, Defaults to None. 
""" suffix = SurfFileTypeExtension("ASCII").suffix @@ -426,7 +505,7 @@ def get_geo(self): Raises: FileNotFoundError: _description_ - Exception: _description_ + RuntimeError: No grid found NotImplementedError: _description_ Returns: @@ -436,76 +515,95 @@ def get_geo(self): raise FileNotFoundError("File does not exist: " + str(self.filename)) grid = self.read("GRID_TYPE", "FULL", "string") - if len(grid) == 0: - raise Exception("No grid found") + if grid is None: + raise RuntimeError("No grid found") - if grid[0] == "IGN": + if grid == "IGN": domain = { "nam_ign": { - "clambert": self.read("LAMBERT", "&FULL", "integer")[0], + "clambert": self.read("LAMBERT", "&FULL", "integer"), + "npoints": self.read("NPOINTS", "&FULL", "integer"), "xx": self.read("XX", "&FULL", "float"), "xy": self.read("XY", "&FULL", "float"), "xdx": self.read("XDX", "&FULL", "float"), - "xdy": self.read("XY", "&FULL", "float") + "xdy": self.read("XY", "&FULL", "float"), + "xx_llcorner": self.read("XX_LLCORNER", "&FULL", "float"), + "xy_llcorner": self.read("XY_LLCORNER", "&FULL", "float"), + "xcellsize": self.read("XCELLSIZE", "&FULL", "float"), + "ncols": self.read("NCOLS", "&FULL", "integer"), + "nrows": self.read("NROWS", "&FULL", "integer"), } } - return surfex.geo.IGN(domain) + return IGN(domain) - elif grid[0] == "LONLATVAL": + elif grid == "LONLATVAL": domain = { "nam_lonlatval": { "xx": self.read("XX", "&FULL", "float"), "xy": self.read("XY", "&FULL", "float"), "xdx": self.read("DX", "&FULL", "float"), - "xdy": self.read("DY", "&FULL", "float") + "xdy": self.read("DY", "&FULL", "float"), } } - return surfex.geo.LonLatVal(domain) + return LonLatVal(domain) - elif grid[0] == "LONLAT REG": + elif grid == "LONLAT REG": domain = { - "nam_lonlatval_reg": { - "lonmin": self.read("LONMIN", "&FULL", "float")[0], - "latmin": self.read("LATMIN", "&FULL", "float")[0], - "lonmax": self.read("LONMAX", "&FULL", "float")[0], - "latmax": self.read("LATMAX", "&FULL", "float")[0], - "nlon": self.read("NLON", "&FULL", "integer")[0], - "nlat": self.read("NLAT", "&FULL", "integer")[0], - "reg_lon": self.read("REG_LON", "&FULL", "float")[0], - "reg_lat": self.read("REG_LAT", "&FULL", "float")[0] + "nam_lonlat_reg": { + "xlonmin": self.read("LONMIN", "&FULL", "float"), + "xlatmin": self.read("LATMIN", "&FULL", "float"), + "xlonmax": self.read("LONMAX", "&FULL", "float"), + "xlatmax": self.read("LATMAX", "&FULL", "float"), + "nlon": self.read("NLON", "&FULL", "integer"), + "nlat": self.read("NLAT", "&FULL", "integer"), } } - return surfex.geo.LonLatReg(domain) - - elif grid[0] == "CONF PROJ": - lon0 = self.read("LON0", "&FULL", "float")[0] - lat0 = self.read("LAT0", "&FULL", "float")[0] - n_x = self.read("IMAX", "&FULL", "integer")[0] - n_y = self.read("JMAX", "&FULL", "integer")[0] - d_x = self.read("XX", "&FULL", "float")[0] - d_y = self.read("XX", "&FULL", "float")[0] - - ll_lon = self.read("LONORI", "&FULL", "float")[0] - ll_lat = self.read("LATORI", "&FULL", "float")[0] - - earth = 6.37122e+6 - proj_string = f"+proj=lcc +lat_0={str(lat0)} +lon_0={str(lon0)} +lat_1={str(lat0)} " \ - f"+lat_2={str(lat0)} +units=m +no_defs +R={str(earth)}" + return LonLatReg(domain) + + elif grid == "CONF PROJ": + lon0 = self.read("LON0", "&FULL", "float") + lat0 = self.read("LAT0", "&FULL", "float") + n_x = self.read("IMAX", "&FULL", "integer") + n_y = self.read("JMAX", "&FULL", "integer") + d_x = self.read("XX", "&FULL", "float") + d_y = self.read("XX", "&FULL", "float") + if d_x.shape[0] > 1: + d_x = d_x[1] - d_x[0] + if d_y.shape[0] > 1: + d_y = 
d_y[1] - d_y[0] + ll_lon = self.read("LONORI", "&FULL", "float") + ll_lat = self.read("LATORI", "&FULL", "float") + + logging.info( + "lon0=%s lat0=%s n_x=%s, n_y=%s d_x=%s dy=%s, ll_lon=%s, ll_lat=%s", + lon0, + lat0, + n_x, + n_y, + d_x, + d_y, + ll_lon, + ll_lat, + ) + earth = 6.37122e6 + proj_string = ( + f"+proj=lcc +lat_0={str(lat0)} +lon_0={str(lon0)} +lat_1={str(lat0)} " + f"+lat_2={str(lat0)} +units=m +no_defs +R={str(earth)}" + ) proj = pyproj.CRS.from_string(proj_string) wgs84 = pyproj.CRS.from_string("EPSG:4326") - x_0, y_0 = pyproj.Transformer.from_crs(wgs84, proj, - always_xy=True).transform(ll_lon, ll_lat) + x_0, y_0 = pyproj.Transformer.from_crs(wgs84, proj, always_xy=True).transform( + ll_lon, ll_lat + ) x_c = x_0 + 0.5 * (n_x - 1) * d_x y_c = y_0 + 0.5 * (n_y - 1) * d_y - lonc, latc = pyproj.Transformer.from_crs(proj, wgs84, - always_xy=True).transform(x_c, y_c) + lonc, latc = pyproj.Transformer.from_crs( + proj, wgs84, always_xy=True + ).transform(x_c, y_c) domain = { - "nam_conf_proj": { - "xlon0": lon0, - "xlat0": lat0 - }, + "nam_conf_proj": {"xlon0": lon0, "xlat0": lat0}, "nam_conf_proj_grid": { "xloncen": lonc, "xlatcen": latc, @@ -514,11 +612,10 @@ def get_geo(self): "xdx": d_x, "xdy": d_x, "ilone": 0, - "ilate": 0 - } + "ilate": 0, + }, } - # print(domain) - return surfex.geo.ConfProj(domain) + return ConfProj(domain) else: raise NotImplementedError("Grid " + str(grid[0]) + " not implemented!") @@ -526,70 +623,81 @@ def read(self, read_par, read_tile, datatype): """Read the file. Args: - read_par (_type_): _description_ - read_tile (_type_): _description_ - datatype (_type_): _description_ + read_par (str): Parameter to read + read_tile (str): Tile to read + datatype (str): Datatype Raises: - NotImplementedError: _description_ - Exception: _description_ + NotImplementedError: Datatype not implemented + RuntimeError: Could not read datatype Returns: - _type_: _description_ + numpy.array: Values read """ # Add & if not given - if read_tile.find('&') < 0: - read_tile = '&' + read_tile - # print read_tile,read_par + if read_tile.find("&") < 0: + read_tile = "&" + read_tile file = open(self.filename, mode="r", encoding="utf-8") read_desc = False read_value = False values = [] for line in file: - # for line in file.read().splitlines(): - - # print "T:"+line words = line.split() + print(read_value, read_desc) + print(words) if len(words) > 0: - # print "Line:",read_desc,read_value,":",line if read_value and not read_desc: - if words[0].find('&') < 0: - # print "Value:", line + if words[0].find("&") < 0: try: if datatype.lower() == "float": for word in words: val = float(word.replace("D", "E")) - if val == 1e+20: + if val == 1e20: val = np.nan values.append(val) elif datatype.lower() == "string": str_words = [] for word in words: str_words.append(word) - values.append(" ".join(str_words)) - elif datatype.lower() == "integer" or datatype.lower() == "int": + values = " ".join(str_words) + elif ( + datatype.lower() == "integer" or datatype.lower() == "int" + ): for word in words: values.append(int(word)) + elif ( + datatype.lower() == "logical" + or datatype.lower() == "bool" + ): + for word in words: + if word.lower().strip()[0] == "t": + values = True + else: + values = False else: - raise NotImplementedError("Type not implemented " + str(datatype)) + raise NotImplementedError( + "Type not implemented " + str(datatype) + ) except ValueError: - raise Exception(f"Conversion from {str(words)} to {str(datatype)} " - "does not work! 
Try a different datatype!") \ - from ValueError + raise RuntimeError( + f"Conversion from {str(words)} to {str(datatype)} " + "does not work! Try a different datatype!" + ) from ValueError if read_desc: - # print "Description: ", words[0] read_desc = False read_value = True - if words[0].find('&') >= 0: + if words[0].find("&") >= 0: tile = words[0] par = words[1] read_value = False - if tile.strip().lower() == read_tile.lower() \ - and par.lower() == read_par.lower(): + if ( + tile.strip().lower() == read_tile.lower() + and par.lower() == read_par.lower() + ): read_desc = True read_value = False logging.info("Found: %s %s", str(tile), str(par)) @@ -597,14 +705,19 @@ def read(self, read_par, read_tile, datatype): # Description could be empty else: if read_desc: - # print "Description: ", words[0] read_desc = False read_value = True - if len(values) == 0: - logging.info("No values found!") + if isinstance(values, list): + if len(values) == 0: + logging.info("No values found for %s", read_par) + return None - values = np.asarray(values) + if len(values) > 1: + values = np.asarray(values) + else: + values = values[0] + logging.info("Returning values: %s", values) return values def field(self, var, validtime=None): @@ -618,14 +731,13 @@ def field(self, var, validtime=None): np.darray: Field, surfex.Geo in read file """ - # TODO read_par = var.varname - read_tile = "&FULL" - datatype = "float" + read_tile = var.tiletype + datatype = var.datatype field = self.read(read_par, read_tile, datatype) + geo_in = self.get_geo() field = np.reshape(field, [geo_in.nlons, geo_in.nlats], order="F") - # field = np.transpose(field) return field, geo_in def points(self, var, geo_out, validtime=None, interpolation="nearest"): @@ -643,8 +755,9 @@ def points(self, var, geo_out, validtime=None, interpolation="nearest"): """ field, geo_in = self.field(var, validtime=validtime) - points, interpolator = SurfexIO.interpolate_field(field, geo_in, geo_out, - interpolation=interpolation) + points, interpolator = SurfexIO.interpolate_field( + field, geo_in, geo_out, interpolation=interpolation + ) return points, interpolator @@ -672,6 +785,9 @@ def __init__(self, filename, geo=None): def get_geo(self): """Get geometry in file. 
+ Raises: + NotImplementedError: Grid not implemented + Returns: surfex.Geometry: Surfex geometry in file @@ -682,35 +798,35 @@ def get_geo(self): logging.debug("Geo not open %s as a netCDF file", self.filename) return None cgrid = str(netCDF4.chartostring(f_h["GRID_TYPE"][:])).strip() - # print(":" + cgrid + ":") if cgrid == "CONF PROJ": - lon0 = f_h["LON0"][:] - lat0 = f_h["LAT0"][:] + lon0 = float(f_h["LON0"][0]) + lat0 = float(f_h["LAT0"][0]) n_x = int(f_h["IMAX"][0]) n_y = int(f_h["JMAX"][0]) d_x = float(f_h["DX"][0][0]) d_y = float(f_h["DY"][0][0]) - ll_lon = f_h["LONORI"][:] - ll_lat = f_h["LATORI"][:] - earth = 6.37122e+6 - proj_string = f"+proj=lcc +lat_0={str(lat0)} +lon_0={str(lon0)} +lat_1={str(lat0)} " \ - f"+lat_2={str(lat0)} +units=m +no_defs +R={str(earth)}" + ll_lon = float(f_h["LONORI"][0]) + ll_lat = float(f_h["LATORI"][0]) + earth = 6.37122e6 + proj_string = ( + f"+proj=lcc +lat_0={str(lat0)} +lon_0={str(lon0)} +lat_1={str(lat0)} " + f"+lat_2={str(lat0)} +units=m +no_defs +R={str(earth)}" + ) proj = pyproj.CRS.from_string(proj_string) wgs84 = pyproj.CRS.from_string("EPSG:4326") - x_0, y_0 = pyproj.Transformer.from_crs(wgs84, proj, - always_xy=True).transform(ll_lon, ll_lat) + x_0, y_0 = pyproj.Transformer.from_crs(wgs84, proj, always_xy=True).transform( + ll_lon, ll_lat + ) x_c = x_0 + 0.5 * (n_x + 1) * d_x y_c = y_0 + 0.5 * (n_y + 1) * d_y - lonc, latc = pyproj.Transformer.from_crs(proj, wgs84, - always_xy=True).transform(x_c, y_c) + lonc, latc = pyproj.Transformer.from_crs( + proj, wgs84, always_xy=True + ).transform(x_c, y_c) domain = { - "nam_conf_proj": { - "xlon0": lon0, - "xlat0": lat0 - }, + "nam_conf_proj": {"xlon0": lon0, "xlat0": lat0}, "nam_conf_proj_grid": { "xloncen": lonc, "xlatcen": latc, @@ -719,10 +835,10 @@ def get_geo(self): "xdx": d_x, "xdy": d_y, "ilone": 0, - "ilate": 0 - } + "ilate": 0, + }, } - return surfex.geo.ConfProj(domain) + return ConfProj(domain) elif cgrid == "IGN": domain = { "nam_ign": { @@ -730,10 +846,10 @@ def get_geo(self): "xx": f_h["XX"][:], "xy": f_h["XY"][:], "xdx": f_h["DX"][:], - "xdy": f_h["DY"][:] + "xdy": f_h["DY"][:], } } - return surfex.geo.IGN(domain) + return IGN(domain) elif cgrid == "LONLATVAL": domain = { @@ -741,10 +857,10 @@ def get_geo(self): "xx": f_h["XX"][:], "xy": f_h["XY"][:], "xdx": f_h["DX"][:], - "xdy": f_h["DY"][:] + "xdy": f_h["DY"][:], } } - return surfex.geo.LonLatVal(domain) + return LonLatVal(domain) elif cgrid == "LONLAT REG": domain = { @@ -759,7 +875,7 @@ def get_geo(self): "reg_lat": f_h["REG_LAT"][0], } } - return surfex.geo.LonLatReg(domain) + return LonLatReg(domain) else: raise NotImplementedError(cgrid + " is not implemented") @@ -771,6 +887,9 @@ def field(self, var, validtime=None): validtime (datetime.datetime, optional): Valid time. Defaults to None. validtime (_type_, optional): _description_. Defaults to None. 
+ Raises: + RuntimeError: Mismatch in times in file and the wanted time + Returns: np.darray: Field, surfex.Geo in read file @@ -779,7 +898,8 @@ def field(self, var, validtime=None): if validtime is None: pass else: - if hasattr(f_h, "DTCUR-YEAR"): + time_in_file = None + try: year = f_h["DTCUR-YEAR"][0] month = f_h["DTCUR-MONTH"][0] day = f_h["DTCUR-DAY"][0] @@ -787,25 +907,24 @@ def field(self, var, validtime=None): hour = int(time / 3600) # TODO minutes - time_in_file = datetime(year=year, month=month, day=day, hour=hour) + time_in_file = as_datetime_args( + year=year, month=month, day=day, hour=hour + ) + except IndexError: + logging.warning("Could not checking time") + + if time_in_file is not None: if validtime != time_in_file: logging.error("%s %s", time_in_file, validtime) - raise Exception("Mismatch in times in file and the wanted time") - else: - print("Not checking time") + raise RuntimeError("Mismatch in times in file and the wanted time") geo_in = self.get_geo() field = f_h[var.varname][:] - # print(fh[var.varname]) fillvalue = f_h[var.varname].getncattr("_FillValue") - # if np.any(np.isnan(field)): logging.info("Set %s to nan", fillvalue) field = field.filled(np.nan) - # print(field) # Reshape to fortran 2D style - # field = np.reshape(field, [geo_in.nlons, geo_in.nlats], order="F") - # Does not work wih interpolator field = np.transpose(field) return field, geo_in @@ -823,8 +942,9 @@ def points(self, var, geo_out, validtime=None, interpolation="nearest"): """ field, geo_in = self.field(var, validtime=validtime) - points, interpolator = SurfexIO.interpolate_field(field, geo_in, geo_out, - interpolation=interpolation) + points, interpolator = SurfexIO.interpolate_field( + field, geo_in, geo_out, interpolation=interpolation + ) return points, interpolator @@ -849,41 +969,34 @@ def __init__(self, filename, geo=None, masterodb=False, lfagmap=True): if not filename.endswith(extension_suffix): filename = filename + extension_suffix - # if geo is None: - # geo = self.get_geo() - SurfexIO.__init__(self, filename, geo, extension) self.lfagmap = lfagmap - # def get_geo(self): - # # TODO read geo from SURFEX FA file - # # geo = None - # return None - def field(self, var, validtime=None): """Read field from FA file. Args: var (SurfexFileVariable): Variable in surfex file. validtime (datetime.datetime, optional): Valid time. Defaults to None. - validtime (_type_, optional): _description_. Defaults to None. 
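The order="F" reshapes used here and in the ASCII reader lay flat fields out in Fortran (column-major) order; a tiny numpy illustration with made-up dimensions:

# Tiny numpy illustration of the order="F" (column-major) reshape used when
# mapping flat SURFEX fields onto an (nlons, nlats) grid.
import numpy as np

nlons, nlats = 3, 2
flat = np.arange(nlons * nlats)                 # [0 1 2 3 4 5]
field = np.reshape(flat, [nlons, nlats], order="F")
# Column-major: the first nlons values fill the first column.
# field == [[0 3]
#           [1 4]
#           [2 5]]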
+ + Raises: + RuntimeError: validtime must be a datetime object Returns: np.darray: Field, surfex.Geo in read file """ - file_handler = surfex.fa.Fa(self.filename) + file_handler = Fa(self.filename) if validtime is None: pass - elif not isinstance(validtime, datetime): - raise Exception("validime must be a datetime object") + elif isdatetime(validtime): + raise RuntimeError("validtime must be a datetime object") - geo_in = self.geo - field = file_handler.field(var.varname, validtime) + field, geo_in = file_handler.field(var.varname, validtime) # Reshape to fortran 2D style + logging.debug("field=%s, field.shape=%s", field, field.shape) field = np.reshape(field, [geo_in.nlons, geo_in.nlats], order="F") - field = np.transpose(field) return field, geo_in def points(self, var, geo_out, validtime=None, interpolation="nearest"): @@ -901,8 +1014,9 @@ def points(self, var, geo_out, validtime=None, interpolation="nearest"): """ field, geo_in = self.field(var, validtime=validtime) - points, interpolator = SurfexIO.interpolate_field(field, geo_in, geo_out, - interpolation=interpolation) + points, interpolator = SurfexIO.interpolate_field( + field, geo_in, geo_out, interpolation=interpolation + ) return points, interpolator @@ -960,6 +1074,14 @@ def read(self, var, times): var (SurfexFileVariable): Variable in surfex file. times (list): List of datetime.datetime objects + Raises: + ValueError: times must be list or tuple + ValueError: patches must be list or tuple + ValueError: patches must be list or tuple + RuntimeError: Variable not found! + NotImplementedError: Dimension not implemented + RuntimeError: Mismatch in points + Returns: (tuple): (np.array. surfex.Geometry) @@ -968,11 +1090,11 @@ def read(self, var, times): patches = var.patches if not isinstance(times, (list, tuple)): - raise Exception("times must be list or tuple") + raise ValueError("times must be list or tuple") if not isinstance(layers, (list, tuple)): - raise Exception("patches must be list or tuple") + raise ValueError("patches must be list or tuple") if not isinstance(patches, (list, tuple)): - raise Exception("patches must be list or tuple") + raise ValueError("patches must be list or tuple") values = np.array([]) times_read = [] @@ -981,30 +1103,28 @@ def read(self, var, times): mapping = {} npatch = 1 - if self.file_handler.variables[var].shape[0] > 0: - # p rint self.fh.variables[var] - for dim in self.file_handler.variables[var].dimensions: - # print dim,ndims - dimlen = self.file_handler.variables[var].shape[ndims] + if self.file_handler.variables[var.varname].shape[0] > 0: + for dim in self.file_handler.variables[var.varname].dimensions: + dimlen = self.file_handler.variables[var.varname].shape[ndims] this_dim = [] if dim == "time": mapping[0] = ndims - times_for_var = self.file_handler.variables['time'] + times_for_var = self.file_handler.variables["time"] units = times_for_var.units try: t_cal = times_for_var.calendar except AttributeError: # Attribute doesn't exist - t_cal = u"gregorian" # or standard + t_cal = "gregorian" # or standard indices = list(range(0, dimlen)) - times_for_var = netCDF4.num2date(times_for_var[indices], units=units, - calendar=t_cal) + times_for_var = netCDF4.num2date( + times_for_var[indices], units=units, calendar=t_cal + ) if len(times) > 0: for t_to_find, t_to_find_val in enumerate(times): for tstep in range(0, len(indices)): if times_for_var[tstep] == t_to_find_val: - # print t, t_to_find, times_for_var[t], times[t_to_find] this_dim.append(tstep) times_read.append(times[t_to_find]) else: @@ 
-1020,7 +1140,7 @@ def read(self, var, times): elif dim == "yy": mapping[2] = ndims this_dim = list(range(0, dimlen)) - elif dim == "Number_of_Tile": + elif dim == "Number_of_Patches": mapping[3] = ndims npatch = dimlen if len(patches) > 0: @@ -1032,7 +1152,6 @@ def read(self, var, times): mapping[4] = ndims npatch = dimlen if len(layers) > 0: - # nlayers = len(layers) this_dim = layers else: this_dim = list(range(0, dimlen)) @@ -1048,15 +1167,15 @@ def read(self, var, times): dim_indices.append(this_dim) ndims = ndims + 1 - field = self.file_handler.variables[var][dim_indices] + print(dim_indices) + field = self.file_handler.variables[var.varname][dim_indices] # Add extra dimensions - # print mapping i = 0 reverse_mapping = [] for dim in range(0, 5): if dim not in mapping: - # print "Adding dimension " + str(d) + logging.debug("Adding dimension %s", dim) field = np.expand_dims(field, len(dim_indices) + i) reverse_mapping.append(len(dim_indices) + i) i = i + 1 @@ -1064,8 +1183,6 @@ def read(self, var, times): reverse_mapping.append(mapping[dim]) # Transpose to 5D array - # print "Transpose to 5D array" - # print reverse_mapping field = np.transpose(field, reverse_mapping) npoints = self.geo.npoints * npatch @@ -1073,8 +1190,7 @@ def read(self, var, times): for tstep in range(0, field.shape[0]): field2d = np.empty(npoints) i = 0 - # print t,npatch,npoints,field.shape,field2d.shape - for patch, in range(0, npatch): + for (patch,) in range(0, npatch): if self.geo.mask is not None: iii = 0 j = 0 @@ -1082,9 +1198,9 @@ def read(self, var, times): # dimensions than the PGD dimension and mask needs x first. for xxx in range(-1, field.shape[1] + 1): for yyy in range(-1, field.shape[2] + 1): - if xxx in range(0, field.shape[1]) and \ - yyy in range(0, field.shape[2]): - # print i, ii,j, t, x, y, p, self.geo.mask[j] + if xxx in range(0, field.shape[1]) and yyy in range( + 0, field.shape[2] + ): if self.geo.mask[j] == iii: field2d[i] = np.nan if field[tstep, xxx, yyy, patch] != np.nan: @@ -1100,14 +1216,16 @@ def read(self, var, times): field2d[i] = field[tstep, xxx, yyy, patch] i = i + 1 if i != npoints: - raise Exception("Mismatch in points " + str(i) + "!=" + str(npoints)) + raise RuntimeError( + "Mismatch in points " + str(i) + "!=" + str(npoints) + ) values = np.append(values, field2d) # Re-shape to proper format values = np.reshape(values, [field.shape[0], npoints]) else: - raise Exception("Variable " + var + " not found!") + raise RuntimeError("Variable " + var.varname + " not found!") return values, self.geo @@ -1147,8 +1265,9 @@ def points(self, var, geo_out, validtime=None, interpolation="nearest"): """ field, geo_in = self.field(var, validtime=validtime) - points, interpolator = SurfexIO.interpolate_field(field, geo_in, geo_out, - interpolation=interpolation) + points, interpolator = SurfexIO.interpolate_field( + field, geo_in, geo_out, interpolation=interpolation + ) return points, interpolator @@ -1170,14 +1289,14 @@ def read(self, variable, times): """Read file. Args: - var (SurfexFileVariable): Variable in surfex file. + variable (SurfexFileVariable): Variable in surfex file. times (list): List of datetime.datetime to read. 
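The TEXTE reader below stamps each completed row of values with base_time plus tstep * interval seconds, with tstep now starting at 0; a small stand-alone illustration (values are made up):

# Stand-alone illustration of the TEXTE time stepping: each full row of
# npoints values is valid at base_time + tstep * interval, starting at tstep 0.
from datetime import datetime, timedelta

base_time = datetime(2023, 4, 25, 0)
interval = 3600  # seconds between rows in the TEXTE file
for tstep in range(3):
    validtime = base_time + timedelta(seconds=tstep * interval)
    print(tstep, validtime)
# 0 2023-04-25 00:00:00
# 1 2023-04-25 01:00:00
# 2 2023-04-25 02:00:00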
Raises: - Exception: _description_ - Exception: _description_ - Exception: _description_ - Exception: _description_ + RuntimeError: Basetime must be set for TEXTE + RuntimeError: Interval must be set for TEXTE + RuntimeError: times must be list or tuple + RuntimeError: Dimension of domain does not match end of line Returns: tuple: (np.array, surfex.Geometry) @@ -1190,19 +1309,19 @@ def read(self, variable, times): npatch = variable.patches if base_time is None: - raise Exception("Basetime must be set for TEXTE") + raise RuntimeError("Basetime must be set for TEXTE") if interval is None: - raise Exception("Interval must be set for TEXTE") + raise RuntimeError("Interval must be set for TEXTE") if not isinstance(times, (list, tuple)): - raise Exception("times must be list or tuple") + raise RuntimeError("times must be list or tuple") values = np.array([]) times_read = np.array([]) end_of_line = self.geo.npoints * npatch this_time = np.empty(self.geo.npoints * npatch) - tstep = 1 + tstep = 0 col = 0 for line in self.file.read().splitlines(): @@ -1210,33 +1329,40 @@ def read(self, variable, times): if len(words) > 0: for i, word in enumerate(words): val = float(word.replace("D", "E")) - if val == 1e+20: + if val == 1e20: val = np.nan this_time[col] = val col = col + 1 if col == end_of_line: - - if times is None or (base_time - + timedelta(seconds=(tstep * interval))) in times: + if ( + times is None + or (base_time + as_timedelta(seconds=(tstep * interval))) + in times + ): values = np.append(values, this_time) - times_read = np.append(times_read, base_time - + timedelta(seconds=(tstep * interval))) - # print i, col, base_time + timedelta(seconds=(t * interval)), this_time + times_read = np.append( + times_read, + base_time + as_timedelta(seconds=(tstep * interval)), + ) tstep = tstep + 1 col = 0 this_time[:] = np.nan if i != len(words) - 1: - raise Exception("Dimension of domain does not match end of line! " - + str(i) + " != " + str(len(words) - 1)) + raise RuntimeError( + "Dimension of domain does not match end of line! 
" + + str(i) + + " != " + + str(len(words) - 1) + ) if times_read.shape[0] > 0: + values = np.asarray(values) values = np.reshape(values, [times_read.shape[0], this_time.shape[0]]) else: logging.info("No data found!") - # print values.shape self.file.close() return values, self.geo @@ -1251,9 +1377,7 @@ def field(self, var, validtime=None): tuple: (np.array, surfex.Geometry) """ - if validtime is None: - raise Exception("You must set times to read forcing data") - else: + if validtime is not None: validtime = [validtime] field, geo_in = self.read(var, validtime) @@ -1275,8 +1399,9 @@ def points(self, var, geo_out, validtime=None, interpolation="nearest"): """ field, geo_in = self.field(var, validtime=validtime) - points, interpolator = SurfexIO.interpolate_field(field, geo_in, geo_out, - interpolation=interpolation) + points, interpolator = SurfexIO.interpolate_field( + field, geo_in, geo_out, interpolation=interpolation + ) return points, interpolator @@ -1295,8 +1420,6 @@ def __init__(self, fname, geo): self.file_handler = netCDF4.Dataset(fname, "r") self.lons = self.file_handler.variables["LON"] self.lats = self.file_handler.variables["LAT"] - # self.n_x = self.lons.shape[0] - # self.n_y = self.lats.shape[0] SurfexIO.__init__(self, fname, geo, "nc") def read_field(self, variable, times): @@ -1307,12 +1430,12 @@ def read_field(self, variable, times): times (_type_): _description_ Raises: - Exception: _description_ - Exception: _description_ - Exception: _description_ + RuntimeError: You must set time! + RuntimeError: No points found + RuntimeError: Valid time not found in file! Returns: - _type_: _description_ + tuple: field, geo """ var = variable.varname @@ -1324,7 +1447,7 @@ def read_field(self, variable, times): field = self.file_handler.variables[var][0:dimlen] else: if len(times) == 0: - raise Exception("You must set time!") + raise RuntimeError("You must set time!") times_read = [] ndims = 0 @@ -1333,23 +1456,21 @@ def read_field(self, variable, times): dimlen = self.file_handler.variables[var].shape[ndims] if dim == "time": - times_for_var = self.file_handler.variables['time'] + times_for_var = self.file_handler.variables["time"] units = times_for_var.units try: t_cal = times_for_var.calendar except AttributeError: # Attribute doesn't exist - t_cal = u"gregorian" # or standard + t_cal = "gregorian" # or standard indices = list(range(0, dimlen)) - times_for_var = netCDF4.num2date(times_for_var[indices], units=units, - calendar=t_cal) - # print(times_for_var) + times_for_var = netCDF4.num2date( + times_for_var[indices], units=units, calendar=t_cal + ) for times_to_read_val in times: - # print(times_to_read, times[times_to_read]) for tstep, times_for_var_val in enumerate(times_for_var): - # print(t, times_for_var[t], times[times_to_read]) test_time = times_for_var_val.strftime("%Y%m%d%H") - test_time = datetime.strptime(test_time, "%Y%m%d%H") + test_time = as_datetime(test_time) if test_time == times_to_read_val: times_read.append(tstep) logging.debug("%s %s", tstep, times_to_read_val) @@ -1359,13 +1480,13 @@ def read_field(self, variable, times): ndims = ndims + 1 if npoints == 0: - raise Exception("No points found") + raise RuntimeError("No points found") if len(times_read) == 0 and len(times) > 0: logging.error("%s", times) - raise Exception("Valid time not found in file!") + raise RuntimeError("Valid time not found in file!") - field = self.file_handler.variables[var][times_read, 0: npoints] + field = self.file_handler.variables[var][times_read, 0:npoints] else: 
logging.warning("Variable %s not found!", var) return field, self.geo @@ -1375,18 +1496,14 @@ def field(self, var, validtime=None): Args: var (_type_): _description_ - validtime (_type_, optional): _description_. Defaults to None. - - Raises: - Exception: _description_ + validtime (list, optional): Validtime. Defaults to None. Returns: - _type_: _description_ + tuple: field, geo """ if validtime is None: validtime = [] - # raise Exception("You must set times to read forcing data") else: validtime = [validtime] @@ -1411,14 +1528,26 @@ def points(self, var, geo_out, validtime=None, interpolation=None): """ field, geo_in = self.field(var, validtime=validtime) - points, interpolator = SurfexIO.interpolate_field(field, geo_in, geo_out, - interpolation=interpolation) - + points, interpolator = SurfexIO.interpolate_field( + field, geo_in, geo_out, interpolation=interpolation + ) return points, interpolator -def read_surfex_field(varname, filename, validtime=None, basetime=None, patches=-1, layers=-1, - fileformat=None, filetype=None, geo=None, datatype=None, interval=None): +def read_surfex_field( + varname, + filename, + validtime=None, + basetime=None, + patches=-1, + layers=-1, + fileformat=None, + filetype=None, + geo=None, + datatype=None, + interval=None, + tiletype="FULL", +): """Read surfex field. Args: @@ -1433,41 +1562,65 @@ def read_surfex_field(varname, filename, validtime=None, basetime=None, patches= geo (_type_, optional): _description_. Defaults to None. datatype (_type_, optional): _description_. Defaults to None. interval (_type_, optional): _description_. Defaults to None. + tiletype(str, optional): Tiletype. Defaults to "FULL". Raises: - NotImplementedError: _description_ - Exception: _description_ + RuntimeError: Not implemented and geo is None + RuntimeError: You need to provide a geo object. Returns: - _type_: _description_ + field (np.ndarray): Field """ if fileformat is None: - fileformat, filetype = surfex.file.guess_file_format(filename, filetype) + fileformat, filetype = guess_file_format(filename, filetype) if filetype == "surf": if fileformat.lower() == "ascii": - geo = surfex.file.AsciiSurfexFile(filename).geo + geo = AsciiSurfexFile(filename).geo elif fileformat.lower() == "nc": - geo = surfex.file.NCSurfexFile(filename).geo + geo = NCSurfexFile(filename).geo else: if geo is None: - raise NotImplementedError("Not implemnted and geo is None") + raise RuntimeError("Not implemented and geo is None") elif geo is None: - raise Exception("You need to provide a geo object. Filetype is: " + str(filetype)) - - sfx_io = surfex.file.get_surfex_io_object(filename, filetype=filetype, fileformat=fileformat, - geo=geo) - var = surfex.file.SurfexFileVariable(varname, validtime=validtime, patches=patches, - layers=layers, basetime=basetime, interval=interval, - datatype=datatype) + raise RuntimeError( + "You need to provide a geo object. 
Filetype is: " + str(filetype) + ) + + sfx_io = get_surfex_io_object( + filename, filetype=filetype, fileformat=fileformat, geo=geo + ) + var = SurfexFileVariable( + varname, + validtime=validtime, + patches=patches, + layers=layers, + basetime=basetime, + interval=interval, + datatype=datatype, + tiletype=tiletype, + ) field, __ = sfx_io.field(var, validtime=validtime) return field -def read_surfex_points(varname, filename, geo_out, validtime=None, basetime=None, patches=-1, - layers=-1, fileformat=None, filetype=None, geo=None, datatype=None, - interval=None, interpolation="nearest"): +def read_surfex_points( + varname, + filename, + geo_out, + validtime=None, + basetime=None, + patches=-1, + layers=-1, + fileformat=None, + filetype=None, + geo=None, + datatype=None, + interval=None, + interpolation="nearest", + tiletype="FULL", +): """Read surfex points. Args: @@ -1484,74 +1637,48 @@ def read_surfex_points(varname, filename, geo_out, validtime=None, basetime=None datatype (str, optional): Data type. Defaults to None. interval (int, optional): Interval between times. Defaults to None. interpolation (str, optional): Interpolation method. Defaults to "nearest". + tiletype(str, optional): Tiletype. Defaults to "FULL". Raises: NotImplementedError: _description_ - Exception: _description_ + RuntimeError: _description_ Returns: np.darray: Field """ if fileformat is None: - fileformat, filetype = surfex.file.guess_file_format(filename, filetype) + fileformat, filetype = guess_file_format(filename, filetype) if filetype == "surf": if fileformat.lower() == "ascii": - geo = surfex.file.AsciiSurfexFile(filename).geo + geo = AsciiSurfexFile(filename).geo elif fileformat.lower() == "nc": - geo = surfex.file.NCSurfexFile(filename).geo + geo = NCSurfexFile(filename).geo else: if geo is None: - raise NotImplementedError(f"{fileformat} is not implemented and geo is None") + raise NotImplementedError( + f"{fileformat} is not implemented and geo is None" + ) elif geo is None: - raise Exception("You need to provide a geo object. Filetype is: " + str(filetype)) - - sfx_io = surfex.file.get_surfex_io_object(filename, filetype=filetype, fileformat=fileformat, - geo=geo) - var = surfex.file.SurfexFileVariable(varname, validtime=validtime, patches=patches, - layers=layers, basetime=basetime, interval=interval, - datatype=datatype) - field, geo_out = sfx_io.points(var, geo_out, validtime=validtime, interpolation=interpolation) + raise RuntimeError( + "You need to provide a geo object. Filetype is: " + str(filetype) + ) + + sfx_io = get_surfex_io_object( + filename, filetype=filetype, fileformat=fileformat, geo=geo + ) + var = SurfexFileVariable( + varname, + validtime=validtime, + patches=patches, + layers=layers, + basetime=basetime, + interval=interval, + datatype=datatype, + tiletype=tiletype, + ) + field, geo_out = sfx_io.points( + var, geo_out, validtime=validtime, interpolation=interpolation + ) return field - - -def parse_filepattern(file_pattern, basetime, validtime): - """Parse the file pattern. - - Args: - file_pattern (str): File pattern. - basetime (datetime.datetime): Base time. - validtime (datetime.datetime): Valid time. 
- - Returns: - str: File name - - """ - if basetime is None or validtime is None: - return file_pattern - - logging.debug("file_pattern=%s basetime=%s validtime=%s", file_pattern, basetime, validtime) - file_name = str(file_pattern) - year = basetime.strftime('%Y') - year2 = basetime.strftime('%y') - month = basetime.strftime('%m') - day = basetime.strftime('%d') - hour = basetime.strftime('%H') - mins = basetime.strftime('%M') - d_t = validtime - basetime - ll_d = f"{int(d_t.seconds / 3600):d}" - ll_2 = f"{int(d_t.seconds / 3600):02d}" - ll_3 = f"{int(d_t.seconds / 3600):03d}" - ll_4 = f"{int(d_t.seconds / 3600):04d}" - file_name = file_name.replace('@YYYY@', year) - file_name = file_name.replace('@YY@', year2) - file_name = file_name.replace('@MM@', month) - file_name = file_name.replace('@DD@', day) - file_name = file_name.replace('@HH@', hour) - file_name = file_name.replace('@mm@', mins) - file_name = file_name.replace('@L@', ll_d) - file_name = file_name.replace('@LL@', ll_2) - file_name = file_name.replace('@LLL@', ll_3) - file_name = file_name.replace('@LLLL@', ll_4) - return file_name diff --git a/surfex/forcing.py b/surfex/forcing.py index 8882ca1..433b6f1 100644 --- a/surfex/forcing.py +++ b/surfex/forcing.py @@ -1,16 +1,23 @@ """Forcing.""" import abc -import time import copy -import shutil -import os import json -from datetime import datetime, timedelta import logging +import os +import shutil +import time + import netCDF4 import numpy as np import toml -import surfex + +from .cache import Cache +from .configuration import ConfigurationFromHarmonie +from .datetime_utils import as_datetime, as_timedelta +from .file import ForcingFileNetCDF +from .geo import get_geo_object +from .read import ConstantValue, ConvertedInput, Converter +from .util import deep_update # TODO: should be abstract? @@ -27,7 +34,7 @@ class SurfexNetCDFForcing(SurfexForcing): def __init__(self, filename, geo): """Construct netcdf forcing.""" SurfexForcing.__init__(self) - self.io_object = surfex.ForcingFileNetCDF(filename, geo) + self.io_object = ForcingFileNetCDF(filename, geo) class SurfexOutputForcing(object): @@ -57,8 +64,10 @@ def __init__(self, base_time, geo, ntimes, var_objs, time_step_intervall): def _check_sanity(self): if len(self.var_objs) != self.nparameters: - raise Exception(f"Inconsistent number of parameter. {str(len(self.var_objs))} != " - f"{str(self.nparameters)}") + raise Exception( + f"Inconsistent number of parameter. {str(len(self.var_objs))} != " + f"{str(self.nparameters)}" + ) # Check if all parameters are present for var_obj in self.var_objs: @@ -87,7 +96,7 @@ def _check_sanity(self): @abc.abstractmethod def write_forcing(self, var_objs, this_time, cache): """Write forcing.""" - raise NotImplementedError('users must define writeForcing to use this base class') + raise NotImplementedError("users must define writeForcing to use this base class") class NetCDFOutput(SurfexOutputForcing): @@ -109,8 +118,19 @@ class NetCDFOutput(SurfexOutputForcing): "CO2": "CO2air", } - def __init__(self, base_time, geo, fname, ntimes, var_objs, att_objs, att_time, cache, - time_step, fmt="netcdf"): + def __init__( + self, + base_time, + geo, + fname, + ntimes, + var_objs, + att_objs, + att_time, + cache, + time_step, + fmt="netcdf", + ): """Construct netcdf forcing. Args: @@ -125,6 +145,9 @@ def __init__(self, base_time, geo, fname, ntimes, var_objs, att_objs, att_time, time_step (_type_): _description_ fmt (str, optional): _description_. Defaults to "netcdf". 
+ Raises: + NotImplementedError: NotImplementedError + """ SurfexOutputForcing.__init__(self, base_time, geo, ntimes, var_objs, time_step) if fmt == "netcdf": @@ -139,7 +162,9 @@ def __init__(self, base_time, geo, fname, ntimes, var_objs, att_objs, att_time, fname = "FORCING.nc" self.fname = fname self.tmp_fname = self.fname + ".tmp" - self.file_handler = netCDF4.Dataset(self.tmp_fname, 'w', format=self.output_format) + self.file_handler = netCDF4.Dataset( + self.tmp_fname, "w", format=self.output_format + ) self._define_forcing(geo, att_objs, att_time, cache) def write_forcing(self, var_objs, this_time, cache): @@ -162,7 +187,7 @@ def write_forcing(self, var_objs, this_time, cache): logging.info("Preparation took %s seconds", str(toc - tic)) self.forcing_file[self.translation[this_var]][self.time_step, :] = field - self.forcing_file['TIME'][self.time_step] = self.time_step_value + self.forcing_file["TIME"][self.time_step] = self.time_step_value def _define_forcing(self, geo, att_objs, att_time, cache): logging.info("Define netcdf forcing") @@ -175,7 +200,9 @@ def _define_forcing(self, geo, att_objs, att_time, cache): logging.info("Define: %s", this_obj.var_name) if this_var == "ZS": zs_oro = this_obj.read_time_step(att_time, cache) - zs_oro = zs_oro.reshape([self.geo.nlats, self.geo.nlons], order="F").flatten() + zs_oro = zs_oro.reshape( + [self.geo.nlats, self.geo.nlons], order="F" + ).flatten() elif this_var == "ZREF": zref = this_obj.read_time_step(att_time, cache) zref = zref.reshape([self.geo.nlats, self.geo.nlons], order="F").flatten() @@ -184,102 +211,187 @@ def _define_forcing(self, geo, att_objs, att_time, cache): uref = uref.reshape([self.geo.nlats, self.geo.nlons], order="F").flatten() # DIMS - self.forcing_file['NPOINTS'] = self.file_handler.createDimension("Number_of_points", - geo.npoints) - self.forcing_file['NTIMES'] = self.file_handler.createDimension("time", self.ntimes) + self.forcing_file["NPOINTS"] = self.file_handler.createDimension( + "Number_of_points", geo.npoints + ) + self.forcing_file["NTIMES"] = self.file_handler.createDimension( + "time", self.ntimes + ) # DEFINE VARS - self.forcing_file['TIME'] = self.file_handler.createVariable("time", "f4", ("time",)) - self.forcing_file['TIME'].units = "hours since " \ - + f"{self.base_time.strftime('%Y-%m-%d %H')}:00:00 0:00" - self.forcing_file['TSTEP'] = self.file_handler.createVariable("FRC_TIME_STP", "f4") - self.forcing_file['TSTEP'].longname = "Forcing_Time_Step" - self.forcing_file['TSTEP'][:] = self.time_step_intervall - self.forcing_file['LON'] = self.file_handler.createVariable( - "LON", "f4", ("Number_of_points",)) - self.forcing_file['LON'].longname = "Longitude" - self.forcing_file['LON'][:] = geo.lonlist - self.forcing_file['LAT'] = self.file_handler.createVariable( - "LAT", "f4", ("Number_of_points",)) - self.forcing_file['LAT'].longname = "Latitude" - self.forcing_file['LAT'][:] = geo.latlist - self.forcing_file['ZS'] = self.file_handler.createVariable( - "ZS", "f4", ("Number_of_points",)) - self.forcing_file['ZS'].longname = "Surface_Orography" - self.forcing_file['ZS'][:] = zs_oro - self.forcing_file['ZREF'] = self.file_handler.createVariable( - "ZREF", "f4", ("Number_of_points",)) - self.forcing_file['ZREF'].longname = "Reference_Height" - self.forcing_file['ZREF'].units = "m" - self.forcing_file['ZREF'][:] = zref - self.forcing_file['UREF'] = self.file_handler.createVariable( - "UREF", "f4", ("Number_of_points",)) - self.forcing_file['UREF'].longname = "Reference_Height_for_Wind" - 
self.forcing_file['UREF'].units = "m" - self.forcing_file['UREF'][:] = uref + self.forcing_file["TIME"] = self.file_handler.createVariable( + "time", "f4", ("time",) + ) + self.forcing_file["TIME"].units = ( + "hours since " + f"{self.base_time.strftime('%Y-%m-%d %H')}:00:00 0:00" + ) + self.forcing_file["TSTEP"] = self.file_handler.createVariable( + "FRC_TIME_STP", "f4" + ) + self.forcing_file["TSTEP"].longname = "Forcing_Time_Step" + self.forcing_file["TSTEP"][:] = self.time_step_intervall + self.forcing_file["LON"] = self.file_handler.createVariable( + "LON", "f4", ("Number_of_points",) + ) + self.forcing_file["LON"].longname = "Longitude" + self.forcing_file["LON"][:] = geo.lonlist + self.forcing_file["LAT"] = self.file_handler.createVariable( + "LAT", "f4", ("Number_of_points",) + ) + self.forcing_file["LAT"].longname = "Latitude" + self.forcing_file["LAT"][:] = geo.latlist + self.forcing_file["ZS"] = self.file_handler.createVariable( + "ZS", "f4", ("Number_of_points",) + ) + self.forcing_file["ZS"].longname = "Surface_Orography" + self.forcing_file["ZS"][:] = zs_oro + self.forcing_file["ZREF"] = self.file_handler.createVariable( + "ZREF", "f4", ("Number_of_points",) + ) + self.forcing_file["ZREF"].longname = "Reference_Height" + self.forcing_file["ZREF"].units = "m" + self.forcing_file["ZREF"][:] = zref + self.forcing_file["UREF"] = self.file_handler.createVariable( + "UREF", "f4", ("Number_of_points",) + ) + self.forcing_file["UREF"].longname = "Reference_Height_for_Wind" + self.forcing_file["UREF"].units = "m" + self.forcing_file["UREF"][:] = uref # Define time dependent variables for this_obj in self.var_objs: this_var = this_obj.var_name if this_var == "TA": - self.forcing_file['Tair'] = self.file_handler.createVariable( - "Tair", "f4", ("time", "Number_of_points",)) - self.forcing_file['Tair'].longname = "Near_Surface_Air_Temperature" - self.forcing_file['Tair'].units = "K" + self.forcing_file["Tair"] = self.file_handler.createVariable( + "Tair", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["Tair"].longname = "Near_Surface_Air_Temperature" + self.forcing_file["Tair"].units = "K" elif this_var == "QA": - self.forcing_file['Qair'] = self.file_handler.createVariable( - "Qair", "f4", ("time", "Number_of_points",)) - self.forcing_file['Qair'].longname = "Near_Surface_Specific_Humidity" - self.forcing_file['Qair'].units = "kg/kg" + self.forcing_file["Qair"] = self.file_handler.createVariable( + "Qair", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["Qair"].longname = "Near_Surface_Specific_Humidity" + self.forcing_file["Qair"].units = "kg/kg" elif this_var == "PS": - self.forcing_file['PSurf'] = self.file_handler.createVariable( - "PSurf", "f4", ("time", "Number_of_points",)) - self.forcing_file['PSurf'].longname = "Surface_Pressure" - self.forcing_file['PSurf'].units = "Pa" + self.forcing_file["PSurf"] = self.file_handler.createVariable( + "PSurf", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["PSurf"].longname = "Surface_Pressure" + self.forcing_file["PSurf"].units = "Pa" elif this_var == "DIR_SW": - self.forcing_file['DIR_SWdown'] = self.file_handler.createVariable( - "DIR_SWdown", "f4", ("time", "Number_of_points",)) - self.forcing_file['DIR_SWdown'].longname = \ - "Surface_Incident_Downwelling_Shortwave_Radiation" - self.forcing_file['DIR_SWdown'].units = "W/m2" + self.forcing_file["DIR_SWdown"] = self.file_handler.createVariable( + "DIR_SWdown", + "f4", + ( + "time", + "Number_of_points", + ), + ) + 
self.forcing_file[ + "DIR_SWdown" + ].longname = "Surface_Incident_Downwelling_Shortwave_Radiation" + self.forcing_file["DIR_SWdown"].units = "W/m2" elif this_var == "SCA_SW": - self.forcing_file['SCA_SWdown'] = self.file_handler.createVariable( - "SCA_SWdown", "f4", ("time", "Number_of_points",)) - self.forcing_file['SCA_SWdown'].longname = \ - "Surface_Incident_Diffuse_Shortwave_Radiation" - self.forcing_file['SCA_SWdown'].units = "W/m2" + self.forcing_file["SCA_SWdown"] = self.file_handler.createVariable( + "SCA_SWdown", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file[ + "SCA_SWdown" + ].longname = "Surface_Incident_Diffuse_Shortwave_Radiation" + self.forcing_file["SCA_SWdown"].units = "W/m2" elif this_var == "LW": - self.forcing_file['LWdown'] = self.file_handler.createVariable( - "LWdown", "f4", ("time", "Number_of_points",)) - self.forcing_file['LWdown'].longname = "Surface_Incident_Diffuse_Longwave_Radiation" - self.forcing_file['LWdown'].units = "W/m2" + self.forcing_file["LWdown"] = self.file_handler.createVariable( + "LWdown", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file[ + "LWdown" + ].longname = "Surface_Incident_Diffuse_Longwave_Radiation" + self.forcing_file["LWdown"].units = "W/m2" elif this_var == "RAIN": - self.forcing_file['Rainf'] = self.file_handler.createVariable( - "Rainf", "f4", ("time", "Number_of_points",)) - self.forcing_file['Rainf'].longname = "Rainfall_Rate" - self.forcing_file['Rainf'].units = "kg/m2/s" + self.forcing_file["Rainf"] = self.file_handler.createVariable( + "Rainf", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["Rainf"].longname = "Rainfall_Rate" + self.forcing_file["Rainf"].units = "kg/m2/s" elif this_var == "SNOW": - self.forcing_file['Snowf'] = self.file_handler.createVariable( - "Snowf", "f4", ("time", "Number_of_points",)) - self.forcing_file['Snowf'].longname = "Snowfall_Rate" - self.forcing_file['Snowf'].units = "kg/m2/s" + self.forcing_file["Snowf"] = self.file_handler.createVariable( + "Snowf", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["Snowf"].longname = "Snowfall_Rate" + self.forcing_file["Snowf"].units = "kg/m2/s" elif this_var == "WIND": - self.forcing_file['Wind'] = self.file_handler.createVariable( - "Wind", "f4", ("time", "Number_of_points",)) - self.forcing_file['Wind'].longname = "Wind_Speed" - self.forcing_file['Wind'].units = "m/s" + self.forcing_file["Wind"] = self.file_handler.createVariable( + "Wind", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["Wind"].longname = "Wind_Speed" + self.forcing_file["Wind"].units = "m/s" elif this_var == "WIND_DIR": - self.forcing_file['Wind_DIR'] = self.file_handler.createVariable( - "Wind_DIR", "f4", ("time", "Number_of_points",)) - self.forcing_file['Wind_DIR'].longname = "Wind_Direction" + self.forcing_file["Wind_DIR"] = self.file_handler.createVariable( + "Wind_DIR", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["Wind_DIR"].longname = "Wind_Direction" elif this_var == "CO2": - self.forcing_file['CO2air'] = self.file_handler.createVariable( - "CO2air", "f4", ("time", "Number_of_points",)) - self.forcing_file['CO2air'].longname = "Near_Surface_CO2_Concentration" - self.forcing_file['CO2air'].units = "kg/m3" + self.forcing_file["CO2air"] = self.file_handler.createVariable( + "CO2air", + "f4", + ( + "time", + "Number_of_points", + ), + ) + self.forcing_file["CO2air"].longname = "Near_Surface_CO2_Concentration" + 
self.forcing_file["CO2air"].units = "kg/m3" else: - raise NotImplementedError(f"This should never happen! {this_var} is not defined!") + raise NotImplementedError( + f"This should never happen! {this_var} is not defined!" + ) def finalize(self): """Finalize the forcing. Close the file.""" @@ -291,8 +403,18 @@ def finalize(self): class AsciiOutput(SurfexOutputForcing): """Forcing in ASCII format.""" - def __init__(self, base_time, geo, fname, ntimes, var_objs, att_objs, att_time, cache, - time_step): + def __init__( + self, + base_time, + geo, + fname, + ntimes, + var_objs, + att_objs, + att_time, + cache, + time_step, + ): """Construct ASCII forcing output.""" SurfexOutputForcing.__init__(self, base_time, geo, ntimes, var_objs, time_step) self.output_format = "ascii" @@ -334,20 +456,20 @@ def _define_forcing(self, geo, att_objs, att_time, cache): elif this_var == "UREF": uref = this_obj.read_time_step(att_time, cache) - second = (self.base_time - self.base_time.replace(hour=0, - minute=0, - second=0, - microsecond=0)).total_seconds() + second = ( + self.base_time + - self.base_time.replace(hour=0, minute=0, second=0, microsecond=0) + ).total_seconds() fmt = "%15.8f" cols = 50 - file_handler = open(self.fname, 'w', encoding="utf-8") - file_handler.write(str(geo.npoints) + '\n') - file_handler.write(str(self.ntimes) + '\n') - file_handler.write(str(self.time_step_intervall) + '\n') - file_handler.write(self.base_time.strftime("%Y") + '\n') - file_handler.write(self.base_time.strftime("%m") + '\n') - file_handler.write(self.base_time.strftime("%d") + '\n') - file_handler.write(str(second) + '\n') + file_handler = open(self.fname, "w", encoding="utf-8") + file_handler.write(str(geo.npoints) + "\n") + file_handler.write(str(self.ntimes) + "\n") + file_handler.write(str(self.time_step_intervall) + "\n") + file_handler.write(self.base_time.strftime("%Y") + "\n") + file_handler.write(self.base_time.strftime("%m") + "\n") + file_handler.write(self.base_time.strftime("%d") + "\n") + file_handler.write(str(second) + "\n") write_formatted_array(file_handler, geo.lons, cols, fmt) write_formatted_array(file_handler, geo.lats, cols, fmt) write_formatted_array(file_handler, zs_oro, cols, fmt) @@ -359,8 +481,8 @@ def _define_forcing(self, geo, att_objs, att_time, cache): nam = key if key == "WIND_DIR": nam = "DIR" - self.forcing_file[key] = "Forc_" + nam + '.txt' - self.file_handler[key] = open(self.forcing_file[key], 'w', encoding="utf-8") + self.forcing_file[key] = "Forc_" + nam + ".txt" + self.file_handler[key] = open(self.forcing_file[key], "w", encoding="utf-8") def finalize(self): """Finalize forcing.""" @@ -372,34 +494,33 @@ def finalize(self): def write_formatted_array(file, array, columns, fileformat): """Write a formatted array.""" astr = np.empty(array.size - array.size % columns, dtype="float64") - astr = array[0:astr.size] - astr = astr.reshape((columns, astr.size / columns), order='F') + astr = array[0 : astr.size] + astr = astr.reshape((columns, astr.size / columns), order="F") mlw = (len(fileformat % 0)) * (columns + 1) - formatter = {'float_kind': lambda x: fileformat % x} - astr_end = np.array2string(array[astr.size:], - separator='', - max_line_width=mlw, - formatter=formatter)[1:-1] - np.savetxt(file, astr.T, fmt=fileformat, newline='\n', delimiter='') - file.write(astr_end + '\n') + formatter = {"float_kind": lambda x: fileformat % x} + astr_end = np.array2string( + array[astr.size :], separator="", max_line_width=mlw, formatter=formatter + )[1:-1] + np.savetxt(file, astr.T, 
fmt=fileformat, newline="\n", delimiter="") + file.write(astr_end + "\n") def run_time_loop(options, var_objs, att_objs): """Run time loop.""" tic = time.time() - this_time = options['start'] + this_time = options["start"] - cache = surfex.cache.Cache(options['cache_interval']) - time_step = options['timestep'] + cache = Cache(options["cache_interval"]) + time_step = options["timestep"] single = False if "single" in options: single = options["single"] # Find how many time steps we want to write ntimes = 0 - while this_time <= options['stop']: + while this_time <= options["stop"]: ntimes = ntimes + 1 - this_time = this_time + timedelta(seconds=options['timestep']) + this_time = this_time + as_timedelta(seconds=options["timestep"]) if single: time_step = 1 if ntimes == 1: @@ -409,40 +530,61 @@ def run_time_loop(options, var_objs, att_objs): raise Exception("Option single should be used with one time step") # Create output object - if str.lower(options['output_format']) == "netcdf" or \ - str.lower(options['output_format']) == "nc4": + if ( + str.lower(options["output_format"]) == "netcdf" + or str.lower(options["output_format"]) == "nc4" + ): # Set att_time the same as start - att_time = options['start'] - output = surfex.forcing.NetCDFOutput(options['start'], options['geo_out'], - options['output_file'], ntimes, - var_objs, att_objs, att_time, cache, time_step, - fmt=str.lower(options['output_format'])) - elif str.lower(options['output_format']) == "ascii": - att_time = options['start'] - output = surfex.forcing.AsciiOutput(options['start'], options['geo_out'], - options['output_file'], ntimes, - var_objs, att_objs, att_time, cache, time_step) + att_time = options["start"] + output = NetCDFOutput( + options["start"], + options["geo_out"], + options["output_file"], + ntimes, + var_objs, + att_objs, + att_time, + cache, + time_step, + fmt=str.lower(options["output_format"]), + ) + elif str.lower(options["output_format"]) == "ascii": + att_time = options["start"] + output = AsciiOutput( + options["start"], + options["geo_out"], + options["output_file"], + ntimes, + var_objs, + att_objs, + att_time, + cache, + time_step, + ) else: - raise NotImplementedError("Invalid output format " + options['output_format']) + raise NotImplementedError("Invalid output format " + options["output_format"]) # Loop output time steps - this_time = options['start'] - while this_time <= options['stop']: + this_time = options["start"] + while this_time <= options["stop"]: # Write for each time step - logging.info("Creating forcing for: %s time_step: %s", - this_time.strftime('%Y%m%d%H'), str(output.time_step)) + logging.info( + "Creating forcing for: %s time_step: %s", + this_time.strftime("%Y%m%d%H"), + str(output.time_step), + ) output.write_forcing(var_objs, this_time, cache) output.time_step = output.time_step + 1 if not single: output.time_step_value = output.time_step - this_time = this_time + timedelta(seconds=options['timestep']) + this_time = this_time + as_timedelta(seconds=options["timestep"]) if cache is not None: cache.clean_fields(this_time) else: output.time_step_value = 0 if output.time_step > 1: - this_time = this_time + timedelta(seconds=options['timestep']) + this_time = this_time + as_timedelta(seconds=options["timestep"]) # Finalize forcing output.finalize() @@ -450,8 +592,16 @@ def run_time_loop(options, var_objs, att_objs): logging.info("Forcing generation took %s seconds", str(toc - tic)) -def set_input_object(sfx_var, merged_conf, geo, forcingformat, selected_converter, ref_height, - 
first_base_time, timestep): +def set_input_object( + sfx_var, + merged_conf, + geo, + forcingformat, + selected_converter, + ref_height, + first_base_time, + timestep, +): """Set the input parameter for a specific SURFEX forcing variable based on input. Args: @@ -467,6 +617,9 @@ def set_input_object(sfx_var, merged_conf, geo, forcingformat, selected_converte Returns: _type_: _description_ + Raises: + KeyError: KeyError + """ ######################################### # 1. Gobal configuration from yaml file @@ -498,17 +651,25 @@ def set_input_object(sfx_var, merged_conf, geo, forcingformat, selected_converte else: if ref_height in conf[sfx_var]: if forcingformat not in conf[sfx_var][ref_height]: - msg = f"{str(conf[sfx_var])}: " \ - + f"Missing definitions for {sfx_var} and format: {forcingformat}" + msg = ( + f"{str(conf[sfx_var])}: " + + f"Missing definitions for {sfx_var} and format: {forcingformat}" + ) raise KeyError(msg) if conf[sfx_var][ref_height][forcingformat] is None: - raise KeyError(f"{str(conf[sfx_var])}: Missing definitions for {sfx_var}") + raise KeyError( + f"{str(conf[sfx_var])}: Missing definitions for {sfx_var}" + ) if "converter" in conf[sfx_var][ref_height][forcingformat]: - conf_dict = copy.deepcopy(conf[sfx_var][ref_height][forcingformat]["converter"]) + conf_dict = copy.deepcopy( + conf[sfx_var][ref_height][forcingformat]["converter"] + ) else: raise KeyError("No converter defined for " + sfx_var) else: - raise KeyError("No ref height \"" + ref_height + "\" defined for " + sfx_var) + raise KeyError( + 'No ref height "' + ref_height + '" defined for ' + sfx_var + ) ############################################################## ############################################################## @@ -527,17 +688,20 @@ def set_input_object(sfx_var, merged_conf, geo, forcingformat, selected_converte else: raise KeyError("No constant defined for " + sfx_var) else: - raise KeyError("No ref height \"" + ref_height + "\" defined for " + sfx_var) + raise KeyError( + 'No ref height "' + ref_height + '" defined for ' + sfx_var + ) - obj = surfex.read.ConstantValue(geo, sfx_var, const_dict) + obj = ConstantValue(geo, sfx_var, const_dict) else: # Construct the converter - converter = surfex.read.Converter(selected_converter, first_base_time, defs, conf_dict, - forcingformat) + converter = Converter( + selected_converter, first_base_time, defs, conf_dict, forcingformat + ) # Construct the input object - obj = surfex.read.ConvertedInput(geo, sfx_var, converter) + obj = ConvertedInput(geo, sfx_var, converter) return obj @@ -550,13 +714,15 @@ def set_forcing_config(**kwargs): if kwargs["config_exp_surfex"] is not None: config_exp = kwargs["config_exp_surfex"] if config_exp is None: - config_exp = surfex.__path__[0] + "/cfg/config_exp_surfex.toml" + config_exp = ( + f"{os.path.abspath(os.path.dirname(__file__))}/cfg/config_exp_surfex.toml" + ) logging.info("Using default config from: %s", config_exp) input_data = toml.load(open(config_exp, "r", encoding="utf-8")) - config = surfex.ConfigurationFromHarmonie(os.environ, input_data) + config = ConfigurationFromHarmonie(os.environ, input_data) geo_out = config.geo elif "domain" in kwargs and kwargs["domain"] is not None: - geo_out = surfex.get_geo_object(json.load(open(kwargs["domain"], "r", encoding="utf-8"))) + geo_out = get_geo_object(json.load(open(kwargs["domain"], "r", encoding="utf-8"))) else: raise Exception("No geometry is set") @@ -683,17 +849,19 @@ def set_forcing_config(**kwargs): # Time information if (int(dtg_start) or 
int(dtg_stop)) < 1000010100: - raise Exception("Invalid start and stop times! " + str(dtg_start) + " " + str(dtg_stop)) + raise Exception( + "Invalid start and stop times! " + str(dtg_start) + " " + str(dtg_stop) + ) - start = datetime.strptime(str.strip(str(dtg_start)), '%Y%m%d%H') - stop = datetime.strptime(str.strip(str(dtg_stop)), '%Y%m%d%H') + start = as_datetime(str.strip(str(dtg_start))) + stop = as_datetime(str.strip(str(dtg_stop))) if file_base is None: first_base_time = start else: - first_base_time = datetime.strptime(str.strip(str(file_base)), '%Y%m%d%H') + first_base_time = as_datetime(str.strip(str(file_base))) # Merge all settings with user all settings - merged_conf = surfex.deep_update(config, user_config) + merged_conf = deep_update(config, user_config) # Replace global settings from fileformat = input_format @@ -713,17 +881,18 @@ def set_forcing_config(**kwargs): geo_input = kwargs["geo_input"] if geo_input is not None: if os.path.exists(geo_input): - geo_input = surfex.get_geo_object(json.load(open(geo_input, "r", encoding="utf-8"))) + geo_input = get_geo_object( + json.load(open(geo_input, "r", encoding="utf-8")) + ) merged_conf[fileformat]["geo_input"] = geo_input else: - surfex.info("Input geometry " + geo_input + " does not exist") + logging.info("Input geometry %s does not exist", geo_input) # Set attributes atts = ["ZS", "ZREF", "UREF"] att_objs = [] for att_var in atts: - # att_var = atts[i] # Override with command line options for a given variable ref_height = None cformat = fileformat @@ -748,12 +917,33 @@ def set_forcing_config(**kwargs): else: raise NotImplementedError - att_objs.append(set_input_object(att_var, merged_conf, geo_out, cformat, selected_converter, - ref_height, first_base_time, timestep)) + att_objs.append( + set_input_object( + att_var, + merged_conf, + geo_out, + cformat, + selected_converter, + ref_height, + first_base_time, + timestep, + ) + ) # Set forcing variables (time dependent) - variables = ["TA", "QA", "PS", "DIR_SW", "SCA_SW", "LW", "RAIN", "SNOW", "WIND", "WIND_DIR", - "CO2"] + variables = [ + "TA", + "QA", + "PS", + "DIR_SW", + "SCA_SW", + "LW", + "RAIN", + "SNOW", + "WIND", + "WIND_DIR", + "CO2", + ] var_objs = [] # Search in config file for parameters to override for sfx_var in variables: @@ -810,21 +1000,31 @@ def set_forcing_config(**kwargs): selected_converter = co2_converter else: raise NotImplementedError - var_objs.append(set_input_object(sfx_var, merged_conf, geo_out, cformat, selected_converter, - ref_height, first_base_time, timestep)) + var_objs.append( + set_input_object( + sfx_var, + merged_conf, + geo_out, + cformat, + selected_converter, + ref_height, + first_base_time, + timestep, + ) + ) # Save options options = dict() - options['output_format'] = output_format - options['output_file'] = outfile - options['start'] = start - options['stop'] = stop - options['timestep'] = timestep - options['geo_out'] = geo_out - options['single'] = False + options["output_format"] = output_format + options["output_file"] = outfile + options["start"] = start + options["stop"] = stop + options["timestep"] = timestep + options["geo_out"] = geo_out + options["single"] = False if "single" in kwargs: - options['single'] = kwargs["single"] - options['cache_interval'] = cache_interval + options["single"] = kwargs["single"] + options["cache_interval"] = cache_interval return options, var_objs, att_objs @@ -836,12 +1036,14 @@ def modify_forcing(**kwargs): time_step = kwargs["time_step"] variables = kwargs["variables"] - ifile = 
netCDF4.Dataset(infile, 'r') + ifile = netCDF4.Dataset(infile, "r") ofile = netCDF4.Dataset(outfile, "r+") for var in variables: print("Modify variable " + var) - print("input", ifile[var][time_step, :], ifile[var][time_step, :].shape, time_step) + print( + "input", ifile[var][time_step, :], ifile[var][time_step, :].shape, time_step + ) print("output", ofile[var][0, :], ofile[var][0, :].shape) ofile[var][0, :] = ifile[var][time_step, :] ofile.sync() diff --git a/surfex/geo.py b/surfex/geo.py index 2bcd6eb..addbebe 100644 --- a/surfex/geo.py +++ b/surfex/geo.py @@ -1,18 +1,22 @@ """Geometry.""" -from abc import ABC, abstractmethod -import os -import math import json import logging -import pyproj +import math +import os +from abc import ABC, abstractmethod + import numpy as np -import surfex +import pyproj + try: from osgeo import ogr # type: ignore except Exception: ogr = None +from .namelist import BaseNamelist + + class Geo(object): """Geometry.""" @@ -77,8 +81,23 @@ def identifier(self): f_lat = str(round(float(self.latrange[0]), 2)) l_lat = str(round(float(self.latrange[-1]), 2)) - tag = ":" + str(self.npoints) + ":" + str(self.nlons) + ":" + str(self.nlats) + ":" \ - + f_lon + ":" + l_lon + ":" + f_lat + ":" + l_lat + ":" + tag = ( + ":" + + str(self.npoints) + + ":" + + str(self.nlons) + + ":" + + str(self.nlats) + + ":" + + f_lon + + ":" + + l_lon + + ":" + + f_lat + + ":" + + l_lat + + ":" + ) tag = tag.replace(" ", "") logging.debug("TAG: %s", tag) return tag @@ -139,7 +158,7 @@ def update_namelist(self, nml): Returns: _type_: _description_ """ - return NotImplementedError + raise NotImplementedError @abstractmethod def subset(self, geo): @@ -148,7 +167,7 @@ def subset(self, geo): Args: geo (surfex.Geo): Geometry to check. """ - return NotImplementedError + raise NotImplementedError class ConfProj(SurfexGeo): @@ -158,21 +177,32 @@ def __init__(self, from_json): """Construct conf proj geo. Args: - from_json (_type_): _description_ - debug (bool, optional): _description_. Defaults to False. 
+ from_json (dict): Domain definition + + Raises: + KeyError: Missing keys1 + KeyError: Missing keys2 + KeyError: Missing keys3 + KeyError: Missing keys4 """ self.cgrid = "CONF PROJ" self.json = from_json - domain_dict = surfex.BaseNamelist.lower_case_namelist_dict(from_json) + domain_dict = BaseNamelist.lower_case_namelist_dict(from_json) logging.debug("from_json: %s", from_json) self.ilone = None self.ilate = None self.xtrunc = None if "nam_conf_proj_grid" in domain_dict: - if "nimax" and "njmax" and "xloncen" and "xlatcen" and "xdx" and "xdy" \ - in domain_dict["nam_conf_proj_grid"]: + if ( + "nimax" + and "njmax" + and "xloncen" + and "xlatcen" + and "xdx" + and "xdy" in domain_dict["nam_conf_proj_grid"] + ): self.nimax = domain_dict["nam_conf_proj_grid"]["nimax"] self.njmax = domain_dict["nam_conf_proj_grid"]["njmax"] self.xloncen = domain_dict["nam_conf_proj_grid"]["xloncen"] @@ -199,22 +229,26 @@ def __init__(self, from_json): else: raise KeyError("Missing key4") - earth = 6.37122e+6 + earth = 6.37122e6 if self.xlat0 == 90.0 or self.xlat0 == -90.0: - proj_string = f"+proj=stere +lat_0={str(self.xlat0)} +lon_0={str(self.xlon0)} "\ - f"+lat_ts={str(self.xlat0)}" + proj_string = ( + f"+proj=stere +lat_0={str(self.xlat0)} +lon_0={str(self.xlon0)} " + f"+lat_ts={str(self.xlat0)}" + ) else: - proj_string = f"+proj=lcc +lat_0={str(self.xlat0)} +lon_0={str(self.xlon0)} " \ - f"+lat_1={str(self.xlat0)} +lat_2={str(self.xlat0)} " \ - f"+units=m +no_defs +R={str(earth)}" + proj_string = ( + f"+proj=lcc +lat_0={str(self.xlat0)} +lon_0={str(self.xlon0)} " + f"+lat_1={str(self.xlat0)} +lat_2={str(self.xlat0)} " + f"+units=m +no_defs +R={str(earth)}" + ) logging.debug("Proj string: %s", proj_string) proj = pyproj.CRS.from_string(proj_string) wgs84 = pyproj.CRS.from_string("EPSG:4326") - xloncen, xlatcen = \ - pyproj.Transformer.from_crs(wgs84, proj, - always_xy=True).transform(self.xloncen, self.xlatcen) + xloncen, xlatcen = pyproj.Transformer.from_crs( + wgs84, proj, always_xy=True + ).transform(self.xloncen, self.xlatcen) x_0 = float(xloncen) - (0.5 * ((float(self.nimax) - 1.0) * self.xdx)) y_0 = float(xlatcen) - (0.5 * ((float(self.njmax) - 1.0) * self.xdy)) @@ -222,7 +256,7 @@ def __init__(self, from_json): self.y_0 = y_0 xxx = np.empty([self.nimax]) yyy = np.empty([self.njmax]) - # TODO vectorize + # TODO vectorize for i in range(0, self.nimax): xxx[i] = x_0 + (float(i) * self.xdx) for j in range(0, self.njmax): @@ -231,16 +265,14 @@ def __init__(self, from_json): self.yyy = yyy y_v, x_v = np.meshgrid(yyy, xxx) logging.debug("x_v.shape=%s y_v.shape=%s", x_v.shape, y_v.shape) - lons, lats = pyproj.Transformer.from_crs(proj, wgs84, always_xy=True).transform(x_v, y_v) + lons, lats = pyproj.Transformer.from_crs(proj, wgs84, always_xy=True).transform( + x_v, y_v + ) logging.debug("lons.shape=%s lats.shape=%s", lons.shape, lats.shape) logging.debug("lons.shape=%s", lons) logging.debug("lats.shape=%s", lats) SurfexGeo.__init__(self, proj, lons, lats) - # raise - # SurfexGeo.__init__(self, proj, npoints, self.nimax, self.njmax, - # np.reshape(lons, [npoints], order="F"), - # np.reshape(lats, [npoints], order="F"), from_json) def update_namelist(self, nml): """Update namelist. @@ -252,47 +284,49 @@ def update_namelist(self, nml): nml (f90nml.Namelist): Namelist object. 
""" if self.ilate is None or self.ilate is None: - nml.update({ - "nam_pgd_grid": { - "cgrid": self.cgrid - }, - "nam_conf_proj": { - "xlon0": self.xlon0, - "xlat0": self.xlat0, - "xrpk": math.sin(math.radians(self.xlat0)), - "xbeta": 0}, - "nam_conf_proj_grid": { - "xlatcen": self.xlatcen, - "xloncen": self.xloncen, - "nimax": self.nimax, - "njmax": self.njmax, - "xdx": self.xdx, - "xdy": self.xdy, - "xtrunc": self.xtrunc + nml.update( + { + "nam_pgd_grid": {"cgrid": self.cgrid}, + "nam_conf_proj": { + "xlon0": self.xlon0, + "xlat0": self.xlat0, + "xrpk": math.sin(math.radians(self.xlat0)), + "xbeta": 0, + }, + "nam_conf_proj_grid": { + "xlatcen": self.xlatcen, + "xloncen": self.xloncen, + "nimax": self.nimax, + "njmax": self.njmax, + "xdx": self.xdx, + "xdy": self.xdy, + "xtrunc": self.xtrunc, + }, } - }) + ) else: - nml.update({ - "nam_pgd_grid": { - "cgrid": self.cgrid - }, - "nam_conf_proj": { - "xlon0": self.xlon0, - "xlat0": self.xlat0, - "xrpk": math.sin(math.radians(self.xlat0)), - "xbeta": 0}, - "nam_conf_proj_grid": { - "ilone": self.ilone, - "ilate": self.ilate, - "xlatcen": self.xlatcen, - "xloncen": self.xloncen, - "nimax": self.nimax, - "njmax": self.njmax, - "xdx": self.xdx, - "xdy": self.xdy, - "xtrunc":self.xtrunc + nml.update( + { + "nam_pgd_grid": {"cgrid": self.cgrid}, + "nam_conf_proj": { + "xlon0": self.xlon0, + "xlat0": self.xlat0, + "xrpk": math.sin(math.radians(self.xlat0)), + "xbeta": 0, + }, + "nam_conf_proj_grid": { + "ilone": self.ilone, + "ilate": self.ilate, + "xlatcen": self.xlatcen, + "xloncen": self.xloncen, + "nimax": self.nimax, + "njmax": self.njmax, + "xdx": self.xdx, + "xdy": self.xdy, + "xtrunc": self.xtrunc, + }, } - }) + ) return nml def subset(self, geo): @@ -325,17 +359,17 @@ def subset(self, geo): y_0 = None for i in range(0, geo.nimax): - # print("Test i:", i, geo.x[i], self.x0) if round(self.x_0, 4) == round(geo.xxx[i], 4): x_0 = i break for j in range(0, geo.njmax): - # print("Test j:", j, geo.y[j], self.y0) if round(self.y_0, 4) == round(geo.yyy[j], 4): y_0 = j break if x_0 is not None and y_0 is not None: - logging.info("Grid is a subset of input grid %s %s", str(x_0), str(y_0)) + logging.info( + "Grid is a subset of input grid %s %s", str(x_0), str(y_0) + ) lons = np.arange(x_0, x_0 + self.nimax, 1).tolist() lats = np.arange(y_0, y_0 + self.njmax, 1).tolist() @@ -353,10 +387,14 @@ def __init__(self, from_json): Args: from_json (dict): Domain description, + Raises: + KeyError: Missing key + KeyError: Missing keys + """ self.cgrid = "LONLATVAL" self.json = from_json - domain_dict = surfex.BaseNamelist.lower_case_namelist_dict(from_json) + domain_dict = BaseNamelist.lower_case_namelist_dict(from_json) if "nam_lonlatval" in domain_dict: if "xx" and "xy" and "xdx" and "xdy" in domain_dict["nam_lonlatval"]: @@ -382,17 +420,17 @@ def update_namelist(self, nml): Returns: nml (f90nml.Namelist): Namelist object. """ - nml.update({ - "nam_pgd_grid": { - "cgrid": self.cgrid - }, - "nam_lonlatval": { - "xx": self.x_x, - "xy": self.x_y, - "xdx": self.xdx, - "xdy": self.xdy + nml.update( + { + "nam_pgd_grid": {"cgrid": self.cgrid}, + "nam_lonlatval": { + "xx": self.x_x, + "xy": self.x_y, + "xdx": self.xdx, + "xdy": self.xdy, + }, } - }) + ) return nml def subset(self, geo): @@ -400,6 +438,9 @@ def subset(self, geo): Args: geo (surfex.Geo): Geometry to check. 
+ + Returns: + tuple: lons, lats """ logging.info("Subset not implemented") lons = [] @@ -416,14 +457,24 @@ def __init__(self, from_json): Args: from_json (_type_): _description_ + Raises: + KeyError: Missing key + KeyError: Missing keys + """ self.cgrid = "CARTESIAN" self.json = from_json - domain_dict = surfex.BaseNamelist.lower_case_namelist_dict(from_json) + domain_dict = BaseNamelist.lower_case_namelist_dict(from_json) if "nam_cartesian" in domain_dict: - if "xlat0" and "xlon0" and "nimax" and "njmax" and "xdx" and "xdy" in \ - domain_dict["nam_cartesian"]: + if ( + "xlat0" + and "xlon0" + and "nimax" + and "njmax" + and "xdx" + and "xdy" in domain_dict["nam_cartesian"] + ): self.xlat0 = domain_dict["nam_cartesian"]["xlat0"] self.xlon0 = domain_dict["nam_cartesian"]["xlon0"] self.nimax = domain_dict["nam_cartesian"]["nimax"] @@ -440,11 +491,9 @@ def __init__(self, from_json): SurfexGeo.__init__(self, proj, np.asarray(lons), np.asarray(lats)) else: - print("Missing keys") - raise KeyError + raise KeyError("Missing keys") else: - print("Missing key") - raise KeyError + raise KeyError("Missing key") def update_namelist(self, nml): """Update namelist. @@ -456,19 +505,19 @@ def update_namelist(self, nml): nml (f90nml.Namelist): Namelist object. """ print(nml) - nml.update({ - "nam_pgd_grid": { - "cgrid": self.cgrid - }, - "nam_cartesian": { - "xlat0": self.xlat0, - "xlon0": self.xlon0, - "nimax": self.nimax, - "njmax": self.njmax, - "xdx": self.xdx, - "xdy": self.xdy + nml.update( + { + "nam_pgd_grid": {"cgrid": self.cgrid}, + "nam_cartesian": { + "xlat0": self.xlat0, + "xlon0": self.xlon0, + "nimax": self.nimax, + "njmax": self.njmax, + "xdx": self.xdx, + "xdy": self.xdy, + }, } - }) + ) return nml def subset(self, geo): @@ -476,6 +525,10 @@ def subset(self, geo): Args: geo (surfex.Geo): Geometry to check. + + Returns: + tuple: lons, lats + """ logging.info("Subset not implemented") lons = [] @@ -492,14 +545,25 @@ def __init__(self, from_json): Args: from_json (dict): Domain definition. + Raises: + KeyError: Missing key + KeyError: Missing keys + ZeroDivisionError: nlon and/or nlat is 0 + """ self.cgrid = "LONLAT REG" self.json = from_json - domain_dict = surfex.BaseNamelist.lower_case_namelist_dict(from_json) + domain_dict = BaseNamelist.lower_case_namelist_dict(from_json) if "nam_lonlat_reg" in domain_dict: - if "xlonmin" and "xlonmax" and "xlatmin" and "xlatmax" and "nlon" and "nlat" \ - in domain_dict["nam_lonlat_reg"]: + if ( + "xlonmin" + and "xlonmax" + and "xlatmin" + and "xlatmax" + and "nlon" + and "nlat" in domain_dict["nam_lonlat_reg"] + ): self.xlonmin = domain_dict["nam_lonlat_reg"]["xlonmin"] self.xlonmax = domain_dict["nam_lonlat_reg"]["xlonmax"] self.xlatmin = domain_dict["nam_lonlat_reg"]["xlatmin"] @@ -507,18 +571,16 @@ def __init__(self, from_json): self.nlon = domain_dict["nam_lonlat_reg"]["nlon"] self.nlat = domain_dict["nam_lonlat_reg"]["nlat"] else: - print("Missing keys") - raise KeyError + raise KeyError("Missing keys") else: - print("Missing key") - raise KeyError + raise KeyError("Missing key") proj_string = "+proj=longlat +datum=WGS84 +no_defs +ellps=WGS84" proj = pyproj.CRS.from_string(proj_string) lons = [] lats = [] if self.nlon == 0 or self.nlat == 0: - raise ZeroDivisionError + raise ZeroDivisionError("nlon and/or nlat is 0") dlon = (self.xlonmax - self.xlonmin) / (self.nlon - 1) dlat = (self.xlatmax - self.xlatmin) / (self.nlat - 1) @@ -542,19 +604,19 @@ def update_namelist(self, nml): Returns: nml (f90nml.Namelist): Namelist object. 
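Analogously, the LONLAT REG geometry above is driven by a nam_lonlat_reg block; a compact illustrative definition follows (note that nlon and nlat must be non-zero, otherwise the ZeroDivisionError introduced here is raised). Values are made up for the sketch, with LonLatReg assumed in scope:

    domain = {
        "nam_pgd_grid": {"cgrid": "LONLAT REG"},
        "nam_lonlat_reg": {
            "xlonmin": 5.0,
            "xlonmax": 15.0,
            "xlatmin": 55.0,
            "xlatmax": 65.0,
            "nlon": 11,
            "nlat": 11,
        },
    }
    geo = LonLatReg(domain)  # dlon = dlat = 1.0 degree for these values
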
""" - nml.update({ - "nam_pgd_grid": { - "cgrid": self.cgrid - }, - "nam_lonlat_reg": { - "xlonmin": self.xlonmin, - "xlonmax": self.xlonmax, - "xlatmin": self.xlatmin, - "xlatmax": self.xlatmax, - "nlon": self.nlon, - "nlat": self.nlat + nml.update( + { + "nam_pgd_grid": {"cgrid": self.cgrid}, + "nam_lonlat_reg": { + "xlonmin": self.xlonmin, + "xlonmax": self.xlonmax, + "xlatmin": self.xlatmin, + "xlatmax": self.xlatmax, + "nlon": self.nlon, + "nlat": self.nlat, + }, } - }) + ) return nml def subset(self, geo): @@ -562,6 +624,10 @@ def subset(self, geo): Args: geo (surfex.Geo): Geometry to check. + + Returns: + tuple: lons, lats + """ logging.info("Subset not implemented") lons = [] @@ -579,15 +645,30 @@ def __init__(self, from_json, recreate=False): from_json (dict): Domain definition. recreate (bool, optional): Recreate the cached mask. Defaults to False. + Raises: + NotImplementedError: Projection not implemented + KeyError: Missing key + KeyError: Missing keys + """ self.cgrid = "IGN" self.json = from_json - domain_dict = surfex.BaseNamelist.lower_case_namelist_dict(from_json) + domain_dict = BaseNamelist.lower_case_namelist_dict(from_json) if "nam_ign" in domain_dict: - if "clambert" and "npoints" and "xx" and "xy" and "xdx" and "xdy" and "xx_llcorner" \ - and "xy_llcorner" and "xcellsize" and "ncols" and "nrows" \ - in domain_dict["nam_ign"]: + if ( + "clambert" + and "npoints" + and "xx" + and "xy" + and "xdx" + and "xdy" + and "xx_llcorner" + and "xy_llcorner" + and "xcellsize" + and "ncols" + and "nrows" in domain_dict["nam_ign"] + ): self.clambert = domain_dict["nam_ign"]["clambert"] npoints = domain_dict["nam_ign"]["npoints"] @@ -601,17 +682,17 @@ def __init__(self, from_json, recreate=False): self.ncols = domain_dict["nam_ign"]["ncols"] self.nrows = domain_dict["nam_ign"]["nrows"] else: - print("Missing keys") - raise KeyError + raise KeyError("Missing keys") else: - print("Missing key") - raise KeyError + raise KeyError("Missing key") if self.clambert == 7: - proj4 = "+proj=lcc +lat_0=63.5 +lon_0=15.0 +lat_1=63.5 +lat_2=63.5 " \ - "+no_defs +R=6.37122e+6" + proj4 = ( + "+proj=lcc +lat_0=63.5 +lon_0=15.0 +lat_1=63.5 +lat_2=63.5 " + "+no_defs +R=6.37122e+6" + ) self.xloncen = 17 - self.xlatcen = 63. + self.xlatcen = 63.0 self.xlon0 = 15 self.xlat0 = 63.5 else: @@ -628,9 +709,9 @@ def __init__(self, from_json, recreate=False): lons = [] lats = [] for i in range(0, npoints): - lon, lat = pyproj.Transformer.from_crs(proj, wgs84, - always_xy=True).transform(self.x_x[i], - self.x_y[i]) + lon, lat = pyproj.Transformer.from_crs(proj, wgs84, always_xy=True).transform( + self.x_x[i], self.x_y[i] + ) lons.append(lon) lats.append(lat) @@ -641,17 +722,17 @@ def get_coord(pin, pdin, coord, recreate=False): """Get the IGN coordinates. Args: - pin (_type_): _description_ - pdin (_type_): _description_ - coord (_type_): _description_ + pin (list): _description_ + pdin (list): _description_ + coord (list): _description_ recreate (bool, optional): _description_. Defaults to False. Returns: - _type_: _description_ + list: Output coordinates """ pout = [] - cache = "/tmp/." + coord + "_cached" + cache = "/tmp/." + coord + "_cached" # noqa S108 if os.path.isfile(cache) and not recreate: with open(cache, mode="r", encoding="utf-8") as file_handler: cached_coord = file_handler.read().splitlines() @@ -662,38 +743,35 @@ def get_coord(pin, pdin, coord, recreate=False): zdout = [] ksize = 0 if len(pin) > 0: - zdout.append(float(pdin[0]) / 2.) 
+ zdout.append(float(pdin[0]) / 2.0) pout.append(pin[0]) ksize = 1 if len(pin) > 1: ksize = 2 pout.append(pin[0] - pdin[0]) - zdout.append(0.) + zdout.append(0.0) if len(pin) > 2: ksize = 3 pout.append(pin[0] + pdin[0]) - zdout.append(0.) + zdout.append(0.0) - # print ksize for i, pinval in enumerate(pin): for j in range(0, ksize): - # print i,j,len(pin),ksize,pout[j],pin[i] if pout[j] == pinval: break if j == ksize - 1: ksize = ksize + 1 pout.append(pinval) - zdout.append(float(pdin[i]) / 2.) + zdout.append(float(pdin[i]) / 2.0) # Mesh constrains for j in range(0, ksize): - # print i, j, len(pin), ksize, pout[j], pin[i] if pout[j] < pin[i] and (pout[j] + zdout[j]) >= (pin[i] - pdin[i]): break if j == ksize - 1: ksize = ksize + 1 pout.append(pin[i] - pdin[i]) - zdout.append(0.) + zdout.append(0.0) for j in range(0, ksize): if pout[j] > pin[i] and (pout[j] - zdout[j]) <= (pin[i] + pdin[i]): @@ -701,7 +779,7 @@ def get_coord(pin, pdin, coord, recreate=False): if j == ksize - 1: ksize = ksize + 1 pout.append(pin[i] + pdin[i]) - zdout.append(0.) + zdout.append(0.0) # Sort pout pout = sorted(pout) @@ -731,7 +809,7 @@ def ign_mask(pxall, pyall, xxx, yyy, recreate): """ mask = [] - cache = "/tmp/.mask" + cache = "/tmp/.mask" # noqa S108 if os.path.isfile(cache) and not recreate: with open(cache, mode="r", encoding="utf-8") as file_handler: cached_mask = file_handler.read().splitlines() @@ -753,7 +831,6 @@ def ign_mask(pxall, pyall, xxx, yyy, recreate): count = count + 1 for k, xval in enumerate(xxx): if xval == pxall_val and yyy[k] == pyall_val: - # print i,j,k,l,xx[k],pxall[i],yy[k],pyall[j] mask.append(count) break @@ -764,7 +841,6 @@ def ign_mask(pxall, pyall, xxx, yyy, recreate): for mask_ind in mask: file_handler.write(str(mask_ind) + "\n") - # f.write("mask="+str(mask)+"\n") logging.info("Created mask: %s", mask) return mask @@ -777,24 +853,24 @@ def update_namelist(self, nml): Returns: nml (f90nml.Namelist): Namelist object. """ - nml.update({ - "nam_pgd_grid": { - "cgrid": self.cgrid - }, - "nam_ign": { - "clambert": self.clambert, - "npoints": self.npoints, - "xx": self.x_x, - "xy": self.x_y, - "xdx": self.xdx, - "xdy": self.xdy, - "xx_llcorner": self.xx_llcorner, - "xy_llcorner": self.xy_llcorner, - "xcellsize": self.xcellsize, - "ncols": self.ncols, - "nrows": self.nrows + nml.update( + { + "nam_pgd_grid": {"cgrid": self.cgrid}, + "nam_ign": { + "clambert": self.clambert, + "npoints": self.npoints, + "xx": self.x_x, + "xy": self.x_y, + "xdx": self.xdx, + "xdy": self.xdy, + "xx_llcorner": self.xx_llcorner, + "xy_llcorner": self.xy_llcorner, + "xcellsize": self.xcellsize, + "ncols": self.ncols, + "nrows": self.nrows, + }, } - }) + ) return nml def subset(self, geo): @@ -802,6 +878,10 @@ def subset(self, geo): Args: geo (surfex.Geo): Geometry to check. + + Returns: + tuple: lons, lats + """ logging.info("Subset not implemented") lons = [] @@ -815,6 +895,11 @@ def get_geo_object(from_json): Args: from_json (dict): Domain definition. + Raises: + NotImplementedError: Grid not implemented + KeyError: Missing grid information cgrid + KeyError: nam_pgd_grid not set! + Returns: surfex.Geo: Surfex geometry. @@ -849,9 +934,13 @@ def set_domain(settings, domain, hm_mode=False): """Set domain. Args: - settings (_type_): _description_ - domain (_type_): _description_ - hm_mode (bool, optional): _description_. Defaults to False. + settings (dict): Domain definitions + domain (str): Domain name + hm_mode (bool, optional): Harmonie definition. Defaults to False. 
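Putting the pieces together, get_geo_object (documented above) is the dispatcher used elsewhere in this patch, for example in set_forcing_config, to turn a domain JSON file into a geometry object. A usage sketch with a hypothetical file name, assuming get_geo_object from this module is in scope:

    import json

    with open("domain.json", mode="r", encoding="utf-8") as fh:  # hypothetical file
        domain = json.load(fh)
    geo = get_geo_object(domain)  # e.g. a ConfProj instance when cgrid is "CONF PROJ"
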
+ + Raises: + KeyError: Domain not found + ValueError: Settings should be a dict Returns: dict: Domain dictionary @@ -866,9 +955,7 @@ def set_domain(settings, domain, hm_mode=False): ezone = settings[domain]["EZONE"] domain_dict = { - "nam_pgd_grid": { - "cgrid": "CONF PROJ" - }, + "nam_pgd_grid": {"cgrid": "CONF PROJ"}, "nam_conf_proj": { "xlat0": settings[domain]["LAT0"], "xlon0": settings[domain]["LON0"], @@ -882,16 +969,16 @@ def set_domain(settings, domain, hm_mode=False): "njmax": settings[domain]["NLAT"] - ezone, "xdx": settings[domain]["GSIZE"], "xdy": settings[domain]["GSIZE"], - } + }, } else: domain_dict = settings[domain] return domain_dict logging.error("Domain not found: %s", domain) - raise Exception("Domain not found: " + domain) + raise KeyError("Domain not found: " + domain) logging.error("Settings should be a dict") - raise Exception("Settings should be a dict") + raise ValueError("Settings should be a dict") def shape2ign(catchment, infile, output, ref_proj, indent=None): @@ -906,11 +993,13 @@ def shape2ign(catchment, infile, output, ref_proj, indent=None): """ from_json = json.load(open(ref_proj, mode="r", encoding="utf-8")) - geo = surfex.get_geo_object(from_json) - earth = 6.37122e+6 - proj_string = f"+proj=lcc +lat_0={str(geo.xlat0)} +lon_0={str(geo.xlon0)} " \ - f"+lat_1={str(geo.xlat0)} +lat_2={str(geo.xlat0)} " \ - f"+units=m +no_defs +R={str(earth)}" + geo = get_geo_object(from_json) + earth = 6.37122e6 + proj_string = ( + f"+proj=lcc +lat_0={str(geo.xlat0)} +lon_0={str(geo.xlon0)} " + f"+lat_1={str(geo.xlat0)} +lat_2={str(geo.xlat0)} " + f"+units=m +no_defs +R={str(earth)}" + ) logging.debug(proj_string) proj = pyproj.CRS.from_string(proj_string) @@ -934,7 +1023,9 @@ def shape2ign(catchment, infile, output, ref_proj, indent=None): lats.append(point[1]) values.append(point[2]) - xxx, yyy = pyproj.Transformer.from_crs(wgs84, proj, always_xy=True).transform(lons, lats) + xxx, yyy = pyproj.Transformer.from_crs(wgs84, proj, always_xy=True).transform( + lons, lats + ) x_1 = min(xxx) x_2 = max(xxx) y_1 = min(yyy) @@ -971,9 +1062,7 @@ def shape2ign(catchment, infile, output, ref_proj, indent=None): npoints = npoints + 1 nam_json = { - "nam_pgd_grid": { - "cgrid": "IGN" - }, + "nam_pgd_grid": {"cgrid": "IGN"}, "nam_ign": { "clambert": 7, "npoints": npoints, @@ -985,8 +1074,8 @@ def shape2ign(catchment, infile, output, ref_proj, indent=None): "xy_llcorner": 0, "xcellsize": "250", "ncols": 0, - "nrows": 0 - } + "nrows": 0, + }, } with open(output, "w", encoding="utf-8") as file_handler: json.dump(nam_json, file_handler, indent=indent) diff --git a/surfex/grib.py b/surfex/grib.py index 309a946..304d47a 100644 --- a/surfex/grib.py +++ b/surfex/grib.py @@ -1,8 +1,9 @@ """Grib treatment.""" import logging + import numpy as np import pyproj -import surfex + try: import eccodes import gribapi @@ -13,11 +14,15 @@ eccodes = None gribapi = None # Needed in Python 3.5 -except: +except: # noqa eccodes = None gribapi = None +from .geo import ConfProj, Geo, LonLatReg +from .interpolation import Interpolation + + class Grib(object): """Grib class.""" @@ -46,237 +51,237 @@ def field(self, gribvar, time): Returns: np.ndarray: Field + Raises: + NotImplementedError: NotImplementedError + RuntimeError: If eccodes not available + """ if eccodes is None: - raise Exception("eccodes not found. Needed for reading grib files") - - keys = ["bitmapPresent"] + raise RuntimeError("eccodes not found. 
Needed for reading grib files") logging.debug("Look for %s", gribvar.generate_grib_id()) field = None geo_out = None - file_handler = open(self.fname, mode="rb") - while 1: - gid = eccodes.codes_grib_new_from_file(file_handler) - - if gid is None: - logging.warning("Could not find key") - gribvar.print_keys() - file_handler.close() - return field, geo_out - else: - # print("\n Next key") - # print_grib_id(gid) - if gribvar.matches(gid): - - # print("Found key") - # gribvar.print_keys() - - values = self.read_field_in_message(gid, time) - logging.debug("read values = %s", values) - - grid_type = str(eccodes.codes_get(gid, "gridType")) - logging.debug("grid_type=%s", grid_type) - if grid_type.lower() == "rotated_ll": - geo_keys = [ - 'Ni', - 'Nj', - 'latitudeOfFirstGridPointInDegrees', - 'longitudeOfFirstGridPointInDegrees', - 'latitudeOfLastGridPointInDegrees', - 'longitudeOfLastGridPointInDegrees', - 'iDirectionIncrementInDegrees', - 'jDirectionIncrementInDegrees', - "latitudeOfSouthernPoleInDegrees", - "longitudeOfSouthernPoleInDegrees", - 'iScansNegatively', - 'jScansPositively' - ] - geo_info = self.read_geo_info(gid, geo_keys) - - n_x = geo_info["Ni"] - n_y = geo_info["Nj"] - - ll_lon = geo_info["longitudeOfFirstGridPointInDegrees"] - ll_lat = geo_info["latitudeOfFirstGridPointInDegrees"] - dlon = geo_info["iDirectionIncrementInDegrees"] - dlat = geo_info["jDirectionIncrementInDegrees"] - sp_lon = geo_info["longitudeOfSouthernPoleInDegrees"] - iscan = geo_info["iScansNegatively"] - jscan = geo_info["jScansPositively"] - if sp_lon < -180.0: - sp_lon = sp_lon + 360. - elif sp_lon > 180.0: - sp_lon = sp_lon - 360. - sp_lat = -1 * geo_info["latitudeOfSouthernPoleInDegrees"] - earth = 6.371229e+6 - - proj_string = f"+proj=ob_tran +o_proj=longlat +o_lat_p={sp_lat}"\ - f" +R={str(earth)} +no_defs" - logging.info(proj_string) - logging.info("ll_lon=%s ll_lat=%s", ll_lon, ll_lat) - logging.info("polon=%s polat=%s", sp_lon, sp_lat) - logging.info("dlon=%s dlat=%s", dlon, dlat) - logging.info("iscan=%s jscan=%s", iscan, jscan) - proj = pyproj.CRS.from_string(proj_string) - wgs84 = pyproj.CRS.from_string("EPSG:4326") - - lons = [] - for i in range(0, n_x): - if int(iscan) == 1: - lon = ll_lon - (float(i) * dlon) - else: - lon = ll_lon + (float(i) * dlon) - if lon < -180.0: - lon = lon + 360. - elif lon > 180.0: - lon = lon - 360. - lons.append(lon) - lats = [] - for j in range(0, n_y): - if int(jscan) == 1: - lat = ll_lat + (float(j) * dlat) - else: - lat = ll_lat - (float(j) * dlat) - if lat > 90.0: - lat = lat - 90.0 - elif lat < -90.0: - lat = lat + 90. 
- lats.append(lat) - - lons = np.array(lons) - lats = np.array(lats) - longitudes, latitudes = np.meshgrid(lons, lats, indexing='ij') - lons, lats = \ - pyproj.Transformer.from_crs(proj, wgs84, always_xy=True).transform(longitudes, latitudes) - lons = lons + sp_lon - - field = np.reshape(values, [n_x, n_y], order="F") - if geo_out is None: - geo_out = surfex.geo.Geo(lons, lats) - - elif grid_type.lower() == "regular_ll": - geo_keys = [ - 'Ni', - 'Nj', - 'latitudeOfFirstGridPointInDegrees', - 'longitudeOfFirstGridPointInDegrees', - 'latitudeOfLastGridPointInDegrees', - 'longitudeOfLastGridPointInDegrees', - 'iDirectionIncrementInDegrees', - 'jDirectionIncrementInDegrees' - ] - geo_info = self.read_geo_info(gid, geo_keys) - n_x = geo_info["Ni"] - n_y = geo_info["Nj"] - lon0 = geo_info["longitudeOfFirstGridPointInDegrees"] - lat0 = geo_info["latitudeOfFirstGridPointInDegrees"] - d_x = geo_info["iDirectionIncrementInDegrees"] - d_y = geo_info["jDirectionIncrementInDegrees"] - lons = [] - lats = [] - for i in range(0, n_x): - lons.append(lon0 + (float(i)*d_x)) - for j in range(0, n_y): - lats.append(lat0 - (float(j)*d_y)) - lon1 = lons[-1] - lat1 = lats[-1] - lons = np.array(lons) - lats = np.array(lats) - lons, lats = np.meshgrid(lons, lats) - field = np.reshape(values, [n_x, n_y], order="F") + with open(self.fname, mode="rb") as file_handler: + while 1: + gid = eccodes.codes_grib_new_from_file(file_handler) - if geo_out is None: - domain = { - "nam_lonlat_reg": { - "xlonmin": lon0, - "xlonmax": lon1, - "xlatmin": lat0, - "xlatmax": lat1, - "nlon": n_x, - "nlat": n_y + if gid is None: + logging.warning("Could not find key") + gribvar.print_keys() + file_handler.close() + return field, geo_out + else: + if gribvar.matches(gid): + values = self.read_field_in_message(gid, time) + logging.debug("read values = %s", values) + + grid_type = str(eccodes.codes_get(gid, "gridType")) + logging.debug("grid_type=%s", grid_type) + if grid_type.lower() == "rotated_ll": + geo_keys = [ + "Ni", + "Nj", + "latitudeOfFirstGridPointInDegrees", + "longitudeOfFirstGridPointInDegrees", + "latitudeOfLastGridPointInDegrees", + "longitudeOfLastGridPointInDegrees", + "iDirectionIncrementInDegrees", + "jDirectionIncrementInDegrees", + "latitudeOfSouthernPoleInDegrees", + "longitudeOfSouthernPoleInDegrees", + "iScansNegatively", + "jScansPositively", + ] + geo_info = self.read_geo_info(gid, geo_keys) + + n_x = geo_info["Ni"] + n_y = geo_info["Nj"] + + ll_lon = geo_info["longitudeOfFirstGridPointInDegrees"] + ll_lat = geo_info["latitudeOfFirstGridPointInDegrees"] + dlon = geo_info["iDirectionIncrementInDegrees"] + dlat = geo_info["jDirectionIncrementInDegrees"] + sp_lon = geo_info["longitudeOfSouthernPoleInDegrees"] + iscan = geo_info["iScansNegatively"] + jscan = geo_info["jScansPositively"] + if sp_lon < -180.0: + sp_lon = sp_lon + 360.0 + elif sp_lon > 180.0: + sp_lon = sp_lon - 360.0 + sp_lat = -1 * geo_info["latitudeOfSouthernPoleInDegrees"] + earth = 6.371229e6 + + proj_string = ( + f"+proj=ob_tran +o_proj=longlat +o_lat_p={sp_lat}" + f" +R={str(earth)} +no_defs" + ) + logging.info(proj_string) + logging.info("ll_lon=%s ll_lat=%s", ll_lon, ll_lat) + logging.info("polon=%s polat=%s", sp_lon, sp_lat) + logging.info("dlon=%s dlat=%s", dlon, dlat) + logging.info("iscan=%s jscan=%s", iscan, jscan) + proj = pyproj.CRS.from_string(proj_string) + wgs84 = pyproj.CRS.from_string("EPSG:4326") + + lons = [] + for i in range(0, n_x): + if int(iscan) == 1: + lon = ll_lon - (float(i) * dlon) + else: + lon = ll_lon + (float(i) * 
dlon) + if lon < -180.0: + lon = lon + 360.0 + elif lon > 180.0: + lon = lon - 360.0 + lons.append(lon) + lats = [] + for j in range(0, n_y): + if int(jscan) == 1: + lat = ll_lat + (float(j) * dlat) + else: + lat = ll_lat - (float(j) * dlat) + if lat > 90.0: + lat = lat - 90.0 + elif lat < -90.0: + lat = lat + 90.0 + lats.append(lat) + + lons = np.array(lons) + lats = np.array(lats) + longitudes, latitudes = np.meshgrid(lons, lats, indexing="ij") + lons, lats = pyproj.Transformer.from_crs( + proj, wgs84, always_xy=True + ).transform(longitudes, latitudes) + lons = lons + sp_lon + + field = np.reshape(values, [n_x, n_y], order="F") + if geo_out is None: + geo_out = Geo(lons, lats) + + elif grid_type.lower() == "regular_ll": + geo_keys = [ + "Ni", + "Nj", + "latitudeOfFirstGridPointInDegrees", + "longitudeOfFirstGridPointInDegrees", + "latitudeOfLastGridPointInDegrees", + "longitudeOfLastGridPointInDegrees", + "iDirectionIncrementInDegrees", + "jDirectionIncrementInDegrees", + ] + geo_info = self.read_geo_info(gid, geo_keys) + n_x = geo_info["Ni"] + n_y = geo_info["Nj"] + lon0 = geo_info["longitudeOfFirstGridPointInDegrees"] + lat0 = geo_info["latitudeOfFirstGridPointInDegrees"] + d_x = geo_info["iDirectionIncrementInDegrees"] + d_y = geo_info["jDirectionIncrementInDegrees"] + lons = [] + lats = [] + for i in range(0, n_x): + lons.append(lon0 + (float(i) * d_x)) + for j in range(0, n_y): + lats.append(lat0 - (float(j) * d_y)) + lon1 = lons[-1] + lat1 = lats[-1] + lons = np.array(lons) + lats = np.array(lats) + lons, lats = np.meshgrid(lons, lats) + field = np.reshape(values, [n_x, n_y], order="F") + + if geo_out is None: + domain = { + "nam_lonlat_reg": { + "xlonmin": lon0, + "xlonmax": lon1, + "xlatmin": lat0, + "xlatmax": lat1, + "nlon": n_x, + "nlat": n_y, + } } - } - geo_out = surfex.geo.LonLatReg(domain) - - elif grid_type.lower() == "lambert": - geo_keys = [ - "Nx", - "Ny", - "latitudeOfFirstGridPointInDegrees", - "longitudeOfFirstGridPointInDegrees", - "LoVInDegrees", - "DxInMetres", - "DyInMetres", - "iScansNegatively", - "jScansPositively", - "jPointsAreConsecutive", - "Latin1InDegrees", - "LaDInDegrees", - "Latin2InDegrees", - "latitudeOfSouthernPoleInDegrees", - "longitudeOfSouthernPoleInDegrees" - ] - geo_info = self.read_geo_info(gid, geo_keys) - - n_x = geo_info["Nx"] - n_y = geo_info["Ny"] - - lon0 = geo_info["LoVInDegrees"] - lat0 = geo_info["LaDInDegrees"] - ll_lon = geo_info["longitudeOfFirstGridPointInDegrees"] - ll_lat = geo_info["latitudeOfFirstGridPointInDegrees"] - d_x = geo_info["DxInMetres"] - d_y = geo_info["DyInMetres"] - - earth = 6.37122e+6 - proj_string = f"+proj=lcc +lat_0={str(lat0)} +lon_0={str(lon0)} "\ - f"+lat_1={str(lat0)} +lat_2={str(lat0)} "\ - f"+units=m +no_defs +R={str(earth)}" - - proj = pyproj.CRS.from_string(proj_string) - wgs84 = pyproj.CRS.from_string("EPSG:4326") - x_0, y_0 = pyproj.Transformer.from_crs( - wgs84, proj, always_xy=True).transform(ll_lon, ll_lat) - x_c = x_0 + 0.5 * (n_x - 1) * d_x - y_c = y_0 + 0.5 * (n_y - 1) * d_y - lonc, latc = pyproj.Transformer.from_crs( - proj, wgs84, always_xy=True).transform(x_c, y_c) - - # TODO we should investigate scan angle and if done correctly - # order should probaly be "C" and not "F" - field = np.reshape(values, [n_x, n_y], order="F") + geo_out = LonLatReg(domain) + + elif grid_type.lower() == "lambert": + geo_keys = [ + "Nx", + "Ny", + "latitudeOfFirstGridPointInDegrees", + "longitudeOfFirstGridPointInDegrees", + "LoVInDegrees", + "DxInMetres", + "DyInMetres", + "iScansNegatively", + 
"jScansPositively", + "jPointsAreConsecutive", + "Latin1InDegrees", + "LaDInDegrees", + "Latin2InDegrees", + "latitudeOfSouthernPoleInDegrees", + "longitudeOfSouthernPoleInDegrees", + ] + geo_info = self.read_geo_info(gid, geo_keys) + + n_x = geo_info["Nx"] + n_y = geo_info["Ny"] + + lon0 = geo_info["LoVInDegrees"] + lat0 = geo_info["LaDInDegrees"] + ll_lon = geo_info["longitudeOfFirstGridPointInDegrees"] + ll_lat = geo_info["latitudeOfFirstGridPointInDegrees"] + d_x = geo_info["DxInMetres"] + d_y = geo_info["DyInMetres"] + + earth = 6.37122e6 + proj_string = ( + f"+proj=lcc +lat_0={str(lat0)} +lon_0={str(lon0)} " + f"+lat_1={str(lat0)} +lat_2={str(lat0)} " + f"+units=m +no_defs +R={str(earth)}" + ) + + proj = pyproj.CRS.from_string(proj_string) + wgs84 = pyproj.CRS.from_string("EPSG:4326") + x_0, y_0 = pyproj.Transformer.from_crs( + wgs84, proj, always_xy=True + ).transform(ll_lon, ll_lat) + x_c = x_0 + 0.5 * (n_x - 1) * d_x + y_c = y_0 + 0.5 * (n_y - 1) * d_y + lonc, latc = pyproj.Transformer.from_crs( + proj, wgs84, always_xy=True + ).transform(x_c, y_c) + + # TODO we should investigate scan angle and if done correctly + # order should probaly be "C" and not "F" + field = np.reshape(values, [n_x, n_y], order="F") + + if geo_out is None: + domain = { + "nam_conf_proj": {"xlon0": lon0, "xlat0": lat0}, + "nam_conf_proj_grid": { + "xloncen": lonc, + "xlatcen": latc, + "nimax": n_x, + "njmax": n_y, + "xdx": d_x, + "xdy": d_y, + "ilone": 0, + "ilate": 0, + }, + } + geo_out = ConfProj(domain) + else: + raise NotImplementedError( + str(grid_type) + " not implemented yet!" + ) + eccodes.codes_release(gid) if geo_out is None: - domain = { - "nam_conf_proj": { - "xlon0": lon0, - "xlat0": lat0 - }, - "nam_conf_proj_grid": { - "xloncen": lonc, - "xlatcen": latc, - "nimax": n_x, - "njmax": n_y, - "xdx": d_x, - "xdy": d_y, - "ilone": 0, - "ilate": 0 - } - } - geo_out = surfex.geo.ConfProj(domain) - else: - raise NotImplementedError(str(grid_type) + " not implemented yet!") + raise RuntimeError("No geometry is found in file") + return field, geo_out eccodes.codes_release(gid) - file_handler.close() - - if geo_out is None: - raise Exception("No geometry is found in file") - - return field, geo_out - eccodes.codes_release(gid) @staticmethod def read_geo_info(gid, keys): @@ -293,7 +298,7 @@ def read_geo_info(gid, keys): geo_dict = {} for key in keys: try: - logging.debug(' %s: %s', key, eccodes.codes_get(gid, key)) + logging.debug(" %s: %s", key, eccodes.codes_get(gid, key)) geo_dict.update({key: eccodes.codes_get(gid, key)}) except eccodes.KeyValueNotFoundError as err: logging.debug(' Key="%s" was not found: %s', key, err.msg) @@ -314,11 +319,13 @@ def read_field_in_message(gid, time): """ try: - logging.debug('There are %d values, average is %f, min is %f, max is %f', - eccodes.codes_get_size(gid, 'values'), - eccodes.codes_get(gid, 'average'), - eccodes.codes_get(gid, 'min'), - eccodes.codes_get(gid, 'max')) + logging.debug( + "There are %d values, average is %f, min is %f, max is %f", + eccodes.codes_get_size(gid, "values"), + eccodes.codes_get(gid, "average"), + eccodes.codes_get(gid, "min"), + eccodes.codes_get(gid, "max"), + ) field = eccodes.codes_get_values(gid) # TODO Check time consistency @@ -328,17 +335,17 @@ def read_field_in_message(gid, time): try: has_bitmap = int(eccodes.codes_get(gid, "bitmapPresent")) except eccodes.KeyValueNotFoundError as err: - logging.debug(' Key="%s" was not found: %s', key, err.msg) + logging.debug(' Key="bitmapPresent" was not found: %s', err.msg) except 
eccodes.CodesInternalError as err: - logging.error('Error with key="%s" : %s', key, err.msg) + logging.error('Error with key="bitmapPresent" : %s', err.msg) if has_bitmap == 1: missing_value = eccodes.codes_get(gid, "missingValue") field[field == missing_value] = np.nan return field except eccodes.KeyValueNotFoundError as err: - logging.debug(' Key="%s" was not found: %s', key, err.msg) + logging.debug(' Key="missingValue" was not found: %s', err.msg) except eccodes.CodesInternalError as err: - logging.error('Error with key="%s" : %s', key, err.msg) + logging.error('Error with key="missingValue" : %s', err.msg) return None def points(self, gribvar, geo, validtime=None, interpolation="bilinear"): @@ -355,12 +362,12 @@ def points(self, gribvar, geo, validtime=None, interpolation="bilinear"): """ field, geo_in = self.field(gribvar, validtime) - interpolator = surfex.interpolation.Interpolation(interpolation, geo_in, geo) + interpolator = Interpolation(interpolation, geo_in, geo) field = interpolator.interpolate(field) return field, interpolator -class Grib1Variable(): +class Grib1Variable: """Grib1 variable.""" def __init__(self, par, typ, level, tri): @@ -395,9 +402,13 @@ def matches(self, gid): Returns: bool: True if found + + Raises: + RuntimeError: If eccodes is not found + """ if eccodes is None: - raise Exception("eccodes not found. Needed for reading grib files") + raise RuntimeError("eccodes not found. Needed for reading grib files") version = int(eccodes.codes_get(gid, "editionNumber")) if version == 1: @@ -408,8 +419,15 @@ def matches(self, gid): logging.debug("Checking grib1 record: %s %s %s %s", par, typ, lev, tri) logging.debug(self.generate_grib_id()) - if self.par == par and self.level == lev and self.typ == typ and self.tri == tri: - logging.debug("Found matching grib1 record: %s %s %s %s", par, lev, typ, tri) + if ( + self.par == par + and self.level == lev + and self.typ == typ + and self.tri == tri + ): + logging.debug( + "Found matching grib1 record: %s %s %s %s", par, lev, typ, tri + ) return True else: return False @@ -442,12 +460,12 @@ def __init__(self, discipline, pca, pnr, typ, lev, tsp=-1): """Construct a grib2 variable. Args: - discipline (_type_): _description_ - pca (_type_): _description_ - pnr (_type_): _description_ - typ (_type_): _description_ - lev (_type_): _description_ - tsp (int, optional): _description_. Defaults to -1. + discipline (int): discipline + pca (int): parameterCategory + pnr (int): parameterNumber + typ (int): levelType + lev (int): level + tsp (int, optional): typeOfStatisticalProcessing. Defaults to -1. """ self.version = 2 self.discipline = int(discipline) @@ -461,16 +479,16 @@ def matches(self, gid): """Check if matches. Args: - gid (_type_): _description_ + gid (int): Grib ID Raises: - Exception: _description_ + ModuleNotFoundError: If eccodes is not found Returns: - _type_: _description_ + bool: True if matches """ if eccodes is None: - raise Exception("eccodes not found. Needed for reading grib files") + raise ModuleNotFoundError("eccodes not found. 
Needed for reading grib files") version = int(eccodes.codes_get(gid, "editionNumber")) if version == 2: @@ -481,24 +499,38 @@ def matches(self, gid): level = int(eccodes.codes_get(gid, "level")) try: type_of_statistical_processing = eccodes.codes_get( - gid, "typeOfStatisticalProcessing") + gid, "typeOfStatisticalProcessing" + ) type_of_statistical_processing = int(type_of_statistical_processing) except gribapi.errors.KeyValueNotFoundError: type_of_statistical_processing = -1 - logging.debug("Checking grib2 record: %s %s %s %s %s %s", - discipline, parameter_category, parameter_number, level_type, level, - type_of_statistical_processing) + logging.debug( + "Checking grib2 record: %s %s %s %s %s %s", + discipline, + parameter_category, + parameter_number, + level_type, + level, + type_of_statistical_processing, + ) logging.debug("grib2 ID: %s", self.generate_grib_id()) - if self.discipline == discipline and \ - self.parameter_category == parameter_category and \ - self.parameter_number == parameter_number and \ - self.level_type == level_type and \ - self.level == level and \ - self.type_of_statistical_processing == type_of_statistical_processing: - logging.debug("Found matching grib2 record: %s %s %s %s %s", - discipline, parameter_category, parameter_number, - level_type, level) + if ( + self.discipline == discipline + and self.parameter_category == parameter_category + and self.parameter_number == parameter_number + and self.level_type == level_type + and self.level == level + and self.type_of_statistical_processing == type_of_statistical_processing + ): + logging.debug( + "Found matching grib2 record: %s %s %s %s %s", + discipline, + parameter_category, + parameter_number, + level_type, + level, + ) return True else: return False diff --git a/surfex/input_methods.py b/surfex/input_methods.py new file mode 100644 index 0000000..24feb5f --- /dev/null +++ b/surfex/input_methods.py @@ -0,0 +1,397 @@ +"""Input methods.""" +import glob +import logging +import os + +from .bufr import BufrObservationSet +from .datetime_utils import as_timedelta +from .geo import LonLatVal +from .obs import JsonObservationSet, MetFrostObservations, NetatmoObservationSet +from .obsoul import ObservationDataSetFromObsoulFile +from .util import parse_filepattern + + +def get_datasources(obs_time, settings): + """Get data sources. 
+ + Main data source interface setting data ObservationSet objects based on settings dictionary + + Args: + obs_time (datetime.datetime): Observation time + settings (dict): Settings + + Raises: + NotImplementedError: Unknown observation file format + NotImplementedError: Only one file reading implemented + RuntimeError: No filenames or filepattern found + RuntimeError: You must set variable name + RuntimeError: You must set varname to read NETATMO JSON files + RuntimeError: You must set variable name + + Returns: + datasources(list): List of observation data sets + """ + datasources = [] + for obs_set in settings: + + kwargs = {} + kwargs.update({"label": obs_set}) + + if "filetype" in settings[obs_set]: + filetype = settings[obs_set]["filetype"] + filepattern = None + if "filepattern" in settings[obs_set]: + filepattern = settings[obs_set]["filepattern"] + + validtime = obs_time + if filetype.lower() == "bufr": + if isinstance(filepattern, list): + if len(filepattern) > 1: + raise NotImplementedError("Only one file reading implemented") + filepattern = filepattern[0] + filename = parse_filepattern(filepattern, obs_time, validtime) + if "varname" in settings[obs_set]: + varname = settings[obs_set]["varname"] + else: + raise RuntimeError("You must set variable name") + + if "lonrange" in settings[obs_set]: + kwargs.update({"lonrange": settings[obs_set]["lonrange"]}) + if "latrange" in settings[obs_set]: + kwargs.update({"latrange": settings[obs_set]["latrange"]}) + if "dt" in settings[obs_set]: + deltat = settings[obs_set]["dt"] + else: + deltat = 1800 + + valid_range = as_timedelta(seconds=deltat) + if os.path.exists(filename): + datasources.append( + BufrObservationSet( + filename, varname, obs_time, valid_range, **kwargs + ) + ) + else: + logging.warning("WARNING: filename %s not set. Not added.", filename) + + elif filetype.lower() == "netatmo": + filenames = None + if "filenames" in settings[obs_set]: + filenames = settings[obs_set]["filenames"] + if filenames is None: + if "filepattern" in settings[obs_set]: + filepattern = settings[obs_set]["filepattern"] + neg_t_range = 15 + if "neg_t_range" in settings[obs_set]: + neg_t_range = settings[obs_set]["neg_t_range"] + pos_t_range = 15 + if "pos_t_range" in settings[obs_set]: + pos_t_range = settings[obs_set]["pos_t_range"] + + dtg = validtime - as_timedelta(seconds=int(neg_t_range) * 60) + end_dtg = validtime + as_timedelta(seconds=int(pos_t_range) * 60) + + filenames = [] + while dtg < end_dtg: + fname = parse_filepattern(filepattern, dtg, dtg) + fname = glob.glob(fname) + if len(fname) == 1: + fname = fname[0] + if os.path.exists(fname) and fname not in filenames: + filenames.append(fname) + dtg = dtg + as_timedelta(seconds=60) + else: + raise RuntimeError("No filenames or filepattern found") + if "varname" in settings[obs_set]: + variable = settings[obs_set]["varname"] + else: + raise RuntimeError("You must set varname to read NETATMO JSON files") + + if "lonrange" in settings[obs_set]: + kwargs.update({"lonrange": settings[obs_set]["lonrange"]}) + if "latrange" in settings[obs_set]: + kwargs.update({"latrange": settings[obs_set]["latrange"]}) + if "dt" in settings[obs_set]: + kwargs.update({"dt": settings[obs_set]["dt"]}) + else: + kwargs.update({"dt": 1800}) + + if filenames is not None: + datasources.append( + NetatmoObservationSet(filenames, variable, obs_time, **kwargs) + ) + else: + logging.warning("WARNING: filenames not set. 
Not added.") + + elif filetype.lower() == "frost": + if "varname" in settings[obs_set]: + varname = settings[obs_set]["varname"] + else: + raise RuntimeError("You must set variable name") + + if "lonrange" in settings[obs_set]: + kwargs.update({"lonrange": settings[obs_set]["lonrange"]}) + if "latrange" in settings[obs_set]: + kwargs.update({"latrange": settings[obs_set]["latrange"]}) + if "unit" in settings[obs_set]: + kwargs.update({"unit": settings[obs_set]["unit"]}) + if "level" in settings[obs_set]: + kwargs.update({"level": settings[obs_set]["level"]}) + kwargs.update({"validtime": obs_time}) + datasources.append(MetFrostObservations(varname, **kwargs)) + elif filetype.lower() == "obsoul": + if isinstance(filepattern, list): + if len(filepattern) > 1: + raise NotImplementedError("Only one file reading implemented") + filepattern = filepattern[0] + filename = parse_filepattern(filepattern, obs_time, validtime) + obnumber = None + neg_dt = None + pos_dt = None + obtypes = None + subtypes = None + if "obnumber" in settings[obs_set]: + obnumber = int(settings[obs_set]["obnumber"]) + if "neg_dt" in settings[obs_set]: + neg_dt = int(settings[obs_set]["neg_dt"]) + if "pos_dt" in settings[obs_set]: + pos_dt = int(settings[obs_set]["pos_dt"]) + if "obtypes" in settings[obs_set]: + obtypes = settings[obs_set]["obtypes"] + if "subtypes" in settings[obs_set]: + subtypes = settings[obs_set]["subtypes"] + if os.path.exists(filename): + datasources.append( + ObservationDataSetFromObsoulFile( + filename, + an_time=obs_time, + neg_dt=neg_dt, + pos_dt=pos_dt, + obtypes=obtypes, + subtypes=subtypes, + obnumber=obnumber, + ) + ) + else: + print("WARNING: filename " + filename + " not existing. Not added.") + elif filetype.lower() == "json": + if isinstance(filepattern, list): + if len(filepattern) > 1: + raise NotImplementedError("Only one file reading implemented") + filepattern = filepattern[0] + filename = parse_filepattern(filepattern, obs_time, validtime) + varname = None + if "varname" in settings[obs_set]: + varname = settings[obs_set]["varname"] + + kwargs.update({"var": varname}) + if os.path.exists(filename): + datasources.append(JsonObservationSet(filename, **kwargs)) + else: + logging.warning( + "WARNING: filename %s not existing. Not added.", filename + ) + else: + raise NotImplementedError("Unknown observation file format") + else: + logging.info("No file type provided") + return datasources + + +def set_geo_from_obs_set( + obs_time, obs_type, varname, inputfile, lonrange=None, latrange=None +): + """Set geometry from obs file. + + Args: + obs_time (as_datetime): Observation time + obs_type (str): Observation file type + varname (str): _Observation variable + inputfile (str): Input file with obs set + lonrange (tuple, optional): Longitude range (min, max). Defaults to None. + latrange (tuple, optional): Latitude range (min, max). Defaults to None. 
+ + Returns: + geo (Geo): Surfex geometry + + """ + settings = { + "obs": { + "varname": varname, + "filetype": obs_type, + "inputfile": inputfile, + "filepattern": inputfile, + } + } + if lonrange is None: + lonrange = [-180, 180] + if latrange is None: + latrange = [-90, 90] + + logging.debug("%s", settings) + logging.debug("Get data source") + __, lons, lats, __, __, __, __ = get_datasources(obs_time, settings)[0].get_obs() + + selected_lons = [] + selected_lats = [] + for i, lon in enumerate(lons): + lat = lats[i] + + if lonrange[0] <= lon <= lonrange[1] and latrange[0] <= lat <= latrange[1]: + lon = round(lon, 5) + lat = round(lat, 5) + selected_lons.append(lon) + selected_lats.append(lat) + + d_x = ["0.3"] * len(selected_lons) + geo_json = { + "nam_pgd_grid": {"cgrid": "LONLATVAL"}, + "nam_lonlatval": { + "xx": selected_lons, + "xy": selected_lats, + "xdx": d_x, + "xdy": d_x, + }, + } + geo = LonLatVal(geo_json) + return geo + + +def get_obsset( + obs_time, + obs_type, + varname, + inputfile, + lonrange=None, + latrange=None, + label=None, + neg_t_range=None, + pos_t_range=None, + unit=None, + level=None, + obtypes=None, + subtypes=None, +): + """Create an observation set from an input data set. + + Args: + obs_time (as_datetime): Observation time + obs_type (str): Observation file type + varname (list): Observation variable(s) + inputfile (list): Input file(s) with obs set + pos_t_range (timedelta, optional): Time window duration after obs_time + neg_t_range (timedelta, optional): Time window duration before obs_time + lonrange (tuple, optional): Longitude range (min, max). Defaults to None. + latrange (tuple, optional): Latitude range (min, max). Defaults to None. + label (str, optional): Obs set label. Defaults to None, which means it will be the same as obs_type + unit (str, optional): Unit (FROST) + level (str, optional): Level (FROST) + obtypes (list, optional): Obstypes (obsoul) + subtypes (list, optional): Subtypes (obsoul) + + Returns: + obsset (ObservationSet): Observation set + + """ + if label is None: + label = obs_type + if isinstance(varname, str): + varname = [varname] + if isinstance(inputfile, str): + inputfile = [inputfile] + if lonrange is None: + lonrange = [-180, 180] + if latrange is None: + latrange = [-90, 90] + dt = None + if dt is None and pos_t_range is not None: + dt = pos_t_range + if dt is None and neg_t_range is not None: + dt = neg_t_range + dt_seconds = dt + if dt is not None: + dt_seconds = int(dt.total_seconds()) + pos_t_range_seconds = pos_t_range + if pos_t_range is not None: + pos_t_range_seconds = int(pos_t_range.total_seconds()) + neg_t_range_seconds = neg_t_range + if neg_t_range is not None: + neg_t_range_seconds = int(neg_t_range.total_seconds()) + settings = { + label: { + "varname": varname, + "filetype": obs_type, + "inputfile": inputfile, + "filepattern": inputfile, + "dt": dt_seconds, + "label": label, + "lonrange": lonrange, + "latrange": latrange, + "unit": unit, + "level": level, + "obtypes": obtypes, + "subtypes": subtypes, + "pos_t_range": pos_t_range_seconds, + "neg_t_range": neg_t_range_seconds, + } + } + + logging.debug("%s", settings) + logging.debug("Get data source") + return get_datasources(obs_time, settings)[0] + + +def create_obsset_file( + obs_time, + obs_type, + varname, + inputfile, + output, + lonrange=None, + latrange=None, + label=None, + indent=None, + neg_t_range=None, + pos_t_range=None, + unit=None, + level=None, + obtypes=None, + subtypes=None, +): + """Create an observation set from an input data set. 
+ + Args: + obs_time (as_datetime): Observation time + obs_type (str): Observation file type + varname (list): Observation variable(s) + inputfile (list): Input file(s) with obs set + output (str): Output file + pos_t_range (timedelta, optional): Time window duration after obs_time + neg_t_range (timedelta, optional): Time window duration before obs_time + lonrange (tuple, optional): Longitude range (min, max). Defaults to None. + latrange (tuple, optional): Latitude range (min, max). Defaults to None. + label (str, optional): Obs set label. Defaults to None, which means it will be the same as obs_type + indent (int, optional): File indentation. Defaults to None. + unit (str, optional): Unit (FROST) + level (str, optional): Level (FROST) + obtypes (list, optional): Obstypes (obsoul) + subtypes (list, optional): Subtypes (obsoul) + + """ + logging.debug("Get data source") + obsset = get_obsset( + obs_time, + obs_type, + varname, + inputfile, + lonrange=lonrange, + latrange=latrange, + label=label, + neg_t_range=neg_t_range, + pos_t_range=pos_t_range, + unit=unit, + level=level, + obtypes=obtypes, + subtypes=subtypes, + ) + obsset.write_json_file(output, indent=indent) diff --git a/surfex/interpolation.py b/surfex/interpolation.py index acfa220..96e43eb 100644 --- a/surfex/interpolation.py +++ b/surfex/interpolation.py @@ -1,7 +1,109 @@ -"""Interpolation.""" +"""Interpolation. All interfaces to gridpp.""" import logging + +try: + import gridpp +except ModuleNotFoundError: + gridpp = None import numpy as np -import gridpp + + +class Grid: + """A gridpp Grid wrapper. + + Gridpp expects lat, lon, but pysurfex uses lon, lat. + This class handles the conversion for the grid + + """ + + def __init__(self, grid_lons, grid_lats, elevs=None): + """Construct a gridpp Grid wrapper. + + Args: + grid_lons (np.ndarray): grid longitudes + grid_lats (np.ndarray): grid latitudes + elevs (np.ndarray, optional): elevations + + Raises: + RuntimeError: You need gridpp for interpolation + + """ + if gridpp is None: + raise RuntimeError("You need gridpp for interpolation") + logging.debug("gridpp.__file__ = %s", gridpp.__file__) + logging.debug("grid_lons_shape in 0 %s", grid_lons.shape) + logging.debug("grid_lats_shape in 0 %s", grid_lats.shape) + grid_lons = np.transpose(grid_lons) + grid_lats = np.transpose(grid_lats) + logging.debug("grid_lons_shape in 1 %s", grid_lons.shape) + logging.debug("grid_lats_shape in 1 %s", grid_lats.shape) + if elevs is None: + self.grid = gridpp.Grid(grid_lats, grid_lons) + else: + elevs = np.transpose(elevs) + self.grid = gridpp.Grid(grid_lats, grid_lons, elevs) + + +class Points: + """A gridpp Points wrapper. + + Gridpp expects lat, lon, but pysurfex uses lon, lat. + This class handles the conversion for the grid + + """ + + def __init__(self, p_lons, p_lats, p_elevs=None): + """Construct a gridpp Points wrapper. + + Args: + p_lons(np.array): Point longitudes + p_lats(np.array): Point latitudes + p_elevs(np.array, optional): Point elevations. Defaults to None. 
+ + Raises: + RuntimeError: You need gridpp for interpolation + + """ + if gridpp is None: + raise RuntimeError("You need gridpp for interpolation") + if isinstance(p_lons, list): + p_lons = np.asarray(p_lons) + if isinstance(p_lats, list): + p_lats = np.asarray(p_lats) + logging.debug("p_lons_shape in %s", p_lons.shape) + logging.debug("p_lats_shape in %s", p_lats.shape) + self.lons = np.transpose(p_lons) + self.lats = np.transpose(p_lats) + self.elevs = p_elevs + logging.debug("self.lons_shape in %s", self.lons.shape) + logging.debug("self.lats_shape in %s", self.lats.shape) + if p_elevs is None: + self.points = gridpp.Points(self.lats, self.lons) + else: + p_elevs = np.transpose(p_elevs) + self.points = gridpp.Points(self.lats, self.lons, p_elevs) + + def inside_grid(self, grid, distance=2500.0): + """Check if inside grid. + + Args: + grid(Grid): Grid object + distance(float, optional): Max distance from grid. Defaults to 2500.0. + + Returns: + inside_grid(list): Logical mask if inside grid. + """ + inside_grid = [] + # Check if they are in grid + for i in range(0, self.lons.shape[0]): + lon = self.lons[i] + lat = self.lats[i] + neighbours = grid.grid.get_num_neighbours(lat, lon, distance) + if neighbours == 0: + inside_grid.append(False) + else: + inside_grid.append(True) + return inside_grid class Interpolation(object): @@ -11,45 +113,37 @@ def __init__(self, operator, geo_in, geo_out): """Construct an intrpolation object. Args: - operator (_type_): _description_ - geo_in (_type_): _description_ - geo_out (_type_): _description_ + operator (str): Operator + geo_in (surfex.geo.Geo): Input geometry + geo_out (surfex.geo.Geo): Output geometry Raises: - Exception: _description_ + RuntimeError: You can not interpolate without specifying an output geometry """ self.operator = operator self.geo_in = geo_in self.geo_out = geo_out if self.geo_out is None: - raise Exception("You can not interpolate without specifying an output geometry") - + raise RuntimeError( + "You can not interpolate without specifying an output geometry" + ) # Input if self.geo_in is not None: logging.debug("grid_lons.shape in %s", geo_in.lons.shape) logging.debug("grid_lats.shape in %s", geo_in.lats.shape) - grid_lons = np.transpose(geo_in.lons) - grid_lats = np.transpose(geo_in.lats) - self.var_lons = grid_lons - self.var_lats = grid_lats + self.var_lons = geo_in.lons + self.var_lats = geo_in.lats self.identical = self.geo_out.is_identical(self.geo_in) - logging.debug("grid_lons: %s", grid_lons) - logging.debug("grid_lats: %s", grid_lats) - logging.debug("grid_lons.shape for interpolation %s", grid_lons.shape) - logging.debug("grid_lats.shape for interpolation %s", grid_lats.shape) - self.grid = gridpp.Grid(grid_lats, grid_lons) else: self.var_lons = None self.var_lats = None - self.grid = None self.identical = False # Output - lons = np.array(self.geo_out.lonlist) - lats = np.array(self.geo_out.latlist) + self.lons = self.geo_out.lonlist + self.lats = self.geo_out.latlist self.npoints = self.geo_out.npoints - self.points = gridpp.Points(lats, lons) def interpolate(self, field2d, undefined=None): """Do interpolation. @@ -59,9 +153,9 @@ def interpolate(self, field2d, undefined=None): undefined (float, optional): Undefined value if field2d is None. Defaults to None. Raises: - Exception: _description_ - Exception: _description_ - NotImplementedError: _description_ + RuntimeError: Input domain and ouput domain differ. + RuntimeError: You try to interpolate a missing field! 
+ NotImplementedError: Interpolation method not implemented Returns: np.array: interpolated_field @@ -70,33 +164,37 @@ def interpolate(self, field2d, undefined=None): if field2d is None and undefined is not None: return np.full((self.geo_out.nlons * self.geo_out.nlats), undefined) elif field2d is None: - raise Exception("You try to interpolate a missing field!") + raise RuntimeError("You try to interpolate a missing field!") else: logging.debug("field2d.shape %s", field2d.shape) - logging.debug("gridpp.__file__ = %s", gridpp.__file__) if self.identical or self.operator == "none": if self.operator == "none": if not self.identical: - raise Exception("Input domain and ouput domain differ. " - "You must interpolate!") + raise RuntimeError( + "Input domain and ouput domain differ. " + "You must interpolate!" + ) logging.info("No interpolation chosen") else: - logging.info("Input and output domain are identical. " - "No interpolation is needed") + logging.info( + "Input and output domain are identical. " + "No interpolation is needed" + ) interpolated_field = field2d.reshape(self.npoints) else: sub_lons, sub_lats = self.geo_out.subset(self.geo_in) if len(sub_lons) == 0 and len(sub_lats) == 0: - logging.info("Doing '%s' interpolation for %s points", self.operator, - str(self.npoints)) - if self.operator == "nearest": - field2d = np.transpose(field2d) - interpolated_field = gridpp.nearest(self.grid, self.points, field2d) - field2d = np.transpose(field2d) - elif self.operator == "bilinear": - field2d = np.transpose(field2d) - interpolated_field = gridpp.bilinear(self.grid, self.points, field2d) - field2d = np.transpose(field2d) + logging.info( + "Doing '%s' interpolation for %s points", + self.operator, + str(self.npoints), + ) + if self.operator == "nearest" or self.operator == "bilinear": + grid = Grid(self.var_lons, self.var_lats) + points = Points(self.lons, self.lats) + interpolated_field = grid2points( + grid, points, field2d, operator=self.operator + ) elif self.operator == "none": interpolated_field = field2d.reshape(self.npoints) else: @@ -119,16 +217,29 @@ def distance(lon1, lat1, lon2, lat2): Computes the great circle distance between two points using the haversine formula. Values can be vectors. + + Args: + lon1(float): lon1 + lat1(float): lat1 + lon2(float): lon2 + lat2(float): lat2 + + Returns: + cval(float): Circle distance + """ # Convert from degrees to radians pi_constant = 3.14159265 - lon1 = lon1 * 2 * pi_constant / 360. - lat1 = lat1 * 2 * pi_constant / 360. - lon2 = lon2 * 2 * pi_constant / 360. - lat2 = lat2 * 2 * pi_constant / 360. + lon1 = lon1 * 2 * pi_constant / 360.0 + lat1 = lat1 * 2 * pi_constant / 360.0 + lon2 = lon2 * 2 * pi_constant / 360.0 + lat2 = lat2 * 2 * pi_constant / 360.0 dlon = lon2 - lon1 dlat = lat2 - lat1 - aval = np.sin(dlat / 2.) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.) 
** 2 + aval = ( + np.sin(dlat / 2.0) ** 2 + + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.0) ** 2 + ) cval = 2 * np.arcsin(np.sqrt(aval)) * 6.367e6 return cval @@ -137,24 +248,23 @@ def alpha_grid_rot(self): lon = self.var_lons lat = self.var_lats n_x = lat.shape[0] - # ny = lat.shape[1] dlon = np.zeros(lat.shape) dlat = np.zeros(lat.shape) i_1 = np.arange(n_x - 1) - dlon[0:-1, :] = np.sign(lon[i_1 + 1, :] - lon[i_1, :]) * self.distance(lon[i_1, :], - lat[i_1, :], - lon[i_1 + 1, :], - lat[i_1, :]) - dlat[0:-1, :] = -np.sign(lat[i_1 + 1, :] - lat[i_1, :]) * self.distance(lon[i_1, :], - lat[i_1, :], - lon[i_1, :], - lat[i_1 + 1, :]) + dlon[0:-1, :] = np.sign(lon[i_1 + 1, :] - lon[i_1, :]) * self.distance( + lon[i_1, :], lat[i_1, :], lon[i_1 + 1, :], lat[i_1, :] + ) + dlat[0:-1, :] = -np.sign(lat[i_1 + 1, :] - lat[i_1, :]) * self.distance( + lon[i_1, :], lat[i_1, :], lon[i_1, :], lat[i_1 + 1, :] + ) - dlon[-1, :] = np.sign(lon[-1, :] - lon[-2, :]) * self.distance(lon[-2, :], lat[-2, :], - lon[-1, :], lat[-2, :]) - dlat[-1, :] = -np.sign(lat[-1, :] - lat[-2, :]) * self.distance(lon[-2, :], lat[-2, :], - lon[-2, :], lat[-1, :]) + dlon[-1, :] = np.sign(lon[-1, :] - lon[-2, :]) * self.distance( + lon[-2, :], lat[-2, :], lon[-1, :], lat[-2, :] + ) + dlat[-1, :] = -np.sign(lat[-1, :] - lat[-2, :]) * self.distance( + lon[-2, :], lat[-2, :], lon[-2, :], lat[-1, :] + ) alpha = np.rad2deg(np.arctan2(dlon, dlat)) return alpha @@ -164,57 +274,297 @@ def fill_field(field_tmp, geo, radius=1): """Fill field. Args: - field_tmp (_type_): _description_ - radius (int, optional): _description_. Defaults to 1. + field_tmp (np.ndarray): Field + geo (surfex.geo.Geo): Geometry + radius (int, optional): Radius. Defaults to 1. Returns: - _type_: _description_ + tuple: field, nans + + Raises: + RuntimeError: You need gridpp for fill_field + """ + if gridpp is None: + raise RuntimeError("You need gridpp for fill_field") ovalues = gridpp.neighbourhood(field_tmp, radius, gridpp.Mean) nans = 0 for i in range(0, geo.nlons): for j in range(0, geo.nlats): if np.isnan(field_tmp[i][j]): nans = nans + 1 - # print("Sub ", i, j, nn) field_tmp[i][j] = ovalues[i][j] return field_tmp, nans -def grid2points(grid_lons, grid_lats, p_lons, p_lats, grid_values, operator="bilinear"): +def gridpos2points( + grid_lons, + grid_lats, + p_lons, + p_lats, + grid_values, + operator="bilinear", + elev_gradient=None, +): + """Convert grid positions to points. + + Args: + grid_lons (np.ndarray): Grid longitudes + grid_lats (np.ndarray): Grid latitudes + p_lons (np.ndaray): Point longitudes + p_lats (np.ndarray): Point latitudes + grid_values (np.ndarray): Grid values + operator (str, optional): Interpolation operator. Defaults to "bilinear". + elev_gradient (float, optional): Elevation gradient for downscaler + + Returns: + np.ndarray: Interpolated values + """ + grid = Grid(grid_lons, grid_lats) + points = Points(p_lons, p_lats) + return grid2points( + grid, points, grid_values, operator=operator, elev_gradient=elev_gradient + ) + + +def grid2points(grid, points, grid_values, operator="bilinear", elev_gradient=None): """Convert a grid to points. Args: - grid_lons (_type_): _description_ - grid_lats (_type_): _description_ - p_lons (_type_): _description_ - p_lats (_type_): _description_ - grid_values (_type_): _description_ + grid (Grid): Grid object + points (Points): Points object + grid_values (np.ndarray): Grid values + operator (str, optional): Interpolation operator. Defaults to "bilinear". 
+ elev_gradient (float, optional): Elevation gradient for downscaler + + Raises: + NotImplementedError: Operator not implemented + RuntimeError: You need gridpp for interpolation Returns: - _type_: _description_ + np.ndarray: Interpolated values """ - points = gridpp.Points(p_lons, p_lats) - fg_grid = gridpp.Grid(grid_lons, grid_lats) + if gridpp is None: + raise RuntimeError("You need gridpp for interpolation") + logging.debug("grid_values.shape 0: %s", grid_values.shape) + grid_values = np.transpose(grid_values) + logging.debug("grid_values.shape 1: %s", grid_values.shape) if operator == "bilinear": - return gridpp.bilinear(fg_grid, points, grid_values) - if operator == "linear": - return gridpp.nearest(fg_grid, points, grid_values) + if elev_gradient is None: + values = gridpp.bilinear(grid.grid, points.points, grid_values) + else: + values = gridpp.simple_gradient( + grid.grid, points.points, grid_values, elev_gradient, gridpp.Bilinear + ) + elif operator == "nearest": + if elev_gradient is None: + values = gridpp.nearest(grid.grid, points.points, grid_values) + else: + values = gridpp.simple_gradient( + grid.grid, points.points, grid_values, elev_gradient, gridpp.Nearest + ) + else: + raise NotImplementedError(f"Operator {operator} not implemented!") + return values -def get_num_neighbours(grid_lons, grid_lats, p_lon, p_lat, distance=2500.): +def inside_grid(grid_lons, grid_lats, p_lons, p_lats, distance=2500.0): """Get number of neighbours. Args: - grid_lons (_type_): _description_ - grid_lats (_type_): _description_ - p_lon (_type_): _description_ - p_lat (_type_): _description_ - distance (_type_, optional): _description_. Defaults to 2500.. + grid_lons (np.ndarray): Grid longitudes + grid_lats (np.ndarray): Grid latitudes + p_lons (np.array): Point longitudes + p_lats (np.array): Point latitudes + distance (float, optional): Max distance from points. Defaults to 2500.0. + + Returns: + inside_grid(list): Boolean mask + + """ + grid = Grid(grid_lons, grid_lats) + points = Points(p_lons, p_lats) + return points.inside_grid(grid, distance=distance) + + +class ObsOperator(object): + """Obs operator. Class to convert a field to an observation point.""" + + def __init__(self, operator, geo, dataset, grid_values, max_distance=5000): + """Construct the observation operator. + + Args: + operator (str): Interpolation operator. + geo (surfex.Geo): Surfex geometry. + dataset (QCDataSet): QC data set. + grid_values (np.darray): Values in the grid. + max_distance (int, optional): Max allowed deviation in meters from grid borders. + Defaults to 5000. + + """ + lons = dataset.lons + lats = dataset.lats + logging.info( + 'Setting up "%s" observation operator for %s points', operator, str(len(lons)) + ) + obs_values = gridpos2points( + geo.lons, geo.lats, lons, lats, grid_values, operator=operator + ) + self.inside_grid = inside_grid( + geo.lons, geo.lats, lons, lats, distance=max_distance + ) + self.obs_values = obs_values + + def get_obs_value(self, pos=None): + """Get the observed value. + + Args: + pos (int, optional): Position. Defaults to None. + + Raises: + NotImplementedError: Specific position not implemented yet! + + Returns: + float: Observation value for index. + """ + if pos is None: + return self.obs_values + else: + raise NotImplementedError("Specific position not implemented yet!") + + def is_in_grid(self, index): + """Check if index is in grid. + + Args: + index (int): Index to check. 
+ + Returns: + bool: True if inside + """ + return self.inside_grid[index] + + +def horizontal_oi( + geo, + background, + observations, + gelevs, + hlength=10000.0, + vlength=10000.0, + wlength=0.5, + elev_gradient=None, + structure_function="Barnes", + max_locations=50, + epsilon=0.5, + minvalue=None, + maxvalue=None, + interpol="bilinear", + only_diff=False, +): + """Do horizontal OI. + + Args: + geo (_type_): _description_ + background (_type_): _description_ + observations (_type_): _description_ + gelevs (_type_): _description_ + hlength (_type_, optional): _description_. Defaults to 10000.. + vlength (_type_, optional): _description_. Defaults to 10000.. + wlength (float, optional): _description_. Defaults to 0.5. + elev_gradient (int, optional): _description_. Defaults to 0. + structure_function (str, optional): _description_. Defaults to "Barnes". + max_locations (int, optional): _description_. Defaults to 50. + epsilon (float, optional): _description_. Defaults to 0.5. + minvalue (_type_, optional): _description_. Defaults to None. + maxvalue (_type_, optional): _description_. Defaults to None. + interpol (str, optional): _description_. Defaults to "bilinear". + only_diff (bool, optional): _description_. Defaults to False. + + Raises: + NotImplementedError: Structure function not implemented + RuntimeError: You need gridpp to perform OI Returns: _type_: _description_ """ - fg_grid = gridpp.Grid(grid_lons, grid_lats) - return fg_grid.get_num_neighbours(p_lon, p_lat, distance) + if gridpp is None: + raise RuntimeError("You need gridpp to perform OI") + + logging.debug(gridpp.__file__) + logging.debug(gridpp.__version__) + glats = geo.lats + glons = geo.lons + + def obs2vectors(my_obs): + return ( + my_obs.lons, + my_obs.lats, + my_obs.stids, + my_obs.elevs, + my_obs.values, + my_obs.cis, + my_obs.lafs, + ) + + vectors = np.vectorize(obs2vectors) + lons, lats, __, elevs, values, __, __ = vectors(observations) + + bgrid = Grid(glons, glats, gelevs) + points = Points(lons, lats, elevs) + pbackground = grid2points( + bgrid, points, background, operator=interpol, elev_gradient=elev_gradient + ) + + # Remove undefined backgrounds + if any(np.isnan(pbackground)): + print("Found undefined backgrounds. 
Remove them") + lons2 = [] + lats2 = [] + elevs2 = [] + values2 = [] + for point in range(0, len(lons)): + if np.isnan(pbackground[point]): + logging.info( + "Undefined background in lon=%s lat=%s value=%s", + lons[point], + lats[point], + values[point], + ) + else: + lons2.append(lons[point]) + lats2.append(lats[point]) + elevs2.append(elevs[point]) + values2.append(values[point]) + values = values2 + points = Points(lons2, lats2, elevs2) + pbackground = grid2points( + bgrid, points, background, operator=interpol, elev_gradient=elev_gradient + ) + + variance_ratios = np.full(points.points.size(), epsilon) + + if structure_function == "Barnes": + structure = gridpp.BarnesStructure(hlength, vlength, wlength) + else: + raise NotImplementedError + + background = np.transpose(background) + field = gridpp.optimal_interpolation( + bgrid.grid, + background, + points.points, + values, + variance_ratios, + pbackground, + structure, + max_locations, + ) + field = np.asarray(field) + if minvalue is not None: + field[field < minvalue] = minvalue + if maxvalue is not None: + field[field > maxvalue] = maxvalue + if only_diff: + field[field == background] = np.nan + return np.transpose(field) diff --git a/surfex/namelist.py b/surfex/namelist.py index 7a7217d..2abb286 100644 --- a/surfex/namelist.py +++ b/surfex/namelist.py @@ -1,425 +1,32 @@ """Namelist.""" -import os import json import logging -from datetime import datetime, timedelta -import surfex -import yaml -import f90nml - - -class SystemFilePaths(object): - """Matches files and paths depending on possibly system specific settings. - - User can provide a default system dir to nest dependencies. - """ - - def __init__(self, system_file_paths): - """Construct SystemFilePaths. - - Args: - system_file_paths (_type_): _description_ - """ - self.system_file_paths = system_file_paths - # self.system_variables = None - - def get_system_path(self, dtype, default_dir=None, check_existence=False, - check_parsing=False, validtime=None, basedtg=None, mbr=None, tstep=None, - pert=None, var=None): - """Get the system path for a given data type. 
- - Args: - dtype (str): The data type you want to get the path for (clim_dir/bin_dir etc) - default_dir (str): A fallback if the desired dtype is not found - check_existence (bool): Check if the path found also exists - check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) - validtime (datetime.dateime): Parse setting with this as valid time - basedtg (datetime.dateime): Parse setting with this as base time - mbr (int): Parse setting with this as ensemble member - tstep (int): Parse setting with this as time step - pert (int): Parse setting with this as pertubation number - var (str): Parse setting with this as variable - - Returns: - data_dir (str): - - Raises: - Exception: If path not found and check_existence is True - - See Also: - self.parse_setting - - """ - logging.debug("Search for: %s Default: %s", dtype, str(default_dir)) - - data_dir = self.find_matching_data_dir(dtype, default_dir=default_dir, - check_existence=check_existence, - check_parsing=check_parsing, validtime=validtime, - basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, - var=var) - if data_dir is None: - if default_dir is None: - raise Exception("No system path found for " + dtype) - - logging.debug("Find default path") - data_dir = self.find_matching_data_dir(default_dir, default_dir=default_dir, - check_existence=check_existence, - check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, - mbr=mbr, tstep=tstep, pert=pert, var=var) - if data_dir is None: - logging.debug("No default path found for %s", default_dir) - - logging.debug("data_dir %s", data_dir) - return data_dir - - def find_matching_data_dir(self, dtype, default_dir=None, check_existence=False, - check_parsing=False, validtime=None, basedtg=None, mbr=None, - tstep=None, pert=None, var=None): - """Find a matching path from the system path for a given data type. 
- - Args: - dtype (str): The data type you want to get the path for (clim_dir/bin_dir etc) - default_dir (str): A fallback if the desired dtype is not found - check_existence (bool): Check if the path found also exists - check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) - validtime (datetime.dateime): Parse setting with this as valid time - basedtg (datetime.dateime): Parse setting with this as base time - mbr (int): Parse setting with this as ensemble member - tstep (int): Parse setting with this as time step - pert (int): Parse setting with this as pertubation number - var (str): Parse setting with this as variable - - Returns: - data_dir (str): - - Raises: - Exception: If path not found and check_existence is True - - See Also: - self.parse_setting - - """ - command = None - for sfp in self.system_file_paths: - if sfp == dtype: - logging.debug("Found %s %s %s", sfp, type(sfp), self.system_file_paths) - data_dir = self.system_file_paths[sfp] - # If dict, also a command is attached - if isinstance(data_dir, dict): - for key in data_dir: - logging.debug(key, data_dir[key]) - command = str(data_dir[key]) - data_dir = str(key) - logging.debug("Data directory before parsing is: %s", data_dir) - if not isinstance(data_dir, str): - raise Exception("data dir is not a string!") - data_dir = self.parse_setting(self.substitute_string(data_dir), - check_parsing=check_parsing, validtime=validtime, - basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, - var=var) - # Add command to data_dir again - if command is not None: - data_dir = {data_dir: command} - logging.debug("Data directory after parsing is is: %s", data_dir) - if check_existence: - if not os.path.exists(data_dir) and default_dir is None: - raise NotADirectoryError(data_dir) - return data_dir - return None - - def get_system_file(self, dtype, fname, default_dir=None, check_existence=False, - check_parsing=False, validtime=None, basedtg=None, mbr=None, tstep=None, - pert=None, var=None, system_variables=None): - """Get the system path for a given data type and add a file name to the path. 
- - Args: - dtype (str): The data type you want to get the path for (clim_dir/bin_dir etc) - fname (str): Name of the file you want to join to the system path - default_dir (str): A fallback if the desired dtype is not found - check_existence (bool): Check if the path found also exists - check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) - validtime (datetime.dateime): Parse setting with this as valid time - basedtg (datetime.dateime): Parse setting with this as base time - mbr (int): Parse setting with this as ensemble member - tstep (int): Parse setting with this as time step - pert (int): Parse setting with this as pertubation number - var (str): Parse setting with this as variable - system_variables (dict): Arbitrary settings to substitute @NAME@ = - system_variables={"NAME": "Value"} - - Returns: - data_dir (str): - - Raises: - Exception: If path not found and check_existence is True - - See Also: - self.parse_setting - self.substitute_string - - """ - command = None - path = self.get_system_path(dtype, default_dir=default_dir, check_existence=check_existence, - check_parsing=check_parsing, validtime=validtime, - basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, var=var) - - # If dict, also a command is attached - if isinstance(path, dict): - for key in path: - command = str(path[key]) - path = str(key) - fname = self.parse_setting(fname, check_parsing=check_parsing, validtime=validtime, - basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, var=var) - fname = self.substitute_string(fname, system_variables=system_variables) - if path is None: - logging.info("No path found for: %s", dtype) - else: - fname = path + "/" + fname - if check_existence: - if not os.path.exists(fname): - raise FileNotFoundError(fname) - if command is not None: - fname = {fname: command} - return fname - - @staticmethod - def parse_setting(setting, check_parsing=False, validtime=None, basedtg=None, mbr=None, - tstep=None, pert=None, var=None): - """Parse setting with date/time/experiment specific values. - - Args: - setting: The value of dictionary key which should be processes. Parser if type is str. 
- check_parsing (bool): Check if all @@ pairs were parsed - validtime (datetime.daetime): Parse setting with this as validtime - basedtg (datetime.datetime): Parse setting with this as base time - mbr (int): Parse setting with this as ensemble member number (@E@/@EE@/@EEE@) - tstep (int): Parse setting with this as timestep to get step number (@TTT@/@TTTT@) - pert (int): Parse setting with this as perturbation number @PERT@ - var (str): Parse setting with this as the variable (@VAR@) - - Returns: - setting: Possibly parsed setting is type is str - - See Also: - self.parse_setting - self.substitute_string - - """ - # Check on arguments - if isinstance(setting, str): - - if basedtg is not None: - if isinstance(basedtg, str): - basedtg = datetime.strptime(basedtg, "%Y%m%d%H") - if validtime is not None: - if isinstance(validtime, str): - validtime = datetime.strptime(validtime, "%Y%m%d%H") - else: - validtime = basedtg - - if basedtg is not None and validtime is not None: - lead_time = validtime - basedtg - setting = str(setting).replace("@YYYY_LL@", validtime.strftime("%Y")) - setting = str(setting).replace("@MM_LL@", validtime.strftime("%m")) - setting = str(setting).replace("@DD_LL@", validtime.strftime("%d")) - setting = str(setting).replace("@HH_LL@", validtime.strftime("%H")) - setting = str(setting).replace("@mm_LL@", validtime.strftime("%M")) - lead_seconds = int(lead_time.total_seconds()) - # lead_minutes = int(lead_seconds / 3600) - lead_hours = int(lead_seconds / 3600) - setting = str(setting).replace("@LL@", f"{lead_hours:02d}") - setting = str(setting).replace("@LLL@", f"{lead_hours:03d}") - setting = str(setting).replace("@LLLL@", f"{lead_hours:04d}") - if tstep is not None: - lead_step = int(lead_seconds / tstep) - setting = str(setting).replace("@TTT@", f"{lead_step:03d}") - setting = str(setting).replace("@TTTT@", f"{lead_step:04d}") - - if basedtg is not None: - setting = str(setting).replace("@YMD@", basedtg.strftime("%Y%m%d")) - setting = str(setting).replace("@YYYY@", basedtg.strftime("%Y")) - setting = str(setting).replace("@YY@", basedtg.strftime("%y")) - setting = str(setting).replace("@MM@", basedtg.strftime("%m")) - setting = str(setting).replace("@DD@", basedtg.strftime("%d")) - setting = str(setting).replace("@HH@", basedtg.strftime("%H")) - setting = str(setting).replace("@mm@", basedtg.strftime("%M")) - - if mbr is not None: - setting = str(setting).replace("@E@", f"mbr{int(mbr):d}") - setting = str(setting).replace("@EE@", f"mbr{int(mbr):02d}") - setting = str(setting).replace("@EEE@", f"mbr{int(mbr):03d}") - else: - setting = str(setting).replace("@E@", "") - setting = str(setting).replace("@EE@", "") - setting = str(setting).replace("@EEE@", "") - - if pert is not None: - logging.debug("replace %s in %s", pert, setting) - setting = str(setting).replace("@PERT@", str(pert)) - logging.debug("replaced %s in %s", pert, setting) - - if var is not None: - setting = str(setting).replace("@VAR@", var) - - if check_parsing: - if isinstance(setting, str) and setting.count("@") > 1: - raise Exception("Setting was not substituted properly? " + setting) - - return setting - - @staticmethod - def substitute_string(setting, system_variables=None): - """Substitute setting if string with OS values of values from system_variables. 
- - Args: - setting: if setting is string it can be subst - system_variables (dict): Arbitrary settings to substitute @NAME@ = - system_variables={"NAME": "Value"} - - Returns: - setting: A setting possibly substituted if type is str - - """ - if isinstance(setting, str): - env_vals = ["USER", "HOME", "PWD"] - for env_val in env_vals: - if env_val in os.environ: - setting = setting.replace("@" + env_val + "@", os.environ[env_val]) - else: - logging.debug("%s not found in environment", env_val) - - if system_variables is not None: - logging.debug(system_variables) - for var in system_variables: - logging.debug(var, system_variables) - setting = setting.replace("@" + str(var) + "@", str(system_variables[var])) - - return setting - - def add_system_file_path(self, name, path, system_variables=None, check_parsing=True, - validtime=None, basedtg=None, mbr=None, - tstep=None, pert=None, var=None): - """Add a system file path to be used. - - Args: - name (str): The data type you want to get the path for (clim_dir/bin_dir etc) - path (str): Name of the file you want to join to the system path - system_variables (dict): Arbitrary settings to substitute @NAME@ = system_variables= - {"NAME": "Value"} - check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) - validtime (datetime.dateime): Parse setting with this as valid time - basedtg (datetime.dateime): Parse setting with this as base time - mbr (int): Parse setting with this as ensemble member - tstep (int): Parse setting with this as time step - pert (int): Parse setting with this as pertubation number - var (str): Parse setting with this as variable - - Returns: - None - - See Also: - self.parse_setting - self.substitute_string - - """ - path = self.substitute_string(path, system_variables=system_variables) - path = self.parse_setting(path, check_parsing=check_parsing, validtime=validtime, - basedtg=basedtg, mbr=mbr, tstep=tstep, pert=pert, var=var) - self.system_file_paths.update({name: path}) - - -class SystemFilePathsFromFile(SystemFilePaths): - """System file paths.""" - - def __init__(self, system_file_paths): - """System file path from a file. - - Args: - system_file_paths (_type_): _description_ - - """ - system_file_paths = json.load(open(system_file_paths, "r", encoding="utf-8")) - SystemFilePaths.__init__(self, system_file_paths) - - -class ExternalSurfexInputFile(object): - """Wrapper around external input data to surfex. - - Can have special treatment for each format. - Uses internally the SystemFilePaths class - """ - - def __init__(self, system_file_paths): - """Construct ExternalSurfexInputFile. - - Args: - system_file_paths (surfex.SystemFilePaths): Match system specific files. - - """ - self.system_file_paths = system_file_paths - - def set_input_data_from_format(self, dtype, fname, default_dir=None, check_existence=False, - check_parsing=True, validtime=None, basedtg=None, mbr=None, - tstep=None, pert=None, - var=None, system_variables=None, linkbasename=None): - """Set input data based on format. - - Args: - dtype (_type_): _description_ - fname (_type_): _description_ - default_dir (_type_, optional): _description_. Defaults to None. - check_existence (bool, optional): _description_. Defaults to False. - check_parsing (bool, optional): _description_. Defaults to True. - validtime (_type_, optional): _description_. Defaults to None. - basedtg (_type_, optional): _description_. Defaults to None. - mbr (_type_, optional): _description_. Defaults to None. - tstep (_type_, optional): _description_. 
Defaults to None. - pert (_type_, optional): _description_. Defaults to None. - var (_type_, optional): _description_. Defaults to None. - system_variables (_type_, optional): _description_. Defaults to None. - linkbasename (_type_, optional): _description_. Defaults to None. - - Returns: - dict: File name mappings. +import os - """ - fname_with_path = self.system_file_paths.get_system_file(dtype, fname, - default_dir=default_dir, - check_existence=check_existence, - check_parsing=check_parsing, - validtime=validtime, - basedtg=basedtg, mbr=mbr, - tstep=tstep, pert=pert, var=var, - system_variables=system_variables) +import f90nml +import yaml - if fname.endswith(".dir"): - basename = os.path.splitext(os.path.basename(fname))[0] - - basedir = self.system_file_paths.get_system_path(dtype, default_dir=default_dir, - check_existence=check_existence, - check_parsing=check_parsing, - validtime=validtime, basedtg=basedtg, - mbr=mbr, - tstep=tstep, pert=pert, var=var) - logging.debug("%s %s %s", basename, basedir, fname_with_path) - hdr_file = basedir + "/" + basename + ".hdr" - dir_file = basedir + "/" + basename + ".dir" - if linkbasename is None: - linkbasename = basename - return {linkbasename + ".hdr": hdr_file, linkbasename + ".dir": dir_file} - elif fname.endswith(".json"): - return {} - else: - return {fname: fname_with_path} +from .datetime_utils import as_datetime, as_timedelta +from .ecoclimap import Ecoclimap, EcoclimapSG class BaseNamelist(object): """Base namelist.""" - def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, - prep_filetype=None, prep_pgdfile=None, prep_pgdfiletype=None, dtg=None, fcint=3, - geo=None): + def __init__( + self, + program, + config, + input_path, + forc_zs=False, + prep_file=None, + prep_filetype=None, + prep_pgdfile=None, + prep_pgdfiletype=None, + dtg=None, + fcint=3, + geo=None, + ): """Construct a base namelists class to be implemented by namelist implementations. Args: @@ -435,13 +42,17 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, fcint (int): The intervall between the cycles. Used for first guesses. geo (surfex.Geo): Surfex geometry. 
The domain you want to run on + Raises: + RuntimeError: Needed input + NotImplementedError: Mode not implemented + """ self.config = config self.input_path = input_path self.forc_zs = forc_zs if dtg is not None: if isinstance(dtg, str): - dtg = datetime.strptime(dtg, "%Y%m%d%H") + dtg = as_datetime(dtg) self.dtg = dtg check_parsing = True if self.dtg is None: @@ -452,7 +63,7 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, # The time stamp of next cycle file forecast_length = self.fcint if self.dtg is not None: - self.end_of_forecast = self.dtg + timedelta(hours=forecast_length) + self.end_of_forecast = self.dtg + as_timedelta(seconds=forecast_length * 3600) else: self.end_of_forecast = None @@ -465,12 +76,16 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, self.set_pgd_namelist() elif program == "prep": if prep_file is None: - raise Exception("Prep need an input file either as a json namelist or a surfex " - "supported format") - self.set_prep_namelist(prep_file=prep_file, - prep_filetype=prep_filetype, - prep_pgdfile=prep_pgdfile, - prep_pgdfiletype=prep_pgdfiletype) + raise RuntimeError( + "Prep need an input file either as a json namelist or a surfex " + "supported format" + ) + self.set_prep_namelist( + prep_file=prep_file, + prep_filetype=prep_filetype, + prep_pgdfile=prep_pgdfile, + prep_pgdfiletype=prep_pgdfiletype, + ) elif program == "offline" or program == "perturbed": self.set_offline_namelist() elif program == "soda": @@ -489,48 +104,131 @@ def prolog(self, check_parsing): """ # IO self.input_list.append({"file": self.input_path + "/io.json"}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "CSURF_FILETYPE": self.config.get_setting("SURFEX#IO#CSURF_FILETYPE")}}}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "CTIMESERIES_FILETYPE": self.config.get_setting("SURFEX#IO#CTIMESERIES_FILETYPE")}}}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "CFORCING_FILETYPE": self.config.get_setting("SURFEX#IO#CFORCING_FILETYPE")}}}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "CPGDFILE": self.config.get_setting("SURFEX#IO#CPGDFILE")}}}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "CPREPFILE": self.config.get_setting("SURFEX#IO#CPREPFILE")}}}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "CSURFFILE": self.config.get_setting("SURFEX#IO#CSURFFILE", - validtime=self.end_of_forecast, - basedtg=self.dtg, - check_parsing=check_parsing)}}}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "XTSTEP_SURF": self.config.get_setting("SURFEX#IO#XTSTEP")}}}) - self.input_list.append({"json": {"NAM_IO_OFFLINE": { - "XTSTEP_OUTPUT": self.config.get_setting("SURFEX#IO#XTSTEP_OUTPUT")}}}) - self.input_list.append({"json": {"NAM_WRITE_SURF_ATM": { - "LSPLIT_PATCH": self.config.get_setting("SURFEX#IO#LSPLIT_PATCH")}}}) + self.input_list.append( + { + "json": { + "NAM_IO_OFFLINE": { + "CSURF_FILETYPE": self.config.get_setting( + "SURFEX#IO#CSURF_FILETYPE" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_IO_OFFLINE": { + "CTIMESERIES_FILETYPE": self.config.get_setting( + "SURFEX#IO#CTIMESERIES_FILETYPE" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_IO_OFFLINE": { + "CFORCING_FILETYPE": self.config.get_setting( + "SURFEX#IO#CFORCING_FILETYPE" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_IO_OFFLINE": { + "CPGDFILE": self.config.get_setting("SURFEX#IO#CPGDFILE") + } + } + } + ) + self.input_list.append( + { + 
"json": { + "NAM_IO_OFFLINE": { + "CPREPFILE": self.config.get_setting("SURFEX#IO#CPREPFILE") + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_IO_OFFLINE": { + "CSURFFILE": self.config.get_setting( + "SURFEX#IO#CSURFFILE", + validtime=self.end_of_forecast, + basedtg=self.dtg, + check_parsing=check_parsing, + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_IO_OFFLINE": { + "XTSTEP_SURF": self.config.get_setting("SURFEX#IO#XTSTEP") + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_IO_OFFLINE": { + "XTSTEP_OUTPUT": self.config.get_setting( + "SURFEX#IO#XTSTEP_OUTPUT" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_WRITE_SURF_ATM": { + "LSPLIT_PATCH": self.config.get_setting("SURFEX#IO#LSPLIT_PATCH") + } + } + } + ) if self.forc_zs: self.input_list.append({"json": {"NAM_IO_OFFLINE": {"LSET_FORC_ZS": True}}}) # Constants and parameters self.input_list.append({"file": self.input_path + "/constants.json"}) - self.input_list.append({"json": {"NAM_SURF_ATM": { - "XRIMAX": self.config.get_setting("SURFEX#PARAMETERS#XRIMAX")}}}) + self.input_list.append( + { + "json": { + "NAM_SURF_ATM": { + "XRIMAX": self.config.get_setting("SURFEX#PARAMETERS#XRIMAX") + } + } + } + ) def set_pgd_namelist(self): """Set pgd namelist.""" # PGS schemes - self.input_list.append({ - "json": { - "NAM_PGD_SCHEMES": { - "CSEA": self.config.get_setting("SURFEX#TILES#SEA"), - "CWATER": self.config.get_setting("SURFEX#TILES#INLAND_WATER"), - "CNATURE": self.config.get_setting("SURFEX#TILES#NATURE"), - "CTOWN": self.config.get_setting("SURFEX#TILES#TOWN") + self.input_list.append( + { + "json": { + "NAM_PGD_SCHEMES": { + "CSEA": self.config.get_setting("SURFEX#TILES#SEA"), + "CWATER": self.config.get_setting("SURFEX#TILES#INLAND_WATER"), + "CNATURE": self.config.get_setting("SURFEX#TILES#NATURE"), + "CTOWN": self.config.get_setting("SURFEX#TILES#TOWN"), + } } - }}) + } + ) eco_sg = self.config.get_setting("SURFEX#COVER#SG") # Ecoclimap SG @@ -538,25 +236,41 @@ def set_pgd_namelist(self): if self.config.get_setting("SURFEX#COVER#SG"): ecoclimap = EcoclimapSG(self.config) - self.input_list.append({"json": {"NAM_DATA_ISBA": {"NTIME": ecoclimap.decades}}}) + self.input_list.append( + {"json": {"NAM_DATA_ISBA": {"NTIME": ecoclimap.decades}}} + ) fname = self.config.get_setting("SURFEX#COVER#H_TREE") if fname != "" and fname is not None: - self.input_list.append(self.set_dirtyp_data_namelist("NAM_DATA_ISBA", "H_TREE", - fname, vtype=1)) - - decadal_data_types = ["ALBNIR_SOIL", "ALBNIR_VEG", "ALBVIS_SOIL", "ALBVIS_VEG", "LAI"] + self.input_list.append( + self.set_dirtyp_data_namelist( + "NAM_DATA_ISBA", "H_TREE", fname, vtype=1 + ) + ) + + decadal_data_types = [ + "ALBNIR_SOIL", + "ALBNIR_VEG", + "ALBVIS_SOIL", + "ALBVIS_VEG", + "LAI", + ] for decadal_data_type in decadal_data_types: for vtt in range(1, ecoclimap.veg_types + 1): for decade in range(1, ecoclimap.decades + 1): - filepattern = self.config.get_setting("SURFEX#COVER#" + decadal_data_type, - check_parsing=False) + filepattern = self.config.get_setting( + "SURFEX#COVER#" + decadal_data_type, check_parsing=False + ) fname = ecoclimap.parse_fnames(filepattern, decade) - self.input_list.append(self.set_dirtyp_data_namelist("NAM_DATA_ISBA", - decadal_data_type, - fname, - vtype=vtt, - decade=decade)) + self.input_list.append( + self.set_dirtyp_data_namelist( + "NAM_DATA_ISBA", + decadal_data_type, + fname, + vtype=vtt, + decade=decade, + ) + ) ecoclimap_dir = "ecoclimap_dir" if 
self.config.get_setting("SURFEX#COVER#SG"): @@ -567,32 +281,38 @@ def set_pgd_namelist(self): "YSAND": "sand_dir", "YCLAY": "clay_dir", "YSOC_TOP": "soc_top_dir", - "YSOC_SUB": "soc_sub_dir" + "YSOC_SUB": "soc_sub_dir", }, - "COVER": { - "YCOVER": ecoclimap_dir - }, - "ZS": { - "YZS": "oro_dir" - } + "COVER": {"YCOVER": ecoclimap_dir}, + "ZS": {"YZS": "oro_dir"}, } for namelist_section, ftypes in possible_direct_data.items(): # for ftype in possible_direct_data[namelist_section]: for ftype in ftypes: - fname = str(self.config.get_setting("SURFEX#" + namelist_section + "#" + ftype)) - self.input_list.append(self.set_direct_data_namelist("NAM_" + namelist_section, - ftype, fname, - self.input_path)) + fname = str( + self.config.get_setting("SURFEX#" + namelist_section + "#" + ftype) + ) + self.input_list.append( + self.set_direct_data_namelist( + "NAM_" + namelist_section, ftype, fname, self.input_path + ) + ) # Set ISBA properties if self.config.get_setting("SURFEX#ISBA#SCHEME") == "DIF": - self.input_list.append({"json": {"NAM_ISBA": {"CISBA": "DIF", "NGROUND_LAYER": 14}}}) + self.input_list.append( + {"json": {"NAM_ISBA": {"CISBA": "DIF", "NGROUND_LAYER": 14}}} + ) if os.path.exists(self.input_path + "/isba_dif.json"): self.input_list.append({"file": self.input_path + "/isba_dif.json"}) elif self.config.get_setting("SURFEX#ISBA#SCHEME") == "3-L": - self.input_list.append({"json": {"NAM_ISBA": {"CISBA": "3-L", "NGROUND_LAYER": 3}}}) + self.input_list.append( + {"json": {"NAM_ISBA": {"CISBA": "3-L", "NGROUND_LAYER": 3}}} + ) elif self.config.get_setting("SURFEX#ISBA#SCHEME") == "2-L": - self.input_list.append({"json": {"NAM_ISBA": {"CISBA": "2-L", "NGROUND_LAYER": 2}}}) + self.input_list.append( + {"json": {"NAM_ISBA": {"CISBA": "2-L", "NGROUND_LAYER": 2}}} + ) # Set patches npatch = self.config.get_setting("SURFEX#ISBA#NPATCH") @@ -621,41 +341,51 @@ def set_pgd_namelist(self): # Treedrag if self.config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") != "": treeheight = self.config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") - self.input_list.append({ - "json": { - "NAM_DATA_ISBA": { - "CFNAM_H_TREE(4)": treeheight, - "CFTYP_H_TREE(4)": "ASCLLV", - "CFNAM_H_TREE(5)": treeheight, - "CFTYP_H_TREE(5)": "ASCLLV", - "CFNAM_H_TREE(6)": treeheight, - "CFTYP_H_TREE(6)": "ASCLLV"} + self.input_list.append( + { + "json": { + "NAM_DATA_ISBA": { + "CFNAM_H_TREE(4)": treeheight, + "CFTYP_H_TREE(4)": "ASCLLV", + "CFNAM_H_TREE(5)": treeheight, + "CFTYP_H_TREE(5)": "ASCLLV", + "CFNAM_H_TREE(6)": treeheight, + "CFTYP_H_TREE(6)": "ASCLLV", + } + } } - }) + ) if self.config.get_setting("SURFEX#TOWN#LTOWN_TO_ROCK"): if self.config.get_setting("SURFEX#TILES#TOWN") != "NONE": - logging.warning("WARNING: TOWN is not NONE and you want LTOWN_TO_ROCK. " - "Setting it to NONE!") - self.input_list.append({"json": {"NAM_PGD_ARRANGE_COVER": {"LTOWN_TO_ROCK": True}}}) + logging.warning( + "WARNING: TOWN is not NONE and you want LTOWN_TO_ROCK. " + "Setting it to NONE!" 
+ ) + self.input_list.append( + {"json": {"NAM_PGD_ARRANGE_COVER": {"LTOWN_TO_ROCK": True}}} + ) self.input_list.append({"json": {"NAM_PGD_SCHEMES": {"TOWN": "NONE"}}}) if self.config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": - self.input_list.append({ - "json": { - "NAM_DATA_FLAKE": { - "YWATER_DEPTH": "GlobalLakeDepth", - "YWATER_DEPTHFILETYPE": "DIRECT", - "YWATER_DEPTH_STATUS": "GlobalLakeStatus" + self.input_list.append( + { + "json": { + "NAM_DATA_FLAKE": { + "YWATER_DEPTH": "GlobalLakeDepth", + "YWATER_DEPTHFILETYPE": "DIRECT", + "YWATER_DEPTH_STATUS": "GlobalLakeStatus", + } } } - }) + ) # Sea self.input_list.append({"file": self.input_path + "/sea.json"}) - def set_prep_namelist(self, prep_file=None, prep_filetype=None, prep_pgdfile=None, - prep_pgdfiletype=None): + def set_prep_namelist( + self, prep_file=None, prep_filetype=None, prep_pgdfile=None, prep_pgdfiletype=None + ): """Set prep namelist. Args: @@ -665,15 +395,14 @@ def set_prep_namelist(self, prep_file=None, prep_filetype=None, prep_pgdfile=Non prep_pgdfiletype (_type_, optional): _description_. Defaults to None. Raises: - Exception: _description_ - Exception: _description_ - Exception: _description_ + RuntimeError: Filetype for input to PREP is not set! + RuntimeError: Filetype for PGD input to PREP is not set! """ if prep_file is not None and prep_filetype is None: - raise Exception("Filetype for input to PREP is not set!") + raise RuntimeError("Filetype for input to PREP is not set!") if prep_pgdfile is not None and prep_pgdfiletype is None: - raise Exception("Filetype for PGD input to PREP is not set!") + raise RuntimeError("Filetype for PGD input to PREP is not set!") self.input_list.append({"file": self.input_path + "/prep.json"}) if prep_file is not None: @@ -682,34 +411,54 @@ def set_prep_namelist(self, prep_file=None, prep_filetype=None, prep_pgdfile=Non else: fname = os.path.basename(prep_file) self.input_list.append({"json": {"NAM_PREP_SURF_ATM": {"CFILE": fname}}}) - self.input_list.append({"json": {"NAM_PREP_SURF_ATM": { - "CFILETYPE": prep_filetype}}}) + self.input_list.append( + {"json": {"NAM_PREP_SURF_ATM": {"CFILETYPE": prep_filetype}}} + ) if prep_pgdfile is not None: fname = os.path.basename(prep_pgdfile) - self.input_list.append({"json": {"NAM_PREP_SURF_ATM": { - "CFILEPGD": fname}}}) - self.input_list.append({"json": {"NAM_PREP_SURF_ATM": { - "CFILEPGDTYPE": prep_pgdfiletype}}}) + self.input_list.append( + {"json": {"NAM_PREP_SURF_ATM": {"CFILEPGD": fname}}} + ) + self.input_list.append( + { + "json": { + "NAM_PREP_SURF_ATM": {"CFILEPGDTYPE": prep_pgdfiletype} + } + } + ) if self.dtg is not None: - # prep_time = datetime.strptime(dtg, "%Y%m%d%H") prep_time = self.dtg - self.input_list.append({"json": {"NAM_PREP_SURF_ATM": { - "NYEAR": int(prep_time.strftime("%Y"))}}}) - self.input_list.append({"json": {"NAM_PREP_SURF_ATM": { - "NMONTH": int(prep_time.strftime("%m"))}}}) - self.input_list.append({"json": {"NAM_PREP_SURF_ATM": { - "NDAY": int(prep_time.strftime("%d"))}}}) - self.input_list.append({"json": {"NAM_PREP_SURF_ATM": { - "XTIME": float(prep_time.strftime("%H")) * 3600.}}}) + self.input_list.append( + {"json": {"NAM_PREP_SURF_ATM": {"NYEAR": int(prep_time.strftime("%Y"))}}} + ) + self.input_list.append( + {"json": {"NAM_PREP_SURF_ATM": {"NMONTH": int(prep_time.strftime("%m"))}}} + ) + self.input_list.append( + {"json": {"NAM_PREP_SURF_ATM": {"NDAY": int(prep_time.strftime("%d"))}}} + ) + self.input_list.append( + { + "json": { + "NAM_PREP_SURF_ATM": { + "XTIME": 
float(prep_time.strftime("%H")) * 3600.0 + } + } + } + ) else: - raise Exception("You must provide a DTG for prep") + raise RuntimeError("You must provide a DTG for prep") if self.config.get_setting("SURFEX#SEA#ICE") == "SICE": - self.input_list.append({"json": {"NAM_PREP_SEAFLUX": {"CSEAICE_SCHEME": "SICE"}}}) + self.input_list.append( + {"json": {"NAM_PREP_SEAFLUX": {"CSEAICE_SCHEME": "SICE"}}} + ) self.input_list.append({"file": self.input_path + "/prep_sice.json"}) if self.config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": lclim_lake = self.config.get_setting("SURFEX#FLAKE#LCLIM") - self.input_list.append({"json": {"NAM_PREP_FLAKE": {"LCLIM_LAKE": lclim_lake}}}) + self.input_list.append( + {"json": {"NAM_PREP_FLAKE": {"LCLIM_LAKE": lclim_lake}}} + ) # Set extra ISBA-DIF properties (Not needed in prep?) if self.config.get_setting("SURFEX#ISBA#SCHEME") == "DIF": @@ -718,7 +467,9 @@ def set_prep_namelist(self, prep_file=None, prep_filetype=None, prep_pgdfile=Non # ISBA CANOPY lisba_canopy = self.config.get_setting("SURFEX#ISBA#CANOPY") - self.input_list.append({"json": {"NAM_PREP_ISBA": {"LISBA_CANOPY": lisba_canopy}}}) + self.input_list.append( + {"json": {"NAM_PREP_ISBA": {"LISBA_CANOPY": lisba_canopy}}} + ) # Snow self.input_list.append({"file": self.input_path + "/prep_snow.json"}) @@ -733,7 +484,7 @@ def set_offline_namelist(self): """Set offline namelist. Raises: - Exception: _description_ + RuntimeError: Mismatch in nnco/cobs_m """ self.input_list.append({"file": self.input_path + "/offline.json"}) @@ -762,8 +513,7 @@ def set_offline_namelist(self): sso = self.config.get_setting("SURFEX#SSO#SCHEME") self.input_list.append({"json": {"NAM_SSON": {"CROUGH": sso}}}) if sso == "OROTUR": - if isinstance(self.geo, surfex.ConfProj): - self.input_list.append({"json": {"NAM_SSON": {"XSOROT": self.geo.xdx}}}) + self.input_list.append({"json": {"NAM_SSON": {"XSOROT": self.geo.xdx}}}) # Perturbed offline settings self.input_list.append({"json": {"NAM_VAR": {"NIVAR": 0}}}) @@ -777,30 +527,48 @@ def set_offline_namelist(self): xtprt_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XTPRT_M") xsigma_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XSIGMA_M") for var, cvar_val in enumerate(cvar_m): - self.input_list.append({"json": {"NAM_VAR": { - "CVAR_M(" + str(var + 1) + ")": cvar_val}}}) - self.input_list.append({"json": {"NAM_VAR": { - "NNCV(" + str(var + 1) + ")": nncv[var]}}}) - self.input_list.append({"json": {"NAM_VAR": { - "XTPRT_M(" + str(var + 1) + ")": xtprt_m[var]}}}) - self.input_list.append({"json": {"NAM_VAR": { - "XSIGMA_M(" + str(var + 1) + ")": xsigma_m[var]}}}) + self.input_list.append( + {"json": {"NAM_VAR": {"CVAR_M(" + str(var + 1) + ")": cvar_val}}} + ) + self.input_list.append( + {"json": {"NAM_VAR": {"NNCV(" + str(var + 1) + ")": nncv[var]}}} + ) + self.input_list.append( + {"json": {"NAM_VAR": {"XTPRT_M(" + str(var + 1) + ")": xtprt_m[var]}}} + ) + self.input_list.append( + { + "json": { + "NAM_VAR": {"XSIGMA_M(" + str(var + 1) + ")": xsigma_m[var]} + } + } + ) if nncv[var] == 1: nvar += 1 self.input_list.append({"json": {"NAM_VAR": {"NVAR": nvar}}}) if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "ENKF": - self.input_list.append({"json": {"NAM_ASSIM": { - "CASSIM_ISBA": self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA")}}}) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "CASSIM_ISBA": self.config.get_setting( + "SURFEX#ASSIM#SCHEMES#ISBA" + ) + } + } + } + ) nvar = 0 cvar_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#CVAR_M") 
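# Sketch (not part of the patch): the indexed NAM_VAR handling above follows the same
# pattern for both EKF and ENKF: one CVAR_M(i)/NNCV(i) entry per control variable,
# with NVAR set to the number of variables whose nncv flag is 1. The values below are
# hypothetical and only show the shape of the generated settings.
cvar_m = ["TG2", "WG2"]  # assumed control variable names
nncv = [1, 0]            # assumed activation flags

input_list = []
nvar = 0
for var, cvar_val in enumerate(cvar_m):
    input_list.append({"json": {"NAM_VAR": {"CVAR_M(" + str(var + 1) + ")": cvar_val}}})
    input_list.append({"json": {"NAM_VAR": {"NNCV(" + str(var + 1) + ")": nncv[var]}}})
    if nncv[var] == 1:
        nvar += 1
input_list.append({"json": {"NAM_VAR": {"NVAR": nvar}}})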
nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NNCV") - # nens_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NENS_M") for var, cvar_val in enumerate(cvar_m): - self.input_list.append({"json": {"NAM_VAR": { - "CVAR_M(" + str(var + 1) + ")": cvar_val}}}) - self.input_list.append({"json": {"NAM_VAR": { - "NNCV(" + str(var + 1) + ")": nncv[var]}}}) + self.input_list.append( + {"json": {"NAM_VAR": {"CVAR_M(" + str(var + 1) + ")": cvar_val}}} + ) + self.input_list.append( + {"json": {"NAM_VAR": {"NNCV(" + str(var + 1) + ")": nncv[var]}}} + ) if nncv[var] == 1: nvar += 1 self.input_list.append({"json": {"NAM_VAR": {"NVAR": nvar}}}) @@ -808,32 +576,37 @@ def set_offline_namelist(self): # TODO the need for this in forecast must be removed! nobstype = 0 nnco = self.config.get_setting("SURFEX#ASSIM#OBS#NNCO") - # nobstype_m = self.config.get_setting("SURFEX#ASSIM#OBS#NOBSTYPE_M") cobs_m = self.config.get_setting("SURFEX#ASSIM#OBS#COBS_M") if len(nnco) != len(cobs_m): - raise Exception("Mismatch in nnco/cobs_m") + raise RuntimeError("Mismatch in nnco/cobs_m") for obs, obs_val in enumerate(nnco): - self.input_list.append({"json": {"NAM_OBS": { - "NNCO(" + str(obs + 1) + ")": obs_val}}}) - self.input_list.append({"json": {"NAM_OBS": { - "COBS_M(" + str(obs + 1) + ")": cobs_m[obs]}}}) + self.input_list.append( + {"json": {"NAM_OBS": {"NNCO(" + str(obs + 1) + ")": obs_val}}} + ) + self.input_list.append( + {"json": {"NAM_OBS": {"COBS_M(" + str(obs + 1) + ")": cobs_m[obs]}}} + ) if nnco[obs] == 1: nobstype += 1 self.input_list.append({"json": {"NAM_OBS": {"NOBSTYPE": nobstype}}}) # Climate setting if self.config.get_setting("SURFEX#SEA#LVOLATILE_SIC"): - self.input_list.append({"json": {"NAM_SEAICEn ": {"LVOLATILE_SIC": True, - "XSIC_EFOLDING_TIME": 1.0}}}) + self.input_list.append( + { + "json": { + "NAM_SEAICEn ": {"LVOLATILE_SIC": True, "XSIC_EFOLDING_TIME": 1.0} + } + } + ) def set_soda_namelist(self): """Set SODA namelist. 
Raises: - Exception: _description_ - Exception: _description_ - Exception: _description_ - Exception: _description_ + RuntimeError: Mismatch in nnco/cobs_m/xerrobs_m + RuntimeError: You must provide a DTG when using a list for snow + RuntimeError: Mismatch in nncv/cvar_m/xsigma_m/xtprt_m """ self.input_list.append({"file": self.input_path + "/soda.json"}) @@ -845,61 +618,162 @@ def set_soda_namelist(self): lobsnat = self.config.get_setting("SURFEX#ASSIM#OBS#LOBSNAT") self.input_list.append({"json": {"NAM_OBS": {"LOBSNAT": lobsnat}}}) cfile_format_obs = self.config.get_setting("SURFEX#ASSIM#OBS#CFILE_FORMAT_OBS") - self.input_list.append({"json": {"NAM_OBS": {"CFILE_FORMAT_OBS": cfile_format_obs}}}) + self.input_list.append( + {"json": {"NAM_OBS": {"CFILE_FORMAT_OBS": cfile_format_obs}}} + ) nobstype = 0 nnco = self.config.get_setting("SURFEX#ASSIM#OBS#NNCO") cobs_m = self.config.get_setting("SURFEX#ASSIM#OBS#COBS_M") - # nobstype_m = self.config.get_setting("SURFEX#ASSIM#OBS#NOBSTYPE_M") xerrobs_m = self.config.get_setting("SURFEX#ASSIM#OBS#XERROBS_M") logging.debug("%s %s %s", nnco, cobs_m, xerrobs_m) if len(nnco) != len(cobs_m) or len(nnco) != len(xerrobs_m): - raise Exception("Mismatch in nnco/cobs_m/xerrobs_m") + raise RuntimeError("Mismatch in nnco/cobs_m/xerrobs_m") for obs, obs_val in enumerate(nnco): - self.input_list.append({"json": {"NAM_OBS": { - "NNCO(" + str(obs + 1) + ")": obs_val}}}) - self.input_list.append({"json": {"NAM_OBS": { - "COBS_M(" + str(obs + 1) + ")": cobs_m[obs]}}}) - self.input_list.append({"json": {"NAM_OBS": { - "XERROBS_M(" + str(obs + 1) + ")": xerrobs_m[obs]}}}) + self.input_list.append( + {"json": {"NAM_OBS": {"NNCO(" + str(obs + 1) + ")": obs_val}}} + ) + self.input_list.append( + {"json": {"NAM_OBS": {"COBS_M(" + str(obs + 1) + ")": cobs_m[obs]}}} + ) + self.input_list.append( + {"json": {"NAM_OBS": {"XERROBS_M(" + str(obs + 1) + ")": xerrobs_m[obs]}}} + ) if nnco[obs] == 1: nobstype += 1 self.input_list.append({"json": {"NAM_OBS": {"NOBSTYPE": nobstype}}}) - self.input_list.append({"json": {"NAM_OBS": { - "LSWE": self.config.get_setting("SURFEX#ASSIM#OBS#LSWE")}}}) + self.input_list.append( + { + "json": { + "NAM_OBS": {"LSWE": self.config.get_setting("SURFEX#ASSIM#OBS#LSWE")} + } + } + ) # LSM - self.input_list.append({"json": {"NAM_ASSIM": { - "CFILE_FORMAT_LSM": self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM")}}}) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "CFILE_FORMAT_LSM": self.config.get_setting( + "SURFEX#ASSIM#CFILE_FORMAT_LSM" + ) + } + } + } + ) # Sea - self.input_list.append({"json": {"NAM_ASSIM": { - "CASSIM_SEA": self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA")}}}) - self.input_list.append({"json": {"NAM_ASSIM": { - "CFILE_FORMAT_SST": self.config.get_setting("SURFEX#ASSIM#SEA#CFILE_FORMAT_SST")}}}) - self.input_list.append({"json": {"NAM_ASSIM": { - "LREAD_SST_FROM_FILE": - self.config.get_setting("SURFEX#ASSIM#SEA#LREAD_SST_FROM_FILE")}}}) - self.input_list.append({"json": {"NAM_ASSIM": { - "LEXTRAP_SEA": self.config.get_setting("SURFEX#ASSIM#SEA#LEXTRAP_SEA")}}}) - self.input_list.append({"json": {"NAM_ASSIM": { - "LECSST": self.config.get_setting("SURFEX#ASSIM#SEA#LECSST")}}}) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "CASSIM_SEA": self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA") + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "CFILE_FORMAT_SST": self.config.get_setting( + "SURFEX#ASSIM#SEA#CFILE_FORMAT_SST" + ) + } + } + } + ) + self.input_list.append( + 
{ + "json": { + "NAM_ASSIM": { + "LREAD_SST_FROM_FILE": self.config.get_setting( + "SURFEX#ASSIM#SEA#LREAD_SST_FROM_FILE" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "LEXTRAP_SEA": self.config.get_setting( + "SURFEX#ASSIM#SEA#LEXTRAP_SEA" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "LECSST": self.config.get_setting("SURFEX#ASSIM#SEA#LECSST") + } + } + } + ) # Water - self.input_list.append({"json": {"NAM_ASSIM": { - "CASSIM_WATER": self.config.get_setting("SURFEX#ASSIM#SCHEMES#INLAND_WATER")}}}) - self.input_list.append({"json": {"NAM_ASSIM": { - "LWATERTG2": self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LWATERTG2")}}}) - self.input_list.append({"json": {"NAM_ASSIM": { - "LEXTRAP_WATER": self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER")}}}) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "CASSIM_WATER": self.config.get_setting( + "SURFEX#ASSIM#SCHEMES#INLAND_WATER" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "LWATERTG2": self.config.get_setting( + "SURFEX#ASSIM#INLAND_WATER#LWATERTG2" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "LEXTRAP_WATER": self.config.get_setting( + "SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER" + ) + } + } + } + ) # Nature - self.input_list.append({"json": {"NAM_ASSIM": { - "CASSIM_ISBA": self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA")}}}) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "CASSIM_ISBA": self.config.get_setting( + "SURFEX#ASSIM#SCHEMES#ISBA" + ) + } + } + } + ) # Snow - self.input_list.append({"json": {"NAM_ASSIM": { - "LPATCH1":self.config.get_setting("SURFEX#ASSIM#ISBA#LPATCH1")}}}) + self.input_list.append( + { + "json": { + "NAM_ASSIM": { + "LPATCH1": self.config.get_setting("SURFEX#ASSIM#ISBA#LPATCH1") + } + } + } + ) laesnm = False snow_cycles = self.config.get_setting("SURFEX#ASSIM#ISBA#UPDATE_SNOW_CYCLES") @@ -912,8 +786,10 @@ def set_soda_namelist(self): logging.debug("true") laesnm = True else: - raise Exception("You must provide a DTG when using a list for snow " - "assimilation cycles") + raise RuntimeError( + "You must provide a DTG when using a list for snow " + "assimilation cycles" + ) self.input_list.append({"json": {"NAM_ASSIM": {"LAESNM": laesnm}}}) # Set OI settings @@ -927,10 +803,28 @@ def set_soda_namelist(self): elif ua_physics == "alaro": self.input_list.append({"json": {"NAM_ASSIM": {"LAROME": False}}}) - self.input_list.append({"json": {"NAM_NACVEG": {"XSIGT2MO": - self.config.get_setting("SURFEX#ASSIM#ISBA#OI#XSIGT2MO")}}}) - self.input_list.append({"json": {"NAM_NACVEG": {"XSIGH2MO": - self.config.get_setting("SURFEX#ASSIM#ISBA#OI#XSIGH2MO")}}}) + self.input_list.append( + { + "json": { + "NAM_NACVEG": { + "XSIGT2MO": self.config.get_setting( + "SURFEX#ASSIM#ISBA#OI#XSIGT2MO" + ) + } + } + } + ) + self.input_list.append( + { + "json": { + "NAM_NACVEG": { + "XSIGH2MO": self.config.get_setting( + "SURFEX#ASSIM#ISBA#OI#XSIGH2MO" + ) + } + } + } + ) self.input_list.append({"json": {"NAM_NACVEG": {"XRCLIMCA": 0.0}}}) self.input_list.append({"json": {"NAM_NACVEG": {"XRCLISST": 0.05}}}) self.input_list.append({"json": {"NAM_NACVEG": {"NECHGU": self.fcint}}}) @@ -951,40 +845,64 @@ def set_soda_namelist(self): xsigma_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XSIGMA_M") xtprt_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XTPRT_M") nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#NNCV") - if len(nncv) != len(cvar_m) or 
len(nncv) != len(xsigma_m) or len(nncv) != len(xtprt_m): - raise Exception("Mismatch in nncv/cvar_m/xsigma_m/xtprt_m") + if ( + len(nncv) != len(cvar_m) + or len(nncv) != len(xsigma_m) + or len(nncv) != len(xtprt_m) + ): + raise RuntimeError("Mismatch in nncv/cvar_m/xsigma_m/xtprt_m") for var, cvar_val in enumerate(cvar_m): self.input_list.append( - {"json": {"NAM_VAR": {"CVAR_M(" + str(var + 1) + ")": cvar_val}}}) - self.input_list.append({"json": {"NAM_VAR": { - "XSIGMA_M(" + str(var + 1) + ")": xsigma_m[var]}}}) - self.input_list.append({"json": {"NAM_VAR": { - "XTPRT_M(" + str(var + 1) + ")": xtprt_m[var]}}}) - self.input_list.append({"json": {"NAM_VAR": { - "NNCV(" + str(var + 1) + ")": nncv[var]}}}) + {"json": {"NAM_VAR": {"CVAR_M(" + str(var + 1) + ")": cvar_val}}} + ) + self.input_list.append( + { + "json": { + "NAM_VAR": {"XSIGMA_M(" + str(var + 1) + ")": xsigma_m[var]} + } + } + ) + self.input_list.append( + {"json": {"NAM_VAR": {"XTPRT_M(" + str(var + 1) + ")": xtprt_m[var]}}} + ) + self.input_list.append( + {"json": {"NAM_VAR": {"NNCV(" + str(var + 1) + ")": nncv[var]}}} + ) if nncv[var] == 1: nvar += 1 self.input_list.append({"json": {"NAM_VAR": {"NIVAR": 0}}}) self.input_list.append({"json": {"NAM_VAR": {"NVAR": nvar}}}) xscale_q = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XSCALE_Q") self.input_list.append({"json": {"NAM_VAR": {"XSCALE_Q": xscale_q}}}) - self.input_list.append({"json": {"NAM_IO_VARASSIM": { - "LPRT": False, - "LBEV": self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#EVOLVE_B"), - "LBFIXED": not self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#EVOLVE_B") - }}}) + self.input_list.append( + { + "json": { + "NAM_IO_VARASSIM": { + "LPRT": False, + "LBEV": self.config.get_setting( + "SURFEX#ASSIM#ISBA#EKF#EVOLVE_B" + ), + "LBFIXED": not self.config.get_setting( + "SURFEX#ASSIM#ISBA#EKF#EVOLVE_B" + ), + } + } + } + ) if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "ENKF": nvar = 0 cvar_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#CVAR_M") nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NNCV") if len(nncv) != len(cvar_m): - raise Exception("Mismatch in nncv/cvar_m") + raise RuntimeError("Mismatch in nncv/cvar_m") for var, cvar_val in enumerate(cvar_m): self.input_list.append( - {"json": {"NAM_VAR": {"CVAR_M(" + str(var + 1) + ")": cvar_val}}}) - self.input_list.append({"json": {"NAM_VAR": { - "NNCV(" + str(var + 1) + ")": nncv[var]}}}) + {"json": {"NAM_VAR": {"CVAR_M(" + str(var + 1) + ")": cvar_val}}} + ) + self.input_list.append( + {"json": {"NAM_VAR": {"NNCV(" + str(var + 1) + ")": nncv[var]}}} + ) if nncv[var] == 1: nvar += 1 @@ -1013,8 +931,10 @@ def override(self): """Overide.""" # Override posssibility if os.path.exists(self.input_path + "/override.json"): - logging.warning("WARNING: Override settings with content from %s/override.json", - self.input_path) + logging.warning( + "WARNING: Override settings with content from %s/override.json", + self.input_path, + ) self.input_list.append({"file": self.input_path + "/override.json"}) @staticmethod @@ -1036,13 +956,27 @@ def set_direct_data_namelist(lnamelist_section, ldtype, ldname, linput_path): filetype_name = ldtype if ldtype == "YSOC_TOP" or ldtype == "YSOC_SUB": filetype_name = "YSOC" - return {"json": json.loads('{"' + lnamelist_section + '": { "' + ldtype + '": "' - + basename + '", ' + '"' + filetype_name + 'FILETYPE": "DIRECT"}}')} + return { + "json": json.loads( + '{"' + + lnamelist_section + + '": { "' + + ldtype + + '": "' + + basename + + '", ' + + '"' + + filetype_name + + 
'FILETYPE": "DIRECT"}}' + ) + } if ldname.endswith(".json"): return {"file": linput_path + "/" + ldname} @staticmethod - def set_dirtyp_data_namelist(lnamelist_section, ldtype, ldname, vtype=None, decade=None): + def set_dirtyp_data_namelist( + lnamelist_section, ldtype, ldname, vtype=None, decade=None + ): """Set dirtyp data namelist. Args: @@ -1069,8 +1003,17 @@ def set_dirtyp_data_namelist(lnamelist_section, ldtype, ldname, vtype=None, deca if vtype is not None or decade is not None: filetype_name = filetype_name + ")" return { - "json": json.loads('{"' + lnamelist_section + '": { "CFNAM_' + filetype_name + '": "' - + basename + '", "CFTYP_' + filetype_name + '": "DIRTYPE"}}') + "json": json.loads( + '{"' + + lnamelist_section + + '": { "CFNAM_' + + filetype_name + + '": "' + + basename + + '", "CFTYP_' + + filetype_name + + '": "DIRTYPE"}}' + ) } @staticmethod @@ -1166,10 +1109,10 @@ def ascii_file2nml(input_fname, input_fmt="json"): f90nml.Namelist: Namelist object. """ - if input_fmt == 'json': + if input_fmt == "json": with open(input_fname, mode="r", encoding="utf-8") as input_file: output_data = json.load(input_file) - elif input_fmt == 'yaml': + elif input_fmt == "yaml": with open(input_fname, mode="r", encoding="utf-8") as input_file: output_data = yaml.safe_load(input_file) output_data = f90nml.Namelist(output_data) @@ -1186,11 +1129,15 @@ def nml2ascii(input_data, output_file, output_fmt="json", indent=2): indent (int, optional): Indentation. Defaults to 2. """ - if output_fmt == 'json': + if output_fmt == "json": input_data = input_data.todict(complex_tuple=True) - json.dump(input_data, open(output_file, "w", encoding="utf-8"), indent=indent, - separators=(',', ': ')) - elif output_fmt == 'yaml': + json.dump( + input_data, + open(output_file, "w", encoding="utf-8"), + indent=indent, + separators=(",", ": "), + ) + elif output_fmt == "yaml": input_data = input_data.todict(complex_tuple=True) yaml.dump(input_data, output_file, default_flow_style=False) @@ -1202,6 +1149,9 @@ def merge_json_namelist_file(old_dict, my_file): old_dict (dict): Exististing settings my_file (str): Filename with new settings + Raises: + FileNotFoundError: Namelist input not found + Returns: dict: Merged settings. @@ -1223,12 +1173,17 @@ def get_namelist(self): if "file" in inp: json_file = str(inp["file"]) if not os.path.exists(json_file): - raise FileNotFoundError("Needed namelist input does not exist: " + json_file) + raise FileNotFoundError( + "Needed namelist input does not exist: " + json_file + ) else: - merged_json_settings = self.merge_json_namelist_file(merged_json_settings, - json_file) + merged_json_settings = self.merge_json_namelist_file( + merged_json_settings, json_file + ) elif "json" in inp: - merged_json_settings = self.merge_namelist_dicts(merged_json_settings, inp["json"]) + merged_json_settings = self.merge_namelist_dicts( + merged_json_settings, inp["json"] + ) else: logging.error("Can not handle input type %s", str(inp)) raise Exception @@ -1239,9 +1194,20 @@ def get_namelist(self): class Namelist(object): """Base namelist.""" - def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, - prep_filetype=None, prep_pgdfile=None, prep_pgdfiletype=None, dtg=None, fcint=3, - geo=None): + def __init__( + self, + program, + config, + input_path, + forc_zs=False, + prep_file=None, + prep_filetype=None, + prep_pgdfile=None, + prep_pgdfiletype=None, + dtg=None, + fcint=3, + geo=None, + ): """Construct a base namelists class to be implemented by namelist implementations. 
Args: @@ -1257,13 +1223,18 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, fcint (int): The intervall between the cycles. Used for first guesses. geo (surfex.Geo): Surfex geometry. The domain you want to run on + Raises: + RuntimeError: Input + RuntimeError: Merged dictionary contains a @ in value + NotImplementedError: Mode is not implemented + """ self.config = config self.input_path = input_path self.forc_zs = forc_zs if dtg is not None: if isinstance(dtg, str): - dtg = datetime.strptime(dtg, "%Y%m%d%H") + dtg = as_datetime(dtg) self.dtg = dtg check_parsing = True if self.dtg is None: @@ -1274,13 +1245,12 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, # The time stamp of next cycle file forecast_length = self.fcint if self.dtg is not None: - self.end_of_forecast = self.dtg + timedelta(hours=forecast_length) + self.end_of_forecast = self.dtg + as_timedelta(seconds=forecast_length * 3600) else: self.end_of_forecast = None logging.info("Creating JSON namelist input for program: %s", program) - # self.input_list = [] merged_dict = {} merged_dict = self.prolog(merged_dict, check_parsing=check_parsing) # Program specific settings @@ -1288,13 +1258,17 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, merged_dict = self.set_pgd_namelist(merged_dict) elif program == "prep": if prep_file is None: - raise Exception("Prep need an input file either as a json namelist or a surfex " - "supported format") - merged_dict = self.set_prep_namelist(merged_dict, - prep_file=prep_file, - prep_filetype=prep_filetype, - prep_pgdfile=prep_pgdfile, - prep_pgdfiletype=prep_pgdfiletype) + raise RuntimeError( + "Prep need an input file either as a json namelist or a surfex " + "supported format" + ) + merged_dict = self.set_prep_namelist( + merged_dict, + prep_file=prep_file, + prep_filetype=prep_filetype, + prep_pgdfile=prep_pgdfile, + prep_pgdfiletype=prep_pgdfiletype, + ) elif program == "offline" or program == "perturbed": merged_dict = self.set_offline_namelist(merged_dict) elif program == "soda": @@ -1310,7 +1284,9 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, keys = [] for key, value in merged_dict[block].items(): if isinstance(value, str) and value[0] == "@" and value[-1] == "@": - logging.info("Delete non-substituted placeholder %s for key %s", value, key) + logging.info( + "Delete non-substituted placeholder %s for key %s", value, key + ) keys.append(key) if len(keys) > 0: non_parsed.update({block: keys}) @@ -1322,9 +1298,12 @@ def __init__(self, program, config, input_path, forc_zs=False, prep_file=None, for block in merged_dict: for key, value in merged_dict[block].items(): if isinstance(value, str) and "@" in value[0] and "@" in value[-1]: - logging.error("Merged dictionary contains a @ in value %s for key %s", - value, key) - raise Exception() + logging.error( + "Merged dictionary contains a @ in value %s for key %s", + value, + key, + ) + raise RuntimeError("Merged dictionary contains a @ in value") self.namelist_dict = merged_dict @@ -1332,78 +1311,154 @@ def prolog(self, merged_dict, check_parsing=True): """Prolog. Args: - check_parsing (bool): Check if parsing is ok. + merged_dict(dict): Merged settings. + check_parsing (bool, optional): Check if parsing is ok. Defaults to True. 
+ + Returns: + merged_dict(dict): Merged settings """ - merged_dict = self.merge_json_namelist_file(merged_dict, self.input_path + "/prolog.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prolog.json" + ) # IO - merged_dict = self.merge_json_namelist_file(merged_dict, self.input_path + "/io.json") - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/constants.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/io.json" + ) + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/constants.json" + ) # IO manipulation - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "CSURF_FILETYPE", - self.config.get_setting("SURFEX#IO#CSURF_FILETYPE")) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "CTIMESERIES_FILETYPE", - self.config.get_setting("SURFEX#IO#CTIMESERIES_FILETYPE")) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "CFORCING_FILETYPE", - self.config.get_setting("SURFEX#IO#CFORCING_FILETYPE")) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "CPGDFILE", - self.config.get_setting("SURFEX#IO#CPGDFILE")) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "CPREPFILE", - self.config.get_setting("SURFEX#IO#CPREPFILE")) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "CSURFFILE", - self.config.get_setting("SURFEX#IO#CSURFFILE", - validtime=self.end_of_forecast, - basedtg=self.dtg, - check_parsing=check_parsing)) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "XTSTEP_SURF", - self.config.get_setting("SURFEX#IO#XTSTEP")) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "XTSTEP_OUTPUT", - self.config.get_setting("SURFEX#IO#XTSTEP_OUTPUT")) - merged_dict = self.sub(merged_dict, "NAM_WRITE_SURF_ATM", "LSPLIT_PATCH", - self.config.get_setting("SURFEX#IO#LSPLIT_PATCH")) - merged_dict = self.sub(merged_dict, "NAM_IO_OFFLINE", "LSET_FORC_ZS", self.forc_zs) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "CSURF_FILETYPE", + self.config.get_setting("SURFEX#IO#CSURF_FILETYPE"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "CTIMESERIES_FILETYPE", + self.config.get_setting("SURFEX#IO#CTIMESERIES_FILETYPE"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "CFORCING_FILETYPE", + self.config.get_setting("SURFEX#IO#CFORCING_FILETYPE"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "CPGDFILE", + self.config.get_setting("SURFEX#IO#CPGDFILE"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "CPREPFILE", + self.config.get_setting("SURFEX#IO#CPREPFILE"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "CSURFFILE", + self.config.get_setting( + "SURFEX#IO#CSURFFILE", + validtime=self.end_of_forecast, + basedtg=self.dtg, + check_parsing=check_parsing, + ), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "XTSTEP_SURF", + self.config.get_setting("SURFEX#IO#XTSTEP"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_OFFLINE", + "XTSTEP_OUTPUT", + self.config.get_setting("SURFEX#IO#XTSTEP_OUTPUT"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_WRITE_SURF_ATM", + "LSPLIT_PATCH", + self.config.get_setting("SURFEX#IO#LSPLIT_PATCH"), + ) + merged_dict = self.sub( + merged_dict, "NAM_IO_OFFLINE", "LSET_FORC_ZS", self.forc_zs + ) # Constants and parameters - merged_dict = self.sub(merged_dict, "NAM_SURF_ATM", "XRIMAX", - self.config.get_setting("SURFEX#PARAMETERS#XRIMAX")) + merged_dict = self.sub( + 
merged_dict, + "NAM_SURF_ATM", + "XRIMAX", + self.config.get_setting("SURFEX#PARAMETERS#XRIMAX"), + ) return merged_dict def set_pgd_namelist(self, merged_dict): """Set pgd namelist.""" - merged_dict = self.merge_json_namelist_file(merged_dict, self.input_path + "/pgd.json") - merged_dict = self.sub(merged_dict, "NAM_PGD_SCHEMES", "CSEA", - self.config.get_setting("SURFEX#TILES#SEA")) - merged_dict = self.sub(merged_dict, "NAM_PGD_SCHEMES", "CWATER", - self.config.get_setting("SURFEX#TILES#INLAND_WATER")) - merged_dict = self.sub(merged_dict, "NAM_PGD_SCHEMES", "CNATURE", - self.config.get_setting("SURFEX#TILES#NATURE")) - merged_dict = self.sub(merged_dict, "NAM_PGD_SCHEMES", "CTOWN", - self.config.get_setting("SURFEX#TILES#TOWN")) + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd.json" + ) + merged_dict = self.sub( + merged_dict, + "NAM_PGD_SCHEMES", + "CSEA", + self.config.get_setting("SURFEX#TILES#SEA"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_PGD_SCHEMES", + "CWATER", + self.config.get_setting("SURFEX#TILES#INLAND_WATER"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_PGD_SCHEMES", + "CNATURE", + self.config.get_setting("SURFEX#TILES#NATURE"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_PGD_SCHEMES", + "CTOWN", + self.config.get_setting("SURFEX#TILES#TOWN"), + ) if self.config.get_setting("SURFEX#TOWN#LTOWN_TO_ROCK"): if self.config.get_setting("SURFEX#TILES#TOWN") != "NONE": - logging.warning("WARNING: TOWN is not NONE and you want LTOWN_TO_ROCK. " - "Setting it to NONE!") - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_arrange_cover.json") - merged_dict = self.sub(merged_dict, "NAM_PGD_ARRANGE_COVER", "LTOWN_TO_ROCK", True) + logging.warning( + "WARNING: TOWN is not NONE and you want LTOWN_TO_ROCK. " + "Setting it to NONE!" 
+ ) + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_arrange_cover.json" + ) + merged_dict = self.sub( + merged_dict, "NAM_PGD_ARRANGE_COVER", "LTOWN_TO_ROCK", True + ) merged_dict = self.sub(merged_dict, "NAM_PGD_SCHEMES", "CTOWN", "NONE") # Cover - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_cover.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_cover.json" + ) # COVER - # ecoclimap_dir = "ecoclimap_dir" eco_sg = self.config.get_setting("SURFEX#COVER#SG") merged_dict = self.sub(merged_dict, "NAM_FRAC", "LECOSG", eco_sg) # Ecoclimap SG if eco_sg: - # ecoclimap_dir = "ecoclimap_sg_cover_dir" - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_eco_sg.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_eco_sg.json" + ) ecoclimap = EcoclimapSG(self.config) else: ecoclimap = Ecoclimap(self.config) @@ -1415,8 +1470,9 @@ def set_pgd_namelist(self, merged_dict): merged_dict = self.sub(merged_dict, "NAM_COVER", "YCOVERFILETYPE", filetype) # ZS - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_zs.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_zs.json" + ) fname = str(self.config.get_setting("SURFEX#ZS#YZS")) fname, filetype = self.get_filetype_from_suffix(fname) merged_dict = self.sub(merged_dict, "NAM_ZS", "YZS", fname) @@ -1424,54 +1480,89 @@ def set_pgd_namelist(self, merged_dict): merged_dict = self.sub(merged_dict, "NAM_ZS", "YZSFILETYPE", filetype) # Sea - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_sea.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_sea.json" + ) # Inland water - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_inland_water.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_inland_water.json" + ) if self.config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_flake.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_flake.json" + ) # PGD ISBA if self.config.get_setting("SURFEX#TILES#NATURE") == "ISBA": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_isba.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_isba.json" + ) if eco_sg: - merged_dict = self.sub(merged_dict, "NAM_DATA_ISBA", "NTIME", ecoclimap.decades) + merged_dict = self.sub( + merged_dict, "NAM_DATA_ISBA", "NTIME", ecoclimap.decades + ) fname = self.config.get_setting("SURFEX#COVER#H_TREE") if fname != "" and fname is not None: if fname.endswith(".dir"): fname = fname.replace(".dir", "") for vtt in range(1, ecoclimap.veg_types + 1): - merged_dict = self.sub(merged_dict, "NAM_DATA_ISBA", "CFNAM_H_TREE(@VEGTYPE@)", - fname, vtype=vtt) - merged_dict = self.sub(merged_dict, "NAM_DATA_ISBA", "CFTYP_H_TREE(@VEGTYPE@)", - "DIRTYPE", vtype=vtt) + merged_dict = self.sub( + merged_dict, + "NAM_DATA_ISBA", + "CFNAM_H_TREE(@VEGTYPE@)", + fname, + vtype=vtt, + ) + merged_dict = self.sub( + merged_dict, + "NAM_DATA_ISBA", + "CFTYP_H_TREE(@VEGTYPE@)", + "DIRTYPE", + vtype=vtt, + ) key = "CFNAM_H_TREE(@VEGTYPE@)" merged_dict = self.delete(merged_dict, "NAM_DATA_ISBA", key) key = 
"CFTYP_H_TREE(@VEGTYPE@)" merged_dict = self.delete(merged_dict, "NAM_DATA_ISBA", key) - decadal_data_types = ["ALBNIR_SOIL", "ALBNIR_VEG", "ALBVIS_SOIL", "ALBVIS_VEG", "LAI"] + decadal_data_types = [ + "ALBNIR_SOIL", + "ALBNIR_VEG", + "ALBVIS_SOIL", + "ALBVIS_VEG", + "LAI", + ] for decadal_data_type in decadal_data_types: for vtt in range(1, ecoclimap.veg_types + 1): for decade in range(1, ecoclimap.decades + 1): - filepattern = self.config.get_setting("SURFEX#COVER#" + decadal_data_type, - check_parsing=False) + filepattern = self.config.get_setting( + "SURFEX#COVER#" + decadal_data_type, check_parsing=False + ) fname = ecoclimap.parse_fnames(filepattern, decade) if fname.endswith(".dir"): fname = fname.replace(".dir", "") key = f"CFNAM_{decadal_data_type}(@VEGTYPE@,@DECADE@)" - merged_dict = self.sub(merged_dict, "NAM_DATA_ISBA", key, fname, vtype=vtt, - decade=decade) + merged_dict = self.sub( + merged_dict, + "NAM_DATA_ISBA", + key, + fname, + vtype=vtt, + decade=decade, + ) key = f"CFTYP_{decadal_data_type}(@VEGTYPE@,@DECADE@)" - merged_dict = self.sub(merged_dict, "NAM_DATA_ISBA", key, "DIRTYPE", - vtype=vtt, decade=decade) + merged_dict = self.sub( + merged_dict, + "NAM_DATA_ISBA", + key, + "DIRTYPE", + vtype=vtt, + decade=decade, + ) # Delete generic placehoder key = f"CFNAM_{decadal_data_type}(@VEGTYPE@,@DECADE@)" @@ -1494,18 +1585,21 @@ def set_pgd_namelist(self, merged_dict): # Set ISBA properties if self.config.get_setting("SURFEX#ISBA#SCHEME") == "DIF": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_isba_dif.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_isba_dif.json" + ) merged_dict = self.sub(merged_dict, "NAM_ISBA", "CISBA", "DIF") merged_dict = self.sub(merged_dict, "NAM_ISBA", "NGROUND_LAYER", 14) elif self.config.get_setting("SURFEX#ISBA#SCHEME") == "3-L": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_isba_3-L.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_isba_3-L.json" + ) merged_dict = self.sub(merged_dict, "NAM_ISBA", "CISBA", "3-L") merged_dict = self.sub(merged_dict, "NAM_ISBA", "NGROUND_LAYER", 3) elif self.config.get_setting("SURFEX#ISBA#SCHEME") == "2-L": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_isba_2-L.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_isba_2-L.json" + ) merged_dict = self.sub(merged_dict, "NAM_ISBA", "CISBA", "2-L") merged_dict = self.sub(merged_dict, "NAM_ISBA", "NGROUND_LAYER", 2) @@ -1517,37 +1611,54 @@ def set_pgd_namelist(self, merged_dict): lmeb = self.config.get_setting("SURFEX#ISBA#MEB") merged_dict = self.sub(merged_dict, "NAM_ISBA", "LMEB", lmeb) if lmeb: - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/meb_settings.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/meb_settings.json" + ) # RSMIN if self.config.get_setting("SURFEX#COVER#SG"): - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_rsmin_sg.json") - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_rsmin_sg_mod.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_rsmin_sg.json" + ) + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_rsmin_sg_mod.json" + ) else: - merged_dict = 
self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_rsmin.json") - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_rsmin_mod.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_rsmin.json" + ) + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_rsmin_mod.json" + ) # CV if self.config.get_setting("SURFEX#COVER#SG"): - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_cv_sg.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_cv_sg.json" + ) else: - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/pgd_cv.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/pgd_cv.json" + ) # Treedrag (from ASCLLV files) if self.config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") != "": treeheight = self.config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") for vtt in [4, 5, 6]: - merged_dict = self.sub(merged_dict, "NAM_DATA_ISBA", "CFNAM_H_TREE(@VEGTYPE@)", - treeheight, vtype=vtt) - merged_dict = self.sub(merged_dict, "NAM_DATA_ISBA", "CFNAM_H_TREE(@VEGTYPE@)", - "ASCLLV", vtype=vtt) + merged_dict = self.sub( + merged_dict, + "NAM_DATA_ISBA", + "CFNAM_H_TREE(@VEGTYPE@)", + treeheight, + vtype=vtt, + ) + merged_dict = self.sub( + merged_dict, + "NAM_DATA_ISBA", + "CFNAM_H_TREE(@VEGTYPE@)", + "ASCLLV", + vtype=vtt, + ) key = "CFNAM_H_TREE(@VEGTYPE@)" merged_dict = self.delete(merged_dict, "NAM_DATA_ISBA", key) @@ -1556,130 +1667,176 @@ def set_pgd_namelist(self, merged_dict): return merged_dict - def set_prep_namelist(self, merged_dict, prep_file=None, prep_filetype=None, prep_pgdfile=None, - prep_pgdfiletype=None): + def set_prep_namelist( + self, + merged_dict, + prep_file=None, + prep_filetype=None, + prep_pgdfile=None, + prep_pgdfiletype=None, + ): """Set prep namelist. Args: + merged_dict(dict): Merged settings. prep_file (_type_, optional): _description_. Defaults to None. prep_filetype (_type_, optional): _description_. Defaults to None. prep_pgdfile (_type_, optional): _description_. Defaults to None. prep_pgdfiletype (_type_, optional): _description_. Defaults to None. + Raises: + FileNotFoundError: Main prep namelist input + RuntimeError: You must provide a DTG for prep + RuntimeError: Filetype for input to PREP is not set! + RuntimeError: Filetype for PGD input to PREP is not set + + Returns: + merged_dict(dict): Merged settings. 
+ """ if prep_file is not None and prep_filetype is None: - raise Exception("Filetype for input to PREP is not set!") + raise RuntimeError("Filetype for input to PREP is not set!") if prep_pgdfile is not None and prep_pgdfiletype is None: - raise Exception("Filetype for PGD input to PREP is not set!") + raise RuntimeError("Filetype for PGD input to PREP is not set!") - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep.json" + ) if prep_file is not None: if prep_file.endswith(".json"): if not os.path.exists(prep_file): raise FileNotFoundError(f"{prep_file} does not exist!") - merged_dict = self.merge_json_namelist_file(merged_dict, - prep_file) + merged_dict = self.merge_json_namelist_file(merged_dict, prep_file) else: fname = os.path.basename(prep_file) merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "CFILE", fname) - merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "CFILETYPE", prep_filetype) + merged_dict = self.sub( + merged_dict, "NAM_PREP_SURF_ATM", "CFILETYPE", prep_filetype + ) if prep_pgdfile is not None: fname = os.path.basename(prep_pgdfile) - merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGD", fname) - merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGDTYPE", - prep_pgdfiletype) + merged_dict = self.sub( + merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGD", fname + ) + merged_dict = self.sub( + merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGDTYPE", prep_pgdfiletype + ) else: - merged_dict = self.delete(merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGD") - merged_dict = self.delete(merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGDTYPE") + merged_dict = self.delete( + merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGD" + ) + merged_dict = self.delete( + merged_dict, "NAM_PREP_SURF_ATM", "CFILEPGDTYPE" + ) if self.dtg is not None: - # prep_time = datetime.strptime(dtg, "%Y%m%d%H") prep_time = self.dtg - merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "NYEAR", - int(prep_time.strftime("%Y"))) - merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "NMONTH", - int(prep_time.strftime("%m"))) - merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "NDAY", - int(prep_time.strftime("%d"))) - merged_dict = self.sub(merged_dict, "NAM_PREP_SURF_ATM", "XTIME", - float(prep_time.strftime("%H")) * 3600.) 
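# Sketch (not part of the patch) of how the NAM_PREP_SURF_ATM date entries set below
# are derived from the cycle time: NYEAR/NMONTH/NDAY as integers and XTIME as seconds
# since midnight. The "YYYYMMDDHH" DTG string format is an assumption matching the
# as_datetime() usage elsewhere in this file.
from datetime import datetime

dtg = "2023042506"  # hypothetical cycle time
prep_time = datetime.strptime(dtg, "%Y%m%d%H")
prep_date = {
    "NYEAR": int(prep_time.strftime("%Y")),
    "NMONTH": int(prep_time.strftime("%m")),
    "NDAY": int(prep_time.strftime("%d")),
    "XTIME": float(prep_time.strftime("%H")) * 3600.0,
}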
+ merged_dict = self.sub( + merged_dict, "NAM_PREP_SURF_ATM", "NYEAR", int(prep_time.strftime("%Y")) + ) + merged_dict = self.sub( + merged_dict, "NAM_PREP_SURF_ATM", "NMONTH", int(prep_time.strftime("%m")) + ) + merged_dict = self.sub( + merged_dict, "NAM_PREP_SURF_ATM", "NDAY", int(prep_time.strftime("%d")) + ) + merged_dict = self.sub( + merged_dict, + "NAM_PREP_SURF_ATM", + "XTIME", + float(prep_time.strftime("%H")) * 3600.0, + ) else: - raise Exception("You must provide a DTG for prep") + raise RuntimeError("You must provide a DTG for prep") if self.config.get_setting("SURFEX#TILES#SEA") == "SEAFLX": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep_seaflux.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep_seaflux.json" + ) sea_ice = self.config.get_setting("SURFEX#SEA#ICE") - merged_dict = self.sub(merged_dict, "NAM_PREP_SEAFLUX", "CSEAICE_SCHEME", sea_ice) + merged_dict = self.sub( + merged_dict, "NAM_PREP_SEAFLUX", "CSEAICE_SCHEME", sea_ice + ) if sea_ice == "SICE": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep_sice.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep_sice.json" + ) if self.config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep_flake.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep_flake.json" + ) lclim_lake = self.config.get_setting("SURFEX#FLAKE#LCLIM") - merged_dict = self.sub(merged_dict, "NAM_PREP_FLAKE", "LCLIM_LAKE", lclim_lake) + merged_dict = self.sub( + merged_dict, "NAM_PREP_FLAKE", "LCLIM_LAKE", lclim_lake + ) if self.config.get_setting("SURFEX#TILES#NATURE") == "ISBA": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep_isba.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep_isba.json" + ) # Set extra ISBA-DIF properties (Not needed in prep?) if self.config.get_setting("SURFEX#ISBA#SCHEME") == "DIF": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep_isba_dif.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep_isba_dif.json" + ) # ISBA CANOPY lisba_canopy = self.config.get_setting("SURFEX#ISBA#CANOPY") - merged_dict = self.sub(merged_dict, "NAM_PREP_ISBA", "LISBA_CANOPY", lisba_canopy) + merged_dict = self.sub( + merged_dict, "NAM_PREP_ISBA", "LISBA_CANOPY", lisba_canopy + ) # Snow - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep_isba_snow.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep_isba_snow.json" + ) snow_scheme = self.config.get_setting("SURFEX#ISBA#SNOW") - merged_dict = self.sub(merged_dict, "NAM_PREP_ISBA_SNOW", "CSNOW", snow_scheme) + merged_dict = self.sub( + merged_dict, "NAM_PREP_ISBA_SNOW", "CSNOW", snow_scheme + ) if self.config.get_setting("SURFEX#ISBA#SNOW") == "CRO": - merged_dict = \ - self.merge_json_namelist_file(merged_dict, - self.input_path + "/prep_isba_snow_crocus.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/prep_isba_snow_crocus.json" + ) return merged_dict def set_offline_namelist(self, merged_dict): - """Set offline namelist. 
- - Raises: - Exception: _description_ - - """ - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline.json") + """Set offline namelist.""" + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline.json" + ) if self.config.get_setting("SURFEX#IO#LSELECT"): - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/selected_output.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/selected_output.json" + ) # SEAFLX settings if self.config.get_setting("SURFEX#TILES#SEA") == "SEAFLX": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_seaflux.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_seaflux.json" + ) # Surface perturbations pertflux = self.config.get_setting("SURFEX#SEA#PERTFLUX") merged_dict = self.sub(merged_dict, "NAM_SEAFLUXn", "LPERTFLUX", pertflux) if self.config.get_setting("SURFEX#TILES#INLAND_WATER") == "WATFLX": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_watflux.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_watflux.json" + ) if self.config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_flake.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_flake.json" + ) # ISBA settings if self.config.get_setting("SURFEX#TILES#NATURE") == "ISBA": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_isba.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_isba.json" + ) pertsurf = self.config.get_setting("SURFEX#ISBA#PERTSURF") merged_dict = self.sub(merged_dict, "NAM_ISBAn", "LPERTSURF", pertsurf) xcgmax = self.config.get_setting("SURFEX#ISBA#XCGMAX", abort=False) @@ -1691,19 +1848,20 @@ def set_offline_namelist(self, merged_dict): # SSO sso = self.config.get_setting("SURFEX#SSO#SCHEME") - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_sso.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_sso.json" + ) merged_dict = self.sub(merged_dict, "NAM_SSON", "CROUGH", sso) if sso == "OROTUR": - merged_dict = \ - self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_sso_orotur.json") - if isinstance(self.geo, surfex.ConfProj): - merged_dict = self.sub(merged_dict, "NAM_ISBAn", "XSOROT", self.geo.xdx) + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_sso_orotur.json" + ) + merged_dict = self.sub(merged_dict, "NAM_ISBAn", "XSOROT", self.geo.xdx) if self.config.get_setting("SURFEX#TILES#TOWN") == "TEB": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_teb.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_teb.json" + ) # Perturbed offline settings (are overridden when running binary) # Make sure variables are existing and consistent @@ -1714,8 +1872,9 @@ def set_offline_namelist(self, merged_dict): # Climate setting if self.config.get_setting("SURFEX#SEA#LVOLATILE_SIC"): - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_seaice.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + 
"/offline_seaice.json" + ) merged_dict = self.sub(merged_dict, "NAM_SEAICEn", "LVOLATILE_SIC", True) merged_dict = self.sub(merged_dict, "NAM_SEAICEn", "XSIC_EFOLDING_TIME", 1.0) @@ -1739,9 +1898,9 @@ def prepare_offline_perturbation(self, merged_dict): merged_dict["NAM_IO_VARASSIM"].update({"LPRT": False}) if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "EKF": - merged_dict = \ - self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_assim_pert.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_assim_pert.json" + ) merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CASSIM_ISBA", "EKF") nvar = 0 @@ -1750,12 +1909,18 @@ def prepare_offline_perturbation(self, merged_dict): xtprt_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XTPRT_M") xsigma_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XSIGMA_M") for var, cvar_val in enumerate(cvar_m): - merged_dict = self.sub(merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "XTPRT_M(@VAR@)", xtprt_m[var], - var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "XSIGMA_M(@VAR@)", xsigma_m[var], - var=var + 1) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "XTPRT_M(@VAR@)", xtprt_m[var], var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "XSIGMA_M(@VAR@)", xsigma_m[var], var=var + 1 + ) if nncv[var] == 1: nvar += 1 merged_dict = self.delete(merged_dict, "NAM_VAR", "XSIGMA_M(@VAR@)") @@ -1763,23 +1928,24 @@ def prepare_offline_perturbation(self, merged_dict): merged_dict = self.delete(merged_dict, "NAM_VAR", "CVAR_M(@VAR@)") merged_dict = self.delete(merged_dict, "NAM_VAR", "NNCV(@VAR@)") merged_dict = self.sub(merged_dict, "NAM_VAR", "NVAR", nvar) - # self.input_list.append({"json": {"NAM_VAR": {"NVAR": nvar}}}) if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "ENKF": - merged_dict = \ - self.merge_json_namelist_file(merged_dict, - self.input_path + "/offline_assim_pert.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/offline_assim_pert.json" + ) merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CASSIM_ISBA", "ENKF") nvar = 0 cvar_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#CVAR_M") nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NNCV") - # nens_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NENS_M") for var, cvar_val in enumerate(cvar_m): - # print(merged_dict) - merged_dict = self.sub(merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1 + ) if nncv[var] == 1: nvar += 1 merged_dict = self.delete(merged_dict, "NAM_VAR", "XSIGMA_M(@VAR@)") @@ -1787,44 +1953,50 @@ def prepare_offline_perturbation(self, merged_dict): merged_dict = self.delete(merged_dict, "NAM_VAR", "CVAR_M(@VAR@)") merged_dict = self.delete(merged_dict, "NAM_VAR", "NNCV(@VAR@)") merged_dict = self.sub(merged_dict, "NAM_VAR", "NVAR", nvar) - # self.input_list.append({"json": {"NAM_VAR": {"NVAR": 
nvar}}}) return merged_dict def set_obs(self, merged_dict): """Set obs. Args: - merged_dict (_type_): _description_ + merged_dict (dict): Merged settings Raises: - Exception: _description_ + RuntimeError: Mismatch in nnco/cobs_m/xerrobs_m Returns: _type_: _description_ """ - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/soda_obs.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/soda_obs.json" + ) lobsheader = self.config.get_setting("SURFEX#ASSIM#OBS#LOBSHEADER") merged_dict = self.sub(merged_dict, "NAM_OBS", "LOBSHEADER", lobsheader) lobsnat = self.config.get_setting("SURFEX#ASSIM#OBS#LOBSNAT") merged_dict = self.sub(merged_dict, "NAM_OBS", "LOBSNAT", lobsnat) cfile_format_obs = self.config.get_setting("SURFEX#ASSIM#OBS#CFILE_FORMAT_OBS") - merged_dict = self.sub(merged_dict, "NAM_OBS", "CFILE_FORMAT_OBS", cfile_format_obs) + merged_dict = self.sub( + merged_dict, "NAM_OBS", "CFILE_FORMAT_OBS", cfile_format_obs + ) nobstype = 0 nnco = self.config.get_setting("SURFEX#ASSIM#OBS#NNCO") cobs_m = self.config.get_setting("SURFEX#ASSIM#OBS#COBS_M") xerrobs_m = self.config.get_setting("SURFEX#ASSIM#OBS#XERROBS_M") logging.debug("%s %s %s", nnco, cobs_m, xerrobs_m) if len(nnco) != len(cobs_m) or len(nnco) != len(xerrobs_m): - raise Exception("Mismatch in nnco/cobs_m/xerrobs_m") + raise RuntimeError("Mismatch in nnco/cobs_m/xerrobs_m") for obs, obs_val in enumerate(nnco): - merged_dict = self.sub(merged_dict, "NAM_OBS", "COBS_M(@VAR@)", cobs_m[obs], - var=obs + 1) - merged_dict = self.sub(merged_dict, "NAM_OBS", "NNCO(@VAR@)", obs_val, var=obs + 1) - merged_dict = self.sub(merged_dict, "NAM_OBS", "XERROBS_M(@VAR@)", xerrobs_m[obs], - var=obs + 1) + merged_dict = self.sub( + merged_dict, "NAM_OBS", "COBS_M(@VAR@)", cobs_m[obs], var=obs + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_OBS", "NNCO(@VAR@)", obs_val, var=obs + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_OBS", "XERROBS_M(@VAR@)", xerrobs_m[obs], var=obs + 1 + ) if nnco[obs] == 1: nobstype += 1 merged_dict = self.delete(merged_dict, "NAM_OBS", "COBS_M(@VAR@)") @@ -1841,11 +2013,17 @@ def set_soda_namelist(self, merged_dict): Args: merged_dict (dict): Merged dict + Raises: + RuntimeError: You must provide a DTG when using a list for snow + RuntimeError: Mismatch in nncv/cvar_m + RuntimeError: Mismatch in nncv/cvar_m/xsigma_m/xtprt_m + Returns: dict: Merged dict. 
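The indexed entries handled above (CVAR_M(@VAR@), NNCV(@VAR@), COBS_M(@VAR@), ...) rely on substituting the @VAR@ placeholder in the namelist key with a 1-based index, counting the active entries, and finally deleting the unresolved template key. A minimal, hypothetical stand-in for that pattern (not the actual sub/delete methods of the namelist class):

    def expand_indexed(block, key_pattern, values):
        """Fill CVAR_M(1), CVAR_M(2), ... from a template key holding @VAR@."""
        out = dict(block)
        for i, val in enumerate(values):
            out[key_pattern.replace("@VAR@", str(i + 1))] = val
        out.pop(key_pattern, None)  # drop the unresolved template entry afterwards
        return out

    nncv = [1, 0, 1]                 # which control variables are active
    cvar_m = ["TG2", "WG2", "LAI"]
    nam_var = {"CVAR_M(@VAR@)": None, "NNCV(@VAR@)": None}
    nam_var = expand_indexed(nam_var, "CVAR_M(@VAR@)", cvar_m)
    nam_var = expand_indexed(nam_var, "NNCV(@VAR@)", nncv)
    nam_var["NVAR"] = nncv.count(1)  # only active entries count towards NVAR
    print(nam_var["NVAR"], nam_var["CVAR_M(1)"], nam_var["NNCV(2)"])
    # 2 TG2 0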
""" - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/soda.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/soda.json" + ) merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LASSIM", True) @@ -1853,32 +2031,72 @@ def set_soda_namelist(self, merged_dict): merged_dict = self.set_obs(merged_dict) # LSM - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CFILE_FORMAT_LSM", - self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM")) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "CFILE_FORMAT_LSM", + self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM"), + ) # Sea - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CASSIM_SEA", - self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA")) - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CFILE_FORMAT_SST", - self.config.get_setting("SURFEX#ASSIM#SEA#CFILE_FORMAT_SST")) - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LREAD_SST_FROM_FILE", - self.config.get_setting("SURFEX#ASSIM#SEA#LREAD_SST_FROM_FILE")) - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LEXTRAP_SEA", - self.config.get_setting("SURFEX#ASSIM#SEA#LEXTRAP_SEA")) - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LECSST", - self.config.get_setting("SURFEX#ASSIM#SEA#LECSST")) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "CASSIM_SEA", + self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "CFILE_FORMAT_SST", + self.config.get_setting("SURFEX#ASSIM#SEA#CFILE_FORMAT_SST"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "LREAD_SST_FROM_FILE", + self.config.get_setting("SURFEX#ASSIM#SEA#LREAD_SST_FROM_FILE"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "LEXTRAP_SEA", + self.config.get_setting("SURFEX#ASSIM#SEA#LEXTRAP_SEA"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "LECSST", + self.config.get_setting("SURFEX#ASSIM#SEA#LECSST"), + ) # Water - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CASSIM_WATER", - self.config.get_setting("SURFEX#ASSIM#SCHEMES#INLAND_WATER")) - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LWATERTG2", - self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LWATERTG2")) - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LEXTRAP_WATER", - self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER")) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "CASSIM_WATER", + self.config.get_setting("SURFEX#ASSIM#SCHEMES#INLAND_WATER"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "LWATERTG2", + self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LWATERTG2"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "LEXTRAP_WATER", + self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER"), + ) # Nature - merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CASSIM_ISBA", - self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA")) + merged_dict = self.sub( + merged_dict, + "NAM_ASSIM", + "CASSIM_ISBA", + self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA"), + ) # Snow laesnm = False @@ -1892,14 +2110,17 @@ def set_soda_namelist(self, merged_dict): logging.debug("true") laesnm = True else: - raise Exception("You must provide a DTG when using a list for snow " - "assimilation cycles") + raise RuntimeError( + "You must provide a DTG when using a list for snow " + "assimilation cycles" + ) merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LAESNM", laesnm) # Set OI settings if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == 
"OI": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/soda_isba_oi.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/soda_isba_oi.json" + ) ua_physics = self.config.get_setting("FORECAST#PHYSICS", abort=False) if ua_physics is None: logging.warning("FORECAST#PHYSICS not set. Assume arome physics") @@ -1909,10 +2130,18 @@ def set_soda_namelist(self, merged_dict): elif ua_physics == "alaro": merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LAROME", False) - merged_dict = self.sub(merged_dict, "NAM_NACVEG", "XSIGT2MO", - self.config.get_setting("SURFEX#ASSIM#ISBA#OI#XSIGT2MO")) - merged_dict = self.sub(merged_dict, "NAM_NACVEG", "XSIGH2MO", - self.config.get_setting("SURFEX#ASSIM#ISBA#OI#XSIGH2MO")) + merged_dict = self.sub( + merged_dict, + "NAM_NACVEG", + "XSIGT2MO", + self.config.get_setting("SURFEX#ASSIM#ISBA#OI#XSIGT2MO"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_NACVEG", + "XSIGH2MO", + self.config.get_setting("SURFEX#ASSIM#ISBA#OI#XSIGH2MO"), + ) merged_dict = self.sub(merged_dict, "NAM_NACVEG", "XRCLIMCA", 0.0) merged_dict = self.sub(merged_dict, "NAM_NACVEG", "XRCLISST", 0.05) merged_dict = self.sub(merged_dict, "NAM_NACVEG", "NECHGU", self.fcint) @@ -1924,8 +2153,9 @@ def set_soda_namelist(self, merged_dict): merged_dict = self.sub(merged_dict, "NAM_ASSIM", "CFILE_FORMAT_FG", f_fg) if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "EKF": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/soda_isba_ekf.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/soda_isba_ekf.json" + ) nvar = 0 llincheck = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#LLINCHECK") merged_dict = self.sub(merged_dict, "NAM_ASSIM", "LLINCHECK", llincheck) @@ -1935,15 +2165,25 @@ def set_soda_namelist(self, merged_dict): xsigma_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XSIGMA_M") xtprt_m = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XTPRT_M") nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#NNCV") - if len(nncv) != len(cvar_m) or len(nncv) != len(xsigma_m) or len(nncv) != len(xtprt_m): - raise Exception("Mismatch in nncv/cvar_m/xsigma_m/xtprt_m") + if ( + len(nncv) != len(cvar_m) + or len(nncv) != len(xsigma_m) + or len(nncv) != len(xtprt_m) + ): + raise RuntimeError("Mismatch in nncv/cvar_m/xsigma_m/xtprt_m") for var, cvar_val in enumerate(cvar_m): - merged_dict = self.sub(merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "XTPRT_M(@VAR@)", xtprt_m[var], - var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "XSIGMA_M(@VAR@)", xsigma_m[var], - var=var + 1) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "XTPRT_M(@VAR@)", xtprt_m[var], var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "XSIGMA_M(@VAR@)", xsigma_m[var], var=var + 1 + ) if nncv[var] == 1: nvar += 1 merged_dict = self.delete(merged_dict, "NAM_VAR", "XSIGMA_M(@VAR@)") @@ -1954,24 +2194,36 @@ def set_soda_namelist(self, merged_dict): merged_dict = self.sub(merged_dict, "NAM_VAR", "NVAR", nvar) xscale_q = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#XSCALE_Q") merged_dict = self.sub(merged_dict, "NAM_VAR", 
"XSCALE_Q", xscale_q) - # self.input_list.append({"json": {"NAM_VAR": {"XSCALE_Q": xscale_q}}}) merged_dict = self.sub(merged_dict, "NAM_IO_VARASSIM", "LPRT", False) - merged_dict = self.sub(merged_dict, "NAM_IO_VARASSIM", "LBEV", - self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#EVOLVE_B")) - merged_dict = self.sub(merged_dict, "NAM_IO_VARASSIM", "LBFIXED", - not self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#EVOLVE_B")) + merged_dict = self.sub( + merged_dict, + "NAM_IO_VARASSIM", + "LBEV", + self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#EVOLVE_B"), + ) + merged_dict = self.sub( + merged_dict, + "NAM_IO_VARASSIM", + "LBFIXED", + not self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#EVOLVE_B"), + ) if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "ENKF": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/soda_isba_enkf.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/soda_isba_enkf.json" + ) nvar = 0 cvar_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#CVAR_M") nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NNCV") if len(nncv) != len(cvar_m): - raise Exception("Mismatch in nncv/cvar_m") + raise RuntimeError("Mismatch in nncv/cvar_m") for var, cvar_val in enumerate(cvar_m): - merged_dict = self.sub(merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1) - merged_dict = self.sub(merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "CVAR_M(@VAR@)", cvar_val, var=var + 1 + ) + merged_dict = self.sub( + merged_dict, "NAM_VAR", "NNCV(@VAR@)", nncv[var], var=var + 1 + ) if nncv[var] == 1: nvar += 1 @@ -1989,28 +2241,34 @@ def epilog(self, merged_dict): """Epilog.""" # Always set these if self.config.get_setting("SURFEX#SEA#ICE") == "SICE": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/epilog_sice.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/epilog_sice.json" + ) - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/epilog_treedrag.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/epilog_treedrag.json" + ) lfaketrees = self.config.get_setting("SURFEX#TREEDRAG#FAKETREES", abort=False) if lfaketrees is not None: merged_dict = self.sub(merged_dict, "NAM_TREEDRAG", "LFAKETREE", lfaketrees) if self.config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/epilog_flake.json") + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/epilog_flake.json" + ) return merged_dict def override(self, merged_dict): """Overide.""" # Override posssibility if os.path.exists(self.input_path + "/override.json"): - logging.warning("WARNING: Override settings with content from %s/override.json", - self.input_path) - merged_dict = self.merge_json_namelist_file(merged_dict, - self.input_path + "/override.json") + logging.warning( + "WARNING: Override settings with content from %s/override.json", + self.input_path, + ) + merged_dict = self.merge_json_namelist_file( + merged_dict, self.input_path + "/override.json" + ) return merged_dict @staticmethod @@ -2021,11 +2279,10 @@ def sub(merged_dict, nam_block, key, value, vtype=None, decade=None, var=None): merged_dict (dict): Merged dict nam_block (str): Namelist block key (str): Key - value (): Value - - Raises: - FileNotFoundError: _description_ - Exception: 
_description_ + value (any): Value + vtype(int, optional): Veg type. Defaults to None. + decade(int, optional): Decade type. Defaults to None. + var(int, optional): Var type. Defaults to None. Returns: dict: Merged dict @@ -2062,7 +2319,9 @@ def sub(merged_dict, nam_block, key, value, vtype=None, decade=None, var=None): if "@" + key + "@" in setting: setting = value else: - logging.warning("No string substitution. Using existing value: %s", setting) + logging.warning( + "No string substitution. Using existing value: %s", setting + ) merged_dict[nam_block][key] = setting else: logging.warning("No setting found in namelist for: %s", key) @@ -2128,13 +2387,27 @@ def set_direct_data_namelist(lnamelist_section, ldtype, ldname, linput_path): filetype_name = ldtype if ldtype == "YSOC_TOP" or ldtype == "YSOC_SUB": filetype_name = "YSOC" - return {"json": json.loads('{"' + lnamelist_section + '": { "' + ldtype + '": "' - + basename + '", ' + '"' + filetype_name + 'FILETYPE": "DIRECT"}}')} + return { + "json": json.loads( + '{"' + + lnamelist_section + + '": { "' + + ldtype + + '": "' + + basename + + '", ' + + '"' + + filetype_name + + 'FILETYPE": "DIRECT"}}' + ) + } if ldname.endswith(".json"): return {"file": linput_path + "/" + ldname} @staticmethod - def set_dirtyp_data_namelist(lnamelist_section, ldtype, ldname, vtype=None, decade=None): + def set_dirtyp_data_namelist( + lnamelist_section, ldtype, ldname, vtype=None, decade=None + ): """Set dirtyp data namelist. Args: @@ -2161,8 +2434,17 @@ def set_dirtyp_data_namelist(lnamelist_section, ldtype, ldname, vtype=None, deca if vtype is not None or decade is not None: filetype_name = filetype_name + ")" return { - "json": json.loads('{"' + lnamelist_section + '": { "CFNAM_' + filetype_name + '": "' - + basename + '", "CFTYP_' + filetype_name + '": "DIRTYPE"}}') + "json": json.loads( + '{"' + + lnamelist_section + + '": { "CFNAM_' + + filetype_name + + '": "' + + basename + + '", "CFTYP_' + + filetype_name + + '": "DIRTYPE"}}' + ) } @staticmethod @@ -2258,10 +2540,10 @@ def ascii_file2nml(input_fname, input_fmt="json"): f90nml.Namelist: Namelist object. """ - if input_fmt == 'json': + if input_fmt == "json": with open(input_fname, mode="r", encoding="utf-8") as input_file: output_data = json.load(input_file) - elif input_fmt == 'yaml': + elif input_fmt == "yaml": with open(input_fname, mode="r", encoding="utf-8") as input_file: output_data = yaml.safe_load(input_file) output_data = f90nml.Namelist(output_data) @@ -2278,11 +2560,15 @@ def nml2ascii(input_data, output_file, output_fmt="json", indent=2): indent (int, optional): Indentation. Defaults to 2. """ - if output_fmt == 'json': + if output_fmt == "json": input_data = input_data.todict(complex_tuple=True) - json.dump(input_data, open(output_file, "w", encoding="utf-8"), indent=indent, - separators=(',', ': ')) - elif output_fmt == 'yaml': + json.dump( + input_data, + open(output_file, "w", encoding="utf-8"), + indent=indent, + separators=(",", ": "), + ) + elif output_fmt == "yaml": input_data = input_data.todict(complex_tuple=True) yaml.dump(input_data, output_file, default_flow_style=False) @@ -2313,718 +2599,3 @@ def get_namelist(self): logging.debug("Constructing namelist:") merged_json_settings = self.namelist_dict return self.ascii2nml(merged_json_settings) - - -class Ecoclimap(object): - """Ecoclimap.""" - - def __init__(self, config, system_file_paths=None): - """Construct ecoclimap data object. - - Args: - config (surfex.Configuration): Surfex configuration. 
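The ascii_file2nml/nml2ascii helpers above round-trip between JSON/YAML dictionaries and Fortran namelists via f90nml. A self-contained sketch of that round trip (the file name is illustrative):

    import json

    import f90nml

    settings = {
        "nam_prep_surf_atm": {"nyear": 2023, "nmonth": 4, "nday": 25, "xtime": 21600.0}
    }

    # dict -> Fortran namelist file
    nml = f90nml.Namelist(settings)
    nml.write("OPTIONS.nam", force=True)

    # Fortran namelist file -> dict -> JSON
    back = f90nml.read("OPTIONS.nam").todict()
    print(json.dumps(back, indent=2))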
- system_file_paths (surfex.SystemFilePaths, optional): Mapping of local file structure - to look for inut files. - Defaults to None. - - """ - self.config = config - self.system_file_paths = system_file_paths - self.cover_dir = "ecoclimap_cover_dir" - self.bin_dir = "ecoclimap_bin_dir" - self.ecoclimap_files = ["ecoclimapI_covers_param.bin", "ecoclimapII_af_covers_param.bin", - "ecoclimapII_eu_covers_param.bin"] - self.decadal_data_types = None - - def set_input(self, check_existence=True): - """Set input. - - Args: - check_existence (bool, optional): _description_. Defaults to True. - - Returns: - dict: File mappings. - - """ - if self.system_file_paths is None: - raise Exception("System file path must be set for this method") - data = {} - for fname in self.ecoclimap_files: - fname_data = self.system_file_paths.get_system_file(self.bin_dir, fname, - default_dir="climdir", - check_existence=check_existence) - data.update({fname: fname_data}) - return data - - def set_bin_files(self, check_existence=True): - """Set bin files. - - Args: - check_existence (bool, optional): Check if files exist. Defaults to True. - - Returns: - dict: File mappings. - - """ - return self.set_input(check_existence=check_existence) - - -class EcoclimapSG(Ecoclimap): - """Ecoclimap SG.""" - - def __init__(self, config, system_file_paths=None, veg_types=20, decades=36): - """Construct ecoclimap SG. - - Args: - config (_type_): _description_ - system_file_paths (_type_, optional): _description_. Defaults to None. - veg_types (int, optional): _description_. Defaults to 20. - decades (int, optional): _description_. Defaults to 36. - - """ - Ecoclimap.__init__(self, config, system_file_paths=system_file_paths) - self.veg_types = veg_types - self.decades = decades - self.cover_file = self.config.get_setting("SURFEX#COVER#SG") - self.cover_dir = "ecoclimap_sg_cover_dir" - self.decadal_data_types = ["ALBNIR_SOIL", "ALBNIR_VEG", "ALBVIS_SOIL", "ALBVIS_VEG", "LAI"] - - def set_bin_files(self, check_existence=True): - """set_bin_files not used for SG.""" - - def set_input(self, check_existence=True): - """Set input data. - - Args: - check_existence (bool, optional): Check if files are existing. Defaults to True. - - Returns: - dict: Mapping of files. 
- - """ - if self.system_file_paths is None: - raise Exception("System file path must be set for this method") - - data = {} - tree_height_dir = "tree_height_dir" - fname = self.config.get_setting("SURFEX#COVER#H_TREE") - if fname != "" and fname is not None: - ext_data = ExternalSurfexInputFile(self.system_file_paths) - data.update(ext_data.set_input_data_from_format(tree_height_dir, fname, - check_existence=check_existence)) - - decadal_data_types = ["ALBNIR_SOIL", "ALBNIR_VEG", "ALBVIS_SOIL", "ALBVIS_VEG", "LAI"] - for decadal_data_type in decadal_data_types: - for __ in range(1, self.veg_types + 1): - for decade in range(1, self.decades + 1): - filepattern = self.config.get_setting("SURFEX#COVER#" + decadal_data_type, - check_parsing=False) - fname = self.parse_fnames(filepattern, decade) - dtype = decadal_data_type.lower() + "_dir" - ext_data = ExternalSurfexInputFile(self.system_file_paths) - dat = ext_data.set_input_data_from_format(dtype, fname, - check_existence=check_existence) - data.update(dat) - return data - - @staticmethod - def parse_fnames(filepattern, decade): - """Parse file names.""" - filename = filepattern - decade = decade - 1 - mmm = int(decade / 3) + 1 - cmm = f"{mmm:02d}" - cdd = ((decade % 3) * 10) + 5 - cdd = f"{cdd:02d}" - filename = filename.replace("@MM@", str(cmm)) - filename = filename.replace("@CDD@", str(cdd)) - return filename - - -class PgdInputData(surfex.JsonInputData): - """PGD input.""" - - def __init__(self, config, system_file_paths, check_existence=True): - """Construct PD input. - - Args: - config (_type_): _description_ - system_file_paths (_type_): _description_ - check_existence (bool, optional): _description_. Defaults to True. - - """ - # Ecoclimap settings - eco_sg = config.get_setting("SURFEX#COVER#SG") - if eco_sg: - ecoclimap = EcoclimapSG(config, system_file_paths=system_file_paths) - else: - ecoclimap = Ecoclimap(config, system_file_paths=system_file_paths) - - data = ecoclimap.set_input(check_existence=check_existence) - - ext_data = ExternalSurfexInputFile(system_file_paths) - # Set direct input files - if config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": - version = config.get_setting("SURFEX#FLAKE#LDB_VERSION") - if version != "": - version = "_V" + version - datadir = "flake_dir" - fname = "GlobalLakeDepth" + version + ".dir" - linkbasename = "GlobalLakeDepth" - data.update(ext_data.set_input_data_from_format(datadir, fname, default_dir="climdir", - linkbasename=linkbasename, - check_existence=check_existence)) - fname = "GlobalLakeStatus" + version + ".dir" - linkbasename = "GlobalLakeStatus" - data.update(ext_data.set_input_data_from_format(datadir, fname, default_dir="climdir", - linkbasename=linkbasename, - check_existence=check_existence)) - - possible_direct_data = { - "ISBA": { - "YSAND": "sand_dir", - "YCLAY": "clay_dir", - "YSOC_TOP": "soc_top_dir", - "YSOC_SUB": "soc_sub_dir" - }, - "COVER": { - "YCOVER": ecoclimap.cover_dir - }, - "ZS": { - "YZS": "oro_dir" - } - } - for namelist_section, ftypes in possible_direct_data.items(): - for ftype, data_dir in ftypes.items(): - # data_dir = possible_direct_data[namelist_section][ftype] - fname = str(config.get_setting("SURFEX#" + namelist_section + "#" + ftype)) - data.update(ext_data.set_input_data_from_format(data_dir, fname, - default_dir="climdir", - check_existence=check_existence)) - - # Treedrag - if config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") != "": - fname = config.get_setting("SURFEX#TREEDRAG#TREEDATA_FILE") - data_dir = "tree_height_dir" - 
data.update(ext_data.set_input_data_from_format(data_dir, fname, default_dir="climdir", - check_existence=check_existence)) - - surfex.JsonInputData.__init__(self, data) - - -class PrepInputData(surfex.JsonInputData): - """Input data for PREP.""" - - def __init__(self, config, system_file_paths, check_existence=True, prep_file=None, - prep_pgdfile=None): - """Construct input data for PREP. - - Args: - config (_type_): _description_ - system_file_paths (_type_): _description_ - check_existence (bool, optional): _description_. Defaults to True. - prep_file (_type_, optional): _description_. Defaults to None. - prep_pgdfile (_type_, optional): _description_. Defaults to None. - - """ - data = {} - # Ecoclimap settings - eco_sg = config.get_setting("SURFEX#COVER#SG") - if not eco_sg: - ecoclimap = Ecoclimap(config, system_file_paths) - data.update(ecoclimap.set_bin_files(check_existence=check_existence)) - - logging.debug("prep class %s", system_file_paths.__class__) - ext_data = ExternalSurfexInputFile(system_file_paths) - # ext_data = system_file_paths - if prep_file is not None: - if not prep_file.endswith(".json"): - fname = os.path.basename(prep_file) - if fname != prep_file: - data.update({fname: prep_file}) - if prep_pgdfile is not None: - fname = os.path.basename(prep_pgdfile) - if fname != prep_pgdfile: - data.update({fname: prep_pgdfile}) - - if config.get_setting("SURFEX#TILES#INLAND_WATER") == "FLAKE": - data_dir = "flake_dir" - fname = "LAKE_LTA_NEW.nc" - data.update(ext_data.set_input_data_from_format(data_dir, fname, default_dir="climdir", - check_existence=check_existence)) - - surfex.JsonInputData.__init__(self, data) - - -class OfflineInputData(surfex.JsonInputData): - """Input data for offline.""" - - def __init__(self, config, system_file_paths, check_existence=True): - """Construct input data for offline. - - Args: - config (_type_): _description_ - system_file_paths (_type_): _description_ - check_existence (bool, optional): _description_. Defaults to True. - - Raises: - NotImplementedError: _description_ - - """ - data = {} - # Ecoclimap settings - eco_sg = config.get_setting("SURFEX#COVER#SG") - if not eco_sg: - ecoclimap = Ecoclimap(config, system_file_paths) - data.update(ecoclimap.set_bin_files(check_existence=check_existence)) - - data_dir = "forcing_dir" - if config.get_setting("SURFEX#IO#CFORCING_FILETYPE") == "NETCDF": - fname = "FORCING.nc" - data.update({fname: system_file_paths.get_system_file(data_dir, fname, - default_dir=None)}) - else: - raise NotImplementedError - - surfex.JsonInputData.__init__(self, data) - - -class InlineForecastInputData(surfex.JsonInputData): - """Inline forecast input data.""" - - def __init__(self, config, system_file_paths, check_existence=True): - """Construct input data for inline forecast. - - Args: - config (_type_): _description_ - system_file_paths (_type_): _description_ - check_existence (bool, optional): _description_. Defaults to True. - """ - data = {} - # Ecoclimap settings - eco_sg = config.get_setting("SURFEX#COVER#SG") - if not eco_sg: - ecoclimap = Ecoclimap(config, system_file_paths) - data.update(ecoclimap.set_bin_files(check_existence=check_existence)) - - surfex.JsonInputData.__init__(self, data) - - -class SodaInputData(surfex.JsonInputData): - """Input data for SODA.""" - - def __init__(self, config, system_file_paths, check_existence=True, masterodb=True, - perturbed_file_pattern=None, dtg=None): - """Construct input data for SODA. 
- - Args: - config (_type_): _description_ - system_file_paths (_type_): _description_ - check_existence (bool, optional): _description_. Defaults to True. - masterodb (bool, optional): _description_. Defaults to True. - perturbed_file_pattern (_type_, optional): _description_. Defaults to None. - dtg (_type_, optional): _description_. Defaults to None. - - """ - self.config = config - self.system_file_paths = system_file_paths - self.file_paths = ExternalSurfexInputFile(self.system_file_paths) - if dtg is not None: - if isinstance(dtg, str): - dtg = datetime.strptime(dtg, "%Y%m%d%H") - self.dtg = dtg - surfex.JsonInputData.__init__(self, {}) - - # Ecoclimap settings - eco_sg = self.config.get_setting("SURFEX#COVER#SG") - if not eco_sg: - ecoclimap = Ecoclimap(self.config, self.system_file_paths) - self.add_data(ecoclimap.set_bin_files(check_existence=check_existence)) - - # OBS - nnco = self.config.get_setting("SURFEX#ASSIM#OBS#NNCO") - need_obs = False - for pobs in nnco: - if pobs == 1: - need_obs = True - if need_obs: - self.add_data(self.set_input_observations(check_existence=check_existence)) - - # SEA - if self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA") != "NONE": - if self.config.get_setting("SURFEX#ASSIM#SCHEMES#SEA") == "INPUT": - self.add_data(self.set_input_sea_assimilation(check_existence=check_existence)) - - # WATER - if self.config.get_setting("SURFEX#ASSIM#SCHEMES#INLAND_WATER") != "NONE": - pass - - # NATURE - if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") != "NONE": - if self.config.setting_is("SURFEX#ASSIM#SCHEMES#ISBA", "EKF"): - data = self.set_input_vertical_soil_ekf(check_existence=check_existence, - masterodb=masterodb, - pert_fp=perturbed_file_pattern) - self.add_data(data) - if self.config.setting_is("SURFEX#ASSIM#SCHEMES#ISBA", "OI"): - self.add_data(self.set_input_vertical_soil_oi()) - if self.config.setting_is("SURFEX#ASSIM#SCHEMES#ISBA", "ENKF"): - self.add_data(self.set_input_vertical_soil_enkf(check_existence=check_existence, - masterodb=masterodb, - pert_fp=perturbed_file_pattern)) - - # Town - if self.config.get_setting("SURFEX#ASSIM#SCHEMES#TEB") != "NONE": - pass - - def set_input_observations(self, check_existence=True): - """Input data for observations. - - Args: - check_existence (bool, optional): _description_. Defaults to True. - - Returns: - _type_: _description_ - - """ - cfile_format_obs = self.config.get_setting("SURFEX#ASSIM#OBS#CFILE_FORMAT_OBS") - if cfile_format_obs == "ASCII": - if self.dtg is None: - raise Exception("Obs ASCII file needs DTG information") - cyy = self.dtg.strftime("%y") - cmm = self.dtg.strftime("%m") - cdd = self.dtg.strftime("%d") - chh = self.dtg.strftime("%H") - target = "OBSERVATIONS_" + cyy + cmm + cdd + "H" + chh + ".DAT" - elif cfile_format_obs == "FA": - target = "ICMSHANAL+0000" - else: - raise NotImplementedError(cfile_format_obs) - - data_dir = "obs_dir" - obsfile = self.system_file_paths.get_system_file(data_dir, target, default_dir="assim_dir", - check_existence=check_existence, - basedtg=self.dtg) - obssettings = { - target: obsfile - } - return obssettings - - def set_input_sea_assimilation(self, check_existence=True): - """Input data for sea assimilation. - - Args: - check_existence (bool, optional): _description_. Defaults to True. 
- - Returns: - _type_: _description_ - - """ - cfile_format_sst = self.config.get_setting("SURFEX#ASSIM#SEA#CFILE_FORMAT_SST") - if cfile_format_sst.upper() == "ASCII": - target = "SST_SIC.DAT" - elif cfile_format_sst.upper() == "FA": - target = "SST_SIC" - else: - raise NotImplementedError(cfile_format_sst) - - data_dir = "sst_file_dir" - sstfile = self.system_file_paths.get_system_file(data_dir, target, basedtg=self.dtg, - check_existence=check_existence, - default_dir="assim_dir") - sea_settings = { - target: sstfile - } - return sea_settings - - def set_input_vertical_soil_oi(self): - """Input data for OI in soil. - - Returns: - _type_: _description_ - - """ - oi_settings = {} - # Climate - cfile_format_clim = self.config.get_setting("SURFEX#ASSIM#ISBA#OI#CFILE_FORMAT_CLIM") - if cfile_format_clim.upper() == "ASCII": - target = "CLIMATE.DAT" - elif cfile_format_clim.upper() == "FA": - target = "clim_isba" - else: - raise NotImplementedError(cfile_format_clim) - - data_dir = "climdir" - climfile = self.system_file_paths.get_system_file(data_dir, target, default_dir="assim_dir", - check_existence=True) - oi_settings.update({target: climfile}) - - # First guess for SURFEX - cfile_format_fg = self.config.get_setting("SURFEX#ASSIM#ISBA#OI#CFILE_FORMAT_FG") - if cfile_format_fg.upper() == "ASCII": - if self.dtg is None: - raise Exception("First guess in ASCII format needs DTG information") - cyy = self.dtg.strftime("%y") - cmm = self.dtg.strftime("%m") - cdd = self.dtg.strftime("%d") - chh = self.dtg.strftime("%H") - target = "FIRST_GUESS_" + cyy + cmm + cdd + "H" + chh + ".DAT" - elif cfile_format_fg.upper() == "FA": - target = "FG_OI_MAIN" - else: - raise NotImplementedError(cfile_format_fg) - - data_dir = "first_guess_dir" - first_guess = self.system_file_paths.get_system_file(data_dir, target, - default_dir="assim_dir", - basedtg=self.dtg, check_existence=True) - oi_settings.update({target: first_guess}) - - data_dir = "ascat_dir" - ascatfile = self.system_file_paths.get_system_file(data_dir, target, - default_dir="assim_dir", - basedtg=self.dtg, check_existence=True) - oi_settings.update({"ASCAT_SM.DAT": ascatfile}) - - # OI coefficients - data_dir = "oi_coeffs_dir" - oi_coeffs = self.config.get_setting("SURFEX#ASSIM#ISBA#OI#COEFFS") - oi_coeffs = self.system_file_paths.get_system_file(data_dir, oi_coeffs, - default_dir="assim_dir", - check_existence=True) - oi_settings.update({"fort.61": oi_coeffs}) - - # LSM - cfile_format_lsm = self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM") - if cfile_format_lsm.upper() == "ASCII": - target = "LSM.DAT" - elif cfile_format_lsm.upper() == "FA": - target = "FG_OI_MAIN" - else: - raise NotImplementedError(cfile_format_lsm) - - data_dir = "lsm_dir" - lsmfile = self.system_file_paths.get_system_file(data_dir, target, default_dir="assim_dir", - basedtg=self.dtg, check_existence=True) - oi_settings.update({target: lsmfile}) - return oi_settings - - def set_input_vertical_soil_ekf(self, check_existence=True, masterodb=True, - pert_fp=None, geo=None): - """Input data for EKF in soil. - - Args: - check_existence (bool, optional): _description_. Defaults to True. - masterodb (bool, optional): _description_. Defaults to True. - pert_fp (_type_, optional): _description_. Defaults to None. - geo (_type_, optional): _description_. Defaults to None. 
- - Returns: - _type_: _description_ - - """ - if self.dtg is None: - raise Exception("You must set DTG") - - cyy = self.dtg.strftime("%y") - cmm = self.dtg.strftime("%m") - cdd = self.dtg.strftime("%d") - chh = self.dtg.strftime("%H") - ekf_settings = {} - - # TODO - fcint = 3 - fg_dtg = self.dtg - timedelta(hours=fcint) - data_dir = "first_guess_dir" - first_guess = self.system_file_paths.get_system_path(data_dir, default_dir="assim_dir", - validtime=self.dtg, basedtg=fg_dtg, - check_existence=check_existence) - # First guess for SURFEX - csurf_filetype = self.config.get_setting("SURFEX#IO#CSURF_FILETYPE").lower() - fgf = self.config.get_setting("SURFEX#IO#CSURFFILE", validtime=self.dtg, basedtg=fg_dtg) - first_guess = first_guess + "/" + fgf - if csurf_filetype == "ascii": - fg_file = surfex.AsciiSurfexFile(first_guess, geo=geo) - fgf = fg_file.filename - elif csurf_filetype == "nc": - logging.debug("%s", fgf) - fg_file = surfex.NCSurfexFile(first_guess, geo=geo) - fgf = fg_file.filename - elif csurf_filetype == "fa": - lfagmap = self.config.get_setting("SURFEX#IO#LFAGMAP") - # TODO for now assume that first guess always is a inline forecast with FA format - fg_file = surfex.FaSurfexFile(first_guess, lfagmap=lfagmap, masterodb=masterodb) - fgf = fg_file.filename - else: - raise NotImplementedError - - # We never run inline model for perturbations or in SODA - extension = fg_file.extension - if csurf_filetype == "fa": - extension = "fa" - - ekf_settings.update({"PREP_INIT." + extension: fgf}) - ekf_settings.update({"PREP_" + cyy + cmm + cdd + "H" + chh + "." + extension: fgf}) - - nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#NNCV") - llincheck = self.config.get_setting("SURFEX#ASSIM#ISBA#EKF#LLINCHECK") - lnncv = len(nncv) + 1 - if llincheck: - lnncv = (len(nncv) * 2) + 1 - pert_ekf = 0 - pert_input = 0 - for ppp in range(0, lnncv): - exists = False - if ppp > 0: - p_p = ppp - if llincheck and ppp > len(nncv): - p_p = ppp - len(nncv) - if nncv[p_p - 1] == 1: - exists = True - pert_input = ppp - else: - exists = True - - if exists: - data_dir = "perturbed_run_dir" - if pert_fp is None: - logging.info("Use default CSURFFILE for perturbed file names") - pert_fp = \ - self.config.get_setting("SURFEX#IO#CSURFFILE", - check_parsing=False) + "." + extension - - # TODO depending on when perturbations are run - pert_run = self.system_file_paths.get_system_file(data_dir, - pert_fp, - validtime=self.dtg, - basedtg=fg_dtg, - check_existence=check_existence, - default_dir="assim_dir", - pert=pert_input) - - target = "PREP_" + cyy + cmm + cdd + "H" + chh + "_EKF_PERT" + str(pert_ekf) + "." \ - + extension - ekf_settings.update({target: pert_run}) - pert_ekf = pert_ekf + 1 - - # LSM - # Fetch first_guess needed for LSM for extrapolations - if self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER"): - cfile_format_lsm = self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM") - if cfile_format_lsm.upper() == "ASCII": - target = "LSM.DAT" - elif cfile_format_lsm.upper() == "FA": - target = "FG_OI_MAIN" - else: - raise NotImplementedError(cfile_format_lsm) - - data_dir = "lsm_dir" - lsmfile = self.system_file_paths.get_system_file(data_dir, target, - default_dir="assim_dir", - validtime=self.dtg, basedtg=fg_dtg, - check_existence=check_existence) - ekf_settings.update({target: lsmfile}) - return ekf_settings - - def set_input_vertical_soil_enkf(self, check_existence=True, masterodb=True, - pert_fp=None, geo=None): - """Input data for ENKF in soil. 
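The SODA input mappings removed above (and the EnKF variant just below) all hinge on the same yymmdd"H"hh stamp derived from the DTG: ASCII observations, the OI first guess and the EKF-perturbed PREP files only differ in prefix and suffix. A standalone sketch of the naming convention, with directory resolution through SystemFilePaths left out:

    from datetime import datetime

    def soda_targets(dtg, n_pert=2, extension="nc"):
        """Illustrative target file names built from the DTG stamp used above."""
        stamp = dtg.strftime("%y%m%d") + "H" + dtg.strftime("%H")
        names = {
            "obs": f"OBSERVATIONS_{stamp}.DAT",
            "first_guess_oi": f"FIRST_GUESS_{stamp}.DAT",
            "prep_init": f"PREP_INIT.{extension}",
            "prep_control": f"PREP_{stamp}.{extension}",
        }
        # one perturbed PREP file per EKF member, numbered from 0 (the control)
        for pert in range(n_pert):
            names[f"pert{pert}"] = f"PREP_{stamp}_EKF_PERT{pert}.{extension}"
        return names

    for key, val in soda_targets(datetime(2023, 4, 25, 6)).items():
        print(key, val)
    # obs OBSERVATIONS_230425H06.DAT, first_guess_oi FIRST_GUESS_230425H06.DAT, ...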
- - Args: - check_existence (bool, optional): _description_. Defaults to True. - masterodb (bool, optional): _description_. Defaults to True. - pert_fp (_type_, optional): _description_. Defaults to None. - geo (_type_, optional): _description_. Defaults to None. - - Returns: - _type_: _description_ - - """ - if self.dtg is None: - raise Exception("You must set DTG") - - cyy = self.dtg.strftime("%y") - cmm = self.dtg.strftime("%m") - cdd = self.dtg.strftime("%d") - chh = self.dtg.strftime("%H") - enkf_settings = {} - - # First guess for SURFEX - csurf_filetype = self.config.get_setting("SURFEX#IO#CSURF_FILETYPE").lower() - - # TODO - fcint = 3 - fg_dtg = self.dtg - timedelta(hours=fcint) - fgf = self.config.get_setting("SURFEX#IO#CSURFFILE", validtime=self.dtg, basedtg=fg_dtg) - if csurf_filetype == "ascii": - fg_file = surfex.AsciiSurfexFile(fgf, geo=geo) - fgf = fg_file.filename - elif csurf_filetype == "nc": - fg_file = surfex.NCSurfexFile(fgf, geo=geo) - fgf = fg_file.filename - elif csurf_filetype == "fa": - lfagmap = self.config.get_setting("SURFEX#IO#LFAGMAP") - # TODO for now assume that first guess always is a inline forecast with FA format - fg_file = surfex.FaSurfexFile(fgf, lfagmap=lfagmap, geo=geo, masterodb=masterodb) - fgf = fg_file.filename - else: - raise NotImplementedError - - data_dir = "first_guess_dir" - first_guess = self.system_file_paths.get_system_file(data_dir, fgf, default_dir="assim_dir", - validtime=self.dtg, basedtg=fg_dtg, - check_existence=check_existence) - - # We newer run inline model for perturbations or in SODA - extension = fg_file.extension - if csurf_filetype == "fa": - extension = "fa" - - nens_m = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NENS_M") - - enkf_settings.update({"PREP_INIT." + extension: first_guess}) - enkf_settings.update({"PREP_" + cyy + cmm + cdd + "H" + chh + "." + extension: first_guess}) - enkf_settings.update({"PREP_" + cyy + cmm + cdd + "H" + chh + "_EKF_ENS" + str(nens_m) + "." - + extension: first_guess}) - - # nncv = self.config.get_setting("SURFEX#ASSIM#ISBA#ENKF#NNCV") - # pert_enkf = 0 - # pert_input = 0 - for ppp in range(0, nens_m): - data_dir = "perturbed_run_dir" - if pert_fp is None: - logging.info("Use default CSURFFILE for perturbed file names") - perturbed_file_pattern = \ - self.config.get_setting("SURFEX#IO#CSURFFILE", check_parsing=False) + "." \ - + extension - - # TODO depending on when perturbations are run - perturbed_run = self.system_file_paths.get_system_file(data_dir, - perturbed_file_pattern, - validtime=self.dtg, - basedtg=fg_dtg, - check_existence=check_existence, - default_dir="assim_dir", - pert=ppp) - - target = "PREP_" + cyy + cmm + cdd + "H" + chh + "_EKF_ENS" + str(ppp) + "." 
+ extension - enkf_settings.update({target: perturbed_run}) - - # LSM - # Fetch first_guess needed for LSM for extrapolations - if self.config.get_setting("SURFEX#ASSIM#INLAND_WATER#LEXTRAP_WATER"): - cfile_format_lsm = self.config.get_setting("SURFEX#ASSIM#CFILE_FORMAT_LSM") - if cfile_format_lsm.upper() == "ASCII": - target = "LSM.DAT" - elif cfile_format_lsm.upper() == "FA": - target = "FG_OI_MAIN" - else: - raise NotImplementedError(cfile_format_lsm) - - data_dir = "lsm_dir" - lsmfile = self.system_file_paths.get_system_file(data_dir, target, - default_dir="assim_dir", - validtime=self.dtg, basedtg=fg_dtg, - check_existence=check_existence) - enkf_settings.update({target: lsmfile}) - return enkf_settings diff --git a/surfex/netcdf.py b/surfex/netcdf.py index e73e34f..b12acbf 100644 --- a/surfex/netcdf.py +++ b/surfex/netcdf.py @@ -1,11 +1,12 @@ """Netcdf.""" +import logging import os import re -import logging -from datetime import datetime, date from enum import Enum -import numpy as np + import netCDF4 +import numpy as np + try: import cfunits except ModuleNotFoundError: @@ -14,7 +15,11 @@ cfunits = None except: # noqa cfunits = None -import surfex + + +from .datetime_utils import fromtimestamp, isdatetime, utcfromtimestamp +from .geo import ConfProj, Geo +from .interpolation import Interpolation class Netcdf(object): @@ -24,21 +29,26 @@ def __init__(self, filename): """Construct NetCDF. Args: - filename (_type_): _description_ + filename (str): Filename """ self.filename = filename logging.debug("filename: %s", filename) self.file = netCDF4.Dataset(filename, "r") - def num_height(self, field): - """num_height.""" - - def num_time(self, field): - """num_time.""" - - def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, ycoords=None, - deaccumulate=False, instantanious=0., units=None, lev_from_ind=False): + def nc_slice( + self, + var_name, + levels=None, + members=None, + times=None, + xcoords=None, + ycoords=None, + deaccumulate=False, + instantanious=0.0, + units=None, + lev_from_ind=False, + ): """Assembles a 5D field in order lon,lat,time,height,ensemble. Arguments: @@ -53,13 +63,24 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y units (str): CF unit for the variable to be read lev_from_ind (bool): level list are indices and not values + Raises: + NotImplementedError: Subsetting of the input dimensions not implemented yet! + ValueError: Times must be a list! + ValueError: Levels must be a list! + ValueError: Members must be a list! + RuntimeError: No ensemble members found + RuntimeError: cfunits not loaded! + ValueError: Axis is not defined!") + Returns: - np.array: 5D array with values + np.array: 5D array with values """ var = NetCDFFileVariable(self.file, var_name) if xcoords is not None or ycoords is not None: - raise Exception("Subsetting of the input dimensions not implemented yet!") + raise NotImplementedError( + "Subsetting of the input dimensions not implemented yet!" 
+ ) tinfo = "" if times is not None: @@ -76,16 +97,19 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y prev_time_steps.append(0) else: if not isinstance(times, (list, tuple)): - raise Exception("Times must be a list!") - if isinstance(times[0], date): + raise ValueError("Times must be a list!") + if isdatetime(times[0]): logging.debug("Time provided in call as datetime objects") times_in_var = var.datetimes for i, times_in_var_val in enumerate(times_in_var): - logging.debug("i %s times_in_var %s times %s", i, times_in_var_val, times) + logging.debug( + "i %s times_in_var %s times %s", i, times_in_var_val, times + ) for tval in times: # Time steps requested - logging.debug("i=%s, times_in_var_val=%s tval=%s", - i, times_in_var_val, tval) + logging.debug( + "i=%s, times_in_var_val=%s tval=%s", i, times_in_var_val, tval + ) if times_in_var[i] == tval: times_to_read.append(i) if i > 0: @@ -113,7 +137,7 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y else: logging.debug("Level provided in call. lev_from_ind=%s", str(lev_from_ind)) if not isinstance(levels, (list, tuple)): - raise Exception("Levels must be a list!") + raise ValueError("Levels must be a list!") levels_in_var = var.levels for i in range(0, levels_in_var.shape[0]): for level_ind in levels: @@ -122,7 +146,9 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y levels_to_read.append(i) else: # NB! Round number to avoid round off when matching - if round(float(levels_in_var[i]), 5) == round(float(level_ind), 5): + if round(float(levels_in_var[i]), 5) == round( + float(level_ind), 5 + ): levels_to_read.append(i) members_to_read = [] @@ -131,7 +157,7 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y members_to_read.append(i) else: if not isinstance(members, (list, tuple)): - raise Exception("Members must be a list!") + raise ValueError("Members must be a list!") logging.debug("Ensemble members provided in call") members_in_var = var.members for i in range(0, members_in_var.shape[0]): @@ -140,7 +166,7 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y members_to_read.append(i) if len(members_to_read) == 0: - raise Exception("No ensemble members found for " + var.var_name) + raise RuntimeError("No ensemble members found for " + var.var_name) lons = var.lons lats = var.lats @@ -150,15 +176,21 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y dim_y = lats.shape[1] logging.debug("lons.shape=%s lats.shape=%s", lons.shape, lats.shape) - geo = surfex.geo.Geo(lons, lats) + geo = Geo(lons, lats) dim_t = max(len(times_to_read), 1) dim_levels = max(len(levels_to_read), 1) dim_members = max(len(members_to_read), 1) logging.debug("Dimensions in output") - logging.debug("%s %s %s %s %s", str(dim_x), str(dim_y), str(dim_t), str(dim_levels), - str(dim_members)) + logging.debug( + "%s %s %s %s %s", + str(dim_x), + str(dim_y), + str(dim_t), + str(dim_levels), + str(dim_members), + ) lon_ind = slice(0, dim_x, 1) lat_ind = slice(0, dim_y, 1) @@ -188,7 +220,7 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y prev_dims.append(members_to_read) mapping[4] = i else: - raise Exception(str(types[i]) + " is not defined!") + raise ValueError(str(types[i]) + " is not defined!") logging.debug("Read %s with dimensions: %s", var.var_name, str(dims)) if deaccumulate: @@ -200,8 +232,10 @@ def slice(self, var_name, levels=None, members=None, 
times=None, xcoords=None, y field = self.file[var.var_name][dims] if units is not None: if cfunits is None: - raise Exception("cfunits not loaded!") - field = cfunits.Units.conform(field, cfunits.Units(var.units), cfunits.Units(units)) + raise RuntimeError("cfunits not loaded!") + field = cfunits.Units.conform( + field, cfunits.Units(var.units), cfunits.Units(units) + ) # Deaccumulation if deaccumulate: @@ -209,9 +243,10 @@ def slice(self, var_name, levels=None, members=None, times=None, xcoords=None, y previous_field = self.file[var.var_name][prev_dims] if units is not None: if cfunits is None: - raise Exception("cfunits not loaded!") - previous_field = cfunits.Units.conform(previous_field, cfunits.Units(var.units), - cfunits.Units(units)) + raise RuntimeError("cfunits not loaded!") + previous_field = cfunits.Units.conform( + previous_field, cfunits.Units(var.units), cfunits.Units(units) + ) field = np.subtract(original_field, previous_field) # Create instantanious values @@ -242,29 +277,25 @@ def field(self, var_name, level=None, member=None, validtime=None, units=None): """Read field. Args: - var_name (_type_): _description_ - level (_type_, optional): _description_. Defaults to None. - member (_type_, optional): _description_. Defaults to None. - validtime (_type_, optional): _description_. Defaults to None. - units (_type_, optional): _description_. Defaults to None. - - Raises: - Exception: _description_ + var_name (str): Variable name + level (int, optional): Level. Defaults to None. + member (int, optional): Realization. Defaults to None. + validtime (surfex.datetime_utils.as_datetime, optional): Validtime. Defaults to None. + units (str, optional): Units. Defaults to None. Returns: - _type_: _description_ + tuple: Field, Geo """ if validtime is None: validtime = [] - elif not isinstance(validtime, datetime): - raise Exception("validime must be a datetime object") else: validtime = [validtime] logging.debug("level %s member %s validtime %s", level, member, validtime) - field, geo_in = self.slice(var_name, levels=level, members=member, times=validtime, - units=units) + field, geo_in = self.nc_slice( + var_name, levels=level, members=member, times=validtime, units=units + ) # Reshape to fortran 2D style field = np.reshape(field, [geo_in.nlons, geo_in.nlats], order="F") return field, geo_in @@ -273,26 +304,24 @@ def points(self, var, geo, validtime=None, interpolation="bilinear"): """Read a field and interpolate it to requested positions. Args: - var (_type_): _description_ - geo (_type_): _description_ - validtime (_type_, optional): _description_. Defaults to None. - interpolation (str, optional): _description_. Defaults to "bilinear". + var (NetCDFReadVariable): NetCDF variable + geo (surfx.geo.Geo): Geometry + validtime (surfex.datetime_utils.as_datetime, optional): Validtime. Defaults to None. + interpolation (str, optional): Interpolation. Defaults to "bilinear". 
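The points method documented above reads a 2D field and hands it to the gridpp-based Interpolation class. As a rough illustration of what "interpolate a gridded field to station points" means, here is a plain nearest-neighbour version using only numpy; the real class supports bilinear interpolation and caches the interpolator:

    import numpy as np

    def nearest_points(lons, lats, field, p_lons, p_lats):
        """Pick the nearest grid value for each requested point (flat search, degrees)."""
        flat_lons, flat_lats = lons.ravel(), lats.ravel()
        values = field.ravel()
        out = []
        for plon, plat in zip(p_lons, p_lats):
            dist2 = (flat_lons - plon) ** 2 + (flat_lats - plat) ** 2
            out.append(values[int(np.argmin(dist2))])
        return np.array(out)

    lons, lats = np.meshgrid(np.linspace(9.0, 11.0, 5), np.linspace(59.0, 61.0, 5))
    field = lats * 0.5 + lons  # dummy field for demonstration
    print(nearest_points(lons, lats, field, [10.1], [60.2]))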
Returns: - _type_: _description_ + tuple: Field, Interpolator """ - # field4d, geo_in = self.slice(var_name, levels=level, members=member, times=validtime, - # units=units) - # field2d = np.transpose(np.reshape(field4d, [geo_in.nlons, geo_in.nlats], order="F")) var_name = var.name level = var.level member = var.member units = var.units logging.debug("level %s member %s validtime %s", level, member, validtime) - field, geo_in = self.field(var_name, level=level, member=member, validtime=validtime, - units=units) - interpolator = surfex.interpolation.Interpolation(interpolation, geo_in, geo) + field, geo_in = self.field( + var_name, level=level, member=member, validtime=validtime, units=units + ) + interpolator = Interpolation(interpolation, geo_in, geo) field = interpolator.interpolate(field) return field, interpolator @@ -352,11 +381,10 @@ def axis_types(self): """Get axis_types.""" types = [] if self.var_name not in self.file.variables: - raise Exception(self.var_name + " is missing in file!") + raise RuntimeError(self.var_name + " is missing in file!") if self.file.variables[self.var_name]: for dim_name in self.file.variables[self.var_name].dimensions: - # dim_name = self.file.variables[self.var_name].dimensions[i] if dim_name == "longitude" or dim_name == "lon": types.append(Axis.LON) elif dim_name == "x": @@ -401,8 +429,11 @@ def units(self): def lats(self): """Get lats. + Raises: + RuntimeError: No latitude found + Returns: - np.array: 2D array of latitudes + np.array: 2D array of latitudes """ latvals = np.array([]) @@ -418,15 +449,18 @@ def lats(self): latvals = np.transpose(latvals, (1, 0)) if latvals.shape[0] == 0: - raise Exception("No latitude found for " + self.var_name) + raise RuntimeError("No latitude found for " + self.var_name) return latvals @property def lons(self): """Get lons. + Raises: + RuntimeError: No longitude found + Returns: - np.array: 2D array of longitudes + np.array: 2D array of longitudes """ lonvals = np.array([]) @@ -441,13 +475,16 @@ def lons(self): lonvals = np.transpose(lonvals, (1, 0)) if lonvals.shape[0] == 0: - raise Exception("No longitude found for " + self.var_name) + raise RuntimeError("No longitude found for " + self.var_name) return lonvals @property def datetimes(self): """Get datetimes. + Raises: + RuntimeError: cfunits not loaded! + Returns: list() @@ -459,12 +496,16 @@ def datetimes(self): val = self.file.variables[self.dim_names[i]] for tval in val: if cfunits is None: - raise Exception("cfunits not loaded!") - epochtime = int(cfunits.Units.conform( - tval, cfunits.Units(val.units), - cfunits.Units("seconds since 1970-01-01 00:00:00"))) + raise RuntimeError("cfunits not loaded!") + epochtime = int( + cfunits.Units.conform( + tval, + cfunits.Units(val.units), + cfunits.Units("seconds since 1970-01-01 00:00:00"), + ) + ) logging.debug("epoctime %s", epochtime) - d_t = datetime.utcfromtimestamp(epochtime) + d_t = utcfromtimestamp(epochtime) logging.debug("dt %s", d_t) times.append(d_t) @@ -531,20 +572,26 @@ def is_level(axis_type): """Check if is level. 
Args: - axis_type (_type_): _description_ + axis_type (Axis): Acis type Returns: - _type_: _description_ + bool: If axis is a level type """ - if axis_type == Axis.HEIGHT or axis_type == Axis.PESSURE or axis_type == Axis.GEOZ or \ - axis_type == Axis.HYBRID: + if ( + axis_type == Axis.HEIGHT + or axis_type == Axis.PESSURE + or axis_type == Axis.GEOZ + or axis_type == Axis.HYBRID + ): return True else: return False -def create_netcdf_first_guess_template(my_variables, my_nx, my_ny, fname="raw.nc", geo=None): +def create_netcdf_first_guess_template( + my_variables, my_nx, my_ny, fname="raw.nc", geo=None +): """Create netCDF template file for first guess. Args: @@ -586,34 +633,42 @@ def create_netcdf_first_guess_template(my_variables, my_nx, my_ny, fname="raw.nc my_fg.variables["y"].standard_name = "projection_y_coordinate" my_fg.variables["y"].units = "m" - standard_name = {"air_temperature_2m": "air_temperature", - "relative_humidity_2m": "relative_humidity", - "altitude": "altitude", - "surface_snow_thickness": "surface_snow_thickness", - "surface_soil_moisture": "surface_soil_moisture", - "cloud_base": "cloud_base", - "land_area_fraction": "land_area_fraction"} - long_name = {"air_temperature_2m": "Screen level temperature (T2M)", - "relative_humidity_2m": "Screen level relative humidity (RH2M)", - "altitude": "Altitude", - "surface_snow_thickness": "Surface snow thickness", - "surface_soil_moisture": "Surface soil moisture", - "cloud_base": "Cloud base", - "land_area_fraction": "Land Area Fraction"} - units = {"air_temperature_2m": "K", - "relative_humidity_2m": "1", - "altitude": "m", - "surface_snow_thickness": "m", - "surface_soil_moisture": "m3/m3", - "cloud_base": "m", - "land_area_fraction": "1"} - fillvalue = {"air_temperature_2m": "9.96921e+36", - "relative_humidity_2m": "9.96921e+36", - "altitude": "9.96921e+36", - "surface_snow_thickness": "9.96921e+36", - "surface_soil_moisture": "9.96921e+36", - "cloud_base": "9.96921e+36", - "land_area_fraction": "9.96921e+36"} + standard_name = { + "air_temperature_2m": "air_temperature", + "relative_humidity_2m": "relative_humidity", + "altitude": "altitude", + "surface_snow_thickness": "surface_snow_thickness", + "surface_soil_moisture": "surface_soil_moisture", + "cloud_base": "cloud_base", + "land_area_fraction": "land_area_fraction", + } + long_name = { + "air_temperature_2m": "Screen level temperature (T2M)", + "relative_humidity_2m": "Screen level relative humidity (RH2M)", + "altitude": "Altitude", + "surface_snow_thickness": "Surface snow thickness", + "surface_soil_moisture": "Surface soil moisture", + "cloud_base": "Cloud base", + "land_area_fraction": "Land Area Fraction", + } + units = { + "air_temperature_2m": "K", + "relative_humidity_2m": "1", + "altitude": "m", + "surface_snow_thickness": "m", + "surface_soil_moisture": "m3/m3", + "cloud_base": "m", + "land_area_fraction": "1", + } + fillvalue = { + "air_temperature_2m": "9.96921e+36", + "relative_humidity_2m": "9.96921e+36", + "altitude": "9.96921e+36", + "surface_snow_thickness": "9.96921e+36", + "surface_soil_moisture": "9.96921e+36", + "cloud_base": "9.96921e+36", + "land_area_fraction": "9.96921e+36", + } for my_var in my_variables: my_fg.createVariable(my_var, "f4", ("y", "x"), fill_value=fillvalue[my_var]) @@ -623,7 +678,7 @@ def create_netcdf_first_guess_template(my_variables, my_nx, my_ny, fname="raw.nc # Global attributes if geo is not None: - if isinstance(geo, surfex.ConfProj): + if isinstance(geo, ConfProj): my_fg.setncattr("gridtype", "lambert") 
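A hedged usage sketch of the template helper whose signature was just reformatted; the dimensions and field values are purely illustrative, and the returned handle is assumed to behave like a netCDF4.Dataset, as the variable assignments above suggest:

    my_fg = create_netcdf_first_guess_template(
        ["air_temperature_2m", "surface_snow_thickness"],  # subset of the supported variables
        my_nx=100,
        my_ny=150,
        fname="raw.nc",
    )
    my_fg.variables["air_temperature_2m"][:] = t2m_field  # hypothetical array of shape (my_ny, my_nx)
    my_fg.close()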
my_fg.setncattr("dlon", float(geo.xdx)) my_fg.setncattr("dlat", float(geo.xdy)) @@ -644,10 +699,11 @@ def read_first_guess_netcdf_file(input_file, var): var (_type_): _description_ Raises: - NotImplementedError: _description_ + NotImplementedError: Only conf proj implemented when reading geo from file + RuntimeError: cfunits not loaded! Returns: - _type_: _description_ + tuple: geo, validtime, background, glafs, gelevs """ file_handler = netCDF4.Dataset(input_file) @@ -655,11 +711,15 @@ def read_first_guess_netcdf_file(input_file, var): lats = file_handler["latitude"][:] if cfunits is None: - raise Exception("cfunits not loaded!") - validtime = int(cfunits.Units.conform(file_handler["time"][:], - cfunits.Units(file_handler["time"].units), - cfunits.Units("seconds since 1970-01-01 00:00:00"))) - validtime = datetime.fromtimestamp(validtime) + raise RuntimeError("cfunits not loaded!") + validtime = int( + cfunits.Units.conform( + file_handler["time"][:], + cfunits.Units(file_handler["time"].units), + cfunits.Units("seconds since 1970-01-01 00:00:00"), + ) + ) + validtime = fromtimestamp(validtime) n_x = lons.shape[1] n_y = lons.shape[0] @@ -678,16 +738,16 @@ def read_first_guess_netcdf_file(input_file, var): }, "nam_conf_proj": { "xlon0": file_handler.getncattr("projlon"), - "xlat0": file_handler.getncattr("projlat") - } + "xlat0": file_handler.getncattr("projlat"), + }, } - geo = surfex.ConfProj(from_json) + geo = ConfProj(from_json) else: raise NotImplementedError else: lons = np.array(np.transpose(np.reshape(lons, [n_y, n_x], order="F"))) lats = np.array(np.transpose(np.reshape(lats, [n_y, n_x], order="F"))) - geo = surfex.Geo(lons, lats) + geo = Geo(lons, lats) background = file_handler[var][:] background = np.array(np.reshape(background, [n_x * n_y])) @@ -710,8 +770,9 @@ def read_first_guess_netcdf_file(input_file, var): return geo, validtime, background, glafs, gelevs -def write_analysis_netcdf_file(filename, field, var, validtime, elevs, lafs, new_file=True, - geo=None): +def write_analysis_netcdf_file( + filename, field, var, validtime, elevs, lafs, new_file=True, geo=None +): """Write analysis NetCDF file. Args: @@ -737,13 +798,17 @@ def write_analysis_netcdf_file(filename, field, var, validtime, elevs, lafs, new if new_file: if geo is None: raise Exception("You need to provide geo to write a new file") - file_handler = create_netcdf_first_guess_template([var, "altitude", "land_area_fraction"], - geo.nlons, geo.nlats, fname=filename, - geo=geo) + file_handler = create_netcdf_first_guess_template( + [var, "altitude", "land_area_fraction"], + geo.nlons, + geo.nlats, + fname=filename, + geo=geo, + ) file_handler.variables["longitude"][:] = np.transpose(geo.lons) file_handler.variables["latitude"][:] = np.transpose(geo.lats) - file_handler.variables["x"][:] = [i for i in range(0, geo.nlons)] - file_handler.variables["y"][:] = [i for i in range(0, geo.nlats)] + file_handler.variables["x"][:] = list(range(0, geo.nlons)) + file_handler.variables["y"][:] = list(range(0, geo.nlats)) file_handler.variables["altitude"][:] = np.transpose(elevs) file_handler.variables["land_area_fraction"][:] = np.transpose(lafs) @@ -760,14 +825,19 @@ def oi2soda(dtg, t2m=None, rh2m=None, s_d=None, s_m=None, output=None): """Convert analysis to ASCII obs file for SODA. Args: - dtg (_type_): _description_ - t2m (_type_, optional): _description_. Defaults to None. - rh2m (_type_, optional): _description_. Defaults to None. - s_d (_type_, optional): _description_. Defaults to None. 
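For orientation, a short sketch of how the reader above is typically used (file name and variable are illustrative): it returns the geometry, the valid time and the flattened background field together with land-area fractions and elevations.

    geo, validtime, background, glafs, gelevs = read_first_guess_netcdf_file(
        "raw.nc", "air_temperature_2m"
    )
    # background is flattened to length n_x * n_y; glafs/gelevs hold land-area
    # fraction and elevation for the same points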
- s:m (_type_, optional): _description_. Defaults to None. - output (_type_, optional): _description_. Defaults to None. + dtg (surfex.datetime_utils): Analysis time + t2m (dict, optional): Screen level temperature var and file name. Defaults to None. + rh2m (dict, optional): Screen level relative humidiy var and file name. Defaults to None. + s_d (dict, optional): Snow depth var and file name. Defaults to None. + s_m (dict, optional): Soil moisture var and file name. Defaults to None. + output (str, optional): Output file name. Defaults to None. + + Raises: + RuntimeError: You must specify at least one file to read from + RuntimeError: Mismatch in ?? dimension """ + def check_input_to_soda_dimensions(my_nx, my_ny, nx1, ny1): if my_nx < 0: @@ -775,9 +845,13 @@ def check_input_to_soda_dimensions(my_nx, my_ny, nx1, ny1): if my_ny < 0: my_ny = ny1 if my_nx != nx1: - raise Exception("Mismatch in nx dimension " + str(my_nx) + " != " + str(nx1)) + raise RuntimeError( + "Mismatch in nx dimension " + str(my_nx) + " != " + str(nx1) + ) if my_ny != ny1: - raise Exception("Mismatch in ny dimension " + str(my_ny) + " != " + str(ny1)) + raise RuntimeError( + "Mismatch in ny dimension " + str(my_ny) + " != " + str(ny1) + ) return my_nx, my_ny @@ -798,12 +872,16 @@ def check_input_to_soda_dimensions(my_nx, my_ny, nx1, ny1): logging.debug("T2m %s %s", t2m["var"], t2m_fh.variables[t2m["var"]].shape) i = i + 1 - n_x, n_y = check_input_to_soda_dimensions(n_x, n_y, t2m_fh.variables[t2m["var"]].shape[1], - t2m_fh.variables[t2m["var"]].shape[0]) + n_x, n_y = check_input_to_soda_dimensions( + n_x, + n_y, + t2m_fh.variables[t2m["var"]].shape[1], + t2m_fh.variables[t2m["var"]].shape[0], + ) t2m_var = t2m_fh.variables[t2m["var"]][:] logging.debug("%s %s", t2m_var.shape, n_x * n_y) t2m_var = t2m_var.reshape([n_y * n_x], order="C") - t2m_var = t2m_var.filled(fill_value=999.) + t2m_var = t2m_var.filled(fill_value=999.0) t2m_var = t2m_var.tolist() if rh2m is not None: @@ -811,11 +889,15 @@ def check_input_to_soda_dimensions(my_nx, my_ny, nx1, ny1): logging.debug("RH2m %s %s", rh2m["var"], rh2m_fh.variables[rh2m["var"]].shape) i = i + 1 - n_x, n_y = check_input_to_soda_dimensions(n_x, n_y, rh2m_fh.variables[rh2m["var"]].shape[1], - rh2m_fh.variables[rh2m["var"]].shape[0]) + n_x, n_y = check_input_to_soda_dimensions( + n_x, + n_y, + rh2m_fh.variables[rh2m["var"]].shape[1], + rh2m_fh.variables[rh2m["var"]].shape[0], + ) rh2m_var = rh2m_fh.variables[rh2m["var"]][:] rh2m_var = rh2m_var.reshape([n_y * n_x], order="C") - rh2m_var = rh2m_var.filled(fill_value=999.) + rh2m_var = rh2m_var.filled(fill_value=999.0) rh2m_var = rh2m_var.tolist() if s_d is not None: @@ -823,12 +905,16 @@ def check_input_to_soda_dimensions(my_nx, my_ny, nx1, ny1): logging.debug("SD %s %s", s_d["var"], sd_fh.variables[s_d["var"]].shape) i = i + 1 - n_x, n_y = check_input_to_soda_dimensions(n_x, n_y, sd_fh.variables[s_d["var"]].shape[1], - sd_fh.variables[s_d["var"]].shape[0]) + n_x, n_y = check_input_to_soda_dimensions( + n_x, + n_y, + sd_fh.variables[s_d["var"]].shape[1], + sd_fh.variables[s_d["var"]].shape[0], + ) sd_var = sd_fh.variables[s_d["var"]][:] sd_var = sd_var.reshape([n_y * n_x], order="C") - sd_var = sd_var.filled(fill_value=999.) 
+ sd_var = sd_var.filled(fill_value=999.0) sd_var = sd_var.tolist() if s_m is not None: @@ -836,19 +922,25 @@ def check_input_to_soda_dimensions(my_nx, my_ny, nx1, ny1): logging.debug("SM %s %s", s_m["var"], sm_fh.variables[s_m["var"]].shape) i = i + 1 - n_x, n_y = check_input_to_soda_dimensions(n_x, n_y, sm_fh.variables[s_m["var"]].shape[1], - sm_fh.variables[s_m["var"]].shape[0]) + n_x, n_y = check_input_to_soda_dimensions( + n_x, + n_y, + sm_fh.variables[s_m["var"]].shape[1], + sm_fh.variables[s_m["var"]].shape[0], + ) sm_var = sm_fh.variables[s_m["var"]][:] sm_var = sm_var.reshape([n_y * n_x], order="C") - sm_var = sm_var.filled(fill_value=999.) + sm_var = sm_var.filled(fill_value=999.0) sm_var = sm_var.tolist() if i == 0: - raise Exception("You must specify at least one file to read from!") + raise RuntimeError("You must specify at least one file to read from!") if output is None: - output = "OBSERVATIONS_" + str(cyy) + str(cmm) + str(cdd) + "H" + str(chh) + ".DAT" + output = ( + "OBSERVATIONS_" + str(cyy) + str(cmm) + str(cdd) + "H" + str(chh) + ".DAT" + ) with open(output, mode="w", encoding="utf-8") as out: for i in range(0, n_x * n_y): @@ -872,6 +964,9 @@ def read_cryoclim_nc(infiles): Args: infiles (list): Input files. + Raises: + RuntimeError: "No files were read properly" + Returns: tuple: grid_lons, grid_lats, grid_snow_class @@ -881,7 +976,7 @@ def read_cryoclim_nc(infiles): grid_snow_class = None for filename in infiles: if os.path.exists(filename): - surfex.info("Reading: " + filename) + logging.info("Reading: %s", filename) ncf = netCDF4.Dataset(filename, "r") grid_lons = ncf["lon"][:] grid_lats = ncf["lat"][:] @@ -895,7 +990,7 @@ def read_cryoclim_nc(infiles): logging.warning("Warning file %s does not exists", filename) if grid_lons is None or grid_lats is None or grid_snow_class is None: - raise Exception("No files were read properly") + raise RuntimeError("No files were read properly") return grid_lons, grid_lats, grid_snow_class @@ -906,24 +1001,27 @@ def read_sentinel_nc(infiles): Args: infiles (list): Input files. + Raises: + RuntimeError: "No files were read properly" + + Returns: + tuple: longitudes, latitudes, soil moisture """ grid_lons = None grid_lats = None grid_sm = None for filename in infiles: if os.path.exists(filename): - surfex.info("Reading: " + filename) + logging.info("Reading: %s", filename) nch = netCDF4.Dataset(filename, "r") grid_lons = nch["LON"][:] grid_lats = nch["LAT"][:] grid_sm = nch["surface_soil_moisture"][:] -# grid_sm[grid_snow_class_read == 1] = 1 -# grid_snow_class[grid_snow_class_read == 0] = 0 nch.close() else: logging.warning("Warning file %s does not exists", filename) if grid_lons is None or grid_lats is None or grid_sm is None: - raise Exception("No files were read properly") + raise RuntimeError("No files were read properly") return grid_lons, grid_lats, grid_sm diff --git a/surfex/obs.py b/surfex/obs.py index 9495d28..32b764e 100644 --- a/surfex/obs.py +++ b/surfex/obs.py @@ -1,12 +1,11 @@ """obs.""" -import os -import logging -from datetime import datetime, timedelta import json -import glob -import requests +import logging +import os + import numpy as np -import surfex +import requests + try: import cfunits except ModuleNotFoundError: @@ -17,276 +16,10 @@ cfunits = None -class Observation(object): - """Observation class.""" - - def __init__(self, obstime, lon, lat, value, elev=np.nan, stid="NA", varname=None): - """Construct an observation. 
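A hedged call sketch for oi2soda as documented above: each input is a small dict naming the variable to read (the "var" key is confirmed by the code; the "file" key is an assumption for illustration), and dtg is the analysis time as a datetime object. Paths and names are invented:

    oi2soda(
        dtg,
        t2m={"file": "an_t2m.nc", "var": "air_temperature_2m"},
        rh2m={"file": "an_rh2m.nc", "var": "relative_humidity_2m"},
        s_d={"file": "an_sd.nc", "var": "surface_snow_thickness"},
        output="OBSERVATIONS_230425H06.DAT",  # default name is built from dtg if omitted
    )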
- - Args: - obstime (_type_): _description_ - lon (_type_): _description_ - lat (_type_): _description_ - value (_type_): _description_ - elev (_type_, optional): _description_. Defaults to np.nan. - stid (str, optional): _description_. Defaults to "NA". - varname (_type_, optional): _description_. Defaults to None. - - """ - self.obstime = obstime - self.lon = float(lon) - self.lat = float(lat) - self.stid = stid - self.elev = float(elev) - self.value = float(value) - self.varname = varname - - def print_obs(self): - """Print observation.""" - print("observation: ", self.obstime, self.lon, self.lat, self.stid, self.value, self.elev) - - @staticmethod - def vectors2obs(obstime, lon, lat, stid, elev, value, varname): - """Convert vectors to observations. - - Args: - obstime (_type_): _description_ - lon (_type_): _description_ - lat (_type_): _description_ - stid (_type_): _description_ - elev (_type_): _description_ - value (_type_): _description_ - varname (_type_): _description_ - - Returns: - Observation: Observation object. - """ - return Observation(obstime, lon, lat, value, elev=elev, varname=varname, stid=stid) - - @staticmethod - def obs2vectors(my_obs): - """Convert observations to vectors. - - Args: - my_obs (_type_): _description_ - - Returns: - _type_: _description_ - """ - # print(my_obs.obstime, my_obs.lon, my_obs.lat, my_obs.stid, my_obs.elev, my_obs.value) - return my_obs.obstime, my_obs.lon, my_obs.lat, my_obs.stid, my_obs.elev, my_obs.value, \ - my_obs.varname - - @staticmethod - def format_lon(lon): - """Format longitude.""" - lon = f"{float(lon):10.5f}" - return lon - - @staticmethod - def format_lat(lat): - """Format latitude.""" - lat = f"{float(lat):10.5f}" - return lat - - @staticmethod - def get_pos_from_stid(filename, stids): - """Get pos from station ID. - - Args: - filename (_type_): _description_ - stids (_type_): _description_ - - Raises: - Exception: _description_ - - Returns: - _type_: _description_ - - """ - lons = [] - lats = [] - with open(filename, mode="r", encoding="utf-8") as file_handler: - ids_from_file = json.load(file_handler) - for stid in stids: - found = False - for stid1 in ids_from_file: - if stid == stid1: - found = True - lon = float(ids_from_file[stid1]["lon"]) - lat = float(ids_from_file[stid1]["lat"]) - lons.append(lon) - lats.append(lat) - if not found: - raise Exception("Could not find station id " + stid + " in file " + filename) - return lons, lats - - @staticmethod - def get_stid_from_stationlist(stationlist, lons, lats): - """Get station ID from station list. - - Args: - stationlist (str): Filename of station list - lons (list): Longitudes - lats (list): Latitudes - - Returns: - list: Station IDs - - """ - index_pos = {} - with open(stationlist, mode="r", encoding="utf-8") as file_handler: - ids_from_file = json.load(file_handler) - for stid in ids_from_file: - lon = ids_from_file[stid]["lon"] - lat = ids_from_file[stid]["lat"] - pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat) - index_pos.update({pos: stid}) - - stids = [] - for i, lon in enumerate(lons): - lat = lats[i] - pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat) - if pos in index_pos: - stids.append(index_pos[pos]) - else: - stids.append("NA") - return stids - - -def get_datasources(obs_time, settings): - """Get data sources. 
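The fixed-width longitude/latitude formatting used by the deleted helpers (and kept by the relocated Observation class) is what makes the position lookups in this module work; a small illustration with made-up coordinates:

    lon, lat = 10.0, 60.0
    pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat)
    # pos == "  10.00000:  60.00000" and is used as the key in the index_pos dictionaries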
- - Main data source interface setting data ObservationSet objects based on settings dictionary - - Args: - obs_time (datetime.datetime): Observation time - settings (dict): Settings - - """ - # nmissing = 0 - datasources = [] - for obs_set in settings: - - kwargs = {} - kwargs.update({"label": obs_set}) - - # tolerate_nmissing = False - # if "tolerate_missing" in settings[obs_set]: - # tolerate_nmissing = settings[obs_set]["tolerate_nmissing"] - - if "filetype" in settings[obs_set]: - filetype = settings[obs_set]["filetype"] - filepattern = None - if "filepattern" in settings[obs_set]: - filepattern = settings[obs_set]["filepattern"] - - validtime = obs_time - if filetype.lower() == "bufr": - filename = surfex.file.parse_filepattern(filepattern, obs_time, validtime) - if "varname" in settings[obs_set]: - varname = settings[obs_set]["varname"] - else: - raise Exception("You must set variable name") - - if "lonrange" in settings[obs_set]: - kwargs.update({"lonrange": settings[obs_set]["lonrange"]}) - if "latrange" in settings[obs_set]: - kwargs.update({"latrange": settings[obs_set]["latrange"]}) - if "dt" in settings[obs_set]: - deltat = settings[obs_set]["dt"] - else: - deltat = 1800 - - print("kwargs", kwargs) - valid_range = timedelta(seconds=deltat) - if os.path.exists(filename): - datasources.append(surfex.bufr.BufrObservationSet(filename, [varname], obs_time, - valid_range, **kwargs)) - else: - print("WARNING: filename " + filename + " not set. Not added.") - - elif filetype.lower() == "netatmo": - filenames = None - if "filenames" in settings[obs_set]: - filenames = settings[obs_set]["filenames"] - if filenames is None: - if "filepattern" in settings[obs_set]: - filepattern = settings[obs_set]["filepattern"] - neg_t_range = 15 - if "neg_t_range" in settings[obs_set]: - neg_t_range = settings[obs_set]["neg_t_range"] - pos_t_range = 15 - if "pos_t_range" in settings[obs_set]: - pos_t_range = settings[obs_set]["pos_t_range"] - - dtg = validtime - timedelta(minutes=int(neg_t_range)) - end_dtg = validtime + timedelta(minutes=int(pos_t_range)) - - filenames = [] - while dtg < end_dtg: - fname = surfex.file.parse_filepattern(filepattern, dtg, dtg) - fname = glob.glob(fname) - # print(fname) - if len(fname) == 1: - fname = fname[0] - if os.path.exists(fname) and fname not in filenames: - filenames.append(fname) - dtg = dtg + timedelta(minutes=1) - else: - raise Exception("No filenames or filepattern found") - if "varname" in settings[obs_set]: - variable = settings[obs_set]["varname"] - else: - raise Exception("You must set varname to read NETATMO JSON files") - - if "lonrange" in settings[obs_set]: - kwargs.update({"lonrange": settings[obs_set]["lonrange"]}) - if "latrange" in settings[obs_set]: - kwargs.update({"latrange": settings[obs_set]["latrange"]}) - if "dt" in settings[obs_set]: - kwargs.update({"dt": settings[obs_set]["dt"]}) - else: - kwargs.update({"dt": 1800}) - - if filenames is not None: - datasources.append(NetatmoObservationSet(filenames, variable, obs_time, - **kwargs)) - else: - print("WARNING: filenames not set. 
Not added.") - - elif filetype.lower() == "frost": - if "varname" in settings[obs_set]: - varname = settings[obs_set]["varname"] - else: - raise Exception("You must set variable name") - - if "lonrange" in settings[obs_set]: - kwargs.update({"lonrange": settings[obs_set]["lonrange"]}) - if "latrange" in settings[obs_set]: - kwargs.update({"latrange": settings[obs_set]["latrange"]}) - if "unit" in settings[obs_set]: - kwargs.update({"unit": settings[obs_set]["unit"]}) - if "level" in settings[obs_set]: - kwargs.update({"level": settings[obs_set]["level"]}) - kwargs.update({"validtime": obs_time}) - datasources.append(MetFrostObservations(varname, **kwargs)) - elif filetype.lower() == "json": - filename = surfex.file.parse_filepattern(filepattern, obs_time, validtime) - varname = None - if "varname" in settings[obs_set]: - varname = settings[obs_set]["varname"] - - kwargs.update({"var": varname}) - if os.path.exists(filename): - datasources.append(JsonObservationSet(filename, **kwargs)) - else: - print("WARNING: filename " + filename + " not existing. Not added.") - else: - raise NotImplementedError("Unknown observation file format") - else: - print("No file type provided") - return datasources +from .datetime_utils import as_datetime, as_datetime_args, as_timedelta, utcfromtimestamp +from .interpolation import gridpos2points, inside_grid +from .observation import Observation +from .titan import QCDataSet, dataset_from_file class ObservationSet(object): @@ -332,8 +65,8 @@ def get_pos_index(self, lon, lat): Returns: int: Found position index. """ - lon = surfex.Observation.format_lon(lon) - lat = surfex.Observation.format_lat(lat) + lon = Observation.format_lon(lon) + lat = Observation.format_lat(lat) pos = lon + ":" + lat if pos in self.index_pos: return self.index_pos[pos] @@ -350,11 +83,13 @@ def get_obs(self): obs2vectors = np.vectorize(Observation.obs2vectors) logging.debug("Obs dim %s", len(self.observations)) if len(self.observations) > 0: - times, lons, lats, stids, elevs, values, varnames = obs2vectors(self.observations) + times, lons, lats, stids, elevs, values, varnames = obs2vectors( + self.observations + ) for point, lon in enumerate(lons): - lon = surfex.Observation.format_lon(lon) - lat = surfex.Observation.format_lat(lats[point]) + lon = Observation.format_lon(lon) + lat = Observation.format_lat(lats[point]) stid = str(stids[point]) pos = lon + ":" + lat @@ -388,7 +123,7 @@ def matching_obs(self, my_obs): if my_obs.obstime == self.observations.obstimes[i]: lon = self.observations.obstimes[i] lat = self.observations.obstimes[i] - pos = surfex.Observation.format_lon(lon) + ":" + surfex.Observation.format_lat(lat) + pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat) if pos in self.index_pos: found = True @@ -412,7 +147,7 @@ def points(self, geo, validtime=None): for i in range(0, geo.nlons): lon = geo.lonlist[i] lat = geo.latlist[i] - pos = surfex.Observation.format_lon(lon) + ":" + surfex.Observation.format_lat(lat) + pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat) lons.append(lon) lats.append(lat) @@ -427,7 +162,7 @@ def points(self, geo, validtime=None): my_times.append(None) my_stids.append("NA") my_values.append(np.nan) - print("Could not find position " + pos + " in this data source") + logging.info("Could not find position %s in this data source", pos) my_values = np.asanyarray(my_values) return my_times, my_values, my_stids @@ -442,16 +177,23 @@ def write_json_file(self, filename, indent=None): obs2vectors = 
np.vectorize(Observation.obs2vectors) data = {} if len(self.observations) > 0: - obstimes, lons, lats, stids, elevs, values, varnames = obs2vectors(self.observations) + obstimes, lons, lats, stids, elevs, values, varnames = obs2vectors( + self.observations + ) for obs, lon in enumerate(lons): - data.update({obs: { - "obstime": obstimes[obs].strftime("%Y%m%d%H%M%S"), - "varname": varnames[obs], - "lon": lon, - "lat": lats[obs], - "stid": stids[obs], - "elev": elevs[obs], - "value": values[obs]}}) + data.update( + { + obs: { + "obstime": obstimes[obs].strftime("%Y%m%d%H%M%S"), + "varname": varnames[obs], + "lon": lon, + "lat": lats[obs], + "stid": stids[obs], + "elev": elevs[obs], + "value": values[obs], + } + } + ) with open(filename, mode="w", encoding="utf-8") as file_handler: json.dump(data, file_handler, indent=indent) @@ -459,32 +201,45 @@ def write_json_file(self, filename, indent=None): class NetatmoObservationSet(ObservationSet): """Observation set from netatmo.""" - def __init__(self, filenames, variable, target_time, dt=3600, re=True, - lonrange=None, latrange=None, label="netatmo"): + def __init__( + self, + filenames, + variable, + target_time, + dt=3600, + re=True, + lonrange=None, + latrange=None, + label="netatmo", + ): """Construct netatmo obs. Args: - filenames (_type_): _description_ - variable (_type_): _description_ - target_time (_type_): _description_ + filenames (list): Filenames + variable (str): Variable + target_time (as_datetime): _description_ dt (int, optional): _description_. Defaults to 3600. re (bool, optional): _description_. Defaults to True. lonrange (_type_, optional): _description_. Defaults to None. latrange (_type_, optional): _description_. Defaults to None. label (str, optional): _description_. Defaults to "netatmo". + Raises: + RuntimeError: Lonrange must be a list with length 2 + RuntimeError: Latrange must be a list with length 2 + """ if lonrange is None: lonrange = [-180, 180] if not isinstance(lonrange, list) or len(lonrange) != 2: - raise Exception(f"Lonrange must be a list with length 2 {lonrange}") + raise RuntimeError(f"Lonrange must be a list with length 2 {lonrange}") if latrange is None: latrange = [-90, 90] if not isinstance(latrange, list) or len(latrange) != 2: - raise Exception(f"Latrange must be a list with length 2 {latrange}") + raise RuntimeError(f"Latrange must be a list with length 2 {latrange}") data = {} # key: id, value: list of values times = {} # key: id, value: list of times @@ -502,7 +257,7 @@ def __init__(self, filenames, variable, target_time, dt=3600, re=True, num_missing_elev = 0 num_wrong_time = 0 for ifilename in filenames: - with open(ifilename, mode='r', encoding="utf-8") as ifile: + with open(ifilename, mode="r", encoding="utf-8") as ifile: text = ifile.read() try: @@ -515,15 +270,16 @@ def __init__(self, filenames, variable, target_time, dt=3600, re=True, if text[0] == "{": text = f"[{text}" - text = text.replace('}]{', '}{') - text = text.replace('}][{', '},{') - text = text.replace('}{', '},{') - text = f'{"data": {text}}' + text = text.replace("}]{", "}{") + text = text.replace("}][{", "},{") + text = text.replace("}{", "},{") + print(text) + text = '{"data": %s}' % text raw = json.loads(text) raw = raw["data"] logging.debug("Parsing %d stations in %s", len(raw), ifilename) - except Exception as exc: - logging.error("Could not parse %s. 
Exception: %s", ifilename, str(exc)) + except RuntimeError: + logging.error("Could not parse %s.", ifilename) continue for line in raw: @@ -531,7 +287,6 @@ def __init__(self, filenames, variable, target_time, dt=3600, re=True, my_id = line["_id"] location = line["location"] curr_data = line["data"] - # print(curr_data) if variable in curr_data: if "time_utc" in curr_data: time_utc = curr_data["time_utc"] @@ -553,8 +308,15 @@ def __init__(self, filenames, variable, target_time, dt=3600, re=True, times[my_id] = list() elev = np.nan - metadata[my_id] = {"lon": lon, "lat": lat, "elev": elev} - if np.isnan(metadata[my_id]["elev"]) and "altitude" in line: + metadata[my_id] = { + "lon": lon, + "lat": lat, + "elev": elev, + } + if ( + np.isnan(metadata[my_id]["elev"]) + and "altitude" in line + ): metadata[my_id]["elev"] = line["altitude"] value = curr_data[variable] @@ -574,26 +336,33 @@ def __init__(self, filenames, variable, target_time, dt=3600, re=True, if target_time is not None: num_valid_stations = 0 for my_id, time in times.items(): - this_diff_times = \ - [(datetime.utcfromtimestamp(t) - target_time).total_seconds() for t in time] - curr_times = [datetime.utcfromtimestamp(t) for t in time] - # print(this_diff_times, target_time, np.min(np.abs(np.array(this_diff_times))), dt) + this_diff_times = [ + (utcfromtimestamp(t) - target_time).total_seconds() for t in time + ] + curr_times = [utcfromtimestamp(t) for t in time] if np.min(np.abs(np.array(this_diff_times))) < dt: ibest = int(np.argmin(np.abs(np.array(this_diff_times)))) curr_time = curr_times[ibest] elev = metadata[my_id]["elev"] observations.append( - Observation(curr_time, metadata[my_id]["lon"], metadata[my_id]["lat"], - data[my_id][ibest], elev=elev, varname=variable)) + Observation( + curr_time, + metadata[my_id]["lon"], + metadata[my_id]["lat"], + data[my_id][ibest], + elev=elev, + varname=variable, + ) + ) num_valid_stations += 1 else: num_valid_stations = len(data) - logging.debug("Found %d valid observations:", num_valid_stations) - logging.debug(" %d missing obs", num_missing_obs) - logging.debug(" %d missing metadata", num_missing_metadata) - logging.debug(" %d missing timestamp", num_missing_time) - logging.debug(" %d wrong timestamp", num_wrong_time) + logging.info("Found %d valid observations:", num_valid_stations) + logging.info(" %d missing obs", num_missing_obs) + logging.info(" %d missing metadata", num_missing_metadata) + logging.info(" %d missing timestamp", num_missing_time) + logging.info(" %d wrong timestamp", num_wrong_time) if not re: extra = " (not removed)" else: @@ -606,9 +375,23 @@ def __init__(self, filenames, variable, target_time, dt=3600, re=True, class MetFrostObservations(ObservationSet): """Observations from MET-Norway obs API (frost).""" - def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, - providers=None, xproviders=None, blacklist=None, validtime=None, dt=3600, - lonrange=None, latrange=None, unit=None, label="frost"): + def __init__( + self, + varname, + stations=None, + level=None, + num_tries=3, + wmo=None, + providers=None, + xproviders=None, + blacklist=None, + validtime=None, + dt=3600, + lonrange=None, + latrange=None, + unit=None, + label="frost", + ): """Construct obs set from Frost. 
Args: @@ -639,10 +422,10 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, blacklist = [] # extract client ID from environment variable - if 'CLIENTID' not in os.environ: - raise KeyError('error: CLIENTID not found in environment\n') + if "CLIENTID" not in os.environ: + raise KeyError("error: CLIENTID not found in environment\n") - client_id = os.environ['CLIENTID'] + client_id = os.environ["CLIENTID"] # Get all the stations (in an area or country) # Make list of station IDs and dictionary of their lat,long,elev @@ -656,103 +439,137 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, # 'geometry': 'POLYGON((10 60, 10 59, 11 60, 11 59))' # area around Oslo # 'country': 'NO' # everything in Norway - parameters = {'types': 'SensorSystem', - 'fields': 'id,geometry,masl,wmoid,stationholders'} + parameters = { + "types": "SensorSystem", + "fields": "id,geometry,masl,wmoid,stationholders", + } if lonrange is not None and latrange is not None: - parameters.update({"geometry": "POLYGON((" + str(lonrange[0]) + " " - + str(latrange[0]) + ", " - + str(lonrange[0]) + " " - + str(latrange[1]) + ", " - + str(lonrange[1]) + " " - + str(latrange[1]) + ", " - + str(lonrange[1]) + " " - + str(latrange[0]) + " ))"}) - logging.debug('Request parameters: %s', str(parameters)) - req = requests.get('https://frost.met.no/sources/v0.jsonld', - parameters, auth=(client_id, ''), - timeout=30) - - logging.debug("Request https://frost.met.no/sources/v0.jsonld returned %s", - req.status_code) + parameters.update( + { + "geometry": "POLYGON((" + + str(lonrange[0]) + + " " + + str(latrange[0]) + + ", " + + str(lonrange[0]) + + " " + + str(latrange[1]) + + ", " + + str(lonrange[1]) + + " " + + str(latrange[1]) + + ", " + + str(lonrange[1]) + + " " + + str(latrange[0]) + + " ))" + } + ) + logging.debug("Request parameters: %s", str(parameters)) + req = requests.get( + "https://frost.met.no/sources/v0.jsonld", + parameters, + auth=(client_id, ""), + timeout=30, + ) + + logging.debug( + "Request https://frost.met.no/sources/v0.jsonld returned %s", + req.status_code, + ) # extract list of stations (if response was valid) if req.status_code == 200: - data = req.json()['data'] - # print(data) + data = req.json()["data"] ids = [] count_discard = 0 + print(data) for data_block in data: - my_id = data_block['id'] - if 'masl' in data_block: - elev = data_block['masl'] + my_id = data_block["id"] + if "masl" in data_block: + elev = data_block["masl"] else: elev = -999 # missing value # filter data for WMO and non WMO keep_this_id = True - if 'wmoId' in data_block and wmo is not None and wmo == 0: + if "wmoId" in data_block and wmo is not None and wmo == 0: # station is WMO skip keep_this_id = False - logging.debug('throwing out this id (is WMO): %s', my_id) - elif 'wmoId' not in data_block and wmo is not None and wmo == 1: + logging.debug("throwing out this id (is WMO): %s", my_id) + elif "wmoId" not in data_block and wmo is not None and wmo == 1: # station is not WMO skip keep_this_id = False - logging.debug('throwing out this id (not WMO): %s', my_id) + logging.debug("throwing out this id (not WMO): %s", my_id) # filter out stations with incomplete data - if keep_this_id and 'geometry' not in data_block: + if keep_this_id and "geometry" not in data_block: keep_this_id = False - logging.debug('throwing out this id (no geometry): %s', my_id) + logging.debug("throwing out this id (no geometry): %s", my_id) # filters for station holders - if 'stationHolders' not in data_block: 
+ if "stationHolders" not in data_block: keep_this_id = False - logging.debug('throwing out this id (no stationHolders): %s', my_id) + logging.debug( + "throwing out this id (no stationHolders): %s", my_id + ) # select station providers elif providers is not None: - providers = providers.split(',') - station_holders = data_block['stationHolders'] + providers = providers.split(",") + station_holders = data_block["stationHolders"] if not any(x in station_holders for x in providers): keep_this_id = False - logging.debug('throwing out this id (station holder): %s', - str(station_holders)) + logging.debug( + "throwing out this id (station holder): %s", + str(station_holders), + ) # or exclude certain station providers elif xproviders is not None: - xproviders = xproviders.split(',') - station_holders = data_block['stationHolders'] + xproviders = xproviders.split(",") + station_holders = data_block["stationHolders"] if any(x in station_holders for x in xproviders): keep_this_id = False - logging.debug('throwing out this id (exclude station holder): %s', - str(station_holders)) + logging.debug( + "throwing out this id (exclude station holder): %s", + str(station_holders), + ) # filter out blacklisted stations if my_id in blacklist: keep_this_id = False - logging.debug('throwing out blacklisted id: %s', my_id) + logging.debug("throwing out blacklisted id: %s", my_id) if stations is not None: if my_id not in stations: keep_this_id = False - logging.debug("Throwing out station because not in station list %s", - my_id) - - logging.debug('Keep this ID: %s bool: %s', str(my_id), str(keep_this_id)) + logging.debug( + "Throwing out station because not in station list %s", + my_id, + ) + + logging.debug( + "Keep this ID: %s bool: %s", str(my_id), str(keep_this_id) + ) if keep_this_id: # write into dict ids.append(my_id) - # print('station: ' + str(id) + '\n' + str(data[i])) # create a dictionary for these stations to store lat,long,elev for each - station_dict[my_id] = [data_block['geometry']['coordinates'][1], - data_block['geometry']['coordinates'][0], elev] + station_dict[my_id] = [ + data_block["geometry"]["coordinates"][1], + data_block["geometry"]["coordinates"][0], + elev, + ] else: count_discard = count_discard + 1 - logging.debug('Number of stations: %s', str(len(ids))) - logging.debug('Number of stations , debug=debugdiscarded: %s', str(count_discard)) + logging.debug("Number of stations: %s", str(len(ids))) + logging.debug( + "Number of stations , debug=debugdiscarded: %s", str(count_discard) + ) break if req.status_code == 404: - print('STATUS: No data was found for the list of query Ids.') + print("STATUS: No data was found for the list of query Ids.") break if tries > num_tries: - raise Exception('ERROR: could not retrieve observations.') + raise Exception("ERROR: could not retrieve observations.") # # Use the station ID list to get the observation for each station @@ -761,11 +578,11 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, # check how long the list of stations is and potentially break it up to shorten observations = [] it_ids = len(ids) - dt = timedelta(seconds=dt) + dt = as_timedelta(seconds=dt) while it_ids > 0: if it_ids > 50: # get last 50 - sub_id_list = ids[it_ids - 50:it_ids] + sub_id_list = ids[it_ids - 50 : it_ids] it_ids = it_ids - 50 else: # get the rest if <50 @@ -776,7 +593,7 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, while tries <= num_tries: tries += 1 # use the list of stations and get the 
observations for those - parameters2 = {'sources': ','.join(sub_id_list), 'elements': varname} + parameters2 = {"sources": ",".join(sub_id_list), "elements": varname} date = validtime.strftime("%Y%m%d") hour = validtime.strftime("%H") @@ -785,37 +602,54 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, # make these into a format that works for FROST date_string = date hour_string = hour - date_string_frost = date_string[0:4] + '-' + date_string[4:6] \ - + '-' + date_string[6:8] + 'T' + hour_string - parameters2['referencetime'] = date_string_frost + date_string_frost = ( + date_string[0:4] + + "-" + + date_string[4:6] + + "-" + + date_string[6:8] + + "T" + + hour_string + ) + parameters2["referencetime"] = date_string_frost # do not have date and time, so use latest else: - parameters2['referencetime'] = 'latest' - parameters2['maxage'] = 'PT30M' - parameters2['limit'] = 1 - - logging.debug('Request parameters2: %s', str(parameters2)) - req = requests.get('https://frost.met.no/observations/v0.jsonld', - parameters2, auth=(client_id, ''), - timeout=30) - - logging.debug("Request https://frost.met.no/observations/v0.jsonld returned %s", - req.status_code) + parameters2["referencetime"] = "latest" + parameters2["maxage"] = "PT30M" + parameters2["limit"] = 1 + + logging.debug("Request parameters2: %s", str(parameters2)) + req = requests.get( + "https://frost.met.no/observations/v0.jsonld", + parameters2, + auth=(client_id, ""), + timeout=30, + ) + + logging.debug( + "Request https://frost.met.no/observations/v0.jsonld returned %s", + req.status_code, + ) if req.status_code == 200: - data = req.json()['data'] + data = req.json()["data"] for data_block in data: # Check that reference time is ok, since sometimes future observations # can be present when 'latest' is chosen for reference time - ref_str = data_block['referenceTime'] + ref_str = data_block["referenceTime"] ref_year = int(ref_str[0:4]) ref_month = int(ref_str[5:7]) ref_day = int(ref_str[8:10]) ref_hour = int(ref_str[11:13]) ref_min = int(ref_str[14:16]) ref_sec = int(ref_str[17:19]) - ref_time = datetime(year=ref_year, month=ref_month, day=ref_day, - hour=ref_hour, minute=ref_min, - second=ref_sec) + ref_time = as_datetime_args( + year=ref_year, + month=ref_month, + day=ref_day, + hour=ref_hour, + minute=ref_min, + second=ref_sec, + ) logging.debug("ref_time %s validtime %s", ref_time, validtime) read_unit = None @@ -831,13 +665,21 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, all_found = True for key in level: if key in obs["level"]: - if str(level[key]) != str(obs["level"][key]): - logging.debug("%s != %s", - level[key], obs["level"][key]) + if str(level[key]) != str( + obs["level"][key] + ): + logging.debug( + "%s != %s", + level[key], + obs["level"][key], + ) all_found = False else: - logging.debug("%s == %s", - level[key], obs["level"][key]) + logging.debug( + "%s == %s", + level[key], + obs["level"][key], + ) if not all_found: levels_ok = False @@ -848,30 +690,35 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, if keep_this_obs: logging.debug("Keep this obs") - value = data_block['observations'][0]['value'] + value = data_block["observations"][0]["value"] if len(str(value)) > 0: # not all stations have observations - source_id = str(data_block['sourceId']) - my_id = source_id.split(':') + source_id = str(data_block["sourceId"]) + my_id = source_id.split(":") if unit is not None: if read_unit is not None: if cfunits is None: raise 
Exception("cfunits not loaded!") read_unit = cfunits.Units(read_unit) unit = cfunits.Units(unit) - value = cfunits.Units.conform(value, read_unit, unit) + value = cfunits.Units.conform( + value, read_unit, unit + ) else: raise Exception("Did not read a unit to convert!") ids_obs_dict[my_id[0]] = value - # print(ids_obs_dict) - logging.debug('Station list length: %s, total number of observations ' - 'retrieved: %s', str(len(sub_id_list)), str(len(ids_obs_dict))) + logging.debug( + "Station list length: %s, total number of observations " + "retrieved: %s", + str(len(sub_id_list)), + str(len(ids_obs_dict)), + ) break if req.status_code == 404: - print('STATUS: No data was found for the list of query Ids.') + print("STATUS: No data was found for the list of query Ids.") break if tries > num_tries: - raise Exception('ERROR: could not retrieve observations.') + raise Exception("ERROR: could not retrieve observations.") for station, station_id in ids_obs_dict.items(): value = float(station_id) @@ -880,8 +727,17 @@ def __init__(self, varname, stations=None, level=None, num_tries=3, wmo=None, lat = id_info[0] lon = id_info[1] elev = id_info[2] - observations.append(Observation(validtime, lon, lat, value, stid=str(stid), - elev=elev, varname=varname)) + observations.append( + Observation( + validtime, + lon, + lat, + value, + stid=str(stid), + elev=elev, + varname=varname, + ) + ) ObservationSet.__init__(self, observations, label=label) @@ -897,13 +753,16 @@ def __init__(self, filename, label="json", var=None): label (str, optional): Label of set. Defaults to "json". var (str, optional): Variable name. Defaults to None. + Raises: + RuntimeError: Varname is not found + """ with open(filename, mode="r", encoding="utf-8") as file_handler: obs = json.load(file_handler) observations = [] for i in range(0, len(obs)): ind = str(i) - obstime = datetime.strptime(obs[ind]["obstime"], "%Y%m%d%H%M%S") + obstime = as_datetime(obs[ind]["obstime"]) lon = obs[ind]["lon"] lat = obs[ind]["lat"] elev = obs[ind]["elev"] @@ -914,11 +773,14 @@ def __init__(self, filename, label="json", var=None): varname = obs[ind]["varname"] if varname == "" and var is not None: - raise Exception("Varname is not found " + varname) + raise RuntimeError("Varname is not found " + varname) if var is None or var == varname: - observations.append(Observation(obstime, lon, lat, value, stid=stid, elev=elev, - varname=varname)) + observations.append( + Observation( + obstime, lon, lat, value, stid=stid, elev=elev, varname=varname + ) + ) ObservationSet.__init__(self, observations, label=label) @@ -935,18 +797,33 @@ def __init__(self, an_time, filename, label=""): label (str, optional): _description_. Defaults to "". """ - qc_obs = surfex.dataset_from_file(an_time, filename) + qc_obs = dataset_from_file(an_time, filename) observations = [] for i in range(0, len(qc_obs)): observations.append( - Observation(qc_obs.obstimes[i], qc_obs.lons[i], qc_obs.lats[i], - qc_obs.elevs[i], qc_obs.values[i])) + Observation( + qc_obs.obstimes[i], + qc_obs.lons[i], + qc_obs.lats[i], + qc_obs.elevs[i], + qc_obs.values[i], + ) + ) ObservationSet.__init__(self, observations, label=label) -def snow_pseudo_obs_cryoclim(validtime, grid_snow_class, grid_lons, grid_lats, step, fg_geo, - grid_snow_fg, fg_threshold=2.0, new_snow_depth=0.01): +def snow_pseudo_obs_cryoclim( + validtime, + grid_snow_class, + grid_lons, + grid_lats, + step, + fg_geo, + grid_snow_fg, + fg_threshold=2.0, + new_snow_depth=0.01, +): """Cryoclim snow. 
Args: @@ -975,7 +852,7 @@ def snow_pseudo_obs_cryoclim(validtime, grid_snow_class, grid_lons, grid_lats, s res_lons = [] res_lats = [] p_snow_class = {} - for i in range(0, n_x): + for __ in range(0, n_x): jjj = 0 for __ in range(0, n_y): res_lons.append(grid_lons[iii, jjj]) @@ -985,9 +862,16 @@ def snow_pseudo_obs_cryoclim(validtime, grid_snow_class, grid_lons, grid_lats, s jjj = jjj + step iii = iii + step - p_fg_snow_depth = surfex.grid2points(fg_geo.lons, fg_geo.lats, - np.asarray(res_lons), np.asarray(res_lats), - grid_snow_fg) + p_fg_snow_depth = gridpos2points( + fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), grid_snow_fg + ) + in_grid = inside_grid( + fg_geo.lons, + fg_geo.lats, + np.asarray(res_lons), + np.asarray(res_lats), + distance=2500.0, + ) # Ordering of points must be the same..... obs = [] @@ -1001,17 +885,12 @@ def snow_pseudo_obs_cryoclim(validtime, grid_snow_class, grid_lons, grid_lats, s logging.debug("%s %s %s %s", i, p_snow_fg, res_lons[i], res_lats[i]) if not np.isnan(p_snow_fg): # Check if in grid - neighbours = surfex.get_num_neighbours(fg_geo.lons, fg_geo.lats, - float(res_lons[i]), float(res_lats[i]), - distance=2500.) - - if neighbours > 0: + if in_grid[i]: obs_value = np.nan if p_snow_class[str(i)] == 1: if p_snow_fg > 0: if fg_threshold is not None: if p_snow_fg <= fg_threshold: - # print(p_snow_fg) obs_value = p_snow_fg else: obs_value = p_snow_fg @@ -1026,15 +905,25 @@ def snow_pseudo_obs_cryoclim(validtime, grid_snow_class, grid_lons, grid_lats, s cis.append(0) lafs.append(0) providers.append(0) - obs.append(surfex.Observation(validtime, res_lons[i], res_lats[i], obs_value)) + obs.append( + Observation(validtime, res_lons[i], res_lats[i], obs_value) + ) logging.info("Possible pseudo-observations: %s", n_x * n_y) logging.info("Pseudo-observations created: %s", len(obs)) - return surfex.QCDataSet(validtime, obs, flags, cis, lafs, providers) - - -def sm_obs_sentinel(validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo, grid_sm_fg, - fg_threshold=1.): + return QCDataSet(validtime, obs, flags, cis, lafs, providers) + + +def sm_obs_sentinel( + validtime, + grid_sm_class, + grid_lons, + grid_lats, + step, + fg_geo, + grid_sm_fg, + fg_threshold=1.0, +): """Sentinel. Args: @@ -1063,7 +952,7 @@ def sm_obs_sentinel(validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo res_lons = [] res_lats = [] p_sm_class = {} - for i in range(0, n_x): + for __ in range(0, n_x): jjj = 0 for __ in range(0, n_y): res_lons.append(grid_lons[iii, jjj]) @@ -1073,10 +962,16 @@ def sm_obs_sentinel(validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo jjj = jjj + step iii = iii + step - p_fg_sm = surfex.grid2points(fg_geo.lons, fg_geo.lats, - np.asarray(res_lons), np.asarray(res_lats), - grid_sm_fg) - + p_fg_sm = gridpos2points( + fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), grid_sm_fg + ) + in_grid = inside_grid( + fg_geo.lons, + fg_geo.lats, + np.asarray(res_lons), + np.asarray(res_lats), + distance=2500.0, + ) # Ordering of points must be the same..... obs = [] flags = [] @@ -1088,13 +983,9 @@ def sm_obs_sentinel(validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo p_sm_fg = p_fg_sm[i] if not np.isnan(p_sm_fg): # Check if in grid - neighbours = surfex.get_num_neighbours(fg_geo.lons, fg_geo.lats, - float(res_lons[i]), float(res_lats[i]), - distance=2500.) 
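The refactoring in this hunk replaces the per-point neighbour search with one vectorized interpolation plus an in-grid mask; roughly, with signatures inferred from the calls in this file:

    # res_lons/res_lats: numpy arrays of candidate pseudo-observation positions
    p_fg = gridpos2points(fg_geo.lons, fg_geo.lats, res_lons, res_lats, grid_snow_fg)
    in_grid = inside_grid(fg_geo.lons, fg_geo.lats, res_lons, res_lats, distance=2500.0)
    # a candidate point i is kept only if in_grid[i] is true and p_fg[i] is not NaN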
- - if neighbours > 0: + if in_grid[i]: obs_value = np.nan - if ((p_sm_class[str(i)] > 1) or (p_sm_class[str(i)] < 0)): + if (p_sm_class[str(i)] > 1) or (p_sm_class[str(i)] < 0): if p_sm_fg <= fg_threshold: obs_value = p_sm_fg else: @@ -1108,69 +999,16 @@ def sm_obs_sentinel(validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo cis.append(0) lafs.append(0) providers.append(0) - obs.append(surfex.Observation(validtime, res_lons[i], res_lats[i], obs_value, - varname="surface_soil_moisture")) + obs.append( + Observation( + validtime, + res_lons[i], + res_lats[i], + obs_value, + varname="surface_soil_moisture", + ) + ) logging.info("Possible pseudo-observations: %s", n_x * n_y) logging.info("Pseudo-observations created: %s", len(obs)) - return surfex.QCDataSet(validtime, obs, flags, cis, lafs, providers) - - -def set_geo_from_obs_set(obs_time, obs_type, varname, inputfile, lonrange=None, latrange=None): - """Set geometry from obs file. - - Args: - obs_time (_type_): _description_ - obs_type (_type_): _description_ - varname (_type_): _description_ - inputfile (_type_): _description_ - lonrange (_type_, optional): _description_. Defaults to None. - latrange (_type_, optional): _description_. Defaults to None. - - Returns: - _type_: _description_ - - """ - settings = { - "obs": { - "varname": varname, - "filetype": obs_type, - "inputfile": inputfile, - "filepattern": inputfile - } - } - if lonrange is None: - lonrange = [-180, 180] - if latrange is None: - latrange = [-90, 90] - - logging.debug("%s", settings) - logging.debug("Get data source") - __, lons, lats, __, __, __, __ = surfex.get_datasources(obs_time, settings)[0].get_obs() - - selected_lons = [] - selected_lats = [] - for i, lon in enumerate(lons): - lat = lats[i] - - if lonrange[0] <= lon <= lonrange[1] and latrange[0] <= lat <= latrange[1]: - lon = round(lon, 5) - lat = round(lat, 5) - # print(i, lon, lat) - selected_lons.append(lon) - selected_lats.append(lat) - - d_x = ["0.3"] * len(selected_lons) - geo_json = { - "nam_pgd_grid": { - "cgrid": "LONLATVAL" - }, - "nam_lonlatval": { - "xx": selected_lons, - "xy": selected_lats, - "xdx": d_x, - "xdy": d_x - } - } - geo = surfex.LonLatVal(geo_json) - return geo + return QCDataSet(validtime, obs, flags, cis, lafs, providers) diff --git a/surfex/observation.py b/surfex/observation.py new file mode 100644 index 0000000..16415bd --- /dev/null +++ b/surfex/observation.py @@ -0,0 +1,159 @@ +"""Observation.""" +import json + +import numpy as np + + +class Observation(object): + """Observation class.""" + + def __init__(self, obstime, lon, lat, value, elev=np.nan, stid="NA", varname=None): + """Construct an observation. + + Args: + obstime (_type_): _description_ + lon (_type_): _description_ + lat (_type_): _description_ + value (_type_): _description_ + elev (_type_, optional): _description_. Defaults to np.nan. + stid (str, optional): _description_. Defaults to "NA". + varname (_type_, optional): _description_. Defaults to None. + + """ + self.obstime = obstime + self.lon = float(lon) + self.lat = float(lat) + self.stid = stid + self.elev = float(elev) + self.value = float(value) + self.varname = varname + + def print_obs(self): + """Print observation.""" + print( + "observation: ", + self.obstime, + self.lon, + self.lat, + self.stid, + self.value, + self.elev, + ) + + @staticmethod + def vectors2obs(obstime, lon, lat, stid, elev, value, varname): + """Convert vectors to observations. 
+ + Args: + obstime (_type_): _description_ + lon (_type_): _description_ + lat (_type_): _description_ + stid (_type_): _description_ + elev (_type_): _description_ + value (_type_): _description_ + varname (_type_): _description_ + + Returns: + Observation: Observation object. + """ + return Observation( + obstime, lon, lat, value, elev=elev, varname=varname, stid=stid + ) + + @staticmethod + def obs2vectors(my_obs): + """Convert observations to vectors. + + Args: + my_obs (_type_): _description_ + + Returns: + _type_: _description_ + """ + return ( + my_obs.obstime, + my_obs.lon, + my_obs.lat, + my_obs.stid, + my_obs.elev, + my_obs.value, + my_obs.varname, + ) + + @staticmethod + def format_lon(lon): + """Format longitude.""" + lon = f"{float(lon):10.5f}" + return lon + + @staticmethod + def format_lat(lat): + """Format latitude.""" + lat = f"{float(lat):10.5f}" + return lat + + @staticmethod + def get_pos_from_stid(filename, stids): + """Get pos from station ID. + + Args: + filename (_type_): _description_ + stids (_type_): _description_ + + Raises: + Exception: _description_ + + Returns: + _type_: _description_ + + """ + lons = [] + lats = [] + with open(filename, mode="r", encoding="utf-8") as file_handler: + ids_from_file = json.load(file_handler) + for stid in stids: + found = False + for stid1 in ids_from_file: + if stid == stid1: + found = True + lon = float(ids_from_file[stid1]["lon"]) + lat = float(ids_from_file[stid1]["lat"]) + lons.append(lon) + lats.append(lat) + if not found: + raise Exception( + "Could not find station id " + stid + " in file " + filename + ) + return lons, lats + + @staticmethod + def get_stid_from_stationlist(stationlist, lons, lats): + """Get station ID from station list. + + Args: + stationlist (str): Filename of station list + lons (list): Longitudes + lats (list): Latitudes + + Returns: + list: Station IDs + + """ + index_pos = {} + with open(stationlist, mode="r", encoding="utf-8") as file_handler: + ids_from_file = json.load(file_handler) + for stid in ids_from_file: + lon = ids_from_file[stid]["lon"] + lat = ids_from_file[stid]["lat"] + pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat) + index_pos.update({pos: stid}) + + stids = [] + for i, lon in enumerate(lons): + lat = lats[i] + pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat) + if pos in index_pos: + stids.append(index_pos[pos]) + else: + stids.append("NA") + return stids diff --git a/surfex/obsmon.py b/surfex/obsmon.py index aa7c956..1d277eb 100644 --- a/surfex/obsmon.py +++ b/surfex/obsmon.py @@ -1,8 +1,7 @@ """Obsmon.""" -from datetime import datetime import logging + import numpy as np -import surfex try: import sqlite3 @@ -11,6 +10,12 @@ logging.warning("Could not import sqlite3 modules") +from .datetime_utils import as_datetime +from .netcdf import read_first_guess_netcdf_file +from .obs import Observation +from .titan import Departure, dataset_from_file + + def open_db(dbname): """Open database. @@ -18,14 +23,14 @@ def open_db(dbname): dbname (str): File name. Raises: - Exception: _description_ + RuntimeError: Need SQLite Returns: - sqlite3.connect: Data base connection. 
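A minimal usage sketch for the relocated Observation class (the station id and values are invented):

    from surfex.observation import Observation

    obs = Observation(
        validtime, 10.0, 60.0, 273.15, elev=123.0, stid="SN18700", varname="air_temperature_2m"
    )
    obs.print_obs()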
+ sqlite3.connect: A connection """ if sqlite3 is None: - raise Exception("You need SQLITE for obsmon") + raise RuntimeError("You need SQLITE for obsmon") conn = sqlite3.connect(dbname) return conn @@ -52,16 +57,20 @@ def create_db(conn, modes, stat_cols): cursor = conn.cursor() # Create usage table - cmd = "CREATE TABLE IF NOT EXISTS usage (DTG INT, obnumber INT, obname CHAR(20), " \ - "satname CHAR(20), varname CHAR(20), level INT, latitude FLOAT, longitude FLOAT, " \ - "statid CHAR(20), obsvalue FLOAT, fg_dep FLOAT, an_dep FLOAT, biascrl FLOAT, " \ - "active INT, rejected INT, passive INT, blacklisted INT, anflag INT)" + cmd = ( + "CREATE TABLE IF NOT EXISTS usage (DTG INT, obnumber INT, obname CHAR(20), " + "satname CHAR(20), varname CHAR(20), level INT, latitude FLOAT, longitude FLOAT, " + "statid CHAR(20), obsvalue FLOAT, fg_dep FLOAT, an_dep FLOAT, biascrl FLOAT, " + "active INT, rejected INT, passive INT, blacklisted INT, anflag INT)" + ) cursor.execute(cmd) # Create obsmon table - cmd = "CREATE TABLE IF NOT EXISTS obsmon (DTG INT, obnumber INT, obname CHAR(20), " \ - "satname CHAR(20), varname CHAR(20), level INT, passive INT" + cmd = ( + "CREATE TABLE IF NOT EXISTS obsmon (DTG INT, obnumber INT, obname CHAR(20), " + "satname CHAR(20), varname CHAR(20), level INT, passive INT" + ) for mode in modes: for col in stat_cols: cmd = cmd + "," + col + "_" + mode + " FLOAT" @@ -69,7 +78,9 @@ def create_db(conn, modes, stat_cols): cmd = cmd + ")" cursor.execute(cmd) - cursor.execute('''CREATE INDEX IF NOT EXISTS obsmon_index on usage(DTG,obnumber,obname)''') + cursor.execute( + """CREATE INDEX IF NOT EXISTS obsmon_index on usage(DTG,obnumber,obname)""" + ) # Save (commit) the changes conn.commit() @@ -84,9 +95,6 @@ def populate_usage_db(conn, dtg, varname, observations): varname (_type_): _description_ observations (_type_): _description_ - Returns: - _type_: _description_ - """ logging.info("Update usage") obnumber = "1" @@ -98,15 +106,23 @@ def populate_usage_db(conn, dtg, varname, observations): # Insert a row of data def obs2vectors(my_obs): - return my_obs.lons, my_obs.lats, my_obs.stids, my_obs.elevs, my_obs.values, \ - my_obs.flags, my_obs.fg_dep, my_obs.an_dep + return ( + my_obs.lons, + my_obs.lats, + my_obs.stids, + my_obs.elevs, + my_obs.values, + my_obs.flags, + my_obs.fg_dep, + my_obs.an_dep, + ) vectors = np.vectorize(obs2vectors) lons, lats, stids, __, values, flags, fg_deps, an_deps = vectors(observations) for i, lon_val in enumerate(lons): - lon = surfex.Observation.format_lon(lon_val) - lat = surfex.Observation.format_lat(lats[i]) + lon = Observation.format_lon(lon_val) + lat = Observation.format_lat(lats[i]) stid = str(stids[i]) if stid == "NA": stid = "NULL" @@ -130,9 +146,35 @@ def obs2vectors(my_obs): if status == "0": status = "1" - cmd = "INSERT INTO usage VALUES(" + str(dtg) + "," + obnumber + ",\"" + obname + \ - "\",\"" + satname + "\",\"" + varname + "\"," + level + "," + lat + "," + lon + \ - "," + stid + "," + value + "," + fg_dep + "," + an_dep + ",0,0,0,0,0," + status + ")" + cmd = ( + "INSERT INTO usage VALUES(" + + str(dtg) + + "," + + obnumber + + ',"' + + obname + + '","' + + satname + + '","' + + varname + + '",' + + level + + "," + + lat + + "," + + lon + + "," + + stid + + "," + + value + + "," + + fg_dep + + "," + + an_dep + + ",0,0,0,0,0," + + status + + ")" + ) logging.info(cmd) cursor.execute(cmd) @@ -307,18 +349,41 @@ def populate_obsmon_db(conn, dtg, statistics, modes, stat_cols, varname): level = "0" cursor = conn.cursor() - cmd = "SELECT * 
FROM obsmon WHERE DTG==" + dtg + " AND obnumber==" + obnumber + \ - " AND obname ==\"" + obname + "\" AND varname==\"" + varname + "\" AND LEVEL == " + level + cmd = ( + "SELECT * FROM obsmon WHERE DTG==" # noqa + + dtg + + " AND obnumber==" + + obnumber + + ' AND obname =="' + + obname + + '" AND varname=="' + + varname + + '" AND LEVEL == ' + + level + ) # noqa cursor.execute(cmd) records = len(cursor.fetchall()) if records > 1: - print(cmd) + logging.info(cmd) raise Exception("You should not have ", records, " in your database") if records == 0: - cmd = "INSERT INTO obsmon VALUES(" + dtg + "," + obnumber + ",\"" + obname + "\",\"" + \ - satname + "\",\"" + varname + "\"," + level + ",0" + cmd = ( + "INSERT INTO obsmon VALUES(" + + dtg + + "," + + obnumber + + ',"' + + obname + + '","' + + satname + + '","' + + varname + + '",' + + level + + ",0" + ) else: cmd = "UPDATE obsmon SET " first = True @@ -336,10 +401,21 @@ def populate_obsmon_db(conn, dtg, statistics, modes, stat_cols, varname): if records == 0: cmd = cmd + ")" else: - cmd = cmd + " WHERE DTG==" + dtg + " AND obnumber==" + obnumber + " AND obname==\"" + \ - obname + "\" AND varname==\"" + varname + "\" AND LEVEL == " + level - - # print(cmd) + cmd = ( + cmd + + " WHERE DTG==" + + dtg + + " AND obnumber==" + + obnumber + + ' AND obname=="' + + obname + + '" AND varname=="' + + varname + + '" AND LEVEL == ' + + level + ) + + logging.info(cmd) cursor.execute(cmd) # Save (commit) the changes conn.commit() @@ -348,12 +424,23 @@ def populate_obsmon_db(conn, dtg, statistics, modes, stat_cols, varname): def write_obsmon_sqlite_file(**kwargs): """Write obsmon sqlite file.""" modes = ["total", "land", "sea"] - stat_cols = ["nobs", "fg_bias", "fg_abs_bias", "fg_rms", "fg_dep", "fg_uncorr", - "bc", "an_bias", "an_abs_bias", "an_rms", "an_dep"] + stat_cols = [ + "nobs", + "fg_bias", + "fg_abs_bias", + "fg_rms", + "fg_dep", + "fg_uncorr", + "bc", + "an_bias", + "an_abs_bias", + "an_rms", + "an_dep", + ] an_time = kwargs["dtg"] if isinstance(an_time, str): - an_time = datetime.strptime(an_time, "%Y%m%d%H") + an_time = as_datetime(an_time) dtg = an_time.strftime("%Y%m%d%H") varname = kwargs["varname"] dbname = kwargs["output"] @@ -363,7 +450,7 @@ def write_obsmon_sqlite_file(**kwargs): operator = kwargs["operator"] q_c = kwargs["qc"] - obs_titan = surfex.dataset_from_file(an_time, q_c, skip_flags=[150]) + obs_titan = dataset_from_file(an_time, q_c, skip_flags=[150]) conn = open_db(dbname) create_db(conn, modes, stat_cols) @@ -373,16 +460,25 @@ def write_obsmon_sqlite_file(**kwargs): an_var = kwargs["file_var"] # Only first guess file implemented at the moment - geo_in, __, an_field, __, __ = surfex.read_first_guess_netcdf_file(an_file, an_var) - geo_in, __, fg_field, __, __ = surfex.read_first_guess_netcdf_file(fg_file, fg_var) + geo_in, __, an_field, __, __ = read_first_guess_netcdf_file(an_file, an_var) + geo_in, __, fg_field, __, __ = read_first_guess_netcdf_file(fg_file, fg_var) - fg_dep = surfex.Departure(operator, geo_in, obs_titan, fg_field, "first_guess").get_departure() - an_dep = surfex.Departure(operator, geo_in, obs_titan, an_field, "analysis").get_departure() + fg_dep = Departure( + operator, geo_in, obs_titan, fg_field, "first_guess" + ).get_departure() + an_dep = Departure(operator, geo_in, obs_titan, an_field, "analysis").get_departure() - obs_titan = surfex.dataset_from_file(an_time, q_c, skip_flags=[150, 199], - fg_dep=fg_dep, an_dep=an_dep) + obs_titan = dataset_from_file( + an_time, q_c, skip_flags=[150, 199], 
fg_dep=fg_dep, an_dep=an_dep + ) populate_usage_db(conn, dtg, varname, obs_titan) - populate_obsmon_db(conn, dtg, calculate_statistics(obs_titan, modes, stat_cols), - modes, stat_cols, varname) + populate_obsmon_db( + conn, + dtg, + calculate_statistics(obs_titan, modes, stat_cols), + modes, + stat_cols, + varname, + ) close_db(conn) diff --git a/surfex/obsoul.py b/surfex/obsoul.py new file mode 100644 index 0000000..ee78b33 --- /dev/null +++ b/surfex/obsoul.py @@ -0,0 +1,174 @@ +"""Obsoul.""" +import logging + +import numpy as np + +from .datetime_utils import as_datetime, as_timedelta +from .obs import ObservationSet +from .observation import Observation + + +class ObservationDataSetFromObsoul(ObservationSet): + """Observation set from obsoul file.""" + + def __init__( + self, + content, + an_time=None, + label="", + obnumber=None, + obtypes=None, + subtypes=None, + neg_dt=None, + pos_dt=None, + ): + """Constuct obs set from a obsoul json file. + + Args: + content(str): Obsoul content + an_time (as_datetime, optional): Analysis time + neg_dt (int, optional): Negative timedelta. Defaults to None. + pos_dt (int, optional): Positive timedelta. Defaults to None. + obtypes (list, optional): Observation types. Defaults to None. + subtypes (list, optional): Observation sub types. Defaults to None. + obnumber (int, optional): Observation number. Defaults to None. + label (str, optional): Label for observation set. Defaults to "". + """ + observations = [] + lineno = 0 + first_line = True + lines = content.split("\n") + logging.debug("Found %s lines", lines) + while lineno < len(lines) - 1: + line = lines[lineno] + if line.strip() == "": + lineno += 1 + if first_line: + line = lines[lineno] + parts = line.split() + logging.debug("First row parts %s", parts) + first_line = False + lineno += 1 + + # Header + line = lines[lineno] + logging.debug("Header %s", line) + obt = int(line[4:7]) + subt = int(line[7:17]) + lon = float(line[17:27]) + lat = float(line[27:38]) + stid = line[38:50].replace("'", "").strip() + elev = np.nan + date = line[50:60].strip() + time = int(line[60:67].strip()) + yyyymmdhhmm = f"{date}{time:06d}" + logging.debug("Date=%s Time=%s", date, time) + obtime = as_datetime(yyyymmdhhmm) + records = int(line[80:86]) + lineno += 1 + + for __ in range(0, records): + line = lines[lineno] + parts = line.split() + logging.debug("Data parts %s", parts) + obn = int(parts[0]) + add_obs = True + logging.debug("obtypes=%s obn=%s", obtypes, obn) + if obtypes is not None: + if obt in obtypes: + if subtypes is not None: + if subt in subtypes: + pass + else: + logging.debug("Wrong subtype %s %s", subt, subtypes) + add_obs = False + else: + logging.debug("Wrong obtype %s %s", obt, obtypes) + add_obs = False + + if obnumber is not None: + if obn != obnumber: + logging.debug("Wrong obnumber %s %s", obn, obnumber) + add_obs = False + + # Remove if outside window + if an_time is not None: + if neg_dt is not None and pos_dt is not None: + n_dt = as_timedelta(seconds=neg_dt) + p_dt = as_timedelta(seconds=pos_dt) + if (an_time - n_dt) <= obtime <= (an_time + p_dt): + pass + else: + logging.debug( + "Outside time window %s %s %s %s", + obtime, + an_time, + n_dt, + p_dt, + ) + add_obs = False + else: + logging.debug( + "Not checking time window. 
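# Quick reference for the fixed-width OBSOUL header slicing above (column ranges copied
# from __init__; field meanings inferred from how they are used):
#   line[4:7]   observation type      line[7:17]  sub type
#   line[17:27] longitude             line[27:38] latitude
#   line[38:50] station id (quoted)   line[50:60] date (YYYYMMDD)
#   line[60:67] time (HHMMSS)         line[80:86] number of data records that follow
# The date plus the zero-padded time is handed to as_datetime() as one
# YYYYMMDDHHMMSS string.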
neg_dt=%s and/or pos_dt=%s are None", + neg_dt, + pos_dt, + ) + value = parts[3] + logging.debug("Obs %s %s %s %s", lineno, obn, obnumber, add_obs) + if add_obs: + observations.append( + Observation( + obtime, + lon, + lat, + value, + elev=elev, + stid=stid, + varname=str(obnumber), + ) + ) + lineno += 1 + logging.debug("nObs %s", len(observations)) + ObservationSet.__init__(self, observations, label=label) + + +class ObservationDataSetFromObsoulFile(ObservationDataSetFromObsoul): + """Observation set from obsoul file.""" + + def __init__( + self, + filename, + an_time=None, + neg_dt=None, + pos_dt=None, + label="", + obnumber=None, + obtypes=None, + subtypes=None, + ): + """Constuct obs set from a obsoul file. + + Args: + filename (str): File name + an_time (as_datetime, optional): Analysis time + neg_dt (int, optional): Negative timedelta. Defaults to None. + pos_dt (int, optional): Positive timedelta. Defaults to None. + obtypes (list, optional): Observation types. Defaults to None. + subtypes (list, optional): Observation sub types. Defaults to None. + obnumber (int, optional): Observation number. Defaults to None. + label (str, optional): Label for observation set. Defaults to "". + """ + logging.info("Opening OBSOUL file %s", filename) + with open(filename, mode="r", encoding="utf8") as fhandler: + content = fhandler.read() + ObservationDataSetFromObsoul.__init__( + self, + content, + an_time=an_time, + label=label, + obnumber=obnumber, + pos_dt=pos_dt, + neg_dt=neg_dt, + obtypes=obtypes, + subtypes=subtypes, + ) diff --git a/surfex/platform.py b/surfex/platform.py new file mode 100644 index 0000000..1dff6ff --- /dev/null +++ b/surfex/platform.py @@ -0,0 +1,437 @@ +"""Platform dependent settings.""" +import json +import logging +import os + +from .datetime_utils import as_datetime + + +class SystemFilePaths(object): + """Matches files and paths depending on possibly system specific settings. + + User can provide a default system dir to nest dependencies. + """ + + def __init__(self, system_file_paths): + """Construct SystemFilePaths. + + Args: + system_file_paths (_type_): _description_ + """ + self.system_file_paths = system_file_paths + + def get_system_path( + self, + dtype, + default_dir=None, + check_existence=False, + check_parsing=False, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): + """Get the system path for a given data type. 
+ + Args: + dtype (str): The data type you want to get the path for (clim_dir/bin_dir etc) + default_dir (str): A fallback if the desired dtype is not found + check_existence (bool): Check if the path found also exists + check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) + validtime (as_datetime): Parse setting with this as valid time + basedtg (as_datetime): Parse setting with this as base time + mbr (int): Parse setting with this as ensemble member + tstep (int): Parse setting with this as time step + pert (int): Parse setting with this as pertubation number + var (str): Parse setting with this as variable + + Returns: + data_dir (str): + + Raises: + RuntimeError: If path not found and check_existence is True + + See Also: + self.parse_setting + + """ + logging.debug("Search for: %s Default: %s", dtype, str(default_dir)) + + data_dir = self.find_matching_data_dir( + dtype, + default_dir=default_dir, + check_existence=check_existence, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + if data_dir is None: + if default_dir is None: + raise RuntimeError("No system path found for " + dtype) + + logging.debug("Find default path") + data_dir = self.find_matching_data_dir( + default_dir, + default_dir=default_dir, + check_existence=check_existence, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + if data_dir is None: + logging.debug("No default path found for %s", default_dir) + + logging.debug("data_dir %s", data_dir) + return data_dir + + def find_matching_data_dir( + self, + dtype, + default_dir=None, + check_existence=False, + check_parsing=False, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): + """Find a matching path from the system path for a given data type. + + Args: + dtype (str): The data type you want to get the path for (clim_dir/bin_dir etc) + default_dir (str): A fallback if the desired dtype is not found + check_existence (bool): Check if the path found also exists + check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) + validtime (as_datetime): Parse setting with this as valid time + basedtg (as_datetime): Parse setting with this as base time + mbr (int): Parse setting with this as ensemble member + tstep (int): Parse setting with this as time step + pert (int): Parse setting with this as pertubation number + var (str): Parse setting with this as variable + + Returns: + data_dir (str): + + Raises: + ValueError: data dir is not a string! 
+ NotADirectoryError: Not a directory + + See Also: + self.parse_setting + + """ + command = None + for sfp in self.system_file_paths: + if sfp == dtype: + logging.debug("Found %s %s %s", sfp, type(sfp), self.system_file_paths) + data_dir = self.system_file_paths[sfp] + # If dict, also a command is attached + if isinstance(data_dir, dict): + for key in data_dir: + logging.debug(key, data_dir[key]) + command = str(data_dir[key]) + data_dir = str(key) + logging.debug("Data directory before parsing is: %s", data_dir) + if not isinstance(data_dir, str): + raise ValueError("data dir is not a string!") + data_dir = self.parse_setting( + self.substitute_string(data_dir), + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + # Add command to data_dir again + if command is not None: + data_dir = {data_dir: command} + logging.debug("Data directory after parsing is is: %s", data_dir) + if check_existence: + if not os.path.exists(data_dir) and default_dir is None: + raise NotADirectoryError(data_dir) + return data_dir + return None + + def get_system_file( + self, + dtype, + fname, + default_dir=None, + check_existence=False, + check_parsing=False, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + system_variables=None, + ): + """Get the system path for a given data type and add a file name to the path. + + Args: + dtype (str): The data type you want to get the path for (clim_dir/bin_dir etc) + fname (str): Name of the file you want to join to the system path + default_dir (str): A fallback if the desired dtype is not found + check_existence (bool): Check if the path found also exists + check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) + validtime (as_datetime): Parse setting with this as valid time + basedtg (as_datetime): Parse setting with this as base time + mbr (int): Parse setting with this as ensemble member + tstep (int): Parse setting with this as time step + pert (int): Parse setting with this as pertubation number + var (str): Parse setting with this as variable + system_variables (dict): Arbitrary settings to substitute @NAME@ = + system_variables={"NAME": "Value"} + + Raises: + FileNotFoundError: If file not found + + Returns: + data_dir (str): + + See Also: + self.parse_setting + self.substitute_string + + """ + command = None + path = self.get_system_path( + dtype, + default_dir=default_dir, + check_existence=check_existence, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + + # If dict, also a command is attached + if isinstance(path, dict): + for key in path: + command = str(path[key]) + path = str(key) + fname = self.parse_setting( + fname, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + fname = self.substitute_string(fname, system_variables=system_variables) + if path is None: + logging.info("No path found for: %s", dtype) + else: + fname = path + "/" + fname + if check_existence: + if not os.path.exists(fname): + raise FileNotFoundError(fname) + if command is not None: + fname = {fname: command} + return fname + + @staticmethod + def parse_setting( + setting, + check_parsing=False, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): + """Parse setting with date/time/experiment specific values. 
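# A minimal usage sketch with a hypothetical path dictionary: the requested dtype is
# looked up first, default_dir is the fallback, and the file name is joined onto the
# resolved directory.
paths = SystemFilePaths({"climdir": "/data/climate", "default_data_dir": "/data"})
pgd_file = paths.get_system_file("climdir", "PGD.nc", default_dir="default_data_dir")
# pgd_file == "/data/climate/PGD.nc"; an unknown dtype would resolve to "/data/PGD.nc"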
+ + Args: + setting (str): The value of dictionary key which should be processes. Parser if type is str. + check_parsing (bool): Check if all @@ pairs were parsed + validtime (datetime.daetime): Parse setting with this as validtime + basedtg (datetime.datetime): Parse setting with this as base time + mbr (int): Parse setting with this as ensemble member number (@E@/@EE@/@EEE@) + tstep (int): Parse setting with this as timestep to get step number (@TTT@/@TTTT@) + pert (int): Parse setting with this as perturbation number @PERT@ + var (str): Parse setting with this as the variable (@VAR@) + + Raises: + RuntimeError: Setting was not substituted properly? + + Returns: + setting: Possibly parsed setting is type is str + + See Also: + self.parse_setting + self.substitute_string + + """ + # Check on arguments + if isinstance(setting, str): + + if basedtg is not None: + if isinstance(basedtg, str): + basedtg = as_datetime(basedtg) + if validtime is not None: + if isinstance(validtime, str): + validtime = as_datetime(validtime) + else: + validtime = basedtg + + if basedtg is not None and validtime is not None: + lead_time = validtime - basedtg + setting = str(setting).replace("@YYYY_LL@", validtime.strftime("%Y")) + setting = str(setting).replace("@MM_LL@", validtime.strftime("%m")) + setting = str(setting).replace("@DD_LL@", validtime.strftime("%d")) + setting = str(setting).replace("@HH_LL@", validtime.strftime("%H")) + setting = str(setting).replace("@mm_LL@", validtime.strftime("%M")) + lead_seconds = int(lead_time.total_seconds()) + lead_hours = int(lead_seconds / 3600) + setting = str(setting).replace("@LL@", f"{lead_hours:02d}") + setting = str(setting).replace("@LLL@", f"{lead_hours:03d}") + setting = str(setting).replace("@LLLL@", f"{lead_hours:04d}") + if tstep is not None: + lead_step = int(lead_seconds / tstep) + setting = str(setting).replace("@TTT@", f"{lead_step:03d}") + setting = str(setting).replace("@TTTT@", f"{lead_step:04d}") + + if basedtg is not None: + setting = str(setting).replace("@YMD@", basedtg.strftime("%Y%m%d")) + setting = str(setting).replace("@YYYY@", basedtg.strftime("%Y")) + setting = str(setting).replace("@YY@", basedtg.strftime("%y")) + setting = str(setting).replace("@MM@", basedtg.strftime("%m")) + setting = str(setting).replace("@DD@", basedtg.strftime("%d")) + setting = str(setting).replace("@HH@", basedtg.strftime("%H")) + setting = str(setting).replace("@mm@", basedtg.strftime("%M")) + + if mbr is not None: + setting = str(setting).replace("@E@", f"mbr{int(mbr):d}") + setting = str(setting).replace("@EE@", f"mbr{int(mbr):02d}") + setting = str(setting).replace("@EEE@", f"mbr{int(mbr):03d}") + else: + setting = str(setting).replace("@E@", "") + setting = str(setting).replace("@EE@", "") + setting = str(setting).replace("@EEE@", "") + + if pert is not None: + logging.debug("replace %s in %s", pert, setting) + setting = str(setting).replace("@PERT@", str(pert)) + logging.debug("replaced %s in %s", pert, setting) + + if var is not None: + setting = str(setting).replace("@VAR@", var) + + if check_parsing: + if isinstance(setting, str) and setting.count("@") > 1: + raise RuntimeError("Setting was not substituted properly? " + setting) + + return setting + + @staticmethod + def substitute_string(setting, system_variables=None): + """Substitute setting if string with OS values of values from system_variables. 
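# A short worked example of the @...@ substitution implemented above (pattern and times
# are hypothetical; strings are accepted for basedtg/validtime and converted with
# as_datetime, as elsewhere in this changeset). With a 06 UTC base time and a 09 UTC
# valid time the lead time is 3 hours:
path = SystemFilePaths.parse_setting(
    "archive/@YYYY@/@MM@/@DD@/@HH@/fc+@LLL@_@VAR@.nc",
    basedtg="2023042506",
    validtime="2023042509",
    var="t2m",
)
# path == "archive/2023/04/25/06/fc+003_t2m.nc"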
+ + Args: + setting (str): if setting is string it can be subst + system_variables (dict): Arbitrary settings to substitute @NAME@ = + system_variables={"NAME": "Value"} + + Returns: + setting: A setting possibly substituted if type is str + + """ + if isinstance(setting, str): + env_vals = ["USER", "HOME", "PWD"] + for env_val in env_vals: + if env_val in os.environ: + setting = setting.replace("@" + env_val + "@", os.environ[env_val]) + else: + logging.debug("%s not found in environment", env_val) + + if system_variables is not None: + logging.debug(system_variables) + for var in system_variables: + logging.debug(var, system_variables) + setting = setting.replace( + "@" + str(var) + "@", str(system_variables[var]) + ) + + return setting + + def add_system_file_path( + self, + name, + path, + system_variables=None, + check_parsing=True, + validtime=None, + basedtg=None, + mbr=None, + tstep=None, + pert=None, + var=None, + ): + """Add a system file path to be used. + + Args: + name (str): The data type you want to get the path for (clim_dir/bin_dir etc) + path (str): Name of the file you want to join to the system path + system_variables (dict): Arbitrary settings to substitute @NAME@ = system_variables= + {"NAME": "Value"} + check_parsing (bool): Check if parsing was successful (all @@ pairs substituted) + validtime (as_datetime): Parse setting with this as valid time + basedtg (as_datetime): Parse setting with this as base time + mbr (int): Parse setting with this as ensemble member + tstep (int): Parse setting with this as time step + pert (int): Parse setting with this as pertubation number + var (str): Parse setting with this as variable + + See Also: + self.parse_setting + self.substitute_string + + """ + path = self.substitute_string(path, system_variables=system_variables) + path = self.parse_setting( + path, + check_parsing=check_parsing, + validtime=validtime, + basedtg=basedtg, + mbr=mbr, + tstep=tstep, + pert=pert, + var=var, + ) + self.system_file_paths.update({name: path}) + + +class SystemFilePathsFromFile(SystemFilePaths): + """System file paths.""" + + def __init__(self, system_file_paths): + """System file path from a file. 
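# A small sketch of substitute_string(): @USER@, @HOME@ and @PWD@ come from the
# environment, anything else from the system_variables mapping. "EXP" is a hypothetical
# key used only for illustration.
scratch = SystemFilePaths.substitute_string(
    "/scratch/@USER@/@EXP@", system_variables={"EXP": "test_run"}
)
# e.g. "/scratch/alice/test_run" when the USER environment variable is "alice"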
+ + Args: + system_file_paths (_type_): _description_ + + """ + system_file_paths = json.load(open(system_file_paths, "r", encoding="utf-8")) + SystemFilePaths.__init__(self, system_file_paths) diff --git a/surfex/plot.py b/surfex/plot.py new file mode 100644 index 0000000..3d38b59 --- /dev/null +++ b/surfex/plot.py @@ -0,0 +1 @@ +"""Plot methods.""" diff --git a/surfex/read.py b/surfex/read.py index fc09afc..f7cb3e9 100644 --- a/surfex/read.py +++ b/surfex/read.py @@ -1,10 +1,13 @@ """Converter and read data.""" -import logging -import os import copy -from abc import abstractmethod, ABCMeta +import logging +from abc import ABCMeta, abstractmethod + import numpy as np -import surfex + +from .interpolation import fill_field +from .util import deep_update +from .variable import Variable class ReadData(object): @@ -16,28 +19,20 @@ def __init__(self, geo, var_name): """Construct readData object.""" self.geo = geo self.var_name = var_name - # print "Constructed "+self.__class__.__name__+" for " + self.var_name @abstractmethod def read_time_step(self, validtime, cache): """To be implemented.""" - raise NotImplementedError('users must define read_time_step to use this base class') + raise NotImplementedError( + "users must define read_time_step to use this base class" + ) @abstractmethod def print_info(self): """To be implemented.""" - raise NotImplementedError('users must define read_time_step to use this base class') - - -# class Points(object): -# def __init__(self, values, interpolator): -# self.values = values -# self.interpolator = interpolator - -# class TwoDField(object): -# def __init__(self, values, geo): -# self.values = values -# self.geo = geo + raise NotImplementedError( + "users must define read_time_step to use this base class" + ) # Direct data can be ead with this class with converter = None @@ -76,8 +71,10 @@ def read_time_step(self, validtime, cache): field = self.converter.read_time_step(self.geo, validtime, cache) # Preserve positive values for precipitation # TODO - # if self.var_name == "RAIN" or self.var_name == "SNOW": - # field[field < 0.] = 0. + """ + if self.var_name == "RAIN" or self.var_name == "SNOW": + field[field < 0.] = 0. + """ return field def print_info(self): @@ -96,13 +93,16 @@ def __init__(self, geo, var_name, var_dict): var_name (str): Variable name var_dict (dict): Variable definition + Raises: + RuntimeError: Constant value must have a value! + """ ReadData.__init__(self, geo, var_name) self.var_dict = var_dict if "value" in self.var_dict: self.value = self.var_dict["value"] else: - raise Exception("Constant value must have a value!") + raise RuntimeError("Constant value must have a value!") def read_time_step(self, validtime, cache): """Read time step. @@ -117,7 +117,6 @@ def read_time_step(self, validtime, cache): """ field = np.array([float(i) for i in range(0, self.geo.npoints)]) field.fill(self.value) - # print field.shape return field def print_info(self): @@ -125,34 +124,6 @@ def print_info(self): return self.var_name -def remove_existing_file(f_in, f_out): - """Remove existing file. - - Args: - f_in (_type_): _description_ - f_out (_type_): _description_ - - Raises: - FileNotFoundError: _description_ - IsADirectoryError: _description_ - - """ - if f_in is None: - raise FileNotFoundError("Input file not set") - # If files are not the same file - if os.path.abspath(f_in) != os.path.abspath(f_out): - if os.path.isdir(f_out): - raise IsADirectoryError(f_out + " is a directory! 
Please remove it if desired") - if os.path.islink(f_out): - os.unlink(f_out) - if os.path.isfile(f_out): - os.remove(f_out) - # files have the same path. Remove if it is a symlink - else: - if os.path.islink(f_out): - os.unlink(f_out) - - ####################################################### ####################################################### @@ -163,34 +134,25 @@ class Converter(object): Main interface to read a field is done through a converter. The converter name is default "None" to read a plain field without any conversion. - Args: - name (str): name of the converter - initial_time (datetime.datetime): The valid time you want to read - defs (dict): A dictionary defining the variables - conf (dict): A dictionary defining the converter - fileformat (str): Fileformat of the converter - """ def __init__(self, name, initial_time, defs, conf, fileformat): """Initialize the converter. Args: - name (_type_): _description_ - initial_time (_type_): _description_ - defs (_type_): _description_ - conf (_type_): _description_ - fileformat (_type_): _description_ + name (str): Converter name + initial_time (as_datetime): The valid time you want to read + defs (dict): A dictionary defining the variables + conf (dict): A dictionary defining the converter + fileformat (str): File format Raises: - KeyError: _description_ - NotImplementedError: _description_ + KeyError: Missing definitions + NotImplementedError: Converter not implemented """ self.name = name self.initial_time = initial_time - # self.validtime = validtime - # self.basetime = basetime logging.debug("Converter name: %s", self.name) logging.debug("Converter config: %s", conf) @@ -205,29 +167,41 @@ def __init__(self, name, initial_time, defs, conf, fileformat): self.temp = self.create_variable(fileformat, defs, conf[self.name]["t"]) self.pres = self.create_variable(fileformat, defs, conf[self.name]["p"]) elif name == "mslp2ps": - self.altitude = self.create_variable(fileformat, defs, conf[self.name]["altitude"]) + self.altitude = self.create_variable( + fileformat, defs, conf[self.name]["altitude"] + ) self.temp = self.create_variable(fileformat, defs, conf[self.name]["t"]) self.pres = self.create_variable(fileformat, defs, conf[self.name]["mslp"]) elif name == "rh2q_mslp": self.r_h = self.create_variable(fileformat, defs, conf[self.name]["rh"]) self.temp = self.create_variable(fileformat, defs, conf[self.name]["t"]) - self.altitude = self.create_variable(fileformat, defs, conf[self.name]["altitude"]) + self.altitude = self.create_variable( + fileformat, defs, conf[self.name]["altitude"] + ) self.pres = self.create_variable(fileformat, defs, conf[self.name]["mslp"]) elif name == "windspeed" or name == "winddir": self.x_wind = self.create_variable(fileformat, defs, conf[self.name]["x"]) self.y_wind = self.create_variable(fileformat, defs, conf[self.name]["y"]) elif name == "totalprec": - self.totalprec = self.create_variable(fileformat, defs, conf[self.name]["totalprec"]) + self.totalprec = self.create_variable( + fileformat, defs, conf[self.name]["totalprec"] + ) self.snow = self.create_variable(fileformat, defs, conf[self.name]["snow"]) elif name == "calcsnow": - self.totalprec = self.create_variable(fileformat, defs, conf[self.name]["totalprec"]) + self.totalprec = self.create_variable( + fileformat, defs, conf[self.name]["totalprec"] + ) self.temp = self.create_variable(fileformat, defs, conf[self.name]["t"]) elif name == "calcrain": - self.totalprec = self.create_variable(fileformat, defs, conf[self.name]["totalprec"]) + 
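# A hypothetical converter configuration, shaped the way __init__ above consumes it:
# conf[name] holds one variable definition per input the converter needs (here "x" and
# "y" for windspeed). Each definition is deep-merged with defs before a Variable is
# created, and the exact keys inside are whatever Variable() expects for the chosen
# file format; the names below are placeholders.
conf = {
    "windspeed": {
        "x": {"name": "x_wind_10m"},
        "y": {"name": "y_wind_10m"},
    }
}
# converter = Converter("windspeed", basetime, defs, conf, "netcdf")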
self.totalprec = self.create_variable( + fileformat, defs, conf[self.name]["totalprec"] + ) self.temp = self.create_variable(fileformat, defs, conf[self.name]["t"]) elif name == "snowplusgraupel": self.snow = self.create_variable(fileformat, defs, conf[self.name]["snow"]) - self.graupel = self.create_variable(fileformat, defs, conf[self.name]["graupel"]) + self.graupel = self.create_variable( + fileformat, defs, conf[self.name]["graupel"] + ) elif name == "phi2m": self.phi = self.create_variable(fileformat, defs, conf[self.name]["phi"]) elif self.name == "swe2sd": @@ -249,13 +223,16 @@ def __init__(self, name, initial_time, defs, conf, fileformat): self.smp1 = self.create_variable(fileformat, defs, conf[self.name]["smp1"]) self.smp2 = self.create_variable(fileformat, defs, conf[self.name]["smp2"]) elif self.name == "nature_town": - self.nature_fraction = self.create_variable(fileformat, defs, - conf[self.name]["nature_fraction"]) - self.town_fraction = self.create_variable(fileformat, defs, - conf[self.name]["town_fraction"]) + self.nature_fraction = self.create_variable( + fileformat, defs, conf[self.name]["nature_fraction"] + ) + self.town_fraction = self.create_variable( + fileformat, defs, conf[self.name]["town_fraction"] + ) elif self.name == "cloud_base": - self.cloud_base = self.create_variable(fileformat, defs, - conf[self.name]["cloud_base"]) + self.cloud_base = self.create_variable( + fileformat, defs, conf[self.name]["cloud_base"] + ) else: raise NotImplementedError("Converter " + self.name + " not implemented") @@ -274,7 +251,7 @@ def create_variable(self, fileformat, defs, var_dict): var_dict (dict): Variable dictionary Raises: - NotImplementedError: Not implemented + RuntimeError: Variable is not set Returns: field: The read field @@ -285,10 +262,10 @@ def create_variable(self, fileformat, defs, var_dict): defs = copy.deepcopy(defs) var_dict = copy.deepcopy(var_dict) if var_dict is None: - raise Exception("Variable is not set") - merged_dict = surfex.data_merge(defs, var_dict) + raise RuntimeError("Variable is not set") + merged_dict = deep_update(defs, var_dict) - var = surfex.variable.Variable(fileformat, merged_dict, self.initial_time) + var = Variable(fileformat, merged_dict, self.initial_time) logging.debug(var.print_variable_info()) return var @@ -309,7 +286,12 @@ def mslp2ps(mslp, altitude, temp): gravity = 9.81 dry_air = 287.0 - pres = np.multiply(mslp, np.exp(np.divide(np.multiply(-altitude, gravity), np.multiply(dry_air, temp)))) + pres = np.multiply( + mslp, + np.exp( + np.divide(np.multiply(-altitude, gravity), np.multiply(dry_air, temp)) + ), + ) return pres @@ -317,28 +299,26 @@ def read_time_step(self, geo, validtime, cache): """Read time step. 
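# A numeric check of the hydrostatic reduction used by mslp2ps() above,
# p = mslp * exp(-z * g / (R_dry * T)), with illustrative values (sea-level pressure
# 1013.25 hPa, altitude 500 m, temperature 288.15 K):
import math

p_surface = 101325.0 * math.exp(-500.0 * 9.81 / (287.0 * 288.15))
# p_surface ≈ 9.55e4 Pa, i.e. roughly 58 hPa below the sea-level value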
Args: - geo (_type_): _description_ - validtime (_type_): _description_ - cache (_type_): _description_ + geo (Geo): Geometry_ + validtime (as_datetime): Validtime + cache (Cache): Cache Raises: - Exception: _description_ + KeyError:Could not found climatological mean for month NotImplementedError: _description_ Returns: - _type_: _description_ + field (np.ndarray): Read and converted field """ gravity = 9.81 field = None - # field = np.empty(geo.npoints) # Specific reading for each converter if self.name == "none" or self.name == "analysis": field = self.var.read_variable(geo, validtime, cache) elif self.name == "windspeed" or self.name == "winddir": field_x = self.x_wind.read_variable(geo, validtime, cache) field_y = self.y_wind.read_variable(geo, validtime, cache) - # field_y = self.y.read_variable(geo,validtime,cache) if self.name == "windspeed": field = np.sqrt(np.square(field_x) + np.square(field_y)) np.where(field < 0.005, field, 0) @@ -346,24 +326,27 @@ def read_time_step(self, geo, validtime, cache): field = np.mod(90 - np.rad2deg(np.arctan2(field_y, field_x)), 360) elif self.name == "rh2q" or self.name == "rh2q_mslp": + """ + ZES = 6.112 * exp((17.67 * (ZT - 273.15)) / ((ZT - 273.15) + 243.5)) + ZE = ZRH * ZES + ZRATIO = 0.622 * ZE / (ZPRES / 100.) + RH2Q = 1. / (1. / ZRATIO + 1.) + """ field_r_h = self.r_h.read_variable(geo, validtime, cache) # % field_temp = self.temp.read_variable(geo, validtime, cache) # In K field_pres = self.pres.read_variable(geo, validtime, cache) # In Pa if self.name == "rh2q_mslp": - field_altitude = self.altitude.read_variable(geo, validtime, cache) # In m + field_altitude = self.altitude.read_variable( + geo, validtime, cache + ) # In m field_pres = self.mslp2ps(field_pres, field_altitude, field_temp) - field_p_mb = np.divide(field_pres, 100.) + field_p_mb = np.divide(field_pres, 100.0) field_t_c = np.subtract(field_temp, 273.15) exp = np.divide(np.multiply(17.67, field_t_c), np.add(field_t_c, 243.5)) esat = np.multiply(6.112, np.exp(exp)) - field = np.divide(np.multiply(0.622, field_r_h / 100.) * esat, field_p_mb) - - # ZES = 6.112 * exp((17.67 * (ZT - 273.15)) / ((ZT - 273.15) + 243.5)) - # ZE = ZRH * ZES - # ZRATIO = 0.622 * ZE / (ZPRES / 100.) - # RH2Q = 1. / (1. / ZRATIO + 1.) + field = np.divide(np.multiply(0.622, field_r_h / 100.0) * esat, field_p_mb) elif self.name == "mslp2ps": field_pres = self.pres.read_variable(geo, validtime, cache) # In Pa @@ -374,9 +357,9 @@ def read_time_step(self, geo, validtime, cache): field_totalprec = self.totalprec.read_variable(geo, validtime, cache) field_snow = self.snow.read_variable(geo, validtime, cache) field = np.subtract(field_totalprec, field_snow) - if any(field[field < 0.]): - print("Set negative rain values to zero") - field[field < 0.] 
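# A numeric check of the rh2q branch above: Magnus-type saturation pressure in hPa,
# then the 0.622 * e / p ratio. Illustrative values: RH = 70 %, T = 283.15 K,
# p = 1000 hPa.
import math

t_c = 283.15 - 273.15
esat = 6.112 * math.exp(17.67 * t_c / (t_c + 243.5))  # ≈ 12.27 hPa
q = 0.622 * (70.0 / 100.0) * esat / 1000.0            # ≈ 5.3e-3 kg/kg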
= 0 + if any(field[field < 0.0]): + logging.info("Set negative rain values to zero") + field[field < 0.0] = 0 elif self.name == "calcrain": field_totalprec = self.totalprec.read_variable(geo, validtime, cache) @@ -385,18 +368,7 @@ def read_time_step(self, geo, validtime, cache): field[field_t <= 274.16] = 0 elif self.name == "calcsnow": field_totalprec = self.totalprec.read_variable(geo, validtime, cache) - # field_rh = self.rh.read_variable(geo, validtime,cache) # field_t = self.temp.read_variable(geo, validtime, cache) # In K - # field_p = self.p.read_variable(geo, validtime,cache) # In Pa - # tc = field_t + 273.15 - # e = (field_rh)*0.611*exp((17.63*tc)/(tc+243.04)); - # Td = (116.9 + 243.04*log(e))/(16.78-log(e)); - # gamma = 0.00066 * field_p/1000; - # delta = (4098*e)/pow(Td+243.04,2); - # if(gamma + delta == 0): - # print("problem?") - # wetbulbTemperature = (gamma * tc + delta * Td)/(gamma + delta); - # wetbulbTemperatureK = wetbulbTemperature + 273.15; field = field_totalprec field[field_t > 274.16] = 0 elif self.name == "snowplusgraupel": @@ -405,21 +377,34 @@ def read_time_step(self, geo, validtime, cache): elif self.name == "phi2m": field = self.phi.read_variable(geo, validtime, cache) field = np.divide(field, gravity) - field[(field < 0)] = 0. + field[(field < 0)] = 0.0 elif self.name == "swe2sd": field = self.swe.read_variable(geo, validtime, cache) rho = self.swe.read_variable(geo, validtime, cache) field = np.divide(field, rho) elif self.name == "sweclim": field = self.swe.read_variable(geo, validtime, cache) - rhoclim = {"01": 222., "02": 233., "03": 240., "04": 278., "05": 212., "06": 312., - "07": 312., "08": 143., - "09": 143., "10": 161., "11": 182., "12": 213.} + rhoclim = { + "01": 222.0, + "02": 233.0, + "03": 240.0, + "04": 278.0, + "05": 212.0, + "06": 312.0, + "07": 312.0, + "08": 143.0, + "09": 143.0, + "10": 161.0, + "11": 182.0, + "12": 213.0, + } month = validtime.strftime("%m") if month in rhoclim: field = np.divide(field, rhoclim[month]) else: - raise Exception("Could not found climatological mean for month " + str(month)) + raise KeyError( + "Could not found climatological mean for month " + str(month) + ) elif self.name == "sea2land": field = self.sea.read_variable(geo, validtime, cache) field = np.subtract(1, field) @@ -453,7 +438,7 @@ def read_time_step(self, geo, validtime, cache): iteration = 0 while np.any(np.isnan(field_2d)): logging.debug("Filling cloud base") - field_2d, nans = surfex.fill_field(field_2d, geo, radius=3) + field_2d, nans = fill_field(field_2d, geo, radius=3) iteration = iteration + 1 logging.debug("Iteration %s NaNs: %s", iteration, nans) diff --git a/surfex/run.py b/surfex/run.py index d0867ad..3e1619b 100644 --- a/surfex/run.py +++ b/surfex/run.py @@ -1,12 +1,11 @@ """Run time methods.""" -import os import logging -import sys -import subprocess -from abc import ABC, abstractmethod -import json +import os import shutil -import surfex +import sys +from subprocess import PIPE, STDOUT, CalledProcessError, Popen + +from .util import remove_existing_file class BatchJob(object): @@ -30,29 +29,39 @@ def run(self, cmd): Args: cmd (str): Command to run. + Raises: + CalledProcessError: Command failed. + RuntimeError: No command provided! 
+ """ if cmd is None: - raise Exception("No command provided!") + raise RuntimeError("No command provided!") cmd = self.wrapper + " " + cmd if "OMP_NUM_THREADS" in self.rte: logging.info("BATCH: %s", self.rte["OMP_NUM_THREADS"]) logging.info("Batch running %s", cmd) - process = subprocess.Popen(cmd, shell=True, env=self.rte, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, bufsize=1) + process = Popen( # noqaS602 + cmd, + shell=True, # noqaS602 + env=self.rte, + stdout=PIPE, + stderr=STDOUT, + universal_newlines=True, + bufsize=1, + ) # Poll process for new output until finished while True: nextline = process.stdout.readline() - if nextline == '' and process.poll() is not None: + if nextline == "" and process.poll() is not None: break sys.stdout.write(nextline) sys.stdout.flush() return_code = process.wait() if return_code != 0: - raise subprocess.CalledProcessError(return_code, cmd) + raise CalledProcessError(return_code, cmd) class SURFEXBinary(object): @@ -67,6 +76,11 @@ def __init__(self, binary, batch, iofile, settings, input_data, **kwargs): iofile (surfex.SurfexIO): Input file to command. settings (f90nml.Namelist): Fortran namelist namelist input_data (surfex.InputDataToSurfexBinaries): Input to binary + kwargs (dict): Key word arguments. + + Raises: + FileNotFoundError: Input file not found + RuntimeError: Execution failed """ self.binary = binary @@ -87,10 +101,10 @@ def __init__(self, binary, batch, iofile, settings, input_data, **kwargs): self.input_data = input_data self.input_data.prepare_input() - if os.path.exists('OPTIONS.nam'): - os.remove('OPTIONS.nam') - self.settings.write('OPTIONS.nam') - with open('OPTIONS.nam', mode="r", encoding="utf-8") as file_handler: + if os.path.exists("OPTIONS.nam"): + os.remove("OPTIONS.nam") + self.settings.write("OPTIONS.nam") + with open("OPTIONS.nam", mode="r", encoding="utf-8") as file_handler: content = file_handler.read() if self.print_namelist: logging.info(content) @@ -99,10 +113,10 @@ def __init__(self, binary, batch, iofile, settings, input_data, **kwargs): logging.debug(self.pgdfile.filename) try: logging.info("PGD is %s", self.pgdfile.filename) - if self.pgdfile.input_file is not None and \ - os.path.abspath(self.pgdfile.filename) \ - != os.path.abspath(self.pgdfile.input_file): - surfex.read.remove_existing_file(self.pgdfile.input_file, self.pgdfile.filename) + if self.pgdfile.input_file is not None and os.path.abspath( + self.pgdfile.filename + ) != os.path.abspath(self.pgdfile.input_file): + remove_existing_file(self.pgdfile.input_file, self.pgdfile.filename) os.symlink(self.pgdfile.input_file, self.pgdfile.filename) if not os.path.exists(self.pgdfile.filename): raise FileNotFoundError(f"PGD {self.pgdfile.filename} not found!") @@ -111,11 +125,10 @@ def __init__(self, binary, batch, iofile, settings, input_data, **kwargs): if self.surfout is not None: try: logging.info("PREP is %s", self.iofile.filename) - if self.iofile.input_file is not None and \ - os.path.abspath(self.iofile.filename) \ - != os.path.abspath(self.iofile.input_file): - surfex.read.remove_existing_file(self.iofile.input_file, - self.iofile.filename) + if self.iofile.input_file is not None and os.path.abspath( + self.iofile.filename + ) != os.path.abspath(self.iofile.input_file): + remove_existing_file(self.iofile.input_file, self.iofile.filename) os.symlink(self.iofile.input_file, self.iofile.filename) if not os.path.exists(self.iofile.filename): raise FileNotFoundError(f"PREP {self.iofile.filename} not found!") @@ -129,8 
+142,12 @@ def __init__(self, binary, batch, iofile, settings, input_data, **kwargs): except Exception as exc: raise RuntimeError(repr(exc)) from Exception - listings = ["LISTING_PGD0.txt", "LISTING_PREP0.txt", "LISTING_OFFLINE0.txt", - "LISTING_SODA0.txt"] + listings = [ + "LISTING_PGD0.txt", + "LISTING_PREP0.txt", + "LISTING_OFFLINE0.txt", + "LISTING_SODA0.txt", + ] for listing in listings: if os.path.exists(listing): with open(listing, mode="r", encoding="utf-8") as file_handler: @@ -149,8 +166,20 @@ def __init__(self, binary, batch, iofile, settings, input_data, **kwargs): class PerturbedOffline(SURFEXBinary): """Pertubed offline.""" - def __init__(self, binary, batch, io, pert_number, settings, input_data, surfout=None, - archive_data=None, pgdfile=None, print_namelist=False, negpert=False): + def __init__( + self, + binary, + batch, + io, + pert_number, + settings, + input_data, + surfout=None, + archive_data=None, + pgdfile=None, + print_namelist=False, + negpert=False, + ): """Perturbed offline. Args: @@ -168,16 +197,16 @@ def __init__(self, binary, batch, io, pert_number, settings, input_data, surfout """ self.pert_number = pert_number - settings['nam_io_varassim']['LPRT'] = True - settings['nam_var']['nivar'] = int(pert_number) + settings["nam_io_varassim"]["LPRT"] = True + settings["nam_var"]["nivar"] = int(pert_number) # Handle negative pertubations if negpert: - nvar = int(settings['nam_var']['nvar']) + nvar = int(settings["nam_var"]["nvar"]) ipert = 0 npert = 1 for nvi in range(0, nvar): - key = 'nncv(' + str(nvi + 1) + ')' - val = int(settings['nam_var'][key]) + key = "nncv(" + str(nvi + 1) + ")" + val = int(settings["nam_var"][key]) # Check if active if val == 1: npert = 1 @@ -185,19 +214,38 @@ def __init__(self, binary, batch, io, pert_number, settings, input_data, surfout npert = npert + 1 for __ in range(0, npert): ipert = ipert + 1 - key = 'xtprt_m(' + str(ipert) + ')' - val = settings['nam_var'][key] - settings['nam_var'][key] = -val - SURFEXBinary.__init__(self, binary, batch, io, settings, input_data, surfout=surfout, - archive_data=archive_data, pgdfile=pgdfile, - print_namelist=print_namelist) + key = "xtprt_m(" + str(ipert) + ")" + val = settings["nam_var"][key] + settings["nam_var"][key] = -val + SURFEXBinary.__init__( + self, + binary, + batch, + io, + settings, + input_data, + surfout=surfout, + archive_data=archive_data, + pgdfile=pgdfile, + print_namelist=print_namelist, + ) class Masterodb(object): """Masterodb.""" - def __init__(self, pgdfile, prepfile, surffile, settings, input_data, binary=None, - archive_data=None, print_namelist=True, batch=None): + def __init__( + self, + pgdfile, + prepfile, + surffile, + settings, + input_data, + binary=None, + archive_data=None, + print_namelist=True, + batch=None, + ): """Masterodb. 
Args: @@ -230,20 +278,21 @@ def __init__(self, pgdfile, prepfile, surffile, settings, input_data, binary=Non self.input.prepare_input() # Prepare namelist - if os.path.exists('EXSEG1.nam'): - os.remove('EXSEG1.nam') + if os.path.exists("EXSEG1.nam"): + os.remove("EXSEG1.nam") - self.settings.write('EXSEG1.nam') - with open('EXSEG1.nam', mode="r", encoding="utf-8") as file_handler: + self.settings.write("EXSEG1.nam") + with open("EXSEG1.nam", mode="r", encoding="utf-8") as file_handler: content = file_handler.read() if self.print_namelist: logging.info(content) logging.info("PGD file for MASTERODB %s", self.pgdfile.filename) - if self.pgdfile.input_file is not None and \ - os.path.abspath(self.pgdfile.filename) != os.path.abspath(self.pgdfile.input_file): + if self.pgdfile.input_file is not None and os.path.abspath( + self.pgdfile.filename + ) != os.path.abspath(self.pgdfile.input_file): logging.info("Input PGD file is: %s", self.pgdfile.input_file) - surfex.read.remove_existing_file(self.pgdfile.input_file, self.pgdfile.filename) + remove_existing_file(self.pgdfile.input_file, self.pgdfile.filename) os.symlink(self.pgdfile.input_file, self.pgdfile.filename) if not os.path.exists(self.pgdfile.filename): @@ -251,12 +300,12 @@ def __init__(self, pgdfile, prepfile, surffile, settings, input_data, binary=Non raise FileNotFoundError(self.pgdfile.filename) logging.info("PREP file for MASTERODB %s", self.prepfile.filename) - if self.prepfile.input_file is not None and \ - os.path.abspath(self.prepfile.filename) \ - != os.path.abspath(self.prepfile.input_file): + if self.prepfile.input_file is not None and os.path.abspath( + self.prepfile.filename + ) != os.path.abspath(self.prepfile.input_file): logging.info("Input PREP file is: %s", self.prepfile.input_file) - surfex.read.remove_existing_file(self.prepfile.input_file, self.prepfile.filename) + remove_existing_file(self.prepfile.input_file, self.prepfile.filename) os.symlink(self.prepfile.input_file, self.prepfile.filename) if not os.path.exists(self.prepfile.filename): @@ -276,149 +325,6 @@ def archive_output(self): self.archive.archive_files() -class InputDataToSurfexBinaries(ABC): - """Abstract input data.""" - - def __init__(self): - """Construct.""" - - @abstractmethod - def prepare_input(self): - """Prepare input.""" - return NotImplementedError - - -class OutputDataFromSurfexBinaries(ABC): - """Abstract output data.""" - - def __init__(self): - """Construct.""" - - @abstractmethod - def archive_files(self): - """Archive files.""" - return NotImplementedError - - -class JsonOutputData(OutputDataFromSurfexBinaries): - """Output data.""" - - def __init__(self, data): - """Output data from dict. - - Args: - data (dict): Output data. 
- - """ - OutputDataFromSurfexBinaries.__init__(self) - self.data = data - - def archive_files(self): - """Archive files.""" - for output_file, target in self.data.items(): - - logging.info("%s -> %s", output_file, target) - command = "mv" - if isinstance(target, dict): - for key in target: - logging.debug("%s %s %s", output_file, key, target[key]) - command = target[key] - target = key - - cmd = command + " " + output_file + " " + target - try: - logging.info(cmd) - subprocess.check_call(cmd, shell=True) - except IOError: - logging.error("%s failed", cmd) - raise Exception(cmd + " failed") from IOError - - -class JsonOutputDataFromFile(JsonOutputData): - """JSON output data.""" - - def __init__(self, file): - """Construct from json file.""" - with open(file, mode="r", encoding="utf-8") as file_handler: - data = json.load(file_handler) - JsonOutputData.__init__(self, data) - - def archive_files(self): - """Archive files.""" - JsonOutputData.archive_files(self) - - -class JsonInputData(InputDataToSurfexBinaries): - """JSON input data.""" - - def __init__(self, data): - """Construct input data. - - Args: - data (dict): Input data. - """ - InputDataToSurfexBinaries.__init__(self) - self.data = data - - def prepare_input(self): - """Prepare input.""" - for target, input_file in self.data.items(): - - logging.info("%s -> %s", target, input_file) - logging.debug(os.path.realpath(target)) - command = None - if isinstance(input_file, dict): - for key in input_file: - logging.debug(key) - logging.debug(input_file[key]) - command = str(input_file[key]) - input_file = str(key) - command = command.replace("@INPUT@", input_file) - command = command.replace("@TARGET@", target) - - if os.path.realpath(target) == os.path.realpath(input_file): - logging.info("Target and input file is the same file") - else: - if command is None: - cmd = "ln -sf " + input_file + " " + target - else: - cmd = command - try: - logging.info(cmd) - subprocess.check_call(cmd, shell=True) - except IOError: - raise(cmd + " failed") from IOError - - def add_data(self, data): - """Add data. - - Args: - data (_type_): _description_ - """ - for key in data: - value = data[key] - self.data.update({key: value}) - - -class JsonInputDataFromFile(JsonInputData): - """JSON input data.""" - - def __init__(self, file): - """Construct JSON input data. - - Args: - file (_type_): _description_ - - """ - with open(file, mode="r", encoding="utf-8") as file_handler: - data = json.load(file_handler) - JsonInputData.__init__(self, data) - - def prepare_input(self): - """Prepare input.""" - JsonInputData.prepare_input(self) - - # TODO is it used? 
def create_working_dir(workdir, enter=True): """Create working dir.""" diff --git a/surfex/timeseries.py b/surfex/timeseries.py index 3253ace..8ebb6ae 100644 --- a/surfex/timeseries.py +++ b/surfex/timeseries.py @@ -1,8 +1,9 @@ """Time series.""" -from datetime import datetime, timedelta -import logging import json -import surfex +import logging + +from .datetime_utils import as_timedelta +from .obs import Observation class TimeSeries(object): @@ -31,31 +32,9 @@ def __init__(self, times, values, lons, lats, stids, stids_file=None, varname="N for i, lon in enumerate(self.lons): lat = self.lats[i] - pos = surfex.Observation.format_lon(lon) + ":" + surfex.Observation.format_lat(lat) + pos = Observation.format_lon(lon) + ":" + Observation.format_lat(lat) self.index_pos.update({pos: i}) - if stids_file is not None: - self.stids = self.update_stids_from_file(stids_file) - - def update_stids_from_file(self, filename): - """Update stids from file. - - Args: - filename (_type_): _description_ - - Returns: - _type_: _description_ - """ - stids = self.stids - ids_from_file = json.load(open(filename, "r", encoding="utf-8")) - for stid in ids_from_file: - lon = ids_from_file[stid]["lon"] - lat = ids_from_file[stid]["lat"] - pos = surfex.Observation.format_lon(lon) + ":" + surfex.Observation.format_lat(lat) - if pos in self.index_pos: - stids[self.index_pos[pos]] = stid - return stids - def write_json(self, filename, indent=None): """Write json file. @@ -65,154 +44,70 @@ def write_json(self, filename, indent=None): """ data = {} for i, time_val in enumerate(self.times): - data.update({ - time_val.strftime("%Y%m%d%H%M%S"): { - "values": self.values[i].tolist() - } - }) + data.update( + {time_val.strftime("%Y%m%d%H%M%S"): {"values": self.values[i].tolist()}} + ) data = { "lons": self.lons.tolist(), "lats": self.lats.tolist(), "stids": self.stids, "varname": self.varname, - "data": data + "data": data, } json.dump(data, open(filename, "w", encoding="utf-8"), indent=indent) -class TimeSeriesFromJson(TimeSeries): - """Time series from json.""" - - def __init__(self, filename, starttime=None, endtime=None, interval=None, lons=None, lats=None): - """Construct. - - Args: - filename (_type_): _description_ - starttime (_type_, optional): _description_. Defaults to None. - endtime (_type_, optional): _description_. Defaults to None. - interval (_type_, optional): _description_. Defaults to None. - lons (_type_, optional): _description_. Defaults to None. - lats (_type_, optional): _description_. Defaults to None. 
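# The on-disk layout produced by write_json() above, sketched with hypothetical values:
# {
#   "lons": [10.72], "lats": [59.94], "stids": ["SN18700"], "varname": "air_temperature",
#   "data": {
#     "20230425060000": {"values": [272.5]},
#     "20230425070000": {"values": [272.9]}
#   }
# }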
- - Raises: - Exception: _description_ - Exception: _description_ - """ - data = json.load(open(filename, "r", encoding="utf-8")) - times = [] - values = [] - ts_lons = data["lons"] - ts_lats = data["lats"] - ts_stids = data["stids"] - mask = [] - lons1 = [] - lats1 = [] - stids1 = [] - for i, ts_lon in enumerate(ts_lons): - if lons is not None and lats is not None: - lon1 = surfex.Observation.format_lon(float(ts_lon)) - lat1 = surfex.Observation.format_lat(float(ts_lats[i])) - if len(lons) != len(lats): - raise Exception("Mismach in longitudes and latitudes") - for j, lon in enumerate(lons): - lon = surfex.Observation.format_lon(float(lon)) - lat = surfex.Observation.format_lat(float(lats[j])) - if lon == lon1 and lat == lat1: - mask.append(i) - lons1.append(ts_lon) - lats1.append(ts_lats[i]) - stids1.append(ts_stids[i]) - break - else: - mask.append(i) - lons1.append(ts_lons[i]) - lats1.append(ts_lats[i]) - stids1.append(ts_stids[i]) - - if lons is not None and lats is not None: - if len(mask) != len(lons): - print(ts_lons, ts_lats) - print(lons, lats) - raise Exception("You asked for " + str(len(lons) - len(mask)) - + " position(s) not in the file") - - varname = data["varname"] - validtime = None - if starttime is not None: - validtime = starttime - - for dtime in data["data"]: - add = True - this_time = datetime.strptime(dtime, "%Y%m%d%H%M%S") - if starttime is not None and this_time < starttime: - add = False - if endtime is not None and this_time > endtime: - add = False - if interval is not None and validtime is not None and this_time != validtime: - add = False - - if add: - times.append(this_time) - this_values = [] - for mask_ind in mask: - this_values.append(data["data"][dtime]["values"][mask_ind]) - values.append(this_values) - else: - print("Skip this time ", this_time) - - if validtime is not None: - if interval is not None: - validtime = validtime + timedelta(seconds=interval) - TimeSeries.__init__(self, times, values, lons1, lats1, stids1, varname=varname) - - class TimeSeriesFromConverter(TimeSeries): """Time-Series from a converter.""" - def __init__(self, var, fileformat, conf, geo, converter, start, end, interval=3600, - cache=None, stids_file=None, geo_in=None): + def __init__( + self, var, geo, converter, start, end, interval=3600, cache=None, stids_file=None + ): """Construct. Args: var (_type_): _description_ - fileformat (_type_): _description_ - conf (_type_): _description_ geo (_type_): _description_ - converter (_type_): _description_ + converter (Converter): _description_ start (_type_): _description_ end (_type_): _description_ interval (int, optional): _description_. Defaults to 3600. - geo_in (_type_, optional): _description_. Defaults to None. cache (_type_, optional): _description_. Defaults to None. stids_file (_type_, optional): _description_. Defaults to None. 
""" - # validtime = start - basetime = start - defs = {} - - converter = surfex.Converter( - converter, basetime, defs, conf[var][fileformat]["converter"], fileformat) times = [] values = [] # Loop output time steps this_time = start while this_time <= end: # Write for each time step - logging.info("Creating time series for: %s time_step: %s", - this_time.strftime('%Y%m%d%H'), str(this_time)) + logging.info( + "Creating time series for: %s time_step: %s", + this_time.strftime("%Y%m%d%H"), + str(this_time), + ) values.append(converter.read_time_step(geo, this_time, cache)) times.append(this_time) - this_time = this_time + timedelta(seconds=interval) + this_time = this_time + as_timedelta(seconds=interval) if cache is not None: cache.clean_fields(this_time) if stids_file is not None: - stids = surfex.Observation.get_stid_from_stationlist( - stids_file, geo.lonlist, geo.latlist) + stids = Observation.get_stid_from_stationlist( + stids_file, geo.lonlist, geo.latlist + ) else: stids = ["NA"] * geo.nlons - TimeSeries.__init__(self, times, values, geo.lonlist, geo.latlist, stids, - stids_file=stids_file, varname=var) + TimeSeries.__init__( + self, + times, + values, + geo.lonlist, + geo.latlist, + stids, + stids_file=stids_file, + varname=var, + ) diff --git a/surfex/titan.py b/surfex/titan.py index 235ab12..06f6056 100644 --- a/surfex/titan.py +++ b/surfex/titan.py @@ -1,19 +1,20 @@ """Titan.""" -import os -import logging -import json -from datetime import datetime import abc +import json +import logging +import os + import numpy as np -import surfex + try: import titanlib as tit except ImportError: tit = None -try: - import gridpp -except ImportError: - gridpp = None + +from .datetime_utils import as_datetime, as_datetime_string +from .interpolation import ObsOperator, inside_grid +from .netcdf import read_first_guess_netcdf_file +from .observation import Observation class QualityControl(object): @@ -62,8 +63,7 @@ def set_flags(global_flags, flags, mask, code): Returns: list: Updated global flags. """ - imask = np.where( - (np.array(global_flags) == 0) & (np.array([flag for flag in flags]) == 1))[0] + imask = np.where((np.array(global_flags) == 0) & (np.array(list(flags)) == 1))[0] imask = np.intersect1d(imask, np.array(mask)) if len(imask) > 0: global_flags[imask] = code @@ -96,6 +96,9 @@ def set_input(self, size, minval=None, maxval=None): minval (float, optional): Minimum value. Defaults to None. maxval (float, optional): Maximum value. Defaults to None. + Raises: + RuntimeError: You must set minval and maxval + """ used_min = self.def_min used_max = self.def_max @@ -105,7 +108,7 @@ def set_input(self, size, minval=None, maxval=None): used_max = maxval if used_min is None or used_max is None: - raise Exception("You must set minval and maxval") + raise RuntimeError("You must set minval and maxval") logging.debug("minval: %s", used_min) logging.debug("maxval: %s", used_max) @@ -127,6 +130,12 @@ def test(self, dataset, mask, code=102): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 102. + Raises: + ModuleNotFoundError: titanlib was not loaded properly + + Returns: + global_flags(list): Global flags. 
+ """ if tit is None: raise ModuleNotFoundError("titanlib was not loaded properly") @@ -147,9 +156,15 @@ def test(self, dataset, mask, code=102): global_flags[mask_ind] = code for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s value(m_i)=%s flag(i)=%s global_flag(m_i)=%s", - self.name, i, mask_ind, dataset.values[mask_ind], flags[i], - global_flags[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s value(m_i)=%s flag(i)=%s global_flag(m_i)=%s", + self.name, + i, + mask_ind, + dataset.values[mask_ind], + flags[i], + global_flags[mask_ind], + ) return global_flags @@ -157,8 +172,15 @@ def test(self, dataset, mask, code=102): class FirstGuess(QualityControl): """First guess check.""" - def __init__(self, geo_in, fg_field, negdiff=None, posdiff=None, max_distance=5000, - operator="bilinear"): + def __init__( + self, + geo_in, + fg_field, + negdiff=None, + posdiff=None, + max_distance=5000, + operator="bilinear", + ): """Construct first guess QC check. Args: @@ -188,6 +210,9 @@ def set_input(self, size, posdiff=None, negdiff=None): posdiff (float, optional): Positive diff. Defaults to None. negdiff (float, optional): Negative diff. Defaults to None. + Raises: + RuntimeError: You must set negdiff and posdiff + """ used_negdiff = self.def_negdiff used_posdiff = self.def_posdiff @@ -199,7 +224,7 @@ def set_input(self, size, posdiff=None, negdiff=None): used_negdiff = negdiff if used_negdiff is None or used_posdiff is None: - raise Exception("You must set negdiff and posdiff") + raise RuntimeError("You must set negdiff and posdiff") logging.debug("posdiff: %s", used_posdiff) logging.debug("negdiff: %s", used_negdiff) @@ -221,12 +246,23 @@ def test(self, dataset, mask, code=108): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 108. + Returns: + global_flags(list): Global flags. + + Raises: + ModuleNotFoundError: titanlib was not loaded properly + """ if tit is None: raise ModuleNotFoundError("titanlib was not loaded properly") - fg_operator = ObsOperator(self.operator, self.geo_in, dataset, self.fg_field, - max_distance=self.max_distance) + fg_operator = ObsOperator( + self.operator, + self.geo_in, + dataset, + self.fg_field, + max_distance=self.max_distance, + ) fg_vals = fg_operator.get_obs_value() minvals = [] maxvals = [] @@ -248,11 +284,21 @@ def test(self, dataset, mask, code=108): else: global_flags[mask[i]] = 199 for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s min_val(i)=%s value(i)=%s " - "max_val(i)=%s flag(i)=%s global_flag(m_i)=%s fg_val(m_i)=%s", - self.name, i, mask_ind, dataset.lons[mask_ind], dataset.lats[mask_ind], - minvals[i], values[i], maxvals[i], flags[i], global_flags[mask_ind], - fg_vals[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s min_val(i)=%s value(i)=%s " + "max_val(i)=%s flag(i)=%s global_flag(m_i)=%s fg_val(m_i)=%s", + self.name, + i, + mask_ind, + dataset.lons[mask_ind], + dataset.lats[mask_ind], + minvals[i], + values[i], + maxvals[i], + flags[i], + global_flags[mask_ind], + fg_vals[mask_ind], + ) return global_flags @@ -260,8 +306,15 @@ def test(self, dataset, mask, code=108): class Fraction(QualityControl): """Fraction test.""" - def __init__(self, geo_in, fraction_field, minval=None, maxval=None, max_distance=5000, - operator="bilinear"): + def __init__( + self, + geo_in, + fraction_field, + minval=None, + maxval=None, + max_distance=5000, + operator="bilinear", + ): """Construct fraction test. 
Args: @@ -291,6 +344,9 @@ def set_input(self, size, minval=None, maxval=None): minval (float, optional): Minimum value. Defaults to None. maxval (float, optional): Maximum value. Defaults to None. + Raises: + RuntimeError: You must set min and max + """ used_min = self.def_min used_max = self.def_max @@ -302,7 +358,7 @@ def set_input(self, size, minval=None, maxval=None): used_max = maxval if used_min is None or used_max is None: - raise Exception("You must set min and max") + raise RuntimeError("You must set min and max") logging.debug("min: %s", used_min) logging.debug("max: %s", used_max) @@ -327,13 +383,21 @@ def test(self, dataset, mask, code=151): Returns: global_flags(list): Global flags. + Raises: + ModuleNotFoundError: titanlib was not loaded properly + """ if tit is None: raise ModuleNotFoundError("titanlib was not loaded properly") logging.debug("Obs operator") - fraction = ObsOperator(self.operator, self.geo_in, dataset, self.fraction_field, - max_distance=self.max_distance) + fraction = ObsOperator( + self.operator, + self.geo_in, + dataset, + self.fraction_field, + max_distance=self.max_distance, + ) logging.debug("get_obs_value") fraction_vals = fraction.get_obs_value() @@ -365,11 +429,21 @@ def test(self, dataset, mask, code=151): global_flags[mask[i]] = 199 for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s min_val(i)=%s value(i)=%s " - "maxval(i)=%s flag(i)=%s global_flag(m_i)=%s fraction(m_i)=%s", - self.name, i, mask_ind, dataset.lons[mask_ind], dataset.lats[mask_ind], - minvals[i], values[i], maxvals[i], flags[i], global_flags[mask_ind], - fraction_vals[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s min_val(i)=%s value(i)=%s " + "maxval(i)=%s flag(i)=%s global_flag(m_i)=%s fraction(m_i)=%s", + self.name, + i, + mask_ind, + dataset.lons[mask_ind], + dataset.lats[mask_ind], + minvals[i], + values[i], + maxvals[i], + flags[i], + global_flags[mask_ind], + fraction_vals[mask_ind], + ) return global_flags @@ -377,10 +451,24 @@ def test(self, dataset, mask, code=151): class Sct(QualityControl): """Spatial consistency check.""" - def __init__(self, num_min=5, num_max=100, inner_radius=50000, outer_radius=150000, - num_iterations=5, num_min_prof=20, min_elev_diff=200, min_horizonal_scale=10000, - vertical_scale=200, pos=4, neg=8, eps2=0.5, cmin=0.9, cmax=1.1, - missing_elev_to_zero=False): + def __init__( + self, + num_min=5, + num_max=100, + inner_radius=50000, + outer_radius=150000, + num_iterations=5, + num_min_prof=20, + min_elev_diff=200, + min_horizonal_scale=10000, + vertical_scale=200, + pos=4, + neg=8, + eps2=0.5, + cmin=0.9, + cmax=1.1, + missing_elev_to_zero=False, + ): """Construct SCT test. Args: @@ -467,6 +555,15 @@ def test(self, dataset, mask, code=105): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 105. + Raises: + ModuleNotFoundError: titanlib was not loaded properly + RuntimeError: Longitude is not defined! + RuntimeError: Latitude is not defined! + RuntimeError: Value is not defined! + + Returns: + global_flags(list): Global flags. 
+ """ if tit is None: raise ModuleNotFoundError("titanlib was not loaded properly") @@ -484,7 +581,6 @@ def test(self, dataset, mask, code=105): if np.isnan(dataset.elevs[old_mask_ind]): if not self.missing_elev_to_zero: ind = i - nmissing_elev - # print(i, old_mask[i], nmissing_elev, ind, len(self.pos)) self.pos.pop(ind) self.neg.pop(ind) self.eps2.pop(ind) @@ -506,23 +602,30 @@ def test(self, dataset, mask, code=105): # DEBUG if np.isnan(dataset.lons[mask_ind]): logging.error(i, "lon") - raise Exception("Longitude is not defined!") + raise RuntimeError("Longitude is not defined!") if np.isnan(dataset.lats[mask_ind]): logging.error(i, "lat") - raise Exception("Latitude is not defined!") + raise RuntimeError("Latitude is not defined!") if np.isnan(dataset.values[mask_ind]): logging.error(i, "value") - raise Exception("Value is not defined!") + raise RuntimeError("Value is not defined!") if nmissing_elev > 0: if self.missing_elev_to_zero: - logging.info("Found %s / %s observations with undefined elevations which were " - "set to zero", str(nmissing_elev), str(len(old_mask))) + logging.info( + "Found %s / %s observations with undefined elevations which were " + "set to zero", + str(nmissing_elev), + str(len(old_mask)), + ) else: - logging.info("Removed %s / %s obsevations with undefined elevations", - str(nmissing_elev), str(len(old_mask))) + logging.info( + "Removed %s / %s obsevations with undefined elevations", + str(nmissing_elev), + str(len(old_mask)), + ) logging.info("Running sct") if len(values) > 0: @@ -531,10 +634,22 @@ def test(self, dataset, mask, code=105): elevs = np.asarray(elevs) values = np.asarray(values) points = tit.Points(lats, lons, elevs) - answer = tit.sct(points, values, self.num_min, self.num_max, self.inner_radius, - self.outer_radius, self.num_iterations, self.num_min_prof, - self.min_elev_diff, self.min_horizonal_scale, self.vertical_scale, - self.pos, self.neg, self.eps2) + answer = tit.sct( + points, + values, + self.num_min, + self.num_max, + self.inner_radius, + self.outer_radius, + self.num_iterations, + self.num_min_prof, + self.min_elev_diff, + self.min_horizonal_scale, + self.vertical_scale, + self.pos, + self.neg, + self.eps2, + ) flags = answer[0] sct = answer[1] @@ -546,10 +661,18 @@ def test(self, dataset, mask, code=105): dataset.normalize_ci(mask, self.cmin, self.cmax) for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s value(m_i)=%s sct(i)=%s rep(i)=%s flag(i)=%s " - "global_flag(m_i)=%s", self.name, i, mask_ind, - dataset.values[mask_ind], sct[i], rep[i], int(flags[i]), - int(global_flags[mask_ind])) + logging.debug( + "test=%s i=%s m_i=%s value(m_i)=%s sct(i)=%s rep(i)=%s flag(i)=%s " + "global_flag(m_i)=%s", + self.name, + i, + mask_ind, + dataset.values[mask_ind], + sct[i], + rep[i], + int(flags[i]), + int(global_flags[mask_ind]), + ) else: logging.info("No observations to run test on") @@ -559,8 +682,16 @@ def test(self, dataset, mask, code=105): class Buddy(QualityControl): """Buddy test.""" - def __init__(self, diff_elev_max=200000., adjust_for_elev_diff=True, - distance_lim=1000000., priorities=1, buddies_min=1, thresholds=1., obs_to_check=1): + def __init__( + self, + diff_elev_max=200000.0, + adjust_for_elev_diff=True, + distance_lim=1000000.0, + priorities=1, + buddies_min=1, + thresholds=1.0, + obs_to_check=1, + ): """Construct buddy test. 
Args: @@ -587,8 +718,15 @@ def __init__(self, diff_elev_max=200000., adjust_for_elev_diff=True, self.def_obs_to_check = obs_to_check QualityControl.__init__(self, "buddy") - def set_input(self, size, distance_lim=None, priorities=None, buddies_min=None, - thresholds=None, obs_to_check=None): + def set_input( + self, + size, + distance_lim=None, + priorities=None, + buddies_min=None, + thresholds=None, + obs_to_check=None, + ): """Set input. Args: @@ -622,7 +760,7 @@ def set_input(self, size, distance_lim=None, priorities=None, buddies_min=None, if obs_to_check is not None: used_distance_lim = obs_to_check - for __ in range(0, len(size)): + for __ in range(0, size): distance_lim.append(used_distance_lim) priorities.append(used_priorities) buddies_min.append(used_buddies_min) @@ -649,6 +787,13 @@ def test(self, dataset, mask, code=104): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 104. + Raises: + ModuleNotFoundError: titanlib was not loaded properly + RuntimeError: Buddy check failed! + + Returns: + global_flags(list): Global flags. + """ if tit is None: raise ModuleNotFoundError("titanlib was not loaded properly") @@ -657,11 +802,6 @@ def test(self, dataset, mask, code=104): # Buddy does not work properly for dataset. # Also without data set the values must be set without subscripts - # status = dataset.buddy_check(self.distance_lim, self.priorities, - # self.buddies_min, self.thresholds, - # self.diff_elev_max, self.adjust_for_elev_diff, - # self.obs_to_check, mask) - lons = [] lats = [] elevs = [] @@ -673,21 +813,34 @@ def test(self, dataset, mask, code=104): values.append(dataset.values[i]) points = tit.Points(lats, lons, elevs) - status, flags = tit.buddy_check(points, values, - self.distance_lim, self.priorities, - self.buddies_min, self.thresholds, self.diff_elev_max, - self.adjust_for_elev_diff, self.obs_to_check) + status, flags = tit.buddy_check( + points, + values, + self.distance_lim, + self.priorities, + self.buddies_min, + self.thresholds, + self.diff_elev_max, + self.adjust_for_elev_diff, + self.obs_to_check, + ) if not status: - raise Exception("Buddy check failed!") + raise RuntimeError("Buddy check failed!") for i, mask_ind in enumerate(mask): if global_flags[mask_ind] == 0 and flags[i] == 1: global_flags[mask_ind] = code for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s value=%s flag(i)=%s global_flag(m_i)=%s", - self.name, i, mask_ind, dataset.values[i], dataset.flags[i], - global_flags[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s value=%s flag(i)=%s global_flag(m_i)=%s", + self.name, + i, + mask_ind, + dataset.values[i], + dataset.flags[i], + global_flags[mask_ind], + ) return global_flags @@ -706,7 +859,7 @@ def __init__(self, an_time, minval=None, maxval=None, offset=0): """ if isinstance(an_time, str): - an_time = datetime.strptime(an_time, "%Y%m%d%H") + an_time = as_datetime(an_time) self.unixtime = int(an_time.strftime("%s")) self.def_min = minval self.def_max = maxval @@ -725,6 +878,9 @@ def set_input(self, size, minval=None, maxval=None, offset=None): maxval (float, optional): Maximum value. Defaults to None. offset (int, optional): Offset. Defaults to 0. + Raises: + RuntimeError: You must set min and max values! 
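# Editor's note: illustrative sketch only, not part of the patch. The buddy check
# (and the other titanlib-based tests above) expects one value per active
# observation, so set_input expands the scalar defaults into lists of length
# `size`. The hunk above also corrects the loop bound from range(0, len(size)) to
# range(0, size), since `size` is an int.
def expand_defaults(size, distance_lim=1000000.0, buddies_min=1, thresholds=1.0):
    return (
        [distance_lim] * size,
        [buddies_min] * size,
        [thresholds] * size,
    )

print(expand_defaults(3))
# -> ([1000000.0, 1000000.0, 1000000.0], [1, 1, 1], [1.0, 1.0, 1.0])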
+ """ used_min = self.def_min used_max = self.def_max @@ -736,7 +892,7 @@ def set_input(self, size, minval=None, maxval=None, offset=None): if offset is not None: used_offset = offset if used_min is None or used_max is None: - raise Exception("You must set min and max values!") + raise RuntimeError("You must set min and max values!") minvals = [] maxvals = [] @@ -757,6 +913,9 @@ def test(self, dataset, mask, code=103): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 103. + Returns: + global_flags(list): Global flags. + """ lons = [] lats = [] @@ -770,8 +929,9 @@ def test(self, dataset, mask, code=103): values.append(val) points = tit.Points(lats, lons, elevs) - flags = tit.range_check_climatology(points, values, self.unixtime, self.maxvals, - self.minvals) + flags = tit.range_check_climatology( + points, values, self.unixtime, self.maxvals, self.minvals + ) global_flags = dataset.flags for i, mask_ind in enumerate(mask): @@ -779,11 +939,22 @@ def test(self, dataset, mask, code=103): global_flags[mask_ind] = code for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s lon(i)=%s lat(i)=%s elev(i)=%s min_val(i)=%s " - "value(i)=%s maxval(i)=%s value(m_i)=%s flag(i)=%s globalflag(m_i)=%s", - self.name, i, mask_ind, lons[i], lats[i], elevs[i], self.minvals[i], - values[i], self.maxvals[i], dataset.values[mask_ind], flags[i], - global_flags[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s lon(i)=%s lat(i)=%s elev(i)=%s min_val(i)=%s " + "value(i)=%s maxval(i)=%s value(m_i)=%s flag(i)=%s globalflag(m_i)=%s", + self.name, + i, + mask_ind, + lons[i], + lats[i], + elevs[i], + self.minvals[i], + values[i], + self.maxvals[i], + dataset.values[mask_ind], + flags[i], + global_flags[mask_ind], + ) return global_flags @@ -810,6 +981,9 @@ def test(self, dataset, mask, code=115): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 115. + Returns: + flags(list): Flags. + """ data = {} flags = dataset.flags @@ -822,12 +996,21 @@ def test(self, dataset, mask, code=115): if pos in data: obstime = data[pos]["obstime"] - logging.debug("Found a redundant observation %s %s %s %s", - i, pos, dataset.stids[i], obstime1) + logging.debug( + "Found a redundant observation %s %s %s %s", + i, + pos, + dataset.stids[i], + obstime1, + ) # New best position in time. 
Flag the previous if abs(self.an_time - obstime1) < abs(self.an_time - obstime): - logging.debug("Found a better redundant observation %s %s %s", - pos, obstime1, obstime) + logging.debug( + "Found a better redundant observation %s %s %s", + pos, + obstime1, + obstime, + ) ind = data[pos]["index"] flags[ind] = code data.update({pos: {"obstime": obstime, "index": i}}) @@ -837,9 +1020,15 @@ def test(self, dataset, mask, code=115): data.update({pos: {"obstime": obstime1, "index": i}}) for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s time(m_i)=%s value(m_i)=%s flags(i)=%s ", - self.name, i, mask_ind, dataset.obstimes[mask_ind], - dataset.values[mask_ind], flags[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s time(m_i)=%s value(m_i)=%s flags(i)=%s ", + self.name, + i, + mask_ind, + dataset.obstimes[mask_ind], + dataset.values[mask_ind], + flags[mask_ind], + ) return flags @@ -853,9 +1042,13 @@ def __init__(self, blacklist): Args: blacklist (dict): Blacklist positions/stids + Raises: + RuntimeError: You must set blacklist as a dict + RuntimeError: Blacklist must have the same length for both lons and lats + """ if blacklist is None or not isinstance(blacklist, dict): - raise Exception("You must set blacklist as a dict") + raise RuntimeError("You must set blacklist as a dict") blacklist_pos = {} blacklist_stid = {} @@ -863,10 +1056,12 @@ def __init__(self, blacklist): for i in range(0, len(blacklist["lons"])): if len(blacklist["lons"]) != len(blacklist["lats"]): - raise Exception("Blacklist must have the same length for both lons and lats") + raise RuntimeError( + "Blacklist must have the same length for both lons and lats" + ) - lon = surfex.Observation.format_lon(float(blacklist["lons"][i])) - lat = surfex.Observation.format_lat(float(blacklist["lats"][i])) + lon = Observation.format_lon(float(blacklist["lons"][i])) + lat = Observation.format_lat(float(blacklist["lats"][i])) pos = str(lon) + ":" + str(lat) blacklist_pos.update({pos: 1}) @@ -892,13 +1087,16 @@ def test(self, dataset, mask, code=100): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 100. + Returns: + flags(list): Flags. + """ flags = dataset.flags for i, lon_val in enumerate(dataset.lons): if i in mask: - lon = surfex.Observation.format_lon(lon_val) - lat = surfex.Observation.format_lat(dataset.lats[i]) + lon = Observation.format_lon(lon_val) + lat = Observation.format_lat(dataset.lats[i]) stid = dataset.stids[i] pos = lon + ":" + lat @@ -911,9 +1109,16 @@ def test(self, dataset, mask, code=100): flags[i] = code for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s stid(m_i)=%s flag(m_i)=%s", - self.name, i, mask_ind, dataset.lons[mask_ind], - dataset.lats[mask_ind], dataset.stids[mask_ind], flags[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s stid(m_i)=%s flag(m_i)=%s", + self.name, + i, + mask_ind, + dataset.lons[mask_ind], + dataset.lats[mask_ind], + dataset.stids[mask_ind], + flags[mask_ind], + ) return flags @@ -928,13 +1133,15 @@ def __init__(self, domain_geo, max_distance=5000): domain_geo (surfex.Geo): Surfex geometry max_distance (int, optional): Maximum distance to grid border. Defaults to 5000. + Raises: + RuntimeError: Domain geo was not set! 
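# Editor's note: illustrative sketch only, not part of the patch. The redundancy
# test above keeps, for each lon:lat position, the observation whose time is
# closest to the analysis time and flags the others with code 115. A minimal
# stand-alone version of that selection logic:
from datetime import datetime

def keep_closest(an_time, obs):
    """obs is a list of (position, obstime) tuples; returns one flag per obs."""
    best = {}
    flags = [0] * len(obs)
    for i, (pos, obstime) in enumerate(obs):
        if pos in best:
            j, kept_time = best[pos]
            if abs(an_time - obstime) < abs(an_time - kept_time):
                flags[j] = 115
                best[pos] = (i, obstime)
            else:
                flags[i] = 115
        else:
            best[pos] = (i, obstime)
    return flags

an_time = datetime(2020, 2, 20, 6)
obs = [("10.0:60.0", datetime(2020, 2, 20, 5)), ("10.0:60.0", datetime(2020, 2, 20, 6))]
print(keep_closest(an_time, obs))  # -> [115, 0]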
+ """ if domain_geo is None: - raise Exception("Domain geo was not set!") + raise RuntimeError("Domain geo was not set!") - lons = np.transpose(np.asarray(domain_geo.lons)) - lats = np.transpose(np.asarray(domain_geo.lats)) - self.grid = gridpp.Grid(lats, lons) + self.lons = domain_geo.lons + self.lats = domain_geo.lats self.max_distance = max_distance QualityControl.__init__(self, "domain") @@ -949,19 +1156,30 @@ def test(self, dataset, mask, code=199): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 199. + Returns: + flags(list): Flags. + """ flags = dataset.flags - for i, mask_ind in enumerate(mask): - lon = dataset.lons[mask_ind] - lat = dataset.lats[mask_ind] - neighbours = self.grid.get_num_neighbours(lat, lon, self.max_distance) - if neighbours == 0: + in_grid = inside_grid( + self.lons, self.lats, dataset.lons, dataset.lats, distance=self.max_distance + ) + # TODO vectorize + for __, mask_ind in enumerate(mask): + if not in_grid[mask_ind]: flags[mask_ind] = code for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s stid(m_i)=%s flag(m_i)=%s", - self.name, i, mask_ind, dataset.lons[mask_ind], dataset.lats[mask_ind], - dataset.stids[mask_ind], flags[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s stid(m_i)=%s flag(m_i)=%s", + self.name, + i, + mask_ind, + dataset.lons[mask_ind], + dataset.lats[mask_ind], + dataset.stids[mask_ind], + flags[mask_ind], + ) return flags @@ -983,6 +1201,9 @@ def test(self, dataset, mask, code=101): mask (list): Active data. code (int, optional): Code to use for flagging. Defaults to 101. + Returns: + flags(list): Flags. + """ flags = dataset.flags for mask_ind in mask: @@ -990,9 +1211,16 @@ def test(self, dataset, mask, code=101): flags[mask_ind] = code for i, mask_ind in enumerate(mask): - logging.debug("test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s stid(m_i)=%s flag(m_i)=%s", - self.name, i, mask_ind, dataset.lons[mask_ind], dataset.lats[mask_ind], - dataset.stids[mask_ind], flags[mask_ind]) + logging.debug( + "test=%s i=%s m_i=%s lon(m_i)=%s lat(m_i)=%s stid(m_i)=%s flag(m_i)=%s", + self.name, + i, + mask_ind, + dataset.lons[mask_ind], + dataset.lats[mask_ind], + dataset.stids[mask_ind], + flags[mask_ind], + ) return flags @@ -1007,6 +1235,13 @@ def define_quality_control(test_list, settings, an_time, domain_geo=None, blackl domain_geo(surfex.Geo): Geo object blacklist(dict): Optional blacklist. 
Needd for blacklist test + Raises: + NotImplementedError: Test not implemented + RuntimeError: You must set the name of fg file and variable + RuntimeError: You must set the name of fraction file and variable + RuntimeError: You must set both fraction_field and fraction_geo + RuntimeError: You must set both fg_field and fg_geo + Returns: tests(list): List of QualityControl objects @@ -1047,18 +1282,17 @@ def define_quality_control(test_list, settings, an_time, domain_geo=None, blackl kwargs.update({opt: test_options[opt]}) if fg_geo is None and fg_field is None: if fg_file is None or fg_var is None: - raise Exception("You must set the name of fg file and variable") - fg_geo, __, fg_field, __, __ = surfex.read_first_guess_netcdf_file(fg_file, fg_var) + raise RuntimeError("You must set the name of fg file and variable") + fg_geo, __, fg_field, __, __ = read_first_guess_netcdf_file( + fg_file, fg_var + ) else: if fg_geo is None or fg_field is None: - raise Exception("You must set both fg_field and fg_geo") + raise RuntimeError("You must set both fg_field and fg_geo") tests.append(FirstGuess(fg_geo, fg_field, **kwargs)) elif qct.lower() == "fraction": - kwargs.update({ - "minval": 0.99, - "maxval": 1.01 - }) + kwargs.update({"minval": 0.99, "maxval": 1.01}) fraction_var = None fraction_file = None fraction_field = None @@ -1079,19 +1313,31 @@ def define_quality_control(test_list, settings, an_time, domain_geo=None, blackl if fraction_geo is None and fraction_field is None: if fraction_var is None or fraction_file is None: - raise Exception("You must set the name of fraction file and variable") + raise RuntimeError( + "You must set the name of fraction file and variable" + ) - fraction_geo, __, fraction_field, __, __ = \ - surfex.read_first_guess_netcdf_file(fraction_file, fraction_var) + fraction_geo, __, fraction_field, __, __ = read_first_guess_netcdf_file( + fraction_file, fraction_var + ) else: if fraction_field is None or fraction_geo is None: - raise Exception("You must set both fraction_field and fraction_geo") + raise RuntimeError( + "You must set both fraction_field and fraction_geo" + ) tests.append(Fraction(fraction_geo, fraction_field, **kwargs)) elif qct.lower() == "buddy": if test_options is not None: - opts = ["diff_elev_max", "adjust_for_elev_diff", "distance_lim", "priorities", - "buddies_min", "thresholds", "obs_to_check"] + opts = [ + "diff_elev_max", + "adjust_for_elev_diff", + "distance_lim", + "priorities", + "buddies_min", + "thresholds", + "obs_to_check", + ] for opt in opts: if opt in test_options: kwargs.update({opt: test_options[opt]}) @@ -1109,9 +1355,21 @@ def define_quality_control(test_list, settings, an_time, domain_geo=None, blackl elif qct.lower() == "sct": if test_options is not None: - opts = ["num_min", "num_max", "inner_radius", "outer_radius", "num_iterations", - "num_min_prof", "min_elev_diff", "min_horizontal_scale", "pos", "neg", - "eps2", "cmin", "cmax"] + opts = [ + "num_min", + "num_max", + "inner_radius", + "outer_radius", + "num_iterations", + "num_min_prof", + "min_elev_diff", + "min_horizontal_scale", + "pos", + "neg", + "eps2", + "cmin", + "cmax", + ] for opt in opts: if opt in test_options: kwargs.update({opt: test_options[opt]}) @@ -1158,8 +1416,19 @@ def define_quality_control(test_list, settings, an_time, domain_geo=None, blackl class QCDataSet(object): """QC data set.""" - def __init__(self, analysis_time, observations, flags, cis, lafs, providers, passed_tests=None, - fg_dep=None, an_dep=None, remove_invalid_elevs=False): + def __init__( 
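# Editor's note: illustrative sketch only, not part of the patch, and the settings
# layout is an assumption; only argument names visible in the hunk above are real.
# define_quality_control maps a list of test names to QualityControl objects; a
# blacklist is passed as parallel "lons"/"lats" lists (see the Blacklist
# constructor above), and the first-guess test needs either fg_file/fg_var or a
# pre-read fg_geo/fg_field pair.
test_list = ["firstguess", "buddy", "sct"]
settings = {"firstguess": {"fg_file": "raw.nc", "fg_var": "air_temperature_2m"}}
blacklist = {"lons": [10.0], "lats": [60.0]}
# tests = define_quality_control(test_list, settings, an_time,
#                                domain_geo=domain_geo, blacklist=blacklist)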
+ self, + analysis_time, + observations, + flags, + cis, + lafs, + providers, + passed_tests=None, + fg_dep=None, + an_dep=None, + remove_invalid_elevs=False, + ): """Construct QC data set. Args: @@ -1224,19 +1493,19 @@ def __init__(self, analysis_time, observations, flags, cis, lafs, providers, pas self.providers = providers if passed_tests is None: passed_tests = [] - for i in range(0, len(observations)): + for __ in range(0, len(observations)): passed_tests.append([]) self.passed_tests = passed_tests else: self.passed_tests = passed_tests if fg_dep is None: fg_dep = [] - for i in range(0, len(observations)): + for __ in range(0, len(observations)): fg_dep.append(np.nan) self.fg_dep = fg_dep if an_dep is None: an_dep = [] - for i in range(0, len(observations)): + for __ in range(0, len(observations)): an_dep.append(np.nan) self.an_dep = an_dep @@ -1289,24 +1558,26 @@ def write_output(self, filename, indent=None): """ data = {} for i, lon_val in enumerate(self.lons): - data.update({ - i: { - "varname": self.varnames[i], - "obstime": datetime.strftime(self.obstimes[i], "%Y%m%d%H%M%S"), - "lon": lon_val, - "lat": self.lats[i], - "stid": self.stids[i], - "elev": self.elevs[i], - "value": self.values[i], - "flag": self.flags[i], - "ci": self.cis[i], - "laf": self.lafs[i], - "provider": self.providers[i], - "fg_dep": self.fg_dep[i], - "an_dep": self.an_dep[i], - "passed_tests": self.passed_tests[i] + data.update( + { + i: { + "varname": self.varnames[i], + "obstime": as_datetime_string(self.obstimes[i]), + "lon": lon_val, + "lat": self.lats[i], + "stid": self.stids[i], + "elev": self.elevs[i], + "value": self.values[i], + "flag": self.flags[i], + "ci": self.cis[i], + "laf": self.lafs[i], + "provider": self.providers[i], + "fg_dep": self.fg_dep[i], + "an_dep": self.an_dep[i], + "passed_tests": self.passed_tests[i], + } } - }) + ) json.dump(data, open(filename, mode="w", encoding="utf-8"), indent=indent) def normalize_ci(self, mask, cmin, cmax): @@ -1319,8 +1590,11 @@ def normalize_ci(self, mask, cmin, cmax): """ nsize = len(mask) + corep = [] + for i in range(0, nsize): + corep.append(self.cis[mask[i]]) + corep = np.asarray(corep) if nsize > 0: - corep = np.asarray(self.cis[mask]) def ecdf(xxx): xxx = np.sort(xxx) @@ -1328,7 +1602,7 @@ def ecdf(xxx): def _ecdf(vvv): # side='right' because we want Pr(x <= v) - return (np.searchsorted(xxx, vvv, side='right') + 1) / nnn + return (np.searchsorted(xxx, vvv, side="right") + 1) / nnn return _ecdf @@ -1357,14 +1631,16 @@ def _ecdf(vvv): corep1 = (qcorep[gav] - qav) / (qmx - qav) corep[gav] = corep_mean[gav] + (corep_max[gav] - corep_mean[gav]) * corep1 - self.cis[mask] = qcorep + for i in range(0, nsize): + self.cis[mask[i]] = qcorep[i] class TitanDataSet(QCDataSet): """Titan QC data set.""" - def __init__(self, var, settings, tests, datasources, an_time, corep=1, - test_flags=None): + def __init__( + self, var, settings, tests, datasources, an_time, corep=1, test_flags=None + ): """Titan Data set. Args: @@ -1376,6 +1652,9 @@ def __init__(self, var, settings, tests, datasources, an_time, corep=1, corep (int, optional): Correlations between observations. Defaults to 1. test_flags (dict, optional): Dictionary to set custom test flags. Defaults to None. 
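# Editor's note: illustrative sketch only, not part of the patch. normalize_ci
# (hunk above) rescales the confidence indices through an empirical CDF before
# mapping them into [cmin, cmax]; this is the same helper shown stand-alone.
import numpy as np

def ecdf(xxx):
    xxx = np.sort(xxx)
    nnn = len(xxx)

    def _ecdf(vvv):
        # side='right' because we want Pr(x <= v)
        return (np.searchsorted(xxx, vvv, side="right") + 1) / nnn

    return _ecdf

print(ecdf(np.array([0.5, 1.0, 1.0, 2.0]))(1.0))  # -> 1.0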
+ Raises: + ModuleNotFoundError: itanlib was not loaded properly + """ if tit is None: raise ModuleNotFoundError("titanlib was not loaded properly") @@ -1397,7 +1676,15 @@ def __init__(self, var, settings, tests, datasources, an_time, corep=1, # Get global data for obs_set in self.datasources: - lobstimes, llons, llats, lstids, lelevs, lvalues, lvarnames = obs_set.get_obs() + ( + lobstimes, + llons, + llats, + lstids, + lelevs, + lvalues, + lvarnames, + ) = obs_set.get_obs() obstimes = obstimes + lobstimes lons = lons + llons lats = lats + llats @@ -1405,10 +1692,10 @@ def __init__(self, var, settings, tests, datasources, an_time, corep=1, elevs = elevs + lelevs values = values + lvalues varnames = varnames + lvarnames - for i in range(0, len(llons)): + for __ in range(0, len(llons)): providers.append(obs_set.label) - for i in range(0, len(lons)): + for __ in range(0, len(lons)): passed_tests.append([]) flags = np.zeros(len(lons)) cis = np.ones(len(lons)) * corep @@ -1416,21 +1703,38 @@ def __init__(self, var, settings, tests, datasources, an_time, corep=1, observations = [] for i, lon_val in enumerate(lons): - observations.append(surfex.Observation(obstimes[i], lon_val, lats[i], values[i], - elev=elevs[i], stid=stids[i], - varname=varnames[i])) + observations.append( + Observation( + obstimes[i], + lon_val, + lats[i], + values[i], + elev=elevs[i], + stid=stids[i], + varname=varnames[i], + ) + ) points = tit.Points(lats, lons, elevs) self.titan_dataset = tit.Dataset(points, values) if passed_tests is None: passed_tests = [] - for i in range(0, len(lons)): + for __ in range(0, len(lons)): passed_tests.append([]) self.passed_tests = passed_tests else: self.passed_tests = passed_tests - QCDataSet.__init__(self, an_time, observations, flags, cis, lafs, providers, - passed_tests=None, remove_invalid_elevs=False) + QCDataSet.__init__( + self, + an_time, + observations, + flags, + cis, + lafs, + providers, + passed_tests=None, + remove_invalid_elevs=False, + ) def perform_tests(self): """Perform the tests.""" @@ -1442,8 +1746,10 @@ def perform_tests(self): for obs_set in self.datasources: if obs_set.label == "": - raise Exception("Observations set for quality control are " - "assumed to have a label") + raise RuntimeError( + "Observations set for quality control are " + "assumed to have a label" + ) print("obs_set", obs_set.label) size = obs_set.size @@ -1461,26 +1767,35 @@ def perform_tests(self): if "tests" in self.settings["sets"][obs_set.label]: if test.name in self.settings["sets"][obs_set.label]["tests"]: test_settings.update( - self.settings["sets"][obs_set.label]["tests"][test.name]) + self.settings["sets"][obs_set.label]["tests"][ + test.name + ] + ) do_test = test_settings["do_test"] if do_test: del test_settings["do_test"] logging.debug("findex %s size %s", findex, size) - lmask = np.where(np.asarray(self.flags[findex:findex + size]) == 0)[0].tolist() + lmask = np.where(np.asarray(self.flags[findex : findex + size]) == 0)[ + 0 + ].tolist() for lmask_ind in lmask: lmask_ind = lmask_ind + findex mask = mask + lmask # Set input for this set - logging.info("Test %s size=%s settings=%s", test.name, len(mask), test_settings) - # t.set_input(test_settings, len(mask)) + logging.info( + "Test %s size=%s settings=%s", test.name, len(mask), test_settings + ) test.set_input(len(lmask), **test_settings) else: - logging.info("Test %s is de-ativated for this data source %s", - test.name, obs_set.label) + logging.info( + "Test %s is de-ativated for this data source %s", + test.name, + obs_set.label, + 
) findex = findex + size @@ -1505,8 +1820,16 @@ def perform_tests(self): if self.flags[mask_ind] == 199: outside = outside + 1 - summary.update({test.name: { - "tested": len(mask), "ok": ok_obs, "bad": bad, "outside": outside}}) + summary.update( + { + test.name: { + "tested": len(mask), + "ok": ok_obs, + "bad": bad, + "outside": outside, + } + } + ) kept = 0 flagged = 0 @@ -1520,12 +1843,16 @@ def perform_tests(self): logging.info("\n") logging.info("Total number of observations: %s", len(self.flags)) logging.info(" Kept: %s", kept) - logging.info(" Flagged: %s (bad metadata: %s)", flagged, self.metadata) + logging.info( + " Flagged: %s (bad metadata: %s)", flagged, self.metadata + ) logging.info("\n") for test in self.tests: outside = "" if summary[test.name]["outside"] > 0: - outside = " (" + str(summary[test.name]["outside"]) + " exceeding max distance)" + outside = ( + " (" + str(summary[test.name]["outside"]) + " exceeding max distance)" + ) logging.info("Test: %s", test.name) logging.info(" tested: %s", summary[test.name]["tested"]) logging.info(" ok: %s", summary[test.name]["ok"]) @@ -1533,81 +1860,6 @@ def perform_tests(self): logging.info("\n") -class ObsOperator(object): - """Obs operator. Class to convert a field to an observation point.""" - - def __init__(self, operator, geo, dataset, grid_values, max_distance=5000): - """Construct the observation operator. - - Args: - operator (str): Interpolation operator. - geo (surfex.Geo): Surfex geometry. - dataset (QCDataSet): QC data set. - grid_values (np.darray): Values in the grid. - max_distance (int, optional): Max allowed deviation in meters from grid borders. - Defaults to 5000. - - """ - # TODO rewrite to use lonlatval geo - grid_lons = np.transpose(geo.lons) - grid_lats = np.transpose(geo.lats) - grid_values = np.transpose(grid_values) - - grid = gridpp.Grid(grid_lats, grid_lons) - lons = dataset.lons - lats = dataset.lats - points = gridpp.Points(lats, lons) - - logging.info("Setting up \"%s\" observation operator for %s points", - operator, str(len(lons))) - if operator == "nearest": - obs_values = gridpp.nearest(grid, points, grid_values) - elif operator == "bilinear": - obs_values = gridpp.bilinear(grid, points, grid_values) - else: - raise NotImplementedError(operator) - - inside_grid = [] - # Check if they are in grid - for i in range(0, len(obs_values)): - lon = lons[i] - lat = lats[i] - neighbours = grid.get_num_neighbours(lat, lon, max_distance) - # print(i, lons[i], lats[i], obs_values[i], nn) - if neighbours == 0: - inside_grid.append(False) - else: - inside_grid.append(True) - - self.inside_grid = inside_grid - self.obs_values = obs_values - - def get_obs_value(self, pos=None): - """Get the observed value. - - Args: - pos (int, optional): Position. Defaults to None. - - Returns: - float: Observation value for index. - """ - if pos is None: - return self.obs_values - else: - raise NotImplementedError - - def is_in_grid(self, index): - """Check if index is in grid. - - Args: - index (int): Index to check. - - Returns: - bool: True if inside - """ - return self.inside_grid[index] - - class Departure(object): """Departure. Difference between an observation and a value.""" @@ -1623,9 +1875,13 @@ def __init__(self, operator, geo, dataset, grid_values, mode, max_distance=5000) max_distance (int, optional): Max allowed deviation in meters from grid borders. Defaults to 5000. 
+ Raises: + NotImplementedError: Mode not implemented + """ - self.obs_operator = ObsOperator(operator, geo, dataset, grid_values, - max_distance=max_distance) + self.obs_operator = ObsOperator( + operator, geo, dataset, grid_values, max_distance=max_distance + ) obs_values = self.obs_operator.get_obs_value() values = [] @@ -1674,7 +1930,9 @@ def get_values(self, pos=None): return self.obs_operator.get_obs_value(pos=pos) -def dataset_from_file(an_time, filename, qc_flag=None, skip_flags=None, fg_dep=None, an_dep=None): +def dataset_from_file( + an_time, filename, qc_flag=None, skip_flags=None, fg_dep=None, an_dep=None +): """Get a QCDataSet from a json file. Args: @@ -1690,11 +1948,19 @@ def dataset_from_file(an_time, filename, qc_flag=None, skip_flags=None, fg_dep=N """ data = json.load(open(filename, mode="r", encoding="utf-8")) - return dataset_from_json(an_time, data, qc_flag=qc_flag, skip_flags=skip_flags, - fg_dep=fg_dep, an_dep=an_dep) - - -def dataset_from_json(an_time, data, qc_flag=None, skip_flags=None, fg_dep=None, an_dep=None): + return dataset_from_json( + an_time, + data, + qc_flag=qc_flag, + skip_flags=skip_flags, + fg_dep=fg_dep, + an_dep=an_dep, + ) + + +def dataset_from_json( + an_time, data, qc_flag=None, skip_flags=None, fg_dep=None, an_dep=None +): """Create a QCDataSet data set read from a json file. Args: @@ -1726,18 +1992,19 @@ def dataset_from_json(an_time, data, qc_flag=None, skip_flags=None, fg_dep=None, if skip_flags is not None: for sfl in skip_flags: - # print(i, int(f), int(data[i]["flag"])) if int(data[i]["flag"]) == int(sfl): add = False if add: - obstime = datetime.strptime(data[i]["obstime"], "%Y%m%d%H%M%S") + obstime = as_datetime(data[i]["obstime"]) lon = data[i]["lon"] lat = data[i]["lat"] stid = data[i]["stid"] elev = data[i]["elev"] value = data[i]["value"] - observations.append(surfex.Observation(obstime, lon, lat, value, stid=stid, elev=elev)) + observations.append( + Observation(obstime, lon, lat, value, stid=stid, elev=elev) + ) if "provider" in data[i]: providers.append(data[i]["provider"]) else: @@ -1766,8 +2033,17 @@ def dataset_from_json(an_time, data, qc_flag=None, skip_flags=None, fg_dep=None, if len(passed_tests) == 0: passed_tests = None - return QCDataSet(an_time, observations, flags, cis, lafs, providers, passed_tests=passed_tests, - fg_dep=fg_deps, an_dep=an_deps) + return QCDataSet( + an_time, + observations, + flags, + cis, + lafs, + providers, + passed_tests=passed_tests, + fg_dep=fg_deps, + an_dep=an_deps, + ) def merge_json_qc_data_sets(an_time, filenames, qc_flag=None, skip_flags=None): @@ -1793,8 +2069,7 @@ def merge_json_qc_data_sets(an_time, filenames, qc_flag=None, skip_flags=None): for dd1 in data1: lon1 = data1[dd1]["lon"] lat1 = data1[dd1]["lat"] - pos1 = surfex.Observation.format_lon(lon1) + ":" \ - + surfex.Observation.format_lat(lat1) + pos1 = f"{Observation.format_lon(lon1)}:{Observation.format_lat(lat1)}" if pos1 not in index_pos: index_pos.update({pos1: ind}) data.update({str(ind): data1[dd1]}) diff --git a/surfex/util.py b/surfex/util.py index a894e12..88f261d 100644 --- a/surfex/util.py +++ b/surfex/util.py @@ -1,78 +1,13 @@ """Misc.""" -import os -# from datetime import datetime import collections -import toml - - -# def unixtime_to_datenum(time): -# -# """ Converts unixtime into datenum -# -# Arguments: -# time (int): unixtime in seconds since 1970 -# -# Returns: -# int: datenum value -# -# """ -# dt = datetime.utcfromtimestamp(time) -# try: -# from matplotlib import dates -# dt2 = dates.date2num(dt) -# return 
dt2 -# except ImportError: -# raise Exception("You need to have dates installed") - - -class YamlReaderError(Exception): - """Error.""" - - -def data_merge(aaa, bbb): - """Merge bbb into aaa and return merged result. - - NOTE: tuples and arbitrary objects are not handled as it is - totally ambiguous what should happen +import logging +import os - """ - key = None - # ## debug output - # sys.stderr.write("DEBUG: %s to %s\n" %(b,a)) - try: - if aaa is None or isinstance(aaa, str) or isinstance(aaa, int) or isinstance(aaa, float): - # border case for first run or if a is a primitive - aaa = bbb - elif isinstance(aaa, list): - # lists can be only appended - if isinstance(bbb, list): - # merge lists - aaa.extend(bbb) - else: - # append to list - aaa.append(bbb) - elif isinstance(aaa, dict): - # dicts must be merged - if isinstance(bbb, dict): - for key in bbb: - if key in aaa: - aaa[key] = data_merge(aaa[key], bbb[key]) - else: - aaa[key] = bbb[key] - else: - raise YamlReaderError(f'Cannot merge non-dict "{bbb}" into dict "{aaa}"') - else: - raise YamlReaderError(f'NOT IMPLEMENTED "{bbb}" into "{aaa}"') - except TypeError as exc: - raise YamlReaderError(f'TypeError "{exc}" in key "{key}" when merging ' - f' "{bbb}" into "{aaa}"') \ - from TypeError - return aaa +import toml def merge_toml_env(old_env, mods): """Merge.""" - # print(mods) return deep_update(old_env, mods) @@ -81,12 +16,9 @@ def merge_toml_env_from_files(toml_files): merged_env = {} for toml_file in toml_files: if os.path.exists(toml_file): - # print(toml_file) with open(toml_file, mode="r", encoding="utf-8") as file_handler: modification = toml.load(file_handler) - # print(modification) merged_env = merge_toml_env(merged_env, modification) - # print(merged_env) else: print("WARNING: File not found " + toml_file) return merged_env @@ -96,16 +28,92 @@ def deep_update(source, overrides): """Update a nested dictionary or similar mapping. Modify ``source`` in place. + + Args: + source(dict): Source data + overrides(dict): Delta data to override + + Returns: + source(dict): Updated dict """ for key, value in overrides.items(): - if isinstance(value, collections.Mapping) and value: + if isinstance(value, collections.abc.Mapping) and value: returned = deep_update(source.get(key, {}), value) - # print("Returned:", key, returned) source[key] = returned else: override = overrides[key] - # print("Override:", key, override) - source[key] = override - return source + + +def remove_existing_file(f_in, f_out): + """Remove existing file. + + Args: + f_in (_type_): _description_ + f_out (_type_): _description_ + + Raises: + FileNotFoundError: _description_ + IsADirectoryError: _description_ + + """ + if f_in is None: + raise FileNotFoundError("Input file not set") + # If files are not the same file + if os.path.abspath(f_in) != os.path.abspath(f_out): + if os.path.isdir(f_out): + raise IsADirectoryError( + f_out + " is a directory! Please remove it if desired" + ) + if os.path.islink(f_out): + os.unlink(f_out) + if os.path.isfile(f_out): + os.remove(f_out) + # files have the same path. Remove if it is a symlink + else: + if os.path.islink(f_out): + os.unlink(f_out) + + +def parse_filepattern(file_pattern, basetime, validtime): + """Parse the file pattern. + + Args: + file_pattern (str): File pattern. + basetime (datetime.datetime): Base time. + validtime (datetime.datetime): Valid time. 
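# Editor's note: illustrative sketch only, not part of the patch, and the nested
# keys are made-up examples. deep_update (above, now using collections.abc.Mapping)
# merges nested dicts in place and is what merge_toml_env builds on: leaf values in
# `overrides` win, while untouched branches of `source` are kept.
source = {"SURFEX": {"IO": {"CSURF_FILETYPE": "NC"}, "COVER": {"SG": False}}}
overrides = {"SURFEX": {"COVER": {"SG": True}}}
# deep_update(source, overrides)
# -> {"SURFEX": {"IO": {"CSURF_FILETYPE": "NC"}, "COVER": {"SG": True}}}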
+ + Returns: + str: File name + + """ + if basetime is None or validtime is None: + return file_pattern + + logging.debug( + "file_pattern=%s basetime=%s validtime=%s", file_pattern, basetime, validtime + ) + file_name = str(file_pattern) + year = basetime.strftime("%Y") + year2 = basetime.strftime("%y") + month = basetime.strftime("%m") + day = basetime.strftime("%d") + hour = basetime.strftime("%H") + mins = basetime.strftime("%M") + d_t = validtime - basetime + ll_d = f"{int(d_t.seconds / 3600):d}" + ll_2 = f"{int(d_t.seconds / 3600):02d}" + ll_3 = f"{int(d_t.seconds / 3600):03d}" + ll_4 = f"{int(d_t.seconds / 3600):04d}" + file_name = file_name.replace("@YYYY@", year) + file_name = file_name.replace("@YY@", year2) + file_name = file_name.replace("@MM@", month) + file_name = file_name.replace("@DD@", day) + file_name = file_name.replace("@HH@", hour) + file_name = file_name.replace("@mm@", mins) + file_name = file_name.replace("@L@", ll_d) + file_name = file_name.replace("@LL@", ll_2) + file_name = file_name.replace("@LLL@", ll_3) + file_name = file_name.replace("@LLLL@", ll_4) + return file_name diff --git a/surfex/variable.py b/surfex/variable.py index 4047b1d..1ecada3 100644 --- a/surfex/variable.py +++ b/surfex/variable.py @@ -1,9 +1,17 @@ """Variable.""" -import logging import copy -from datetime import timedelta +import logging + import numpy as np -import surfex + +from .datetime_utils import as_timedelta +from .fa import Fa +from .file import SurfexFileVariable, get_surfex_io_object +from .geo import get_geo_object +from .grib import Grib, Grib1Variable, Grib2Variable +from .input_methods import get_datasources +from .netcdf import Netcdf, NetCDFReadVariable +from .util import parse_filepattern class Variable(object): @@ -13,31 +21,45 @@ def __init__(self, var_type, var_dict, initial_basetime, prefer_forecast=True): """Construct variable. Args: - var_type (_type_): _description_ - var_dict (_type_): _description_ - initial_basetime (_type_): _description_ - debug (bool, optional): _description_. Defaults to False. - prefer_forecast (bool, optional): _description_. Defaults to True. + var_type (str): Variable type. + var_dict (dict): Variable definitions + initial_basetime (datetime): Initial basetime + prefer_forecast (bool, optional): Prefer forecasts instead of analysis. Defaults to True. 
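# Editor's note: illustrative sketch only, not part of the patch. It shows what
# parse_filepattern (added to surfex/util.py above) produces: @YYYY@/@MM@/@DD@/@HH@
# are taken from the basetime and @LL@ is the forecast lead time in hours,
# zero-padded to two digits.
from datetime import datetime

basetime = datetime(2019, 11, 13, 0)
validtime = datetime(2019, 11, 13, 3)
pattern = "archive/@YYYY@/@MM@/@DD@/@HH@/fc@YYYY@@MM@@DD@@HH@+@LL@"
# parse_filepattern(pattern, basetime, validtime)
# -> "archive/2019/11/13/00/fc2019111300+03"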
Raises: - NotImplementedError: _description_ - Exception: _description_ - Exception: _description_ - Exception: _description_ + NotImplementedError: Variable not implemented + RuntimeError: No filepattern provided + RuntimeError: variable must have attribute + RuntimeError: You can not have larger offset than the frequency of forecasts """ self.var_type = var_type if self.var_type == "netcdf": mandatory = ["name", "fcint", "offset", "filepattern"] elif self.var_type == "grib1": - mandatory = ["parameter", "type", "level", "tri", "fcint", "offset", "filepattern"] + mandatory = [ + "parameter", + "type", + "level", + "tri", + "fcint", + "offset", + "filepattern", + ] elif self.var_type == "grib2": - mandatory = ["discipline", "parameterCategory", "parameterNumber", "levelType", "level", - "typeOfStatisticalProcessing", "fcint", "offset", "filepattern"] + mandatory = [ + "discipline", + "parameterCategory", + "parameterNumber", + "levelType", + "level", + "typeOfStatisticalProcessing", + "fcint", + "offset", + "filepattern", + ] elif self.var_type == "surfex": mandatory = ["varname", "fcint", "offset", "filepattern"] - # , "patches", "layers", "accumulated", "fcint", "offset", "file_inc", "filepattern", - # "fileformat", "filetype"] elif self.var_type == "fa": mandatory = ["name", "fcint", "offset", "filepattern"] elif self.var_type == "obs": @@ -47,8 +69,13 @@ def __init__(self, var_type, var_dict, initial_basetime, prefer_forecast=True): for mand_val in mandatory: if mand_val not in var_dict: - raise Exception(var_type + " variable must have attribute " + mand_val - + " var_dict:" + str(var_dict)) + raise RuntimeError( + var_type + + " variable must have attribute " + + mand_val + + " var_dict:" + + str(var_dict) + ) self.var_dict = copy.deepcopy(var_dict) interval = 3600 @@ -58,7 +85,7 @@ def __init__(self, var_type, var_dict, initial_basetime, prefer_forecast=True): if "filepattern" in var_dict: self.filepattern = var_dict["filepattern"] else: - raise Exception("No filepattern provided") + raise RuntimeError("No filepattern provided") self.initial_basetime = initial_basetime self.fcint = int(self.var_dict["fcint"]) self.offset = int(self.var_dict["offset"]) @@ -67,10 +94,14 @@ def __init__(self, var_type, var_dict, initial_basetime, prefer_forecast=True): self.prefer_forecast = self.var_dict["prefer_forecast"] if self.offset > self.fcint: - raise Exception("You can not have larger offset than the frequency of forecasts " - + str(self.offset) + " > " + str(self.fcint)) + raise RuntimeError( + "You can not have larger offset than the frequency of forecasts " + + str(self.offset) + + " > " + + str(self.fcint) + ) - logging.debug("Constructed %s for %s", self.__class__.__name__, str(self.var_dict)) + logging.debug("Constructed variable for %s", str(self.var_dict)) def get_filename(self, validtime, previoustime=None): """Get the filename. @@ -87,7 +118,7 @@ def get_filename(self, validtime, previoustime=None): basetime = self.get_basetime(validtime, previoustime=previoustime) if previoustime is not None: validtime = previoustime - return surfex.file.parse_filepattern(self.filepattern, basetime, validtime) + return parse_filepattern(self.filepattern, basetime, validtime) def get_filehandler(self, validtime, cache=None, previoustime=None): """Get the file handler. @@ -97,6 +128,9 @@ def get_filehandler(self, validtime, cache=None, previoustime=None): cache (surfex.Cache, optional): Cache. Defaults to None. previoustime (datetime.datetime, optional): Previous valid time. Defaults to None. 
+ Raises: + NotImplementedError: Variable type not implemented + Returns: tuple: Filehandler and file name @@ -106,11 +140,11 @@ def get_filehandler(self, validtime, cache=None, previoustime=None): file_handler = cache.get_file_handler(filename) else: if self.var_type == "netcdf": - file_handler = surfex.netcdf.Netcdf(filename) + file_handler = Netcdf(filename) elif self.var_type == "grib1" or self.var_type == "grib2": - file_handler = surfex.grib.Grib(filename) + file_handler = Grib(filename) elif self.var_type == "fa": - file_handler = surfex.fa.Fa(filename) + file_handler = Fa(filename) elif self.var_type == "surfex": fileformat = None if "fileformat" in self.var_dict: @@ -123,15 +157,16 @@ def get_filehandler(self, validtime, cache=None, previoustime=None): geo_in = self.var_dict["geo_input"] elif "geo_input_file" in self.var_dict: geo_in_file = self.var_dict["geo_input_file"] - geo_in = surfex.get_geo_object(open(geo_in_file, "r", encoding="utf-8")) + geo_in = get_geo_object(open(geo_in_file, "r", encoding="utf-8")) - file_handler = surfex.file.get_surfex_io_object(filename, fileformat=fileformat, - filetype=filetype, geo=geo_in) + file_handler = get_surfex_io_object( + filename, fileformat=fileformat, filetype=filetype, geo=geo_in + ) elif self.var_type == "obs": var_dict = self.var_dict var_dict = {"set": var_dict} basetime = self.get_basetime(validtime) - file_handler = surfex.get_datasources(basetime, var_dict)[0] + file_handler = get_datasources(basetime, var_dict)[0] else: raise NotImplementedError @@ -174,8 +209,9 @@ def read_var_points(self, var, geo, validtime, previoustime=None, cache=None): if "units" in self.var_dict: kwargs.update({"units": str([self.var_dict["units"]][0])}) - filehandler, filename = self.get_filehandler(validtime, cache=cache, - previoustime=previoustime) + filehandler, filename = self.get_filehandler( + validtime, cache=cache, previoustime=previoustime + ) if previoustime is not None: validtime = previoustime logging.debug("new validtime to read previous time %s", validtime) @@ -196,8 +232,9 @@ def read_var_points(self, var, geo, validtime, previoustime=None, cache=None): if self.var_type == "obs": __, field, __ = filehandler.points(geo, validtime=validtime) else: - field, interpolator = filehandler.points(var, geo, interpolation=interpolation, - validtime=validtime) + field, interpolator = filehandler.points( + var, geo, interpolation=interpolation, validtime=validtime + ) field = self.rotate_geographic_wind(field, interpolator) if field is not None: @@ -214,8 +251,11 @@ def set_var(self, validtime=None): Args: validtime (datetime.datetime, optional): Valid time. Defaults to None. 
+ Raises: + NotImplementedError: Variable not implemented + Returns: - _type_: _description_ + tuple: accumulated, instant, var """ accumulated = False @@ -237,7 +277,7 @@ def set_var(self, validtime=None): if member is not None: if not isinstance(member, list): member = [member] - var = surfex.NetCDFReadVariable(name, level=level, units=units, member=member) + var = NetCDFReadVariable(name, level=level, units=units, member=member) elif self.var_type == "grib1": par = self.var_dict["parameter"] typ = self.var_dict["type"] @@ -245,7 +285,7 @@ def set_var(self, validtime=None): tri = self.var_dict["tri"] if tri == 4: accumulated = True - var = surfex.grib.Grib1Variable(par, typ, level, tri) + var = Grib1Variable(par, typ, level, tri) elif self.var_type == "grib2": discipline = self.var_dict["discipline"] p_c = self.var_dict["parameterCategory"] @@ -257,7 +297,7 @@ def set_var(self, validtime=None): tsp = self.var_dict["typeOfStatisticalProcessing"] if tsp == 1: accumulated = True - var = surfex.grib.Grib2Variable(discipline, p_c, p_n, l_t, lev, tsp=tsp) + var = Grib2Variable(discipline, p_c, p_n, l_t, lev, tsp=tsp) elif self.var_type == "surfex": varname = self.var_dict["varname"] layers = None @@ -268,15 +308,24 @@ def set_var(self, validtime=None): patches = self.var_dict["patches"] if "accumulated" in self.var_dict: accumulated = self.var_dict["accumulated"] - datatype = None + datatype = "float" if "datatype" in self.var_dict: datatype = self.var_dict["datatype"] + tiletype = "FULL" + if "tiletype" in self.var_dict: + tiletype = self.var_dict["tiletype"] basetime = self.get_basetime(validtime) - var = surfex.file.SurfexFileVariable(varname, validtime=validtime, patches=patches, - layers=layers, - basetime=basetime, - interval=self.interval, datatype=datatype) + var = SurfexFileVariable( + varname, + validtime=validtime, + patches=patches, + layers=layers, + basetime=basetime, + interval=self.interval, + datatype=datatype, + tiletype=tiletype, + ) elif self.var_type == "fa": var = self.var_dict["name"] elif self.var_type == "obs": @@ -305,6 +354,9 @@ def read_variable(self, geo, validtime, cache=None): validtime (datetime.datetime): Valid time. cache (surfex.cache): Cache. Defaults to None + Raises: + RuntimeError: Negative accumulated value found + Returns: np.darray: Field read and interpolated. @@ -314,13 +366,13 @@ def read_variable(self, geo, validtime, cache=None): previous_field = None if accumulated: # Re-read field - previoustime = validtime - timedelta(seconds=self.interval) + previoustime = validtime - as_timedelta(seconds=self.interval) # Don't read if previous time is older than the very first basetime if previoustime >= self.initial_basetime: logging.debug("Re-read %s", previoustime) - previous_field = self.read_var_points(var, geo, validtime=validtime, - previoustime=previoustime, - cache=cache) + previous_field = self.read_var_points( + var, geo, validtime=validtime, previoustime=previoustime, cache=cache + ) else: previous_field = np.zeros([geo.npoints]) elif instant > 0: @@ -331,8 +383,8 @@ def read_variable(self, geo, validtime, cache=None): # Deaccumulate if either two files are read or if instant is > 0. 
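# Editor's note: illustrative sketch only, not part of the patch, with made-up
# values. For an accumulated variable (e.g. grib1 with tri=4) read_variable above
# re-reads the field at validtime - interval and deaccumulates by subtraction (see
# the deaccumulate method just below); negative differences are clipped to zero
# and, when `instant` is non-zero, the result is divided by it, e.g. instant=3600
# turns an hourly accumulation into a per-second rate.
import numpy as np

previous_field = np.array([10.0, 12.0])  # accumulation at validtime - interval
field = np.array([13.6, 12.0])           # accumulation at validtime
instant = 3600.0
diff = np.subtract(field, previous_field)
diff[diff < 0.0] = 0.0
rate = np.divide(diff, instant)
print(rate)  # -> [0.001 0.   ]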
if accumulated or instant > 0: field = self.deaccumulate(field, previous_field, instant) - if any(field[field < 0.]): - raise Exception("Negative accumulated value found for " + var) + if any(field[field < 0.0]): + raise RuntimeError("Negative accumulated value found for " + var) return field def print_variable_info(self): @@ -360,18 +412,22 @@ def deaccumulate(self, field, previous_field, instant): return None else: field = np.subtract(field, previous_field) - if any(field[field < 0.]): + if any(field[field < 0.0]): neg = [] for i in range(0, field.shape[0]): - if field[i] < 0.: + if field[i] < 0.0: neg.append(field[i]) neg = np.asarray(neg) - logging.warning("Deaccumulated field has %s negative values. lowest: %s mean: %s", - str(neg.shape[0]), str(np.nanmin(neg)), str(np.nanmean(neg))) - field[field < 0.] = 0 - if any(field[field < 0.]): + logging.warning( + "Deaccumulated field has %s negative values. lowest: %s mean: %s", + str(neg.shape[0]), + str(np.nanmin(neg)), + str(np.nanmean(neg)), + ) + field[field < 0.0] = 0 + if any(field[field < 0.0]): raise Exception("Should not be negative values") - if float(instant) != 0.: + if float(instant) != 0.0: field = np.divide(field, float(instant)) return field @@ -400,7 +456,7 @@ def get_basetime(self, validtime, previoustime=None, allow_different_basetime=Fa first = False offset = self.offset - basetime = validtime - timedelta(seconds=offset) + basetime = validtime - as_timedelta(seconds=offset) if basetime <= self.initial_basetime: first = True basetime = self.initial_basetime @@ -414,35 +470,46 @@ def get_basetime(self, validtime, previoustime=None, allow_different_basetime=Fa logging.debug(" Basetime with offset: %s", basetime) # Modify based on fcint - seconds_since_midnight = \ - int((basetime - basetime.replace(hour=0, minute=0, - second=0, microsecond=0)).total_seconds()) + seconds_since_midnight = int( + ( + basetime - basetime.replace(hour=0, minute=0, second=0, microsecond=0) + ).total_seconds() + ) if seconds_since_midnight == 86400: seconds_since_midnight = 0 - basetime_inc = int(seconds_since_midnight / int(timedelta(seconds=self.fcint).total_seconds())) - - prefer_forecast = timedelta(seconds=0) - if seconds_since_midnight == basetime_inc * int(timedelta(seconds=self.fcint).seconds): + basetime_inc = int( + seconds_since_midnight / int(as_timedelta(seconds=self.fcint).total_seconds()) + ) + + prefer_forecast = as_timedelta(seconds=0) + if seconds_since_midnight == basetime_inc * int( + as_timedelta(seconds=self.fcint).seconds + ): if first: logging.debug("First basetime") else: if self.prefer_forecast: logging.debug("Prefer forecasts instead of analyis") - prefer_forecast = timedelta(seconds=self.fcint) + prefer_forecast = as_timedelta(seconds=self.fcint) else: logging.debug("Prefer analysis instead of forecast") - fcint = timedelta(seconds=self.fcint) - basetime = basetime.replace(hour=0, minute=0, second=0, microsecond=0) + \ - (basetime_inc * fcint) - prefer_forecast + fcint = as_timedelta(seconds=self.fcint) + basetime = ( + basetime.replace(hour=0, minute=0, second=0, microsecond=0) + + (basetime_inc * fcint) + - prefer_forecast + ) if previoustime is not None: if allow_different_basetime: raise NotImplementedError logging.debug("seconds_since_midnight: %s", seconds_since_midnight) - logging.debug(" cycle seconds: %s", basetime_inc - * int(timedelta(seconds=self.fcint).seconds)) + logging.debug( + " cycle seconds: %s", + basetime_inc * int(as_timedelta(seconds=self.fcint).seconds), + ) logging.debug(" prefer_forecast: %s", 
self.prefer_forecast) logging.debug(" prefer_forecast_inc: %s", prefer_forecast) logging.debug(" basetime_inc: %s", basetime_inc) diff --git a/test/bin/CANARI b/test/bin/CANARI deleted file mode 100755 index 305e38a..0000000 --- a/test/bin/CANARI +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -if [ "$#" -ne "1" -a "$#" -ne "2" ]; then - echo "Usage $0 mode step" - exit 1 -fi - -set -x -mode=$1 -step=$2 -case $mode in - "CANARI") - offline=1 - outfile="ICMSHHARMANAL.sfx" - ;; - "MASTERODB") - soda=1 - outfile="ICMSHHARM+$step.sfx" - ;; - *) - echo "Not defined $mode" - exit 1 - ;; -esac - -if [ ! -f EXSEC1.nam ]; then - echo Namelist is missing"" - exit 1 -fi - -touch $outfile diff --git a/test/bin/MASTERODB b/test/bin/MASTERODB deleted file mode 100755 index 515e76f..0000000 --- a/test/bin/MASTERODB +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -set -x -basename=`basename $0` -mode=`echo $basename | awk -F '_' '{print $2}'` - -case $mode in - "CANARI") - outfile="unittest_ICMSHHARMANAL+0000.sfx" - ;; - "") - outfile="unittest_ICMSHHARM+0003.sfx" - ;; - *) - echo "Not defined $mode" - exit 1 - ;; -esac - -if [ ! -f EXSEG1.nam ]; then - echo Namelist is missing"" - exit 1 -fi - -touch $outfile diff --git a/test/bin/MASTERODB_CANARI b/test/bin/MASTERODB_CANARI deleted file mode 120000 index 0f33d68..0000000 --- a/test/bin/MASTERODB_CANARI +++ /dev/null @@ -1 +0,0 @@ -MASTERODB \ No newline at end of file diff --git a/test/bin/OFFLINE.exe b/test/bin/OFFLINE.exe deleted file mode 120000 index 324516f..0000000 --- a/test/bin/OFFLINE.exe +++ /dev/null @@ -1 +0,0 @@ -PGD.exe \ No newline at end of file diff --git a/test/bin/OFFLINE_NC b/test/bin/OFFLINE_NC deleted file mode 120000 index 0ed03d9..0000000 --- a/test/bin/OFFLINE_NC +++ /dev/null @@ -1 +0,0 @@ -SURFEX \ No newline at end of file diff --git a/test/bin/PGD.exe b/test/bin/PGD.exe deleted file mode 100755 index 80767fe..0000000 --- a/test/bin/PGD.exe +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -basename=`basename $0` -cmd=`echo $basename | awk -F '.' 
'{print $1}'` -dirname=`dirname $0` - -${dirname}/${cmd}_NC || exit 1 - diff --git a/test/bin/PGD_NC b/test/bin/PGD_NC deleted file mode 120000 index 0ed03d9..0000000 --- a/test/bin/PGD_NC +++ /dev/null @@ -1 +0,0 @@ -SURFEX \ No newline at end of file diff --git a/test/bin/PREP.exe b/test/bin/PREP.exe deleted file mode 120000 index 324516f..0000000 --- a/test/bin/PREP.exe +++ /dev/null @@ -1 +0,0 @@ -PGD.exe \ No newline at end of file diff --git a/test/bin/PREP_NC b/test/bin/PREP_NC deleted file mode 120000 index 0ed03d9..0000000 --- a/test/bin/PREP_NC +++ /dev/null @@ -1 +0,0 @@ -SURFEX \ No newline at end of file diff --git a/test/bin/SODA.exe b/test/bin/SODA.exe deleted file mode 120000 index 324516f..0000000 --- a/test/bin/SODA.exe +++ /dev/null @@ -1 +0,0 @@ -PGD.exe \ No newline at end of file diff --git a/test/bin/SODA_NC b/test/bin/SODA_NC deleted file mode 120000 index 0ed03d9..0000000 --- a/test/bin/SODA_NC +++ /dev/null @@ -1 +0,0 @@ -SURFEX \ No newline at end of file diff --git a/test/bin/SURFEX b/test/bin/SURFEX deleted file mode 100755 index b2791aa..0000000 --- a/test/bin/SURFEX +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash - -basename=`basename $0` -mode=`echo $basename | awk -F '_' '{print $1}'` -filetype=`echo $basename | awk -F '_' '{print $2}'` - -case $filetype in - "NC") - suffix=".nc" - ;; - "FA") - suffix=".fa" - ;; - "ASCII") - suffix=".txt" - ;; - *) - echo "Not a valid filetype $filetype" - exit 1 - ;; -esac - -pgd=0 -prep=0 -offline=0 -soda=0 -case $mode in - "PGD") - pgd=1 - outfile="PGD$suffix" - ;; - "PREP") - prep=1 - outfile="PREP$suffix" - ;; - "OFFLINE") - offline=1 - outfile="SURFOUT$suffix" - ;; - "SODA") - soda=1 - outfile="SURFOUT$suffix" - ;; - *) - echo "Not defined $mode" - exit 1 - ;; -esac - -if [ ! -f OPTIONS.nam ]; then - echo Namelist is missing"" - exit 1 -fi - -touch $outfile -exit 0 diff --git a/test/fixtures/config.yml b/test/fixtures/config.yml deleted file mode 100644 index 895d2cb..0000000 --- a/test/fixtures/config.yml +++ /dev/null @@ -1,165 +0,0 @@ -grib1: - fcint: 10800 - offset: 0 - timestep: 3600 - parameter: -1 - type: 105 - level: 0 - tri: 0 - prefer_forecast: True - filepattern: "archive/@YYYY@/@MM@/@DD@/@HH@/fc@YYYY@@MM@@DD@@HH@_@LLL@grib_fp" - blueprint: - 0: "archive/2019/11/13/00/fc2019111300_000grib_fp" - 1: "archive/2019/11/13/00/fc2019111300_001grib_fp" - 2: "archive/2019/11/13/00/fc2019111300_002grib_fp" - 3: "archive/2019/11/13/00/fc2019111300_003grib_fp" - 4: "archive/2019/11/13/03/fc2019111303_001grib_fp" - 5: "archive/2019/11/13/03/fc2019111303_002grib_fp" - 6: "archive/2019/11/13/03/fc2019111303_003grib_fp" - 7: "archive/2019/11/13/06/fc2019111306_001grib_fp" - 8: "archive/2019/11/13/06/fc2019111306_002grib_fp" - 9: "archive/2019/11/13/06/fc2019111306_003grib_fp" - 10: "archive/2019/11/13/09/fc2019111309_001grib_fp" - blueprint_previous: - 1: "archive/2019/11/13/00/fc2019111300_000grib_fp" - 2: "archive/2019/11/13/00/fc2019111300_001grib_fp" - 3: "archive/2019/11/13/00/fc2019111300_002grib_fp" - 4: "archive/2019/11/13/03/fc2019111303_000grib_fp" - 5: "archive/2019/11/13/03/fc2019111303_001grib_fp" - 6: "archive/2019/11/13/03/fc2019111303_002grib_fp" - 7: "archive/2019/11/13/06/fc2019111306_000grib_fp" - 8: "archive/2019/11/13/06/fc2019111306_001grib_fp" - 9: "archive/2019/11/13/06/fc2019111306_002grib_fp" - 10: "archive/2019/11/13/09/fc2019111309_000grib_fp" - -grib2: - fcint: 21600 - offset: 10800 - timestep: 3600 - discipline: 0 - parameterCategory: 0 - parameterNumber: 0 - levelType: 0 - level: 0 - 
typeOfStatisticalProcessing: 0 - prefer_forecast: True - filepattern: "archive/@YYYY@/@MM@/@DD@/@HH@/fc@YYYY@@MM@@DD@@HH@_@LLL@grib2_fp" - blueprint: - 0: "archive/2019/11/13/00/fc2019111300_002grib2_fp" - 1: "archive/2019/11/13/00/fc2019111300_003grib2_fp" - 2: "archive/2019/11/13/00/fc2019111300_004grib2_fp" - 3: "archive/2019/11/13/00/fc2019111300_005grib2_fp" - 4: "archive/2019/11/13/00/fc2019111300_006grib2_fp" - 5: "archive/2019/11/13/00/fc2019111300_007grib2_fp" - 6: "archive/2019/11/13/00/fc2019111300_008grib2_fp" - 7: "archive/2019/11/13/00/fc2019111300_009grib2_fp" - 8: "archive/2019/11/13/06/fc2019111306_004grib2_fp" - 9: "archive/2019/11/13/06/fc2019111306_005grib2_fp" - 10: "archive/2019/11/13/06/fc2019111306_006grib2_fp" - 11: "archive/2019/11/13/06/fc2019111306_007grib2_fp" - 12: "archive/2019/11/13/06/fc2019111306_008grib2_fp" - 13: "archive/2019/11/13/06/fc2019111306_009grib2_fp" - 14: "archive/2019/11/13/12/fc2019111312_004grib2_fp" - 15: "archive/2019/11/13/12/fc2019111312_005grib2_fp" - 16: "archive/2019/11/13/12/fc2019111312_006grib2_fp" - 17: "archive/2019/11/13/12/fc2019111312_007grib2_fp" - 18: "archive/2019/11/13/12/fc2019111312_008grib2_fp" - 19: "archive/2019/11/13/12/fc2019111312_009grib2_fp" - 20: "archive/2019/11/13/18/fc2019111318_004grib2_fp" - 21: "archive/2019/11/13/18/fc2019111318_005grib2_fp" - 22: "archive/2019/11/13/18/fc2019111318_006grib2_fp" - 23: "archive/2019/11/13/18/fc2019111318_007grib2_fp" - 24: "archive/2019/11/13/18/fc2019111318_008grib2_fp" - 25: "archive/2019/11/13/18/fc2019111318_009grib2_fp" - 26: "archive/2019/11/14/00/fc2019111400_004grib2_fp" - 27: "archive/2019/11/14/00/fc2019111400_005grib2_fp" - 28: "archive/2019/11/14/00/fc2019111400_006grib2_fp" - 29: "archive/2019/11/14/00/fc2019111400_007grib2_fp" - 30: "archive/2019/11/14/00/fc2019111400_008grib2_fp" - 31: "archive/2019/11/14/00/fc2019111400_009grib2_fp" - 32: "archive/2019/11/14/06/fc2019111406_004grib2_fp" - blueprint_previous: - 0: "archive/2019/11/13/00/fc2019111300_001grib2_fp" - 1: "archive/2019/11/13/00/fc2019111300_002grib2_fp" - 2: "archive/2019/11/13/00/fc2019111300_003grib2_fp" - 3: "archive/2019/11/13/00/fc2019111300_004grib2_fp" - 4: "archive/2019/11/13/00/fc2019111300_005grib2_fp" - 5: "archive/2019/11/13/00/fc2019111300_006grib2_fp" - 6: "archive/2019/11/13/00/fc2019111300_007grib2_fp" - 7: "archive/2019/11/13/00/fc2019111300_008grib2_fp" - 8: "archive/2019/11/13/06/fc2019111306_003grib2_fp" - 9: "archive/2019/11/13/06/fc2019111306_004grib2_fp" - 10: "archive/2019/11/13/06/fc2019111306_005grib2_fp" - 11: "archive/2019/11/13/06/fc2019111306_006grib2_fp" - 12: "archive/2019/11/13/06/fc2019111306_007grib2_fp" - 13: "archive/2019/11/13/06/fc2019111306_008grib2_fp" - 14: "archive/2019/11/13/12/fc2019111312_003grib2_fp" - 15: "archive/2019/11/13/12/fc2019111312_004grib2_fp" - 16: "archive/2019/11/13/12/fc2019111312_005grib2_fp" - 17: "archive/2019/11/13/12/fc2019111312_006grib2_fp" - 18: "archive/2019/11/13/12/fc2019111312_007grib2_fp" - 19: "archive/2019/11/13/12/fc2019111312_008grib2_fp" - 20: "archive/2019/11/13/18/fc2019111318_003grib2_fp" - 21: "archive/2019/11/13/18/fc2019111318_004grib2_fp" - 22: "archive/2019/11/13/18/fc2019111318_005grib2_fp" - 23: "archive/2019/11/13/18/fc2019111318_006grib2_fp" - 24: "archive/2019/11/13/18/fc2019111318_007grib2_fp" - 25: "archive/2019/11/13/18/fc2019111318_008grib2_fp" - 26: "archive/2019/11/14/00/fc2019111400_003grib2_fp" - 27: "archive/2019/11/14/00/fc2019111400_004grib2_fp" - 28: 
"archive/2019/11/14/00/fc2019111400_005grib2_fp" - 29: "archive/2019/11/14/00/fc2019111400_006grib2_fp" - 30: "archive/2019/11/14/00/fc2019111400_007grib2_fp" - 31: "archive/2019/11/14/00/fc2019111400_008grib2_fp" - 32: "archive/2019/11/14/06/fc2019111406_003grib2_fp" -netcdf: - fcint: 21600 - offset: 10800 - timestep: 3600 - name: test - filepattern: "archive/@YYYY@/@MM@/@DD@/meps@YYYY@@MM@@DD@Z@HH@.nc" - blueprint: - 0: "archive/2019/11/13/meps20191113Z00.nc" - 1: "archive/2019/11/13/meps20191113Z00.nc" - 2: "archive/2019/11/13/meps20191113Z00.nc" - 3: "archive/2019/11/13/meps20191113Z00.nc" - 4: "archive/2019/11/13/meps20191113Z00.nc" - 5: "archive/2019/11/13/meps20191113Z00.nc" - 6: "archive/2019/11/13/meps20191113Z00.nc" - 7: "archive/2019/11/13/meps20191113Z00.nc" - 8: "archive/2019/11/13/meps20191113Z00.nc" - 9: "archive/2019/11/13/meps20191113Z00.nc" - 10: "archive/2019/11/13/meps20191113Z06.nc" - blueprint_previous: - 1: "archive/2019/11/13/meps20191113Z00.nc" - 2: "archive/2019/11/13/meps20191113Z00.nc" - 3: "archive/2019/11/13/meps20191113Z00.nc" - 4: "archive/2019/11/13/meps20191113Z00.nc" - 5: "archive/2019/11/13/meps20191113Z00.nc" - 6: "archive/2019/11/13/meps20191113Z00.nc" - 7: "archive/2019/11/13/meps20191113Z00.nc" - 8: "archive/2019/11/13/meps20191113Z00.nc" - 9: "archive/2019/11/13/meps20191113Z00.nc" - 10: "archive/2019/11/13/meps20191113Z06.nc" - -met_nordic: - fcint: 3600 - offset: 0 - timestep: 3600 - name: test - accumulated: false - instant: 3600 - prefer_forecast: false - filepattern: "archive/@YYYY@/@MM@/@DD@/met_nordic_@YYYY@@MM@@DD@Z@HH@.nc" - blueprint: - 0: "archive/2019/11/13/met_nordic_20191113Z00.nc" - 1: "archive/2019/11/13/met_nordic_20191113Z01.nc" - 2: "archive/2019/11/13/met_nordic_20191113Z02.nc" - 3: "archive/2019/11/13/met_nordic_20191113Z03.nc" - 4: "archive/2019/11/13/met_nordic_20191113Z04.nc" - 5: "archive/2019/11/13/met_nordic_20191113Z05.nc" - 6: "archive/2019/11/13/met_nordic_20191113Z06.nc" - 7: "archive/2019/11/13/met_nordic_20191113Z07.nc" - 8: "archive/2019/11/13/met_nordic_20191113Z08.nc" - 9: "archive/2019/11/13/met_nordic_20191113Z09.nc" - 10: "archive/2019/11/13/met_nordic_20191113Z10.nc" diff --git a/test/nam/POLYNOMES_ISBA b/test/nam/POLYNOMES_ISBA deleted file mode 100644 index 79e05ef..0000000 --- a/test/nam/POLYNOMES_ISBA +++ /dev/null @@ -1,48 +0,0 @@ - .0022 .0049 .0204 -.0011 -.0348 .0115 .3494 -.5030 - .0018 .0042 .0180 -.0017 -.0323 .0069 .4124 -.6089 - .0010 .0029 .0140 -.0026 -.0288 .0016 .4481 -.6934 - -.0002 .0013 .0085 -.0037 -.0245 -.0041 .4541 -.7507 - -.0016 -.0006 .0019 -.0049 -.0198 -.0098 .4299 -.7769 - -.0033 -.0026 -.0052 -.0062 -.0148 -.0152 .3773 -.7703 - -.0050 -.0047 -.0125 -.0075 -.0100 -.0197 .2997 -.7313 - -.0066 -.0066 -.0195 -.0087 -.0058 -.0232 .2026 -.6626 - -.0081 -.0082 -.0256 -.0098 -.0023 -.0254 .0924 -.5687 - -.0093 -.0094 -.0304 -.0105 .0002 -.0262 -.0233 -.4562 - -.0101 -.0102 -.0337 -.0110 .0015 -.0254 -.1365 -.3327 - -.0106 -.0105 -.0351 -.0111 .0015 -.0232 -.2396 -.2066 - -.0106 -.0102 -.0347 -.0108 .0002 -.0197 -.3256 -.0865 - -.0102 -.0094 -.0323 -.0103 -.0023 -.0151 -.3886 .0194 - -.0094 -.0082 -.0283 -.0094 -.0058 -.0098 -.4243 .1039 - -.0082 -.0065 -.0228 -.0083 -.0101 -.0040 -.4303 .1612 - -.0067 -.0046 -.0163 -.0071 -.0149 .0017 -.4061 .1875 - -.0051 -.0026 -.0091 -.0057 -.0198 .0070 -.3535 .1809 - -.0034 -.0006 -.0018 -.0044 -.0246 .0116 -.2759 .1419 - -.0018 .0013 .0052 -.0032 -.0289 .0151 -.1788 .0731 - -.0003 .0029 .0113 -.0022 -.0324 .0173 -.0686 -.0207 - .0009 .0042 .0161 
-.0015 -.0349 .0180 .0470 -.1333 - .0018 .0050 .0194 -.0010 -.0361 .0173 .1603 -.2568 - .0022 .0052 .0208 -.0009 -.0361 .0151 .2634 -.3829 - .0942 -.0637 .3582 .0156 .1077 -.1266 .0000 5.8369 - .1117 -.0164 .2402 .0157 .1069 -.1189 -2.6029 6.2965 - .1299 .0343 .1344 .0143 .0979 -.0930 -2.5553 6.5530 - .1474 .0850 .0480 .0113 .0814 -.0506 -2.3103 6.5889 - .1631 .1323 -.0131 .0071 .0585 .0054 -1.8847 6.4018 - .1758 .1728 -.0447 .0019 .0307 .0712 -1.3074 6.0044 - .1848 .2039 -.0447 -.0039 -.0001 .1422 -.6178 5.4238 - .1894 .2235 -.0130 -.0099 -.0317 .2137 .1371 4.6995 - .1894 .2301 .0481 -.0158 -.0621 .2808 .9059 3.8810 - .1846 .2234 .1346 -.0211 -.0891 .3388 1.6362 3.0239 - .1754 .2038 .2405 -.0255 -.1109 .3839 2.2782 2.1868 - .1625 .1726 .3585 -.0286 -.1260 .4130 2.7882 1.4266 - .1467 .1320 .4807 -.0303 -.1334 .4241 3.1313 .7952 - .1292 .0847 .5988 -.0304 -.1326 .4164 3.2843 .3355 - .1110 .0340 .7046 -.0289 -.1237 .3905 3.2367 .0790 - .0935 -.0167 .7910 -.0260 -.1071 .3481 2.9917 .0431 - .0778 -.0639 .8521 -.0218 -.0842 .2921 2.5661 .2302 - .0651 -.1045 .8837 -.0166 -.0564 .2263 1.9888 .6277 - .0561 -.1356 .8836 -.0108 -.0256 .1553 1.2992 1.2083 - .0515 -.1552 .8520 -.0047 .0060 .0838 .5443 1.9326 - .0516 -.1618 .7908 .0012 .0364 .0168 -.2245 2.7511 - .0563 -.1551 .7044 .0065 .0634 -.0413 -.9548 3.6082 - .0655 -.1355 .5985 .0108 .0852 -.0864 -1.5968 4.4453 - .0784 -.1043 .4805 .0140 .1003 -.1155 -2.1067 5.2055 diff --git a/test/nam/constants.json b/test/nam/constants.json deleted file mode 100644 index fd2dd04..0000000 --- a/test/nam/constants.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "nam_surf_csts": { - "xz0sn": 0.003, - "xz0hsn": 0.0003 - } -} \ No newline at end of file diff --git a/test/nam/cv.json b/test/nam/cv.json deleted file mode 100644 index 311292a..0000000 --- a/test/nam/cv.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "NAM_DATA_ISBA": { - "XUNIF_CV(1)": 2.0E-5, - "XUNIF_CV(2)": 2.0E-5, - "XUNIF_CV(3)": 2.0E-5, - "XUNIF_CV(4)": 1.0E-5, - "XUNIF_CV(5)": 1.0E-5, - "XUNIF_CV(6)": 1.0E-5, - "XUNIF_CV(7)": 2.0E-5, - "XUNIF_CV(8)": 2.0E-5, - "XUNIF_CV(9)": 2.0E-5, - "XUNIF_CV(10)": 2.0E-5, - "XUNIF_CV(11)": 2.0E-5, - "XUNIF_CV(12)": 2.0E-5, - "XUNIF_CV(13)": 1.0E-5, - "XUNIF_CV(14)": 1.0E-5, - "XUNIF_CV(15)": 1.0E-5, - "XUNIF_CV(16)": 1.0E-5, - "XUNIF_CV(17)": 1.0E-5, - "XUNIF_CV(18)": 2.0E-5, - "XUNIF_CV(19)": 2.0E-5 - } -} diff --git a/test/nam/cv_sg.json b/test/nam/cv_sg.json deleted file mode 100644 index 0c6e0f5..0000000 --- a/test/nam/cv_sg.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "NAM_DATA_ISBA": { - "XUNIF_CV(1)": 2.0E-5, - "XUNIF_CV(2)": 2.0E-5, - "XUNIF_CV(3)": 2.0E-5, - "XUNIF_CV(4)": 1.0E-5, - "XUNIF_CV(5)": 1.0E-5, - "XUNIF_CV(6)": 1.0E-5, - "XUNIF_CV(7)": 1.0E-5, - "XUNIF_CV(8)": 1.0E-5, - "XUNIF_CV(9)": 1.0E-5, - "XUNIF_CV(10)": 1.0E-5, - "XUNIF_CV(11)": 1.0E-5, - "XUNIF_CV(12)": 1.0E-5, - "XUNIF_CV(13)": 2.0E-5, - "XUNIF_CV(14)": 2.0E-5, - "XUNIF_CV(15)": 2.0E-5, - "XUNIF_CV(16)": 2.0E-5, - "XUNIF_CV(17)": 2.0E-5, - "XUNIF_CV(18)": 2.0E-5, - "XUNIF_CV(19)": 1.0E-5, - "XUNIF_CV(20)": 2.0E-5 - } -} diff --git a/test/nam/flake.json b/test/nam/flake.json deleted file mode 100644 index ff3233a..0000000 --- a/test/nam/flake.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "nam_cover": { - "LRM_RIVER": true - }, - "nam_flaken": { - "lsediments": false - } -} \ No newline at end of file diff --git a/test/nam/gridpp.json b/test/nam/gridpp.json deleted file mode 100644 index 4e54e3a..0000000 --- a/test/nam/gridpp.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "surface_air_temperature_2m": { - "h": 
"200", - "d": "60000" - }, - "surface_relative_humidity_2m": { - "h": "200" - }, - "surface_snow_thickness": { - "h": "200", - "d": "60000" - } -} \ No newline at end of file diff --git a/test/nam/io.json b/test/nam/io.json deleted file mode 100644 index d954a12..0000000 --- a/test/nam/io.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "nam_io_offline": { - "lfagmap": true, - "lrestart": true, - "lrestart_2m": true, - "xtstep_surf": 60, - "xtstep_output": 3600.0, - "nhalo": 0, - "lset_forc_zs": false - }, - "nam_write_surf_atm": { - "lsplit_patch": false - } -} \ No newline at end of file diff --git a/test/nam/meb_settings.json b/test/nam/meb_settings.json deleted file mode 100644 index a156b6d..0000000 --- a/test/nam/meb_settings.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "NAM_SGH_ISBAn": { - "CRUNOFF": "DT92", - "CRAIN": "SGH", - "CHORT": "DEF", - "LSOC": true - }, - "NAM_MEB_ISBA":{ - "LMEB_PATCH(1)": false, - "LMEB_PATCH(2)": true, - "LMEB_LITTER": false - } -} \ No newline at end of file diff --git a/test/nam/offline.json b/test/nam/offline.json deleted file mode 100644 index 867d6e1..0000000 --- a/test/nam/offline.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "NAM_DIAG_SURFn": { - "N2M": 2, - "LSURF_BUDGET": true, - "LSURF_BUDGETC": true, - "LRESET_BUDGETC": true, - "LCOEF": true, - "LSURF_VARS": true - }, - "NAM_DIAG_SURF_ATMn": { - "LT2MMW": true - }, - "NAM_DIAG_ISBAn": { - "LPATCH_BUDGET": true, - "LPGD": true, - "LSURF_MISC_BUDGET": true - }, - "NAM_SSOn": { - "XFRACZ0": 15.0, - "XSOROT": 2500.0, - "XVOROT": 8.0, - "XCOROT": 500 - }, - "NAM_SEAFLUXn": { - "LPWG": false, - "LPRECIP": false, - "LPWEBB": false - }, - "NAM_ISBAn": { - "LCANOPY_DRAG": true, - "XCDRAG": 0.01 - }, - "NAM_SURF_ATM": { - "XCD_COEFF1": 10.0, - "XCD_COEFF2": 5.0, - "XCH_COEFF1": 15.0 - } -} \ No newline at end of file diff --git a/test/nam/prep.json b/test/nam/prep.json deleted file mode 100644 index b37d20e..0000000 --- a/test/nam/prep.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "nam_prep_isba": { - "lextrap_tg": true, - "lextrap_wg": true, - "lextrap_wgi": true, - "lextrap_sn": true - }, - "nam_diag_isban": { - "lpatch_budget": true - }, - "NAM_PREP_SEAFLUX": { - "LSEA_SBL": false - } -} \ No newline at end of file diff --git a/test/nam/prep_from_namelist_values.json b/test/nam/prep_from_namelist_values.json deleted file mode 100644 index e3b2ac1..0000000 --- a/test/nam/prep_from_namelist_values.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "nam_prep_isba": { - "xhug_surf": 0.2, - "xhug_root": 0.2, - "xhug_deep": 0.2, - "xtg_surf": 285.0, - "xtg_root": 288.0, - "xtg_deep": 292.0 - }, - "nam_prep_teb": { - "xti_bld": 285.0, - "xti_road": 285.0, - "xts_road": 285.0, - "xts_roof": 285.0, - "xts_wall": 285.0, - "xws_road": 0.0, - "xws_roof": 0.0 - }, - "NAM_PREP_SEAFLUX": { - "XSST_UNIF": 280, - "XSSS_UNIF": 35, - "XSIC_UNIF": 2 - }, - "NAM_PREP_WATFLUX": { - "XTS_WATER_UNIF": 280 - }, - "NAM_PREP_FLAKE": { - "XTS_UNIF": 280 - }, - "NAM_PREP_SEAICE_SICE": { - "XICE_TUNIF": 270, - "LINIT_FROM_SST": true, - "NUM_LAYERS": 4, - "XICE_THICKNESS": 0.5 - } -} \ No newline at end of file diff --git a/test/nam/prep_sice.json b/test/nam/prep_sice.json deleted file mode 100644 index 0a24007..0000000 --- a/test/nam/prep_sice.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "nam_prep_seaice_sice": { - "xice_thickness": 0.75, - "num_layers": 4 - } -} \ No newline at end of file diff --git a/test/nam/prep_snow.json b/test/nam/prep_snow.json deleted file mode 100644 index c62e355..0000000 --- a/test/nam/prep_snow.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - 
"nam_prep_isba_snow": { - "lswemax": true - } -} diff --git a/test/nam/rsmin.json b/test/nam/rsmin.json deleted file mode 100644 index afe64ab..0000000 --- a/test/nam/rsmin.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "NAM_DATA_ISBA": { - "XUNIF_RSMIN(1)": 40.0, - "XUNIF_RSMIN(2)": 40.0, - "XUNIF_RSMIN(3)": 40.0, - "XUNIF_RSMIN(4)": 150.0, - "XUNIF_RSMIN(5)": 150.0, - "XUNIF_RSMIN(6)": 175.0, - "XUNIF_RSMIN(7)": 40.0, - "XUNIF_RSMIN(8)": 120.0, - "XUNIF_RSMIN(9)": 120.0, - "XUNIF_RSMIN(10)": 40.0, - "XUNIF_RSMIN(11)": 130.0, - "XUNIF_RSMIN(12)": 40.0, - "XUNIF_RSMIN(13)": 150.0, - "XUNIF_RSMIN(14)": 150.0, - "XUNIF_RSMIN(15)": 150.0, - "XUNIF_RSMIN(16)": 150.0, - "XUNIF_RSMIN(17)": 150.0, - "XUNIF_RSMIN(18)": 40.0, - "XUNIF_RSMIN(19)": 150.0 - } -} \ No newline at end of file diff --git a/test/nam/rsmin_mod.json b/test/nam/rsmin_mod.json deleted file mode 100644 index 5571163..0000000 --- a/test/nam/rsmin_mod.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "NAM_DATA_ISBA": { - "XUNIF_RSMIN(4)": 150.0, - "XUNIF_RSMIN(5)": 150.0, - "XUNIF_RSMIN(7)": 40.0, - "XUNIF_RSMIN(8)": 120.0, - "XUNIF_RSMIN(13)": 150.0, - "XUNIF_RSMIN(14)": 150.0, - "XUNIF_RSMIN(15)": 150.0, - "XUNIF_RSMIN(16)": 150.0, - "XUNIF_RSMIN(17)": 150.0 - } -} \ No newline at end of file diff --git a/test/nam/rsmin_sg.json b/test/nam/rsmin_sg.json deleted file mode 100644 index 8a52a05..0000000 --- a/test/nam/rsmin_sg.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "NAM_DATA_ISBA": { - "XUNIF_RSMIN(1)": 40.0, - "XUNIF_RSMIN(2)": 40.0, - "XUNIF_RSMIN(3)": 40.0, - "XUNIF_RSMIN(4)": 150.0, - "XUNIF_RSMIN(5)": 150.0, - "XUNIF_RSMIN(6)": 150.0, - "XUNIF_RSMIN(7)": 150.0, - "XUNIF_RSMIN(8)": 175.0, - "XUNIF_RSMIN(9)": 150.0, - "XUNIF_RSMIN(10)": 150.0, - "XUNIF_RSMIN(11)": 150.0, - "XUNIF_RSMIN(12)": 150.0, - "XUNIF_RSMIN(13)": 40.0, - "XUNIF_RSMIN(14)": 40.0, - "XUNIF_RSMIN(15)": 120.0, - "XUNIF_RSMIN(16)": 40.0, - "XUNIF_RSMIN(17)": 40.0, - "XUNIF_RSMIN(18)": 120.0, - "XUNIF_RSMIN(19)": 150.0, - "XUNIF_RSMIN(20)": 40.0 - } -} \ No newline at end of file diff --git a/test/nam/rsmin_sg_mod.json b/test/nam/rsmin_sg_mod.json deleted file mode 100644 index 4dd723c..0000000 --- a/test/nam/rsmin_sg_mod.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "NAM_DATA_ISBA": { - "XUNIF_RSMIN(4)": 150.0, - "XUNIF_RSMIN(5)": 150.0, - "XUNIF_RSMIN(6)": 150.0, - "XUNIF_RSMIN(7)": 150.0, - "XUNIF_RSMIN(9)": 150.0, - "XUNIF_RSMIN(10)": 150.0, - "XUNIF_RSMIN(11)": 150.0, - "XUNIF_RSMIN(12)": 150.0, - "XUNIF_RSMIN(16)": 40.0, - "XUNIF_RSMIN(17)": 40.0, - "XUNIF_RSMIN(18)": 120.0 - } -} \ No newline at end of file diff --git a/test/nam/sea.json b/test/nam/sea.json deleted file mode 100644 index 96fc525..0000000 --- a/test/nam/sea.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "nam_seabathy": { - "xunif_seabathy": 0.0 - } -} diff --git a/test/nam/selected_output.json b/test/nam/selected_output.json deleted file mode 100644 index 6b23bf6..0000000 --- a/test/nam/selected_output.json +++ /dev/null @@ -1,131 +0,0 @@ -{ - "nam_write_diag_surfn": { - "lselect": true, - "cselect": [ - "FMU", - "FMV", - "T2M", - "T2MMIN", - "T2MMAX", - "Q2M", - "HU2M", - "HU2MMIN", - "HU2MMAX", - "ZON10M", - "MER10M", - "W10M", - "W10MMAX", - "TS", - "RI", - "RN", - "H", - "LE", - "SWD", - "SWU", - "LWD", - "LWU", - "GFLUX", - "Z0", - "Z0H", - "T2M_ISBA", - "T2MMIN_ISBA", - "T2MMAX_ISBA", - "Q2M_ISBA", - "MER10M_ISBA", - "ZON10M_ISBA", - "HU2M_ISBA", - "W10M_ISBA", - "W10MMAX_ISBA", - "PSNG_ISBA", - "PSNV_ISBA", - "PSN_ISBA", - "DSNOW_T_ISBA", - "TSWI_R_ISBA", - "TALB_ISBA", - "TS_ISBA", - "RNC_ISBA", - 
"HC_ISBA", - "LEC_ISBA", - "FMUC_ISBA", - "FMVC_ISBA", - "SWDC_ISBA", - "SWUC_ISBA", - "LWDC_ISBA", - "LWUC_ISBA", - "T2M_P", - "T2MMIN_P", - "T2MMAX_P", - "Q2M_P", - "HU2M_P", - "ZON10M_P", - "MER10M_P", - "W10M_P", - "TG1", - "TG2", - "WG1", - "WG2", - "WGI1", - "WGI2", - "RSN_VEG1", - "WSN_VEG1", - "ASN_VEG", - "PSN", - "LAI", - "RNC_P", - "HC_P", - "LEC_P", - "FMUC_P", - "FMVC_P", - "SWDC_P", - "SWUC_P", - "LWDC_P", - "LWUC_P", - "T2M_SEA", - "HU2M_SEA", - "Q2M_SEA", - "MER10M_SEA", - "ZON10M_SEA", - "W10M_SEA", - "W10MMAX_SEA", - "SST", - "SIC", - "TICE_01", - "TICE_02", - "TICE_03", - "TICE_04", - "T2M_WAT", - "Q2M_WAT", - "HU2M_WAT", - "ZON10M_WAT", - "MER10M_WAT", - "W10M_WAT", - "TS_WATER", - "T2M_TEB", - "Q2M_TEB", - "HU2M_TEB", - "ZON10M_TEB", - "MER10M_TEB", - "T2MMIN_TEB", - "T2MMAX_TEB", - "HU2MMIN_TEB", - "HU2MMAX_TEB", - "W10MMAX_TEB", - "TROAD1", - "T_SNOW", - "T_ICE", - "T_WML", - "T_MNW", - "T_BOT", - "H_ML", - "H_SNOW", - "H_ICE", - "WS_ROAD", - "WSN_RD1", - "VEG", - "EMIS", - "CD", - "CH", - "CE" - ] - } -} diff --git a/test/nam/sice.json b/test/nam/sice.json deleted file mode 100644 index d93a5b9..0000000 --- a/test/nam/sice.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "nam_seaice_sice": { - "lice_mass_balance": true, - "lice_has_snow": false, - "xocean_heat_flux": 2.0 - } -} \ No newline at end of file diff --git a/test/nam/soda.json b/test/nam/soda.json deleted file mode 100644 index 60d7c80..0000000 --- a/test/nam/soda.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "NAM_ASSIM": { - "LECSST": true, - "NPRINTLEV": 1, - "LRELCLIMSNOW": false, - "LEXTRAP_NATURE": false, - "LEXTRAP_SNOW": false - } -} \ No newline at end of file diff --git a/test/nam/soda_isba_oi.json b/test/nam/soda_isba_oi.json deleted file mode 100644 index f503eb8..0000000 --- a/test/nam/soda_isba_oi.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "NAM_ASSIM": { - "LAROME": true - }, - "NAM_NACVEG": { - "NECHGU": 3, - "XRCLIMCA": 0.0, - "XRCLISST": 0.05, - "XSIGH2MO": 0.10, - "XSIGT2MO": 1.0, - "LOBS2M": true, - "LOBSWG": false - } -} \ No newline at end of file diff --git a/test/nam/treedrag.json b/test/nam/treedrag.json deleted file mode 100644 index 0bc0a48..0000000 --- a/test/nam/treedrag.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "nam_treedrag": { - "xz0_limit": 1.6 - } -} diff --git a/test/settings/conf_proj_test.json b/test/settings/conf_proj_test.json deleted file mode 100644 index 1894d05..0000000 --- a/test/settings/conf_proj_test.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "nam_pgd_grid": { - "cgrid": "CONF PROJ" - }, - "nam_conf_proj": { - "xlat0": 59.5, - "xlon0": 9 - }, - "nam_conf_proj_grid": { - "ilone": 1, - "ilate": 1, - "xlatcen": 60, - "xloncen": 10, - "nimax": 9, - "njmax": 19, - "xdx": 10000.0, - "xdy": 10000.0 - } -} diff --git a/test/settings/domains.json b/test/settings/domains.json deleted file mode 100644 index 1caa8ff..0000000 --- a/test/settings/domains.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "CONF_PROJ_TEST": { - "nam_pgd_grid": { - "cgrid": "CONF PROJ" - }, - "nam_conf_proj": { - "xlat0": 59.5, - "xlon0": 9 - }, - "nam_conf_proj_grid": { - "ilone": 1, - "ilate": 1, - "xlatcen": 60, - "xloncen": 10, - "nimax": 9, - "njmax": 19, - "xdx": 10000.0, - "xdy": 10000.0 - } - }, - "LONLAT_REG_TEST": { - "nam_pgd_grid": { - "cgrid": "LONLAT REG" - }, - "nam_lonlat_reg": { - "xlonmin": 10, - "xlonmax": 11, - "xlatmin": 60, - "xlatmax": 61, - "nlon": 11, - "nlat": 11 - } - }, - "LONLATVAL_TEST": { - "nam_pgd_grid": { - "cgrid": "LONLATVAL" - }, - "nam_lonlatval": { - "xx": [ - 10.0, - 11.0 - ], - "xy": [ 
- 61.0, - 62.0 - ], - "xdx": [ - 0.3, - 0.3 - ], - "xdy": [ - 0.3, - 0.3 - ] - } - }, - "IGN_TEST": { - "nam_pgd_grid": { - "cgrid": "IGN" - }, - "nam_ign": { - "clambert": 7, - "npoints": 2, - "xx": [ - 100000, 110000 - ], - "xy":[ - 100000, 110000 - ], - "xdx": [ - 1000, 1000 - ], - "xdy": [ - 1000,1000 - ], - "xx_llcorner": 10000, - "xy_llcorner": 10000, - "xcellsize": 1000, - "ncols": 10, - "nrows": 10 - } - }, - "CARTESIAN_TEST": { - "nam_pgd_grid": { - "cgrid": "CARTESIAN" - }, - "nam_cartesian": { - "xlat0": 0, - "xlon0": 0, - "nimax": 10, - "njmax": 10, - "xdx": 1000, - "xdy": 1000 - } - } -} diff --git a/test/settings/hm_env.json b/test/settings/hm_env.json deleted file mode 100644 index 3d3be2b..0000000 --- a/test/settings/hm_env.json +++ /dev/null @@ -1,229 +0,0 @@ -{ - "NPATCH": "2", - "ARCHIVE_ROOT_FG": "/firstguess", - "LSPBDC": "no", - "COMPILE_DABYFA": "no", - "ANASURF": "CANARI_OI_MAIN", - "TSPAN": "5400", - "BDDIR": "/archive/ECMWF/@YYYY@/@MM@/@DD@/@HH@", - "MAIL_ON_ABORT": "", - "COMPILE_ENKF": "no", - "ARCH": "linuxgfortran", - "FESTAT": "no", - "SLAFDIFF": "0", - "NLEV": "60", - "LETKF_LAG": "no", - "NNCV": "1,1,1,1", - "XCH_COEFF1": "15.0", - "NNCO": "1,1,0,0,1", - "STREAM": "", - "HH_LIST": "00-21:3", - "SPPT": "no", - "ARCHIVE_ROOT": "/archive", - "LSMIXBC": "yes", - "ARSTRATEGY": "climate:fg:verif:odb_stuff: [an|fc]_fa:pp_gr:fldver", - "TAU_SPP": "21600.", - "DISPLAY": ":1", - "XCLIP_RATIO_SDT": "5.0", - "ECFSGROUP": "hirald", - "EPERT_MODE": "after", - "ENS_BD_CLUSTER": "no", - "OBDIR": "/archive/observations/@YYYY@/@MM@/@DD@/@HH@", - "FCINT_FG": "03", - "SFXSELTIMES": "0-540:15", - "AD_TEST": "yes", - "TSTEP4D": "120,120", - "VLEV": "65", - "PERTSURF": "none", - "SCALE_PERT": "yes", - "OBSMONITOR": "obstat", - "LOCAL_DEFINITION_TEMPLATES": "LOCAL_DEFINITION_TEMPLATES", - "LUNBC": "yes", - "PERTDIA_BDINT": "6", - "GRID_TYPE": "CUSTOM", - "NOUTERLOOP": "2", - "XZ0SN": "0.003", - "MSG_PATH": "/CLOUDS/", - "INT_SINI_FILE": "//SURFXINI.fa", - "ALARO_VERSION": "0", - "CNMEXP": "HARM", - "ANAATMO": "3DVAR", - "SWRITUPTIMES": "0-540:60", - "CSNOW": "D95", - "SURFEX_OFFLINE_BINARIES": "no", - "JB_REF_DOMAIN": "METCOOP25B", - "FREQ_RESET_TEMP": "3", - "TOPO_SOURCE": "gmted2010", - "TAU_SDT": "28800", - "CLIMDIR": "/climate/METCOOP25D", - "EXP": "HM2PYSURFEX", - "IO_SERVER": "yes", - "ARCHIVE_ENSMBR": "", - "BUFRTAB_DIR": "", - "RSMIN_CONIFEROUS_FACTOR": "1.44", - "BDSTRATEGY": "simulate_metcoop", - "LSPG_SDT": ".FALSE.", - "DFI": "none", - "ENSINIPERT": "bnd", - "EXTRARCH": "/archive/extract", - "SFXWFTIMES": "0-540:60", - "VERT_DISC": "vfd", - "XCD_COEFF1": "10.0", - "XCD_COEFF2": "5.0", - "XGRASS_H_DNM": "3.0", - "LDB_VERSION": "3.0", - "DESKTOP_SESSION": "ubuntu", - "PWRITUPTIMES": "0-09:15", - "USE_MSG": "yes", - "DOMAIN": "METCOOP25S", - "CREATE_CLIMATE": "yes", - "HGT_QS": "yes", - "FP_PRECISION": "double", - "TAUS": "5400", - "MODIFY_LAKES": "T", - "LGRADSP": "no", - "ECFSLOC": "ec", - "RSMIN_C3_FACTOR": "1.5", - "CH_RES_SPEC": "yes", - "ILRES": "2,2", - "XRIMAX": "0.0", - "SPP": "no", - "MAKEODB2": "no", - "GRIB_API": "GRIB_API", - "INT_BDFILE": "//ELSCFHARMALBC@NNN@", - "BDINT": "1", - "RSMIN_DECIDUOUS_FACTOR": "1.13", - "BINDIR": "/bin", - "CV_HIGHVEG": "1.0E-5", - "USE_REARCHIVE_EPS_EC_OPER": "no", - "XDG_SESSION_TYPE": "x11", - "RSMIN_C4_FACTOR": "1.13", - "CROUGH": "NONE", - "SLAFLAG": "0", - "MULTITASK": "yes", - "SPGADTMIN_SDT": "0.15", - "MASS_FLUX_SCHEME": "edmfm", - "HOST_SURFEX": "no", - "FULLFAFTIMES": "0-540:15", - "TFLAG_FG": "h", - "CISBA": "3-L", - 
"PERTATMO": "none", - "XCGMAX": "2.0E-5", - "COMPCENTRE": "MET", - "SURFEX_LAKES": "FLAKE", - "FLDEXTR": "yes", - "REARCHIVE_EPS_EC_OPER": "no", - "NBDMAX": "4", - "SST_SOURCES": "IFS NEMO", - "CAERO": "tegen", - "CFORCING_FILETYPE": "NETCDF", - "GRIB_API_LIB": "GRIB_API_LIB", - "FLDEXTR_TASKS": "4", - "BUILD": "yes", - "ECOCLIMAP_VERSION": "SG", - "SURFEX_SEA_ICE": "sice", - "XLCOR_SPP": "1000000.", - "ENSCTL": "", - "PFFULLWFTIMES": "-1", - "LL_LIST": "03", - "HOST_MODEL": "ifs", - "XSCALE_H_TREE": "0.658", - "XCSMAX": "2.0E-4", - "SPGADTMIN_SPP": "0.15", - "AUXLIBS": "AUXLIBS", - "TEND_DIAG": "no", - "ODB_VERSION": "CY33R1.007", - "WRK": "/", - "EREF": "35000.", - "OBSEXTR": "yes", - "HWRITUPTIMES": "0-540:15", - "CONT_ON_FAILURE": "0", - "TSTEPTRAJ": "600", - "XZ0HSN": "0.0003", - "SFXSWFTIMES": "-1", - "BDLIB": "ECMWF", - "QT_IM_MODULE": "ibus", - "RUNNING_MODE": "research", - "SIMULATION_TYPE": "nwp", - "CONVERTFA": "yes", - "XLCOR_SDT": "2000000", - "TL_TEST": "yes", - "RCR_POSTP": "no", - "FLDVER": "no", - "ARCHIVE_ECMWF": "yes", - "JB_INTERPOL": "yes", - "SDEV_SDT": "0.20", - "TESTBED_CASES": "1", - "LISBA_CANOPY": ".FALSE.", - "SLAFK": "1.0", - "CV_LOWVEG": "2.0E-5", - "SOIL_TEXTURE_VERSION": "SOILGRID", - "MEPS_VERSION": "test", - "Q_IN_SP": "no", - "SHLVL": "1", - "MAKEGRIB_LISTENERS": "1", - "GRIB_API_INCLUDE": "GRIB_API_INCLUDE", - "ECFS_EPS_EC_BD_PATH": "ECFS_EPS_EC_BD_PATH", - "ODB_DIR": "/disk1/odb", - "SURFEX_LSELECT": "yes", - "PHYSICS": "arome", - "ARCHIVE_FORMAT": "GRIB2", - "VERITIMES": "00-540:60", - "HARATU": "yes", - "BDCLIM": "/ECMWF/climate", - "SINGLEOBS": "no", - "IO_SERVER_BD": "yes", - "SPGADTMAX_SDT": "3.0", - "FORCE2": "no", - "FORCE1": "no", - "CHKEVO": "no", - "SDEV_SPP": "0.2", - "XALLEN_TERM": "2.5", - "DYNAMICS": "nh", - "ECMWF_LOCAL_TABLE_PATH": "ECMWF_LOCAL_TABLE_PATH", - "TESTBED_LIST": "TESTBED_LIST", - "OBSMON_SYNC": "no", - "LETKF_3DSCREEN": "yes", - "TFLAG": "min", - "FLDVER_HOURS": "06 12 18 24 30 36 42 48 54 60 66", - "ANASURF_MODE": "before", - "AUGMENT_CV": "NO", - "ENSMSEL": "", - "HYBRID": "no", - "FREQ_RESET_GUST": "1", - "NCNAMES": "nwp", - "STATNW": "yes", - "ENSBDMBR": "", - "POSTP": "inline", - "ECOCLIMAP_PARAM_BINDIR": "/climate", - "SPGADTMAX_SPP": "3.0", - "SPGQ_SDT": "0.5", - "ANASURF_OI_COEFF": "POLYNOMES_ISBA_MF6", - "VFLDEXP": "HM2PYSURFEX", - "AI_ACCUMULATION_HOURS": "720", - "TASK_LIMIT": "-1", - "EZONE": "1", - "GSIZE": "10000.0", - "LAT0": "59.5", - "LATC": "60.0", - "LON0": "9.0", - "LONC": "10.0", - "NDGUXG": "19", - "NDLUXG": "9", - "NLAT": "20", - "NLON": "10", - "NMSMAX": "-1", - "NNOEXTZX": "0", - "NNOEXTZY": "0", - "NSMAX": "-1", - "SINLAT0": "-1", - "TSTEP": "90", - "FCINT": "3", - "TEFRCL": "3600", - "NXGSTPERIOD": "3600", - "MODEL": "MASTERODB", - "ENSMSELX": "-1", - "ENSMBR": "-1", - "LL": "03", - "METER_UNIT": "hhmm" -} diff --git a/test/settings/nc.json b/test/settings/nc.json deleted file mode 100644 index ef80ccd..0000000 --- a/test/settings/nc.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "nam_io_offline": { - "csurf_filetype": "NC", - "cpgdfile": "PGD_TEST", - "cprepfile": "PREP_TEST", - "csurffile": "SURFOUT_TEST" - } -} diff --git a/test/settings/nc.toml b/test/settings/nc.toml deleted file mode 100644 index cd7f596..0000000 --- a/test/settings/nc.toml +++ /dev/null @@ -1,3 +0,0 @@ -[SURFEX.IO] -CSURF_FILETYPE = "NC" # IO settings NC/FA/ASCII. 
Inline runs use FA - diff --git a/test/settings/test_system.json b/test/settings/test_system.json deleted file mode 100644 index 8eb4293..0000000 --- a/test/settings/test_system.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "climdir": "climdir", - "ecoclimap_bin_dir": "ecoclimap_bin_dir", - "assim_dir": "assim", - "first_guess_dir": "testdata/@YYYY@@MM@@DD@@HH@/" -} diff --git a/test/test_converter.py b/test/test_converter.py deleted file mode 100644 index dc523b2..0000000 --- a/test/test_converter.py +++ /dev/null @@ -1,123 +0,0 @@ -"""Test converter.""" -import unittest -import logging -from datetime import datetime -import json -import numpy as np -import surfex - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class ConverterTest(unittest.TestCase): - """Test converter.""" - - testdata = "testdata/" - with open("test/settings/domains.json", mode="r", encoding="utf-8") as file_handler: - domains = json.load(file_handler) - domain = surfex.geo.set_domain(domains, "CONF_PROJ_TEST") - my_geo = surfex.geo.get_geo_object(domain) - - fileformat = "surfex" - var = "TG1P1" - converter = "none" - config = { - "surfex": { - "fcint": 10800, - "file_inc": 3600, - "offset": 0 - }, - "TG1P1": { - "surfex": { - "converter": { - "none": { - "varname": "TG1P1", - "filepattern": testdata + "/PREP_CONF_PROJ.nc" - } - } - } - } - } - - logging.debug("var=%s fileformat=%s", var, fileformat) - defs = config[fileformat] - converter_conf = config[var][fileformat]["converter"] - - validtime = datetime(year=2020, month=2, day=1, hour=6) - cache = surfex.Cache(7200) - converter = surfex.read.Converter(converter, validtime, defs, converter_conf, fileformat) - field = surfex.read.ConvertedInput(my_geo, var, converter).read_time_step(validtime, cache) - field = np.reshape(field, [my_geo.nlons, my_geo.nlats]) - - with open("test/settings/domains.json", mode="r", encoding="utf-8") as file_handler: - domains = json.load(file_handler) - domain = surfex.geo.set_domain(domains, "CONF_PROJ_TEST") - my_geo = surfex.geo.get_geo_object(domain) - - fileformat = "surfex" - var = "FRAC_NATURE" - converter = "none" - config = { - "surfex": { - "fcint": 10800, - "file_inc": 3600, - "offset": 0 - }, - "FRAC_NATURE": { - "surfex": { - "converter": { - "none": { - "varname": "FRAC_NATURE", - "filepattern": testdata + "/PGD_CONF_PROJ.txt" - } - } - } - } - } - - logging.debug("var=%s fileformat=%s", var, fileformat) - defs = config[fileformat] - converter_conf = config[var][fileformat]["converter"] - - validtime = datetime(year=2020, month=2, day=1, hour=6) - cache = surfex.Cache(7200) - converter = surfex.read.Converter(converter, validtime, defs, converter_conf, fileformat) - surfex.read.ConvertedInput(my_geo, var, converter).read_time_step(validtime, cache) - field = np.reshape(field, [my_geo.nlons, my_geo.nlats]) - - with open("test/settings/domains.json", mode="r", encoding="utf-8") as file_handler: - domains = json.load(file_handler) - domain = surfex.geo.set_domain(domains, "CONF_PROJ_TEST") - my_geo = surfex.geo.get_geo_object(domain) - - fileformat = "netcdf" - var = "T2M" - converter = "none" - config = { - "netcdf": { - "fcint": 10800, - "file_inc": 3600, - "offset": 0 - }, - "T2M": { - "netcdf": { - "converter": { - "none": { - "name": "air_temperature_2m", - "filepattern": testdata + "/meps_det_2_5km_20201113T03Z.nc" - } - } - } - } - } - - logging.debug("var=%s fileformat=%s", var, fileformat) - defs = config[fileformat] - converter_conf = 
config[var][fileformat]["converter"] - - validtime = datetime(year=2020, month=11, day=13, hour=3) - cache = surfex.Cache(7200) - converter = surfex.read.Converter(converter, validtime, defs, converter_conf, fileformat) - surfex.read.ConvertedInput(my_geo, var, converter).read_time_step(validtime, cache) - field = np.reshape(field, [my_geo.nlons, my_geo.nlats]) diff --git a/test/test_firstguess4oi.py b/test/test_firstguess4oi.py deleted file mode 100644 index 9eb872e..0000000 --- a/test/test_firstguess4oi.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Test first guess for OI.""" -import unittest -import logging -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class FirstGuess4OiTest(unittest.TestCase): - """Test first guess for OI.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - - def test_firstguess4oi_grib1(self): - """Test first guess from grib1.""" - argv = [ - "-c", "surfex/cfg/first_guess.yml", - "-i", self.testdata + "/fc2020111303+0003grib1", - "-if", "grib1", - "-dtg", "2020111306", - "-d", "test/settings/conf_proj_test.json", - "--laf_converter", "none", - "--debug", - "-o", "unittest_FirstGuess4gridpp_grib1.nc", - "air_temperature_2m", - "relative_humidity_2m", - "surface_snow_thickness" - ] - kwargs = surfex.parse_args_first_guess_for_oi(argv) - surfex.first_guess_for_oi(**kwargs) - - def test_firstguess4oi_grib2(self): - """Test first guess from grib2.""" - argv = [ - "-c", "surfex/cfg/first_guess.yml", - "-i", self.testdata + "/fc2020111303+0003grib2", - "-if", "grib2", - "-dtg", "2020111306", - "-d", "test/settings/conf_proj_test.json", - "--laf_converter", "none", - "--debug", - "-o", "unittest_FirstGuess4gridpp_grib2.nc", - "air_temperature_2m", - "relative_humidity_2m", - "surface_snow_thickness" - ] - kwargs = surfex.parse_args_first_guess_for_oi(argv) - surfex.first_guess_for_oi(**kwargs) diff --git a/test/test_forcing.py b/test/test_forcing.py deleted file mode 100644 index 5a3f0d5..0000000 --- a/test/test_forcing.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Test forcing.""" -import unittest -import logging -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class ForcingTest(unittest.TestCase): - """Test forcing.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - - def test_forcing_nc(self): - """Test forcing from netcdf files.""" - argv = ["2020111303", "2020111306", - "-d", "test/settings/conf_proj_test.json", - "-p", self.testdata + "/meps_det_2_5km_@YYYY@@MM@@DD@T@HH@Z.nc", - "-i", "netcdf", - "--zref", "ml", - "--uref", "ml", - "--co2", "constant", - "--sca_sw", "constant", - "--zval", "constant", - "--zsoro_converter", "phi2m", - "--zval", "constant", - "--uval", "constant", - "-of", "unittest_FORCING_netcdf.nc", - "--debug" - ] - kwargs = surfex.parse_args_create_forcing(argv) - options, var_objs, att_objs = surfex.forcing.set_forcing_config(**kwargs) - surfex.forcing.run_time_loop(options, var_objs, att_objs) - - def test_forcing_grib1(self): - """Test forcing from netcdf grib1 files.""" - argv = ["2020111303", "2020111306", - "-d", "test/settings/conf_proj_test.json", - "-p", self.testdata + "/fc@YYYY@@MM@@DD@@HH@+@LLLL@grib1", - "-i", "grib1", - "--zref", "ml", - "--uref", "ml", - "--co2", "constant", - "--sca_sw", "constant", - "--zval", "constant", - "--zsoro_converter", "phi2m", - "--zval", "constant", - "--uval", "constant", - "-of", 
"unittest_FORCING_grib1.nc", - "--debug" - ] - kwargs = surfex.parse_args_create_forcing(argv) - options, var_objs, att_objs = surfex.forcing.set_forcing_config(**kwargs) - surfex.forcing.run_time_loop(options, var_objs, att_objs) - - def test_forcing_grib2(self): - """Test forcing from grib2 files.""" - argv = ["2020111303", "2020111306", - "-d", "test/settings/conf_proj_test.json", - "-p", self.testdata + "/fc@YYYY@@MM@@DD@@HH@+@LLLL@grib2", - "-i", "grib2", - "--zref", "ml", - "--uref", "ml", - "--co2", "constant", - "--sca_sw", "constant", - "--zval", "constant", - "--zsoro_converter", "phi2m", - "--zval", "constant", - "--uval", "constant", - "-of", "unittest_FORCING_grib2.nc", - "--debug" - ] - kwargs = surfex.parse_args_create_forcing(argv) - options, var_objs, att_objs = surfex.forcing.set_forcing_config(**kwargs) - surfex.forcing.run_time_loop(options, var_objs, att_objs) diff --git a/test/test_geo.py b/test/test_geo.py deleted file mode 100644 index de042d3..0000000 --- a/test/test_geo.py +++ /dev/null @@ -1,275 +0,0 @@ -"""Test geometry.""" -import unittest -import json -import logging -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class GeoTest(unittest.TestCase): - """Test geometry.""" - - def setUp(self): - """Set up.""" - self.domain_conf_proj = { - "nam_conf_proj_grid": { - "xlatcen": 60, - "ilone": 1, - "xdx": 2500.0, - "njmax": 2, - "xloncen": 10, - "xdy": 2500.0, - "nimax": 3, - "ilate": 2 - }, - "nam_pgd_grid": { - "cgrid": "CONF PROJ" - }, - "nam_conf_proj": { - "xlon0": 0, - "xlat0": 50, - } - } - - def test_geo_not_defined(self): - """Test geometry not defined.""" - domain = {"nam_pgd_grid": {"cgrid": "not_existing"}} - with self.assertRaises(NotImplementedError): - surfex.geo.get_geo_object(domain) - - def test_get_geo_obj(self): - """Test get geometry object.""" - domain = {"not_existing": {"some_key": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.get_geo_object(domain) - - domain = {"nam_pgd_grid": {"not_existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.get_geo_object(domain) - - def test_geo_conf_proj(self): - """Test conf proj geometry.""" - my_geo = surfex.geo.get_geo_object(self.domain_conf_proj) - - json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} - my_settings = surfex.BaseNamelist.ascii2nml(json_settings) - my_geo.update_namelist(my_settings) - self.assertEqual(self.domain_conf_proj["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - print(my_geo.identifier()) - - new_domain = {"not_existing": {"not_existing": "some_value"}, - "nam_conf_proj_grid": self.domain_conf_proj["nam_conf_proj_grid"]} - with self.assertRaises(KeyError): - surfex.geo.ConfProj(new_domain) - - new_domain = {"not_existing": {"not_existing": "some_value"}, - "nam_conf_proj": self.domain_conf_proj["nam_conf_proj"]} - with self.assertRaises(KeyError): - surfex.geo.ConfProj(new_domain) - - new_domain = {"nam_conf_proj": {"not_existing": "some_value"}, - "nam_conf_proj_grid": self.domain_conf_proj["nam_conf_proj_grid"]} - with self.assertRaises(KeyError): - surfex.geo.ConfProj(new_domain) - - new_domain = {"nam_conf_proj_grid": {"not_existing": "some_value"}, - "nam_conf_proj": self.domain_conf_proj["nam_conf_proj"]} - with self.assertRaises(KeyError): - surfex.geo.ConfProj(new_domain) - - self.assertAlmostEqual(my_geo.lons[0][0], 9.95323219) - self.assertAlmostEqual(my_geo.lats[0][0], 59.99198266) - self.assertAlmostEqual(my_geo.xxx[0], 561109.9103510105) 
- self.assertAlmostEqual(my_geo.yyy[0], 1154504.0851275164) - - def test_geo_lonlat_reg(self): - """Test lonlat geometry.""" - domain = { - "nam_pgd_grid": { - "cgrid": "LONLAT REG" - }, - "nam_lonlat_reg": { - "xlonmin": 10, - "xlonmax": 11, - "xlatmin": 60, - "xlatmax": 61, - "nlon": 11, - "nlat": 11 - } - } - my_geo = surfex.get_geo_object(domain) - json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} - my_settings = surfex.BaseNamelist.ascii2nml(json_settings) - my_settings = my_geo.update_namelist(my_settings) - self.assertEqual(domain["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - self.assertEqual(my_settings["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - - domain = { - "nam_pgd_grid": { - "cgrid": "LONLAT REG" - }, - "nam_lonlat_reg": { - "xlonmin": 10, - "xlonmax": 11, - "xlatmin": 60, - "xlatmax": 61, - "nlon": 0, - "nlat": 11 - } - } - with self.assertRaises(ZeroDivisionError): - surfex.geo.LonLatReg(domain) - - domain = {"not_existing": {"existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.LonLatReg(domain) - - domain = {"nam_lonlat_reg": {"not_existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.LonLatReg(domain) - - def test_geo_lonlatval(self): - """Test lonlatval geometry.""" - domain = { - "nam_pgd_grid": { - "cgrid": "LONLATVAL" - }, - "nam_lonlatval": { - "xx": [10.0, 11.0], - "xy": [60.0, 61.0], - "xdx": [0.1, 0.1], - "xdy": [0.1, 0.1] - } - } - my_geo = surfex.get_geo_object(domain) - json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} - my_settings = surfex.BaseNamelist.ascii2nml(json_settings) - my_settings = my_geo.update_namelist(my_settings) - self.assertEqual(domain["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - self.assertEqual(my_settings["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - - domain = {"not_existing": {"existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.LonLatVal(domain) - - domain = {"nam_lonlatval": {"not_existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.LonLatVal(domain) - - def test_geo_cartesian(self): - """Test cartesian geometry.""" - domain = { - "nam_pgd_grid": { - "cgrid": "CARTESIAN" - }, - "nam_cartesian": { - "xlat0": 0, - "xlon0": 0, - "nimax": 11, - "njmax": 21, - "xdx": 0.1, - "xdy": 0.05 - } - } - my_geo = surfex.get_geo_object(domain) - json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} - my_settings = surfex.BaseNamelist.ascii2nml(json_settings) - my_settings = my_geo.update_namelist(my_settings) - self.assertEqual(domain["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - self.assertEqual(my_settings["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - - domain = {"not_existing": {"existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.Cartesian(domain) - - domain = {"nam_cartesian": {"not_existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.Cartesian(domain) - - def test_geo_ign(self): - """Test ign geometry.""" - domain = { - "nam_pgd_grid": { - "cgrid": "IGN" - }, - "nam_ign": { - "clambert": 7, - "npoints": 3, - "xx": [11000, 13000, 11000], - "xy": [21000, 21000, 23000], - "xdx": [1000, 1000, 1000], - "xdy": [1000, 1000, 1000], - "xx_llcorner": 0, - "xy_llcorner": 0, - "xcellsize": 1000, - "ncols": 1, - "nrows": 1 - } - } - my_geo = surfex.geo.IGN(domain, recreate=True) - json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} - my_settings = surfex.BaseNamelist.ascii2nml(json_settings) - my_settings = my_geo.update_namelist(my_settings) - self.assertEqual(domain["nam_pgd_grid"]["cgrid"], my_geo.cgrid) 
- self.assertEqual(my_settings["nam_pgd_grid"]["cgrid"], my_geo.cgrid) - - my_geo1 = surfex.geo.IGN(domain, recreate=False) - my_geo2 = surfex.geo.IGN(domain, recreate=True) - self.assertTrue(my_geo1.is_identical(my_geo2)) - - domain = { - "nam_pgd_grid": { - "cgrid": "IGN" - }, - "nam_ign": { - "clambert": -99, - "npoints": 0, - "xx": 11, - "xy": 21, - "xdx": 1000, - "xdy": 1000, - "xx_llcorner": 0, - "xy_llcorner": 0, - "xcellsize": 1000, - "ncols": 1, - "nrows": 1 - } - } - with self.assertRaises(NotImplementedError): - surfex.get_geo_object(domain) - - domain = {"not_existing": {"existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.IGN(domain) - - domain = {"nam_ign": {"not_existing": "some_value"}} - with self.assertRaises(KeyError): - surfex.geo.IGN(domain) - - def test_set_domain(self): - """Test set domain.""" - domains = {"NAME": {"nam_pgd_grid": {"cgrid": "some_projection"}}} - domain = surfex.geo.set_domain(domains, "NAME") - self.assertEqual(domains["NAME"]["nam_pgd_grid"]["cgrid"], domain["nam_pgd_grid"]["cgrid"]) - - with self.assertRaises(Exception): - surfex.geo.set_domain(domains, "not_existing") - - domains = ["NAME"] - with self.assertRaises(Exception): - surfex.geo.set_domain(domains, "NAME") - - with open("test/settings/domains.json", mode="r", encoding="utf-8") as file_handler: - domains = json.load(file_handler) - domain_name = "CONF_PROJ_TEST" - domain_json = surfex.geo.set_domain(domains, domain_name) - with open("test/settings/conf_proj_test.json", mode="r", encoding="utf-8") as file_handler: - saved_domain = json.load(file_handler) - self.assertDictEqual(domain_json, saved_domain) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/test_grib.py b/test/test_grib.py deleted file mode 100644 index a468c6c..0000000 --- a/test/test_grib.py +++ /dev/null @@ -1,96 +0,0 @@ -"""Test grib.""" -import unittest -import logging -from datetime import datetime -import json -import os -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class GribTest(unittest.TestCase): - """Test grib.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - self.rootdir = os.path.abspath(os.curdir) - with open("test/settings/domains.json", mode="r", encoding="utf-8") as file_handler: - domains = json.load(file_handler) - domain = surfex.geo.set_domain(domains, "CONF_PROJ_TEST") - self.geo = surfex.geo.get_geo_object(domain) - self.converter = "none" - self.config = { - "grib1": { - "fcint": 10800, - "file_inc": 3600, - "offset": 0 - }, - "grib2": { - "fcint": 10800, - "file_inc": 3600, - "offset": 0 - }, - "t2m": { - "grib1": { - "converter": { - "none": { - "parameter": 11, - "type": 105, - "level": 2, - "tri": 0, - "filepattern": self.testdata + "/fc2020111303+0003grib1" - } - } - } - }, - "t1": { - "grib2": { - "converter": { - "none": { - "discipline": 0, - "parameterCategory": 0, - "parameterNumber": 0, - "levelType": 103, - "typeOfStatisticalProcessing": -1, - "level": 2, - "filepattern": self.testdata + "/fc2020111303+0003grib2" - } - } - } - }, - } - - def test_grib1_from_converter(self): - """Test grib1 from converter.""" - # Grib 1 - fileformat = "grib1" - var = "t2m" - print(var, fileformat) - defs = self.config[fileformat] - converter_conf = self.config[var][fileformat]["converter"] - - validtime = datetime(year=2020, month=3, day=30, hour=6) - cache = surfex.Cache(7200) - initial_basetime = validtime - converter = 
surfex.read.Converter(self.converter, initial_basetime, defs, converter_conf, - fileformat) - surfex.read.ConvertedInput(self.geo, var, converter).read_time_step(validtime, cache) - - def test_grib2_from_converter(self): - """Test grib2 from converter.""" - fileformat = "grib2" - var = "t1" - print(var, fileformat) - defs = self.config[fileformat] - converter_conf = self.config[var][fileformat]["converter"] - - validtime = datetime(year=2020, month=3, day=30, hour=6) - cache = surfex.Cache(7200) - initial_basetime = validtime - converter = surfex.read.Converter(self.converter, initial_basetime, defs, converter_conf, - fileformat) - surfex.read.ConvertedInput(self.geo, var, converter).read_time_step(validtime, cache) diff --git a/test/test_gridpp.py b/test/test_gridpp.py deleted file mode 100644 index 7ed039e..0000000 --- a/test/test_gridpp.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Test gridpp.""" -import unittest -import logging -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class GridppTest(unittest.TestCase): - """Test gridpp.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - - def test_gridpp_t2m(self): - """Test gridpp for t2m.""" - argv = ["-i", self.testdata + "/unittest_FirstGuess4gridpp_grib2.nc", - "-o", "unittest_an_t2m.nc", - "-obs", self.testdata + "/unittest_qc_t2m.json", - "-hor", "30000", - "-vert", "300", - "-v", "air_temperature_2m", - "--elevGradient", "-0.0065" - ] - kwargs = surfex.parse_args_gridpp(argv) - print(kwargs) - surfex.run_gridpp(**kwargs) - - def test_gridpp_rh2m(self): - """Test gridpp for rh2m.""" - argv = ["-i", self.testdata + "/unittest_FirstGuess4gridpp_grib1.nc", - "-o", "unittest_an_rh2m.nc", - "-obs", self.testdata + "/unittest_qc_rh2m.json", - "-hor", "30000", - "-vert", "600", - "-v", "relative_humidity_2m", - "--elevGradient", "0" - ] - kwargs = surfex.parse_args_gridpp(argv) - print(kwargs) - surfex.run_gridpp(**kwargs) - - def test_gridpp_sd(self): - """Test gridpp for sd.""" - argv = ["-i", self.testdata + "/unittest_FirstGuess4gridpp_grib2.nc", - "-o", "unittest_an_sd.nc", - "-obs", self.testdata + "/unittest_qc_sd.json", - "-hor", "60000", - "-vert", "500", - "-v", "surface_snow_thickness", - "--elevGradient", "0" - ] - kwargs = surfex.parse_args_gridpp(argv) - print(kwargs) - surfex.run_gridpp(**kwargs) diff --git a/test/test_hm2pysurfex.py b/test/test_hm2pysurfex.py deleted file mode 100644 index bb264e4..0000000 --- a/test/test_hm2pysurfex.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Harmonie environment to pysurfex.""" -import unittest -import logging -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class Hm2PysurfexTest(unittest.TestCase): - """Harmonie environment to pysurfex.""" - - @staticmethod - def test_hm2pysurfex_client(): - """Test harmonie to pysurfex client.""" - argv = [ - "-c", "surfex/cfg/config_exp_surfex.toml", - "-o", "unittest_config_from_hm.toml", - "-e", "test/settings/hm_env.json" - ] - kwargs = surfex.parse_args_hm2pysurfex(argv) - surfex.hm2pysurfex(**kwargs) diff --git a/test/test_obs.py b/test/test_obs.py deleted file mode 100644 index e402439..0000000 --- a/test/test_obs.py +++ /dev/null @@ -1,140 +0,0 @@ -"""Observation tests.""" -import unittest -import logging -from datetime import datetime -import surfex -from unittest.mock import patch - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s 
%(message)s', - level=logging.DEBUG) - - -class ObsTest(unittest.TestCase): - """Observation tests.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - self.settings = { - "t2m": { - "netatmo_label": { - "filepattern": self.testdata + "/20201113T05@mm@01Z_all.json", - "varname": "Temperature", - "filetype": "netatmo", - "lonrange": [9.5, 10.5], - "latrange": [59.5, 60.5] - }, - "bufr_label": { - "filepattern": self.testdata + "/ob2020111306", - "filetype": "bufr", - "varname": "airTemperatureAt2M", - "unit": "K", - "range": 1800 - }#, - #"frost_label": { - # "varname": "air_temperature", - # "unit": "K", - # "filetype": "frost", - # "debug": True, - # "lonrange": [10, 11], - # "latrange": [59, 60], - # "level": { - # "levelType": "height_above_ground", - # "unit": "m", - # "value": 2 - # }, - #} - }, - "rh2m": { - "netatmo_label": { - "filepattern": self.testdata + "/20201113T05@mm@01Z_all.json", - "varname": "Humidity", - "filetype": "netatmo", - "lonrange": [9.5, 10.5], - "latrange": [59.5, 60.5] - }, - "bufr_label": { - "filepattern": self.testdata + "/ob2020111306", - "filetype": "bufr", - "varname": "relativeHumidityAt2M", - "unit": "1", - "range": 1800 - }#, - #"frost_label": { - # "varname": "relative_humidity", - # "unit": "1", - # "filetype": "frost", - # "lonrange": [10, 11], - # "latrange": [59, 60], - # "level": { - # "levelType": "height_above_ground", - # "unit": "m", - # "value": 2 - # }, - #} - }, - "sd": { - "bufr_label": { - "filepattern": self.testdata + "/ob2020111306", - "filetype": "bufr", - "varname": "totalSnowDepth", - "unit": "m", - "range": 1800 - }#, - #"frost_label": { - # "varname": "surface_snow_thickness", - # "unit": "m", - # "filetype": "frost", - # "lonrange": [10, 11], - # "latrange": [59, 60] - #} - } - } - - @patch('surfex.obs.requests.get') - def test_obs_t2m(self, mock_request): - """Test t2m observations.""" - an_time = datetime(2020, 11, 13, 6) - settings = self.settings["t2m"] - logging.debug(settings) - datasources = surfex.get_datasources(an_time, settings) - for das in datasources: - fit = settings[das.label]["filetype"] - das.write_json_file("unittest_" + fit + "_t2m.json", indent=2) - logging.debug(das.size) - - @patch('surfex.obs.requests.get') - def test_obs_rh2m(self, mock_request): - """Test rh2m observations.""" - an_time = datetime(2020, 11, 13, 6) - settings = self.settings["rh2m"] - logging.debug("RH2M settings %s", settings) - datasources = surfex.get_datasources(an_time, settings) - for das in datasources: - fit = settings[das.label]["filetype"] - das.write_json_file("unittest_" + fit + "_rh2m.json", indent=2) - logging.debug(das.size) - - @patch('surfex.obs.requests.get') - def test_obs_sd(self, mock_request): - """Test sd observations.""" - an_time = datetime(2020, 11, 13, 6) - settings = self.settings["sd"] - logging.debug(settings) - datasources = surfex.get_datasources(an_time, settings) - for das in datasources: - fit = settings[das.label]["filetype"] - das.write_json_file("unittest_" + fit + "_sd.json", indent=2) - logging.debug(das.size) - - def test_bufr2json(self): - """Test bufr to json conversion.""" - argv = [ - "-v", "airTemperatureAt2M", - "-b", self.testdata + "/ob2020111306", - "-o", "unittest_bufr2json_t2m.json", - "-dtg", "2020111306", - "-range", "1800" - ] - kwargs = surfex.parse_args_bufr2json(argv) - surfex.run_bufr2json(**kwargs) diff --git a/test/test_obsmon.py b/test/test_obsmon.py deleted file mode 100644 index a3517b4..0000000 --- a/test/test_obsmon.py +++ /dev/null @@ -1,35 +0,0 @@ 
-"""Test obsmon.""" -import unittest -import logging -import sqlite3 -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class ObsmonTest(unittest.TestCase): - """Test obsmon.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - - def test_obsmon(self): - """Test obsmon.""" - dbn = "unittest_ecma.db" - - argv = ["2020111306", - "t2m", - self.testdata + "/unittest_qc_t2m.json", - "--fg_file", self.testdata + "/unittest_FirstGuess4gridpp_grib2.nc", - "--an_file", self.testdata + "/unittest_an_t2m.nc", - "--file_var", "air_temperature_2m", - "-o", dbn - ] - kwargs = surfex.parse_args_qc2obsmon(argv) - print(kwargs) - surfex.write_obsmon_sqlite_file(**kwargs) - - sqlite3.connect(dbn) diff --git a/test/test_oi2soda.py b/test/test_oi2soda.py deleted file mode 100644 index c39e24d..0000000 --- a/test/test_oi2soda.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Test oi2soda.""" -import unittest -import logging -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class Oi2SodaTest(unittest.TestCase): - """Test oi2soda.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - - def test_oi2soda(self): - """Test oi2soda.""" - argv = [ - "--t2m_file", self.testdata + "/unittest_an_t2m.nc", - "--t2m_var", "air_temperature_2m", - "--rh2m_file", self.testdata + "/unittest_an_rh2m.nc", - "--rh2m_var", "relative_humidity_2m", - "--sd_file", self.testdata + "/unittest_an_sd.nc", - "--sd_var", "surface_snow_thickness", - "--debug", - "-o", "unittest_OBSERVATIONS_200330H06.DAT", - "2020033006" - ] - kwargs = surfex.parse_args_oi2soda(argv) - surfex.run_oi2soda(**kwargs) - - def test_oi2soda_only_rh(self): - """Test oi2soda only for rh2m.""" - argv = [ - "--rh2m_file", self.testdata + "/unittest_an_rh2m.nc", - "--rh2m_var", "relative_humidity_2m", - "--debug", - "-o", "unittest_OBSERVATIONS_200330H06.DAT", - "2020033006" - ] - kwargs = surfex.parse_args_oi2soda(argv) - surfex.run_oi2soda(**kwargs) diff --git a/test/test_plot.py b/test/test_plot.py deleted file mode 100644 index ed956fa..0000000 --- a/test/test_plot.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Test plotting.""" -import unittest -import logging -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class PlotTest(unittest.TestCase): - """Test plotting.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - - def test_plot_grib1(self): - """Test plotting from grib1.""" - argv = [ - "-it", "grib1", - "-t", "2020111306", - "-g", "test/settings/conf_proj_test.json", - "--indicatorOfParameter", "11", - "--level", "2", - "--levelType", "105", - "-i", self.testdata + "/fc2020111303+0003grib1", - "-o", "unittest_output_plot_grib1.png", - "--debug" - ] - kwargs = surfex.parse_args_plot_points(argv) - surfex.run_plot_points(**kwargs) - - def test_plot_grib2(self): - """Test plotting from grib2.""" - argv = [ - "-it", "grib2", - "-t", "2020111306", - "-g", "test/settings/conf_proj_test.json", - "--levelType", "103", - "--discipline", "0", - "--parameterCategory", "0", - "--parameterNumber", "0", - "--level", "2", - "-i", self.testdata + "/fc2020111303+0003grib2", - "-o", "unittest_output_plot_grib2.png", - "--debug" - ] - kwargs = surfex.parse_args_plot_points(argv) - surfex.run_plot_points(**kwargs) - - def test_plot_netcdf(self): - """Test plotting from netcdf.""" - 
argv = [ - "-it", "netcdf", - "-t", "2020111306", - "-g", "test/settings/conf_proj_test.json", - "-v", "air_temperature_2m", - "-i", self.testdata + "/meps_det_2_5km_20201113T03Z.nc", - "-o", "unittest_output_plot_nc.png", - "--debug" - ] - kwargs = surfex.parse_args_plot_points(argv) - surfex.run_plot_points(**kwargs) - - def test_plot_obs_frost_json(self): - """Test plotting from frost json data.""" - argv = [ - "-it", "obs", - "--obs_type", "json", - "-t", "2020111306", - "-v", "air_temperature", - "-i", self.testdata + "/unittest_frost_t2m.json", - "-o", "unittest_output_plot_obs_frost_json.png", - "--debug" - ] - kwargs = surfex.parse_args_plot_points(argv) - surfex.run_plot_points(**kwargs) diff --git a/test/test_run_binary.py b/test/test_run_binary.py deleted file mode 100644 index 507a974..0000000 --- a/test/test_run_binary.py +++ /dev/null @@ -1,381 +0,0 @@ -"""Test ruinning a binary emulator.""" -import unittest -import logging -import os -import json -import tomlkit -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class RunTestNC(unittest.TestCase): - """Test running NC filetype.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - self.rootdir = os.path.abspath(os.curdir) - self.config_exp_surfex = self.rootdir + "/surfex/cfg/config_exp_surfex.toml" - self.grid = "conf_proj" - self.domain = self.rootdir + "/test/settings/" + self.grid + "_test.json" - self.system = self.rootdir + "/test/settings/test_system.json" - - def tearDown(self): - """Tear down.""" - - @staticmethod - def _clean_offline_test_nc(prepare=False, extra_files=None): - """Clean before test. - - Args: - prepare (bool, optional): To prepare or not. Defaults to False. - extra_files (list, optional): List of extra files. Defaults to None. 
- """ - files = ["ecoclimapI_covers_param.bin", - "ecoclimapII_af_covers_param.bin", - "ecoclimapII_eu_covers_param.bin", - "PREP_INIT.nc", - "PREP_200220H03.nc", - "PREP_200220H03_EKF_PERT0.nc", - "PREP_200220H03_EKF_PERT1.nc", - "PREP_200220H03_EKF_PERT2.nc", - "GlobalLakeDepth_V3.0.hdr", - "GlobalLakeDepth_V3.0.dir", - "GlobalLakeStatus_V3.0.hdr", - "GlobalLakeStatus_V3.0.dir", - "sand_fao.hdr", - "sand_fao.dir", - "clay_fao.hdr", - "clay_fao.dir", - "soc_top.hdr", - "soc_top.dir", - "soc_sub.hdr", - "soc_sub.dir", - "ECOCLIMAP_2_5_p.hdr", - "ECOCLIMAP_2_5_p.dir", - "gmted2010.hdr", - "gmted2010.dir", - "LAKE_LTA_NEW.nc", - "FORCING.nc", - "SODA_TEST_conf_proj.nc", - "PGD.nc", - "PREP.nc", - "SURFOUT.nc", - "OBSERVATIONS_200220H03.DAT", - "OPTIONS.nam", - "log0", - "LISTING_PGD0.txt", - "LISTING_PREP0.txt", - "LISTING_OFFLINE0.txt", - "LISTING_SODA0.txt", - ] - if extra_files is not None: - files = files + extra_files - for dfn in files: - if os.path.islink(dfn): - logging.debug("Removing symlink %s", dfn) - os.unlink(dfn) - if os.path.exists(dfn): - logging.debug("Removing %s", dfn) - os.remove(dfn) - else: - if not prepare: - logging.debug("Not found %s", dfn) - - @staticmethod - def _clean_masterodb_test(prepare=False, extra_files=None): - - files = ["ecoclimapI_covers_param.bin", - "ecoclimapII_af_covers_param.bin", - "ecoclimapII_eu_covers_param.bin", - "PREP_INIT.sfx", - "PREP_200220H03.sfx", - "PREP_200220H03_EKF_PERT0.sfx", - "PREP_200220H03_EKF_PERT1.sfx", - "PREP_200220H03_EKF_PERT2.sfx", - "Const.Clim.sfx", - "ICMSHHARMINIT.sfx", - "ICMSHHARMANAL+0000.sfx", - "ICMSHHARM+0003.sfx", - "EXSEG1.nam", - "log0", - "LISTING_PGD0.txt", - "LISTING_PREP0.txt", - "LISTING_OFFLINE0.txt", - "LISTING_SODA0.txt", - "NODE_01" - ] - - if extra_files is not None: - files = files + extra_files - for dfn in files: - if os.path.islink(dfn): - logging.debug("Removing symlink %s", dfn) - os.unlink(dfn) - if os.path.exists(dfn): - logging.debug("Removing %s", dfn) - os.remove(dfn) - else: - if not prepare: - logging.debug("Not found %s", dfn) - - def test_run_test_nc(self): - """Test run NC.""" - my_format = "NC" - this_config = self.rootdir + "/test/settings/" + my_format.lower() + ".toml" - extension = ".nc" - - config_file = "config_run_test_nc.toml" - rte = "rte_run_test_nc.json" - config = surfex.merge_toml_env_from_files([self.config_exp_surfex, this_config]) - - # Prepare - extra_files = [config_file, rte] - self._clean_offline_test_nc(prepare=True, extra_files=extra_files) - - # PGD - task = "pgd" - with open(rte, mode="w", encoding="utf-8") as file_handler: - json.dump(dict(os.environ), file_handler) - with open(config_file, mode="w", encoding="utf-8") as file_handler: - tomlkit.dump(config, file_handler) - - output = os.getcwd() + "/unittest_PGD_TEST_" + self.grid + extension - if "PGD_BINARY" in os.environ and os.environ["PGD_BINARY"] != "": - binary = os.environ["PGD_BINARY"] - else: - # binary = "touch PGD" + extension - binary = self.rootdir + "/test/bin/PGD_" + my_format - - argv = [ - "-w", "", - "-c", config_file, - "--domain", self.domain, - "-s", self.system, - "-n", self.rootdir + "/test/nam/", - "-r", rte, - "-f", - "--tolerate_missing", - "-o", output, - binary - ] - kwargs = surfex.parse_args_surfex_binary(argv, task) - surfex.run_surfex_binary(task, **kwargs) - - # PREP - task = "prep" - - with open(rte, mode="w", encoding="utf-8") as file_handler: - json.dump(dict(os.environ), file_handler) - with open(config_file, mode="w", encoding="utf-8") as file_handler: - 
tomlkit.dump(config, file_handler) - - # pgd = output - pgd = f"{self.rootdir}/testdata/PGD_CONF_PROJ.txt" - output = os.getcwd() + "/unittest_PREP_TEST_" + self.grid + extension - if "PREP_BINARY" in os.environ and os.environ["PREP_BINARY"] != "": - binary = os.environ["PREP_BINARY"] - else: - binary = self.rootdir + "/test/bin/PREP_" + my_format - - argv = [ - "-w", "", - "--domain", self.domain, - "--pgd", pgd, - "--prep_file", self.rootdir + "/test/nam/prep_from_namelist_values.json", - "--prep_filetype", "json", - "--dtg", "2020022000", - "-c", config_file, - "-s", self.system, - "-n", self.rootdir + "/test/nam/", - "-r", rte, - "-f", - "--tolerate_missing", - "-o", output, - binary - ] - kwargs = surfex.parse_args_surfex_binary(argv, task) - surfex.run_surfex_binary(task, **kwargs) - - # OFFLINE - task = "offline" - - with open(rte, mode="w", encoding="utf-8") as file_handler: - json.dump(dict(os.environ), file_handler) - with open(config_file, mode="w", encoding="utf-8") as file_handler: - tomlkit.dump(config, file_handler) - - if "OFFLINE_BINARY" in os.environ and os.environ["OFFLINE_BINARY"] != "": - binary = os.environ["OFFLINE_BINARY"] - else: - binary = self.rootdir + "/test/bin/OFFLINE_" + my_format - - prep = output - output = os.getcwd() + "/unittest_OFFLINE_TEST_" + self.grid + extension - argv = [ - "-w", "", - "--domain", self.domain, - "--pgd", pgd, - "--prep", prep, - "-c", config_file, - "-s", self.system, - "-n", self.rootdir + "/test/nam/", - "-r", rte, - "-f", - "--tolerate_missing", - "-o", output, - "--forc_zs", - "--forcing_dir", "testdata", - binary - ] - kwargs = surfex.parse_args_surfex_binary(argv, task) - # kwargs.update({"check_existence": False}) - surfex.run_surfex_binary(task, **kwargs) - - # SODA - task = "soda" - - with open(rte, mode="w", encoding="utf-8") as file_handler: - json.dump(dict(os.environ), file_handler) - with open(config_file, mode="w", encoding="utf-8") as file_handler: - tomlkit.dump(config, file_handler) - - if "SODA_BINARY" in os.environ and os.environ["SODA_BINARY"] != "": - binary = os.environ["SODA_BINARY"] - else: - binary = self.rootdir + "/test/bin/SODA_" + my_format - - prep = output - output = "unittest_SODA_TEST_" + self.grid + extension - argv = [ - "-w", "", - "--domain", self.domain, - "--pgd", pgd, - "--prep", prep, - "--dtg", "2020022003", - "-c", config_file, - "-s", self.system, - "-n", self.rootdir + "/test/nam/", - "-r", rte, - "-f", - "--tolerate_missing", - "-o", output, - binary - ] - kwargs = surfex.parse_args_surfex_binary(argv, task) - surfex.run_surfex_binary(task, **kwargs) - - # Clean up - self._clean_offline_test_nc(extra_files=extra_files) - - def test_masterodb(self): - """Test masterodb.""" - my_format = "FA" - this_config = self.rootdir + "/test/settings/" + my_format.lower() + ".toml" - - config_file = "config_run_test_masterodb.toml" - rte = "rte_run_test_masterodb.json" - config = surfex.merge_toml_env_from_files([self.config_exp_surfex, this_config]) - - # Prepare - extra_files = [config_file, rte] - self._clean_masterodb_test(prepare=True, extra_files=extra_files) - - # Forecast - with open(rte, mode="w", encoding="utf-8") as file_handler: - json.dump(dict(os.environ), file_handler) - with open(config_file, mode="w", encoding="utf-8") as file_handler: - tomlkit.dump(config, file_handler) - - binary = self.rootdir + "/test/bin/MASTERODB" - pgd = "Const.Clim.sfx" - prep = "ICMSHHARMINIT.sfx" - os.system("touch " + pgd) - os.system("touch " + prep) - output = os.getcwd() + 
"/unittest_ICMSHHARM+0003.sfx" - argv = [ - "-w", "", - "-m", "forecast", - "--domain", self.domain, - "--pgd", pgd, - "--prep", prep, - "-c", config_file, - "-s", self.system, - "-n", self.rootdir + "/test/nam/", - "-r", rte, - "-f", - "--tolerate_missing", - "-o", output, - "-b", binary - ] - kwargs = surfex.parse_args_masterodb(argv) - # kwargs.update({"check_existence": False}) - surfex.run_masterodb(**kwargs) - - # CANARI - with open(rte, mode="w", encoding="utf-8") as file_handler: - json.dump(dict(os.environ), file_handler) - # with open(config_file, mode="w", encoding="utf-8") as file_handler: - # toml.dump(config, file_handler) - - binary = self.rootdir + "/test/bin/MASTERODB_CANARI" - prep = output - output = os.getcwd() + "/unittest_ICMSHANAL+0000.sfx" - argv = [ - "-w", "", - "-m", "canari", - "--domain", self.domain, - "--pgd", pgd, - "--prep", prep, - "--dtg", "2020022003", - "-c", config_file, - "-s", self.system, - "-n", self.rootdir + "/test/nam/", - "-r", rte, - "-f", - "--tolerate_missing", - "-o", output, - "-b", binary - ] - kwargs = surfex.parse_args_masterodb(argv) - surfex.run_masterodb(**kwargs) - - # Clean up - self._clean_masterodb_test(extra_files=extra_files) - - @staticmethod - def test_input_json_from_file(): - """Test input from a json file.""" - fname = "test_in" + str(os.getpid()) - with open(fname, mode="w", encoding="utf-8") as file_handler: - # {"testfile":{ "fname": "ln -sf"}}} - file_handler.write("{\"testfile_in" + str(os.getpid()) + "\": {\"" + fname - + "\": \"ln -sf @INFILE@ @TARGET@\"}}") - - my_input = surfex.JsonInputDataFromFile(fname) - my_input.prepare_input() - os.remove(fname) - os.remove("testfile_in" + str(os.getpid())) - - @staticmethod - def test_output_json_from_file(): - """Test output from a json file.""" - fname = "test_out_" + str(os.getpid()) - file_to_archive = "test_to_archive_" + str(os.getpid()) - destination = "test_archive_destination_" + str(os.getpid()) - os.system("touch " + file_to_archive) - with open(fname, mode="w", encoding="utf-8") as file_handler: - file_handler.write("{\"" + file_to_archive + "\": {\"" + destination + "\": \"cp\"}}") - - my_output = surfex.JsonOutputDataFromFile(fname) - my_output.archive_files() - os.remove(fname) - os.remove(file_to_archive) - os.remove(destination) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/test_titan.py b/test/test_titan.py deleted file mode 100644 index ff12693..0000000 --- a/test/test_titan.py +++ /dev/null @@ -1,338 +0,0 @@ -"""Test titan.""" -import unittest -import logging -import json -import os -import surfex - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class TitanTest(unittest.TestCase): - """Test titan.""" - - def setUp(self): - """Set up.""" - self.testdata = "testdata/" - self.qc_settings = { - "t2m": { - "do_test": True, - "plausibility": { - "minval": 200, - "maxval": 350 - }, - "firstguess": { - "fg_file": self.testdata + "/unittest_FirstGuess4gridpp_grib2.nc", - "fg_var": "air_temperature_2m", - "negdiff": 2, - "posdiff": 3, - "do_test": False - }, - # Not working yet - "buddy": { - "do_test": False - }, - "climatology": { - "do_test": False, - "minval": 270, - "maxval": 275 - }, - "sct": { - }, - "redundancy": { - }, - "blacklist": { - }, - "domain": { - }, - "nometa": { - }, - "fraction": { - "fraction_file": self.testdata + "/unittest_FirstGuess4gridpp_grib2.nc", - "fraction_var": "land_area_fraction", - }, - "sets": { - "bufr": { - "filepattern": 
"testdata_obs/bufr_t2m.json", - "filetype": "json", - "varname": "airTemperatureAt2M", - "tests": { - "firstguess": { - "do_test": True - } - } - }, - "netatmo": { - "filepattern": self.testdata + "/unittest_netatmo_t2m.json", - "varname": "Temperature", - "filetype": "json", - "tests": { - "firstguess": { - } - } - }, - "frost": { - "filepattern": self.testdata + "/unittest_frost_t2m.json", - "varname": "air_temperature", - "filetype": "json", - "tests": { - "firstguess": { - "do_test": True - } - } - } - } - }, - "rh2m": { - "do_test": True, - "plausibility": { - "minval": 0, - "maxval": 1 - }, - "firstguess": { - "fg_file": self.testdata + "/unittest_FirstGuess4gridpp_grib1.nc", - "fg_var": "relative_humidity_2m", - "negdiff": 0.2, - "posdiff": 0.2, - "do_test": False - }, - # Not working yet - "buddy": { - "do_test": False - }, - "climatology": { - "do_test": False, - "minval": 0, - "maxval": 1 - }, - "sct": { - }, - "redundancy": { - }, - "blacklist": { - }, - "domain": { - }, - "nometa": { - }, - "fraction": { - "fraction_file": self.testdata + "/unittest_FirstGuess4gridpp_grib1.nc", - "fraction_var": "land_area_fraction", - }, - "sets": { - "bufr": { - "filepattern": self.testdata + "/unittest_bufr_rh2m.json", - "filetype": "json", - "varname": "relativeHumidityAt2M", - "tests": { - "firstguess": { - "do_test": True - } - } - }, - "netatmo": { - "filepattern": self.testdata + "/unittest_netatmo_rh2m.json", - "varname": "Humidity", - "filetype": "json", - "tests": { - "firstguess": { - } - } - }, - "frost": { - "filepattern": self.testdata + "/unittest_frost_rh2m.json", - "varname": "relative_humidity", - "filetype": "json", - "tests": { - "firstguess": { - "do_test": True - } - } - } - } - }, - "sd": { - "do_test": True, - "plausibility": { - "minval": 0, - "maxval": 50 - }, - "firstguess": { - "fg_file": self.testdata + "/unittest_FirstGuess4gridpp_grib2.nc", - "fg_var": "surface_snow_thickness", - "negdiff": 0.4, - "posdiff": 0.4, - "do_test": True - }, - # Not working yet - "buddy": { - "do_test": False - }, - "climatology": { - "do_test": False, - "minval": 0, - "maxval": 1 - }, - "sct": { - }, - "redundancy": { - }, - "blacklist": { - }, - "domain": { - }, - "nometa": { - }, - "fraction": { - "fraction_file": self.testdata + "/unittest_FirstGuess4gridpp_grib2.nc", - "fraction_var": "land_area_fraction", - }, - "sets": { - "bufr": { - "filepattern": self.testdata + "/unittest_bufr_sd.json", - "filetype": "json", - "varname": "totalSnowDepth", - "tests": { - "firstguess": { - "do_test": True - } - } - }, - "frost": { - "filepattern": self.testdata + "/unittest_frost_sd.json", - "varname": "surface_snow_thickness", - "filetype": "json", - "tests": { - "firstguess": { - "do_test": True - } - } - } - } - } - } - - def test_titan_t2m(self): - """Test titan for t2m.""" - with open("unittest_qc_settings_t2m.json", mode="w", encoding="utf-8") as file_handler: - json.dump(self.qc_settings, file_handler) - with open("unittest_blacklist_t2m.json", mode="w", encoding="utf-8") as file_handler: - json.dump({}, file_handler) - argv = [ - "-i", "unittest_qc_settings_t2m.json", - "-v", "t2m", - "-dtg", "2020033006", - "--blacklist", "unittest_blacklist_t2m.json", - "--domain", "test/settings/conf_proj_test.json", - "-o", "unittest_qc_t2m.json", - "domain", "blacklist", "nometa", "plausibility", "redundancy", "firstguess", "fraction", - "buddy", "climatology", "sct" - ] - kwargs = surfex.parse_args_titan(argv) - surfex.run_titan(**kwargs) - - def test_titan_t2m_harmonie(self): - """Test titan for 
t2m from harmonie.""" - with open("unittest_qc_settings_t2m_hm.json", mode="w", encoding="utf-8") as file_handler: - json.dump(self.qc_settings, file_handler) - with open("unittest_blacklist_t2m_hm.json", mode="w", encoding="utf-8") as file_handler: - json.dump({}, file_handler) - argv = [ - "-i", "unittest_qc_settings_t2m_hm.json", - "-v", "t2m", - "-dtg", "2020033006", - "--harmonie", - "--blacklist", "unittest_blacklist_t2m_hm.json", - "-o", "unittest_qc_t2m_hm.json", - "domain", "blacklist", "nometa", "plausibility", "redundancy", "firstguess", "fraction", - "buddy", "climatology", "sct" - ] - with open("test/settings/hm_env.json", mode="r", encoding="utf-8") as file_handler: - env = json.load(file_handler) - os.environ.update(env) - kwargs = surfex.parse_args_titan(argv) - surfex.run_titan(**kwargs) - - def test_titan_rh2m(self): - """Test titan for rh2m.""" - with open("unittest_qc_settings_rh2m.json", mode="w", encoding="utf-8") as file_handler: - json.dump(self.qc_settings, file_handler) - with open("unittest_blacklist_rh2m.json", mode="w", encoding="utf-8") as file_handler: - json.dump({}, file_handler) - argv = [ - "-i", "unittest_qc_settings_rh2m.json", - "-v", "rh2m", - "-dtg", "2020033006", - "--blacklist", "unittest_blacklist_rh2m.json", - "--domain", "test/settings/conf_proj_test.json", - "-o", "unittest_qc_rh2m.json", - "domain", "blacklist", "nometa", "plausibility", "redundancy", "firstguess", "fraction", - "buddy", "climatology", "sct" - ] - kwargs = surfex.parse_args_titan(argv) - surfex.run_titan(**kwargs) - - def test_titan_rh2m_harmonie(self): - """Test titan for rh2m from harmonie.""" - with open("unittest_qc_settings_rh2m_hm.json", mode="w", encoding="utf-8") as file_handler: - json.dump(self.qc_settings, file_handler) - with open("unittest_blacklist_rh2m_hm.json", mode="w", encoding="utf-8") as file_handler: - json.dump({}, file_handler) - argv = [ - "-i", "unittest_qc_settings_rh2m_hm.json", - "-v", "rh2m", - "-dtg", "2020033006", - "--harmonie", - "--blacklist", "unittest_blacklist_rh2m_hm.json", - "-o", "unittest_qc_rh2m_hm.json", - "domain", "blacklist", "nometa", "plausibility", "redundancy", "firstguess", "fraction", - "buddy", "climatology", "sct" - ] - with open("test/settings/hm_env.json", mode="r", encoding="utf-8") as file_handler: - env = json.load(file_handler) - os.environ.update(env) - kwargs = surfex.parse_args_titan(argv) - surfex.run_titan(**kwargs) - - def test_titan_sd(self): - """Test titan for sd.""" - with open("unittest_qc_settings_sd.json", mode="w", encoding="utf-8") as file_handler: - json.dump(self.qc_settings, file_handler) - with open("unittest_blacklist_sd.json", mode="w", encoding="utf-8") as file_handler: - json.dump({}, file_handler) - argv = [ - "-i", "unittest_qc_settings_sd.json", - "-v", "sd", - "-dtg", "2020033006", - "--blacklist", "unittest_blacklist_sd.json", - "--domain", "test/settings/conf_proj_test.json", - "-o", "unittest_qc_sd.json", - "domain", "blacklist", "nometa", "plausibility", "redundancy", "firstguess", "fraction", - "buddy", "climatology", "sct" - ] - kwargs = surfex.parse_args_titan(argv) - surfex.run_titan(**kwargs) - - def test_titan_sd_harmonie(self): - """Test titan for sd from harmonie.""" - with open("unittest_qc_settings_sd_hm.json", mode="w", encoding="utf-8") as file_handler: - json.dump(self.qc_settings, file_handler) - with open("unittest_blacklist_sd_hm.json", mode="w", encoding="utf-8") as file_handler: - json.dump({}, file_handler) - argv = [ - "-i", "unittest_qc_settings_sd_hm.json", - "-v", 
"sd", - "-dtg", "2020033006", - "--harmonie", - "--blacklist", "unittest_blacklist_sd_hm.json", - "-o", "unittest_qc_sd_hm.json", - "domain", "blacklist", "nometa", "plausibility", "redundancy", "firstguess", "fraction", - "buddy", "climatology", "sct" - ] - with open("test/settings/hm_env.json", mode="r", encoding="utf-8") as file_handler: - env = json.load(file_handler) - os.environ.update(env) - kwargs = surfex.parse_args_titan(argv) - surfex.run_titan(**kwargs) diff --git a/test/test_variable.py b/test/test_variable.py deleted file mode 100644 index 74ff574..0000000 --- a/test/test_variable.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Test variable.""" -import unittest -import logging -from datetime import datetime, timedelta -import yaml -from surfex.variable import Variable - - -logging.basicConfig(format='%(asctime)s %(levelname)s %(pathname)s:%(lineno)s %(message)s', - level=logging.DEBUG) - - -class TestVariable(unittest.TestCase): - """Test variable.""" - - def setUp(self): - """Set up.""" - with open("test/fixtures/config.yml", mode='r', encoding="utf-8") as cfgf: - self.cfg = yaml.safe_load(cfgf) - - def test_open_new_file_nc(self): - """Test to open a netcdf file.""" - initialtime = datetime(2019, 11, 13, 0) - intervall = 3600 - case = "netcdf" - - var_dict = self.cfg[case] - var_type = case - for i in range(11): - with self.subTest(i=i): - validtime = initialtime + timedelta(seconds=intervall * i) - previoustime = validtime - timedelta(seconds=intervall) - variable = Variable(var_type, var_dict, initialtime) - previous_filename = variable.get_filename(validtime, previoustime=previoustime) - filename = variable.get_filename(validtime) - self.assertEqual(filename, var_dict['blueprint'][i]) - if i > 0: - self.assertEqual(previous_filename, var_dict['blueprint_previous'][i]) - - def test_open_new_file_grib1(self): - """Test to open a grib1 file.""" - initialtime = datetime(2019, 11, 13, 0) - intervall = 3600 - case = "grib1" - - var_dict = self.cfg[case] - var_type = case - for i in range(11): - with self.subTest(i=i): - validtime = initialtime + timedelta(seconds=intervall * i) - previoustime = validtime - timedelta(seconds=intervall) - variable = Variable(var_type, var_dict, initialtime) - previous_filename = variable.get_filename(validtime, previoustime=previoustime) - filename = variable.get_filename(validtime) - self.assertEqual(filename, var_dict['blueprint'][i]) - if i > 0: - self.assertEqual(previous_filename, var_dict['blueprint_previous'][i]) - - def test_open_new_file_grib2(self): - """Test to open a grib2 file.""" - initialtime = datetime(2019, 11, 13, 2) - intervall = 3600 - case = "grib2" - - var_dict = self.cfg[case] - var_type = case - for i in range(11): - with self.subTest(i=i): - validtime = initialtime + timedelta(seconds=intervall * i) - previoustime = validtime - timedelta(seconds=intervall) - variable = Variable(var_type, var_dict, initialtime) - previous_filename = variable.get_filename(validtime, previoustime=previoustime) - filename = variable.get_filename(validtime) - self.assertEqual(filename, var_dict['blueprint'][i]) - if i > 0: - self.assertEqual(previous_filename, var_dict['blueprint_previous'][i]) - - def test_open_new_file_an(self): - """Test to open a met nordic file.""" - initialtime = datetime(2019, 11, 13, 0) - intervall = 3600 - case = "met_nordic" - - var_dict = self.cfg[case] - var_type = case - if var_type == "met_nordic": - var_type = "netcdf" - for i in range(11): - with self.subTest(i=i): - validtime = initialtime + 
timedelta(seconds=intervall * i) - variable = Variable(var_type, var_dict, initialtime) - filename = variable.get_filename(validtime) - self.assertEqual(filename, var_dict['blueprint'][i]) - - def test_open_new_file_fail(self): - """Test failing to open a file.""" - initialtime = datetime(2019, 11, 13, 0) - case = "met_nordic" - var_dict = self.cfg[case] - var_dict["offset"] = 7200 - var_type = case - if var_type == "met_nordic": - var_type = "netcdf" - with self.assertRaises(Exception): - Variable(var_type, var_dict, initialtime) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/__init__.py b/tests/__init__.py similarity index 100% rename from test/__init__.py rename to tests/__init__.py diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..9e32712 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,817 @@ +"""Mockers.""" +import json +import os + +import numpy as np +import pytest +from netCDF4 import Dataset + +from surfex.datetime_utils import as_datetime +from surfex.geo import ConfProj + + +@pytest.fixture(scope="module") +def config_exp_surfex_toml(): + fname = f"{os.path.abspath(os.path.dirname(__file__))}/../surfex//cfg/config_exp_surfex.toml" + return fname + + +@pytest.fixture(scope="module") +def conf_proj_domain_dict(): + domain = { + "nam_pgd_grid": {"cgrid": "CONF PROJ"}, + "nam_conf_proj": {"xlat0": 59.5, "xlon0": 9}, + "nam_conf_proj_grid": { + "ilone": 1, + "ilate": 1, + "xlatcen": 60, + "xloncen": 10, + "nimax": 9, + "njmax": 19, + "xdx": 10000.0, + "xdy": 10000.0, + }, + } + return domain + + +@pytest.fixture(scope="module") +def conf_proj_2x3_dict(): + conf_proj_2x3_dict = { + "nam_pgd_grid": {"cgrid": "CONF PROJ"}, + "nam_conf_proj": {"xlat0": 59.5, "xlon0": 9}, + "nam_conf_proj_grid": { + "ilone": 1, + "ilate": 1, + "xlatcen": 60, + "xloncen": 10, + "nimax": 2, + "njmax": 3, + "xdx": 10000.0, + "xdy": 10000.0, + }, + } + return conf_proj_2x3_dict + + +@pytest.fixture(scope="module") +def conf_proj_2x3(conf_proj_2x3_dict): + return ConfProj(conf_proj_2x3_dict) + + +@pytest.fixture(scope="module") +def conf_proj_2x3_file(tmp_path_factory, conf_proj_2x3_dict): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/conf_proj_2x3.json" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(conf_proj_2x3_dict, fhandler) + return fname + + +@pytest.fixture(scope="module") +def conf_proj_domain_file(conf_proj_domain_dict, tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/conf_proj_domain.json" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(conf_proj_domain_dict, fhandler) + return fname + + +@pytest.fixture(scope="module") +def conf_proj_domain(conf_proj_domain_dict): + return ConfProj(conf_proj_domain_dict) + + +@pytest.fixture(scope="module") +def obsset(): + obs = { + "0": { + "obstime": "20201113060000", + "varname": "air_temperature", + "lon": 10.578, + "lat": 59.4352, + "stid": "17280", + "elev": 14.0, + "value": 278.04999999999995, + }, + "1": { + "obstime": "20201113060000", + "varname": "air_temperature", + "lon": 10.8312, + "lat": 59.685, + "stid": "17875", + "elev": 91.0, + "value": 277.15, + }, + "2": { + "obstime": "20201113070000", + "varname": "air_temperature", + "lon": 10.578, + "lat": 59.4352, + "stid": "17280", + "elev": 14.0, + "value": 280.0, + }, + } + return obs + + +@pytest.fixture(scope="module") +def an_time(): + return as_datetime("202002200600") + + +@pytest.fixture(scope="module") +def obstime_str(): + return "20200220060000" + + 
+@pytest.fixture(scope="module") +def obstime(obstime_str): + return as_datetime(obstime_str) + + +@pytest.fixture(scope="module") +def obsset_fname(tmp_path_factory, obsset, obstime_str): + filename = ( + f"{tmp_path_factory.getbasetemp().as_posix()}/obsset_file_{obstime_str}.json" + ) + with open(filename, mode="w", encoding="utf-8") as fhandler: + json.dump(obsset, fhandler) + return filename + + +@pytest.fixture(scope="module") +def obsset_filepattern(tmp_path_factory): + filepattern = f"{tmp_path_factory.getbasetemp().as_posix}/obsset_file_@YYYY@@MM@@DD@@HH@@mm@@SS@.json" + return filepattern + + +@pytest.fixture(scope="module") +def qc_dataset(obstime_str): + qc_data = { + "0": { + "varname": "air_temperature", + "obstime": obstime_str, + "lon": 6.9933000000000005, + "lat": 62.191, + "stid": "1111", + "elev": 900.0, + "value": 273.5, + "flag": 0.0, + "ci": 1.0, + "laf": 1.0, + "provider": "bufr", + "fg_dep": np.nan, + "an_dep": np.nan, + "passed_tests": [ + "domain", + "blacklist", + "nometa", + "plausibility", + "redundancy", + "firstguess", + "fraction", + "sct", + ], + }, + "1": { + "varname": "air_temperature", + "obstime": obstime_str, + "lon": 7.8173, + "lat": 59.767500000000005, + "stid": "NA", + "elev": 1340.0, + "value": 274.5, + "flag": 199.0, + "ci": 1.0, + "laf": 1.0, + "provider": "bufr", + "fg_dep": np.nan, + "an_dep": np.nan, + "passed_tests": [], + }, + } + return qc_data + + +@pytest.fixture(scope="module") +def get_nam_path(tmp_path_factory): + nam_dir = f"{tmp_path_factory.getbasetemp().as_posix()}/nam" + if not os.path.exists(nam_dir): + os.makedirs(nam_dir, exist_ok=True) + files = [ + "io", + "constants", + "rsmin", + "rsmin_mod", + "cv", + "sea", + "treedrag", + "flake", + "prep_from_namelist_values", + "prep", + "prep_snow", + "offline", + "soda", + "selected_output", + "override", + ] + for fff in files: + with open(f"{nam_dir}/{fff}.json", mode="w", encoding="utf-8") as nam: + json.dump({}, nam) + return nam_dir + + +@pytest.fixture(scope="module") +def rotated_ll_t2m_grib1(tmp_path_factory): + keys = { + "editionNumber": 1, + "gridType": "rotated_ll", + "Ni": 9, + "Nj": 19, + "latitudeOfFirstGridPointInDegrees": 59, + "longitudeOfFirstGridPointInDegrees": 9.5, + "latitudeOfLastGridPointInDegrees": 60.9, + "longitudeOfLastGridPointInDegrees": 10.4, + "iDirectionIncrementInDegrees": 0.1, + "jDirectionIncrementInDegrees": 0.1, + "latitudeOfSouthernPoleInDegrees": 0, + "longitudeOfSouthernPoleInDegrees": 0, + "iScansNegatively": 1, + "jScansPositively": 0, + "indicatorOfParameter": 11, + "levelType": 105, + "level": 2, + "timeRangeIndicator": 0, + "bitmapPresent": 0, + } + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/rotated_ll_t2m.grib1" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler) + return fname + + +@pytest.fixture(scope="module") +def rotated_ll_t1_grib2(tmp_path_factory): + keys = { + "editionNumber": 2, + "gridType": "rotated_ll", + "Ni": 9, + "Nj": 19, + "latitudeOfFirstGridPointInDegrees": 59, + "longitudeOfFirstGridPointInDegrees": 9.5, + "latitudeOfLastGridPointInDegrees": 60.9, + "longitudeOfLastGridPointInDegrees": 10.4, + "iDirectionIncrementInDegrees": 0.1, + "jDirectionIncrementInDegrees": 0.1, + "latitudeOfSouthernPoleInDegrees": 0, + "longitudeOfSouthernPoleInDegrees": 0, + "iScansNegatively": 1, + "jScansPositively": 0, + "discipline": 0, + "parameterCategory": 0, + "parameterNumber": 0, + "levelType": 103, + "typeOfStatisticalProcessing": -1, + "level": 2, + "bitmapPresent": 0, + } + fname = 
f"{tmp_path_factory.getbasetemp().as_posix()}/rotated_ll_t1.grib2" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler) + return fname + + +@pytest.fixture(scope="module") +def lambert_t2m_grib1(tmp_path_factory): + keys = { + "editionNumber": 1, + "gridType": "lambert", + "Nx": 9, + "Ny": 19, + "latitudeOfFirstGridPointInDegrees": 58.828, + "longitudeOfFirstGridPointInDegrees": 7.893, + "LoVInDegrees": 15, + "DxInMetres": 2500, + "DyInMetres": 2500, + "iScansNegatively": 0, + "jScansPositively": 1, + "jPointsAreConsecutive": 1, + "Latin1InDegrees": 63.3, + "LaDInDegrees": 63.3, + "Latin2InDegrees": 63.3, + "latitudeOfSouthernPoleInDegrees": -90, + "longitudeOfSouthernPoleInDegrees": 0, + "indicatorOfParameter": 11, + "levelType": 105, + "level": 2, + "timeRangeIndicator": 0, + "bitmapPresent": 0, + } + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/lambert_t2m.grib1" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler) + return fname + + +@pytest.fixture(scope="module") +def lambert_t1_grib2(tmp_path_factory): + keys = { + "editionNumber": 2, + "gridType": "lambert", + "Nx": 9, + "Ny": 19, + "latitudeOfFirstGridPointInDegrees": 58.828, + "longitudeOfFirstGridPointInDegrees": 7.893, + "LoVInDegrees": 15, + "DxInMetres": 2500, + "DyInMetres": 2500, + "iScansNegatively": 0, + "jScansPositively": 1, + "jPointsAreConsecutive": 1, + "Latin1InDegrees": 63.3, + "LaDInDegrees": 63.3, + "Latin2InDegrees": 63.3, + "latitudeOfSouthernPoleInDegrees": -90, + "longitudeOfSouthernPoleInDegrees": 0, + "discipline": 0, + "parameterCategory": 0, + "parameterNumber": 0, + "levelType": 103, + "typeOfStatisticalProcessing": -1, + "level": 2, + "bitmapPresent": 0, + } + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/lambert_tl.grib2" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler) + return fname + + +@pytest.fixture(scope="module") +def regular_ll_t2m_grib1(tmp_path_factory): + keys = { + "editionNumber": 1, + "gridType": "regular_ll", + "Ni": 9, + "Nj": 19, + "latitudeOfFirstGridPointInDegrees": 59, + "longitudeOfFirstGridPointInDegrees": 9.5, + "latitudeOfLastGridPointInDegrees": 60.9, + "longitudeOfLastGridPointInDegrees": 10.4, + "iDirectionIncrementInDegrees": 0.1, + "jDirectionIncrementInDegrees": 0.1, + "indicatorOfParameter": 11, + "levelType": 105, + "level": 2, + "timeRangeIndicator": 0, + "bitmapPresent": 0, + } + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/regular_ll_t2m.grib1" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler) + return fname + + +@pytest.fixture(scope="module") +def regular_ll_t1_grib2(tmp_path_factory): + keys = { + "editionNumber": 2, + "gridType": "regular_ll", + "Ni": 9, + "Nj": 19, + "latitudeOfFirstGridPointInDegrees": 59, + "longitudeOfFirstGridPointInDegrees": 9.5, + "latitudeOfLastGridPointInDegrees": 60.9, + "longitudeOfLastGridPointInDegrees": 10.4, + "iDirectionIncrementInDegrees": 0.1, + "jDirectionIncrementInDegrees": 0.1, + "discipline": 0, + "parameterCategory": 0, + "parameterNumber": 0, + "levelType": 103, + "typeOfStatisticalProcessing": -1, + "level": 2, + "bitmapPresent": 0, + } + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/regular_ll_t1.grib2" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler) + return fname + + +@pytest.fixture(scope="module") +def bufr_file(tmp_path_factory): + keys = { + "latitude": 60.0, + "localLatitude": 60.0, + 
"longitude": 10.0, + "localLongitude": 10.0, + "year": 2020, + "month": 2, + "day": 20, + "hour": 6, + "minute": 2, + "heightOfStationGroundAboveMeanSeaLevel": 230, + "heightOfStation": 230, + "stationNumber": 479, + "blockNumber": 10, + "airTemperatureAt2M": 273.15, + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=2/airTemperature": None, + "/heightOfSensorAboveLocalGroundOrDeckOfMarinePlatform=1.5/airTemperature": None, + } + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/obs.bufr" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler, indent=2) + return fname + + +@pytest.fixture(scope="module") +def obsoul_cryoclim_cy43(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/cryoclim.obsoul" + with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ + 1 2 + 15 1 17 80.47041 24.15402 'CRYO ' 20200220 60000 -2.14748e+09 1 1111 0 + 92 999999.00 -2147483647.00 0.100 2048 + 15 1 17 80.45985 22.95117 'CRYO ' 20200220 60000 -2.14748e+09 1 1111 0 + 92 999999.00 -2147483647.00 0.100 2048 + 15 1 17 80.44224 23.19859 'CRYO ' 20200220 60000 -2.14748e+09 1 1111 0 + 92 999999.00 -2147483647.00 0.100 2048 + 15 1 17 80.21594 25.50849 'CRYO ' 20200220 60000 -2.14748e+09 1 1111 0 + 92 999999.00 -2147483647.00 0.000 2048 +""" + ) + return fname + + +@pytest.fixture() +def data_thredds_nc_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_thredds_nc.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_thredds_nc.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf meps_thredds { +dimensions: + time = UNLIMITED ; + height0 = 1 ; + height1 = 1 ; + height7 = 1 ; + hybrid = 2 ; + x = 2 ; + y = 3 ; +variables: + double time(time) ; + time:long_name = "time" ; + time:standard_name = "time" ; + time:units = "seconds since 1970-01-01 00:00:00 +00:00" ; + double forecast_reference_time ; + forecast_reference_time:units = "seconds since 1970-01-01 00:00:00 +00:00" ; + forecast_reference_time:standard_name = "forecast_reference_time" ; + double hybrid(hybrid) ; + int projection_lambert ; + projection_lambert:grid_mapping_name = "lambert_conformal_conic" ; + projection_lambert:standard_parallel = 63.3, 63.3 ; + projection_lambert:longitude_of_central_meridian = 15. ; + projection_lambert:latitude_of_projection_origin = 63.3 ; + projection_lambert:earth_radius = 6371000. 
; + projection_lambert:proj4 = "+proj=lcc +lat_0=63.3 +lon_0=15 +lat_1=63.3 +lat_2=63.3 +no_defs +R=6.371e+06" ; + float x(x) ; + float y(y) ; + double longitude(y, x) ; + double latitude(y, x) ; + float air_temperature_2m(time, height1, y, x) ; + air_temperature_2m:_FillValue = 9.96921e+36f ; + air_temperature_2m:long_name = "Screen level temperature (T2M)" ; + air_temperature_2m:standard_name = "air_temperature" ; + air_temperature_2m:units = "K" ; + air_temperature_2m:grid_mapping = "projection_lambert" ; + air_temperature_2m:coordinates = "longitude latitude" ; + float x_wind_ml(time, hybrid, y, x) ; + float y_wind_ml(time, hybrid, y, x) ; + float x_wind_10m(time, height7, y, x) ; + float y_wind_10m(time, height7, y, x) ; + float integral_of_surface_downwelling_shortwave_flux_in_air_wrt_time(time, height0, y, x) ; + float integral_of_surface_downwelling_longwave_flux_in_air_wrt_time(time, height0, y, x) ; + float snowfall_amount_acc(time, height0, y, x) ; + float precipitation_amount_acc(time, height0, y, x) ; + float surface_air_pressure(time, height0, y, x) ; + float air_temperature_ml(time, hybrid, y, x); + float specific_humidity_ml(time, hybrid, y, x) ; + float relative_humidity_2m(time, height1, y, x) ; + float surface_geopotential(time, y, x); + float land_area_fraction(y, x) ; + float liquid_water_content_of_surface_snow(time, height0, y, x); + float height0(height0) ; + float height1(height1) ; + float height7(height7) ; + +data: + +forecast_reference_time = 1582178400; + +time = 1582178400, 1582182000; + +height0 = 0; +height1 = 2; +height7 = 10; + +x = 1, 2; + +y = 1, 2, 3; + +hybrid = 0.995552182197571, 0.998519629240036 ; + +longitude = 10.0, 10.1, 10.2, 10.3, 10.4, 10.5, 10.6; + +latitude = 60.0, 60.1, 60.2, 60.3, 60.4, 60.5, 60.6; + +land_area_fraction = 0, 0, 1, 1, 1, 0; + +surface_geopotential = +100, 200, 300, 400, 500, 600, +200, 300, 400, 500, 600, 700; + +air_temperature_ml = +271.0, 272.0, 273.0, 274.0, 275.0, 276.0, +271.0, 272.0, 273.0, 274.0, 275.0, 276.0, +271.0, 272.0, 273.0, 274.0, 275.0, 276.0, +271.0, 272.0, 273.0, 274.0, 275.0, 276.0; + +air_temperature_2m = +271.0, 272.0, 273.0, 274.0, 275.0, 276.0, +272.0, 273.0, 274.0, 275.0, 276.0, 277.0; + +relative_humidity_2m = +0.1, 0.2, 0.3, 0.4, 0.5, 1.0, +0.2, 0.3, 0.4, 0.5, 0.6, 1.0; + +liquid_water_content_of_surface_snow = +200, 0, 230, 20, 0, 1000, +300, 0, 330, 30, 0, 3000; +} +""" + ) + Dataset(fname, mode="w").fromcdl( + cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" + ) + return fname + + +@pytest.fixture() +def firstguess4gridpp(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/FirstGuess4gridpp.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/FirstGuess4gridpp.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf FirstGuess4gridpp { +dimensions: + y = 3 ; + x = 2 ; + time = 1 ; +variables: + double time(time) ; + time:long_name = "time" ; + time:standard_name = "time" ; + time:units = "seconds since 1970-01-01 00:00:00 +00:00" ; + double longitude(y, x) ; + longitude:units = "degree_east" ; + longitude:long_name = "longitude" ; + longitude:standard_name = "longitude" ; + double latitude(y, x) ; + latitude:units = "degree_north" ; + latitude:long_name = "latitude" ; + latitude:standard_name = "latitude" ; + float x(x) ; + x:long_name = "x-coordinate in Cartesian system" ; + x:standard_name = "projection_x_coordinate" ; + x:units = "m" ; + float y(y) ; + y:long_name = "y-coordinate in Cartesian system" ; 
+ y:standard_name = "projection_y_coordinate" ; + y:units = "m" ; + float air_temperature_2m(y, x) ; + air_temperature_2m:_FillValue = 9.96921e+36f ; + air_temperature_2m:long_name = "Screen level temperature (T2M)" ; + air_temperature_2m:standard_name = "air_temperature" ; + air_temperature_2m:units = "K" ; + float relative_humidity_2m(y, x) ; + relative_humidity_2m:_FillValue = 9.96921e+36f ; + relative_humidity_2m:long_name = "Screen level relative humidity (RH2M)" ; + relative_humidity_2m:standard_name = "relative_humidity" ; + relative_humidity_2m:units = "1" ; + float surface_snow_thickness(y, x) ; + surface_snow_thickness:_FillValue = 9.96921e+36f ; + surface_snow_thickness:long_name = "Surface snow thickness" ; + surface_snow_thickness:standard_name = "surface_snow_thickness" ; + surface_snow_thickness:units = "m" ; + float altitude(y, x) ; + altitude:_FillValue = 9.96921e+36f ; + altitude:long_name = "Altitude" ; + altitude:standard_name = "altitude" ; + altitude:units = "m" ; + float land_area_fraction(y, x) ; + land_area_fraction:_FillValue = 9.96921e+36f ; + land_area_fraction:long_name = "Land Area Fraction" ; + land_area_fraction:standard_name = "land_area_fraction" ; + land_area_fraction:units = "1" ; + +// global attributes: + :projection = "lambert" ; + :dlon = 10000. ; + :dlat = 10000. ; + :projlat = 59.5 ; + :projlat2 = 59.5 ; + :projlon = 9LL ; + :lonc = 10LL ; + :latc = 59.5 ; + +data: + +time = 1582178400; + +x = 1, 2; + +y = 1, 2, 3; + +longitude = 10.0, 10.1, 10.2, 10.3, 10.4, 10.5, 10.6; + +longitude = 60.0, 60.1, 60.2, 60.3, 60.4, 60.5, 60.6; + +air_temperature_2m = + 271, 272, 273, 274, 275, 276; + +relative_humidity_2m = +0.1, 0.2, 0.3, 0.4, 0.8, 1.0; + +surface_snow_thickness = + 0.2, 0, 1.4, 0.2, 0, 0.4; + +altitude = + 228.3734, 106.4936, 62.45805, 161.9377, 97.11469, 68.86867; + +land_area_fraction = + 1, 1, 0, 1, 0.1664643, 0.1266151; +} +""" + ) + if not os.path.exists(fname): + Dataset(fname, mode="w").fromcdl(cdlfname, ncfilename=fname, mode="a") + return fname + + +class DummyFrostRequest: + def __init__(self): + """Construct dummy Frost request.""" + self.status_code = 200 + + @staticmethod + def json(): + data = { + "data": [ + { + "id": "id", + "masl": 10, + "wmoId": None, + "geometry": {"coordinates": [10, 60]}, + "stationHolders": "", + "referenceTime": "2020X02X20X00X00X00", + "sourceId": "id", + "observations": [ + { + "unit": "K", + "level": None, + "value": 273.15, + } + ], + } + ] + } + return data + + +class DummyFaPos: + def __init__(self, value): + """Construct dummy FA position.""" + self.value = value + + def get(self, mode): + return self.value + + +class DummyFAGeometry: + def __init__(self, geometry): + """Construct dummy FA geometry.""" + self.name = geometry["name"] + self.dimensions = geometry["dimensions"] + self.projection = { + "reference_lon": DummyFaPos(geometry["projection"]["reference_lon"]), + "reference_lat": DummyFaPos(geometry["projection"]["reference_lat"]), + } + self.grid = geometry["grid"] + self.center = geometry["center"] + + def getcenter(self): + return DummyFaPos(self.center["lon"]), DummyFaPos(self.center["lat"]) + + @staticmethod + def gimme_corners_ij(subzone=None): + return {"ll": [0, 0], "lr": [8, 0], "ur": [8, 18]} + + +class DummyFAField: + def __init__(self): + """Construct dummy FA field.""" + geometry = { + "name": "lambert", + "dimensions": { + "Y_CIzone": 3, + "X_CIzone": 2, + "X": 2, + "Y": 3, + }, + "projection": {"reference_lon": 10.0, "reference_lat": 60.0}, + "center": {"lon": 10.0, "lat": 60.0}, 
+ "grid": {"X_resolution": 10000, "Y_resolution": 10000}, + "corners": {"ll": {10.0, 60.0}}, + } + self.geometry = DummyFAGeometry(geometry) + self.data = np.zeros_like([np.arange(2 * 3)]) + + +class MyFaResource: + def __init__(self, name, openmode=None): + """Construct dummy FA resource.""" + self.name = name + + def readfield(self, name): + print("Read FA field ", name) + return DummyFAField() + + +@pytest.fixture(scope="module") +def _mockers(session_mocker): + """Define mockers used in the tests for the tasks' `run` methods.""" + + def dummy_frost_data(*args, **kwargs): + print("Frost request ", args, kwargs) + return DummyFrostRequest() + + def my_codes_grib_new_from_file(file_handler): + print(file_handler) + gid = json.load(file_handler) + print(gid) + return gid + + def my_codes_bufr_new_from_file(file_handler): + try: + gid = json.load(file_handler) + file_handler.close() + except ValueError: + gid = None + return gid + + def my_codes_get(gid, key): + print("codes_get", key) + av_keys = ["average", "min", "max"] + if key in av_keys: + return -1 + else: + return gid[key] + + def my_codes_get_size(gid, key): + print("codes_get_size", key) + try: + nx = gid["Ni"] + ny = gid["Nj"] + except KeyError: + nx = gid["Nx"] + ny = gid["Ny"] + return nx * ny + + def my_codes_get_values(gid): + try: + nx = gid["Ni"] + ny = gid["Nj"] + except KeyError: + nx = gid["Nx"] + ny = gid["Ny"] + return np.zeros_like([np.arange(nx * ny)]) + + # Do the actual mocking + session_mocker.patch("surfex.obs.requests.get", new=dummy_frost_data) + session_mocker.patch( + "surfex.grib.eccodes.codes_grib_new_from_file", new=my_codes_grib_new_from_file + ) + session_mocker.patch("surfex.grib.eccodes.codes_get", new=my_codes_get) + session_mocker.patch("surfex.grib.eccodes.codes_get_long", new=my_codes_get) + session_mocker.patch("surfex.grib.eccodes.codes_get_size", new=my_codes_get_size) + session_mocker.patch("surfex.grib.eccodes.codes_get_values", new=my_codes_get_values) + session_mocker.patch("surfex.grib.eccodes.codes_release") + session_mocker.patch( + "surfex.bufr.eccodes.codes_bufr_new_from_file", new=my_codes_bufr_new_from_file + ) + session_mocker.patch("surfex.bufr.eccodes.codes_set") + session_mocker.patch("surfex.fa.resource", new=MyFaResource) diff --git a/tests/smoke/test_cli_fg_titan_oi_obsmon.py b/tests/smoke/test_cli_fg_titan_oi_obsmon.py new file mode 100644 index 0000000..55dd13b --- /dev/null +++ b/tests/smoke/test_cli_fg_titan_oi_obsmon.py @@ -0,0 +1,623 @@ +"""Test fg + titan + gridpp + obsmon.""" +import json +import os +import shutil + +import numpy as np +import pytest + +from surfex.cli import ( + cli_merge_qc_data, + cli_oi2soda, + first_guess_for_oi, + gridpp, + qc2obsmon, + titan, +) + +an_time = "2020022006" + + +def hm_env(): + env = { + "NPATCH": "2", + "ARCHIVE_ROOT_FG": "/firstguess", + "LSPBDC": "no", + "COMPILE_DABYFA": "no", + "ANASURF": "CANARI_OI_MAIN", + "TSPAN": "5400", + "BDDIR": "/archive/ECMWF/@YYYY@/@MM@/@DD@/@HH@", + "MAIL_ON_ABORT": "", + "COMPILE_ENKF": "no", + "ARCH": "linuxgfortran", + "FESTAT": "no", + "SLAFDIFF": "0", + "NLEV": "60", + "LETKF_LAG": "no", + "NNCV": "1,1,1,1", + "XCH_COEFF1": "15.0", + "NNCO": "1,1,0,0,1", + "STREAM": "", + "HH_LIST": "00-21:3", + "SPPT": "no", + "ARCHIVE_ROOT": "/archive", + "LSMIXBC": "yes", + "ARSTRATEGY": "climate:fg:verif:odb_stuff: [an|fc]_fa:pp_gr:fldver", + "TAU_SPP": "21600.", + "DISPLAY": ":1", + "XCLIP_RATIO_SDT": "5.0", + "ECFSGROUP": "hirald", + "EPERT_MODE": "after", + "ENS_BD_CLUSTER": "no", + "OBDIR": 
"/archive/observations/@YYYY@/@MM@/@DD@/@HH@", + "FCINT_FG": "03", + "SFXSELTIMES": "0-540:15", + "AD_TEST": "yes", + "TSTEP4D": "120,120", + "VLEV": "65", + "PERTSURF": "none", + "SCALE_PERT": "yes", + "OBSMONITOR": "obstat", + "LOCAL_DEFINITION_TEMPLATES": "LOCAL_DEFINITION_TEMPLATES", + "LUNBC": "yes", + "PERTDIA_BDINT": "6", + "GRID_TYPE": "CUSTOM", + "NOUTERLOOP": "2", + "XZ0SN": "0.003", + "MSG_PATH": "/CLOUDS/", + "INT_SINI_FILE": "//SURFXINI.fa", + "ALARO_VERSION": "0", + "CNMEXP": "HARM", + "ANAATMO": "3DVAR", + "SWRITUPTIMES": "0-540:60", + "CSNOW": "D95", + "SURFEX_OFFLINE_BINARIES": "no", + "JB_REF_DOMAIN": "METCOOP25B", + "FREQ_RESET_TEMP": "3", + "TOPO_SOURCE": "gmted2010", + "TAU_SDT": "28800", + "CLIMDIR": "/climate/METCOOP25D", + "EXP": "HM2PYSURFEX", + "IO_SERVER": "yes", + "ARCHIVE_ENSMBR": "", + "BUFRTAB_DIR": "", + "RSMIN_CONIFEROUS_FACTOR": "1.44", + "BDSTRATEGY": "simulate_metcoop", + "LSPG_SDT": ".FALSE.", + "DFI": "none", + "ENSINIPERT": "bnd", + "EXTRARCH": "/archive/extract", + "SFXWFTIMES": "0-540:60", + "VERT_DISC": "vfd", + "XCD_COEFF1": "10.0", + "XCD_COEFF2": "5.0", + "XGRASS_H_DNM": "3.0", + "LDB_VERSION": "3.0", + "DESKTOP_SESSION": "ubuntu", + "PWRITUPTIMES": "0-09:15", + "USE_MSG": "yes", + "DOMAIN": "METCOOP25S", + "CREATE_CLIMATE": "yes", + "HGT_QS": "yes", + "FP_PRECISION": "double", + "TAUS": "5400", + "MODIFY_LAKES": "T", + "LGRADSP": "no", + "ECFSLOC": "ec", + "RSMIN_C3_FACTOR": "1.5", + "CH_RES_SPEC": "yes", + "ILRES": "2,2", + "XRIMAX": "0.0", + "SPP": "no", + "MAKEODB2": "no", + "GRIB_API": "GRIB_API", + "INT_BDFILE": "//ELSCFHARMALBC@NNN@", + "BDINT": "1", + "RSMIN_DECIDUOUS_FACTOR": "1.13", + "BINDIR": "/bin", + "CV_HIGHVEG": "1.0E-5", + "USE_REARCHIVE_EPS_EC_OPER": "no", + "XDG_SESSION_TYPE": "x11", + "RSMIN_C4_FACTOR": "1.13", + "CROUGH": "NONE", + "SLAFLAG": "0", + "MULTITASK": "yes", + "SPGADTMIN_SDT": "0.15", + "MASS_FLUX_SCHEME": "edmfm", + "HOST_SURFEX": "no", + "FULLFAFTIMES": "0-540:15", + "TFLAG_FG": "h", + "CISBA": "3-L", + "PERTATMO": "none", + "XCGMAX": "2.0E-5", + "COMPCENTRE": "MET", + "SURFEX_LAKES": "FLAKE", + "FLDEXTR": "yes", + "REARCHIVE_EPS_EC_OPER": "no", + "NBDMAX": "4", + "SST_SOURCES": "IFS NEMO", + "CAERO": "tegen", + "CFORCING_FILETYPE": "NETCDF", + "GRIB_API_LIB": "GRIB_API_LIB", + "FLDEXTR_TASKS": "4", + "BUILD": "yes", + "ECOCLIMAP_VERSION": "SG", + "SURFEX_SEA_ICE": "sice", + "XLCOR_SPP": "1000000.", + "ENSCTL": "", + "PFFULLWFTIMES": "-1", + "LL_LIST": "03", + "HOST_MODEL": "ifs", + "XSCALE_H_TREE": "0.658", + "XCSMAX": "2.0E-4", + "SPGADTMIN_SPP": "0.15", + "AUXLIBS": "AUXLIBS", + "TEND_DIAG": "no", + "ODB_VERSION": "CY33R1.007", + "WRK": "/", + "EREF": "35000.", + "OBSEXTR": "yes", + "HWRITUPTIMES": "0-540:15", + "CONT_ON_FAILURE": "0", + "TSTEPTRAJ": "600", + "XZ0HSN": "0.0003", + "SFXSWFTIMES": "-1", + "BDLIB": "ECMWF", + "QT_IM_MODULE": "ibus", + "RUNNING_MODE": "research", + "SIMULATION_TYPE": "nwp", + "CONVERTFA": "yes", + "XLCOR_SDT": "2000000", + "TL_TEST": "yes", + "RCR_POSTP": "no", + "FLDVER": "no", + "ARCHIVE_ECMWF": "yes", + "JB_INTERPOL": "yes", + "SDEV_SDT": "0.20", + "TESTBED_CASES": "1", + "LISBA_CANOPY": ".FALSE.", + "SLAFK": "1.0", + "CV_LOWVEG": "2.0E-5", + "SOIL_TEXTURE_VERSION": "SOILGRID", + "MEPS_VERSION": "test", + "Q_IN_SP": "no", + "SHLVL": "1", + "MAKEGRIB_LISTENERS": "1", + "GRIB_API_INCLUDE": "GRIB_API_INCLUDE", + "ECFS_EPS_EC_BD_PATH": "ECFS_EPS_EC_BD_PATH", + "ODB_DIR": "/disk1/odb", + "SURFEX_LSELECT": "yes", + "PHYSICS": "arome", + "ARCHIVE_FORMAT": "GRIB2", + "VERITIMES": 
"00-540:60", + "HARATU": "yes", + "BDCLIM": "/ECMWF/climate", + "SINGLEOBS": "no", + "IO_SERVER_BD": "yes", + "SPGADTMAX_SDT": "3.0", + "FORCE2": "no", + "FORCE1": "no", + "CHKEVO": "no", + "SDEV_SPP": "0.2", + "XALLEN_TERM": "2.5", + "DYNAMICS": "nh", + "ECMWF_LOCAL_TABLE_PATH": "ECMWF_LOCAL_TABLE_PATH", + "TESTBED_LIST": "TESTBED_LIST", + "OBSMON_SYNC": "no", + "LETKF_3DSCREEN": "yes", + "TFLAG": "min", + "FLDVER_HOURS": "06 12 18 24 30 36 42 48 54 60 66", + "ANASURF_MODE": "before", + "AUGMENT_CV": "NO", + "ENSMSEL": "", + "HYBRID": "no", + "FREQ_RESET_GUST": "1", + "NCNAMES": "nwp", + "STATNW": "yes", + "ENSBDMBR": "", + "POSTP": "inline", + "ECOCLIMAP_PARAM_BINDIR": "/climate", + "SPGADTMAX_SPP": "3.0", + "SPGQ_SDT": "0.5", + "ANASURF_OI_COEFF": "POLYNOMES_ISBA_MF6", + "VFLDEXP": "HM2PYSURFEX", + "AI_ACCUMULATION_HOURS": "720", + "TASK_LIMIT": "-1", + "EZONE": "1", + "GSIZE": "10000.0", + "LAT0": "59.5", + "LATC": "60.0", + "LON0": "9.0", + "LONC": "10.0", + "NDGUXG": "19", + "NDLUXG": "9", + "NLAT": "20", + "NLON": "10", + "NMSMAX": "-1", + "NNOEXTZX": "0", + "NNOEXTZY": "0", + "NSMAX": "-1", + "SINLAT0": "-1", + "TSTEP": "90", + "FCINT": "3", + "TEFRCL": "3600", + "NXGSTPERIOD": "3600", + "MODEL": "MASTERODB", + "ENSMSELX": "-1", + "ENSMBR": "-1", + "LL": "03", + "METER_UNIT": "hhmm", + } + for key, value in env.items(): + os.environ[key] = value + + +def create_titan_settings(qc_fname, first_guess_file, blacklist_fname, json_obs_file): + qc_settings = { + "t2m": { + "do_test": True, + "plausibility": {"minval": 200, "maxval": 350}, + "firstguess": { + "fg_file": first_guess_file, + "fg_var": "air_temperature_2m", + "negdiff": 2, + "posdiff": 3, + "do_test": False, + }, + # Not working yet + "buddy": {"do_test": False}, + "climatology": {"do_test": False, "minval": 270, "maxval": 275}, + "sct": {}, + "redundancy": {}, + "blacklist": {}, + "domain": {}, + "nometa": {}, + "fraction": { + "fraction_file": first_guess_file, + "fraction_var": "land_area_fraction", + }, + "sets": { + "label": { + "filepattern": json_obs_file, + "filetype": "json", + "varname": "airTemperatureAt2M", + "tests": {"firstguess": {"do_test": True}}, + } + }, + }, + "rh2m": { + "do_test": True, + "plausibility": {"minval": 0, "maxval": 1}, + "firstguess": { + "fg_file": first_guess_file, + "fg_var": "relative_humidity_2m", + "negdiff": 0.2, + "posdiff": 0.2, + "do_test": False, + }, + # Not working yet + "buddy": {"do_test": False}, + "climatology": {"do_test": False, "minval": 0, "maxval": 1}, + "sct": {}, + "redundancy": {}, + "blacklist": {}, + "domain": {}, + "nometa": {}, + "fraction": { + "fraction_file": first_guess_file, + "fraction_var": "land_area_fraction", + }, + "sets": { + "label": { + "filepattern": json_obs_file, + "filetype": "json", + "varname": "relativeHumidityAt2M", + "tests": {"firstguess": {"do_test": True}}, + } + }, + }, + "sd": { + "do_test": True, + "plausibility": {"minval": 0, "maxval": 50}, + "firstguess": { + "fg_file": first_guess_file, + "fg_var": "surface_snow_thickness", + "negdiff": 0.4, + "posdiff": 0.4, + "do_test": True, + }, + # Not working yet + "buddy": {"do_test": False}, + "climatology": {"do_test": False, "minval": 0, "maxval": 1}, + "sct": {}, + "redundancy": {}, + "blacklist": {}, + "domain": {}, + "nometa": {}, + "fraction": { + "fraction_file": first_guess_file, + "fraction_var": "land_area_fraction", + }, + "sets": { + "label": { + "filepattern": json_obs_file, + "filetype": "json", + "varname": "totalSnowDepth", + "tests": {"firstguess": {"do_test": True}}, + } 
+ }, + }, + } + with open(qc_fname, mode="w", encoding="utf-8") as file_handler: + json.dump(qc_settings, file_handler) + with open(blacklist_fname, mode="w", encoding="utf-8") as file_handler: + json.dump({}, file_handler) + + +def create_obs_data(var, obs_fname): + if var == "t2m": + name = "airTemperatureAt2M" + val = 273 + elif var == "rh2m": + name = "relativeHumidityAt2M" + val = 85 + elif var == "sd": + name = "totalSnowDepth" + val = 0.25 + else: + raise NotImplementedError + qc_data = { + "0": { + "varname": name, + "obstime": "202002200600", + "lon": 6.9933000000000005, + "lat": 62.191, + "stid": "1111", + "elev": 900.0, + "value": val, + "flag": 0.0, + "ci": 1.0, + "laf": 1.0, + "provider": "bufr", + "fg_dep": np.nan, + "an_dep": np.nan, + "passed_tests": [ + "domain", + "blacklist", + "nometa", + "plausibility", + "redundancy", + "firstguess", + "fraction", + "sct", + ], + }, + "1": { + "varname": name, + "obstime": "202002200600", + "lon": 7.8173, + "lat": 59.767500000000005, + "stid": "NA", + "elev": 1340.0, + "value": val, + "flag": 199.0, + "ci": 1.0, + "laf": 1.0, + "provider": "bufr", + "fg_dep": np.nan, + "an_dep": np.nan, + "passed_tests": [], + }, + } + + json.dump(qc_data, open(obs_fname, mode="w", encoding="utf-8")) + + +@pytest.fixture(params=["t2m", "rh2m", "sd"]) +def _qc_gridpp_obsmon( + tmp_path_factory, request, conf_proj_domain_file, firstguess4gridpp, hm +): + harmonie = [] + if hm == "harmonie": + hm_env() + harmonie = ["--harmonie"] + var = request.param + translation = { + "t2m": { + "elevGradient": "-0.0065", + "nc_name": "air_temperature_2m", + "hor": "30000.0", + "vert": "300.0", + }, + "rh2m": { + "elevGradient": "0.0", + "nc_name": "relative_humidity_2m", + "hor": "40000.0", + "vert": "400.0", + }, + "sd": { + "elevGradient": "0.0", + "nc_name": "surface_snow_thickness", + "hor": "60000.0", + "vert": "500.0", + }, + } + + first_guess_file = firstguess4gridpp + # Create observations + obs_fname = f"{tmp_path_factory.getbasetemp().as_posix()}/obs_{var}.json" + create_obs_data(var, obs_fname) + + # Titan + qc_settings_fname = ( + f"{tmp_path_factory.getbasetemp().as_posix()}/qc_settings_{var}.json" + ) + qc_fname = f"{tmp_path_factory.getbasetemp().as_posix()}/qc _{var}.json" + blacklist_fname = f"{tmp_path_factory.getbasetemp().as_posix()}/blacklist_{var}.json" + create_titan_settings(qc_settings_fname, first_guess_file, blacklist_fname, obs_fname) + + with pytest.raises(SystemExit): + titan(argv=["fail"]) + + argv = [ + "-i", + qc_settings_fname, + "-v", + var, + "-dtg", + an_time, + "--blacklist", + blacklist_fname, + "--domain", + conf_proj_domain_file, + "-o", + qc_fname, + "--debug", + "domain", + "blacklist", + "nometa", + "plausibility", + "redundancy", + "firstguess", + "fraction", + "buddy", + "climatology", + "sct", + ] + argv += harmonie + titan(argv=argv) + + shutil.copy(qc_fname, f"{qc_fname}-1") + shutil.copy(qc_fname, f"{qc_fname}-2") + argv = [ + "-t", + an_time, + "-i", + f"{qc_fname}-1", + f"{qc_fname}-2", + "-o", + f"{qc_fname}-merged", + ] + cli_merge_qc_data(argv=argv) + + # gridpp + with pytest.raises(SystemExit): + gridpp(argv=["fail"]) + + analysis_file = f"{tmp_path_factory.getbasetemp().as_posix()}/an_{var}.nc" + argv = [ + "-i", + first_guess_file, + "-o", + analysis_file, + "-obs", + qc_fname, + "-hor", + translation[var]["hor"], + "-vert", + translation[var]["vert"], + "-v", + translation[var]["nc_name"], + "--elevGradient", + translation[var]["elevGradient"], + ] + gridpp(argv=argv) + + output = 
f"{tmp_path_factory.getbasetemp().as_posix()}/OBSERVATIONS_200330H06.DAT" + with pytest.raises(SystemExit): + cli_oi2soda(argv=["fail"]) + + argv = [ + "--t2m_file", + first_guess_file, + "--t2m_var", + "air_temperature_2m", + "--rh2m_file", + first_guess_file, + "--rh2m_var", + "relative_humidity_2m", + "--sd_file", + first_guess_file, + "--sd_var", + "surface_snow_thickness", + "--debug", + "-o", + output, + "2020033006", + ] + cli_oi2soda(argv=argv) + + # Obsmon + db_file = f"{tmp_path_factory.getbasetemp().as_posix()}/ecma.db" + obsmon_test(var, qc_fname, first_guess_file, analysis_file, db_file) + + +def obsmon_test(var, qc_fname, first_guess_file, analysis_file, db_file): + + translation = { + "t2m": "air_temperature_2m", + "rh2m": "relative_humidity_2m", + "sd": "surface_snow_thickness", + } + nc_name = translation[var] + + with pytest.raises(SystemExit): + qc2obsmon(argv=["fail"]) + + argv = [ + an_time, + var, + qc_fname, + "--fg_file", + first_guess_file, + "--an_file", + analysis_file, + "--file_var", + nc_name, + "-o", + db_file, + ] + qc2obsmon(argv=argv) + + +@pytest.mark.usefixtures("_qc_gridpp_obsmon") +@pytest.mark.parametrize("hm", ["no-harmonie", "harmonie"]) +def test_qc_gridpp_obsmon(): + _qc_gridpp_obsmon + + +@pytest.mark.parametrize("hm", ["no-harmonie", "harmonie"]) +def test_first_guess(tmp_path_factory, conf_proj_2x3_file, data_thredds_nc_file, hm): + output = f"{tmp_path_factory.getbasetemp().as_posix()}/FirstGuess4gridpp_output.nc" + harmonie = [] + if hm == "harmonie": + hm_env() + harmonie = ["--harmonie"] + + with pytest.raises(SystemExit): + first_guess_for_oi(argv=["fail"]) + + argv = [ + "-c", + "surfex/cfg/first_guess.yml", + "-i", + data_thredds_nc_file, + "-if", + "netcdf", + "-dtg", + an_time, + "-d", + conf_proj_2x3_file, + "--laf_converter", + "none", + "--sd_converter", + "sweclim", + "--debug", + "-o", + output, + "air_temperature_2m", + "relative_humidity_2m", + "surface_snow_thickness", + ] + argv += harmonie + first_guess_for_oi(argv=argv) diff --git a/tests/smoke/test_cli_forcing.py b/tests/smoke/test_cli_forcing.py new file mode 100644 index 0000000..446f9bd --- /dev/null +++ b/tests/smoke/test_cli_forcing.py @@ -0,0 +1,63 @@ +"""Test forcing.""" +import contextlib +import os +import shutil +from pathlib import Path + +import pytest + +from surfex.cli import cli_modify_forcing, create_forcing + + +@contextlib.contextmanager +def working_directory(path): + """Change working directory and returns to previous on exit.""" + prev_cwd = Path.cwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) + + +@pytest.mark.usefixtures("_mockers") +def test_forcing_nc(conf_proj_domain_file, tmp_path_factory, data_thredds_nc_file): + """Test forcing from netcdf files.""" + pattern = data_thredds_nc_file + output = f"{tmp_path_factory.getbasetemp().as_posix()}/FORCING_nc.nc" + argv = [ + "2020022006", + "2020022007", + "-d", + conf_proj_domain_file, + "-p", + pattern, + "-i", + "netcdf", + "--zref", + "ml", + "--uref", + "ml", + "--co2", + "constant", + "--sca_sw", + "constant", + "--zval", + "constant", + "--zsoro_converter", + "phi2m", + "--zval", + "constant", + "--uval", + "constant", + "-of", + output, + "--debug", + ] + create_forcing(argv=argv) + + input_file = output + output_file = input_file + ".modified" + shutil.copy(input_file, output_file) + argv = ["-i", input_file, "-o", output_file, "DIR_SWdown"] + cli_modify_forcing(argv=argv) diff --git a/tests/smoke/test_cli_hm2pysurfex.py b/tests/smoke/test_cli_hm2pysurfex.py new file mode 
100644 index 0000000..85e5a59 --- /dev/null +++ b/tests/smoke/test_cli_hm2pysurfex.py @@ -0,0 +1,250 @@ +"""Harmonie environment to pysurfex.""" +import json + +import pytest + +from surfex.cli import hm2pysurfex + + +@pytest.fixture() +def hm_env(): + hm_env = { + "NPATCH": "2", + "ARCHIVE_ROOT_FG": "/firstguess", + "LSPBDC": "no", + "COMPILE_DABYFA": "no", + "ANASURF": "CANARI_OI_MAIN", + "TSPAN": "5400", + "BDDIR": "/archive/ECMWF/@YYYY@/@MM@/@DD@/@HH@", + "MAIL_ON_ABORT": "", + "COMPILE_ENKF": "no", + "ARCH": "linuxgfortran", + "FESTAT": "no", + "SLAFDIFF": "0", + "NLEV": "60", + "LETKF_LAG": "no", + "NNCV": "1,1,1,1", + "XCH_COEFF1": "15.0", + "NNCO": "1,1,0,0,1", + "STREAM": "", + "HH_LIST": "00-21:3", + "SPPT": "no", + "ARCHIVE_ROOT": "/archive", + "LSMIXBC": "yes", + "ARSTRATEGY": "climate:fg:verif:odb_stuff: [an|fc]_fa:pp_gr:fldver", + "TAU_SPP": "21600.", + "DISPLAY": ":1", + "XCLIP_RATIO_SDT": "5.0", + "ECFSGROUP": "hirald", + "EPERT_MODE": "after", + "ENS_BD_CLUSTER": "no", + "OBDIR": "/archive/observations/@YYYY@/@MM@/@DD@/@HH@", + "FCINT_FG": "03", + "SFXSELTIMES": "0-540:15", + "AD_TEST": "yes", + "TSTEP4D": "120,120", + "VLEV": "65", + "PERTSURF": "none", + "SCALE_PERT": "yes", + "OBSMONITOR": "obstat", + "LOCAL_DEFINITION_TEMPLATES": "LOCAL_DEFINITION_TEMPLATES", + "LUNBC": "yes", + "PERTDIA_BDINT": "6", + "GRID_TYPE": "CUSTOM", + "NOUTERLOOP": "2", + "XZ0SN": "0.003", + "MSG_PATH": "/CLOUDS/", + "INT_SINI_FILE": "//SURFXINI.fa", + "ALARO_VERSION": "0", + "CNMEXP": "HARM", + "ANAATMO": "3DVAR", + "SWRITUPTIMES": "0-540:60", + "CSNOW": "D95", + "SURFEX_OFFLINE_BINARIES": "no", + "JB_REF_DOMAIN": "METCOOP25B", + "FREQ_RESET_TEMP": "3", + "TOPO_SOURCE": "gmted2010", + "TAU_SDT": "28800", + "CLIMDIR": "/climate/METCOOP25D", + "EXP": "HM2PYSURFEX", + "IO_SERVER": "yes", + "ARCHIVE_ENSMBR": "", + "BUFRTAB_DIR": "", + "RSMIN_CONIFEROUS_FACTOR": "1.44", + "BDSTRATEGY": "simulate_metcoop", + "LSPG_SDT": ".FALSE.", + "DFI": "none", + "ENSINIPERT": "bnd", + "EXTRARCH": "/archive/extract", + "SFXWFTIMES": "0-540:60", + "VERT_DISC": "vfd", + "XCD_COEFF1": "10.0", + "XCD_COEFF2": "5.0", + "XGRASS_H_DNM": "3.0", + "LDB_VERSION": "3.0", + "DESKTOP_SESSION": "ubuntu", + "PWRITUPTIMES": "0-09:15", + "USE_MSG": "yes", + "DOMAIN": "METCOOP25S", + "CREATE_CLIMATE": "yes", + "HGT_QS": "yes", + "FP_PRECISION": "double", + "TAUS": "5400", + "MODIFY_LAKES": "T", + "LGRADSP": "no", + "ECFSLOC": "ec", + "RSMIN_C3_FACTOR": "1.5", + "CH_RES_SPEC": "yes", + "ILRES": "2,2", + "XRIMAX": "0.0", + "SPP": "no", + "MAKEODB2": "no", + "GRIB_API": "GRIB_API", + "INT_BDFILE": "//ELSCFHARMALBC@NNN@", + "BDINT": "1", + "RSMIN_DECIDUOUS_FACTOR": "1.13", + "BINDIR": "/bin", + "CV_HIGHVEG": "1.0E-5", + "USE_REARCHIVE_EPS_EC_OPER": "no", + "XDG_SESSION_TYPE": "x11", + "RSMIN_C4_FACTOR": "1.13", + "CROUGH": "NONE", + "SLAFLAG": "0", + "MULTITASK": "yes", + "SPGADTMIN_SDT": "0.15", + "MASS_FLUX_SCHEME": "edmfm", + "HOST_SURFEX": "no", + "FULLFAFTIMES": "0-540:15", + "TFLAG_FG": "h", + "CISBA": "3-L", + "PERTATMO": "none", + "XCGMAX": "2.0E-5", + "COMPCENTRE": "MET", + "SURFEX_LAKES": "FLAKE", + "FLDEXTR": "yes", + "REARCHIVE_EPS_EC_OPER": "no", + "NBDMAX": "4", + "SST_SOURCES": "IFS NEMO", + "CAERO": "tegen", + "CFORCING_FILETYPE": "NETCDF", + "GRIB_API_LIB": "GRIB_API_LIB", + "FLDEXTR_TASKS": "4", + "BUILD": "yes", + "ECOCLIMAP_VERSION": "SG", + "SURFEX_SEA_ICE": "sice", + "XLCOR_SPP": "1000000.", + "ENSCTL": "", + "PFFULLWFTIMES": "-1", + "LL_LIST": "03", + "HOST_MODEL": "ifs", + "XSCALE_H_TREE": "0.658", + "XCSMAX": 
"2.0E-4", + "SPGADTMIN_SPP": "0.15", + "AUXLIBS": "AUXLIBS", + "TEND_DIAG": "no", + "ODB_VERSION": "CY33R1.007", + "WRK": "/", + "EREF": "35000.", + "OBSEXTR": "yes", + "HWRITUPTIMES": "0-540:15", + "CONT_ON_FAILURE": "0", + "TSTEPTRAJ": "600", + "XZ0HSN": "0.0003", + "SFXSWFTIMES": "-1", + "BDLIB": "ECMWF", + "QT_IM_MODULE": "ibus", + "RUNNING_MODE": "research", + "SIMULATION_TYPE": "nwp", + "CONVERTFA": "yes", + "XLCOR_SDT": "2000000", + "TL_TEST": "yes", + "RCR_POSTP": "no", + "FLDVER": "no", + "ARCHIVE_ECMWF": "yes", + "JB_INTERPOL": "yes", + "SDEV_SDT": "0.20", + "TESTBED_CASES": "1", + "LISBA_CANOPY": ".FALSE.", + "SLAFK": "1.0", + "CV_LOWVEG": "2.0E-5", + "SOIL_TEXTURE_VERSION": "SOILGRID", + "MEPS_VERSION": "test", + "Q_IN_SP": "no", + "SHLVL": "1", + "MAKEGRIB_LISTENERS": "1", + "GRIB_API_INCLUDE": "GRIB_API_INCLUDE", + "ECFS_EPS_EC_BD_PATH": "ECFS_EPS_EC_BD_PATH", + "ODB_DIR": "/disk1/odb", + "SURFEX_LSELECT": "yes", + "PHYSICS": "arome", + "ARCHIVE_FORMAT": "GRIB2", + "VERITIMES": "00-540:60", + "HARATU": "yes", + "BDCLIM": "/ECMWF/climate", + "SINGLEOBS": "no", + "IO_SERVER_BD": "yes", + "SPGADTMAX_SDT": "3.0", + "FORCE2": "no", + "FORCE1": "no", + "CHKEVO": "no", + "SDEV_SPP": "0.2", + "XALLEN_TERM": "2.5", + "DYNAMICS": "nh", + "ECMWF_LOCAL_TABLE_PATH": "ECMWF_LOCAL_TABLE_PATH", + "TESTBED_LIST": "TESTBED_LIST", + "OBSMON_SYNC": "no", + "LETKF_3DSCREEN": "yes", + "TFLAG": "min", + "FLDVER_HOURS": "06 12 18 24 30 36 42 48 54 60 66", + "ANASURF_MODE": "before", + "AUGMENT_CV": "NO", + "ENSMSEL": "", + "HYBRID": "no", + "FREQ_RESET_GUST": "1", + "NCNAMES": "nwp", + "STATNW": "yes", + "ENSBDMBR": "", + "POSTP": "inline", + "ECOCLIMAP_PARAM_BINDIR": "/climate", + "SPGADTMAX_SPP": "3.0", + "SPGQ_SDT": "0.5", + "ANASURF_OI_COEFF": "POLYNOMES_ISBA_MF6", + "VFLDEXP": "HM2PYSURFEX", + "AI_ACCUMULATION_HOURS": "720", + "TASK_LIMIT": "-1", + "EZONE": "1", + "GSIZE": "10000.0", + "LAT0": "59.5", + "LATC": "60.0", + "LON0": "9.0", + "LONC": "10.0", + "NDGUXG": "19", + "NDLUXG": "9", + "NLAT": "20", + "NLON": "10", + "NMSMAX": "-1", + "NNOEXTZX": "0", + "NNOEXTZY": "0", + "NSMAX": "-1", + "SINLAT0": "-1", + "TSTEP": "90", + "FCINT": "3", + "TEFRCL": "3600", + "NXGSTPERIOD": "3600", + "MODEL": "MASTERODB", + "ENSMSELX": "-1", + "ENSMBR": "-1", + "LL": "03", + "METER_UNIT": "hhmm", + } + return hm_env + + +def test_hm2pysurfex_client(tmp_path_factory, hm_env): + """Test harmonie to pysurfex client.""" + output = f"{tmp_path_factory.getbasetemp().as_posix()}/config_from_hm.json" + hm_env_file = f"{tmp_path_factory.getbasetemp().as_posix()}/hm_env.json" + with open(hm_env_file, mode="w", encoding="utf8") as hm_file: + json.dump(hm_env, hm_file) + argv = ["-c", "surfex/cfg/config_exp_surfex.toml", "-o", output, "-e", hm_env_file] + hm2pysurfex(argv=argv) diff --git a/tests/smoke/test_cli_misc.py b/tests/smoke/test_cli_misc.py new file mode 100644 index 0000000..025f27e --- /dev/null +++ b/tests/smoke/test_cli_misc.py @@ -0,0 +1,212 @@ +"""Test first guess for OI.""" +import json + +import pytest +from netCDF4 import Dataset + +from surfex.cli import ( + cli_set_geo_from_obs_set, + cli_set_geo_from_stationlist, + create_lsm_file_assim, + cryoclim_pseudoobs, + dump_environ, + sentinel_obs, +) +from surfex.geo import get_geo_object + + +def test_dump_environ(): + dump_environ(argv=[]) + + +def test_set_geo_from_stationlist(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/stationlist.json" + geo = f"{tmp_path_factory.getbasetemp().as_posix()}/geofromstationlist.json" + with 
open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump({"17280": {"lon": 10.578, "lat": 59.4352}}, fhandler) + argv = [fname, "-o", geo] + cli_set_geo_from_stationlist(argv=argv) + with open(geo, mode="r", encoding="utf-8") as fhandler: + get_geo_object(json.load(fhandler)) + + +def test_cli_set_geo_from_obs_set(obsset_fname, tmp_path_factory): + geo = f"{tmp_path_factory.getbasetemp().as_posix()}/geofromobssetjson" + argv = [ + "-it", + "json", + "-i", + obsset_fname, + "-t", + "20201113060000", + "-v", + "air_temperature", + "-o", + geo, + ] + cli_set_geo_from_obs_set(argv=argv) + with open(geo, mode="r", encoding="utf-8") as fhandler: + get_geo_object(json.load(fhandler)) + + +@pytest.fixture() +def data_cryoclim_nc_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/cryoclim_nc.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/cryoclim_nc.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf cryoclim { +dimensions: + time = 1 ; + xc = 2 ; + yc = 3 ; +variables: + int lambert_conformal_conic ; + lambert_conformal_conic:grid_mapping_name = "lambert_conformal_conic" ; + lambert_conformal_conic:standard_parallel = 63., 63. ; + lambert_conformal_conic:longitude_of_central_meridian = 15. ; + lambert_conformal_conic:latitude_of_projection_origin = 63. ; + lambert_conformal_conic:earth_radius = 6371000. ; + lambert_conformal_conic:proj4 = "+proj=lcc +lon_0=15 +lat_0=63 +lat_1=63 +lat_2=63 +R=6371000 +no_defs" ; + double time(time) ; + time:axis = "T" ; + time:long_name = "reference time of product" ; + time:standard_name = "time" ; + time:units = "seconds since 1978-01-01 00:00:00" ; + time:calendar = "standard" ; + time:bounds = "time_bnds" ; + double xc(xc) ; + xc:axis = "X" ; + xc:long_name = "x-coordinate in Cartesian system" ; + xc:standard_name = "projection_x_coordinate" ; + xc:units = "m" ; + double yc(yc) ; + yc:axis = "Y" ; + yc:long_name = "y-coordinate in Cartesian system" ; + yc:standard_name = "projection_y_coordinate" ; + yc:units = "m" ; + float lon(yc, xc) ; + lon:long_name = "longitude coordinate" ; + lon:standard_name = "longitude" ; + lon:units = "degrees_east" ; + float lat(yc, xc) ; + lat:long_name = "latitude coordinate" ; + lat:standard_name = "latitude" ; + lat:units = "degrees_north" ; + int classed_product(time, yc, xc) ; + classed_product:_FillValue = -99 ; + classed_product:least_significant_digit = 3 ; + classed_product:units = "1" ; + classed_product:long_name = "-1: ocean, 0: snow free, 1: snow, 3: clouded, 4: no data" ; + classed_product:coordinates = "lat lon" ; + classed_product:grid_mapping = "lambert_conformal_conic" ; + +data: + +time = _; + +lon = 10, 11; + +lat = 59, 60, 61; + +classed_product = 0, 1, 0, 3, 0, 4; +} +""" + ) + Dataset(fname, mode="w").fromcdl( + cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" + ) + return fname + + +def test_cryoclim_pseudoobs(tmp_path_factory, data_cryoclim_nc_file, firstguess4gridpp): + + out_fname = f"{tmp_path_factory.getbasetemp().as_posix()}/output_cryoclim.json" + argv = [ + "-step", + "4", + "-fg", + firstguess4gridpp, + "-i", + data_cryoclim_nc_file, + "-v", + "surface_snow_thickness", + "-o", + out_fname, + ] + cryoclim_pseudoobs(argv=argv) + + +def test_create_lsm_file_assim( + tmp_path_factory, conf_proj_domain_file, data_thredds_nc_file +): + output = f"{tmp_path_factory.getbasetemp().as_posix()}/output_lsm.DAT" + argv = [ + "--file", + data_thredds_nc_file, + "--fileformat", + "netcdf", + "--var", 
+ "land_area_fraction", + "--dtg", + "2020022006", + "--domain", + conf_proj_domain_file, + "-o", + output, + "--debug", + ] + create_lsm_file_assim(argv=argv) + + +@pytest.fixture() +def data_sentinel_nc_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/sentinel_nc.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/sentinel_nc.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf sentinel { +dimensions: + xc = 2 ; + yc = 3 ; +variables: + double xc(xc) ; + double yc(yc) ; + float LON(yc, xc) ; + float LAT(yc, xc) ; + float surface_soil_moisture(yc, xc) ; + +data: + +LON = 10, 11; + +LAT = 59, 60, 61; + +surface_soil_moisture = 0.01, 0.01, 0.01, 0.03, 0.001, 0.001; +} +""" + ) + Dataset(fname, mode="w").fromcdl( + cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" + ) + return fname + + +def test_sentinel(tmp_path_factory, data_sentinel_nc_file, firstguess4gridpp): + + out_fname = f"{tmp_path_factory.getbasetemp().as_posix()}/output_sentinel.json" + argv = [ + "-step", + "4", + "-fg", + firstguess4gridpp, + "-i", + data_sentinel_nc_file, + "-v", + "surface_snow_thickness", + "-o", + out_fname, + ] + sentinel_obs(argv=argv) diff --git a/tests/smoke/test_cli_namelist.py b/tests/smoke/test_cli_namelist.py new file mode 100644 index 0000000..079c0d9 --- /dev/null +++ b/tests/smoke/test_cli_namelist.py @@ -0,0 +1,67 @@ +"""Test create_namelist.""" +import json + +import pytest + +from surfex.cli import create_namelist + + +@pytest.fixture() +def get_system(tmp_path_factory): + system_file = f"{tmp_path_factory.getbasetemp().as_posix()}/system.json" + system = { + "climdir": "climdir", + "ecoclimap_bin_dir": "ecoclimap_bin_dir", + "assim_dir": "assim", + "first_guess_dir": "testdata/@YYYY@@MM@@DD@@HH@/", + } + with open(system_file, mode="w", encoding="utf-8") as file_handler: + json.dump(system, file_handler) + return system_file + + +@pytest.fixture() +def prep_file(tmp_path_factory): + prep_file = f"{tmp_path_factory.getbasetemp().as_posix()}/prep_input.json" + with open(prep_file, mode="w", encoding="utf-8") as file_handler: + json.dump({}, file_handler) + return prep_file + + +@pytest.mark.parametrize("mode", ["pgd", "prep", "offline", "soda"]) +def test_create_namelist( + tmp_path_factory, + mode, + config_exp_surfex_toml, + get_nam_path, + get_system, + conf_proj_2x3_file, + prep_file, +): + output = f"{tmp_path_factory.getbasetemp().as_posix()}/namelist_{mode}" + with pytest.raises(SystemExit): + create_namelist(argv=["fail"]) + + extra = [] + if mode == "prep" or mode == "soda": + extra += ["--dtg", "2020010100"] + if mode == "prep": + extra += ["--prep_file", prep_file, "--prep_filetype", "json"] + argv = [ + "-c", + config_exp_surfex_toml, + "-n", + get_nam_path, + "-o", + output, + "--domain", + conf_proj_2x3_file, + "-s", + get_system, + mode, + ] + argv += extra + create_namelist(argv=argv) + + argv += ["--harmonie"] + create_namelist(argv=argv) diff --git a/tests/smoke/test_cli_obs.py b/tests/smoke/test_cli_obs.py new file mode 100644 index 0000000..05c0cff --- /dev/null +++ b/tests/smoke/test_cli_obs.py @@ -0,0 +1,50 @@ +"""Observation tests.""" +import json + +import pytest + +from surfex.cli import bufr2json, obs2json +from surfex.obs import ObservationSet + + +@pytest.mark.usefixtures("_mockers") +def test_bufr2json(tmp_path_factory, obstime_str, bufr_file): + """Test bufr to json conversion.""" + output = f"{tmp_path_factory.getbasetemp().as_posix()}/bufr2json_t2m.json" + argv 
= [ + "-v", + "airTemperatureAt2M", + "-b", + bufr_file, + "-o", + output, + "-dtg", + obstime_str, + "-range", + "1800", + ] + bufr2json(argv=argv) + + +def test_obs2json_obsoul(obstime_str, obsoul_cryoclim_cy43, tmp_path_factory): + """Test obs2json for obsoul.""" + output = f"{tmp_path_factory.getbasetemp().as_posix()}/obs2json_obsoul.json" + argv = [ + "-t", + "obsoul", + "-v", + "92", + "-dtg", + obstime_str, + "-i", + obsoul_cryoclim_cy43, + "-o", + output, + ] + obs2json(argv=argv) + + with open(output, mode="r", encoding="utf-8") as fhandler: + data = json.load(fhandler) + obsset = ObservationSet(data) + assert len(obsset.observations) == 4 + assert obsset.observations["2"]["value"] == 0.1 diff --git a/tests/smoke/test_cli_plot.py b/tests/smoke/test_cli_plot.py new file mode 100644 index 0000000..a920e73 --- /dev/null +++ b/tests/smoke/test_cli_plot.py @@ -0,0 +1,106 @@ +"""Test plotting.""" +import pytest + +from surfex.cli import plot_points + + +@pytest.mark.usefixtures("_mockers") +def test_plot_grib1(tmp_path_factory, conf_proj_2x3_file, lambert_t2m_grib1): + """Test plotting from grib1.""" + output_file = f"{tmp_path_factory.getbasetemp().as_posix()}/output_plot_grib1.png" + argv = [ + "-it", + "grib1", + "-t", + "2020022006", + "-g", + conf_proj_2x3_file, + "--indicatorOfParameter", + "11", + "--level", + "2", + "--levelType", + "105", + "-i", + lambert_t2m_grib1, + "-o", + output_file, + "--debug", + ] + plot_points(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_plot_grib2(tmp_path_factory, conf_proj_2x3_file, lambert_t1_grib2): + """Test plotting from grib2.""" + output_file = f"{tmp_path_factory.getbasetemp().as_posix()}/output_plot_grib2.png" + argv = [ + "-it", + "grib2", + "-t", + "2020022006", + "-g", + conf_proj_2x3_file, + "--levelType", + "103", + "--discipline", + "0", + "--parameterCategory", + "0", + "--parameterNumber", + "0", + "--level", + "2", + "-i", + lambert_t1_grib2, + "-o", + output_file, + "--debug", + ] + plot_points(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_plot_netcdf(tmp_path_factory, conf_proj_2x3_file, data_thredds_nc_file): + """Test plotting from netcdf.""" + output_file = f"{tmp_path_factory.getbasetemp().as_posix()}/output_plot_nc.png" + argv = [ + "-it", + "netcdf", + "-t", + "2020022006", + "-g", + conf_proj_2x3_file, + "-v", + "air_temperature_2m", + "-i", + data_thredds_nc_file, + "-o", + output_file, + "--debug", + ] + plot_points(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_plot_obs_frost_json(tmp_path_factory, obsset_fname, obstime_str): + """Test plotting from frost json data.""" + output_file = ( + f"{tmp_path_factory.getbasetemp().as_posix()}/output_plot_obs_frost_json.png" + ) + argv = [ + "-it", + "obs", + "--obs_type", + "json", + "-t", + obstime_str, + "-v", + "air_temperature", + "-i", + obsset_fname, + "-o", + output_file, + "--debug", + ] + plot_points(argv=argv) diff --git a/tests/smoke/test_cli_run_binary.py b/tests/smoke/test_cli_run_binary.py new file mode 100644 index 0000000..2b359c7 --- /dev/null +++ b/tests/smoke/test_cli_run_binary.py @@ -0,0 +1,400 @@ +"""Test running a binary emulator.""" +import contextlib +import json +import os +from pathlib import Path + +import pytest +import toml + +from surfex.cli import masterodb, offline, perturbed_offline, pgd, prep, soda +from surfex.util import merge_toml_env_from_files + + +@contextlib.contextmanager +def working_directory(path): + """Change working directory and returns to previous on exit.""" + prev_cwd = 
Path.cwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) + + +@pytest.fixture() +def get_nc_config_file(config_exp_surfex_toml, tmp_path_factory): + this_config = f"{tmp_path_factory.getbasetemp().as_posix()}/nc.toml" + config_file = f"{tmp_path_factory.getbasetemp().as_posix()}/config.toml" + nc_config = {"SURFEX": {"IO": {"CSURF_FILETYPE": "NC"}}} + with open(this_config, mode="w", encoding="utf-8") as fhandler: + toml.dump(nc_config, fhandler) + + config = merge_toml_env_from_files([config_exp_surfex_toml, this_config]) + with open(config_file, mode="w", encoding="utf-8") as file_handler: + toml.dump(config, file_handler) + return config_file + + +@pytest.fixture() +def get_fa_config_file(config_exp_surfex_toml, tmp_path_factory): + this_config = f"{tmp_path_factory.getbasetemp().as_posix()}/fa.toml" + config_file = f"{tmp_path_factory.getbasetemp().as_posix()}/config.toml" + nc_config = {"SURFEX": {"IO": {"CSURF_FILETYPE": "FA"}}} + with open(this_config, mode="w", encoding="utf-8") as fhandler: + toml.dump(nc_config, fhandler) + + config = merge_toml_env_from_files([config_exp_surfex_toml, this_config]) + with open(config_file, mode="w", encoding="utf-8") as file_handler: + toml.dump(config, file_handler) + return config_file + + +@pytest.fixture() +def get_rte_file(tmp_path_factory): + rte = f"{tmp_path_factory.getbasetemp().as_posix()}/rte_cli_run_binary.json" + with open(rte, mode="w", encoding="utf-8") as file_handler: + json.dump(dict(os.environ), file_handler) + return rte + + +@pytest.fixture() +def get_system(tmp_path_factory): + system_file = ( + f"{tmp_path_factory.getbasetemp().as_posix()}/system_cli_run_binary.json" + ) + system = { + "climdir": "climdir", + "ecoclimap_bin_dir": "ecoclimap_bin_dir", + "assim_dir": "assim", + "first_guess_dir": "testdata/@YYYY@@MM@@DD@@HH@/", + } + with open(system_file, mode="w", encoding="utf-8") as file_handler: + json.dump(system, file_handler) + return system_file + + +@pytest.mark.usefixtures("_mockers") +def test_run_pgd( + get_nc_config_file, + get_rte_file, + get_system, + conf_proj_2x3_file, + get_nam_path, + tmp_path_factory, +): + """Test run NC.""" + # PGD + + output = f"{tmp_path_factory.getbasetemp().as_posix()}/archive/PGD_test.nc" + binary = "touch PGD.nc" + + argv = [ + "-w", + "", + "-c", + get_nc_config_file, + "--domain", + conf_proj_2x3_file, + "-s", + get_system, + "-n", + get_nam_path, + "-r", + get_rte_file, + "-f", + "--tolerate_missing", + "-o", + output, + binary, + ] + with working_directory(tmp_path_factory.getbasetemp()): + pgd(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_run_prep( + get_nc_config_file, + get_system, + get_rte_file, + get_nam_path, + conf_proj_2x3_file, + tmp_path_factory, +): + + # PREP + + pgd = tmp_path_factory.getbasetemp() / "PGD_input.nc" + pgd.touch() + output = f"{tmp_path_factory.getbasetemp().as_posix()}/archive/PREP_test.nc" + binary = "touch PREP.nc" + + argv = [ + "-w", + "", + "--domain", + conf_proj_2x3_file, + "--pgd", + pgd.as_posix(), + "--prep_file", + get_nam_path + "/prep_from_namelist_values.json", + "--prep_filetype", + "json", + "--dtg", + "2020022000", + "-c", + get_nc_config_file, + "-s", + get_system, + "-n", + get_nam_path, + "-r", + get_rte_file, + "-f", + "--tolerate_missing", + "-o", + output, + binary, + ] + with working_directory(tmp_path_factory.getbasetemp()): + prep(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_run_offline( + get_nc_config_file, + get_rte_file, + get_system, + get_nam_path, + 
tmp_path_factory, + conf_proj_2x3_file, +): + # OFFLINE + + pgd = tmp_path_factory.getbasetemp() / "PGD_input.nc" + pgd.touch() + prep = tmp_path_factory.getbasetemp() / "PREP_input.nc" + prep.touch() + output = f"{tmp_path_factory.getbasetemp().as_posix()}/archive/SURFOUT_test.nc" + binary = "touch SURFOUT.nc" + + argv = [ + "-w", + "", + "--domain", + conf_proj_2x3_file, + "--pgd", + pgd.as_posix(), + "--prep", + prep.as_posix(), + "-c", + get_nc_config_file, + "-s", + get_system, + "-n", + get_nam_path, + "-r", + get_rte_file, + "-f", + "--tolerate_missing", + "-o", + output, + "--forc_zs", + "--forcing_dir", + "testdata", + binary, + ] + with working_directory(tmp_path_factory.getbasetemp()): + offline(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_run_perturbed( + get_nc_config_file, + get_rte_file, + get_system, + get_nam_path, + tmp_path_factory, + conf_proj_2x3_file, +): + # PERTURBED OFFLINE + + pgd = tmp_path_factory.getbasetemp() / "PGD_input.nc" + pgd.touch() + prep = tmp_path_factory.getbasetemp() / "PREP_input.nc" + prep.touch() + output = f"{tmp_path_factory.getbasetemp().as_posix()}/archive/SURFOUT_1_test.nc" + binary = "touch SURFOUT.nc" + + argv = [ + "-w", + "", + "--domain", + conf_proj_2x3_file, + "--pgd", + pgd.as_posix(), + "--prep", + prep.as_posix(), + "-c", + get_nc_config_file, + "-s", + get_system, + "-n", + get_nam_path, + "-r", + get_rte_file, + "--pert", + "1", + "-f", + "--tolerate_missing", + "-o", + output, + "--forc_zs", + "--forcing_dir", + "testdata", + binary, + ] + with working_directory(tmp_path_factory.getbasetemp()): + perturbed_offline(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_run_soda( + get_nc_config_file, + get_system, + get_rte_file, + get_nam_path, + conf_proj_2x3_file, + tmp_path_factory, +): + # SODA + + pgd = tmp_path_factory.getbasetemp() / "PGD_input.nc" + pgd.touch() + prep = tmp_path_factory.getbasetemp() / "PREP_input.nc" + prep.touch() + output = f"{tmp_path_factory.getbasetemp().as_posix()}/archive/ANALYSIS_test.nc" + binary = "touch SURFOUT.nc" + + argv = [ + "-w", + "", + "--domain", + conf_proj_2x3_file, + "--pgd", + pgd.as_posix(), + "--prep", + prep.as_posix(), + "--dtg", + "2020022006", + "-c", + get_nc_config_file, + "-s", + get_system, + "-n", + get_nam_path, + "-r", + get_rte_file, + "-f", + "--tolerate_missing", + "-o", + output, + binary, + ] + with working_directory(tmp_path_factory.getbasetemp()): + soda(argv) + + +@pytest.mark.usefixtures("_mockers") +def test_masterodb_forecast( + get_fa_config_file, + get_system, + get_rte_file, + get_nam_path, + conf_proj_2x3_file, + tmp_path_factory, +): + """Test masterodb.""" + pgd = tmp_path_factory.getbasetemp() / "Const.Clim.sfx" + pgd.touch() + prep = tmp_path_factory.getbasetemp() / "ICMSHHARMINIT.sfx" + prep.touch() + output = f"{tmp_path_factory.getbasetemp().as_posix()}/archive/ICMSHHARM+0003.sfx" + binary = "touch ICMSHHARM+0003.fa" + + argv = [ + "-w", + "", + "-m", + "forecast", + "--domain", + conf_proj_2x3_file, + "--pgd", + pgd.as_posix(), + "--prep", + prep.as_posix(), + "-c", + get_fa_config_file, + "-s", + get_system, + "-n", + get_nam_path, + "-r", + get_rte_file, + "-f", + "--tolerate_missing", + "-o", + output, + "-b", + binary, + ] + with working_directory(tmp_path_factory.getbasetemp()): + masterodb(argv=argv) + + +@pytest.mark.usefixtures("_mockers") +def test_masterodb_canari( + get_fa_config_file, + get_system, + get_rte_file, + get_nam_path, + conf_proj_2x3_file, + tmp_path_factory, +): + # CANARI + pgd = 
tmp_path_factory.getbasetemp() / "Const.Clim.sfx" + pgd.touch() + prep = tmp_path_factory.getbasetemp() / "ICMSHHARMINIT.sfx" + prep.touch() + output = f"{tmp_path_factory.getbasetemp().as_posix()}/archive/ICMSHHARM+0003.sfx" + binary = "touch ICMSHANAL.sfx" + argv = [ + "-w", + "", + "-m", + "canari", + "--domain", + conf_proj_2x3_file, + "--pgd", + pgd.as_posix(), + "--prep", + prep.as_posix(), + "--dtg", + "2020022006", + "-c", + get_fa_config_file, + "-s", + get_system, + "-n", + get_nam_path, + "-r", + get_rte_file, + "-f", + "--tolerate_missing", + "-o", + output, + "-b", + binary, + ] + with working_directory(tmp_path_factory.getbasetemp()): + masterodb(argv=argv) diff --git a/tests/smoke/test_cli_set_geo.py b/tests/smoke/test_cli_set_geo.py new file mode 100644 index 0000000..ddf3a0f --- /dev/null +++ b/tests/smoke/test_cli_set_geo.py @@ -0,0 +1,103 @@ +"""Test geometry.""" +import json + +import pytest + +from surfex.cli import cli_set_domain, cli_shape2ign + + +@pytest.fixture() +def ref_domain_file(tmp_path_factory, ref_domain_dict): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/conf_proj_ref.json" + json.dump(ref_domain_dict, open(fname, mode="w", encoding="utf-8")) + return fname + + +@pytest.fixture() +def ref_domain_dict(): + domain_dict = { + "nam_pgd_grid": {"cgrid": "CONF PROJ"}, + "nam_conf_proj": {"xlat0": 59.5, "xlon0": 9}, + "nam_conf_proj_grid": { + "ilone": 1, + "ilate": 1, + "xlatcen": 60, + "xloncen": 10, + "nimax": 9, + "njmax": 19, + "xdx": 10000.0, + "xdy": 10000.0, + }, + } + return domain_dict + + +@pytest.fixture() +def domains(): + return { + "CONF_PROJ_TEST": { + "nam_pgd_grid": {"cgrid": "CONF PROJ"}, + "nam_conf_proj": {"xlat0": 59.5, "xlon0": 9}, + "nam_conf_proj_grid": { + "ilone": 1, + "ilate": 1, + "xlatcen": 60, + "xloncen": 10, + "nimax": 9, + "njmax": 19, + "xdx": 10000.0, + "xdy": 10000.0, + }, + } + } + + +@pytest.fixture() +def domains_file(domains, tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/domains.json" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(domains, fhandler) + return fname + + +def test_set_domain(domains, domains_file, tmp_path_factory): + """Test set domain.""" + saved_domain = domains["CONF_PROJ_TEST"] + domain_file = f"{tmp_path_factory.getbasetemp().as_posix()}/set_geo_domain.json" + argv = [ + "-d", + "CONF_PROJ_TEST", + "--domains", + domains_file, + "-o", + domain_file, + "--debug", + ] + cli_set_domain(argv=argv) + with open(domain_file) as fhandler: + domain_json = json.load(fhandler) + assert domain_json == saved_domain + + argv = ["-d", "not-existing", "--domains", domains_file, "-o", domain_file, "--debug"] + with pytest.raises(KeyError): + cli_set_domain(argv=argv) + + +def test_shape2ign(tmp_path_factory, ref_domain_file, mocker): + infile = f"{tmp_path_factory.getbasetemp().as_posix()}/input" + output = f"{tmp_path_factory.getbasetemp().as_posix()}/ign_geo.json" + argv = [ + "-c", + "catchment", + "-i", + infile, + "-r", + ref_domain_file, + "-o", + output, + "--indent", + "2", + ] + mocker.patch("surfex.geo.ogr") + with pytest.raises(TypeError): + cli_shape2ign(argv=argv) diff --git a/tests/unit/test_binary_input_data.py b/tests/unit/test_binary_input_data.py new file mode 100644 index 0000000..98f4a26 --- /dev/null +++ b/tests/unit/test_binary_input_data.py @@ -0,0 +1,110 @@ +"""Test binary input data to surfex commands.""" +import contextlib +import os +from pathlib import Path + +import pytest + +from surfex.binary_input import JsonOutputData, 
SodaInputData +from surfex.configuration import ConfigurationFromTomlFile +from surfex.platform import SystemFilePaths + + +@pytest.fixture() +def default_config(config_exp_surfex_toml): + return ConfigurationFromTomlFile(config_exp_surfex_toml) + + +@pytest.fixture() +def climdir(tmp_path_factory): + climdir = tmp_path_factory.getbasetemp() / "climdir" + climdir.mkdir(exist_ok=True) + lsm = climdir / "CLIMATE.DAT" + lsm.touch() + return climdir.as_posix() + + +@pytest.fixture() +def assim_dir(tmp_path_factory): + assim_dir = tmp_path_factory.getbasetemp() / "assim" + assim_dir.mkdir(exist_ok=True) + sst_file = assim_dir / "SST_SIC.DAT" + sst_file.touch() + (assim_dir / "OBSERVATIONS_200220H06.DAT").touch() + (assim_dir / "POLYNOMES_ISBA").touch() + (assim_dir / "FIRST_GUESS_200220H06.DAT").touch() + (assim_dir / "SURFOUT.nc").touch() + (assim_dir / "LSM.DAT").touch() + return assim_dir.as_posix() + + +@pytest.fixture() +def first_guess_dir(tmp_path_factory): + first_guess_dir = tmp_path_factory.getbasetemp() / "first_guess" + first_guess_dir.mkdir(exist_ok=True) + (first_guess_dir / "FIRST_GUESS_200220H06.DAT").touch() + (first_guess_dir / "SURFOUT.nc").touch() + return first_guess_dir.as_posix() + + +@pytest.fixture() +def get_system(climdir, assim_dir, first_guess_dir): + system = { + "climdir": climdir, + "ecoclimap_bin_dir": "", + "assim_dir": assim_dir, + "first_guess_dir": first_guess_dir, + } + return SystemFilePaths(system) + + +@pytest.fixture() +def soda_input_data(default_config, get_system, an_time): + return SodaInputData(default_config, get_system, check_existence=False, dtg=an_time) + + +def test_soda_oi(soda_input_data): + soda_input_data.set_input_vertical_soil_oi() + + +def test_soda_enkf(soda_input_data): + soda_input_data.set_input_vertical_soil_enkf() + + +def test_soda_ekf(soda_input_data): + soda_input_data.set_input_vertical_soil_ekf() + + +def test_soda_observations(soda_input_data): + soda_input_data.set_input_observations() + + +def test_soda_sea_assimilation(soda_input_data): + soda_input_data.set_input_sea_assimilation() + + +@contextlib.contextmanager +def working_directory(path): + """Change working directory and returns to previous on exit.""" + prev_cwd = Path.cwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) + + +def test_json_output(tmp_path_factory): + + target = tmp_path_factory.getbasetemp() / "target_output_file" + target2 = tmp_path_factory.getbasetemp() / "target_output_file2" + destination = tmp_path_factory.getbasetemp() / "destination_output_file" + destination2 = tmp_path_factory.getbasetemp() / "destination_output_file2" + target.touch() + target2.touch() + data = { + "target_output_file": destination.as_posix(), + "target_output_file2": {destination2.as_posix(): "cp"}, + } + with working_directory(tmp_path_factory.getbasetemp()): + JsonOutputData(data).archive_files() diff --git a/tests/unit/test_bufr.py b/tests/unit/test_bufr.py new file mode 100644 index 0000000..fb4a3c2 --- /dev/null +++ b/tests/unit/test_bufr.py @@ -0,0 +1,34 @@ +"""Bufr testing.""" +import pytest + +from surfex.bufr import BufrObservationSet +from surfex.datetime_utils import as_timedelta +from surfex.input_methods import get_datasources + + +@pytest.fixture() +def settings(bufr_file): + settings_dict = { + "label": { + "filetype": "bufr", + "filepattern": bufr_file, + "varname": "airTemperatureAt2m", + "lonrange": [0, 20], + "latrange": [55, 65], + "dt": 1800, + } + } + return settings_dict + + +def test_get_bufr_datasource(obstime, settings): + 
get_datasources(obstime, settings) + + +@pytest.mark.usefixtures("_mockers") +def test_read_bufr(bufr_file, obstime): + variables = ["airTemperatureAt2M"] + bufr_set = BufrObservationSet( + bufr_file, variables, obstime, as_timedelta(seconds=1800) + ) + assert len(bufr_set.observations) == 1 diff --git a/tests/unit/test_converter.py b/tests/unit/test_converter.py new file mode 100644 index 0000000..3511719 --- /dev/null +++ b/tests/unit/test_converter.py @@ -0,0 +1,40 @@ +"""Test converter.""" +from datetime import datetime + +import numpy as np + +from surfex.cache import Cache +from surfex.geo import get_geo_object +from surfex.read import ConvertedInput, Converter + + +def test_converter_meps_nc(conf_proj_2x3_dict, data_thredds_nc_file): + """Test converter.""" + my_geo = get_geo_object(conf_proj_2x3_dict) + + fileformat = "netcdf" + var = "T2M" + converter = "none" + config = { + "netcdf": {"fcint": 10800, "file_inc": 3600, "offset": 0}, + "T2M": { + "netcdf": { + "converter": { + "none": { + "name": "air_temperature_2m", + "filepattern": data_thredds_nc_file, + } + } + } + }, + } + + defs = config[fileformat] + converter_conf = config[var][fileformat]["converter"] + + validtime = datetime(year=2020, month=2, day=20, hour=6) + cache = Cache(7200) + converter = Converter(converter, validtime, defs, converter_conf, fileformat) + field = ConvertedInput(my_geo, var, converter).read_time_step(validtime, cache) + field = np.reshape(field, [my_geo.nlons, my_geo.nlats]) + assert field.shape == (2, 3) diff --git a/tests/unit/test_fa.py b/tests/unit/test_fa.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_file.py b/tests/unit/test_file.py new file mode 100644 index 0000000..b042712 --- /dev/null +++ b/tests/unit/test_file.py @@ -0,0 +1,793 @@ +"""Test file.py.""" +import json +import os + +import numpy as np +import pytest +from netCDF4 import Dataset + +from surfex.cache import Cache +from surfex.datetime_utils import as_datetime +from surfex.file import ( + AsciiSurfexFile, + FaSurfexFile, + ForcingFileNetCDF, + NCSurfexFile, + NetCDFSurfexFile, + SurfexFileVariable, + TexteSurfexFile, + read_surfex_field, + read_surfex_points, +) +from surfex.read import ConvertedInput, Converter + + +@pytest.fixture() +def ascii_conf_proj_float_record_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/ascii_conf_proj_float.txt" + with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ + &FULL VERSION +(-) + 8 + &FULL BUG +(-) + 1 + &FULL STORAGETYPE +(-) +PRE + &FULL DIM_FULL +(-) + 6 + &FULL WRITE_EXT +(-) + F + &FULL SPLIT_PATCH +(-) + T + &FULL DTCUR%TDATE +s + 2020 2 20 + &FULL DTCUR%TIME +s + 21600.000000000000 + &FULL GRID_TYPE +GRID TYPE +CONF PROJ + &FULL LAT0 + + 59.500000000000000 + &FULL LON0 + + 9.0000000000000000 + &FULL RPK + + 0.86162916044152571 + &FULL BETA + + 0.0000000000000000 + &FULL LATORI + + 59.104442729963196 + &FULL LONORI + + 9.0980661139040411 + &FULL IMAX + + 2 + &FULL JMAX + + 3 + &FULL XX + + 0.10000000D+05 0.20000000D+05 + + &FULL YY + + 0.10000000D+05 0.20000000D+05 0.30000000D+05 + &NATURE TG2P1 + X_Y_TG2 (K) + 0.28500000D+03 0.28500000D+03 + 0.28500000D+03 0.28500000D+03 + 0.28500000D+03 0.28500000D+03 + &NATURE TG1P1 + X_Y_TG1 (K) + 0.28500000D+03 0.28500000D+03 + 0.28500000D+03 0.28500000D+03 + 0.28500000D+03 0.28500000D+03 + &NATURE TG1P2 + X_Y_TG1 (K) + 0.28500000D+03 0.28500000D+03 + 0.28500000D+03 0.28500000D+03 + 0.28500000D+03 0.28500000D+03 + &FULL STRING_TYPE + DESCRIPTION + VALUE + 
&FULL LOGICAL_TRUE + DESCRIPTION + T + &FULL LOGICAL_FALSE + + F + &FULL INTEGER_TYPE + + 99 +""" + ) + return fname + + +@pytest.fixture() +def ascii_ign_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/ascii_ign.txt" + with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ + &FULL GRID_TYPE +GRID TYPE +IGN + &FULL LAMBERT + + 7 + &FULL NPOINTS + + 6 + &FULL XX + + 0.10000000D+05 0.20000000D+05 0.30000000D+05 0.40000000D+05 0.50000000D+05 0.60000000D+05 + &FULL XY + + 0.10000000D+05 0.20000000D+05 0.30000000D+05 0.40000000D+05 0.50000000D+05 0.60000000D+05 + &FULL XDX + + 0.10000000D+05 0.20000000D+05 0.30000000D+05 0.40000000D+05 0.50000000D+05 0.60000000D+05 + &FULL XDY + + 0.10000000D+05 0.20000000D+05 0.30000000D+05 0.40000000D+05 0.50000000D+05 0.60000000D+05 + """ + ) + return fname + + +@pytest.fixture() +def ascii_lonlatval_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/ascii_lonlatval.txt" + with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ + &FULL GRID_TYPE +GRID TYPE +LONLATVAL + &FULL XX + + 9.8 9.9 10.0 10.1 10.2 10.3 + &FULL XY + + 59.6 59.7 59.8 59.9 60.0 60.1 + &FULL XDX + + 0.10000000D+01 0.10000000D+01 0.30000000D+01 0.40000000D+01 0.50000000D+01 0.60000000D+01 + &FULL XDY + + 0.10000000D+01 0.20000000D+01 0.30000000D+01 0.40000000D+01 0.50000000D+01 0.60000000D+01 +""" + ) + return fname + + +@pytest.fixture() +def ascii_lonlat_reg_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/ascii_lonlat_reg.txt" + with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ + &FULL GRID_TYPE +GRID TYPE +LONLAT REG + &FULL LONMIN + + 9.9 + &FULL LATMIN + + 59.9 + &FULL LONMAX + + 10.1 + &FULL LATMAX + + 60.2 + &FULL NLON + + 2 + &FULL NLAT + + 3 +""" + ) + return fname + + +# AsciiSurfexFile +def test_read_ascii_record_conf_proj(ascii_conf_proj_float_record_file): + ascii_file = AsciiSurfexFile(ascii_conf_proj_float_record_file) + field = ascii_file.read("TG1P1", "NATURE", "float") + assert field.shape[0] == 6 + + value = ascii_file.read("LOGICAL_TRUE", "FULL", "logical") + assert value + + value = ascii_file.read("LOGICAL_FALSE", "FULL", "logical") + assert not value + + string = ascii_file.read("STRING_TYPE", "FULL", "string") + assert string == "VALUE" + + string = ascii_file.read("INTEGER_TYPE", "FULL", "integer") + assert string == 99 + + +def test_read_ascii_record_conf_proj_geo_provided( + ascii_conf_proj_float_record_file, conf_proj_2x3 +): + ascii_file = AsciiSurfexFile(ascii_conf_proj_float_record_file, geo=conf_proj_2x3) + field = ascii_file.read("TG1P1", "NATURE", "float") + assert field.shape[0] == 6 + + +def test_read_ascii_geo_ign(ascii_ign_file): + if os.path.exists("/tmp/.mask"): # noqa + os.remove("/tmp/.mask") # noqa + ascii_file = AsciiSurfexFile(ascii_ign_file) + grid_type = ascii_file.read("GRID_TYPE", "FULL", "string") + assert grid_type == "IGN" + + +def test_read_ascii_geo_lonlatval(ascii_lonlatval_file): + ascii_file = AsciiSurfexFile(ascii_lonlatval_file) + grid_type = ascii_file.read("GRID_TYPE", "FULL", "string") + assert grid_type == "LONLATVAL" + + +def test_read_ascii_geo_lonlat_reg(ascii_lonlat_reg_file): + ascii_file = AsciiSurfexFile(ascii_lonlat_reg_file) + grid_type = ascii_file.read("GRID_TYPE", "FULL", "string") + assert grid_type == "LONLAT REG" + + +@pytest.fixture() +def texte_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/texte.TXT" + 
with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ + 1.0 2.0 3.0 4.0 5.0 6.0 +""" + ) + return fname + + +def test_texte_surfex_file(texte_file, conf_proj_2x3): + texte_file = TexteSurfexFile(texte_file, conf_proj_2x3) + validtime = as_datetime("2020022006") + var = SurfexFileVariable("VAR", basetime=as_datetime("2020022006"), interval=3600) + field, __ = texte_file.points(var, conf_proj_2x3, validtime=validtime) + assert field.shape[0] == 6 + + +def test_read_surfex_field(ascii_conf_proj_float_record_file): + validtime = as_datetime("2020022006") + field = read_surfex_field( + "TG1P1", + ascii_conf_proj_float_record_file, + validtime=validtime, + fileformat="ascii", + filetype="surf", + datatype="float", + tiletype="NATURE", + ) + assert field.shape == (2, 3) + + +def test_read_surfex_points(ascii_conf_proj_float_record_file, conf_proj_2x3): + validtime = as_datetime("2020022006") + field = read_surfex_points( + "TG1P1", + ascii_conf_proj_float_record_file, + conf_proj_2x3, + validtime=validtime, + fileformat="ascii", + filetype="surf", + tiletype="NATURE", + datatype="float", + ) + assert field.shape[0] == 6 + + +def test_converter_ascii(conf_proj_2x3, ascii_conf_proj_float_record_file): + """Test converter.""" + my_geo = conf_proj_2x3 + + fileformat = "surfex" + var = "FRAC_NATURE" + converter = "none" + config = { + "surfex": {"fcint": 10800, "file_inc": 3600, "offset": 0}, + "FRAC_NATURE": { + "surfex": { + "converter": { + "none": { + "varname": "TG1P1", + "filepattern": ascii_conf_proj_float_record_file, + "filetype": "surf", + "datatype": "float", + "tiletype": "NATURE", + } + } + } + }, + } + + defs = config[fileformat] + converter_conf = config[var][fileformat]["converter"] + + validtime = as_datetime("2020022006") + cache = Cache(7200) + converter = Converter(converter, validtime, defs, converter_conf, fileformat) + field = ConvertedInput(my_geo, var, converter).read_time_step(validtime, cache) + field = np.reshape(field, [my_geo.nlons, my_geo.nlats]) + + +@pytest.fixture() +def fa_conf_proj_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/fa_conf_proj.fa" + with open(fname, mode="w", encoding="utf-8") as fhandler: + data = {"key": "value"} + json.dump(data, fhandler) + return fname + + +# FA +@pytest.mark.usefixtures("_mockers") +def test_read_fa_field(fa_conf_proj_file, conf_proj_2x3): + fa_file = FaSurfexFile(fa_conf_proj_file) + var = SurfexFileVariable("X001.TG1") + fa_file.points(var, conf_proj_2x3) + + +# NC +@pytest.fixture() +def data_converter_nc2(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_converter_nc.nc" + with open(fname, mode="w", encoding="utf-8") as fhandler: + data = { + "dimensions": {"xx": 2, "yy": 3, "char_len": 40}, + "variables": { + "DTCUR-YEAR": { + "dimensions": None, + "datatype": "int", + }, + "DTCUR-MONTH": { + "dimensions": None, + "datatype": "int", + }, + "DTCUR-DAY": { + "dimensions": None, + "datatype": "int", + }, + "DTCUR-TIME": { + "dimensions": None, + "datatype": "double", + }, + "GRID_TYPE": { + "dimensions": ["char_len"], + "datatype": "int", + }, + "LON0": { + "dimensions": None, + "datatype": "double", + }, + "LAT0": { + "dimensions": None, + "datatype": "double", + }, + "LONORI": { + "dimensions": None, + "datatype": "double", + }, + "LATORI": { + "dimensions": None, + "datatype": "double", + }, + "DX": { + "dimensions": ["yy", "xx"], + "datatype": "double", + }, + "DY": { + "dimensions": ["yy", "xx"], + "datatype": "double", + }, + 
"TG1P1": { + "dimensions": ["yy", "xx"], + "datatype": "double", + }, + }, + "data": { + "DTCUR-YEAR": 2020, + "DTCUR-MONTH": 2, + "DTCUR-DAY": 20, + "DTCUR-TIME": 21600.0, + "GRID_TYPE": "CONF PROJ", + "LON0": 10.0, + "LAT0": 60.0, + "IMAX": 2, + "JMAX": 3, + "DX": 10000.0, + "DY": 10000.0, + "LONORI": 10.0, + "LATORI": 60.0, + "TG1P1": [1, 2, 3, 4, 5, 6], + }, + } + json.dump(data, fhandler) + return fname + + +@pytest.fixture() +def data_surfex_nc_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_surfex_nc.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_surfex_nc.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf PREP_CONF_PROJ { +dimensions: + xx = 2 ; + yy = 3 ; + char_len = 40 ; +variables: + int VERSION ; + VERSION:long_name = "VERSION" ; + VERSION:comment = "(-)" ; + int BUG ; + BUG:long_name = "BUG" ; + BUG:comment = "(-)" ; + char STORAGETYPE(char_len) ; + STORAGETYPE:len = 3 ; + STORAGETYPE:long_name = "STORAGETYPE" ; + STORAGETYPE:comment = "(-)" ; + int DIM_FULL ; + DIM_FULL:long_name = "DIM_FULL" ; + DIM_FULL:comment = "(-)" ; + char WRITE_EXT ; + WRITE_EXT:long_name = "WRITE_EXT" ; + WRITE_EXT:comment = "(-)" ; + char SPLIT_PATCH ; + SPLIT_PATCH:long_name = "SPLIT_PATCH" ; + SPLIT_PATCH:comment = "(-)" ; + int DTCUR-YEAR ; + DTCUR-YEAR:long_name = "DTCUR-YEAR" ; + DTCUR-YEAR:comment = "s" ; + int DTCUR-MONTH ; + DTCUR-MONTH:long_name = "DTCUR-MONTH" ; + DTCUR-MONTH:comment = "s" ; + int DTCUR-DAY ; + DTCUR-DAY:long_name = "DTCUR-DAY" ; + DTCUR-DAY:comment = "s" ; + double DTCUR-TIME ; + DTCUR-TIME:long_name = "DTCUR-TIME" ; + DTCUR-TIME:comment = "s" ; + char GRID_TYPE(char_len) ; + GRID_TYPE:len = 9 ; + GRID_TYPE:long_name = "GRID_TYPE" ; + GRID_TYPE:comment = "GRID TYPE" ; + double LAT0 ; + LAT0:long_name = "LAT0" ; + LAT0:comment = "" ; + double LON0 ; + LON0:long_name = "LON0" ; + LON0:comment = "" ; + double RPK ; + RPK:long_name = "RPK" ; + RPK:comment = "" ; + double BETA ; + BETA:long_name = "BETA" ; + BETA:comment = "" ; + double LATORI ; + LATORI:long_name = "LATORI" ; + LATORI:comment = "" ; + double LONORI ; + LONORI:long_name = "LONORI" ; + LONORI:comment = "" ; + int IMAX ; + IMAX:long_name = "IMAX" ; + IMAX:comment = "" ; + int JMAX ; + JMAX:long_name = "JMAX" ; + JMAX:comment = "" ; + double XX(yy, xx) ; + XX:_FillValue = 1.e+20 ; + XX:long_name = "XX" ; + XX:comment = "" ; + double YY(yy, xx) ; + YY:_FillValue = 1.e+20 ; + YY:long_name = "YY" ; + YY:comment = "" ; + double DX(yy, xx) ; + DX:_FillValue = 1.e+20 ; + DX:long_name = "DX" ; + DX:comment = "" ; + double DY(yy, xx) ; + DY:_FillValue = 1.e+20 ; + DY:long_name = "DY" ; + DY:comment = "" ; + double TG1P1(yy, xx) ; + TG1P1:_FillValue = 1.e+20 ; + TG1P1:long_name = "TG1P1" ; + TG1P1:comment = "X_Y_TG1 (K)" ; + +data: + + VERSION = 8 ; + + BUG = 1 ; + + STORAGETYPE = "PRE " ; + + DIM_FULL = 6 ; + + WRITE_EXT = "F" ; + + SPLIT_PATCH = "T" ; + + DTCUR-YEAR = 2020 ; + + DTCUR-MONTH = 2 ; + + DTCUR-DAY = 20 ; + + DTCUR-TIME = 21600.0 ; + + GRID_TYPE = "CONF PROJ " ; + + LAT0 = 59.5 ; + + LON0 = 9 ; + + RPK = 0.861629160441526 ; + + BETA = 0 ; + + LATORI = 59.1044427299632 ; + + LONORI = 9.09806611390404 ; + + IMAX = 2 ; + + JMAX = 3 ; + + XX = + 10000, 20000, 30000, 40000, 50000, 60000; + + YY = + 10000, 20000, 30000, 40000, 50000, 60000; + + DX = + 10000, 10000, 10000, 10000, 10000, 10000; + + DY = + 10000, 10000, 10000, 10000, 10000, 10000; + + TG1P1 = + 285, 285, 285, 285, 285, 285; +} +""" + ) + 
Dataset(fname, mode="w").fromcdl( + cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" + ) + return fname + + +@pytest.fixture() +def data_forcing_nc_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_forcing_nc.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_forcing_nc.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf FORCING { +dimensions: + Number_of_points = 6 ; + time = 2 ; +variables: + float time(time) ; + time:units = "hours since 2020-02-20 00:00:00 0:00" ; + float FRC_TIME_STP ; + FRC_TIME_STP:longname = "Forcing_Time_Step" ; + float LON(Number_of_points) ; + LON:longname = "Longitude" ; + float LAT(Number_of_points) ; + LAT:longname = "Latitude" ; + float ZS(Number_of_points) ; + ZS:longname = "Surface_Orography" ; + float ZREF(Number_of_points) ; + ZREF:longname = "Reference_Height" ; + ZREF:units = "m" ; + float UREF(Number_of_points) ; + UREF:longname = "Reference_Height_for_Wind" ; + UREF:units = "m" ; + float Tair(time, Number_of_points) ; + Tair:longname = "Near_Surface_Air_Temperature" ; + Tair:units = "K" ; + float Qair(time, Number_of_points) ; + Qair:longname = "Near_Surface_Specific_Humidity" ; + Qair:units = "kg/kg" ; + float PSurf(time, Number_of_points) ; + PSurf:longname = "Surface_Pressure" ; + PSurf:units = "Pa" ; + float DIR_SWdown(time, Number_of_points) ; + DIR_SWdown:longname = "Surface_Incident_Downwelling_Shortwave_Radiation" ; + DIR_SWdown:units = "W/m2" ; + float SCA_SWdown(time, Number_of_points) ; + SCA_SWdown:longname = "Surface_Incident_Diffuse_Shortwave_Radiation" ; + SCA_SWdown:units = "W/m2" ; + float LWdown(time, Number_of_points) ; + LWdown:longname = "Surface_Incident_Diffuse_Longwave_Radiation" ; + LWdown:units = "W/m2" ; + float Rainf(time, Number_of_points) ; + Rainf:longname = "Rainfall_Rate" ; + Rainf:units = "kg/m2/s" ; + float Snowf(time, Number_of_points) ; + Snowf:longname = "Snowfall_Rate" ; + Snowf:units = "kg/m2/s" ; + float Wind(time, Number_of_points) ; + Wind:longname = "Wind_Speed" ; + Wind:units = "m/s" ; + float Wind_DIR(time, Number_of_points) ; + Wind_DIR:longname = "Wind_Direction" ; + float CO2air(time, Number_of_points) ; + CO2air:longname = "Near_Surface_CO2_Concentration" ; + CO2air:units = "kg/m3" ; +data: + + time = 0, 1 ; + + FRC_TIME_STP = 3600 ; + + LON = 9.273919, 9.274644, 9.275373, 9.276107, 9.276843, 9.277584; + + LAT = 59.19412, 59.28405, 59.37397, 59.4639, 59.55383, 59.64376; + + ZS = 236.897, 57.3353, 180.6278, 152.952, 137.3557, 271.6574; + + Tair = + 271.1142, 271.2324, 271.9492, 271.5849, 270.9042, 269.662, + 270.7881, 270.5206, 271.3282, 271.1231, 270.2364, 268.9913; +} +""" + ) + Dataset(fname, mode="w").fromcdl( + cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" + ) + return fname + + +@pytest.mark.usefixtures("_mockers") +def test_converter_nc(conf_proj_2x3, data_surfex_nc_file): + """Test converter.""" + my_geo = conf_proj_2x3 + + fileformat = "surfex" + var = "TG1P1" + converter = "none" + config = { + "surfex": {"fcint": 10800, "file_inc": 3600, "offset": 0}, + "TG1P1": { + "surfex": { + "converter": { + "none": { + "varname": "TG1P1", + "filepattern": data_surfex_nc_file, + } + } + } + }, + } + + defs = config[fileformat] + converter_conf = config[var][fileformat]["converter"] + + validtime = as_datetime("2020022006") + cache = Cache(7200) + converter = Converter(converter, validtime, defs, converter_conf, fileformat) + field = ConvertedInput(my_geo, var, 
converter).read_time_step(validtime, cache) + assert field.shape[0] == 6 + + +def test_nc_file(conf_proj_2x3, data_surfex_nc_file): + nc_file = NCSurfexFile(data_surfex_nc_file) + validtime = as_datetime("2020022006") + var = SurfexFileVariable("TG1P1") + field, __ = nc_file.points(var, conf_proj_2x3, validtime=validtime) + assert field.shape[0] == 6 + + +def test_netcdf_forcing_file(conf_proj_2x3, data_forcing_nc_file): + nc_file = ForcingFileNetCDF(data_forcing_nc_file, geo=conf_proj_2x3) + var = SurfexFileVariable("Tair") + validtime = as_datetime("2020022000") + field, __ = nc_file.points(var, conf_proj_2x3, validtime=validtime) + assert field.shape[0] == 6 + + +@pytest.fixture() +def data_timeseries_netcdf_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_timeseries_netcdf.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/data_timeries_netcdf.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf ISBA_PROGNOSTIC.OUT { +dimensions: + xx = 2 ; + yy = 3 ; + snow_layer = 1 ; + Number_of_Patches = 1 ; + time = UNLIMITED ; // (2 currently) +variables: + int Projection_Type ; + Projection_Type:grid_mapping_name = "lambert_conformal_conic" ; + Projection_Type:earth_radius = 6371229. ; + Projection_Type:longitude_of_central_meridian = 21.3 ; + Projection_Type:latitude_of_projection_origin = 78.65 ; + Projection_Type:false_easting = 234500. ; + Projection_Type:false_northing = 273500. ; + Projection_Type:rotation = 0. ; + Projection_Type:x_resolution = 1000. ; + Projection_Type:y_resolution = 1000. ; + Projection_Type:standard_parallel = 78.65 ; + double xx(xx) ; + xx:_FillValue = 1.e+20 ; + xx:units = "meters" ; + double yy(yy) ; + yy:_FillValue = 1.e+20 ; + yy:units = "meters" ; + double time(time) ; + time:_FillValue = 1.e+20 ; + time:units = "hours since 2020-02-20 06:00:00" ; + double TG1(time, Number_of_Patches, yy, xx) ; + TG1:_FillValue = 1.e+20 ; + TG1:long_name = "X_Y_TG1" ; + TG1:units = "K" ; + TG1:grid_mapping = "Projection_Type" ; + +data: + + Projection_Type = _ ; + + xx = 1000, 2000, 3000, 4000, 5000, 6000; + + yy = 1000, 2000, 3000, 4000, 5000, 6000; + + time = 1, 2; + + TG1 = 272.515206303509, 272.477738673727, 272.439227443228, 272.398908654572, + 272.366437323069, 272.341285478363, 272.515206303509, 272.477738673727, + 272.439227443228, 272.398908654572, 272.366437323069, 272.341285478363; +} +""" + ) + Dataset(fname, mode="w").fromcdl( + cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" + ) + return fname + + +def test_timeseries_netcdf_file(conf_proj_2x3, data_timeseries_netcdf_file): + nc_file = NetCDFSurfexFile(data_timeseries_netcdf_file, conf_proj_2x3) + var = SurfexFileVariable("TG1", layers=[1], patches=[1]) + validtime = as_datetime("2020022000") + # TODO: Fix this reading + with pytest.raises(IndexError): + nc_file.points(var, conf_proj_2x3, validtime=validtime) diff --git a/tests/unit/test_frost.py b/tests/unit/test_frost.py new file mode 100644 index 0000000..568cf4f --- /dev/null +++ b/tests/unit/test_frost.py @@ -0,0 +1,21 @@ +"""Test observations from frost API.""" +import os + +import pytest + +from surfex.input_methods import get_datasources + + +@pytest.mark.usefixtures("_mockers") +def test_get_bufr_datasource(obstime): + settings = { + "label": { + "filetype": "frost", + "varname": "air_temperature", + "lonrange": [0, 20], + "latrange": [55, 65], + "dt": 1800, + } + } + os.environ["CLIENTID"] = "dummy" + get_datasources(obstime, settings) diff --git 
a/tests/unit/test_geo.py b/tests/unit/test_geo.py new file mode 100644 index 0000000..33ed516 --- /dev/null +++ b/tests/unit/test_geo.py @@ -0,0 +1,245 @@ +"""Test geometry.""" +import pytest + +from surfex.geo import ( + IGN, + Cartesian, + ConfProj, + LonLatReg, + LonLatVal, + get_geo_object, + set_domain, +) +from surfex.namelist import BaseNamelist + + +def test_geo_not_defined(): + """Test geometry not defined.""" + domain = {"nam_pgd_grid": {"cgrid": "not_existing"}} + with pytest.raises(NotImplementedError): + get_geo_object(domain) + + +def test_get_geo_obj(): + """Test get geometry object.""" + domain = {"not_existing": {"some_key": "some_value"}} + with pytest.raises(KeyError): + get_geo_object(domain) + + domain = {"nam_pgd_grid": {"not_existing": "some_value"}} + with pytest.raises(KeyError): + get_geo_object(domain) + + +def test_geo_conf_proj(conf_proj_2x3_dict): + """Test conf proj geometry.""" + my_geo = get_geo_object(conf_proj_2x3_dict) + + json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} + my_settings = BaseNamelist.ascii2nml(json_settings) + my_geo.update_namelist(my_settings) + assert conf_proj_2x3_dict["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + print(my_geo.identifier()) + + new_domain = { + "not_existing": {"not_existing": "some_value"}, + "nam_conf_proj_grid": conf_proj_2x3_dict["nam_conf_proj_grid"], + } + with pytest.raises(KeyError): + ConfProj(new_domain) + + new_domain = { + "not_existing": {"not_existing": "some_value"}, + "nam_conf_proj": conf_proj_2x3_dict["nam_conf_proj"], + } + with pytest.raises(KeyError): + ConfProj(new_domain) + + new_domain = { + "nam_conf_proj": {"not_existing": "some_value"}, + "nam_conf_proj_grid": conf_proj_2x3_dict["nam_conf_proj_grid"], + } + with pytest.raises(KeyError): + ConfProj(new_domain) + + new_domain = { + "nam_conf_proj_grid": {"not_existing": "some_value"}, + "nam_conf_proj": conf_proj_2x3_dict["nam_conf_proj"], + } + with pytest.raises(KeyError): + ConfProj(new_domain) + + assert my_geo.lons[0][0] == pytest.approx(9.95323219) + assert my_geo.lats[0][0] == pytest.approx(59.99198266) + assert my_geo.xxx[0] == pytest.approx(561109.9103510105) + assert my_geo.yyy[0] == pytest.approx(1154504.0851275164) + + +def test_geo_lonlat_reg(): + """Test lonlat geometry.""" + domain = { + "nam_pgd_grid": {"cgrid": "LONLAT REG"}, + "nam_lonlat_reg": { + "xlonmin": 10, + "xlonmax": 11, + "xlatmin": 60, + "xlatmax": 61, + "nlon": 11, + "nlat": 11, + }, + } + my_geo = get_geo_object(domain) + json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} + my_settings = BaseNamelist.ascii2nml(json_settings) + my_settings = my_geo.update_namelist(my_settings) + assert domain["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + assert my_settings["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + + domain = { + "nam_pgd_grid": {"cgrid": "LONLAT REG"}, + "nam_lonlat_reg": { + "xlonmin": 10, + "xlonmax": 11, + "xlatmin": 60, + "xlatmax": 61, + "nlon": 0, + "nlat": 11, + }, + } + with pytest.raises(ZeroDivisionError): + LonLatReg(domain) + + domain = {"not_existing": {"existing": "some_value"}} + with pytest.raises(KeyError): + LonLatReg(domain) + + domain = {"nam_lonlat_reg": {"not_existing": "some_value"}} + with pytest.raises(KeyError): + LonLatReg(domain) + + +def test_geo_lonlatval(): + """Test lonlatval geometry.""" + domain = { + "nam_pgd_grid": {"cgrid": "LONLATVAL"}, + "nam_lonlatval": { + "xx": [10.0, 11.0], + "xy": [60.0, 61.0], + "xdx": [0.1, 0.1], + "xdy": [0.1, 0.1], + }, + } + my_geo = get_geo_object(domain) + json_settings =
{"nam_io_offline": {"csurf_filetype": "NC"}} + my_settings = BaseNamelist.ascii2nml(json_settings) + my_settings = my_geo.update_namelist(my_settings) + assert domain["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + assert my_settings["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + + domain = {"not_existing": {"existing": "some_value"}} + with pytest.raises(KeyError): + LonLatVal(domain) + + domain = {"nam_lonlatval": {"not_existing": "some_value"}} + with pytest.raises(KeyError): + LonLatVal(domain) + + +def test_geo_cartesian(): + """Test cartesian geometry.""" + domain = { + "nam_pgd_grid": {"cgrid": "CARTESIAN"}, + "nam_cartesian": { + "xlat0": 0, + "xlon0": 0, + "nimax": 11, + "njmax": 21, + "xdx": 0.1, + "xdy": 0.05, + }, + } + my_geo = get_geo_object(domain) + json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} + my_settings = BaseNamelist.ascii2nml(json_settings) + my_settings = my_geo.update_namelist(my_settings) + assert domain["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + assert my_settings["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + + domain = {"not_existing": {"existing": "some_value"}} + with pytest.raises(KeyError): + Cartesian(domain) + + domain = {"nam_cartesian": {"not_existing": "some_value"}} + with pytest.raises(KeyError): + Cartesian(domain) + + +def test_geo_ign(): + """Test ign geometry.""" + domain = { + "nam_pgd_grid": {"cgrid": "IGN"}, + "nam_ign": { + "clambert": 7, + "npoints": 3, + "xx": [11000, 13000, 11000], + "xy": [21000, 21000, 23000], + "xdx": [1000, 1000, 1000], + "xdy": [1000, 1000, 1000], + "xx_llcorner": 0, + "xy_llcorner": 0, + "xcellsize": 1000, + "ncols": 1, + "nrows": 1, + }, + } + my_geo = IGN(domain, recreate=True) + json_settings = {"nam_io_offline": {"csurf_filetype": "NC"}} + my_settings = BaseNamelist.ascii2nml(json_settings) + my_settings = my_geo.update_namelist(my_settings) + assert domain["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + assert my_settings["nam_pgd_grid"]["cgrid"] == my_geo.cgrid + + my_geo1 = IGN(domain, recreate=False) + my_geo2 = IGN(domain, recreate=True) + assert my_geo1.is_identical(my_geo2) + + domain = { + "nam_pgd_grid": {"cgrid": "IGN"}, + "nam_ign": { + "clambert": -99, + "npoints": 0, + "xx": 11, + "xy": 21, + "xdx": 1000, + "xdy": 1000, + "xx_llcorner": 0, + "xy_llcorner": 0, + "xcellsize": 1000, + "ncols": 1, + "nrows": 1, + }, + } + with pytest.raises(NotImplementedError): + get_geo_object(domain) + + domain = {"not_existing": {"existing": "some_value"}} + with pytest.raises(KeyError): + IGN(domain) + + domain = {"nam_ign": {"not_existing": "some_value"}} + with pytest.raises(KeyError): + IGN(domain) + + +def test_set_domain_unittest(): + """Test set domain.""" + domains = {"NAME": {"nam_pgd_grid": {"cgrid": "some_projection"}}} + domain = set_domain(domains, "NAME") + assert domains["NAME"]["nam_pgd_grid"]["cgrid"] == domain["nam_pgd_grid"]["cgrid"] + + with pytest.raises(KeyError): + set_domain(domains, "not_existing") + + domains = ["NAME"] + with pytest.raises(ValueError): # noqa PT011 + set_domain(domains, "NAME") diff --git a/tests/unit/test_grib.py b/tests/unit/test_grib.py new file mode 100644 index 0000000..c971602 --- /dev/null +++ b/tests/unit/test_grib.py @@ -0,0 +1,163 @@ +"""Test grib.""" +import json + +import pytest + +from surfex.cache import Cache +from surfex.datetime_utils import as_datetime +from surfex.grib import Grib, Grib1Variable, Grib2Variable +from surfex.read import ConvertedInput, Converter + + +@pytest.fixture() +def converter_config(lambert_t2m_grib1, lambert_t1_grib2): + config = { + 
"grib1": {"fcint": 10800, "file_inc": 3600, "offset": 0}, + "grib2": {"fcint": 10800, "file_inc": 3600, "offset": 0}, + "t2m": { + "grib1": { + "converter": { + "none": { + "parameter": 11, + "type": 105, + "level": 2, + "tri": 0, + "filepattern": lambert_t2m_grib1, + } + } + } + }, + "t1": { + "grib2": { + "converter": { + "none": { + "discipline": 0, + "parameterCategory": 0, + "parameterNumber": 0, + "levelType": 103, + "typeOfStatisticalProcessing": -1, + "level": 2, + "filepattern": lambert_t1_grib2, + } + } + } + }, + } + return config + + +def get_var(edition, conf): + kwargs = conf["none"] + if edition == 1: + parameter = kwargs["parameter"] + typ = kwargs["type"] + level = kwargs["level"] + tri = kwargs["tri"] + var = Grib1Variable(parameter, typ, level, tri) + return var + elif edition == 2: + discipline = kwargs["discipline"] + parameter_category = kwargs["parameterCategory"] + parameter_number = kwargs["parameterNumber"] + level_type = kwargs["levelType"] + level = kwargs["level"] + type_of_statistical_processing = kwargs["typeOfStatisticalProcessing"] + var = Grib2Variable( + discipline, + parameter_category, + parameter_number, + level_type, + level, + type_of_statistical_processing, + ) + return var + + +def write_json_file(fname, keys): + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(keys, fhandler) + + +@pytest.mark.usefixtures("_mockers") +def test_grib1_from_converter(converter_config, conf_proj_domain): + """Test grib1 from converter.""" + # Grib 1 + fileformat = "grib1" + var = "t2m" + print(var, fileformat) + defs = converter_config[fileformat] + converter_conf = converter_config[var][fileformat]["converter"] + + var = get_var(1, converter_conf) + validtime = as_datetime("2020111306") + cache = Cache(7200) + initial_basetime = validtime + converter = Converter("none", initial_basetime, defs, converter_conf, fileformat) + ConvertedInput(conf_proj_domain, var, converter).read_time_step(validtime, cache) + + +@pytest.mark.usefixtures("_mockers") +def test_grib2_from_converter(converter_config, conf_proj_domain): + """Test grib2 from converter.""" + fileformat = "grib2" + var = "t1" + print(var, fileformat) + defs = converter_config[fileformat] + converter_conf = converter_config[var][fileformat]["converter"] + + var = get_var(2, converter_conf) + validtime = as_datetime("2020111306") + cache = Cache(7200) + initial_basetime = validtime + converter = Converter("none", initial_basetime, defs, converter_conf, fileformat) + ConvertedInput(conf_proj_domain, var, converter).read_time_step(validtime, cache) + + +@pytest.mark.usefixtures("_mockers") +def test_read_rotated_ll_grib1(converter_config, rotated_ll_t2m_grib1): + + converter_conf = converter_config["t2m"]["grib1"]["converter"] + var = get_var(1, converter_conf) + grib_file = Grib(rotated_ll_t2m_grib1) + assert not var.is_accumulated() + var.print_keys() + validtime = as_datetime("2020111306") + grib_file.field(var, validtime) + + +@pytest.mark.usefixtures("_mockers") +def test_read_rotated_ll_grib2(converter_config, rotated_ll_t1_grib2): + + converter_conf = converter_config["t1"]["grib2"]["converter"] + var = get_var(2, converter_conf) + grib_file = Grib(rotated_ll_t1_grib2) + assert not var.is_accumulated() + var.print_keys() + validtime = as_datetime("2020111306") + grib_file.field(var, validtime) + + +@pytest.mark.usefixtures("_mockers") +def test_read_regular_ll_grib1(converter_config, regular_ll_t2m_grib1): + + converter_conf = converter_config["t2m"]["grib1"]["converter"] + var = 
get_var(1, converter_conf) + + grib_file = Grib(regular_ll_t2m_grib1) + assert not var.is_accumulated() + var.print_keys() + validtime = as_datetime("2020111306") + grib_file.field(var, validtime) + + +@pytest.mark.usefixtures("_mockers") +def test_read_regular_ll_grib2(converter_config, regular_ll_t1_grib2): + + converter_conf = converter_config["t1"]["grib2"]["converter"] + var = get_var(2, converter_conf) + + grib_file = Grib(regular_ll_t1_grib2) + assert not var.is_accumulated() + var.print_keys() + validtime = as_datetime("2020111306") + grib_file.field(var, validtime) diff --git a/tests/unit/test_interpolation.py b/tests/unit/test_interpolation.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/test_namelist.py b/tests/unit/test_namelist.py new file mode 100644 index 0000000..c5c0691 --- /dev/null +++ b/tests/unit/test_namelist.py @@ -0,0 +1,80 @@ +"""Test namelist settings.""" +import json + +import pytest + +from surfex.configuration import ConfigurationFromTomlFile +from surfex.datetime_utils import as_datetime +from surfex.namelist import BaseNamelist, Namelist + + +@pytest.fixture() +def namelist_dict(): + dict_data = { + "nam_block": {"key": "val"}, + "TEST": {"@VEGTYPE@@DECADE@@VAR@": "@VEGTYPE@@DECADE@@VAR@"}, + } + return dict_data + + +@pytest.fixture() +def namelist_file(namelist_dict, tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/namelist_dict.json" + with open(fname, mode="w", encoding="utf-8") as fhandler: + json.dump(namelist_dict, fhandler) + return fname + + +def test_namelist( + config_exp_surfex_toml, get_nam_path, namelist_dict, namelist_file, tmp_path_factory +): + nml = BaseNamelist.ascii_file2nml(namelist_file) + output_file = f"{tmp_path_factory.getbasetemp().as_posix()}/namelist_output_testjson" + prep_file = f"{tmp_path_factory.getbasetemp().as_posix()}/namelist_prep_input.json" + BaseNamelist.nml2ascii(nml, output_file) + BaseNamelist.capitalize_namelist_dict(namelist_dict) + BaseNamelist.lower_case_namelist_dict(namelist_dict) + + programs = ["pgd", "prep", "offline", "soda"] + for program in programs: + kwargs = { + "geo": None, + "fcint": 3, + "dtg": None, + "forc_zs": False, + "prep_file": None, + "prep_filetype": None, + "prep_pgdfile": None, + "prep_pgdfiletype": None, + } + if program == "prep": + kwargs.update( + { + "dtg": as_datetime("2020022000"), + "prep_file": prep_file, + "prep_filetype": "json", + } + ) + if program == "offline": + kwargs.update({"forc_zs": True}) + if program == "soda": + kwargs.update({"dtg": as_datetime("2020022000")}) + config = ConfigurationFromTomlFile(config_exp_surfex_toml) + config.update_setting("SURFEX#COVER#SG", True) + config.update_setting("SURFEX#ISBA#SCHEME", "DIF") + BaseNamelist(program, config, get_nam_path, **kwargs) + config.update_setting("SURFEX#ASSIM#SCHEMES#ISBA", "OI") + BaseNamelist(program, config, get_nam_path, **kwargs) + config.update_setting("SURFEX#ASSIM#SCHEMES#ISBA", "ENKF") + BaseNamelist(program, config, get_nam_path, **kwargs) + + BaseNamelist.set_direct_data_namelist( + "DATA_ISBA", "YSOC_TOP", "/data/db.dir", "input_path" + ) + BaseNamelist.set_direct_data_namelist( + "DATA_ISBA", "YSOC_TOP", "/data/db.json", "input_path" + ) + + key = "@VEGTYPE@@DECADE@@VAR@" + value = "value" + Namelist.sub(namelist_dict, "TEST", key, value, vtype="1", decade="1", var="VAR") diff --git a/tests/unit/test_netatmo.py b/tests/unit/test_netatmo.py new file mode 100644 index 0000000..bec2b59 --- /dev/null +++ b/tests/unit/test_netatmo.py @@ -0,0 +1,85 @@ 
+"""Test netatmo data.""" +import pytest + +from surfex.datetime_utils import as_datetime +from surfex.input_methods import get_datasources + + +@pytest.fixture() +def netatmo_obs_time(): + obstime = as_datetime("2020022006") + return obstime + + +@pytest.fixture() +def netatmo_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/netatmo.json" + with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +[ + { + "location":[8.842586,59.990364], + "_id":"enc:16:2SNthpvB7zyy/aNRdXbYXD1KYpVSDpvd+JCXxKUZrW2XFaXJRD/OpEfPfKCSHE0r", + "data":{ + "Pressure":1038.2, + "Humidity":93, + "Temperature":5.1, + "time_utc":1582178314 + }, + "altitude":354 + },{ + "location":[10.95949,60.87645], + "_id":"enc:16:ELUVSPVmgjpAcRdAtLSP6g2f6Y9fY8NGnRd/X0ZkfKMcWxTPK8GjEmTv8AbPTjo6", + "data":{ + "Pressure":1019.4, + "Rain":0.1, + "time_day_rain":1582178316, + "sum_rain_1":0.1, + "time_hour_rain":1582178315, + "wind":{ + "1605245567":[4,90], + "1605245869":[3,130], + "1605245978":[2,142], + "1605246272":[2,135], + "1605246574":[3,102] + }, + "wind_gust":{ + "1605245567":[10,122], + "1605245869":[8,80], + "1605245978":[7,138], + "1605246272":[6,119], + "1605246574":[7,90] + }, + "Humidity":90, + "Temperature":4.6, + "time_utc":1582178313 + }, + "altitude":162 + } +] + """ + ) + return fname + + +@pytest.fixture() +def settings(netatmo_file): + settings_dict = { + "label": { + "filetype": "netatmo", + "filepattern": netatmo_file, + "varname": "Temperature", + "lonrange": [-10, 20], + "latrange": [-10, 70], + "dt": 1800, + } + } + return settings_dict + + +def test_get_netatmo_datasource(netatmo_obs_time, settings): + dataset = get_datasources(netatmo_obs_time, settings) + assert len(dataset) == 1 + print(dataset[0]) + assert len(dataset[0].observations) == 2 diff --git a/tests/unit/test_netcdf.py b/tests/unit/test_netcdf.py new file mode 100644 index 0000000..d871d46 --- /dev/null +++ b/tests/unit/test_netcdf.py @@ -0,0 +1,10 @@ +"""Test netCDF features.""" + +from surfex.datetime_utils import as_datetime +from surfex.netcdf import Netcdf + + +def test_read_thredds_nc(data_thredds_nc_file): + nc_file = Netcdf(data_thredds_nc_file) + field, __ = nc_file.field("air_temperature_2m", validtime=as_datetime("2020022006")) + assert field.shape == (2, 3) diff --git a/tests/unit/test_obsoul.py b/tests/unit/test_obsoul.py new file mode 100644 index 0000000..6180889 --- /dev/null +++ b/tests/unit/test_obsoul.py @@ -0,0 +1,65 @@ +"""Obsoul unit testing.""" +import pytest + +from surfex.input_methods import get_datasources +from surfex.obsoul import ObservationDataSetFromObsoulFile + + +@pytest.fixture() +def obsoul_carra1(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/carra1.obsoul" + with open(fname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ + 20200220 06 + 17 4 1165 73.94860 29.84100 '2600537 ' 20200220 90000 0.000000000 1 1111 100000 + 1 -100130.00000000000 1.7000000000000000E+038 0.0000000000000000 2064 + 862 5 24035 69.32000 16.13000 '01010 ' 20200220 110000 10.000000000 7 11111 0 + 39 100900.00000000000 1.7000000000000000E+038 275.60000000000002 3680 + 58 100900.00000000000 1.7000000000000000E+038 90.000000000000000 3680 + 7 100900.00000000000 6.3026700825046466E-004 4.0506081908848988E-003 3680 + 41 100900.00000000000 4.0000000000000000 330.00000000000000 3680 + 1 100000.00000000000 1.7000000000000000E+038 750.00000000000000 2560 + 2 100000.00000000000 1.7000000000000000E+038 275.19999999999999 2560 + 29 
100000.00000000000 1.7000000000000000E+038 90.000000000000000 2560 +1102 5 81035 63.71000 9.60100 '01241 ' 20200220 110000 10.00000000 10 11111 0 + 39 101700.00000000000 1.7000000000000000E+038 270.30000000000001 3680 + 58 101700.00000000000 1.7000000000000000E+038 83.000000000000000 3680 + 7 101700.00000000000 4.1321236892005844E-004 2.4490122137644232E-003 3680 + 41 101700.00000000000 6.0000000000000000 115.00000000000000 3680 + 2 100800.00000000000 1.7000000000000000E+038 269.69999999999999 10304 + 29 100800.00000000000 1.7000000000000000E+038 80.000000000000000 10304 + 7 100800.00000000000 3.9631707201687026E-004 2.2638899838061258E-003 10304 + 1 100000.00000000000 1.7000000000000000E+038 1370.0000000000000 2560 + 2 100000.00000000000 1.7000000000000000E+038 269.88999999999999 2560 + 29 100000.00000000000 1.7000000000000000E+038 73.000000000000000 2560 +""" + ) + return fname + + +@pytest.fixture() +def cryoclim_settings(obsoul_cryoclim_cy43): + settings_dict = { + "label": { + "filetype": "obsoul", + "filepattern": obsoul_cryoclim_cy43, + "varname": "Temperature", + "lonrange": [-10, 20], + "latrange": [-10, 70], + "dt": 1800, + } + } + return settings_dict + + +def test_get_obsoul_cryoclim_datasource(obstime, cryoclim_settings): + dataset = get_datasources(obstime, cryoclim_settings) + assert len(dataset) == 1 + print(dataset[0]) + assert len(dataset[0].observations) == 4 + + +def test_get_obsoul_carra1(obsoul_carra1): + obsset = ObservationDataSetFromObsoulFile(obsoul_carra1) + assert len(obsset.observations) == 18 diff --git a/tests/unit/test_obsset.py b/tests/unit/test_obsset.py new file mode 100644 index 0000000..a6ba2e4 --- /dev/null +++ b/tests/unit/test_obsset.py @@ -0,0 +1,60 @@ +"""Test Observation sets.""" +import json + +import pytest + +from surfex.datetime_utils import as_datetime +from surfex.input_methods import get_datasources + + +@pytest.fixture() +def obs_time(): + obstime = as_datetime("2020022000") + return obstime + + +@pytest.fixture() +def filepattern(tmp_path_factory): + filename = tmp_path_factory.getbasetemp() / "obsset_file.json" + data = { + "0": { + "obstime": "20201113060000", + "varname": "air_temperature", + "lon": 10.578, + "lat": 59.4352, + "stid": "17280", + "elev": 14.0, + "value": 278.04999999999995, + }, + "1": { + "obstime": "20201113060000", + "varname": "air_temperature", + "lon": 10.578, + "lat": 59.4352, + "stid": "17280", + "elev": 14.0, + "value": 277.15, + }, + } + with open(filename, mode="w", encoding="utf-8") as fhandler: + json.dump(data, fhandler) + return filename + + +@pytest.fixture() +def settings(filepattern): + settings_dict = { + "label": { + "filetype": "json", + "filepattern": filepattern, + "varname": "air_temperature", + "lonrange": [0, 20], + "latrange": [55, 65], + "dt": 1800, + } + } + return settings_dict + + +def test_get_bufr_datasource(obs_time, settings): + get_datasources(obs_time, settings) diff --git a/tests/unit/test_oi2soda.py b/tests/unit/test_oi2soda.py new file mode 100644 index 0000000..d86cc70 --- /dev/null +++ b/tests/unit/test_oi2soda.py @@ -0,0 +1,21 @@ +"""Test oi2soda.""" +from datetime import datetime + +from netCDF4 import Dataset + +from surfex.netcdf import oi2soda + + +def test_oi2soda_only_rh(tmp_path_factory): + """Test oi2soda only for rh2m.""" + rh2mfile = f"{tmp_path_factory.getbasetemp().as_posix()}/rh2n.nc" + rh2m_var = "relative_humidity_2m" + ncf = Dataset(rh2mfile, mode="w") + ncf.createDimension("nx", 9) + ncf.createDimension("ny", 19) + ncf.createVariable(rh2m_var, "f4", ("nx", 
"ny")) + ncf.close() + output = f"{tmp_path_factory.getbasetemp().as_posix()}/oi2soda.txt" + dtg = datetime(year=2020, month=3, day=30, hour=6) + rh2m = {"file": rh2mfile, "var": rh2m_var} + oi2soda(dtg, rh2m=rh2m, output=output) diff --git a/tests/unit/test_timeseries.py b/tests/unit/test_timeseries.py new file mode 100644 index 0000000..9e3ae03 --- /dev/null +++ b/tests/unit/test_timeseries.py @@ -0,0 +1,59 @@ +"""Test timeseries.""" +import json + +import pytest + +from surfex.datetime_utils import as_datetime +from surfex.geo import LonLatVal +from surfex.read import Converter +from surfex.timeseries import TimeSeriesFromConverter + + +@pytest.fixture() +def obsset_ts(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/obs_set_t2m.json" + obs = { + "0": { + "obstime": "20201113060000", + "varname": "air_temperature", + "lon": 10.578, + "lat": 59.4352, + "stid": "17280", + "elev": 14.0, + "value": 278.04999999999995, + }, + "1": { + "obstime": "20201113060000", + "varname": "air_temperature", + "lon": 10.578, + "lat": 59.4352, + "stid": "17280", + "elev": 14.0, + "value": 277.15, + }, + } + json.dump(obs, open(fname, mode="w", encoding="utf-8")) + return fname + + +def test_timeseries_from_converter_from_obs(obsset_ts, tmp_path_factory): + starttime = as_datetime("20201113060000") + endtime = as_datetime("20201113070000") + defs = {"filetype": "json", "fcint": 3600, "offset": 0, "filepattern": obsset_ts} + conf = {"none": {"name": "air_temperature"}} + fileformat = "obs" + converter = Converter("none", starttime, defs, conf, fileformat) + + positions = { + "nam_lonlatval": { + "xx": [10.578], + "xy": [59.4352], + "xdx": [0.1], + "xdy": [0.1], + } + } + geo = LonLatVal(positions) + ts = TimeSeriesFromConverter("air_temperature", geo, converter, starttime, endtime) + + output_file = f"{tmp_path_factory.getbasetemp().as_posix()}/ts_air_temperature.json" + ts.write_json(output_file, indent=2) diff --git a/tests/unit/test_titan.py b/tests/unit/test_titan.py new file mode 100644 index 0000000..7e78f5f --- /dev/null +++ b/tests/unit/test_titan.py @@ -0,0 +1,140 @@ +"""Test titan.""" +import numpy as np +import pytest + +from surfex.titan import ( + Blacklist, + Buddy, + Climatology, + DomainCheck, + FirstGuess, + Fraction, + NoMeta, + Plausibility, + Redundancy, + Sct, + dataset_from_json, +) + + +def obs_set(an_time): + + obs_set = { + "0": { + "varname": "airTemperatureAt2M", + "obstime": "20200220060000", + "lon": 6.9933000000000005, + "lat": 62.191, + "stid": "1111", + "elev": 900.0, + "value": 273, + "flag": 0.0, + "ci": 1.0, + "laf": 1.0, + "provider": "bufr", + "fg_dep": np.nan, + "an_dep": np.nan, + "passed_tests": [], + }, + "1": { + "varname": "airTemperatureAt2M", + "obstime": "20200220060000", + "lon": 7.8173, + "lat": 59.767500000000005, + "stid": "NA", + "elev": 1340.0, + "value": 274, + "flag": 0.0, + "ci": 1.0, + "laf": 1.0, + "provider": "bufr", + "fg_dep": np.nan, + "an_dep": np.nan, + "passed_tests": [], + }, + } + obs_set = obs_set.copy() + return dataset_from_json(an_time, obs_set) + + +def test_plausibility1(an_time): + mask = [0, 1] + test = Plausibility(minval=272, maxval=273.5) + test.set_input(2) + flags = test.test(obs_set(an_time), mask) + assert flags == [0.0, 102] + + +def test_plausibility2(an_time): + mask = [1] + test = Plausibility(minval=273.5, maxval=274.5) + test.set_input(1) + flags = test.test(obs_set(an_time), mask) + assert flags == [0.0, 0.0] + + +def test_blacklist(an_time): + mask = [0, 1] + blacklist = {"lons": [6.9933], 
"lats": [62.191]} + qc = Blacklist(blacklist) + qc.set_input(2) + qc.test(obs_set(an_time), mask) + + +def test_buddy(an_time): + mask = [0, 1] + buddy = Buddy() + buddy.set_input(2) + with pytest.raises(TypeError): + buddy.test(obs_set(an_time), mask) + + +def test_domain(conf_proj_2x3, an_time): + mask = [0, 1] + qc = DomainCheck(conf_proj_2x3) + qc.set_input(2) + qc.test(obs_set(an_time), mask) + + +def test_first_guess(conf_proj_2x3, an_time): + mask = [0, 1] + first_guess = np.ndarray(shape=(2, 3), dtype=float) + qc = FirstGuess(conf_proj_2x3, first_guess, negdiff=0.1, posdiff=0.2) + qc.set_input(2) + qc.test(obs_set(an_time), mask) + + +def test_fraction(conf_proj_2x3, an_time): + mask = [0, 1] + lsm = np.ndarray(shape=(2, 3), dtype=float) + qc = Fraction(conf_proj_2x3, lsm, minval=0, maxval=1) + qc.set_input(2) + qc.test(obs_set(an_time), mask) + + +def test_sct(an_time): + mask = [0, 1] + sct = Sct() + sct.set_input(2) + sct.test(obs_set(an_time), mask) + + +def test_no_meta(an_time): + mask = [0, 1] + qc = NoMeta() + qc.set_input(2) + qc.test(obs_set(an_time), mask) + + +def test_redundancy(an_time): + mask = [0, 1] + qc = Redundancy(an_time) + qc.set_input(2) + qc.test(obs_set(an_time), mask) + + +def test_climatology(an_time): + mask = [0, 1] + clim = Climatology(an_time, minval=270, maxval=280) + clim.set_input(2) + clim.test(obs_set(an_time), mask) diff --git a/tests/unit/test_variable.py b/tests/unit/test_variable.py new file mode 100644 index 0000000..ec0c57c --- /dev/null +++ b/tests/unit/test_variable.py @@ -0,0 +1,273 @@ +"""Test variable.""" +import pytest + +from surfex.datetime_utils import as_datetime_args, as_timedelta +from surfex.variable import Variable + + +@pytest.fixture() +def fixture(): + cfg = { + "grib1": { + "fcint": 10800, + "offset": 0, + "timestep": 3600, + "parameter": -1, + "type": 105, + "level": 0, + "tri": 0, + "prefer_forecast": True, + "filepattern": "archive/@YYYY@/@MM@/@DD@/@HH@/fc@YYYY@@MM@@DD@@HH@_@LLL@grib_fp", + "blueprint": { + "0": "archive/2019/11/13/00/fc2019111300_000grib_fp", + "1": "archive/2019/11/13/00/fc2019111300_001grib_fp", + "2": "archive/2019/11/13/00/fc2019111300_002grib_fp", + "3": "archive/2019/11/13/00/fc2019111300_003grib_fp", + "4": "archive/2019/11/13/03/fc2019111303_001grib_fp", + "5": "archive/2019/11/13/03/fc2019111303_002grib_fp", + "6": "archive/2019/11/13/03/fc2019111303_003grib_fp", + "7": "archive/2019/11/13/06/fc2019111306_001grib_fp", + "8": "archive/2019/11/13/06/fc2019111306_002grib_fp", + "9": "archive/2019/11/13/06/fc2019111306_003grib_fp", + "10": "archive/2019/11/13/09/fc2019111309_001grib_fp", + }, + "blueprint_previous": { + "1": "archive/2019/11/13/00/fc2019111300_000grib_fp", + "2": "archive/2019/11/13/00/fc2019111300_001grib_fp", + "3": "archive/2019/11/13/00/fc2019111300_002grib_fp", + "4": "archive/2019/11/13/03/fc2019111303_000grib_fp", + "5": "archive/2019/11/13/03/fc2019111303_001grib_fp", + "6": "archive/2019/11/13/03/fc2019111303_002grib_fp", + "7": "archive/2019/11/13/06/fc2019111306_000grib_fp", + "8": "archive/2019/11/13/06/fc2019111306_001grib_fp", + "9": "archive/2019/11/13/06/fc2019111306_002grib_fp", + "10": "archive/2019/11/13/09/fc2019111309_000grib_fp", + }, + }, + "grib2": { + "fcint": 21600, + "offset": 10800, + "timestep": 3600, + "discipline": 0, + "parameterCategory": 0, + "parameterNumber": 0, + "levelType": 0, + "level": 0, + "typeOfStatisticalProcessing": 0, + "prefer_forecast": True, + "filepattern": "archive/@YYYY@/@MM@/@DD@/@HH@/fc@YYYY@@MM@@DD@@HH@_@LLL@grib2_fp", + 
"blueprint": { + "0": "archive/2019/11/13/00/fc2019111300_002grib2_fp", + "1": "archive/2019/11/13/00/fc2019111300_003grib2_fp", + "2": "archive/2019/11/13/00/fc2019111300_004grib2_fp", + "3": "archive/2019/11/13/00/fc2019111300_005grib2_fp", + "4": "archive/2019/11/13/00/fc2019111300_006grib2_fp", + "5": "archive/2019/11/13/00/fc2019111300_007grib2_fp", + "6": "archive/2019/11/13/00/fc2019111300_008grib2_fp", + "7": "archive/2019/11/13/00/fc2019111300_009grib2_fp", + "8": "archive/2019/11/13/06/fc2019111306_004grib2_fp", + "9": "archive/2019/11/13/06/fc2019111306_005grib2_fp", + "10": "archive/2019/11/13/06/fc2019111306_006grib2_fp", + "11": "archive/2019/11/13/06/fc2019111306_007grib2_fp", + "12": "archive/2019/11/13/06/fc2019111306_008grib2_fp", + "13": "archive/2019/11/13/06/fc2019111306_009grib2_fp", + "14": "archive/2019/11/13/12/fc2019111312_004grib2_fp", + "15": "archive/2019/11/13/12/fc2019111312_005grib2_fp", + "16": "archive/2019/11/13/12/fc2019111312_006grib2_fp", + "17": "archive/2019/11/13/12/fc2019111312_007grib2_fp", + "18": "archive/2019/11/13/12/fc2019111312_008grib2_fp", + "19": "archive/2019/11/13/12/fc2019111312_009grib2_fp", + "20": "archive/2019/11/13/18/fc2019111318_004grib2_fp", + "21": "archive/2019/11/13/18/fc2019111318_005grib2_fp", + "22": "archive/2019/11/13/18/fc2019111318_006grib2_fp", + "23": "archive/2019/11/13/18/fc2019111318_007grib2_fp", + "24": "archive/2019/11/13/18/fc2019111318_008grib2_fp", + "25": "archive/2019/11/13/18/fc2019111318_009grib2_fp", + "26": "archive/2019/11/14/00/fc2019111400_004grib2_fp", + "27": "archive/2019/11/14/00/fc2019111400_005grib2_fp", + "28": "archive/2019/11/14/00/fc2019111400_006grib2_fp", + "29": "archive/2019/11/14/00/fc2019111400_007grib2_fp", + "30": "archive/2019/11/14/00/fc2019111400_008grib2_fp", + "31": "archive/2019/11/14/00/fc2019111400_009grib2_fp", + "32": "archive/2019/11/14/06/fc2019111406_004grib2_fp", + }, + "blueprint_previous": { + "0": "archive/2019/11/13/00/fc2019111300_001grib2_fp", + "1": "archive/2019/11/13/00/fc2019111300_002grib2_fp", + "2": "archive/2019/11/13/00/fc2019111300_003grib2_fp", + "3": "archive/2019/11/13/00/fc2019111300_004grib2_fp", + "4": "archive/2019/11/13/00/fc2019111300_005grib2_fp", + "5": "archive/2019/11/13/00/fc2019111300_006grib2_fp", + "6": "archive/2019/11/13/00/fc2019111300_007grib2_fp", + "7": "archive/2019/11/13/00/fc2019111300_008grib2_fp", + "8": "archive/2019/11/13/06/fc2019111306_003grib2_fp", + "9": "archive/2019/11/13/06/fc2019111306_004grib2_fp", + "10": "archive/2019/11/13/06/fc2019111306_005grib2_fp", + "11": "archive/2019/11/13/06/fc2019111306_006grib2_fp", + "12": "archive/2019/11/13/06/fc2019111306_007grib2_fp", + "13": "archive/2019/11/13/06/fc2019111306_008grib2_fp", + "14": "archive/2019/11/13/12/fc2019111312_003grib2_fp", + "15": "archive/2019/11/13/12/fc2019111312_004grib2_fp", + "16": "archive/2019/11/13/12/fc2019111312_005grib2_fp", + "17": "archive/2019/11/13/12/fc2019111312_006grib2_fp", + "18": "archive/2019/11/13/12/fc2019111312_007grib2_fp", + "19": "archive/2019/11/13/12/fc2019111312_008grib2_fp", + "20": "archive/2019/11/13/18/fc2019111318_003grib2_fp", + "21": "archive/2019/11/13/18/fc2019111318_004grib2_fp", + "22": "archive/2019/11/13/18/fc2019111318_005grib2_fp", + "23": "archive/2019/11/13/18/fc2019111318_006grib2_fp", + "24": "archive/2019/11/13/18/fc2019111318_007grib2_fp", + "25": "archive/2019/11/13/18/fc2019111318_008grib2_fp", + "26": "archive/2019/11/14/00/fc2019111400_003grib2_fp", + "27": 
"archive/2019/11/14/00/fc2019111400_004grib2_fp", + "28": "archive/2019/11/14/00/fc2019111400_005grib2_fp", + "29": "archive/2019/11/14/00/fc2019111400_006grib2_fp", + "30": "archive/2019/11/14/00/fc2019111400_007grib2_fp", + "31": "archive/2019/11/14/00/fc2019111400_008grib2_fp", + "32": "archive/2019/11/14/06/fc2019111406_003grib2_fp", + }, + }, + "netcdf": { + "fcint": 21600, + "offset": 10800, + "timestep": 3600, + "name": "test", + "filepattern": "archive/@YYYY@/@MM@/@DD@/meps@YYYY@@MM@@DD@Z@HH@.nc", + "blueprint": { + "0": "archive/2019/11/13/meps20191113Z00.nc", + "1": "archive/2019/11/13/meps20191113Z00.nc", + "2": "archive/2019/11/13/meps20191113Z00.nc", + "3": "archive/2019/11/13/meps20191113Z00.nc", + "4": "archive/2019/11/13/meps20191113Z00.nc", + "5": "archive/2019/11/13/meps20191113Z00.nc", + "6": "archive/2019/11/13/meps20191113Z00.nc", + "7": "archive/2019/11/13/meps20191113Z00.nc", + "8": "archive/2019/11/13/meps20191113Z00.nc", + "9": "archive/2019/11/13/meps20191113Z00.nc", + "10": "archive/2019/11/13/meps20191113Z06.nc", + }, + "blueprint_previous": { + "1": "archive/2019/11/13/meps20191113Z00.nc", + "2": "archive/2019/11/13/meps20191113Z00.nc", + "3": "archive/2019/11/13/meps20191113Z00.nc", + "4": "archive/2019/11/13/meps20191113Z00.nc", + "5": "archive/2019/11/13/meps20191113Z00.nc", + "6": "archive/2019/11/13/meps20191113Z00.nc", + "7": "archive/2019/11/13/meps20191113Z00.nc", + "8": "archive/2019/11/13/meps20191113Z00.nc", + "9": "archive/2019/11/13/meps20191113Z00.nc", + "10": "archive/2019/11/13/meps20191113Z06.nc", + }, + }, + "met_nordic": { + "fcint": 3600, + "offset": 0, + "timestep": 3600, + "name": "test", + "accumulated": False, + "instant": 3600, + "prefer_forecast": False, + "filepattern": "archive/@YYYY@/@MM@/@DD@/met_nordic_@YYYY@@MM@@DD@Z@HH@.nc", + "blueprint": { + "0": "archive/2019/11/13/met_nordic_20191113Z00.nc", + "1": "archive/2019/11/13/met_nordic_20191113Z01.nc", + "2": "archive/2019/11/13/met_nordic_20191113Z02.nc", + "3": "archive/2019/11/13/met_nordic_20191113Z03.nc", + "4": "archive/2019/11/13/met_nordic_20191113Z04.nc", + "5": "archive/2019/11/13/met_nordic_20191113Z05.nc", + "6": "archive/2019/11/13/met_nordic_20191113Z06.nc", + "7": "archive/2019/11/13/met_nordic_20191113Z07.nc", + "8": "archive/2019/11/13/met_nordic_20191113Z08.nc", + "9": "archive/2019/11/13/met_nordic_20191113Z09.nc", + "10": "archive/2019/11/13/met_nordic_20191113Z10.nc", + }, + }, + } + return cfg + + +def test_open_new_file_nc(fixture): + """Test to open a netcdf file.""" + initialtime = as_datetime_args(year=2019, month=11, day=13) + intervall = 3600 + case = "netcdf" + + var_dict = fixture[case] + var_type = case + for i in range(11): + validtime = initialtime + as_timedelta(seconds=intervall * i) + previoustime = validtime - as_timedelta(seconds=intervall) + variable = Variable(var_type, var_dict, initialtime) + previous_filename = variable.get_filename(validtime, previoustime=previoustime) + filename = variable.get_filename(validtime) + assert filename == var_dict["blueprint"][str(i)] + if i > 0: + assert previous_filename == var_dict["blueprint_previous"][str(i)] + + +def test_open_new_file_grib1(fixture): + """Test to open a grib1 file.""" + initialtime = as_datetime_args(year=2019, month=11, day=13) + intervall = 3600 + case = "grib1" + + var_dict = fixture[case] + var_type = case + for i in range(11): + validtime = initialtime + as_timedelta(seconds=intervall * i) + previoustime = validtime - as_timedelta(seconds=intervall) + variable = Variable(var_type, 
var_dict, initialtime) + previous_filename = variable.get_filename(validtime, previoustime=previoustime) + filename = variable.get_filename(validtime) + assert filename == var_dict["blueprint"][str(i)] + if i > 0: + assert previous_filename == var_dict["blueprint_previous"][str(i)] + + +def test_open_new_file_grib2(fixture): + """Test to open a grib2 file.""" + initialtime = as_datetime_args(year=2019, month=11, day=13, hour=2) + intervall = 3600 + case = "grib2" + + var_dict = fixture[case] + var_type = case + for i in range(11): + validtime = initialtime + as_timedelta(seconds=intervall * i) + previoustime = validtime - as_timedelta(seconds=intervall) + variable = Variable(var_type, var_dict, initialtime) + previous_filename = variable.get_filename(validtime, previoustime=previoustime) + filename = variable.get_filename(validtime) + assert filename == var_dict["blueprint"][str(i)] + if i > 0: + assert previous_filename == var_dict["blueprint_previous"][str(i)] + + +def test_open_new_file_an(fixture): + """Test to open a met nordic file.""" + initialtime = as_datetime_args(year=2019, month=11, day=13) + intervall = 3600 + case = "met_nordic" + + var_dict = fixture[case] + var_type = case + if var_type == "met_nordic": + var_type = "netcdf" + for i in range(11): + validtime = initialtime + as_timedelta(seconds=intervall * i) + variable = Variable(var_type, var_dict, initialtime) + filename = variable.get_filename(validtime) + assert filename == var_dict["blueprint"][str(i)] + + +def test_open_new_file_fail(fixture): + """Test failing to open a file.""" + initialtime = as_datetime_args(year=2019, month=11, day=13) + case = "met_nordic" + var_dict = fixture[case] + var_dict["offset"] = 7200 + var_type = case + if var_type == "met_nordic": + var_type = "netcdf" + with pytest.raises(RuntimeError): + Variable(var_type, var_dict, initialtime)
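
The blueprint tests above exercise how a filepattern such as "archive/@YYYY@/@MM@/@DD@/@HH@/fc@YYYY@@MM@@DD@@HH@_@LLL@grib_fp" is expected to expand into concrete archive paths for a given basetime and lead time. As a rough, self-contained sketch of that substitution (not the pysurfex implementation; resolve_pattern is a hypothetical helper, and selecting the basetime from fcint/offset/prefer_forecast, which Variable handles, is assumed to have happened already):

from datetime import datetime, timedelta


def resolve_pattern(pattern, basetime, validtime):
    """Expand @YYYY@/@MM@/@DD@/@HH@ from basetime and @LLL@ from the lead time in hours."""
    lead = int((validtime - basetime).total_seconds() // 3600)
    return (
        pattern.replace("@YYYY@", basetime.strftime("%Y"))
        .replace("@MM@", basetime.strftime("%m"))
        .replace("@DD@", basetime.strftime("%d"))
        .replace("@HH@", basetime.strftime("%H"))
        .replace("@LLL@", f"{lead:03d}")
    )


basetime = datetime(2019, 11, 13, 0)
validtime = basetime + timedelta(hours=2)
print(
    resolve_pattern(
        "archive/@YYYY@/@MM@/@DD@/@HH@/fc@YYYY@@MM@@DD@@HH@_@LLL@grib_fp",
        basetime,
        validtime,
    )
)
# expected: archive/2019/11/13/00/fc2019111300_002grib_fp (cf. blueprint["2"] in the grib1 case)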