From 02b1c2eb94282350e17f6a1144cd67de83383e71 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 28 Jul 2022 07:09:50 +0200 Subject: [PATCH 1/2] black --- .github/workflows/flake8.yml | 28 + docs/conf.py | 92 +- scripts/main.py | 17 +- setup.py | 12 +- src/actinia_satellite_plugin/__init__.py | 2 +- .../aws_sentinel2a_query.py | 340 ++-- src/actinia_satellite_plugin/endpoints.py | 61 +- .../ephemeral_sentinel2_ndvi_processor.py | 1377 ++++++++--------- .../persistent_landsat_timeseries_creator.py | 315 ++-- ...persistent_sentinel2_timeseries_creator.py | 302 ++-- .../satellite_query.py | 460 +++--- tests/test_aws_sentinel_service.py | 68 +- tests/test_landsat.py | 157 +- tests/test_landsat_timeseries_creation.py | 135 +- tests/test_resource_base.py | 20 +- tests/test_satellite_query.py | 146 +- tests/test_sentinel2_ndvi.py | 138 +- tests/test_sentinel2_timeseries_creation.py | 436 +++--- 18 files changed, 2346 insertions(+), 1760 deletions(-) create mode 100644 .github/workflows/flake8.yml diff --git a/.github/workflows/flake8.yml b/.github/workflows/flake8.yml new file mode 100644 index 0000000..ac510c4 --- /dev/null +++ b/.github/workflows/flake8.yml @@ -0,0 +1,28 @@ +name: Python code quality check + +on: + push: + branches: [ main ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ main ] + +jobs: + + flake8-actinia: + + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install + run: | + python -m pip install --upgrade pip + pip install flake8==3.8.0 + - name: Run Flake8 + run: | + flake8 --config=.flake8 --count --statistics --show-source --jobs=$(nproc) . 
diff --git a/docs/conf.py b/docs/conf.py index 0dec06a..d7985fd 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,13 +13,14 @@ import inspect import shutil -__location__ = os.path.join(os.getcwd(), os.path.dirname( - inspect.getfile(inspect.currentframe()))) +__location__ = os.path.join( + os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe())) +) # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.join(__location__, '../src')) +sys.path.insert(0, os.path.join(__location__, "../src")) # -- Run sphinx-apidoc ------------------------------------------------------ # This hack is necessary since RTD does not issue `sphinx-apidoc` before running @@ -46,10 +47,12 @@ from distutils.version import LooseVersion cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}" - cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir) + cmd_line = cmd_line_template.format( + outputdir=output_dir, moduledir=module_dir + ) args = cmd_line.split(" ") - if LooseVersion(sphinx.__version__) >= LooseVersion('1.7'): + if LooseVersion(sphinx.__version__) >= LooseVersion("1.7"): args = args[1:] apidoc.main(args) @@ -63,35 +66,43 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', - 'sphinx.ext.autosummary', 'sphinx.ext.viewcode', 'sphinx.ext.coverage', - 'sphinx.ext.doctest', 'sphinx.ext.ifconfig', 'sphinx.ext.mathjax', - 'sphinx.ext.napoleon'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.autosummary", + "sphinx.ext.viewcode", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.ifconfig", + "sphinx.ext.mathjax", + "sphinx.ext.napoleon", +] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'actinia_satellite_plugin' -copyright = u'2018, Soeren Gebbert' +project = u"actinia_satellite_plugin" +copyright = u"2018, Soeren Gebbert" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '' # Is set by calling `setup.py docs` +version = "" # Is set by calling `setup.py docs` # The full version, including alpha/beta/rc tags. -release = '' # Is set by calling `setup.py docs` +release = "" # Is set by calling `setup.py docs` # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -105,7 +116,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. 
# default_role = None @@ -122,7 +133,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -135,7 +146,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'alabaster' +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -169,7 +180,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. @@ -213,27 +224,30 @@ # html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'actinia_satellite_plugin-doc' +htmlhelp_basename = "actinia_satellite_plugin-doc" # -- Options for LaTeX output -------------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -# 'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -# 'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -# 'preamble': '', + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
latex_documents = [ - ('index', 'user_guide.tex', u'actinia_satellite_plugin Documentation', - u'Soeren Gebbert', 'manual'), + ( + "index", + "user_guide.tex", + u"actinia_satellite_plugin Documentation", + u"Soeren Gebbert", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -257,13 +271,13 @@ # latex_domain_indices = True # -- External mapping ------------------------------------------------------------ -python_version = '.'.join(map(str, sys.version_info[0:2])) +python_version = ".".join(map(str, sys.version_info[0:2])) intersphinx_mapping = { - 'sphinx': ('http://www.sphinx-doc.org/en/stable', None), - 'python': ('https://docs.python.org/' + python_version, None), - 'matplotlib': ('https://matplotlib.org', None), - 'numpy': ('https://docs.scipy.org/doc/numpy', None), - 'sklearn': ('http://scikit-learn.org/stable', None), - 'pandas': ('http://pandas.pydata.org/pandas-docs/stable', None), - 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), + "sphinx": ("http://www.sphinx-doc.org/en/stable", None), + "python": ("https://docs.python.org/" + python_version, None), + "matplotlib": ("https://matplotlib.org", None), + "numpy": ("https://docs.scipy.org/doc/numpy", None), + "sklearn": ("http://scikit-learn.org/stable", None), + "pandas": ("http://pandas.pydata.org/pandas-docs/stable", None), + "scipy": ("https://docs.scipy.org/doc/scipy/reference", None), } diff --git a/scripts/main.py b/scripts/main.py index 30ed17a..553e090 100644 --- a/scripts/main.py +++ b/scripts/main.py @@ -12,26 +12,27 @@ from actinia_core.core.common.process_queue import create_process_queue __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" -if os.path.exists(DEFAULT_CONFIG_PATH) is True 
and os.path.isfile(DEFAULT_CONFIG_PATH): +if os.path.exists(DEFAULT_CONFIG_PATH) is True and os.path.isfile( + DEFAULT_CONFIG_PATH +): global_config.read(DEFAULT_CONFIG_PATH) # Create the endpoints based on the global config create_endpoints() # Connect the redis interfaces -connect(global_config.REDIS_SERVER_URL, - global_config.REDIS_SERVER_PORT) +connect(global_config.REDIS_SERVER_URL, global_config.REDIS_SERVER_PORT) # Create the process queue create_process_queue(global_config) ######################################################################################################################## -if __name__ == '__main__': +if __name__ == "__main__": # Connect to the database - flask_app.run(host='0.0.0.0', port=8080, debug=True) + flask_app.run(host="0.0.0.0", port=8080, debug=True) diff --git a/setup.py b/setup.py index 178bc7b..4620e31 100644 --- a/setup.py +++ b/setup.py @@ -21,11 +21,13 @@ def setup_package(): - needs_sphinx = {'build_sphinx', 'upload_docs'}.intersection(sys.argv) - sphinx = ['sphinx'] if needs_sphinx else [] - setup(setup_requires=['pyscaffold>=3.0a0,<3.1a0'] + sphinx, - entry_points=entry_points, - use_pyscaffold=True) + needs_sphinx = {"build_sphinx", "upload_docs"}.intersection(sys.argv) + sphinx = ["sphinx"] if needs_sphinx else [] + setup( + setup_requires=["pyscaffold>=3.0a0,<3.1a0"] + sphinx, + entry_points=entry_points, + use_pyscaffold=True, + ) if __name__ == "__main__": diff --git a/src/actinia_satellite_plugin/__init__.py b/src/actinia_satellite_plugin/__init__.py index 4bb95dc..12a5551 100644 --- a/src/actinia_satellite_plugin/__init__.py +++ b/src/actinia_satellite_plugin/__init__.py @@ -6,4 +6,4 @@ dist_name = __name__ __version__ = get_distribution(dist_name).version except DistributionNotFound: - __version__ = 'unknown' + __version__ = "unknown" diff --git a/src/actinia_satellite_plugin/aws_sentinel2a_query.py b/src/actinia_satellite_plugin/aws_sentinel2a_query.py index 6fb2029..8fbb8d5 100644 --- 
a/src/actinia_satellite_plugin/aws_sentinel2a_query.py +++ b/src/actinia_satellite_plugin/aws_sentinel2a_query.py @@ -7,7 +7,9 @@ from actinia_core.core.common.app import auth from actinia_core.core.common.api_logger import log_api_call from actinia_core.models.response_models import SimpleResponseModel -from actinia_core.core.common.aws_sentinel_interface import AWSSentinel2AInterface +from actinia_core.core.common.aws_sentinel_interface import ( + AWSSentinel2AInterface, +) from actinia_core.rest.base.resource_base import ResourceBase from flask_restful_swagger_2 import swagger, Schema from copy import deepcopy @@ -21,195 +23,223 @@ class BandInformationEntry(Schema): - type = 'object' + type = "object" properties = { - 'file_name': { - 'type': 'string', - 'description': 'The suggested file name of this band from the requested satellite scene' + "file_name": { + "type": "string", + "description": "The suggested file name of this band from the requested satellite scene", }, - 'map_name': { - 'type': 'string', - 'description': 'The suggested GRASS GIS raster map name of this band from the requested satellite scene' + "map_name": { + "type": "string", + "description": "The suggested GRASS GIS raster map name of this band from the requested satellite scene", + }, + "public_url": { + "type": "string", + "description": "The download URl of the band from requested satellite scene", }, - 'public_url': { - 'type': 'string', - 'description': 'The download URl of the band from requested satellite scene' - } } - required = ['public_url', 'map_name', 'file_name'] + required = ["public_url", "map_name", "file_name"] - example = {'file_name': 'S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_tile_1_band_B04.jp2', - 'map_name': 'S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_tile_1_band_B04', - 'public_url': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/36/T/VT/2017/2/2/0/B04.jp2'} + example = { + "file_name": 
"S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_tile_1_band_B04.jp2", + "map_name": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_tile_1_band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/36/T/VT/2017/2/2/0/B04.jp2", + } class Sentinel2ATileEntry(Schema): - type = 'object' + type = "object" properties = { - 'B01': BandInformationEntry, - 'B02': BandInformationEntry, - 'B03': BandInformationEntry, - 'B04': BandInformationEntry, - 'B05': BandInformationEntry, - 'B06': BandInformationEntry, - 'B07': BandInformationEntry, - 'B08': BandInformationEntry, - 'B09': BandInformationEntry, - 'B10': BandInformationEntry, - 'B11': BandInformationEntry, - 'B12': BandInformationEntry, - 'info': { - 'type': 'string', - 'description': 'The url to Sentinel2A scene information' - }, - 'metadata': { - 'type': 'string', - 'description': 'The url to Sentinel2A scene XML metadata' - }, - 'preview': { - 'type': 'string', - 'description': 'The url to Sentinel2A scene preview image' - }, - 'url': { - 'type': 'string', - 'description': 'The url to Sentinel2A scene root directory that contains all informations about the scene' - }, - 'timestamp': { - 'type': 'string', - 'description': 'The sensing time of the scene', - 'format': 'dateTime' - } + "B01": BandInformationEntry, + "B02": BandInformationEntry, + "B03": BandInformationEntry, + "B04": BandInformationEntry, + "B05": BandInformationEntry, + "B06": BandInformationEntry, + "B07": BandInformationEntry, + "B08": BandInformationEntry, + "B09": BandInformationEntry, + "B10": BandInformationEntry, + "B11": BandInformationEntry, + "B12": BandInformationEntry, + "info": { + "type": "string", + "description": "The url to Sentinel2A scene information", + }, + "metadata": { + "type": "string", + "description": "The url to Sentinel2A scene XML metadata", + }, + "preview": { + "type": "string", + "description": "The url to Sentinel2A scene preview image", + }, + "url": { + "type": "string", + 
"description": "The url to Sentinel2A scene root directory that contains all informations about the scene", + }, + "timestamp": { + "type": "string", + "description": "The sensing time of the scene", + "format": "dateTime", + }, + } + required = ["info", "metadata", "preview", "url", "timestamp"] + + example = { + "B04": { + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2", + }, + "B08": { + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2", + }, + "info": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json", + "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml", + "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg", + "timestamp": "2015-12-07T00:33:02.634Z", + "url": "http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", } - required = ['info', 'metadata', 'preview', 'url', 'timestamp'] - - example = {'B04': { - 'file_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2', - 'map_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04', - 'public_url': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2'}, - 'B08': { - 'file_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2', - 'map_name': 
'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08', - 'public_url': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2'}, - 'info': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json', - 'metadata': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml', - 'preview': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg', - 'timestamp': '2015-12-07T00:33:02.634Z', - 'url': 'http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/'} class Sentinel2ASceneEntry(Schema): - type = 'object' + type = "object" properties = { - 'product_id': { - 'type': 'string', - 'description': 'The id of the Sentinel2A scene' - }, - 'tiles': { - 'type': 'array', - 'items': Sentinel2ATileEntry, - 'description': 'A list of sentinel2A scenes' - } + "product_id": { + "type": "string", + "description": "The id of the Sentinel2A scene", + }, + "tiles": { + "type": "array", + "items": Sentinel2ATileEntry, + "description": "A list of sentinel2A scenes", + }, } required = ["product_id", "tiles"] - example = {'product_id': 'S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155', - 'tiles': [{'B04': { - 'file_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2', - 'map_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04', - 'public_url': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2'}, - 'B08': { - 'file_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2', - 'map_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08', - 'public_url': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2'}, - 'info': 
'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json', - 'metadata': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml', - 'preview': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg', - 'timestamp': '2015-12-07T00:33:02.634Z', - 'url': 'http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/'}]} + example = { + "product_id": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155", + "tiles": [ + { + "B04": { + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2", + }, + "B08": { + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2", + }, + "info": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json", + "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml", + "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg", + "timestamp": "2015-12-07T00:33:02.634Z", + "url": "http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", + } + ], + } class Sentinel2ASceneList(Schema): - type = 'object' + type = "object" properties = { - 'tiles': { - 'type': 'array', - 'items': Sentinel2ASceneEntry, - 'description': 'A list of sentinel2A scenes' + "tiles": { + "type": "array", + "items": Sentinel2ASceneEntry, + "description": "A list of sentinel2A scenes", } } - example = [{'product_id': 
'S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155', - 'tiles': [{'B04': { - 'file_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2', - 'map_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04', - 'public_url': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2'}, - 'B08': { - 'file_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2', - 'map_name': 'S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08', - 'public_url': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2'}, - 'info': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json', - 'metadata': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml', - 'preview': 'http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg', - 'timestamp': '2015-12-07T00:33:02.634Z', - 'url': 'http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/'}]}] + example = [ + { + "product_id": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155", + "tiles": [ + { + "B04": { + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2", + }, + "B08": { + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2", + }, + "info": 
"http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json", + "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml", + "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg", + "timestamp": "2015-12-07T00:33:02.634Z", + "url": "http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", + } + ], + } + ] class Sentinel2ASceneListModel(Schema): - """This schema defines the JSON input of the sentinel time series creator resource - """ - type = 'object' + """This schema defines the JSON input of the sentinel time series creator resource""" + + type = "object" properties = { - 'bands': { - 'type': 'array', - 'items': {'type': 'string'}, - 'description': 'A list of band names that should be downloaded and imported for each Sentinel-2 scene.' - 'Available are the following band names: "B01", "B02", "B03", "B04", "B05", "B06", "B07",' - '"B08", "B8A", "B09" "B10", "B11", "B12"' - }, - 'product_ids': { - 'type': 'array', - 'items': {'type': 'string'}, - 'description': 'A list of Sentinel-2 scene names of which the tile download urls ' - 'and metadata infor urls should be provided.' - } + "bands": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of band names that should be downloaded and imported for each Sentinel-2 scene." 
+ 'Available are the following band names: "B01", "B02", "B03", "B04", "B05", "B06", "B07",' + '"B08", "B8A", "B09" "B10", "B11", "B12"', + }, + "product_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of Sentinel-2 scene names of which the tile download urls " + "and metadata infor urls should be provided.", + }, + } + example = { + "bands": ["B04", "B08"], + "product_ids": [ + "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", + "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236", + ], } - example = {"bands": ["B04", "B08"], - "product_ids": ["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", - "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", - "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236"]} - required = ['bands', 'product_ids'] + required = ["bands", "product_ids"] SCHEMA_DOC = { - 'tags': ['Satellite Image Algorithms'], - 'description': 'Generate the download urls for a list of sentinel2A scenes and band numbers. ' - 'Minimum required user role: user.', - 'consumes': ['application/json'], - 'parameters': [ + "tags": ["Satellite Image Algorithms"], + "description": "Generate the download urls for a list of sentinel2A scenes and band numbers. 
" + "Minimum required user role: user.", + "consumes": ["application/json"], + "parameters": [ { - 'name': 'scenes', - 'description': 'The list of Sentinel-2 scenes and the band names', - 'required': True, - 'in': 'body', - 'schema': Sentinel2ASceneListModel + "name": "scenes", + "description": "The list of Sentinel-2 scenes and the band names", + "required": True, + "in": "body", + "schema": Sentinel2ASceneListModel, } ], - 'responses': { - '200': { - 'description': 'The result of the Sentinel-2 time series import', - 'schema': Sentinel2ASceneList - }, - '400': { - 'description': 'The error message and a detailed log why Sentinel ' - '2A scene download url creation did not succeeded', - 'schema': SimpleResponseModel - } - } + "responses": { + "200": { + "description": "The result of the Sentinel-2 time series import", + "schema": Sentinel2ASceneList, + }, + "400": { + "description": "The error message and a detailed log why Sentinel " + "2A scene download url creation did not succeeded", + "schema": SimpleResponseModel, + }, + }, } class AWSSentinel2ADownloadLinkQuery(ResourceBase): - """Query AWS Sentinel2A archives to receive scene download links - """ + """Query AWS Sentinel2A archives to receive scene download links""" + decorators = [log_api_call, auth.login_required] @swagger.doc(deepcopy(SCHEMA_DOC)) @@ -221,8 +251,10 @@ def post(self): rdc = self.preprocess(has_json=True, has_xml=False) - result = iface.get_sentinel_urls(product_ids=rdc.request_data["product_ids"], - bands=rdc.request_data["bands"]) + result = iface.get_sentinel_urls( + product_ids=rdc.request_data["product_ids"], + bands=rdc.request_data["bands"], + ) return make_response(jsonify(result), 200) except Exception as e: diff --git a/src/actinia_satellite_plugin/endpoints.py b/src/actinia_satellite_plugin/endpoints.py index 5c76ccb..e46e24e 100644 --- a/src/actinia_satellite_plugin/endpoints.py +++ b/src/actinia_satellite_plugin/endpoints.py @@ -5,11 +5,19 @@ """ from .satellite_query import 
LandsatQuery, Sentinel2Query -from .ephemeral_landsat_ndvi_processor import AsyncEphemeralLandsatProcessingResource -from .ephemeral_sentinel2_ndvi_processor import AsyncEphemeralSentinel2ProcessingResource,\ - AsyncEphemeralSentinel2ProcessingResourceGCS -from .persistent_landsat_timeseries_creator import AsyncLandsatTimeSeriesCreatorResource -from .persistent_sentinel2_timeseries_creator import AsyncSentinel2TimeSeriesCreatorResource +from .ephemeral_landsat_ndvi_processor import ( + AsyncEphemeralLandsatProcessingResource, +) +from .ephemeral_sentinel2_ndvi_processor import ( + AsyncEphemeralSentinel2ProcessingResource, + AsyncEphemeralSentinel2ProcessingResourceGCS, +) +from .persistent_landsat_timeseries_creator import ( + AsyncLandsatTimeSeriesCreatorResource, +) +from .persistent_sentinel2_timeseries_creator import ( + AsyncSentinel2TimeSeriesCreatorResource, +) from .aws_sentinel2a_query import AWSSentinel2ADownloadLinkQuery __license__ = "GPLv3" @@ -20,17 +28,32 @@ def create_endpoints(flask_api): - flask_api.add_resource(LandsatQuery, '/landsat_query') - flask_api.add_resource(Sentinel2Query, '/sentinel2_query') - flask_api.add_resource(AsyncEphemeralLandsatProcessingResource, '/landsat_process//' - '/' - '') - flask_api.add_resource(AsyncEphemeralSentinel2ProcessingResourceGCS, - '/sentinel2_process_gcs/ndvi/') - flask_api.add_resource(AsyncEphemeralSentinel2ProcessingResource, - '/sentinel2_process/ndvi/') - flask_api.add_resource(AsyncLandsatTimeSeriesCreatorResource, '/locations//mapsets/' - '/landsat_import') - flask_api.add_resource(AsyncSentinel2TimeSeriesCreatorResource, '/locations//mapsets/' - '/sentinel2_import') - flask_api.add_resource(AWSSentinel2ADownloadLinkQuery, '/sentinel2a_aws_query') + flask_api.add_resource(LandsatQuery, "/landsat_query") + flask_api.add_resource(Sentinel2Query, "/sentinel2_query") + flask_api.add_resource( + AsyncEphemeralLandsatProcessingResource, + "/landsat_process//" + "/" + "", + ) + flask_api.add_resource( + 
AsyncEphemeralSentinel2ProcessingResourceGCS, + "/sentinel2_process_gcs/ndvi/", + ) + flask_api.add_resource( + AsyncEphemeralSentinel2ProcessingResource, + "/sentinel2_process/ndvi/", + ) + flask_api.add_resource( + AsyncLandsatTimeSeriesCreatorResource, + "/locations//mapsets/" + "/landsat_import", + ) + flask_api.add_resource( + AsyncSentinel2TimeSeriesCreatorResource, + "/locations//mapsets/" + "/sentinel2_import", + ) + flask_api.add_resource( + AWSSentinel2ADownloadLinkQuery, "/sentinel2a_aws_query" + ) diff --git a/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py b/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py index a362e37..9208475 100644 --- a/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py +++ b/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py @@ -5,24 +5,33 @@ from copy import deepcopy from flask import jsonify, make_response from flask_restful_swagger_2 import swagger -from actinia_core.processing.actinia_processing.ephemeral.ephemeral_processing_with_export import EphemeralProcessingWithExport +from actinia_core.processing.actinia_processing.ephemeral.ephemeral_processing_with_export import ( + EphemeralProcessingWithExport, +) from actinia_core.rest.base.resource_base import ResourceBase -from actinia_core.core.common.google_satellite_bigquery_interface import GoogleSatelliteBigQueryInterface +from actinia_core.core.common.google_satellite_bigquery_interface import ( + GoogleSatelliteBigQueryInterface, +) from actinia_core.core.common.redis_interface import enqueue_job -from actinia_core.core.common.sentinel_processing_library import Sentinel2Processing +from actinia_core.core.common.sentinel_processing_library import ( + Sentinel2Processing, +) from actinia_core.core.common.process_object import Process from actinia_core.core.common.exceptions import AsyncProcessError -from actinia_core.models.response_models import UnivarResultModel, ProcessingResponseModel +from 
actinia_core.models.response_models import ( + UnivarResultModel, + ProcessingResponseModel, +) from actinia_core.core.common.app import auth from actinia_core.core.common.api_logger import log_api_call from actinia_core.models.response_models import ProcessingErrorResponseModel from actinia_core.core.common.app import URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" class SentinelNDVIResponseModel(ProcessingResponseModel): @@ -30,636 +39,558 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): It is used as schema to define the *process_result* in a ProcessingResponseModel derivative. """ - type = 'object' + + type = "object" properties = deepcopy(ProcessingResponseModel.properties) properties["process_results"] = {} properties["process_results"]["type"] = "array" properties["process_results"]["items"] = UnivarResultModel required = deepcopy(ProcessingResponseModel.required) example = { - "accept_datetime": "2018-05-30 12:25:43.987713", - "accept_timestamp": 1527683143.9877105, - "api_info": { - "endpoint": "asyncephemeralsentinel2processingresource", - "method": "POST", - "path": f"{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", - "request_url": f"http://localhost:8080{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" - }, - "datetime": "2018-05-30 12:29:11.800608", - "http_code": 200, - "message": "Processing successfully finished", - "process_chain_list": [ - { - "1": { - "flags": "g", - "inputs": { - "map": "ndvi" + "accept_datetime": "2018-05-30 12:25:43.987713", + "accept_timestamp": 1527683143.9877105, + "api_info": { + "endpoint": "asyncephemeralsentinel2processingresource", + "method": "POST", 
+ "path": f"{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "request_url": f"http://localhost:8080{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + }, + "datetime": "2018-05-30 12:29:11.800608", + "http_code": 200, + "message": "Processing successfully finished", + "process_chain_list": [ + { + "1": { + "flags": "g", + "inputs": {"map": "ndvi"}, + "module": "r.univar", + "outputs": { + "output": { + "name": "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/tmp7il3n0jk.univar" + } + }, + } }, - "module": "r.univar", - "outputs": { - "output": { - "name": "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/tmp7il3n0jk.univar" - } - } - } - }, - { - "1": { - "inputs": { - "map": "ndvi" + { + "1": {"inputs": {"map": "ndvi"}, "module": "d.rast"}, + "2": { + "flags": "n", + "inputs": {"at": "8,92,0,7", "raster": "ndvi"}, + "module": "d.legend", + }, + "3": { + "inputs": {"at": "20,4", "style": "line"}, + "module": "d.barscale", + }, }, - "module": "d.rast" - }, - "2": { - "flags": "n", - "inputs": { - "at": "8,92,0,7", - "raster": "ndvi" - }, - "module": "d.legend" - }, - "3": { - "inputs": { - "at": "20,4", - "style": "line" - }, - "module": "d.barscale" - } - } - ], - "process_log": [ - { - "executable": "/usr/bin/wget", - "parameter": [ - "-t5", - "-c", - "-q", - "https://storage.googleapis.com/gcp-public-data-sentinel-2/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_20161206T030749/IMG_DATA/T50RKR_20161206T030112_B08.jp2" - ], - "return_code": 0, - "run_time": 49.85953092575073, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "/usr/bin/wget", - "parameter": [ - "-t5", - "-c", - "-q", - 
"https://storage.googleapis.com/gcp-public-data-sentinel-2/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_20161206T030749/IMG_DATA/T50RKR_20161206T030112_B04.jp2" - ], - "return_code": 0, - "run_time": 38.676433801651, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "/bin/mv", - "parameter": [ - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml" - ], - "return_code": 0, - "run_time": 0.05118393898010254, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "/bin/mv", - "parameter": [ - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/T50RKR_20161206T030112_B08.jp2", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08" - ], - "return_code": 0, - "run_time": 0.35857558250427246, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "/bin/mv", - "parameter": [ - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/T50RKR_20161206T030112_B04.jp2", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04" - ], - "return_code": 0, - "run_time": 0.15271401405334473, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "python3", - "parameter": [ - "/usr/local/bin/grass", - "-e", - "-c", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/sentinel2" - ], - "return_code": 0, - "run_time": 0.36118006706237793, - "stderr": [ - "Default locale settings are missing. 
GRASS running with C locale.WARNING: Searched for a web browser, but none found", - "Creating new GRASS GIS location/mapset...", - "Cleaning up temporary files...", - "" - ], - "stdout": "Default locale not found, using UTF-8\n" - }, - { - "executable": "v.import", - "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", - "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", - "--q" - ], - "return_code": 0, - "run_time": 0.3551313877105713, - "stderr": [ - "WARNING: Projection of dataset does not appear to match current location.", - "", - "Location PROJ_INFO is:", - "name: WGS 84 / UTM zone 50N", - "datum: wgs84", - "ellps: wgs84", - "proj: utm", - "zone: 50", - "no_defs: defined", - "", - "Dataset PROJ_INFO is:", - "name: WGS 84", - "datum: wgs84", - "ellps: wgs84", - "proj: ll", - "no_defs: defined", - "", - "ERROR: proj", - "", - "WARNING: Width for column fid set to 255 (was not specified by OGR), some strings may be truncated!", - "" - ], - "stdout": "" - }, - { - "executable": "v.timestamp", - "parameter": [ - "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", - "date=06 dec 2016 03:07:49" - ], - "return_code": 0, - "run_time": 0.050455570220947266, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "/usr/bin/gdal_translate", - "parameter": [ - "-projwin", - "113.949663", - "28.011816", - "115.082607", - "27.001706", - "-of", - "vrt", - "-projwin_srs", - "EPSG:4326", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08.vrt" - ], - "return_code": 0, - "run_time": 0.05114293098449707, - "stderr": [ - "Warning 1: Computed -srcwin 5 -225 10971 11419 falls partially outside raster extent. 
Going on however.", - "" - ], - "stdout": "Input file size is 10980, 10980\n" - }, - { - "executable": "r.import", - "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08.vrt", - "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", - "--q" - ], - "return_code": 0, - "run_time": 16.326167583465576, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "g.region", - "parameter": [ - "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", - "-g" - ], - "return_code": 0, - "run_time": 0.10460591316223145, - "stderr": [ - "" - ], - "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n" - }, - { - "executable": "r.mask", - "parameter": [ - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" - ], - "return_code": 0, - "run_time": 7.36047887802124, - "stderr": [ - "Reading areas...", - "0..100", - "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", - "Reading areas...", - "0..100", - "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", - "All subsequent raster operations will be limited to the MASK area. 
Removing or renaming raster map named 'MASK' will restore raster operations to normal.", - "" - ], - "stdout": "" - }, - { - "executable": "r.mapcalc", - "parameter": [ - "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08 = float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped)" - ], - "return_code": 0, - "run_time": 10.695591926574707, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "r.timestamp", - "parameter": [ - "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", - "date=06 dec 2016 03:07:49" - ], - "return_code": 0, - "run_time": 0.053069353103637695, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "g.remove", - "parameter": [ - "type=raster", - "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", - "-f" - ], - "return_code": 0, - "run_time": 0.050362348556518555, - "stderr": [ - "Removing raster ", - "" - ], - "stdout": "" - }, - { - "executable": "r.mask", - "parameter": [ - "-r" - ], - "return_code": 0, - "run_time": 0.10059237480163574, - "stderr": [ - "Raster MASK removed", - "" - ], - "stdout": "" - }, - { - "executable": "/usr/bin/gdal_translate", - "parameter": [ - "-projwin", - "113.949663", - "28.011816", - "115.082607", - "27.001706", - "-of", - "vrt", - "-projwin_srs", - "EPSG:4326", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04.vrt" - ], - "return_code": 0, - "run_time": 0.05096769332885742, - "stderr": [ - "Warning 1: Computed -srcwin 5 -225 10971 11419 falls partially outside raster extent. 
Going on however.", - "" - ], - "stdout": "Input file size is 10980, 10980\n" - }, - { - "executable": "r.import", - "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04.vrt", - "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", - "--q" - ], - "return_code": 0, - "run_time": 16.76022958755493, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "g.region", - "parameter": [ - "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", - "-g" - ], - "return_code": 0, - "run_time": 0.0505826473236084, - "stderr": [ - "" - ], - "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n" - }, - { - "executable": "r.mask", - "parameter": [ - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" - ], - "return_code": 0, - "run_time": 6.779608249664307, - "stderr": [ - "Reading areas...", - "0..100", - "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", - "Reading areas...", - "0..100", - "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", - "All subsequent raster operations will be limited to the MASK area. 
Removing or renaming raster map named 'MASK' will restore raster operations to normal.", - "" - ], - "stdout": "" - }, - { - "executable": "r.mapcalc", - "parameter": [ - "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04 = float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped)" - ], - "return_code": 0, - "run_time": 10.141529321670532, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "r.timestamp", - "parameter": [ - "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", - "date=06 dec 2016 03:07:49" - ], - "return_code": 0, - "run_time": 0.05050253868103027, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "g.remove", - "parameter": [ - "type=raster", - "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", - "-f" - ], - "return_code": 0, - "run_time": 0.05098080635070801, - "stderr": [ - "Removing raster ", - "" - ], - "stdout": "" - }, - { - "executable": "r.mask", - "parameter": [ - "-r" - ], - "return_code": 0, - "run_time": 0.10424232482910156, - "stderr": [ - "Raster MASK removed", - "" - ], - "stdout": "" - }, - { - "executable": "r.mapcalc", - "parameter": [ - "expression=ndvi = (float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08) - float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04))/(float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08) + float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04))" - ], - "return_code": 0, - "run_time": 20.28681755065918, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "r.colors", - "parameter": [ - "color=ndvi", - "map=ndvi" - ], - "return_code": 0, - "run_time": 0.05031251907348633, - "stderr": [ - "Color table for raster map set to 'ndvi'", - "" - ], - "stdout": "" - }, - { - "executable": "r.univar", - "parameter": [ - "map=ndvi", - 
"output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/tmp7il3n0jk.univar", - "-g" - ], - "return_code": 0, - "run_time": 4.54892897605896, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "d.rast", - "parameter": [ - "map=ndvi" - ], - "return_code": 0, - "run_time": 2.0198700428009033, - "stderr": [ - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", - "" - ], - "stdout": "" - }, - { - "executable": "d.legend", - "parameter": [ - "raster=ndvi", - "at=8,92,0,7", - "-n" - ], - "return_code": 0, - "run_time": 0.4614551067352295, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "d.barscale", - "parameter": [ - "style=line", - "at=20,4" - ], - "return_code": 0, - "run_time": 0.416748046875, - "stderr": [ - "" - ], - "stdout": "" - }, - { - "executable": "g.region", - "parameter": [ - "raster=ndvi", - "-g" - ], - "return_code": 0, - "run_time": 0.051720619201660156, - "stderr": [ - "" - ], - "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n" - }, - { - "executable": "r.out.gdal", - "parameter": [ - "-fm", - "input=ndvi", - "format=GTiff", - "createopt=COMPRESS=LZW", - "output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/ndvi.tiff" - ], - "return_code": 0, - "run_time": 12.550397157669067, - "stderr": [ - "Checking GDAL data type and nodata value...", - "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", - "Using GDAL data type ", - "Input raster map contains cells with NULL-value (no-data). The value -nan will be used to represent no-data values in the input map. 
You can specify a nodata value with the nodata option.", - "Exporting raster data to GTiff format...", - "ERROR 6: SetColorTable() only supported for Byte or UInt16 bands in TIFF format.", - "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", - "r.out.gdal complete. File created.", - "" - ], - "stdout": "" - } - ], - "process_results": [ - { - "cells": 120736119.0, - "coeff_var": 39.2111992829072, - "max": 0.80298912525177, - "mean": 0.345280366103636, - "mean_of_abs": 0.347984182813063, - "min": -0.96863466501236, - "n": 120371030.0, - "name": "ndvi", - "null_cells": 365089.0, - "range": 1.77162379026413, - "stddev": 0.135388572437648, - "sum": 41561753.3066718, - "variance": 0.0183300655467043 - } - ], - "progress": { - "num_of_steps": 33, - "step": 32 - }, - "resource_id": "resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce", - "status": "finished", - "time_delta": 207.813636302948, - "timestamp": 1527683351.8002071, - "urls": { - "resources": [ - f"http://localhost:8080{URL_PREFIX}/resource/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/tmpsaeegg0q.png", - f"http://localhost:8080{URL_PREFIX}/resource/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/ndvi.tiff" ], - "status": f"http://localhost:8080{URL_PREFIX}/resources/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce" - }, - "user_id": "superadmin" + "process_log": [ + { + "executable": "/usr/bin/wget", + "parameter": [ + "-t5", + "-c", + "-q", + "https://storage.googleapis.com/gcp-public-data-sentinel-2/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_20161206T030749/IMG_DATA/T50RKR_20161206T030112_B08.jp2", + ], + "return_code": 0, + "run_time": 49.85953092575073, + "stderr": [""], + "stdout": "", + }, + { + "executable": "/usr/bin/wget", + "parameter": [ + "-t5", + "-c", + "-q", + 
"https://storage.googleapis.com/gcp-public-data-sentinel-2/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_20161206T030749/IMG_DATA/T50RKR_20161206T030112_B04.jp2", + ], + "return_code": 0, + "run_time": 38.676433801651, + "stderr": [""], + "stdout": "", + }, + { + "executable": "/bin/mv", + "parameter": [ + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", + ], + "return_code": 0, + "run_time": 0.05118393898010254, + "stderr": [""], + "stdout": "", + }, + { + "executable": "/bin/mv", + "parameter": [ + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/T50RKR_20161206T030112_B08.jp2", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + ], + "return_code": 0, + "run_time": 0.35857558250427246, + "stderr": [""], + "stdout": "", + }, + { + "executable": "/bin/mv", + "parameter": [ + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/T50RKR_20161206T030112_B04.jp2", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", + ], + "return_code": 0, + "run_time": 0.15271401405334473, + "stderr": [""], + "stdout": "", + }, + { + "executable": "python3", + "parameter": [ + "/usr/local/bin/grass", + "-e", + "-c", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/sentinel2", + ], + "return_code": 0, + "run_time": 0.36118006706237793, + "stderr": [ + "Default locale settings are missing. 
GRASS running with C locale.WARNING: Searched for a web browser, but none found", + "Creating new GRASS GIS location/mapset...", + "Cleaning up temporary files...", + "", + ], + "stdout": "Default locale not found, using UTF-8\n", + }, + { + "executable": "v.import", + "parameter": [ + "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", + "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "--q", + ], + "return_code": 0, + "run_time": 0.3551313877105713, + "stderr": [ + "WARNING: Projection of dataset does not appear to match current location.", + "", + "Location PROJ_INFO is:", + "name: WGS 84 / UTM zone 50N", + "datum: wgs84", + "ellps: wgs84", + "proj: utm", + "zone: 50", + "no_defs: defined", + "", + "Dataset PROJ_INFO is:", + "name: WGS 84", + "datum: wgs84", + "ellps: wgs84", + "proj: ll", + "no_defs: defined", + "", + "ERROR: proj", + "", + "WARNING: Width for column fid set to 255 (was not specified by OGR), some strings may be truncated!", + "", + ], + "stdout": "", + }, + { + "executable": "v.timestamp", + "parameter": [ + "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "date=06 dec 2016 03:07:49", + ], + "return_code": 0, + "run_time": 0.050455570220947266, + "stderr": [""], + "stdout": "", + }, + { + "executable": "/usr/bin/gdal_translate", + "parameter": [ + "-projwin", + "113.949663", + "28.011816", + "115.082607", + "27.001706", + "-of", + "vrt", + "-projwin_srs", + "EPSG:4326", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08.vrt", + ], + "return_code": 0, + "run_time": 0.05114293098449707, + "stderr": [ + "Warning 1: Computed -srcwin 5 -225 10971 11419 falls partially outside raster extent. 
Going on however.", + "", + ], + "stdout": "Input file size is 10980, 10980\n", + }, + { + "executable": "r.import", + "parameter": [ + "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08.vrt", + "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", + "--q", + ], + "return_code": 0, + "run_time": 16.326167583465576, + "stderr": [""], + "stdout": "", + }, + { + "executable": "g.region", + "parameter": [ + "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "-g", + ], + "return_code": 0, + "run_time": 0.10460591316223145, + "stderr": [""], + "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n", + }, + { + "executable": "r.mask", + "parameter": [ + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" + ], + "return_code": 0, + "run_time": 7.36047887802124, + "stderr": [ + "Reading areas...", + "0..100", + "Writing raster map...", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "Reading areas...", + "0..100", + "Writing raster map...", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "All subsequent raster operations will be limited to the MASK area. 
Removing or renaming raster map named 'MASK' will restore raster operations to normal.", + "", + ], + "stdout": "", + }, + { + "executable": "r.mapcalc", + "parameter": [ + "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08 = float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped)" + ], + "return_code": 0, + "run_time": 10.695591926574707, + "stderr": [""], + "stdout": "", + }, + { + "executable": "r.timestamp", + "parameter": [ + "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + "date=06 dec 2016 03:07:49", + ], + "return_code": 0, + "run_time": 0.053069353103637695, + "stderr": [""], + "stdout": "", + }, + { + "executable": "g.remove", + "parameter": [ + "type=raster", + "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", + "-f", + ], + "return_code": 0, + "run_time": 0.050362348556518555, + "stderr": [ + "Removing raster ", + "", + ], + "stdout": "", + }, + { + "executable": "r.mask", + "parameter": ["-r"], + "return_code": 0, + "run_time": 0.10059237480163574, + "stderr": ["Raster MASK removed", ""], + "stdout": "", + }, + { + "executable": "/usr/bin/gdal_translate", + "parameter": [ + "-projwin", + "113.949663", + "28.011816", + "115.082607", + "27.001706", + "-of", + "vrt", + "-projwin_srs", + "EPSG:4326", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04.vrt", + ], + "return_code": 0, + "run_time": 0.05096769332885742, + "stderr": [ + "Warning 1: Computed -srcwin 5 -225 10971 11419 falls partially outside raster extent. 
Going on however.", + "", + ], + "stdout": "Input file size is 10980, 10980\n", + }, + { + "executable": "r.import", + "parameter": [ + "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04.vrt", + "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", + "--q", + ], + "return_code": 0, + "run_time": 16.76022958755493, + "stderr": [""], + "stdout": "", + }, + { + "executable": "g.region", + "parameter": [ + "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "-g", + ], + "return_code": 0, + "run_time": 0.0505826473236084, + "stderr": [""], + "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n", + }, + { + "executable": "r.mask", + "parameter": [ + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" + ], + "return_code": 0, + "run_time": 6.779608249664307, + "stderr": [ + "Reading areas...", + "0..100", + "Writing raster map...", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "Reading areas...", + "0..100", + "Writing raster map...", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "All subsequent raster operations will be limited to the MASK area. 
Removing or renaming raster map named 'MASK' will restore raster operations to normal.", + "", + ], + "stdout": "", + }, + { + "executable": "r.mapcalc", + "parameter": [ + "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04 = float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped)" + ], + "return_code": 0, + "run_time": 10.141529321670532, + "stderr": [""], + "stdout": "", + }, + { + "executable": "r.timestamp", + "parameter": [ + "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", + "date=06 dec 2016 03:07:49", + ], + "return_code": 0, + "run_time": 0.05050253868103027, + "stderr": [""], + "stdout": "", + }, + { + "executable": "g.remove", + "parameter": [ + "type=raster", + "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", + "-f", + ], + "return_code": 0, + "run_time": 0.05098080635070801, + "stderr": [ + "Removing raster ", + "", + ], + "stdout": "", + }, + { + "executable": "r.mask", + "parameter": ["-r"], + "return_code": 0, + "run_time": 0.10424232482910156, + "stderr": ["Raster MASK removed", ""], + "stdout": "", + }, + { + "executable": "r.mapcalc", + "parameter": [ + "expression=ndvi = (float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08) - float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04))/(float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08) + float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04))" + ], + "return_code": 0, + "run_time": 20.28681755065918, + "stderr": [""], + "stdout": "", + }, + { + "executable": "r.colors", + "parameter": ["color=ndvi", "map=ndvi"], + "return_code": 0, + "run_time": 0.05031251907348633, + "stderr": [ + "Color table for raster map set to 'ndvi'", + "", + ], + "stdout": "", + }, + { + "executable": "r.univar", + "parameter": [ + "map=ndvi", + 
"output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/tmp7il3n0jk.univar", + "-g", + ], + "return_code": 0, + "run_time": 4.54892897605896, + "stderr": [""], + "stdout": "", + }, + { + "executable": "d.rast", + "parameter": ["map=ndvi"], + "return_code": 0, + "run_time": 2.0198700428009033, + "stderr": [ + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "", + ], + "stdout": "", + }, + { + "executable": "d.legend", + "parameter": ["raster=ndvi", "at=8,92,0,7", "-n"], + "return_code": 0, + "run_time": 0.4614551067352295, + "stderr": [""], + "stdout": "", + }, + { + "executable": "d.barscale", + "parameter": ["style=line", "at=20,4"], + "return_code": 0, + "run_time": 0.416748046875, + "stderr": [""], + "stdout": "", + }, + { + "executable": "g.region", + "parameter": ["raster=ndvi", "-g"], + "return_code": 0, + "run_time": 0.051720619201660156, + "stderr": [""], + "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n", + }, + { + "executable": "r.out.gdal", + "parameter": [ + "-fm", + "input=ndvi", + "format=GTiff", + "createopt=COMPRESS=LZW", + "output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/ndvi.tiff", + ], + "return_code": 0, + "run_time": 12.550397157669067, + "stderr": [ + "Checking GDAL data type and nodata value...", + "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", + "Using GDAL data type ", + "Input raster map contains cells with NULL-value (no-data). The value -nan will be used to represent no-data values in the input map. 
You can specify a nodata value with the nodata option.", + "Exporting raster data to GTiff format...", + "ERROR 6: SetColorTable() only supported for Byte or UInt16 bands in TIFF format.", + "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", + "r.out.gdal complete. File created.", + "", + ], + "stdout": "", + }, + ], + "process_results": [ + { + "cells": 120736119.0, + "coeff_var": 39.2111992829072, + "max": 0.80298912525177, + "mean": 0.345280366103636, + "mean_of_abs": 0.347984182813063, + "min": -0.96863466501236, + "n": 120371030.0, + "name": "ndvi", + "null_cells": 365089.0, + "range": 1.77162379026413, + "stddev": 0.135388572437648, + "sum": 41561753.3066718, + "variance": 0.0183300655467043, + } + ], + "progress": {"num_of_steps": 33, "step": 32}, + "resource_id": "resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce", + "status": "finished", + "time_delta": 207.813636302948, + "timestamp": 1527683351.8002071, + "urls": { + "resources": [ + f"http://localhost:8080{URL_PREFIX}/resource/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/tmpsaeegg0q.png", + f"http://localhost:8080{URL_PREFIX}/resource/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/ndvi.tiff", + ], + "status": f"http://localhost:8080{URL_PREFIX}/resources/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce", + }, + "user_id": "superadmin", } # required.append("process_results") SWAGGER_DOC = { - 'tags': ['Satellite Image Algorithms'], - 'description': 'NDVI computation of an arbitrary Sentinel-2 scene.' - 'The processing is as follows: A user specific Sentinel-2 scene (Bands 04 and 08)' - 'will be download and imported into an ephemeral database.. ' - 'The NDVI will be computed via r.mapcalc. ' - 'The result of the computation is available as gzipped geotiff file. In addition, ' - 'the univariate statistic will be computed ' - 'as well as a preview image including a legend and scale.' 
- ' Minimum required user role: user.', - 'parameters': [ - { - 'name': 'product_id', - 'description': 'The product id of a sentinel scene', - 'required': True, - 'in': 'path', - 'type': 'string', - 'default': 'S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138' + "tags": ["Satellite Image Algorithms"], + "description": "NDVI computation of an arbitrary Sentinel-2 scene." + "The processing is as follows: A user specific Sentinel-2 scene (Bands 04 and 08)" + "will be download and imported into an ephemeral database.. " + "The NDVI will be computed via r.mapcalc. " + "The result of the computation is available as gzipped geotiff file. In addition, " + "the univariate statistic will be computed " + "as well as a preview image including a legend and scale." + " Minimum required user role: user.", + "parameters": [ + { + "name": "product_id", + "description": "The product id of a sentinel scene", + "required": True, + "in": "path", + "type": "string", + "default": "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", } ], - 'responses': { - '200': { - 'description': 'This response includes all created resources ' - 'as URL as well as the processing log and other metadata.', - 'schema':SentinelNDVIResponseModel - }, - '400': { - 'description': 'The error message and a detailed log why NDVI processing of ' - 'a sentinel2 scene did not succeeded', - 'schema':ProcessingErrorResponseModel - } - } + "responses": { + "200": { + "description": "This response includes all created resources " + "as URL as well as the processing log and other metadata.", + "schema": SentinelNDVIResponseModel, + }, + "400": { + "description": "The error message and a detailed log why NDVI processing of " + "a sentinel2 scene did not succeeded", + "schema": ProcessingErrorResponseModel, + }, + }, } @@ -674,7 +605,7 @@ def extract_sensor_id_from_scene_id(scene_id): The sencor id """ - return "%s0%s"%(scene_id[0:2], scene_id[2:3]) + return "%s0%s" % (scene_id[0:2], scene_id[2:3]) 
class AsyncEphemeralSentinel2ProcessingResource(ResourceBase): @@ -683,6 +614,7 @@ class AsyncEphemeralSentinel2ProcessingResource(ResourceBase): to download and process Sentinel-2 satellite images in an ephemeral GRASS location and stores the result in a network storage like GlusterFS or NFS """ + decorators = [log_api_call, auth.login_required] def __init__(self): @@ -691,8 +623,7 @@ def __init__(self): @swagger.doc(deepcopy(SWAGGER_DOC)) def post(self, product_id): - """NDVI computation of an arbitrary Sentinel-2 scene. - """ + """NDVI computation of an arbitrary Sentinel-2 scene.""" rdc = self.preprocess(has_json=False, location_name="sentinel2") rdc.set_user_data(product_id) @@ -708,6 +639,7 @@ class AsyncEphemeralSentinel2ProcessingResourceGCS(ResourceBase): to download and process Sentinel-2 satellite images in an ephemeral GRASS location and uploads the result to a google cloud storage bucket. """ + decorators = [log_api_call, auth.login_required] def __init__(self): @@ -716,8 +648,7 @@ def __init__(self): @swagger.doc(deepcopy(SWAGGER_DOC)) def post(self, product_id): - """NDVI computation of an arbitrary Sentinel-2 scene. The results are stored in the Google Cloud Storage. - """ + """NDVI computation of an arbitrary Sentinel-2 scene. 
The results are stored in the Google Cloud Storage.""" rdc = self.preprocess(has_json=False, location_name="sentinel2") rdc.set_user_data(product_id) rdc.set_storage_model_to_gcs() @@ -733,9 +664,9 @@ def start_job(*args): class EphemeralSentinelProcessing(EphemeralProcessingWithExport): - """ - """ - def __init__(self,rdc): + """""" + + def __init__(self, rdc): """ Setup the variables of this class @@ -750,12 +681,21 @@ def __init__(self,rdc): self.product_id = self.rdc.user_data self.sentinel2_band_file_list = {} self.gml_footprint = "" - self.user_download_cache_path = os.path.join(self.config.DOWNLOAD_CACHE, self.user_id) - self.raster_result_list = [] # The raster layer names which must be exported, stats computed - # and preview image created - self.module_results = [] # A list of r.univar output classes for each vegetation index - self.response_model_class = SentinelNDVIResponseModel # The class that is used to create the response - self.required_bands = ["B08", "B04"] # The Sentinel-2 bands that are required for NDVI processing + self.user_download_cache_path = os.path.join( + self.config.DOWNLOAD_CACHE, self.user_id + ) + self.raster_result_list = ( + [] + ) # The raster layer names which must be exported, stats computed + # and preview image created + self.module_results = ( + [] + ) # A list of r.univar output classes for each vegetation index + self.response_model_class = SentinelNDVIResponseModel # The class that is used to create the response + self.required_bands = [ + "B08", + "B04", + ] # The Sentinel-2 bands that are required for NDVI processing self.query_result = None def _prepare_sentinel2_download(self): @@ -784,15 +724,24 @@ def _prepare_sentinel2_download(self): os.putenv("HOME", "/tmp") try: - self.query_result = self.query_interface.get_sentinel_urls([self.product_id,], self.required_bands) + self.query_result = self.query_interface.get_sentinel_urls( + [ + self.product_id, + ], + self.required_bands, + ) except Exception as e: - raise 
AsyncProcessError("Error in querying Sentinel-2 product <%s> " - "in Google BigQuery Sentinel-2 database. " - "Error: %s"%(self.product_id, str(e))) + raise AsyncProcessError( + "Error in querying Sentinel-2 product <%s> " + "in Google BigQuery Sentinel-2 database. " + "Error: %s" % (self.product_id, str(e)) + ) if not self.query_result: - raise AsyncProcessError("Unable to find Sentinel-2 product <%s> " - "in Google BigQuery Sentinel-2 database"%self.product_id) + raise AsyncProcessError( + "Unable to find Sentinel-2 product <%s> " + "in Google BigQuery Sentinel-2 database" % self.product_id + ) def _create_temp_database(self, mapsets=[]): """Create a temporary gis database and location with a PERMANENT mapset for processing @@ -802,7 +751,9 @@ def _create_temp_database(self, mapsets=[]): """ if not self.sentinel2_band_file_list: - raise AsyncProcessError("Unable to create a temporary GIS database, no data is available") + raise AsyncProcessError( + "Unable to create a temporary GIS database, no data is available" + ) try: geofile = self.sentinel2_band_file_list[self.required_bands[0]][0] @@ -818,23 +769,38 @@ def _create_temp_database(self, mapsets=[]): executable_params.append("-e") executable_params.append("-c") executable_params.append(geofile) - executable_params.append(os.path.join(self.temp_grass_data_base, self.location_name)) + executable_params.append( + os.path.join(self.temp_grass_data_base, self.location_name) + ) - self.message_logger.info("%s %s"%(self.config.GRASS_GIS_START_SCRIPT, executable_params)) + self.message_logger.info( + "%s %s" + % (self.config.GRASS_GIS_START_SCRIPT, executable_params) + ) self._update_num_of_steps(1) - p = Process(exec_type="exec", - executable="python3", - executable_params=executable_params) + p = Process( + exec_type="exec", + executable="python3", + executable_params=executable_params, + ) # Create the GRASS location, this will create the location and mapset paths self._run_process(p) except Exception as e: - raise 
AsyncProcessError("Unable to create a temporary GIS database and location at <%s>" - ", Exception: %s"%(os.path.join(self.temp_grass_data_base, - self.location_name, "PERMANENT"), - str(e))) + raise AsyncProcessError( + "Unable to create a temporary GIS database and location at <%s>" + ", Exception: %s" + % ( + os.path.join( + self.temp_grass_data_base, + self.location_name, + "PERMANENT", + ), + str(e), + ) + ) def _run_r_univar_command(self, raster_name): """Compute the univariate statistics for a raster layer @@ -844,19 +810,24 @@ def _run_r_univar_command(self, raster_name): raster_name: """ - result_file = tempfile.mktemp(suffix=".univar", dir=self.temp_file_path) + result_file = tempfile.mktemp( + suffix=".univar", dir=self.temp_file_path + ) univar_command = dict() - univar_command["1"] = {"module":"r.univar", - "inputs":{"map":raster_name}, - "outputs":{"output":{"name":result_file}}, - "flags":"g"} + univar_command["1"] = { + "module": "r.univar", + "inputs": {"map": raster_name}, + "outputs": {"output": {"name": result_file}}, + "flags": "g", + } - process_list = self._validate_process_chain(process_chain=univar_command, - skip_permission_check=True) + process_list = self._validate_process_chain( + process_chain=univar_command, skip_permission_check=True + ) self._execute_process_list(process_list=process_list) result_list = open(result_file, "r").readlines() - results = {"name":raster_name} + results = {"name": raster_name} for line in result_list: if "=" in line: @@ -888,21 +859,28 @@ def _render_preview_image(self, raster_name): os.putenv("GRASS_RENDER_FILE_READ", "TRUE") render_commands = {} - render_commands["1"] = {"module":"d.rast","inputs":{"map":raster_name}} - # "flags":"n"} + render_commands["1"] = { + "module": "d.rast", + "inputs": {"map": raster_name}, + } + # "flags":"n"} - render_commands["2"] = {"module":"d.legend","inputs":{"raster":raster_name, - "at":"8,92,0,7"}, - "flags":"n"} + render_commands["2"] = { + "module": "d.legend", + 
"inputs": {"raster": raster_name, "at": "8,92,0,7"}, + "flags": "n", + } - render_commands["3"] = {"module":"d.barscale","inputs":{"style":"line", - "at":"20,4"}, - # "flags":"n" # No "n" flag in grass72 - } + render_commands["3"] = { + "module": "d.barscale", + "inputs": {"style": "line", "at": "20,4"}, + # "flags":"n" # No "n" flag in grass72 + } # Run the selected modules - process_list = self._validate_process_chain(process_chain=render_commands, - skip_permission_check=True) + process_list = self._validate_process_chain( + process_chain=render_commands, skip_permission_check=True + ) self._execute_process_list(process_list) # Attach the png preview image to the resource URL list @@ -935,9 +913,10 @@ def _create_output_resources(self, raster_result_list): self._run_r_univar_command(raster_name) # Render a preview image for this raster layer self._render_preview_image(raster_name) - export_dict = {"name":raster_name, - "export":{"format":"GTiff", - "type":"raster"}} + export_dict = { + "name": raster_name, + "export": {"format": "GTiff", "type": "raster"}, + } # Add the raster layer to the export list self.resource_export_list.append(export_dict) @@ -949,14 +928,14 @@ def _create_output_resources(self, raster_result_list): def _execute(self): """Overwrite this function in subclasses - - Setup user credentials and working paths - - Create the resource directory - - Download and store the sentinel2 scene files - - Initialize and create the temporal database and location - - Analyse the process chains - - Run the modules - - Export the results - - Cleanup + - Setup user credentials and working paths + - Create the resource directory + - Download and store the sentinel2 scene files + - Initialize and create the temporal database and location + - Analyse the process chains + - Run the modules + - Export the results + - Cleanup """ # Setup the user credentials and logger @@ -968,16 +947,21 @@ def _execute(self): # Setup the download cache 
self._prepare_sentinel2_download() - process_lib = Sentinel2Processing(self.config, - self.product_id, - self.query_result, - self.required_bands, - self.temp_file_path, - self.user_download_cache_path, - self._send_resource_update, - self.message_logger) - - download_commands, self.sentinel2_band_file_list = process_lib.get_sentinel2_download_process_list() + process_lib = Sentinel2Processing( + self.config, + self.product_id, + self.query_result, + self.required_bands, + self.temp_file_path, + self.user_download_cache_path, + self._send_resource_update, + self.message_logger, + ) + + ( + download_commands, + self.sentinel2_band_file_list, + ) = process_lib.get_sentinel2_download_process_list() # Download the sentinel scene if not in the download cache if download_commands: @@ -985,7 +969,9 @@ def _execute(self): self._execute_process_list(process_list=download_commands) # Setup GRASS - self._create_temporary_grass_environment(source_mapset_name="PERMANENT") + self._create_temporary_grass_environment( + source_mapset_name="PERMANENT" + ) # Import and prepare the sentinel scenes import_commands = process_lib.get_sentinel2_import_process_list() @@ -995,7 +981,9 @@ def _execute(self): # Generate the ndvi command nir = self.sentinel2_band_file_list["B08"][1] red = self.sentinel2_band_file_list["B04"][1] - ndvi_commands = process_lib.get_ndvi_r_mapcalc_process_list(red, nir, "ndvi") + ndvi_commands = process_lib.get_ndvi_r_mapcalc_process_list( + red, nir, "ndvi" + ) self._update_num_of_steps(len(ndvi_commands)) self._execute_process_list(process_list=ndvi_commands) @@ -1005,8 +993,7 @@ def _execute(self): self._create_output_resources(self.raster_result_list) def _final_cleanup(self): - """Overwrite this function in subclasses to perform the final cleanup - """ + """Overwrite this function in subclasses to perform the final cleanup""" # Clean up and remove the temporary gisdbase self._cleanup() # Remove resource directories diff --git 
a/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py b/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py index d4faa4d..c57933d 100644 --- a/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py +++ b/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py @@ -9,111 +9,126 @@ from datetime import timedelta from copy import deepcopy from flask_restful_swagger_2 import swagger, Schema -from actinia_core.models.response_models import ProcessingResponseModel, ProcessingErrorResponseModel -from actinia_core.processing.actinia_processing.ephemeral.persistent_processing import PersistentProcessing +from actinia_core.models.response_models import ( + ProcessingResponseModel, + ProcessingErrorResponseModel, +) +from actinia_core.processing.actinia_processing.ephemeral.persistent_processing import ( + PersistentProcessing, +) from actinia_core.rest.base.resource_base import ResourceBase from actinia_core.core.common.redis_interface import enqueue_job -from actinia_core.core.common.google_satellite_bigquery_interface import GoogleSatelliteBigQueryInterface -from actinia_core.core.common.landsat_processing_library import LandsatProcessing, SCENE_BANDS, extract_sensor_id_from_scene_id, RASTER_SUFFIXES +from actinia_core.core.common.google_satellite_bigquery_interface import ( + GoogleSatelliteBigQueryInterface, +) +from actinia_core.core.common.landsat_processing_library import ( + LandsatProcessing, + SCENE_BANDS, + extract_sensor_id_from_scene_id, + RASTER_SUFFIXES, +) from actinia_core.core.common.exceptions import AsyncProcessError __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" class LandsatSceneListModel(Schema): - """This schema defines the JSON input of 
the Landsat time series creator resource - """ - type = 'object' + """This schema defines the JSON input of the Landsat time series creator resource""" + + type = "object" properties = { "atcor_method": { - 'description': 'The method for atmospheric correction', - 'type': 'string', - 'enum': ["TOAR", "DOS1", "DOS4"] + "description": "The method for atmospheric correction", + "type": "string", + "enum": ["TOAR", "DOS1", "DOS4"], }, - 'strds': { - 'type': 'string', - 'description': 'The basename of the new space-time raster datasets (strds). ' - 'One for each band will be created and the basename will be ' - 'extended by a band specific suffix.' + "strds": { + "type": "string", + "description": "The basename of the new space-time raster datasets (strds). " + "One for each band will be created and the basename will be " + "extended by a band specific suffix.", + }, + "scene_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of Landsat scene names that should be downloaded and imported. Only scenes from a " + "specific satelleite can be imported and represented as strds. " + "Mixing of different satellites is not permitted. All bands will be imported " + "and atmospherically corrected", }, - 'scene_ids': { - 'type': 'array', - 'items': {'type':'string'}, - 'description': 'A list of Landsat scene names that should be downloaded and imported. Only scenes from a ' - 'specific satelleite can be imported and represented as strds. ' - 'Mixing of different satellites is not permitted. 
All bands will be imported ' - 'and atmospherically corrected' - } } - example = {"strds":"Landsat_4", - "atcor_method": "TOAR", - "scene_ids":["LT41970251990147XXX00", - "LT41970251990147XXX01", - "LT41970251990147XXX02", - "LT41970251990147XXX03"]} - - required = ['strds', 'scene_ids', 'atcor_method'] - - -SCHEMA_DOC={ - 'tags': ['Satellite Image Algorithms'], - 'description': 'Download and import Landsat scenes into a new mapset and create a space-time raster dataset ' - 'for each imported band. ' - 'The resulting data will be located in a persistent user database. ' - 'The location name is part of the path and must exist. The mapset will ' - 'be created while importing and should not already exist in the location. The names of the' - 'Landsat scenes that should be downloaded must be specified ' - 'in the HTTP body as application/json content. In addition, the basename of the ' - 'STRDS that should manage the Landsat scenes ' - 'must be provided in the application/json content. For each band a separate ' - 'strds will be cerated and the STRDS base name will be extended with the band number.' - 'This call is performed asynchronously. The provided resource URL must be pulled ' - 'to receive the status of the import. The data is available in the provided ' - 'location/mapset, after the download and import finished. Minimum required user role: user.', - 'consumes':['application/json'], - 'parameters': [ + example = { + "strds": "Landsat_4", + "atcor_method": "TOAR", + "scene_ids": [ + "LT41970251990147XXX00", + "LT41970251990147XXX01", + "LT41970251990147XXX02", + "LT41970251990147XXX03", + ], + } + + required = ["strds", "scene_ids", "atcor_method"] + + +SCHEMA_DOC = { + "tags": ["Satellite Image Algorithms"], + "description": "Download and import Landsat scenes into a new mapset and create a space-time raster dataset " + "for each imported band. " + "The resulting data will be located in a persistent user database. 
" + "The location name is part of the path and must exist. The mapset will " + "be created while importing and should not already exist in the location. The names of the" + "Landsat scenes that should be downloaded must be specified " + "in the HTTP body as application/json content. In addition, the basename of the " + "STRDS that should manage the Landsat scenes " + "must be provided in the application/json content. For each band a separate " + "strds will be cerated and the STRDS base name will be extended with the band number." + "This call is performed asynchronously. The provided resource URL must be pulled " + "to receive the status of the import. The data is available in the provided " + "location/mapset, after the download and import finished. Minimum required user role: user.", + "consumes": ["application/json"], + "parameters": [ { - 'name': 'location_name', - 'description': 'The location name to import the Landsat scenes in', - 'required': True, - 'in': 'path', - 'type': 'string' + "name": "location_name", + "description": "The location name to import the Landsat scenes in", + "required": True, + "in": "path", + "type": "string", }, { - 'name': 'mapset_name', - 'description': 'The name of the mapset to import the Landsat scenes in', - 'required': True, - 'in': 'path', - 'type': 'string' + "name": "mapset_name", + "description": "The name of the mapset to import the Landsat scenes in", + "required": True, + "in": "path", + "type": "string", }, { - 'name': 'tiles', - 'description': 'The list of Landsat scenes, the band names and the target STRDS names', - 'required': True, - 'in': 'body', - 'schema': LandsatSceneListModel - } + "name": "tiles", + "description": "The list of Landsat scenes, the band names and the target STRDS names", + "required": True, + "in": "body", + "schema": LandsatSceneListModel, + }, ], - 'responses': { - '200': { - 'description': 'The result of the Landsat time series import', - 'schema':ProcessingResponseModel + "responses": { + 
"200": { + "description": "The result of the Landsat time series import", + "schema": ProcessingResponseModel, }, - '400': { - 'description':'The error message and a detailed log why Landsat ' - 'time series import did not succeeded', - 'schema':ProcessingErrorResponseModel - } - } - } + "400": { + "description": "The error message and a detailed log why Landsat " + "time series import did not succeeded", + "schema": ProcessingErrorResponseModel, + }, + }, +} class AsyncLandsatTimeSeriesCreatorResource(ResourceBase): - def __init__(self): ResourceBase.__init__(self) @@ -155,9 +170,9 @@ def post(self, location_name, mapset_name): """ # Preprocess the post call - rdc = self.preprocess(has_json=True, - location_name=location_name, - mapset_name=mapset_name) + rdc = self.preprocess( + has_json=True, location_name=location_name, mapset_name=mapset_name + ) # RedisQueue approach enqueue_job(self.job_timeout, start_job, rdc) @@ -178,6 +193,7 @@ class LandsatTimeSeriesCreator(PersistentProcessing): The Sentiel2A scenes are downloaded , imported and pre-processed before they are registered in the band specific space time datasets. """ + def __init__(self, rdc): """Constructor @@ -194,7 +210,9 @@ def __init__(self, rdc): self.scene_ids = self.rdc.request_data["scene_ids"] self.strds_basename = self.rdc.request_data["strds"] self.atcor_method = self.rdc.request_data["atcor_method"] - self.user_download_cache_path = os.path.join(self.config.DOWNLOAD_CACHE, self.user_id) + self.user_download_cache_path = os.path.join( + self.config.DOWNLOAD_CACHE, self.user_id + ) self.query_result = None self.required_bands = None self.sensor_id = None @@ -232,8 +250,10 @@ def _prepare_download(self): sensor_ids[self.sensor_id] = self.sensor_id if len(sensor_ids) > 2: - raise AsyncProcessError("Different satellites are in the list of scenes to be imported. 
" - "Only scenes from a single satellite an be imported at once.") + raise AsyncProcessError( + "Different satellites are in the list of scenes to be imported. " + "Only scenes from a single satellite an be imported at once." + ) # All bands are imported, except the MTL file self.required_bands = SCENE_BANDS[self.sensor_id][0:-1] @@ -241,15 +261,21 @@ def _prepare_download(self): self._send_resource_update("Sending Google BigQuery request.") try: - self.query_result = self.query_interface.get_landsat_urls(self.scene_ids, self.required_bands) + self.query_result = self.query_interface.get_landsat_urls( + self.scene_ids, self.required_bands + ) except Exception as e: - raise AsyncProcessError("Error in querying Landsat product <%s> " - "in Google BigQuery Landsat database. " - "Error: %s"%(self.scene_ids, str(e))) + raise AsyncProcessError( + "Error in querying Landsat product <%s> " + "in Google BigQuery Landsat database. " + "Error: %s" % (self.scene_ids, str(e)) + ) if not self.query_result: - raise AsyncProcessError("Unable to find Landsat product <%s> " - "in Google BigQuery Landsat database"%self.scene_ids) + raise AsyncProcessError( + "Unable to find Landsat product <%s> " + "in Google BigQuery Landsat database" % self.scene_ids + ) def _import_scenes(self): """Import all found Sentinel2 scenes with their bands and create the space time raster datasets @@ -265,14 +291,19 @@ def _import_scenes(self): for scene_id in self.query_result: - process_lib = LandsatProcessing(config=self.config, - scene_id=scene_id, - temp_file_path=self.temp_file_path, - download_cache=self.user_download_cache_path, - send_resource_update=self._send_resource_update, - message_logger=self.message_logger) + process_lib = LandsatProcessing( + config=self.config, + scene_id=scene_id, + temp_file_path=self.temp_file_path, + download_cache=self.user_download_cache_path, + send_resource_update=self._send_resource_update, + message_logger=self.message_logger, + ) - download_commands, 
self.sentinel2_band_file_list = process_lib.get_download_process_list() + ( + download_commands, + self.sentinel2_band_file_list, + ) = process_lib.get_download_process_list() # Download the Landsat scene if it is not in the download cache if download_commands: @@ -281,7 +312,11 @@ def _import_scenes(self): # Import and atmospheric correction import_commands = process_lib.get_import_process_list() all_commands.extend(import_commands) - atcor_method_commands = process_lib.get_i_landsat_toar_process_list(atcor_method=self.atcor_method) + atcor_method_commands = ( + process_lib.get_i_landsat_toar_process_list( + atcor_method=self.atcor_method + ) + ) all_commands.extend(atcor_method_commands) # Run the commands @@ -297,50 +332,71 @@ def _import_scenes(self): for i in range(len(self.required_bands)): band = self.required_bands[i] - strds = self.strds_basename + "_%s"%band + strds = self.strds_basename + "_%s" % band result_dict[band] = [] - create_strds = {"module":"t.create", - "inputs":{"output":strds, - "title":"Landsat time series for band %s"%band, - "description":"Landsat time series for band %s"%band, - "temporaltype":"absolute", - "type":"strds"}} + create_strds = { + "module": "t.create", + "inputs": { + "output": strds, + "title": "Landsat time series for band %s" % band, + "description": "Landsat time series for band %s" % band, + "temporaltype": "absolute", + "type": "strds", + }, + } register_commands[str(counter)] = create_strds counter += 1 # Create the input file - map_list_file_name = os.path.join(self.user_download_cache_path, strds) + map_list_file_name = os.path.join( + self.user_download_cache_path, strds + ) map_list_file = open(map_list_file_name, "w") # Use only the product ids that were found in the big query for scene_id in self.query_result: # We need to create a time interval, otherwise the temporal algebra will not work :/ - start_time = dtparser.parse( self.query_result[scene_id]["timestamp"].split(".")[0]) + start_time = dtparser.parse( + 
self.query_result[scene_id]["timestamp"].split(".")[0] + ) end_time = start_time + timedelta(seconds=1) index = SCENE_BANDS[self.sensor_id].index(band) raster_suffix = RASTER_SUFFIXES[self.sensor_id][index] - map_name = "%s_%s%s"%(scene_id, self.atcor_method, raster_suffix) - - result_dict[band].append([map_name, str(start_time), str(end_time)]) - - map_list_file.write("%s|%s|%s\n"%(map_name, str(start_time), str(end_time))) + map_name = "%s_%s%s" % ( + scene_id, + self.atcor_method, + raster_suffix, + ) + + result_dict[band].append( + [map_name, str(start_time), str(end_time)] + ) + + map_list_file.write( + "%s|%s|%s\n" % (map_name, str(start_time), str(end_time)) + ) map_list_file.close() - register_maps = {"module":"t.register", - "inputs":{"input":strds, - "file":map_list_file_name, - "type":"raster"}} + register_maps = { + "module": "t.register", + "inputs": { + "input": strds, + "file": map_list_file_name, + "type": "raster", + }, + } register_commands[str(counter)] = register_maps counter += 1 - process_list = self._validate_process_chain(process_chain=register_commands, - skip_permission_check=True) + process_list = self._validate_process_chain( + process_chain=register_commands, skip_permission_check=True + ) self._execute_process_list(process_list=process_list) self.module_results = result_dict @@ -354,8 +410,10 @@ def _execute(self): self._check_lock_target_mapset() if self.target_mapset_exists is True: - raise AsyncProcessError("Sentinel time series can only be create in a new mapset. " - "Mapset <%s> already exists."%self.target_mapset_name) + raise AsyncProcessError( + "Sentinel time series can only be create in a new mapset. " + "Mapset <%s> already exists." 
% self.target_mapset_name + ) # Init GRASS environment and create the temporary mapset with the same name as the target mapset # This is required to register the raster maps in the temporary directory, but use them in @@ -367,8 +425,9 @@ def _execute(self): # Initialize the GRASS environment and switch into PERMANENT # mapset, which is always linked - self._create_grass_environment(grass_data_base=self.temp_grass_data_base, - mapset_name="PERMANENT") + self._create_grass_environment( + grass_data_base=self.temp_grass_data_base, mapset_name="PERMANENT" + ) # Create the temporary mapset and switch into it self._create_temporary_mapset(temp_mapset_name=self.target_mapset_name) @@ -384,8 +443,10 @@ def _execute(self): # Abort if a single scene is missing if len(missing_scene_ids) > 0: - raise AsyncProcessError("Unable to find product ids <%s> in the " - "Google BigQuery database"%str(missing_scene_ids)) + raise AsyncProcessError( + "Unable to find product ids <%s> in the " + "Google BigQuery database" % str(missing_scene_ids) + ) self._import_scenes() diff --git a/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py b/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py index 1f41337..5fd7e84 100644 --- a/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py +++ b/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py @@ -8,108 +8,120 @@ from datetime import timedelta from copy import deepcopy from flask_restful_swagger_2 import swagger, Schema -from actinia_core.models.response_models import ProcessingResponseModel, ProcessingErrorResponseModel -from actinia_core.processing.actinia_processing.ephemeral.persistent_processing import PersistentProcessing +from actinia_core.models.response_models import ( + ProcessingResponseModel, + ProcessingErrorResponseModel, +) +from actinia_core.processing.actinia_processing.ephemeral.persistent_processing import ( + PersistentProcessing, +) from 
actinia_core.rest.base.resource_base import ResourceBase from actinia_core.core.common.redis_interface import enqueue_job -from actinia_core.core.common.google_satellite_bigquery_interface import GoogleSatelliteBigQueryInterface -from actinia_core.core.common.sentinel_processing_library import Sentinel2Processing +from actinia_core.core.common.google_satellite_bigquery_interface import ( + GoogleSatelliteBigQueryInterface, +) +from actinia_core.core.common.sentinel_processing_library import ( + Sentinel2Processing, +) from actinia_core.core.common.exceptions import AsyncProcessError __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" class Sentinel2ASceneListModel(Schema): - """This schema defines the JSON input of the sentinel time series creator resource - """ - type = 'object' + """This schema defines the JSON input of the sentinel time series creator resource""" + + type = "object" properties = { - 'bands': { - 'type': 'array', - 'items': {'type':'string'}, - 'description': 'A list of band names that should be downloaded and imported for each Sentinel-2 scene.' - 'Available are the following band names: "B01", "B02", "B03", "B04", "B05", "B06", "B07",' - '"B08", "B8A", "B09" "B10", "B11", "B12"' + "bands": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of band names that should be downloaded and imported for each Sentinel-2 scene." 
+ 'Available are the following band names: "B01", "B02", "B03", "B04", "B05", "B06", "B07",' + '"B08", "B8A", "B09" "B10", "B11", "B12"', + }, + "strds": { + "type": "array", + "items": {"type": "string"}, + "description": "The names of the new space-time raster datasets, one for each band", }, - 'strds': { - 'type': 'array', - 'items': {'type':'string'}, - 'description': 'The names of the new space-time raster datasets, one for each band' + "product_ids": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of Sentinel-2 scene names that should be downloaded and imported", }, - 'product_ids': { - 'type': 'array', - 'items': {'type':'string'}, - 'description': 'A list of Sentinel-2 scene names that should be downloaded and imported' - } } - example = {"bands":["B04","B08"], - "strds":["Sentinel_B04", "Sentinel_b08"], - "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", - "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", - "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236"]} + example = { + "bands": ["B04", "B08"], + "strds": ["Sentinel_B04", "Sentinel_b08"], + "product_ids": [ + "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", + "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236", + ], + } - required = ['bands', 'strds', 'product_ids'] - - -SCHEMA_DOC={ - 'tags': ['Satellite Image Algorithms'], - 'description': 'Download and import Sentinel2A scenes into a new mapset and create a space-time raster dataset ' - 'for each imported band. ' - 'The resulting data will be located in a persistent user database. ' - 'The location name is part of the path and must exist. The mapset will ' - 'be created while importing and should not already exist in the location. 
The names of the' - 'Sentinel-2 scenes and the band names that should be downloaded must be specified ' - 'in the HTTP body as application/json content. In addition, the names of the ' - 'STRDS that should manage the sentinel scenes ' - 'must be provided in the application/json content. For each band a separate ' - 'STRDS name must be provided.' - 'This call is performed asynchronously. The provided resource URL must be pulled ' - 'to receive the status of the import. The data is available in the provided ' - 'location/mapset, after the download and import finished. Minimum required user role: user.', - 'consumes':['application/json'], - 'parameters': [ + required = ["bands", "strds", "product_ids"] + + +SCHEMA_DOC = { + "tags": ["Satellite Image Algorithms"], + "description": "Download and import Sentinel2A scenes into a new mapset and create a space-time raster dataset " + "for each imported band. " + "The resulting data will be located in a persistent user database. " + "The location name is part of the path and must exist. The mapset will " + "be created while importing and should not already exist in the location. The names of the" + "Sentinel-2 scenes and the band names that should be downloaded must be specified " + "in the HTTP body as application/json content. In addition, the names of the " + "STRDS that should manage the sentinel scenes " + "must be provided in the application/json content. For each band a separate " + "STRDS name must be provided." + "This call is performed asynchronously. The provided resource URL must be pulled " + "to receive the status of the import. The data is available in the provided " + "location/mapset, after the download and import finished. 
Minimum required user role: user.", + "consumes": ["application/json"], + "parameters": [ { - 'name': 'location_name', - 'description': 'The location name to import the Sentinel2A scenes in', - 'required': True, - 'in': 'path', - 'type': 'string' + "name": "location_name", + "description": "The location name to import the Sentinel2A scenes in", + "required": True, + "in": "path", + "type": "string", }, { - 'name': 'mapset_name', - 'description': 'The name of the mapset to import the Sentinel2A scenes in', - 'required': True, - 'in': 'path', - 'type': 'string' + "name": "mapset_name", + "description": "The name of the mapset to import the Sentinel2A scenes in", + "required": True, + "in": "path", + "type": "string", }, { - 'name': 'tiles', - 'description': 'The list of Sentinel-2 scenes, the band names and the target STRDS names', - 'required': True, - 'in': 'body', - 'schema': Sentinel2ASceneListModel - } + "name": "tiles", + "description": "The list of Sentinel-2 scenes, the band names and the target STRDS names", + "required": True, + "in": "body", + "schema": Sentinel2ASceneListModel, + }, ], - 'responses': { - '200': { - 'description': 'The result of the Sentinel-2 time series import', - 'schema':ProcessingResponseModel + "responses": { + "200": { + "description": "The result of the Sentinel-2 time series import", + "schema": ProcessingResponseModel, }, - '400': { - 'description':'The error message and a detailed log why Sentinel ' - '2A time series import did not succeeded', - 'schema':ProcessingErrorResponseModel - } - } - } + "400": { + "description": "The error message and a detailed log why Sentinel " + "2A time series import did not succeeded", + "schema": ProcessingErrorResponseModel, + }, + }, +} class AsyncSentinel2TimeSeriesCreatorResource(ResourceBase): - def __init__(self): ResourceBase.__init__(self) @@ -151,9 +163,9 @@ def post(self, location_name, mapset_name): """ # Preprocess the post call - rdc = self.preprocess(has_json=True, - 
location_name=location_name, - mapset_name=mapset_name) + rdc = self.preprocess( + has_json=True, location_name=location_name, mapset_name=mapset_name + ) # RedisQueue approach enqueue_job(self.job_timeout, start_job, rdc) @@ -174,6 +186,7 @@ class AsyncSentinel2TimeSeriesCreator(PersistentProcessing): The Sentiel2A scenes are downloaded , imported and pre-processed before they are registered in the band specific space time datasets. """ + def __init__(self, rdc): """Constructor @@ -190,7 +203,9 @@ def __init__(self, rdc): self.product_ids = self.rdc.request_data["product_ids"] self.strds_ids = self.rdc.request_data["strds"] self.required_bands = self.rdc.request_data["bands"] - self.user_download_cache_path = os.path.join(self.config.DOWNLOAD_CACHE, self.user_id) + self.user_download_cache_path = os.path.join( + self.config.DOWNLOAD_CACHE, self.user_id + ) self.query_result = None def _prepare_sentinel2_download(self): @@ -221,15 +236,21 @@ def _prepare_sentinel2_download(self): self._send_resource_update("Sending Google BigQuery request.") try: - self.query_result = self.query_interface.get_sentinel_urls(self.product_ids, self.required_bands) + self.query_result = self.query_interface.get_sentinel_urls( + self.product_ids, self.required_bands + ) except Exception as e: - raise AsyncProcessError("Error in querying Sentinel-2 product <%s> " - "in Google BigQuery Sentinel-2 database. " - "Error: %s"%(self.product_ids, str(e))) + raise AsyncProcessError( + "Error in querying Sentinel-2 product <%s> " + "in Google BigQuery Sentinel-2 database. 
" + "Error: %s" % (self.product_ids, str(e)) + ) if not self.query_result: - raise AsyncProcessError("Unable to find Sentinel-2 product <%s> " - "in Google BigQuery Sentinel-2 database"%self.product_ids) + raise AsyncProcessError( + "Unable to find Sentinel-2 product <%s> " + "in Google BigQuery Sentinel-2 database" % self.product_ids + ) def _import_sentinel2_scenes(self): """Import all found Sentinel2 scenes with their bands and create the space time raster datasets @@ -246,12 +267,21 @@ def _import_sentinel2_scenes(self): # Use only the product ids that were found in the big query for product_id in self.query_result: - process_lib = Sentinel2Processing(self.config, product_id, self.query_result, - self.required_bands, self.temp_file_path, - self.user_download_cache_path, self._send_resource_update, - self.message_logger) - - download_commands, self.sentinel2_band_file_list = process_lib.get_sentinel2_download_process_list() + process_lib = Sentinel2Processing( + self.config, + product_id, + self.query_result, + self.required_bands, + self.temp_file_path, + self.user_download_cache_path, + self._send_resource_update, + self.message_logger, + ) + + ( + download_commands, + self.sentinel2_band_file_list, + ) = process_lib.get_sentinel2_download_process_list() # Download the sentinel scene if not in the download cache if download_commands: @@ -278,45 +308,68 @@ def _import_sentinel2_scenes(self): result_dict[band] = [] - create_strds = {"module":"t.create", - "inputs":{"output":strds, - "title":"Sentinel2A time series for band %s"%band, - "description":"Sentinel2A time series for band %s"%band, - "temporaltype":"absolute", - "type":"strds"}} + create_strds = { + "module": "t.create", + "inputs": { + "output": strds, + "title": "Sentinel2A time series for band %s" % band, + "description": "Sentinel2A time series for band %s" % band, + "temporaltype": "absolute", + "type": "strds", + }, + } register_commands[str(counter)] = create_strds counter += 1 # Create the input 
file - map_list_file_name = os.path.join(self.user_download_cache_path, strds) + map_list_file_name = os.path.join( + self.user_download_cache_path, strds + ) map_list_file = open(map_list_file_name, "w") # Use only the product ids that were found in the big query for product_id in self.query_result: # We need to create a time interval, otherwise the temporal algebra will not work :/ - start_time = dtparser.parse( self.query_result[product_id]["timestamp"].split(".")[0]) + start_time = dtparser.parse( + self.query_result[product_id]["timestamp"].split(".")[0] + ) end_time = start_time + timedelta(seconds=1) - result_dict[band].append([self.query_result[product_id][band]["file"], - str(start_time), - str(end_time)]) - - map_list_file.write("%s|%s|%s\n"%(self.query_result[product_id][band]["file"], - str(start_time), str(end_time))) + result_dict[band].append( + [ + self.query_result[product_id][band]["file"], + str(start_time), + str(end_time), + ] + ) + + map_list_file.write( + "%s|%s|%s\n" + % ( + self.query_result[product_id][band]["file"], + str(start_time), + str(end_time), + ) + ) map_list_file.close() - register_maps = {"module":"t.register", - "inputs":{"input":strds, - "file":map_list_file_name, - "type":"raster"}} + register_maps = { + "module": "t.register", + "inputs": { + "input": strds, + "file": map_list_file_name, + "type": "raster", + }, + } register_commands[str(counter)] = register_maps counter += 1 - process_list = self._validate_process_chain(process_chain=register_commands, - skip_permission_check=True) + process_list = self._validate_process_chain( + process_chain=register_commands, skip_permission_check=True + ) self._execute_process_list(process_list=process_list) self.module_results = result_dict @@ -327,7 +380,9 @@ def _execute(self): self._setup() if len(self.required_bands) != len(self.strds_ids): - raise AsyncProcessError("The number of bands and the number of strds must be equal") + raise AsyncProcessError( + "The number of bands and 
the number of strds must be equal"
+            )
 
         for band in self.required_bands:
             if self.required_bands.count(band) > 2:
@@ -341,8 +396,10 @@ def _execute(self):
         self._check_lock_target_mapset()
 
         if self.target_mapset_exists is True:
-            raise AsyncProcessError("Sentinel time series can only be create in a new mapset. "
-                                    "Mapset <%s> already exists."%self.target_mapset_name)
+            raise AsyncProcessError(
+                "Sentinel time series can only be created in a new mapset. "
+                "Mapset <%s> already exists." % self.target_mapset_name
+            )
 
         # Init GRASS environment and create the temporary mapset with the same name as the target mapset
         # This is required to register the raster maps in the temporary directory, but use them in
@@ -354,8 +411,9 @@ def _execute(self):
 
         # Initialize the GRASS environment and switch into PERMANENT
         # mapset, which is always linked
-        self._create_grass_environment(grass_data_base=self.temp_grass_data_base,
-                                       mapset_name="PERMANENT")
+        self._create_grass_environment(
+            grass_data_base=self.temp_grass_data_base, mapset_name="PERMANENT"
+        )
 
         # Create the temporary mapset and switch into it
         self._create_temporary_mapset(temp_mapset_name=self.target_mapset_name)
@@ -371,8 +429,10 @@ def _execute(self):
 
         # Abort if a single scene is missing
         if len(missing_product_ids) > 0:
-            raise AsyncProcessError("Unable to find product ids <%s> in the "
-                                    "Google BigQuery database"%str(missing_product_ids))
+            raise AsyncProcessError(
+                "Unable to find product ids <%s> in the "
+                "Google BigQuery database" % str(missing_product_ids)
+            )
 
         self._import_sentinel2_scenes()
 
diff --git a/src/actinia_satellite_plugin/satellite_query.py b/src/actinia_satellite_plugin/satellite_query.py
index e8583da..11aeeb5 100644
--- a/src/actinia_satellite_plugin/satellite_query.py
+++ b/src/actinia_satellite_plugin/satellite_query.py
@@ -9,63 +9,65 @@
 from copy import deepcopy
 from flask_restful import reqparse
 from actinia_core.core.common.config import global_config
-from 
actinia_core.core.common.google_satellite_bigquery_interface import GoogleSatelliteBigQueryInterface +from actinia_core.core.common.google_satellite_bigquery_interface import ( + GoogleSatelliteBigQueryInterface, +) from actinia_core.core.common.app import auth from actinia_core.core.common.api_logger import log_api_call from actinia_core.models.response_models import SimpleResponseModel __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" class SatelliteSceneEntry(Schema): - type = 'object' + type = "object" properties = { - 'scene_id': { - 'type': 'string', - 'description': 'The id of the satellite scene' + "scene_id": { + "type": "string", + "description": "The id of the satellite scene", }, - 'sensing_time': { - 'type': 'string', - 'description': 'The sensing time of the scene', + "sensing_time": { + "type": "string", + "description": "The sensing time of the scene", }, - 'cloud_cover': { - 'type': 'number', - 'format': 'double', - 'description': 'Cloud cover of the scene 0-100' + "cloud_cover": { + "type": "number", + "format": "double", + "description": "Cloud cover of the scene 0-100", }, - 'east_lon': { - 'type': 'number', - 'format': 'double', - 'description': 'Eastern border of the scene' + "east_lon": { + "type": "number", + "format": "double", + "description": "Eastern border of the scene", }, - 'west_lon': { - 'type': 'number', - 'format': 'double', - 'description': 'Western border of the scene' + "west_lon": { + "type": "number", + "format": "double", + "description": "Western border of the scene", }, - 'north_lat': { - 'type': 'number', - 'format': 'double', - 'description': 'Northern border of the scene' + "north_lat": { + "type": "number", + "format": "double", + "description": "Northern border of the 
scene", }, - 'south_lat': { - 'type': 'number', - 'format': 'double', - 'description': 'Southern border of the scene' + "south_lat": { + "type": "number", + "format": "double", + "description": "Southern border of the scene", + }, + "total_size": { + "type": "number", + "format": "double", + "description": "Total size of the scene", }, - 'total_size': { - 'type': 'number', - 'format': 'double', - 'description': 'Total size of the scene' - } } - required = ['scene_id', 'sensing_time'] + required = ["scene_id", "sensing_time"] - example = { + example = { "cloud_cover": "100.0", "east_lon": 117.334772, "north_lat": -73.8673822679, @@ -73,212 +75,238 @@ class SatelliteSceneEntry(Schema): "sensing_time": "2017-01-01T00:37:59.459000Z", "south_lat": -74.8755595194, "total_size": 608562784, - "west_lon": 113.568673296 - } + "west_lon": 113.568673296, + } class SatelliteSceneList(Schema): - type = 'object' + type = "object" properties = { - 'resource_list': { - 'type': 'array', - 'items': SatelliteSceneEntry, - 'description': 'A list of satellite scenes' + "resource_list": { + "type": "array", + "items": SatelliteSceneEntry, + "description": "A list of satellite scenes", } } required = ["resource_list"] - example = [{ - "cloud_cover": "100.0", - "east_lon": 117.334772, - "north_lat": -73.8673822679, - "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMC_20170101T003759", - "sensing_time": "2017-01-01T00:37:59.459000Z", - "south_lat": -74.8755595194, - "total_size": 608562784, - "west_lon": 113.568673296 - }, - { - "cloud_cover": "100.0", - "east_lon": 117.355376908, - "north_lat": -74.7623823271, - "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMB_20170101T003759", - "sensing_time": "2017-01-01T00:37:59.459000Z", - "south_lat": -75.7719656592, - "total_size": 604326630, - "west_lon": 113.35802037 - } + example = [ + { + "cloud_cover": "100.0", + "east_lon": 117.334772, + "north_lat": -73.8673822679, + "scene_id": 
"S2A_MSIL1C_20170101T003802_N0204_R116_T50CMC_20170101T003759", + "sensing_time": "2017-01-01T00:37:59.459000Z", + "south_lat": -74.8755595194, + "total_size": 608562784, + "west_lon": 113.568673296, + }, + { + "cloud_cover": "100.0", + "east_lon": 117.355376908, + "north_lat": -74.7623823271, + "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMB_20170101T003759", + "sensing_time": "2017-01-01T00:37:59.459000Z", + "south_lat": -75.7719656592, + "total_size": 604326630, + "west_lon": 113.35802037, + }, ] -SCHEMA_LAND_DOC={ - 'tags': ['Satellite Image Algorithms'], - 'description': 'Query the Google Landsat archives using time interval, lat/lon coordinates, ' - 'scene id, spacecraft id and cloud cover. ' - 'All scenes that are located within the time interval and that intersect ' - 'the given latitude/longitude coordinates are returned as a list of ' - 'scene names with associated time stamps. ' - 'Minimum required user role: user.', - 'parameters': [ + +SCHEMA_LAND_DOC = { + "tags": ["Satellite Image Algorithms"], + "description": "Query the Google Landsat archives using time interval, lat/lon coordinates, " + "scene id, spacecraft id and cloud cover. " + "All scenes that are located within the time interval and that intersect " + "the given latitude/longitude coordinates are returned as a list of " + "scene names with associated time stamps. 
" + "Minimum required user role: user.", + "parameters": [ { - 'name': 'scene_id', - 'description': 'The scene id of the landsat scenes that should be searched', - 'required': False, - 'in': 'query', - 'type': 'string' + "name": "scene_id", + "description": "The scene id of the landsat scenes that should be searched", + "required": False, + "in": "query", + "type": "string", }, { - 'name': 'spacecraft_id', - 'description': 'The spacecraft id of the landsat scenes that should be searched', - 'required': False, - 'in': 'query', - 'type': 'string', - 'enum': ["LANDSAT_4", "LANDSAT_5", "LANDSAT_7", "LANDSAT_8"] + "name": "spacecraft_id", + "description": "The spacecraft id of the landsat scenes that should be searched", + "required": False, + "in": "query", + "type": "string", + "enum": ["LANDSAT_4", "LANDSAT_5", "LANDSAT_7", "LANDSAT_8"], }, { - 'name': 'start_time', - 'description': 'The start time of the search interval', - 'required': False, - 'in': 'query', - 'type': 'string', - 'format': 'dateTime' + "name": "start_time", + "description": "The start time of the search interval", + "required": False, + "in": "query", + "type": "string", + "format": "dateTime", }, { - 'name': 'end_time', - 'description': 'The end time of the search interval', - 'required': False, - 'in': 'query', - 'type': 'string', - 'format': 'dateTime' + "name": "end_time", + "description": "The end time of the search interval", + "required": False, + "in": "query", + "type": "string", + "format": "dateTime", }, { - 'name': 'lon', - 'description': 'The longitude coordinate with which the scenes should intersect', - 'required': False, - 'in': 'query', - 'type': 'number', - 'format': 'double' + "name": "lon", + "description": "The longitude coordinate with which the scenes should intersect", + "required": False, + "in": "query", + "type": "number", + "format": "double", }, { - 'name': 'lat', - 'description': 'The latitude coordinate with which the scenes should intersect', - 'required': False, - 
'in': 'query', - 'type': 'number', - 'format': 'double' + "name": "lat", + "description": "The latitude coordinate with which the scenes should intersect", + "required": False, + "in": "query", + "type": "number", + "format": "double", }, { - 'name': 'cloud_covert', - 'description': 'Cloud cover between 0 - 100', - 'required': False, - 'in': 'query', - 'type': 'number', - 'format': 'double' - } + "name": "cloud_covert", + "description": "Cloud cover between 0 - 100", + "required": False, + "in": "query", + "type": "number", + "format": "double", + }, ], - 'responses': { - '200': { - 'description': 'A list of satellite scenes that fit the search', - 'schema':SatelliteSceneList + "responses": { + "200": { + "description": "A list of satellite scenes that fit the search", + "schema": SatelliteSceneList, }, - '400': { - 'description':'The error message if the search did not succeeded', - 'schema':SimpleResponseModel - } - } - } - + "400": { + "description": "The error message if the search did not succeeded", + "schema": SimpleResponseModel, + }, + }, +} -SCHEMA_SENT_DOC={ - 'tags': ['Satellite Image Algorithms'], - 'description': 'Query the Google Sentinel2 archives using time interval, lat/lon coordinates, ' - 'scene id and cloud cover. ' - 'All scenes that are located within the time interval and that intersect ' - 'the given latitude/longitude coordinates are returned as a list of ' - 'scene names with associated time stamps. ' - 'Minimum required user role: user.', - 'parameters': [ +SCHEMA_SENT_DOC = { + "tags": ["Satellite Image Algorithms"], + "description": "Query the Google Sentinel2 archives using time interval, lat/lon coordinates, " + "scene id and cloud cover. " + "All scenes that are located within the time interval and that intersect " + "the given latitude/longitude coordinates are returned as a list of " + "scene names with associated time stamps. 
" + "Minimum required user role: user.", + "parameters": [ { - 'name': 'scene_id', - 'description': 'The scene id also named product id of the Sentinel2A scenes that should be searched', - 'required': False, - 'in': 'query', - 'type': 'string' + "name": "scene_id", + "description": "The scene id also named product id of the Sentinel2A scenes that should be searched", + "required": False, + "in": "query", + "type": "string", }, { - 'name': 'start_time', - 'description': 'The start time of the search interval', - 'required': False, - 'in': 'query', - 'type': 'string', - 'format': 'dateTime' + "name": "start_time", + "description": "The start time of the search interval", + "required": False, + "in": "query", + "type": "string", + "format": "dateTime", }, { - 'name': 'end_time', - 'description': 'The end time of the search interval', - 'required': False, - 'in': 'query', - 'type': 'string', - 'format': 'dateTime' + "name": "end_time", + "description": "The end time of the search interval", + "required": False, + "in": "query", + "type": "string", + "format": "dateTime", }, { - 'name': 'lon', - 'description': 'The longitude coordinate with which the scenes should intersect', - 'required': False, - 'in': 'query', - 'type': 'number', - 'format': 'double' + "name": "lon", + "description": "The longitude coordinate with which the scenes should intersect", + "required": False, + "in": "query", + "type": "number", + "format": "double", }, { - 'name': 'lat', - 'description': 'The latitude coordinate with which the scenes should intersect', - 'required': False, - 'in': 'query', - 'type': 'number', - 'format': 'double' + "name": "lat", + "description": "The latitude coordinate with which the scenes should intersect", + "required": False, + "in": "query", + "type": "number", + "format": "double", }, { - 'name': 'cloud_covert', - 'description': 'Cloud cover between 0 - 100', - 'required': False, - 'in': 'query', - 'type': 'number', - 'format': 'double' - } + "name": 
"cloud_covert", + "description": "Cloud cover between 0 - 100", + "required": False, + "in": "query", + "type": "number", + "format": "double", + }, ], - 'responses': { - '200': { - 'description': 'A list of satellite scenes that fit the search', - 'schema':SatelliteSceneList + "responses": { + "200": { + "description": "A list of satellite scenes that fit the search", + "schema": SatelliteSceneList, }, - '400': { - 'description':'The error message if the search did not succeeded', - 'schema':SimpleResponseModel - } - } - } + "400": { + "description": "The error message if the search did not succeeded", + "schema": SimpleResponseModel, + }, + }, +} class SatelliteQuery(Resource): - """Query the satellites Landsat4-8 and Sentinel2A archives in the google BigQuery database - """ + """Query the satellites Landsat4-8 and Sentinel2A archives in the google BigQuery database""" + decorators = [log_api_call, auth.login_required] # Create a temporal module where, order, column parser query_parser = reqparse.RequestParser() - query_parser.add_argument('scene_id', type=str, location='args', - help='Landsat scene id') - query_parser.add_argument('spacecraft_id', type=str, location='args', - help='Landsat spacecraft id') - query_parser.add_argument('start_time', type=str, location='args', - help='The start time of the search interval') - query_parser.add_argument('end_time', type=str, location='args', - help='The end time of the search interval') - query_parser.add_argument('lon', type=str, location='args', - help='The longitude coordinate with which the scenes should intersect') - query_parser.add_argument('lat', type=str, location='args', - help='The latitude coordinate with which the scenes should intersect') - query_parser.add_argument('cloud_cover', type=str, location='args', - help='Cloud cover between 0 - 100') + query_parser.add_argument( + "scene_id", type=str, location="args", help="Landsat scene id" + ) + query_parser.add_argument( + "spacecraft_id", + type=str, + 
location="args", + help="Landsat spacecraft id", + ) + query_parser.add_argument( + "start_time", + type=str, + location="args", + help="The start time of the search interval", + ) + query_parser.add_argument( + "end_time", + type=str, + location="args", + help="The end time of the search interval", + ) + query_parser.add_argument( + "lon", + type=str, + location="args", + help="The longitude coordinate with which the scenes should intersect", + ) + query_parser.add_argument( + "lat", + type=str, + location="args", + help="The latitude coordinate with which the scenes should intersect", + ) + query_parser.add_argument( + "cloud_cover", + type=str, + location="args", + help="Cloud cover between 0 - 100", + ) def _get(self, satellite): @@ -311,18 +339,24 @@ def _get(self, satellite): iface = GoogleSatelliteBigQueryInterface(global_config) if satellite == "landsat": - result = iface.query_landsat_archive(start_time=start_time, - end_time=end_time, - lon=lon, lat=lat, - cloud_cover=cloud_cover, - scene_id=scene_id, - spacecraft_id=spacecraft_id) + result = iface.query_landsat_archive( + start_time=start_time, + end_time=end_time, + lon=lon, + lat=lat, + cloud_cover=cloud_cover, + scene_id=scene_id, + spacecraft_id=spacecraft_id, + ) else: - result = iface.query_sentinel2_archive(start_time=start_time, - end_time=end_time, - lon=lon, lat=lat, - cloud_cover=cloud_cover, - scene_id=scene_id) + result = iface.query_sentinel2_archive( + start_time=start_time, + end_time=end_time, + lon=lon, + lat=lat, + cloud_cover=cloud_cover, + scene_id=scene_id, + ) return make_response(jsonify(result), 200) except Exception as e: result = {"status": "error", "message": str(e)} @@ -330,8 +364,8 @@ def _get(self, satellite): class LandsatQuery(SatelliteQuery): - """Query the Landsat4-8 archives - """ + """Query the Landsat4-8 archives""" + @swagger.doc(deepcopy(SCHEMA_LAND_DOC)) def get(self): """Query the Google Landsat archives using time interval, lat/lon coordinates, scene id, 
spacecraft id and cloud cover.""" @@ -339,8 +373,8 @@ def get(self): class Sentinel2Query(SatelliteQuery): - """Query the Sentinel2A archives - """ + """Query the Sentinel2A archives""" + @swagger.doc(deepcopy(SCHEMA_SENT_DOC)) def get(self): """Query the Google Sentinel2 archives using time interval, lat/lon coordinates, scene id and cloud cover.""" diff --git a/tests/test_aws_sentinel_service.py b/tests/test_aws_sentinel_service.py index 08c6a6c..0ec74c9 100644 --- a/tests/test_aws_sentinel_service.py +++ b/tests/test_aws_sentinel_service.py @@ -3,6 +3,7 @@ from pprint import pprint from flask.json import loads as json_load from flask.json import dumps as json_dump + try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX except: @@ -16,43 +17,66 @@ __email__ = "soerengebbert@googlemail.com" -SCENES = {"product_ids":["S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155", - "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302", - "S2A_MSIL1C_20170218T143751_N0204_R096_T20PRT_20170218T143931"], - "bands":["B04", "B08"]} +SCENES = { + "product_ids": [ + "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155", + "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302", + "S2A_MSIL1C_20170218T143751_N0204_R096_T20PRT_20170218T143931", + ], + "bands": ["B04", "B08"], +} -SCENES_ERROR = {"product_ids":["S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_NOPE", - "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302", - "S2A_MSIL1C_20170218T143751_N0204_R096_T20PRT_20170218T143931"], - "bands":["B04", "B08"]} +SCENES_ERROR = { + "product_ids": [ + "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_NOPE", + "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302", + "S2A_MSIL1C_20170218T143751_N0204_R096_T20PRT_20170218T143931", + ], + "bands": ["B04", "B08"], +} class 
AWSSentinelServiceTestCase(ActiniaResourceTestCaseBase): - def test_1(self): - rv = self.server.post(URL_PREFIX + '/sentinel2a_aws_query', - headers=self.user_auth_header, - data=json_dump(SCENES), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/sentinel2a_aws_query", + headers=self.user_auth_header, + data=json_dump(SCENES), + content_type="application/json", + ) pprint(json_load(rv.data)) self.assertTrue(len(json_load(rv.data)) == 3) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) def test_error_1(self): - rv = self.server.post(URL_PREFIX + '/sentinel2a_aws_query', - headers=self.user_auth_header, - data=json_dump(SCENES_ERROR), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/sentinel2a_aws_query", + headers=self.user_auth_header, + data=json_dump(SCENES_ERROR), + content_type="application/json", + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 400, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 400, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_landsat.py b/tests/test_landsat.py index 69458ee..1c02a61 100644 --- a/tests/test_landsat.py +++ b/tests/test_landsat.py @@ -2,6 +2,7 @@ import unittest from pprint import pprint from flask.json import loads as json_load + try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX except: @@ -9,77 
+10,147 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" class LandsatProcessingTestCase(ActiniaResourceTestCaseBase): - def test_landsat_computation_DOS4(self): - rv = self.server.post(URL_PREFIX + '/landsat_process/LT41970251990147XXX03/DOS4/NDVI', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + "/landsat_process/LT41970251990147XXX03/DOS4/NDVI", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) - - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=200, status="finished") - - rv = self.server.get(URL_PREFIX + '/download_cache', headers=self.admin_auth_header) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=200, + status="finished", + ) + + rv = self.server.get( + URL_PREFIX + "/download_cache", headers=self.admin_auth_header + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) self.assertTrue("used" in json_load(rv.data)["process_results"]) self.assertTrue("quota" in json_load(rv.data)["process_results"]) 
self.assertTrue("free" in json_load(rv.data)["process_results"]) - self.assertTrue("free_percent" in json_load(rv.data)["process_results"]) + self.assertTrue( + "free_percent" in json_load(rv.data)["process_results"] + ) def test_landsat_computation_TOAR(self): - rv = self.server.post(URL_PREFIX + '/landsat_process/LT41970251990147XXX03/TOAR/NDVI', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + "/landsat_process/LT41970251990147XXX03/TOAR/NDVI", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) - - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=200, status="finished") - - rv = self.server.get(URL_PREFIX + '/download_cache', headers=self.admin_auth_header) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=200, + status="finished", + ) + + rv = self.server.get( + URL_PREFIX + "/download_cache", headers=self.admin_auth_header + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) self.assertTrue("used" in json_load(rv.data)["process_results"]) self.assertTrue("quota" in json_load(rv.data)["process_results"]) self.assertTrue("free" in json_load(rv.data)["process_results"]) - self.assertTrue("free_percent" in json_load(rv.data)["process_results"]) + 
self.assertTrue( + "free_percent" in json_load(rv.data)["process_results"] + ) def test_landsat_computation_error_1(self): - rv = self.server.post(URL_PREFIX + '/landsat_process/LT41970251990147XXX03/POS/NDVI', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + "/landsat_process/LT41970251990147XXX03/POS/NDVI", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 400, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 400, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) def test_landsat_computation_error_2(self): - rv = self.server.post(URL_PREFIX + '/landsat_process/LT41970251990147XXX03/TOAR/LOLO', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + "/landsat_process/LT41970251990147XXX03/TOAR/LOLO", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 400, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 400, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) def test_landsat_computation_error_3(self): - rv = self.server.post(URL_PREFIX + '/landsat_process/LT41970241987299XXX0_NOPE/TOAR/NDVI', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + + "/landsat_process/LT41970241987299XXX0_NOPE/TOAR/NDVI", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, 
+ 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, headers=self.admin_auth_header, http_status=400, status="error" + ) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_landsat_timeseries_creation.py b/tests/test_landsat_timeseries_creation.py index 4ec126c..ac57778 100644 --- a/tests/test_landsat_timeseries_creation.py +++ b/tests/test_landsat_timeseries_creation.py @@ -3,6 +3,7 @@ from pprint import pprint from flask.json import loads as json_load from flask.json import dumps as json_dump + try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX except: @@ -10,21 +11,24 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" # Module change example for r.slope.aspect with g.region adjustment -SCENE_IDS = {"strds":"Landsat_4", - "atcor_method": "TOAR", - "scene_ids":["LT41970251990147XXX03"]} +SCENE_IDS = { + "strds": "Landsat_4", + "atcor_method": "TOAR", + "scene_ids": ["LT41970251990147XXX03"], +} -WRONG_SCENE_IDS = {"strds":"Landsat_4", - "atcor_method": "TOAR", - "scene_ids":["LT41970251990147XXX00", - "LT41970251990147XXX01"]} +WRONG_SCENE_IDS = { + "strds": "Landsat_4", + "atcor_method": "TOAR", + "scene_ids": ["LT41970251990147XXX00", "LT41970251990147XXX01"], +} test_mapsets = ["A"] @@ -42,45 +46,75 @@ def check_remove_test_mapsets(self): """ - rv = self.server.get(URL_PREFIX + '/locations/LL/mapsets', - headers=self.admin_auth_header) + rv = self.server.get( + URL_PREFIX + "/locations/LL/mapsets", + 
headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) mapsets = json_load(rv.data)["process_results"] for mapset in test_mapsets: if mapset in mapsets: # Unlock mapset for deletion - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/%s' % mapset, - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/%s" % mapset, + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) # Delete the mapset if it already exists - rv = self.server.delete(URL_PREFIX + '/locations/LL/mapsets/%s' % mapset, - headers=self.admin_auth_header) + rv = self.server.delete( + URL_PREFIX + "/locations/LL/mapsets/%s" % mapset, + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, + "application/json", + "Wrong mimetype %s" % rv.mimetype, + ) def test_1(self): """Test the import of two scenes with 2 bands in a new mapset A""" self.check_remove_test_mapsets() ############################################################################ - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/A/landsat_import', - headers=self.admin_auth_header, - data=json_dump(SCENE_IDS), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/A/landsat_import", + headers=self.admin_auth_header, + data=json_dump(SCENE_IDS), + 
content_type="application/json", + ) pprint(json_load(rv.data)) self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) - rv = self.server.get(URL_PREFIX + '/locations/LL/mapsets/A/strds', - headers=self.admin_auth_header) + rv = self.server.get( + URL_PREFIX + "/locations/LL/mapsets/A/strds", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) strds_list = json_load(rv.data)["process_results"] self.assertTrue("Landsat_4_B1" in strds_list) @@ -96,29 +130,40 @@ def test_wrong_scene_ids(self): self.check_remove_test_mapsets() ############################################################################ - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/A/landsat_import', - headers=self.admin_auth_header, - data=json_dump(WRONG_SCENE_IDS), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/A/landsat_import", + headers=self.admin_auth_header, + data=json_dump(WRONG_SCENE_IDS), + content_type="application/json", + ) pprint(json_load(rv.data)) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error", - message_check="AsyncProcessError:") + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError:", + ) def test_1_error_mapset_exists(self): - """PERMANENT mapset exists. 
hence an error message is expected - """ - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/PERMANENT/landsat_import', - headers=self.admin_auth_header, - data=json_dump(SCENE_IDS), - content_type="application/json") + """PERMANENT mapset exists. hence an error message is expected""" + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/PERMANENT/landsat_import", + headers=self.admin_auth_header, + data=json_dump(SCENE_IDS), + content_type="application/json", + ) pprint(json_load(rv.data)) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error", - message_check="AsyncProcessError:") + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError:", + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_resource_base.py b/tests/test_resource_base.py index 059bcc7..166e82e 100644 --- a/tests/test_resource_base.py +++ b/tests/test_resource_base.py @@ -11,10 +11,10 @@ from actinia_core.endpoints import create_endpoints as create_actinia_endpoints __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016-2019, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016-2019, Sören Gebbert" __maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" redis_pid = None server_test = False @@ -43,17 +43,20 @@ def setup_environment(): # GRASS # Setup the test environment - global_config.GRASS_GIS_BASE="/usr/local/grass/" - global_config.GRASS_GIS_START_SCRIPT="/usr/local/bin/grass" + global_config.GRASS_GIS_BASE = "/usr/local/grass/" + global_config.GRASS_GIS_START_SCRIPT = "/usr/local/bin/grass" # global_config.GRASS_DATABASE= "/usr/local/grass_test_db" # global_config.GRASS_DATABASE = "%s/actinia/grass_test_db" % home global_config.GRASS_TMP_DATABASE = "/tmp" if server_test is 
False and custom_actinia_cfg is False: # Start the redis server for user and logging management - redis_pid = os.spawnl(os.P_NOWAIT, "/usr/bin/redis-server", - "common/redis.conf", - "--port %i" % global_config.REDIS_SERVER_PORT) + redis_pid = os.spawnl( + os.P_NOWAIT, + "/usr/bin/redis-server", + "common/redis.conf", + "--port %i" % global_config.REDIS_SERVER_PORT, + ) time.sleep(1) if server_test is False and custom_actinia_cfg is not False: @@ -67,6 +70,7 @@ def stop_redis(): if redis_pid is not None: os.kill(redis_pid, signal.SIGTERM) + # Register the redis stop function atexit.register(stop_redis) # Setup the environment diff --git a/tests/test_satellite_query.py b/tests/test_satellite_query.py index e92b619..35385bf 100644 --- a/tests/test_satellite_query.py +++ b/tests/test_satellite_query.py @@ -2,6 +2,7 @@ import unittest from pprint import pprint from flask.json import loads as json_load + try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX except: @@ -9,101 +10,172 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" class GrassModuleTestCase(ActiniaResourceTestCaseBase): - def test_landsat_query_time_interval(self): - rv = self.server.get(URL_PREFIX + '/landsat_query?start_time=2001-01-01T00:00:00&end_time=2001-01-01T01:00:00', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX + + "/landsat_query?start_time=2001-01-01T00:00:00&end_time=2001-01-01T01:00:00", + headers=self.user_auth_header, + ) data = json_load(rv.data) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong 
%i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertTrue(len(data) >= 1) def test_sentinel2_query_time_interval(self): - rv = self.server.get(URL_PREFIX + '/sentinel2_query?start_time=2017-01-01T00:00:00&end_time=2017-01-01T00:30:00', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX + + "/sentinel2_query?start_time=2017-01-01T00:00:00&end_time=2017-01-01T00:30:00", + headers=self.user_auth_header, + ) # print rv.data - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertEqual(len(data), 204) def test_landsat_query_time_interval_lat_lon(self): - rv = self.server.get(URL_PREFIX + '/landsat_query?start_time=2001-01-01T00:00:00&end_time=2001-01-01T01:00:00&lon=154&lat=51', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX + + "/landsat_query?start_time=2001-01-01T00:00:00&end_time=2001-01-01T01:00:00&lon=154&lat=51", + headers=self.user_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertTrue(len(data) >= 1) def test_landsat_query_scene_id(self): - rv = self.server.get(URL_PREFIX + '/landsat_query?scene_id=LE71010632001001EDC01', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX 
+ "/landsat_query?scene_id=LE71010632001001EDC01", + headers=self.user_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertTrue(len(data) >= 1) def test_sentinel2_query_scene_id(self): - rv = self.server.get(URL_PREFIX + '/sentinel2_query?scene_id=S2B_MSIL1C_20171010T131249_N0205_R081_T26VPR_20171010T131243', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX + + "/sentinel2_query?scene_id=S2B_MSIL1C_20171010T131249_N0205_R081_T26VPR_20171010T131243", + headers=self.user_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertTrue(len(data) >= 1) def test_landsat_query_scene_id_cloud(self): - rv = self.server.get(URL_PREFIX + '/landsat_query?scene_id=LE71010632001001EDC01&cloud_cover=100.0', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX + + "/landsat_query?scene_id=LE71010632001001EDC01&cloud_cover=100.0", + headers=self.user_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + 
self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertTrue(len(data) >= 1) def test_landsat_query_scene_id_cloud_spacecraft(self): - rv = self.server.get(URL_PREFIX + '/landsat_query?scene_id=LE71010632001001EDC01&cloud_cover=100.0&spacecraft_id=LANDSAT_7', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX + + "/landsat_query?scene_id=LE71010632001001EDC01&cloud_cover=100.0&spacecraft_id=LANDSAT_7", + headers=self.user_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertTrue(len(data) >= 1) def test_z_landsat_query_cloud_spacecraft(self): - rv = self.server.get(URL_PREFIX + '/landsat_query?start_time=1983-09-01T01:00:00&end_time=1983-09-01T01:20:00&cloud_cover=0.0&spacecraft_id=LANDSAT_4', - headers=self.user_auth_header) + rv = self.server.get( + URL_PREFIX + + "/landsat_query?start_time=1983-09-01T01:00:00&end_time=1983-09-01T01:20:00&cloud_cover=0.0&spacecraft_id=LANDSAT_4", + headers=self.user_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) data = json_load(rv.data) self.assertTrue(len(data) >= 1) -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tests/test_sentinel2_ndvi.py 
b/tests/test_sentinel2_ndvi.py index a0423ac..b840395 100644 --- a/tests/test_sentinel2_ndvi.py +++ b/tests/test_sentinel2_ndvi.py @@ -2,6 +2,7 @@ import unittest from pprint import pprint from flask.json import loads as json_load + try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX except: @@ -9,28 +10,40 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" class SentinelProcessingTestCase(ActiniaResourceTestCaseBase): - def test_ndvi_computation_small(self): # Large scene # rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204', # headers=self.admin_auth_header) # Small scene - rv = self.server.post(URL_PREFIX + '/sentinel2_process/ndvi/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + + "/sentinel2_process/ndvi/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) - - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=200, status="finished") + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=200, + status="finished", + ) def incative_test_ndvi_computation_small_gcs(self): @@ -38,14 +51,27 @@ def incative_test_ndvi_computation_small_gcs(self): # 
rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204', # headers=self.admin_auth_header) # Small scene - rv = self.server.post(URL_PREFIX + '/sentinel2_process_gcs/ndvi/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + + "/sentinel2_process_gcs/ndvi/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) - - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=200, status="finished") + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=200, + status="finished", + ) def incative_test_ndvi_computation_big(self): @@ -53,34 +79,68 @@ def incative_test_ndvi_computation_big(self): # rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138', # headers=self.admin_auth_header) # Large scene - rv = self.server.post(URL_PREFIX + '/sentinel2_process/ndvi/S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + + "/sentinel2_process/ndvi/S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) - - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, 
- http_status=200, status="finished") + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=200, + status="finished", + ) def incative_test_computation_error_1(self): - rv = self.server.post(URL_PREFIX + '/sentinel2_process/ndvi/S2A_MSIL1C_20170212T1041_BLABLA_8_T31TGJ_20170212T104138.SAFE', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + + "/sentinel2_process/ndvi/S2A_MSIL1C_20170212T1041_BLABLA_8_T31TGJ_20170212T104138.SAFE", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) - - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error") + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, headers=self.admin_auth_header, http_status=400, status="error" + ) def incative_test_computation_error_2(self): - rv = self.server.post(URL_PREFIX + '/sentinel2_process_gcs/ndvi/S2A_MSIL1C_20170212T1041_BLABLA_8_T31TGJ_20170212T104138.SAFE', - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + + "/sentinel2_process_gcs/ndvi/S2A_MSIL1C_20170212T1041_BLABLA_8_T31TGJ_20170212T104138.SAFE", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % 
rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + self.waitAsyncStatusAssertHTTP( + rv, headers=self.admin_auth_header, http_status=400, status="error" + ) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_sentinel2_timeseries_creation.py b/tests/test_sentinel2_timeseries_creation.py index f885fa7..5198fbc 100644 --- a/tests/test_sentinel2_timeseries_creation.py +++ b/tests/test_sentinel2_timeseries_creation.py @@ -3,6 +3,7 @@ from pprint import pprint from flask.json import loads as json_load from flask.json import dumps as json_dump + try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX except: @@ -10,17 +11,21 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" -__copyright__ = "Copyright 2016, Sören Gebbert" +__author__ = "Sören Gebbert" +__copyright__ = "Copyright 2016, Sören Gebbert" __maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__email__ = "soerengebbert@googlemail.com" # Module change example for r.slope.aspect with g.region adjustment -PRODUCT_IDS = {"bands":["B04", "B08"], - "strds":["S2A_B04", "S2A_B08"], - "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", - "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613"]} +PRODUCT_IDS = { + "bands": ["B04", "B08"], + "strds": ["S2A_B04", "S2A_B08"], + "product_ids": [ + "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", + ], +} # # PRODUCT_IDS = {"bands":["B04", "B08"], # "strds":["S2A_B04", "S2A_B08"], @@ -103,135 +108,147 @@ # "S2A_MSIL1C_20170213T101121_N0204_R022_T33UVT_20170213T101553"]} # Sentinel2A time series of Germany -GERMANY={"bands":["B04", "B08"], -"strds":["S2A_B04", "S2A_B08"], -"product_ids":[ 
-"S2A_MSIL1C_20170213T101121_N0204_R022_T32UQV_20170213T101553", -"S2A_MSIL1C_20170213T101121_N0204_R022_T33UUR_20170213T101553", -"S2A_MSIL1C_20170213T101121_N0204_R022_T33UUS_20170213T101553", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UMU_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UNU_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UPU_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UQB_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UQC_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UQD_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UQE_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UQU_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T32UQV_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T33UUR_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T33UUS_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T33UUT_20170216T102204", -"S2A_MSIL1C_20170216T102101_N0204_R065_T33UUU_20170216T102204", -"S2A_MSIL1C_20170311T103011_N0204_R108_T32UMU_20170311T103014", -"S2A_MSIL1C_20170315T101021_N0204_R022_T32UQE_20170315T101214", -"S2A_MSIL1C_20170410T103021_N0204_R108_T32UMU_20170410T103020", -"S2A_MSIL1C_20170410T103021_N0204_R108_T32UNU_20170410T103020", -"S2A_MSIL1C_20170410T103021_N0204_R108_T32UNV_20170410T103020", -"S2A_MSIL1C_20170420T103021_N0204_R108_T32UNA_20170420T103454", -"S2A_MSIL1C_20170424T101031_N0204_R022_T32UPU_20170424T101120", -"S2A_MSIL1C_20170424T101031_N0204_R022_T32UPV_20170424T101120", -"S2A_MSIL1C_20170424T101031_N0204_R022_T32UQA_20170424T101120", -"S2A_MSIL1C_20170424T101031_N0204_R022_T32UQB_20170424T101120", -"S2A_MSIL1C_20170424T101031_N0204_R022_T32UQU_20170424T101120", -"S2A_MSIL1C_20170424T101031_N0204_R022_T32UQV_20170424T101120", -"S2A_MSIL1C_20170424T101031_N0204_R022_T33UUP_20170424T101120", 
-"S2A_MSIL1C_20170424T101031_N0204_R022_T33UUQ_20170424T101120", -"S2A_MSIL1C_20170424T101031_N0204_R022_T33UUR_20170424T101120", -"S2A_MSIL1C_20170430T103021_N0205_R108_T32UMA_20170430T103024", -"S2A_MSIL1C_20170430T103021_N0205_R108_T32UME_20170430T103024", -"S2A_MSIL1C_20170430T103021_N0205_R108_T32UMU_20170430T103024", -"S2A_MSIL1C_20170430T103021_N0205_R108_T32UMV_20170430T103024", -"S2A_MSIL1C_20170430T103021_N0205_R108_T32UND_20170430T103024", -"S2A_MSIL1C_20170430T103021_N0205_R108_T32UNV_20170430T103024", -"S2A_MSIL1C_20170430T103021_N0205_R108_T32UPC_20170430T103024", -"S2A_MSIL1C_20170507T102031_N0205_R065_T32UPE_20170507T102319", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32ULB_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UMA_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UMB_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UMU_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UMV_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UNA_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UNB_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UNU_20170510T103025", -"S2A_MSIL1C_20170510T103031_N0205_R108_T32UNV_20170510T103025", -"S2A_MSIL1C_20170517T102031_N0205_R065_T32UQU_20170517T102352", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UMU_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UNA_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UNB_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UNC_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UNU_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UNV_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UPA_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UPB_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UPC_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UPD_20170527T102301", 
-"S2A_MSIL1C_20170527T102031_N0205_R065_T32UPE_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UPU_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UPV_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQA_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQB_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQC_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQD_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQE_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQU_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQV_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T33UUR_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T33UUS_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T33UUT_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T33UUU_20170527T102301", -"S2A_MSIL1C_20170527T102031_N0205_R065_T33UUV_20170527T102301", -"S2A_MSIL1C_20170613T101031_N0205_R022_T32UPU_20170613T101608", -"S2A_MSIL1C_20170613T101031_N0205_R022_T32UPV_20170613T101608", -"S2A_MSIL1C_20170613T101031_N0205_R022_T32UQU_20170613T101608", -"S2A_MSIL1C_20170613T101031_N0205_R022_T32UQV_20170613T101608", -"S2A_MSIL1C_20170613T101031_N0205_R022_T33UUP_20170613T101608", -"S2A_MSIL1C_20170613T101031_N0205_R022_T33UUQ_20170613T101608", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32ULB_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UMA_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UMB_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UMC_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UMU_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UMV_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UNA_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UNB_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UNC_20170619T103021", 
-"S2A_MSIL1C_20170619T103021_N0205_R108_T32UNU_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UNV_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UPB_20170619T103021", -"S2A_MSIL1C_20170619T103021_N0205_R108_T32UPC_20170619T103021", -"S2A_MSIL1C_20170626T102021_N0205_R065_T32UMU_20170626T102321", -"S2A_MSIL1C_20170626T102021_N0205_R065_T32UNU_20170626T102321", -"S2A_MSIL1C_20170626T102021_N0205_R065_T32UPU_20170626T102321", -"S2A_MSIL1C_20170626T102021_N0205_R065_T32UQU_20170626T102321", -"S2A_MSIL1C_20170706T102021_N0205_R065_T32UNA_20170706T102301", -"S2A_MSIL1C_20170706T102021_N0205_R065_T32UNB_20170706T102301"]} - - - -WRONG_PRODUCT_IDS = {"bands":["B04", "B08"], - "strds":["S2A_B04", "S2A_B08"], - "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138_NOPE", - "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613_NOPE", - "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236_NOPE"]} - -WRONG_BANDS_IDS = {"bands":["B04_NOPE", "B08_NOPE"], - "strds":["S2A_B04", "S2A_B08"], - "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", - "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613"]} - -PRODUCT_IDS_ONE_WRONG = {"bands":["B04", "B08"], - "strds":["S2A_B04", "S2A_B08"], - "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", - "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236_NOPE"]} +GERMANY = { + "bands": ["B04", "B08"], + "strds": ["S2A_B04", "S2A_B08"], + "product_ids": [ + "S2A_MSIL1C_20170213T101121_N0204_R022_T32UQV_20170213T101553", + "S2A_MSIL1C_20170213T101121_N0204_R022_T33UUR_20170213T101553", + "S2A_MSIL1C_20170213T101121_N0204_R022_T33UUS_20170213T101553", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UMU_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UNU_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UPU_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204", 
+ "S2A_MSIL1C_20170216T102101_N0204_R065_T32UQB_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UQC_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UQD_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UQE_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UQU_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T32UQV_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T33UUR_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T33UUS_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T33UUT_20170216T102204", + "S2A_MSIL1C_20170216T102101_N0204_R065_T33UUU_20170216T102204", + "S2A_MSIL1C_20170311T103011_N0204_R108_T32UMU_20170311T103014", + "S2A_MSIL1C_20170315T101021_N0204_R022_T32UQE_20170315T101214", + "S2A_MSIL1C_20170410T103021_N0204_R108_T32UMU_20170410T103020", + "S2A_MSIL1C_20170410T103021_N0204_R108_T32UNU_20170410T103020", + "S2A_MSIL1C_20170410T103021_N0204_R108_T32UNV_20170410T103020", + "S2A_MSIL1C_20170420T103021_N0204_R108_T32UNA_20170420T103454", + "S2A_MSIL1C_20170424T101031_N0204_R022_T32UPU_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T32UPV_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T32UQA_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T32UQB_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T32UQU_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T32UQV_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T33UUP_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T33UUQ_20170424T101120", + "S2A_MSIL1C_20170424T101031_N0204_R022_T33UUR_20170424T101120", + "S2A_MSIL1C_20170430T103021_N0205_R108_T32UMA_20170430T103024", + "S2A_MSIL1C_20170430T103021_N0205_R108_T32UME_20170430T103024", + "S2A_MSIL1C_20170430T103021_N0205_R108_T32UMU_20170430T103024", + "S2A_MSIL1C_20170430T103021_N0205_R108_T32UMV_20170430T103024", + "S2A_MSIL1C_20170430T103021_N0205_R108_T32UND_20170430T103024", + 
"S2A_MSIL1C_20170430T103021_N0205_R108_T32UNV_20170430T103024", + "S2A_MSIL1C_20170430T103021_N0205_R108_T32UPC_20170430T103024", + "S2A_MSIL1C_20170507T102031_N0205_R065_T32UPE_20170507T102319", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32ULB_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UMA_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UMB_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UMU_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UMV_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UNA_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UNB_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UNU_20170510T103025", + "S2A_MSIL1C_20170510T103031_N0205_R108_T32UNV_20170510T103025", + "S2A_MSIL1C_20170517T102031_N0205_R065_T32UQU_20170517T102352", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UMU_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UNA_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UNB_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UNC_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UNU_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UNV_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UPA_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UPB_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UPC_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UPD_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UPE_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UPU_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UPV_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UQA_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UQB_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UQC_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UQD_20170527T102301", + 
"S2A_MSIL1C_20170527T102031_N0205_R065_T32UQE_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UQU_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T32UQV_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T33UUR_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T33UUS_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T33UUT_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T33UUU_20170527T102301", + "S2A_MSIL1C_20170527T102031_N0205_R065_T33UUV_20170527T102301", + "S2A_MSIL1C_20170613T101031_N0205_R022_T32UPU_20170613T101608", + "S2A_MSIL1C_20170613T101031_N0205_R022_T32UPV_20170613T101608", + "S2A_MSIL1C_20170613T101031_N0205_R022_T32UQU_20170613T101608", + "S2A_MSIL1C_20170613T101031_N0205_R022_T32UQV_20170613T101608", + "S2A_MSIL1C_20170613T101031_N0205_R022_T33UUP_20170613T101608", + "S2A_MSIL1C_20170613T101031_N0205_R022_T33UUQ_20170613T101608", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32ULB_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UMA_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UMB_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UMC_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UMU_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UMV_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UNA_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UNB_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UNC_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UNU_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UNV_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UPB_20170619T103021", + "S2A_MSIL1C_20170619T103021_N0205_R108_T32UPC_20170619T103021", + "S2A_MSIL1C_20170626T102021_N0205_R065_T32UMU_20170626T102321", + "S2A_MSIL1C_20170626T102021_N0205_R065_T32UNU_20170626T102321", + "S2A_MSIL1C_20170626T102021_N0205_R065_T32UPU_20170626T102321", + 
"S2A_MSIL1C_20170626T102021_N0205_R065_T32UQU_20170626T102321", + "S2A_MSIL1C_20170706T102021_N0205_R065_T32UNA_20170706T102301", + "S2A_MSIL1C_20170706T102021_N0205_R065_T32UNB_20170706T102301", + ], +} + + +WRONG_PRODUCT_IDS = { + "bands": ["B04", "B08"], + "strds": ["S2A_B04", "S2A_B08"], + "product_ids": [ + "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138_NOPE", + "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613_NOPE", + "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236_NOPE", + ], +} + +WRONG_BANDS_IDS = { + "bands": ["B04_NOPE", "B08_NOPE"], + "strds": ["S2A_B04", "S2A_B08"], + "product_ids": [ + "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", + ], +} + +PRODUCT_IDS_ONE_WRONG = { + "bands": ["B04", "B08"], + "strds": ["S2A_B04", "S2A_B08"], + "product_ids": [ + "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236_NOPE", + ], +} test_mapsets = ["A"] class AsyncSentielTimeSeriesCreationTestCaseAdmin(ActiniaResourceTestCaseBase): - """test the download and creation of sentinel2 time series in a new mapset - - """ + """test the download and creation of sentinel2 time series in a new mapset""" def check_remove_test_mapsets(self): """ @@ -240,62 +257,97 @@ def check_remove_test_mapsets(self): """ - rv = self.server.get(URL_PREFIX + '/locations/LL/mapsets', - headers=self.admin_auth_header) + rv = self.server.get( + URL_PREFIX + "/locations/LL/mapsets", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + 
) mapsets = json_load(rv.data)["process_results"] for mapset in test_mapsets: if mapset in mapsets: # Unlock mapset for deletion - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/%s' % mapset, - headers=self.admin_auth_header) + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/%s" % mapset, + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) # Delete the mapset if it already exists - rv = self.server.delete(URL_PREFIX + '/locations/LL/mapsets/%s' % mapset, - headers=self.admin_auth_header) + rv = self.server.delete( + URL_PREFIX + "/locations/LL/mapsets/%s" % mapset, + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, + "application/json", + "Wrong mimetype %s" % rv.mimetype, + ) def test_1(self): """Test the import of two scenes with 2 bands in a new mapset A""" self.check_remove_test_mapsets() ############################################################################ - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/A/sentinel2_import', - headers=self.admin_auth_header, - data=json_dump(PRODUCT_IDS), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/A/sentinel2_import", + headers=self.admin_auth_header, + data=json_dump(PRODUCT_IDS), + content_type="application/json", + ) pprint(json_load(rv.data)) self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) - rv = self.server.get(URL_PREFIX + '/locations/LL/mapsets/A/strds', - headers=self.admin_auth_header) + rv = self.server.get( + URL_PREFIX + "/locations/LL/mapsets/A/strds", + headers=self.admin_auth_header, + ) pprint(json_load(rv.data)) - self.assertEqual(rv.status_code, 200, "HTML status code 
is wrong %i"%rv.status_code) - self.assertEqual(rv.mimetype, "application/json", "Wrong mimetype %s"%rv.mimetype) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) strds_list = json_load(rv.data)["process_results"] self.assertTrue("S2A_B04" in strds_list) self.assertTrue("S2A_B08" in strds_list) def test_1_error_mapset_exists(self): - """PERMANENT mapset exists. hence an error message is expected - """ - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/PERMANENT/sentinel2_import', - headers=self.admin_auth_header, - data=json_dump(PRODUCT_IDS), - content_type="application/json") + """PERMANENT mapset exists. hence an error message is expected""" + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/PERMANENT/sentinel2_import", + headers=self.admin_auth_header, + data=json_dump(PRODUCT_IDS), + content_type="application/json", + ) pprint(json_load(rv.data)) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error", - message_check="AsyncProcessError:") + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError:", + ) def test_2_error_wrong_product_ids(self): """Check for all wrong product ids @@ -305,46 +357,62 @@ def test_2_error_wrong_product_ids(self): """ self.check_remove_test_mapsets() - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/A/sentinel2_import', - headers=self.admin_auth_header, - data=json_dump(WRONG_PRODUCT_IDS), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/A/sentinel2_import", + headers=self.admin_auth_header, + data=json_dump(WRONG_PRODUCT_IDS), + content_type="application/json", + ) pprint(json_load(rv.data)) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, 
status="error", - message_check="AsyncProcessError:") + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError:", + ) def test_3_error_wrong_bands_ids(self): - """Check for wrong band ids - """ + """Check for wrong band ids""" self.check_remove_test_mapsets() - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/A/sentinel2_import', - headers=self.admin_auth_header, - data=json_dump(WRONG_BANDS_IDS), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/A/sentinel2_import", + headers=self.admin_auth_header, + data=json_dump(WRONG_BANDS_IDS), + content_type="application/json", + ) pprint(json_load(rv.data)) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error", - message_check="AsyncProcessError:") + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError:", + ) def test_4_error_wrong_product_id(self): - """Check if a missing product id is found - """ + """Check if a missing product id is found""" self.check_remove_test_mapsets() - rv = self.server.post(URL_PREFIX + '/locations/LL/mapsets/A/sentinel2_import', - headers=self.admin_auth_header, - data=json_dump(PRODUCT_IDS_ONE_WRONG), - content_type="application/json") + rv = self.server.post( + URL_PREFIX + "/locations/LL/mapsets/A/sentinel2_import", + headers=self.admin_auth_header, + data=json_dump(PRODUCT_IDS_ONE_WRONG), + content_type="application/json", + ) pprint(json_load(rv.data)) - self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header, - http_status=400, status="error", - message_check="AsyncProcessError:") + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError:", + ) -if __name__ == '__main__': +if __name__ == "__main__": 
unittest.main() From ff3aeb2259ec081f081d2d3378f9376a8d2190d5 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 28 Jul 2022 07:56:55 +0200 Subject: [PATCH 2/2] flake8 --- docs/conf.py | 25 +- scripts/main.py | 1 - .../aws_sentinel2a_query.py | 149 ++++--- .../ephemeral_landsat_ndvi_processor.py | 397 ++++++++++++------ .../ephemeral_sentinel2_ndvi_processor.py | 317 +++++++++----- .../persistent_landsat_timeseries_creator.py | 113 +++-- ...persistent_sentinel2_timeseries_creator.py | 127 +++--- .../satellite_query.py | 52 ++- tests/test_aws_sentinel_service.py | 8 +- tests/test_landsat.py | 2 +- tests/test_landsat_timeseries_creation.py | 7 +- tests/test_resource_base.py | 6 +- tests/test_satellite_query.py | 33 +- tests/test_sentinel2_ndvi.py | 31 +- tests/test_sentinel2_timeseries_creation.py | 86 +--- 15 files changed, 830 insertions(+), 524 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d7985fd..1baa421 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# This file is execfile()d with the current directory set to its containing dir. +# This file is execfiled with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. @@ -23,8 +23,8 @@ sys.path.insert(0, os.path.join(__location__, "../src")) # -- Run sphinx-apidoc ------------------------------------------------------ -# This hack is necessary since RTD does not issue `sphinx-apidoc` before running -# `sphinx-build -b html . _build/html`. See Issue: +# This hack is necessary since RTD does not issue `sphinx-apidoc` before +# running `sphinx-build -b html . _build/html`. See Issue: # https://github.com/rtfd/readthedocs.org/issues/1139 # DON'T FORGET: Check the box "Install your project inside a virtualenv using # setup.py install" in the RTD Advanced Settings. 
@@ -59,13 +59,13 @@ except Exception as e: print("Running `sphinx-apidoc` failed!\n{}".format(e)) -# -- General configuration ----------------------------------------------------- +# -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.intersphinx", @@ -118,7 +118,8 @@ # directories to ignore when looking for source files. exclude_patterns = ["_build"] -# The reST default role (used for this markup: `text`) to use for all documents. +# The reST default role (used for this markup: `text`) to use for all +# documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. @@ -142,7 +143,7 @@ # keep_warnings = False -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. @@ -227,7 +228,7 @@ htmlhelp_basename = "actinia_satellite_plugin-doc" -# -- Options for LaTeX output -------------------------------------------------- +# -- Options for LaTeX output ------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). @@ -238,8 +239,8 @@ # 'preamble': '', } -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). +# Grouping the document tree into LaTeX files. 
List of tuples (source start +# file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ( "index", @@ -270,7 +271,7 @@ # If false, no module index is generated. # latex_domain_indices = True -# -- External mapping ------------------------------------------------------------ +# -- External mapping --------------------------------------------------------- python_version = ".".join(map(str, sys.version_info[0:2])) intersphinx_mapping = { "sphinx": ("http://www.sphinx-doc.org/en/stable", None), diff --git a/scripts/main.py b/scripts/main.py index 553e090..82673c9 100644 --- a/scripts/main.py +++ b/scripts/main.py @@ -32,7 +32,6 @@ create_process_queue(global_config) -######################################################################################################################## if __name__ == "__main__": # Connect to the database flask_app.run(host="0.0.0.0", port=8080, debug=True) diff --git a/src/actinia_satellite_plugin/aws_sentinel2a_query.py b/src/actinia_satellite_plugin/aws_sentinel2a_query.py index 8fbb8d5..8a6ef85 100644 --- a/src/actinia_satellite_plugin/aws_sentinel2a_query.py +++ b/src/actinia_satellite_plugin/aws_sentinel2a_query.py @@ -27,23 +27,29 @@ class BandInformationEntry(Schema): properties = { "file_name": { "type": "string", - "description": "The suggested file name of this band from the requested satellite scene", + "description": "The suggested file name of this band from the " + "requested satellite scene", }, "map_name": { "type": "string", - "description": "The suggested GRASS GIS raster map name of this band from the requested satellite scene", + "description": "The suggested GRASS GIS raster map name of this " + "band from the requested satellite scene", }, "public_url": { "type": "string", - "description": "The download URl of the band from requested satellite scene", + "description": "The download URl of the band from requested " + "satellite scene", }, } required = ["public_url", "map_name", 
"file_name"] example = { - "file_name": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_tile_1_band_B04.jp2", - "map_name": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_tile_1_band_B04", - "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/36/T/VT/2017/2/2/0/B04.jp2", + "file_name": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_" + "20170202T090155_tile_1_band_B04.jp2", + "map_name": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_" + "20170202T090155_tile_1_band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/36/T/" + "VT/2017/2/2/0/B04.jp2", } @@ -76,7 +82,8 @@ class Sentinel2ATileEntry(Schema): }, "url": { "type": "string", - "description": "The url to Sentinel2A scene root directory that contains all informations about the scene", + "description": "The url to Sentinel2A scene root directory that " + "contains all informations about the scene", }, "timestamp": { "type": "string", @@ -88,20 +95,30 @@ class Sentinel2ATileEntry(Schema): example = { "B04": { - "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2", - "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04", - "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2", + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20" + "151207T003302_20151207T003302_tile_14_band_B04.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_" + "V20151207T003302_20151207T003302_tile_14_band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/" + "XE/2015/12/7/0/B04.jp2", }, "B08": { - "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2", - "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08", - "public_url": 
"http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2", + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20" + "151207T003302_20151207T003302_tile_14_band_B08.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_" + "V20151207T003302_20151207T003302_tile_14_band_B08", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/" + "XE/2015/12/7/0/B08.jp2", }, - "info": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json", - "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml", - "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg", + "info": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/" + "12/7/0/tileInfo.json", + "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/" + "2015/12/7/0/metadata.xml", + "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/" + "2015/12/7/0/preview.jpg", "timestamp": "2015-12-07T00:33:02.634Z", - "url": "http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", + "url": "http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/" + "#tiles/57/V/XE/2015/12/7/0/", } @@ -120,24 +137,39 @@ class Sentinel2ASceneEntry(Schema): } required = ["product_id", "tiles"] example = { - "product_id": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155", + "product_id": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_" + "20170202T090155", "tiles": [ { "B04": { - "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2", - "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04", - "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2", + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_" + "R102_V20151207T003302_20151207T003302_tile_" + "14_band_B04.jp2", + 
"map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102" + "_V20151207T003302_20151207T003302_tile_14_" + "band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/" + "tiles/57/V/XE/2015/12/7/0/B04.jp2", }, "B08": { - "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2", - "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08", - "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2", + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_" + "R102_V20151207T003302_20151207T003302_tile_" + "14_band_B08.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102" + "_V20151207T003302_20151207T003302_tile_14_" + "band_B08", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/" + "tiles/57/V/XE/2015/12/7/0/B08.jp2", }, - "info": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json", - "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml", - "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg", + "info": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/" + "XE/2015/12/7/0/tileInfo.json", + "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/" + "57/V/XE/2015/12/7/0/metadata.xml", + "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/" + "57/V/XE/2015/12/7/0/preview.jpg", "timestamp": "2015-12-07T00:33:02.634Z", - "url": "http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", + "url": "http://sentinel-s2-l1c.s3-website.eu-central-1." 
+ "amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", } ], } @@ -154,24 +186,39 @@ class Sentinel2ASceneList(Schema): } example = [ { - "product_id": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155", + "product_id": "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_" + "20170202T090155", "tiles": [ { "B04": { - "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04.jp2", - "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B04", - "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B04.jp2", + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157" + "_R102_V20151207T003302_20151207T003302_" + "tile_14_band_B04.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_" + "R102_V20151207T003302_20151207T003302_" + "tile_14_band_B04", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws." + "com/tiles/57/V/XE/2015/12/7/0/B04.jp2", }, "B08": { - "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08.jp2", - "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302_tile_14_band_B08", - "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/B08.jp2", + "file_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157" + "_R102_V20151207T003302_20151207T003302_" + "tile_14_band_B08.jp2", + "map_name": "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157" + "_R102_V20151207T003302_20151207T003302_" + "tile_14_band_B08", + "public_url": "http://sentinel-s2-l1c.s3.amazonaws.com" + "/tiles/57/V/XE/2015/12/7/0/B08.jp2", }, - "info": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/tileInfo.json", - "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/metadata.xml", - "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/V/XE/2015/12/7/0/preview.jpg", + "info": 
"http://sentinel-s2-l1c.s3.amazonaws.com/tiles/57/" + "V/XE/2015/12/7/0/tileInfo.json", + "metadata": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles" + "/57/V/XE/2015/12/7/0/metadata.xml", + "preview": "http://sentinel-s2-l1c.s3.amazonaws.com/tiles" + "/57/V/XE/2015/12/7/0/preview.jpg", "timestamp": "2015-12-07T00:33:02.634Z", - "url": "http://sentinel-s2-l1c.s3-website.eu-central-1.amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", + "url": "http://sentinel-s2-l1c.s3-website.eu-central-1." + "amazonaws.com/#tiles/57/V/XE/2015/12/7/0/", } ], } @@ -179,22 +226,26 @@ class Sentinel2ASceneList(Schema): class Sentinel2ASceneListModel(Schema): - """This schema defines the JSON input of the sentinel time series creator resource""" + """This schema defines the JSON input of the sentinel time series creator + resource + """ type = "object" properties = { "bands": { "type": "array", "items": {"type": "string"}, - "description": "A list of band names that should be downloaded and imported for each Sentinel-2 scene." - 'Available are the following band names: "B01", "B02", "B03", "B04", "B05", "B06", "B07",' - '"B08", "B8A", "B09" "B10", "B11", "B12"', + "description": "A list of band names that should be downloaded " + "and imported for each Sentinel-2 scene." + 'Available are the following band names: "B01", "B02", "B03", ' + '"B04", "B05", "B06", "B07", "B08", "B8A", "B09" "B10", "B11", ' + '"B12"', }, "product_ids": { "type": "array", "items": {"type": "string"}, - "description": "A list of Sentinel-2 scene names of which the tile download urls " - "and metadata infor urls should be provided.", + "description": "A list of Sentinel-2 scene names of which the tile" + " download urls and metadata infor urls should be provided.", }, } example = { @@ -211,8 +262,8 @@ class Sentinel2ASceneListModel(Schema): SCHEMA_DOC = { "tags": ["Satellite Image Algorithms"], - "description": "Generate the download urls for a list of sentinel2A scenes and band numbers. 
" - "Minimum required user role: user.", + "description": "Generate the download urls for a list of sentinel2A scenes" + " and band numbers. Minimum required user role: user.", "consumes": ["application/json"], "parameters": [ { @@ -244,7 +295,9 @@ class AWSSentinel2ADownloadLinkQuery(ResourceBase): @swagger.doc(deepcopy(SCHEMA_DOC)) def post(self): - """Generate the download urls for a list of sentinel2A scenes and band numbers.""" + """Generate the download urls for a list of sentinel2A scenes and band + numbers. + """ try: iface = AWSSentinel2AInterface(global_config) diff --git a/src/actinia_satellite_plugin/ephemeral_landsat_ndvi_processor.py b/src/actinia_satellite_plugin/ephemeral_landsat_ndvi_processor.py index 6939a49..dac4b08 100644 --- a/src/actinia_satellite_plugin/ephemeral_landsat_ndvi_processor.py +++ b/src/actinia_satellite_plugin/ephemeral_landsat_ndvi_processor.py @@ -9,11 +9,16 @@ from actinia_core.core.common.api_logger import log_api_call from flask_restful_swagger_2 import swagger from actinia_core.rest.base.resource_base import ResourceBase -from actinia_core.processing.actinia_processing.ephemeral.ephemeral_processing_with_export import EphemeralProcessingWithExport +from actinia_core.processing.actinia_processing.ephemeral\ + .ephemeral_processing_with_export import EphemeralProcessingWithExport from actinia_core.core.common.redis_interface import enqueue_job from actinia_core.core.common.exceptions import AsyncProcessError -from actinia_core.models.response_models import UnivarResultModel, ProcessingResponseModel -from actinia_core.core.common.landsat_processing_library import LandsatProcessing +from actinia_core.models.response_models import ( + UnivarResultModel, + ProcessingResponseModel, +) +from actinia_core.core.common.landsat_processing_library import \ + LandsatProcessing from actinia_core.models.response_models import ProcessingErrorResponseModel from actinia_core.core.common.app import URL_PREFIX @@ -24,24 +29,30 @@ __email__ = 
"soerengebbert@googlemail.com" SCENE_SUFFIXES = { - "LT04": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", "_B6.TIF", "_B7.TIF", "_MTL.txt"], - "LT05": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", "_B6.TIF", "_B7.TIF", "_MTL.txt"], - "LE07": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", "_B6_VCID_2.TIF", "_B6_VCID_1.TIF", - "_B7.TIF", "_B8.TIF", "_MTL.txt"], - "LC08": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", "_B6.TIF", "_B7.TIF", - "_B8.TIF", "_B9.TIF", "_B10.TIF", "_B11.TIF", "_MTL.txt"]} + "LT04": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", "_B6.TIF", + "_B7.TIF", "_MTL.txt"], + "LT05": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", "_B6.TIF", + "_B7.TIF", "_MTL.txt"], + "LE07": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", + "_B6_VCID_2.TIF", "_B6_VCID_1.TIF", "_B7.TIF", "_B8.TIF", + "_MTL.txt"], + "LC08": ["_B1.TIF", "_B2.TIF", "_B3.TIF", "_B4.TIF", "_B5.TIF", + "_B6.TIF", "_B7.TIF", "_B8.TIF", "_B9.TIF", "_B10.TIF", + "_B11.TIF", "_MTL.txt"]} RASTER_SUFFIXES = { "LT04": [".1", ".2", ".3", ".4", ".5", ".6", ".7"], "LT05": [".1", ".2", ".3", ".4", ".5", ".6", ".7"], "LE07": [".1", ".2", ".3", ".4", ".5", ".61", ".62", ".7", ".8"], - "LC08": [".1", ".2", ".3", ".4", ".5", ".6", ".7", ".8", ".9", ".10", ".11"]} + "LC08": [".1", ".2", ".3", ".4", ".5", ".6", ".7", ".8", ".9", ".10", + ".11"]} class LandsatNDVIResponseModel(ProcessingResponseModel): """The response of the Landsat vegetation index computation - It is used as schema to define the *process_result* in a ProcessingResponseModel derivative. + It is used as schema to define the *process_result* in a + ProcessingResponseModel derivative. 
""" type = 'object' properties = deepcopy(ProcessingResponseModel.properties) @@ -55,8 +66,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "api_info": { "endpoint": "asyncephemerallandsatprocessingresource", "method": "POST", - "path": f"{URL_PREFIX}/landsat_process/LC80440342016259LGN00/TOAR/NDVI", - "request_url": f"http://localhost:5000{URL_PREFIX}/landsat_process/LC80440342016259LGN00/TOAR/NDVI" + "path": f"{URL_PREFIX}/landsat_process/LC80440342016259LGN00/TOAR/" + "NDVI", + "request_url": f"http://localhost:5000{URL_PREFIX}/landsat_process/" + "LC80440342016259LGN00/TOAR/NDVI" }, "datetime": "2018-05-30 11:22:58.315162", "http_code": 200, @@ -71,7 +84,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "module": "r.univar", "outputs": { "output": { - "name": "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/tmpkiv0uv6z.univar" + "name": "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559" + "612abab4352b069/.tmp/tmpkiv0uv6z.univar" } } } @@ -102,8 +116,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "-c", "-q", "-O", - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B6.TIF", - "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_B6.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B6.TIF", + "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/" + "044/034/LC80440342016259LGN00/LC80440342016259LGN00_B6.TIF" ], "return_code": 0, "run_time": 23.63347291946411, @@ -115,8 +131,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B6.TIF", - "/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B6.TIF" + 
"/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B6.TIF", + "/actinia/workspace/download_cache/superadmin/LC80440342016259L" + "GN00_B6.TIF" ], "return_code": 0, "run_time": 0.05022144317626953, @@ -132,8 +150,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "-c", "-q", "-O", - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B7.TIF", - "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_B7.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab435" + "2b069/.tmp/LC80440342016259LGN00_B7.TIF", + "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/" + "044/034/LC80440342016259LGN00/LC80440342016259LGN00_B7.TIF" ], "return_code": 0, "run_time": 22.89448094367981, @@ -145,8 +165,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B7.TIF", - "/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B7.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab435" + "2b069/.tmp/LC80440342016259LGN00_B7.TIF", + "/actinia/workspace/download_cache/superadmin/LC80440342016259L" + "GN00_B7.TIF" ], "return_code": 0, "run_time": 0.051961421966552734, @@ -162,8 +184,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "-c", "-q", "-O", - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B8.TIF", - "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_B8.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B8.TIF", + "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/" + 
"044/034/LC80440342016259LGN00/LC80440342016259LGN00_B8.TIF" ], "return_code": 0, "run_time": 83.04966020584106, @@ -175,8 +199,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B8.TIF", - "/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B8.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B8.TIF", + "/actinia/workspace/download_cache/superadmin/LC80440342016259LG" + "N00_B8.TIF" ], "return_code": 0, "run_time": 0.05012321472167969, @@ -192,8 +218,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "-c", "-q", "-O", - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B9.TIF", - "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_B9.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b" + "069/.tmp/LC80440342016259LGN00_B9.TIF", + "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/" + "044/034/LC80440342016259LGN00/LC80440342016259LGN00_B9.TIF" ], "return_code": 0, "run_time": 11.948487043380737, @@ -205,8 +233,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B9.TIF", - "/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B9.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b" + "069/.tmp/LC80440342016259LGN00_B9.TIF", + "/actinia/workspace/download_cache/superadmin/LC80440342016259LG" + "N00_B9.TIF" ], "return_code": 0, "run_time": 0.05081939697265625, @@ -222,8 +252,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "-c", "-q", "-O", - 
"/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B10.TIF", - "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_B10.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B10.TIF", + "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/" + "044/034/LC80440342016259LGN00/LC80440342016259LGN00_B10.TIF" ], "return_code": 0, "run_time": 15.688527345657349, @@ -235,8 +267,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B10.TIF", - "/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B10.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B10.TIF", + "/actinia/workspace/download_cache/superadmin/LC80440342016259LG" + "N00_B10.TIF" ], "return_code": 0, "run_time": 0.05163097381591797, @@ -252,8 +286,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "-c", "-q", "-O", - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B11.TIF", - "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_B11.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B11.TIF", + "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/" + "044/034/LC80440342016259LGN00/LC80440342016259LGN00_B11.TIF" ], "return_code": 0, "run_time": 15.100370645523071, @@ -265,8 +301,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_B11.TIF", - 
"/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B11.TIF" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_B11.TIF", + "/actinia/workspace/download_cache/superadmin/LC80440342016259LG" + "N00_B11.TIF" ], "return_code": 0, "run_time": 0.05057358741760254, @@ -282,8 +320,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "-c", "-q", "-O", - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_MTL.txt", - "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_MTL.txt" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_MTL.txt", + "https://storage.googleapis.com/gcp-public-data-landsat/LC08/PRE/" + "044/034/LC80440342016259LGN00/LC80440342016259LGN00_MTL.txt" ], "return_code": 0, "run_time": 0.25395917892456055, @@ -295,8 +335,10 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_MTL.txt", - "/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_MTL.txt" + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352" + "b069/.tmp/LC80440342016259LGN00_MTL.txt", + "/actinia/workspace/download_cache/superadmin/LC80440342016259LG" + "N00_MTL.txt" ], "return_code": 0, "run_time": 0.05015206336975098, @@ -311,13 +353,16 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "/usr/local/bin/grass", "-e", "-c", - "/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B1.TIF", - "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/Landsat" + "/actinia/workspace/download_cache/superadmin/LC80440342016259LG" + "N00_B1.TIF", + "/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b" + "069/Landsat" ], "return_code": 0, 
"run_time": 0.15161657333374023, "stderr": [ - "Default locale settings are missing. GRASS running with C locale.WARNING: Searched for a web browser, but none found", + "Default locale settings are missing. GRASS running with C locale." + "WARNING: Searched for a web browser, but none found", "Creating new GRASS GIS location/mapset...", "Cleaning up temporary files...", "" @@ -327,7 +372,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B1.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC804403420162" + "59LGN00_B1.TIF", "output=LC80440342016259LGN00.1", "--q" ], @@ -341,7 +387,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B2.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC804403420162" + "59LGN00_B2.TIF", "output=LC80440342016259LGN00.2", "--q" ], @@ -355,7 +402,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B3.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC80440342016" + "259LGN00_B3.TIF", "output=LC80440342016259LGN00.3", "--q" ], @@ -369,7 +417,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B4.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC8044034201" + "6259LGN00_B4.TIF", "output=LC80440342016259LGN00.4", "--q" ], @@ -383,7 +432,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B5.TIF", + 
"input=/actinia/workspace/download_cache/superadmin/LC80440342016" + "259LGN00_B5.TIF", "output=LC80440342016259LGN00.5", "--q" ], @@ -397,7 +447,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B6.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC80440342016" + "259LGN00_B6.TIF", "output=LC80440342016259LGN00.6", "--q" ], @@ -411,7 +462,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B7.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC80440342016" + "259LGN00_B7.TIF", "output=LC80440342016259LGN00.7", "--q" ], @@ -425,7 +477,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B8.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC80440342016" + "259LGN00_B8.TIF", "output=LC80440342016259LGN00.8", "--q" ], @@ -439,7 +492,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B9.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC8044034201" + "6259LGN00_B9.TIF", "output=LC80440342016259LGN00.9", "--q" ], @@ -453,7 +507,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B10.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC80440342016" + "259LGN00_B10.TIF", "output=LC80440342016259LGN00.10", "--q" ], @@ -467,7 +522,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - 
"input=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_B11.TIF", + "input=/actinia/workspace/download_cache/superadmin/LC80440342016" + "259LGN00_B11.TIF", "output=LC80440342016259LGN00.11", "--q" ], @@ -482,7 +538,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "executable": "i.landsat.toar", "parameter": [ "input=LC80440342016259LGN00.", - "metfile=/actinia/workspace/download_cache/superadmin/LC80440342016259LGN00_MTL.txt", + "metfile=/actinia/workspace/download_cache/superadmin/LC80440342" + "016259LGN00_MTL.txt", "method=uncorrected", "output=LC80440342016259LGN00_TOAR.", "--q" @@ -510,7 +567,9 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "return_code": 0, "run_time": 45.43833112716675, "stderr": [ - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48." + ".51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96." 
+ ".99..100", "" ], "stdout": "" @@ -524,7 +583,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "return_code": 0, "run_time": 0.050219058990478516, "stderr": [ - "Color table for raster map set to 'ndvi'", + "Color table for raster map set" + " to 'ndvi'", "" ], "stdout": "" @@ -533,7 +593,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "executable": "r.univar", "parameter": [ "map=LC80440342016259LGN00_TOAR_NDVI", - "output=/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/tmpkiv0uv6z.univar", + "output=/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612a" + "bab4352b069/.tmp/tmpkiv0uv6z.univar", "-g" ], "return_code": 0, @@ -552,7 +613,9 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "return_code": 0, "run_time": 1.2287390232086182, "stderr": [ - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48.." + "51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96.." 
+ "99..100", "" ], "stdout": "" @@ -582,7 +645,8 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "stderr": [ "" ], - "stdout": "projection=1\nzone=10\nn=4264515\ns=4030185\nw=464385\ne=694515\nnsres=30\newres=30\nrows=7811\ncols=7671\ncells=59918181\n" + "stdout": "projection=1\nzone=10\nn=4264515\ns=4030185\nw=464385\ne" + "=694515\nnsres=30\newres=30\nrows=7811\ncols=7671\ncells=59918181\n" }, { "executable": "r.out.gdal", @@ -591,19 +655,29 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "input=LC80440342016259LGN00_TOAR_NDVI", "format=GTiff", "createopt=COMPRESS=LZW", - "output=/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612abab4352b069/.tmp/LC80440342016259LGN00_TOAR_NDVI.tiff" + "output=/actinia/workspace/temp_db/gisdbase_4e879f3951334a559612ab" + "ab4352b069/.tmp/LC80440342016259LGN00_TOAR_NDVI.tiff" ], "return_code": 0, "run_time": 8.784564018249512, "stderr": [ "Checking GDAL data type and nodata value...", - "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", + "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50." + ".53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98." + ".100", "Using GDAL data type ", - "Input raster map contains cells with NULL-value (no-data). The value -nan will be used to represent no-data values in the input map. You can specify a nodata value with the nodata option.", + "Input raster map contains cells with NULL-value (no-data). The " + "value -nan will be used to represent no-data values in the input" + " map. You can specify a nodata value with the nodata option.", "Exporting raster data to GTiff format...", - "ERROR 6: SetColorTable() only supported for Byte or UInt16 bands in TIFF format.", - "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", - "r.out.gdal complete. 
File created.", + "ERROR 6: SetColorTable() only supported for Byte or UInt16 bands" + " in TIFF format.", + "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50." + ".53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98." + ".100", + "r.out.gdal complete. File created.", "" ], "stdout": "" @@ -636,10 +710,14 @@ class LandsatNDVIResponseModel(ProcessingResponseModel): "timestamp": 1527679378.31516, "urls": { "resources": [ - f"http://localhost:5000{URL_PREFIX}/resource/superadmin/resource_id-6282c634-42e1-417c-a092-c9b21c3283cc/tmp80apvh0h.png", - f"http://localhost:5000{URL_PREFIX}/resource/superadmin/resource_id-6282c634-42e1-417c-a092-c9b21c3283cc/LC80440342016259LGN00_TOAR_NDVI.tiff" + f"http://localhost:5000{URL_PREFIX}/resource/superadmin/resource_id" + "-6282c634-42e1-417c-a092-c9b21c3283cc/tmp80apvh0h.png", + f"http://localhost:5000{URL_PREFIX}/resource/superadmin/resource_id-" + "6282c634-42e1-417c-a092-c9b21c3283cc/LC80440342016259LGN00_TOAR_" + "NDVI.tiff" ], - "status": f"http://localhost:5000{URL_PREFIX}/resources/superadmin/resource_id-6282c634-42e1-417c-a092-c9b21c3283cc" + "status": f"http://localhost:5000{URL_PREFIX}/resources/superadmin/" + "resource_id-6282c634-42e1-417c-a092-c9b21c3283cc" }, "user_id": "superadmin" } @@ -662,35 +740,39 @@ def extract_sensor_id_from_scene_id(scene_id): def landsat_scene_id_to_google_url(landsat_scene_id, suffix): - """Convert a landsat scene id into the public google download URL for the required file + """Convert a landsat scene id into the public google download URL for the + required file Args: landsat_scene_id (str): The Landsat scene id - suffix (str): The suffix of the file to create the url for, i.e.: "_B1.TIF" or "_MTL.txt" + suffix (str): The suffix of the file to create the url for, + i.e.: "_B1.TIF" or "_MTL.txt" Returns: (str) The URL to the scene file """ # URL example - # 
https://storage.googleapis.com/gcp-public-data-landsat/LT04/PRE/006/016/LT40060161989006XXX02/LT40060161989006XXX02_MTL.txt + # https://storage.googleapis.com/gcp-public-data-landsat/LT04/PRE/006/016 + # /LT40060161989006XXX02/LT40060161989006XXX02_MTL.txt # Create the download URL components from the Landsat scene id landsat_sensor_id = extract_sensor_id_from_scene_id(landsat_scene_id) path = landsat_scene_id[3:6] row = landsat_scene_id[6:9] - url = "https://storage.googleapis.com/gcp-public-data-landsat/%s/PRE/%s/%s/%s/%s%s" % (landsat_sensor_id, - path, row, - landsat_scene_id, - landsat_scene_id, - suffix) + url = ( + "https://storage.googleapis.com/gcp-public-data-landsat/" + f"{landsat_sensor_id}/PRE/{path}/{row}/{landsat_scene_id}/" + f"{landsat_scene_id}{suffix}" + ) return url class AsyncEphemeralLandsatProcessingResource(ResourceBase): """ This class represents a resource that runs asynchronous processing tasks - to download and process Landsat TM satellite images in an ephemeral GRASS location + to download and process Landsat TM satellite images in an ephemeral GRASS + location """ decorators = [log_api_call, auth.login_required] @@ -701,15 +783,20 @@ def __init__(self): @swagger.doc({ 'tags': ['Satellite Image Algorithms'], - 'description': 'Vegetation index computation from an atmospherically corrected Landsat scene. ' - 'The Landsat scene is located in the google cloud storage. ' - 'The processing is as follows: A user specific Landsat scene (LT4, LT5, LE7 and LC8) ' - 'will be download and imported into an ephemeral database. Then atmospheric correction ' - 'will be performed, with either TOAR or DOS4, depending on the users choice. The user specific ' - 'vegetation index will be computed based on the TOAR or DOS4 data. The result of ' - 'the computation is available as gzipped Geotiff file. In addition, ' + 'description': 'Vegetation index computation from an atmospherically ' + 'corrected Landsat scene. 
The Landsat scene' + ' is located in the google cloud storage. ' + 'The processing is as follows: A user specific Landsat ' + 'scene (LT4, LT5, LE7 and LC8) will be download and' + ' imported into an ephemeral database. Then atmospheric' + ' correction will be performed, with either TOAR or ' + 'DOS4, depending on the users choice. The user specific' + ' vegetation index will be computed based on the TOAR ' + 'or DOS4 data. The result of the computation ' + 'is available as gzipped Geotiff file. In addition, ' 'the univariate statistic will be computed ' - 'as well as a preview image including a legend. Minimum required user role: user.', + 'as well as a preview image including a legend. ' + 'Minimum required user role: user.', 'parameters': [ { 'name': 'landsat_id', @@ -732,7 +819,8 @@ def __init__(self): }, { 'name': 'processing_method', - 'description': 'The method that should be used to compute the vegetation index', + 'description': 'The method that should be used to compute the ' + 'vegetation index', 'required': True, 'in': 'path', 'type': 'string', @@ -744,25 +832,29 @@ def __init__(self): 'responses': { '200': { 'description': 'This response includes all created resources ' - 'as URL as well as the processing log and other metadata.', + 'as URL as well as the processing log and other' + ' metadata.', 'schema': LandsatNDVIResponseModel }, '400': { - 'description': 'The error message and a detailed log why NDVI processing of ' + 'description': 'The error message and a detailed log why NDVI' + ' processing of ' 'a Landsat scene did not succeeded', 'schema': ProcessingErrorResponseModel } } }) def post(self, landsat_id, atcor_method, processing_method): - """Vegetation index computation from an atmospherically corrected Landsat scene. + """Vegetation index computation from an atmospherically corrected + Landsat scene. This method will download a single Landsat scene with all bands, - create a temporary GRASS location and imports the data into it. 
Then it will - apply a TOAR or DOS4/1 atmospheric correction, depending on the users choice. - The imported scenes are then processed via i.vi. The result is analyzed with r.univar - and rendered via d.rast and d.legend. The preview image and the resulting ndvi raster map - are stored in the download location. + create a temporary GRASS location and imports the data into it. Then + it will apply a TOAR or DOS4/1 atmospheric correction, depending on + the users choice. + The imported scenes are then processed via i.vi. The result is analyzed + with r.univar and rendered via d.rast and d.legend. The preview image + and the resulting ndvi raster map are stored in the download location. As download location are available: - local node storage - NFS/GlusterFS storage @@ -772,21 +864,25 @@ def post(self, landsat_id, atcor_method, processing_method): """ supported_sensors = ["LT04", "LT05", "LE07", "LC08"] supported_atcor = ["TOAR", "DOS1", "DOS4"] - supported_methods = ["NDVI", "ARVI", "DVI", "EVI", "EVI2", "GVI", "GARI", - "GEMI", "IPVI", "PVI", "SR", "VARI", "WDVI"] + supported_methods = ["NDVI", "ARVI", "DVI", "EVI", "EVI2", "GVI", + "GARI", "GEMI", "IPVI", "PVI", "SR", "VARI", + "WDVI"] sensor_id = extract_sensor_id_from_scene_id(landsat_id) if sensor_id not in supported_sensors: - return self.get_error_response(message="Wrong scene name. " - "Available sensors are: %s" % ",".join(supported_sensors)) + return self.get_error_response( + message="Wrong scene name. " + "Available sensors are: %s" % ",".join(supported_sensors)) if atcor_method not in supported_atcor: - return self.get_error_response(message="Wrong atmospheric correction name. " - "Available atmospheric corrections are: %s" % ",".join( - supported_atcor)) + return self.get_error_response( + message="Wrong atmospheric correction name. 
" + "Available atmospheric corrections are: %s" % ",".join( + supported_atcor)) if processing_method not in supported_methods: - return self.get_error_response(message="Wrong processing method name. " - "Available methods are: %s" % ",".join(supported_methods)) + return self.get_error_response( + message="Wrong processing method name. " + "Available methods are: %s" % ",".join(supported_methods)) # Preprocess the post call rdc = self.preprocess(has_json=False, location_name="Landsat") @@ -814,29 +910,39 @@ def __init__(self, rdc): Setup the variables of this class Args: - rdc (ResourceDataContainer): The data container that contains all required variables for processing + rdc (ResourceDataContainer): The data container that contains all + required variables for processing """ EphemeralProcessingWithExport.__init__(self, rdc) - self.landsat_scene_id, self.atcor_method, self.processing_method = self.rdc.user_data - self.landsat_sensor_id = extract_sensor_id_from_scene_id(self.landsat_scene_id) + self.landsat_scene_id, self.atcor_method, self.processing_method = \ + self.rdc.user_data + self.landsat_sensor_id = extract_sensor_id_from_scene_id( + self.landsat_scene_id) self.landsat_band_file_list = [] - self.user_download_cache_path = os.path.join(self.config.DOWNLOAD_CACHE, self.user_id) - self.raster_result_list = [] # The raster layer names which must be exported, stats computed + self.user_download_cache_path = os.path.join( + self.config.DOWNLOAD_CACHE, self.user_id) + # The raster layer names which must be exported, stats computed # and preview image created - self.module_results = [] # A list of r.univar output classes for each vegetation index - self.response_model_class = LandsatNDVIResponseModel # The class that is used to create the response + self.raster_result_list = [] + # A list of r.univar output classes for each vegetation index + self.module_results = [] + # The class that is used to create the response + self.response_model_class = 
LandsatNDVIResponseModel def _create_temp_database(self, mapsets=[]): - """Create a temporary gis database and location with a PERMANENT mapset for processing + """Create a temporary gis database and location with a PERMANENT mapset + for processing Raises: This function raises AsyncProcessError in case of an error. """ if not self.landsat_band_file_list: - raise AsyncProcessError("Unable to create a temporary GIS database, no data is available") + raise AsyncProcessError( + "Unable to create a temporary GIS database, no data is " + "available") try: geofile = self.landsat_band_file_list[0] @@ -851,9 +957,11 @@ def _create_temp_database(self, mapsets=[]): executable_params.append("-e") executable_params.append("-c") executable_params.append(geofile) - executable_params.append(os.path.join(self.temp_grass_data_base, self.location_name)) + executable_params.append(os.path.join( + self.temp_grass_data_base, self.location_name)) - self.message_logger.info("%s %s"%(self.config.GRASS_GIS_START_SCRIPT, executable_params)) + self.message_logger.info( + f"{self.config.GRASS_GIS_START_SCRIPT} {executable_params}") self._update_num_of_steps(1) @@ -861,13 +969,22 @@ def _create_temp_database(self, mapsets=[]): executable="python3", executable_params=executable_params) - # Create the GRASS location, this will create the location adn mapset paths + # Create the GRASS location, this will create the location and + # mapset paths self._run_process(p) except Exception as e: - raise AsyncProcessError("Unable to create a temporary GIS database and location at <%s>" - ", Exception: %s" % (os.path.join(self.temp_grass_data_base, - self.location_name, "PERMANENT"), - str(e))) + raise AsyncProcessError( + "Unable to create a temporary GIS database and location at " + "<%s>, Exception: %s" + % ( + os.path.join( + self.temp_grass_data_base, + self.location_name, + "PERMANENT", + ), + str(e), + ) + ) def _run_r_univar_command(self, raster_name): """Compute the univariate statistics for a 
raster layer @@ -877,7 +994,8 @@ def _run_r_univar_command(self, raster_name): raster_name: """ - result_file = tempfile.mktemp(suffix=".univar", dir=self.temp_file_path) + result_file = tempfile.mktemp( + suffix=".univar", dir=self.temp_file_path) univar_command = dict() univar_command["1"] = {"module": "r.univar", "inputs": {"map": raster_name}, @@ -903,7 +1021,8 @@ def _render_preview_image(self, raster_name): process chain entry to setup the extent from the options. Args: - options: The parser options that contain n, s, e and w entries for region settings + options: The parser options that contain n, s, e and w entries for + region settings result_file: The resulting PNG file name Returns: @@ -950,7 +1069,8 @@ def _render_preview_image(self, raster_name): self.resource_url_list.append(resource_url) def _create_output_resources(self, raster_result_list): - """Create the output resources from the raster layer that are the result of the processing + """Create the output resources from the raster layer that are the + result of the processing The following resources will be computed @@ -993,21 +1113,24 @@ def _execute(self): # Create and check the resource directory self.storage_interface.setup() - process_lib = LandsatProcessing(config=self.config, - temp_file_path=self.temp_file_path, - scene_id=self.landsat_scene_id, - download_cache=self.user_download_cache_path, - message_logger=self.message_logger, - send_resource_update=self._send_resource_update) + process_lib = LandsatProcessing( + config=self.config, + temp_file_path=self.temp_file_path, + scene_id=self.landsat_scene_id, + download_cache=self.user_download_cache_path, + message_logger=self.message_logger, + send_resource_update=self._send_resource_update) # Generate the download, import and processing command lists download_pl, file_infos = process_lib.get_download_process_list() self._update_num_of_steps(len(download_pl)) import_pl = process_lib.get_import_process_list() 
self._update_num_of_steps(len(import_pl)) - toar_pl = process_lib.get_i_landsat_toar_process_list(self.atcor_method) + toar_pl = process_lib.get_i_landsat_toar_process_list( + self.atcor_method) self._update_num_of_steps(len(toar_pl)) - ivi_pl = process_lib.get_i_vi_process_list(atcor_method=self.atcor_method, - processing_method=self.processing_method) + ivi_pl = process_lib.get_i_vi_process_list( + atcor_method=self.atcor_method, + processing_method=self.processing_method) self._update_num_of_steps(len(ivi_pl)) # Download all bands from the scene @@ -1015,13 +1138,15 @@ def _execute(self): self._execute_process_list(download_pl) self.landsat_band_file_list = process_lib.file_list - self._create_temporary_grass_environment(source_mapset_name="PERMANENT") + self._create_temporary_grass_environment( + source_mapset_name="PERMANENT") # Run the import, TOAR and i.vi self._execute_process_list(import_pl) self._execute_process_list(toar_pl) self._execute_process_list(ivi_pl) - # The ndvi result is an internal variable of the landsat process library + # The ndvi result is an internal variable of the landsat process + # library self.raster_result_list.append(process_lib.ndvi_name) # Create the output resources: stats, preview and geotiff diff --git a/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py b/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py index 9208475..69aace1 100644 --- a/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py +++ b/src/actinia_satellite_plugin/ephemeral_sentinel2_ndvi_processor.py @@ -5,9 +5,10 @@ from copy import deepcopy from flask import jsonify, make_response from flask_restful_swagger_2 import swagger -from actinia_core.processing.actinia_processing.ephemeral.ephemeral_processing_with_export import ( - EphemeralProcessingWithExport, -) +from actinia_core.processing.actinia_processing.ephemeral\ + .ephemeral_processing_with_export import ( + EphemeralProcessingWithExport, + ) from 
actinia_core.rest.base.resource_base import ResourceBase from actinia_core.core.common.google_satellite_bigquery_interface import ( GoogleSatelliteBigQueryInterface, @@ -37,7 +38,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): """The response of the Sentinel2A vegetation index computation - It is used as schema to define the *process_result* in a ProcessingResponseModel derivative. + It is used as schema to define the *process_result* in a + ProcessingResponseModel derivative. """ type = "object" @@ -52,8 +54,11 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "api_info": { "endpoint": "asyncephemeralsentinel2processingresource", "method": "POST", - "path": f"{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", - "request_url": f"http://localhost:8080{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "path": f"{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20161206" + "T030112_N0204_R032_T50RKR_20161206T030749", + "request_url": f"http://localhost:8080{URL_PREFIX}/sentinel2_" + "process/ndvi/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_" + "20161206T030749", }, "datetime": "2018-05-30 12:29:11.800608", "http_code": 200, @@ -66,7 +71,9 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "module": "r.univar", "outputs": { "output": { - "name": "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/tmp7il3n0jk.univar" + "name": "/actinia/workspace/temp_db/gisdbase_" + "103a050c380e4f50b36efd3f77bd1419/.tmp/" + "tmp7il3n0jk.univar" } }, } @@ -91,7 +98,10 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "-t5", "-c", "-q", - "https://storage.googleapis.com/gcp-public-data-sentinel-2/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_20161206T030749/IMG_DATA/T50RKR_20161206T030112_B08.jp2", + "https://storage.googleapis.com/gcp-public-data-sentinel-2" + 
"/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_" + "T50RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_" + "20161206T030749/IMG_DATA/T50RKR_20161206T030112_B08.jp2", ], "return_code": 0, "run_time": 49.85953092575073, @@ -104,7 +114,10 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "-t5", "-c", "-q", - "https://storage.googleapis.com/gcp-public-data-sentinel-2/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_20161206T030749/IMG_DATA/T50RKR_20161206T030112_B04.jp2", + "https://storage.googleapis.com/gcp-public-data-sentinel-2" + "/tiles/50/R/KR/S2A_MSIL1C_20161206T030112_N0204_R032_T50" + "RKR_20161206T030749.SAFE/GRANULE/L1C_T50RKR_A007608_2016" + "1206T030749/IMG_DATA/T50RKR_20161206T030112_B04.jp2", ], "return_code": 0, "run_time": 38.676433801651, @@ -114,8 +127,11 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36ef" + "d3f77bd1419/.tmp/S2A_MSIL1C_20161206T030112_N0204_R032_" + "T50RKR_20161206T030749.gml", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_" + "20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", ], "return_code": 0, "run_time": 0.05118393898010254, @@ -125,8 +141,10 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/T50RKR_20161206T030112_B08.jp2", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36e" + 
"fd3f77bd1419/.tmp/T50RKR_20161206T030112_B08.jp2", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_" + "20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", ], "return_code": 0, "run_time": 0.35857558250427246, @@ -136,8 +154,10 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "/bin/mv", "parameter": [ - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/T50RKR_20161206T030112_B04.jp2", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36e" + "fd3f77bd1419/.tmp/T50RKR_20161206T030112_B04.jp2", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_" + "20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", ], "return_code": 0, "run_time": 0.15271401405334473, @@ -150,13 +170,17 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "/usr/local/bin/grass", "-e", "-c", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", - "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/sentinel2", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_" + "20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + "/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36ef" + "d3f77bd1419/sentinel2", ], "return_code": 0, "run_time": 0.36118006706237793, "stderr": [ - "Default locale settings are missing. GRASS running with C locale.WARNING: Searched for a web browser, but none found", + "Default locale settings are missing. 
GRASS running with C" + " locale.WARNING: Searched for a web browser, but none " + "found", "Creating new GRASS GIS location/mapset...", "Cleaning up temporary files...", "", @@ -166,14 +190,18 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "v.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749.gml", - "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "input=/actinia/workspace/download_cache/superadmin/S2A_" + "MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T0307" + "49.gml", + "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749", "--q", ], "return_code": 0, "run_time": 0.3551313877105713, "stderr": [ - "WARNING: Projection of dataset does not appear to match current location.", + "WARNING: Projection of dataset does not appear to match " + "current location.", "", "Location PROJ_INFO is:", "name: WGS 84 / UTM zone 50N", @@ -192,7 +220,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "", "ERROR: proj", "", - "WARNING: Width for column fid set to 255 (was not specified by OGR), some strings may be truncated!", + "WARNING: Width for column fid set to 255 (was not " + "specified by OGR), some strings may be truncated!", "", ], "stdout": "", @@ -200,7 +229,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "v.timestamp", "parameter": [ - "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_" + "20161206T030749", "date=06 dec 2016 03:07:49", ], "return_code": 0, @@ -220,13 +250,16 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "vrt", "-projwin_srs", "EPSG:4326", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", - 
"/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08.vrt", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_" + "20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_2" + "0161206T030112_N0204_R032_T50RKR_20161206T030749_B08.vrt", ], "return_code": 0, "run_time": 0.05114293098449707, "stderr": [ - "Warning 1: Computed -srcwin 5 -225 10971 11419 falls partially outside raster extent. Going on however.", + "Warning 1: Computed -srcwin 5 -225 10971 11419 falls " + "partially outside raster extent. Going on however.", "", ], "stdout": "Input file size is 10980, 10980\n", @@ -234,8 +267,11 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08.vrt", - "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", + "input=/actinia/workspace/download_cache/superadmin/S2A_" + "MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_" + "B08.vrt", + "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749_B08_uncropped", "--q", ], "return_code": 0, @@ -246,19 +282,24 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "g.region", "parameter": [ - "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749_B08_uncropped", + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749", "-g", ], "return_code": 0, "run_time": 0.10460591316223145, "stderr": [""], - "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n", + "stdout": 
"projection=1\nzone=50\nn=3100030\ns=2990100\nw=" + "199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983" + "\ncells=120736119\n", }, { "executable": "r.mask", "parameter": [ - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749" ], "return_code": 0, "run_time": 7.36047887802124, @@ -266,12 +307,18 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "Reading areas...", "0..100", "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42.." + "45..48..51..54..57..60..63..66..69..72..75..78..81..84.." + "87..90..93..96..99..100", "Reading areas...", "0..100", "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", - "All subsequent raster operations will be limited to the MASK area. Removing or renaming raster map named 'MASK' will restore raster operations to normal.", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42.." + "45..48..51..54..57..60..63..66..69..72..75..78..81..84.." + "87..90..93..96..99..100", + "All subsequent raster operations will be limited to the " + "MASK area. 
Removing or renaming raster map named 'MASK' " + "will restore raster operations to normal.", "", ], "stdout": "", @@ -279,7 +326,9 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "r.mapcalc", "parameter": [ - "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08 = float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped)" + "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_" + "20161206T030749_B08 = float(S2A_MSIL1C_20161206T030112_" + "N0204_R032_T50RKR_20161206T030749_B08_uncropped)" ], "return_code": 0, "run_time": 10.695591926574707, @@ -289,7 +338,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "r.timestamp", "parameter": [ - "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08", + "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_201612" + "06T030749_B08", "date=06 dec 2016 03:07:49", ], "return_code": 0, @@ -301,13 +351,15 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "executable": "g.remove", "parameter": [ "type=raster", - "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08_uncropped", + "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_201612" + "06T030749_B08_uncropped", "-f", ], "return_code": 0, "run_time": 0.050362348556518555, "stderr": [ - "Removing raster ", + "Removing raster ", "", ], "stdout": "", @@ -332,13 +384,16 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "vrt", "-projwin_srs", "EPSG:4326", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", - "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04.vrt", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_" + "20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", + "/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_2" + "0161206T030112_N0204_R032_T50RKR_20161206T030749_B04.vrt", 
], "return_code": 0, "run_time": 0.05096769332885742, "stderr": [ - "Warning 1: Computed -srcwin 5 -225 10971 11419 falls partially outside raster extent. Going on however.", + "Warning 1: Computed -srcwin 5 -225 10971 11419 falls " + "partially outside raster extent. Going on however.", "", ], "stdout": "Input file size is 10980, 10980\n", @@ -346,8 +401,11 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "r.import", "parameter": [ - "input=/actinia/workspace/download_cache/superadmin/S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04.vrt", - "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", + "input=/actinia/workspace/download_cache/superadmin/S2A_" + "MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" + "_B04.vrt", + "output=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749_B04_uncropped", "--q", ], "return_code": 0, @@ -358,19 +416,24 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "g.region", "parameter": [ - "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749", + "align=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749_B04_uncropped", + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749", "-g", ], "return_code": 0, "run_time": 0.0505826473236084, "stderr": [""], - "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n", + "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=" + "199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=" + "10983\ncells=120736119\n", }, { "executable": "r.mask", "parameter": [ - "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749" + "vector=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749" ], "return_code": 0, "run_time": 6.779608249664307, @@ 
-378,12 +441,18 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "Reading areas...", "0..100", "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42.." + "45..48..51..54..57..60..63..66..69..72..75..78..81..84.." + "87..90..93..96..99..100", "Reading areas...", "0..100", "Writing raster map...", - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", - "All subsequent raster operations will be limited to the MASK area. Removing or renaming raster map named 'MASK' will restore raster operations to normal.", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42.." + "45..48..51..54..57..60..63..66..69..72..75..78..81..84.." + "87..90..93..96..99..100", + "All subsequent raster operations will be limited to the " + "MASK area. 
Removing or renaming raster map named 'MASK' " + "will restore raster operations to normal.", "", ], "stdout": "", @@ -391,7 +460,9 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "r.mapcalc", "parameter": [ - "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04 = float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped)" + "expression=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_" + "20161206T030749_B04 = float(S2A_MSIL1C_20161206T030112_" + "N0204_R032_T50RKR_20161206T030749_B04_uncropped)" ], "return_code": 0, "run_time": 10.141529321670532, @@ -401,7 +472,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "r.timestamp", "parameter": [ - "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04", + "map=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749_B04", "date=06 dec 2016 03:07:49", ], "return_code": 0, @@ -413,13 +485,15 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "executable": "g.remove", "parameter": [ "type=raster", - "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04_uncropped", + "name=S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_2016" + "1206T030749_B04_uncropped", "-f", ], "return_code": 0, "run_time": 0.05098080635070801, "stderr": [ - "Removing raster ", + "Removing raster ", "", ], "stdout": "", @@ -435,7 +509,12 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): { "executable": "r.mapcalc", "parameter": [ - "expression=ndvi = (float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08) - float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04))/(float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B08) + float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04))" + "expression=ndvi = (float(S2A_MSIL1C_20161206T030112_" + "N0204_R032_T50RKR_20161206T030749_B08) - float(S2A_MSIL1C" + 
"_20161206T030112_N0204_R032_T50RKR_20161206T030749_B04))" + "/(float(S2A_MSIL1C_20161206T030112_N0204_R032_T50RKR_" + "20161206T030749_B08) + float(S2A_MSIL1C_20161206T030112" + "_N0204_R032_T50RKR_20161206T030749_B04))" ], "return_code": 0, "run_time": 20.28681755065918, @@ -457,7 +536,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "executable": "r.univar", "parameter": [ "map=ndvi", - "output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/tmp7il3n0jk.univar", + "output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f" + "50b36efd3f77bd1419/.tmp/tmp7il3n0jk.univar", "-g", ], "return_code": 0, @@ -471,7 +551,9 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "return_code": 0, "run_time": 2.0198700428009033, "stderr": [ - "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42..45..48..51..54..57..60..63..66..69..72..75..78..81..84..87..90..93..96..99..100", + "0..3..6..9..12..15..18..21..24..27..30..33..36..39..42.." + "45..48..51..54..57..60..63..66..69..72..75..78..81..84.." 
+ "87..90..93..96..99..100", "", ], "stdout": "", @@ -498,7 +580,9 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "return_code": 0, "run_time": 0.051720619201660156, "stderr": [""], - "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=199960\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\ncells=120736119\n", + "stdout": "projection=1\nzone=50\nn=3100030\ns=2990100\nw=1999" + "60\ne=309790\nnsres=10\newres=10\nrows=10993\ncols=10983\n" + "cells=120736119\n", }, { "executable": "r.out.gdal", @@ -507,19 +591,30 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "input=ndvi", "format=GTiff", "createopt=COMPRESS=LZW", - "output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f50b36efd3f77bd1419/.tmp/ndvi.tiff", + "output=/actinia/workspace/temp_db/gisdbase_103a050c380e4f" + "50b36efd3f77bd1419/.tmp/ndvi.tiff", ], "return_code": 0, "run_time": 12.550397157669067, "stderr": [ "Checking GDAL data type and nodata value...", - "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", + "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44.." + "47..50..53..56..59..62..65..68..71..74..77..80..83..86.." + "89..92..95..98..100", "Using GDAL data type ", - "Input raster map contains cells with NULL-value (no-data). The value -nan will be used to represent no-data values in the input map. You can specify a nodata value with the nodata option.", + "Input raster map contains cells with NULL-value (no-data)" + ". The value -nan will be used to represent no-data values" + " in the input map. You can specify a nodata value with " + "the nodata option.", "Exporting raster data to GTiff format...", - "ERROR 6: SetColorTable() only supported for Byte or UInt16 bands in TIFF format.", - "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44..47..50..53..56..59..62..65..68..71..74..77..80..83..86..89..92..95..98..100", - "r.out.gdal complete. 
File created.", + "ERROR 6: SetColorTable() only supported for Byte or " + "UInt16 bands in TIFF format.", + "2..5..8..11..14..17..20..23..26..29..32..35..38..41..44." + ".47..50..53..56..59..62..65..68..71..74..77..80..83..86." + ".89..92..95..98..100", + "r.out.gdal complete. File " + "created.", "", ], "stdout": "", @@ -549,10 +644,14 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "timestamp": 1527683351.8002071, "urls": { "resources": [ - f"http://localhost:8080{URL_PREFIX}/resource/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/tmpsaeegg0q.png", - f"http://localhost:8080{URL_PREFIX}/resource/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/ndvi.tiff", + f"http://localhost:8080{URL_PREFIX}/resource/superadmin/" + "resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/" + "tmpsaeegg0q.png", + f"http://localhost:8080{URL_PREFIX}/resource/superadmin/" + "resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce/ndvi.tiff", ], - "status": f"http://localhost:8080{URL_PREFIX}/resources/superadmin/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce", + "status": f"http://localhost:8080{URL_PREFIX}/resources/superadmin" + "/resource_id-6b849585-576f-40b5-a514-34a7cf1f97ce", }, "user_id": "superadmin", } @@ -562,10 +661,12 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): SWAGGER_DOC = { "tags": ["Satellite Image Algorithms"], "description": "NDVI computation of an arbitrary Sentinel-2 scene." - "The processing is as follows: A user specific Sentinel-2 scene (Bands 04 and 08)" + "The processing is as follows: A user specific Sentinel-2 scene (Bands " + "04 and 08)" "will be download and imported into an ephemeral database.. " "The NDVI will be computed via r.mapcalc. " - "The result of the computation is available as gzipped geotiff file. In addition, " + "The result of the computation is available as gzipped geotiff file. 
" + "In addition, " "the univariate statistic will be computed " "as well as a preview image including a legend and scale." " Minimum required user role: user.", @@ -576,7 +677,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "required": True, "in": "path", "type": "string", - "default": "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + "default": "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_2017021" + "2T104138", } ], "responses": { @@ -586,7 +688,8 @@ class SentinelNDVIResponseModel(ProcessingResponseModel): "schema": SentinelNDVIResponseModel, }, "400": { - "description": "The error message and a detailed log why NDVI processing of " + "description": "The error message and a detailed log why NDVI " + "processing of " "a sentinel2 scene did not succeeded", "schema": ProcessingErrorResponseModel, }, @@ -611,8 +714,8 @@ def extract_sensor_id_from_scene_id(scene_id): class AsyncEphemeralSentinel2ProcessingResource(ResourceBase): """ This class represents a resource that runs asynchronous processing tasks - to download and process Sentinel-2 satellite images in an ephemeral GRASS location - and stores the result in a network storage like GlusterFS or NFS + to download and process Sentinel-2 satellite images in an ephemeral GRASS + location and stores the result in a network storage like GlusterFS or NFS """ decorators = [log_api_call, auth.login_required] @@ -636,8 +739,8 @@ def post(self, product_id): class AsyncEphemeralSentinel2ProcessingResourceGCS(ResourceBase): """ This class represents a resource that runs asynchronous processing tasks - to download and process Sentinel-2 satellite images in an ephemeral GRASS location - and uploads the result to a google cloud storage bucket. + to download and process Sentinel-2 satellite images in an ephemeral GRASS + location and uploads the result to a google cloud storage bucket. 
""" decorators = [log_api_call, auth.login_required] @@ -648,7 +751,10 @@ def __init__(self): @swagger.doc(deepcopy(SWAGGER_DOC)) def post(self, product_id): - """NDVI computation of an arbitrary Sentinel-2 scene. The results are stored in the Google Cloud Storage.""" + """ + NDVI computation of an arbitrary Sentinel-2 scene. The results are + stored in the Google Cloud Storage. + """ rdc = self.preprocess(has_json=False, location_name="sentinel2") rdc.set_user_data(product_id) rdc.set_storage_model_to_gcs() @@ -671,7 +777,8 @@ def __init__(self, rdc): Setup the variables of this class Args: - rdc (ResourceDataContainer): The data container that contains all required variables for processing + rdc (ResourceDataContainer): The data container that contains all + required variables for processing """ EphemeralProcessingWithExport.__init__(self, rdc) @@ -684,26 +791,32 @@ def __init__(self, rdc): self.user_download_cache_path = os.path.join( self.config.DOWNLOAD_CACHE, self.user_id ) + # The raster layer names which must be exported, stats computed + # and preview image created self.raster_result_list = ( [] - ) # The raster layer names which must be exported, stats computed - # and preview image created + ) + # A list of r.univar output classes for each vegetation index self.module_results = ( [] - ) # A list of r.univar output classes for each vegetation index - self.response_model_class = SentinelNDVIResponseModel # The class that is used to create the response + ) + # The class that is used to create the response + self.response_model_class = SentinelNDVIResponseModel + # The Sentinel-2 bands that are required for NDVI processing self.required_bands = [ "B08", "B04", - ] # The Sentinel-2 bands that are required for NDVI processing + ] self.query_result = None def _prepare_sentinel2_download(self): - """Check the download cache if the file already exists, to avoid redundant downloads. - The downloaded files will be stored in a temporary directory. 
After the download of all files - completes, the downloaded files will be moved to the download cache. This avoids broken - files in case a download was interrupted or stopped by termination. - + """ + Check the download cache if the file already exists, to avoid + redundant downloads. + The downloaded files will be stored in a temporary directory. + After the download of all files completes, the downloaded files will + be moved to the download cache. This avoids broken files in case a + download was interrupted or stopped by termination. """ # Create the download cache directory if it does not exists if os.path.exists(self.config.DOWNLOAD_CACHE): @@ -711,7 +824,8 @@ def _prepare_sentinel2_download(self): else: os.mkdir(self.config.DOWNLOAD_CACHE) - # Create the user specific download cache directory to put the downloaded files into it + # Create the user specific download cache directory to put the + # downloaded files into it if os.path.exists(self.user_download_cache_path): pass else: @@ -744,7 +858,9 @@ def _prepare_sentinel2_download(self): ) def _create_temp_database(self, mapsets=[]): - """Create a temporary gis database and location with a PERMANENT mapset for processing + """ + Create a temporary gis database and location with a PERMANENT mapset + for processing Raises: This function raises AsyncProcessError in case of an error. 
@@ -752,7 +868,8 @@ def _create_temp_database(self, mapsets=[]): """ if not self.sentinel2_band_file_list: raise AsyncProcessError( - "Unable to create a temporary GIS database, no data is available" + "Unable to create a temporary GIS database, no data is " + "available" ) try: @@ -786,12 +903,13 @@ def _create_temp_database(self, mapsets=[]): executable_params=executable_params, ) - # Create the GRASS location, this will create the location and mapset paths + # Create the GRASS location, this will create the location and + # mapset paths self._run_process(p) except Exception as e: raise AsyncProcessError( - "Unable to create a temporary GIS database and location at <%s>" - ", Exception: %s" + "Unable to create a temporary GIS database and location at " + "<%s>, Exception: %s" % ( os.path.join( self.temp_grass_data_base, @@ -841,7 +959,8 @@ def _render_preview_image(self, raster_name): process chain entry to setup the extent from the options. Args: - options: The parser options that contain n, s, e and w entries for region settings + options: The parser options that contain n, s, e and w entries for + region settings result_file: The resulting PNG file name Returns: @@ -899,7 +1018,9 @@ def _render_preview_image(self, raster_name): self.resource_url_list.append(resource_url) def _create_output_resources(self, raster_result_list): - """Create the output resources from the raster layer that are the result of the processing + """ + Create the output resources from the raster layer that are the + result of the processing The following resources will be computed @@ -993,7 +1114,9 @@ def _execute(self): self._create_output_resources(self.raster_result_list) def _final_cleanup(self): - """Overwrite this function in subclasses to perform the final cleanup""" + """ + Overwrite this function in subclasses to perform the final cleanup + """ # Clean up and remove the temporary gisdbase self._cleanup() # Remove resource directories diff --git 
a/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py b/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py index c57933d..96e3390 100644 --- a/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py +++ b/src/actinia_satellite_plugin/persistent_landsat_timeseries_creator.py @@ -13,9 +13,8 @@ ProcessingResponseModel, ProcessingErrorResponseModel, ) -from actinia_core.processing.actinia_processing.ephemeral.persistent_processing import ( - PersistentProcessing, -) +from actinia_core.processing.actinia_processing.ephemeral\ + .persistent_processing import PersistentProcessing from actinia_core.rest.base.resource_base import ResourceBase from actinia_core.core.common.redis_interface import enqueue_job from actinia_core.core.common.google_satellite_bigquery_interface import ( @@ -37,7 +36,10 @@ class LandsatSceneListModel(Schema): - """This schema defines the JSON input of the Landsat time series creator resource""" + """ + This schema defines the JSON input of the Landsat time series creator + resource + """ type = "object" properties = { @@ -48,16 +50,19 @@ class LandsatSceneListModel(Schema): }, "strds": { "type": "string", - "description": "The basename of the new space-time raster datasets (strds). " + "description": "The basename of the new space-time raster datasets" + " (strds). " "One for each band will be created and the basename will be " "extended by a band specific suffix.", }, "scene_ids": { "type": "array", "items": {"type": "string"}, - "description": "A list of Landsat scene names that should be downloaded and imported. Only scenes from a " + "description": "A list of Landsat scene names that should be" + " downloaded and imported. Only scenes from a " "specific satelleite can be imported and represented as strds. " - "Mixing of different satellites is not permitted. All bands will be imported " + "Mixing of different satellites is not permitted. 
All bands will " + "be imported " "and atmospherically corrected", }, } @@ -77,19 +82,22 @@ class LandsatSceneListModel(Schema): SCHEMA_DOC = { "tags": ["Satellite Image Algorithms"], - "description": "Download and import Landsat scenes into a new mapset and create a space-time raster dataset " + "description": "Download and import Landsat scenes into a new mapset and " + "create a space-time raster dataset " "for each imported band. " "The resulting data will be located in a persistent user database. " "The location name is part of the path and must exist. The mapset will " - "be created while importing and should not already exist in the location. The names of the" + "be created while importing and should not already exist in the location. " + "The names of the" "Landsat scenes that should be downloaded must be specified " - "in the HTTP body as application/json content. In addition, the basename of the " - "STRDS that should manage the Landsat scenes " - "must be provided in the application/json content. For each band a separate " - "strds will be cerated and the STRDS base name will be extended with the band number." - "This call is performed asynchronously. The provided resource URL must be pulled " - "to receive the status of the import. The data is available in the provided " - "location/mapset, after the download and import finished. Minimum required user role: user.", + "in the HTTP body as application/json content. In addition, the basename" + " of the STRDS that should manage the Landsat scenes must" + " be provided in the application/json content. For each band a separate " + "strds will be cerated and the STRDS base name will be extended with the " + "band number. This call is performed asynchronously. The provided resource" + " URL must be pulled to receive the status of the import. The data is " + "available in the provided location/mapset, after the download and import" + " finished. 
Minimum required user role: user.", "consumes": ["application/json"], "parameters": [ { @@ -101,14 +109,16 @@ class LandsatSceneListModel(Schema): }, { "name": "mapset_name", - "description": "The name of the mapset to import the Landsat scenes in", + "description": "The name of the mapset to import the Landsat " + "scenes in", "required": True, "in": "path", "type": "string", }, { "name": "tiles", - "description": "The list of Landsat scenes, the band names and the target STRDS names", + "description": "The list of Landsat scenes, the band names and " + "the target STRDS names", "required": True, "in": "body", "schema": LandsatSceneListModel, @@ -134,13 +144,16 @@ def __init__(self): @swagger.doc(deepcopy(SCHEMA_DOC)) def post(self, location_name, mapset_name): - """Download and import Landsat scenes into a new mapset and create a space time dataset for each imported band. + """ + Download and import Landsat scenes into a new mapset and create a + space time dataset for each imported band. 
Args: location_name (str): The name of the location - target_mapset_name (str): The name of the mapset that should be created + target_mapset_name (str): The name of the mapset that should be + created - Process arguments must be provided as JSON document in the POST request:: + Process arguments must be provided as JSON document in the POST request {"strds":"Landsat_4_1983_09_01_01_30_00", "atcor_method": "TOAR", @@ -158,11 +171,13 @@ def post(self, location_name, mapset_name): { "HTTP code": 200, "Messages": "Resource accepted", - "Resource id": "resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31", + "Resource id": "resource_id-985164c9-1db9-49cf-b2c4-" + "3e8e48500e31", "Status": "accepted", "URLs": { "Resources": [], - "Status": "http://104.155.60.87/status/soeren/resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31" + "Status": "http://104.155.60.87/status/soeren/resource_id-" + "985164c9-1db9-49cf-b2c4-3e8e48500e31" }, "User id": "soeren" } @@ -187,18 +202,21 @@ def start_job(*args): class LandsatTimeSeriesCreator(PersistentProcessing): - """Create a space time raster dataset from all provided scene_ids for each Sentinel2A band + """ + Create a space time raster dataset from all provided scene_ids for each + Sentinel2A band in a new mapset. - The Sentiel2A scenes are downloaded , imported and pre-processed before they are registered - in the band specific space time datasets. + The Sentiel2A scenes are downloaded , imported and pre-processed before + they are registered in the band specific space time datasets. 
""" def __init__(self, rdc): """Constructor Args: - rdc (ResourceDataContainer): The data container that contains all required variables for processing + rdc (ResourceDataContainer): The data container that contains all + required variables for processing """ PersistentProcessing.__init__(self, rdc) @@ -218,9 +236,12 @@ def __init__(self, rdc): self.sensor_id = None def _prepare_download(self): - """Check the download cache if the file already exists, to avoid redundant downloads. - The downloaded files will be stored in a temporary directory. After the download of all files - completes, the downloaded files will be moved to the download cache. This avoids broken + """ + Check the download cache if the file already exists, to avoid redundant + downloads. + The downloaded files will be stored in a temporary directory. + After the download of all files completes, the downloaded files will + be moved to the download cache. This avoids broken files in case a download was interrupted or stopped by termination. """ @@ -230,7 +251,8 @@ def _prepare_download(self): else: os.mkdir(self.config.DOWNLOAD_CACHE) - # Create the user specific download cache directory to put the downloaded files into it + # Create the user specific download cache directory to put the + # downloaded files into it if os.path.exists(self.user_download_cache_path): pass else: @@ -245,14 +267,16 @@ def _prepare_download(self): sensor_ids = {} for scene_id in self.scene_ids: - # Check if the the list of scenes contains scenes from different satellites + # Check if the the list of scenes contains scenes from different + # satellites self.sensor_id = extract_sensor_id_from_scene_id(scene_id=scene_id) sensor_ids[self.sensor_id] = self.sensor_id if len(sensor_ids) > 2: raise AsyncProcessError( - "Different satellites are in the list of scenes to be imported. " - "Only scenes from a single satellite an be imported at once." + "Different satellites are in the list of scenes to be " + "imported. 
Only scenes from a single satellite an be " + "imported at once." ) # All bands are imported, except the MTL file @@ -278,8 +302,9 @@ def _prepare_download(self): ) def _import_scenes(self): - """Import all found Sentinel2 scenes with their bands and create the space time raster datasets - to register the maps in. + """ + Import all found Sentinel2 scenes with their bands and create the + space time raster datasets to register the maps in. Raises: AsyncProcessError: In case something went wrong @@ -324,7 +349,8 @@ def _import_scenes(self): self._execute_process_list(process_list=all_commands) # IMPORTANT: - # The registration must be performed in the temporary mapset with the same name as the target mapset, + # The registration must be performed in the temporary mapset with the + # same name as the target mapset, # since the temporal database will contain the mapset name. register_commands = {} @@ -359,7 +385,8 @@ def _import_scenes(self): # Use only the product ids that were found in the big query for scene_id in self.query_result: - # We need to create a time interval, otherwise the temporal algebra will not work :/ + # We need to create a time interval, otherwise the temporal + # algebra will not work :/ start_time = dtparser.parse( self.query_result[scene_id]["timestamp"].split(".")[0] ) @@ -415,9 +442,10 @@ def _execute(self): "Mapset <%s> already exists." 
% self.target_mapset_name ) - # Init GRASS environment and create the temporary mapset with the same name as the target mapset - # This is required to register the raster maps in the temporary directory, but use them in - # persistent directory + # Init GRASS environment and create the temporary mapset with the same + # name as the target mapset + # This is required to register the raster maps in the temporary + # directory, but use them in persistent directory # Create the temp database and link the # required mapsets into it @@ -432,7 +460,8 @@ def _execute(self): # Create the temporary mapset and switch into it self._create_temporary_mapset(temp_mapset_name=self.target_mapset_name) - # Setup the download cache and query the BigQuery database of google for scene_ids + # Setup the download cache and query the BigQuery database of google + # for scene_ids self._prepare_download() # Check if all product ids were found diff --git a/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py b/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py index 5fd7e84..d56b88e 100644 --- a/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py +++ b/src/actinia_satellite_plugin/persistent_sentinel2_timeseries_creator.py @@ -12,9 +12,8 @@ ProcessingResponseModel, ProcessingErrorResponseModel, ) -from actinia_core.processing.actinia_processing.ephemeral.persistent_processing import ( - PersistentProcessing, -) +from actinia_core.processing.actinia_processing.ephemeral\ + .persistent_processing import PersistentProcessing from actinia_core.rest.base.resource_base import ResourceBase from actinia_core.core.common.redis_interface import enqueue_job from actinia_core.core.common.google_satellite_bigquery_interface import ( @@ -33,26 +32,33 @@ class Sentinel2ASceneListModel(Schema): - """This schema defines the JSON input of the sentinel time series creator resource""" + """ + This schema defines the JSON input of the sentinel time 
series creator + resource + """ type = "object" properties = { "bands": { "type": "array", "items": {"type": "string"}, - "description": "A list of band names that should be downloaded and imported for each Sentinel-2 scene." - 'Available are the following band names: "B01", "B02", "B03", "B04", "B05", "B06", "B07",' - '"B08", "B8A", "B09" "B10", "B11", "B12"', + "description": "A list of band names that should be downloaded " + "and imported for each Sentinel-2 scene." + 'Available are the following band names: "B01", "B02", "B03", ' + '"B04", "B05", "B06", "B07", "B08", "B8A", "B09" "B10", "B11", ' + '"B12"', }, "strds": { "type": "array", "items": {"type": "string"}, - "description": "The names of the new space-time raster datasets, one for each band", + "description": "The names of the new space-time raster datasets," + " one for each band", }, "product_ids": { "type": "array", "items": {"type": "string"}, - "description": "A list of Sentinel-2 scene names that should be downloaded and imported", + "description": "A list of Sentinel-2 scene names that should be " + "downloaded and imported", }, } example = { @@ -70,38 +76,44 @@ class Sentinel2ASceneListModel(Schema): SCHEMA_DOC = { "tags": ["Satellite Image Algorithms"], - "description": "Download and import Sentinel2A scenes into a new mapset and create a space-time raster dataset " + "description": "Download and import Sentinel2A scenes into a new mapset " + "and create a space-time raster dataset " "for each imported band. " "The resulting data will be located in a persistent user database. " "The location name is part of the path and must exist. The mapset will " - "be created while importing and should not already exist in the location. The names of the" - "Sentinel-2 scenes and the band names that should be downloaded must be specified " - "in the HTTP body as application/json content. 
In addition, the names of the " - "STRDS that should manage the sentinel scenes " - "must be provided in the application/json content. For each band a separate " - "STRDS name must be provided." - "This call is performed asynchronously. The provided resource URL must be pulled " - "to receive the status of the import. The data is available in the provided " - "location/mapset, after the download and import finished. Minimum required user role: user.", + "be created while importing and should not already exist in the location. " + "The names of the Sentinel-2 scenes and the band names that should" + " be downloaded must be specified in the HTTP body as" + " application/json content. In addition, the names of the " + "STRDS that should manage the sentinel scenes must" + " be provided in the application/json content. For each band a separate " + "STRDS name must be provided. This call " + "is performed asynchronously. The provided resource URL must be pulled " + "to receive the status of the import. The data is available in the " + "provided location/mapset, after the download and import finished. 
" + "Minimum required user role: user.", "consumes": ["application/json"], "parameters": [ { "name": "location_name", - "description": "The location name to import the Sentinel2A scenes in", + "description": "The location name to import the Sentinel2A scenes " + "in", "required": True, "in": "path", "type": "string", }, { "name": "mapset_name", - "description": "The name of the mapset to import the Sentinel2A scenes in", + "description": "The name of the mapset to import the Sentinel2A " + "scenes in", "required": True, "in": "path", "type": "string", }, { "name": "tiles", - "description": "The list of Sentinel-2 scenes, the band names and the target STRDS names", + "description": "The list of Sentinel-2 scenes, the band names and " + "the target STRDS names", "required": True, "in": "body", "schema": Sentinel2ASceneListModel, @@ -127,19 +139,25 @@ def __init__(self): @swagger.doc(deepcopy(SCHEMA_DOC)) def post(self, location_name, mapset_name): - """Download and import Sentinel2A scenes into a new mapset and create a space-time raster dataset for each imported band. + """ + Download and import Sentinel2A scenes into a new mapset and create + a space-time raster dataset for each imported band. 
Args: location_name (str): The name of the location - target_mapset_name (str): The name of the mapset that should be created + target_mapset_name (str): The name of the mapset that should be + created - Process arguments must be provided as JSON document in the POST request:: + Process arguments must be provided as JSON document in the POST request {"bands":["B04","B08"], "strds":["Sentinel_B04", "Sentinel_b08"], - "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", - "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", - "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236"]} + "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_" + "20170212T104138", + "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_" + "20170227T095613", + "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_" + "20170202T104236"]} Returns: flask.Response: @@ -151,11 +169,13 @@ def post(self, location_name, mapset_name): { "HTTP code": 200, "Messages": "Resource accepted", - "Resource id": "resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31", + "Resource id": "resource_id-985164c9-1db9-49cf-b2c4-" + "3e8e48500e31", "Status": "accepted", "URLs": { "Resources": [], - "Status": "http://104.155.60.87/status/soeren/resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31" + "Status": "http://104.155.60.87/status/soeren/resource_id-" + "985164c9-1db9-49cf-b2c4-3e8e48500e31" }, "User id": "soeren" } @@ -180,18 +200,20 @@ def start_job(*args): class AsyncSentinel2TimeSeriesCreator(PersistentProcessing): - """Create a space time raster dataset from all provided product_ids for each Sentinel2A band - in a new mapset. + """ + Create a space time raster dataset from all provided product_ids for + each Sentinel2A band in a new mapset. - The Sentiel2A scenes are downloaded , imported and pre-processed before they are registered - in the band specific space time datasets. 
+ The Sentiel2A scenes are downloaded , imported and pre-processed before + they are registered in the band specific space time datasets. """ def __init__(self, rdc): """Constructor Args: - rdc (ResourceDataContainer): The data container that contains all required variables for processing + rdc (ResourceDataContainer): The data container that contains all + required variables for processing """ PersistentProcessing.__init__(self, rdc) @@ -209,10 +231,13 @@ def __init__(self, rdc): self.query_result = None def _prepare_sentinel2_download(self): - """Check the download cache if the file already exists, to avoid redundant downloads. - The downloaded files will be stored in a temporary directory. After the download of all files - completes, the downloaded files will be moved to the download cache. This avoids broken - files in case a download was interrupted or stopped by termination. + """ + Check the download cache if the file already exists, to avoid redundant + downloads. + The downloaded files will be stored in a temporary directory. + After the download of all files completes, the downloaded files will + be moved to the download cache. This avoids broken files in case a + download was interrupted or stopped by termination. """ # Create the download cache directory if it does not exists @@ -221,7 +246,8 @@ def _prepare_sentinel2_download(self): else: os.mkdir(self.config.DOWNLOAD_CACHE) - # Create the user specific download cache directory to put the downloaded files into it + # Create the user specific download cache directory to put the + # downloaded files into it if os.path.exists(self.user_download_cache_path): pass else: @@ -253,8 +279,9 @@ def _prepare_sentinel2_download(self): ) def _import_sentinel2_scenes(self): - """Import all found Sentinel2 scenes with their bands and create the space time raster datasets - to register the maps in. 
+ """ + Import all found Sentinel2 scenes with their bands and create the + space time raster datasets to register the maps in. Raises: AsyncProcessError: In case something went wrong @@ -295,7 +322,8 @@ def _import_sentinel2_scenes(self): self._execute_process_list(process_list=all_commands) # IMPORTANT: - # The registration must be performed in the temporary mapset with the same name as the target mapset, + # The registration must be performed in the temporary mapset with the + # same name as the target mapset, # since the temporal database will contain the mapset name. register_commands = {} @@ -331,7 +359,8 @@ def _import_sentinel2_scenes(self): # Use only the product ids that were found in the big query for product_id in self.query_result: - # We need to create a time interval, otherwise the temporal algebra will not work :/ + # We need to create a time interval, otherwise the temporal + # algebra will not work :/ start_time = dtparser.parse( self.query_result[product_id]["timestamp"].split(".")[0] ) @@ -401,9 +430,10 @@ def _execute(self): "Mapset <%s> already exists." 
% self.target_mapset_name ) - # Init GRASS environment and create the temporary mapset with the same name as the target mapset - # This is required to register the raster maps in the temporary directory, but use them in - # persistent directory + # Init GRASS environment and create the temporary mapset with the same + # name as the target mapset + # This is required to register the raster maps in the temporary + # directory, but use them in persistent directory # Create the temp database and link the # required mapsets into it @@ -418,7 +448,8 @@ def _execute(self): # Create the temporary mapset and switch into it self._create_temporary_mapset(temp_mapset_name=self.target_mapset_name) - # Setup the download cache and query the BigQuery database of google for product_ids + # Setup the download cache and query the BigQuery database of google + # for product_ids self._prepare_sentinel2_download() # Check if all product ids were found diff --git a/src/actinia_satellite_plugin/satellite_query.py b/src/actinia_satellite_plugin/satellite_query.py index 11aeeb5..860dec2 100644 --- a/src/actinia_satellite_plugin/satellite_query.py +++ b/src/actinia_satellite_plugin/satellite_query.py @@ -71,7 +71,8 @@ class SatelliteSceneEntry(Schema): "cloud_cover": "100.0", "east_lon": 117.334772, "north_lat": -73.8673822679, - "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMC_20170101T003759", + "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMC_" + "20170101T003759", "sensing_time": "2017-01-01T00:37:59.459000Z", "south_lat": -74.8755595194, "total_size": 608562784, @@ -94,7 +95,8 @@ class SatelliteSceneList(Schema): "cloud_cover": "100.0", "east_lon": 117.334772, "north_lat": -73.8673822679, - "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMC_20170101T003759", + "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMC_" + "20170101T003759", "sensing_time": "2017-01-01T00:37:59.459000Z", "south_lat": -74.8755595194, "total_size": 608562784, @@ -104,7 +106,8 @@ 
class SatelliteSceneList(Schema): "cloud_cover": "100.0", "east_lon": 117.355376908, "north_lat": -74.7623823271, - "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMB_20170101T003759", + "scene_id": "S2A_MSIL1C_20170101T003802_N0204_R116_T50CMB_20170" + "101T003759", "sensing_time": "2017-01-01T00:37:59.459000Z", "south_lat": -75.7719656592, "total_size": 604326630, @@ -115,7 +118,8 @@ class SatelliteSceneList(Schema): SCHEMA_LAND_DOC = { "tags": ["Satellite Image Algorithms"], - "description": "Query the Google Landsat archives using time interval, lat/lon coordinates, " + "description": "Query the Google Landsat archives using time interval," + " lat/lon coordinates, " "scene id, spacecraft id and cloud cover. " "All scenes that are located within the time interval and that intersect " "the given latitude/longitude coordinates are returned as a list of " @@ -124,14 +128,16 @@ class SatelliteSceneList(Schema): "parameters": [ { "name": "scene_id", - "description": "The scene id of the landsat scenes that should be searched", + "description": "The scene id of the landsat scenes that should " + "be searched", "required": False, "in": "query", "type": "string", }, { "name": "spacecraft_id", - "description": "The spacecraft id of the landsat scenes that should be searched", + "description": "The spacecraft id of the landsat scenes that " + "should be searched", "required": False, "in": "query", "type": "string", @@ -155,7 +161,8 @@ class SatelliteSceneList(Schema): }, { "name": "lon", - "description": "The longitude coordinate with which the scenes should intersect", + "description": "The longitude coordinate with which the scenes " + "should intersect", "required": False, "in": "query", "type": "number", @@ -163,7 +170,8 @@ class SatelliteSceneList(Schema): }, { "name": "lat", - "description": "The latitude coordinate with which the scenes should intersect", + "description": "The latitude coordinate with which the scenes " + "should intersect", "required": 
False, "in": "query", "type": "number", @@ -193,8 +201,8 @@ class SatelliteSceneList(Schema): SCHEMA_SENT_DOC = { "tags": ["Satellite Image Algorithms"], - "description": "Query the Google Sentinel2 archives using time interval, lat/lon coordinates, " - "scene id and cloud cover. " + "description": "Query the Google Sentinel2 archives using time interval, " + "lat/lon coordinates, scene id and cloud cover. " "All scenes that are located within the time interval and that intersect " "the given latitude/longitude coordinates are returned as a list of " "scene names with associated time stamps. " @@ -202,7 +210,8 @@ class SatelliteSceneList(Schema): "parameters": [ { "name": "scene_id", - "description": "The scene id also named product id of the Sentinel2A scenes that should be searched", + "description": "The scene id also named product id of the " + "Sentinel2A scenes that should be searched", "required": False, "in": "query", "type": "string", @@ -225,7 +234,8 @@ class SatelliteSceneList(Schema): }, { "name": "lon", - "description": "The longitude coordinate with which the scenes should intersect", + "description": "The longitude coordinate with which the scenes " + "should intersect", "required": False, "in": "query", "type": "number", @@ -233,7 +243,8 @@ class SatelliteSceneList(Schema): }, { "name": "lat", - "description": "The latitude coordinate with which the scenes should intersect", + "description": "The latitude coordinate with which the scenes " + "should intersect", "required": False, "in": "query", "type": "number", @@ -262,7 +273,10 @@ class SatelliteSceneList(Schema): class SatelliteQuery(Resource): - """Query the satellites Landsat4-8 and Sentinel2A archives in the google BigQuery database""" + """ + Query the satellites Landsat4-8 and Sentinel2A archives in the google + BigQuery database + """ decorators = [log_api_call, auth.login_required] @@ -368,7 +382,10 @@ class LandsatQuery(SatelliteQuery): @swagger.doc(deepcopy(SCHEMA_LAND_DOC)) def 
get(self): - """Query the Google Landsat archives using time interval, lat/lon coordinates, scene id, spacecraft id and cloud cover.""" + """ + Query the Google Landsat archives using time interval, lat/lon + coordinates, scene id, spacecraft id and cloud cover. + """ return self._get("landsat") @@ -377,5 +394,8 @@ class Sentinel2Query(SatelliteQuery): @swagger.doc(deepcopy(SCHEMA_SENT_DOC)) def get(self): - """Query the Google Sentinel2 archives using time interval, lat/lon coordinates, scene id and cloud cover.""" + """ + Query the Google Sentinel2 archives using time interval, lat/lon + coordinates, scene id and cloud cover. + """ return self._get("sentinel2") diff --git a/tests/test_aws_sentinel_service.py b/tests/test_aws_sentinel_service.py index 0ec74c9..36196b0 100644 --- a/tests/test_aws_sentinel_service.py +++ b/tests/test_aws_sentinel_service.py @@ -6,7 +6,7 @@ try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX -except: +except Exception: from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX @@ -20,7 +20,8 @@ SCENES = { "product_ids": [ "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155", - "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302", + "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_" + "20151207T003302", "S2A_MSIL1C_20170218T143751_N0204_R096_T20PRT_20170218T143931", ], "bands": ["B04", "B08"], @@ -29,7 +30,8 @@ SCENES_ERROR = { "product_ids": [ "S2A_MSIL1C_20170202T090201_N0204_R007_T36TVT_20170202T090155_NOPE", - "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_20151207T003302", + "S2A_OPER_PRD_MSIL1C_PDMC_20151207T031157_R102_V20151207T003302_" + "20151207T003302", "S2A_MSIL1C_20170218T143751_N0204_R096_T20PRT_20170218T143931", ], "bands": ["B04", "B08"], diff --git a/tests/test_landsat.py b/tests/test_landsat.py index 1c02a61..063c18c 100644 --- a/tests/test_landsat.py +++ b/tests/test_landsat.py @@ -5,7 +5,7 @@ try: from 
.test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX -except: +except Exception: from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX diff --git a/tests/test_landsat_timeseries_creation.py b/tests/test_landsat_timeseries_creation.py index ac57778..694fb4c 100644 --- a/tests/test_landsat_timeseries_creation.py +++ b/tests/test_landsat_timeseries_creation.py @@ -6,7 +6,7 @@ try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX -except: +except Exception: from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX @@ -36,7 +36,8 @@ class AsyncLandsatTimeSeriesCreationTestCaseAdmin(ActiniaResourceTestCaseBase): """test the download and creation of sentinel2 time series in a new mapset - TODO: Implement error tests for wrong scene ids and atmospheric correction methods + TODO: Implement error tests for wrong scene ids and atmospheric correction + methods """ def check_remove_test_mapsets(self): @@ -91,7 +92,6 @@ def test_1(self): """Test the import of two scenes with 2 bands in a new mapset A""" self.check_remove_test_mapsets() - ############################################################################ rv = self.server.post( URL_PREFIX + "/locations/LL/mapsets/A/landsat_import", headers=self.admin_auth_header, @@ -129,7 +129,6 @@ def test_wrong_scene_ids(self): """Test the import of two scenes with 2 bands in a new mapset A""" self.check_remove_test_mapsets() - ############################################################################ rv = self.server.post( URL_PREFIX + "/locations/LL/mapsets/A/landsat_import", headers=self.admin_auth_header, diff --git a/tests/test_resource_base.py b/tests/test_resource_base.py index 166e82e..b7d22b1 100644 --- a/tests/test_resource_base.py +++ b/tests/test_resource_base.py @@ -3,10 +3,10 @@ import os import signal import time -from flask_restful import Api -from actinia_core.testsuite import ActiniaTestCaseBase, URL_PREFIX + +from actinia_core.testsuite import 
ActiniaTestCaseBase from actinia_core.core.common.config import global_config -from actinia_core.core.common.app import flask_app, flask_api +from actinia_core.core.common.app import flask_api from actinia_satellite_plugin.endpoints import create_endpoints from actinia_core.endpoints import create_endpoints as create_actinia_endpoints diff --git a/tests/test_satellite_query.py b/tests/test_satellite_query.py index 35385bf..2b9e894 100644 --- a/tests/test_satellite_query.py +++ b/tests/test_satellite_query.py @@ -5,7 +5,7 @@ try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX -except: +except Exception: from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX @@ -20,8 +20,8 @@ class GrassModuleTestCase(ActiniaResourceTestCaseBase): def test_landsat_query_time_interval(self): rv = self.server.get( - URL_PREFIX - + "/landsat_query?start_time=2001-01-01T00:00:00&end_time=2001-01-01T01:00:00", + f"{URL_PREFIX}/landsat_query?start_time=2001-01-01T00:00:00&" + "end_time=2001-01-01T01:00:00", headers=self.user_auth_header, ) data = json_load(rv.data) @@ -40,8 +40,8 @@ def test_landsat_query_time_interval(self): def test_sentinel2_query_time_interval(self): rv = self.server.get( - URL_PREFIX - + "/sentinel2_query?start_time=2017-01-01T00:00:00&end_time=2017-01-01T00:30:00", + f"{URL_PREFIX}/sentinel2_query?start_time=2017-01-01T00:00:00&" + "end_time=2017-01-01T00:30:00", headers=self.user_auth_header, ) # print rv.data @@ -60,8 +60,8 @@ def test_sentinel2_query_time_interval(self): def test_landsat_query_time_interval_lat_lon(self): rv = self.server.get( - URL_PREFIX - + "/landsat_query?start_time=2001-01-01T00:00:00&end_time=2001-01-01T01:00:00&lon=154&lat=51", + f"{URL_PREFIX}/landsat_query?start_time=2001-01-01T00:00:00&" + "end_time=2001-01-01T01:00:00&lon=154&lat=51", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -80,7 +80,7 @@ def test_landsat_query_time_interval_lat_lon(self): def 
test_landsat_query_scene_id(self): rv = self.server.get( - URL_PREFIX + "/landsat_query?scene_id=LE71010632001001EDC01", + f"{URL_PREFIX}/landsat_query?scene_id=LE71010632001001EDC01", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -99,8 +99,8 @@ def test_landsat_query_scene_id(self): def test_sentinel2_query_scene_id(self): rv = self.server.get( - URL_PREFIX - + "/sentinel2_query?scene_id=S2B_MSIL1C_20171010T131249_N0205_R081_T26VPR_20171010T131243", + f"{URL_PREFIX}/sentinel2_query?scene_id=S2B_MSIL1C_20171010T131249" + "_N0205_R081_T26VPR_20171010T131243", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -119,8 +119,8 @@ def test_sentinel2_query_scene_id(self): def test_landsat_query_scene_id_cloud(self): rv = self.server.get( - URL_PREFIX - + "/landsat_query?scene_id=LE71010632001001EDC01&cloud_cover=100.0", + f"{URL_PREFIX}/landsat_query?scene_id=LE71010632001001EDC01&" + "cloud_cover=100.0", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -139,8 +139,8 @@ def test_landsat_query_scene_id_cloud(self): def test_landsat_query_scene_id_cloud_spacecraft(self): rv = self.server.get( - URL_PREFIX - + "/landsat_query?scene_id=LE71010632001001EDC01&cloud_cover=100.0&spacecraft_id=LANDSAT_7", + f"{URL_PREFIX}/landsat_query?scene_id=LE71010632001001EDC01&" + "cloud_cover=100.0&spacecraft_id=LANDSAT_7", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -159,8 +159,9 @@ def test_landsat_query_scene_id_cloud_spacecraft(self): def test_z_landsat_query_cloud_spacecraft(self): rv = self.server.get( - URL_PREFIX - + "/landsat_query?start_time=1983-09-01T01:00:00&end_time=1983-09-01T01:20:00&cloud_cover=0.0&spacecraft_id=LANDSAT_4", + f"{URL_PREFIX}/landsat_query?start_time=1983-09-01T01:00:00&" + "end_time=1983-09-01T01:20:00&cloud_cover=0.0&spacecraft_id=" + "LANDSAT_4", headers=self.user_auth_header, ) pprint(json_load(rv.data)) diff --git a/tests/test_sentinel2_ndvi.py b/tests/test_sentinel2_ndvi.py index 
b840395..b3b22c5 100644 --- a/tests/test_sentinel2_ndvi.py +++ b/tests/test_sentinel2_ndvi.py @@ -5,7 +5,7 @@ try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX -except: +except Exception: from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX @@ -20,12 +20,13 @@ class SentinelProcessingTestCase(ActiniaResourceTestCaseBase): def test_ndvi_computation_small(self): # Large scene - # rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204', + # rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_' + # '20170216T102101_N0204_R065_T32UPV_20170216T102204', # headers=self.admin_auth_header) # Small scene rv = self.server.post( - URL_PREFIX - + "/sentinel2_process/ndvi/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + f"{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20170212T104141_" + "N0204_R008_T31TGJ_20170212T104138", headers=self.admin_auth_header, ) pprint(json_load(rv.data)) @@ -48,12 +49,13 @@ def test_ndvi_computation_small(self): def incative_test_ndvi_computation_small_gcs(self): # Large scene - # rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204', + # rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_' + # '20170216T102101_N0204_R065_T32UPV_20170216T102204', # headers=self.admin_auth_header) # Small scene rv = self.server.post( - URL_PREFIX - + "/sentinel2_process_gcs/ndvi/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", + f"{URL_PREFIX}/sentinel2_process_gcs/ndvi/S2A_MSIL1C_" + "20170212T104141_N0204_R008_T31TGJ_20170212T104138", headers=self.admin_auth_header, ) pprint(json_load(rv.data)) @@ -76,12 +78,13 @@ def incative_test_ndvi_computation_small_gcs(self): def incative_test_ndvi_computation_big(self): # Small scene - # rv = self.server.post(URL_PREFIX + 
'/sentinel2_process/S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138', + # rv = self.server.post(URL_PREFIX + '/sentinel2_process/S2A_MSIL1C_' + # '20170212T104141_N0204_R008_T31TGJ_20170212T104138', # headers=self.admin_auth_header) # Large scene rv = self.server.post( - URL_PREFIX - + "/sentinel2_process/ndvi/S2A_MSIL1C_20170216T102101_N0204_R065_T32UPV_20170216T102204", + f"{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20170216T102101_" + "N0204_R065_T32UPV_20170216T102204", headers=self.admin_auth_header, ) pprint(json_load(rv.data)) @@ -103,8 +106,8 @@ def incative_test_ndvi_computation_big(self): def incative_test_computation_error_1(self): rv = self.server.post( - URL_PREFIX - + "/sentinel2_process/ndvi/S2A_MSIL1C_20170212T1041_BLABLA_8_T31TGJ_20170212T104138.SAFE", + f"{URL_PREFIX}/sentinel2_process/ndvi/S2A_MSIL1C_20170212T1041_" + "BLABLA_8_T31TGJ_20170212T104138.SAFE", headers=self.admin_auth_header, ) pprint(json_load(rv.data)) @@ -123,8 +126,8 @@ def incative_test_computation_error_1(self): def incative_test_computation_error_2(self): rv = self.server.post( - URL_PREFIX - + "/sentinel2_process_gcs/ndvi/S2A_MSIL1C_20170212T1041_BLABLA_8_T31TGJ_20170212T104138.SAFE", + f"{URL_PREFIX}/sentinel2_process_gcs/ndvi/S2A_MSIL1C_20170212T1041" + "_BLABLA_8_T31TGJ_20170212T104138.SAFE", headers=self.admin_auth_header, ) pprint(json_load(rv.data)) diff --git a/tests/test_sentinel2_timeseries_creation.py b/tests/test_sentinel2_timeseries_creation.py index 5198fbc..33aefdf 100644 --- a/tests/test_sentinel2_timeseries_creation.py +++ b/tests/test_sentinel2_timeseries_creation.py @@ -6,7 +6,7 @@ try: from .test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX -except: +except Exception: from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX @@ -26,86 +26,6 @@ "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613", ], } -# -# PRODUCT_IDS = {"bands":["B04", "B08"], -# "strds":["S2A_B04", "S2A_B08"], -# 
"product_ids":["S2B_MSIL1C_20170929T102009_N0205_R065_T32UPC_20170929T102007", -# "S2A_MSIL1C_20170904T102021_N0205_R065_T32UPC_20170904T102511", -# "S2B_MSIL1C_20170830T102019_N0205_R065_T32UPC_20170830T102531", -# "S2A_MSIL1C_20170825T102021_N0205_R065_T32UPC_20170825T102114", -# "S2B_MSIL1C_20170731T102019_N0205_R065_T32UPC_20170731T102348", -# "S2B_MSIL1C_20170711T102029_N0205_R065_T32UPC_20170711T102309", -# "S2A_MSIL1C_20170706T102021_N0205_R065_T32UPC_20170706T102301"]} -# -# # POST http://104.199.28.149/locations/LL/mapsets/S2A/sentinel2_import -# # with the following JSON body: -# {"bands":["B04", "B08"], -# "strds":["S2A_B04", "S2A_B08"], -# "product_ids":[ -# "S2A_MSIL1C_20170208T092131_N0204_R093_T35TLF_20170208T092143", -# "S2A_MSIL1C_20170208T092131_N0204_R093_T35TMJ_20170208T092143", -# "S2A_MSIL1C_20170208T092131_N0204_R093_T35UMB_20170208T092143", -# "S2A_MSIL1C_20170208T092131_N0204_R093_T35UPB_20170208T092143", -# "S2A_MSIL1C_20170208T092131_N0204_R093_T35UPR_20170208T092143", -# "S2A_MSIL1C_20170208T092131_N0204_R093_T35VMC_20170208T092143", -# "S2A_MSIL1C_20170208T092131_N0204_R093_T35VNC_20170208T092143", -# "S2A_MSIL1C_20170208T092131_N0204_R093_T36UUG_20170208T092143", -# "S2A_MSIL1C_20170208T110211_N0204_R094_T31TCJ_20170208T110245", -# "S2A_MSIL1C_20170208T110211_N0204_R094_T31TDM_20170208T110245", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T35TQN_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T35UQP_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36TVT_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36TWT_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36TXS_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36TXT_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UUA_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UUU_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UVA_20170209T085559", -# 
"S2A_MSIL1C_20170209T085111_N0204_R107_T36UVB_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UVB_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UVC_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UVD_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UVU_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UVV_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UWA_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UWA_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UWB_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UWB_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UWD_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UWU_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UWV_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UXA_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UXA_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UXB_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UXU_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UXV_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UYA_20170209T085110", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T36UYA_20170209T085559", -# "S2A_MSIL1C_20170209T085111_N0204_R107_T37UEB_20170209T085110", -# "S2A_MSIL1C_20170209T103151_N0204_R108_T31TEJ_20170209T103304", -# "S2A_MSIL1C_20170209T103151_N0204_R108_T32UPV_20170209T103304", -# "S2A_MSIL1C_20170209T103151_N0204_R108_T33VWD_20170209T103304", -# "S2A_MSIL1C_20170210T082051_N0204_R121_T37TCL_20170210T082239", -# "S2A_MSIL1C_20170210T100131_N0204_R122_T33TXL_20170210T100132", -# "S2A_MSIL1C_20170210T100131_N0204_R122_T33UVT_20170210T100132", -# "S2A_MSIL1C_20170210T100131_N0204_R122_T33UVU_20170210T100132", -# "S2A_MSIL1C_20170210T100131_N0204_R122_T34UEG_20170210T100132", -# 
"S2A_MSIL1C_20170210T100131_N0204_R122_T34VEH_20170210T100132", -# "S2A_MSIL1C_20170211T093111_N0204_R136_T34TGQ_20170211T093237", -# "S2A_MSIL1C_20170211T093111_N0204_R136_T34TGS_20170211T093237", -# "S2A_MSIL1C_20170211T093111_N0204_R136_T34UDU_20170211T093237", -# "S2A_MSIL1C_20170211T093111_N0204_R136_T34UEV_20170211T093237", -# "S2A_MSIL1C_20170211T111201_N0204_R137_T30TXR_20170211T111640", -# "S2A_MSIL1C_20170211T111201_N0204_R137_T30UXB_20170211T111155", -# "S2A_MSIL1C_20170212T090101_N0204_R007_T36UXC_20170212T090222", -# "S2A_MSIL1C_20170212T090101_N0204_R007_T37UCA_20170212T090222", -# "S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138", -# "S2A_MSIL1C_20170213T101121_N0204_R022_T32UQD_20170213T101119", -# "S2A_MSIL1C_20170213T101121_N0204_R022_T33UUR_20170213T101553", -# "S2A_MSIL1C_20170213T101121_N0204_R022_T33UUS_20170213T101553", -# "S2A_MSIL1C_20170213T101121_N0204_R022_T33UUU_20170213T101119", -# "S2A_MSIL1C_20170213T101121_N0204_R022_T33UVS_20170213T101553", -# "S2A_MSIL1C_20170213T101121_N0204_R022_T33UVT_20170213T101553"]} # Sentinel2A time series of Germany GERMANY = { @@ -248,7 +168,8 @@ class AsyncSentielTimeSeriesCreationTestCaseAdmin(ActiniaResourceTestCaseBase): - """test the download and creation of sentinel2 time series in a new mapset""" + """Test the download and creation of sentinel2 time series in a new mapset + """ def check_remove_test_mapsets(self): """ @@ -302,7 +223,6 @@ def test_1(self): """Test the import of two scenes with 2 bands in a new mapset A""" self.check_remove_test_mapsets() - ############################################################################ rv = self.server.post( URL_PREFIX + "/locations/LL/mapsets/A/sentinel2_import", headers=self.admin_auth_header,